/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

22#include "config.h"
670ee920 23#include "system.h"
ca695ac9 24#include "machmode.h"
bbf6f052
RK
25#include "rtl.h"
26#include "tree.h"
ca695ac9 27#include "obstack.h"
bbf6f052 28#include "flags.h"
bf76bb5a 29#include "regs.h"
4ed67205 30#include "hard-reg-set.h"
3d195391 31#include "except.h"
bbf6f052
RK
32#include "function.h"
33#include "insn-flags.h"
34#include "insn-codes.h"
bbf6f052 35#include "insn-config.h"
d6f4ec51
KG
36/* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37#include "expr.h"
bbf6f052
RK
38#include "recog.h"
39#include "output.h"
bbf6f052 40#include "typeclass.h"
ca55abae 41#include "defaults.h"
10f0ad3d 42#include "toplev.h"
bbf6f052
RK
43
44#define CEIL(x,y) (((x) + (y) - 1) / (y))
45
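/* Illustrative example: CEIL rounds an integer division upward, so
   CEIL (7, 4) == 2 and CEIL (10, 4) == 3.  convert_move below uses it
   as CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD) to count the words
   needed to hold a multiword value.  */
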
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* Don't check memory usage, since code is being emitted to check memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;

/* Postincrements that still need to be expanded.  */
static rtx pending_chain;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int len;
  int offset;
  int reverse;
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};

extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;

static rtx get_push_address	PROTO ((int));

static rtx enqueue_insn		PROTO((rtx, rtx));
static void init_queue		PROTO((void));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1	PROTO((rtx (*) (rtx, ...), enum machine_mode,
				       struct move_by_pieces *));
static void clear_by_pieces	PROTO((rtx, int, int));
static void clear_by_pieces_1	PROTO((rtx (*) (rtx, ...), enum machine_mode,
				       struct clear_by_pieces *));
static int is_zeros_p		PROTO((tree));
static int mostly_zeros_p	PROTO((tree));
static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
					    tree, tree, int));
static void store_constructor	PROTO((tree, rtx, int));
static rtx store_field		PROTO((rtx, int, int, enum machine_mode, tree,
				       enum machine_mode, int, int,
				       int, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PROTO((enum expand_modifier));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p		PROTO((rtx, tree, int));
static int fixed_type_p		PROTO((tree));
static rtx var_rtx		PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant	PROTO((tree, tree *));
static tree c_strlen		PROTO((tree));
static rtx get_memory_rtx	PROTO((tree));
static rtx expand_builtin	PROTO((tree, rtx, rtx,
				       enum machine_mode, int));
static int apply_args_size	PROTO((void));
static int apply_result_size	PROTO((void));
static rtx result_vector	PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply	PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment	PROTO((tree, int, int));
static void preexpand_calls	PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_for_compare	PROTO((rtx, rtx, rtx));
static rtx compare		PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag	PROTO((tree, rtx, enum machine_mode, int));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

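/* Illustrative example: after init_expr_once below has run, a nonzero
   direct_load[(int) SImode] records that some hard register was found
   for which recog accepts a pattern of the form

	(set (reg:SI r) (mem:SI <stack or frame pointer>))

   so SImode fields can be loaded from memory directly rather than
   through a wider intermediate mode.  */
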
/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns \
                                       (SIZE, ALIGN) < MOVE_RATIO)
#endif

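/* Illustrative example (assuming a 32-bit target where MOVE_MAX is 4
   and MOVE_RATIO takes its default of 15): a constant 16-byte copy
   needs move_by_pieces_ninsns (16, 4) == 4 SImode moves, so
   MOVE_BY_PIECES_P holds and emit_block_move expands it inline; a
   copy needing 15 or more piecewise moves instead falls back to a
   movstr pattern or a library call.  */
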
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  char *free_point;

  start_sequence ();

  /* Since we are on the permanent obstack, we must be sure we save this
     spot AFTER we call start_sequence, since it will reuse the rtl it
     makes.  */
  free_point = (char *) oballoc (0);

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
  obfree (free_point);
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  p->pending_chain = pending_chain;
  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_chain = NULL_RTX;
  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_chain = p->pending_chain;
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var),
				  var, NULL_RTX, NULL_RTX, body,
				  pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_COPY_ATTRIBUTES (new, x);
	  MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	{
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	}
      else
	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
\f
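/* Usage sketch (illustrative only): code that expands a postincrement
   queues the side effect with enqueue_insn, passes every rtx that
   might be a QUEUED through protect_from_queue before placing it in
   an instruction, and eventually flushes the queue:

	x = protect_from_queue (x, 1);
	y = protect_from_queue (y, 0);
	emit_insn (gen_move_insn (x, y));
	...
	emit_queue ();	(perform the pending increments)

   This mirrors the protect_from_queue calls at the top of
   convert_move and emit_block_move below.  */
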
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */		/* SPEE 900220 */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}

1272 from converting X to mode MODE.
1273 Both X and MODE may be floating, or both integer.
1274 UNSIGNEDP is nonzero if X is an unsigned value.
1275 This can be done by referring to a part of X in place
5d901c31
RS
1276 or by copying to a new temporary with conversion.
1277
1278 This function *must not* call protect_from_queue
1279 except when putting X into an insn (in which case convert_move does it). */
bbf6f052
RK
1280
1281rtx
1282convert_to_mode (mode, x, unsignedp)
1283 enum machine_mode mode;
1284 rtx x;
1285 int unsignedp;
5ffe63ed
RS
1286{
1287 return convert_modes (mode, VOIDmode, x, unsignedp);
1288}
1289
1290/* Return an rtx for a value that would result
1291 from converting X from mode OLDMODE to mode MODE.
1292 Both modes may be floating, or both integer.
1293 UNSIGNEDP is nonzero if X is an unsigned value.
1294
1295 This can be done by referring to a part of X in place
1296 or by copying to a new temporary with conversion.
1297
1298 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1299
1300 This function *must not* call protect_from_queue
1301 except when putting X into an insn (in which case convert_move does it). */
1302
1303rtx
1304convert_modes (mode, oldmode, x, unsignedp)
1305 enum machine_mode mode, oldmode;
1306 rtx x;
1307 int unsignedp;
bbf6f052
RK
1308{
1309 register rtx temp;
5ffe63ed 1310
1499e0a8
RK
1311 /* If FROM is a SUBREG that indicates that we have already done at least
1312 the required extension, strip it. */
1313
1314 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1315 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1316 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1317 x = gen_lowpart (mode, x);
bbf6f052 1318
64791b18
RK
1319 if (GET_MODE (x) != VOIDmode)
1320 oldmode = GET_MODE (x);
1321
5ffe63ed 1322 if (mode == oldmode)
bbf6f052
RK
1323 return x;
1324
1325 /* There is one case that we must handle specially: If we are converting
906c4e36 1326 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
bbf6f052
RK
1327 we are to interpret the constant as unsigned, gen_lowpart will do
1328 the wrong if the constant appears negative. What we want to do is
1329 make the high-order word of the constant zero, not all ones. */
1330
1331 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
906c4e36 1332 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
bbf6f052 1333 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
96ff8a16
ILT
1334 {
1335 HOST_WIDE_INT val = INTVAL (x);
1336
1337 if (oldmode != VOIDmode
1338 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1339 {
1340 int width = GET_MODE_BITSIZE (oldmode);
1341
1342 /* We need to zero extend VAL. */
1343 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1344 }
1345
1346 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1347 }
bbf6f052
RK
1348
1349 /* We can do this with a gen_lowpart if both desired and current modes
1350 are integer, and this is either a constant integer, a register, or a
ba2e110c
RK
1351 non-volatile MEM. Except for the constant case where MODE is no
1352 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
bbf6f052 1353
ba2e110c
RK
1354 if ((GET_CODE (x) == CONST_INT
1355 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052 1356 || (GET_MODE_CLASS (mode) == MODE_INT
5ffe63ed 1357 && GET_MODE_CLASS (oldmode) == MODE_INT
bbf6f052 1358 && (GET_CODE (x) == CONST_DOUBLE
5ffe63ed 1359 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
d57c66da
JW
1360 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1361 && direct_load[(int) mode])
2bf29316
JW
1362 || (GET_CODE (x) == REG
1363 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1364 GET_MODE_BITSIZE (GET_MODE (x)))))))))
ba2e110c
RK
1365 {
1366 /* ?? If we don't know OLDMODE, we have to assume here that
1367 X does not need sign- or zero-extension. This may not be
1368 the case, but it's the best we can do. */
1369 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1370 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1371 {
1372 HOST_WIDE_INT val = INTVAL (x);
1373 int width = GET_MODE_BITSIZE (oldmode);
1374
1375 /* We must sign or zero-extend in this case. Start by
1376 zero-extending, then sign extend if we need to. */
1377 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1378 if (! unsignedp
1379 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1380 val |= (HOST_WIDE_INT) (-1) << width;
1381
1382 return GEN_INT (val);
1383 }
1384
1385 return gen_lowpart (mode, x);
1386 }
bbf6f052
RK
1387
1388 temp = gen_reg_rtx (mode);
1389 convert_move (temp, x, unsignedp);
1390 return temp;
1391}
1392\f
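/* Worked example: convert_modes (HImode, QImode, GEN_INT (-1), 1)
   reaches the CONST_INT case above with OLDMODE narrower than MODE;
   VAL is masked to the low 8 bits, giving 0xff, and since UNSIGNEDP
   is set no sign extension follows, so GEN_INT (0xff) is returned
   rather than a lowpart of -1.  */
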
/* This macro is used to determine the largest unit size that
   move_by_pieces can use.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES MOVE_MAX
#endif

/* Some architectures do not have complete pre/post increment/decrement
   instruction sets, or only move some modes efficiently.  These macros
   allow us to fine tune move_by_pieces for these targets.  */

#ifndef USE_LOAD_POST_INCREMENT
#define USE_LOAD_POST_INCREMENT(MODE) HAVE_POST_INCREMENT
#endif

#ifndef USE_LOAD_PRE_DECREMENT
#define USE_LOAD_PRE_DECREMENT(MODE) HAVE_PRE_DECREMENT
#endif

#ifndef USE_STORE_POST_INCREMENT
#define USE_STORE_POST_INCREMENT(MODE) HAVE_POST_INCREMENT
#endif

#ifndef USE_STORE_PRE_DECREMENT
#define USE_STORE_PRE_DECREMENT(MODE) HAVE_PRE_DECREMENT
#endif

/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}

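/* Illustrative trace: moving len == 7 with align == 4 on a target
   whose widest piecewise move is SImode emits one SImode move, then
   one HImode move, then one QImode move; each pass of the while loop
   above hands the widest mode still smaller than max_size to
   move_by_pieces_1 until data.len reaches zero.  */
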
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}

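/* Worked example: with MOVE_MAX == 4 and align == 4,
   move_by_pieces_ninsns (7, 4) counts 7/4 == 1 SImode move (l becomes
   3), then 3/2 == 1 HImode move (l becomes 1), then one QImode move,
   returning 3.  */
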
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx_MEM (mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

      from1
	= (data->autinc_from
	   ? gen_rtx_MEM (mode, data->from_addr)
	   : copy_rtx (change_address (data->from, mode,
				       plus_constant (data->from_addr,
						      data->offset))));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));

      emit_insn ((*genfun) (to1, from1));
      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
1627/* Emit code to move a block Y to a block X.
1628 This may be done with string-move instructions,
1629 with multiple scalar move instructions, or with a library call.
1630
1631 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1632 with mode BLKmode.
1633 SIZE is an rtx that says how long they are.
1634 ALIGN is the maximum alignment we can assume they have,
e9a25f70 1635 measured in bytes.
bbf6f052 1636
e9a25f70
JL
1637 Return the address of the new block, if memcpy is called and returns it,
1638 0 otherwise. */
1639
1640rtx
bbf6f052
RK
1641emit_block_move (x, y, size, align)
1642 rtx x, y;
1643 rtx size;
1644 int align;
1645{
e9a25f70 1646 rtx retval = 0;
52cf7115
JL
1647#ifdef TARGET_MEM_FUNCTIONS
1648 static tree fn;
1649 tree call_expr, arg_list;
1650#endif
e9a25f70 1651
bbf6f052
RK
1652 if (GET_MODE (x) != BLKmode)
1653 abort ();
1654
1655 if (GET_MODE (y) != BLKmode)
1656 abort ();
1657
1658 x = protect_from_queue (x, 1);
1659 y = protect_from_queue (y, 0);
5d901c31 1660 size = protect_from_queue (size, 0);
bbf6f052
RK
1661
1662 if (GET_CODE (x) != MEM)
1663 abort ();
1664 if (GET_CODE (y) != MEM)
1665 abort ();
1666 if (size == 0)
1667 abort ();
1668
fbe1758d 1669 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
bbf6f052
RK
1670 move_by_pieces (x, y, INTVAL (size), align);
1671 else
1672 {
1673 /* Try the most limited insn first, because there's no point
1674 including more than one in the machine description unless
1675 the more limited one has some advantage. */
266007a7 1676
0bba3f6f 1677 rtx opalign = GEN_INT (align);
266007a7
RK
1678 enum machine_mode mode;
1679
1680 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1681 mode = GET_MODE_WIDER_MODE (mode))
bbf6f052 1682 {
266007a7 1683 enum insn_code code = movstr_optab[(int) mode];
266007a7
RK
1684
1685 if (code != CODE_FOR_nothing
803090c4
RK
1686 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1687 here because if SIZE is less than the mode mask, as it is
8008b228 1688 returned by the macro, it will definitely be less than the
803090c4 1689 actual mode mask. */
8ca00751
RK
1690 && ((GET_CODE (size) == CONST_INT
1691 && ((unsigned HOST_WIDE_INT) INTVAL (size)
e5e809f4 1692 <= (GET_MODE_MASK (mode) >> 1)))
8ca00751 1693 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
0bba3f6f
RK
1694 && (insn_operand_predicate[(int) code][0] == 0
1695 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1696 && (insn_operand_predicate[(int) code][1] == 0
1697 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1698 && (insn_operand_predicate[(int) code][3] == 0
1699 || (*insn_operand_predicate[(int) code][3]) (opalign,
1700 VOIDmode)))
bbf6f052 1701 {
1ba1e2a8 1702 rtx op2;
266007a7
RK
1703 rtx last = get_last_insn ();
1704 rtx pat;
1705
1ba1e2a8 1706 op2 = convert_to_mode (mode, size, 1);
0bba3f6f
RK
1707 if (insn_operand_predicate[(int) code][2] != 0
1708 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
266007a7
RK
1709 op2 = copy_to_mode_reg (mode, op2);
1710
1711 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1712 if (pat)
1713 {
1714 emit_insn (pat);
e9a25f70 1715 return 0;
266007a7
RK
1716 }
1717 else
1718 delete_insns_since (last);
bbf6f052
RK
1719 }
1720 }
bbf6f052
RK
1721
1722#ifdef TARGET_MEM_FUNCTIONS
52cf7115
JL
1723 /* It is incorrect to use the libcall calling conventions to call
1724 memcpy in this context.
1725
1726 This could be a user call to memcpy and the user may wish to
1727 examine the return value from memcpy.
1728
1729 For targets where libcalls and normal calls have different conventions
1730 for returning pointers, we could end up generating incorrect code.
1731
1732 So instead of using a libcall sequence we build up a suitable
1733 CALL_EXPR and expand the call in the normal fashion. */
1734 if (fn == NULL_TREE)
1735 {
1736 tree fntype;
1737
1738 /* This was copied from except.c, I don't know if all this is
1739 necessary in this context or not. */
1740 fn = get_identifier ("memcpy");
1741 push_obstacks_nochange ();
1742 end_temporary_allocation ();
1743 fntype = build_pointer_type (void_type_node);
1744 fntype = build_function_type (fntype, NULL_TREE);
1745 fn = build_decl (FUNCTION_DECL, fn, fntype);
1746 DECL_EXTERNAL (fn) = 1;
1747 TREE_PUBLIC (fn) = 1;
1748 DECL_ARTIFICIAL (fn) = 1;
1749 make_decl_rtl (fn, NULL_PTR, 1);
1750 assemble_external (fn);
1751 pop_obstacks ();
1752 }
1753
1754 /* We need to make an argument list for the function call.
1755
1756 memcpy has three arguments, the first two are void * addresses and
1757 the last is a size_t byte count for the copy. */
1758 arg_list
1759 = build_tree_list (NULL_TREE,
1760 make_tree (build_pointer_type (void_type_node),
1761 XEXP (x, 0)));
1762 TREE_CHAIN (arg_list)
1763 = build_tree_list (NULL_TREE,
1764 make_tree (build_pointer_type (void_type_node),
1765 XEXP (y, 0)));
1766 TREE_CHAIN (TREE_CHAIN (arg_list))
1767 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1768 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1769
1770 /* Now we have to build up the CALL_EXPR itself. */
1771 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1772 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1773 call_expr, arg_list, NULL_TREE);
1774 TREE_SIDE_EFFECTS (call_expr) = 1;
1775
1776 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
bbf6f052 1777#else
d562e42e 1778 emit_library_call (bcopy_libfunc, 0,
bbf6f052
RK
1779 VOIDmode, 3, XEXP (y, 0), Pmode,
1780 XEXP (x, 0), Pmode,
3b6f75e2
JW
1781 convert_to_mode (TYPE_MODE (integer_type_node), size,
1782 TREE_UNSIGNED (integer_type_node)),
1783 TYPE_MODE (integer_type_node));
bbf6f052
RK
1784#endif
1785 }
e9a25f70
JL
1786
1787 return retval;
bbf6f052
RK
1788}
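
/* Illustrative aside, not part of expr.c: the CALL_EXPR built above
   expands like an ordinary C-level call, so the caller really can use
   memcpy's return value, which a bare libcall sequence would not
   guarantee on every target.  A minimal stand-alone sketch:  */
#include <stdio.h>
#include <string.h>

int
main (void)
{
  char src[6] = "hello", dst[6];
  void *ret = memcpy (dst, src, sizeof src);  /* return value is usable */
  printf ("%s %d\n", dst, ret == (void *) dst);
  return 0;
}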
1789\f
1790/* Copy all or part of a value X into registers starting at REGNO.
1791 The number of registers to be filled is NREGS. */
1792
1793void
1794move_block_to_reg (regno, x, nregs, mode)
1795 int regno;
1796 rtx x;
1797 int nregs;
1798 enum machine_mode mode;
1799{
1800 int i;
381127e8
RL
1801#ifdef HAVE_load_multiple
1802 rtx pat;
1803 rtx last;
1804#endif
bbf6f052 1805
72bb9717
RK
1806 if (nregs == 0)
1807 return;
1808
bbf6f052
RK
1809 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1810 x = validize_mem (force_const_mem (mode, x));
1811
1812 /* See if the machine can do this with a load multiple insn. */
1813#ifdef HAVE_load_multiple
c3a02afe 1814 if (HAVE_load_multiple)
bbf6f052 1815 {
c3a02afe 1816 last = get_last_insn ();
38a448ca 1817 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
c3a02afe
RK
1818 GEN_INT (nregs));
1819 if (pat)
1820 {
1821 emit_insn (pat);
1822 return;
1823 }
1824 else
1825 delete_insns_since (last);
bbf6f052 1826 }
bbf6f052
RK
1827#endif
1828
1829 for (i = 0; i < nregs; i++)
38a448ca 1830 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
bbf6f052
RK
1831 operand_subword_force (x, i, mode));
1832}
1833
1834/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
0040593d
JW
1835 The number of registers to be filled is NREGS. SIZE indicates the number
1836 of bytes in the object X. */
1837
bbf6f052
RK
1838
1839void
0040593d 1840move_block_from_reg (regno, x, nregs, size)
bbf6f052
RK
1841 int regno;
1842 rtx x;
1843 int nregs;
0040593d 1844 int size;
bbf6f052
RK
1845{
1846 int i;
381127e8
RL
1847#ifdef HAVE_store_multiple
1848 rtx pat;
1849 rtx last;
1850#endif
58a32c5c 1851 enum machine_mode mode;
bbf6f052 1852
58a32c5c
DE
1853 /* If SIZE is that of a mode no bigger than a word, just use that
1854 mode's store operation. */
1855 if (size <= UNITS_PER_WORD
1856 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1857 {
1858 emit_move_insn (change_address (x, mode, NULL),
38a448ca 1859 gen_rtx_REG (mode, regno));
58a32c5c
DE
1860 return;
1861 }
1862
0040593d 1863 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
58a32c5c
DE
1864 to the left before storing to memory. Note that the previous test
1865 doesn't handle all cases (e.g. SIZE == 3). */
0040593d
JW
1866 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1867 {
1868 rtx tem = operand_subword (x, 0, 1, BLKmode);
1869 rtx shift;
1870
1871 if (tem == 0)
1872 abort ();
1873
1874 shift = expand_shift (LSHIFT_EXPR, word_mode,
38a448ca 1875 gen_rtx_REG (word_mode, regno),
0040593d
JW
1876 build_int_2 ((UNITS_PER_WORD - size)
1877 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1878 emit_move_insn (tem, shift);
1879 return;
1880 }
1881
bbf6f052
RK
1882 /* See if the machine can do this with a store multiple insn. */
1883#ifdef HAVE_store_multiple
c3a02afe 1884 if (HAVE_store_multiple)
bbf6f052 1885 {
c3a02afe 1886 last = get_last_insn ();
38a448ca 1887 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
c3a02afe
RK
1888 GEN_INT (nregs));
1889 if (pat)
1890 {
1891 emit_insn (pat);
1892 return;
1893 }
1894 else
1895 delete_insns_since (last);
bbf6f052 1896 }
bbf6f052
RK
1897#endif
1898
1899 for (i = 0; i < nregs; i++)
1900 {
1901 rtx tem = operand_subword (x, i, 1, BLKmode);
1902
1903 if (tem == 0)
1904 abort ();
1905
38a448ca 1906 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
bbf6f052
RK
1907 }
1908}
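
/* Illustrative aside, not part of expr.c: for SIZE == 3 on a 32-bit
   big-endian target, the expand_shift call above computes
   (UNITS_PER_WORD - size) * BITS_PER_UNIT == 8 and left-justifies the
   value in its word before the store.  All values here are
   hypothetical; only the arithmetic mirrors the code.  */
#include <stdio.h>

#define UNITS_PER_WORD 4
#define BITS_PER_UNIT 8

int
main (void)
{
  int size = 3;                          /* bytes in the BLKmode value */
  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
  unsigned int reg = 0x00123456;         /* right-justified in a register */
  unsigned int word = reg << shift;      /* 0x12345600 as stored to memory */

  printf ("shift = %d, stored word = 0x%08x\n", shift, word);
  return 0;
}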
1909
aac5cc16
RH
1910/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1911 registers represented by a PARALLEL. SSIZE represents the total size of
1912 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
 1913 SRC in bytes. */
 1914/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1915 the balance will be in what would be the low-order memory addresses, i.e.
1916 left justified for big endian, right justified for little endian. This
1917 happens to be true for the targets currently using this support. If this
1918 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1919 would be needed. */
fffa9c1d
JW
1920
1921void
aac5cc16
RH
1922emit_group_load (dst, orig_src, ssize, align)
1923 rtx dst, orig_src;
1924 int align, ssize;
fffa9c1d 1925{
aac5cc16
RH
1926 rtx *tmps, src;
1927 int start, i;
fffa9c1d 1928
aac5cc16 1929 if (GET_CODE (dst) != PARALLEL)
fffa9c1d
JW
1930 abort ();
1931
1932 /* Check for a NULL entry, used to indicate that the parameter goes
1933 both on the stack and in registers. */
aac5cc16
RH
1934 if (XEXP (XVECEXP (dst, 0, 0), 0))
1935 start = 0;
fffa9c1d 1936 else
aac5cc16
RH
1937 start = 1;
1938
1939 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1940
1941 /* If we won't be loading directly from memory, protect the real source
1942 from strange tricks we might play. */
1943 src = orig_src;
1944 if (GET_CODE (src) != MEM)
1945 {
1946 src = gen_reg_rtx (GET_MODE (orig_src));
1947 emit_move_insn (src, orig_src);
1948 }
1949
1950 /* Process the pieces. */
1951 for (i = start; i < XVECLEN (dst, 0); i++)
1952 {
1953 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1954 int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1955 int bytelen = GET_MODE_SIZE (mode);
1956 int shift = 0;
1957
1958 /* Handle trailing fragments that run over the size of the struct. */
1959 if (ssize >= 0 && bytepos + bytelen > ssize)
1960 {
1961 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1962 bytelen = ssize - bytepos;
1963 if (bytelen <= 0)
1964 abort();
1965 }
1966
1967 /* Optimize the access just a bit. */
1968 if (GET_CODE (src) == MEM
1969 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1970 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1971 && bytelen == GET_MODE_SIZE (mode))
1972 {
1973 tmps[i] = gen_reg_rtx (mode);
1974 emit_move_insn (tmps[i],
1975 change_address (src, mode,
1976 plus_constant (XEXP (src, 0),
1977 bytepos)));
fffa9c1d
JW
1978 }
1979 else
aac5cc16
RH
1980 {
1981 tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
1982 bytepos*BITS_PER_UNIT, 1, NULL_RTX,
1983 mode, mode, align, ssize);
1984 }
fffa9c1d 1985
aac5cc16
RH
1986 if (BYTES_BIG_ENDIAN && shift)
1987 {
1988 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1989 tmps[i], 0, OPTAB_WIDEN);
1990 }
fffa9c1d 1991 }
aac5cc16
RH
1992 emit_queue();
1993
1994 /* Copy the extracted pieces into the proper (probable) hard regs. */
1995 for (i = start; i < XVECLEN (dst, 0); i++)
1996 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
fffa9c1d
JW
1997}
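
/* Illustrative aside, not part of expr.c: the PARALLEL is in effect a
   list of (piece, byte offset) pairs, so the load above amounts to the
   following walk over plain memory.  Piece sizes and offsets are
   hypothetical; the printed values depend on host endianness.  */
#include <stdio.h>
#include <string.h>

struct piece { int bytepos; int bytelen; };

int
main (void)
{
  unsigned char src[12] = "irix6 abi";
  struct piece p[2] = { { 0, 8 }, { 8, 4 } };   /* two register pieces */
  unsigned long long tmps[2] = { 0, 0 };
  int i;

  for (i = 0; i < 2; i++)                       /* extract each piece */
    memcpy (&tmps[i], src + p[i].bytepos, p[i].bytelen);
  printf ("piece 0 = %#llx, piece 1 = %#llx\n", tmps[0], tmps[1]);
  return 0;
}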
1998
aac5cc16
RH
1999/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2000 registers represented by a PARALLEL. SSIZE represents the total size of
2001 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
fffa9c1d
JW
2002
2003void
aac5cc16
RH
2004emit_group_store (orig_dst, src, ssize, align)
2005 rtx orig_dst, src;
2006 int ssize, align;
fffa9c1d 2007{
aac5cc16
RH
2008 rtx *tmps, dst;
2009 int start, i;
fffa9c1d 2010
aac5cc16 2011 if (GET_CODE (src) != PARALLEL)
fffa9c1d
JW
2012 abort ();
2013
2014 /* Check for a NULL entry, used to indicate that the parameter goes
2015 both on the stack and in registers. */
aac5cc16
RH
2016 if (XEXP (XVECEXP (src, 0, 0), 0))
2017 start = 0;
fffa9c1d 2018 else
aac5cc16
RH
2019 start = 1;
2020
2021 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
fffa9c1d 2022
aac5cc16
RH
2023 /* Copy the (probable) hard regs into pseudos. */
2024 for (i = start; i < XVECLEN (src, 0); i++)
fffa9c1d 2025 {
aac5cc16
RH
2026 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2027 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2028 emit_move_insn (tmps[i], reg);
2029 }
2030 emit_queue();
fffa9c1d 2031
aac5cc16
RH
2032 /* If we won't be storing directly into memory, protect the real destination
2033 from strange tricks we might play. */
2034 dst = orig_dst;
10a9f2be
JW
2035 if (GET_CODE (dst) == PARALLEL)
2036 {
2037 rtx temp;
2038
2039 /* We can get a PARALLEL dst if there is a conditional expression in
2040 a return statement. In that case, the dst and src are the same,
2041 so no action is necessary. */
2042 if (rtx_equal_p (dst, src))
2043 return;
2044
2045 /* It is unclear if we can ever reach here, but we may as well handle
2046 it. Allocate a temporary, and split this into a store/load to/from
2047 the temporary. */
2048
2049 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2050 emit_group_store (temp, src, ssize, align);
2051 emit_group_load (dst, temp, ssize, align);
2052 return;
2053 }
2054 else if (GET_CODE (dst) != MEM)
aac5cc16
RH
2055 {
2056 dst = gen_reg_rtx (GET_MODE (orig_dst));
2057 /* Make life a bit easier for combine. */
2058 emit_move_insn (dst, const0_rtx);
2059 }
2060 else if (! MEM_IN_STRUCT_P (dst))
2061 {
2062 /* store_bit_field requires that memory operations have
2063 mem_in_struct_p set; we might not. */
fffa9c1d 2064
aac5cc16 2065 dst = copy_rtx (orig_dst);
c6df88cb 2066 MEM_SET_IN_STRUCT_P (dst, 1);
aac5cc16
RH
2067 }
2068
2069 /* Process the pieces. */
2070 for (i = start; i < XVECLEN (src, 0); i++)
2071 {
2072 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2073 enum machine_mode mode = GET_MODE (tmps[i]);
2074 int bytelen = GET_MODE_SIZE (mode);
2075
2076 /* Handle trailing fragments that run over the size of the struct. */
2077 if (ssize >= 0 && bytepos + bytelen > ssize)
71bc0330 2078 {
aac5cc16
RH
2079 if (BYTES_BIG_ENDIAN)
2080 {
2081 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2082 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2083 tmps[i], 0, OPTAB_WIDEN);
2084 }
2085 bytelen = ssize - bytepos;
71bc0330 2086 }
fffa9c1d 2087
aac5cc16
RH
2088 /* Optimize the access just a bit. */
2089 if (GET_CODE (dst) == MEM
2090 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2091 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2092 && bytelen == GET_MODE_SIZE (mode))
2093 {
2094 emit_move_insn (change_address (dst, mode,
2095 plus_constant (XEXP (dst, 0),
2096 bytepos)),
2097 tmps[i]);
2098 }
2099 else
2100 {
2101 store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
2102 mode, tmps[i], align, ssize);
2103 }
fffa9c1d 2104 }
aac5cc16
RH
2105 emit_queue();
2106
2107 /* Copy from the pseudo into the (probable) hard reg. */
2108 if (GET_CODE (dst) == REG)
2109 emit_move_insn (orig_dst, dst);
fffa9c1d
JW
2110}
2111
c36fce9a
GRK
2112/* Generate code to copy a BLKmode object of TYPE out of a
2113 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2114 is null, a stack temporary is created. TGTBLK is returned.
2115
2116 The primary purpose of this routine is to handle functions
2117 that return BLKmode structures in registers. Some machines
2118 (the PA for example) want to return all small structures
2119 in registers regardless of the structure's alignment.
2120 */
2121
2122rtx
 2123copy_blkmode_from_reg (tgtblk, srcreg, type)
2124 rtx tgtblk;
2125 rtx srcreg;
2126 tree type;
2127{
2128 int bytes = int_size_in_bytes (type);
2129 rtx src = NULL, dst = NULL;
c84e2712 2130 int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD);
c36fce9a
GRK
2131 int bitpos, xbitpos, big_endian_correction = 0;
2132
2133 if (tgtblk == 0)
2134 {
2135 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
c6df88cb 2136 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
c36fce9a
GRK
2137 preserve_temp_slots (tgtblk);
2138 }
2139
2140 /* This code assumes srcreg is at least a full word. If it isn't,
2141 copy it into a new pseudo which is a full word. */
2142 if (GET_MODE (srcreg) != BLKmode
2143 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2144 srcreg = convert_to_mode (word_mode, srcreg,
2145 TREE_UNSIGNED (type));
2146
2147 /* Structures whose size is not a multiple of a word are aligned
2148 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2149 machine, this means we must skip the empty high order bytes when
2150 calculating the bit offset. */
2151 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2152 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2153 * BITS_PER_UNIT));
2154
 2155 /* Copy the structure BITSIZE bits at a time.
2156
2157 We could probably emit more efficient code for machines
2158 which do not use strict alignment, but it doesn't seem
2159 worth the effort at the current time. */
2160 for (bitpos = 0, xbitpos = big_endian_correction;
2161 bitpos < bytes * BITS_PER_UNIT;
2162 bitpos += bitsize, xbitpos += bitsize)
2163 {
2164
2165 /* We need a new source operand each time xbitpos is on a
2166 word boundary and when xbitpos == big_endian_correction
2167 (the first time through). */
2168 if (xbitpos % BITS_PER_WORD == 0
2169 || xbitpos == big_endian_correction)
2170 src = operand_subword_force (srcreg,
2171 xbitpos / BITS_PER_WORD,
2172 BLKmode);
2173
2174 /* We need a new destination operand each time bitpos is on
2175 a word boundary. */
2176 if (bitpos % BITS_PER_WORD == 0)
2177 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2178
2179 /* Use xbitpos for the source extraction (right justified) and
 2180 bitpos for the destination store (left justified). */
2181 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2182 extract_bit_field (src, bitsize,
2183 xbitpos % BITS_PER_WORD, 1,
2184 NULL_RTX, word_mode,
2185 word_mode,
2186 bitsize / BITS_PER_UNIT,
2187 BITS_PER_WORD),
2188 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2189 }
2190 return tgtblk;
2191}
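
/* Illustrative aside, not part of expr.c: the correction above, worked
   for a 6-byte struct on a 32-bit big-endian target.  The numbers are
   hypothetical; only the formula mirrors the code.  */
#include <stdio.h>

#define UNITS_PER_WORD 4
#define BITS_PER_WORD 32
#define BITS_PER_UNIT 8

int
main (void)
{
  int bytes = 6;                 /* int_size_in_bytes (type) */
  int correction = 0;

  if (bytes % UNITS_PER_WORD)
    correction = BITS_PER_WORD - (bytes % UNITS_PER_WORD) * BITS_PER_UNIT;

  /* Two bytes occupy the last word, so skip its 16 empty high bits.  */
  printf ("big_endian_correction = %d\n", correction);   /* prints 16 */
  return 0;
}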
2192
2193
94b25f81
RK
2194/* Add a USE expression for REG to the (possibly empty) list pointed
2195 to by CALL_FUSAGE. REG must denote a hard register. */
bbf6f052
RK
2196
2197void
b3f8cf4a
RK
2198use_reg (call_fusage, reg)
2199 rtx *call_fusage, reg;
2200{
0304dfbb
DE
2201 if (GET_CODE (reg) != REG
2202 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
b3f8cf4a
RK
2203 abort();
2204
2205 *call_fusage
38a448ca
RH
2206 = gen_rtx_EXPR_LIST (VOIDmode,
2207 gen_rtx_USE (VOIDmode, reg), *call_fusage);
b3f8cf4a
RK
2208}
2209
94b25f81
RK
2210/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2211 starting at REGNO. All of these registers must be hard registers. */
b3f8cf4a
RK
2212
2213void
0304dfbb
DE
2214use_regs (call_fusage, regno, nregs)
2215 rtx *call_fusage;
bbf6f052
RK
2216 int regno;
2217 int nregs;
2218{
0304dfbb 2219 int i;
bbf6f052 2220
0304dfbb
DE
2221 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2222 abort ();
2223
2224 for (i = 0; i < nregs; i++)
38a448ca 2225 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
bbf6f052 2226}
fffa9c1d
JW
2227
2228/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2229 PARALLEL REGS. This is for calls that pass values in multiple
2230 non-contiguous locations. The Irix 6 ABI has examples of this. */
2231
2232void
2233use_group_regs (call_fusage, regs)
2234 rtx *call_fusage;
2235 rtx regs;
2236{
2237 int i;
2238
6bd35f86
DE
2239 for (i = 0; i < XVECLEN (regs, 0); i++)
2240 {
2241 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
fffa9c1d 2242
6bd35f86
DE
2243 /* A NULL entry means the parameter goes both on the stack and in
2244 registers. This can also be a MEM for targets that pass values
2245 partially on the stack and partially in registers. */
e9a25f70 2246 if (reg != 0 && GET_CODE (reg) == REG)
6bd35f86
DE
2247 use_reg (call_fusage, reg);
2248 }
fffa9c1d 2249}
bbf6f052 2250\f
9de08200
RK
2251/* Generate several move instructions to clear LEN bytes of block TO.
2252 (A MEM rtx with BLKmode). The caller must pass TO through
 2253 protect_from_queue before calling. ALIGN (in bytes) is the maximum alignment
2254 we can assume. */
2255
2256static void
2257clear_by_pieces (to, len, align)
2258 rtx to;
2259 int len, align;
2260{
2261 struct clear_by_pieces data;
2262 rtx to_addr = XEXP (to, 0);
fbe1758d
AM
2263 int max_size = MOVE_MAX_PIECES + 1;
2264 enum machine_mode mode = VOIDmode, tmode;
2265 enum insn_code icode;
9de08200
RK
2266
2267 data.offset = 0;
2268 data.to_addr = to_addr;
2269 data.to = to;
2270 data.autinc_to
2271 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2272 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2273
2274 data.explicit_inc_to = 0;
2275 data.reverse
2276 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2277 if (data.reverse) data.offset = len;
2278 data.len = len;
2279
2280 data.to_struct = MEM_IN_STRUCT_P (to);
2281
2282 /* If copying requires more than two move insns,
2283 copy addresses to registers (to make displacements shorter)
2284 and use post-increment if available. */
2285 if (!data.autinc_to
2286 && move_by_pieces_ninsns (len, align) > 2)
2287 {
fbe1758d
AM
2288 /* Determine the main mode we'll be using */
2289 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2290 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2291 if (GET_MODE_SIZE (tmode) < max_size)
2292 mode = tmode;
2293
2294 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
9de08200
RK
2295 {
2296 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2297 data.autinc_to = 1;
2298 data.explicit_inc_to = -1;
2299 }
fbe1758d 2300 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
9de08200
RK
2301 {
2302 data.to_addr = copy_addr_to_reg (to_addr);
2303 data.autinc_to = 1;
2304 data.explicit_inc_to = 1;
2305 }
9de08200
RK
2306 if (!data.autinc_to && CONSTANT_P (to_addr))
2307 data.to_addr = copy_addr_to_reg (to_addr);
2308 }
2309
2310 if (! SLOW_UNALIGNED_ACCESS
2311 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2312 align = MOVE_MAX;
2313
2314 /* First move what we can in the largest integer mode, then go to
2315 successively smaller modes. */
2316
2317 while (max_size > 1)
2318 {
9de08200
RK
2319 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2320 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2321 if (GET_MODE_SIZE (tmode) < max_size)
2322 mode = tmode;
2323
2324 if (mode == VOIDmode)
2325 break;
2326
2327 icode = mov_optab->handlers[(int) mode].insn_code;
2328 if (icode != CODE_FOR_nothing
2329 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2330 GET_MODE_SIZE (mode)))
2331 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2332
2333 max_size = GET_MODE_SIZE (mode);
2334 }
2335
2336 /* The code above should have handled everything. */
2337 if (data.len != 0)
2338 abort ();
2339}
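
/* Illustrative aside, not part of expr.c: the driver above always
   stores with the widest available mode, then retries the remainder
   with narrower modes.  The same schedule over plain memory:  */
#include <stdio.h>
#include <string.h>

int
main (void)
{
  unsigned char buf[11];
  int len = sizeof buf, off = 0, size, stores = 0;

  memset (buf, 0xff, sizeof buf);
  for (size = 8; size >= 1; size /= 2)   /* DImode, SImode, HImode, QImode */
    while (len >= size)
      {
        memset (buf + off, 0, size);     /* one store of width SIZE */
        off += size;
        len -= size;
        stores++;
      }
  printf ("cleared %d bytes in %d stores\n", off, stores);  /* 11 in 3 */
  return 0;
}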
2340
2341/* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2342 with move instructions for mode MODE. GENFUN is the gen_... function
2343 to make a move insn for that mode. DATA has all the other info. */
2344
2345static void
2346clear_by_pieces_1 (genfun, mode, data)
eae4b970 2347 rtx (*genfun) PROTO ((rtx, ...));
9de08200
RK
2348 enum machine_mode mode;
2349 struct clear_by_pieces *data;
2350{
2351 register int size = GET_MODE_SIZE (mode);
2352 register rtx to1;
2353
2354 while (data->len >= size)
2355 {
2356 if (data->reverse) data->offset -= size;
2357
2358 to1 = (data->autinc_to
38a448ca 2359 ? gen_rtx_MEM (mode, data->to_addr)
effbcc6a
RK
2360 : copy_rtx (change_address (data->to, mode,
2361 plus_constant (data->to_addr,
2362 data->offset))));
9de08200
RK
2363 MEM_IN_STRUCT_P (to1) = data->to_struct;
2364
940da324 2365 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
9de08200 2366 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
9de08200
RK
2367
2368 emit_insn ((*genfun) (to1, const0_rtx));
940da324 2369 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
9de08200 2370 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
9de08200
RK
2371
2372 if (! data->reverse) data->offset += size;
2373
2374 data->len -= size;
2375 }
2376}
2377\f
bbf6f052 2378/* Write zeros through the storage of OBJECT.
9de08200 2379 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
e9a25f70 2380 the maximum alignment we can assume, measured in bytes.
bbf6f052 2381
e9a25f70
JL
 2382 If we call a function that returns a pointer to the block, return it. */
2383
2384rtx
9de08200 2385clear_storage (object, size, align)
bbf6f052 2386 rtx object;
4c08eef0 2387 rtx size;
9de08200 2388 int align;
bbf6f052 2389{
52cf7115
JL
2390#ifdef TARGET_MEM_FUNCTIONS
2391 static tree fn;
2392 tree call_expr, arg_list;
2393#endif
e9a25f70
JL
2394 rtx retval = 0;
2395
bbf6f052
RK
2396 if (GET_MODE (object) == BLKmode)
2397 {
9de08200
RK
2398 object = protect_from_queue (object, 1);
2399 size = protect_from_queue (size, 0);
2400
2401 if (GET_CODE (size) == CONST_INT
fbe1758d 2402 && MOVE_BY_PIECES_P (INTVAL (size), align))
9de08200
RK
2403 clear_by_pieces (object, INTVAL (size), align);
2404
2405 else
2406 {
2407 /* Try the most limited insn first, because there's no point
2408 including more than one in the machine description unless
2409 the more limited one has some advantage. */
2410
2411 rtx opalign = GEN_INT (align);
2412 enum machine_mode mode;
2413
2414 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2415 mode = GET_MODE_WIDER_MODE (mode))
2416 {
2417 enum insn_code code = clrstr_optab[(int) mode];
2418
2419 if (code != CODE_FOR_nothing
2420 /* We don't need MODE to be narrower than
2421 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2422 the mode mask, as it is returned by the macro, it will
2423 definitely be less than the actual mode mask. */
2424 && ((GET_CODE (size) == CONST_INT
2425 && ((unsigned HOST_WIDE_INT) INTVAL (size)
e5e809f4 2426 <= (GET_MODE_MASK (mode) >> 1)))
9de08200
RK
2427 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2428 && (insn_operand_predicate[(int) code][0] == 0
2429 || (*insn_operand_predicate[(int) code][0]) (object,
2430 BLKmode))
2431 && (insn_operand_predicate[(int) code][2] == 0
2432 || (*insn_operand_predicate[(int) code][2]) (opalign,
2433 VOIDmode)))
2434 {
2435 rtx op1;
2436 rtx last = get_last_insn ();
2437 rtx pat;
2438
2439 op1 = convert_to_mode (mode, size, 1);
2440 if (insn_operand_predicate[(int) code][1] != 0
2441 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2442 mode))
2443 op1 = copy_to_mode_reg (mode, op1);
2444
2445 pat = GEN_FCN ((int) code) (object, op1, opalign);
2446 if (pat)
2447 {
2448 emit_insn (pat);
e9a25f70 2449 return 0;
9de08200
RK
2450 }
2451 else
2452 delete_insns_since (last);
2453 }
2454 }
2455
2456
bbf6f052 2457#ifdef TARGET_MEM_FUNCTIONS
52cf7115
JL
2458 /* It is incorrect to use the libcall calling conventions to call
2459 memset in this context.
2460
2461 This could be a user call to memset and the user may wish to
2462 examine the return value from memset.
2463
2464 For targets where libcalls and normal calls have different conventions
2465 for returning pointers, we could end up generating incorrect code.
2466
2467 So instead of using a libcall sequence we build up a suitable
2468 CALL_EXPR and expand the call in the normal fashion. */
2469 if (fn == NULL_TREE)
2470 {
2471 tree fntype;
2472
 2473 /* This was copied from except.c; I don't know if all this is
2474 necessary in this context or not. */
2475 fn = get_identifier ("memset");
2476 push_obstacks_nochange ();
2477 end_temporary_allocation ();
2478 fntype = build_pointer_type (void_type_node);
2479 fntype = build_function_type (fntype, NULL_TREE);
2480 fn = build_decl (FUNCTION_DECL, fn, fntype);
2481 DECL_EXTERNAL (fn) = 1;
2482 TREE_PUBLIC (fn) = 1;
2483 DECL_ARTIFICIAL (fn) = 1;
2484 make_decl_rtl (fn, NULL_PTR, 1);
2485 assemble_external (fn);
2486 pop_obstacks ();
2487 }
2488
2489 /* We need to make an argument list for the function call.
2490
 2491 memset has three arguments, the first is a void * address, the
 2492 second an integer with the initialization value, and the last a
 2493 size_t byte count for the store. */
2494 arg_list
2495 = build_tree_list (NULL_TREE,
2496 make_tree (build_pointer_type (void_type_node),
2497 XEXP (object, 0)));
2498 TREE_CHAIN (arg_list)
2499 = build_tree_list (NULL_TREE,
2500 make_tree (integer_type_node, const0_rtx));
2501 TREE_CHAIN (TREE_CHAIN (arg_list))
2502 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2503 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2504
2505 /* Now we have to build up the CALL_EXPR itself. */
2506 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2507 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2508 call_expr, arg_list, NULL_TREE);
2509 TREE_SIDE_EFFECTS (call_expr) = 1;
2510
2511 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
bbf6f052 2512#else
9de08200
RK
2513 emit_library_call (bzero_libfunc, 0,
2514 VOIDmode, 2,
2515 XEXP (object, 0), Pmode,
e9a25f70
JL
2516 convert_to_mode
2517 (TYPE_MODE (integer_type_node), size,
2518 TREE_UNSIGNED (integer_type_node)),
9de08200 2519 TYPE_MODE (integer_type_node));
bbf6f052 2520#endif
9de08200 2521 }
bbf6f052
RK
2522 }
2523 else
66ed0683 2524 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
e9a25f70
JL
2525
2526 return retval;
bbf6f052
RK
2527}
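
/* Illustrative aside, not part of expr.c: the SIZE <= GET_MODE_MASK >> 1
   test above accepts a count only if it fits in half the mode's mask,
   so the operand is representable whether the expander reads it signed
   or unsigned.  A hypothetical HImode check:  */
#include <stdio.h>

int
main (void)
{
  unsigned long mode_mask = 0xffff;      /* GET_MODE_MASK (HImode) */
  unsigned long size = 20000;

  if (size <= (mode_mask >> 1))          /* 20000 <= 32767: OK */
    printf ("HImode can carry this length\n");
  else
    printf ("a wider mode is needed\n");
  return 0;
}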
2528
2529/* Generate code to copy Y into X.
2530 Both Y and X must have the same mode, except that
2531 Y can be a constant with VOIDmode.
2532 This mode cannot be BLKmode; use emit_block_move for that.
2533
2534 Return the last instruction emitted. */
2535
2536rtx
2537emit_move_insn (x, y)
2538 rtx x, y;
2539{
2540 enum machine_mode mode = GET_MODE (x);
bbf6f052
RK
2541
2542 x = protect_from_queue (x, 1);
2543 y = protect_from_queue (y, 0);
2544
2545 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2546 abort ();
2547
ee5332b8
RH
2548 /* Never force constant_p_rtx to memory. */
2549 if (GET_CODE (y) == CONSTANT_P_RTX)
2550 ;
2551 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
bbf6f052
RK
2552 y = force_const_mem (mode, y);
2553
2554 /* If X or Y are memory references, verify that their addresses are valid
2555 for the machine. */
2556 if (GET_CODE (x) == MEM
2557 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2558 && ! push_operand (x, GET_MODE (x)))
2559 || (flag_force_addr
2560 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2561 x = change_address (x, VOIDmode, XEXP (x, 0));
2562
2563 if (GET_CODE (y) == MEM
2564 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2565 || (flag_force_addr
2566 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2567 y = change_address (y, VOIDmode, XEXP (y, 0));
2568
2569 if (mode == BLKmode)
2570 abort ();
2571
261c4230
RS
2572 return emit_move_insn_1 (x, y);
2573}
2574
2575/* Low level part of emit_move_insn.
2576 Called just like emit_move_insn, but assumes X and Y
2577 are basically valid. */
2578
2579rtx
2580emit_move_insn_1 (x, y)
2581 rtx x, y;
2582{
2583 enum machine_mode mode = GET_MODE (x);
2584 enum machine_mode submode;
2585 enum mode_class class = GET_MODE_CLASS (mode);
2586 int i;
2587
bbf6f052
RK
2588 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2589 return
2590 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2591
89742723 2592 /* Expand complex moves by moving real part and imag part, if possible. */
7308a047 2593 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
d0c76654
RK
2594 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2595 * BITS_PER_UNIT),
2596 (class == MODE_COMPLEX_INT
2597 ? MODE_INT : MODE_FLOAT),
2598 0))
7308a047
RS
2599 && (mov_optab->handlers[(int) submode].insn_code
2600 != CODE_FOR_nothing))
2601 {
2602 /* Don't split destination if it is a stack push. */
2603 int stack = push_operand (x, GET_MODE (x));
7308a047 2604
7308a047
RS
2605 /* If this is a stack, push the highpart first, so it
2606 will be in the argument order.
2607
2608 In that case, change_address is used only to convert
2609 the mode, not to change the address. */
c937357e
RS
2610 if (stack)
2611 {
e33c0d66
RS
2612 /* Note that the real part always precedes the imag part in memory
2613 regardless of machine's endianness. */
c937357e
RS
2614#ifdef STACK_GROWS_DOWNWARD
2615 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2616 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2617 gen_imagpart (submode, y)));
c937357e 2618 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2619 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2620 gen_realpart (submode, y)));
c937357e
RS
2621#else
2622 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2623 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2624 gen_realpart (submode, y)));
c937357e 2625 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2626 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2627 gen_imagpart (submode, y)));
c937357e
RS
2628#endif
2629 }
2630 else
2631 {
c14c6529
RH
2632 /* Show the output dies here. This is necessary for pseudos;
2633 hard regs shouldn't appear here except as return values.
2634 We never want to emit such a clobber after reload. */
2635 if (x != y
2636 && ! (reload_in_progress || reload_completed))
b2e7e6fb 2637 {
c14c6529 2638 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
b2e7e6fb 2639 }
2638126a 2640
c937357e 2641 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
976ff203 2642 (gen_realpart (submode, x), gen_realpart (submode, y)));
c937357e 2643 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
976ff203 2644 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
c937357e 2645 }
7308a047 2646
7a1ab50a 2647 return get_last_insn ();
7308a047
RS
2648 }
2649
bbf6f052
RK
2650 /* This will handle any multi-word mode that lacks a move_insn pattern.
2651 However, you will get better code if you define such patterns,
2652 even if they must turn into multiple assembler instructions. */
a4320483 2653 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
bbf6f052
RK
2654 {
2655 rtx last_insn = 0;
6551fa4d 2656
a98c9f1a
RK
2657#ifdef PUSH_ROUNDING
2658
2659 /* If X is a push on the stack, do the push now and replace
2660 X with a reference to the stack pointer. */
2661 if (push_operand (x, GET_MODE (x)))
2662 {
2663 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2664 x = change_address (x, VOIDmode, stack_pointer_rtx);
2665 }
2666#endif
2667
c14c6529
RH
2668 /* Show the output dies here. This is necessary for pseudos;
2669 hard regs shouldn't appear here except as return values.
2670 We never want to emit such a clobber after reload. */
2671 if (x != y
2672 && ! (reload_in_progress || reload_completed))
b2e7e6fb 2673 {
c14c6529 2674 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
b2e7e6fb 2675 }
15a7a8ec 2676
bbf6f052
RK
2677 for (i = 0;
2678 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2679 i++)
2680 {
2681 rtx xpart = operand_subword (x, i, 1, mode);
2682 rtx ypart = operand_subword (y, i, 1, mode);
2683
2684 /* If we can't get a part of Y, put Y into memory if it is a
2685 constant. Otherwise, force it into a register. If we still
2686 can't get a part of Y, abort. */
2687 if (ypart == 0 && CONSTANT_P (y))
2688 {
2689 y = force_const_mem (mode, y);
2690 ypart = operand_subword (y, i, 1, mode);
2691 }
2692 else if (ypart == 0)
2693 ypart = operand_subword_force (y, i, mode);
2694
2695 if (xpart == 0 || ypart == 0)
2696 abort ();
2697
2698 last_insn = emit_move_insn (xpart, ypart);
2699 }
6551fa4d 2700
bbf6f052
RK
2701 return last_insn;
2702 }
2703 else
2704 abort ();
2705}
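
/* Illustrative aside, not part of expr.c: when no movXX pattern exists,
   the loop above issues one word-sized move per subword.  The same
   word-by-word walk over plain memory (word size hypothetical):  */
#include <stdio.h>
#include <string.h>

#define UNITS_PER_WORD 4

int
main (void)
{
  char y[12] = "0123456789", x[12] = { 0 };
  int size = 12, i;
  int nwords = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  for (i = 0; i < nwords; i++)           /* one move insn per subword */
    memcpy (x + i * UNITS_PER_WORD, y + i * UNITS_PER_WORD, UNITS_PER_WORD);
  printf ("%s\n", x);
  return 0;
}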
2706\f
2707/* Pushing data onto the stack. */
2708
2709/* Push a block of length SIZE (perhaps variable)
2710 and return an rtx to address the beginning of the block.
2711 Note that it is not possible for the value returned to be a QUEUED.
2712 The value may be virtual_outgoing_args_rtx.
2713
2714 EXTRA is the number of bytes of padding to push in addition to SIZE.
2715 BELOW nonzero means this padding comes at low addresses;
2716 otherwise, the padding comes at high addresses. */
2717
2718rtx
2719push_block (size, extra, below)
2720 rtx size;
2721 int extra, below;
2722{
2723 register rtx temp;
88f63c77
RK
2724
2725 size = convert_modes (Pmode, ptr_mode, size, 1);
bbf6f052
RK
2726 if (CONSTANT_P (size))
2727 anti_adjust_stack (plus_constant (size, extra));
2728 else if (GET_CODE (size) == REG && extra == 0)
2729 anti_adjust_stack (size);
2730 else
2731 {
2732 rtx temp = copy_to_mode_reg (Pmode, size);
2733 if (extra != 0)
906c4e36 2734 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
bbf6f052
RK
2735 temp, 0, OPTAB_LIB_WIDEN);
2736 anti_adjust_stack (temp);
2737 }
2738
e1a9b2ab
HB
2739#if defined (STACK_GROWS_DOWNWARD) \
2740 || (defined (ARGS_GROW_DOWNWARD) \
2741 && !defined (ACCUMULATE_OUTGOING_ARGS))
2742
2743 /* Return the lowest stack address when STACK or ARGS grow downward and
 2744 we are not accumulating outgoing arguments (the c4x port uses such
2745 conventions). */
bbf6f052
RK
2746 temp = virtual_outgoing_args_rtx;
2747 if (extra != 0 && below)
2748 temp = plus_constant (temp, extra);
2749#else
2750 if (GET_CODE (size) == CONST_INT)
2751 temp = plus_constant (virtual_outgoing_args_rtx,
2752 - INTVAL (size) - (below ? 0 : extra));
2753 else if (extra != 0 && !below)
38a448ca 2754 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
bbf6f052
RK
2755 negate_rtx (Pmode, plus_constant (size, extra)));
2756 else
38a448ca 2757 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
bbf6f052
RK
2758 negate_rtx (Pmode, size));
2759#endif
2760
2761 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2762}
2763
87e38d84 2764rtx
bbf6f052
RK
2765gen_push_operand ()
2766{
38a448ca 2767 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
bbf6f052
RK
2768}
2769
921b3427
RK
 2770/* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2771 block of SIZE bytes. */
2772
2773static rtx
2774get_push_address (size)
2775 int size;
2776{
2777 register rtx temp;
2778
2779 if (STACK_PUSH_CODE == POST_DEC)
38a448ca 2780 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
921b3427 2781 else if (STACK_PUSH_CODE == POST_INC)
38a448ca 2782 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
921b3427
RK
2783 else
2784 temp = stack_pointer_rtx;
2785
c85f7c16 2786 return copy_to_reg (temp);
921b3427
RK
2787}
2788
bbf6f052
RK
2789/* Generate code to push X onto the stack, assuming it has mode MODE and
2790 type TYPE.
2791 MODE is redundant except when X is a CONST_INT (since they don't
2792 carry mode info).
2793 SIZE is an rtx for the size of data to be copied (in bytes),
2794 needed only if X is BLKmode.
2795
2796 ALIGN (in bytes) is maximum alignment we can assume.
2797
cd048831
RK
2798 If PARTIAL and REG are both nonzero, then copy that many of the first
2799 words of X into registers starting with REG, and push the rest of X.
bbf6f052
RK
2800 The amount of space pushed is decreased by PARTIAL words,
2801 rounded *down* to a multiple of PARM_BOUNDARY.
2802 REG must be a hard register in this case.
cd048831
RK
 2803 If REG is zero but PARTIAL is not, take all other actions for an
2804 argument partially in registers, but do not actually load any
2805 registers.
bbf6f052
RK
2806
2807 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 2808 This is ignored if an argument block has already been allocated.
bbf6f052
RK
2809
2810 On a machine that lacks real push insns, ARGS_ADDR is the address of
2811 the bottom of the argument block for this call. We use indexing off there
 2813 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2813 argument block has not been preallocated.
2814
e5e809f4
JL
2815 ARGS_SO_FAR is the size of args previously pushed for this call.
2816
2817 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2818 for arguments passed in registers. If nonzero, it will be the number
2819 of bytes required. */
bbf6f052
RK
2820
2821void
2822emit_push_insn (x, mode, type, size, align, partial, reg, extra,
e5e809f4 2823 args_addr, args_so_far, reg_parm_stack_space)
bbf6f052
RK
2824 register rtx x;
2825 enum machine_mode mode;
2826 tree type;
2827 rtx size;
2828 int align;
2829 int partial;
2830 rtx reg;
2831 int extra;
2832 rtx args_addr;
2833 rtx args_so_far;
e5e809f4 2834 int reg_parm_stack_space;
bbf6f052
RK
2835{
2836 rtx xinner;
2837 enum direction stack_direction
2838#ifdef STACK_GROWS_DOWNWARD
2839 = downward;
2840#else
2841 = upward;
2842#endif
2843
2844 /* Decide where to pad the argument: `downward' for below,
2845 `upward' for above, or `none' for don't pad it.
2846 Default is below for small data on big-endian machines; else above. */
2847 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2848
2849 /* Invert direction if stack is post-update. */
2850 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2851 if (where_pad != none)
2852 where_pad = (where_pad == downward ? upward : downward);
2853
2854 xinner = x = protect_from_queue (x, 0);
2855
2856 if (mode == BLKmode)
2857 {
2858 /* Copy a block into the stack, entirely or partially. */
2859
2860 register rtx temp;
2861 int used = partial * UNITS_PER_WORD;
2862 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2863 int skip;
2864
2865 if (size == 0)
2866 abort ();
2867
2868 used -= offset;
2869
2870 /* USED is now the # of bytes we need not copy to the stack
2871 because registers will take care of them. */
2872
2873 if (partial != 0)
2874 xinner = change_address (xinner, BLKmode,
2875 plus_constant (XEXP (xinner, 0), used));
2876
2877 /* If the partial register-part of the arg counts in its stack size,
2878 skip the part of stack space corresponding to the registers.
2879 Otherwise, start copying to the beginning of the stack space,
2880 by setting SKIP to 0. */
e5e809f4 2881 skip = (reg_parm_stack_space == 0) ? 0 : used;
bbf6f052
RK
2882
2883#ifdef PUSH_ROUNDING
2884 /* Do it with several push insns if that doesn't take lots of insns
2885 and if there is no difficulty with push insns that skip bytes
2886 on the stack for alignment purposes. */
2887 if (args_addr == 0
2888 && GET_CODE (size) == CONST_INT
2889 && skip == 0
15914757 2890 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
bbf6f052
RK
2891 /* Here we avoid the case of a structure whose weak alignment
2892 forces many pushes of a small amount of data,
2893 and such small pushes do rounding that causes trouble. */
c7a7ac46 2894 && ((! SLOW_UNALIGNED_ACCESS)
e87b4f3f 2895 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
bbf6f052 2896 || PUSH_ROUNDING (align) == align)
bbf6f052
RK
2897 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2898 {
2899 /* Push padding now if padding above and stack grows down,
2900 or if padding below and stack grows up.
2901 But if space already allocated, this has already been done. */
2902 if (extra && args_addr == 0
2903 && where_pad != none && where_pad != stack_direction)
906c4e36 2904 anti_adjust_stack (GEN_INT (extra));
bbf6f052 2905
38a448ca 2906 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
bbf6f052 2907 INTVAL (size) - used, align);
921b3427 2908
7d384cc0 2909 if (current_function_check_memory_usage && ! in_check_memory_usage)
921b3427
RK
2910 {
2911 rtx temp;
2912
956d6950 2913 in_check_memory_usage = 1;
921b3427 2914 temp = get_push_address (INTVAL(size) - used);
c85f7c16 2915 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
921b3427
RK
2916 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2917 temp, ptr_mode,
2918 XEXP (xinner, 0), ptr_mode,
2919 GEN_INT (INTVAL(size) - used),
2920 TYPE_MODE (sizetype));
2921 else
2922 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2923 temp, ptr_mode,
2924 GEN_INT (INTVAL(size) - used),
2925 TYPE_MODE (sizetype),
956d6950
JL
2926 GEN_INT (MEMORY_USE_RW),
2927 TYPE_MODE (integer_type_node));
2928 in_check_memory_usage = 0;
921b3427 2929 }
bbf6f052
RK
2930 }
2931 else
2932#endif /* PUSH_ROUNDING */
2933 {
2934 /* Otherwise make space on the stack and copy the data
2935 to the address of that space. */
2936
2937 /* Deduct words put into registers from the size we must copy. */
2938 if (partial != 0)
2939 {
2940 if (GET_CODE (size) == CONST_INT)
906c4e36 2941 size = GEN_INT (INTVAL (size) - used);
bbf6f052
RK
2942 else
2943 size = expand_binop (GET_MODE (size), sub_optab, size,
906c4e36
RK
2944 GEN_INT (used), NULL_RTX, 0,
2945 OPTAB_LIB_WIDEN);
bbf6f052
RK
2946 }
2947
2948 /* Get the address of the stack space.
2949 In this case, we do not deal with EXTRA separately.
2950 A single stack adjust will do. */
2951 if (! args_addr)
2952 {
2953 temp = push_block (size, extra, where_pad == downward);
2954 extra = 0;
2955 }
2956 else if (GET_CODE (args_so_far) == CONST_INT)
2957 temp = memory_address (BLKmode,
2958 plus_constant (args_addr,
2959 skip + INTVAL (args_so_far)));
2960 else
2961 temp = memory_address (BLKmode,
38a448ca
RH
2962 plus_constant (gen_rtx_PLUS (Pmode,
2963 args_addr,
2964 args_so_far),
bbf6f052 2965 skip));
7d384cc0 2966 if (current_function_check_memory_usage && ! in_check_memory_usage)
921b3427
RK
2967 {
2968 rtx target;
2969
956d6950 2970 in_check_memory_usage = 1;
921b3427 2971 target = copy_to_reg (temp);
c85f7c16 2972 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
921b3427
RK
2973 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2974 target, ptr_mode,
2975 XEXP (xinner, 0), ptr_mode,
2976 size, TYPE_MODE (sizetype));
2977 else
2978 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2979 target, ptr_mode,
2980 size, TYPE_MODE (sizetype),
956d6950
JL
2981 GEN_INT (MEMORY_USE_RW),
2982 TYPE_MODE (integer_type_node));
2983 in_check_memory_usage = 0;
921b3427 2984 }
bbf6f052
RK
2985
2986 /* TEMP is the address of the block. Copy the data there. */
2987 if (GET_CODE (size) == CONST_INT
fbe1758d 2988 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align)))
bbf6f052 2989 {
38a448ca 2990 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
bbf6f052
RK
2991 INTVAL (size), align);
2992 goto ret;
2993 }
e5e809f4 2994 else
bbf6f052 2995 {
e5e809f4
JL
2996 rtx opalign = GEN_INT (align);
2997 enum machine_mode mode;
9e6a5703 2998 rtx target = gen_rtx_MEM (BLKmode, temp);
e5e809f4
JL
2999
3000 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3001 mode != VOIDmode;
3002 mode = GET_MODE_WIDER_MODE (mode))
c841050e 3003 {
e5e809f4
JL
3004 enum insn_code code = movstr_optab[(int) mode];
3005
3006 if (code != CODE_FOR_nothing
3007 && ((GET_CODE (size) == CONST_INT
3008 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3009 <= (GET_MODE_MASK (mode) >> 1)))
3010 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3011 && (insn_operand_predicate[(int) code][0] == 0
3012 || ((*insn_operand_predicate[(int) code][0])
3013 (target, BLKmode)))
3014 && (insn_operand_predicate[(int) code][1] == 0
3015 || ((*insn_operand_predicate[(int) code][1])
3016 (xinner, BLKmode)))
3017 && (insn_operand_predicate[(int) code][3] == 0
3018 || ((*insn_operand_predicate[(int) code][3])
3019 (opalign, VOIDmode))))
3020 {
3021 rtx op2 = convert_to_mode (mode, size, 1);
3022 rtx last = get_last_insn ();
3023 rtx pat;
3024
3025 if (insn_operand_predicate[(int) code][2] != 0
3026 && ! ((*insn_operand_predicate[(int) code][2])
3027 (op2, mode)))
3028 op2 = copy_to_mode_reg (mode, op2);
3029
3030 pat = GEN_FCN ((int) code) (target, xinner,
3031 op2, opalign);
3032 if (pat)
3033 {
3034 emit_insn (pat);
3035 goto ret;
3036 }
3037 else
3038 delete_insns_since (last);
3039 }
c841050e 3040 }
bbf6f052 3041 }
bbf6f052
RK
3042
3043#ifndef ACCUMULATE_OUTGOING_ARGS
3044 /* If the source is referenced relative to the stack pointer,
3045 copy it to another register to stabilize it. We do not need
3046 to do this if we know that we won't be changing sp. */
3047
3048 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3049 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3050 temp = copy_to_reg (temp);
3051#endif
3052
3053 /* Make inhibit_defer_pop nonzero around the library call
3054 to force it to pop the bcopy-arguments right away. */
3055 NO_DEFER_POP;
3056#ifdef TARGET_MEM_FUNCTIONS
d562e42e 3057 emit_library_call (memcpy_libfunc, 0,
bbf6f052 3058 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
0fa83258
RK
3059 convert_to_mode (TYPE_MODE (sizetype),
3060 size, TREE_UNSIGNED (sizetype)),
26ba80fc 3061 TYPE_MODE (sizetype));
bbf6f052 3062#else
d562e42e 3063 emit_library_call (bcopy_libfunc, 0,
bbf6f052 3064 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3b6f75e2
JW
3065 convert_to_mode (TYPE_MODE (integer_type_node),
3066 size,
3067 TREE_UNSIGNED (integer_type_node)),
3068 TYPE_MODE (integer_type_node));
bbf6f052
RK
3069#endif
3070 OK_DEFER_POP;
3071 }
3072 }
3073 else if (partial > 0)
3074 {
3075 /* Scalar partly in registers. */
3076
3077 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3078 int i;
3079 int not_stack;
3080 /* # words of start of argument
3081 that we must make space for but need not store. */
3082 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3083 int args_offset = INTVAL (args_so_far);
3084 int skip;
3085
3086 /* Push padding now if padding above and stack grows down,
3087 or if padding below and stack grows up.
3088 But if space already allocated, this has already been done. */
3089 if (extra && args_addr == 0
3090 && where_pad != none && where_pad != stack_direction)
906c4e36 3091 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3092
3093 /* If we make space by pushing it, we might as well push
3094 the real data. Otherwise, we can leave OFFSET nonzero
3095 and leave the space uninitialized. */
3096 if (args_addr == 0)
3097 offset = 0;
3098
3099 /* Now NOT_STACK gets the number of words that we don't need to
3100 allocate on the stack. */
3101 not_stack = partial - offset;
3102
3103 /* If the partial register-part of the arg counts in its stack size,
3104 skip the part of stack space corresponding to the registers.
3105 Otherwise, start copying to the beginning of the stack space,
3106 by setting SKIP to 0. */
e5e809f4 3107 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
bbf6f052
RK
3108
3109 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3110 x = validize_mem (force_const_mem (mode, x));
3111
3112 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3113 SUBREGs of such registers are not allowed. */
3114 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3115 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3116 x = copy_to_reg (x);
3117
3118 /* Loop over all the words allocated on the stack for this arg. */
3119 /* We can do it by words, because any scalar bigger than a word
3120 has a size a multiple of a word. */
3121#ifndef PUSH_ARGS_REVERSED
3122 for (i = not_stack; i < size; i++)
3123#else
3124 for (i = size - 1; i >= not_stack; i--)
3125#endif
3126 if (i >= not_stack + offset)
3127 emit_push_insn (operand_subword_force (x, i, mode),
906c4e36
RK
3128 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3129 0, args_addr,
3130 GEN_INT (args_offset + ((i - not_stack + skip)
e5e809f4
JL
3131 * UNITS_PER_WORD)),
3132 reg_parm_stack_space);
bbf6f052
RK
3133 }
3134 else
3135 {
3136 rtx addr;
921b3427 3137 rtx target = NULL_RTX;
bbf6f052
RK
3138
3139 /* Push padding now if padding above and stack grows down,
3140 or if padding below and stack grows up.
3141 But if space already allocated, this has already been done. */
3142 if (extra && args_addr == 0
3143 && where_pad != none && where_pad != stack_direction)
906c4e36 3144 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3145
3146#ifdef PUSH_ROUNDING
3147 if (args_addr == 0)
3148 addr = gen_push_operand ();
3149 else
3150#endif
921b3427
RK
3151 {
3152 if (GET_CODE (args_so_far) == CONST_INT)
3153 addr
3154 = memory_address (mode,
3155 plus_constant (args_addr,
3156 INTVAL (args_so_far)));
3157 else
38a448ca
RH
3158 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3159 args_so_far));
921b3427
RK
3160 target = addr;
3161 }
bbf6f052 3162
38a448ca 3163 emit_move_insn (gen_rtx_MEM (mode, addr), x);
921b3427 3164
7d384cc0 3165 if (current_function_check_memory_usage && ! in_check_memory_usage)
921b3427 3166 {
956d6950 3167 in_check_memory_usage = 1;
921b3427
RK
3168 if (target == 0)
3169 target = get_push_address (GET_MODE_SIZE (mode));
3170
c85f7c16 3171 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
921b3427
RK
3172 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3173 target, ptr_mode,
3174 XEXP (x, 0), ptr_mode,
3175 GEN_INT (GET_MODE_SIZE (mode)),
3176 TYPE_MODE (sizetype));
3177 else
3178 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3179 target, ptr_mode,
3180 GEN_INT (GET_MODE_SIZE (mode)),
3181 TYPE_MODE (sizetype),
956d6950
JL
3182 GEN_INT (MEMORY_USE_RW),
3183 TYPE_MODE (integer_type_node));
3184 in_check_memory_usage = 0;
921b3427 3185 }
bbf6f052
RK
3186 }
3187
3188 ret:
3189 /* If part should go in registers, copy that part
3190 into the appropriate registers. Do this now, at the end,
3191 since mem-to-mem copies above may do function calls. */
cd048831 3192 if (partial > 0 && reg != 0)
fffa9c1d
JW
3193 {
3194 /* Handle calls that pass values in multiple non-contiguous locations.
3195 The Irix 6 ABI has examples of this. */
3196 if (GET_CODE (reg) == PARALLEL)
aac5cc16 3197 emit_group_load (reg, x, -1, align); /* ??? size? */
fffa9c1d
JW
3198 else
3199 move_block_to_reg (REGNO (reg), x, partial, mode);
3200 }
bbf6f052
RK
3201
3202 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 3203 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3204}
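
/* Illustrative aside, not part of expr.c: how USED and OFFSET interact
   in the BLKmode branch above.  The numbers are hypothetical (4-byte
   words, 64-bit PARM_BOUNDARY).  */
#include <stdio.h>

#define UNITS_PER_WORD 4
#define PARM_BOUNDARY 64
#define BITS_PER_UNIT 8

int
main (void)
{
  int partial = 3;                       /* words already in registers */
  int used = partial * UNITS_PER_WORD;   /* 12 bytes covered by regs */
  int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);   /* 12 % 8 = 4 */

  used -= offset;
  /* The stack copy starts USED bytes into the value; the OFFSET bytes
     also held in registers are stored again so the stack part keeps
     its PARM_BOUNDARY alignment.  */
  printf ("skip %d bytes, re-store %d bytes\n", used, offset);
  return 0;
}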
3205\f
bbf6f052
RK
3206/* Expand an assignment that stores the value of FROM into TO.
3207 If WANT_VALUE is nonzero, return an rtx for the value of TO.
709f5be1
RS
3208 (This may contain a QUEUED rtx;
3209 if the value is constant, this rtx is a constant.)
3210 Otherwise, the returned value is NULL_RTX.
bbf6f052
RK
3211
3212 SUGGEST_REG is no longer actually used.
3213 It used to mean, copy the value through a register
3214 and return that register, if that is possible.
709f5be1 3215 We now use WANT_VALUE to decide whether to do this. */
bbf6f052
RK
3216
3217rtx
3218expand_assignment (to, from, want_value, suggest_reg)
3219 tree to, from;
3220 int want_value;
3221 int suggest_reg;
3222{
3223 register rtx to_rtx = 0;
3224 rtx result;
3225
3226 /* Don't crash if the lhs of the assignment was erroneous. */
3227
3228 if (TREE_CODE (to) == ERROR_MARK)
709f5be1
RS
3229 {
3230 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3231 return want_value ? result : NULL_RTX;
3232 }
bbf6f052
RK
3233
3234 /* Assignment of a structure component needs special treatment
3235 if the structure component's rtx is not simply a MEM.
6be58303
JW
3236 Assignment of an array element at a constant index, and assignment of
 3237 an array element in an unaligned packed structure field, have the same
3238 problem. */
bbf6f052 3239
08293add
RK
3240 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3241 || TREE_CODE (to) == ARRAY_REF)
bbf6f052
RK
3242 {
3243 enum machine_mode mode1;
3244 int bitsize;
3245 int bitpos;
7bb0943f 3246 tree offset;
bbf6f052
RK
3247 int unsignedp;
3248 int volatilep = 0;
0088fcb1 3249 tree tem;
d78d243c 3250 int alignment;
0088fcb1
RK
3251
3252 push_temp_slots ();
839c4796
RK
3253 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3254 &unsignedp, &volatilep, &alignment);
bbf6f052
RK
3255
3256 /* If we are going to use store_bit_field and extract_bit_field,
3257 make sure to_rtx will be safe for multiple use. */
3258
3259 if (mode1 == VOIDmode && want_value)
3260 tem = stabilize_reference (tem);
3261
921b3427 3262 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
7bb0943f
RS
3263 if (offset != 0)
3264 {
906c4e36 3265 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
3266
3267 if (GET_CODE (to_rtx) != MEM)
3268 abort ();
bd070e1a
RH
3269
3270 if (GET_MODE (offset_rtx) != ptr_mode)
3271 {
3272#ifdef POINTERS_EXTEND_UNSIGNED
822a3443 3273 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
bd070e1a
RH
3274#else
3275 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3276#endif
3277 }
3278
89752202
HB
3279 if (GET_CODE (to_rtx) == MEM
3280 && GET_MODE (to_rtx) == BLKmode
3281 && bitsize
3282 && (bitpos % bitsize) == 0
3283 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3284 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3285 {
3286 rtx temp = change_address (to_rtx, mode1,
3287 plus_constant (XEXP (to_rtx, 0),
3288 (bitpos /
3289 BITS_PER_UNIT)));
3290 if (GET_CODE (XEXP (temp, 0)) == REG)
3291 to_rtx = temp;
3292 else
3293 to_rtx = change_address (to_rtx, mode1,
3294 force_reg (GET_MODE (XEXP (temp, 0)),
3295 XEXP (temp, 0)));
3296 bitpos = 0;
3297 }
3298
7bb0943f 3299 to_rtx = change_address (to_rtx, VOIDmode,
38a448ca
RH
3300 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3301 force_reg (ptr_mode, offset_rtx)));
7bb0943f 3302 }
bbf6f052
RK
3303 if (volatilep)
3304 {
3305 if (GET_CODE (to_rtx) == MEM)
01188446
JW
3306 {
3307 /* When the offset is zero, to_rtx is the address of the
3308 structure we are storing into, and hence may be shared.
3309 We must make a new MEM before setting the volatile bit. */
3310 if (offset == 0)
effbcc6a
RK
3311 to_rtx = copy_rtx (to_rtx);
3312
01188446
JW
3313 MEM_VOLATILE_P (to_rtx) = 1;
3314 }
bbf6f052
RK
3315#if 0 /* This was turned off because, when a field is volatile
3316 in an object which is not volatile, the object may be in a register,
3317 and then we would abort over here. */
3318 else
3319 abort ();
3320#endif
3321 }
3322
956d6950
JL
3323 if (TREE_CODE (to) == COMPONENT_REF
3324 && TREE_READONLY (TREE_OPERAND (to, 1)))
3325 {
8bd6ecc2 3326 if (offset == 0)
956d6950
JL
3327 to_rtx = copy_rtx (to_rtx);
3328
3329 RTX_UNCHANGING_P (to_rtx) = 1;
3330 }
3331
921b3427 3332 /* Check the access. */
7d384cc0 3333 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
921b3427
RK
3334 {
3335 rtx to_addr;
3336 int size;
3337 int best_mode_size;
3338 enum machine_mode best_mode;
3339
3340 best_mode = get_best_mode (bitsize, bitpos,
3341 TYPE_ALIGN (TREE_TYPE (tem)),
3342 mode1, volatilep);
3343 if (best_mode == VOIDmode)
3344 best_mode = QImode;
3345
3346 best_mode_size = GET_MODE_BITSIZE (best_mode);
3347 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3348 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3349 size *= GET_MODE_SIZE (best_mode);

	  /* Check the access right of the pointer.  */
	  if (size)
	    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
			       to_addr, ptr_mode,
			       GEN_INT (size), TYPE_MODE (sizetype),
			       GEN_INT (MEMORY_USE_WO),
			       TYPE_MODE (integer_type_node));
	}

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
			    (want_value
			     /* Spurious cast makes HPUX compiler happy.  */
			     ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
			     : VOIDmode),
			    unsignedp,
			    /* Required alignment of containing datum.  */
			    alignment,
			    int_size_in_bytes (TREE_TYPE (tem)),
			    get_alias_set (to));
      preserve_temp_slots (result);
      free_temp_slots ();
      pop_temp_slots ();

      /* If the value is meaningful, convert RESULT to the proper mode.
	 Otherwise, return nothing.  */
      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
					  TYPE_MODE (TREE_TYPE (from)),
					  result,
					  TREE_UNSIGNED (TREE_TYPE (to)))
	      : NULL_RTX);
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might
     be a promoted variable where the zero- or sign-extension needs to be
     done.  Handling this in the normal way is safe because no computation
     is done before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
    {
      rtx value;

      push_temp_slots ();
      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
      if (to_rtx == 0)
	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);

      /* Handle calls that return values in multiple non-contiguous
	 locations.  The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
			 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
      else if (GET_MODE (to_rtx) == BLKmode)
	emit_block_move (to_rtx, value, expr_size (from),
			 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
      else
	emit_move_insn (to_rtx, value);
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    {
      to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
      if (GET_CODE (to_rtx) == MEM)
	MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
    }

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
      emit_move_insn (to_rtx, temp);
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
			      EXPAND_MEMORY_USE_DONT);

      /* Copy the rights of the bitmap.  */
      if (current_function_check_memory_usage)
	emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
			   XEXP (to_rtx, 0), ptr_mode,
			   XEXP (from_rtx, 0), ptr_mode,
			   convert_to_mode (TYPE_MODE (sizetype),
					    size, TREE_UNSIGNED (sizetype)),
			   TYPE_MODE (sizetype));

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			 XEXP (from_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
			 XEXP (to_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node),
					  size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return want_value ? result : NULL_RTX;
}
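
/* Illustrative note (not from the original comments): in a chained
   assignment such as "a = b = c", the inner assignment is expanded
   with WANT_VALUE nonzero so that its result can feed the outer
   store, while a plain expression statement "a = c;" passes
   WANT_VALUE == 0 and gets NULL_RTX back.  */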

/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   TARGET may contain a QUEUED rtx.

   If WANT_VALUE is nonzero, return a copy of the value
   not in TARGET, so that we can be sure to use the proper
   value in a containing expression even if TARGET has something
   else stored in it.  If possible, we copy the value through a pseudo
   and return that pseudo.  Or, if the value is constant, we try to
   return the constant.  In some cases, we return a pseudo
   copied *from* TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If WANT_VALUE is 0, we return NULL, to make sure
   to catch quickly any cases where the caller uses the value
   and fails to set WANT_VALUE.  */

rtx
store_expr (exp, target, want_value)
     register tree exp;
     register rtx target;
     int want_value;
{
  register rtx temp;
  int dont_return_target = 0;

  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
	 part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
	 test the condition, doing the appropriate assignment on either
	 side.  This avoids the creation of unnecessary temporaries.
	 For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      emit_queue ();
      target = protect_from_queue (target, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 1), target, 0);
      end_cleanup_deferral ();
      emit_queue ();
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 2), target, 0);
      end_cleanup_deferral ();
      emit_queue ();
      emit_label (lab2);
      OK_DEFER_POP;

      return want_value ? target : NULL_RTX;
    }
  else if (queued_subexp_p (target))
    /* If target contains a postincrement, let's not risk
       using it as the place to generate the rhs.  */
    {
      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
	{
	  /* Expand EXP into a new pseudo.  */
	  temp = gen_reg_rtx (GET_MODE (target));
	  temp = expand_expr (exp, temp, GET_MODE (target), 0);
	}
      else
	temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);

      /* If target is volatile, ANSI requires accessing the value
	 *from* the target, if it is accessed.  So make that happen.
	 In no case return the target itself.  */
      if (! MEM_VOLATILE_P (target) && want_value)
	dont_return_target = 1;
    }
  else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
	   && GET_MODE (target) != BLKmode)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, WANT_VALUE will be nonzero for it.
       We know expand_expr will not use the target in that case.
       Don't do this if TARGET is volatile because we are supposed
       to write it and then read it.  */
    {
      temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
			  GET_MODE (target), 0);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
	temp = copy_to_reg (temp);
      dont_return_target = 1;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      /* If we don't want a value, we can do the conversion inside EXP,
	 which will often result in some optimizations.  Do the conversion
	 in two steps: first change the signedness, if needed, then
	 the extend.  But don't do this if the type of EXP is a subtype
	 of something else since then the conversion might involve
	 more than just converting modes.  */
      if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
	  && TREE_TYPE (TREE_TYPE (exp)) == 0)
	{
	  if (TREE_UNSIGNED (TREE_TYPE (exp))
	      != SUBREG_PROMOTED_UNSIGNED_P (target))
	    exp
	      = convert
		(signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
					  TREE_TYPE (exp)),
		 exp);

	  exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
					SUBREG_PROMOTED_UNSIGNED_P (target)),
			 exp);
	}

      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If TEMP is a volatile MEM and we want a result value, make
	 the access now so it gets done only once.  Likewise if
	 it contains TARGET.  */
      if (GET_CODE (temp) == MEM && want_value
	  && (MEM_VOLATILE_P (temp)
	      || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
	temp = copy_to_reg (temp);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
	 sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
	temp = convert_modes (GET_MODE (SUBREG_REG (target)),
			      TYPE_MODE (TREE_TYPE (exp)), temp,
			      SUBREG_PROMOTED_UNSIGNED_P (target));

      convert_move (SUBREG_REG (target), temp,
		    SUBREG_PROMOTED_UNSIGNED_P (target));
      return want_value ? temp : NULL_RTX;
    }
  else
    {
      temp = expand_expr (exp, target, GET_MODE (target), 0);
      /* Return TARGET if it's a specified hardware register.
	 If TARGET is a volatile mem ref, either return TARGET
	 or return a reg copied *from* TARGET; ANSI requires this.

	 Otherwise, if TEMP is not TARGET, return TEMP
	 if it is constant (for efficiency),
	 or if we really want the correct value.  */
      if (!(target && GET_CODE (target) == REG
	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
	  && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	  && ! rtx_equal_p (temp, target)
	  && (CONSTANT_P (temp) || want_value))
	dont_return_target = 1;
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
			  temp, TREE_UNSIGNED (TREE_TYPE (exp)));

  if (current_function_check_memory_usage
      && GET_CODE (target) == MEM
      && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
    {
      if (GET_CODE (temp) == MEM)
	emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
			   XEXP (target, 0), ptr_mode,
			   XEXP (temp, 0), ptr_mode,
			   expr_size (exp), TYPE_MODE (sizetype));
      else
	emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
			   XEXP (target, 0), ptr_mode,
			   expr_size (exp), TYPE_MODE (sizetype),
			   GEN_INT (MEMORY_USE_WO),
			   TYPE_MODE (integer_type_node));
    }

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.  */
  /* If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
			      || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK)
    {
      target = protect_from_queue (target, 1);
      if (GET_MODE (temp) != GET_MODE (target)
	  && GET_MODE (temp) != VOIDmode)
	{
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
	  if (dont_return_target)
	    {
	      /* In this case, we will return TEMP,
		 so make sure it has the proper mode.
		 But don't forget to store the value into TARGET.  */
	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
	      emit_move_insn (target, temp);
	    }
	  else
	    convert_move (target, temp, unsignedp);
	}

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	{
	  /* Handle copying a string constant into an array.
	     The string constant may be shorter than the array.
	     So copy just the string's actual length, and clear the rest.  */
	  rtx size;
	  rtx addr;

	  /* Get the size of the data type of the string,
	     which is actually the size of the target.  */
	  size = expr_size (exp);
	  if (GET_CODE (size) == CONST_INT
	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
	    emit_block_move (target, temp, size,
			     TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
	  else
	    {
	      /* Compute the size of the data to copy from the string.  */
	      tree copy_size
		= size_binop (MIN_EXPR,
			      make_tree (sizetype, size),
			      convert (sizetype,
				       build_int_2 (TREE_STRING_LENGTH (exp), 0)));
	      rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
					       VOIDmode, 0);
	      rtx label = 0;

	      /* Copy that much.  */
	      emit_block_move (target, temp, copy_size_rtx,
			       TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);

	      /* Figure out how much is left in TARGET that we have to clear.
		 Do all calculations in ptr_mode.  */

	      addr = XEXP (target, 0);
	      addr = convert_modes (ptr_mode, Pmode, addr, 1);

	      if (GET_CODE (copy_size_rtx) == CONST_INT)
		{
		  addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
		  size = plus_constant (size, - TREE_STRING_LENGTH (exp));
		}
	      else
		{
		  addr = force_reg (ptr_mode, addr);
		  addr = expand_binop (ptr_mode, add_optab, addr,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  size = expand_binop (ptr_mode, sub_optab, size,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  label = gen_label_rtx ();
		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
					   GET_MODE (size), 0, 0, label);
		}

	      if (size != const0_rtx)
		{
		  /* Be sure we can write on ADDR.  */
		  if (current_function_check_memory_usage)
		    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
				       addr, ptr_mode,
				       size, TYPE_MODE (sizetype),
				       GEN_INT (MEMORY_USE_WO),
				       TYPE_MODE (integer_type_node));
#ifdef TARGET_MEM_FUNCTIONS
		  emit_library_call (memset_libfunc, 0, VOIDmode, 3,
				     addr, ptr_mode,
				     const0_rtx, TYPE_MODE (integer_type_node),
				     convert_to_mode (TYPE_MODE (sizetype),
						      size,
						      TREE_UNSIGNED (sizetype)),
				     TYPE_MODE (sizetype));
#else
		  emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
				     addr, ptr_mode,
				     convert_to_mode (TYPE_MODE (integer_type_node),
						      size,
						      TREE_UNSIGNED (integer_type_node)),
				     TYPE_MODE (integer_type_node));
#endif
		}

	      if (label)
		emit_label (label);
	    }
	}
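      /* A sketch of the path above (not from the original comments):
	 for "char buf[8] = \"abc\";" the string's TREE_STRING_LENGTH
	 is 4 ("abc" plus the terminating nul), so 4 bytes are
	 block-copied into BUF and the remaining 4 bytes are cleared.  */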
      /* Handle calls that return values in multiple non-contiguous
	 locations.  The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
	emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
			 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
      else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp),
			 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
      else
	emit_move_insn (target, temp);
    }

  /* If we don't want a value, return NULL_RTX.  */
  if (! want_value)
    return NULL_RTX;

  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
     ??? The latter test doesn't seem to make sense.  */
  else if (dont_return_target && GET_CODE (temp) != MEM)
    return temp;

  /* Return TARGET itself if it is a hard register.  */
  else if (want_value && GET_MODE (target) != BLKmode
	   && ! (GET_CODE (target) == REG
		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
    return copy_to_reg (target);

  else
    return target;
}
\f
/* Return 1 if EXP just contains zeros.  */

static int
is_zeros_p (exp)
     tree exp;
{
  tree elt;

  switch (TREE_CODE (exp))
    {
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:
      return is_zeros_p (TREE_OPERAND (exp, 0));

    case INTEGER_CST:
      return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;

    case COMPLEX_CST:
      return
	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));

    case REAL_CST:
      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);

    case CONSTRUCTOR:
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	if (! is_zeros_p (TREE_VALUE (elt)))
	  return 0;

      return 1;

    default:
      return 0;
    }
}

/* Return 1 if EXP contains mostly (at least 3/4) zeros.  */

static int
mostly_zeros_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      int elts = 0, zeros = 0;
      tree elt = CONSTRUCTOR_ELTS (exp);
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	{
	  /* If there are no ranges of true bits, it is all zero.  */
	  return elt == NULL_TREE;
	}
      for (; elt; elt = TREE_CHAIN (elt))
	{
	  /* We do not handle the case where the index is a RANGE_EXPR,
	     so the statistic will be somewhat inaccurate.
	     We do make a more accurate count in store_constructor itself,
	     so since this function is only used for nested array elements,
	     this should be close enough.  */
	  if (mostly_zeros_p (TREE_VALUE (elt)))
	    zeros++;
	  elts++;
	}

      return 4 * zeros >= 3 * elts;
    }

  return is_zeros_p (exp);
}
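
/* For instance (illustrative only, not from the original comments):
   the constructor for "int v[8] = { 0, 0, 0, 0, 0, 0, 5, 7 }" has
   zeros == 6 and elts == 8, and 4 * 6 >= 3 * 8 holds, so it counts
   as mostly zero and store_constructor prefers to clear the whole
   object first and then store only the nonzero elements.  */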
\f
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (target, bitsize, bitpos,
			 mode, exp, type, cleared)
     rtx target;
     int bitsize, bitpos;
     enum machine_mode mode;
     tree exp, type;
     int cleared;
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      && bitpos % BITS_PER_UNIT == 0
      /* If we have a non-zero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || GET_CODE (target) == MEM))
    {
      if (bitpos != 0)
	target = change_address (target, VOIDmode,
				 plus_constant (XEXP (target, 0),
						bitpos / BITS_PER_UNIT));
      store_constructor (exp, target, cleared);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp,
		 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
		 int_size_in_bytes (type), 0);
}
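
/* Sketch of the shortcut (hypothetical types for illustration): when
   expanding "struct outer o = { { 1, 2 } }", the inner aggregate lies
   at a byte boundary of a MEM target, so its nested CONSTRUCTOR
   recurses straight into store_constructor with CLEARED still set,
   instead of going through store_field one scalar at a time.  */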

/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM.
   CLEARED is true if TARGET is known to have been zero'd.  */

static void
store_constructor (exp, target, cleared)
     tree exp;
     rtx target;
     int cleared;
{
  tree type = TREE_TYPE (exp);
  rtx exp_size = expr_size (exp);

  /* We know our target cannot conflict, since safe_from_p has been called.  */
#if 0
  /* Don't try copying piece by piece into a hard register
     since that is vulnerable to being clobbered by EXP.
     Instead, construct in a pseudo register and then copy it all.  */
  if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
    {
      rtx temp = gen_reg_rtx (GET_MODE (target));
      store_constructor (exp, temp, 0);
      emit_move_insn (target, temp);
      return;
    }
#endif

  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      register tree elt;

      /* Inform later passes that the whole union value is dead.  */
      if (TREE_CODE (type) == UNION_TYPE
	  || TREE_CODE (type) == QUAL_UNION_TYPE)
	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

      /* If we are building a static constructor into a register,
	 set the initial value as zero so we can fold the value into
	 a constant.  But if more than one register is involved,
	 this probably loses.  */
      else if (GET_CODE (target) == REG && TREE_STATIC (exp)
	       && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
	{
	  if (! cleared)
	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));

	  cleared = 1;
	}

      /* If the constructor has fewer fields than the structure
	 or if we are initializing the structure to mostly zeros,
	 clear the whole structure first.  */
      else if ((list_length (CONSTRUCTOR_ELTS (exp))
		!= list_length (TYPE_FIELDS (type)))
	       || mostly_zeros_p (exp))
	{
	  if (! cleared)
	    clear_storage (target, expr_size (exp),
			   TYPE_ALIGN (type) / BITS_PER_UNIT);

	  cleared = 1;
	}
      else
	/* Inform later passes that the old value is dead.  */
	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

      /* Store each element of the constructor into
	 the corresponding field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	{
	  register tree field = TREE_PURPOSE (elt);
	  tree value = TREE_VALUE (elt);
	  register enum machine_mode mode;
	  int bitsize;
	  int bitpos = 0;
	  int unsignedp;
	  tree pos, constant = 0, offset = 0;
	  rtx to_rtx = target;

	  /* Just ignore missing fields.
	     We cleared the whole structure, above,
	     if any fields are missing.  */
	  if (field == 0)
	    continue;

	  if (cleared && is_zeros_p (TREE_VALUE (elt)))
	    continue;

	  bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
	  unsignedp = TREE_UNSIGNED (field);
	  mode = DECL_MODE (field);
	  if (DECL_BIT_FIELD (field))
	    mode = VOIDmode;

	  pos = DECL_FIELD_BITPOS (field);
	  if (TREE_CODE (pos) == INTEGER_CST)
	    constant = pos;
	  else if (TREE_CODE (pos) == PLUS_EXPR
		   && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
	    constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
	  else
	    offset = pos;

	  if (constant)
	    bitpos = TREE_INT_CST_LOW (constant);

	  if (offset)
	    {
	      rtx offset_rtx;

	      if (contains_placeholder_p (offset))
		offset = build (WITH_RECORD_EXPR, sizetype,
				offset, make_tree (TREE_TYPE (exp), target));

	      offset = size_binop (FLOOR_DIV_EXPR, offset,
				   size_int (BITS_PER_UNIT));

	      offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
	      if (GET_CODE (to_rtx) != MEM)
		abort ();

	      if (GET_MODE (offset_rtx) != ptr_mode)
		{
#ifdef POINTERS_EXTEND_UNSIGNED
		  offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
#else
		  offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
		}

	      to_rtx
		= change_address (to_rtx, VOIDmode,
				  gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
						force_reg (ptr_mode,
							   offset_rtx)));
	    }
	  if (TREE_READONLY (field))
	    {
	      if (GET_CODE (to_rtx) == MEM)
		to_rtx = copy_rtx (to_rtx);

	      RTX_UNCHANGING_P (to_rtx) = 1;
	    }

#ifdef WORD_REGISTER_OPERATIONS
	  /* If this initializes a field that is smaller than a word, at the
	     start of a word, try to widen it to a full word.
	     This special case allows us to output C++ member function
	     initializations in a form that the optimizers can understand.  */
	  if (constant
	      && GET_CODE (target) == REG
	      && bitsize < BITS_PER_WORD
	      && bitpos % BITS_PER_WORD == 0
	      && GET_MODE_CLASS (mode) == MODE_INT
	      && TREE_CODE (value) == INTEGER_CST
	      && GET_CODE (exp_size) == CONST_INT
	      && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
	    {
	      tree type = TREE_TYPE (value);
	      if (TYPE_PRECISION (type) < BITS_PER_WORD)
		{
		  type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
		  value = convert (type, value);
		}
	      if (BYTES_BIG_ENDIAN)
		value
		  = fold (build (LSHIFT_EXPR, type, value,
				 build_int_2 (BITS_PER_WORD - bitsize, 0)));
	      bitsize = BITS_PER_WORD;
	      mode = word_mode;
	    }
#endif
	  store_constructor_field (to_rtx, bitsize, bitpos,
				   mode, value, type, cleared);
	}
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      register tree elt;
      register int i;
      int need_to_clear;
      tree domain = TYPE_DOMAIN (type);
      HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
      HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
      tree elttype = TREE_TYPE (type);

      /* If the constructor has fewer elements than the array,
	 clear the whole array first.  Similarly if this is a
	 static constructor of a non-BLKmode object.  */
      if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
	need_to_clear = 1;
      else
	{
	  HOST_WIDE_INT count = 0, zero_count = 0;
	  need_to_clear = 0;
	  /* This loop is a more accurate version of the loop in
	     mostly_zeros_p (it handles RANGE_EXPR in an index).
	     It is also needed to check for missing elements.  */
	  for (elt = CONSTRUCTOR_ELTS (exp);
	       elt != NULL_TREE;
	       elt = TREE_CHAIN (elt))
	    {
	      tree index = TREE_PURPOSE (elt);
	      HOST_WIDE_INT this_node_count;
	      if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
		{
		  tree lo_index = TREE_OPERAND (index, 0);
		  tree hi_index = TREE_OPERAND (index, 1);
		  if (TREE_CODE (lo_index) != INTEGER_CST
		      || TREE_CODE (hi_index) != INTEGER_CST)
		    {
		      need_to_clear = 1;
		      break;
		    }
		  this_node_count = TREE_INT_CST_LOW (hi_index)
		    - TREE_INT_CST_LOW (lo_index) + 1;
		}
	      else
		this_node_count = 1;
	      count += this_node_count;
	      if (mostly_zeros_p (TREE_VALUE (elt)))
		zero_count += this_node_count;
	    }
	  /* Clear the entire array first if there are any missing elements,
	     or if the incidence of zero elements is >= 75%.  */
	  if (count < maxelt - minelt + 1
	      || 4 * zero_count >= 3 * count)
	    need_to_clear = 1;
	}
      if (need_to_clear)
	{
	  if (! cleared)
	    clear_storage (target, expr_size (exp),
			   TYPE_ALIGN (type) / BITS_PER_UNIT);
	  cleared = 1;
	}
      else
	/* Inform later passes that the old value is dead.  */
	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

      /* Store each element of the constructor into
	 the corresponding element of TARGET, determined
	 by counting the elements.  */
      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
	   elt;
	   elt = TREE_CHAIN (elt), i++)
	{
	  register enum machine_mode mode;
	  int bitsize;
	  int bitpos;
	  int unsignedp;
	  tree value = TREE_VALUE (elt);
	  tree index = TREE_PURPOSE (elt);
	  rtx xtarget = target;

	  if (cleared && is_zeros_p (value))
	    continue;

	  mode = TYPE_MODE (elttype);
	  bitsize = GET_MODE_BITSIZE (mode);
	  unsignedp = TREE_UNSIGNED (elttype);

	  if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
	    {
	      tree lo_index = TREE_OPERAND (index, 0);
	      tree hi_index = TREE_OPERAND (index, 1);
	      rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
	      struct nesting *loop;
	      HOST_WIDE_INT lo, hi, count;
	      tree position;

	      /* If the range is constant and "small", unroll the loop.  */
	      if (TREE_CODE (lo_index) == INTEGER_CST
		  && TREE_CODE (hi_index) == INTEGER_CST
		  && (lo = TREE_INT_CST_LOW (lo_index),
		      hi = TREE_INT_CST_LOW (hi_index),
		      count = hi - lo + 1,
		      (GET_CODE (target) != MEM
		       || count <= 2
		       || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
			   && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
			      <= 40 * 8))))
		{
		  lo -= minelt;  hi -= minelt;
		  for (; lo <= hi; lo++)
		    {
		      bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
		      store_constructor_field (target, bitsize, bitpos,
					       mode, value, type, cleared);
		    }
		}
	      else
		{
		  hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
		  loop_top = gen_label_rtx ();
		  loop_end = gen_label_rtx ();

		  unsignedp = TREE_UNSIGNED (domain);

		  index = build_decl (VAR_DECL, NULL_TREE, domain);

		  DECL_RTL (index) = index_r
		    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
						 &unsignedp, 0));

		  if (TREE_CODE (value) == SAVE_EXPR
		      && SAVE_EXPR_RTL (value) == 0)
		    {
		      /* Make sure value gets expanded once before the
			 loop.  */
		      expand_expr (value, const0_rtx, VOIDmode, 0);
		      emit_queue ();
		    }
		  store_expr (lo_index, index_r, 0);
		  loop = expand_start_loop (0);

		  /* Assign value to element index.  */
		  position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
					 size_int (BITS_PER_UNIT));
		  position = size_binop (MULT_EXPR,
					 size_binop (MINUS_EXPR, index,
						     TYPE_MIN_VALUE (domain)),
					 position);
		  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
		  addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
		  xtarget = change_address (target, mode, addr);
		  if (TREE_CODE (value) == CONSTRUCTOR)
		    store_constructor (value, xtarget, cleared);
		  else
		    store_expr (value, xtarget, 0);

		  expand_exit_loop_if_false (loop,
					     build (LT_EXPR, integer_type_node,
						    index, hi_index));

		  expand_increment (build (PREINCREMENT_EXPR,
					   TREE_TYPE (index),
					   index, integer_one_node), 0, 0);
		  expand_end_loop ();
		  emit_label (loop_end);

		  /* Needed by stupid register allocation, to extend the
		     lifetime of pseudo-regs used by target past the end
		     of the loop.  */
		  emit_insn (gen_rtx_USE (GET_MODE (target), target));
		}
	    }
	  else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
		   || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
	    {
	      rtx pos_rtx, addr;
	      tree position;

	      if (index == 0)
		index = size_int (i);

	      if (minelt)
		index = size_binop (MINUS_EXPR, index,
				    TYPE_MIN_VALUE (domain));
	      position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
				     size_int (BITS_PER_UNIT));
	      position = size_binop (MULT_EXPR, index, position);
	      pos_rtx = expand_expr (position, 0, VOIDmode, 0);
	      addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
	      xtarget = change_address (target, mode, addr);
	      store_expr (value, xtarget, 0);
	    }
	  else
	    {
	      if (index != 0)
		bitpos = ((TREE_INT_CST_LOW (index) - minelt)
			  * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
	      else
		bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
	      store_constructor_field (target, bitsize, bitpos,
				       mode, value, type, cleared);
	    }
	}
    }
  /* Set constructor assignments.  */
  else if (TREE_CODE (type) == SET_TYPE)
    {
      tree elt = CONSTRUCTOR_ELTS (exp);
      int nbytes = int_size_in_bytes (type), nbits;
      tree domain = TYPE_DOMAIN (type);
      tree domain_min, domain_max, bitlength;

      /* The default implementation strategy is to extract the constant
	 parts of the constructor, use that to initialize the target,
	 and then "or" in whatever non-constant ranges we need in addition.

	 If a large set is all zero or all ones, it is
	 probably better to set it using memset (if available) or bzero.
	 Also, if a large set has just a single range, it may also be
	 better to first clear the whole set (using bzero/memset) and
	 then set the bits we want.  */

      /* Check for all zeros.  */
      if (elt == NULL_TREE)
	{
	  if (!cleared)
	    clear_storage (target, expr_size (exp),
			   TYPE_ALIGN (type) / BITS_PER_UNIT);
	  return;
	}

      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
      bitlength = size_binop (PLUS_EXPR,
			      size_binop (MINUS_EXPR, domain_max, domain_min),
			      size_one_node);

      if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
	abort ();
      nbits = TREE_INT_CST_LOW (bitlength);

      /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
	 are "complicated" (more than one range), initialize (the
	 constant parts) by copying from a constant.  */
      if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
	  || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
	{
	  int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
	  enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
	  char *bit_buffer = (char *) alloca (nbits);
	  HOST_WIDE_INT word = 0;
	  int bit_pos = 0;
	  int ibit = 0;
	  int offset = 0;		/* In bytes from beginning of set.  */
	  elt = get_set_constructor_bits (exp, bit_buffer, nbits);
	  for (;;)
	    {
	      if (bit_buffer[ibit])
		{
		  if (BYTES_BIG_ENDIAN)
		    word |= (1 << (set_word_size - 1 - bit_pos));
		  else
		    word |= 1 << bit_pos;
		}
	      bit_pos++;  ibit++;
	      if (bit_pos >= set_word_size || ibit == nbits)
		{
		  if (word != 0 || ! cleared)
		    {
		      rtx datum = GEN_INT (word);
		      rtx to_rtx;
		      /* The assumption here is that it is safe to use
			 XEXP if the set is multi-word, but not if
			 it's single-word.  */
		      if (GET_CODE (target) == MEM)
			{
			  to_rtx = plus_constant (XEXP (target, 0), offset);
			  to_rtx = change_address (target, mode, to_rtx);
			}
		      else if (offset == 0)
			to_rtx = target;
		      else
			abort ();
		      emit_move_insn (to_rtx, datum);
		    }
		  if (ibit == nbits)
		    break;
		  word = 0;
		  bit_pos = 0;
		  offset += set_word_size / BITS_PER_UNIT;
		}
	    }
	}
      else if (!cleared)
	{
	  /* Don't bother clearing storage if the set is all ones.  */
	  if (TREE_CHAIN (elt) != NULL_TREE
	      || (TREE_PURPOSE (elt) == NULL_TREE
		  ? nbits != 1
		  : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
		     || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
		     || (TREE_INT_CST_LOW (TREE_VALUE (elt))
			 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
			 != nbits))))
	    clear_storage (target, expr_size (exp),
			   TYPE_ALIGN (type) / BITS_PER_UNIT);
	}

      for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
	{
	  /* Start of range of element, or NULL.  */
	  tree startbit = TREE_PURPOSE (elt);
	  /* End of range of element, or element value.  */
	  tree endbit = TREE_VALUE (elt);
#ifdef TARGET_MEM_FUNCTIONS
	  HOST_WIDE_INT startb, endb;
#endif
	  rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;

	  bitlength_rtx = expand_expr (bitlength,
				       NULL_RTX, MEM, EXPAND_CONST_ADDRESS);

	  /* Handle non-range tuple element like [ expr ].  */
	  if (startbit == NULL_TREE)
	    {
	      startbit = save_expr (endbit);
	      endbit = startbit;
	    }
	  startbit = convert (sizetype, startbit);
	  endbit = convert (sizetype, endbit);
	  if (! integer_zerop (domain_min))
	    {
	      startbit = size_binop (MINUS_EXPR, startbit, domain_min);
	      endbit = size_binop (MINUS_EXPR, endbit, domain_min);
	    }
	  startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
				      EXPAND_CONST_ADDRESS);
	  endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
				    EXPAND_CONST_ADDRESS);

	  if (REG_P (target))
	    {
	      targetx = assign_stack_temp (GET_MODE (target),
					   GET_MODE_SIZE (GET_MODE (target)),
					   0);
	      emit_move_insn (targetx, target);
	    }
	  else if (GET_CODE (target) == MEM)
	    targetx = target;
	  else
	    abort ();

#ifdef TARGET_MEM_FUNCTIONS
	  /* Optimization:  If startbit and endbit are
	     constants divisible by BITS_PER_UNIT,
	     call memset instead.  */
	  if (TREE_CODE (startbit) == INTEGER_CST
	      && TREE_CODE (endbit) == INTEGER_CST
	      && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
	      && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
	    {
	      emit_library_call (memset_libfunc, 0,
				 VOIDmode, 3,
				 plus_constant (XEXP (targetx, 0),
						startb / BITS_PER_UNIT),
				 Pmode,
				 constm1_rtx, TYPE_MODE (integer_type_node),
				 GEN_INT ((endb - startb) / BITS_PER_UNIT),
				 TYPE_MODE (sizetype));
	    }
	  else
#endif
	    {
	      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
				 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
				 bitlength_rtx, TYPE_MODE (sizetype),
				 startbit_rtx, TYPE_MODE (sizetype),
				 endbit_rtx, TYPE_MODE (sizetype));
	    }
	  if (REG_P (target))
	    emit_move_insn (target, targetx);
	}
    }

  else
    abort ();
}
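
/* Worked illustration of the word-packing loop above (hypothetical
   numbers): for a set over 0..15 with bits 0..3 set and
   set_word_size == 8, the first packed word is 0x0f and the second
   is 0x00 when ! BYTES_BIG_ENDIAN; only nonzero words (or every word,
   when the target was not pre-cleared) are stored with
   emit_move_insn.  */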

/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   ALIGN is the alignment that TARGET is known to have, measured in bytes.
   TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.  */

static rtx
store_field (target, bitsize, bitpos, mode, exp, value_mode,
	     unsignedp, align, total_size, alias_set)
     rtx target;
     int bitsize, bitpos;
     enum machine_mode mode;
     tree exp;
     enum machine_mode value_mode;
     int unsignedp;
     int align;
     int total_size;
     int alias_set;
{
  HOST_WIDE_INT width_mask = 0;

  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  if (bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  */

  if (mode == BLKmode
      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
    {
      rtx object = assign_stack_temp (GET_MODE (target),
				      GET_MODE_SIZE (GET_MODE (target)), 0);
      rtx blk_object = copy_rtx (object);

      MEM_SET_IN_STRUCT_P (object, 1);
      MEM_SET_IN_STRUCT_P (blk_object, 1);
      PUT_MODE (blk_object, BLKmode);

      if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
	emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
		   align, total_size, alias_set);

      /* Even though we aren't returning target, we need to
	 give it the updated value.  */
      emit_move_insn (target, object);

      return blk_object;
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode])
      || GET_CODE (target) == REG
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
	 store it as a bit field.  */
      || (SLOW_UNALIGNED_ACCESS
	  && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
      || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
    {
      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If BITSIZE is narrower than the size of the type of EXP
	 we will be narrowing TEMP.  Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is
	 a big-endian machine, we want the upper BITSIZE bits.  */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	  && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
				       - bitsize),
			     temp, 1);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
	 MODE.  */
      if (mode != VOIDmode && mode != BLKmode
	  && mode != TYPE_MODE (TREE_TYPE (exp)))
	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the modes of TARGET and TEMP are both BLKmode, both
	 must be in memory and BITPOS must be aligned on a byte
	 boundary.  If so, we simply do a block copy.  */
      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
	{
	  if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
	      || bitpos % BITS_PER_UNIT != 0)
	    abort ();

	  target = change_address (target, VOIDmode,
				   plus_constant (XEXP (target, 0),
						  bitpos / BITS_PER_UNIT));

	  emit_block_move (target, temp,
			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
				    / BITS_PER_UNIT),
			   1);

	  return value_mode == VOIDmode ? const0_rtx : target;
	}

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
      if (value_mode != VOIDmode)
	{
	  /* The caller wants an rtx for the value.  */
	  /* If possible, avoid refetching from the bitfield itself.  */
	  if (width_mask != 0
	      && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
	    {
	      tree count;
	      enum machine_mode tmode;

	      if (unsignedp)
		return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
	      tmode = GET_MODE (temp);
	      if (tmode == VOIDmode)
		tmode = value_mode;
	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
	    }
	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
				    NULL_RTX, value_mode, 0, align,
				    total_size);
	}
      return const0_rtx;
    }
  else
    {
      rtx addr = XEXP (target, 0);
      rtx to_rtx;

      /* If a value is wanted, it must be the lhs;
	 so make the address stable for multiple use.  */

      if (value_mode != VOIDmode && GET_CODE (addr) != REG
	  && ! CONSTANT_ADDRESS_P (addr)
	  /* A frame-pointer reference is already stable.  */
	  && ! (GET_CODE (addr) == PLUS
		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
	addr = copy_to_reg (addr);

      /* Now build a reference to just the desired component.  */

      to_rtx = copy_rtx (change_address (target, mode,
					 plus_constant (addr,
							(bitpos
							 / BITS_PER_UNIT))));
      MEM_SET_IN_STRUCT_P (to_rtx, 1);
      MEM_ALIAS_SET (to_rtx) = alias_set;

      return store_expr (exp, to_rtx, value_mode != VOIDmode);
    }
}
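
/* Worked example for the refetch-avoidance path above (illustrative
   numbers, not from the original comments): storing a signed
   bit-field with bitsize == 5 on a host where
   HOST_BITS_PER_WIDE_INT == 32 gives width_mask == 0x1f.  For an
   unsigned field the stored value is masked with 0x1f; for a signed
   field in an SImode TEMP it is shifted left 27 bits and back right
   27 bits, sign-extending bit 4 without re-reading the field from
   memory.  */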
4714\f
4715/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4716 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
742920c7 4717 ARRAY_REFs and find the ultimate containing object, which we return.
bbf6f052
RK
4718
4719 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4720 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
4721 If the position of the field is variable, we store a tree
4722 giving the variable offset (in units) in *POFFSET.
4723 This offset is in addition to the bit position.
4724 If the position is not variable, we store 0 in *POFFSET.
839c4796
RK
4725 We set *PALIGNMENT to the alignment in bytes of the address that will be
4726 computed. This is the alignment of the thing we return if *POFFSET
4727 is zero, but can be more less strictly aligned if *POFFSET is nonzero.
bbf6f052
RK
4728
4729 If any of the extraction expressions is volatile,
4730 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4731
4732 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4733 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
4734 is redundant.
4735
4736 If the field describes a variable-sized object, *PMODE is set to
4737 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
839c4796 4738 this case, but the address of the object can be found. */
bbf6f052
RK
4739
4740tree
4969d05d 4741get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
839c4796 4742 punsignedp, pvolatilep, palignment)
bbf6f052
RK
4743 tree exp;
4744 int *pbitsize;
4745 int *pbitpos;
7bb0943f 4746 tree *poffset;
bbf6f052
RK
4747 enum machine_mode *pmode;
4748 int *punsignedp;
4749 int *pvolatilep;
839c4796 4750 int *palignment;
bbf6f052 4751{
b50d17a1 4752 tree orig_exp = exp;
bbf6f052
RK
4753 tree size_tree = 0;
4754 enum machine_mode mode = VOIDmode;
742920c7 4755 tree offset = integer_zero_node;
c84e2712 4756 unsigned int alignment = BIGGEST_ALIGNMENT;
bbf6f052
RK
4757
4758 if (TREE_CODE (exp) == COMPONENT_REF)
4759 {
4760 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4761 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4762 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4763 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4764 }
4765 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4766 {
4767 size_tree = TREE_OPERAND (exp, 1);
4768 *punsignedp = TREE_UNSIGNED (exp);
4769 }
4770 else
4771 {
4772 mode = TYPE_MODE (TREE_TYPE (exp));
ab87f8c8
JL
4773 if (mode == BLKmode)
4774 size_tree = TYPE_SIZE (TREE_TYPE (exp));
4775
bbf6f052
RK
4776 *pbitsize = GET_MODE_BITSIZE (mode);
4777 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4778 }
4779
4780 if (size_tree)
4781 {
4782 if (TREE_CODE (size_tree) != INTEGER_CST)
e7c33f54
RK
4783 mode = BLKmode, *pbitsize = -1;
4784 else
4785 *pbitsize = TREE_INT_CST_LOW (size_tree);
bbf6f052
RK
4786 }
4787
4788 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4789 and find the ultimate containing object. */
4790
4791 *pbitpos = 0;
4792
4793 while (1)
4794 {
7bb0943f 4795 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
bbf6f052 4796 {
7bb0943f
RS
4797 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4798 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4799 : TREE_OPERAND (exp, 2));
e6d8c385 4800 tree constant = integer_zero_node, var = pos;
bbf6f052 4801
e7f3c83f
RK
4802 /* If this field hasn't been filled in yet, don't go
4803 past it. This should only happen when folding expressions
4804 made during type construction. */
4805 if (pos == 0)
4806 break;
4807
e6d8c385
RK
4808 /* Assume here that the offset is a multiple of a unit.
4809 If not, there should be an explicitly added constant. */
4810 if (TREE_CODE (pos) == PLUS_EXPR
4811 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4812 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
7bb0943f 4813 else if (TREE_CODE (pos) == INTEGER_CST)
e6d8c385
RK
4814 constant = pos, var = integer_zero_node;
4815
4816 *pbitpos += TREE_INT_CST_LOW (constant);
8d8c9ba9
RK
4817 offset = size_binop (PLUS_EXPR, offset,
4818 size_binop (EXACT_DIV_EXPR, var,
4819 size_int (BITS_PER_UNIT)));
bbf6f052 4820 }
bbf6f052 4821
742920c7 4822 else if (TREE_CODE (exp) == ARRAY_REF)
bbf6f052 4823 {
742920c7
RK
4824 /* This code is based on the code in case ARRAY_REF in expand_expr
4825 below. We assume here that the size of an array element is
	     always an integral multiple of BITS_PER_UNIT.  */

	  tree index = TREE_OPERAND (exp, 1);
	  tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
	  tree low_bound
	    = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	  tree index_type = TREE_TYPE (index);
	  tree xindex;

	  if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
	    {
	      index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
			       index);
	      index_type = TREE_TYPE (index);
	    }

	  /* Optimize the special case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)

	     But sizetype isn't quite right either (especially if
	     the low bound is negative).  FIXME */

	  if (! integer_zerop (low_bound))
	    index = fold (build (MINUS_EXPR, index_type, index,
				 convert (sizetype, low_bound)));

	  if (TREE_CODE (index) == INTEGER_CST)
	    {
	      index = convert (sbitsizetype, index);
	      index_type = TREE_TYPE (index);
	    }

	  xindex = fold (build (MULT_EXPR, sbitsizetype, index,
				convert (sbitsizetype,
					 TYPE_SIZE (TREE_TYPE (exp)))));

	  if (TREE_CODE (xindex) == INTEGER_CST
	      && TREE_INT_CST_HIGH (xindex) == 0)
	    *pbitpos += TREE_INT_CST_LOW (xindex);
	  else
	    {
	      /* Either the bit offset calculated above is not constant, or
		 it overflowed.  In either case, redo the multiplication
		 against the size in units.  This is especially important
		 in the non-constant case to avoid a division at runtime.  */
	      xindex = fold (build (MULT_EXPR, ssizetype, index,
				    convert (ssizetype,
					     TYPE_SIZE_UNIT (TREE_TYPE (exp)))));

	      if (contains_placeholder_p (xindex))
		xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);

	      offset = size_binop (PLUS_EXPR, offset, xindex);
	    }
	}
      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
	       && ! ((TREE_CODE (exp) == NOP_EXPR
		      || TREE_CODE (exp) == CONVERT_EXPR)
		     && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
			   && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
			       != UNION_TYPE))
		     && (TYPE_MODE (TREE_TYPE (exp))
			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
	break;

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      /* If the offset is non-constant already, then we can't assume any
	 alignment more than the alignment here.  */
      if (! integer_zerop (offset))
	alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));

      exp = TREE_OPERAND (exp, 0);
    }

  if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
    alignment = MIN (alignment, DECL_ALIGN (exp));
  else if (TREE_TYPE (exp) != 0)
    alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));

  if (integer_zerop (offset))
    offset = 0;

  if (offset != 0 && contains_placeholder_p (offset))
    offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);

  *pmode = mode;
  *poffset = offset;
  *palignment = alignment / BITS_PER_UNIT;
  return exp;
}
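
/* Illustrative example of the ARRAY_REF handling above: for `a[i]' where
   each element of `a' is a 4-byte int, TYPE_SIZE is 32 bits, so when I is
   constant and `i * 32' fits in a HOST_WIDE_INT the product is simply
   added to *PBITPOS; otherwise the multiplication is redone against
   TYPE_SIZE_UNIT (4 bytes) and `i * 4' is accumulated into OFFSET, so no
   runtime division by BITS_PER_UNIT is ever needed.  */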

/* Subroutine of expand_expr: compute memory_usage from modifier.  */

static enum memory_use_mode
get_memory_usage_from_modifier (modifier)
     enum expand_modifier modifier;
{
  switch (modifier)
    {
    case EXPAND_NORMAL:
    case EXPAND_SUM:
      return MEMORY_USE_RO;
      break;
    case EXPAND_MEMORY_USE_WO:
      return MEMORY_USE_WO;
      break;
    case EXPAND_MEMORY_USE_RW:
      return MEMORY_USE_RW;
      break;
    case EXPAND_MEMORY_USE_DONT:
      /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
	 MEMORY_USE_DONT, because they are modifiers to a call of
	 expand_expr in the ADDR_EXPR case of expand_expr.  */
    case EXPAND_CONST_ADDRESS:
    case EXPAND_INITIALIZER:
      return MEMORY_USE_DONT;
    case EXPAND_MEMORY_USE_BAD:
    default:
      abort ();
    }
}
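
/* Roughly: when -fcheck-memory-usage is in effect, expanding the
   destination of an assignment passes EXPAND_MEMORY_USE_WO down to
   expand_expr, which this routine maps to MEMORY_USE_WO so the store is
   instrumented as a write; a plain rvalue expansion (EXPAND_NORMAL)
   maps to MEMORY_USE_RO and is checked as a read.  */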
\f
/* Given an rtx VALUE that may contain additions and multiplications,
   return an equivalent value that just refers to a register or memory.
   This is done by generating instructions to perform the arithmetic
   and returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (value, target)
     rtx value, target;
{
  register optab binoptab = 0;
  /* Use a temporary to force order of execution of calls to
     `force_operand'.  */
  rtx tmp;
  register rtx op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);

  /* Check for a PIC address load.  */
  if (flag_pic
      && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
	  || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (GET_CODE (value) == PLUS)
    binoptab = add_optab;
  else if (GET_CODE (value) == MINUS)
    binoptab = sub_optab;
  else if (GET_CODE (value) == MULT)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
	  && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_mult (GET_MODE (value), tmp,
			  force_operand (op2, NULL_RTX),
			  target, 0);
    }

  if (binoptab)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
	  && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
	{
	  binoptab = add_optab;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_binop (GET_MODE (value), binoptab,
				   XEXP (XEXP (value, 0), 0), op2,
				   subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_binop (GET_MODE (value), binoptab, temp,
			       force_operand (XEXP (XEXP (value, 0), 1), 0),
			       target, 0, OPTAB_LIB_WIDEN);
	}

      tmp = force_operand (XEXP (value, 0), subtarget);
      /* We give UNSIGNEDP = 0 to expand_binop
	 because the only operations we are expanding here are signed ones.  */
      return expand_binop (GET_MODE (value), binoptab, tmp,
			   force_operand (op2, NULL_RTX),
			   target, 0, OPTAB_LIB_WIDEN);
    }
  return value;
}
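
/* Illustrative use (a sketch; R100 is a hypothetical SImode pseudo):
   given VALUE = (plus:SI (reg:SI 100) (const_int 4)),

     rtx sum = force_operand (gen_rtx_PLUS (SImode, r100, GEN_INT (4)),
			      NULL_RTX);

   emits an add insn and returns a pseudo holding r100 + 4, so the
   caller can use SUM anywhere a plain register or memory operand is
   required.  */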
\f
/* Subroutine of expand_expr:
   save the non-copied parts (LIST) of an expr (LHS), and return a list
   which can restore these values to their previous values,
   should something modify their storage.  */

static tree
save_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
	tree part = TREE_VALUE (tail);
	tree part_type = TREE_TYPE (part);
	tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
	rtx target = assign_temp (part_type, 0, 1, 1);
	if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
	  target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
	parts = tree_cons (to_be_saved,
			   build (RTL_EXPR, part_type, NULL_TREE,
				  (tree) target),
			   parts);
	store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
      }
  return parts;
}

/* Subroutine of expand_expr:
   record the non-copied parts (LIST) of an expr (LHS), and return a list
   which specifies the initial values of these parts.  */

static tree
init_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
	tree part = TREE_VALUE (tail);
	tree part_type = TREE_TYPE (part);
	tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
	parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
      }
  return parts;
}
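
/* In both routines LIST comes from the TYPE_NONCOPIED_PARTS of LHS's
   type: a TREE_LIST whose TREE_VALUEs are FIELD_DECLs (or nested
   lists).  As an illustration, for a two-field list
   save_noncopied_parts yields a list of (COMPONENT_REF . RTL_EXPR)
   pairs, one per field, where each RTL_EXPR holds a temporary that has
   been loaded with the field's old value so it can be restored after
   the containing assignment.  */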

/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

static int
safe_from_p (x, exp, top_p)
     rtx x;
     tree exp;
     int top_p;
{
  rtx exp_rtl = 0;
  int i, nops;
  static int save_expr_count;
  static int save_expr_size = 0;
  static tree *save_expr_rewritten;
  static tree save_expr_trees[256];

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
		 != INTEGER_CST)
	  && GET_MODE (x) == BLKmode))
    return 1;

  if (top_p && save_expr_size == 0)
    {
      int rtn;

      save_expr_count = 0;
      save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
      save_expr_rewritten = &save_expr_trees[0];

      rtn = safe_from_p (x, exp, 1);

      for (i = 0; i < save_expr_count; ++i)
	{
	  if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
	    abort ();
	  TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
	}

      save_expr_size = 0;

      return rtn;
    }

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* If X is a location in the outgoing argument area, it is always safe.  */
  if (GET_CODE (x) == MEM
      && (XEXP (x, 0) == virtual_outgoing_args_rtx
	  || (GET_CODE (XEXP (x, 0)) == PLUS
	      && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
    return 1;

  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      exp_rtl = DECL_RTL (exp);
      break;

    case 'c':
      return 1;

    case 'x':
      if (TREE_CODE (exp) == TREE_LIST)
	return ((TREE_VALUE (exp) == 0
		 || safe_from_p (x, TREE_VALUE (exp), 0))
		&& (TREE_CHAIN (exp) == 0
		    || safe_from_p (x, TREE_CHAIN (exp), 0)));
      else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;	/* An already-visited SAVE_EXPR?  */
      else
	return 0;

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case '2':
    case '<':
      return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
	      && safe_from_p (x, TREE_OPERAND (exp, 1), 0));

    case 'e':
    case 'r':
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  return (staticp (TREE_OPERAND (exp, 0))
		  || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
		  || TREE_STATIC (exp));

	case INDIRECT_REF:
	  if (GET_CODE (x) == MEM)
	    return 0;
	  break;

	case CALL_EXPR:
	  exp_rtl = CALL_EXPR_RTL (exp);
	  if (exp_rtl == 0)
	    {
	      /* Assume that the call will clobber all hard registers and
		 all of memory.  */
	      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
		  || GET_CODE (x) == MEM)
		return 0;
	    }

	  break;

	case RTL_EXPR:
	  /* If a sequence exists, we would have to scan every instruction
	     in the sequence to see if it was safe.  This is probably not
	     worthwhile.  */
	  if (RTL_EXPR_SEQUENCE (exp))
	    return 0;

	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case WITH_CLEANUP_EXPR:
	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case CLEANUP_POINT_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	case SAVE_EXPR:
	  exp_rtl = SAVE_EXPR_RTL (exp);
	  if (exp_rtl)
	    break;

	  /* This SAVE_EXPR might appear many times in the top-level
	     safe_from_p() expression, and if it has a complex
	     subexpression, examining it multiple times could result
	     in a combinatorial explosion.  E.g. on an Alpha
	     running at least 200MHz, a Fortran test case compiled with
	     optimization took about 28 minutes to compile -- even though
	     it was only a few lines long, and the complicated line causing
	     so much time to be spent in the earlier version of safe_from_p()
	     had only 293 or so unique nodes.

	     So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
	     where it is so we can turn it back in the top-level safe_from_p()
	     when we're done.  */

	  /* For now, don't bother re-sizing the array.  */
	  if (save_expr_count >= save_expr_size)
	    return 0;
	  save_expr_rewritten[save_expr_count++] = exp;

	  nops = tree_code_length[(int) SAVE_EXPR];
	  for (i = 0; i < nops; i++)
	    {
	      tree operand = TREE_OPERAND (exp, i);
	      if (operand == NULL_TREE)
		continue;
	      TREE_SET_CODE (exp, ERROR_MARK);
	      if (!safe_from_p (x, operand, 0))
		return 0;
	      TREE_SET_CODE (exp, SAVE_EXPR);
	    }
	  TREE_SET_CODE (exp, ERROR_MARK);
	  return 1;

	case BIND_EXPR:
	  /* The only operand we look at is operand 1.  The rest aren't
	     part of the expression.  */
	  return safe_from_p (x, TREE_OPERAND (exp, 1), 0);

	case METHOD_CALL_EXPR:
	  /* This takes an rtx argument, but shouldn't appear here.  */
	  abort ();

	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = tree_code_length[(int) TREE_CODE (exp)];
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (GET_CODE (exp_rtl) == REG
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and EXP is not readonly.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
		    && ! TREE_READONLY (exp)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
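
/* Typical use (a sketch; RHS is some tree still to be expanded): before
   reusing TARGET as scratch space we must know the source cannot still
   reference it,

     if (target != 0 && safe_from_p (target, rhs, 1))
       op0 = expand_expr (rhs, target, VOIDmode, 0);

   and since this routine errs on the side of returning zero, the only
   cost of a missed case is an extra temporary.  */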

/* Subroutine of expand_expr: return nonzero iff EXP is an
   expression whose type is statically determinable.  */

static int
fixed_type_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == PARM_DECL
      || TREE_CODE (exp) == VAR_DECL
      || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
      || TREE_CODE (exp) == COMPONENT_REF
      || TREE_CODE (exp) == ARRAY_REF)
    return 1;
  return 0;
}

/* Subroutine of expand_expr: return rtx if EXP is a
   variable or parameter; else return 0.  */

static rtx
var_rtx (exp)
     tree exp;
{
  STRIP_NOPS (exp);
  switch (TREE_CODE (exp))
    {
    case PARM_DECL:
    case VAR_DECL:
      return DECL_RTL (exp);
    default:
      return 0;
    }
}

#ifdef MAX_INTEGER_COMPUTATION_MODE
void
check_max_integer_computation_mode (exp)
     tree exp;
{
  enum tree_code code = TREE_CODE (exp);
  enum machine_mode mode;

  /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE.  */
  if (code == NOP_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
    return;

  /* First check the type of the overall operation.  We need only look at
     unary, binary and relational operations.  */
  if (TREE_CODE_CLASS (code) == '1'
      || TREE_CODE_CLASS (code) == '2'
      || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");
    }

  /* Check operand of a unary op.  */
  if (TREE_CODE_CLASS (code) == '1')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");
    }

  /* Check operands of a binary/comparison op.  */
  if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");

      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");
    }
}
#endif
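
/* As an illustration: a port might define MAX_INTEGER_COMPUTATION_MODE
   as SImode when it has no DImode arithmetic patterns or libcalls; the
   checks above would then reject a source-level `long long' addition
   with "unsupported wide integer operation" rather than trying to
   generate insns that cannot be matched.  */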

\f
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.  */
rtx
expand_expr (exp, target, tmode, modifier)
     register tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
  /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
     This is static so it will be accessible to our recursive callees.  */
  static tree placeholder_list = 0;
  register rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp = TREE_UNSIGNED (type);
  register enum machine_mode mode = TYPE_MODE (type);
  register enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
  rtx original_target = target;
  int ignore = (target == const0_rtx
		|| ((code == NON_LVALUE_EXPR || code == NOP_EXPR
		     || code == CONVERT_EXPR || code == REFERENCE_EXPR
		     || code == COND_EXPR)
		    && TREE_CODE (type) == VOID_TYPE));
  tree context;
  /* Used by check-memory-usage to make modifier read only.  */
  enum expand_modifier ro_modifier;

  /* Make a read-only version of the modifier.  */
  if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
      || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
    ro_modifier = modifier;
  else
    ro_modifier = EXPAND_NORMAL;

  /* Don't use hard regs as subtargets, because the combiner
     can only handle pseudo regs.  */
  if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
    subtarget = 0;
  /* Avoid subtargets inside loops,
     since they hide some invariant expressions.  */
  if (preserve_subexpressions_p ())
    subtarget = 0;

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
	  if (GET_CODE (temp) == MEM)
	    temp = copy_to_reg (temp);
	  return const0_rtx;
	}

      if (TREE_CODE_CLASS (code) == '1')
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
			    VOIDmode, ro_modifier);
      else if (TREE_CODE_CLASS (code) == '2'
	       || TREE_CODE_CLASS (code) == '<')
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
	  return const0_rtx;
	}
      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	/* If the second operand has no side effects, just evaluate
	   the first.  */
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
			    VOIDmode, ro_modifier);

      target = 0;
    }
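
  /* For instance, expanding `(void) (a + b);' reaches the '2' case
     above: both operands are expanded with const0_rtx as the target
     purely for their side effects, no add insn is emitted, and
     const0_rtx is returned.  */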

#ifdef MAX_INTEGER_COMPUTATION_MODE
  if (target
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != VAR_DECL)
    {
      enum machine_mode mode = GET_MODE (target);

      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");
    }

  if (TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != VAR_DECL
      && GET_MODE_CLASS (tmode) == MODE_INT
      && tmode > MAX_INTEGER_COMPUTATION_MODE)
    fatal ("unsupported wide integer operation");

  check_max_integer_computation_mode (exp);
#endif

  /* If we will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
    target = subtarget;

  switch (code)
    {
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);
	/* Handle using a label in a containing function.  */
	if (function != current_function_decl
	    && function != inline_function_decl && function != 0)
	  {
	    struct function *p = find_function_data (function);
	    /* Allocate in the memory associated with the function
	       that the label is in.  */
	    push_obstacks (p->function_obstack,
			   p->function_maybepermanent_obstack);

	    p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
						  label_rtx (exp),
						  p->forced_labels);
	    pop_obstacks ();
	  }
	else
	  {
	    if (modifier == EXPAND_INITIALIZER)
	      forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
						 label_rtx (exp),
						 forced_labels);
	  }
	temp = gen_rtx_MEM (FUNCTION_MODE,
			    gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
	if (function != current_function_decl
	    && function != inline_function_decl && function != 0)
	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
	return temp;
      }
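
      /* This case is what GNU C's `&&label' reaches: the address of a
	 label used as a value, e.g. `static void *p = &&out;'.  The
	 result is a (mem:FUNCTION_MODE (label_ref ...)), and when the use
	 is in an initializer the label is recorded in forced_labels so it
	 can never be deleted as dead.  */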

    case PARM_DECL:
      if (DECL_RTL (exp) == 0)
	{
	  error_with_decl (exp, "prior parameter's size depends on `%s'");
	  return CONST0_RTX (mode);
	}

      /* ... fall through ...  */

    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
	 but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
	{
	  push_obstacks_nochange ();
	  end_temporary_allocation ();
	  layout_decl (exp, 0);
	  PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
	  pop_obstacks ();
	}

      /* Although static-storage variables start off initialized, according to
	 ANSI C, a memcpy could overwrite them with uninitialized values.  So
	 we check them too.  This also lets us check for read-only variables
	 accessed via a non-const declaration, in case it won't be detected
	 any other way (e.g., in an embedded system or OS kernel without
	 memory protection).

	 Aggregates are not checked here; they're handled elsewhere.  */
      if (current_function_check_memory_usage && code == VAR_DECL
	  && GET_CODE (DECL_RTL (exp)) == MEM
	  && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
	{
	  enum memory_use_mode memory_usage;
	  memory_usage = get_memory_usage_from_modifier (modifier);

	  if (memory_usage != MEMORY_USE_DONT)
	    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
			       XEXP (DECL_RTL (exp), 0), ptr_mode,
			       GEN_INT (int_size_in_bytes (type)),
			       TYPE_MODE (sizetype),
			       GEN_INT (memory_usage),
			       TYPE_MODE (integer_type_node));
	}

      /* ... fall through ...  */

    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
	abort ();

      /* Ensure the variable is marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      if (! TREE_USED (exp))
	{
	  assemble_external (exp);
	  TREE_USED (exp) = 1;
	}

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */

      if (context != 0 && context != current_function_decl
	  && context != inline_function_decl
	  /* If var is static, we don't need a static chain to access it.  */
	  && ! (GET_CODE (DECL_RTL (exp)) == MEM
		&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
	{
	  rtx addr;

	  /* Mark as non-local and addressable.  */
	  DECL_NONLOCAL (exp) = 1;
	  if (DECL_NO_STATIC_CHAIN (current_function_decl))
	    abort ();
	  mark_addressable (exp);
	  if (GET_CODE (DECL_RTL (exp)) != MEM)
	    abort ();
	  addr = XEXP (DECL_RTL (exp), 0);
	  if (GET_CODE (addr) == MEM)
	    addr = gen_rtx_MEM (Pmode,
				fix_lexical_addr (XEXP (addr, 0), exp));
	  else
	    addr = fix_lexical_addr (addr, exp);
	  temp = change_address (DECL_RTL (exp), mode, addr);
	}

      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 See expand_decl.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
	       && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
	temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
			       XEXP (DECL_RTL (exp), 0));

      /* If DECL_RTL is memory, we are in the normal case and either
	 the address is not valid or it is not a register and -fforce-addr
	 is specified, get the address into a register.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
	       && modifier != EXPAND_CONST_ADDRESS
	       && modifier != EXPAND_SUM
	       && modifier != EXPAND_INITIALIZER
	       && (! memory_address_p (DECL_MODE (exp),
				       XEXP (DECL_RTL (exp), 0))
		   || (flag_force_addr
		       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
	temp = change_address (DECL_RTL (exp), VOIDmode,
			       copy_rtx (XEXP (DECL_RTL (exp), 0)));

      /* If we got something, return it.  But first, set the alignment
	 if the address is a register.  */
      if (temp != 0)
	{
	  if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
	    mark_reg_pointer (XEXP (temp, 0),
			      DECL_ALIGN (exp) / BITS_PER_UNIT);

	  return temp;
	}

      /* If the mode of DECL_RTL does not match that of the decl, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (DECL_RTL (exp)) == REG
	  && GET_MODE (DECL_RTL (exp)) != mode)
	{
	  /* Get the signedness used for this variable.  Ensure we get the
	     same mode we got when the variable was declared.  */
	  if (GET_MODE (DECL_RTL (exp))
	      != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
	    abort ();

	  temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	  return temp;
	}

      return DECL_RTL (exp);
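
      /* For example, on a target whose PROMOTE_MODE widens QImode locals
	 to SImode, a `char' variable lives in an SImode pseudo; the code
	 above hands back (subreg:QI (reg:SI N) 0) with
	 SUBREG_PROMOTED_VAR_P set, so later code knows the upper bits
	 already hold a valid extension and can skip redundant extends.  */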

    case INTEGER_CST:
      return immed_double_const (TREE_INT_CST_LOW (exp),
				 TREE_INT_CST_HIGH (exp),
				 mode);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
			  EXPAND_MEMORY_USE_BAD);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
	 which will be turned into memory by reload if necessary.

	 We used to force a register so that loop.c could see it.  But
	 this does not allow gen_* patterns to perform optimizations with
	 the constants.  It also produces two insns in cases like "x = 1.0;".
	 On most machines, floating-point constants are not permitted in
	 many insns, so we'd end up copying it to a register in any case.

	 Now, we do the copying in expand_binop, if appropriate.  */
      return immed_real_const (exp);

    case COMPLEX_CST:
    case STRING_CST:
      if (! TREE_CST_RTL (exp))
	output_constant_def (exp);

      /* TREE_CST_RTL probably contains a constant address.
	 On RISC machines where a constant address isn't valid,
	 make some insns to get that address into a register.  */
      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
	  && modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
	      || (flag_force_addr
		  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
	return change_address (TREE_CST_RTL (exp), VOIDmode,
			       copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
      return TREE_CST_RTL (exp);

    case EXPR_WITH_FILE_LOCATION:
      {
	rtx to_return;
	char *saved_input_filename = input_filename;
	int saved_lineno = lineno;
	input_filename = EXPR_WFL_FILENAME (exp);
	lineno = EXPR_WFL_LINENO (exp);
	if (EXPR_WFL_EMIT_LINE_NOTE (exp))
	  emit_line_note (input_filename, lineno);
	/* Possibly avoid switching back and forth here.  */
	to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
	input_filename = saved_input_filename;
	lineno = saved_lineno;
	return to_return;
      }

    case SAVE_EXPR:
      context = decl_function_context (exp);

      /* If this SAVE_EXPR was at global context, assume we are an
	 initialization function and move it into our context.  */
      if (context == 0)
	SAVE_EXPR_CONTEXT (exp) = current_function_decl;

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */
      if (context == current_function_decl || context == inline_function_decl)
	context = 0;

      /* If this is non-local, handle it.  */
      if (context)
	{
	  /* The following call just exists to abort if the context is
	     not of a containing function.  */
	  find_function_data (context);

	  temp = SAVE_EXPR_RTL (exp);
	  if (temp && GET_CODE (temp) == REG)
	    {
	      put_var_into_stack (exp);
	      temp = SAVE_EXPR_RTL (exp);
	    }
	  if (temp == 0 || GET_CODE (temp) != MEM)
	    abort ();
	  return change_address (temp, mode,
				 fix_lexical_addr (XEXP (temp, 0), exp));
	}
      if (SAVE_EXPR_RTL (exp) == 0)
	{
	  if (mode == VOIDmode)
	    temp = const0_rtx;
	  else
	    temp = assign_temp (type, 3, 0, 0);

	  SAVE_EXPR_RTL (exp) = temp;
	  if (!optimize && GET_CODE (temp) == REG)
	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
						save_expr_regs);

	  /* If the mode of TEMP does not match that of the expression, it
	     must be a promoted value.  We pass store_expr a SUBREG of the
	     wanted mode but mark it so that we know that it was already
	     extended.  Note that `unsignedp' was modified above in
	     this case.  */

	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
	    {
	      temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
	      SUBREG_PROMOTED_VAR_P (temp) = 1;
	      SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	    }

	  if (temp == const0_rtx)
	    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			 EXPAND_MEMORY_USE_BAD);
	  else
	    store_expr (TREE_OPERAND (exp, 0), temp, 0);

	  TREE_USED (exp) = 1;
	}

      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
	  && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
	{
	  /* Compute the signedness and make the proper SUBREG.  */
	  promote_mode (type, mode, &unsignedp, 0);
	  temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	  return temp;
	}

      return SAVE_EXPR_RTL (exp);
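
      /* A SAVE_EXPR guarantees its operand is evaluated exactly once.
	 E.g. a front end may wrap `f ()' in one when the value is needed
	 twice: the first expansion stores the call's result into the
	 temporary chosen above, and every later expansion of the same
	 node just returns SAVE_EXPR_RTL without calling `f' again.  */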

    case UNSAVE_EXPR:
      {
	rtx temp;
	temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
	return temp;
      }

    case PLACEHOLDER_EXPR:
      {
	tree placeholder_expr;

	/* If there is an object on the head of the placeholder list,
	   see if some object in it is of type TYPE or a pointer to it.  For
	   further information, see tree.def.  */
	for (placeholder_expr = placeholder_list;
	     placeholder_expr != 0;
	     placeholder_expr = TREE_CHAIN (placeholder_expr))
	  {
	    tree need_type = TYPE_MAIN_VARIANT (type);
	    tree object = 0;
	    tree old_list = placeholder_list;
	    tree elt;

	    /* Find the outermost reference that is of the type we want.
	       If none, see if any object has a type that is a pointer to
	       the type we want.  */
	    for (elt = TREE_PURPOSE (placeholder_expr);
		 elt != 0 && object == 0;
		 elt
		 = ((TREE_CODE (elt) == COMPOUND_EXPR
		     || TREE_CODE (elt) == COND_EXPR)
		    ? TREE_OPERAND (elt, 1)
		    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
		    ? TREE_OPERAND (elt, 0) : 0))
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
		object = elt;

	    for (elt = TREE_PURPOSE (placeholder_expr);
		 elt != 0 && object == 0;
		 elt
		 = ((TREE_CODE (elt) == COMPOUND_EXPR
		     || TREE_CODE (elt) == COND_EXPR)
		    ? TREE_OPERAND (elt, 1)
		    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
		    ? TREE_OPERAND (elt, 0) : 0))
	      if (POINTER_TYPE_P (TREE_TYPE (elt))
		  && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		      == need_type))
		object = build1 (INDIRECT_REF, need_type, elt);

	    if (object != 0)
	      {
		/* Expand this object skipping the list entries before
		   it was found in case it is also a PLACEHOLDER_EXPR.
		   In that case, we want to translate it using subsequent
		   entries.  */
		placeholder_list = TREE_CHAIN (placeholder_expr);
		temp = expand_expr (object, original_target, tmode,
				    ro_modifier);
		placeholder_list = old_list;
		return temp;
	      }
	  }
      }

      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
      abort ();

    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, expand our first operand,
	 and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
				    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target,
			    tmode, ro_modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;
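
      /* These two codes cooperate to expand self-referential types, e.g.
	 an Ada record whose size depends on one of its own discriminants:
	 the size tree contains a PLACEHOLDER_EXPR, and expanding
	 (WITH_RECORD_EXPR size rec) pushes REC onto placeholder_list so
	 that the PLACEHOLDER_EXPR case above can resolve it to a
	 reference into REC.  */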

    case GOTO_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
	expand_goto (TREE_OPERAND (exp, 0));
      else
	expand_computed_goto (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case EXIT_EXPR:
      expand_exit_loop_if_false (NULL_PTR,
				 invert_truthvalue (TREE_OPERAND (exp, 0)));
      return const0_rtx;

    case LABELED_BLOCK_EXPR:
      if (LABELED_BLOCK_BODY (exp))
	expand_expr_stmt (LABELED_BLOCK_BODY (exp));
      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
      return const0_rtx;

    case EXIT_BLOCK_EXPR:
      if (EXIT_BLOCK_RETURN (exp))
	sorry ("returned value in block_exit_expr");
      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
      return const0_rtx;

    case LOOP_EXPR:
      push_temp_slots ();
      expand_start_loop (1);
      expand_expr_stmt (TREE_OPERAND (exp, 0));
      expand_end_loop ();
      pop_temp_slots ();

      return const0_rtx;

    case BIND_EXPR:
      {
	tree vars = TREE_OPERAND (exp, 0);
	int vars_need_expansion = 0;

	/* Need to open a binding contour here because
	   if there are any cleanups they must be contained here.  */
	expand_start_bindings (0);

	/* Mark the corresponding BLOCK for output in its proper place.  */
	if (TREE_OPERAND (exp, 2) != 0
	    && ! TREE_USED (TREE_OPERAND (exp, 2)))
	  insert_block (TREE_OPERAND (exp, 2));

	/* If VARS have not yet been expanded, expand them now.  */
	while (vars)
	  {
	    if (DECL_RTL (vars) == 0)
	      {
		vars_need_expansion = 1;
		expand_decl (vars);
	      }
	    expand_decl_init (vars);
	    vars = TREE_CHAIN (vars);
	  }

	temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);

	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

	return temp;
      }

    case RTL_EXPR:
      if (RTL_EXPR_SEQUENCE (exp))
	{
	  if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
	    abort ();
	  emit_insns (RTL_EXPR_SEQUENCE (exp));
	  RTL_EXPR_SEQUENCE (exp) = const0_rtx;
	}
      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
      free_temps_for_rtl_expr (exp);
      return RTL_EXPR_RTL (exp);

    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  tree elt;
	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
			 EXPAND_MEMORY_USE_BAD);
	  return const0_rtx;
	}

      /* All elts simple constants => refer to a constant in memory.  But
	 if this is a non-BLKmode mode, let it store a field at a time
	 since that should make a CONST_INT or CONST_DOUBLE when we
	 fold.  Likewise, if we have a target we can use, it is best to
	 store directly into the target unless the type is large enough
	 that memcpy will be used.  If we are making an initializer and
	 all operands are constant, put it in memory as well.  */
      else if ((TREE_STATIC (exp)
		&& ((mode == BLKmode
		     && ! (target != 0 && safe_from_p (target, exp, 1)))
		    || TREE_ADDRESSABLE (exp)
		    || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
			&& (!MOVE_BY_PIECES_P
			    (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
			     TYPE_ALIGN (type) / BITS_PER_UNIT))
			&& ! mostly_zeros_p (exp))))
	       || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
	{
	  rtx constructor = output_constant_def (exp);
	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_SUM
	      && (! memory_address_p (GET_MODE (constructor),
				      XEXP (constructor, 0))
		  || (flag_force_addr
		      && GET_CODE (XEXP (constructor, 0)) != REG)))
	    constructor = change_address (constructor, VOIDmode,
					  XEXP (constructor, 0));
	  return constructor;
	}

      else
	{
	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (target == 0 || ! safe_from_p (target, exp, 1)
	      || GET_CODE (target) == PARALLEL)
	    {
	      if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
		target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
	      else
		target = assign_temp (type, 0, 1, 1);
	    }

	  if (TREE_READONLY (exp))
	    {
	      if (GET_CODE (target) == MEM)
		target = copy_rtx (target);

	      RTX_UNCHANGING_P (target) = 1;
	    }

	  store_constructor (exp, target, 0);
	  return target;
	}
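
      /* Roughly: `static struct { int a, b; } s = { 1, 2 };' takes the
	 output_constant_def path and is emitted once as initialized
	 data, while an automatic aggregate initializer falls through to
	 store_constructor, which assigns the fields one at a time into
	 TARGET (a register for small non-BLKmode types, otherwise a
	 stack temporary).  */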

    case INDIRECT_REF:
      {
	tree exp1 = TREE_OPERAND (exp, 0);
	tree exp2;
	tree index;
	tree string = string_constant (exp1, &index);
	int i;

	/* Try to optimize reads from const strings.  */
	if (string
	    && TREE_CODE (string) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && !TREE_INT_CST_HIGH (index)
	    && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1
	    && modifier != EXPAND_MEMORY_USE_WO)
	  return GEN_INT (TREE_STRING_POINTER (string)[i]);

	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address (mode, op0);

	if (current_function_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
	  {
	    enum memory_use_mode memory_usage;
	    memory_usage = get_memory_usage_from_modifier (modifier);

	    if (memory_usage != MEMORY_USE_DONT)
	      {
		in_check_memory_usage = 1;
		emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
				   op0, ptr_mode,
				   GEN_INT (int_size_in_bytes (type)),
				   TYPE_MODE (sizetype),
				   GEN_INT (memory_usage),
				   TYPE_MODE (integer_type_node));
		in_check_memory_usage = 0;
	      }
	  }

	temp = gen_rtx_MEM (mode, op0);
	/* If address was computed by addition,
	   mark this as an element of an aggregate.  */
	if (TREE_CODE (exp1) == PLUS_EXPR
	    || (TREE_CODE (exp1) == SAVE_EXPR
		&& TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
	    || AGGREGATE_TYPE_P (TREE_TYPE (exp))
	    || (TREE_CODE (exp1) == ADDR_EXPR
		&& (exp2 = TREE_OPERAND (exp1, 0))
		&& AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
	  MEM_SET_IN_STRUCT_P (temp, 1);

	MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
	MEM_ALIAS_SET (temp) = get_alias_set (exp);

	/* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
	   here, because, in C and C++, the fact that a location is accessed
	   through a pointer to const does not mean that the value there can
	   never change.  Languages where it can never change should
	   also set TREE_STATIC.  */
	RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
	return temp;
      }
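
      /* The string special case means a read such as `*("foo" + 2)'
	 expands directly to the immediate GEN_INT ('o') with no memory
	 reference at all, provided it is a one-byte integer rvalue (and
	 not a write, hence the EXPAND_MEMORY_USE_WO test).  */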

    case ARRAY_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
	abort ();

      {
	tree array = TREE_OPERAND (exp, 0);
	tree domain = TYPE_DOMAIN (TREE_TYPE (array));
	tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	tree index = TREE_OPERAND (exp, 1);
	tree index_type = TREE_TYPE (index);
	HOST_WIDE_INT i;

	/* Optimize the special case of a zero lower bound.

	   We convert the low_bound to sizetype to avoid some problems
	   with constant folding.  (E.g. suppose the lower bound is 1,
	   and its mode is QI.  Without the conversion, (ARRAY
	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)

	   But sizetype isn't quite right either (especially if
	   the low bound is negative).  FIXME */

	if (! integer_zerop (low_bound))
	  index = fold (build (MINUS_EXPR, index_type, index,
			       convert (sizetype, low_bound)));

	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */

	if (TREE_CODE (array) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && !TREE_INT_CST_HIGH (index)
	    && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1)
	  return GEN_INT (TREE_STRING_POINTER (array)[i]);

	/* If this is a constant index into a constant array,
	   just get the value from the array.  Handle both the cases when
	   we have an explicit constructor and when our operand is a variable
	   that was declared const.  */

	if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
	  {
	    if (TREE_CODE (index) == INTEGER_CST
		&& TREE_INT_CST_HIGH (index) == 0)
	      {
		tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));

		i = TREE_INT_CST_LOW (index);
		while (elem && i--)
		  elem = TREE_CHAIN (elem);
		if (elem)
		  return expand_expr (fold (TREE_VALUE (elem)), target,
				      tmode, ro_modifier);
	      }
	  }

	else if (optimize >= 1
		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
	  {
	    if (TREE_CODE (index) == INTEGER_CST)
	      {
		tree init = DECL_INITIAL (array);

		i = TREE_INT_CST_LOW (index);
		if (TREE_CODE (init) == CONSTRUCTOR)
		  {
		    tree elem = CONSTRUCTOR_ELTS (init);

		    while (elem
			   && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
		      elem = TREE_CHAIN (elem);
		    if (elem)
		      return expand_expr (fold (TREE_VALUE (elem)), target,
					  tmode, ro_modifier);
		  }
		else if (TREE_CODE (init) == STRING_CST
			 && TREE_INT_CST_HIGH (index) == 0
			 && (TREE_INT_CST_LOW (index)
			     < TREE_STRING_LENGTH (init)))
		  return (GEN_INT
			  (TREE_STRING_POINTER
			   (init)[TREE_INT_CST_LOW (index)]));
	      }
	  }
      }
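
      /* E.g. with `static const int tbl[] = { 10, 20, 30 };', a use of
	 tbl[1] compiled at -O1 or above is folded here to GEN_INT (20)
	 by walking DECL_INITIAL's CONSTRUCTOR list, so the array need
	 never be addressed at run time.  */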

      /* ... fall through ...  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  Don't do this if we have
	 already written the data since we want to refer to that copy
	 and varasm.c assumes that's what we'll do.  */
      if (code != ARRAY_REF
	  && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
	  && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
	       elt = TREE_CHAIN (elt))
	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
		/* We can normally use the value of the field in the
		   CONSTRUCTOR.  However, if this is a bitfield in
		   an integral mode that we can fit in a HOST_WIDE_INT,
		   we must mask only the number of bits in the bitfield,
		   since this is done implicitly by the constructor.  If
		   the bitfield does not meet either of those conditions,
		   we can't do this optimization.  */
		&& (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
		    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
			 == MODE_INT)
			&& (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
			    <= HOST_BITS_PER_WIDE_INT))))
	      {
		op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
		if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
		  {
		    int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));

		    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
		      {
			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
			op0 = expand_and (op0, op1, target);
		      }
		    else
		      {
			enum machine_mode imode
			  = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
			tree count
			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
					 0);

			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					    target, 0);
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					    target, 0);
		      }
		  }

		return op0;
	      }
	}
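
      /* Worked example: for an unsigned 4-bit field initialized with
	 0x1F, the AND with (1 << 4) - 1 yields 0xF; for a signed 4-bit
	 field, the pair of shifts (left then arithmetic right by
	 GET_MODE_BITSIZE (imode) - 4) sign-extends the same bits,
	 giving -1.  This reproduces the truncation that storing through
	 the constructor would have performed.  */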
6362
bbf6f052
RK
6363 {
6364 enum machine_mode mode1;
6365 int bitsize;
6366 int bitpos;
7bb0943f 6367 tree offset;
bbf6f052 6368 int volatilep = 0;
034f9101 6369 int alignment;
839c4796
RK
6370 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6371 &mode1, &unsignedp, &volatilep,
6372 &alignment);
bbf6f052 6373
e7f3c83f
RK
6374 /* If we got back the original object, something is wrong. Perhaps
6375 we are evaluating an expression too early. In any event, don't
6376 infinitely recurse. */
6377 if (tem == exp)
6378 abort ();
6379
3d27140a 6380 /* If TEM's type is a union of variable size, pass TARGET to the inner
b74f5ff2
RK
6381 computation, since it will need a temporary and TARGET is known
6382 to have to do. This occurs in unchecked conversion in Ada. */
6383
6384 op0 = expand_expr (tem,
6385 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6386 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6387 != INTEGER_CST)
6388 ? target : NULL_RTX),
4ed67205 6389 VOIDmode,
e5e809f4
JL
6390 modifier == EXPAND_INITIALIZER
6391 ? modifier : EXPAND_NORMAL);
bbf6f052 6392
8c8a8e34 6393 /* If this is a constant, put it into a register if it is a
8008b228 6394 legitimate constant and memory if it isn't. */
8c8a8e34
JW
6395 if (CONSTANT_P (op0))
6396 {
6397 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
f2878c6b 6398 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
8c8a8e34
JW
6399 op0 = force_reg (mode, op0);
6400 else
6401 op0 = validize_mem (force_const_mem (mode, op0));
6402 }
6403
7bb0943f
RS
6404 if (offset != 0)
6405 {
906c4e36 6406 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
6407
6408 if (GET_CODE (op0) != MEM)
6409 abort ();
2d48c13d
JL
6410
6411 if (GET_MODE (offset_rtx) != ptr_mode)
bd070e1a 6412 {
2d48c13d 6413#ifdef POINTERS_EXTEND_UNSIGNED
822a3443 6414 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
2d48c13d 6415#else
bd070e1a 6416 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
2d48c13d 6417#endif
bd070e1a 6418 }
2d48c13d 6419
89752202
HB
6420 if (GET_CODE (op0) == MEM
6421 && GET_MODE (op0) == BLKmode
6422 && bitsize
6423 && (bitpos % bitsize) == 0
6424 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6425 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6426 {
6427 rtx temp = change_address (op0, mode1,
6428 plus_constant (XEXP (op0, 0),
6429 (bitpos /
6430 BITS_PER_UNIT)));
6431 if (GET_CODE (XEXP (temp, 0)) == REG)
6432 op0 = temp;
6433 else
6434 op0 = change_address (op0, mode1,
6435 force_reg (GET_MODE (XEXP (temp, 0)),
6436 XEXP (temp, 0)));
6437 bitpos = 0;
6438 }
6439
6440
7bb0943f 6441 op0 = change_address (op0, VOIDmode,
38a448ca
RH
6442 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6443 force_reg (ptr_mode, offset_rtx)));
7bb0943f
RS
6444 }
6445
bbf6f052
RK
6446 /* Don't forget about volatility even if this is a bitfield. */
6447 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6448 {
6449 op0 = copy_rtx (op0);
6450 MEM_VOLATILE_P (op0) = 1;
6451 }
6452
921b3427 6453 /* Check the access. */
7d384cc0 6454 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
921b3427
RK
6455 {
6456 enum memory_use_mode memory_usage;
6457 memory_usage = get_memory_usage_from_modifier (modifier);
6458
6459 if (memory_usage != MEMORY_USE_DONT)
6460 {
6461 rtx to;
6462 int size;
6463
6464 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6465 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6466
6467 /* Check the access right of the pointer. */
e9a25f70
JL
6468 if (size > BITS_PER_UNIT)
6469 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6470 to, ptr_mode,
6471 GEN_INT (size / BITS_PER_UNIT),
6472 TYPE_MODE (sizetype),
956d6950
JL
6473 GEN_INT (memory_usage),
6474 TYPE_MODE (integer_type_node));
921b3427
RK
6475 }
6476 }
6477
ccc98036
RS
6478 /* In cases where an aligned union has an unaligned object
6479 as a field, we might be extracting a BLKmode value from
6480 an integer-mode (e.g., SImode) object. Handle this case
6481 by doing the extract into an object as wide as the field
6482 (which we know to be the width of a basic mode), then
f2420d0b
JW
6483 storing into memory, and changing the mode to BLKmode.
6484 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6485 EXPAND_INITIALIZER), then we must not copy to a temporary. */
bbf6f052 6486 if (mode1 == VOIDmode
ccc98036 6487 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
f9409c3a 6488 || (modifier != EXPAND_CONST_ADDRESS
f9409c3a 6489 && modifier != EXPAND_INITIALIZER
c2722ef6
RK
6490 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6491 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6492 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
f9409c3a
JW
6493 /* If the field isn't aligned enough to fetch as a memref,
6494 fetch it as a bit field. */
6495 || (SLOW_UNALIGNED_ACCESS
c84e2712 6496 && ((TYPE_ALIGN (TREE_TYPE (tem)) < (unsigned int) GET_MODE_ALIGNMENT (mode))
f9409c3a 6497 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
bbf6f052 6498 {
bbf6f052
RK
6499 enum machine_mode ext_mode = mode;
6500
6501 if (ext_mode == BLKmode)
6502 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6503
6504 if (ext_mode == BLKmode)
a281e72d
RK
6505 {
6506 /* In this case, BITPOS must start at a byte boundary and
6507 TARGET, if specified, must be a MEM. */
6508 if (GET_CODE (op0) != MEM
6509 || (target != 0 && GET_CODE (target) != MEM)
6510 || bitpos % BITS_PER_UNIT != 0)
6511 abort ();
6512
6513 op0 = change_address (op0, VOIDmode,
6514 plus_constant (XEXP (op0, 0),
6515 bitpos / BITS_PER_UNIT));
6516 if (target == 0)
6517 target = assign_temp (type, 0, 1, 1);
6518
6519 emit_block_move (target, op0,
6520 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6521 / BITS_PER_UNIT),
6522 1);
6523
6524 return target;
6525 }
bbf6f052 6526
dc6d66b3
RK
6527 op0 = validize_mem (op0);
6528
6529 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6530 mark_reg_pointer (XEXP (op0, 0), alignment);
6531
6532 op0 = extract_bit_field (op0, bitsize, bitpos,
bbf6f052 6533 unsignedp, target, ext_mode, ext_mode,
034f9101 6534 alignment,
bbf6f052 6535 int_size_in_bytes (TREE_TYPE (tem)));
ef19912d
RK
6536
6537 /* If the result is a record type and BITSIZE is narrower than
6538 the mode of OP0, an integral mode, and this is a big endian
6539 machine, we must put the field into the high-order bits. */
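	  /* For illustration (made-up sizes): with OP0 in 32-bit SImode
	     and a 12-bit field, extract_bit_field leaves the value in
	     the low-order 12 bits, so on a big-endian machine we shift
	     left by 32 - 12 = 20 to place the field in the high-order
	     bits where the record value is expected to begin.  */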
6540 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6541 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6542 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6543 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6544 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6545 - bitsize),
6546 op0, 1);
6547
bbf6f052
RK
6548 if (mode == BLKmode)
6549 {
6550 rtx new = assign_stack_temp (ext_mode,
6551 bitsize / BITS_PER_UNIT, 0);
6552
6553 emit_move_insn (new, op0);
6554 op0 = copy_rtx (new);
6555 PUT_MODE (op0, BLKmode);
c6df88cb 6556 MEM_SET_IN_STRUCT_P (op0, 1);
bbf6f052
RK
6557 }
6558
6559 return op0;
6560 }
6561
05019f83
RK
6562 /* If the result is BLKmode, use that to access the object
6563 now as well. */
6564 if (mode == BLKmode)
6565 mode1 = BLKmode;
6566
bbf6f052
RK
6567 /* Get a reference to just this component. */
6568 if (modifier == EXPAND_CONST_ADDRESS
6569 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
38a448ca
RH
6570 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6571 (bitpos / BITS_PER_UNIT)));
bbf6f052
RK
6572 else
6573 op0 = change_address (op0, mode1,
6574 plus_constant (XEXP (op0, 0),
6575 (bitpos / BITS_PER_UNIT)));
41472af8
MM
6576
6577 if (GET_CODE (op0) == MEM)
6578 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6579
dc6d66b3
RK
6580 if (GET_CODE (XEXP (op0, 0)) == REG)
6581 mark_reg_pointer (XEXP (op0, 0), alignment);
6582
c6df88cb 6583 MEM_SET_IN_STRUCT_P (op0, 1);
bbf6f052 6584 MEM_VOLATILE_P (op0) |= volatilep;
0d15e60c 6585 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
08bbd316 6586 || modifier == EXPAND_CONST_ADDRESS
0d15e60c 6587 || modifier == EXPAND_INITIALIZER)
bbf6f052 6588 return op0;
0d15e60c 6589 else if (target == 0)
bbf6f052 6590 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
0d15e60c 6591
bbf6f052
RK
6592 convert_move (target, op0, unsignedp);
6593 return target;
6594 }
6595
bbf6f052
RK
6596 /* Intended for a reference to a buffer of a file-object in Pascal.
6597 But it's not certain that a special tree code will really be
6598 necessary for these. INDIRECT_REF might work for them. */
6599 case BUFFER_REF:
6600 abort ();
6601
7308a047 6602 case IN_EXPR:
7308a047 6603 {
d6a5ac33
RK
6604 /* Pascal set IN expression.
6605
6606 Algorithm:
6607 rlo = set_low - (set_low%bits_per_word);
6608 the_word = set [ (index - rlo)/bits_per_word ];
6609 bit_index = index % bits_per_word;
6610 bitmask = 1 << bit_index;
6611 return !!(the_word & bitmask); */
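	 /* Worked example (assuming bits_per_word == 8): with
	    set_low == 3 and index == 12,

		rlo       = 3 - (3 % 8)       = 0
		the_word  = set[(12 - 0) / 8] = set[1]
		bit_index = 12 % 8            = 4
		bitmask   = 1 << 4            = 0x10

	    so the result is !!(set[1] & 0x10).  The expand_binop and
	    expand_divmod calls below compute these same quantities.  */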
6612
7308a047
RS
6613 tree set = TREE_OPERAND (exp, 0);
6614 tree index = TREE_OPERAND (exp, 1);
d6a5ac33 6615 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7308a047 6616 tree set_type = TREE_TYPE (set);
7308a047
RS
6617 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6618 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
d6a5ac33
RK
6619 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6620 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6621 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6622 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6623 rtx setaddr = XEXP (setval, 0);
6624 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7308a047
RS
6625 rtx rlow;
6626 rtx diff, quo, rem, addr, bit, result;
7308a047 6627
d6a5ac33
RK
6628 preexpand_calls (exp);
6629
6630 /* If domain is empty, answer is no. Likewise if index is constant
6631 and out of bounds. */
51723711 6632 if (((TREE_CODE (set_high_bound) == INTEGER_CST
d6a5ac33 6633 && TREE_CODE (set_low_bound) == INTEGER_CST
51723711 6634 && tree_int_cst_lt (set_high_bound, set_low_bound))
d6a5ac33
RK
6635 || (TREE_CODE (index) == INTEGER_CST
6636 && TREE_CODE (set_low_bound) == INTEGER_CST
6637 && tree_int_cst_lt (index, set_low_bound))
6638 || (TREE_CODE (set_high_bound) == INTEGER_CST
6639 && TREE_CODE (index) == INTEGER_CST
6640 && tree_int_cst_lt (set_high_bound, index))))
7308a047
RS
6641 return const0_rtx;
6642
d6a5ac33
RK
6643 if (target == 0)
6644 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7308a047
RS
6645
6646 /* If we get here, we have to generate the code for both cases
6647 (in range and out of range). */
6648
6649 op0 = gen_label_rtx ();
6650 op1 = gen_label_rtx ();
6651
6652 if (! (GET_CODE (index_val) == CONST_INT
6653 && GET_CODE (lo_r) == CONST_INT))
6654 {
c5d5d461
JL
6655 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6656 GET_MODE (index_val), iunsignedp, 0, op1);
7308a047
RS
6657 }
6658
6659 if (! (GET_CODE (index_val) == CONST_INT
6660 && GET_CODE (hi_r) == CONST_INT))
6661 {
c5d5d461
JL
6662 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6663 GET_MODE (index_val), iunsignedp, 0, op1);
7308a047
RS
6664 }
6665
6666 /* Calculate the element number of bit zero in the first word
6667 of the set. */
6668 if (GET_CODE (lo_r) == CONST_INT)
17938e57
RK
6669 rlow = GEN_INT (INTVAL (lo_r)
6670 & ~ ((HOST_WIDE_INT) (BITS_PER_UNIT - 1)));
7308a047 6671 else
17938e57
RK
6672 rlow = expand_binop (index_mode, and_optab, lo_r,
6673 GEN_INT (~((HOST_WIDE_INT) (BITS_PER_UNIT - 1))),
d6a5ac33 6674 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047 6675
d6a5ac33
RK
6676 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6677 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047
RS
6678
6679 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
d6a5ac33 6680 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7308a047 6681 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
d6a5ac33
RK
6682 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6683
7308a047 6684 addr = memory_address (byte_mode,
d6a5ac33
RK
6685 expand_binop (index_mode, add_optab, quo,
6686 setaddr, NULL_RTX, iunsignedp,
17938e57 6687 OPTAB_LIB_WIDEN));
d6a5ac33 6688
7308a047
RS
6689 /* Extract the bit we want to examine.  */
6690 bit = expand_shift (RSHIFT_EXPR, byte_mode,
38a448ca 6691 gen_rtx_MEM (byte_mode, addr),
17938e57
RK
6692 make_tree (TREE_TYPE (index), rem),
6693 NULL_RTX, 1);
6694 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6695 GET_MODE (target) == byte_mode ? target : 0,
7308a047 6696 1, OPTAB_LIB_WIDEN);
17938e57
RK
6697
6698 if (result != target)
6699 convert_move (target, result, 1);
7308a047
RS
6700
6701 /* Output the code to handle the out-of-range case. */
6702 emit_jump (op0);
6703 emit_label (op1);
6704 emit_move_insn (target, const0_rtx);
6705 emit_label (op0);
6706 return target;
6707 }
6708
bbf6f052
RK
6709 case WITH_CLEANUP_EXPR:
6710 if (RTL_EXPR_RTL (exp) == 0)
6711 {
6712 RTL_EXPR_RTL (exp)
921b3427 6713 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
e976b8b2
MS
6714 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6715
bbf6f052
RK
6716 /* That's it for this cleanup. */
6717 TREE_OPERAND (exp, 2) = 0;
6718 }
6719 return RTL_EXPR_RTL (exp);
6720
5dab5552
MS
6721 case CLEANUP_POINT_EXPR:
6722 {
d93d4205 6723 extern int temp_slot_level;
e976b8b2
MS
6724 /* Start a new binding layer that will keep track of all cleanup
6725 actions to be performed. */
6726 expand_start_bindings (0);
6727
d93d4205 6728 target_temp_slot_level = temp_slot_level;
e976b8b2 6729
921b3427 6730 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
f283f66b
JM
6731 /* If we're going to use this value, load it up now. */
6732 if (! ignore)
6733 op0 = force_not_mem (op0);
d93d4205 6734 preserve_temp_slots (op0);
e976b8b2 6735 expand_end_bindings (NULL_TREE, 0, 0);
5dab5552
MS
6736 }
6737 return op0;
6738
bbf6f052
RK
6739 case CALL_EXPR:
6740 /* Check for a built-in function. */
6741 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
d6a5ac33
RK
6742 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6743 == FUNCTION_DECL)
bbf6f052
RK
6744 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6745 return expand_builtin (exp, target, subtarget, tmode, ignore);
d6a5ac33 6746
bbf6f052
RK
6747 /* If this call was expanded already by preexpand_calls,
6748 just return the result we got. */
6749 if (CALL_EXPR_RTL (exp) != 0)
6750 return CALL_EXPR_RTL (exp);
d6a5ac33 6751
8129842c 6752 return expand_call (exp, target, ignore);
bbf6f052
RK
6753
6754 case NON_LVALUE_EXPR:
6755 case NOP_EXPR:
6756 case CONVERT_EXPR:
6757 case REFERENCE_EXPR:
bbf6f052
RK
6758 if (TREE_CODE (type) == UNION_TYPE)
6759 {
6760 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6761 if (target == 0)
06089a8b
RK
6762 {
6763 if (mode != BLKmode)
6764 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6765 else
6766 target = assign_temp (type, 0, 1, 1);
6767 }
d6a5ac33 6768
bbf6f052
RK
6769 if (GET_CODE (target) == MEM)
6770 /* Store data into beginning of memory target. */
6771 store_expr (TREE_OPERAND (exp, 0),
1499e0a8
RK
6772 change_address (target, TYPE_MODE (valtype), 0), 0);
6773
bbf6f052
RK
6774 else if (GET_CODE (target) == REG)
6775 /* Store this field into a union of the proper type. */
6776 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6777 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6778 VOIDmode, 0, 1,
ece32014
MM
6779 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))),
6780 0);
bbf6f052
RK
6781 else
6782 abort ();
6783
6784 /* Return the entire union. */
6785 return target;
6786 }
d6a5ac33 6787
7f62854a
RK
6788 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6789 {
6790 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
921b3427 6791 ro_modifier);
7f62854a
RK
6792
6793 /* If the signedness of the conversion differs and OP0 is
6794 a promoted SUBREG, clear that indication since we now
6795 have to do the proper extension. */
6796 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6797 && GET_CODE (op0) == SUBREG)
6798 SUBREG_PROMOTED_VAR_P (op0) = 0;
6799
6800 return op0;
6801 }
6802
1499e0a8 6803 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
12342f90
RS
6804 if (GET_MODE (op0) == mode)
6805 return op0;
12342f90 6806
d6a5ac33
RK
6807 /* If OP0 is a constant, just convert it into the proper mode. */
6808 if (CONSTANT_P (op0))
6809 return
6810 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6811 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
12342f90 6812
26fcb35a 6813 if (modifier == EXPAND_INITIALIZER)
38a448ca 6814 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
d6a5ac33 6815
bbf6f052 6816 if (target == 0)
d6a5ac33
RK
6817 return
6818 convert_to_mode (mode, op0,
6819 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052 6820 else
d6a5ac33
RK
6821 convert_move (target, op0,
6822 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052
RK
6823 return target;
6824
6825 case PLUS_EXPR:
0f41302f
MS
6826 /* We come here from MINUS_EXPR when the second operand is a
6827 constant. */
bbf6f052
RK
6828 plus_expr:
6829 this_optab = add_optab;
6830
6831 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6832 something else, make sure we add the register to the constant and
6833 then to the other thing. This case can occur during strength
6834 reduction and doing it this way will produce better code if the
6835 frame pointer or argument pointer is eliminated.
6836
6837 fold-const.c will ensure that the constant is always in the inner
6838 PLUS_EXPR, so the only case we need to do anything about is if
6839 sp, ap, or fp is our second argument, in which case we must swap
6840 the innermost first argument and our second argument. */
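      /* For instance (illustrative trees): (X + 4) + FP, where FP is
	 the frame-pointer RTL_EXPR, becomes (FP + 4) + X, so that the
	 constant ends up folded against the register that elimination
	 will later rewrite.  */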
6841
6842 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6843 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6844 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6845 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6846 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6847 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6848 {
6849 tree t = TREE_OPERAND (exp, 1);
6850
6851 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6852 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6853 }
6854
88f63c77 6855 /* If the result is to be ptr_mode and we are adding an integer to
bbf6f052
RK
6856 something, we might be forming a constant. So try to use
6857 plus_constant. If it produces a sum and we can't accept it,
6858 use force_operand. This allows P = &ARR[const] to generate
6859 efficient code on machines where a SYMBOL_REF is not a valid
6860 address.
6861
6862 If this is an EXPAND_SUM call, always return the sum. */
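      /* A sketch of the payoff (hypothetical source, 4-byte ints):
	 for

	     static int arr[10];
	     int *p = &arr[3];

	 plus_constant folds the offset into the address, yielding a
	 single constant address of the form
	 (const (plus (symbol_ref "arr") (const_int 12))) instead of a
	 run-time add of 12 to a register.  */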
c980ac49 6863 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
88f63c77 6864 || mode == ptr_mode)
bbf6f052 6865 {
c980ac49
RS
6866 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6867 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6868 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6869 {
6870 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6871 EXPAND_SUM);
6872 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
6873 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6874 op1 = force_operand (op1, target);
6875 return op1;
6876 }
bbf6f052 6877
c980ac49
RS
6878 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6879 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6880 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6881 {
6882 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6883 EXPAND_SUM);
6884 if (! CONSTANT_P (op0))
6885 {
6886 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6887 VOIDmode, modifier);
709f5be1
RS
6888 /* Don't go to both_summands if modifier
6889 says it's not right to return a PLUS. */
6890 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6891 goto binop2;
c980ac49
RS
6892 goto both_summands;
6893 }
6894 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
6895 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6896 op0 = force_operand (op0, target);
6897 return op0;
6898 }
bbf6f052
RK
6899 }
6900
6901 /* No sense saving up arithmetic to be done
6902 if it's all in the wrong mode to form part of an address.
6903 And force_operand won't know whether to sign-extend or
6904 zero-extend. */
6905 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
88f63c77 6906 || mode != ptr_mode)
c980ac49 6907 goto binop;
bbf6f052
RK
6908
6909 preexpand_calls (exp);
e5e809f4 6910 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
6911 subtarget = 0;
6912
921b3427
RK
6913 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
6914 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
bbf6f052 6915
c980ac49 6916 both_summands:
bbf6f052
RK
6917 /* Make sure any term that's a sum with a constant comes last. */
6918 if (GET_CODE (op0) == PLUS
6919 && CONSTANT_P (XEXP (op0, 1)))
6920 {
6921 temp = op0;
6922 op0 = op1;
6923 op1 = temp;
6924 }
6925 /* If adding to a sum including a constant,
6926 associate it to put the constant outside. */
6927 if (GET_CODE (op1) == PLUS
6928 && CONSTANT_P (XEXP (op1, 1)))
6929 {
6930 rtx constant_term = const0_rtx;
6931
6932 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6933 if (temp != 0)
6934 op0 = temp;
6f90e075
JW
6935 /* Ensure that MULT comes first if there is one. */
6936 else if (GET_CODE (op0) == MULT)
38a448ca 6937 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
bbf6f052 6938 else
38a448ca 6939 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
bbf6f052
RK
6940
6941 /* Let's also eliminate constants from op0 if possible. */
6942 op0 = eliminate_constant_term (op0, &constant_term);
6943
6944 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6945 their sum should be a constant. Form it into OP1, since the
6946 result we want will then be OP0 + OP1. */
6947
6948 temp = simplify_binary_operation (PLUS, mode, constant_term,
6949 XEXP (op1, 1));
6950 if (temp != 0)
6951 op1 = temp;
6952 else
38a448ca 6953 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
bbf6f052
RK
6954 }
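      /* Net effect of the two steps above (illustrative RTL): starting
	 from OP0 == (plus X (const_int 4)) and OP1 == Y, the operands
	 are swapped and then reassociated, so (plus (plus X 4) Y)
	 becomes (plus (plus X Y) 4), with the constant outermost where
	 later passes fold it cheaply.  */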
6955
6956 /* Put a constant term last and put a multiplication first. */
6957 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6958 temp = op1, op1 = op0, op0 = temp;
6959
6960 temp = simplify_binary_operation (PLUS, mode, op0, op1);
38a448ca 6961 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
bbf6f052
RK
6962
6963 case MINUS_EXPR:
ea87523e
RK
6964 /* For initializers, we are allowed to return a MINUS of two
6965 symbolic constants. Here we handle all cases when both operands
6966 are constant. */
bbf6f052
RK
6967 /* Handle difference of two symbolic constants,
6968 for the sake of an initializer. */
6969 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6970 && really_constant_p (TREE_OPERAND (exp, 0))
6971 && really_constant_p (TREE_OPERAND (exp, 1)))
6972 {
906c4e36 6973 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
921b3427 6974 VOIDmode, ro_modifier);
906c4e36 6975 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
921b3427 6976 VOIDmode, ro_modifier);
ea87523e 6977
ea87523e
RK
6978 /* If the last operand is a CONST_INT, use plus_constant of
6979 the negated constant. Else make the MINUS. */
6980 if (GET_CODE (op1) == CONST_INT)
6981 return plus_constant (op0, - INTVAL (op1));
6982 else
38a448ca 6983 return gen_rtx_MINUS (mode, op0, op1);
bbf6f052
RK
6984 }
6985 /* Convert A - const to A + (-const). */
6986 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6987 {
ae431183
RK
6988 tree negated = fold (build1 (NEGATE_EXPR, type,
6989 TREE_OPERAND (exp, 1)));
6990
6991 /* Deal with the case where we can't negate the constant
6992 in TYPE. */
6993 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6994 {
6995 tree newtype = signed_type (type);
6996 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6997 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6998 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6999
7000 if (! TREE_OVERFLOW (newneg))
7001 return expand_expr (convert (type,
7002 build (PLUS_EXPR, newtype,
7003 newop0, newneg)),
921b3427 7004 target, tmode, ro_modifier);
ae431183
RK
7005 }
7006 else
7007 {
7008 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7009 goto plus_expr;
7010 }
bbf6f052
RK
7011 }
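      /* Example of the case handled above (illustrative): for unsigned
	 A in A - 1, the constant cannot safely be negated in the
	 unsigned type, so both operands are converted to the
	 corresponding signed type, where -1 is representable; A + (-1)
	 is expanded there and the sum converted back to the unsigned
	 type.  */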
7012 this_optab = sub_optab;
7013 goto binop;
7014
7015 case MULT_EXPR:
7016 preexpand_calls (exp);
7017 /* If first operand is constant, swap them.
7018 Thus the following special case checks need only
7019 check the second operand. */
7020 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7021 {
7022 register tree t1 = TREE_OPERAND (exp, 0);
7023 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7024 TREE_OPERAND (exp, 1) = t1;
7025 }
7026
7027 /* Attempt to return something suitable for generating an
7028 indexed address, for machines that support that. */
7029
88f63c77 7030 if (modifier == EXPAND_SUM && mode == ptr_mode
bbf6f052 7031 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
906c4e36 7032 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052 7033 {
921b3427
RK
7034 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7035 EXPAND_SUM);
bbf6f052
RK
7036
7037 /* Apply distributive law if OP0 is x+c. */
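	  /* E.g. (illustrative): (X + 4) * 3 becomes
	     (plus (mult X 3) (const_int 12)), a form directly usable
	     in an indexed address.  */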
7038 if (GET_CODE (op0) == PLUS
7039 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
38a448ca
RH
7040 return gen_rtx_PLUS (mode,
7041 gen_rtx_MULT (mode, XEXP (op0, 0),
7042 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
906c4e36
RK
7043 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7044 * INTVAL (XEXP (op0, 1))));
bbf6f052
RK
7045
7046 if (GET_CODE (op0) != REG)
906c4e36 7047 op0 = force_operand (op0, NULL_RTX);
bbf6f052
RK
7048 if (GET_CODE (op0) != REG)
7049 op0 = copy_to_mode_reg (mode, op0);
7050
38a448ca
RH
7051 return gen_rtx_MULT (mode, op0,
7052 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
bbf6f052
RK
7053 }
7054
e5e809f4 7055 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7056 subtarget = 0;
7057
7058 /* Check for multiplying things that have been extended
7059 from a narrower type. If this machine supports multiplying
7060 in that narrower type with a result in the desired type,
7061 do it that way, and avoid the explicit type-conversion. */
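      /* The classic instance (hypothetical source, 16-bit short,
	 32-bit int, and a widening-multiply pattern on the target):

	     short a, b;
	     int prod = (int) a * (int) b;

	 Both operands are NOP_EXPRs widening from HImode, so a single
	 HImode-to-SImode widening multiply is emitted instead of two
	 extensions followed by a full SImode multiply.  */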
7062 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7063 && TREE_CODE (type) == INTEGER_TYPE
7064 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7065 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7066 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7067 && int_fits_type_p (TREE_OPERAND (exp, 1),
7068 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7069 /* Don't use a widening multiply if a shift will do. */
7070 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 7071 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
7072 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7073 ||
7074 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7075 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7076 ==
7077 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7078 /* If both operands are extended, they must either both
7079 be zero-extended or both be sign-extended. */
7080 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7081 ==
7082 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7083 {
7084 enum machine_mode innermode
7085 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
b10af0c8
TG
7086 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7087 ? smul_widen_optab : umul_widen_optab);
bbf6f052
RK
7088 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7089 ? umul_widen_optab : smul_widen_optab);
b10af0c8 7090 if (mode == GET_MODE_WIDER_MODE (innermode))
bbf6f052 7091 {
b10af0c8
TG
7092 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7093 {
7094 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7095 NULL_RTX, VOIDmode, 0);
7096 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7097 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7098 VOIDmode, 0);
7099 else
7100 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7101 NULL_RTX, VOIDmode, 0);
7102 goto binop2;
7103 }
7104 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7105 && innermode == word_mode)
7106 {
7107 rtx htem;
7108 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7109 NULL_RTX, VOIDmode, 0);
7110 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7111 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7112 VOIDmode, 0);
7113 else
7114 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7115 NULL_RTX, VOIDmode, 0);
7116 temp = expand_binop (mode, other_optab, op0, op1, target,
7117 unsignedp, OPTAB_LIB_WIDEN);
7118 htem = expand_mult_highpart_adjust (innermode,
7119 gen_highpart (innermode, temp),
7120 op0, op1,
7121 gen_highpart (innermode, temp),
7122 unsignedp);
7123 emit_move_insn (gen_highpart (innermode, temp), htem);
7124 return temp;
7125 }
bbf6f052
RK
7126 }
7127 }
7128 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7129 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7130 return expand_mult (mode, op0, op1, target, unsignedp);
7131
7132 case TRUNC_DIV_EXPR:
7133 case FLOOR_DIV_EXPR:
7134 case CEIL_DIV_EXPR:
7135 case ROUND_DIV_EXPR:
7136 case EXACT_DIV_EXPR:
7137 preexpand_calls (exp);
e5e809f4 7138 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7139 subtarget = 0;
7140 /* Possible optimization: compute the dividend with EXPAND_SUM
7141 then, if the divisor is constant, we can optimize the case
7142 where some terms of the dividend have coefficients divisible by it. */
7143 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7144 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7145 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7146
7147 case RDIV_EXPR:
7148 this_optab = flodiv_optab;
7149 goto binop;
7150
7151 case TRUNC_MOD_EXPR:
7152 case FLOOR_MOD_EXPR:
7153 case CEIL_MOD_EXPR:
7154 case ROUND_MOD_EXPR:
7155 preexpand_calls (exp);
e5e809f4 7156 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7157 subtarget = 0;
7158 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7159 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7160 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7161
7162 case FIX_ROUND_EXPR:
7163 case FIX_FLOOR_EXPR:
7164 case FIX_CEIL_EXPR:
7165 abort (); /* Not used for C. */
7166
7167 case FIX_TRUNC_EXPR:
906c4e36 7168 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7169 if (target == 0)
7170 target = gen_reg_rtx (mode);
7171 expand_fix (target, op0, unsignedp);
7172 return target;
7173
7174 case FLOAT_EXPR:
906c4e36 7175 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7176 if (target == 0)
7177 target = gen_reg_rtx (mode);
7178 /* expand_float can't figure out what to do if FROM has VOIDmode.
7179 So give it the correct mode. With -O, cse will optimize this. */
7180 if (GET_MODE (op0) == VOIDmode)
7181 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7182 op0);
7183 expand_float (target, op0,
7184 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7185 return target;
7186
7187 case NEGATE_EXPR:
5b22bee8 7188 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
bbf6f052
RK
7189 temp = expand_unop (mode, neg_optab, op0, target, 0);
7190 if (temp == 0)
7191 abort ();
7192 return temp;
7193
7194 case ABS_EXPR:
7195 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7196
2d7050fd 7197 /* Handle complex values specially. */
d6a5ac33
RK
7198 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7199 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7200 return expand_complex_abs (mode, op0, target, unsignedp);
2d7050fd 7201
bbf6f052
RK
7202 /* Unsigned abs is simply the operand. Testing here means we don't
7203 risk generating incorrect code below. */
7204 if (TREE_UNSIGNED (type))
7205 return op0;
7206
2e5ec6cf 7207 return expand_abs (mode, op0, target, unsignedp,
e5e809f4 7208 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
bbf6f052
RK
7209
7210 case MAX_EXPR:
7211 case MIN_EXPR:
7212 target = original_target;
e5e809f4 7213 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
fc155707 7214 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
d6a5ac33 7215 || GET_MODE (target) != mode
bbf6f052
RK
7216 || (GET_CODE (target) == REG
7217 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7218 target = gen_reg_rtx (mode);
906c4e36 7219 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7220 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7221
7222 /* First try to do it with a special MIN or MAX instruction.
7223 If that does not win, use a conditional jump to select the proper
7224 value. */
7225 this_optab = (TREE_UNSIGNED (type)
7226 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7227 : (code == MIN_EXPR ? smin_optab : smax_optab));
7228
7229 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7230 OPTAB_WIDEN);
7231 if (temp != 0)
7232 return temp;
7233
fa2981d8
JW
7234 /* At this point, a MEM target is no longer useful; we will get better
7235 code without it. */
7236
7237 if (GET_CODE (target) == MEM)
7238 target = gen_reg_rtx (mode);
7239
ee456b1c
RK
7240 if (target != op0)
7241 emit_move_insn (target, op0);
d6a5ac33 7242
bbf6f052 7243 op0 = gen_label_rtx ();
d6a5ac33 7244
f81497d9
RS
7245 /* If this mode is an integer too wide to compare properly,
7246 compare word by word. Rely on cse to optimize constant cases. */
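	  /* E.g. (illustrative): a DImode MAX on a 32-bit target has no
	     direct comparison insn, so do_jump_by_parts_greater_rtx
	     compares the high words first, falls back to the low words
	     on equality, and branches to OP0 when TARGET already holds
	     the winning value.  */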
d6a5ac33 7247 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
bbf6f052 7248 {
f81497d9 7249 if (code == MAX_EXPR)
d6a5ac33
RK
7250 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7251 target, op1, NULL_RTX, op0);
bbf6f052 7252 else
d6a5ac33
RK
7253 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7254 op1, target, NULL_RTX, op0);
ee456b1c 7255 emit_move_insn (target, op1);
bbf6f052 7256 }
f81497d9
RS
7257 else
7258 {
7259 if (code == MAX_EXPR)
7260 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
ee456b1c
RK
7261 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
7262 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
f81497d9
RS
7263 else
7264 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
ee456b1c
RK
7265 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
7266 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
f81497d9 7267 if (temp == const0_rtx)
ee456b1c 7268 emit_move_insn (target, op1);
f81497d9
RS
7269 else if (temp != const_true_rtx)
7270 {
7271 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
7272 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
7273 else
7274 abort ();
ee456b1c 7275 emit_move_insn (target, op1);
f81497d9
RS
7276 }
7277 }
bbf6f052
RK
7278 emit_label (op0);
7279 return target;
7280
bbf6f052
RK
7281 case BIT_NOT_EXPR:
7282 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7283 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7284 if (temp == 0)
7285 abort ();
7286 return temp;
7287
7288 case FFS_EXPR:
7289 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7290 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7291 if (temp == 0)
7292 abort ();
7293 return temp;
7294
d6a5ac33
RK
7295 /* ??? Can optimize bitwise operations with one arg constant.
7296 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7297 and (a bitwise1 b) bitwise2 b (etc)
7298 but that is probably not worth while. */
7299
7300 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7301 boolean values when we want in all cases to compute both of them. In
7302 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7303 as actual zero-or-1 values and then bitwise anding. In cases where
7304 there cannot be any side effects, better code would be made by
7305 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7306 how to recognize those cases. */
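  /* Illustration (hypothetical source): for TRUTH_AND_EXPR the
     expansion behaves like

	 t1 = (a != 0);  t2 = (b != 0);  result = t1 & t2;

     evaluating both operands unconditionally, whereas TRUTH_ANDIF_EXPR
     would branch around the evaluation of B whenever A is false.  */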
7307
bbf6f052
RK
7308 case TRUTH_AND_EXPR:
7309 case BIT_AND_EXPR:
7310 this_optab = and_optab;
7311 goto binop;
7312
bbf6f052
RK
7313 case TRUTH_OR_EXPR:
7314 case BIT_IOR_EXPR:
7315 this_optab = ior_optab;
7316 goto binop;
7317
874726a8 7318 case TRUTH_XOR_EXPR:
bbf6f052
RK
7319 case BIT_XOR_EXPR:
7320 this_optab = xor_optab;
7321 goto binop;
7322
7323 case LSHIFT_EXPR:
7324 case RSHIFT_EXPR:
7325 case LROTATE_EXPR:
7326 case RROTATE_EXPR:
7327 preexpand_calls (exp);
e5e809f4 7328 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7329 subtarget = 0;
7330 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7331 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7332 unsignedp);
7333
d6a5ac33
RK
7334 /* Could determine the answer when only additive constants differ. Also,
7335 the addition of one can be handled by changing the condition. */
bbf6f052
RK
7336 case LT_EXPR:
7337 case LE_EXPR:
7338 case GT_EXPR:
7339 case GE_EXPR:
7340 case EQ_EXPR:
7341 case NE_EXPR:
7342 preexpand_calls (exp);
7343 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7344 if (temp != 0)
7345 return temp;
d6a5ac33 7346
0f41302f 7347 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
bbf6f052
RK
7348 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7349 && original_target
7350 && GET_CODE (original_target) == REG
7351 && (GET_MODE (original_target)
7352 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7353 {
d6a5ac33
RK
7354 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7355 VOIDmode, 0);
7356
bbf6f052
RK
7357 if (temp != original_target)
7358 temp = copy_to_reg (temp);
d6a5ac33 7359
bbf6f052 7360 op1 = gen_label_rtx ();
c5d5d461
JL
7361 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7362 GET_MODE (temp), unsignedp, 0, op1);
bbf6f052
RK
7363 emit_move_insn (temp, const1_rtx);
7364 emit_label (op1);
7365 return temp;
7366 }
d6a5ac33 7367
bbf6f052
RK
7368 /* If no set-flag instruction, must generate a conditional
7369 store into a temporary variable. Drop through
7370 and handle this like && and ||. */
7371
7372 case TRUTH_ANDIF_EXPR:
7373 case TRUTH_ORIF_EXPR:
e44842fe 7374 if (! ignore
e5e809f4 7375 && (target == 0 || ! safe_from_p (target, exp, 1)
e44842fe
RK
7376 /* Make sure we don't have a hard reg (such as the function's return
7377 value) live across basic blocks, if not optimizing. */
7378 || (!optimize && GET_CODE (target) == REG
7379 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
bbf6f052 7380 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
e44842fe
RK
7381
7382 if (target)
7383 emit_clr_insn (target);
7384
bbf6f052
RK
7385 op1 = gen_label_rtx ();
7386 jumpifnot (exp, op1);
e44842fe
RK
7387
7388 if (target)
7389 emit_0_to_1_insn (target);
7390
bbf6f052 7391 emit_label (op1);
e44842fe 7392 return ignore ? const0_rtx : target;
bbf6f052
RK
7393
7394 case TRUTH_NOT_EXPR:
7395 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7396 /* The parser is careful to generate TRUTH_NOT_EXPR
7397 only with operands that are always zero or one. */
906c4e36 7398 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
bbf6f052
RK
7399 target, 1, OPTAB_LIB_WIDEN);
7400 if (temp == 0)
7401 abort ();
7402 return temp;
7403
7404 case COMPOUND_EXPR:
7405 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7406 emit_queue ();
7407 return expand_expr (TREE_OPERAND (exp, 1),
7408 (ignore ? const0_rtx : target),
7409 VOIDmode, 0);
7410
7411 case COND_EXPR:
ac01eace
RK
7412 /* If we would have a "singleton" (see below) were it not for a
7413 conversion in each arm, bring that conversion back out. */
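      /* E.g. (illustrative): X ? (int) (A + B) : (int) A is rewritten
	 here as (int) (X ? A + B : A), exposing the "singleton" form
	 that the code below optimizes.  */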
7414 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7415 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7416 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7417 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7418 {
7419 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7420 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7421
7422 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7423 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7424 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7425 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7426 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7427 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7428 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7429 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7430 return expand_expr (build1 (NOP_EXPR, type,
7431 build (COND_EXPR, TREE_TYPE (true),
7432 TREE_OPERAND (exp, 0),
7433 true, false)),
7434 target, tmode, modifier);
7435 }
7436
bbf6f052
RK
7437 {
7438 /* Note that COND_EXPRs whose type is a structure or union
7439 are required to be constructed to contain assignments of
7440 a temporary variable, so that we can evaluate them here
7441 for side effect only. If type is void, we must do likewise. */
7442
7443 /* If an arm of the branch requires a cleanup,
7444 only that cleanup is performed. */
7445
7446 tree singleton = 0;
7447 tree binary_op = 0, unary_op = 0;
bbf6f052
RK
7448
7449 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7450 convert it to our mode, if necessary. */
7451 if (integer_onep (TREE_OPERAND (exp, 1))
7452 && integer_zerop (TREE_OPERAND (exp, 2))
7453 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7454 {
dd27116b
RK
7455 if (ignore)
7456 {
7457 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
921b3427 7458 ro_modifier);
dd27116b
RK
7459 return const0_rtx;
7460 }
7461
921b3427 7462 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
bbf6f052
RK
7463 if (GET_MODE (op0) == mode)
7464 return op0;
d6a5ac33 7465
bbf6f052
RK
7466 if (target == 0)
7467 target = gen_reg_rtx (mode);
7468 convert_move (target, op0, unsignedp);
7469 return target;
7470 }
7471
ac01eace
RK
7472 /* Check for X ? A + B : A. If we have this, we can copy A to the
7473 output and conditionally add B. Similarly for unary operations.
7474 Don't do this if X has side-effects because those side effects
7475 might affect A or B and the "?" operation is a sequence point in
7476 ANSI. (operand_equal_p tests for side effects.) */
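      /* E.g. (illustrative): X ? A + B : A can be expanded as

	     result = A;  if (X) result = result + B;

	 A is the "singleton" and A + B the binary_op in the tests
	 below.  */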
bbf6f052
RK
7477
7478 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7479 && operand_equal_p (TREE_OPERAND (exp, 2),
7480 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7481 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7482 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7483 && operand_equal_p (TREE_OPERAND (exp, 1),
7484 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7485 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7486 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7487 && operand_equal_p (TREE_OPERAND (exp, 2),
7488 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7489 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7490 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7491 && operand_equal_p (TREE_OPERAND (exp, 1),
7492 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7493 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7494
01c8a7c8
RK
7495 /* If we are not to produce a result, we have no target. Otherwise,
7496 if a target was specified use it; it will not be used as an
7497 intermediate target unless it is safe. If no target, use a
7498 temporary. */
7499
7500 if (ignore)
7501 temp = 0;
7502 else if (original_target
e5e809f4 7503 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
01c8a7c8
RK
7504 || (singleton && GET_CODE (original_target) == REG
7505 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7506 && original_target == var_rtx (singleton)))
7507 && GET_MODE (original_target) == mode
7c00d1fe
RK
7508#ifdef HAVE_conditional_move
7509 && (! can_conditionally_move_p (mode)
7510 || GET_CODE (original_target) == REG
7511 || TREE_ADDRESSABLE (type))
7512#endif
01c8a7c8
RK
7513 && ! (GET_CODE (original_target) == MEM
7514 && MEM_VOLATILE_P (original_target)))
7515 temp = original_target;
7516 else if (TREE_ADDRESSABLE (type))
7517 abort ();
7518 else
7519 temp = assign_temp (type, 0, 0, 1);
7520
ac01eace
RK
7521 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7522 do the test of X as a store-flag operation, do this as
7523 A + ((X != 0) << log C). Similarly for other simple binary
7524 operators. Only do for C == 1 if BRANCH_COST is low. */
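      /* Worked example (illustrative, C == 4): X ? A + 4 : A becomes

	     A + ((X != 0) << 2)

	 one store-flag, one shift and one add, with no branch; here
	 log C == 2, computed by tree_log2 below.  */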
dd27116b 7525 if (temp && singleton && binary_op
bbf6f052
RK
7526 && (TREE_CODE (binary_op) == PLUS_EXPR
7527 || TREE_CODE (binary_op) == MINUS_EXPR
7528 || TREE_CODE (binary_op) == BIT_IOR_EXPR
9fbd9f58 7529 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
ac01eace
RK
7530 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7531 : integer_onep (TREE_OPERAND (binary_op, 1)))
bbf6f052
RK
7532 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7533 {
7534 rtx result;
7535 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7536 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7537 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
2d444001 7538 : xor_optab);
bbf6f052
RK
7539
7540 /* If we had X ? A : A + 1, do this as A + (X == 0).
7541
7542 We have to invert the truth value here and then put it
7543 back later if do_store_flag fails. We cannot simply copy
7544 TREE_OPERAND (exp, 0) to another variable and modify that
7545 because invert_truthvalue can modify the tree pointed to
7546 by its argument. */
7547 if (singleton == TREE_OPERAND (exp, 1))
7548 TREE_OPERAND (exp, 0)
7549 = invert_truthvalue (TREE_OPERAND (exp, 0));
7550
7551 result = do_store_flag (TREE_OPERAND (exp, 0),
e5e809f4 7552 (safe_from_p (temp, singleton, 1)
906c4e36 7553 ? temp : NULL_RTX),
bbf6f052
RK
7554 mode, BRANCH_COST <= 1);
7555
ac01eace
RK
7556 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7557 result = expand_shift (LSHIFT_EXPR, mode, result,
7558 build_int_2 (tree_log2
7559 (TREE_OPERAND
7560 (binary_op, 1)),
7561 0),
e5e809f4 7562 (safe_from_p (temp, singleton, 1)
ac01eace
RK
7563 ? temp : NULL_RTX), 0);
7564
bbf6f052
RK
7565 if (result)
7566 {
906c4e36 7567 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7568 return expand_binop (mode, boptab, op1, result, temp,
7569 unsignedp, OPTAB_LIB_WIDEN);
7570 }
7571 else if (singleton == TREE_OPERAND (exp, 1))
7572 TREE_OPERAND (exp, 0)
7573 = invert_truthvalue (TREE_OPERAND (exp, 0));
7574 }
7575
dabf8373 7576 do_pending_stack_adjust ();
bbf6f052
RK
7577 NO_DEFER_POP;
7578 op0 = gen_label_rtx ();
7579
7580 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7581 {
7582 if (temp != 0)
7583 {
7584 /* If the target conflicts with the other operand of the
7585 binary op, we can't use it. Also, we can't use the target
7586 if it is a hard register, because evaluating the condition
7587 might clobber it. */
7588 if ((binary_op
e5e809f4 7589 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
bbf6f052
RK
7590 || (GET_CODE (temp) == REG
7591 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7592 temp = gen_reg_rtx (mode);
7593 store_expr (singleton, temp, 0);
7594 }
7595 else
906c4e36 7596 expand_expr (singleton,
2937cf87 7597 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7598 if (singleton == TREE_OPERAND (exp, 1))
7599 jumpif (TREE_OPERAND (exp, 0), op0);
7600 else
7601 jumpifnot (TREE_OPERAND (exp, 0), op0);
7602
956d6950 7603 start_cleanup_deferral ();
bbf6f052
RK
7604 if (binary_op && temp == 0)
7605 /* Just touch the other operand. */
7606 expand_expr (TREE_OPERAND (binary_op, 1),
906c4e36 7607 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7608 else if (binary_op)
7609 store_expr (build (TREE_CODE (binary_op), type,
7610 make_tree (type, temp),
7611 TREE_OPERAND (binary_op, 1)),
7612 temp, 0);
7613 else
7614 store_expr (build1 (TREE_CODE (unary_op), type,
7615 make_tree (type, temp)),
7616 temp, 0);
7617 op1 = op0;
bbf6f052 7618 }
bbf6f052
RK
7619 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7620 comparison operator. If we have one of these cases, set the
7621 output to A, branch on A (cse will merge these two references),
7622 then set the output to FOO. */
7623 else if (temp
7624 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7625 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7626 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7627 TREE_OPERAND (exp, 1), 0)
e9a25f70
JL
7628 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7629 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
e5e809f4 7630 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
bbf6f052
RK
7631 {
7632 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7633 temp = gen_reg_rtx (mode);
7634 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7635 jumpif (TREE_OPERAND (exp, 0), op0);
5dab5552 7636
956d6950 7637 start_cleanup_deferral ();
bbf6f052
RK
7638 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7639 op1 = op0;
7640 }
7641 else if (temp
7642 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7643 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7644 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7645 TREE_OPERAND (exp, 2), 0)
e9a25f70
JL
7646 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7647 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
e5e809f4 7648 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7649 {
7650 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7651 temp = gen_reg_rtx (mode);
7652 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7653 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 7654
956d6950 7655 start_cleanup_deferral ();
bbf6f052
RK
7656 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7657 op1 = op0;
7658 }
7659 else
7660 {
7661 op1 = gen_label_rtx ();
7662 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 7663
956d6950 7664 start_cleanup_deferral ();
bbf6f052
RK
7665 if (temp != 0)
7666 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7667 else
906c4e36
RK
7668 expand_expr (TREE_OPERAND (exp, 1),
7669 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
956d6950 7670 end_cleanup_deferral ();
bbf6f052
RK
7671 emit_queue ();
7672 emit_jump_insn (gen_jump (op1));
7673 emit_barrier ();
7674 emit_label (op0);
956d6950 7675 start_cleanup_deferral ();
bbf6f052
RK
7676 if (temp != 0)
7677 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7678 else
906c4e36
RK
7679 expand_expr (TREE_OPERAND (exp, 2),
7680 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7681 }
7682
956d6950 7683 end_cleanup_deferral ();
bbf6f052
RK
7684
7685 emit_queue ();
7686 emit_label (op1);
7687 OK_DEFER_POP;
5dab5552 7688
bbf6f052
RK
7689 return temp;
7690 }
7691
7692 case TARGET_EXPR:
7693 {
7694 /* Something needs to be initialized, but we didn't know
7695 where that thing was when building the tree. For example,
7696 it could be the return value of a function, or a parameter
7697 to a function which is laid out on the stack, or a temporary
7698 variable which must be passed by reference.
7699
7700 We guarantee that the expression will either be constructed
7701 or copied into our original target. */
7702
7703 tree slot = TREE_OPERAND (exp, 0);
2a888d4c 7704 tree cleanups = NULL_TREE;
5c062816 7705 tree exp1;
bbf6f052
RK
7706
7707 if (TREE_CODE (slot) != VAR_DECL)
7708 abort ();
7709
9c51f375
RK
7710 if (! ignore)
7711 target = original_target;
7712
bbf6f052
RK
7713 if (target == 0)
7714 {
7715 if (DECL_RTL (slot) != 0)
ac993f4f
MS
7716 {
7717 target = DECL_RTL (slot);
5c062816 7718 /* If we have already expanded the slot, don't do
ac993f4f 7719 it again. (mrs) */
5c062816
MS
7720 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7721 return target;
ac993f4f 7722 }
bbf6f052
RK
7723 else
7724 {
e9a25f70 7725 target = assign_temp (type, 2, 0, 1);
bbf6f052
RK
7726 /* All temp slots at this level must not conflict. */
7727 preserve_temp_slots (target);
7728 DECL_RTL (slot) = target;
e9a25f70
JL
7729 if (TREE_ADDRESSABLE (slot))
7730 {
7731 TREE_ADDRESSABLE (slot) = 0;
7732 mark_addressable (slot);
7733 }
bbf6f052 7734
e287fd6e
RK
7735 /* Since SLOT is not known to the called function
7736 to belong to its stack frame, we must build an explicit
7737 cleanup. This case occurs when we must build up a reference
7738 to pass the reference as an argument. In this case,
7739 it is very likely that such a reference need not be
7740 built here. */
7741
7742 if (TREE_OPERAND (exp, 2) == 0)
7743 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
2a888d4c 7744 cleanups = TREE_OPERAND (exp, 2);
e287fd6e 7745 }
bbf6f052
RK
7746 }
7747 else
7748 {
7749 /* This case does occur, when expanding a parameter which
7750 needs to be constructed on the stack. The target
7751 is the actual stack address that we want to initialize.
7752 The function we call will perform the cleanup in this case. */
7753
8c042b47
RS
7754 /* If we have already assigned it space, use that space,
7755 not the target that we were passed in, as our target
7756 parameter is only a hint. */
7757 if (DECL_RTL (slot) != 0)
7758 {
7759 target = DECL_RTL (slot);
7760 /* If we have already expanded the slot, don't do
7761 it again. (mrs) */
7762 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7763 return target;
7764 }
21002281
JW
7765 else
7766 {
7767 DECL_RTL (slot) = target;
7768 /* If we must have an addressable slot, then make sure that
7769 the RTL that we just stored in slot is OK. */
7770 if (TREE_ADDRESSABLE (slot))
7771 {
7772 TREE_ADDRESSABLE (slot) = 0;
7773 mark_addressable (slot);
7774 }
7775 }
bbf6f052
RK
7776 }
7777
4847c938 7778 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
5c062816
MS
7779 /* Mark it as expanded. */
7780 TREE_OPERAND (exp, 1) = NULL_TREE;
7781
e5e809f4 7782 TREE_USED (slot) = 1;
41531e5b 7783 store_expr (exp1, target, 0);
61d6b1cc 7784
e976b8b2 7785 expand_decl_cleanup (NULL_TREE, cleanups);
61d6b1cc 7786
41531e5b 7787 return target;
bbf6f052
RK
7788 }
7789
7790 case INIT_EXPR:
7791 {
7792 tree lhs = TREE_OPERAND (exp, 0);
7793 tree rhs = TREE_OPERAND (exp, 1);
7794 tree noncopied_parts = 0;
7795 tree lhs_type = TREE_TYPE (lhs);
7796
7797 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7798 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7799 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7800 TYPE_NONCOPIED_PARTS (lhs_type));
7801 while (noncopied_parts != 0)
7802 {
7803 expand_assignment (TREE_VALUE (noncopied_parts),
7804 TREE_PURPOSE (noncopied_parts), 0, 0);
7805 noncopied_parts = TREE_CHAIN (noncopied_parts);
7806 }
7807 return temp;
7808 }
7809
7810 case MODIFY_EXPR:
7811 {
7812 /* If lhs is complex, expand calls in rhs before computing it.
7813 That's so we don't compute a pointer and save it over a call.
7814 If lhs is simple, compute it first so we can give it as a
7815 target if the rhs is just a call. This avoids an extra temp and copy
7816 and prevents a partial subsumption, which makes bad code.
7817 Actually we could treat component_ref's of vars like vars. */
7818
7819 tree lhs = TREE_OPERAND (exp, 0);
7820 tree rhs = TREE_OPERAND (exp, 1);
7821 tree noncopied_parts = 0;
7822 tree lhs_type = TREE_TYPE (lhs);
7823
7824 temp = 0;
7825
7826 if (TREE_CODE (lhs) != VAR_DECL
7827 && TREE_CODE (lhs) != RESULT_DECL
b60334e8
RK
7828 && TREE_CODE (lhs) != PARM_DECL
7829 && ! (TREE_CODE (lhs) == INDIRECT_REF
7830 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
bbf6f052
RK
7831 preexpand_calls (exp);
7832
7833 /* Check for |= or &= of a bitfield of size one into another bitfield
7834 of size 1. In this case, (unless we need the result of the
7835 assignment) we can do this more efficiently with a
7836 test followed by an assignment, if necessary.
7837
7838 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7839 things change so we do, this code should be enhanced to
7840 support it. */
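	/* Sketch of the transformation (hypothetical one-bit fields):
	   for "s.f |= t.g;" with the result ignored, we emit the
	   equivalent of

	       if (t.g) s.f = 1;

	   a jump on T.G and a constant store, instead of a
	   read-modify-write of the destination bit.  For &= the store
	   is of 0 and is reached when T.G is false.  */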
7841 if (ignore
7842 && TREE_CODE (lhs) == COMPONENT_REF
7843 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7844 || TREE_CODE (rhs) == BIT_AND_EXPR)
7845 && TREE_OPERAND (rhs, 0) == lhs
7846 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7847 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7848 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7849 {
7850 rtx label = gen_label_rtx ();
7851
7852 do_jump (TREE_OPERAND (rhs, 1),
7853 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7854 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7855 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7856 (TREE_CODE (rhs) == BIT_IOR_EXPR
7857 ? integer_one_node
7858 : integer_zero_node)),
7859 0, 0);
e7c33f54 7860 do_pending_stack_adjust ();
bbf6f052
RK
7861 emit_label (label);
7862 return const0_rtx;
7863 }
7864
7865 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7866 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7867 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7868 TYPE_NONCOPIED_PARTS (lhs_type));
7869
7870 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7871 while (noncopied_parts != 0)
7872 {
7873 expand_assignment (TREE_PURPOSE (noncopied_parts),
7874 TREE_VALUE (noncopied_parts), 0, 0);
7875 noncopied_parts = TREE_CHAIN (noncopied_parts);
7876 }
7877 return temp;
7878 }
7879
6e7f84a7
APB
7880 case RETURN_EXPR:
7881 if (!TREE_OPERAND (exp, 0))
7882 expand_null_return ();
7883 else
7884 expand_return (TREE_OPERAND (exp, 0));
7885 return const0_rtx;
7886
bbf6f052
RK
7887 case PREINCREMENT_EXPR:
7888 case PREDECREMENT_EXPR:
7b8b9722 7889 return expand_increment (exp, 0, ignore);
bbf6f052
RK
7890
7891 case POSTINCREMENT_EXPR:
7892 case POSTDECREMENT_EXPR:
7893 /* Faster to treat as pre-increment if result is not used. */
7b8b9722 7894 return expand_increment (exp, ! ignore, ignore);
bbf6f052
RK
7895
7896 case ADDR_EXPR:
987c71d9 7897 /* If nonzero, TEMP will be set to the address of something that might
0f41302f 7898 be a MEM corresponding to a stack slot. */
987c71d9
RK
7899 temp = 0;
7900
bbf6f052
RK
7901 /* Are we taking the address of a nested function? */
7902 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
38ee6ed9 7903 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
e5e809f4
JL
7904 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
7905 && ! TREE_STATIC (exp))
bbf6f052
RK
7906 {
7907 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7908 op0 = force_operand (op0, target);
7909 }
682ba3a6
RK
7910 /* If we are taking the address of something erroneous, just
7911 return a zero. */
7912 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7913 return const0_rtx;
bbf6f052
RK
7914 else
7915 {
e287fd6e
RK
7916 /* We make sure to pass const0_rtx down if we came in with
7917 ignore set, to avoid doing the cleanups twice for something. */
7918 op0 = expand_expr (TREE_OPERAND (exp, 0),
7919 ignore ? const0_rtx : NULL_RTX, VOIDmode,
bbf6f052
RK
7920 (modifier == EXPAND_INITIALIZER
7921 ? modifier : EXPAND_CONST_ADDRESS));
896102d0 7922
119af78a
RK
7923 /* If we are going to ignore the result, OP0 will have been set
7924 to const0_rtx, so just return it. Don't get confused and
7925 think we are taking the address of the constant. */
7926 if (ignore)
7927 return op0;
7928
3539e816
MS
7929 op0 = protect_from_queue (op0, 0);
7930
896102d0
RK
7931 /* We would like the object in memory. If it is a constant,
7932 we can have it be statically allocated into memory. For
682ba3a6 7933 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
896102d0
RK
7934 memory and store the value into it. */
7935
7936 if (CONSTANT_P (op0))
7937 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7938 op0);
987c71d9 7939 else if (GET_CODE (op0) == MEM)
af5b53ed
RK
7940 {
7941 mark_temp_addr_taken (op0);
7942 temp = XEXP (op0, 0);
7943 }
896102d0 7944
682ba3a6 7945 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6c8538cc 7946 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
896102d0
RK
7947 {
7948 /* If this object is in a register, it must not
0f41302f 7949 be BLKmode. */
896102d0 7950 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
06089a8b 7951 rtx memloc = assign_temp (inner_type, 1, 1, 1);
896102d0 7952
7a0b7b9a 7953 mark_temp_addr_taken (memloc);
896102d0
RK
7954 emit_move_insn (memloc, op0);
7955 op0 = memloc;
7956 }
7957
bbf6f052
RK
7958 if (GET_CODE (op0) != MEM)
7959 abort ();
7960
7961 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
88f63c77
RK
7962 {
7963 temp = XEXP (op0, 0);
7964#ifdef POINTERS_EXTEND_UNSIGNED
7965 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7966 && mode == ptr_mode)
9fcfcce7 7967 temp = convert_memory_address (ptr_mode, temp);
88f63c77
RK
7968#endif
7969 return temp;
7970 }
987c71d9 7971
bbf6f052
RK
7972 op0 = force_operand (XEXP (op0, 0), target);
7973 }
987c71d9 7974
bbf6f052 7975 if (flag_force_addr && GET_CODE (op0) != REG)
987c71d9
RK
7976 op0 = force_reg (Pmode, op0);
7977
dc6d66b3
RK
7978 if (GET_CODE (op0) == REG
7979 && ! REG_USERVAR_P (op0))
7980 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
987c71d9
RK
7981
7982 /* If we might have had a temp slot, add an equivalent address
7983 for it. */
7984 if (temp != 0)
7985 update_temp_slot_address (temp, op0);
7986
88f63c77
RK
7987#ifdef POINTERS_EXTEND_UNSIGNED
7988 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7989 && mode == ptr_mode)
9fcfcce7 7990 op0 = convert_memory_address (ptr_mode, op0);
88f63c77
RK
7991#endif
7992
bbf6f052
RK
7993 return op0;
7994
    case ENTRY_VALUE_EXPR:
      abort ();

      /* COMPLEX type for Extended Pascal & Fortran.  */
    case COMPLEX_EXPR:
      {
	enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx insns;

	/* Get the rtx code of the operands.  */
	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

	start_sequence ();

	/* Move the real (op0) and imaginary (op1) parts to their location.  */
	emit_move_insn (gen_realpart (mode, target), op0);
	emit_move_insn (gen_imagpart (mode, target), op1);

	insns = get_insns ();
	end_sequence ();

	/* Complex construction should appear as a single unit.
	   If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
	else
	  emit_insns (insns);

	return target;
      }

    case REALPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_realpart (mode, op0);

    case IMAGPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_imagpart (mode, op0);
    case CONJ_EXPR:
      {
	enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx imag_t;
	rtx insns;

	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (mode);

	start_sequence ();

	/* Store the realpart and the negated imagpart to target.  */
	emit_move_insn (gen_realpart (partmode, target),
			gen_realpart (partmode, op0));

	imag_t = gen_imagpart (partmode, target);
	temp = expand_unop (partmode, neg_optab,
			    gen_imagpart (partmode, op0), imag_t, 0);
	if (temp != imag_t)
	  emit_move_insn (imag_t, temp);

	insns = get_insns ();
	end_sequence ();

	/* Conjugate should appear as a single unit.
	   If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
	else
	  emit_insns (insns);

	return target;
      }
    case TRY_CATCH_EXPR:
      {
	tree handler = TREE_OPERAND (exp, 1);

	expand_eh_region_start ();

	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	expand_eh_region_end (handler);

	return op0;
      }

    case POPDCC_EXPR:
      {
	rtx dcc = get_dynamic_cleanup_chain ();
	emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
	return const0_rtx;
      }

    case POPDHC_EXPR:
      {
	rtx dhc = get_dynamic_handler_chain ();
	emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
	return const0_rtx;
      }

    case ERROR_MARK:
      op0 = CONST0_RTX (tmode);
      if (op0 != 0)
	return op0;
      return const0_rtx;

    default:
      return (*lang_expand_expr) (exp, original_target, tmode, modifier);
    }

  /* Here to do an ordinary binary operator, generating an instruction
     from the optab already placed in `this_optab'.  */
 binop:
  preexpand_calls (exp);
  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
    subtarget = 0;
  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
 binop2:
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  if (temp == 0)
    abort ();
  return temp;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to (if it is not a POINTER_TYPE, 0 is returned).

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

static int
get_pointer_alignment (exp, max_align)
     tree exp;
     unsigned max_align;
{
  unsigned align, inner;

  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
    return 0;

  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case NOP_EXPR:
	case CONVERT_EXPR:
	case NON_LVALUE_EXPR:
	  exp = TREE_OPERAND (exp, 0);
	  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
	    return align;
	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
	  align = MIN (inner, max_align);
	  break;

	case PLUS_EXPR:
	  /* If sum of pointer + int, restrict our maximum alignment to that
	     imposed by the integer.  If not, we can't do any better than
	     ALIGN.  */
	  if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
	    return align;

	  while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
		  & (max_align - 1))
		 != 0)
	    max_align >>= 1;

	  exp = TREE_OPERAND (exp, 0);
	  break;

	case ADDR_EXPR:
	  /* See what we are pointing at and look at its alignment.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (TREE_CODE (exp) == FUNCTION_DECL)
	    align = FUNCTION_BOUNDARY;
	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
	    align = DECL_ALIGN (exp);
#ifdef CONSTANT_ALIGNMENT
	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
	    align = CONSTANT_ALIGNMENT (exp, align);
#endif
	  return MIN (align, max_align);

	default:
	  return align;
	}
    }
}
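/* Illustrative sketch, not part of the compiler: the PLUS_EXPR case above
   narrows MAX_ALIGN with the loop shown below.  For a byte offset of 2 and
   a starting maximum of 64 bits, the result is 16 bits, the largest power
   of two dividing the offset expressed in bits.  The helper name is
   hypothetical.  */
#if 0
static unsigned
narrow_alignment_by_offset (unsigned offset_bytes, unsigned max_align_bits)
{
  /* Halve MAX_ALIGN_BITS until the offset, in bits, is a multiple of it.  */
  while (((offset_bytes * BITS_PER_UNIT) & (max_align_bits - 1)) != 0)
    max_align_bits >>= 1;
  return max_align_bits;
}
#endif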
/* Return the tree node and offset if a given argument corresponds to
   a string constant.  */

static tree
string_constant (arg, ptr_offset)
     tree arg;
     tree *ptr_offset;
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = integer_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
	{
	  *ptr_offset = arg1;
	  return TREE_OPERAND (arg0, 0);
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
	{
	  *ptr_offset = arg0;
	  return TREE_OPERAND (arg1, 0);
	}
    }

  return 0;
}
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

static tree
c_strlen (src)
     tree src;
{
  tree offset_node;
  int offset, max;
  char *ptr;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;
  max = TREE_STRING_LENGTH (src);
  ptr = TREE_STRING_POINTER (src);
  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;
      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return 0;
      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  */
      /* This would perhaps not be valid if we were dealing with named
	 arrays in addition to literal string constants.  */
      return size_binop (MINUS_EXPR, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character.  */
  if (offset_node == 0)
    offset = 0;
  else
    {
      /* Did we get a long long offset?  If so, punt.  */
      if (TREE_INT_CST_HIGH (offset_node) != 0)
	return 0;
      offset = TREE_INT_CST_LOW (offset_node);
    }
  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      warning ("offset outside bounds of constant string");
      return 0;
    }
  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return size_int (strlen (ptr + offset));
}
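/* Illustrative sketch, not part of the compiler: with a constant string
   and a known offset, c_strlen reduces to the host-side computation below.
   For "foobar" + 2, PTR is "foobar" and OFFSET is 2, so the folded length
   is strlen ("obar") == 4.  The helper name is hypothetical.  */
#if 0
#include <string.h>

static size_t
folded_strlen (const char *ptr, size_t offset)
{
  /* OFFSET is the starting index into the literal; no further
     calculation is needed beyond one host strlen call.  */
  return strlen (ptr + offset);
}
#endif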
rtx
expand_builtin_return_addr (fndecl_code, count, tem)
     enum built_in_function fndecl_code;
     int count;
     rtx tem;
{
  int i;

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the sparc, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the sparc, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
    }

  /* For __builtin_frame_address, return what we've got.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return tem;

  /* For __builtin_return_address, get the return address from that
     frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_rtx_MEM (Pmode, tem);
#endif
  return tem;
}
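/* Illustrative sketch, not part of the compiler: on a target where the
   dynamic chain is the word the frame address points to and the return
   address is the word after it, the loop above corresponds to the
   host-side walk below.  Real targets adjust both steps through
   DYNAMIC_CHAIN_ADDRESS and RETURN_ADDR_RTX; the helper name is
   hypothetical.  */
#if 0
static void *
walk_frames_for_return_address (void **frame, int count)
{
  int i;

  /* Follow the saved-frame-pointer chain back COUNT frames.  */
  for (i = 0; i < count; i++)
    frame = (void **) *frame;

  /* The return address sits one word past the frame address.  */
  return frame[1];
}
#endif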
/* __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below (and for longjmp) is copied from the handling of
   non-local gotos.

   NOTE: This is intended for use by GNAT and the exception handling
   scheme in the compiler and will only work in the method used by
   them.  */

rtx
expand_builtin_setjmp (buf_addr, target, first_label, next_label)
     rtx buf_addr;
     rtx target;
     rtx first_label, next_label;
{
  rtx lab1 = gen_label_rtx ();
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  enum machine_mode value_mode;
  rtx stack_save;

  value_mode = TYPE_MODE (integer_type_node);

#ifdef POINTERS_EXTEND_UNSIGNED
  buf_addr = convert_memory_address (Pmode, buf_addr);
#endif

  buf_addr = force_reg (Pmode, buf_addr);

  if (target == 0 || GET_CODE (target) != REG
      || REGNO (target) < FIRST_PSEUDO_REGISTER)
    target = gen_reg_rtx (value_mode);

  emit_queue ();

  /* We store the frame pointer and the address of lab1 in the buffer
     and use the rest of it for the stack save area, which is
     machine-dependent.  */

#ifndef BUILTIN_SETJMP_FRAME_VALUE
#define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
#endif

  emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
		  BUILTIN_SETJMP_FRAME_VALUE);
  emit_move_insn (validize_mem
		  (gen_rtx_MEM (Pmode,
				plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)))),
		  gen_rtx_LABEL_REF (Pmode, lab1));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* Set TARGET to zero and branch to the first-time-through label.  */
  emit_move_insn (target, const0_rtx);
  emit_jump_insn (gen_jump (first_label));
  emit_barrier ();
  emit_label (lab1);

  /* Tell flow about the strange goings on.  */
  current_function_has_nonlocal_label = 1;

  /* We clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  The code below is from expand_end_bindings
     in stmt.c; see detailed documentation there.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      size_t i;
      static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == sizeof elim_regs / sizeof elim_regs[0])
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.
	     If there hasn't been space allocated for it yet, make
	     some now.  */
	  if (arg_pointer_save_area == 0)
	    arg_pointer_save_area
	      = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
	  emit_move_insn (virtual_incoming_args_rtx,
			  copy_to_reg (arg_pointer_save_area));
	}
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (lab1));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
  else
#endif
    {
      ; /* Nothing */
    }

  /* Set TARGET, and branch to the next-time-through label.  */
  emit_move_insn (target, const1_rtx);
  emit_jump_insn (gen_jump (next_label));
  emit_barrier ();

  return target;
}
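/* Illustrative sketch, not part of the compiler: the five-word buffer
   filled in above has the fixed layout below.  The struct is hypothetical
   and only documents the word offsets used by the emit_move_insn and
   emit_stack_save calls in this function.  */
#if 0
struct builtin_setjmp_buf
{
  void *frame;		/* word 0: BUILTIN_SETJMP_FRAME_VALUE */
  void *receiver;	/* word 1: address of the receiver label LAB1 */
  void *stack_save[3];	/* words 2..4: SAVE_NONLOCAL stack save area */
};
#endif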
void
expand_builtin_longjmp (buf_addr, value)
     rtx buf_addr, value;
{
  rtx fp, lab, stack;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

#ifdef POINTERS_EXTEND_UNSIGNED
  buf_addr = convert_memory_address (Pmode, buf_addr);
#endif
  buf_addr = force_reg (Pmode, buf_addr);

  /* We used to store value in static_chain_rtx, but that fails if pointers
     are smaller than integers.  We instead require that the user must pass
     a second argument of 1, because that is what builtin_setjmp will
     return.  This also makes EH slightly more efficient, since we are no
     longer copying around a value that we don't care about.  */
  if (value != const1_rtx)
    abort ();

#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (lab);
	}
    }
}
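/* Illustrative usage sketch, not part of the compiler: the only form
   accepted at the source level is the pairing below; a second argument
   other than the literal 1 is diagnosed before this function is
   reached.  */
#if 0
void
example (void)
{
  void *buf[5];

  if (__builtin_setjmp (buf) == 0)
    __builtin_longjmp (buf, 1);	/* the second argument must be 1 */
}
#endif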
static rtx
get_memory_rtx (exp)
     tree exp;
{
  rtx mem;
  int is_aggregate;

  mem = gen_rtx_MEM (BLKmode,
		     memory_address (BLKmode,
				     expand_expr (exp, NULL_RTX,
						  ptr_mode, EXPAND_SUM)));

  RTX_UNCHANGING_P (mem) = TREE_READONLY (exp);

  /* Figure out the type of the object pointed to.  Set MEM_IN_STRUCT_P
     if the value is the address of a structure or if the expression is
     cast to a pointer to structure type.  */
  is_aggregate = 0;

  while (TREE_CODE (exp) == NOP_EXPR)
    {
      tree cast_type = TREE_TYPE (exp);
      if (TREE_CODE (cast_type) == POINTER_TYPE
	  && AGGREGATE_TYPE_P (TREE_TYPE (cast_type)))
	{
	  is_aggregate = 1;
	  break;
	}
      exp = TREE_OPERAND (exp, 0);
    }

  if (is_aggregate == 0)
    {
      tree type;

      if (TREE_CODE (exp) == ADDR_EXPR)
	/* If this is the address of an object, check whether the
	   object is an array.  */
	type = TREE_TYPE (TREE_OPERAND (exp, 0));
      else
	type = TREE_TYPE (TREE_TYPE (exp));
      is_aggregate = AGGREGATE_TYPE_P (type);
    }

  MEM_SET_IN_STRUCT_P (mem, is_aggregate);
  return mem;
}
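/* Illustrative sketch, not part of the compiler: the NOP_EXPR walk above
   exists so that a cast to a pointer-to-aggregate type marks the MEM, as
   in the hypothetical fragment below, where BUF is only a character array
   but the destination MEM is still flagged as in-struct.  */
#if 0
#include <string.h>

struct s { int x, y; };

static void
example (struct s *src)
{
  char buf[sizeof (struct s)];

  /* The (struct s *) cast is what sets IS_AGGREGATE for the store.  */
  memcpy ((struct s *) buf, src, sizeof (struct s));
}
#endif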
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

#define CALLED_AS_BUILT_IN(NODE) \
  (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
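/* Illustrative sketch, not part of the compiler: CALLED_AS_BUILT_IN tests
   only the spelling of the callee's name, so of the two calls in the
   hypothetical fragment below only the first is even considered for
   inline expansion when not optimizing; the second falls through to the
   library.  */
#if 0
#include <string.h>

static size_t
example (const char *s)
{
  size_t a = __builtin_strlen (s);	/* name begins with "__builtin_" */
  size_t b = strlen (s);		/* plain name: library call at -O0 */
  return a + b;
}
#endif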
static rtx
expand_builtin (exp, target, subtarget, mode, ignore)
     tree exp;
     rtx target;
     rtx subtarget;
     enum machine_mode mode;
     int ignore;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  rtx op0;
  rtx lab1, insns;
  enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
  optab builtin_optab;
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_FABS:
      /* build_function_call changes these into ABS_EXPR.  */
      abort ();

    case BUILT_IN_SIN:
    case BUILT_IN_COS:
      /* Treat these like sqrt, but only if the user asks for them.  */
      if (! flag_fast_math)
	break;
    case BUILT_IN_FSQRT:
      /* If not optimizing, call the library function.  */
      if (! optimize)
	break;

      if (arglist == 0
	  /* Arg could be wrong type if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
	break;

      /* Stabilize and compute the argument.  */
      if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
	  && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
	{
	  exp = copy_node (exp);
	  arglist = copy_node (arglist);
	  TREE_OPERAND (exp, 1) = arglist;
	  TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
	}
      op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);

      /* Make a suitable register to place result in.  */
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      emit_queue ();
      start_sequence ();

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_SIN:
	  builtin_optab = sin_optab; break;
	case BUILT_IN_COS:
	  builtin_optab = cos_optab; break;
	case BUILT_IN_FSQRT:
	  builtin_optab = sqrt_optab; break;
	default:
	  abort ();
	}

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
			    builtin_optab, op0, target, 0);

      /* If we were unable to expand via the builtin, stop the
	 sequence (without outputting the insns) and break, causing
	 a call to the library function.  */
      if (target == 0)
	{
	  end_sequence ();
	  break;
	}

      /* Check the results by default.  But if flag_fast_math is turned on,
	 then assume sqrt will always be called with valid arguments.  */

      if (! flag_fast_math)
	{
	  /* Don't define the builtin FP instructions
	     if your machine is not IEEE.  */
	  if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
	    abort ();

	  lab1 = gen_label_rtx ();

	  /* Test the result; if it is NaN, set errno=EDOM because
	     the argument was not in the domain.  */
	  emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
				   0, 0, lab1);

#ifdef TARGET_EDOM
	  {
#ifdef GEN_ERRNO_RTX
	    rtx errno_rtx = GEN_ERRNO_RTX;
#else
	    rtx errno_rtx
	      = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif

	    emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
	  }
#else
	  /* We can't set errno=EDOM directly; let the library call do it.
	     Pop the arguments right away in case the call gets deleted.  */
	  NO_DEFER_POP;
	  expand_call (exp, target, 0);
	  OK_DEFER_POP;
#endif

	  emit_label (lab1);
	}

      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insns (insns);

      return target;
    case BUILT_IN_FMOD:
      break;

      /* __builtin_apply_args returns a block of memory allocated on
	 the stack into which is stored the arg pointer, structure
	 value address, static chain, and all the registers that might
	 possibly be used in performing a function call.  The code is
	 moved to the start of the function so the incoming values are
	 saved.  */
    case BUILT_IN_APPLY_ARGS:
      /* Don't do __builtin_apply_args more than once in a function.
	 Save the result of the first call and reuse it.  */
      if (apply_args_value != 0)
	return apply_args_value;
      {
	/* When this function is called, it means that registers must be
	   saved on entry to this function.  So we migrate the
	   call to the first insn of this function.  */
	rtx temp;
	rtx seq;

	start_sequence ();
	temp = expand_builtin_apply_args ();
	seq = get_insns ();
	end_sequence ();

	apply_args_value = temp;

	/* Put the sequence after the NOTE that starts the function.
	   If this is inside a SEQUENCE, make the outer-level insn
	   chain current, so the code is placed at the start of the
	   function.  */
	push_topmost_sequence ();
	emit_insns_before (seq, NEXT_INSN (get_insns ()));
	pop_topmost_sequence ();
	return temp;
      }
      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
	 FUNCTION with a copy of the parameters described by
	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
	 allocated on the stack into which is stored all the registers
	 that might possibly be used for returning the result of a
	 function.  ARGUMENTS is the value returned by
	 __builtin_apply_args.  ARGSIZE is the number of bytes of
	 arguments that must be copied.  ??? How should this value be
	 computed?  We'll also need a safe worst case value for varargs
	 functions.  */
    case BUILT_IN_APPLY:
      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
	  || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
	return const0_rtx;
      else
	{
	  int i;
	  tree t;
	  rtx ops[3];

	  for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
	    ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);

	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
	}

      /* __builtin_return (RESULT) causes the function to return the
	 value described by RESULT.  RESULT is the address of the block of
	 memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (arglist
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
	expand_builtin_return (expand_expr (TREE_VALUE (arglist),
					    NULL_RTX, VOIDmode, 0));
      return const0_rtx;
    case BUILT_IN_SAVEREGS:
      /* Don't do __builtin_saveregs more than once in a function.
	 Save the result of the first call and reuse it.  */
      if (saveregs_value != 0)
	return saveregs_value;
      {
	/* When this function is called, it means that registers must be
	   saved on entry to this function.  So we migrate the
	   call to the first insn of this function.  */
	rtx temp;
	rtx seq;

	/* Now really call the function.  `expand_call' does not call
	   expand_builtin, so there is no danger of infinite recursion here.  */
	start_sequence ();

#ifdef EXPAND_BUILTIN_SAVEREGS
	/* Do whatever the machine needs done in this case.  */
	temp = EXPAND_BUILTIN_SAVEREGS (arglist);
#else
	/* The register where the function returns its value
	   is likely to have something else in it, such as an argument.
	   So preserve that register around the call.  */

	if (value_mode != VOIDmode)
	  {
	    rtx valreg = hard_libcall_value (value_mode);
	    rtx saved_valreg = gen_reg_rtx (value_mode);

	    emit_move_insn (saved_valreg, valreg);
	    temp = expand_call (exp, target, ignore);
	    emit_move_insn (valreg, saved_valreg);
	  }
	else
	  /* Generate the call, putting the value in a pseudo.  */
	  temp = expand_call (exp, target, ignore);
#endif

	seq = get_insns ();
	end_sequence ();

	saveregs_value = temp;

	/* Put the sequence after the NOTE that starts the function.
	   If this is inside a SEQUENCE, make the outer-level insn
	   chain current, so the code is placed at the start of the
	   function.  */
	push_topmost_sequence ();
	emit_insns_before (seq, NEXT_INSN (get_insns ()));
	pop_topmost_sequence ();
	return temp;
      }
      /* __builtin_args_info (N) returns word N of the arg space info
	 for the current function.  The number and meanings of the words
	 are controlled by the definition of CUMULATIVE_ARGS.  */
    case BUILT_IN_ARGS_INFO:
      {
	int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
	int *word_ptr = (int *) &current_function_args_info;
#if 0
	/* These are used by the code below that is if 0'ed away.  */
	int i;
	tree type, elts, result;
#endif

	if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
	  fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
		 __FILE__, __LINE__);

	if (arglist != 0)
	  {
	    tree arg = TREE_VALUE (arglist);
	    if (TREE_CODE (arg) != INTEGER_CST)
	      error ("argument of `__builtin_args_info' must be constant");
	    else
	      {
		int wordnum = TREE_INT_CST_LOW (arg);

		if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
		  error ("argument of `__builtin_args_info' out of range");
		else
		  return GEN_INT (word_ptr[wordnum]);
	      }
	  }
	else
	  error ("missing argument in `__builtin_args_info'");

	return const0_rtx;

#if 0
	for (i = 0; i < nwords; i++)
	  elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));

	type = build_array_type (integer_type_node,
				 build_index_type (build_int_2 (nwords, 0)));
	result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
	TREE_CONSTANT (result) = 1;
	TREE_STATIC (result) = 1;
	result = build (INDIRECT_REF, build_pointer_type (type), result);
	TREE_CONSTANT (result) = 1;
	return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
#endif
      }

      /* Return the address of the first anonymous stack arg.  */
    case BUILT_IN_NEXT_ARG:
      {
	tree fntype = TREE_TYPE (current_function_decl);

	if ((TYPE_ARG_TYPES (fntype) == 0
	     || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
		 == void_type_node))
	    && ! current_function_varargs)
	  {
	    error ("`va_start' used in function with fixed args");
	    return const0_rtx;
	  }

	if (arglist)
	  {
	    tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
	    tree arg = TREE_VALUE (arglist);

	    /* Strip off all nops for the sake of the comparison.  This
	       is not quite the same as STRIP_NOPS.  It does more.
	       We must also strip off INDIRECT_EXPR for C++ reference
	       parameters.  */
	    while (TREE_CODE (arg) == NOP_EXPR
		   || TREE_CODE (arg) == CONVERT_EXPR
		   || TREE_CODE (arg) == NON_LVALUE_EXPR
		   || TREE_CODE (arg) == INDIRECT_REF)
	      arg = TREE_OPERAND (arg, 0);
	    if (arg != last_parm)
	      warning ("second parameter of `va_start' not last named argument");
	  }
	else if (! current_function_varargs)
	  /* Evidently an out of date version of <stdarg.h>; can't validate
	     va_start's second argument, but can still work as intended.  */
	  warning ("`__builtin_next_arg' called without an argument");
      }

      return expand_binop (Pmode, add_optab,
			   current_function_internal_arg_pointer,
			   current_function_arg_offset_rtx,
			   NULL_RTX, 0, OPTAB_LIB_WIDEN);
    case BUILT_IN_CLASSIFY_TYPE:
      if (arglist != 0)
	{
	  tree type = TREE_TYPE (TREE_VALUE (arglist));
	  enum tree_code code = TREE_CODE (type);
	  if (code == VOID_TYPE)
	    return GEN_INT (void_type_class);
	  if (code == INTEGER_TYPE)
	    return GEN_INT (integer_type_class);
	  if (code == CHAR_TYPE)
	    return GEN_INT (char_type_class);
	  if (code == ENUMERAL_TYPE)
	    return GEN_INT (enumeral_type_class);
	  if (code == BOOLEAN_TYPE)
	    return GEN_INT (boolean_type_class);
	  if (code == POINTER_TYPE)
	    return GEN_INT (pointer_type_class);
	  if (code == REFERENCE_TYPE)
	    return GEN_INT (reference_type_class);
	  if (code == OFFSET_TYPE)
	    return GEN_INT (offset_type_class);
	  if (code == REAL_TYPE)
	    return GEN_INT (real_type_class);
	  if (code == COMPLEX_TYPE)
	    return GEN_INT (complex_type_class);
	  if (code == FUNCTION_TYPE)
	    return GEN_INT (function_type_class);
	  if (code == METHOD_TYPE)
	    return GEN_INT (method_type_class);
	  if (code == RECORD_TYPE)
	    return GEN_INT (record_type_class);
	  if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
	    return GEN_INT (union_type_class);
	  if (code == ARRAY_TYPE)
	    {
	      if (TYPE_STRING_FLAG (type))
		return GEN_INT (string_type_class);
	      else
		return GEN_INT (array_type_class);
	    }
	  if (code == SET_TYPE)
	    return GEN_INT (set_type_class);
	  if (code == FILE_TYPE)
	    return GEN_INT (file_type_class);
	  if (code == LANG_TYPE)
	    return GEN_INT (lang_type_class);
	}
      return GEN_INT (no_type_class);
    case BUILT_IN_CONSTANT_P:
      if (arglist == 0)
	return const0_rtx;
      else
	{
	  tree arg = TREE_VALUE (arglist);
	  rtx tmp;

	  /* We return 1 for a numeric type that's known to be a constant
	     value at compile-time or for an aggregate type that's a
	     literal constant.  */
	  STRIP_NOPS (arg);

	  /* If we know this is a constant, emit the constant of one.  */
	  if (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
	      || (TREE_CODE (arg) == CONSTRUCTOR
		  && TREE_CONSTANT (arg))
	      || (TREE_CODE (arg) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST))
	    return const1_rtx;

	  /* If we aren't going to be running CSE or this expression
	     has side effects, show we don't know it to be a constant.
	     Likewise if it's a pointer or aggregate type since in those
	     cases we only want literals, since those are only optimized
	     when generating RTL, not later.  */
	  if (TREE_SIDE_EFFECTS (arg) || cse_not_expected
	      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (TREE_TYPE (arg)))
	    return const0_rtx;

	  /* Otherwise, emit (constant_p_rtx (ARG)) and let CSE get a
	     chance to see if it can deduce whether ARG is constant.  */
	  tmp = expand_expr (arg, NULL_RTX, VOIDmode, 0);
	  tmp = gen_rtx_CONSTANT_P_RTX (value_mode, tmp);
	  return tmp;
	}
    case BUILT_IN_FRAME_ADDRESS:
      /* The argument must be a nonnegative integer constant.
	 It counts the number of frames to scan up the stack.
	 The value is the address of that frame.  */
    case BUILT_IN_RETURN_ADDRESS:
      /* The argument must be a nonnegative integer constant.
	 It counts the number of frames to scan up the stack.
	 The value is the return address saved in that frame.  */
      if (arglist == 0)
	/* Warning about missing arg was already issued.  */
	return const0_rtx;
      else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
	       || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
	{
	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	    error ("invalid arg to `__builtin_frame_address'");
	  else
	    error ("invalid arg to `__builtin_return_address'");
	  return const0_rtx;
	}
      else
	{
	  rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
						TREE_INT_CST_LOW (TREE_VALUE (arglist)),
						hard_frame_pointer_rtx);

	  /* Some ports cannot access arbitrary stack frames.  */
	  if (tem == NULL)
	    {
	      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
		warning ("unsupported arg to `__builtin_frame_address'");
	      else
		warning ("unsupported arg to `__builtin_return_address'");
	      return const0_rtx;
	    }

	  /* For __builtin_frame_address, return what we've got.  */
	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	    return tem;

	  if (GET_CODE (tem) != REG)
	    tem = copy_to_reg (tem);
	  return tem;
	}
      /* Returns the address of the area where the structure is returned.
	 0 otherwise.  */
    case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      if (arglist != 0
	  || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
	  || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
	return const0_rtx;
      else
	return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);

    case BUILT_IN_ALLOCA:
      if (arglist == 0
	  /* Arg could be non-integer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
	break;

      /* Compute the argument.  */
      op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);

      /* Allocate the desired space.  */
      return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);

    case BUILT_IN_FFS:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      if (arglist == 0
	  /* Arg could be non-integer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
	break;

      /* Compute the argument.  */
      op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
      /* Compute ffs, into TARGET if possible.
	 Set TARGET to wherever the result comes back.  */
      target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
			    ffs_optab, op0, target, 1);
      if (target == 0)
	abort ();
      return target;
    case BUILT_IN_STRLEN:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
	break;
      else
	{
	  tree src = TREE_VALUE (arglist);
	  tree len = c_strlen (src);

	  int align
	    = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

	  rtx result, src_rtx, char_rtx;
	  enum machine_mode insn_mode = value_mode, char_mode;
	  enum insn_code icode;

	  /* If the length is known, just return it.  */
	  if (len != 0)
	    return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);

	  /* If SRC is not a pointer type, don't do this operation inline.  */
	  if (align == 0)
	    break;

	  /* Call a function if we can't compute strlen in the right mode.  */

	  while (insn_mode != VOIDmode)
	    {
	      icode = strlen_optab->handlers[(int) insn_mode].insn_code;
	      if (icode != CODE_FOR_nothing)
		break;

	      insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	    }
	  if (insn_mode == VOIDmode)
	    break;

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && GET_CODE (result) == REG
		 && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  /* Make sure the operands are acceptable to the predicates.  */

	  if (! (*insn_operand_predicate[(int) icode][0]) (result, insn_mode))
	    result = gen_reg_rtx (insn_mode);
	  src_rtx = memory_address (BLKmode,
				    expand_expr (src, NULL_RTX, ptr_mode,
						 EXPAND_NORMAL));

	  if (! (*insn_operand_predicate[(int) icode][1]) (src_rtx, Pmode))
	    src_rtx = copy_to_mode_reg (Pmode, src_rtx);

	  /* Check that the string is readable and has an end.  */
	  if (current_function_check_memory_usage)
	    emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
			       src_rtx, ptr_mode,
			       GEN_INT (MEMORY_USE_RO),
			       TYPE_MODE (integer_type_node));

	  char_rtx = const0_rtx;
	  char_mode = insn_operand_mode[(int) icode][2];
	  if (! (*insn_operand_predicate[(int) icode][2]) (char_rtx, char_mode))
	    char_rtx = copy_to_mode_reg (char_mode, char_rtx);

	  emit_insn (GEN_FCN (icode) (result,
				      gen_rtx_MEM (BLKmode, src_rtx),
				      char_rtx, GEN_INT (align)));

	  /* Return the value in the proper mode for this function.  */
	  if (GET_MODE (result) == value_mode)
	    return result;
	  else if (target != 0)
	    {
	      convert_move (target, result, 0);
	      return target;
	    }
	  else
	    return convert_to_mode (value_mode, result, 0);
	}
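      /* Illustrative sketch, not part of the compiler: with a literal
	 argument the length is folded by c_strlen above, so the
	 hypothetical fragment below compiles to the constant 5 with no
	 library call.  */
#if 0
      size_t n = strlen ("hello");
#endif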
    case BUILT_IN_STRCPY:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
	break;
      else
	{
	  tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));

	  if (len == 0)
	    break;

	  len = size_binop (PLUS_EXPR, len, integer_one_node);

	  chainon (arglist, build_tree_list (NULL_TREE, len));
	}
      /* Drops in.  */
    case BUILT_IN_MEMCPY:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
	      != POINTER_TYPE)
	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
	  || (TREE_CODE (TREE_TYPE (TREE_VALUE
				    (TREE_CHAIN (TREE_CHAIN (arglist)))))
	      != INTEGER_TYPE))
	break;
      else
	{
	  tree dest = TREE_VALUE (arglist);
	  tree src = TREE_VALUE (TREE_CHAIN (arglist));
	  tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));

	  int src_align
	    = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
	  int dest_align
	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
	  rtx dest_mem, src_mem, dest_addr, len_rtx;

	  /* If either SRC or DEST is not a pointer type, don't do
	     this operation in-line.  */
	  if (src_align == 0 || dest_align == 0)
	    {
	      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
		TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
	      break;
	    }

	  dest_mem = get_memory_rtx (dest);
	  src_mem = get_memory_rtx (src);
	  len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);

	  /* Just copy the rights of SRC to the rights of DEST.  */
	  if (current_function_check_memory_usage)
	    emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
			       XEXP (dest_mem, 0), ptr_mode,
			       XEXP (src_mem, 0), ptr_mode,
			       len_rtx, TYPE_MODE (sizetype));

	  /* Copy word part most expediently.  */
	  dest_addr
	    = emit_block_move (dest_mem, src_mem, len_rtx,
			       MIN (src_align, dest_align));

	  if (dest_addr == 0)
	    dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);

	  return dest_addr;
	}
    case BUILT_IN_MEMSET:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
	      != INTEGER_TYPE)
	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
	  || (INTEGER_TYPE
	      != (TREE_CODE (TREE_TYPE
			     (TREE_VALUE
			      (TREE_CHAIN (TREE_CHAIN (arglist))))))))
	break;
      else
	{
	  tree dest = TREE_VALUE (arglist);
	  tree val = TREE_VALUE (TREE_CHAIN (arglist));
	  tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));

	  int dest_align
	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
	  rtx dest_mem, dest_addr, len_rtx;

	  /* If DEST is not a pointer type, don't do this
	     operation in-line.  */
	  if (dest_align == 0)
	    break;

	  /* If the arguments have side-effects, then we can only evaluate
	     them at most once.  The following code evaluates them twice if
	     they are not constants because we break out to expand_call
	     in that case.  They can't be constants if they have side-effects
	     so we can check for that first.  Alternatively, we could call
	     save_expr to make multiple evaluation safe.  */
	  if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len))
	    break;

	  /* If VAL is not 0, don't do this operation in-line.  */
	  if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
	    break;

	  /* If LEN does not expand to a constant, don't do this
	     operation in-line.  */
	  len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
	  if (GET_CODE (len_rtx) != CONST_INT)
	    break;

	  dest_mem = get_memory_rtx (dest);

	  /* Just check that DST is writable and mark it as readable.  */
	  if (current_function_check_memory_usage)
	    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
			       XEXP (dest_mem, 0), ptr_mode,
			       len_rtx, TYPE_MODE (sizetype),
			       GEN_INT (MEMORY_USE_WO),
			       TYPE_MODE (integer_type_node));

	  dest_addr = clear_storage (dest_mem, len_rtx, dest_align);

	  if (dest_addr == 0)
	    dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);

	  return dest_addr;
	}
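      /* Illustrative sketch, not part of the compiler: the guards above
	 mean that of the two hypothetical calls below only the first is
	 expanded inline via clear_storage; the second has a nonzero fill
	 value and goes to the library.  */
#if 0
      memset (buf, 0, 64);	/* value 0, constant length: inline */
      memset (buf, 0xff, 64);	/* nonzero value: library call */
#endif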
/* These comparison functions need an instruction that returns an actual
   index.  An ordinary compare that just sets the condition codes
   is not enough.  */
#ifdef HAVE_cmpstrsi
    case BUILT_IN_STRCMP:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      /* If we need to check memory accesses, call the library function.  */
      if (current_function_check_memory_usage)
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
	break;
      else if (!HAVE_cmpstrsi)
	break;
      {
	tree arg1 = TREE_VALUE (arglist);
	tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
	tree len, len2;

	len = c_strlen (arg1);
	if (len)
	  len = size_binop (PLUS_EXPR, integer_one_node, len);
	len2 = c_strlen (arg2);
	if (len2)
	  len2 = size_binop (PLUS_EXPR, integer_one_node, len2);

	/* If we don't have a constant length for the first, use the length
	   of the second, if we know it.  We don't require a constant for
	   this case; some cost analysis could be done if both are available
	   but neither is constant.  For now, assume they're equally cheap.

	   If both strings have constant lengths, use the smaller.  This
	   could arise if optimization results in strcpy being called with
	   two fixed strings, or if the code was machine-generated.  We should
	   add some code to the `memcmp' handler below to deal with such
	   situations, someday.  */
	if (!len || TREE_CODE (len) != INTEGER_CST)
	  {
	    if (len2)
	      len = len2;
	    else if (len == 0)
	      break;
	  }
	else if (len2 && TREE_CODE (len2) == INTEGER_CST)
	  {
	    if (tree_int_cst_lt (len2, len))
	      len = len2;
	  }

	chainon (arglist, build_tree_list (NULL_TREE, len));
      }

      /* Drops in.  */
    case BUILT_IN_MEMCMP:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      /* If we need to check memory accesses, call the library function.  */
      if (current_function_check_memory_usage)
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
	break;
      else if (!HAVE_cmpstrsi)
	break;
      {
	tree arg1 = TREE_VALUE (arglist);
	tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
	tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
	rtx result;

	int arg1_align
	  = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
	int arg2_align
	  = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
	enum machine_mode insn_mode
	  = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];

	/* If we don't have POINTER_TYPE, call the function.  */
	if (arg1_align == 0 || arg2_align == 0)
	  {
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
	      TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
	    break;
	  }

	/* Make a place to write the result of the instruction.  */
	result = target;
	if (! (result != 0
	       && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
	       && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	  result = gen_reg_rtx (insn_mode);

	emit_insn (gen_cmpstrsi (result, get_memory_rtx (arg1),
				 get_memory_rtx (arg2),
				 expand_expr (len, NULL_RTX, VOIDmode, 0),
				 GEN_INT (MIN (arg1_align, arg2_align))));

	/* Return the value in the proper mode for this function.  */
	mode = TYPE_MODE (TREE_TYPE (exp));
	if (GET_MODE (result) == mode)
	  return result;
	else if (target != 0)
	  {
	    convert_move (target, result, 0);
	    return target;
	  }
	else
	  return convert_to_mode (mode, result, 0);
      }
#else
    case BUILT_IN_STRCMP:
    case BUILT_IN_MEMCMP:
      break;
#endif
    case BUILT_IN_SETJMP:
      if (arglist == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
	break;
      else
	{
	  rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
				      VOIDmode, 0);
	  rtx lab = gen_label_rtx ();
	  rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab);
	  emit_label (lab);
	  return ret;
	}

      /* __builtin_longjmp is passed a pointer to an array of five words.
	 It's similar to the C library longjmp function but works with
	 __builtin_setjmp above.  */
    case BUILT_IN_LONGJMP:
      if (arglist == 0 || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
	break;
      else
	{
	  rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
				      VOIDmode, 0);
	  rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
				   NULL_RTX, VOIDmode, 0);

	  if (value != const1_rtx)
	    {
	      error ("__builtin_longjmp second argument must be 1");
	      return const0_rtx;
	    }

	  expand_builtin_longjmp (buf_addr, value);
	  return const0_rtx;
	}
bbf6f052 9551
e0cd0770
JC
9552 case BUILT_IN_TRAP:
9553#ifdef HAVE_trap
9554 if (HAVE_trap)
9555 emit_insn (gen_trap ());
9556 else
9557#endif
9558 error ("__builtin_trap not supported by this target");
9559 emit_barrier ();
9560 return const0_rtx;
9561
b93a436e
JL
9562 /* Various hooks for the DWARF 2 __throw routine. */
9563 case BUILT_IN_UNWIND_INIT:
9564 expand_builtin_unwind_init ();
9565 return const0_rtx;
71038426
RH
9566 case BUILT_IN_DWARF_CFA:
9567 return virtual_cfa_rtx;
b93a436e
JL
9568#ifdef DWARF2_UNWIND_INFO
9569 case BUILT_IN_DWARF_FP_REGNUM:
9570 return expand_builtin_dwarf_fp_regnum ();
9571 case BUILT_IN_DWARF_REG_SIZE:
9572 return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
fb2ca25a 9573#endif
b93a436e
JL
9574 case BUILT_IN_FROB_RETURN_ADDR:
9575 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
9576 case BUILT_IN_EXTRACT_RETURN_ADDR:
9577 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
71038426
RH
9578 case BUILT_IN_EH_RETURN:
9579 expand_builtin_eh_return (TREE_VALUE (arglist),
9580 TREE_VALUE (TREE_CHAIN (arglist)),
9581 TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))));
b93a436e 9582 return const0_rtx;
ca695ac9 9583
b93a436e
JL
9584	    default:			/* Just do a library call for any unknown builtin. */
9585 error ("built-in function `%s' not currently supported",
9586 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
ca695ac9 9587 }
0006469d 9588
b93a436e
JL
9589 /* The switch statement above can drop through to cause the function
9590 to be called normally. */
0006469d 9591
b93a436e 9592 return expand_call (exp, target, ignore);
ca695ac9 9593}
b93a436e
JL
9594\f
9595/* Built-in functions to perform an untyped call and return. */
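/* As an illustrative sketch (not part of this file), a C-level forwarder
   built on these might look like:

	void *args = __builtin_apply_args ();
	void *result = __builtin_apply ((void (*)()) other_fn, args, 64);
	__builtin_return (result);

   where `other_fn' and the argument-block size 64 are placeholders only.  */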
0006469d 9596
b93a436e
JL
9597/* For each register that may be used for calling a function, this
9598 gives a mode used to copy the register's value. VOIDmode indicates
9599 the register is not used for calling a function. If the machine
9600 has register windows, this gives only the outbound registers.
9601 INCOMING_REGNO gives the corresponding inbound register. */
9602static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
0006469d 9603
b93a436e
JL
9604/* For each register that may be used for returning values, this gives
9605 a mode used to copy the register's value. VOIDmode indicates the
9606 register is not used for returning values. If the machine has
9607 register windows, this gives only the outbound registers.
9608 INCOMING_REGNO gives the corresponding inbound register. */
9609static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
0006469d 9610
b93a436e
JL
9611/* For each register that may be used for calling a function, this
9612 gives the offset of that register into the block returned by
9613 __builtin_apply_args. 0 indicates that the register is not
9614 used for calling a function. */
9615static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9616
9617/* Return the offset of register REGNO into the block returned by
9618 __builtin_apply_args. This is not declared static, since it is
9619 needed in objc-act.c. */
0006469d 9620
b93a436e
JL
9621int
9622apply_args_register_offset (regno)
9623 int regno;
9624{
9625 apply_args_size ();
0006469d 9626
b93a436e
JL
9627	  /* Arguments are always put in outgoing registers (in the argument
9628	     block) when that makes sense. */
9629#ifdef OUTGOING_REGNO
9630	  regno = OUTGOING_REGNO (regno);
9631#endif
9632 return apply_args_reg_offset[regno];
9633}
904762c8 9634
b93a436e
JL
9635/* Return the size required for the block returned by __builtin_apply_args,
9636 and initialize apply_args_mode. */
9637
9638static int
9639apply_args_size ()
0006469d 9640{
b93a436e
JL
9641 static int size = -1;
9642 int align, regno;
2f6e6d22 9643 enum machine_mode mode;
0006469d 9644
b93a436e
JL
9645 /* The values computed by this function never change. */
9646 if (size < 0)
ca695ac9 9647 {
b93a436e
JL
9648 /* The first value is the incoming arg-pointer. */
9649 size = GET_MODE_SIZE (Pmode);
0006469d 9650
b93a436e
JL
9651 /* The second value is the structure value address unless this is
9652 passed as an "invisible" first argument. */
9653 if (struct_value_rtx)
9654 size += GET_MODE_SIZE (Pmode);
0006469d 9655
b93a436e
JL
9656 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9657 if (FUNCTION_ARG_REGNO_P (regno))
9658 {
9659 /* Search for the proper mode for copying this register's
9660 value. I'm not sure this is right, but it works so far. */
9661 enum machine_mode best_mode = VOIDmode;
0006469d 9662
b93a436e
JL
9663 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9664 mode != VOIDmode;
9665 mode = GET_MODE_WIDER_MODE (mode))
9666 if (HARD_REGNO_MODE_OK (regno, mode)
9667 && HARD_REGNO_NREGS (regno, mode) == 1)
9668 best_mode = mode;
0006469d 9669
b93a436e
JL
9670 if (best_mode == VOIDmode)
9671 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9672 mode != VOIDmode;
9673 mode = GET_MODE_WIDER_MODE (mode))
9674 if (HARD_REGNO_MODE_OK (regno, mode)
9675 && (mov_optab->handlers[(int) mode].insn_code
9676 != CODE_FOR_nothing))
9677 best_mode = mode;
0006469d 9678
b93a436e
JL
9679 mode = best_mode;
9680 if (mode == VOIDmode)
9681 abort ();
904762c8 9682
b93a436e
JL
9683 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9684 if (size % align != 0)
9685 size = CEIL (size, align) * align;
9686 apply_args_reg_offset[regno] = size;
9687 size += GET_MODE_SIZE (mode);
9688 apply_args_mode[regno] = mode;
9689 }
9690 else
9691 {
9692 apply_args_mode[regno] = VOIDmode;
9693 apply_args_reg_offset[regno] = 0;
9694 }
9695 }
9696 return size;
9697}
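/* The layout this implies for the __builtin_apply_args block: the incoming
   arg pointer first, then the structure value address if one is passed
   separately, then one save slot per argument register, each slot aligned
   for its chosen mode.  */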
0006469d 9698
b93a436e
JL
9699/* Return the size required for the block returned by __builtin_apply,
9700 and initialize apply_result_mode. */
904762c8 9701
b93a436e
JL
9702static int
9703apply_result_size ()
9704{
9705 static int size = -1;
9706 int align, regno;
9707 enum machine_mode mode;
0006469d 9708
b93a436e
JL
9709 /* The values computed by this function never change. */
9710 if (size < 0)
9711 {
9712 size = 0;
0006469d 9713
b93a436e
JL
9714 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9715 if (FUNCTION_VALUE_REGNO_P (regno))
9716 {
9717 /* Search for the proper mode for copying this register's
9718 value. I'm not sure this is right, but it works so far. */
9719 enum machine_mode best_mode = VOIDmode;
0006469d 9720
b93a436e
JL
9721 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9722 mode != TImode;
9723 mode = GET_MODE_WIDER_MODE (mode))
9724 if (HARD_REGNO_MODE_OK (regno, mode))
9725 best_mode = mode;
0006469d 9726
b93a436e
JL
9727 if (best_mode == VOIDmode)
9728 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9729 mode != VOIDmode;
9730 mode = GET_MODE_WIDER_MODE (mode))
9731 if (HARD_REGNO_MODE_OK (regno, mode)
9732 && (mov_optab->handlers[(int) mode].insn_code
9733 != CODE_FOR_nothing))
9734 best_mode = mode;
0006469d 9735
b93a436e
JL
9736 mode = best_mode;
9737 if (mode == VOIDmode)
9738 abort ();
9739
9740 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9741 if (size % align != 0)
9742 size = CEIL (size, align) * align;
9743 size += GET_MODE_SIZE (mode);
9744 apply_result_mode[regno] = mode;
9745 }
9746 else
9747 apply_result_mode[regno] = VOIDmode;
9748
9749 /* Allow targets that use untyped_call and untyped_return to override
9750 the size so that machine-specific information can be stored here. */
9751#ifdef APPLY_RESULT_SIZE
9752 size = APPLY_RESULT_SIZE;
9753#endif
9754 }
9755 return size;
9756}
0006469d 9757
b93a436e
JL
9758#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9759/* Create a vector describing the result block RESULT. If SAVEP is true,
9760 the result block is used to save the values; otherwise it is used to
9761 restore the values. */
9762
9763static rtx
9764result_vector (savep, result)
9765 int savep;
9766 rtx result;
9767{
9768 int regno, size, align, nelts;
9769 enum machine_mode mode;
9770 rtx reg, mem;
9771 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9772
9773 size = nelts = 0;
9774 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9775 if ((mode = apply_result_mode[regno]) != VOIDmode)
9776 {
9777 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9778 if (size % align != 0)
9779 size = CEIL (size, align) * align;
9780 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
9781 mem = change_address (result, mode,
9782 plus_constant (XEXP (result, 0), size));
9783 savevec[nelts++] = (savep
9784 ? gen_rtx_SET (VOIDmode, mem, reg)
9785 : gen_rtx_SET (VOIDmode, reg, mem));
9786 size += GET_MODE_SIZE (mode);
ca695ac9 9787 }
b93a436e
JL
9788 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
9789}
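/* The PARALLEL built above becomes the extra operand of the untyped_call
   or untyped_return pattern, describing the register saves or restores
   the pattern must perform.  */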
9790#endif /* HAVE_untyped_call or HAVE_untyped_return */
0006469d 9791
b93a436e
JL
9792/* Save the state required to perform an untyped call with the same
9793 arguments as were passed to the current function. */
904762c8 9794
b93a436e
JL
9795static rtx
9796expand_builtin_apply_args ()
9797{
9798 rtx registers;
9799 int size, align, regno;
9800 enum machine_mode mode;
0006469d 9801
b93a436e
JL
9802 /* Create a block where the arg-pointer, structure value address,
9803 and argument registers can be saved. */
9804 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
0cb1d109 9805
b93a436e
JL
9806 /* Walk past the arg-pointer and structure value address. */
9807 size = GET_MODE_SIZE (Pmode);
9808 if (struct_value_rtx)
9809 size += GET_MODE_SIZE (Pmode);
0cb1d109 9810
b93a436e
JL
9811 /* Save each register used in calling a function to the block. */
9812 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9813 if ((mode = apply_args_mode[regno]) != VOIDmode)
9814 {
9815 rtx tem;
0cb1d109 9816
b93a436e
JL
9817 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9818 if (size % align != 0)
9819 size = CEIL (size, align) * align;
0006469d 9820
b93a436e 9821 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
0e8c9172 9822
b93a436e
JL
9823#ifdef STACK_REGS
9824 /* For reg-stack.c's stack register household.
9825 Compare with a similar piece of code in function.c. */
0006469d 9826
b93a436e
JL
9827 emit_insn (gen_rtx_USE (mode, tem));
9828#endif
0e8c9172 9829
b93a436e
JL
9830 emit_move_insn (change_address (registers, mode,
9831 plus_constant (XEXP (registers, 0),
9832 size)),
9833 tem);
9834 size += GET_MODE_SIZE (mode);
0e8c9172 9835 }
0006469d 9836
b93a436e
JL
9837 /* Save the arg pointer to the block. */
9838 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9839 copy_to_reg (virtual_incoming_args_rtx));
9840 size = GET_MODE_SIZE (Pmode);
0006469d 9841
b93a436e
JL
9842 /* Save the structure value address unless this is passed as an
9843 "invisible" first argument. */
9844 if (struct_value_incoming_rtx)
9845 {
9846 emit_move_insn (change_address (registers, Pmode,
9847 plus_constant (XEXP (registers, 0),
9848 size)),
9849 copy_to_reg (struct_value_incoming_rtx));
9850 size += GET_MODE_SIZE (Pmode);
9851 }
0006469d 9852
b93a436e
JL
9853 /* Return the address of the block. */
9854 return copy_addr_to_reg (XEXP (registers, 0));
9855}
0006469d 9856
b93a436e
JL
9857/* Perform an untyped call and save the state required to perform an
9858 untyped return of whatever value was returned by the given function. */
0006469d 9859
b93a436e
JL
9860static rtx
9861expand_builtin_apply (function, arguments, argsize)
9862 rtx function, arguments, argsize;
9863{
9864 int size, align, regno;
9865 enum machine_mode mode;
9866 rtx incoming_args, result, reg, dest, call_insn;
9867 rtx old_stack_level = 0;
9868 rtx call_fusage = 0;
0006469d 9869
b93a436e
JL
9870 /* Create a block where the return registers can be saved. */
9871 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9872
9873 /* ??? The argsize value should be adjusted here. */
9874
9875 /* Fetch the arg pointer from the ARGUMENTS block. */
9876 incoming_args = gen_reg_rtx (Pmode);
9877 emit_move_insn (incoming_args,
9878 gen_rtx_MEM (Pmode, arguments));
9879#ifndef STACK_GROWS_DOWNWARD
9880 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9881 incoming_args, 0, OPTAB_LIB_WIDEN);
9882#endif
9883
9884 /* Perform postincrements before actually calling the function. */
ca695ac9 9885 emit_queue ();
0006469d 9886
b93a436e
JL
9887 /* Push a new argument block and copy the arguments. */
9888 do_pending_stack_adjust ();
0006469d 9889
b93a436e
JL
9890	  /* Save the stack using the nonlocal save mechanism if available. */
9891#ifdef HAVE_save_stack_nonlocal
9892 if (HAVE_save_stack_nonlocal)
9893 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
9894 else
9895#endif
9896 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
0006469d 9897
b93a436e
JL
9898 /* Push a block of memory onto the stack to store the memory arguments.
9899 Save the address in a register, and copy the memory arguments. ??? I
9900	     haven't figured out how the calling convention macros affect this,
9901	     but it's likely that the source and/or destination addresses in
9902	     the block copy will need updating in machine-specific ways. */
9903 dest = allocate_dynamic_stack_space (argsize, 0, 0);
9904 emit_block_move (gen_rtx_MEM (BLKmode, dest),
9905 gen_rtx_MEM (BLKmode, incoming_args),
9906 argsize,
9907 PARM_BOUNDARY / BITS_PER_UNIT);
9908
9909 /* Refer to the argument block. */
9910 apply_args_size ();
9911 arguments = gen_rtx_MEM (BLKmode, arguments);
9912
9913 /* Walk past the arg-pointer and structure value address. */
9914 size = GET_MODE_SIZE (Pmode);
9915 if (struct_value_rtx)
9916 size += GET_MODE_SIZE (Pmode);
9917
9918 /* Restore each of the registers previously saved. Make USE insns
9919 for each of these registers for use in making the call. */
9920 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9921 if ((mode = apply_args_mode[regno]) != VOIDmode)
9922 {
9923 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9924 if (size % align != 0)
9925 size = CEIL (size, align) * align;
9926 reg = gen_rtx_REG (mode, regno);
9927 emit_move_insn (reg,
9928 change_address (arguments, mode,
9929 plus_constant (XEXP (arguments, 0),
9930 size)));
9931
9932 use_reg (&call_fusage, reg);
9933 size += GET_MODE_SIZE (mode);
9934 }
9935
9936 /* Restore the structure value address unless this is passed as an
9937 "invisible" first argument. */
9938 size = GET_MODE_SIZE (Pmode);
9939 if (struct_value_rtx)
0006469d 9940 {
b93a436e
JL
9941 rtx value = gen_reg_rtx (Pmode);
9942 emit_move_insn (value,
9943 change_address (arguments, Pmode,
9944 plus_constant (XEXP (arguments, 0),
9945 size)));
9946 emit_move_insn (struct_value_rtx, value);
9947 if (GET_CODE (struct_value_rtx) == REG)
9948 use_reg (&call_fusage, struct_value_rtx);
9949 size += GET_MODE_SIZE (Pmode);
ca695ac9 9950 }
0006469d 9951
b93a436e
JL
9952 /* All arguments and registers used for the call are set up by now! */
9953 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
0006469d 9954
b93a436e
JL
9955 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
9956 and we don't want to load it into a register as an optimization,
9957 because prepare_call_address already did it if it should be done. */
9958 if (GET_CODE (function) != SYMBOL_REF)
9959 function = memory_address (FUNCTION_MODE, function);
0006469d 9960
b93a436e
JL
9961 /* Generate the actual call instruction and save the return value. */
9962#ifdef HAVE_untyped_call
9963 if (HAVE_untyped_call)
9964 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
9965 result, result_vector (1, result)));
9966 else
9967#endif
9968#ifdef HAVE_call_value
9969 if (HAVE_call_value)
ca695ac9 9970 {
b93a436e 9971 rtx valreg = 0;
0006469d 9972
b93a436e
JL
9973 /* Locate the unique return register. It is not possible to
9974 express a call that sets more than one return register using
9975 call_value; use untyped_call for that. In fact, untyped_call
9976 only needs to save the return registers in the given block. */
9977 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9978 if ((mode = apply_result_mode[regno]) != VOIDmode)
9979 {
9980 if (valreg)
9981 abort (); /* HAVE_untyped_call required. */
9982 valreg = gen_rtx_REG (mode, regno);
9983 }
0006469d 9984
b93a436e
JL
9985 emit_call_insn (gen_call_value (valreg,
9986 gen_rtx_MEM (FUNCTION_MODE, function),
9987 const0_rtx, NULL_RTX, const0_rtx));
0006469d 9988
b93a436e
JL
9989 emit_move_insn (change_address (result, GET_MODE (valreg),
9990 XEXP (result, 0)),
9991 valreg);
ca695ac9 9992 }
b93a436e
JL
9993 else
9994#endif
9995 abort ();
0006469d 9996
b93a436e
JL
9997 /* Find the CALL insn we just emitted. */
9998 for (call_insn = get_last_insn ();
9999 call_insn && GET_CODE (call_insn) != CALL_INSN;
10000 call_insn = PREV_INSN (call_insn))
10001 ;
0006469d 10002
b93a436e
JL
10003 if (! call_insn)
10004 abort ();
0006469d 10005
b93a436e
JL
10006 /* Put the register usage information on the CALL. If there is already
10007 some usage information, put ours at the end. */
10008 if (CALL_INSN_FUNCTION_USAGE (call_insn))
0006469d 10009 {
b93a436e 10010 rtx link;
0006469d 10011
b93a436e
JL
10012 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
10013 link = XEXP (link, 1))
10014 ;
10015
10016 XEXP (link, 1) = call_fusage;
ca695ac9 10017 }
b93a436e
JL
10018 else
10019 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
0006469d 10020
b93a436e
JL
10021 /* Restore the stack. */
10022#ifdef HAVE_save_stack_nonlocal
10023 if (HAVE_save_stack_nonlocal)
10024 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
10025 else
10026#endif
10027 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
10028
10029 /* Return the address of the result block. */
10030 return copy_addr_to_reg (XEXP (result, 0));
0006469d 10031}
bbf6f052 10032
b93a436e 10033/* Perform an untyped return. */
ca695ac9
JB
10034
10035static void
b93a436e
JL
10036expand_builtin_return (result)
10037 rtx result;
bbf6f052 10038{
b93a436e
JL
10039 int size, align, regno;
10040 enum machine_mode mode;
10041 rtx reg;
10042 rtx call_fusage = 0;
bbf6f052 10043
b93a436e
JL
10044 apply_result_size ();
10045 result = gen_rtx_MEM (BLKmode, result);
bbf6f052 10046
b93a436e
JL
10047#ifdef HAVE_untyped_return
10048 if (HAVE_untyped_return)
ca695ac9 10049 {
b93a436e
JL
10050 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
10051 emit_barrier ();
10052 return;
ca695ac9 10053 }
b93a436e 10054#endif
1499e0a8 10055
b93a436e
JL
10056 /* Restore the return value and note that each value is used. */
10057 size = 0;
10058 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10059 if ((mode = apply_result_mode[regno]) != VOIDmode)
10060 {
10061 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
10062 if (size % align != 0)
10063 size = CEIL (size, align) * align;
10064 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
10065 emit_move_insn (reg,
10066 change_address (result, mode,
10067 plus_constant (XEXP (result, 0),
10068 size)));
10069
10070 push_to_sequence (call_fusage);
10071 emit_insn (gen_rtx_USE (VOIDmode, reg));
10072 call_fusage = get_insns ();
10073 end_sequence ();
10074 size += GET_MODE_SIZE (mode);
10075 }
10076
10077 /* Put the USE insns before the return. */
10078 emit_insns (call_fusage);
10079
10080	  /* Return whatever values were restored by jumping directly to the end
10081 of the function. */
10082 expand_null_return ();
ca695ac9
JB
10083}
10084\f
b93a436e
JL
10085/* Expand code for a post- or pre- increment or decrement
10086 and return the RTX for the result.
10087 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
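/* For example, for `i++' used for its value (POST == 1) the old value is
   copied out to be returned and the add is queued; for `++i' (POST == 0)
   the increment happens first and the new value is returned.  */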
1499e0a8 10088
b93a436e
JL
10089static rtx
10090expand_increment (exp, post, ignore)
10091 register tree exp;
10092 int post, ignore;
ca695ac9 10093{
b93a436e
JL
10094 register rtx op0, op1;
10095 register rtx temp, value;
10096 register tree incremented = TREE_OPERAND (exp, 0);
10097 optab this_optab = add_optab;
10098 int icode;
10099 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
10100 int op0_is_copy = 0;
10101 int single_insn = 0;
10102 /* 1 means we can't store into OP0 directly,
10103 because it is a subreg narrower than a word,
10104 and we don't dare clobber the rest of the word. */
10105 int bad_subreg = 0;
1499e0a8 10106
b93a436e
JL
10107 /* Stabilize any component ref that might need to be
10108 evaluated more than once below. */
10109 if (!post
10110 || TREE_CODE (incremented) == BIT_FIELD_REF
10111 || (TREE_CODE (incremented) == COMPONENT_REF
10112 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
10113 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
10114 incremented = stabilize_reference (incremented);
10115 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
10117	     ones into SAVE_EXPRs so that they don't accidentally get evaluated
10117 more than once by the code below. */
10118 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
10119 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
10120 incremented = save_expr (incremented);
e9a25f70 10121
b93a436e
JL
10122 /* Compute the operands as RTX.
10123 Note whether OP0 is the actual lvalue or a copy of it:
10124 I believe it is a copy iff it is a register or subreg
10125 and insns were generated in computing it. */
e9a25f70 10126
b93a436e
JL
10127 temp = get_last_insn ();
10128 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
e9a25f70 10129
b93a436e
JL
10130 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
10131 in place but instead must do sign- or zero-extension during assignment,
10132 so we copy it into a new register and let the code below use it as
10133 a copy.
e9a25f70 10134
b93a436e
JL
10135	     Note that we can safely modify this SUBREG since it is known not to be
10136 shared (it was made by the expand_expr call above). */
10137
10138 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
10139 {
10140 if (post)
10141 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
10142 else
10143 bad_subreg = 1;
10144 }
10145 else if (GET_CODE (op0) == SUBREG
10146 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
10147 {
10148 /* We cannot increment this SUBREG in place. If we are
10149 post-incrementing, get a copy of the old value. Otherwise,
10150 just mark that we cannot increment in place. */
10151 if (post)
10152 op0 = copy_to_reg (op0);
10153 else
10154 bad_subreg = 1;
e9a25f70
JL
10155 }
10156
b93a436e
JL
10157 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
10158 && temp != get_last_insn ());
10159 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
10160 EXPAND_MEMORY_USE_BAD);
1499e0a8 10161
b93a436e
JL
10162 /* Decide whether incrementing or decrementing. */
10163 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
10164 || TREE_CODE (exp) == PREDECREMENT_EXPR)
10165 this_optab = sub_optab;
10166
10167 /* Convert decrement by a constant into a negative increment. */
10168 if (this_optab == sub_optab
10169 && GET_CODE (op1) == CONST_INT)
ca695ac9 10170 {
b93a436e
JL
10171 op1 = GEN_INT (- INTVAL (op1));
10172 this_optab = add_optab;
ca695ac9 10173 }
1499e0a8 10174
b93a436e
JL
10175 /* For a preincrement, see if we can do this with a single instruction. */
10176 if (!post)
10177 {
10178 icode = (int) this_optab->handlers[(int) mode].insn_code;
10179 if (icode != (int) CODE_FOR_nothing
10180 /* Make sure that OP0 is valid for operands 0 and 1
10181 of the insn we want to queue. */
10182 && (*insn_operand_predicate[icode][0]) (op0, mode)
10183 && (*insn_operand_predicate[icode][1]) (op0, mode)
10184 && (*insn_operand_predicate[icode][2]) (op1, mode))
10185 single_insn = 1;
10186 }
bbf6f052 10187
b93a436e
JL
10188 /* If OP0 is not the actual lvalue, but rather a copy in a register,
10189 then we cannot just increment OP0. We must therefore contrive to
10190 increment the original value. Then, for postincrement, we can return
10191 OP0 since it is a copy of the old value. For preincrement, expand here
10192 unless we can do it with a single insn.
bbf6f052 10193
b93a436e
JL
10194 Likewise if storing directly into OP0 would clobber high bits
10195 we need to preserve (bad_subreg). */
10196 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
a358cee0 10197 {
b93a436e
JL
10198 /* This is the easiest way to increment the value wherever it is.
10199 Problems with multiple evaluation of INCREMENTED are prevented
10200 because either (1) it is a component_ref or preincrement,
10201 in which case it was stabilized above, or (2) it is an array_ref
10202 with constant index in an array in a register, which is
10203 safe to reevaluate. */
10204 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
10205 || TREE_CODE (exp) == PREDECREMENT_EXPR)
10206 ? MINUS_EXPR : PLUS_EXPR),
10207 TREE_TYPE (exp),
10208 incremented,
10209 TREE_OPERAND (exp, 1));
a358cee0 10210
b93a436e
JL
10211 while (TREE_CODE (incremented) == NOP_EXPR
10212 || TREE_CODE (incremented) == CONVERT_EXPR)
10213 {
10214 newexp = convert (TREE_TYPE (incremented), newexp);
10215 incremented = TREE_OPERAND (incremented, 0);
10216 }
bbf6f052 10217
b93a436e
JL
10218	      temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
10219 return post ? op0 : temp;
10220 }
bbf6f052 10221
b93a436e
JL
10222 if (post)
10223 {
10224 /* We have a true reference to the value in OP0.
10225 If there is an insn to add or subtract in this mode, queue it.
10226 Queueing the increment insn avoids the register shuffling
10227 that often results if we must increment now and first save
10228 the old value for subsequent use. */
bbf6f052 10229
b93a436e
JL
10230#if 0 /* Turned off to avoid making extra insn for indexed memref. */
10231 op0 = stabilize (op0);
10232#endif
41dfd40c 10233
b93a436e
JL
10234 icode = (int) this_optab->handlers[(int) mode].insn_code;
10235 if (icode != (int) CODE_FOR_nothing
10236 /* Make sure that OP0 is valid for operands 0 and 1
10237 of the insn we want to queue. */
10238 && (*insn_operand_predicate[icode][0]) (op0, mode)
10239 && (*insn_operand_predicate[icode][1]) (op0, mode))
10240 {
10241 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10242 op1 = force_reg (mode, op1);
bbf6f052 10243
b93a436e
JL
10244 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
10245 }
10246 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
10247 {
10248 rtx addr = (general_operand (XEXP (op0, 0), mode)
10249 ? force_reg (Pmode, XEXP (op0, 0))
10250 : copy_to_reg (XEXP (op0, 0)));
10251 rtx temp, result;
ca695ac9 10252
b93a436e
JL
10253 op0 = change_address (op0, VOIDmode, addr);
10254 temp = force_reg (GET_MODE (op0), op0);
10255 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10256 op1 = force_reg (mode, op1);
ca695ac9 10257
b93a436e
JL
10258 /* The increment queue is LIFO, thus we have to `queue'
10259 the instructions in reverse order. */
10260 enqueue_insn (op0, gen_move_insn (op0, temp));
10261 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
10262 return result;
bbf6f052
RK
10263 }
10264 }
ca695ac9 10265
b93a436e
JL
10266 /* Preincrement, or we can't increment with one simple insn. */
10267 if (post)
10268 /* Save a copy of the value before inc or dec, to return it later. */
10269 temp = value = copy_to_reg (op0);
10270 else
10271 /* Arrange to return the incremented value. */
10272 /* Copy the rtx because expand_binop will protect from the queue,
10273 and the results of that would be invalid for us to return
10274 if our caller does emit_queue before using our result. */
10275 temp = copy_rtx (value = op0);
bbf6f052 10276
b93a436e
JL
10277 /* Increment however we can. */
10278 op1 = expand_binop (mode, this_optab, value, op1,
7d384cc0 10279 current_function_check_memory_usage ? NULL_RTX : op0,
b93a436e
JL
10280 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
10281 /* Make sure the value is stored into OP0. */
10282 if (op1 != op0)
10283 emit_move_insn (op0, op1);
5718612f 10284
b93a436e
JL
10285 return temp;
10286}
10287\f
10288/* Expand all function calls contained within EXP, innermost ones first.
10289 But don't look within expressions that have sequence points.
10290 For each CALL_EXPR, record the rtx for its value
10291 in the CALL_EXPR_RTL field. */
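/* For example, in `x = f (g (y))' the inner call to g is expanded and its
   result recorded before the enclosing expression is expanded.  */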
5718612f 10292
b93a436e
JL
10293static void
10294preexpand_calls (exp)
10295 tree exp;
10296{
10297 register int nops, i;
10298 int type = TREE_CODE_CLASS (TREE_CODE (exp));
5718612f 10299
b93a436e
JL
10300 if (! do_preexpand_calls)
10301 return;
5718612f 10302
b93a436e 10303 /* Only expressions and references can contain calls. */
bbf6f052 10304
b93a436e
JL
10305 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
10306 return;
bbf6f052 10307
b93a436e
JL
10308 switch (TREE_CODE (exp))
10309 {
10310 case CALL_EXPR:
10311 /* Do nothing if already expanded. */
10312 if (CALL_EXPR_RTL (exp) != 0
10313 /* Do nothing if the call returns a variable-sized object. */
10314	  || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
10315 /* Do nothing to built-in functions. */
10316 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
10317 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
10318 == FUNCTION_DECL)
10319 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
10320 return;
bbf6f052 10321
b93a436e
JL
10322 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
10323 return;
bbf6f052 10324
b93a436e
JL
10325 case COMPOUND_EXPR:
10326 case COND_EXPR:
10327 case TRUTH_ANDIF_EXPR:
10328 case TRUTH_ORIF_EXPR:
10329 /* If we find one of these, then we can be sure
10330 the adjust will be done for it (since it makes jumps).
10331 Do it now, so that if this is inside an argument
10332 of a function, we don't get the stack adjustment
10333 after some other args have already been pushed. */
10334 do_pending_stack_adjust ();
10335 return;
bbf6f052 10336
b93a436e
JL
10337 case BLOCK:
10338 case RTL_EXPR:
10339 case WITH_CLEANUP_EXPR:
10340 case CLEANUP_POINT_EXPR:
10341 case TRY_CATCH_EXPR:
10342 return;
bbf6f052 10343
b93a436e
JL
10344 case SAVE_EXPR:
10345 if (SAVE_EXPR_RTL (exp) != 0)
10346 return;
10347
10348 default:
10349 break;
ca695ac9 10350 }
bbf6f052 10351
b93a436e
JL
10352 nops = tree_code_length[(int) TREE_CODE (exp)];
10353 for (i = 0; i < nops; i++)
10354 if (TREE_OPERAND (exp, i) != 0)
10355 {
10356 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
10357 if (type == 'e' || type == '<' || type == '1' || type == '2'
10358 || type == 'r')
10359 preexpand_calls (TREE_OPERAND (exp, i));
10360 }
10361}
10362\f
10363/* At the start of a function, record that we have no previously-pushed
10364 arguments waiting to be popped. */
bbf6f052 10365
b93a436e
JL
10366void
10367init_pending_stack_adjust ()
10368{
10369 pending_stack_adjust = 0;
10370}
bbf6f052 10371
b93a436e 10372/* When exiting from function, if safe, clear out any pending stack adjust
060fbabf
JL
10373 so the adjustment won't get done.
10374
10375 Note, if the current function calls alloca, then it must have a
10376 frame pointer regardless of the value of flag_omit_frame_pointer. */
bbf6f052 10377
b93a436e
JL
10378void
10379clear_pending_stack_adjust ()
10380{
10381#ifdef EXIT_IGNORE_STACK
10382 if (optimize > 0
060fbabf
JL
10383 && (! flag_omit_frame_pointer || current_function_calls_alloca)
10384 && EXIT_IGNORE_STACK
b93a436e
JL
10385 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
10386 && ! flag_inline_functions)
10387 pending_stack_adjust = 0;
10388#endif
10389}
bbf6f052 10390
b93a436e
JL
10391/* Pop any previously-pushed arguments that have not been popped yet. */
10392
10393void
10394do_pending_stack_adjust ()
10395{
10396 if (inhibit_defer_pop == 0)
ca695ac9 10397 {
b93a436e
JL
10398 if (pending_stack_adjust != 0)
10399 adjust_stack (GEN_INT (pending_stack_adjust));
10400 pending_stack_adjust = 0;
bbf6f052 10401 }
bbf6f052
RK
10402}
10403\f
b93a436e 10404/* Expand conditional expressions. */
bbf6f052 10405
b93a436e
JL
10406/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
10407 LABEL is an rtx of code CODE_LABEL, in this function and all the
10408 functions here. */
bbf6f052 10409
b93a436e
JL
10410void
10411jumpifnot (exp, label)
ca695ac9 10412 tree exp;
b93a436e 10413 rtx label;
bbf6f052 10414{
b93a436e
JL
10415 do_jump (exp, label, NULL_RTX);
10416}
bbf6f052 10417
b93a436e 10418/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
ca695ac9 10419
b93a436e
JL
10420void
10421jumpif (exp, label)
10422 tree exp;
10423 rtx label;
10424{
10425 do_jump (exp, NULL_RTX, label);
10426}
ca695ac9 10427
b93a436e
JL
10428/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
10429 the result is zero, or IF_TRUE_LABEL if the result is one.
10430 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
10431 meaning fall through in that case.
ca695ac9 10432
b93a436e
JL
10433 do_jump always does any pending stack adjust except when it does not
10434 actually perform a jump. An example where there is no jump
10435 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
ca695ac9 10436
b93a436e
JL
10437 This function is responsible for optimizing cases such as
10438 &&, || and comparison operators in EXP. */
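/* For example, `if (a && b)' never materializes a boolean value: the
   TRUTH_ANDIF_EXPR case below jumps to IF_FALSE_LABEL as soon as A is
   zero and evaluates B only otherwise.  */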
5718612f 10439
b93a436e
JL
10440void
10441do_jump (exp, if_false_label, if_true_label)
10442 tree exp;
10443 rtx if_false_label, if_true_label;
10444{
10445 register enum tree_code code = TREE_CODE (exp);
10446 /* Some cases need to create a label to jump to
10447 in order to properly fall through.
10448 These cases set DROP_THROUGH_LABEL nonzero. */
10449 rtx drop_through_label = 0;
10450 rtx temp;
10451 rtx comparison = 0;
10452 int i;
10453 tree type;
10454 enum machine_mode mode;
ca695ac9 10455
dbecbbe4
JL
10456#ifdef MAX_INTEGER_COMPUTATION_MODE
10457 check_max_integer_computation_mode (exp);
10458#endif
10459
b93a436e 10460 emit_queue ();
ca695ac9 10461
b93a436e 10462 switch (code)
ca695ac9 10463 {
b93a436e 10464 case ERROR_MARK:
ca695ac9 10465 break;
bbf6f052 10466
b93a436e
JL
10467 case INTEGER_CST:
10468 temp = integer_zerop (exp) ? if_false_label : if_true_label;
10469 if (temp)
10470 emit_jump (temp);
10471 break;
bbf6f052 10472
b93a436e
JL
10473#if 0
10474 /* This is not true with #pragma weak */
10475 case ADDR_EXPR:
10476 /* The address of something can never be zero. */
10477 if (if_true_label)
10478 emit_jump (if_true_label);
10479 break;
10480#endif
bbf6f052 10481
b93a436e
JL
10482 case NOP_EXPR:
10483 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
10484 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
10485 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
10486 goto normal;
10487 case CONVERT_EXPR:
10488 /* If we are narrowing the operand, we have to do the compare in the
10489 narrower mode. */
10490 if ((TYPE_PRECISION (TREE_TYPE (exp))
10491 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10492 goto normal;
10493 case NON_LVALUE_EXPR:
10494 case REFERENCE_EXPR:
10495 case ABS_EXPR:
10496 case NEGATE_EXPR:
10497 case LROTATE_EXPR:
10498 case RROTATE_EXPR:
10499 /* These cannot change zero->non-zero or vice versa. */
10500 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10501 break;
bbf6f052 10502
b93a436e
JL
10503#if 0
10504 /* This is never less insns than evaluating the PLUS_EXPR followed by
10505 a test and can be longer if the test is eliminated. */
10506 case PLUS_EXPR:
10507 /* Reduce to minus. */
10508 exp = build (MINUS_EXPR, TREE_TYPE (exp),
10509 TREE_OPERAND (exp, 0),
10510 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
10511 TREE_OPERAND (exp, 1))));
10512 /* Process as MINUS. */
ca695ac9 10513#endif
bbf6f052 10514
b93a436e
JL
10515 case MINUS_EXPR:
10516 /* Non-zero iff operands of minus differ. */
10517 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
10518 TREE_OPERAND (exp, 0),
10519 TREE_OPERAND (exp, 1)),
10520 NE, NE);
10521 break;
bbf6f052 10522
b93a436e
JL
10523 case BIT_AND_EXPR:
10524 /* If we are AND'ing with a small constant, do this comparison in the
10525 smallest type that fits. If the machine doesn't have comparisons
10526 that small, it will be converted back to the wider comparison.
10527 This helps if we are testing the sign bit of a narrower object.
10528 combine can't do this for us because it can't know whether a
10529 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
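      /* For instance, `if (ch & 0x80)' can be tested with a QImode
	 compare instead of first widening CH to a full word.  */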
bbf6f052 10530
b93a436e
JL
10531 if (! SLOW_BYTE_ACCESS
10532 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10533 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10534 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10535 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10536 && (type = type_for_mode (mode, 1)) != 0
10537 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10538 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10539 != CODE_FOR_nothing))
10540 {
10541 do_jump (convert (type, exp), if_false_label, if_true_label);
10542 break;
10543 }
10544 goto normal;
bbf6f052 10545
b93a436e
JL
10546 case TRUTH_NOT_EXPR:
10547 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10548 break;
bbf6f052 10549
b93a436e
JL
10550 case TRUTH_ANDIF_EXPR:
10551 if (if_false_label == 0)
10552 if_false_label = drop_through_label = gen_label_rtx ();
10553 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10554 start_cleanup_deferral ();
10555 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10556 end_cleanup_deferral ();
10557 break;
bbf6f052 10558
b93a436e
JL
10559 case TRUTH_ORIF_EXPR:
10560 if (if_true_label == 0)
10561 if_true_label = drop_through_label = gen_label_rtx ();
10562 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10563 start_cleanup_deferral ();
10564 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10565 end_cleanup_deferral ();
10566 break;
bbf6f052 10567
b93a436e
JL
10568 case COMPOUND_EXPR:
10569 push_temp_slots ();
10570 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10571 preserve_temp_slots (NULL_RTX);
10572 free_temp_slots ();
10573 pop_temp_slots ();
10574 emit_queue ();
10575 do_pending_stack_adjust ();
10576 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10577 break;
bbf6f052 10578
b93a436e
JL
10579 case COMPONENT_REF:
10580 case BIT_FIELD_REF:
10581 case ARRAY_REF:
10582 {
10583 int bitsize, bitpos, unsignedp;
10584 enum machine_mode mode;
10585 tree type;
10586 tree offset;
10587 int volatilep = 0;
10588 int alignment;
bbf6f052 10589
b93a436e
JL
10590 /* Get description of this reference. We don't actually care
10591 about the underlying object here. */
10592 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10593 &mode, &unsignedp, &volatilep,
10594 &alignment);
bbf6f052 10595
b93a436e
JL
10596 type = type_for_size (bitsize, unsignedp);
10597 if (! SLOW_BYTE_ACCESS
10598 && type != 0 && bitsize >= 0
10599 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10600 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10601 != CODE_FOR_nothing))
10602 {
10603 do_jump (convert (type, exp), if_false_label, if_true_label);
10604 break;
10605 }
10606 goto normal;
10607 }
bbf6f052 10608
b93a436e
JL
10609 case COND_EXPR:
10610 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10611 if (integer_onep (TREE_OPERAND (exp, 1))
10612 && integer_zerop (TREE_OPERAND (exp, 2)))
10613 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
bbf6f052 10614
b93a436e
JL
10615 else if (integer_zerop (TREE_OPERAND (exp, 1))
10616 && integer_onep (TREE_OPERAND (exp, 2)))
10617 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
bbf6f052 10618
b93a436e
JL
10619 else
10620 {
10621 register rtx label1 = gen_label_rtx ();
10622 drop_through_label = gen_label_rtx ();
bbf6f052 10623
b93a436e 10624 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
bbf6f052 10625
b93a436e
JL
10626 start_cleanup_deferral ();
10627 /* Now the THEN-expression. */
10628 do_jump (TREE_OPERAND (exp, 1),
10629 if_false_label ? if_false_label : drop_through_label,
10630 if_true_label ? if_true_label : drop_through_label);
10631 /* In case the do_jump just above never jumps. */
10632 do_pending_stack_adjust ();
10633 emit_label (label1);
bbf6f052 10634
b93a436e
JL
10635 /* Now the ELSE-expression. */
10636 do_jump (TREE_OPERAND (exp, 2),
10637 if_false_label ? if_false_label : drop_through_label,
10638 if_true_label ? if_true_label : drop_through_label);
10639 end_cleanup_deferral ();
10640 }
10641 break;
bbf6f052 10642
b93a436e
JL
10643 case EQ_EXPR:
10644 {
10645 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bbf6f052 10646
9ec36da5
JL
10647 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10648 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8d62b411
AS
10649 {
10650 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10651 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10652 do_jump
10653 (fold
10654 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10655 fold (build (EQ_EXPR, TREE_TYPE (exp),
10656 fold (build1 (REALPART_EXPR,
10657 TREE_TYPE (inner_type),
10658 exp0)),
10659 fold (build1 (REALPART_EXPR,
10660 TREE_TYPE (inner_type),
10661 exp1)))),
10662 fold (build (EQ_EXPR, TREE_TYPE (exp),
10663 fold (build1 (IMAGPART_EXPR,
10664 TREE_TYPE (inner_type),
10665 exp0)),
10666 fold (build1 (IMAGPART_EXPR,
10667 TREE_TYPE (inner_type),
10668 exp1)))))),
10669 if_false_label, if_true_label);
10670 }
9ec36da5
JL
10671
10672 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10673 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10674
b93a436e
JL
10675 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10676 && !can_compare_p (TYPE_MODE (inner_type)))
10677 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10678 else
10679 comparison = compare (exp, EQ, EQ);
10680 break;
10681 }
bbf6f052 10682
b93a436e
JL
10683 case NE_EXPR:
10684 {
10685 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bbf6f052 10686
9ec36da5
JL
10687 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10688 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8d62b411
AS
10689 {
10690 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10691 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10692 do_jump
10693 (fold
10694 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10695 fold (build (NE_EXPR, TREE_TYPE (exp),
10696 fold (build1 (REALPART_EXPR,
10697 TREE_TYPE (inner_type),
10698 exp0)),
10699 fold (build1 (REALPART_EXPR,
10700 TREE_TYPE (inner_type),
10701 exp1)))),
10702 fold (build (NE_EXPR, TREE_TYPE (exp),
10703 fold (build1 (IMAGPART_EXPR,
10704 TREE_TYPE (inner_type),
10705 exp0)),
10706 fold (build1 (IMAGPART_EXPR,
10707 TREE_TYPE (inner_type),
10708 exp1)))))),
10709 if_false_label, if_true_label);
10710 }
9ec36da5
JL
10711
10712 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10713 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10714
b93a436e
JL
10715 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10716 && !can_compare_p (TYPE_MODE (inner_type)))
10717 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10718 else
10719 comparison = compare (exp, NE, NE);
10720 break;
10721 }
bbf6f052 10722
b93a436e
JL
10723 case LT_EXPR:
10724 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10725 == MODE_INT)
10726 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10727 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10728 else
10729 comparison = compare (exp, LT, LTU);
10730 break;
bbf6f052 10731
b93a436e
JL
10732 case LE_EXPR:
10733 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10734 == MODE_INT)
10735 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10736 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10737 else
10738 comparison = compare (exp, LE, LEU);
10739 break;
bbf6f052 10740
b93a436e
JL
10741 case GT_EXPR:
10742 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10743 == MODE_INT)
10744 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10745 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10746 else
10747 comparison = compare (exp, GT, GTU);
10748 break;
bbf6f052 10749
b93a436e
JL
10750 case GE_EXPR:
10751 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10752 == MODE_INT)
10753 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10754 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10755 else
10756 comparison = compare (exp, GE, GEU);
10757 break;
bbf6f052 10758
b93a436e
JL
10759 default:
10760 normal:
10761 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10762#if 0
10763 /* This is not needed any more and causes poor code since it causes
10764 comparisons and tests from non-SI objects to have different code
10765 sequences. */
10766 /* Copy to register to avoid generating bad insns by cse
10767 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10768 if (!cse_not_expected && GET_CODE (temp) == MEM)
10769 temp = copy_to_reg (temp);
ca695ac9 10770#endif
b93a436e
JL
10771 do_pending_stack_adjust ();
10772 if (GET_CODE (temp) == CONST_INT)
10773 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10774 else if (GET_CODE (temp) == LABEL_REF)
10775 comparison = const_true_rtx;
10776 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10777 && !can_compare_p (GET_MODE (temp)))
10778 /* Note swapping the labels gives us not-equal. */
10779 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10780 else if (GET_MODE (temp) != VOIDmode)
10781 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10782 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10783 GET_MODE (temp), NULL_RTX, 0);
10784 else
10785 abort ();
10786 }
bbf6f052 10787
b93a436e
JL
10788 /* Do any postincrements in the expression that was tested. */
10789 emit_queue ();
bbf6f052 10790
b93a436e
JL
10791 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10792 straight into a conditional jump instruction as the jump condition.
10793 Otherwise, all the work has been done already. */
bbf6f052 10794
b93a436e
JL
10795 if (comparison == const_true_rtx)
10796 {
10797 if (if_true_label)
10798 emit_jump (if_true_label);
10799 }
10800 else if (comparison == const0_rtx)
10801 {
10802 if (if_false_label)
10803 emit_jump (if_false_label);
10804 }
10805 else if (comparison)
10806 do_jump_for_compare (comparison, if_false_label, if_true_label);
bbf6f052 10807
b93a436e
JL
10808 if (drop_through_label)
10809 {
10810 /* If do_jump produces code that might be jumped around,
10811 do any stack adjusts from that code, before the place
10812 where control merges in. */
10813 do_pending_stack_adjust ();
10814 emit_label (drop_through_label);
10815 }
bbf6f052 10816}
b93a436e
JL
10817\f
10818/* Given a comparison expression EXP for values too wide to be compared
10819 with one insn, test the comparison and jump to the appropriate label.
10820 The code of EXP is ignored; we always test GT if SWAP is 0,
10821 and LT if SWAP is 1. */
bbf6f052 10822
b93a436e
JL
10823static void
10824do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10825 tree exp;
10826 int swap;
10827 rtx if_false_label, if_true_label;
10828{
10829 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10830 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10831 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10832 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10833 rtx drop_through_label = 0;
10834 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10835 int i;
bbf6f052 10836
b93a436e
JL
10837 if (! if_true_label || ! if_false_label)
10838 drop_through_label = gen_label_rtx ();
10839 if (! if_true_label)
10840 if_true_label = drop_through_label;
10841 if (! if_false_label)
10842 if_false_label = drop_through_label;
bbf6f052 10843
b93a436e
JL
10844 /* Compare a word at a time, high order first. */
10845 for (i = 0; i < nwords; i++)
f81497d9 10846 {
b93a436e
JL
10847 rtx comp;
10848 rtx op0_word, op1_word;
10849
10850 if (WORDS_BIG_ENDIAN)
10851 {
10852 op0_word = operand_subword_force (op0, i, mode);
10853 op1_word = operand_subword_force (op1, i, mode);
10854 }
f81497d9 10855 else
b93a436e
JL
10856 {
10857 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10858 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10859 }
10860
10861 /* All but high-order word must be compared as unsigned. */
10862 comp = compare_from_rtx (op0_word, op1_word,
10863 (unsignedp || i > 0) ? GTU : GT,
10864 unsignedp, word_mode, NULL_RTX, 0);
10865 if (comp == const_true_rtx)
10866 emit_jump (if_true_label);
10867 else if (comp != const0_rtx)
10868 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10869
10870 /* Consider lower words only if these are equal. */
10871 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10872 NULL_RTX, 0);
10873 if (comp == const_true_rtx)
10874 emit_jump (if_false_label);
10875 else if (comp != const0_rtx)
10876 do_jump_for_compare (comp, NULL_RTX, if_false_label);
f81497d9 10877 }
ca695ac9 10878
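  /* All words compared equal, so the strict inequality is false.  */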
b93a436e
JL
10879 if (if_false_label)
10880 emit_jump (if_false_label);
10881 if (drop_through_label)
10882 emit_label (drop_through_label);
f81497d9
RS
10883}
10884
b93a436e
JL
10885/* Compare OP0 with OP1, word at a time, in mode MODE.
10886 UNSIGNEDP says to do unsigned comparison.
10887 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
f81497d9 10888
b93a436e
JL
10889void
10890do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10891 enum machine_mode mode;
10892 int unsignedp;
10893 rtx op0, op1;
10894 rtx if_false_label, if_true_label;
f81497d9 10895{
b93a436e
JL
10896 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10897 rtx drop_through_label = 0;
10898 int i;
f81497d9 10899
b93a436e
JL
10900 if (! if_true_label || ! if_false_label)
10901 drop_through_label = gen_label_rtx ();
10902 if (! if_true_label)
10903 if_true_label = drop_through_label;
10904 if (! if_false_label)
10905 if_false_label = drop_through_label;
f81497d9 10906
b93a436e
JL
10907 /* Compare a word at a time, high order first. */
10908 for (i = 0; i < nwords; i++)
10909 {
10910 rtx comp;
10911 rtx op0_word, op1_word;
bbf6f052 10912
b93a436e
JL
10913 if (WORDS_BIG_ENDIAN)
10914 {
10915 op0_word = operand_subword_force (op0, i, mode);
10916 op1_word = operand_subword_force (op1, i, mode);
10917 }
10918 else
10919 {
10920 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10921 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10922 }
bbf6f052 10923
b93a436e
JL
10924 /* All but high-order word must be compared as unsigned. */
10925 comp = compare_from_rtx (op0_word, op1_word,
10926 (unsignedp || i > 0) ? GTU : GT,
10927 unsignedp, word_mode, NULL_RTX, 0);
10928 if (comp == const_true_rtx)
10929 emit_jump (if_true_label);
10930 else if (comp != const0_rtx)
10931 do_jump_for_compare (comp, NULL_RTX, if_true_label);
bbf6f052 10932
b93a436e
JL
10933 /* Consider lower words only if these are equal. */
10934 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10935 NULL_RTX, 0);
10936 if (comp == const_true_rtx)
10937 emit_jump (if_false_label);
10938 else if (comp != const0_rtx)
10939 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10940 }
bbf6f052 10941
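  /* All words compared equal, so OP0 is not greater than OP1.  */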
b93a436e
JL
10942 if (if_false_label)
10943 emit_jump (if_false_label);
10944 if (drop_through_label)
10945 emit_label (drop_through_label);
bbf6f052
RK
10946}
10947
b93a436e
JL
10948/* Given an EQ_EXPR expression EXP for values too wide to be compared
10949 with one insn, test the comparison and jump to the appropriate label. */
bbf6f052 10950
b93a436e
JL
10951static void
10952do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10953 tree exp;
10954 rtx if_false_label, if_true_label;
bbf6f052 10955{
b93a436e
JL
10956 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10957 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10958 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10959 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10960 int i;
10961 rtx drop_through_label = 0;
bbf6f052 10962
b93a436e
JL
10963 if (! if_false_label)
10964 drop_through_label = if_false_label = gen_label_rtx ();
bbf6f052 10965
b93a436e
JL
10966 for (i = 0; i < nwords; i++)
10967 {
10968 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10969 operand_subword_force (op1, i, mode),
10970 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10971 word_mode, NULL_RTX, 0);
10972 if (comp == const_true_rtx)
10973 emit_jump (if_false_label);
10974 else if (comp != const0_rtx)
10975 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10976 }
bbf6f052 10977
b93a436e
JL
10978 if (if_true_label)
10979 emit_jump (if_true_label);
10980 if (drop_through_label)
10981 emit_label (drop_through_label);
bbf6f052 10982}
b93a436e
JL
10983\f
10984/* Jump according to whether OP0 is 0.
10985 We assume that OP0 has an integer mode that is too wide
10986 for the available compare insns. */
bbf6f052 10987
f5963e61 10988void
b93a436e
JL
10989do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10990 rtx op0;
10991 rtx if_false_label, if_true_label;
ca695ac9 10992{
b93a436e
JL
10993 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10994 rtx part;
10995 int i;
10996 rtx drop_through_label = 0;
bbf6f052 10997
b93a436e
JL
10998 /* The fastest way of doing this comparison on almost any machine is to
10999 "or" all the words and compare the result. If all have to be loaded
11000 from memory and this is a very wide item, it's possible this may
11001 be slower, but that's highly unlikely. */
bbf6f052 11002
b93a436e
JL
11003 part = gen_reg_rtx (word_mode);
11004 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
11005 for (i = 1; i < nwords && part != 0; i++)
11006 part = expand_binop (word_mode, ior_optab, part,
11007 operand_subword_force (op0, i, GET_MODE (op0)),
11008 part, 1, OPTAB_WIDEN);
bbf6f052 11009
b93a436e
JL
11010 if (part != 0)
11011 {
11012 rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
11013 NULL_RTX, 0);
0f41302f 11014
b93a436e
JL
11015 if (comp == const_true_rtx)
11016 emit_jump (if_false_label);
11017 else if (comp == const0_rtx)
11018 emit_jump (if_true_label);
11019 else
11020 do_jump_for_compare (comp, if_false_label, if_true_label);
bbf6f052 11021
b93a436e
JL
11022 return;
11023 }
bbf6f052 11024
b93a436e
JL
11025 /* If we couldn't do the "or" simply, do this with a series of compares. */
11026 if (! if_false_label)
11027 drop_through_label = if_false_label = gen_label_rtx ();
bbf6f052 11028
b93a436e
JL
11029 for (i = 0; i < nwords; i++)
11030 {
11031 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
11032 GET_MODE (op0)),
11033 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
11034 if (comp == const_true_rtx)
11035 emit_jump (if_false_label);
11036 else if (comp != const0_rtx)
11037 do_jump_for_compare (comp, if_false_label, NULL_RTX);
11038 }
bbf6f052 11039
b93a436e
JL
11040 if (if_true_label)
11041 emit_jump (if_true_label);
0f41302f 11042
b93a436e
JL
11043 if (drop_through_label)
11044 emit_label (drop_through_label);
bbf6f052 11045}
bbf6f052 11046
b93a436e
JL
11047/* Given a comparison expression in rtl form, output conditional branches to
11048 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
bbf6f052 11049
b93a436e
JL
11050static void
11051do_jump_for_compare (comparison, if_false_label, if_true_label)
11052 rtx comparison, if_false_label, if_true_label;
bbf6f052 11053{
b93a436e
JL
11054 if (if_true_label)
11055 {
11056 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
d804ed43
RH
11057 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])
11058 (if_true_label));
b93a436e
JL
11059 else
11060 abort ();
ca695ac9 11061
b93a436e
JL
11062 if (if_false_label)
11063 emit_jump (if_false_label);
11064 }
11065 else if (if_false_label)
11066 {
d804ed43
RH
11067 rtx first = get_last_insn (), insn, branch;
11068 int br_count;
0f41302f 11069
b93a436e
JL
11070	 /* Output the branch with the opposite condition. Then try to invert
11071	 what is generated. If more than one insn is a branch, or if the
11072	 branch is not the last insn written, or if it cannot be inverted,
11073	 make a true label, redirect the branches aimed at the false label
11074	 to it, emit a jump to the false label, and define the true label. */
d804ed43
RH
11075 /* ??? Note that we wouldn't have to do any of this nonsense if
11076 we passed both labels into a combined compare-and-branch.
11077 Ah well, jump threading does a good job of repairing the damage. */

      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])
                        (if_false_label));
      else
        abort ();

      /* Here we get the first insn that was just emitted.  It used to be
         the case that, on some machines, emitting the branch would discard
         the previous compare insn and emit a replacement.  This isn't
         done anymore, but abort if we see that FIRST is deleted.  */

      if (first == 0)
        first = get_insns ();
      else if (INSN_DELETED_P (first))
        abort ();
      else
        first = NEXT_INSN (first);

      /* Look for multiple branches in this sequence, as might be generated
         for a multi-word integer comparison.  */

      br_count = 0;
      branch = NULL_RTX;
      for (insn = first; insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == JUMP_INSN)
          {
            branch = insn;
            br_count += 1;
          }

      /* If we've got one branch at the end of the sequence,
         we can try to invert it.  */

      if (br_count == 1 && NEXT_INSN (branch) == NULL_RTX)
        {
          rtx insn_label;
          insn_label = XEXP (condjump_label (branch), 0);
          JUMP_LABEL (branch) = insn_label;

          if (insn_label != if_false_label)
            abort ();

          if (invert_jump (branch, if_false_label))
            return;
        }

      /* Multiple branches, or inversion failed.  Convert to branches
         around an unconditional jump.  */

      if_true_label = gen_label_rtx ();
      for (insn = first; insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == JUMP_INSN)
          {
            rtx insn_label;
            insn_label = XEXP (condjump_label (insn), 0);
            JUMP_LABEL (insn) = insn_label;

            if (insn_label == if_false_label)
              redirect_jump (insn, if_true_label);
          }
      emit_jump (if_false_label);
      emit_label (if_true_label);
    }
}
\f
/* Generate code for a comparison expression EXP
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */
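
/* As a sketch: for a signed "a < b", this emits a compare insn that sets
   (cc0) and returns the rtx (lt (cc0) (const_int 0)), which a caller such
   as do_jump_for_compare can then turn into a conditional branch.  */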

static rtx
compare (exp, signed_code, unsigned_code)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
{
  register rtx op0
    = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  register rtx op1
    = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
  register enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);
  enum rtx_code code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  return compare_from_rtx (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}

/* Like compare but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
\f
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
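
/* As a sketch, the set/jump/set fallback for "target = (a < b)" emits the
   equivalent of

       target = 1;
       if (a < b) goto label;
       target = 0;
     label:

   with the two constants swapped when INVERT is set.  */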

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
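  /* For instance, the signed test "x <= -1" becomes "x < 0", and the
     unsigned test "x < 1" becomes "x <= 0" (LEU against zero), so the
     special cases below only need to recognize comparisons with zero.  */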

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
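  /* Concretely: "(x & 8) != 0" becomes "(x >> 3) & 1", while the EQ form
     "(x & 8) == 0" becomes "((x >> 3) ^ 1) & 1".  */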

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift by a constant and the shift count plus
         BITNUM stays within the precision of the type, adjust BITNUM
         and INNER.  */
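      /* E.g. testing bit 1 of (x >> 2) is the same as testing bit 3 of X
         itself, so "((x >> 2) & 2) != 0" drops the shift and tests
         bit 3 of X directly.  */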

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
              < TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (subtarget == 0 || GET_CODE (subtarget) != REG
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
\f
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
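
  /* A worked example, assuming case values 3 through 10: the caller has
     already computed INDEX = i - 3, and the single unsigned test
     "(unsigned) (i - 3) > RANGE" rejects both i < 3 (which wraps around
     to a huge unsigned value) and i beyond the last case.  */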

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           0, default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

#endif /* HAVE_tablejump */