1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "real.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-config.h"
34 #include "insn-attr.h"
35 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
36 #include "expr.h"
37 #include "optabs.h"
38 #include "libfuncs.h"
39 #include "recog.h"
40 #include "reload.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "langhooks.h"
46 #include "intl.h"
47 #include "tm_p.h"
48
49 /* Decide whether a function's arguments should be processed
50 from first to last or from last to first.
51
 52 They should be processed from last to first if the stack and args grow
 53 in opposite directions, but only if we have push insns. */
54
55 #ifdef PUSH_ROUNDING
56
57 #ifndef PUSH_ARGS_REVERSED
58 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
59 #define PUSH_ARGS_REVERSED /* If it's last to first. */
60 #endif
61 #endif
62
63 #endif
64
65 #ifndef STACK_PUSH_CODE
66 #ifdef STACK_GROWS_DOWNWARD
67 #define STACK_PUSH_CODE PRE_DEC
68 #else
69 #define STACK_PUSH_CODE PRE_INC
70 #endif
71 #endif
72
73 /* Assume that case vectors are not pc-relative. */
74 #ifndef CASE_VECTOR_PC_RELATIVE
75 #define CASE_VECTOR_PC_RELATIVE 0
76 #endif
77
78 /* Convert defined/undefined to boolean. */
79 #ifdef TARGET_MEM_FUNCTIONS
80 #undef TARGET_MEM_FUNCTIONS
81 #define TARGET_MEM_FUNCTIONS 1
82 #else
83 #define TARGET_MEM_FUNCTIONS 0
84 #endif
85
86
87 /* If this is nonzero, we do not bother generating VOLATILE
88 around volatile memory references, and we are willing to
89 output indirect addresses. If cse is to follow, we reject
90 indirect addresses so a useful potential cse is generated;
91 if it is used only once, instruction combination will produce
92 the same indirect address eventually. */
93 int cse_not_expected;
94
95 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
96 static tree placeholder_list = 0;
97
98 /* This structure is used by move_by_pieces to describe the move to
99 be performed. */
100 struct move_by_pieces
101 {
102 rtx to;
103 rtx to_addr;
104 int autinc_to;
105 int explicit_inc_to;
106 rtx from;
107 rtx from_addr;
108 int autinc_from;
109 int explicit_inc_from;
110 unsigned HOST_WIDE_INT len;
111 HOST_WIDE_INT offset;
112 int reverse;
113 };
114
 115 /* This structure is used by store_by_pieces to describe the store to
 116    be performed. */
117
118 struct store_by_pieces
119 {
120 rtx to;
121 rtx to_addr;
122 int autinc_to;
123 int explicit_inc_to;
124 unsigned HOST_WIDE_INT len;
125 HOST_WIDE_INT offset;
126 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
127 PTR constfundata;
128 int reverse;
129 };
130
131 static rtx enqueue_insn PARAMS ((rtx, rtx));
132 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
133 PARAMS ((unsigned HOST_WIDE_INT,
134 unsigned int));
135 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
136 struct move_by_pieces *));
137 static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
138 static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
139 static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
140 static tree emit_block_move_libcall_fn PARAMS ((int));
141 static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
142 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
143 enum machine_mode));
144 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
145 unsigned int));
146 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
147 unsigned int));
148 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
149 enum machine_mode,
150 struct store_by_pieces *));
151 static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
152 static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
153 static tree clear_storage_libcall_fn PARAMS ((int));
154 static rtx compress_float_constant PARAMS ((rtx, rtx));
155 static rtx get_subtarget PARAMS ((rtx));
156 static int is_zeros_p PARAMS ((tree));
157 static int mostly_zeros_p PARAMS ((tree));
158 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
159 HOST_WIDE_INT, enum machine_mode,
160 tree, tree, int, int));
161 static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
162 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
163 HOST_WIDE_INT, enum machine_mode,
164 tree, enum machine_mode, int, tree,
165 int));
166 static rtx var_rtx PARAMS ((tree));
167 static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
168 static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
169 static int is_aligning_offset PARAMS ((tree, tree));
170 static rtx expand_increment PARAMS ((tree, int, int));
171 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
172 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
173 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
174 rtx, rtx));
175 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
176 #ifdef PUSH_ROUNDING
177 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
178 #endif
179 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
180
181 /* Record for each mode whether we can move a register directly to or
182 from an object of that mode in memory. If we can't, we won't try
183 to use that mode directly when accessing a field of that mode. */
184
185 static char direct_load[NUM_MACHINE_MODES];
186 static char direct_store[NUM_MACHINE_MODES];
187
188 /* Record for each mode whether we can float-extend from memory. */
189
190 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
191
192 /* If a memory-to-memory move would take MOVE_RATIO or more simple
193 move-instruction sequences, we will do a movstr or libcall instead. */
194
195 #ifndef MOVE_RATIO
196 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
197 #define MOVE_RATIO 2
198 #else
199 /* If we are optimizing for space (-Os), cut down the default move ratio. */
200 #define MOVE_RATIO (optimize_size ? 3 : 15)
201 #endif
202 #endif
203
204 /* This macro is used to determine whether move_by_pieces should be called
205 to perform a structure copy. */
206 #ifndef MOVE_BY_PIECES_P
207 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
208 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
209 #endif
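/* Editor's note (illustrative; not part of the original source): how the
   default heuristic plays out.  On a hypothetical 64-bit target with
   MOVE_MAX == 8, an aligned 16-byte copy costs two word moves, well under
   MOVE_RATIO (15 by default, 3 under -Os, or 2 when a movstr pattern
   exists), so it is expanded inline even when optimizing for size.  A
   target header may also override these knobs, e.g.:

     #define MOVE_RATIO 4
     #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
       (move_by_pieces_ninsns (SIZE, ALIGN) < 3)

   The values above are purely illustrative.  */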
210
211 /* If a clear memory operation would take CLEAR_RATIO or more simple
212 move-instruction sequences, we will do a clrstr or libcall instead. */
213
214 #ifndef CLEAR_RATIO
215 #if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
216 #define CLEAR_RATIO 2
217 #else
218 /* If we are optimizing for space, cut down the default clear ratio. */
219 #define CLEAR_RATIO (optimize_size ? 3 : 15)
220 #endif
221 #endif
222
223 /* This macro is used to determine whether clear_by_pieces should be
224 called to clear storage. */
225 #ifndef CLEAR_BY_PIECES_P
226 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
227 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
228 #endif
229
230 /* This array records the insn_code of insns to perform block moves. */
231 enum insn_code movstr_optab[NUM_MACHINE_MODES];
232
233 /* This array records the insn_code of insns to perform block clears. */
234 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
235
236 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
237
238 #ifndef SLOW_UNALIGNED_ACCESS
239 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
240 #endif
241 \f
242 /* This is run once per compilation to set up which modes can be used
243 directly in memory and to initialize the block move optab. */
244
245 void
246 init_expr_once ()
247 {
248 rtx insn, pat;
249 enum machine_mode mode;
250 int num_clobbers;
251 rtx mem, mem1;
252 rtx reg;
253
254 /* Try indexing by frame ptr and try by stack ptr.
255 It is known that on the Convex the stack ptr isn't a valid index.
256 With luck, one or the other is valid on any machine. */
257 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
258 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
259
260 /* A scratch register we can modify in-place below to avoid
261 useless RTL allocations. */
262 reg = gen_rtx_REG (VOIDmode, -1);
263
264 insn = rtx_alloc (INSN);
265 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
266 PATTERN (insn) = pat;
267
268 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
269 mode = (enum machine_mode) ((int) mode + 1))
270 {
271 int regno;
272
273 direct_load[(int) mode] = direct_store[(int) mode] = 0;
274 PUT_MODE (mem, mode);
275 PUT_MODE (mem1, mode);
276 PUT_MODE (reg, mode);
277
278 /* See if there is some register that can be used in this mode and
279 directly loaded or stored from memory. */
280
281 if (mode != VOIDmode && mode != BLKmode)
282 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
283 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
284 regno++)
285 {
286 if (! HARD_REGNO_MODE_OK (regno, mode))
287 continue;
288
289 REGNO (reg) = regno;
290
291 SET_SRC (pat) = mem;
292 SET_DEST (pat) = reg;
293 if (recog (pat, insn, &num_clobbers) >= 0)
294 direct_load[(int) mode] = 1;
295
296 SET_SRC (pat) = mem1;
297 SET_DEST (pat) = reg;
298 if (recog (pat, insn, &num_clobbers) >= 0)
299 direct_load[(int) mode] = 1;
300
301 SET_SRC (pat) = reg;
302 SET_DEST (pat) = mem;
303 if (recog (pat, insn, &num_clobbers) >= 0)
304 direct_store[(int) mode] = 1;
305
306 SET_SRC (pat) = reg;
307 SET_DEST (pat) = mem1;
308 if (recog (pat, insn, &num_clobbers) >= 0)
309 direct_store[(int) mode] = 1;
310 }
311 }
312
313 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
314
315 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
316 mode = GET_MODE_WIDER_MODE (mode))
317 {
318 enum machine_mode srcmode;
319 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
320 srcmode = GET_MODE_WIDER_MODE (srcmode))
321 {
322 enum insn_code ic;
323
324 ic = can_extend_p (mode, srcmode, 0);
325 if (ic == CODE_FOR_nothing)
326 continue;
327
328 PUT_MODE (mem, srcmode);
329
330 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
331 float_extend_from_mem[mode][srcmode] = true;
332 }
333 }
334 }
335
336 /* This is run at the start of compiling a function. */
337
338 void
339 init_expr ()
340 {
341 cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));
342
343 pending_chain = 0;
344 pending_stack_adjust = 0;
345 stack_pointer_delta = 0;
346 inhibit_defer_pop = 0;
347 saveregs_value = 0;
348 apply_args_value = 0;
349 forced_labels = 0;
350 }
351
352 /* Small sanity check that the queue is empty at the end of a function. */
353
354 void
355 finish_expr_for_function ()
356 {
357 if (pending_chain)
358 abort ();
359 }
360 \f
361 /* Manage the queue of increment instructions to be output
362 for POSTINCREMENT_EXPR expressions, etc. */
363
364 /* Queue up to increment (or change) VAR later. BODY says how:
365 BODY should be the same thing you would pass to emit_insn
366 to increment right away. It will go to emit_insn later on.
367
368 The value is a QUEUED expression to be used in place of VAR
369 where you want to guarantee the pre-incrementation value of VAR. */
370
371 static rtx
372 enqueue_insn (var, body)
373 rtx var, body;
374 {
375 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
376 body, pending_chain);
377 return pending_chain;
378 }
379
380 /* Use protect_from_queue to convert a QUEUED expression
381 into something that you can put immediately into an instruction.
382 If the queued incrementation has not happened yet,
383 protect_from_queue returns the variable itself.
384 If the incrementation has happened, protect_from_queue returns a temp
385 that contains a copy of the old value of the variable.
386
387 Any time an rtx which might possibly be a QUEUED is to be put
388 into an instruction, it must be passed through protect_from_queue first.
389 QUEUED expressions are not meaningful in instructions.
390
391 Do not pass a value through protect_from_queue and then hold
392 on to it for a while before putting it in an instruction!
393 If the queue is flushed in between, incorrect code will result. */
394
395 rtx
396 protect_from_queue (x, modify)
397 rtx x;
398 int modify;
399 {
400 RTX_CODE code = GET_CODE (x);
401
402 #if 0 /* A QUEUED can hang around after the queue is forced out. */
403 /* Shortcut for most common case. */
404 if (pending_chain == 0)
405 return x;
406 #endif
407
408 if (code != QUEUED)
409 {
410 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
411 use of autoincrement. Make a copy of the contents of the memory
412 location rather than a copy of the address, but not if the value is
413 of mode BLKmode. Don't modify X in place since it might be
414 shared. */
415 if (code == MEM && GET_MODE (x) != BLKmode
416 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
417 {
418 rtx y = XEXP (x, 0);
419 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
420
421 if (QUEUED_INSN (y))
422 {
423 rtx temp = gen_reg_rtx (GET_MODE (x));
424
425 emit_insn_before (gen_move_insn (temp, new),
426 QUEUED_INSN (y));
427 return temp;
428 }
429
430 /* Copy the address into a pseudo, so that the returned value
431 remains correct across calls to emit_queue. */
432 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
433 }
434
435 /* Otherwise, recursively protect the subexpressions of all
436 the kinds of rtx's that can contain a QUEUED. */
437 if (code == MEM)
438 {
439 rtx tem = protect_from_queue (XEXP (x, 0), 0);
440 if (tem != XEXP (x, 0))
441 {
442 x = copy_rtx (x);
443 XEXP (x, 0) = tem;
444 }
445 }
446 else if (code == PLUS || code == MULT)
447 {
448 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
449 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
450 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
451 {
452 x = copy_rtx (x);
453 XEXP (x, 0) = new0;
454 XEXP (x, 1) = new1;
455 }
456 }
457 return x;
458 }
459 /* If the increment has not happened, use the variable itself. Copy it
460 into a new pseudo so that the value remains correct across calls to
461 emit_queue. */
462 if (QUEUED_INSN (x) == 0)
463 return copy_to_reg (QUEUED_VAR (x));
464 /* If the increment has happened and a pre-increment copy exists,
465 use that copy. */
466 if (QUEUED_COPY (x) != 0)
467 return QUEUED_COPY (x);
468 /* The increment has happened but we haven't set up a pre-increment copy.
469 Set one up now, and use it. */
470 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
471 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
472 QUEUED_INSN (x));
473 return QUEUED_COPY (x);
474 }
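/* Editor's sketch (illustrative; not part of the original source): the
   intended calling protocol.  Each operand is protected immediately before
   it is placed in an insn, and the queue is flushed at the end of the
   statement.  The variable names below are hypothetical.  */
#if 0
  rtx op0;	/* operand from earlier expansion; may be a QUEUED
		   left behind by a post-increment */
  rtx target;	/* destination register */

  op0 = protect_from_queue (op0, 0);		/* read-only use */
  target = protect_from_queue (target, 1);	/* will be modified */
  emit_move_insn (target, op0);
  emit_queue ();	/* at end of statement, perform queued increments */
#endif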
475
476 /* Return nonzero if X contains a QUEUED expression:
477 if it contains anything that will be altered by a queued increment.
478 We handle only combinations of MEM, PLUS, MINUS and MULT operators
479 since memory addresses generally contain only those. */
480
481 int
482 queued_subexp_p (x)
483 rtx x;
484 {
485 enum rtx_code code = GET_CODE (x);
486 switch (code)
487 {
488 case QUEUED:
489 return 1;
490 case MEM:
491 return queued_subexp_p (XEXP (x, 0));
492 case MULT:
493 case PLUS:
494 case MINUS:
495 return (queued_subexp_p (XEXP (x, 0))
496 || queued_subexp_p (XEXP (x, 1)));
497 default:
498 return 0;
499 }
500 }
501
502 /* Perform all the pending incrementations. */
503
504 void
505 emit_queue ()
506 {
507 rtx p;
508 while ((p = pending_chain))
509 {
510 rtx body = QUEUED_BODY (p);
511
512 switch (GET_CODE (body))
513 {
514 case INSN:
515 case JUMP_INSN:
516 case CALL_INSN:
517 case CODE_LABEL:
518 case BARRIER:
519 case NOTE:
520 QUEUED_INSN (p) = body;
521 emit_insn (body);
522 break;
523
524 #ifdef ENABLE_CHECKING
525 case SEQUENCE:
526 abort ();
527 break;
528 #endif
529
530 default:
531 QUEUED_INSN (p) = emit_insn (body);
532 break;
533 }
534
535 pending_chain = QUEUED_NEXT (p);
536 }
537 }
538 \f
539 /* Copy data from FROM to TO, where the machine modes are not the same.
540 Both modes may be integer, or both may be floating.
541 UNSIGNEDP should be nonzero if FROM is an unsigned type.
542 This causes zero-extension instead of sign-extension. */
543
544 void
545 convert_move (to, from, unsignedp)
546 rtx to, from;
547 int unsignedp;
548 {
549 enum machine_mode to_mode = GET_MODE (to);
550 enum machine_mode from_mode = GET_MODE (from);
551 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
552 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
553 enum insn_code code;
554 rtx libcall;
555
556 /* rtx code for making an equivalent value. */
557 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
558 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
559
560 to = protect_from_queue (to, 1);
561 from = protect_from_queue (from, 0);
562
563 if (to_real != from_real)
564 abort ();
565
566 /* If FROM is a SUBREG that indicates that we have already done at least
567 the required extension, strip it. We don't handle such SUBREGs as
568 TO here. */
569
570 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
571 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
572 >= GET_MODE_SIZE (to_mode))
573 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
574 from = gen_lowpart (to_mode, from), from_mode = to_mode;
575
576 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
577 abort ();
578
579 if (to_mode == from_mode
580 || (from_mode == VOIDmode && CONSTANT_P (from)))
581 {
582 emit_move_insn (to, from);
583 return;
584 }
585
586 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
587 {
588 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
589 abort ();
590
591 if (VECTOR_MODE_P (to_mode))
592 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
593 else
594 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
595
596 emit_move_insn (to, from);
597 return;
598 }
599
600 if (to_real != from_real)
601 abort ();
602
603 if (to_real)
604 {
605 rtx value, insns;
606
607 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
608 {
609 /* Try converting directly if the insn is supported. */
610 if ((code = can_extend_p (to_mode, from_mode, 0))
611 != CODE_FOR_nothing)
612 {
613 emit_unop_insn (code, to, from, UNKNOWN);
614 return;
615 }
616 }
617
618 #ifdef HAVE_trunchfqf2
619 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
620 {
621 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
622 return;
623 }
624 #endif
625 #ifdef HAVE_trunctqfqf2
626 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
627 {
628 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
629 return;
630 }
631 #endif
632 #ifdef HAVE_truncsfqf2
633 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
634 {
635 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
636 return;
637 }
638 #endif
639 #ifdef HAVE_truncdfqf2
640 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
641 {
642 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
643 return;
644 }
645 #endif
646 #ifdef HAVE_truncxfqf2
647 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
648 {
649 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
650 return;
651 }
652 #endif
653 #ifdef HAVE_trunctfqf2
654 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
655 {
656 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
657 return;
658 }
659 #endif
660
661 #ifdef HAVE_trunctqfhf2
662 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
663 {
664 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
665 return;
666 }
667 #endif
668 #ifdef HAVE_truncsfhf2
669 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
670 {
671 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
672 return;
673 }
674 #endif
675 #ifdef HAVE_truncdfhf2
676 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
677 {
678 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
679 return;
680 }
681 #endif
682 #ifdef HAVE_truncxfhf2
683 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
684 {
685 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
686 return;
687 }
688 #endif
689 #ifdef HAVE_trunctfhf2
690 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
691 {
692 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
693 return;
694 }
695 #endif
696
697 #ifdef HAVE_truncsftqf2
698 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
699 {
700 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
701 return;
702 }
703 #endif
704 #ifdef HAVE_truncdftqf2
705 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
706 {
707 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
708 return;
709 }
710 #endif
711 #ifdef HAVE_truncxftqf2
712 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
713 {
714 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
715 return;
716 }
717 #endif
718 #ifdef HAVE_trunctftqf2
719 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
720 {
721 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
722 return;
723 }
724 #endif
725
726 #ifdef HAVE_truncdfsf2
727 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
728 {
729 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
730 return;
731 }
732 #endif
733 #ifdef HAVE_truncxfsf2
734 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
735 {
736 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
737 return;
738 }
739 #endif
740 #ifdef HAVE_trunctfsf2
741 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
742 {
743 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
744 return;
745 }
746 #endif
747 #ifdef HAVE_truncxfdf2
748 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
749 {
750 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
751 return;
752 }
753 #endif
754 #ifdef HAVE_trunctfdf2
755 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
756 {
757 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
758 return;
759 }
760 #endif
761
762 libcall = (rtx) 0;
763 switch (from_mode)
764 {
765 case SFmode:
766 switch (to_mode)
767 {
768 case DFmode:
769 libcall = extendsfdf2_libfunc;
770 break;
771
772 case XFmode:
773 libcall = extendsfxf2_libfunc;
774 break;
775
776 case TFmode:
777 libcall = extendsftf2_libfunc;
778 break;
779
780 default:
781 break;
782 }
783 break;
784
785 case DFmode:
786 switch (to_mode)
787 {
788 case SFmode:
789 libcall = truncdfsf2_libfunc;
790 break;
791
792 case XFmode:
793 libcall = extenddfxf2_libfunc;
794 break;
795
796 case TFmode:
797 libcall = extenddftf2_libfunc;
798 break;
799
800 default:
801 break;
802 }
803 break;
804
805 case XFmode:
806 switch (to_mode)
807 {
808 case SFmode:
809 libcall = truncxfsf2_libfunc;
810 break;
811
812 case DFmode:
813 libcall = truncxfdf2_libfunc;
814 break;
815
816 default:
817 break;
818 }
819 break;
820
821 case TFmode:
822 switch (to_mode)
823 {
824 case SFmode:
825 libcall = trunctfsf2_libfunc;
826 break;
827
828 case DFmode:
829 libcall = trunctfdf2_libfunc;
830 break;
831
832 default:
833 break;
834 }
835 break;
836
837 default:
838 break;
839 }
840
841 if (libcall == (rtx) 0)
842 /* This conversion is not implemented yet. */
843 abort ();
844
845 start_sequence ();
846 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
847 1, from, from_mode);
848 insns = get_insns ();
849 end_sequence ();
850 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
851 from));
852 return;
853 }
854
855 /* Now both modes are integers. */
856
857 /* Handle expanding beyond a word. */
858 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
859 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
860 {
861 rtx insns;
862 rtx lowpart;
863 rtx fill_value;
864 rtx lowfrom;
865 int i;
866 enum machine_mode lowpart_mode;
867 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
868
869 /* Try converting directly if the insn is supported. */
870 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
871 != CODE_FOR_nothing)
872 {
873 /* If FROM is a SUBREG, put it into a register. Do this
874 so that we always generate the same set of insns for
875 better cse'ing; if an intermediate assignment occurred,
876 we won't be doing the operation directly on the SUBREG. */
877 if (optimize > 0 && GET_CODE (from) == SUBREG)
878 from = force_reg (from_mode, from);
879 emit_unop_insn (code, to, from, equiv_code);
880 return;
881 }
882 /* Next, try converting via full word. */
883 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
884 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
885 != CODE_FOR_nothing))
886 {
887 if (GET_CODE (to) == REG)
888 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
889 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
890 emit_unop_insn (code, to,
891 gen_lowpart (word_mode, to), equiv_code);
892 return;
893 }
894
895 /* No special multiword conversion insn; do it by hand. */
896 start_sequence ();
897
898 /* Since we will turn this into a no conflict block, we must ensure
899 that the source does not overlap the target. */
900
901 if (reg_overlap_mentioned_p (to, from))
902 from = force_reg (from_mode, from);
903
904 /* Get a copy of FROM widened to a word, if necessary. */
905 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
906 lowpart_mode = word_mode;
907 else
908 lowpart_mode = from_mode;
909
910 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
911
912 lowpart = gen_lowpart (lowpart_mode, to);
913 emit_move_insn (lowpart, lowfrom);
914
915 /* Compute the value to put in each remaining word. */
916 if (unsignedp)
917 fill_value = const0_rtx;
918 else
919 {
920 #ifdef HAVE_slt
921 if (HAVE_slt
922 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
923 && STORE_FLAG_VALUE == -1)
924 {
925 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
926 lowpart_mode, 0);
927 fill_value = gen_reg_rtx (word_mode);
928 emit_insn (gen_slt (fill_value));
929 }
930 else
931 #endif
932 {
933 fill_value
934 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
935 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
936 NULL_RTX, 0);
937 fill_value = convert_to_mode (word_mode, fill_value, 1);
938 }
939 }
940
941 /* Fill the remaining words. */
942 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
943 {
944 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
945 rtx subword = operand_subword (to, index, 1, to_mode);
946
947 if (subword == 0)
948 abort ();
949
950 if (fill_value != subword)
951 emit_move_insn (subword, fill_value);
952 }
953
954 insns = get_insns ();
955 end_sequence ();
956
957 emit_no_conflict_block (insns, to, from, NULL_RTX,
958 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
959 return;
960 }
961
962 /* Truncating multi-word to a word or less. */
963 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
964 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
965 {
966 if (!((GET_CODE (from) == MEM
967 && ! MEM_VOLATILE_P (from)
968 && direct_load[(int) to_mode]
969 && ! mode_dependent_address_p (XEXP (from, 0)))
970 || GET_CODE (from) == REG
971 || GET_CODE (from) == SUBREG))
972 from = force_reg (from_mode, from);
973 convert_move (to, gen_lowpart (word_mode, from), 0);
974 return;
975 }
976
977 /* Handle pointer conversion. */ /* SPEE 900220. */
978 if (to_mode == PQImode)
979 {
980 if (from_mode != QImode)
981 from = convert_to_mode (QImode, from, unsignedp);
982
983 #ifdef HAVE_truncqipqi2
984 if (HAVE_truncqipqi2)
985 {
986 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
987 return;
988 }
989 #endif /* HAVE_truncqipqi2 */
990 abort ();
991 }
992
993 if (from_mode == PQImode)
994 {
995 if (to_mode != QImode)
996 {
997 from = convert_to_mode (QImode, from, unsignedp);
998 from_mode = QImode;
999 }
1000 else
1001 {
1002 #ifdef HAVE_extendpqiqi2
1003 if (HAVE_extendpqiqi2)
1004 {
1005 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
1006 return;
1007 }
1008 #endif /* HAVE_extendpqiqi2 */
1009 abort ();
1010 }
1011 }
1012
1013 if (to_mode == PSImode)
1014 {
1015 if (from_mode != SImode)
1016 from = convert_to_mode (SImode, from, unsignedp);
1017
1018 #ifdef HAVE_truncsipsi2
1019 if (HAVE_truncsipsi2)
1020 {
1021 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1022 return;
1023 }
1024 #endif /* HAVE_truncsipsi2 */
1025 abort ();
1026 }
1027
1028 if (from_mode == PSImode)
1029 {
1030 if (to_mode != SImode)
1031 {
1032 from = convert_to_mode (SImode, from, unsignedp);
1033 from_mode = SImode;
1034 }
1035 else
1036 {
1037 #ifdef HAVE_extendpsisi2
1038 if (! unsignedp && HAVE_extendpsisi2)
1039 {
1040 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1041 return;
1042 }
1043 #endif /* HAVE_extendpsisi2 */
1044 #ifdef HAVE_zero_extendpsisi2
1045 if (unsignedp && HAVE_zero_extendpsisi2)
1046 {
1047 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1048 return;
1049 }
1050 #endif /* HAVE_zero_extendpsisi2 */
1051 abort ();
1052 }
1053 }
1054
1055 if (to_mode == PDImode)
1056 {
1057 if (from_mode != DImode)
1058 from = convert_to_mode (DImode, from, unsignedp);
1059
1060 #ifdef HAVE_truncdipdi2
1061 if (HAVE_truncdipdi2)
1062 {
1063 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1064 return;
1065 }
1066 #endif /* HAVE_truncdipdi2 */
1067 abort ();
1068 }
1069
1070 if (from_mode == PDImode)
1071 {
1072 if (to_mode != DImode)
1073 {
1074 from = convert_to_mode (DImode, from, unsignedp);
1075 from_mode = DImode;
1076 }
1077 else
1078 {
1079 #ifdef HAVE_extendpdidi2
1080 if (HAVE_extendpdidi2)
1081 {
1082 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1083 return;
1084 }
1085 #endif /* HAVE_extendpdidi2 */
1086 abort ();
1087 }
1088 }
1089
1090 /* Now follow all the conversions between integers
1091 no more than a word long. */
1092
1093 /* For truncation, usually we can just refer to FROM in a narrower mode. */
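  /* Editor's note (illustrative): e.g. truncating an SImode register to
     QImode normally needs no conversion insn at all; gen_lowpart just picks
     out the low byte, (subreg:QI (reg:SI n) 0) on a little-endian target,
     and the plain move below does the rest.  */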
1094 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1095 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1096 GET_MODE_BITSIZE (from_mode)))
1097 {
1098 if (!((GET_CODE (from) == MEM
1099 && ! MEM_VOLATILE_P (from)
1100 && direct_load[(int) to_mode]
1101 && ! mode_dependent_address_p (XEXP (from, 0)))
1102 || GET_CODE (from) == REG
1103 || GET_CODE (from) == SUBREG))
1104 from = force_reg (from_mode, from);
1105 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1106 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1107 from = copy_to_reg (from);
1108 emit_move_insn (to, gen_lowpart (to_mode, from));
1109 return;
1110 }
1111
1112 /* Handle extension. */
1113 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1114 {
1115 /* Convert directly if that works. */
1116 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1117 != CODE_FOR_nothing)
1118 {
1119 if (flag_force_mem)
1120 from = force_not_mem (from);
1121
1122 emit_unop_insn (code, to, from, equiv_code);
1123 return;
1124 }
1125 else
1126 {
1127 enum machine_mode intermediate;
1128 rtx tmp;
1129 tree shift_amount;
1130
1131 /* Search for a mode to convert via. */
1132 for (intermediate = from_mode; intermediate != VOIDmode;
1133 intermediate = GET_MODE_WIDER_MODE (intermediate))
1134 if (((can_extend_p (to_mode, intermediate, unsignedp)
1135 != CODE_FOR_nothing)
1136 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1137 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1138 GET_MODE_BITSIZE (intermediate))))
1139 && (can_extend_p (intermediate, from_mode, unsignedp)
1140 != CODE_FOR_nothing))
1141 {
1142 convert_move (to, convert_to_mode (intermediate, from,
1143 unsignedp), unsignedp);
1144 return;
1145 }
1146
1147 /* No suitable intermediate mode.
1148 Generate what we need with shifts. */
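	  /* Editor's note (illustrative): e.g. extending a QImode value to
	     SImode this way becomes a left shift by 24 bits followed by a
	     right shift by 24 bits (arithmetic when signed, logical when
	     unsigned), assuming an 8-bit QImode and a 32-bit SImode.  */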
1149 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1150 - GET_MODE_BITSIZE (from_mode), 0);
1151 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1152 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1153 to, unsignedp);
1154 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1155 to, unsignedp);
1156 if (tmp != to)
1157 emit_move_insn (to, tmp);
1158 return;
1159 }
1160 }
1161
1162 /* Support special truncate insns for certain modes. */
1163
1164 if (from_mode == DImode && to_mode == SImode)
1165 {
1166 #ifdef HAVE_truncdisi2
1167 if (HAVE_truncdisi2)
1168 {
1169 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1170 return;
1171 }
1172 #endif
1173 convert_move (to, force_reg (from_mode, from), unsignedp);
1174 return;
1175 }
1176
1177 if (from_mode == DImode && to_mode == HImode)
1178 {
1179 #ifdef HAVE_truncdihi2
1180 if (HAVE_truncdihi2)
1181 {
1182 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1183 return;
1184 }
1185 #endif
1186 convert_move (to, force_reg (from_mode, from), unsignedp);
1187 return;
1188 }
1189
1190 if (from_mode == DImode && to_mode == QImode)
1191 {
1192 #ifdef HAVE_truncdiqi2
1193 if (HAVE_truncdiqi2)
1194 {
1195 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1196 return;
1197 }
1198 #endif
1199 convert_move (to, force_reg (from_mode, from), unsignedp);
1200 return;
1201 }
1202
1203 if (from_mode == SImode && to_mode == HImode)
1204 {
1205 #ifdef HAVE_truncsihi2
1206 if (HAVE_truncsihi2)
1207 {
1208 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1209 return;
1210 }
1211 #endif
1212 convert_move (to, force_reg (from_mode, from), unsignedp);
1213 return;
1214 }
1215
1216 if (from_mode == SImode && to_mode == QImode)
1217 {
1218 #ifdef HAVE_truncsiqi2
1219 if (HAVE_truncsiqi2)
1220 {
1221 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1222 return;
1223 }
1224 #endif
1225 convert_move (to, force_reg (from_mode, from), unsignedp);
1226 return;
1227 }
1228
1229 if (from_mode == HImode && to_mode == QImode)
1230 {
1231 #ifdef HAVE_trunchiqi2
1232 if (HAVE_trunchiqi2)
1233 {
1234 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1235 return;
1236 }
1237 #endif
1238 convert_move (to, force_reg (from_mode, from), unsignedp);
1239 return;
1240 }
1241
1242 if (from_mode == TImode && to_mode == DImode)
1243 {
1244 #ifdef HAVE_trunctidi2
1245 if (HAVE_trunctidi2)
1246 {
1247 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1248 return;
1249 }
1250 #endif
1251 convert_move (to, force_reg (from_mode, from), unsignedp);
1252 return;
1253 }
1254
1255 if (from_mode == TImode && to_mode == SImode)
1256 {
1257 #ifdef HAVE_trunctisi2
1258 if (HAVE_trunctisi2)
1259 {
1260 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1261 return;
1262 }
1263 #endif
1264 convert_move (to, force_reg (from_mode, from), unsignedp);
1265 return;
1266 }
1267
1268 if (from_mode == TImode && to_mode == HImode)
1269 {
1270 #ifdef HAVE_trunctihi2
1271 if (HAVE_trunctihi2)
1272 {
1273 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1274 return;
1275 }
1276 #endif
1277 convert_move (to, force_reg (from_mode, from), unsignedp);
1278 return;
1279 }
1280
1281 if (from_mode == TImode && to_mode == QImode)
1282 {
1283 #ifdef HAVE_trunctiqi2
1284 if (HAVE_trunctiqi2)
1285 {
1286 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1287 return;
1288 }
1289 #endif
1290 convert_move (to, force_reg (from_mode, from), unsignedp);
1291 return;
1292 }
1293
1294 /* Handle truncation of volatile memrefs, and so on;
1295 the things that couldn't be truncated directly,
1296 and for which there was no special instruction. */
1297 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1298 {
1299 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1300 emit_move_insn (to, temp);
1301 return;
1302 }
1303
1304 /* Mode combination is not recognized. */
1305 abort ();
1306 }
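/* Editor's sketch (illustrative; not part of the original source): a
   typical use of convert_move, zero-extending a QImode value into a fresh
   SImode pseudo.  The variable names are hypothetical.  */
#if 0
  rtx byte_val;				/* some QImode rtx */
  rtx wide = gen_reg_rtx (SImode);	/* new SImode pseudo */

  convert_move (wide, byte_val, 1);	/* 1 => treat BYTE_VAL as unsigned */
#endif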
1307
1308 /* Return an rtx for a value that would result
1309 from converting X to mode MODE.
1310 Both X and MODE may be floating, or both integer.
1311 UNSIGNEDP is nonzero if X is an unsigned value.
1312 This can be done by referring to a part of X in place
1313 or by copying to a new temporary with conversion.
1314
1315 This function *must not* call protect_from_queue
1316 except when putting X into an insn (in which case convert_move does it). */
1317
1318 rtx
1319 convert_to_mode (mode, x, unsignedp)
1320 enum machine_mode mode;
1321 rtx x;
1322 int unsignedp;
1323 {
1324 return convert_modes (mode, VOIDmode, x, unsignedp);
1325 }
1326
1327 /* Return an rtx for a value that would result
1328 from converting X from mode OLDMODE to mode MODE.
1329 Both modes may be floating, or both integer.
1330 UNSIGNEDP is nonzero if X is an unsigned value.
1331
1332 This can be done by referring to a part of X in place
1333 or by copying to a new temporary with conversion.
1334
1335 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1336
1337 This function *must not* call protect_from_queue
1338 except when putting X into an insn (in which case convert_move does it). */
1339
1340 rtx
1341 convert_modes (mode, oldmode, x, unsignedp)
1342 enum machine_mode mode, oldmode;
1343 rtx x;
1344 int unsignedp;
1345 {
1346 rtx temp;
1347
1348 /* If FROM is a SUBREG that indicates that we have already done at least
1349 the required extension, strip it. */
1350
1351 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1352 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1353 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1354 x = gen_lowpart (mode, x);
1355
1356 if (GET_MODE (x) != VOIDmode)
1357 oldmode = GET_MODE (x);
1358
1359 if (mode == oldmode)
1360 return x;
1361
1362 /* There is one case that we must handle specially: If we are converting
1363 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1364 we are to interpret the constant as unsigned, gen_lowpart will do
 1365      the wrong thing if the constant appears negative. What we want to do is
1366 make the high-order word of the constant zero, not all ones. */
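  /* Editor's note (worked example): with a 32-bit HOST_WIDE_INT, converting
     (const_int -1) taken from a 32-bit mode to an unsigned 64-bit mode must
     produce 0x00000000ffffffff; gen_lowpart would sign-extend and hand back
     all ones, so the high-order word is forced to zero below.  */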
1367
1368 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1369 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1370 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1371 {
1372 HOST_WIDE_INT val = INTVAL (x);
1373
1374 if (oldmode != VOIDmode
1375 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1376 {
1377 int width = GET_MODE_BITSIZE (oldmode);
1378
1379 /* We need to zero extend VAL. */
1380 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1381 }
1382
1383 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1384 }
1385
1386 /* We can do this with a gen_lowpart if both desired and current modes
1387 are integer, and this is either a constant integer, a register, or a
1388 non-volatile MEM. Except for the constant case where MODE is no
1389 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1390
1391 if ((GET_CODE (x) == CONST_INT
1392 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1393 || (GET_MODE_CLASS (mode) == MODE_INT
1394 && GET_MODE_CLASS (oldmode) == MODE_INT
1395 && (GET_CODE (x) == CONST_DOUBLE
1396 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1397 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1398 && direct_load[(int) mode])
1399 || (GET_CODE (x) == REG
1400 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1401 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1402 {
1403 /* ?? If we don't know OLDMODE, we have to assume here that
1404 X does not need sign- or zero-extension. This may not be
1405 the case, but it's the best we can do. */
1406 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1407 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1408 {
1409 HOST_WIDE_INT val = INTVAL (x);
1410 int width = GET_MODE_BITSIZE (oldmode);
1411
1412 /* We must sign or zero-extend in this case. Start by
1413 zero-extending, then sign extend if we need to. */
1414 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1415 if (! unsignedp
1416 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1417 val |= (HOST_WIDE_INT) (-1) << width;
1418
1419 return gen_int_mode (val, mode);
1420 }
1421
1422 return gen_lowpart (mode, x);
1423 }
1424
1425 temp = gen_reg_rtx (mode);
1426 convert_move (temp, x, unsignedp);
1427 return temp;
1428 }
1429 \f
 1430 /* This macro determines the largest unit size that move_by_pieces
 1431    can use. */
1432
1433 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1434 move efficiently, as opposed to MOVE_MAX which is the maximum
1435 number of bytes we can move with a single instruction. */
1436
1437 #ifndef MOVE_MAX_PIECES
1438 #define MOVE_MAX_PIECES MOVE_MAX
1439 #endif
1440
1441 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1442 store efficiently. Due to internal GCC limitations, this is
1443 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1444 for an immediate constant. */
1445
1446 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1447
1448 /* Generate several move instructions to copy LEN bytes from block FROM to
1449 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1450 and TO through protect_from_queue before calling.
1451
1452 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1453 used to push FROM to the stack.
1454
1455 ALIGN is maximum alignment we can assume. */
1456
1457 void
1458 move_by_pieces (to, from, len, align)
1459 rtx to, from;
1460 unsigned HOST_WIDE_INT len;
1461 unsigned int align;
1462 {
1463 struct move_by_pieces data;
1464 rtx to_addr, from_addr = XEXP (from, 0);
1465 unsigned int max_size = MOVE_MAX_PIECES + 1;
1466 enum machine_mode mode = VOIDmode, tmode;
1467 enum insn_code icode;
1468
1469 data.offset = 0;
1470 data.from_addr = from_addr;
1471 if (to)
1472 {
1473 to_addr = XEXP (to, 0);
1474 data.to = to;
1475 data.autinc_to
1476 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1477 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1478 data.reverse
1479 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1480 }
1481 else
1482 {
1483 to_addr = NULL_RTX;
1484 data.to = NULL_RTX;
1485 data.autinc_to = 1;
1486 #ifdef STACK_GROWS_DOWNWARD
1487 data.reverse = 1;
1488 #else
1489 data.reverse = 0;
1490 #endif
1491 }
1492 data.to_addr = to_addr;
1493 data.from = from;
1494 data.autinc_from
1495 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1496 || GET_CODE (from_addr) == POST_INC
1497 || GET_CODE (from_addr) == POST_DEC);
1498
1499 data.explicit_inc_from = 0;
1500 data.explicit_inc_to = 0;
1501 if (data.reverse) data.offset = len;
1502 data.len = len;
1503
1504 /* If copying requires more than two move insns,
1505 copy addresses to registers (to make displacements shorter)
1506 and use post-increment if available. */
1507 if (!(data.autinc_from && data.autinc_to)
1508 && move_by_pieces_ninsns (len, align) > 2)
1509 {
1510 /* Find the mode of the largest move... */
1511 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1512 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1513 if (GET_MODE_SIZE (tmode) < max_size)
1514 mode = tmode;
1515
1516 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1517 {
1518 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1519 data.autinc_from = 1;
1520 data.explicit_inc_from = -1;
1521 }
1522 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1523 {
1524 data.from_addr = copy_addr_to_reg (from_addr);
1525 data.autinc_from = 1;
1526 data.explicit_inc_from = 1;
1527 }
1528 if (!data.autinc_from && CONSTANT_P (from_addr))
1529 data.from_addr = copy_addr_to_reg (from_addr);
1530 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1531 {
1532 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1533 data.autinc_to = 1;
1534 data.explicit_inc_to = -1;
1535 }
1536 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1537 {
1538 data.to_addr = copy_addr_to_reg (to_addr);
1539 data.autinc_to = 1;
1540 data.explicit_inc_to = 1;
1541 }
1542 if (!data.autinc_to && CONSTANT_P (to_addr))
1543 data.to_addr = copy_addr_to_reg (to_addr);
1544 }
1545
1546 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1547 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1548 align = MOVE_MAX * BITS_PER_UNIT;
1549
1550 /* First move what we can in the largest integer mode, then go to
1551 successively smaller modes. */
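  /* Editor's note (worked example): with MOVE_MAX_PIECES == 8 and
     sufficiently aligned operands, a 15-byte copy comes out as one DImode,
     one SImode, one HImode and one QImode move; MAX_SIZE shrinks to the
     size of the mode just used on each pass, so each residue is picked up
     by the next narrower integer mode.  */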
1552
1553 while (max_size > 1)
1554 {
1555 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1556 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1557 if (GET_MODE_SIZE (tmode) < max_size)
1558 mode = tmode;
1559
1560 if (mode == VOIDmode)
1561 break;
1562
1563 icode = mov_optab->handlers[(int) mode].insn_code;
1564 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1565 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1566
1567 max_size = GET_MODE_SIZE (mode);
1568 }
1569
1570 /* The code above should have handled everything. */
1571 if (data.len > 0)
1572 abort ();
1573 }
1574
1575 /* Return number of insns required to move L bytes by pieces.
1576 ALIGN (in bits) is maximum alignment we can assume. */
1577
1578 static unsigned HOST_WIDE_INT
1579 move_by_pieces_ninsns (l, align)
1580 unsigned HOST_WIDE_INT l;
1581 unsigned int align;
1582 {
1583 unsigned HOST_WIDE_INT n_insns = 0;
1584 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1585
1586 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1587 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1588 align = MOVE_MAX * BITS_PER_UNIT;
1589
1590 while (max_size > 1)
1591 {
1592 enum machine_mode mode = VOIDmode, tmode;
1593 enum insn_code icode;
1594
1595 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1596 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1597 if (GET_MODE_SIZE (tmode) < max_size)
1598 mode = tmode;
1599
1600 if (mode == VOIDmode)
1601 break;
1602
1603 icode = mov_optab->handlers[(int) mode].insn_code;
1604 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1605 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1606
1607 max_size = GET_MODE_SIZE (mode);
1608 }
1609
1610 if (l)
1611 abort ();
1612 return n_insns;
1613 }
1614
1615 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1616 with move instructions for mode MODE. GENFUN is the gen_... function
1617 to make a move insn for that mode. DATA has all the other info. */
1618
1619 static void
1620 move_by_pieces_1 (genfun, mode, data)
1621 rtx (*genfun) PARAMS ((rtx, ...));
1622 enum machine_mode mode;
1623 struct move_by_pieces *data;
1624 {
1625 unsigned int size = GET_MODE_SIZE (mode);
1626 rtx to1 = NULL_RTX, from1;
1627
1628 while (data->len >= size)
1629 {
1630 if (data->reverse)
1631 data->offset -= size;
1632
1633 if (data->to)
1634 {
1635 if (data->autinc_to)
1636 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1637 data->offset);
1638 else
1639 to1 = adjust_address (data->to, mode, data->offset);
1640 }
1641
1642 if (data->autinc_from)
1643 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1644 data->offset);
1645 else
1646 from1 = adjust_address (data->from, mode, data->offset);
1647
1648 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1649 emit_insn (gen_add2_insn (data->to_addr,
1650 GEN_INT (-(HOST_WIDE_INT)size)));
1651 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1652 emit_insn (gen_add2_insn (data->from_addr,
1653 GEN_INT (-(HOST_WIDE_INT)size)));
1654
1655 if (data->to)
1656 emit_insn ((*genfun) (to1, from1));
1657 else
1658 {
1659 #ifdef PUSH_ROUNDING
1660 emit_single_push_insn (mode, from1, NULL);
1661 #else
1662 abort ();
1663 #endif
1664 }
1665
1666 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1667 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1668 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1669 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1670
1671 if (! data->reverse)
1672 data->offset += size;
1673
1674 data->len -= size;
1675 }
1676 }
1677 \f
1678 /* Emit code to move a block Y to a block X. This may be done with
1679 string-move instructions, with multiple scalar move instructions,
1680 or with a library call.
1681
1682 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1683 SIZE is an rtx that says how long they are.
1684 ALIGN is the maximum alignment we can assume they have.
1685 METHOD describes what kind of copy this is, and what mechanisms may be used.
1686
1687 Return the address of the new block, if memcpy is called and returns it,
1688 0 otherwise. */
1689
1690 rtx
1691 emit_block_move (x, y, size, method)
1692 rtx x, y, size;
1693 enum block_op_methods method;
1694 {
1695 bool may_use_call;
1696 rtx retval = 0;
1697 unsigned int align;
1698
1699 switch (method)
1700 {
1701 case BLOCK_OP_NORMAL:
1702 may_use_call = true;
1703 break;
1704
1705 case BLOCK_OP_CALL_PARM:
1706 may_use_call = block_move_libcall_safe_for_call_parm ();
1707
1708 /* Make inhibit_defer_pop nonzero around the library call
1709 to force it to pop the arguments right away. */
1710 NO_DEFER_POP;
1711 break;
1712
1713 case BLOCK_OP_NO_LIBCALL:
1714 may_use_call = false;
1715 break;
1716
1717 default:
1718 abort ();
1719 }
1720
1721 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1722
1723 if (GET_MODE (x) != BLKmode)
1724 abort ();
1725 if (GET_MODE (y) != BLKmode)
1726 abort ();
1727
1728 x = protect_from_queue (x, 1);
1729 y = protect_from_queue (y, 0);
1730 size = protect_from_queue (size, 0);
1731
1732 if (GET_CODE (x) != MEM)
1733 abort ();
1734 if (GET_CODE (y) != MEM)
1735 abort ();
1736 if (size == 0)
1737 abort ();
1738
1739 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1740 can be incorrect is coming from __builtin_memcpy. */
1741 if (GET_CODE (size) == CONST_INT)
1742 {
1743 x = shallow_copy_rtx (x);
1744 y = shallow_copy_rtx (y);
1745 set_mem_size (x, size);
1746 set_mem_size (y, size);
1747 }
1748
1749 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1750 move_by_pieces (x, y, INTVAL (size), align);
1751 else if (emit_block_move_via_movstr (x, y, size, align))
1752 ;
1753 else if (may_use_call)
1754 retval = emit_block_move_via_libcall (x, y, size);
1755 else
1756 emit_block_move_via_loop (x, y, size, align);
1757
1758 if (method == BLOCK_OP_CALL_PARM)
1759 OK_DEFER_POP;
1760
1761 return retval;
1762 }
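/* Editor's sketch (illustrative; not part of the original source): a
   typical caller of emit_block_move, copying a compile-time-sized block for
   an ordinary structure assignment.  DST_MEM and SRC_MEM are hypothetical
   BLKmode MEMs set up by the caller.  */
#if 0
  rtx dst_mem, src_mem;		/* BLKmode MEMs for the two objects */
  rtx nbytes = GEN_INT (32);	/* size of the copy, in bytes */

  emit_block_move (dst_mem, src_mem, nbytes, BLOCK_OP_NORMAL);
#endif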
1763
1764 /* A subroutine of emit_block_move. Returns true if calling the
1765 block move libcall will not clobber any parameters which may have
1766 already been placed on the stack. */
1767
1768 static bool
1769 block_move_libcall_safe_for_call_parm ()
1770 {
1771 if (PUSH_ARGS)
1772 return true;
1773 else
1774 {
1775 /* Check to see whether memcpy takes all register arguments. */
1776 static enum {
1777 takes_regs_uninit, takes_regs_no, takes_regs_yes
1778 } takes_regs = takes_regs_uninit;
1779
1780 switch (takes_regs)
1781 {
1782 case takes_regs_uninit:
1783 {
1784 CUMULATIVE_ARGS args_so_far;
1785 tree fn, arg;
1786
1787 fn = emit_block_move_libcall_fn (false);
1788 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1789
1790 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1791 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1792 {
1793 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1794 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1795 if (!tmp || !REG_P (tmp))
1796 goto fail_takes_regs;
1797 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1798 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1799 NULL_TREE, 1))
1800 goto fail_takes_regs;
1801 #endif
1802 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1803 }
1804 }
1805 takes_regs = takes_regs_yes;
1806 /* FALLTHRU */
1807
1808 case takes_regs_yes:
1809 return true;
1810
1811 fail_takes_regs:
1812 takes_regs = takes_regs_no;
1813 /* FALLTHRU */
1814 case takes_regs_no:
1815 return false;
1816
1817 default:
1818 abort ();
1819 }
1820 }
1821 }
1822
1823 /* A subroutine of emit_block_move. Expand a movstr pattern;
1824 return true if successful. */
1825
1826 static bool
1827 emit_block_move_via_movstr (x, y, size, align)
1828 rtx x, y, size;
1829 unsigned int align;
1830 {
1831 /* Try the most limited insn first, because there's no point
1832 including more than one in the machine description unless
1833 the more limited one has some advantage. */
1834
1835 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1836 enum machine_mode mode;
1837
1838 /* Since this is a move insn, we don't care about volatility. */
1839 volatile_ok = 1;
1840
1841 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1842 mode = GET_MODE_WIDER_MODE (mode))
1843 {
1844 enum insn_code code = movstr_optab[(int) mode];
1845 insn_operand_predicate_fn pred;
1846
1847 if (code != CODE_FOR_nothing
1848 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1849 here because if SIZE is less than the mode mask, as it is
1850 returned by the macro, it will definitely be less than the
1851 actual mode mask. */
1852 && ((GET_CODE (size) == CONST_INT
1853 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1854 <= (GET_MODE_MASK (mode) >> 1)))
1855 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1856 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1857 || (*pred) (x, BLKmode))
1858 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1859 || (*pred) (y, BLKmode))
1860 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1861 || (*pred) (opalign, VOIDmode)))
1862 {
1863 rtx op2;
1864 rtx last = get_last_insn ();
1865 rtx pat;
1866
1867 op2 = convert_to_mode (mode, size, 1);
1868 pred = insn_data[(int) code].operand[2].predicate;
1869 if (pred != 0 && ! (*pred) (op2, mode))
1870 op2 = copy_to_mode_reg (mode, op2);
1871
1872 /* ??? When called via emit_block_move_for_call, it'd be
1873 nice if there were some way to inform the backend, so
1874 that it doesn't fail the expansion because it thinks
1875 emitting the libcall would be more efficient. */
1876
1877 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1878 if (pat)
1879 {
1880 emit_insn (pat);
1881 volatile_ok = 0;
1882 return true;
1883 }
1884 else
1885 delete_insns_since (last);
1886 }
1887 }
1888
1889 volatile_ok = 0;
1890 return false;
1891 }
1892
1893 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1894 Return the return value from memcpy, 0 otherwise. */
1895
1896 static rtx
1897 emit_block_move_via_libcall (dst, src, size)
1898 rtx dst, src, size;
1899 {
1900 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1901 enum machine_mode size_mode;
1902 rtx retval;
1903
1904 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1905
1906 It is unsafe to save the value generated by protect_from_queue
1907 and reuse it later. Consider what happens if emit_queue is
1908 called before the return value from protect_from_queue is used.
1909
1910 Expansion of the CALL_EXPR below will call emit_queue before
1911 we are finished emitting RTL for argument setup. So if we are
1912 not careful we could get the wrong value for an argument.
1913
 1914      To avoid this problem we go ahead and emit code to copy DST, SRC &
 1915      SIZE into new pseudos. We can then place those new pseudos
1916 into an RTL_EXPR and use them later, even after a call to
1917 emit_queue.
1918
1919 Note this is not strictly needed for library calls since they
1920 do not call emit_queue before loading their arguments. However,
1921 we may need to have library calls call emit_queue in the future
1922 since failing to do so could cause problems for targets which
1923 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1924
1925 dst = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1926 src = copy_to_mode_reg (Pmode, XEXP (src, 0));
1927
1928 if (TARGET_MEM_FUNCTIONS)
1929 size_mode = TYPE_MODE (sizetype);
1930 else
1931 size_mode = TYPE_MODE (unsigned_type_node);
1932 size = convert_to_mode (size_mode, size, 1);
1933 size = copy_to_mode_reg (size_mode, size);
1934
1935 /* It is incorrect to use the libcall calling conventions to call
1936 memcpy in this context. This could be a user call to memcpy and
1937 the user may wish to examine the return value from memcpy. For
1938 targets where libcalls and normal calls have different conventions
1939 for returning pointers, we could end up generating incorrect code.
1940
1941 For convenience, we generate the call to bcopy this way as well. */
1942
1943 dst_tree = make_tree (ptr_type_node, dst);
1944 src_tree = make_tree (ptr_type_node, src);
1945 if (TARGET_MEM_FUNCTIONS)
1946 size_tree = make_tree (sizetype, size);
1947 else
1948 size_tree = make_tree (unsigned_type_node, size);
1949
1950 fn = emit_block_move_libcall_fn (true);
1951 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1952 if (TARGET_MEM_FUNCTIONS)
1953 {
1954 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1955 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1956 }
1957 else
1958 {
1959 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1960 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1961 }
1962
1963 /* Now we have to build up the CALL_EXPR itself. */
1964 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1965 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1966 call_expr, arg_list, NULL_TREE);
1967 TREE_SIDE_EFFECTS (call_expr) = 1;
1968
1969 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1970
1971 /* If we are initializing a readonly value, show the above call
1972 clobbered it. Otherwise, a load from it may erroneously be
1973 hoisted from a loop. */
1974 if (RTX_UNCHANGING_P (dst))
1975 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
1976
1977 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
1978 }
1979
1980 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1981 for the function we use for block copies. The first time FOR_CALL
1982 is true, we call assemble_external. */
1983
1984 static GTY(()) tree block_move_fn;
1985
1986 static tree
1987 emit_block_move_libcall_fn (for_call)
1988 int for_call;
1989 {
1990 static bool emitted_extern;
1991 tree fn = block_move_fn, args;
1992
1993 if (!fn)
1994 {
1995 if (TARGET_MEM_FUNCTIONS)
1996 {
1997 fn = get_identifier ("memcpy");
1998 args = build_function_type_list (ptr_type_node, ptr_type_node,
1999 const_ptr_type_node, sizetype,
2000 NULL_TREE);
2001 }
2002 else
2003 {
2004 fn = get_identifier ("bcopy");
2005 args = build_function_type_list (void_type_node, const_ptr_type_node,
2006 ptr_type_node, unsigned_type_node,
2007 NULL_TREE);
2008 }
2009
2010 fn = build_decl (FUNCTION_DECL, fn, args);
2011 DECL_EXTERNAL (fn) = 1;
2012 TREE_PUBLIC (fn) = 1;
2013 DECL_ARTIFICIAL (fn) = 1;
2014 TREE_NOTHROW (fn) = 1;
2015
2016 block_move_fn = fn;
2017 }
2018
2019 if (for_call && !emitted_extern)
2020 {
2021 emitted_extern = true;
2022 make_decl_rtl (fn, NULL);
2023 assemble_external (fn);
2024 }
2025
2026 return fn;
2027 }
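
/* For reference (illustrative only, derived from the argument lists
   built above): the FUNCTION_DECL corresponds roughly to one of

       void *memcpy (void *dst, const void *src, size_t n);
       void bcopy (const void *src, void *dst, unsigned n);

   depending on TARGET_MEM_FUNCTIONS, which is also why the argument
   order is swapped when the arg_list is assembled in
   emit_block_move_via_libcall.  */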
2028
2029 /* A subroutine of emit_block_move. Copy the data via an explicit
2030 loop. This is used only when libcalls are forbidden. */
2031 /* ??? It'd be nice to copy in hunks larger than QImode. */
2032
2033 static void
2034 emit_block_move_via_loop (x, y, size, align)
2035 rtx x, y, size;
2036 unsigned int align ATTRIBUTE_UNUSED;
2037 {
2038 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
2039 enum machine_mode iter_mode;
2040
2041 iter_mode = GET_MODE (size);
2042 if (iter_mode == VOIDmode)
2043 iter_mode = word_mode;
2044
2045 top_label = gen_label_rtx ();
2046 cmp_label = gen_label_rtx ();
2047 iter = gen_reg_rtx (iter_mode);
2048
2049 emit_move_insn (iter, const0_rtx);
2050
2051 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
2052 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
2053 do_pending_stack_adjust ();
2054
2055 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2056
2057 emit_jump (cmp_label);
2058 emit_label (top_label);
2059
2060 tmp = convert_modes (Pmode, iter_mode, iter, true);
2061 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
2062 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
2063 x = change_address (x, QImode, x_addr);
2064 y = change_address (y, QImode, y_addr);
2065
2066 emit_move_insn (x, y);
2067
2068 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
2069 true, OPTAB_LIB_WIDEN);
2070 if (tmp != iter)
2071 emit_move_insn (iter, tmp);
2072
2073 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2074 emit_label (cmp_label);
2075
2076 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
2077 true, top_label);
2078
2079 emit_note (NULL, NOTE_INSN_LOOP_END);
2080 }
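
/* Rough C-level sketch of the loop emitted above (illustrative only;
   the actual output is RTL and copies one QImode byte per iteration):

       iter = 0;
       goto cmp;
     top:
       ((char *) x)[iter] = ((char *) y)[iter];
       iter++;
     cmp:
       if (iter < size)
         goto top;                                                     */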
2081 \f
2082 /* Copy all or part of a value X into registers starting at REGNO.
2083 The number of registers to be filled is NREGS. */
2084
2085 void
2086 move_block_to_reg (regno, x, nregs, mode)
2087 int regno;
2088 rtx x;
2089 int nregs;
2090 enum machine_mode mode;
2091 {
2092 int i;
2093 #ifdef HAVE_load_multiple
2094 rtx pat;
2095 rtx last;
2096 #endif
2097
2098 if (nregs == 0)
2099 return;
2100
2101 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2102 x = validize_mem (force_const_mem (mode, x));
2103
2104 /* See if the machine can do this with a load multiple insn. */
2105 #ifdef HAVE_load_multiple
2106 if (HAVE_load_multiple)
2107 {
2108 last = get_last_insn ();
2109 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
2110 GEN_INT (nregs));
2111 if (pat)
2112 {
2113 emit_insn (pat);
2114 return;
2115 }
2116 else
2117 delete_insns_since (last);
2118 }
2119 #endif
2120
2121 for (i = 0; i < nregs; i++)
2122 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
2123 operand_subword_force (x, i, mode));
2124 }
2125
2126 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2127 The number of registers to be filled is NREGS. SIZE indicates the number
2128 of bytes in the object X. */
2129
2130 void
2131 move_block_from_reg (regno, x, nregs, size)
2132 int regno;
2133 rtx x;
2134 int nregs;
2135 int size;
2136 {
2137 int i;
2138 #ifdef HAVE_store_multiple
2139 rtx pat;
2140 rtx last;
2141 #endif
2142 enum machine_mode mode;
2143
2144 if (nregs == 0)
2145 return;
2146
2147 /* If SIZE is that of a mode no bigger than a word, just use that
2148 mode's store operation. */
2149 if (size <= UNITS_PER_WORD
2150 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
2151 {
2152 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
2153 return;
2154 }
2155
2156 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
2157 to the left before storing to memory. Note that the previous test
2158 doesn't handle all cases (e.g. SIZE == 3). */
2159 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
2160 {
2161 rtx tem = operand_subword (x, 0, 1, BLKmode);
2162 rtx shift;
2163
2164 if (tem == 0)
2165 abort ();
2166
2167 shift = expand_shift (LSHIFT_EXPR, word_mode,
2168 gen_rtx_REG (word_mode, regno),
2169 build_int_2 ((UNITS_PER_WORD - size)
2170 * BITS_PER_UNIT, 0), NULL_RTX, 0);
2171 emit_move_insn (tem, shift);
2172 return;
2173 }
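
  /* Illustrative example of the shift above (assuming 32-bit words):
     for SIZE == 3 the register contents are shifted left by
     (UNITS_PER_WORD - SIZE) * BITS_PER_UNIT = (4 - 3) * 8 = 8 bits,
     so the three significant bytes occupy the high-order end of the
     word and therefore the low memory addresses on a big-endian
     target.  */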
2174
2175 /* See if the machine can do this with a store multiple insn. */
2176 #ifdef HAVE_store_multiple
2177 if (HAVE_store_multiple)
2178 {
2179 last = get_last_insn ();
2180 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
2181 GEN_INT (nregs));
2182 if (pat)
2183 {
2184 emit_insn (pat);
2185 return;
2186 }
2187 else
2188 delete_insns_since (last);
2189 }
2190 #endif
2191
2192 for (i = 0; i < nregs; i++)
2193 {
2194 rtx tem = operand_subword (x, i, 1, BLKmode);
2195
2196 if (tem == 0)
2197 abort ();
2198
2199 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
2200 }
2201 }
2202
2203 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2204 ORIG, where ORIG is a non-consecutive group of registers represented by
2205 a PARALLEL. The clone is identical to the original except in that the
2206 original set of registers is replaced by a new set of pseudo registers.
2207 The new set has the same modes as the original set. */
2208
2209 rtx
2210 gen_group_rtx (orig)
2211 rtx orig;
2212 {
2213 int i, length;
2214 rtx *tmps;
2215
2216 if (GET_CODE (orig) != PARALLEL)
2217 abort ();
2218
2219 length = XVECLEN (orig, 0);
2220 tmps = (rtx *) alloca (sizeof (rtx) * length);
2221
2222 /* Skip a NULL entry in first slot. */
2223 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
2224
2225 if (i)
2226 tmps[0] = 0;
2227
2228 for (; i < length; i++)
2229 {
2230 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
2231 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
2232
2233 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
2234 }
2235
2236 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
2237 }
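
/* Illustrative shape of such a group (hypothetical example): a value
   split across two registers at byte offsets 0 and 8 might look like

     (parallel [(expr_list (reg:DI r1) (const_int 0))
                (expr_list (reg:SI r2) (const_int 8))])

   gen_group_rtx keeps the modes and offsets but substitutes fresh
   pseudo registers for r1 and r2.  */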
2238
2239 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
2240 registers represented by a PARALLEL. SSIZE represents the total size of
2241 block SRC in bytes, or -1 if not known. */
2242 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
2243 the balance will be in what would be the low-order memory addresses, i.e.
2244 left justified for big endian, right justified for little endian. This
2245 happens to be true for the targets currently using this support. If this
2246 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
2247 would be needed. */
2248
2249 void
2250 emit_group_load (dst, orig_src, ssize)
2251 rtx dst, orig_src;
2252 int ssize;
2253 {
2254 rtx *tmps, src;
2255 int start, i;
2256
2257 if (GET_CODE (dst) != PARALLEL)
2258 abort ();
2259
2260 /* Check for a NULL entry, used to indicate that the parameter goes
2261 both on the stack and in registers. */
2262 if (XEXP (XVECEXP (dst, 0, 0), 0))
2263 start = 0;
2264 else
2265 start = 1;
2266
2267 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
2268
2269 /* Process the pieces. */
2270 for (i = start; i < XVECLEN (dst, 0); i++)
2271 {
2272 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2273 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2274 unsigned int bytelen = GET_MODE_SIZE (mode);
2275 int shift = 0;
2276
2277 /* Handle trailing fragments that run over the size of the struct. */
2278 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2279 {
2280 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2281 bytelen = ssize - bytepos;
2282 if (bytelen <= 0)
2283 abort ();
2284 }
2285
2286 /* If we won't be loading directly from memory, protect the real source
2287 from strange tricks we might play; but make sure that the source can
2288 be loaded directly into the destination. */
2289 src = orig_src;
2290 if (GET_CODE (orig_src) != MEM
2291 && (!CONSTANT_P (orig_src)
2292 || (GET_MODE (orig_src) != mode
2293 && GET_MODE (orig_src) != VOIDmode)))
2294 {
2295 if (GET_MODE (orig_src) == VOIDmode)
2296 src = gen_reg_rtx (mode);
2297 else
2298 src = gen_reg_rtx (GET_MODE (orig_src));
2299
2300 emit_move_insn (src, orig_src);
2301 }
2302
2303 /* Optimize the access just a bit. */
2304 if (GET_CODE (src) == MEM
2305 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2306 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2307 && bytelen == GET_MODE_SIZE (mode))
2308 {
2309 tmps[i] = gen_reg_rtx (mode);
2310 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2311 }
2312 else if (GET_CODE (src) == CONCAT)
2313 {
2314 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2315 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2316
2317 if ((bytepos == 0 && bytelen == slen0)
2318 || (bytepos != 0 && bytepos + bytelen <= slen))
2319 {
2320 /* The following assumes that the concatenated objects all
2321 have the same size. In this case, a simple calculation
2322 can be used to determine the object and the bit field
2323 to be extracted. */
2324 tmps[i] = XEXP (src, bytepos / slen0);
2325 if (! CONSTANT_P (tmps[i])
2326 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2327 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2328 (bytepos % slen0) * BITS_PER_UNIT,
2329 1, NULL_RTX, mode, mode, ssize);
2330 }
2331 else if (bytepos == 0)
2332 {
2333 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2334 emit_move_insn (mem, src);
2335 tmps[i] = adjust_address (mem, mode, 0);
2336 }
2337 else
2338 abort ();
2339 }
2340 else if (CONSTANT_P (src)
2341 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2342 tmps[i] = src;
2343 else
2344 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2345 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2346 mode, mode, ssize);
2347
2348 if (BYTES_BIG_ENDIAN && shift)
2349 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2350 tmps[i], 0, OPTAB_WIDEN);
2351 }
2352
2353 emit_queue ();
2354
2355 /* Copy the extracted pieces into the proper (probable) hard regs. */
2356 for (i = start; i < XVECLEN (dst, 0); i++)
2357 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2358 }
2359
2360 /* Emit code to move a block SRC to block DST, where SRC and DST are
2361 non-consecutive groups of registers, each represented by a PARALLEL. */
2362
2363 void
2364 emit_group_move (dst, src)
2365 rtx dst, src;
2366 {
2367 int i;
2368
2369 if (GET_CODE (src) != PARALLEL
2370 || GET_CODE (dst) != PARALLEL
2371 || XVECLEN (src, 0) != XVECLEN (dst, 0))
2372 abort ();
2373
2374 /* Skip first entry if NULL. */
2375 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2376 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2377 XEXP (XVECEXP (src, 0, i), 0));
2378 }
2379
2380 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2381 registers represented by a PARALLEL. SSIZE represents the total size of
2382 block DST, or -1 if not known. */
2383
2384 void
2385 emit_group_store (orig_dst, src, ssize)
2386 rtx orig_dst, src;
2387 int ssize;
2388 {
2389 rtx *tmps, dst;
2390 int start, i;
2391
2392 if (GET_CODE (src) != PARALLEL)
2393 abort ();
2394
2395 /* Check for a NULL entry, used to indicate that the parameter goes
2396 both on the stack and in registers. */
2397 if (XEXP (XVECEXP (src, 0, 0), 0))
2398 start = 0;
2399 else
2400 start = 1;
2401
2402 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2403
2404 /* Copy the (probable) hard regs into pseudos. */
2405 for (i = start; i < XVECLEN (src, 0); i++)
2406 {
2407 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2408 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2409 emit_move_insn (tmps[i], reg);
2410 }
2411 emit_queue ();
2412
2413 /* If we won't be storing directly into memory, protect the real destination
2414 from strange tricks we might play. */
2415 dst = orig_dst;
2416 if (GET_CODE (dst) == PARALLEL)
2417 {
2418 rtx temp;
2419
2420 /* We can get a PARALLEL dst if there is a conditional expression in
2421 a return statement. In that case, the dst and src are the same,
2422 so no action is necessary. */
2423 if (rtx_equal_p (dst, src))
2424 return;
2425
2426 /* It is unclear if we can ever reach here, but we may as well handle
2427 it. Allocate a temporary, and split this into a store/load to/from
2428 the temporary. */
2429
2430 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2431 emit_group_store (temp, src, ssize);
2432 emit_group_load (dst, temp, ssize);
2433 return;
2434 }
2435 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2436 {
2437 dst = gen_reg_rtx (GET_MODE (orig_dst));
2438 /* Make life a bit easier for combine. */
2439 emit_move_insn (dst, const0_rtx);
2440 }
2441
2442 /* Process the pieces. */
2443 for (i = start; i < XVECLEN (src, 0); i++)
2444 {
2445 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2446 enum machine_mode mode = GET_MODE (tmps[i]);
2447 unsigned int bytelen = GET_MODE_SIZE (mode);
2448 rtx dest = dst;
2449
2450 /* Handle trailing fragments that run over the size of the struct. */
2451 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2452 {
2453 if (BYTES_BIG_ENDIAN)
2454 {
2455 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2456 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2457 tmps[i], 0, OPTAB_WIDEN);
2458 }
2459 bytelen = ssize - bytepos;
2460 }
2461
2462 if (GET_CODE (dst) == CONCAT)
2463 {
2464 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2465 dest = XEXP (dst, 0);
2466 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2467 {
2468 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2469 dest = XEXP (dst, 1);
2470 }
2471 else
2472 abort ();
2473 }
2474
2475 /* Optimize the access just a bit. */
2476 if (GET_CODE (dest) == MEM
2477 && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2478 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2479 && bytelen == GET_MODE_SIZE (mode))
2480 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2481 else
2482 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2483 mode, tmps[i], ssize);
2484 }
2485
2486 emit_queue ();
2487
2488 /* Copy from the pseudo into the (probable) hard reg. */
2489 if (GET_CODE (dst) == REG)
2490 emit_move_insn (orig_dst, dst);
2491 }
2492
2493 /* Generate code to copy a BLKmode object of TYPE out of a
2494 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2495 is null, a stack temporary is created. TGTBLK is returned.
2496
2497 The primary purpose of this routine is to handle functions
2498 that return BLKmode structures in registers. Some machines
2499 (the PA for example) want to return all small structures
2500 in registers regardless of the structure's alignment. */
2501
2502 rtx
2503 copy_blkmode_from_reg (tgtblk, srcreg, type)
2504 rtx tgtblk;
2505 rtx srcreg;
2506 tree type;
2507 {
2508 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2509 rtx src = NULL, dst = NULL;
2510 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2511 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2512
2513 if (tgtblk == 0)
2514 {
2515 tgtblk = assign_temp (build_qualified_type (type,
2516 (TYPE_QUALS (type)
2517 | TYPE_QUAL_CONST)),
2518 0, 1, 1);
2519 preserve_temp_slots (tgtblk);
2520 }
2521
2522 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2523 into a new pseudo which is a full word. */
2524
2525 if (GET_MODE (srcreg) != BLKmode
2526 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2527 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2528
2529 /* Structures whose size is not a multiple of a word are aligned
2530 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2531 machine, this means we must skip the empty high order bytes when
2532 calculating the bit offset. */
2533 if (BYTES_BIG_ENDIAN
2534 && bytes % UNITS_PER_WORD)
2535 big_endian_correction
2536 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2537
2538 /* Copy the structure BITSIZE bits at a time.
2539
2540 We could probably emit more efficient code for machines which do not use
2541 strict alignment, but it doesn't seem worth the effort at the current
2542 time. */
2543 for (bitpos = 0, xbitpos = big_endian_correction;
2544 bitpos < bytes * BITS_PER_UNIT;
2545 bitpos += bitsize, xbitpos += bitsize)
2546 {
2547 /* We need a new source operand each time xbitpos is on a
2548 word boundary and when xbitpos == big_endian_correction
2549 (the first time through). */
2550 if (xbitpos % BITS_PER_WORD == 0
2551 || xbitpos == big_endian_correction)
2552 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2553 GET_MODE (srcreg));
2554
2555 /* We need a new destination operand each time bitpos is on
2556 a word boundary. */
2557 if (bitpos % BITS_PER_WORD == 0)
2558 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2559
2560 /* Use xbitpos for the source extraction (right justified) and
2561 bitpos for the destination store (left justified). */
2562 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2563 extract_bit_field (src, bitsize,
2564 xbitpos % BITS_PER_WORD, 1,
2565 NULL_RTX, word_mode, word_mode,
2566 BITS_PER_WORD),
2567 BITS_PER_WORD);
2568 }
2569
2570 return tgtblk;
2571 }
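
/* Worked example of the correction above (assumptions: a 6-byte struct,
   32-bit words, BYTES_BIG_ENDIAN): bytes % UNITS_PER_WORD is 2, so
   big_endian_correction = 32 - 2 * 8 = 16 bits, and xbitpos starts at
   16 rather than 0 so that the empty high-order bytes are skipped when
   extracting from the source registers.  */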
2572
2573 /* Add a USE expression for REG to the (possibly empty) list pointed
2574 to by CALL_FUSAGE. REG must denote a hard register. */
2575
2576 void
2577 use_reg (call_fusage, reg)
2578 rtx *call_fusage, reg;
2579 {
2580 if (GET_CODE (reg) != REG
2581 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2582 abort ();
2583
2584 *call_fusage
2585 = gen_rtx_EXPR_LIST (VOIDmode,
2586 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2587 }
2588
2589 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2590 starting at REGNO. All of these registers must be hard registers. */
2591
2592 void
2593 use_regs (call_fusage, regno, nregs)
2594 rtx *call_fusage;
2595 int regno;
2596 int nregs;
2597 {
2598 int i;
2599
2600 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2601 abort ();
2602
2603 for (i = 0; i < nregs; i++)
2604 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2605 }
2606
2607 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2608 PARALLEL REGS. This is for calls that pass values in multiple
2609 non-contiguous locations. The Irix 6 ABI has examples of this. */
2610
2611 void
2612 use_group_regs (call_fusage, regs)
2613 rtx *call_fusage;
2614 rtx regs;
2615 {
2616 int i;
2617
2618 for (i = 0; i < XVECLEN (regs, 0); i++)
2619 {
2620 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2621
2622 /* A NULL entry means the parameter goes both on the stack and in
2623 registers. This can also be a MEM for targets that pass values
2624 partially on the stack and partially in registers. */
2625 if (reg != 0 && GET_CODE (reg) == REG)
2626 use_reg (call_fusage, reg);
2627 }
2628 }
2629 \f
2630
2631 /* Determine whether the LEN bytes generated by CONSTFUN can be
2632 stored to memory using several move instructions. CONSTFUNDATA is
2633 a pointer which will be passed as argument in every CONSTFUN call.
2634 ALIGN is maximum alignment we can assume. Return nonzero if a
2635 call to store_by_pieces should succeed. */
2636
2637 int
2638 can_store_by_pieces (len, constfun, constfundata, align)
2639 unsigned HOST_WIDE_INT len;
2640 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2641 PTR constfundata;
2642 unsigned int align;
2643 {
2644 unsigned HOST_WIDE_INT max_size, l;
2645 HOST_WIDE_INT offset = 0;
2646 enum machine_mode mode, tmode;
2647 enum insn_code icode;
2648 int reverse;
2649 rtx cst;
2650
2651 if (! MOVE_BY_PIECES_P (len, align))
2652 return 0;
2653
2654 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2655 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2656 align = MOVE_MAX * BITS_PER_UNIT;
2657
2658 /* We would first store what we can in the largest integer mode, then go to
2659 successively smaller modes. */
2660
2661 for (reverse = 0;
2662 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2663 reverse++)
2664 {
2665 l = len;
2666 mode = VOIDmode;
2667 max_size = STORE_MAX_PIECES + 1;
2668 while (max_size > 1)
2669 {
2670 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2671 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2672 if (GET_MODE_SIZE (tmode) < max_size)
2673 mode = tmode;
2674
2675 if (mode == VOIDmode)
2676 break;
2677
2678 icode = mov_optab->handlers[(int) mode].insn_code;
2679 if (icode != CODE_FOR_nothing
2680 && align >= GET_MODE_ALIGNMENT (mode))
2681 {
2682 unsigned int size = GET_MODE_SIZE (mode);
2683
2684 while (l >= size)
2685 {
2686 if (reverse)
2687 offset -= size;
2688
2689 cst = (*constfun) (constfundata, offset, mode);
2690 if (!LEGITIMATE_CONSTANT_P (cst))
2691 return 0;
2692
2693 if (!reverse)
2694 offset += size;
2695
2696 l -= size;
2697 }
2698 }
2699
2700 max_size = GET_MODE_SIZE (mode);
2701 }
2702
2703 /* The code above should have handled everything. */
2704 if (l != 0)
2705 abort ();
2706 }
2707
2708 return 1;
2709 }
2710
2711 /* Generate several move instructions to store LEN bytes generated by
2712 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2713 pointer which will be passed as argument in every CONSTFUN call.
2714 ALIGN is maximum alignment we can assume. */
2715
2716 void
2717 store_by_pieces (to, len, constfun, constfundata, align)
2718 rtx to;
2719 unsigned HOST_WIDE_INT len;
2720 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2721 PTR constfundata;
2722 unsigned int align;
2723 {
2724 struct store_by_pieces data;
2725
2726 if (! MOVE_BY_PIECES_P (len, align))
2727 abort ();
2728 to = protect_from_queue (to, 1);
2729 data.constfun = constfun;
2730 data.constfundata = constfundata;
2731 data.len = len;
2732 data.to = to;
2733 store_by_pieces_1 (&data, align);
2734 }
2735
2736 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2737 rtx with BLKmode). The caller must pass TO through protect_from_queue
2738 before calling. ALIGN is maximum alignment we can assume. */
2739
2740 static void
2741 clear_by_pieces (to, len, align)
2742 rtx to;
2743 unsigned HOST_WIDE_INT len;
2744 unsigned int align;
2745 {
2746 struct store_by_pieces data;
2747
2748 data.constfun = clear_by_pieces_1;
2749 data.constfundata = NULL;
2750 data.len = len;
2751 data.to = to;
2752 store_by_pieces_1 (&data, align);
2753 }
2754
2755 /* Callback routine for clear_by_pieces.
2756 Return const0_rtx unconditionally. */
2757
2758 static rtx
2759 clear_by_pieces_1 (data, offset, mode)
2760 PTR data ATTRIBUTE_UNUSED;
2761 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2762 enum machine_mode mode ATTRIBUTE_UNUSED;
2763 {
2764 return const0_rtx;
2765 }
2766
2767 /* Subroutine of clear_by_pieces and store_by_pieces.
2768 Generate several move instructions to store LEN bytes of block TO. (A MEM
2769 rtx with BLKmode). The caller must pass TO through protect_from_queue
2770 before calling. ALIGN is maximum alignment we can assume. */
2771
2772 static void
2773 store_by_pieces_1 (data, align)
2774 struct store_by_pieces *data;
2775 unsigned int align;
2776 {
2777 rtx to_addr = XEXP (data->to, 0);
2778 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2779 enum machine_mode mode = VOIDmode, tmode;
2780 enum insn_code icode;
2781
2782 data->offset = 0;
2783 data->to_addr = to_addr;
2784 data->autinc_to
2785 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2786 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2787
2788 data->explicit_inc_to = 0;
2789 data->reverse
2790 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2791 if (data->reverse)
2792 data->offset = data->len;
2793
2794 /* If storing requires more than two move insns,
2795 copy addresses to registers (to make displacements shorter)
2796 and use post-increment if available. */
2797 if (!data->autinc_to
2798 && move_by_pieces_ninsns (data->len, align) > 2)
2799 {
2800 /* Determine the main mode we'll be using. */
2801 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2802 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2803 if (GET_MODE_SIZE (tmode) < max_size)
2804 mode = tmode;
2805
2806 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2807 {
2808 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2809 data->autinc_to = 1;
2810 data->explicit_inc_to = -1;
2811 }
2812
2813 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2814 && ! data->autinc_to)
2815 {
2816 data->to_addr = copy_addr_to_reg (to_addr);
2817 data->autinc_to = 1;
2818 data->explicit_inc_to = 1;
2819 }
2820
2821 if ( !data->autinc_to && CONSTANT_P (to_addr))
2822 data->to_addr = copy_addr_to_reg (to_addr);
2823 }
2824
2825 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2826 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2827 align = MOVE_MAX * BITS_PER_UNIT;
2828
2829 /* First store what we can in the largest integer mode, then go to
2830 successively smaller modes. */
2831
2832 while (max_size > 1)
2833 {
2834 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2835 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2836 if (GET_MODE_SIZE (tmode) < max_size)
2837 mode = tmode;
2838
2839 if (mode == VOIDmode)
2840 break;
2841
2842 icode = mov_optab->handlers[(int) mode].insn_code;
2843 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2844 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2845
2846 max_size = GET_MODE_SIZE (mode);
2847 }
2848
2849 /* The code above should have handled everything. */
2850 if (data->len != 0)
2851 abort ();
2852 }
2853
2854 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2855 with move instructions for mode MODE. GENFUN is the gen_... function
2856 to make a move insn for that mode. DATA has all the other info. */
2857
2858 static void
2859 store_by_pieces_2 (genfun, mode, data)
2860 rtx (*genfun) PARAMS ((rtx, ...));
2861 enum machine_mode mode;
2862 struct store_by_pieces *data;
2863 {
2864 unsigned int size = GET_MODE_SIZE (mode);
2865 rtx to1, cst;
2866
2867 while (data->len >= size)
2868 {
2869 if (data->reverse)
2870 data->offset -= size;
2871
2872 if (data->autinc_to)
2873 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2874 data->offset);
2875 else
2876 to1 = adjust_address (data->to, mode, data->offset);
2877
2878 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2879 emit_insn (gen_add2_insn (data->to_addr,
2880 GEN_INT (-(HOST_WIDE_INT) size)));
2881
2882 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2883 emit_insn ((*genfun) (to1, cst));
2884
2885 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2886 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2887
2888 if (! data->reverse)
2889 data->offset += size;
2890
2891 data->len -= size;
2892 }
2893 }
2894 \f
2895 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2896 its length in bytes. */
2897
2898 rtx
2899 clear_storage (object, size)
2900 rtx object;
2901 rtx size;
2902 {
2903 rtx retval = 0;
2904 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2905 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2906
2907 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2908 just move a zero. Otherwise, do this a piece at a time. */
2909 if (GET_MODE (object) != BLKmode
2910 && GET_CODE (size) == CONST_INT
2911 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2912 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2913 else
2914 {
2915 object = protect_from_queue (object, 1);
2916 size = protect_from_queue (size, 0);
2917
2918 if (GET_CODE (size) == CONST_INT
2919 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2920 clear_by_pieces (object, INTVAL (size), align);
2921 else if (clear_storage_via_clrstr (object, size, align))
2922 ;
2923 else
2924 retval = clear_storage_via_libcall (object, size);
2925 }
2926
2927 return retval;
2928 }
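
/* Usage sketch (hypothetical caller): to zero a 32-byte BLKmode MEM,

     clear_storage (mem, GEN_INT (32));

   A constant size that satisfies CLEAR_BY_PIECES_P for the known
   alignment is expanded inline by clear_by_pieces; otherwise a clrstr
   pattern and then a library call are tried, in that order.  */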
2929
2930 /* A subroutine of clear_storage. Expand a clrstr pattern;
2931 return true if successful. */
2932
2933 static bool
2934 clear_storage_via_clrstr (object, size, align)
2935 rtx object, size;
2936 unsigned int align;
2937 {
2938 /* Try the most limited insn first, because there's no point
2939 including more than one in the machine description unless
2940 the more limited one has some advantage. */
2941
2942 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2943 enum machine_mode mode;
2944
2945 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2946 mode = GET_MODE_WIDER_MODE (mode))
2947 {
2948 enum insn_code code = clrstr_optab[(int) mode];
2949 insn_operand_predicate_fn pred;
2950
2951 if (code != CODE_FOR_nothing
2952 /* We don't need MODE to be narrower than
2953 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2954 the mode mask, as it is returned by the macro, it will
2955 definitely be less than the actual mode mask. */
2956 && ((GET_CODE (size) == CONST_INT
2957 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2958 <= (GET_MODE_MASK (mode) >> 1)))
2959 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2960 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2961 || (*pred) (object, BLKmode))
2962 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2963 || (*pred) (opalign, VOIDmode)))
2964 {
2965 rtx op1;
2966 rtx last = get_last_insn ();
2967 rtx pat;
2968
2969 op1 = convert_to_mode (mode, size, 1);
2970 pred = insn_data[(int) code].operand[1].predicate;
2971 if (pred != 0 && ! (*pred) (op1, mode))
2972 op1 = copy_to_mode_reg (mode, op1);
2973
2974 pat = GEN_FCN ((int) code) (object, op1, opalign);
2975 if (pat)
2976 {
2977 emit_insn (pat);
2978 return true;
2979 }
2980 else
2981 delete_insns_since (last);
2982 }
2983 }
2984
2985 return false;
2986 }
2987
2988 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2989 Return the return value of memset, 0 otherwise. */
2990
2991 static rtx
2992 clear_storage_via_libcall (object, size)
2993 rtx object, size;
2994 {
2995 tree call_expr, arg_list, fn, object_tree, size_tree;
2996 enum machine_mode size_mode;
2997 rtx retval;
2998
2999 /* OBJECT or SIZE may have been passed through protect_from_queue.
3000
3001 It is unsafe to save the value generated by protect_from_queue
3002 and reuse it later. Consider what happens if emit_queue is
3003 called before the return value from protect_from_queue is used.
3004
3005 Expansion of the CALL_EXPR below will call emit_queue before
3006 we are finished emitting RTL for argument setup. So if we are
3007 not careful we could get the wrong value for an argument.
3008
3009 To avoid this problem we go ahead and emit code to copy OBJECT
3010 and SIZE into new pseudos. We can then place those new pseudos
3011 into an RTL_EXPR and use them later, even after a call to
3012 emit_queue.
3013
3014 Note this is not strictly needed for library calls since they
3015 do not call emit_queue before loading their arguments. However,
3016 we may need to have library calls call emit_queue in the future
3017 since failing to do so could cause problems for targets which
3018 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
3019
3020 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
3021
3022 if (TARGET_MEM_FUNCTIONS)
3023 size_mode = TYPE_MODE (sizetype);
3024 else
3025 size_mode = TYPE_MODE (unsigned_type_node);
3026 size = convert_to_mode (size_mode, size, 1);
3027 size = copy_to_mode_reg (size_mode, size);
3028
3029 /* It is incorrect to use the libcall calling conventions to call
3030 memset in this context. This could be a user call to memset and
3031 the user may wish to examine the return value from memset. For
3032 targets where libcalls and normal calls have different conventions
3033 for returning pointers, we could end up generating incorrect code.
3034
3035 For convenience, we generate the call to bzero this way as well. */
3036
3037 object_tree = make_tree (ptr_type_node, object);
3038 if (TARGET_MEM_FUNCTIONS)
3039 size_tree = make_tree (sizetype, size);
3040 else
3041 size_tree = make_tree (unsigned_type_node, size);
3042
3043 fn = clear_storage_libcall_fn (true);
3044 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
3045 if (TARGET_MEM_FUNCTIONS)
3046 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3047 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
3048
3049 /* Now we have to build up the CALL_EXPR itself. */
3050 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3051 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3052 call_expr, arg_list, NULL_TREE);
3053 TREE_SIDE_EFFECTS (call_expr) = 1;
3054
3055 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3056
3057 /* If we are initializing a readonly value, show the above call
3058 clobbered it. Otherwise, a load from it may erroneously be
3059 hoisted from a loop. */
3060 if (RTX_UNCHANGING_P (object))
3061 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3062
3063 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3064 }
3065
3066 /* A subroutine of clear_storage_via_libcall. Create the tree node
3067 for the function we use for block clears. The first time FOR_CALL
3068 is true, we call assemble_external. */
3069
3070 static GTY(()) tree block_clear_fn;
3071
3072 static tree
3073 clear_storage_libcall_fn (for_call)
3074 int for_call;
3075 {
3076 static bool emitted_extern;
3077 tree fn = block_clear_fn, args;
3078
3079 if (!fn)
3080 {
3081 if (TARGET_MEM_FUNCTIONS)
3082 {
3083 fn = get_identifier ("memset");
3084 args = build_function_type_list (ptr_type_node, ptr_type_node,
3085 integer_type_node, sizetype,
3086 NULL_TREE);
3087 }
3088 else
3089 {
3090 fn = get_identifier ("bzero");
3091 args = build_function_type_list (void_type_node, ptr_type_node,
3092 unsigned_type_node, NULL_TREE);
3093 }
3094
3095 fn = build_decl (FUNCTION_DECL, fn, args);
3096 DECL_EXTERNAL (fn) = 1;
3097 TREE_PUBLIC (fn) = 1;
3098 DECL_ARTIFICIAL (fn) = 1;
3099 TREE_NOTHROW (fn) = 1;
3100
3101 block_clear_fn = fn;
3102 }
3103
3104 if (for_call && !emitted_extern)
3105 {
3106 emitted_extern = true;
3107 make_decl_rtl (fn, NULL);
3108 assemble_external (fn);
3109 }
3110
3111 return fn;
3112 }
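
/* For reference (illustrative only, derived from the argument lists
   built above): the FUNCTION_DECL corresponds roughly to one of

       void *memset (void *s, int c, size_t n);
       void bzero (void *s, unsigned n);

   depending on TARGET_MEM_FUNCTIONS.  */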
3113 \f
3114 /* Generate code to copy Y into X.
3115 Both Y and X must have the same mode, except that
3116 Y can be a constant with VOIDmode.
3117 This mode cannot be BLKmode; use emit_block_move for that.
3118
3119 Return the last instruction emitted. */
3120
3121 rtx
3122 emit_move_insn (x, y)
3123 rtx x, y;
3124 {
3125 enum machine_mode mode = GET_MODE (x);
3126 rtx y_cst = NULL_RTX;
3127 rtx last_insn;
3128
3129 x = protect_from_queue (x, 1);
3130 y = protect_from_queue (y, 0);
3131
3132 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3133 abort ();
3134
3135 /* Never force constant_p_rtx to memory. */
3136 if (GET_CODE (y) == CONSTANT_P_RTX)
3137 ;
3138 else if (CONSTANT_P (y))
3139 {
3140 if (optimize
3141 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3142 && (last_insn = compress_float_constant (x, y)))
3143 return last_insn;
3144
3145 if (!LEGITIMATE_CONSTANT_P (y))
3146 {
3147 y_cst = y;
3148 y = force_const_mem (mode, y);
3149
3150 /* If the target's cannot_force_const_mem prevented the spill,
3151 assume that the target's move expanders will also take care
3152 of the non-legitimate constant. */
3153 if (!y)
3154 y = y_cst;
3155 }
3156 }
3157
3158 /* If X or Y are memory references, verify that their addresses are valid
3159 for the machine. */
3160 if (GET_CODE (x) == MEM
3161 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3162 && ! push_operand (x, GET_MODE (x)))
3163 || (flag_force_addr
3164 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3165 x = validize_mem (x);
3166
3167 if (GET_CODE (y) == MEM
3168 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3169 || (flag_force_addr
3170 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3171 y = validize_mem (y);
3172
3173 if (mode == BLKmode)
3174 abort ();
3175
3176 last_insn = emit_move_insn_1 (x, y);
3177
3178 if (y_cst && GET_CODE (x) == REG)
3179 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3180
3181 return last_insn;
3182 }
3183
3184 /* Low level part of emit_move_insn.
3185 Called just like emit_move_insn, but assumes X and Y
3186 are basically valid. */
3187
3188 rtx
3189 emit_move_insn_1 (x, y)
3190 rtx x, y;
3191 {
3192 enum machine_mode mode = GET_MODE (x);
3193 enum machine_mode submode;
3194 enum mode_class class = GET_MODE_CLASS (mode);
3195
3196 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3197 abort ();
3198
3199 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3200 return
3201 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3202
3203 /* Expand complex moves by moving real part and imag part, if possible. */
3204 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3205 && BLKmode != (submode = GET_MODE_INNER (mode))
3206 && (mov_optab->handlers[(int) submode].insn_code
3207 != CODE_FOR_nothing))
3208 {
3209 /* Don't split destination if it is a stack push. */
3210 int stack = push_operand (x, GET_MODE (x));
3211
3212 #ifdef PUSH_ROUNDING
3213 /* In case we output to the stack, but the size is smaller than the
3214 machine can push exactly, we need to use move instructions. */
3215 if (stack
3216 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3217 != GET_MODE_SIZE (submode)))
3218 {
3219 rtx temp;
3220 HOST_WIDE_INT offset1, offset2;
3221
3222 /* Do not use anti_adjust_stack, since we don't want to update
3223 stack_pointer_delta. */
3224 temp = expand_binop (Pmode,
3225 #ifdef STACK_GROWS_DOWNWARD
3226 sub_optab,
3227 #else
3228 add_optab,
3229 #endif
3230 stack_pointer_rtx,
3231 GEN_INT
3232 (PUSH_ROUNDING
3233 (GET_MODE_SIZE (GET_MODE (x)))),
3234 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3235
3236 if (temp != stack_pointer_rtx)
3237 emit_move_insn (stack_pointer_rtx, temp);
3238
3239 #ifdef STACK_GROWS_DOWNWARD
3240 offset1 = 0;
3241 offset2 = GET_MODE_SIZE (submode);
3242 #else
3243 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3244 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3245 + GET_MODE_SIZE (submode));
3246 #endif
3247
3248 emit_move_insn (change_address (x, submode,
3249 gen_rtx_PLUS (Pmode,
3250 stack_pointer_rtx,
3251 GEN_INT (offset1))),
3252 gen_realpart (submode, y));
3253 emit_move_insn (change_address (x, submode,
3254 gen_rtx_PLUS (Pmode,
3255 stack_pointer_rtx,
3256 GEN_INT (offset2))),
3257 gen_imagpart (submode, y));
3258 }
3259 else
3260 #endif
3261 /* If this is a stack push, push the highpart first, so it
3262 will be in the argument order.
3263
3264 In that case, change_address is used only to convert
3265 the mode, not to change the address. */
3266 if (stack)
3267 {
3268 /* Note that the real part always precedes the imag part in memory
3269 regardless of machine's endianness. */
3270 #ifdef STACK_GROWS_DOWNWARD
3271 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3272 (gen_rtx_MEM (submode, XEXP (x, 0)),
3273 gen_imagpart (submode, y)));
3274 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3275 (gen_rtx_MEM (submode, XEXP (x, 0)),
3276 gen_realpart (submode, y)));
3277 #else
3278 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3279 (gen_rtx_MEM (submode, XEXP (x, 0)),
3280 gen_realpart (submode, y)));
3281 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3282 (gen_rtx_MEM (submode, XEXP (x, 0)),
3283 gen_imagpart (submode, y)));
3284 #endif
3285 }
3286 else
3287 {
3288 rtx realpart_x, realpart_y;
3289 rtx imagpart_x, imagpart_y;
3290
3291 /* If this is a complex value with each part being smaller than a
3292 word, the usual calling sequence will likely pack the pieces into
3293 a single register. Unfortunately, SUBREG of hard registers only
3294 deals in terms of words, so we have a problem converting input
3295 arguments to the CONCAT of two registers that is used elsewhere
3296 for complex values. If this is before reload, we can copy it into
3297 memory and reload. FIXME, we should see about using extract and
3298 insert on integer registers, but complex short and complex char
3299 variables should be rarely used. */
3300 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3301 && (reload_in_progress | reload_completed) == 0)
3302 {
3303 int packed_dest_p
3304 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3305 int packed_src_p
3306 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3307
3308 if (packed_dest_p || packed_src_p)
3309 {
3310 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3311 ? MODE_FLOAT : MODE_INT);
3312
3313 enum machine_mode reg_mode
3314 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3315
3316 if (reg_mode != BLKmode)
3317 {
3318 rtx mem = assign_stack_temp (reg_mode,
3319 GET_MODE_SIZE (mode), 0);
3320 rtx cmem = adjust_address (mem, mode, 0);
3321
3322 cfun->cannot_inline
3323 = N_("function using short complex types cannot be inline");
3324
3325 if (packed_dest_p)
3326 {
3327 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3328
3329 emit_move_insn_1 (cmem, y);
3330 return emit_move_insn_1 (sreg, mem);
3331 }
3332 else
3333 {
3334 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3335
3336 emit_move_insn_1 (mem, sreg);
3337 return emit_move_insn_1 (x, cmem);
3338 }
3339 }
3340 }
3341 }
3342
3343 realpart_x = gen_realpart (submode, x);
3344 realpart_y = gen_realpart (submode, y);
3345 imagpart_x = gen_imagpart (submode, x);
3346 imagpart_y = gen_imagpart (submode, y);
3347
3348 /* Show the output dies here. This is necessary for SUBREGs
3349 of pseudos since we cannot track their lifetimes correctly;
3350 hard regs shouldn't appear here except as return values.
3351 We never want to emit such a clobber after reload. */
3352 if (x != y
3353 && ! (reload_in_progress || reload_completed)
3354 && (GET_CODE (realpart_x) == SUBREG
3355 || GET_CODE (imagpart_x) == SUBREG))
3356 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3357
3358 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3359 (realpart_x, realpart_y));
3360 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3361 (imagpart_x, imagpart_y));
3362 }
3363
3364 return get_last_insn ();
3365 }
3366
3367 /* This will handle any multi-word or full-word mode that lacks a move_insn
3368 pattern. However, you will get better code if you define such patterns,
3369 even if they must turn into multiple assembler instructions. */
3370 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3371 {
3372 rtx last_insn = 0;
3373 rtx seq, inner;
3374 int need_clobber;
3375 int i;
3376
3377 #ifdef PUSH_ROUNDING
3378
3379 /* If X is a push on the stack, do the push now and replace
3380 X with a reference to the stack pointer. */
3381 if (push_operand (x, GET_MODE (x)))
3382 {
3383 rtx temp;
3384 enum rtx_code code;
3385
3386 /* Do not use anti_adjust_stack, since we don't want to update
3387 stack_pointer_delta. */
3388 temp = expand_binop (Pmode,
3389 #ifdef STACK_GROWS_DOWNWARD
3390 sub_optab,
3391 #else
3392 add_optab,
3393 #endif
3394 stack_pointer_rtx,
3395 GEN_INT
3396 (PUSH_ROUNDING
3397 (GET_MODE_SIZE (GET_MODE (x)))),
3398 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3399
3400 if (temp != stack_pointer_rtx)
3401 emit_move_insn (stack_pointer_rtx, temp);
3402
3403 code = GET_CODE (XEXP (x, 0));
3404
3405 /* Just hope that small offsets off SP are OK. */
3406 if (code == POST_INC)
3407 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3408 GEN_INT (-((HOST_WIDE_INT)
3409 GET_MODE_SIZE (GET_MODE (x)))));
3410 else if (code == POST_DEC)
3411 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3412 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3413 else
3414 temp = stack_pointer_rtx;
3415
3416 x = change_address (x, VOIDmode, temp);
3417 }
3418 #endif
3419
3420 /* If we are in reload, see if either operand is a MEM whose address
3421 is scheduled for replacement. */
3422 if (reload_in_progress && GET_CODE (x) == MEM
3423 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3424 x = replace_equiv_address_nv (x, inner);
3425 if (reload_in_progress && GET_CODE (y) == MEM
3426 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3427 y = replace_equiv_address_nv (y, inner);
3428
3429 start_sequence ();
3430
3431 need_clobber = 0;
3432 for (i = 0;
3433 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3434 i++)
3435 {
3436 rtx xpart = operand_subword (x, i, 1, mode);
3437 rtx ypart = operand_subword (y, i, 1, mode);
3438
3439 /* If we can't get a part of Y, put Y into memory if it is a
3440 constant. Otherwise, force it into a register. If we still
3441 can't get a part of Y, abort. */
3442 if (ypart == 0 && CONSTANT_P (y))
3443 {
3444 y = force_const_mem (mode, y);
3445 ypart = operand_subword (y, i, 1, mode);
3446 }
3447 else if (ypart == 0)
3448 ypart = operand_subword_force (y, i, mode);
3449
3450 if (xpart == 0 || ypart == 0)
3451 abort ();
3452
3453 need_clobber |= (GET_CODE (xpart) == SUBREG);
3454
3455 last_insn = emit_move_insn (xpart, ypart);
3456 }
3457
3458 seq = get_insns ();
3459 end_sequence ();
3460
3461 /* Show the output dies here. This is necessary for SUBREGs
3462 of pseudos since we cannot track their lifetimes correctly;
3463 hard regs shouldn't appear here except as return values.
3464 We never want to emit such a clobber after reload. */
3465 if (x != y
3466 && ! (reload_in_progress || reload_completed)
3467 && need_clobber != 0)
3468 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3469
3470 emit_insn (seq);
3471
3472 return last_insn;
3473 }
3474 else
3475 abort ();
3476 }
3477
3478 /* If Y is representable exactly in a narrower mode, and the target can
3479 perform the extension directly from constant or memory, then emit the
3480 move as an extension. */
3481
3482 static rtx
3483 compress_float_constant (x, y)
3484 rtx x, y;
3485 {
3486 enum machine_mode dstmode = GET_MODE (x);
3487 enum machine_mode orig_srcmode = GET_MODE (y);
3488 enum machine_mode srcmode;
3489 REAL_VALUE_TYPE r;
3490
3491 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3492
3493 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3494 srcmode != orig_srcmode;
3495 srcmode = GET_MODE_WIDER_MODE (srcmode))
3496 {
3497 enum insn_code ic;
3498 rtx trunc_y, last_insn;
3499
3500 /* Skip if the target can't extend this way. */
3501 ic = can_extend_p (dstmode, srcmode, 0);
3502 if (ic == CODE_FOR_nothing)
3503 continue;
3504
3505 /* Skip if the narrowed value isn't exact. */
3506 if (! exact_real_truncate (srcmode, &r))
3507 continue;
3508
3509 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3510
3511 if (LEGITIMATE_CONSTANT_P (trunc_y))
3512 {
3513 /* Skip if the target needs extra instructions to perform
3514 the extension. */
3515 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3516 continue;
3517 }
3518 else if (float_extend_from_mem[dstmode][srcmode])
3519 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3520 else
3521 continue;
3522
3523 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3524 last_insn = get_last_insn ();
3525
3526 if (GET_CODE (x) == REG)
3527 REG_NOTES (last_insn)
3528 = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
3529
3530 return last_insn;
3531 }
3532
3533 return NULL_RTX;
3534 }
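
/* Illustrative case (an assumption, not taken from this file): if X is
   a DFmode register and Y is the DFmode constant 1.0, the value
   truncates exactly to SFmode, so on a target whose extendsfdf2
   operand predicate accepts the truncated constant the move is emitted
   as an SFmode load followed by a float extension, with a REG_EQUAL
   note recording the original value of Y.  */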
3535 \f
3536 /* Pushing data onto the stack. */
3537
3538 /* Push a block of length SIZE (perhaps variable)
3539 and return an rtx to address the beginning of the block.
3540 Note that it is not possible for the value returned to be a QUEUED.
3541 The value may be virtual_outgoing_args_rtx.
3542
3543 EXTRA is the number of bytes of padding to push in addition to SIZE.
3544 BELOW nonzero means this padding comes at low addresses;
3545 otherwise, the padding comes at high addresses. */
3546
3547 rtx
3548 push_block (size, extra, below)
3549 rtx size;
3550 int extra, below;
3551 {
3552 rtx temp;
3553
3554 size = convert_modes (Pmode, ptr_mode, size, 1);
3555 if (CONSTANT_P (size))
3556 anti_adjust_stack (plus_constant (size, extra));
3557 else if (GET_CODE (size) == REG && extra == 0)
3558 anti_adjust_stack (size);
3559 else
3560 {
3561 temp = copy_to_mode_reg (Pmode, size);
3562 if (extra != 0)
3563 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3564 temp, 0, OPTAB_LIB_WIDEN);
3565 anti_adjust_stack (temp);
3566 }
3567
3568 #ifndef STACK_GROWS_DOWNWARD
3569 if (0)
3570 #else
3571 if (1)
3572 #endif
3573 {
3574 temp = virtual_outgoing_args_rtx;
3575 if (extra != 0 && below)
3576 temp = plus_constant (temp, extra);
3577 }
3578 else
3579 {
3580 if (GET_CODE (size) == CONST_INT)
3581 temp = plus_constant (virtual_outgoing_args_rtx,
3582 -INTVAL (size) - (below ? 0 : extra));
3583 else if (extra != 0 && !below)
3584 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3585 negate_rtx (Pmode, plus_constant (size, extra)));
3586 else
3587 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3588 negate_rtx (Pmode, size));
3589 }
3590
3591 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3592 }
3593
3594 #ifdef PUSH_ROUNDING
3595
3596 /* Emit single push insn. */
3597
3598 static void
3599 emit_single_push_insn (mode, x, type)
3600 rtx x;
3601 enum machine_mode mode;
3602 tree type;
3603 {
3604 rtx dest_addr;
3605 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3606 rtx dest;
3607 enum insn_code icode;
3608 insn_operand_predicate_fn pred;
3609
3610 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3611 /* If there is a push pattern, use it. Otherwise fall back to the old
3612 way of handing a MEM that represents the push operation to the move expander. */
3613 icode = push_optab->handlers[(int) mode].insn_code;
3614 if (icode != CODE_FOR_nothing)
3615 {
3616 if (((pred = insn_data[(int) icode].operand[0].predicate)
3617 && !((*pred) (x, mode))))
3618 x = force_reg (mode, x);
3619 emit_insn (GEN_FCN (icode) (x));
3620 return;
3621 }
3622 if (GET_MODE_SIZE (mode) == rounded_size)
3623 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3624 else
3625 {
3626 #ifdef STACK_GROWS_DOWNWARD
3627 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3628 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3629 #else
3630 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3631 GEN_INT (rounded_size));
3632 #endif
3633 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3634 }
3635
3636 dest = gen_rtx_MEM (mode, dest_addr);
3637
3638 if (type != 0)
3639 {
3640 set_mem_attributes (dest, type, 1);
3641
3642 if (flag_optimize_sibling_calls)
3643 /* Function incoming arguments may overlap with sibling call
3644 outgoing arguments and we cannot allow reordering of reads
3645 from function arguments with stores to outgoing arguments
3646 of sibling calls. */
3647 set_mem_alias_set (dest, 0);
3648 }
3649 emit_move_insn (dest, x);
3650 }
3651 #endif
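
/* Example of the rounding case above (assuming PUSH_ROUNDING rounds to
   4-byte slots and a downward-growing stack): pushing a 2-byte HImode
   value fails the GET_MODE_SIZE == rounded_size test, so instead of the
   plain STACK_PUSH_CODE address a (pre_modify sp (plus sp -4)) address
   is built, keeping the stack pointer adjustment equal to the rounded
   size.  */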
3652
3653 /* Generate code to push X onto the stack, assuming it has mode MODE and
3654 type TYPE.
3655 MODE is redundant except when X is a CONST_INT (since they don't
3656 carry mode info).
3657 SIZE is an rtx for the size of data to be copied (in bytes),
3658 needed only if X is BLKmode.
3659
3660 ALIGN (in bits) is maximum alignment we can assume.
3661
3662 If PARTIAL and REG are both nonzero, then copy that many of the first
3663 words of X into registers starting with REG, and push the rest of X.
3664 The amount of space pushed is decreased by PARTIAL words,
3665 rounded *down* to a multiple of PARM_BOUNDARY.
3666 REG must be a hard register in this case.
3667 If REG is zero but PARTIAL is not, take all other actions for an
3668 argument partially in registers, but do not actually load any
3669 registers.
3670
3671 EXTRA is the amount in bytes of extra space to leave next to this arg.
3672 This is ignored if an argument block has already been allocated.
3673
3674 On a machine that lacks real push insns, ARGS_ADDR is the address of
3675 the bottom of the argument block for this call. We use indexing off there
3676 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
3677 argument block has not been preallocated.
3678
3679 ARGS_SO_FAR is the size of args previously pushed for this call.
3680
3681 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3682 for arguments passed in registers. If nonzero, it will be the number
3683 of bytes required. */
3684
3685 void
3686 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3687 args_addr, args_so_far, reg_parm_stack_space,
3688 alignment_pad)
3689 rtx x;
3690 enum machine_mode mode;
3691 tree type;
3692 rtx size;
3693 unsigned int align;
3694 int partial;
3695 rtx reg;
3696 int extra;
3697 rtx args_addr;
3698 rtx args_so_far;
3699 int reg_parm_stack_space;
3700 rtx alignment_pad;
3701 {
3702 rtx xinner;
3703 enum direction stack_direction
3704 #ifdef STACK_GROWS_DOWNWARD
3705 = downward;
3706 #else
3707 = upward;
3708 #endif
3709
3710 /* Decide where to pad the argument: `downward' for below,
3711 `upward' for above, or `none' for don't pad it.
3712 Default is below for small data on big-endian machines; else above. */
3713 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3714
3715 /* Invert direction if stack is post-decrement.
3716 FIXME: why? */
3717 if (STACK_PUSH_CODE == POST_DEC)
3718 if (where_pad != none)
3719 where_pad = (where_pad == downward ? upward : downward);
3720
3721 xinner = x = protect_from_queue (x, 0);
3722
3723 if (mode == BLKmode)
3724 {
3725 /* Copy a block into the stack, entirely or partially. */
3726
3727 rtx temp;
3728 int used = partial * UNITS_PER_WORD;
3729 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3730 int skip;
3731
3732 if (size == 0)
3733 abort ();
3734
3735 used -= offset;
3736
3737 /* USED is now the # of bytes we need not copy to the stack
3738 because registers will take care of them. */
3739
3740 if (partial != 0)
3741 xinner = adjust_address (xinner, BLKmode, used);
3742
3743 /* If the partial register-part of the arg counts in its stack size,
3744 skip the part of stack space corresponding to the registers.
3745 Otherwise, start copying to the beginning of the stack space,
3746 by setting SKIP to 0. */
3747 skip = (reg_parm_stack_space == 0) ? 0 : used;
3748
3749 #ifdef PUSH_ROUNDING
3750 /* Do it with several push insns if that doesn't take lots of insns
3751 and if there is no difficulty with push insns that skip bytes
3752 on the stack for alignment purposes. */
3753 if (args_addr == 0
3754 && PUSH_ARGS
3755 && GET_CODE (size) == CONST_INT
3756 && skip == 0
3757 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3758 /* Here we avoid the case of a structure whose weak alignment
3759 forces many pushes of a small amount of data,
3760 and such small pushes do rounding that causes trouble. */
3761 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3762 || align >= BIGGEST_ALIGNMENT
3763 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3764 == (align / BITS_PER_UNIT)))
3765 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3766 {
3767 /* Push padding now if padding above and stack grows down,
3768 or if padding below and stack grows up.
3769 But if space already allocated, this has already been done. */
3770 if (extra && args_addr == 0
3771 && where_pad != none && where_pad != stack_direction)
3772 anti_adjust_stack (GEN_INT (extra));
3773
3774 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3775 }
3776 else
3777 #endif /* PUSH_ROUNDING */
3778 {
3779 rtx target;
3780
3781 /* Otherwise make space on the stack and copy the data
3782 to the address of that space. */
3783
3784 /* Deduct words put into registers from the size we must copy. */
3785 if (partial != 0)
3786 {
3787 if (GET_CODE (size) == CONST_INT)
3788 size = GEN_INT (INTVAL (size) - used);
3789 else
3790 size = expand_binop (GET_MODE (size), sub_optab, size,
3791 GEN_INT (used), NULL_RTX, 0,
3792 OPTAB_LIB_WIDEN);
3793 }
3794
3795 /* Get the address of the stack space.
3796 In this case, we do not deal with EXTRA separately.
3797 A single stack adjust will do. */
3798 if (! args_addr)
3799 {
3800 temp = push_block (size, extra, where_pad == downward);
3801 extra = 0;
3802 }
3803 else if (GET_CODE (args_so_far) == CONST_INT)
3804 temp = memory_address (BLKmode,
3805 plus_constant (args_addr,
3806 skip + INTVAL (args_so_far)));
3807 else
3808 temp = memory_address (BLKmode,
3809 plus_constant (gen_rtx_PLUS (Pmode,
3810 args_addr,
3811 args_so_far),
3812 skip));
3813
3814 if (!ACCUMULATE_OUTGOING_ARGS)
3815 {
3816 /* If the source is referenced relative to the stack pointer,
3817 copy it to another register to stabilize it. We do not need
3818 to do this if we know that we won't be changing sp. */
3819
3820 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3821 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3822 temp = copy_to_reg (temp);
3823 }
3824
3825 target = gen_rtx_MEM (BLKmode, temp);
3826
3827 if (type != 0)
3828 {
3829 set_mem_attributes (target, type, 1);
3830 /* Function incoming arguments may overlap with sibling call
3831 outgoing arguments and we cannot allow reordering of reads
3832 from function arguments with stores to outgoing arguments
3833 of sibling calls. */
3834 set_mem_alias_set (target, 0);
3835 }
3836
3837 /* ALIGN may well promise stricter alignment than TYPE's, e.g. due to
3838 PARM_BOUNDARY. Assume the caller isn't lying. */
3839 set_mem_align (target, align);
3840
3841 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3842 }
3843 }
3844 else if (partial > 0)
3845 {
3846 /* Scalar partly in registers. */
3847
3848 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3849 int i;
3850 int not_stack;
3851 /* # words of start of argument
3852 that we must make space for but need not store. */
3853 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3854 int args_offset = INTVAL (args_so_far);
3855 int skip;
3856
3857 /* Push padding now if padding above and stack grows down,
3858 or if padding below and stack grows up.
3859 But if space already allocated, this has already been done. */
3860 if (extra && args_addr == 0
3861 && where_pad != none && where_pad != stack_direction)
3862 anti_adjust_stack (GEN_INT (extra));
3863
3864 /* If we make space by pushing it, we might as well push
3865 the real data. Otherwise, we can leave OFFSET nonzero
3866 and leave the space uninitialized. */
3867 if (args_addr == 0)
3868 offset = 0;
3869
3870 /* Now NOT_STACK gets the number of words that we don't need to
3871 allocate on the stack. */
3872 not_stack = partial - offset;
3873
3874 /* If the partial register-part of the arg counts in its stack size,
3875 skip the part of stack space corresponding to the registers.
3876 Otherwise, start copying to the beginning of the stack space,
3877 by setting SKIP to 0. */
3878 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3879
3880 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3881 x = validize_mem (force_const_mem (mode, x));
3882
3883 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3884 SUBREGs of such registers are not allowed. */
3885 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3886 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3887 x = copy_to_reg (x);
3888
3889 /* Loop over all the words allocated on the stack for this arg. */
3890 /* We can do it by words, because any scalar bigger than a word
3891 has a size that is a multiple of a word. */
3892 #ifndef PUSH_ARGS_REVERSED
3893 for (i = not_stack; i < size; i++)
3894 #else
3895 for (i = size - 1; i >= not_stack; i--)
3896 #endif
3897 if (i >= not_stack + offset)
3898 emit_push_insn (operand_subword_force (x, i, mode),
3899 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3900 0, args_addr,
3901 GEN_INT (args_offset + ((i - not_stack + skip)
3902 * UNITS_PER_WORD)),
3903 reg_parm_stack_space, alignment_pad);
3904 }
3905 else
3906 {
3907 rtx addr;
3908 rtx target = NULL_RTX;
3909 rtx dest;
3910
3911 /* Push padding now if padding above and stack grows down,
3912 or if padding below and stack grows up.
3913 But if space already allocated, this has already been done. */
3914 if (extra && args_addr == 0
3915 && where_pad != none && where_pad != stack_direction)
3916 anti_adjust_stack (GEN_INT (extra));
3917
3918 #ifdef PUSH_ROUNDING
3919 if (args_addr == 0 && PUSH_ARGS)
3920 emit_single_push_insn (mode, x, type);
3921 else
3922 #endif
3923 {
3924 if (GET_CODE (args_so_far) == CONST_INT)
3925 addr
3926 = memory_address (mode,
3927 plus_constant (args_addr,
3928 INTVAL (args_so_far)));
3929 else
3930 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3931 args_so_far));
3932 target = addr;
3933 dest = gen_rtx_MEM (mode, addr);
3934 if (type != 0)
3935 {
3936 set_mem_attributes (dest, type, 1);
3937 /* Function incoming arguments may overlap with sibling call
3938 outgoing arguments and we cannot allow reordering of reads
3939 from function arguments with stores to outgoing arguments
3940 of sibling calls. */
3941 set_mem_alias_set (dest, 0);
3942 }
3943
3944 emit_move_insn (dest, x);
3945 }
3946 }
3947
3948 /* If part should go in registers, copy that part
3949 into the appropriate registers. Do this now, at the end,
3950 since mem-to-mem copies above may do function calls. */
3951 if (partial > 0 && reg != 0)
3952 {
3953 /* Handle calls that pass values in multiple non-contiguous locations.
3954 The Irix 6 ABI has examples of this. */
3955 if (GET_CODE (reg) == PARALLEL)
3956 emit_group_load (reg, x, -1); /* ??? size? */
3957 else
3958 move_block_to_reg (REGNO (reg), x, partial, mode);
3959 }
3960
3961 if (extra && args_addr == 0 && where_pad == stack_direction)
3962 anti_adjust_stack (GEN_INT (extra));
3963
3964 if (alignment_pad && args_addr == 0)
3965 anti_adjust_stack (alignment_pad);
3966 }
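/* Worked example, with hypothetical numbers, of the PARTIAL handling in
   the BLKmode path above: with UNITS_PER_WORD == 4, PARM_BOUNDARY == 64
   and PARTIAL == 3,

       used   = 3 * 4  = 12   bytes already passed in registers,
       offset = 12 % 8 = 4,
       used  -= offset -> 8,

   so only the first 8 bytes are skipped on the stack, i.e. the space
   saved is PARTIAL words rounded *down* to a multiple of PARM_BOUNDARY
   as the header comment states; the remaining 4 register-passed bytes
   are stored to the stack as well, keeping the block aligned.  */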
3967 \f
3968 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3969 operations. */
3970
3971 static rtx
3972 get_subtarget (x)
3973 rtx x;
3974 {
3975 return ((x == 0
3976 /* Only registers can be subtargets. */
3977 || GET_CODE (x) != REG
3978 /* If the register is readonly, it can't be set more than once. */
3979 || RTX_UNCHANGING_P (x)
3980 /* Don't use hard regs to avoid extending their life. */
3981 || REGNO (x) < FIRST_PSEUDO_REGISTER
3982 /* Avoid subtargets inside loops,
3983 since they hide some invariant expressions. */
3984 || preserve_subexpressions_p ())
3985 ? 0 : x);
3986 }
3987
3988 /* Expand an assignment that stores the value of FROM into TO.
3989 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3990 (This may contain a QUEUED rtx;
3991 if the value is constant, this rtx is a constant.)
3992 Otherwise, the returned value is NULL_RTX.
3993
3994 SUGGEST_REG is no longer actually used.
3995 It used to mean, copy the value through a register
3996 and return that register, if that is possible.
3997 We now use WANT_VALUE to decide whether to do this. */
3998
3999 rtx
4000 expand_assignment (to, from, want_value, suggest_reg)
4001 tree to, from;
4002 int want_value;
4003 int suggest_reg ATTRIBUTE_UNUSED;
4004 {
4005 rtx to_rtx = 0;
4006 rtx result;
4007
4008 /* Don't crash if the lhs of the assignment was erroneous. */
4009
4010 if (TREE_CODE (to) == ERROR_MARK)
4011 {
4012 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
4013 return want_value ? result : NULL_RTX;
4014 }
4015
4016 /* Assignment of a structure component needs special treatment
4017 if the structure component's rtx is not simply a MEM.
4018 Assignment of an array element at a constant index, and assignment of
4019 an array element in an unaligned packed structure field, have the same
4020 problem. */
4021
4022 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
4023 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
4024 {
4025 enum machine_mode mode1;
4026 HOST_WIDE_INT bitsize, bitpos;
4027 rtx orig_to_rtx;
4028 tree offset;
4029 int unsignedp;
4030 int volatilep = 0;
4031 tree tem;
4032
4033 push_temp_slots ();
4034 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4035 &unsignedp, &volatilep);
4036
4037 /* If we are going to use store_bit_field and extract_bit_field,
4038 make sure to_rtx will be safe for multiple use. */
4039
4040 if (mode1 == VOIDmode && want_value)
4041 tem = stabilize_reference (tem);
4042
4043 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
4044
4045 if (offset != 0)
4046 {
4047 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4048
4049 if (GET_CODE (to_rtx) != MEM)
4050 abort ();
4051
4052 #ifdef POINTERS_EXTEND_UNSIGNED
4053 if (GET_MODE (offset_rtx) != Pmode)
4054 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4055 #else
4056 if (GET_MODE (offset_rtx) != ptr_mode)
4057 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4058 #endif
4059
4060 /* A constant address in TO_RTX can have VOIDmode; we must not try
4061 to call force_reg in that case, so avoid it. */
4062 if (GET_CODE (to_rtx) == MEM
4063 && GET_MODE (to_rtx) == BLKmode
4064 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4065 && bitsize > 0
4066 && (bitpos % bitsize) == 0
4067 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4068 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4069 {
4070 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4071 bitpos = 0;
4072 }
4073
4074 to_rtx = offset_address (to_rtx, offset_rtx,
4075 highest_pow2_factor_for_type (TREE_TYPE (to),
4076 offset));
4077 }
4078
4079 if (GET_CODE (to_rtx) == MEM)
4080 {
4081 /* If the field is at offset zero, we could have been given the
4082 DECL_RTX of the parent struct. Don't munge it. */
4083 to_rtx = shallow_copy_rtx (to_rtx);
4084
4085 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4086 }
4087
4088 /* Deal with volatile and readonly fields. The former is only done
4089 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4090 if (volatilep && GET_CODE (to_rtx) == MEM)
4091 {
4092 if (to_rtx == orig_to_rtx)
4093 to_rtx = copy_rtx (to_rtx);
4094 MEM_VOLATILE_P (to_rtx) = 1;
4095 }
4096
4097 if (TREE_CODE (to) == COMPONENT_REF
4098 && TREE_READONLY (TREE_OPERAND (to, 1)))
4099 {
4100 if (to_rtx == orig_to_rtx)
4101 to_rtx = copy_rtx (to_rtx);
4102 RTX_UNCHANGING_P (to_rtx) = 1;
4103 }
4104
4105 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4106 {
4107 if (to_rtx == orig_to_rtx)
4108 to_rtx = copy_rtx (to_rtx);
4109 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4110 }
4111
4112 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4113 (want_value
4114 /* Spurious cast for HPUX compiler. */
4115 ? ((enum machine_mode)
4116 TYPE_MODE (TREE_TYPE (to)))
4117 : VOIDmode),
4118 unsignedp, TREE_TYPE (tem), get_alias_set (to));
4119
4120 preserve_temp_slots (result);
4121 free_temp_slots ();
4122 pop_temp_slots ();
4123
4124 /* If the value is meaningful, convert RESULT to the proper mode.
4125 Otherwise, return nothing. */
4126 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4127 TYPE_MODE (TREE_TYPE (from)),
4128 result,
4129 TREE_UNSIGNED (TREE_TYPE (to)))
4130 : NULL_RTX);
4131 }
4132
4133 /* If the rhs is a function call and its value is not an aggregate,
4134 call the function before we start to compute the lhs.
4135 This is needed for correct code for cases such as
4136 val = setjmp (buf) on machines where reference to val
4137 requires loading up part of an address in a separate insn.
4138
4139 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4140 since it might be a promoted variable where the zero- or sign-extension
4141 needs to be done. Handling this in the normal way is safe because no
4142 computation is done before the call. */
4143 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
4144 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4145 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4146 && GET_CODE (DECL_RTL (to)) == REG))
4147 {
4148 rtx value;
4149
4150 push_temp_slots ();
4151 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4152 if (to_rtx == 0)
4153 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4154
4155 /* Handle calls that return values in multiple non-contiguous locations.
4156 The Irix 6 ABI has examples of this. */
4157 if (GET_CODE (to_rtx) == PARALLEL)
4158 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
4159 else if (GET_MODE (to_rtx) == BLKmode)
4160 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4161 else
4162 {
4163 #ifdef POINTERS_EXTEND_UNSIGNED
4164 if (POINTER_TYPE_P (TREE_TYPE (to))
4165 && GET_MODE (to_rtx) != GET_MODE (value))
4166 value = convert_memory_address (GET_MODE (to_rtx), value);
4167 #endif
4168 emit_move_insn (to_rtx, value);
4169 }
4170 preserve_temp_slots (to_rtx);
4171 free_temp_slots ();
4172 pop_temp_slots ();
4173 return want_value ? to_rtx : NULL_RTX;
4174 }
4175
4176 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4177 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4178
4179 if (to_rtx == 0)
4180 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4181
4182 /* Don't move directly into a return register. */
4183 if (TREE_CODE (to) == RESULT_DECL
4184 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4185 {
4186 rtx temp;
4187
4188 push_temp_slots ();
4189 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4190
4191 if (GET_CODE (to_rtx) == PARALLEL)
4192 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
4193 else
4194 emit_move_insn (to_rtx, temp);
4195
4196 preserve_temp_slots (to_rtx);
4197 free_temp_slots ();
4198 pop_temp_slots ();
4199 return want_value ? to_rtx : NULL_RTX;
4200 }
4201
4202 /* In case we are returning the contents of an object which overlaps
4203 the place the value is being stored, use a safe function when copying
4204 a value through a pointer into a structure value return block. */
4205 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4206 && current_function_returns_struct
4207 && !current_function_returns_pcc_struct)
4208 {
4209 rtx from_rtx, size;
4210
4211 push_temp_slots ();
4212 size = expr_size (from);
4213 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4214
4215 if (TARGET_MEM_FUNCTIONS)
4216 emit_library_call (memmove_libfunc, LCT_NORMAL,
4217 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4218 XEXP (from_rtx, 0), Pmode,
4219 convert_to_mode (TYPE_MODE (sizetype),
4220 size, TREE_UNSIGNED (sizetype)),
4221 TYPE_MODE (sizetype));
4222 else
4223 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4224 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4225 XEXP (to_rtx, 0), Pmode,
4226 convert_to_mode (TYPE_MODE (integer_type_node),
4227 size,
4228 TREE_UNSIGNED (integer_type_node)),
4229 TYPE_MODE (integer_type_node));
4230
4231 preserve_temp_slots (to_rtx);
4232 free_temp_slots ();
4233 pop_temp_slots ();
4234 return want_value ? to_rtx : NULL_RTX;
4235 }
4236
4237 /* Compute FROM and store the value in the rtx we got. */
4238
4239 push_temp_slots ();
4240 result = store_expr (from, to_rtx, want_value);
4241 preserve_temp_slots (result);
4242 free_temp_slots ();
4243 pop_temp_slots ();
4244 return want_value ? result : NULL_RTX;
4245 }
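/* Illustrative sketch, with hypothetical trees, of how two of the paths
   above are reached: for an assignment such as  s.f = x  where f is a
   bit-field, the COMPONENT_REF branch does roughly

       tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
                                  &unsignedp, &volatilep);
       to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
       ... store_field (to_rtx, bitsize, bitpos, mode1, from, ...);

   whereas a plain  v = f ()  returning a non-aggregate scalar takes the
   CALL_EXPR branch (subject to the extra conditions tested there),
   expanding the call first and then moving its value into the rtx
   for v.  */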
4246
4247 /* Generate code for computing expression EXP,
4248 and storing the value into TARGET.
4249 TARGET may contain a QUEUED rtx.
4250
4251 If WANT_VALUE is nonzero, return a copy of the value
4252 not in TARGET, so that we can be sure to use the proper
4253 value in a containing expression even if TARGET has something
4254 else stored in it. If possible, we copy the value through a pseudo
4255 and return that pseudo. Or, if the value is constant, we try to
4256 return the constant. In some cases, we return a pseudo
4257 copied *from* TARGET.
4258
4259 If the mode is BLKmode then we may return TARGET itself.
4260 It turns out that in BLKmode it doesn't cause a problem,
4261 because C has no operators that could combine two different
4262 assignments into the same BLKmode object with different values
4263 with no sequence point. Will other languages need this to
4264 be more thorough?
4265
4266 If WANT_VALUE is 0, we return NULL, to make sure
4267 to catch quickly any cases where the caller uses the value
4268 and fails to set WANT_VALUE. */
4269
4270 rtx
4271 store_expr (exp, target, want_value)
4272 tree exp;
4273 rtx target;
4274 int want_value;
4275 {
4276 rtx temp;
4277 int dont_return_target = 0;
4278 int dont_store_target = 0;
4279
4280 if (TREE_CODE (exp) == COMPOUND_EXPR)
4281 {
4282 /* Perform first part of compound expression, then assign from second
4283 part. */
4284 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4285 emit_queue ();
4286 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4287 }
4288 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4289 {
4290 /* For conditional expression, get safe form of the target. Then
4291 test the condition, doing the appropriate assignment on either
4292 side. This avoids the creation of unnecessary temporaries.
4293 For non-BLKmode, it is more efficient not to do this. */
4294
4295 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4296
4297 emit_queue ();
4298 target = protect_from_queue (target, 1);
4299
4300 do_pending_stack_adjust ();
4301 NO_DEFER_POP;
4302 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4303 start_cleanup_deferral ();
4304 store_expr (TREE_OPERAND (exp, 1), target, 0);
4305 end_cleanup_deferral ();
4306 emit_queue ();
4307 emit_jump_insn (gen_jump (lab2));
4308 emit_barrier ();
4309 emit_label (lab1);
4310 start_cleanup_deferral ();
4311 store_expr (TREE_OPERAND (exp, 2), target, 0);
4312 end_cleanup_deferral ();
4313 emit_queue ();
4314 emit_label (lab2);
4315 OK_DEFER_POP;
4316
4317 return want_value ? target : NULL_RTX;
4318 }
4319 else if (queued_subexp_p (target))
4320 /* If target contains a postincrement, let's not risk
4321 using it as the place to generate the rhs. */
4322 {
4323 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4324 {
4325 /* Expand EXP into a new pseudo. */
4326 temp = gen_reg_rtx (GET_MODE (target));
4327 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4328 }
4329 else
4330 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4331
4332 /* If target is volatile, ANSI requires accessing the value
4333 *from* the target, if it is accessed. So make that happen.
4334 In no case return the target itself. */
4335 if (! MEM_VOLATILE_P (target) && want_value)
4336 dont_return_target = 1;
4337 }
4338 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4339 && GET_MODE (target) != BLKmode)
4340 /* If target is in memory and caller wants value in a register instead,
4341 arrange that. Pass TARGET as target for expand_expr so that,
4342 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4343 We know expand_expr will not use the target in that case.
4344 Don't do this if TARGET is volatile because we are supposed
4345 to write it and then read it. */
4346 {
4347 temp = expand_expr (exp, target, GET_MODE (target), 0);
4348 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4349 {
4350 /* If TEMP is already in the desired TARGET, only copy it from
4351 memory and don't store it there again. */
4352 if (temp == target
4353 || (rtx_equal_p (temp, target)
4354 && ! side_effects_p (temp) && ! side_effects_p (target)))
4355 dont_store_target = 1;
4356 temp = copy_to_reg (temp);
4357 }
4358 dont_return_target = 1;
4359 }
4360 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4361 /* If this is a scalar in a register that is stored in a wider mode
4362 than the declared mode, compute the result into its declared mode
4363 and then convert to the wider mode. Our value is the computed
4364 expression. */
4365 {
4366 rtx inner_target = 0;
4367
4368 /* If we don't want a value, we can do the conversion inside EXP,
4369 which will often result in some optimizations. Do the conversion
4370 in two steps: first change the signedness, if needed, then
4371 the extend. But don't do this if the type of EXP is a subtype
4372 of something else since then the conversion might involve
4373 more than just converting modes. */
4374 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4375 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4376 {
4377 if (TREE_UNSIGNED (TREE_TYPE (exp))
4378 != SUBREG_PROMOTED_UNSIGNED_P (target))
4379 exp = convert
4380 ((*lang_hooks.types.signed_or_unsigned_type)
4381 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4382
4383 exp = convert ((*lang_hooks.types.type_for_mode)
4384 (GET_MODE (SUBREG_REG (target)),
4385 SUBREG_PROMOTED_UNSIGNED_P (target)),
4386 exp);
4387
4388 inner_target = SUBREG_REG (target);
4389 }
4390
4391 temp = expand_expr (exp, inner_target, VOIDmode, 0);
4392
4393 /* If TEMP is a volatile MEM and we want a result value, make
4394 the access now so it gets done only once. Likewise if
4395 it contains TARGET. */
4396 if (GET_CODE (temp) == MEM && want_value
4397 && (MEM_VOLATILE_P (temp)
4398 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4399 temp = copy_to_reg (temp);
4400
4401 /* If TEMP is a VOIDmode constant, use convert_modes to make
4402 sure that we properly convert it. */
4403 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4404 {
4405 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4406 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4407 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4408 GET_MODE (target), temp,
4409 SUBREG_PROMOTED_UNSIGNED_P (target));
4410 }
4411
4412 convert_move (SUBREG_REG (target), temp,
4413 SUBREG_PROMOTED_UNSIGNED_P (target));
4414
4415 /* If we promoted a constant, change the mode back down to match
4416 target. Otherwise, the caller might get confused by a result whose
4417 mode is larger than expected. */
4418
4419 if (want_value && GET_MODE (temp) != GET_MODE (target))
4420 {
4421 if (GET_MODE (temp) != VOIDmode)
4422 {
4423 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4424 SUBREG_PROMOTED_VAR_P (temp) = 1;
4425 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4426 SUBREG_PROMOTED_UNSIGNED_P (target));
4427 }
4428 else
4429 temp = convert_modes (GET_MODE (target),
4430 GET_MODE (SUBREG_REG (target)),
4431 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4432 }
4433
4434 return want_value ? temp : NULL_RTX;
4435 }
4436 else
4437 {
4438 temp = expand_expr (exp, target, GET_MODE (target), 0);
4439 /* Return TARGET if it's a specified hardware register.
4440 If TARGET is a volatile mem ref, either return TARGET
4441 or return a reg copied *from* TARGET; ANSI requires this.
4442
4443 Otherwise, if TEMP is not TARGET, return TEMP
4444 if it is constant (for efficiency),
4445 or if we really want the correct value. */
4446 if (!(target && GET_CODE (target) == REG
4447 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4448 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4449 && ! rtx_equal_p (temp, target)
4450 && (CONSTANT_P (temp) || want_value))
4451 dont_return_target = 1;
4452 }
4453
4454 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4455 the same as that of TARGET, adjust the constant. This is needed, for
4456 example, in case it is a CONST_DOUBLE and we want only a word-sized
4457 value. */
4458 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4459 && TREE_CODE (exp) != ERROR_MARK
4460 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4461 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4462 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4463
4464 /* If value was not generated in the target, store it there.
4465 Convert the value to TARGET's type first if necessary.
4466 If TEMP and TARGET compare equal according to rtx_equal_p, but
4467 one or both of them are volatile memory refs, we have to distinguish
4468 two cases:
4469 - expand_expr has used TARGET. In this case, we must not generate
4470 another copy. This can be detected by TARGET being equal according
4471 to == .
4472 - expand_expr has not used TARGET - that means that the source just
4473 happens to have the same RTX form. Since temp will have been created
4474 by expand_expr, it will compare unequal according to == .
4475 We must generate a copy in this case, to reach the correct number
4476 of volatile memory references. */
4477
4478 if ((! rtx_equal_p (temp, target)
4479 || (temp != target && (side_effects_p (temp)
4480 || side_effects_p (target))))
4481 && TREE_CODE (exp) != ERROR_MARK
4482 && ! dont_store_target
4483 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4484 but TARGET is not a valid memory reference, TEMP will differ
4485 from TARGET although it is really the same location. */
4486 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4487 || target != DECL_RTL_IF_SET (exp))
4488 /* If there's nothing to copy, don't bother. Don't call expr_size
4489 unless necessary, because some front ends' expr_size hooks (for
4490 example the C++ one) abort on objects that are not supposed to be
4491 bit-copied or bit-initialized. */
4492 && expr_size (exp) != const0_rtx)
4493 {
4494 target = protect_from_queue (target, 1);
4495 if (GET_MODE (temp) != GET_MODE (target)
4496 && GET_MODE (temp) != VOIDmode)
4497 {
4498 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4499 if (dont_return_target)
4500 {
4501 /* In this case, we will return TEMP,
4502 so make sure it has the proper mode.
4503 But don't forget to store the value into TARGET. */
4504 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4505 emit_move_insn (target, temp);
4506 }
4507 else
4508 convert_move (target, temp, unsignedp);
4509 }
4510
4511 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4512 {
4513 /* Handle copying a string constant into an array. The string
4514 constant may be shorter than the array. So copy just the string's
4515 actual length, and clear the rest. First get the size of the data
4516 type of the string, which is actually the size of the target. */
4517 rtx size = expr_size (exp);
4518
4519 if (GET_CODE (size) == CONST_INT
4520 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4521 emit_block_move (target, temp, size, BLOCK_OP_NORMAL);
4522 else
4523 {
4524 /* Compute the size of the data to copy from the string. */
4525 tree copy_size
4526 = size_binop (MIN_EXPR,
4527 make_tree (sizetype, size),
4528 size_int (TREE_STRING_LENGTH (exp)));
4529 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4530 VOIDmode, 0);
4531 rtx label = 0;
4532
4533 /* Copy that much. */
4534 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4535 emit_block_move (target, temp, copy_size_rtx, BLOCK_OP_NORMAL);
4536
4537 /* Figure out how much is left in TARGET that we have to clear.
4538 Do all calculations in ptr_mode. */
4539 if (GET_CODE (copy_size_rtx) == CONST_INT)
4540 {
4541 size = plus_constant (size, -INTVAL (copy_size_rtx));
4542 target = adjust_address (target, BLKmode,
4543 INTVAL (copy_size_rtx));
4544 }
4545 else
4546 {
4547 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4548 copy_size_rtx, NULL_RTX, 0,
4549 OPTAB_LIB_WIDEN);
4550
4551 #ifdef POINTERS_EXTEND_UNSIGNED
4552 if (GET_MODE (copy_size_rtx) != Pmode)
4553 copy_size_rtx = convert_memory_address (Pmode,
4554 copy_size_rtx);
4555 #endif
4556
4557 target = offset_address (target, copy_size_rtx,
4558 highest_pow2_factor (copy_size));
4559 label = gen_label_rtx ();
4560 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4561 GET_MODE (size), 0, label);
4562 }
4563
4564 if (size != const0_rtx)
4565 clear_storage (target, size);
4566
4567 if (label)
4568 emit_label (label);
4569 }
4570 }
4571 /* Handle calls that return values in multiple non-contiguous locations.
4572 The Irix 6 ABI has examples of this. */
4573 else if (GET_CODE (target) == PARALLEL)
4574 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4575 else if (GET_MODE (temp) == BLKmode)
4576 emit_block_move (target, temp, expr_size (exp), BLOCK_OP_NORMAL);
4577 else
4578 emit_move_insn (target, temp);
4579 }
4580
4581 /* If we don't want a value, return NULL_RTX. */
4582 if (! want_value)
4583 return NULL_RTX;
4584
4585 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4586 ??? The latter test doesn't seem to make sense. */
4587 else if (dont_return_target && GET_CODE (temp) != MEM)
4588 return temp;
4589
4590 /* Return TARGET itself if it is a hard register. */
4591 else if (want_value && GET_MODE (target) != BLKmode
4592 && ! (GET_CODE (target) == REG
4593 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4594 return copy_to_reg (target);
4595
4596 else
4597 return target;
4598 }
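/* Usage note, for illustration only: a caller that only needs the side
   effect of the store, as expand_assignment above does when WANT_VALUE
   is zero, can simply write

       store_expr (from, to_rtx, 0);

   and ignore the (NULL_RTX) result, while a caller that embeds the
   assignment in a larger expression passes a nonzero WANT_VALUE and must
   accept either TARGET itself, a pseudo copied from it, or a constant.  */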
4599 \f
4600 /* Return 1 if EXP just contains zeros. */
4601
4602 static int
4603 is_zeros_p (exp)
4604 tree exp;
4605 {
4606 tree elt;
4607
4608 switch (TREE_CODE (exp))
4609 {
4610 case CONVERT_EXPR:
4611 case NOP_EXPR:
4612 case NON_LVALUE_EXPR:
4613 case VIEW_CONVERT_EXPR:
4614 return is_zeros_p (TREE_OPERAND (exp, 0));
4615
4616 case INTEGER_CST:
4617 return integer_zerop (exp);
4618
4619 case COMPLEX_CST:
4620 return
4621 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4622
4623 case REAL_CST:
4624 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4625
4626 case VECTOR_CST:
4627 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4628 elt = TREE_CHAIN (elt))
4629 if (!is_zeros_p (TREE_VALUE (elt)))
4630 return 0;
4631
4632 return 1;
4633
4634 case CONSTRUCTOR:
4635 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4636 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4637 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4638 if (! is_zeros_p (TREE_VALUE (elt)))
4639 return 0;
4640
4641 return 1;
4642
4643 default:
4644 return 0;
4645 }
4646 }
4647
4648 /* Return 1 if EXP contains mostly (3/4) zeros. */
4649
4650 static int
4651 mostly_zeros_p (exp)
4652 tree exp;
4653 {
4654 if (TREE_CODE (exp) == CONSTRUCTOR)
4655 {
4656 int elts = 0, zeros = 0;
4657 tree elt = CONSTRUCTOR_ELTS (exp);
4658 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4659 {
4660 /* If there are no ranges of true bits, it is all zero. */
4661 return elt == NULL_TREE;
4662 }
4663 for (; elt; elt = TREE_CHAIN (elt))
4664 {
4665 /* We do not handle the case where the index is a RANGE_EXPR,
4666 so the statistic will be somewhat inaccurate.
4667 We do make a more accurate count in store_constructor itself,
4668 and since this function is only used for nested array elements,
4669 this should be close enough. */
4670 if (mostly_zeros_p (TREE_VALUE (elt)))
4671 zeros++;
4672 elts++;
4673 }
4674
4675 return 4 * zeros >= 3 * elts;
4676 }
4677
4678 return is_zeros_p (exp);
4679 }
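/* Worked example, with a hypothetical constructor: for an aggregate
   initializer with 16 elements of which 12 are (mostly) zero, the test
   above computes 4 * 12 >= 3 * 16, i.e. 48 >= 48, so the constructor
   counts as mostly zeros and store_constructor below will prefer to
   clear the whole object first and then store only the nonzero
   elements.  */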
4680 \f
4681 /* Helper function for store_constructor.
4682 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4683 TYPE is the type of the CONSTRUCTOR, not the element type.
4684 CLEARED is as for store_constructor.
4685 ALIAS_SET is the alias set to use for any stores.
4686
4687 This provides a recursive shortcut back to store_constructor when it isn't
4688 necessary to go through store_field. This is so that we can pass through
4689 the cleared field to let store_constructor know that we may not have to
4690 clear a substructure if the outer structure has already been cleared. */
4691
4692 static void
4693 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4694 alias_set)
4695 rtx target;
4696 unsigned HOST_WIDE_INT bitsize;
4697 HOST_WIDE_INT bitpos;
4698 enum machine_mode mode;
4699 tree exp, type;
4700 int cleared;
4701 int alias_set;
4702 {
4703 if (TREE_CODE (exp) == CONSTRUCTOR
4704 && bitpos % BITS_PER_UNIT == 0
4705 /* If we have a nonzero bitpos for a register target, then we just
4706 let store_field do the bitfield handling. This is unlikely to
4707 generate unnecessary clear instructions anyway. */
4708 && (bitpos == 0 || GET_CODE (target) == MEM))
4709 {
4710 if (GET_CODE (target) == MEM)
4711 target
4712 = adjust_address (target,
4713 GET_MODE (target) == BLKmode
4714 || 0 != (bitpos
4715 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4716 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4717
4718
4719 /* Update the alias set, if required. */
4720 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4721 && MEM_ALIAS_SET (target) != 0)
4722 {
4723 target = copy_rtx (target);
4724 set_mem_alias_set (target, alias_set);
4725 }
4726
4727 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4728 }
4729 else
4730 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4731 alias_set);
4732 }
4733
4734 /* Store the value of constructor EXP into the rtx TARGET.
4735 TARGET is either a REG or a MEM; we know it cannot conflict, since
4736 safe_from_p has been called.
4737 CLEARED is true if TARGET is known to have been zero'd.
4738 SIZE is the number of bytes of TARGET we are allowed to modify: this
4739 may not be the same as the size of EXP if we are assigning to a field
4740 which has been packed to exclude padding bits. */
4741
4742 static void
4743 store_constructor (exp, target, cleared, size)
4744 tree exp;
4745 rtx target;
4746 int cleared;
4747 HOST_WIDE_INT size;
4748 {
4749 tree type = TREE_TYPE (exp);
4750 #ifdef WORD_REGISTER_OPERATIONS
4751 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4752 #endif
4753
4754 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4755 || TREE_CODE (type) == QUAL_UNION_TYPE)
4756 {
4757 tree elt;
4758
4759 /* We either clear the aggregate or indicate the value is dead. */
4760 if ((TREE_CODE (type) == UNION_TYPE
4761 || TREE_CODE (type) == QUAL_UNION_TYPE)
4762 && ! cleared
4763 && ! CONSTRUCTOR_ELTS (exp))
4764 /* If the constructor is empty, clear the union. */
4765 {
4766 clear_storage (target, expr_size (exp));
4767 cleared = 1;
4768 }
4769
4770 /* If we are building a static constructor into a register,
4771 set the initial value as zero so we can fold the value into
4772 a constant. But if more than one register is involved,
4773 this probably loses. */
4774 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4775 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4776 {
4777 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4778 cleared = 1;
4779 }
4780
4781 /* If the constructor has fewer fields than the structure
4782 or if we are initializing the structure to mostly zeros,
4783 clear the whole structure first. Don't do this if TARGET is a
4784 register whose mode size isn't equal to SIZE since clear_storage
4785 can't handle this case. */
4786 else if (! cleared && size > 0
4787 && ((list_length (CONSTRUCTOR_ELTS (exp))
4788 != fields_length (type))
4789 || mostly_zeros_p (exp))
4790 && (GET_CODE (target) != REG
4791 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4792 == size)))
4793 {
4794 clear_storage (target, GEN_INT (size));
4795 cleared = 1;
4796 }
4797
4798 if (! cleared)
4799 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4800
4801 /* Store each element of the constructor into
4802 the corresponding field of TARGET. */
4803
4804 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4805 {
4806 tree field = TREE_PURPOSE (elt);
4807 tree value = TREE_VALUE (elt);
4808 enum machine_mode mode;
4809 HOST_WIDE_INT bitsize;
4810 HOST_WIDE_INT bitpos = 0;
4811 int unsignedp;
4812 tree offset;
4813 rtx to_rtx = target;
4814
4815 /* Just ignore missing fields.
4816 We cleared the whole structure, above,
4817 if any fields are missing. */
4818 if (field == 0)
4819 continue;
4820
4821 if (cleared && is_zeros_p (value))
4822 continue;
4823
4824 if (host_integerp (DECL_SIZE (field), 1))
4825 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4826 else
4827 bitsize = -1;
4828
4829 unsignedp = TREE_UNSIGNED (field);
4830 mode = DECL_MODE (field);
4831 if (DECL_BIT_FIELD (field))
4832 mode = VOIDmode;
4833
4834 offset = DECL_FIELD_OFFSET (field);
4835 if (host_integerp (offset, 0)
4836 && host_integerp (bit_position (field), 0))
4837 {
4838 bitpos = int_bit_position (field);
4839 offset = 0;
4840 }
4841 else
4842 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4843
4844 if (offset)
4845 {
4846 rtx offset_rtx;
4847
4848 if (contains_placeholder_p (offset))
4849 offset = build (WITH_RECORD_EXPR, sizetype,
4850 offset, make_tree (TREE_TYPE (exp), target));
4851
4852 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4853 if (GET_CODE (to_rtx) != MEM)
4854 abort ();
4855
4856 #ifdef POINTERS_EXTEND_UNSIGNED
4857 if (GET_MODE (offset_rtx) != Pmode)
4858 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4859 #else
4860 if (GET_MODE (offset_rtx) != ptr_mode)
4861 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4862 #endif
4863
4864 to_rtx = offset_address (to_rtx, offset_rtx,
4865 highest_pow2_factor (offset));
4866 }
4867
4868 if (TREE_READONLY (field))
4869 {
4870 if (GET_CODE (to_rtx) == MEM)
4871 to_rtx = copy_rtx (to_rtx);
4872
4873 RTX_UNCHANGING_P (to_rtx) = 1;
4874 }
4875
4876 #ifdef WORD_REGISTER_OPERATIONS
4877 /* If this initializes a field that is smaller than a word, at the
4878 start of a word, try to widen it to a full word.
4879 This special case allows us to output C++ member function
4880 initializations in a form that the optimizers can understand. */
4881 if (GET_CODE (target) == REG
4882 && bitsize < BITS_PER_WORD
4883 && bitpos % BITS_PER_WORD == 0
4884 && GET_MODE_CLASS (mode) == MODE_INT
4885 && TREE_CODE (value) == INTEGER_CST
4886 && exp_size >= 0
4887 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4888 {
4889 tree type = TREE_TYPE (value);
4890
4891 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4892 {
4893 type = (*lang_hooks.types.type_for_size)
4894 (BITS_PER_WORD, TREE_UNSIGNED (type));
4895 value = convert (type, value);
4896 }
4897
4898 if (BYTES_BIG_ENDIAN)
4899 value
4900 = fold (build (LSHIFT_EXPR, type, value,
4901 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4902 bitsize = BITS_PER_WORD;
4903 mode = word_mode;
4904 }
4905 #endif
4906
4907 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4908 && DECL_NONADDRESSABLE_P (field))
4909 {
4910 to_rtx = copy_rtx (to_rtx);
4911 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4912 }
4913
4914 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4915 value, type, cleared,
4916 get_alias_set (TREE_TYPE (field)));
4917 }
4918 }
4919 else if (TREE_CODE (type) == ARRAY_TYPE
4920 || TREE_CODE (type) == VECTOR_TYPE)
4921 {
4922 tree elt;
4923 int i;
4924 int need_to_clear;
4925 tree domain = TYPE_DOMAIN (type);
4926 tree elttype = TREE_TYPE (type);
4927 int const_bounds_p;
4928 HOST_WIDE_INT minelt = 0;
4929 HOST_WIDE_INT maxelt = 0;
4930
4931 /* Vectors are like arrays, but the domain is stored via an array
4932 type indirectly. */
4933 if (TREE_CODE (type) == VECTOR_TYPE)
4934 {
4935 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4936 the same field as TYPE_DOMAIN, we are not guaranteed that
4937 it always will. */
4938 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4939 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4940 }
4941
4942 const_bounds_p = (TYPE_MIN_VALUE (domain)
4943 && TYPE_MAX_VALUE (domain)
4944 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4945 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4946
4947 /* If we have constant bounds for the range of the type, get them. */
4948 if (const_bounds_p)
4949 {
4950 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4951 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4952 }
4953
4954 /* If the constructor has fewer elements than the array,
4955 clear the whole array first. Similarly if this is a
4956 static constructor of a non-BLKmode object. */
4957 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4958 need_to_clear = 1;
4959 else
4960 {
4961 HOST_WIDE_INT count = 0, zero_count = 0;
4962 need_to_clear = ! const_bounds_p;
4963
4964 /* This loop is a more accurate version of the loop in
4965 mostly_zeros_p (it handles RANGE_EXPR in an index).
4966 It is also needed to check for missing elements. */
4967 for (elt = CONSTRUCTOR_ELTS (exp);
4968 elt != NULL_TREE && ! need_to_clear;
4969 elt = TREE_CHAIN (elt))
4970 {
4971 tree index = TREE_PURPOSE (elt);
4972 HOST_WIDE_INT this_node_count;
4973
4974 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4975 {
4976 tree lo_index = TREE_OPERAND (index, 0);
4977 tree hi_index = TREE_OPERAND (index, 1);
4978
4979 if (! host_integerp (lo_index, 1)
4980 || ! host_integerp (hi_index, 1))
4981 {
4982 need_to_clear = 1;
4983 break;
4984 }
4985
4986 this_node_count = (tree_low_cst (hi_index, 1)
4987 - tree_low_cst (lo_index, 1) + 1);
4988 }
4989 else
4990 this_node_count = 1;
4991
4992 count += this_node_count;
4993 if (mostly_zeros_p (TREE_VALUE (elt)))
4994 zero_count += this_node_count;
4995 }
4996
4997 /* Clear the entire array first if there are any missing elements,
4998 or if the incidence of zero elements is >= 75%. */
4999 if (! need_to_clear
5000 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
5001 need_to_clear = 1;
5002 }
5003
5004 if (need_to_clear && size > 0)
5005 {
5006 if (! cleared)
5007 {
5008 if (REG_P (target))
5009 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5010 else
5011 clear_storage (target, GEN_INT (size));
5012 }
5013 cleared = 1;
5014 }
5015 else if (REG_P (target))
5016 /* Inform later passes that the old value is dead. */
5017 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5018
5019 /* Store each element of the constructor into
5020 the corresponding element of TARGET, determined
5021 by counting the elements. */
5022 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5023 elt;
5024 elt = TREE_CHAIN (elt), i++)
5025 {
5026 enum machine_mode mode;
5027 HOST_WIDE_INT bitsize;
5028 HOST_WIDE_INT bitpos;
5029 int unsignedp;
5030 tree value = TREE_VALUE (elt);
5031 tree index = TREE_PURPOSE (elt);
5032 rtx xtarget = target;
5033
5034 if (cleared && is_zeros_p (value))
5035 continue;
5036
5037 unsignedp = TREE_UNSIGNED (elttype);
5038 mode = TYPE_MODE (elttype);
5039 if (mode == BLKmode)
5040 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5041 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5042 : -1);
5043 else
5044 bitsize = GET_MODE_BITSIZE (mode);
5045
5046 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5047 {
5048 tree lo_index = TREE_OPERAND (index, 0);
5049 tree hi_index = TREE_OPERAND (index, 1);
5050 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
5051 struct nesting *loop;
5052 HOST_WIDE_INT lo, hi, count;
5053 tree position;
5054
5055 /* If the range is constant and "small", unroll the loop. */
5056 if (const_bounds_p
5057 && host_integerp (lo_index, 0)
5058 && host_integerp (hi_index, 0)
5059 && (lo = tree_low_cst (lo_index, 0),
5060 hi = tree_low_cst (hi_index, 0),
5061 count = hi - lo + 1,
5062 (GET_CODE (target) != MEM
5063 || count <= 2
5064 || (host_integerp (TYPE_SIZE (elttype), 1)
5065 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5066 <= 40 * 8)))))
5067 {
5068 lo -= minelt; hi -= minelt;
5069 for (; lo <= hi; lo++)
5070 {
5071 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5072
5073 if (GET_CODE (target) == MEM
5074 && !MEM_KEEP_ALIAS_SET_P (target)
5075 && TREE_CODE (type) == ARRAY_TYPE
5076 && TYPE_NONALIASED_COMPONENT (type))
5077 {
5078 target = copy_rtx (target);
5079 MEM_KEEP_ALIAS_SET_P (target) = 1;
5080 }
5081
5082 store_constructor_field
5083 (target, bitsize, bitpos, mode, value, type, cleared,
5084 get_alias_set (elttype));
5085 }
5086 }
5087 else
5088 {
5089 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5090 loop_top = gen_label_rtx ();
5091 loop_end = gen_label_rtx ();
5092
5093 unsignedp = TREE_UNSIGNED (domain);
5094
5095 index = build_decl (VAR_DECL, NULL_TREE, domain);
5096
5097 index_r
5098 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5099 &unsignedp, 0));
5100 SET_DECL_RTL (index, index_r);
5101 if (TREE_CODE (value) == SAVE_EXPR
5102 && SAVE_EXPR_RTL (value) == 0)
5103 {
5104 /* Make sure value gets expanded once before the
5105 loop. */
5106 expand_expr (value, const0_rtx, VOIDmode, 0);
5107 emit_queue ();
5108 }
5109 store_expr (lo_index, index_r, 0);
5110 loop = expand_start_loop (0);
5111
5112 /* Assign value to element index. */
5113 position
5114 = convert (ssizetype,
5115 fold (build (MINUS_EXPR, TREE_TYPE (index),
5116 index, TYPE_MIN_VALUE (domain))));
5117 position = size_binop (MULT_EXPR, position,
5118 convert (ssizetype,
5119 TYPE_SIZE_UNIT (elttype)));
5120
5121 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5122 xtarget = offset_address (target, pos_rtx,
5123 highest_pow2_factor (position));
5124 xtarget = adjust_address (xtarget, mode, 0);
5125 if (TREE_CODE (value) == CONSTRUCTOR)
5126 store_constructor (value, xtarget, cleared,
5127 bitsize / BITS_PER_UNIT);
5128 else
5129 store_expr (value, xtarget, 0);
5130
5131 expand_exit_loop_if_false (loop,
5132 build (LT_EXPR, integer_type_node,
5133 index, hi_index));
5134
5135 expand_increment (build (PREINCREMENT_EXPR,
5136 TREE_TYPE (index),
5137 index, integer_one_node), 0, 0);
5138 expand_end_loop ();
5139 emit_label (loop_end);
5140 }
5141 }
5142 else if ((index != 0 && ! host_integerp (index, 0))
5143 || ! host_integerp (TYPE_SIZE (elttype), 1))
5144 {
5145 tree position;
5146
5147 if (index == 0)
5148 index = ssize_int (i);
5149
5150 if (minelt)
5151 index = convert (ssizetype,
5152 fold (build (MINUS_EXPR, TREE_TYPE (index), index,
5153 TYPE_MIN_VALUE (domain))));
5154
5155 position = size_binop (MULT_EXPR, index,
5156 convert (ssizetype,
5157 TYPE_SIZE_UNIT (elttype)));
5158 xtarget = offset_address (target,
5159 expand_expr (position, 0, VOIDmode, 0),
5160 highest_pow2_factor (position));
5161 xtarget = adjust_address (xtarget, mode, 0);
5162 store_expr (value, xtarget, 0);
5163 }
5164 else
5165 {
5166 if (index != 0)
5167 bitpos = ((tree_low_cst (index, 0) - minelt)
5168 * tree_low_cst (TYPE_SIZE (elttype), 1));
5169 else
5170 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5171
5172 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5173 && TREE_CODE (type) == ARRAY_TYPE
5174 && TYPE_NONALIASED_COMPONENT (type))
5175 {
5176 target = copy_rtx (target);
5177 MEM_KEEP_ALIAS_SET_P (target) = 1;
5178 }
5179
5180 store_constructor_field (target, bitsize, bitpos, mode, value,
5181 type, cleared, get_alias_set (elttype));
5182
5183 }
5184 }
5185 }
5186
5187 /* Set constructor assignments. */
5188 else if (TREE_CODE (type) == SET_TYPE)
5189 {
5190 tree elt = CONSTRUCTOR_ELTS (exp);
5191 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5192 tree domain = TYPE_DOMAIN (type);
5193 tree domain_min, domain_max, bitlength;
5194
5195 /* The default implementation strategy is to extract the constant
5196 parts of the constructor, use that to initialize the target,
5197 and then "or" in whatever non-constant ranges we need in addition.
5198
5199 If a large set is all zero or all ones, it is
5200 probably better to set it using memset (if available) or bzero.
5201 Also, if a large set has just a single range, it may also be
5202 better to first clear the whole set (using bzero/memset) and
5203 then set the bits we want. */
5204
5205 /* Check for all zeros. */
5206 if (elt == NULL_TREE && size > 0)
5207 {
5208 if (!cleared)
5209 clear_storage (target, GEN_INT (size));
5210 return;
5211 }
5212
5213 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5214 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5215 bitlength = size_binop (PLUS_EXPR,
5216 size_diffop (domain_max, domain_min),
5217 ssize_int (1));
5218
5219 nbits = tree_low_cst (bitlength, 1);
5220
5221 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5222 are "complicated" (more than one range), initialize (the
5223 constant parts) by copying from a constant. */
5224 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5225 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5226 {
5227 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5228 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5229 char *bit_buffer = (char *) alloca (nbits);
5230 HOST_WIDE_INT word = 0;
5231 unsigned int bit_pos = 0;
5232 unsigned int ibit = 0;
5233 unsigned int offset = 0; /* In bytes from beginning of set. */
5234
5235 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5236 for (;;)
5237 {
5238 if (bit_buffer[ibit])
5239 {
5240 if (BYTES_BIG_ENDIAN)
5241 word |= (1 << (set_word_size - 1 - bit_pos));
5242 else
5243 word |= 1 << bit_pos;
5244 }
5245
5246 bit_pos++; ibit++;
5247 if (bit_pos >= set_word_size || ibit == nbits)
5248 {
5249 if (word != 0 || ! cleared)
5250 {
5251 rtx datum = GEN_INT (word);
5252 rtx to_rtx;
5253
5254 /* The assumption here is that it is safe to use
5255 XEXP if the set is multi-word, but not if
5256 it's single-word. */
5257 if (GET_CODE (target) == MEM)
5258 to_rtx = adjust_address (target, mode, offset);
5259 else if (offset == 0)
5260 to_rtx = target;
5261 else
5262 abort ();
5263 emit_move_insn (to_rtx, datum);
5264 }
5265
5266 if (ibit == nbits)
5267 break;
5268 word = 0;
5269 bit_pos = 0;
5270 offset += set_word_size / BITS_PER_UNIT;
5271 }
5272 }
5273 }
5274 else if (!cleared)
5275 /* Don't bother clearing storage if the set is all ones. */
5276 if (TREE_CHAIN (elt) != NULL_TREE
5277 || (TREE_PURPOSE (elt) == NULL_TREE
5278 ? nbits != 1
5279 : ( ! host_integerp (TREE_VALUE (elt), 0)
5280 || ! host_integerp (TREE_PURPOSE (elt), 0)
5281 || (tree_low_cst (TREE_VALUE (elt), 0)
5282 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5283 != (HOST_WIDE_INT) nbits))))
5284 clear_storage (target, expr_size (exp));
5285
5286 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5287 {
5288 /* Start of range of element or NULL. */
5289 tree startbit = TREE_PURPOSE (elt);
5290 /* End of range of element, or element value. */
5291 tree endbit = TREE_VALUE (elt);
5292 HOST_WIDE_INT startb, endb;
5293 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5294
5295 bitlength_rtx = expand_expr (bitlength,
5296 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5297
5298 /* Handle non-range tuple element like [ expr ]. */
5299 if (startbit == NULL_TREE)
5300 {
5301 startbit = save_expr (endbit);
5302 endbit = startbit;
5303 }
5304
5305 startbit = convert (sizetype, startbit);
5306 endbit = convert (sizetype, endbit);
5307 if (! integer_zerop (domain_min))
5308 {
5309 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5310 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5311 }
5312 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5313 EXPAND_CONST_ADDRESS);
5314 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5315 EXPAND_CONST_ADDRESS);
5316
5317 if (REG_P (target))
5318 {
5319 targetx
5320 = assign_temp
5321 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5322 (GET_MODE (target), 0),
5323 TYPE_QUAL_CONST)),
5324 0, 1, 1);
5325 emit_move_insn (targetx, target);
5326 }
5327
5328 else if (GET_CODE (target) == MEM)
5329 targetx = target;
5330 else
5331 abort ();
5332
5333 /* Optimization: If startbit and endbit + 1 are constants divisible
5334 by BITS_PER_UNIT, call memset instead. */
5335 if (TARGET_MEM_FUNCTIONS
5336 && TREE_CODE (startbit) == INTEGER_CST
5337 && TREE_CODE (endbit) == INTEGER_CST
5338 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5339 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5340 {
5341 emit_library_call (memset_libfunc, LCT_NORMAL,
5342 VOIDmode, 3,
5343 plus_constant (XEXP (targetx, 0),
5344 startb / BITS_PER_UNIT),
5345 Pmode,
5346 constm1_rtx, TYPE_MODE (integer_type_node),
5347 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5348 TYPE_MODE (sizetype));
5349 }
5350 else
5351 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5352 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5353 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5354 startbit_rtx, TYPE_MODE (sizetype),
5355 endbit_rtx, TYPE_MODE (sizetype));
5356
5357 if (REG_P (target))
5358 emit_move_insn (target, targetx);
5359 }
5360 }
5361
5362 else
5363 abort ();
5364 }
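/* Illustrative example, with hypothetical values, of the clearing
   decision in the array branch above: for a domain of 10 elements
   (maxelt - minelt + 1 == 10) whose constructor supplies only 7
   elements, need_to_clear is set because count (7) is below 10; if all
   10 elements were present with 5 of them mostly zero, the test
   4 * zero_count >= 3 * count (20 >= 30) would fail and each element
   would simply be stored individually without a preliminary clear.  */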
5365
5366 /* Store the value of EXP (an expression tree)
5367 into a subfield of TARGET which has mode MODE and occupies
5368 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5369 If MODE is VOIDmode, it means that we are storing into a bit-field.
5370
5371 If VALUE_MODE is VOIDmode, return nothing in particular.
5372 UNSIGNEDP is not used in this case.
5373
5374 Otherwise, return an rtx for the value stored. This rtx
5375 has mode VALUE_MODE if that is convenient to do.
5376 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5377
5378 TYPE is the type of the underlying object.
5379
5380 ALIAS_SET is the alias set for the destination. This value will
5381 (in general) be different from that for TARGET, since TARGET is a
5382 reference to the containing structure. */
5383
5384 static rtx
5385 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5386 alias_set)
5387 rtx target;
5388 HOST_WIDE_INT bitsize;
5389 HOST_WIDE_INT bitpos;
5390 enum machine_mode mode;
5391 tree exp;
5392 enum machine_mode value_mode;
5393 int unsignedp;
5394 tree type;
5395 int alias_set;
5396 {
5397 HOST_WIDE_INT width_mask = 0;
5398
5399 if (TREE_CODE (exp) == ERROR_MARK)
5400 return const0_rtx;
5401
5402 /* If we have nothing to store, do nothing unless the expression has
5403 side-effects. */
5404 if (bitsize == 0)
5405 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5406 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5407 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5408
5409 /* If we are storing into an unaligned field of an aligned union that is
5410 in a register, we may have the mode of TARGET being an integer mode but
5411 MODE == BLKmode. In that case, get an aligned object whose size and
5412 alignment are the same as TARGET and store TARGET into it (we can avoid
5413 the store if the field being stored is the entire width of TARGET). Then
5414 call ourselves recursively to store the field into a BLKmode version of
5415 that object. Finally, load from the object into TARGET. This is not
5416 very efficient in general, but should only be slightly more expensive
5417 than the otherwise-required unaligned accesses. Perhaps this can be
5418 cleaned up later. */
5419
5420 if (mode == BLKmode
5421 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5422 {
5423 rtx object
5424 = assign_temp
5425 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5426 0, 1, 1);
5427 rtx blk_object = adjust_address (object, BLKmode, 0);
5428
5429 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5430 emit_move_insn (object, target);
5431
5432 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5433 alias_set);
5434
5435 emit_move_insn (target, object);
5436
5437 /* We want to return the BLKmode version of the data. */
5438 return blk_object;
5439 }
5440
5441 if (GET_CODE (target) == CONCAT)
5442 {
5443 /* We're storing into a struct containing a single __complex. */
5444
5445 if (bitpos != 0)
5446 abort ();
5447 return store_expr (exp, target, 0);
5448 }
5449
5450 /* If the structure is in a register or if the component
5451 is a bit field, we cannot use addressing to access it.
5452 Use bit-field techniques or SUBREG to store in it. */
5453
5454 if (mode == VOIDmode
5455 || (mode != BLKmode && ! direct_store[(int) mode]
5456 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5457 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5458 || GET_CODE (target) == REG
5459 || GET_CODE (target) == SUBREG
5460 /* If the field isn't aligned enough to store as an ordinary memref,
5461 store it as a bit field. */
5462 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5463 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5464 || bitpos % GET_MODE_ALIGNMENT (mode)))
5465 /* If the RHS and field are a constant size and the size of the
5466 RHS isn't the same size as the bitfield, we must use bitfield
5467 operations. */
5468 || (bitsize >= 0
5469 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5470 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5471 {
5472 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5473
5474 /* If BITSIZE is narrower than the size of the type of EXP
5475 we will be narrowing TEMP. Normally, what's wanted are the
5476 low-order bits. However, if EXP's type is a record and this is a
5477 big-endian machine, we want the upper BITSIZE bits. */
5478 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5479 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5480 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5481 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5482 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5483 - bitsize),
5484 temp, 1);
5485
5486 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5487 MODE. */
5488 if (mode != VOIDmode && mode != BLKmode
5489 && mode != TYPE_MODE (TREE_TYPE (exp)))
5490 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5491
5492 /* If the modes of TARGET and TEMP are both BLKmode, both
5493 must be in memory and BITPOS must be aligned on a byte
5494 boundary. If so, we simply do a block copy. */
5495 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5496 {
5497 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5498 || bitpos % BITS_PER_UNIT != 0)
5499 abort ();
5500
5501 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5502 emit_block_move (target, temp,
5503 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5504 / BITS_PER_UNIT),
5505 BLOCK_OP_NORMAL);
5506
5507 return value_mode == VOIDmode ? const0_rtx : target;
5508 }
5509
5510 /* Store the value in the bitfield. */
5511 store_bit_field (target, bitsize, bitpos, mode, temp,
5512 int_size_in_bytes (type));
5513
5514 if (value_mode != VOIDmode)
5515 {
5516 /* The caller wants an rtx for the value.
5517 If possible, avoid refetching from the bitfield itself. */
5518 if (width_mask != 0
5519 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5520 {
5521 tree count;
5522 enum machine_mode tmode;
5523
5524 tmode = GET_MODE (temp);
5525 if (tmode == VOIDmode)
5526 tmode = value_mode;
5527
5528 if (unsignedp)
5529 return expand_and (tmode, temp,
5530 gen_int_mode (width_mask, tmode),
5531 NULL_RTX);
5532
5533 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5534 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5535 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5536 }
5537
5538 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5539 NULL_RTX, value_mode, VOIDmode,
5540 int_size_in_bytes (type));
5541 }
5542 return const0_rtx;
5543 }
5544 else
5545 {
5546 rtx addr = XEXP (target, 0);
5547 rtx to_rtx = target;
5548
5549 /* If a value is wanted, it must be the lhs;
5550 so make the address stable for multiple use. */
5551
5552 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5553 && ! CONSTANT_ADDRESS_P (addr)
5554 /* A frame-pointer reference is already stable. */
5555 && ! (GET_CODE (addr) == PLUS
5556 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5557 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5558 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5559 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5560
5561 /* Now build a reference to just the desired component. */
5562
5563 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5564
5565 if (to_rtx == target)
5566 to_rtx = copy_rtx (to_rtx);
5567
5568 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5569 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5570 set_mem_alias_set (to_rtx, alias_set);
5571
5572 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5573 }
5574 }
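/* Illustrative sketch only, compiled out: a standalone version of the
   mask-and-shift arithmetic used above to hand the just-stored value
   back to the caller without refetching the bit-field.  WIDTH_MASK is
   ((HOST_WIDE_INT) 1 << bitsize) - 1; an unsigned field needs only an
   AND (cf. expand_and), a signed one a shift up to the sign bit and an
   arithmetic shift back down (cf. the two expand_shift calls).  The
   function and the values below are hypothetical, not part of GCC, and
   assume the usual two's complement arithmetic right shift.  */
#if 0
#include <assert.h>

static long
sketch_recover_field (temp, bitsize, unsignedp)
     long temp;
     int bitsize, unsignedp;
{
  int nbits = (int) sizeof (long) * 8;
  long width_mask = (1L << bitsize) - 1;

  if (unsignedp)
    return temp & width_mask;

  /* Do the left shift in unsigned arithmetic to avoid overflow; the
     arithmetic right shift then sign-extends the field.  */
  return ((long) ((unsigned long) temp << (nbits - bitsize)))
	 >> (nbits - bitsize);
}

static void
sketch_recover_field_check ()
{
  assert (sketch_recover_field (0x1bc, 8, 1) == 0xbc);  /* low 8 bits */
  assert (sketch_recover_field (0xbc, 8, 0) == -0x44);  /* sign-extended */
}
#endif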
5575 \f
5576 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5577 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5578 codes and find the ultimate containing object, which we return.
5579
5580 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5581 bit position, and *PUNSIGNEDP to the signedness of the field.
5582 If the position of the field is variable, we store a tree
5583 giving the variable offset (in units) in *POFFSET.
5584 This offset is in addition to the bit position.
5585 If the position is not variable, we store 0 in *POFFSET.
5586
5587 If any of the extraction expressions is volatile,
5588 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5589
5590 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5591 is a mode that can be used to access the field. In that case, *PBITSIZE
5592 is redundant.
5593
5594 If the field describes a variable-sized object, *PMODE is set to
5595 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5596 this case, but the address of the object can be found. */
5597
5598 tree
5599 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5600 punsignedp, pvolatilep)
5601 tree exp;
5602 HOST_WIDE_INT *pbitsize;
5603 HOST_WIDE_INT *pbitpos;
5604 tree *poffset;
5605 enum machine_mode *pmode;
5606 int *punsignedp;
5607 int *pvolatilep;
5608 {
5609 tree size_tree = 0;
5610 enum machine_mode mode = VOIDmode;
5611 tree offset = size_zero_node;
5612 tree bit_offset = bitsize_zero_node;
5613 tree placeholder_ptr = 0;
5614 tree tem;
5615
5616 /* First get the mode, signedness, and size. We do this from just the
5617 outermost expression. */
5618 if (TREE_CODE (exp) == COMPONENT_REF)
5619 {
5620 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5621 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5622 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5623
5624 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5625 }
5626 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5627 {
5628 size_tree = TREE_OPERAND (exp, 1);
5629 *punsignedp = TREE_UNSIGNED (exp);
5630 }
5631 else
5632 {
5633 mode = TYPE_MODE (TREE_TYPE (exp));
5634 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5635
5636 if (mode == BLKmode)
5637 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5638 else
5639 *pbitsize = GET_MODE_BITSIZE (mode);
5640 }
5641
5642 if (size_tree != 0)
5643 {
5644 if (! host_integerp (size_tree, 1))
5645 mode = BLKmode, *pbitsize = -1;
5646 else
5647 *pbitsize = tree_low_cst (size_tree, 1);
5648 }
5649
5650 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5651 and find the ultimate containing object. */
5652 while (1)
5653 {
5654 if (TREE_CODE (exp) == BIT_FIELD_REF)
5655 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5656 else if (TREE_CODE (exp) == COMPONENT_REF)
5657 {
5658 tree field = TREE_OPERAND (exp, 1);
5659 tree this_offset = DECL_FIELD_OFFSET (field);
5660
5661 /* If this field hasn't been filled in yet, don't go
5662 past it. This should only happen when folding expressions
5663 made during type construction. */
5664 if (this_offset == 0)
5665 break;
5666 else if (! TREE_CONSTANT (this_offset)
5667 && contains_placeholder_p (this_offset))
5668 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5669
5670 offset = size_binop (PLUS_EXPR, offset, this_offset);
5671 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5672 DECL_FIELD_BIT_OFFSET (field));
5673
5674 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5675 }
5676
5677 else if (TREE_CODE (exp) == ARRAY_REF
5678 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5679 {
5680 tree index = TREE_OPERAND (exp, 1);
5681 tree array = TREE_OPERAND (exp, 0);
5682 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5683 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5684 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5685
5686 /* We assume all arrays have sizes that are a multiple of a byte.
5687 First subtract the lower bound, if any, in the type of the
5688 index, then convert to sizetype and multiply by the size of the
5689 array element. */
5690 if (low_bound != 0 && ! integer_zerop (low_bound))
5691 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5692 index, low_bound));
5693
5694 /* If the index has a self-referential type, pass it to a
5695 WITH_RECORD_EXPR; if the component size is self-referential, pass
5696 our component to one. */
5697 if (! TREE_CONSTANT (index)
5698 && contains_placeholder_p (index))
5699 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5700 if (! TREE_CONSTANT (unit_size)
5701 && contains_placeholder_p (unit_size))
5702 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5703
5704 offset = size_binop (PLUS_EXPR, offset,
5705 size_binop (MULT_EXPR,
5706 convert (sizetype, index),
5707 unit_size));
5708 }
5709
5710 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5711 {
5712 tree new = find_placeholder (exp, &placeholder_ptr);
5713
5714 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5715 We might have been called from tree optimization where we
5716 haven't set up an object yet. */
5717 if (new == 0)
5718 break;
5719 else
5720 exp = new;
5721
5722 continue;
5723 }
5724 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5725 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5726 && ! ((TREE_CODE (exp) == NOP_EXPR
5727 || TREE_CODE (exp) == CONVERT_EXPR)
5728 && (TYPE_MODE (TREE_TYPE (exp))
5729 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5730 break;
5731
5732 /* If any reference in the chain is volatile, the effect is volatile. */
5733 if (TREE_THIS_VOLATILE (exp))
5734 *pvolatilep = 1;
5735
5736 exp = TREE_OPERAND (exp, 0);
5737 }
5738
5739 /* If OFFSET is constant, see if we can return the whole thing as a
5740 constant bit position. Otherwise, split it up. */
5741 if (host_integerp (offset, 0)
5742 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5743 bitsize_unit_node))
5744 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5745 && host_integerp (tem, 0))
5746 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5747 else
5748 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5749
5750 *pmode = mode;
5751 return exp;
5752 }
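/* Illustrative sketch only, compiled out: the offset arithmetic above on
   plain integers.  An ARRAY_REF contributes (index - low_bound) *
   element_size bytes, a COMPONENT_REF contributes DECL_FIELD_OFFSET
   bytes plus DECL_FIELD_BIT_OFFSET bits, and when everything is
   constant the result collapses to the single bit position
   offset * BITS_PER_UNIT + bit_offset.  All names and numbers below are
   hypothetical, not part of GCC.  */
#if 0
#include <assert.h>

static void
sketch_inner_reference_offsets ()
{
  /* s.arr[3], where arr starts 8 bytes into s, elements are 4 bytes
     wide, and the array's lower bound is 1.  */
  long field_byte_offset = 8, field_bit_offset = 0;
  long index = 3, low_bound = 1, element_size = 4;
  long bits_per_unit = 8;
  long byte_offset, bitpos;

  byte_offset = field_byte_offset + (index - low_bound) * element_size;
  bitpos = byte_offset * bits_per_unit + field_bit_offset;

  assert (byte_offset == 16);
  assert (bitpos == 128);
}
#endif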
5753
5754 /* Return 1 if T is an expression that get_inner_reference handles. */
5755
5756 int
5757 handled_component_p (t)
5758 tree t;
5759 {
5760 switch (TREE_CODE (t))
5761 {
5762 case BIT_FIELD_REF:
5763 case COMPONENT_REF:
5764 case ARRAY_REF:
5765 case ARRAY_RANGE_REF:
5766 case NON_LVALUE_EXPR:
5767 case VIEW_CONVERT_EXPR:
5768 return 1;
5769
5770 case NOP_EXPR:
5771 case CONVERT_EXPR:
5772 return (TYPE_MODE (TREE_TYPE (t))
5773 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5774
5775 default:
5776 return 0;
5777 }
5778 }
5779 \f
5780 /* Given an rtx VALUE that may contain additions and multiplications, return
5781 an equivalent value that just refers to a register, memory, or constant.
5782 This is done by generating instructions to perform the arithmetic and
5783 returning a pseudo-register containing the value.
5784
5785 The returned value may be a REG, SUBREG, MEM or constant. */
5786
5787 rtx
5788 force_operand (value, target)
5789 rtx value, target;
5790 {
5791 rtx op1, op2;
5792 /* Use subtarget as the target for operand 0 of a binary operation. */
5793 rtx subtarget = get_subtarget (target);
5794 enum rtx_code code = GET_CODE (value);
5795
5796 /* Check for a PIC address load. */
5797 if ((code == PLUS || code == MINUS)
5798 && XEXP (value, 0) == pic_offset_table_rtx
5799 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5800 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5801 || GET_CODE (XEXP (value, 1)) == CONST))
5802 {
5803 if (!subtarget)
5804 subtarget = gen_reg_rtx (GET_MODE (value));
5805 emit_move_insn (subtarget, value);
5806 return subtarget;
5807 }
5808
5809 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5810 {
5811 if (!target)
5812 target = gen_reg_rtx (GET_MODE (value));
5813 convert_move (target, force_operand (XEXP (value, 0), NULL),
5814 code == ZERO_EXTEND);
5815 return target;
5816 }
5817
5818 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5819 {
5820 op2 = XEXP (value, 1);
5821 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5822 subtarget = 0;
5823 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5824 {
5825 code = PLUS;
5826 op2 = negate_rtx (GET_MODE (value), op2);
5827 }
5828
5829 /* Check for an addition with OP2 a constant integer and our first
5830 operand a PLUS of a virtual register and something else. In that
5831 case, we want to emit the sum of the virtual register and the
5832 constant first and then add the other value. This allows virtual
5833 register instantiation to simply modify the constant rather than
5834 creating another one around this addition. */
5835 if (code == PLUS && GET_CODE (op2) == CONST_INT
5836 && GET_CODE (XEXP (value, 0)) == PLUS
5837 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5838 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5839 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5840 {
5841 rtx temp = expand_simple_binop (GET_MODE (value), code,
5842 XEXP (XEXP (value, 0), 0), op2,
5843 subtarget, 0, OPTAB_LIB_WIDEN);
5844 return expand_simple_binop (GET_MODE (value), code, temp,
5845 force_operand (XEXP (XEXP (value,
5846 0), 1), 0),
5847 target, 0, OPTAB_LIB_WIDEN);
5848 }
5849
5850 op1 = force_operand (XEXP (value, 0), subtarget);
5851 op2 = force_operand (op2, NULL_RTX);
5852 switch (code)
5853 {
5854 case MULT:
5855 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5856 case DIV:
5857 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5858 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5859 target, 1, OPTAB_LIB_WIDEN);
5860 else
5861 return expand_divmod (0,
5862 FLOAT_MODE_P (GET_MODE (value))
5863 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5864 GET_MODE (value), op1, op2, target, 0);
5865 break;
5866 case MOD:
5867 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5868 target, 0);
5869 break;
5870 case UDIV:
5871 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5872 target, 1);
5873 break;
5874 case UMOD:
5875 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5876 target, 1);
5877 break;
5878 case ASHIFTRT:
5879 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5880 target, 0, OPTAB_LIB_WIDEN);
5881 break;
5882 default:
5883 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5884 target, 1, OPTAB_LIB_WIDEN);
5885 }
5886 }
5887 if (GET_RTX_CLASS (code) == '1')
5888 {
5889 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5890 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5891 }
5892
5893 #ifdef INSN_SCHEDULING
5894 /* On machines that have insn scheduling, we want all memory references to be
5895 explicit, so we need to deal with such paradoxical SUBREGs. */
5896 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5897 && (GET_MODE_SIZE (GET_MODE (value))
5898 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5899 value
5900 = simplify_gen_subreg (GET_MODE (value),
5901 force_reg (GET_MODE (SUBREG_REG (value)),
5902 force_operand (SUBREG_REG (value),
5903 NULL_RTX)),
5904 GET_MODE (SUBREG_REG (value)),
5905 SUBREG_BYTE (value));
5906 #endif
5907
5908 return value;
5909 }
5910 \f
5911 /* Subroutine of expand_expr: return nonzero iff there is no way that
5912 EXP can reference X, which is being modified. TOP_P is nonzero if this
5913 call is going to be used to determine whether we need a temporary
5914 for EXP, as opposed to a recursive call to this function.
5915
5916 It is always safe for this routine to return zero since it merely
5917 searches for optimization opportunities. */
5918
5919 int
5920 safe_from_p (x, exp, top_p)
5921 rtx x;
5922 tree exp;
5923 int top_p;
5924 {
5925 rtx exp_rtl = 0;
5926 int i, nops;
5927 static tree save_expr_list;
5928
5929 if (x == 0
5930 /* If EXP has varying size, we MUST use a target since we currently
5931 have no way of allocating temporaries of variable size
5932 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5933 So we assume here that something at a higher level has prevented a
5934 clash. This is somewhat bogus, but the best we can do. Only
5935 do this when X is BLKmode and when we are at the top level. */
5936 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5937 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5938 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5939 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5940 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5941 != INTEGER_CST)
5942 && GET_MODE (x) == BLKmode)
5943 /* If X is in the outgoing argument area, it is always safe. */
5944 || (GET_CODE (x) == MEM
5945 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5946 || (GET_CODE (XEXP (x, 0)) == PLUS
5947 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5948 return 1;
5949
5950 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5951 find the underlying pseudo. */
5952 if (GET_CODE (x) == SUBREG)
5953 {
5954 x = SUBREG_REG (x);
5955 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5956 return 0;
5957 }
5958
5959 /* A SAVE_EXPR might appear many times in the expression passed to the
5960 top-level safe_from_p call, and if it has a complex subexpression,
5961 examining it multiple times could result in a combinatorial explosion.
5962 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5963 with optimization took about 28 minutes to compile -- even though it was
5964 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5965 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5966 we have processed. Note that the only test of top_p was above. */
5967
5968 if (top_p)
5969 {
5970 int rtn;
5971 tree t;
5972
5973 save_expr_list = 0;
5974
5975 rtn = safe_from_p (x, exp, 0);
5976
5977 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5978 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5979
5980 return rtn;
5981 }
5982
5983 /* Now look at our tree code and possibly recurse. */
5984 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5985 {
5986 case 'd':
5987 exp_rtl = DECL_RTL_IF_SET (exp);
5988 break;
5989
5990 case 'c':
5991 return 1;
5992
5993 case 'x':
5994 if (TREE_CODE (exp) == TREE_LIST)
5995 return ((TREE_VALUE (exp) == 0
5996 || safe_from_p (x, TREE_VALUE (exp), 0))
5997 && (TREE_CHAIN (exp) == 0
5998 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5999 else if (TREE_CODE (exp) == ERROR_MARK)
6000 return 1; /* An already-visited SAVE_EXPR? */
6001 else
6002 return 0;
6003
6004 case '1':
6005 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6006
6007 case '2':
6008 case '<':
6009 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
6010 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
6011
6012 case 'e':
6013 case 'r':
6014 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6015 the expression. If it is set, we conflict iff we are that rtx or
6016 both are in memory. Otherwise, we check all operands of the
6017 expression recursively. */
6018
6019 switch (TREE_CODE (exp))
6020 {
6021 case ADDR_EXPR:
6022 /* If the operand is static or we are static, we can't conflict.
6023 Likewise if we don't conflict with the operand at all. */
6024 if (staticp (TREE_OPERAND (exp, 0))
6025 || TREE_STATIC (exp)
6026 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6027 return 1;
6028
6029 /* Otherwise, the only way this can conflict is if we are taking
6030 the address of a DECL whose address is part of X, which is
6031 very rare. */
6032 exp = TREE_OPERAND (exp, 0);
6033 if (DECL_P (exp))
6034 {
6035 if (!DECL_RTL_SET_P (exp)
6036 || GET_CODE (DECL_RTL (exp)) != MEM)
6037 return 0;
6038 else
6039 exp_rtl = XEXP (DECL_RTL (exp), 0);
6040 }
6041 break;
6042
6043 case INDIRECT_REF:
6044 if (GET_CODE (x) == MEM
6045 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6046 get_alias_set (exp)))
6047 return 0;
6048 break;
6049
6050 case CALL_EXPR:
6051 /* Assume that the call will clobber all hard registers and
6052 all of memory. */
6053 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6054 || GET_CODE (x) == MEM)
6055 return 0;
6056 break;
6057
6058 case RTL_EXPR:
6059 /* If a sequence exists, we would have to scan every instruction
6060 in the sequence to see if it was safe. This is probably not
6061 worthwhile. */
6062 if (RTL_EXPR_SEQUENCE (exp))
6063 return 0;
6064
6065 exp_rtl = RTL_EXPR_RTL (exp);
6066 break;
6067
6068 case WITH_CLEANUP_EXPR:
6069 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6070 break;
6071
6072 case CLEANUP_POINT_EXPR:
6073 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6074
6075 case SAVE_EXPR:
6076 exp_rtl = SAVE_EXPR_RTL (exp);
6077 if (exp_rtl)
6078 break;
6079
6080 /* If we've already scanned this, don't do it again. Otherwise,
6081 show we've scanned it and record for clearing the flag if we're
6082 going on. */
6083 if (TREE_PRIVATE (exp))
6084 return 1;
6085
6086 TREE_PRIVATE (exp) = 1;
6087 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6088 {
6089 TREE_PRIVATE (exp) = 0;
6090 return 0;
6091 }
6092
6093 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6094 return 1;
6095
6096 case BIND_EXPR:
6097 /* The only operand we look at is operand 1. The rest aren't
6098 part of the expression. */
6099 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6100
6101 case METHOD_CALL_EXPR:
6102 /* This takes an rtx argument, but shouldn't appear here. */
6103 abort ();
6104
6105 default:
6106 break;
6107 }
6108
6109 /* If we have an rtx, we do not need to scan our operands. */
6110 if (exp_rtl)
6111 break;
6112
6113 nops = first_rtl_op (TREE_CODE (exp));
6114 for (i = 0; i < nops; i++)
6115 if (TREE_OPERAND (exp, i) != 0
6116 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6117 return 0;
6118
6119 /* If this is a language-specific tree code, it may require
6120 special handling. */
6121 if ((unsigned int) TREE_CODE (exp)
6122 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6123 && !(*lang_hooks.safe_from_p) (x, exp))
6124 return 0;
6125 }
6126
6127 /* If we have an rtl, find any enclosed object. Then see if we conflict
6128 with it. */
6129 if (exp_rtl)
6130 {
6131 if (GET_CODE (exp_rtl) == SUBREG)
6132 {
6133 exp_rtl = SUBREG_REG (exp_rtl);
6134 if (GET_CODE (exp_rtl) == REG
6135 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6136 return 0;
6137 }
6138
6139 /* If the rtl is X, then it is not safe. Otherwise, it is safe unless
6140 both are memory and they conflict. */
6141 return ! (rtx_equal_p (x, exp_rtl)
6142 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6143 && true_dependence (exp_rtl, VOIDmode, x,
6144 rtx_addr_varies_p)));
6145 }
6146
6147 /* If we reach here, it is safe. */
6148 return 1;
6149 }
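/* Illustrative sketch only, compiled out: why the TREE_PRIVATE marking
   above matters.  A shared subexpression is reached once per path
   through the DAG, so without a visited mark the scanning work doubles
   at every level of sharing; with the mark each node is examined once.
   The node type, functions and counts below are hypothetical, not part
   of GCC.  */
#if 0
#include <assert.h>
#include <string.h>

struct sketch_node { int visited; struct sketch_node *kid[2]; };

static long
sketch_scan (n, use_marks)
     struct sketch_node *n;
     int use_marks;
{
  long work = 1;

  if (n == 0)
    return 0;
  if (use_marks)
    {
      if (n->visited)
	return 0;	/* Already scanned; cf. TREE_PRIVATE.  */
      n->visited = 1;
    }
  work += sketch_scan (n->kid[0], use_marks);
  work += sketch_scan (n->kid[1], use_marks);
  return work;
}

static void
sketch_scan_check ()
{
  /* Three levels in which both children of a node are the same node:
     15 visits without marks, 4 with them.  */
  struct sketch_node n[4];

  memset (n, 0, sizeof n);
  n[0].kid[0] = n[0].kid[1] = &n[1];
  n[1].kid[0] = n[1].kid[1] = &n[2];
  n[2].kid[0] = n[2].kid[1] = &n[3];

  assert (sketch_scan (&n[0], 0) == 15);
  assert (sketch_scan (&n[0], 1) == 4);
}
#endif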
6150
6151 /* Subroutine of expand_expr: return rtx if EXP is a
6152 variable or parameter; else return 0. */
6153
6154 static rtx
6155 var_rtx (exp)
6156 tree exp;
6157 {
6158 STRIP_NOPS (exp);
6159 switch (TREE_CODE (exp))
6160 {
6161 case PARM_DECL:
6162 case VAR_DECL:
6163 return DECL_RTL (exp);
6164 default:
6165 return 0;
6166 }
6167 }
6168
6169 #ifdef MAX_INTEGER_COMPUTATION_MODE
6170
6171 void
6172 check_max_integer_computation_mode (exp)
6173 tree exp;
6174 {
6175 enum tree_code code;
6176 enum machine_mode mode;
6177
6178 /* Strip any NOPs that don't change the mode. */
6179 STRIP_NOPS (exp);
6180 code = TREE_CODE (exp);
6181
6182 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6183 if (code == NOP_EXPR
6184 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6185 return;
6186
6187 /* First check the type of the overall operation. We need only look at
6188 unary, binary and relational operations. */
6189 if (TREE_CODE_CLASS (code) == '1'
6190 || TREE_CODE_CLASS (code) == '2'
6191 || TREE_CODE_CLASS (code) == '<')
6192 {
6193 mode = TYPE_MODE (TREE_TYPE (exp));
6194 if (GET_MODE_CLASS (mode) == MODE_INT
6195 && mode > MAX_INTEGER_COMPUTATION_MODE)
6196 internal_error ("unsupported wide integer operation");
6197 }
6198
6199 /* Check operand of a unary op. */
6200 if (TREE_CODE_CLASS (code) == '1')
6201 {
6202 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6203 if (GET_MODE_CLASS (mode) == MODE_INT
6204 && mode > MAX_INTEGER_COMPUTATION_MODE)
6205 internal_error ("unsupported wide integer operation");
6206 }
6207
6208 /* Check operands of a binary/comparison op. */
6209 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6210 {
6211 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6212 if (GET_MODE_CLASS (mode) == MODE_INT
6213 && mode > MAX_INTEGER_COMPUTATION_MODE)
6214 internal_error ("unsupported wide integer operation");
6215
6216 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6217 if (GET_MODE_CLASS (mode) == MODE_INT
6218 && mode > MAX_INTEGER_COMPUTATION_MODE)
6219 internal_error ("unsupported wide integer operation");
6220 }
6221 }
6222 #endif
6223 \f
6224 /* Return the highest power of two that EXP is known to be a multiple of.
6225 This is used in updating alignment of MEMs in array references. */
6226
6227 static HOST_WIDE_INT
6228 highest_pow2_factor (exp)
6229 tree exp;
6230 {
6231 HOST_WIDE_INT c0, c1;
6232
6233 switch (TREE_CODE (exp))
6234 {
6235 case INTEGER_CST:
6236 /* We can find the lowest bit that's a one. If the low
6237 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6238 We need to handle this case since we can find it in a COND_EXPR,
6239 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6240 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6241 later ICE. */
6242 if (TREE_CONSTANT_OVERFLOW (exp))
6243 return BIGGEST_ALIGNMENT;
6244 else
6245 {
6246 /* Note: tree_low_cst is intentionally not used here,
6247 we don't care about the upper bits. */
6248 c0 = TREE_INT_CST_LOW (exp);
6249 c0 &= -c0;
6250 return c0 ? c0 : BIGGEST_ALIGNMENT;
6251 }
6252 break;
6253
6254 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6255 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6256 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6257 return MIN (c0, c1);
6258
6259 case MULT_EXPR:
6260 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6261 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6262 return c0 * c1;
6263
6264 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6265 case CEIL_DIV_EXPR:
6266 if (integer_pow2p (TREE_OPERAND (exp, 1))
6267 && host_integerp (TREE_OPERAND (exp, 1), 1))
6268 {
6269 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6270 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6271 return MAX (1, c0 / c1);
6272 }
6273 break;
6274
6275 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6276 case SAVE_EXPR: case WITH_RECORD_EXPR:
6277 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6278
6279 case COMPOUND_EXPR:
6280 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6281
6282 case COND_EXPR:
6283 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6284 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6285 return MIN (c0, c1);
6286
6287 default:
6288 break;
6289 }
6290
6291 return 1;
6292 }
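/* Illustrative sketch only, compiled out: the power-of-two bookkeeping
   above on plain integers.  C & -C isolates the lowest set bit of C,
   which is the largest power of two dividing it; a sum is known to be a
   multiple of the smaller of its operands' factors, a product of their
   product.  The function and the values below are hypothetical, not
   part of GCC.  */
#if 0
#include <assert.h>

static void
sketch_highest_pow2_factor ()
{
  long a = 24, b = 40;
  long fa, fb;

  fa = a & -a;			/* 8, since 24 = 8 * 3 */
  fb = b & -b;			/* 8, since 40 = 8 * 5 */

  assert (fa == 8 && fb == 8);
  assert ((a + b) % (fa < fb ? fa : fb) == 0);	/* PLUS_EXPR: MIN (c0, c1) */
  assert ((a * b) % (fa * fb) == 0);		/* MULT_EXPR: c0 * c1 */
}
#endif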
6293
6294 /* Similar, except that it is known that the expression must be a multiple
6295 of the alignment of TYPE. */
6296
6297 static HOST_WIDE_INT
6298 highest_pow2_factor_for_type (type, exp)
6299 tree type;
6300 tree exp;
6301 {
6302 HOST_WIDE_INT type_align, factor;
6303
6304 factor = highest_pow2_factor (exp);
6305 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6306 return MAX (factor, type_align);
6307 }
6308 \f
6309 /* Return an object on the placeholder list that matches EXP, a
6310 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6311 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6312 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6313 is a location which initially points to a starting location in the
6314 placeholder list (zero means the start of the list), and into which a
6315 pointer to the placeholder list entry where the object is found is stored. */
6316
6317 tree
6318 find_placeholder (exp, plist)
6319 tree exp;
6320 tree *plist;
6321 {
6322 tree type = TREE_TYPE (exp);
6323 tree placeholder_expr;
6324
6325 for (placeholder_expr
6326 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6327 placeholder_expr != 0;
6328 placeholder_expr = TREE_CHAIN (placeholder_expr))
6329 {
6330 tree need_type = TYPE_MAIN_VARIANT (type);
6331 tree elt;
6332
6333 /* Find the outermost reference that is of the type we want. If none,
6334 see if any object has a type that is a pointer to the type we
6335 want. */
6336 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6337 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6338 || TREE_CODE (elt) == COND_EXPR)
6339 ? TREE_OPERAND (elt, 1)
6340 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6341 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6342 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6343 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6344 ? TREE_OPERAND (elt, 0) : 0))
6345 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6346 {
6347 if (plist)
6348 *plist = placeholder_expr;
6349 return elt;
6350 }
6351
6352 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6353 elt
6354 = ((TREE_CODE (elt) == COMPOUND_EXPR
6355 || TREE_CODE (elt) == COND_EXPR)
6356 ? TREE_OPERAND (elt, 1)
6357 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6358 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6359 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6360 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6361 ? TREE_OPERAND (elt, 0) : 0))
6362 if (POINTER_TYPE_P (TREE_TYPE (elt))
6363 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6364 == need_type))
6365 {
6366 if (plist)
6367 *plist = placeholder_expr;
6368 return build1 (INDIRECT_REF, need_type, elt);
6369 }
6370 }
6371
6372 return 0;
6373 }
6374 \f
6375 /* expand_expr: generate code for computing expression EXP.
6376 An rtx for the computed value is returned. The value is never null.
6377 In the case of a void EXP, const0_rtx is returned.
6378
6379 The value may be stored in TARGET if TARGET is nonzero.
6380 TARGET is just a suggestion; callers must assume that
6381 the rtx returned may not be the same as TARGET.
6382
6383 If TARGET is CONST0_RTX, it means that the value will be ignored.
6384
6385 If TMODE is not VOIDmode, it suggests generating the
6386 result in mode TMODE. But this is done only when convenient.
6387 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6388 TMODE is just a suggestion; callers must assume that
6389 the rtx returned may not have mode TMODE.
6390
6391 Note that TARGET's mode may be neither TMODE nor MODE. In that case, it
6392 probably will not be used.
6393
6394 If MODIFIER is EXPAND_SUM then when EXP is an addition
6395 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6396 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6397 products as above, or REG or MEM, or constant.
6398 Ordinarily in such cases we would output mul or add instructions
6399 and then return a pseudo reg containing the sum.
6400
6401 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6402 it also marks a label as absolutely required (it can't be dead).
6403 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6404 This is used for outputting expressions used in initializers.
6405
6406 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6407 with a constant address even if that address is not normally legitimate.
6408 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
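/* Illustrative usage sketch only, compiled out: the most common call
   shapes of expand_expr as they appear elsewhere in this file.  The
   wrapper function and its arguments are hypothetical.  */
#if 0
static rtx
sketch_expand_expr_calls (exp, exp1)
     tree exp, exp1;
{
  rtx op0;

  /* Value wanted, no preference for where it goes or in what mode.  */
  op0 = expand_expr (exp, NULL_RTX, VOIDmode, 0);

  /* Value ignored; evaluate only for side effects.  */
  expand_expr (exp, const0_rtx, VOIDmode, 0);

  /* Address arithmetic; a (PLUS ...) or (MULT ...) form may come back.  */
  op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);

  return op0;
}
#endif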
6409
6410 rtx
6411 expand_expr (exp, target, tmode, modifier)
6412 tree exp;
6413 rtx target;
6414 enum machine_mode tmode;
6415 enum expand_modifier modifier;
6416 {
6417 rtx op0, op1, temp;
6418 tree type = TREE_TYPE (exp);
6419 int unsignedp = TREE_UNSIGNED (type);
6420 enum machine_mode mode;
6421 enum tree_code code = TREE_CODE (exp);
6422 optab this_optab;
6423 rtx subtarget, original_target;
6424 int ignore;
6425 tree context;
6426
6427 /* Handle ERROR_MARK before anybody tries to access its type. */
6428 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6429 {
6430 op0 = CONST0_RTX (tmode);
6431 if (op0 != 0)
6432 return op0;
6433 return const0_rtx;
6434 }
6435
6436 mode = TYPE_MODE (type);
6437 /* Use subtarget as the target for operand 0 of a binary operation. */
6438 subtarget = get_subtarget (target);
6439 original_target = target;
6440 ignore = (target == const0_rtx
6441 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6442 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6443 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6444 && TREE_CODE (type) == VOID_TYPE));
6445
6446 /* If we are going to ignore this result, we need only do something
6447 if there is a side-effect somewhere in the expression. If there
6448 is, short-circuit the most common cases here. Note that we must
6449 not call expand_expr with anything but const0_rtx in case this
6450 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6451
6452 if (ignore)
6453 {
6454 if (! TREE_SIDE_EFFECTS (exp))
6455 return const0_rtx;
6456
6457 /* Ensure we reference a volatile object even if value is ignored, but
6458 don't do this if all we are doing is taking its address. */
6459 if (TREE_THIS_VOLATILE (exp)
6460 && TREE_CODE (exp) != FUNCTION_DECL
6461 && mode != VOIDmode && mode != BLKmode
6462 && modifier != EXPAND_CONST_ADDRESS)
6463 {
6464 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6465 if (GET_CODE (temp) == MEM)
6466 temp = copy_to_reg (temp);
6467 return const0_rtx;
6468 }
6469
6470 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6471 || code == INDIRECT_REF || code == BUFFER_REF)
6472 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6473 modifier);
6474
6475 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6476 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6477 {
6478 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6479 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6480 return const0_rtx;
6481 }
6482 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6483 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6484 /* If the second operand has no side effects, just evaluate
6485 the first. */
6486 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6487 modifier);
6488 else if (code == BIT_FIELD_REF)
6489 {
6490 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6491 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6492 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6493 return const0_rtx;
6494 }
6495
6496 target = 0;
6497 }
6498
6499 #ifdef MAX_INTEGER_COMPUTATION_MODE
6500 /* Only check stuff here if the mode we want is different from the mode
6501 of the expression; if it's the same, check_max_integer_computation_mode
6502 will handle it. Do we really need to check this stuff at all? */
6503
6504 if (target
6505 && GET_MODE (target) != mode
6506 && TREE_CODE (exp) != INTEGER_CST
6507 && TREE_CODE (exp) != PARM_DECL
6508 && TREE_CODE (exp) != ARRAY_REF
6509 && TREE_CODE (exp) != ARRAY_RANGE_REF
6510 && TREE_CODE (exp) != COMPONENT_REF
6511 && TREE_CODE (exp) != BIT_FIELD_REF
6512 && TREE_CODE (exp) != INDIRECT_REF
6513 && TREE_CODE (exp) != CALL_EXPR
6514 && TREE_CODE (exp) != VAR_DECL
6515 && TREE_CODE (exp) != RTL_EXPR)
6516 {
6517 enum machine_mode mode = GET_MODE (target);
6518
6519 if (GET_MODE_CLASS (mode) == MODE_INT
6520 && mode > MAX_INTEGER_COMPUTATION_MODE)
6521 internal_error ("unsupported wide integer operation");
6522 }
6523
6524 if (tmode != mode
6525 && TREE_CODE (exp) != INTEGER_CST
6526 && TREE_CODE (exp) != PARM_DECL
6527 && TREE_CODE (exp) != ARRAY_REF
6528 && TREE_CODE (exp) != ARRAY_RANGE_REF
6529 && TREE_CODE (exp) != COMPONENT_REF
6530 && TREE_CODE (exp) != BIT_FIELD_REF
6531 && TREE_CODE (exp) != INDIRECT_REF
6532 && TREE_CODE (exp) != VAR_DECL
6533 && TREE_CODE (exp) != CALL_EXPR
6534 && TREE_CODE (exp) != RTL_EXPR
6535 && GET_MODE_CLASS (tmode) == MODE_INT
6536 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6537 internal_error ("unsupported wide integer operation");
6538
6539 check_max_integer_computation_mode (exp);
6540 #endif
6541
6542 /* If we will do cse, generate all results into pseudo registers
6543 since 1) that allows cse to find more things
6544 and 2) otherwise cse could produce an insn the machine
6545 cannot support. An exception is a CONSTRUCTOR into a multi-word
6546 MEM: that's much more likely to be most efficient into the MEM. */
6547
6548 if (! cse_not_expected && mode != BLKmode && target
6549 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6550 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6551 target = subtarget;
6552
6553 switch (code)
6554 {
6555 case LABEL_DECL:
6556 {
6557 tree function = decl_function_context (exp);
6558 /* Handle using a label in a containing function. */
6559 if (function != current_function_decl
6560 && function != inline_function_decl && function != 0)
6561 {
6562 struct function *p = find_function_data (function);
6563 p->expr->x_forced_labels
6564 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6565 p->expr->x_forced_labels);
6566 }
6567 else
6568 {
6569 if (modifier == EXPAND_INITIALIZER)
6570 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6571 label_rtx (exp),
6572 forced_labels);
6573 }
6574
6575 temp = gen_rtx_MEM (FUNCTION_MODE,
6576 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6577 if (function != current_function_decl
6578 && function != inline_function_decl && function != 0)
6579 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6580 return temp;
6581 }
6582
6583 case PARM_DECL:
6584 if (!DECL_RTL_SET_P (exp))
6585 {
6586 error_with_decl (exp, "prior parameter's size depends on `%s'");
6587 return CONST0_RTX (mode);
6588 }
6589
6590 /* ... fall through ... */
6591
6592 case VAR_DECL:
6593 /* If a static var's type was incomplete when the decl was written,
6594 but the type is complete now, lay out the decl now. */
6595 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6596 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6597 {
6598 rtx value = DECL_RTL_IF_SET (exp);
6599
6600 layout_decl (exp, 0);
6601
6602 /* If the RTL was already set, update its mode and memory
6603 attributes. */
6604 if (value != 0)
6605 {
6606 PUT_MODE (value, DECL_MODE (exp));
6607 SET_DECL_RTL (exp, 0);
6608 set_mem_attributes (value, exp, 1);
6609 SET_DECL_RTL (exp, value);
6610 }
6611 }
6612
6613 /* ... fall through ... */
6614
6615 case FUNCTION_DECL:
6616 case RESULT_DECL:
6617 if (DECL_RTL (exp) == 0)
6618 abort ();
6619
6620 /* Ensure the variable is marked as used even if it doesn't go through
6621 a parser. If it hasn't been used yet, write out an external
6622 definition. */
6623 if (! TREE_USED (exp))
6624 {
6625 assemble_external (exp);
6626 TREE_USED (exp) = 1;
6627 }
6628
6629 /* Show we haven't gotten RTL for this yet. */
6630 temp = 0;
6631
6632 /* Handle variables inherited from containing functions. */
6633 context = decl_function_context (exp);
6634
6635 /* We treat inline_function_decl as an alias for the current function
6636 because that is the inline function whose vars, types, etc.
6637 are being merged into the current function.
6638 See expand_inline_function. */
6639
6640 if (context != 0 && context != current_function_decl
6641 && context != inline_function_decl
6642 /* If var is static, we don't need a static chain to access it. */
6643 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6644 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6645 {
6646 rtx addr;
6647
6648 /* Mark as non-local and addressable. */
6649 DECL_NONLOCAL (exp) = 1;
6650 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6651 abort ();
6652 (*lang_hooks.mark_addressable) (exp);
6653 if (GET_CODE (DECL_RTL (exp)) != MEM)
6654 abort ();
6655 addr = XEXP (DECL_RTL (exp), 0);
6656 if (GET_CODE (addr) == MEM)
6657 addr
6658 = replace_equiv_address (addr,
6659 fix_lexical_addr (XEXP (addr, 0), exp));
6660 else
6661 addr = fix_lexical_addr (addr, exp);
6662
6663 temp = replace_equiv_address (DECL_RTL (exp), addr);
6664 }
6665
6666 /* This is the case of an array whose size is to be determined
6667 from its initializer, while the initializer is still being parsed.
6668 See expand_decl. */
6669
6670 else if (GET_CODE (DECL_RTL (exp)) == MEM
6671 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6672 temp = validize_mem (DECL_RTL (exp));
6673
6674 /* If DECL_RTL is memory, we are in the normal case. If either
6675 the address is not valid, or it is not a register and -fforce-addr
6676 is specified, get the address into a register. */
6677
6678 else if (GET_CODE (DECL_RTL (exp)) == MEM
6679 && modifier != EXPAND_CONST_ADDRESS
6680 && modifier != EXPAND_SUM
6681 && modifier != EXPAND_INITIALIZER
6682 && (! memory_address_p (DECL_MODE (exp),
6683 XEXP (DECL_RTL (exp), 0))
6684 || (flag_force_addr
6685 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6686 temp = replace_equiv_address (DECL_RTL (exp),
6687 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6688
6689 /* If we got something, return it. But first, set the alignment
6690 if the address is a register. */
6691 if (temp != 0)
6692 {
6693 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6694 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6695
6696 return temp;
6697 }
6698
6699 /* If the mode of DECL_RTL does not match that of the decl, it
6700 must be a promoted value. We return a SUBREG of the wanted mode,
6701 but mark it so that we know that it was already extended. */
6702
6703 if (GET_CODE (DECL_RTL (exp)) == REG
6704 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6705 {
6706 /* Get the signedness used for this variable. Ensure we get the
6707 same mode we got when the variable was declared. */
6708 if (GET_MODE (DECL_RTL (exp))
6709 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6710 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6711 abort ();
6712
6713 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6714 SUBREG_PROMOTED_VAR_P (temp) = 1;
6715 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6716 return temp;
6717 }
6718
6719 return DECL_RTL (exp);
6720
6721 case INTEGER_CST:
6722 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6723 TREE_INT_CST_HIGH (exp), mode);
6724
6725 /* ??? If overflow is set, fold will have done an incomplete job,
6726 which can result in (plus xx (const_int 0)), which can get
6727 simplified by validate_replace_rtx during virtual register
6728 instantiation, which can result in unrecognizable insns.
6729 Avoid this by forcing all overflows into registers. */
6730 if (TREE_CONSTANT_OVERFLOW (exp)
6731 && modifier != EXPAND_INITIALIZER)
6732 temp = force_reg (mode, temp);
6733
6734 return temp;
6735
6736 case CONST_DECL:
6737 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
6738
6739 case REAL_CST:
6740 /* If optimized, generate immediate CONST_DOUBLE
6741 which will be turned into memory by reload if necessary.
6742
6743 We used to force a register so that loop.c could see it. But
6744 this does not allow gen_* patterns to perform optimizations with
6745 the constants. It also produces two insns in cases like "x = 1.0;".
6746 On most machines, floating-point constants are not permitted in
6747 many insns, so we'd end up copying it to a register in any case.
6748
6749 Now, we do the copying in expand_binop, if appropriate. */
6750 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6751 TYPE_MODE (TREE_TYPE (exp)));
6752
6753 case COMPLEX_CST:
6754 case STRING_CST:
6755 if (! TREE_CST_RTL (exp))
6756 output_constant_def (exp, 1);
6757
6758 /* TREE_CST_RTL probably contains a constant address.
6759 On RISC machines where a constant address isn't valid,
6760 make some insns to get that address into a register. */
6761 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6762 && modifier != EXPAND_CONST_ADDRESS
6763 && modifier != EXPAND_INITIALIZER
6764 && modifier != EXPAND_SUM
6765 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6766 || (flag_force_addr
6767 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6768 return replace_equiv_address (TREE_CST_RTL (exp),
6769 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6770 return TREE_CST_RTL (exp);
6771
6772 case EXPR_WITH_FILE_LOCATION:
6773 {
6774 rtx to_return;
6775 const char *saved_input_filename = input_filename;
6776 int saved_lineno = lineno;
6777 input_filename = EXPR_WFL_FILENAME (exp);
6778 lineno = EXPR_WFL_LINENO (exp);
6779 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6780 emit_line_note (input_filename, lineno);
6781 /* Possibly avoid switching back and forth here. */
6782 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6783 input_filename = saved_input_filename;
6784 lineno = saved_lineno;
6785 return to_return;
6786 }
6787
6788 case SAVE_EXPR:
6789 context = decl_function_context (exp);
6790
6791 /* If this SAVE_EXPR was at global context, assume we are an
6792 initialization function and move it into our context. */
6793 if (context == 0)
6794 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6795
6796 /* We treat inline_function_decl as an alias for the current function
6797 because that is the inline function whose vars, types, etc.
6798 are being merged into the current function.
6799 See expand_inline_function. */
6800 if (context == current_function_decl || context == inline_function_decl)
6801 context = 0;
6802
6803 /* If this is non-local, handle it. */
6804 if (context)
6805 {
6806 /* The following call just exists to abort if the context is
6807 not of a containing function. */
6808 find_function_data (context);
6809
6810 temp = SAVE_EXPR_RTL (exp);
6811 if (temp && GET_CODE (temp) == REG)
6812 {
6813 put_var_into_stack (exp);
6814 temp = SAVE_EXPR_RTL (exp);
6815 }
6816 if (temp == 0 || GET_CODE (temp) != MEM)
6817 abort ();
6818 return
6819 replace_equiv_address (temp,
6820 fix_lexical_addr (XEXP (temp, 0), exp));
6821 }
6822 if (SAVE_EXPR_RTL (exp) == 0)
6823 {
6824 if (mode == VOIDmode)
6825 temp = const0_rtx;
6826 else
6827 temp = assign_temp (build_qualified_type (type,
6828 (TYPE_QUALS (type)
6829 | TYPE_QUAL_CONST)),
6830 3, 0, 0);
6831
6832 SAVE_EXPR_RTL (exp) = temp;
6833 if (!optimize && GET_CODE (temp) == REG)
6834 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6835 save_expr_regs);
6836
6837 /* If the mode of TEMP does not match that of the expression, it
6838 must be a promoted value. We pass store_expr a SUBREG of the
6839 wanted mode but mark it so that we know that it was already
6840 extended. Note that `unsignedp' was modified above in
6841 this case. */
6842
6843 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6844 {
6845 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6846 SUBREG_PROMOTED_VAR_P (temp) = 1;
6847 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6848 }
6849
6850 if (temp == const0_rtx)
6851 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6852 else
6853 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6854
6855 TREE_USED (exp) = 1;
6856 }
6857
6858 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6859 must be a promoted value. We return a SUBREG of the wanted mode,
6860 but mark it so that we know that it was already extended. */
6861
6862 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6863 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6864 {
6865 /* Compute the signedness and make the proper SUBREG. */
6866 promote_mode (type, mode, &unsignedp, 0);
6867 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6868 SUBREG_PROMOTED_VAR_P (temp) = 1;
6869 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6870 return temp;
6871 }
6872
6873 return SAVE_EXPR_RTL (exp);
6874
6875 case UNSAVE_EXPR:
6876 {
6877 rtx temp;
6878 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6879 TREE_OPERAND (exp, 0)
6880 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6881 return temp;
6882 }
6883
6884 case PLACEHOLDER_EXPR:
6885 {
6886 tree old_list = placeholder_list;
6887 tree placeholder_expr = 0;
6888
6889 exp = find_placeholder (exp, &placeholder_expr);
6890 if (exp == 0)
6891 abort ();
6892
6893 placeholder_list = TREE_CHAIN (placeholder_expr);
6894 temp = expand_expr (exp, original_target, tmode, modifier);
6895 placeholder_list = old_list;
6896 return temp;
6897 }
6898
6899 case WITH_RECORD_EXPR:
6900 /* Put the object on the placeholder list, expand our first operand,
6901 and pop the list. */
6902 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6903 placeholder_list);
6904 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6905 modifier);
6906 placeholder_list = TREE_CHAIN (placeholder_list);
6907 return target;
6908
6909 case GOTO_EXPR:
6910 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6911 expand_goto (TREE_OPERAND (exp, 0));
6912 else
6913 expand_computed_goto (TREE_OPERAND (exp, 0));
6914 return const0_rtx;
6915
6916 case EXIT_EXPR:
6917 expand_exit_loop_if_false (NULL,
6918 invert_truthvalue (TREE_OPERAND (exp, 0)));
6919 return const0_rtx;
6920
6921 case LABELED_BLOCK_EXPR:
6922 if (LABELED_BLOCK_BODY (exp))
6923 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6924 /* Should perhaps use expand_label, but this is simpler and safer. */
6925 do_pending_stack_adjust ();
6926 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6927 return const0_rtx;
6928
6929 case EXIT_BLOCK_EXPR:
6930 if (EXIT_BLOCK_RETURN (exp))
6931 sorry ("returned value in block_exit_expr");
6932 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6933 return const0_rtx;
6934
6935 case LOOP_EXPR:
6936 push_temp_slots ();
6937 expand_start_loop (1);
6938 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6939 expand_end_loop ();
6940 pop_temp_slots ();
6941
6942 return const0_rtx;
6943
6944 case BIND_EXPR:
6945 {
6946 tree vars = TREE_OPERAND (exp, 0);
6947 int vars_need_expansion = 0;
6948
6949 /* Need to open a binding contour here because
6950 if there are any cleanups they must be contained here. */
6951 expand_start_bindings (2);
6952
6953 /* Mark the corresponding BLOCK for output in its proper place. */
6954 if (TREE_OPERAND (exp, 2) != 0
6955 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6956 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6957
6958 /* If VARS have not yet been expanded, expand them now. */
6959 while (vars)
6960 {
6961 if (!DECL_RTL_SET_P (vars))
6962 {
6963 vars_need_expansion = 1;
6964 expand_decl (vars);
6965 }
6966 expand_decl_init (vars);
6967 vars = TREE_CHAIN (vars);
6968 }
6969
6970 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6971
6972 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6973
6974 return temp;
6975 }
6976
6977 case RTL_EXPR:
6978 if (RTL_EXPR_SEQUENCE (exp))
6979 {
6980 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6981 abort ();
6982 emit_insn (RTL_EXPR_SEQUENCE (exp));
6983 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6984 }
6985 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6986 free_temps_for_rtl_expr (exp);
6987 return RTL_EXPR_RTL (exp);
6988
6989 case CONSTRUCTOR:
6990 /* If we don't need the result, just ensure we evaluate any
6991 subexpressions. */
6992 if (ignore)
6993 {
6994 tree elt;
6995
6996 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6997 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6998
6999 return const0_rtx;
7000 }
7001
7002 /* All elts simple constants => refer to a constant in memory. But
7003 if this is a non-BLKmode mode, let it store a field at a time
7004 since that should make a CONST_INT or CONST_DOUBLE when we
7005 fold. Likewise, if we have a target we can use, it is best to
7006 store directly into the target unless the type is large enough
7007 that memcpy will be used. If we are making an initializer and
7008 all operands are constant, put it in memory as well.
7009
7010 FIXME: Avoid trying to fill vector constructors piecemeal.
7011 Output them with output_constant_def below unless we're sure
7012 they're zeros. This should go away when vector initializers
7013 are treated like VECTOR_CST instead of arrays.
7014 */
7015 else if ((TREE_STATIC (exp)
7016 && ((mode == BLKmode
7017 && ! (target != 0 && safe_from_p (target, exp, 1)))
7018 || TREE_ADDRESSABLE (exp)
7019 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7020 && (! MOVE_BY_PIECES_P
7021 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7022 TYPE_ALIGN (type)))
7023 && ((TREE_CODE (type) == VECTOR_TYPE
7024 && !is_zeros_p (exp))
7025 || ! mostly_zeros_p (exp)))))
7026 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
7027 {
7028 rtx constructor = output_constant_def (exp, 1);
7029
7030 if (modifier != EXPAND_CONST_ADDRESS
7031 && modifier != EXPAND_INITIALIZER
7032 && modifier != EXPAND_SUM)
7033 constructor = validize_mem (constructor);
7034
7035 return constructor;
7036 }
7037 else
7038 {
7039 /* Handle calls that pass values in multiple non-contiguous
7040 locations. The Irix 6 ABI has examples of this. */
7041 if (target == 0 || ! safe_from_p (target, exp, 1)
7042 || GET_CODE (target) == PARALLEL)
7043 target
7044 = assign_temp (build_qualified_type (type,
7045 (TYPE_QUALS (type)
7046 | (TREE_READONLY (exp)
7047 * TYPE_QUAL_CONST))),
7048 0, TREE_ADDRESSABLE (exp), 1);
7049
7050 store_constructor (exp, target, 0, int_expr_size (exp));
7051 return target;
7052 }
7053
7054 case INDIRECT_REF:
7055 {
7056 tree exp1 = TREE_OPERAND (exp, 0);
7057 tree index;
7058 tree string = string_constant (exp1, &index);
7059
7060 /* Try to optimize reads from const strings. */
7061 if (string
7062 && TREE_CODE (string) == STRING_CST
7063 && TREE_CODE (index) == INTEGER_CST
7064 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7065 && GET_MODE_CLASS (mode) == MODE_INT
7066 && GET_MODE_SIZE (mode) == 1
7067 && modifier != EXPAND_WRITE)
7068 return gen_int_mode (TREE_STRING_POINTER (string)
7069 [TREE_INT_CST_LOW (index)], mode);
7070
7071 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7072 op0 = memory_address (mode, op0);
7073 temp = gen_rtx_MEM (mode, op0);
7074 set_mem_attributes (temp, exp, 0);
7075
7076 /* If we are writing to this object and its type is a record with
7077 readonly fields, we must mark it as readonly so it will
7078 conflict with readonly references to those fields. */
7079 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7080 RTX_UNCHANGING_P (temp) = 1;
7081
7082 return temp;
7083 }
7084
7085 case ARRAY_REF:
7086 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7087 abort ();
7088
7089 {
7090 tree array = TREE_OPERAND (exp, 0);
7091 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7092 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7093 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7094 HOST_WIDE_INT i;
7095
7096 /* Optimize the special-case of a zero lower bound.
7097
7098 We convert the low_bound to sizetype to avoid some problems
7099 with constant folding. (E.g. suppose the lower bound is 1,
7100 and its mode is QI. Without the conversion, (ARRAY
7101 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7102 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7103
7104 if (! integer_zerop (low_bound))
7105 index = size_diffop (index, convert (sizetype, low_bound));
7106
7107 /* Fold an expression like: "foo"[2].
7108 This is not done in fold so it won't happen inside &.
7109 Don't fold if this is for wide characters since it's too
7110 difficult to do correctly and this is a very rare case. */
7111
7112 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7113 && TREE_CODE (array) == STRING_CST
7114 && TREE_CODE (index) == INTEGER_CST
7115 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7116 && GET_MODE_CLASS (mode) == MODE_INT
7117 && GET_MODE_SIZE (mode) == 1)
7118 return gen_int_mode (TREE_STRING_POINTER (array)
7119 [TREE_INT_CST_LOW (index)], mode);
7120
7121 /* If this is a constant index into a constant array,
7122 just get the value from the array. Handle both the cases when
7123 we have an explicit constructor and when our operand is a variable
7124 that was declared const. */
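/* For example, when the operand is a variable declared along the lines of
   static const int tbl[3] = {1, 2, 3} (the name is purely illustrative),
   a reference such as tbl[1] can be expanded directly to the constant 2
   instead of a load from memory. */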
7125
7126 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7127 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
7128 && TREE_CODE (index) == INTEGER_CST
7129 && 0 > compare_tree_int (index,
7130 list_length (CONSTRUCTOR_ELTS
7131 (TREE_OPERAND (exp, 0)))))
7132 {
7133 tree elem;
7134
7135 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7136 i = TREE_INT_CST_LOW (index);
7137 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7138 ;
7139
7140 if (elem)
7141 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7142 modifier);
7143 }
7144
7145 else if (optimize >= 1
7146 && modifier != EXPAND_CONST_ADDRESS
7147 && modifier != EXPAND_INITIALIZER
7148 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7149 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7150 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7151 {
7152 if (TREE_CODE (index) == INTEGER_CST)
7153 {
7154 tree init = DECL_INITIAL (array);
7155
7156 if (TREE_CODE (init) == CONSTRUCTOR)
7157 {
7158 tree elem;
7159
7160 for (elem = CONSTRUCTOR_ELTS (init);
7161 (elem
7162 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7163 elem = TREE_CHAIN (elem))
7164 ;
7165
7166 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7167 return expand_expr (fold (TREE_VALUE (elem)), target,
7168 tmode, modifier);
7169 }
7170 else if (TREE_CODE (init) == STRING_CST
7171 && 0 > compare_tree_int (index,
7172 TREE_STRING_LENGTH (init)))
7173 {
7174 tree type = TREE_TYPE (TREE_TYPE (init));
7175 enum machine_mode mode = TYPE_MODE (type);
7176
7177 if (GET_MODE_CLASS (mode) == MODE_INT
7178 && GET_MODE_SIZE (mode) == 1)
7179 return gen_int_mode (TREE_STRING_POINTER (init)
7180 [TREE_INT_CST_LOW (index)], mode);
7181 }
7182 }
7183 }
7184 }
7185 /* Fall through. */
7186
7187 case COMPONENT_REF:
7188 case BIT_FIELD_REF:
7189 case ARRAY_RANGE_REF:
7190 /* If the operand is a CONSTRUCTOR, we can just extract the
7191 appropriate field if it is present. Don't do this if we have
7192 already written the data since we want to refer to that copy
7193 and varasm.c assumes that's what we'll do. */
7194 if (code == COMPONENT_REF
7195 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
7196 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
7197 {
7198 tree elt;
7199
7200 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7201 elt = TREE_CHAIN (elt))
7202 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7203 /* We can normally use the value of the field in the
7204 CONSTRUCTOR. However, if this is a bitfield in
7205 an integral mode that we can fit in a HOST_WIDE_INT,
7206 we must mask only the number of bits in the bitfield,
7207 since this is done implicitly by the constructor. If
7208 the bitfield does not meet either of those conditions,
7209 we can't do this optimization. */
7210 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7211 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7212 == MODE_INT)
7213 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7214 <= HOST_BITS_PER_WIDE_INT))))
7215 {
7216 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7217 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7218 {
7219 HOST_WIDE_INT bitsize
7220 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7221 enum machine_mode imode
7222 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7223
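/* Illustration: an unsigned 3-bit field is masked with (1 << 3) - 1 = 7;
   a signed 3-bit field held in, say, SImode is shifted left by 32 - 3 = 29
   and then arithmetically shifted right by 29 to sign-extend it. */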
7224 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7225 {
7226 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7227 op0 = expand_and (imode, op0, op1, target);
7228 }
7229 else
7230 {
7231 tree count
7232 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7233 0);
7234
7235 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7236 target, 0);
7237 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7238 target, 0);
7239 }
7240 }
7241
7242 return op0;
7243 }
7244 }
7245
7246 {
7247 enum machine_mode mode1;
7248 HOST_WIDE_INT bitsize, bitpos;
7249 tree offset;
7250 int volatilep = 0;
7251 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7252 &mode1, &unsignedp, &volatilep);
7253 rtx orig_op0;
7254
7255 /* If we got back the original object, something is wrong. Perhaps
7256 we are evaluating an expression too early. In any event, don't
7257 infinitely recurse. */
7258 if (tem == exp)
7259 abort ();
7260
7261 /* If TEM's type is a union of variable size, pass TARGET to the inner
7262 computation, since it will need a temporary and TARGET is known
7263 to be able to hold it. This occurs in unchecked conversion in Ada. */
7264
7265 orig_op0 = op0
7266 = expand_expr (tem,
7267 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7268 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7269 != INTEGER_CST)
7270 ? target : NULL_RTX),
7271 VOIDmode,
7272 (modifier == EXPAND_INITIALIZER
7273 || modifier == EXPAND_CONST_ADDRESS)
7274 ? modifier : EXPAND_NORMAL);
7275
7276 /* If this is a constant, put it into a register if it is a
7277 legitimate constant and OFFSET is 0, or into memory if it isn't. */
7278 if (CONSTANT_P (op0))
7279 {
7280 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7281 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7282 && offset == 0)
7283 op0 = force_reg (mode, op0);
7284 else
7285 op0 = validize_mem (force_const_mem (mode, op0));
7286 }
7287
7288 if (offset != 0)
7289 {
7290 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
7291
7292 /* If this object is in a register, put it into memory.
7293 This case can't occur in C, but can in Ada if we have
7294 unchecked conversion of an expression from a scalar type to
7295 an array or record type. */
7296 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7297 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7298 {
7299 /* If the operand is a SAVE_EXPR, we can deal with this by
7300 forcing the SAVE_EXPR into memory. */
7301 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7302 {
7303 put_var_into_stack (TREE_OPERAND (exp, 0));
7304 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7305 }
7306 else
7307 {
7308 tree nt
7309 = build_qualified_type (TREE_TYPE (tem),
7310 (TYPE_QUALS (TREE_TYPE (tem))
7311 | TYPE_QUAL_CONST));
7312 rtx memloc = assign_temp (nt, 1, 1, 1);
7313
7314 emit_move_insn (memloc, op0);
7315 op0 = memloc;
7316 }
7317 }
7318
7319 if (GET_CODE (op0) != MEM)
7320 abort ();
7321
7322 #ifdef POINTERS_EXTEND_UNSIGNED
7323 if (GET_MODE (offset_rtx) != Pmode)
7324 offset_rtx = convert_memory_address (Pmode, offset_rtx);
7325 #else
7326 if (GET_MODE (offset_rtx) != ptr_mode)
7327 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7328 #endif
7329
7330 /* A constant address in OP0 can have VOIDmode; we must not try
7331 to call force_reg in that case. */
7332 if (GET_CODE (op0) == MEM
7333 && GET_MODE (op0) == BLKmode
7334 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7335 && bitsize != 0
7336 && (bitpos % bitsize) == 0
7337 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7338 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7339 {
7340 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7341 bitpos = 0;
7342 }
7343
7344 op0 = offset_address (op0, offset_rtx,
7345 highest_pow2_factor (offset));
7346 }
7347
7348 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7349 record its alignment as BIGGEST_ALIGNMENT. */
7350 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7351 && is_aligning_offset (offset, tem))
7352 set_mem_align (op0, BIGGEST_ALIGNMENT);
7353
7354 /* Don't forget about volatility even if this is a bitfield. */
7355 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7356 {
7357 if (op0 == orig_op0)
7358 op0 = copy_rtx (op0);
7359
7360 MEM_VOLATILE_P (op0) = 1;
7361 }
7362
7363 /* The following code doesn't handle CONCAT.
7364 Assume only bitpos == 0 can be used for CONCAT, due to
7365 one-element arrays having the same mode as their element. */
7366 if (GET_CODE (op0) == CONCAT)
7367 {
7368 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7369 abort ();
7370 return op0;
7371 }
7372
7373 /* In cases where an aligned union has an unaligned object
7374 as a field, we might be extracting a BLKmode value from
7375 an integer-mode (e.g., SImode) object. Handle this case
7376 by doing the extract into an object as wide as the field
7377 (which we know to be the width of a basic mode), then
7378 storing into memory, and changing the mode to BLKmode. */
7379 if (mode1 == VOIDmode
7380 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7381 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7382 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7383 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7384 && modifier != EXPAND_CONST_ADDRESS
7385 && modifier != EXPAND_INITIALIZER)
7386 /* If the field isn't aligned enough to fetch as a memref,
7387 fetch it as a bit field. */
7388 || (mode1 != BLKmode
7389 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7390 && ((TYPE_ALIGN (TREE_TYPE (tem))
7391 < GET_MODE_ALIGNMENT (mode))
7392 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7393 /* If the type and the field are a constant size and the
7394 size of the type isn't the same size as the bitfield,
7395 we must use bitfield operations. */
7396 || (bitsize >= 0
7397 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7398 == INTEGER_CST)
7399 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7400 bitsize)))
7401 {
7402 enum machine_mode ext_mode = mode;
7403
7404 if (ext_mode == BLKmode
7405 && ! (target != 0 && GET_CODE (op0) == MEM
7406 && GET_CODE (target) == MEM
7407 && bitpos % BITS_PER_UNIT == 0))
7408 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7409
7410 if (ext_mode == BLKmode)
7411 {
7412 /* In this case, BITPOS must start at a byte boundary and
7413 TARGET, if specified, must be a MEM. */
7414 if (GET_CODE (op0) != MEM
7415 || (target != 0 && GET_CODE (target) != MEM)
7416 || bitpos % BITS_PER_UNIT != 0)
7417 abort ();
7418
7419 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7420 if (target == 0)
7421 target = assign_temp (type, 0, 1, 1);
7422
7423 emit_block_move (target, op0,
7424 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7425 / BITS_PER_UNIT),
7426 BLOCK_OP_NORMAL);
7427
7428 return target;
7429 }
7430
7431 op0 = validize_mem (op0);
7432
7433 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7434 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7435
7436 op0 = extract_bit_field (op0, bitsize, bitpos,
7437 unsignedp, target, ext_mode, ext_mode,
7438 int_size_in_bytes (TREE_TYPE (tem)));
7439
7440 /* If the result is a record type and BITSIZE is narrower than
7441 the mode of OP0, an integral mode, and this is a big endian
7442 machine, we must put the field into the high-order bits. */
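/* For instance, a 3-bit field extracted into a 32-bit integer mode on a
   big-endian target is shifted left by 32 - 3 = 29 so that it ends up in
   the high-order bits, where record values are expected to live. */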
7443 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7444 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7445 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7446 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7447 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7448 - bitsize),
7449 op0, 1);
7450
7451 if (mode == BLKmode)
7452 {
7453 rtx new = assign_temp (build_qualified_type
7454 ((*lang_hooks.types.type_for_mode)
7455 (ext_mode, 0),
7456 TYPE_QUAL_CONST), 0, 1, 1);
7457
7458 emit_move_insn (new, op0);
7459 op0 = copy_rtx (new);
7460 PUT_MODE (op0, BLKmode);
7461 set_mem_attributes (op0, exp, 1);
7462 }
7463
7464 return op0;
7465 }
7466
7467 /* If the result is BLKmode, use that to access the object
7468 now as well. */
7469 if (mode == BLKmode)
7470 mode1 = BLKmode;
7471
7472 /* Get a reference to just this component. */
7473 if (modifier == EXPAND_CONST_ADDRESS
7474 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7475 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7476 else
7477 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7478
7479 if (op0 == orig_op0)
7480 op0 = copy_rtx (op0);
7481
7482 set_mem_attributes (op0, exp, 0);
7483 if (GET_CODE (XEXP (op0, 0)) == REG)
7484 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7485
7486 MEM_VOLATILE_P (op0) |= volatilep;
7487 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7488 || modifier == EXPAND_CONST_ADDRESS
7489 || modifier == EXPAND_INITIALIZER)
7490 return op0;
7491 else if (target == 0)
7492 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7493
7494 convert_move (target, op0, unsignedp);
7495 return target;
7496 }
7497
7498 case VTABLE_REF:
7499 {
7500 rtx insn, before = get_last_insn (), vtbl_ref;
7501
7502 /* Evaluate the interior expression. */
7503 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7504 tmode, modifier);
7505
7506 /* Get or create an instruction off which to hang a note. */
7507 if (REG_P (subtarget))
7508 {
7509 target = subtarget;
7510 insn = get_last_insn ();
7511 if (insn == before)
7512 abort ();
7513 if (! INSN_P (insn))
7514 insn = prev_nonnote_insn (insn);
7515 }
7516 else
7517 {
7518 target = gen_reg_rtx (GET_MODE (subtarget));
7519 insn = emit_move_insn (target, subtarget);
7520 }
7521
7522 /* Collect the data for the note. */
7523 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7524 vtbl_ref = plus_constant (vtbl_ref,
7525 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7526 /* Discard the initial CONST that was added. */
7527 vtbl_ref = XEXP (vtbl_ref, 0);
7528
7529 REG_NOTES (insn)
7530 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7531
7532 return target;
7533 }
7534
7535 /* Intended for a reference to a buffer of a file-object in Pascal.
7536 But it's not certain that a special tree code will really be
7537 necessary for these. INDIRECT_REF might work for them. */
7538 case BUFFER_REF:
7539 abort ();
7540
7541 case IN_EXPR:
7542 {
7543 /* Pascal set IN expression.
7544
7545 Algorithm:
7546 rlo = set_low - (set_low%bits_per_word);
7547 the_word = set [ (index - rlo)/bits_per_word ];
7548 bit_index = index % bits_per_word;
7549 bitmask = 1 << bit_index;
7550 return !!(the_word & bitmask); */
7551
7552 tree set = TREE_OPERAND (exp, 0);
7553 tree index = TREE_OPERAND (exp, 1);
7554 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7555 tree set_type = TREE_TYPE (set);
7556 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7557 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7558 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7559 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7560 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7561 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7562 rtx setaddr = XEXP (setval, 0);
7563 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7564 rtx rlow;
7565 rtx diff, quo, rem, addr, bit, result;
7566
7567 /* If domain is empty, answer is no. Likewise if index is constant
7568 and out of bounds. */
7569 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7570 && TREE_CODE (set_low_bound) == INTEGER_CST
7571 && tree_int_cst_lt (set_high_bound, set_low_bound))
7572 || (TREE_CODE (index) == INTEGER_CST
7573 && TREE_CODE (set_low_bound) == INTEGER_CST
7574 && tree_int_cst_lt (index, set_low_bound))
7575 || (TREE_CODE (set_high_bound) == INTEGER_CST
7576 && TREE_CODE (index) == INTEGER_CST
7577 && tree_int_cst_lt (set_high_bound, index))))
7578 return const0_rtx;
7579
7580 if (target == 0)
7581 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7582
7583 /* If we get here, we have to generate the code for both cases
7584 (in range and out of range). */
7585
7586 op0 = gen_label_rtx ();
7587 op1 = gen_label_rtx ();
7588
7589 if (! (GET_CODE (index_val) == CONST_INT
7590 && GET_CODE (lo_r) == CONST_INT))
7591 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7592 GET_MODE (index_val), iunsignedp, op1);
7593
7594 if (! (GET_CODE (index_val) == CONST_INT
7595 && GET_CODE (hi_r) == CONST_INT))
7596 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7597 GET_MODE (index_val), iunsignedp, op1);
7598
7599 /* Calculate the element number of bit zero in the first word
7600 of the set. */
7601 if (GET_CODE (lo_r) == CONST_INT)
7602 rlow = GEN_INT (INTVAL (lo_r)
7603 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7604 else
7605 rlow = expand_binop (index_mode, and_optab, lo_r,
7606 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7607 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7608
7609 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7610 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7611
7612 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7613 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7614 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7615 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7616
7617 addr = memory_address (byte_mode,
7618 expand_binop (index_mode, add_optab, diff,
7619 setaddr, NULL_RTX, iunsignedp,
7620 OPTAB_LIB_WIDEN));
7621
7622 /* Extract the bit we want to examine. */
7623 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7624 gen_rtx_MEM (byte_mode, addr),
7625 make_tree (TREE_TYPE (index), rem),
7626 NULL_RTX, 1);
7627 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7628 GET_MODE (target) == byte_mode ? target : 0,
7629 1, OPTAB_LIB_WIDEN);
7630
7631 if (result != target)
7632 convert_move (target, result, 1);
7633
7634 /* Output the code to handle the out-of-range case. */
7635 emit_jump (op0);
7636 emit_label (op1);
7637 emit_move_insn (target, const0_rtx);
7638 emit_label (op0);
7639 return target;
7640 }
7641
7642 case WITH_CLEANUP_EXPR:
7643 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7644 {
7645 WITH_CLEANUP_EXPR_RTL (exp)
7646 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7647 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7648 CLEANUP_EH_ONLY (exp));
7649
7650 /* That's it for this cleanup. */
7651 TREE_OPERAND (exp, 1) = 0;
7652 }
7653 return WITH_CLEANUP_EXPR_RTL (exp);
7654
7655 case CLEANUP_POINT_EXPR:
7656 {
7657 /* Start a new binding layer that will keep track of all cleanup
7658 actions to be performed. */
7659 expand_start_bindings (2);
7660
7661 target_temp_slot_level = temp_slot_level;
7662
7663 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7664 /* If we're going to use this value, load it up now. */
7665 if (! ignore)
7666 op0 = force_not_mem (op0);
7667 preserve_temp_slots (op0);
7668 expand_end_bindings (NULL_TREE, 0, 0);
7669 }
7670 return op0;
7671
7672 case CALL_EXPR:
7673 /* Check for a built-in function. */
7674 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7675 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7676 == FUNCTION_DECL)
7677 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7678 {
7679 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7680 == BUILT_IN_FRONTEND)
7681 return (*lang_hooks.expand_expr)
7682 (exp, original_target, tmode, modifier);
7683 else
7684 return expand_builtin (exp, target, subtarget, tmode, ignore);
7685 }
7686
7687 return expand_call (exp, target, ignore);
7688
7689 case NON_LVALUE_EXPR:
7690 case NOP_EXPR:
7691 case CONVERT_EXPR:
7692 case REFERENCE_EXPR:
7693 if (TREE_OPERAND (exp, 0) == error_mark_node)
7694 return const0_rtx;
7695
7696 if (TREE_CODE (type) == UNION_TYPE)
7697 {
7698 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7699
7700 /* If both input and output are BLKmode, this conversion isn't doing
7701 anything except possibly changing memory attribute. */
7702 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7703 {
7704 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7705 modifier);
7706
7707 result = copy_rtx (result);
7708 set_mem_attributes (result, exp, 0);
7709 return result;
7710 }
7711
7712 if (target == 0)
7713 target = assign_temp (type, 0, 1, 1);
7714
7715 if (GET_CODE (target) == MEM)
7716 /* Store data into beginning of memory target. */
7717 store_expr (TREE_OPERAND (exp, 0),
7718 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7719
7720 else if (GET_CODE (target) == REG)
7721 /* Store this field into a union of the proper type. */
7722 store_field (target,
7723 MIN ((int_size_in_bytes (TREE_TYPE
7724 (TREE_OPERAND (exp, 0)))
7725 * BITS_PER_UNIT),
7726 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7727 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7728 VOIDmode, 0, type, 0);
7729 else
7730 abort ();
7731
7732 /* Return the entire union. */
7733 return target;
7734 }
7735
7736 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7737 {
7738 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7739 modifier);
7740
7741 /* If the signedness of the conversion differs and OP0 is
7742 a promoted SUBREG, clear that indication since we now
7743 have to do the proper extension. */
7744 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7745 && GET_CODE (op0) == SUBREG)
7746 SUBREG_PROMOTED_VAR_P (op0) = 0;
7747
7748 return op0;
7749 }
7750
7751 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7752 if (GET_MODE (op0) == mode)
7753 return op0;
7754
7755 /* If OP0 is a constant, just convert it into the proper mode. */
7756 if (CONSTANT_P (op0))
7757 {
7758 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7759 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7760
7761 if (modifier == EXPAND_INITIALIZER)
7762 return simplify_gen_subreg (mode, op0, inner_mode,
7763 subreg_lowpart_offset (mode,
7764 inner_mode));
7765 else
7766 return convert_modes (mode, inner_mode, op0,
7767 TREE_UNSIGNED (inner_type));
7768 }
7769
7770 if (modifier == EXPAND_INITIALIZER)
7771 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7772
7773 if (target == 0)
7774 return
7775 convert_to_mode (mode, op0,
7776 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7777 else
7778 convert_move (target, op0,
7779 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7780 return target;
7781
7782 case VIEW_CONVERT_EXPR:
7783 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7784
7785 /* If the input and output modes are both the same, we are done.
7786 Otherwise, if neither mode is BLKmode and both are within a word, we
7787 can use gen_lowpart. If neither is true, make sure the operand is
7788 in memory and convert the MEM to the new mode. */
7789 if (TYPE_MODE (type) == GET_MODE (op0))
7790 ;
7791 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7792 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7793 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7794 op0 = gen_lowpart (TYPE_MODE (type), op0);
7795 else if (GET_CODE (op0) != MEM)
7796 {
7797 /* If the operand is not a MEM, force it into memory. Since we
7798 are going to be changing the mode of the MEM, don't call
7799 force_const_mem for constants because we don't allow pool
7800 constants to change mode. */
7801 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7802
7803 if (TREE_ADDRESSABLE (exp))
7804 abort ();
7805
7806 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7807 target
7808 = assign_stack_temp_for_type
7809 (TYPE_MODE (inner_type),
7810 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7811
7812 emit_move_insn (target, op0);
7813 op0 = target;
7814 }
7815
7816 /* At this point, OP0 is in the correct mode. If the output type is such
7817 that the operand is known to be aligned, indicate that it is.
7818 Otherwise, we need only be concerned about alignment for non-BLKmode
7819 results. */
7820 if (GET_CODE (op0) == MEM)
7821 {
7822 op0 = copy_rtx (op0);
7823
7824 if (TYPE_ALIGN_OK (type))
7825 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7826 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7827 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7828 {
7829 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7830 HOST_WIDE_INT temp_size
7831 = MAX (int_size_in_bytes (inner_type),
7832 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7833 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7834 temp_size, 0, type);
7835 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7836
7837 if (TREE_ADDRESSABLE (exp))
7838 abort ();
7839
7840 if (GET_MODE (op0) == BLKmode)
7841 emit_block_move (new_with_op0_mode, op0,
7842 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7843 BLOCK_OP_NORMAL);
7844 else
7845 emit_move_insn (new_with_op0_mode, op0);
7846
7847 op0 = new;
7848 }
7849
7850 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7851 }
7852
7853 return op0;
7854
7855 case PLUS_EXPR:
7856 this_optab = ! unsignedp && flag_trapv
7857 && (GET_MODE_CLASS (mode) == MODE_INT)
7858 ? addv_optab : add_optab;
7859
7860 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7861 something else, make sure we add the register to the constant and
7862 then to the other thing. This case can occur during strength
7863 reduction and doing it this way will produce better code if the
7864 frame pointer or argument pointer is eliminated.
7865
7866 fold-const.c will ensure that the constant is always in the inner
7867 PLUS_EXPR, so the only case we need to do anything about is if
7868 sp, ap, or fp is our second argument, in which case we must swap
7869 the innermost first argument and our second argument. */
7870
7871 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7872 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7873 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7874 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7875 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7876 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7877 {
7878 tree t = TREE_OPERAND (exp, 1);
7879
7880 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7881 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7882 }
7883
7884 /* If the result is to be ptr_mode and we are adding an integer to
7885 something, we might be forming a constant. So try to use
7886 plus_constant. If it produces a sum and we can't accept it,
7887 use force_operand. This allows P = &ARR[const] to generate
7888 efficient code on machines where a SYMBOL_REF is not a valid
7889 address.
7890
7891 If this is an EXPAND_SUM call, always return the sum. */
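/* For instance, when expanding &ARR[const] the constant byte offset can
   often be folded into the address here, yielding RTL of the form
   (const (plus (symbol_ref ARR) offset)) rather than an explicit add. */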
7892 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7893 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7894 {
7895 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7896 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7897 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7898 {
7899 rtx constant_part;
7900
7901 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7902 EXPAND_SUM);
7903 /* Use immed_double_const to ensure that the constant is
7904 truncated according to the mode of OP1, then sign extended
7905 to a HOST_WIDE_INT. Using the constant directly can result
7906 in non-canonical RTL in a 64x32 cross compile. */
7907 constant_part
7908 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7909 (HOST_WIDE_INT) 0,
7910 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7911 op1 = plus_constant (op1, INTVAL (constant_part));
7912 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7913 op1 = force_operand (op1, target);
7914 return op1;
7915 }
7916
7917 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7918 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7919 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7920 {
7921 rtx constant_part;
7922
7923 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7924 (modifier == EXPAND_INITIALIZER
7925 ? EXPAND_INITIALIZER : EXPAND_SUM));
7926 if (! CONSTANT_P (op0))
7927 {
7928 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7929 VOIDmode, modifier);
7930 /* Don't go to both_summands if modifier
7931 says it's not right to return a PLUS. */
7932 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7933 goto binop2;
7934 goto both_summands;
7935 }
7936 /* Use immed_double_const to ensure that the constant is
7937 truncated according to the mode of OP0, then sign extended
7938 to a HOST_WIDE_INT. Using the constant directly can result
7939 in non-canonical RTL in a 64x32 cross compile. */
7940 constant_part
7941 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7942 (HOST_WIDE_INT) 0,
7943 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7944 op0 = plus_constant (op0, INTVAL (constant_part));
7945 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7946 op0 = force_operand (op0, target);
7947 return op0;
7948 }
7949 }
7950
7951 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7952 subtarget = 0;
7953
7954 /* No sense saving up arithmetic to be done
7955 if it's all in the wrong mode to form part of an address.
7956 And force_operand won't know whether to sign-extend or
7957 zero-extend. */
7958 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7959 || mode != ptr_mode)
7960 {
7961 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7962 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7963 if (op0 == const0_rtx)
7964 return op1;
7965 if (op1 == const0_rtx)
7966 return op0;
7967 goto binop2;
7968 }
7969
7970 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7971 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
7972
7973 /* We come here from MINUS_EXPR when the second operand is a
7974 constant. */
7975 both_summands:
7976 /* Make sure any term that's a sum with a constant comes last. */
7977 if (GET_CODE (op0) == PLUS
7978 && CONSTANT_P (XEXP (op0, 1)))
7979 {
7980 temp = op0;
7981 op0 = op1;
7982 op1 = temp;
7983 }
7984 /* If adding to a sum including a constant,
7985 associate it to put the constant outside. */
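/* For instance, adding R2 to (plus R1 12) is rearranged below into
   (plus (plus R1 R2) 12), keeping the constant outermost, which is the
   form address recognizers expect. */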
7986 if (GET_CODE (op1) == PLUS
7987 && CONSTANT_P (XEXP (op1, 1)))
7988 {
7989 rtx constant_term = const0_rtx;
7990
7991 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7992 if (temp != 0)
7993 op0 = temp;
7994 /* Ensure that MULT comes first if there is one. */
7995 else if (GET_CODE (op0) == MULT)
7996 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7997 else
7998 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7999
8000 /* Let's also eliminate constants from op0 if possible. */
8001 op0 = eliminate_constant_term (op0, &constant_term);
8002
8003 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
8004 their sum should be a constant. Form it into OP1, since the
8005 result we want will then be OP0 + OP1. */
8006
8007 temp = simplify_binary_operation (PLUS, mode, constant_term,
8008 XEXP (op1, 1));
8009 if (temp != 0)
8010 op1 = temp;
8011 else
8012 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
8013 }
8014
8015 /* Put a constant term last and put a multiplication first. */
8016 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
8017 temp = op1, op1 = op0, op0 = temp;
8018
8019 temp = simplify_binary_operation (PLUS, mode, op0, op1);
8020 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
8021
8022 case MINUS_EXPR:
8023 /* For initializers, we are allowed to return a MINUS of two
8024 symbolic constants. Here we handle all cases when both operands
8025 are constant. */
8028 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8029 && really_constant_p (TREE_OPERAND (exp, 0))
8030 && really_constant_p (TREE_OPERAND (exp, 1)))
8031 {
8032 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
8033 modifier);
8034 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8035 modifier);
8036
8037 /* If the last operand is a CONST_INT, use plus_constant of
8038 the negated constant. Else make the MINUS. */
8039 if (GET_CODE (op1) == CONST_INT)
8040 return plus_constant (op0, - INTVAL (op1));
8041 else
8042 return gen_rtx_MINUS (mode, op0, op1);
8043 }
8044
8045 this_optab = ! unsignedp && flag_trapv
8046 && (GET_MODE_CLASS(mode) == MODE_INT)
8047 ? subv_optab : sub_optab;
8048
8049 /* No sense saving up arithmetic to be done
8050 if it's all in the wrong mode to form part of an address.
8051 And force_operand won't know whether to sign-extend or
8052 zero-extend. */
8053 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8054 || mode != ptr_mode)
8055 goto binop;
8056
8057 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8058 subtarget = 0;
8059
8060 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8061 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8062
8063 /* Convert A - const to A + (-const). */
8064 if (GET_CODE (op1) == CONST_INT)
8065 {
8066 op1 = negate_rtx (mode, op1);
8067 goto both_summands;
8068 }
8069
8070 goto binop2;
8071
8072 case MULT_EXPR:
8073 /* If first operand is constant, swap them.
8074 Thus the following special case checks need only
8075 check the second operand. */
8076 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8077 {
8078 tree t1 = TREE_OPERAND (exp, 0);
8079 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8080 TREE_OPERAND (exp, 1) = t1;
8081 }
8082
8083 /* Attempt to return something suitable for generating an
8084 indexed address, for machines that support that. */
8085
8086 if (modifier == EXPAND_SUM && mode == ptr_mode
8087 && host_integerp (TREE_OPERAND (exp, 1), 0))
8088 {
8089 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8090 EXPAND_SUM);
8091
8092 /* If we knew for certain that this is arithmetic for an array
8093 reference, and we knew the bounds of the array, then we could
8094 apply the distributive law across (PLUS X C) for constant C.
8095 Without such knowledge, we risk overflowing the computation
8096 when both X and C are large, but X+C isn't. */
8097 /* ??? Could perhaps special-case EXP being unsigned and C being
8098 positive. In that case we are certain that X+C is no smaller
8099 than X and so the transformed expression will overflow iff the
8100 original would have. */
8101
8102 if (GET_CODE (op0) != REG)
8103 op0 = force_operand (op0, NULL_RTX);
8104 if (GET_CODE (op0) != REG)
8105 op0 = copy_to_mode_reg (mode, op0);
8106
8107 return
8108 gen_rtx_MULT (mode, op0,
8109 GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
8110 }
8111
8112 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8113 subtarget = 0;
8114
8115 /* Check for multiplying things that have been extended
8116 from a narrower type. If this machine supports multiplying
8117 in that narrower type with a result in the desired type,
8118 do it that way, and avoid the explicit type-conversion. */
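/* For example, on a target that provides a 16x16->32 widening multiply
   pattern (a mulhisi3-style insn), the product of two values extended
   from a 16-bit type can be computed directly from the narrow operands
   instead of first widening both of them. */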
8119 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8120 && TREE_CODE (type) == INTEGER_TYPE
8121 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8122 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8123 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8124 && int_fits_type_p (TREE_OPERAND (exp, 1),
8125 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8126 /* Don't use a widening multiply if a shift will do. */
8127 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8128 > HOST_BITS_PER_WIDE_INT)
8129 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8130 ||
8131 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8132 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8133 ==
8134 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8135 /* If both operands are extended, they must either both
8136 be zero-extended or both be sign-extended. */
8137 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8138 ==
8139 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8140 {
8141 enum machine_mode innermode
8142 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8143 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8144 ? smul_widen_optab : umul_widen_optab);
8145 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8146 ? umul_widen_optab : smul_widen_optab);
8147 if (mode == GET_MODE_WIDER_MODE (innermode))
8148 {
8149 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8150 {
8151 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8152 NULL_RTX, VOIDmode, 0);
8153 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8154 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8155 VOIDmode, 0);
8156 else
8157 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8158 NULL_RTX, VOIDmode, 0);
8159 goto binop2;
8160 }
8161 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8162 && innermode == word_mode)
8163 {
8164 rtx htem;
8165 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8166 NULL_RTX, VOIDmode, 0);
8167 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8168 op1 = convert_modes (innermode, mode,
8169 expand_expr (TREE_OPERAND (exp, 1),
8170 NULL_RTX, VOIDmode, 0),
8171 unsignedp);
8172 else
8173 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8174 NULL_RTX, VOIDmode, 0);
8175 temp = expand_binop (mode, other_optab, op0, op1, target,
8176 unsignedp, OPTAB_LIB_WIDEN);
8177 htem = expand_mult_highpart_adjust (innermode,
8178 gen_highpart (innermode, temp),
8179 op0, op1,
8180 gen_highpart (innermode, temp),
8181 unsignedp);
8182 emit_move_insn (gen_highpart (innermode, temp), htem);
8183 return temp;
8184 }
8185 }
8186 }
8187 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8188 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8189 return expand_mult (mode, op0, op1, target, unsignedp);
8190
8191 case TRUNC_DIV_EXPR:
8192 case FLOOR_DIV_EXPR:
8193 case CEIL_DIV_EXPR:
8194 case ROUND_DIV_EXPR:
8195 case EXACT_DIV_EXPR:
8196 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8197 subtarget = 0;
8198 /* Possible optimization: compute the dividend with EXPAND_SUM
8199 then, if the divisor is constant, we can optimize the case
8200 where some terms of the dividend have coefficients divisible by it. */
8201 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8202 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8203 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8204
8205 case RDIV_EXPR:
8206 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal,
8207 saving an expensive divide. If not, combine will rebuild the
8208 original computation. */
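/* For instance, if x/y and z/y both appear, rewriting them as x*(1/y)
   and z*(1/y) lets CSE share a single reciprocal, replacing two divides
   with one divide and two multiplies. */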
8209 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8210 && TREE_CODE (type) == REAL_TYPE
8211 && !real_onep (TREE_OPERAND (exp, 0)))
8212 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8213 build (RDIV_EXPR, type,
8214 build_real (type, dconst1),
8215 TREE_OPERAND (exp, 1))),
8216 target, tmode, unsignedp);
8217 this_optab = sdiv_optab;
8218 goto binop;
8219
8220 case TRUNC_MOD_EXPR:
8221 case FLOOR_MOD_EXPR:
8222 case CEIL_MOD_EXPR:
8223 case ROUND_MOD_EXPR:
8224 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8225 subtarget = 0;
8226 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8227 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8228 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8229
8230 case FIX_ROUND_EXPR:
8231 case FIX_FLOOR_EXPR:
8232 case FIX_CEIL_EXPR:
8233 abort (); /* Not used for C. */
8234
8235 case FIX_TRUNC_EXPR:
8236 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8237 if (target == 0)
8238 target = gen_reg_rtx (mode);
8239 expand_fix (target, op0, unsignedp);
8240 return target;
8241
8242 case FLOAT_EXPR:
8243 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8244 if (target == 0)
8245 target = gen_reg_rtx (mode);
8246 /* expand_float can't figure out what to do if FROM has VOIDmode.
8247 So give it the correct mode. With -O, cse will optimize this. */
8248 if (GET_MODE (op0) == VOIDmode)
8249 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8250 op0);
8251 expand_float (target, op0,
8252 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8253 return target;
8254
8255 case NEGATE_EXPR:
8256 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8257 temp = expand_unop (mode,
8258 ! unsignedp && flag_trapv
8259 && (GET_MODE_CLASS(mode) == MODE_INT)
8260 ? negv_optab : neg_optab, op0, target, 0);
8261 if (temp == 0)
8262 abort ();
8263 return temp;
8264
8265 case ABS_EXPR:
8266 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8267
8268 /* Handle complex values specially. */
8269 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8270 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8271 return expand_complex_abs (mode, op0, target, unsignedp);
8272
8273 /* Unsigned abs is simply the operand. Testing here means we don't
8274 risk generating incorrect code below. */
8275 if (TREE_UNSIGNED (type))
8276 return op0;
8277
8278 return expand_abs (mode, op0, target, unsignedp,
8279 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8280
8281 case MAX_EXPR:
8282 case MIN_EXPR:
8283 target = original_target;
8284 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8285 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8286 || GET_MODE (target) != mode
8287 || (GET_CODE (target) == REG
8288 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8289 target = gen_reg_rtx (mode);
8290 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8291 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8292
8293 /* First try to do it with a special MIN or MAX instruction.
8294 If that does not win, use a conditional jump to select the proper
8295 value. */
8296 this_optab = (TREE_UNSIGNED (type)
8297 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8298 : (code == MIN_EXPR ? smin_optab : smax_optab));
8299
8300 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8301 OPTAB_WIDEN);
8302 if (temp != 0)
8303 return temp;
8304
8305 /* At this point, a MEM target is no longer useful; we will get better
8306 code without it. */
8307
8308 if (GET_CODE (target) == MEM)
8309 target = gen_reg_rtx (mode);
8310
8311 if (target != op0)
8312 emit_move_insn (target, op0);
8313
8314 op0 = gen_label_rtx ();
8315
8316 /* If this mode is an integer too wide to compare properly,
8317 compare word by word. Rely on cse to optimize constant cases. */
8318 if (GET_MODE_CLASS (mode) == MODE_INT
8319 && ! can_compare_p (GE, mode, ccp_jump))
8320 {
8321 if (code == MAX_EXPR)
8322 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8323 target, op1, NULL_RTX, op0);
8324 else
8325 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8326 op1, target, NULL_RTX, op0);
8327 }
8328 else
8329 {
8330 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8331 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8332 unsignedp, mode, NULL_RTX, NULL_RTX,
8333 op0);
8334 }
8335 emit_move_insn (target, op1);
8336 emit_label (op0);
8337 return target;
8338
8339 case BIT_NOT_EXPR:
8340 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8341 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8342 if (temp == 0)
8343 abort ();
8344 return temp;
8345
8346 case FFS_EXPR:
8347 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8348 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8349 if (temp == 0)
8350 abort ();
8351 return temp;
8352
8353 /* ??? Can optimize bitwise operations with one arg constant.
8354 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8355 and (a bitwise1 b) bitwise2 b (etc)
8356 but that is probably not worthwhile. */
8357
8358 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8359 boolean values when we want in all cases to compute both of them. In
8360 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8361 as actual zero-or-1 values and then bitwise anding. In cases where
8362 there cannot be any side effects, better code would be made by
8363 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8364 how to recognize those cases. */
8365
8366 case TRUTH_AND_EXPR:
8367 case BIT_AND_EXPR:
8368 this_optab = and_optab;
8369 goto binop;
8370
8371 case TRUTH_OR_EXPR:
8372 case BIT_IOR_EXPR:
8373 this_optab = ior_optab;
8374 goto binop;
8375
8376 case TRUTH_XOR_EXPR:
8377 case BIT_XOR_EXPR:
8378 this_optab = xor_optab;
8379 goto binop;
8380
8381 case LSHIFT_EXPR:
8382 case RSHIFT_EXPR:
8383 case LROTATE_EXPR:
8384 case RROTATE_EXPR:
8385 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8386 subtarget = 0;
8387 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8388 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8389 unsignedp);
8390
8391 /* Could determine the answer when only additive constants differ. Also,
8392 the addition of one can be handled by changing the condition. */
8393 case LT_EXPR:
8394 case LE_EXPR:
8395 case GT_EXPR:
8396 case GE_EXPR:
8397 case EQ_EXPR:
8398 case NE_EXPR:
8399 case UNORDERED_EXPR:
8400 case ORDERED_EXPR:
8401 case UNLT_EXPR:
8402 case UNLE_EXPR:
8403 case UNGT_EXPR:
8404 case UNGE_EXPR:
8405 case UNEQ_EXPR:
8406 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8407 if (temp != 0)
8408 return temp;
8409
8410 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8411 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8412 && original_target
8413 && GET_CODE (original_target) == REG
8414 && (GET_MODE (original_target)
8415 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8416 {
8417 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8418 VOIDmode, 0);
8419
8420 /* If temp is constant, we can just compute the result. */
8421 if (GET_CODE (temp) == CONST_INT)
8422 {
8423 if (INTVAL (temp) != 0)
8424 emit_move_insn (target, const1_rtx);
8425 else
8426 emit_move_insn (target, const0_rtx);
8427
8428 return target;
8429 }
8430
8431 if (temp != original_target)
8432 {
8433 enum machine_mode mode1 = GET_MODE (temp);
8434 if (mode1 == VOIDmode)
8435 mode1 = tmode != VOIDmode ? tmode : mode;
8436
8437 temp = copy_to_mode_reg (mode1, temp);
8438 }
8439
8440 op1 = gen_label_rtx ();
8441 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8442 GET_MODE (temp), unsignedp, op1);
8443 emit_move_insn (temp, const1_rtx);
8444 emit_label (op1);
8445 return temp;
8446 }
8447
8448 /* If no set-flag instruction, must generate a conditional
8449 store into a temporary variable. Drop through
8450 and handle this like && and ||. */
8451
8452 case TRUTH_ANDIF_EXPR:
8453 case TRUTH_ORIF_EXPR:
8454 if (! ignore
8455 && (target == 0 || ! safe_from_p (target, exp, 1)
8456 /* Make sure we don't have a hard reg (such as function's return
8457 value) live across basic blocks, if not optimizing. */
8458 || (!optimize && GET_CODE (target) == REG
8459 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8460 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8461
8462 if (target)
8463 emit_clr_insn (target);
8464
8465 op1 = gen_label_rtx ();
8466 jumpifnot (exp, op1);
8467
8468 if (target)
8469 emit_0_to_1_insn (target);
8470
8471 emit_label (op1);
8472 return ignore ? const0_rtx : target;
8473
8474 case TRUTH_NOT_EXPR:
8475 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8476 /* The parser is careful to generate TRUTH_NOT_EXPR
8477 only with operands that are always zero or one. */
8478 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8479 target, 1, OPTAB_LIB_WIDEN);
8480 if (temp == 0)
8481 abort ();
8482 return temp;
8483
8484 case COMPOUND_EXPR:
8485 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8486 emit_queue ();
8487 return expand_expr (TREE_OPERAND (exp, 1),
8488 (ignore ? const0_rtx : target),
8489 VOIDmode, 0);
8490
8491 case COND_EXPR:
8492 /* If we would have a "singleton" (see below) were it not for a
8493 conversion in each arm, bring that conversion back out. */
8494 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8495 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8496 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8497 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8498 {
8499 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8500 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8501
8502 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8503 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8504 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8505 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8506 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8507 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8508 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8509 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8510 return expand_expr (build1 (NOP_EXPR, type,
8511 build (COND_EXPR, TREE_TYPE (iftrue),
8512 TREE_OPERAND (exp, 0),
8513 iftrue, iffalse)),
8514 target, tmode, modifier);
8515 }
8516
8517 {
8518 /* Note that COND_EXPRs whose type is a structure or union
8519 are required to be constructed to contain assignments of
8520 a temporary variable, so that we can evaluate them here
8521 for side effect only. If type is void, we must do likewise. */
8522
8523 /* If an arm of the branch requires a cleanup,
8524 only that cleanup is performed. */
8525
8526 tree singleton = 0;
8527 tree binary_op = 0, unary_op = 0;
8528
8529 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8530 convert it to our mode, if necessary. */
8531 if (integer_onep (TREE_OPERAND (exp, 1))
8532 && integer_zerop (TREE_OPERAND (exp, 2))
8533 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8534 {
8535 if (ignore)
8536 {
8537 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8538 modifier);
8539 return const0_rtx;
8540 }
8541
8542 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8543 if (GET_MODE (op0) == mode)
8544 return op0;
8545
8546 if (target == 0)
8547 target = gen_reg_rtx (mode);
8548 convert_move (target, op0, unsignedp);
8549 return target;
8550 }
8551
8552 /* Check for X ? A + B : A. If we have this, we can copy A to the
8553 output and conditionally add B. Similarly for unary operations.
8554 Don't do this if X has side-effects because those side effects
8555 might affect A or B and the "?" operation is a sequence point in
8556 ANSI. (operand_equal_p tests for side effects.) */
8557
8558 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8559 && operand_equal_p (TREE_OPERAND (exp, 2),
8560 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8561 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8562 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8563 && operand_equal_p (TREE_OPERAND (exp, 1),
8564 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8565 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8566 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8567 && operand_equal_p (TREE_OPERAND (exp, 2),
8568 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8569 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8570 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8571 && operand_equal_p (TREE_OPERAND (exp, 1),
8572 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8573 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8574
8575 /* If we are not to produce a result, we have no target. Otherwise,
8576 if a target was specified use it; it will not be used as an
8577 intermediate target unless it is safe. If no target, use a
8578 temporary. */
8579
8580 if (ignore)
8581 temp = 0;
8582 else if (original_target
8583 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8584 || (singleton && GET_CODE (original_target) == REG
8585 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8586 && original_target == var_rtx (singleton)))
8587 && GET_MODE (original_target) == mode
8588 #ifdef HAVE_conditional_move
8589 && (! can_conditionally_move_p (mode)
8590 || GET_CODE (original_target) == REG
8591 || TREE_ADDRESSABLE (type))
8592 #endif
8593 && (GET_CODE (original_target) != MEM
8594 || TREE_ADDRESSABLE (type)))
8595 temp = original_target;
8596 else if (TREE_ADDRESSABLE (type))
8597 abort ();
8598 else
8599 temp = assign_temp (type, 0, 0, 1);
8600
8601 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8602 do the test of X as a store-flag operation, do this as
8603 A + ((X != 0) << log C). Similarly for other simple binary
8604 operators. Only do for C == 1 if BRANCH_COST is low. */
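/* Concretely, X ? A + 4 : A can become A + ((X != 0) << 2), replacing the
   branch with a store-flag and a shift. */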
8605 if (temp && singleton && binary_op
8606 && (TREE_CODE (binary_op) == PLUS_EXPR
8607 || TREE_CODE (binary_op) == MINUS_EXPR
8608 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8609 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8610 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8611 : integer_onep (TREE_OPERAND (binary_op, 1)))
8612 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8613 {
8614 rtx result;
8615 tree cond;
8616 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8617 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8618 ? addv_optab : add_optab)
8619 : TREE_CODE (binary_op) == MINUS_EXPR
8620 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8621 ? subv_optab : sub_optab)
8622 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8623 : xor_optab);
8624
8625 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8626 if (singleton == TREE_OPERAND (exp, 1))
8627 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8628 else
8629 cond = TREE_OPERAND (exp, 0);
8630
8631 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8632 ? temp : NULL_RTX),
8633 mode, BRANCH_COST <= 1);
8634
8635 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8636 result = expand_shift (LSHIFT_EXPR, mode, result,
8637 build_int_2 (tree_log2
8638 (TREE_OPERAND
8639 (binary_op, 1)),
8640 0),
8641 (safe_from_p (temp, singleton, 1)
8642 ? temp : NULL_RTX), 0);
8643
8644 if (result)
8645 {
8646 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8647 return expand_binop (mode, boptab, op1, result, temp,
8648 unsignedp, OPTAB_LIB_WIDEN);
8649 }
8650 }
8651
8652 do_pending_stack_adjust ();
8653 NO_DEFER_POP;
8654 op0 = gen_label_rtx ();
8655
8656 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8657 {
8658 if (temp != 0)
8659 {
8660 /* If the target conflicts with the other operand of the
8661 binary op, we can't use it. Also, we can't use the target
8662 if it is a hard register, because evaluating the condition
8663 might clobber it. */
8664 if ((binary_op
8665 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8666 || (GET_CODE (temp) == REG
8667 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8668 temp = gen_reg_rtx (mode);
8669 store_expr (singleton, temp, 0);
8670 }
8671 else
8672 expand_expr (singleton,
8673 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8674 if (singleton == TREE_OPERAND (exp, 1))
8675 jumpif (TREE_OPERAND (exp, 0), op0);
8676 else
8677 jumpifnot (TREE_OPERAND (exp, 0), op0);
8678
8679 start_cleanup_deferral ();
8680 if (binary_op && temp == 0)
8681 /* Just touch the other operand. */
8682 expand_expr (TREE_OPERAND (binary_op, 1),
8683 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8684 else if (binary_op)
8685 store_expr (build (TREE_CODE (binary_op), type,
8686 make_tree (type, temp),
8687 TREE_OPERAND (binary_op, 1)),
8688 temp, 0);
8689 else
8690 store_expr (build1 (TREE_CODE (unary_op), type,
8691 make_tree (type, temp)),
8692 temp, 0);
8693 op1 = op0;
8694 }
8695 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8696 comparison operator. If we have one of these cases, set the
8697 output to A, branch on A (cse will merge these two references),
8698 then set the output to FOO. */
8699 else if (temp
8700 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8701 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8702 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8703 TREE_OPERAND (exp, 1), 0)
8704 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8705 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8706 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8707 {
8708 if (GET_CODE (temp) == REG
8709 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8710 temp = gen_reg_rtx (mode);
8711 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8712 jumpif (TREE_OPERAND (exp, 0), op0);
8713
8714 start_cleanup_deferral ();
8715 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8716 op1 = op0;
8717 }
8718 else if (temp
8719 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8720 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8721 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8722 TREE_OPERAND (exp, 2), 0)
8723 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8724 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8725 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8726 {
8727 if (GET_CODE (temp) == REG
8728 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8729 temp = gen_reg_rtx (mode);
8730 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8731 jumpifnot (TREE_OPERAND (exp, 0), op0);
8732
8733 start_cleanup_deferral ();
8734 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8735 op1 = op0;
8736 }
8737 else
8738 {
8739 op1 = gen_label_rtx ();
8740 jumpifnot (TREE_OPERAND (exp, 0), op0);
8741
8742 start_cleanup_deferral ();
8743
8744 /* One branch of the cond can be void, if it never returns. For
8745 example, A ? throw : E. */
8746 if (temp != 0
8747 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8748 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8749 else
8750 expand_expr (TREE_OPERAND (exp, 1),
8751 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8752 end_cleanup_deferral ();
8753 emit_queue ();
8754 emit_jump_insn (gen_jump (op1));
8755 emit_barrier ();
8756 emit_label (op0);
8757 start_cleanup_deferral ();
8758 if (temp != 0
8759 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8760 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8761 else
8762 expand_expr (TREE_OPERAND (exp, 2),
8763 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8764 }
8765
8766 end_cleanup_deferral ();
8767
8768 emit_queue ();
8769 emit_label (op1);
8770 OK_DEFER_POP;
8771
8772 return temp;
8773 }
8774
8775 case TARGET_EXPR:
8776 {
8777 /* Something needs to be initialized, but we didn't know
8778 where that thing was when building the tree. For example,
8779 it could be the return value of a function, or a parameter
8780 to a function which is laid out on the stack, or a temporary
8781 variable which must be passed by reference.
8782
8783 We guarantee that the expression will either be constructed
8784 or copied into our original target. */
8785
8786 tree slot = TREE_OPERAND (exp, 0);
8787 tree cleanups = NULL_TREE;
8788 tree exp1;
8789
8790 if (TREE_CODE (slot) != VAR_DECL)
8791 abort ();
8792
8793 if (! ignore)
8794 target = original_target;
8795
8796 /* Set this here so that if we get a target that refers to a
8797 register variable that's already been used, put_reg_into_stack
8798 knows that it should fix up those uses. */
8799 TREE_USED (slot) = 1;
8800
8801 if (target == 0)
8802 {
8803 if (DECL_RTL_SET_P (slot))
8804 {
8805 target = DECL_RTL (slot);
8806 /* If we have already expanded the slot, don't do
8807 it again. (mrs) */
8808 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8809 return target;
8810 }
8811 else
8812 {
8813 target = assign_temp (type, 2, 0, 1);
8814 /* All temp slots at this level must not conflict. */
8815 preserve_temp_slots (target);
8816 SET_DECL_RTL (slot, target);
8817 if (TREE_ADDRESSABLE (slot))
8818 put_var_into_stack (slot);
8819
8820 /* Since SLOT is not known to the called function
8821 to belong to its stack frame, we must build an explicit
8822 cleanup. This case occurs when we must build up a reference
8823 to pass the reference as an argument. In this case,
8824 it is very likely that such a reference need not be
8825 built here. */
8826
8827 if (TREE_OPERAND (exp, 2) == 0)
8828 TREE_OPERAND (exp, 2)
8829 = (*lang_hooks.maybe_build_cleanup) (slot);
8830 cleanups = TREE_OPERAND (exp, 2);
8831 }
8832 }
8833 else
8834 {
8835 /* This case does occur when expanding a parameter which
8836 needs to be constructed on the stack. The target
8837 is the actual stack address that we want to initialize.
8838 The function we call will perform the cleanup in this case. */
8839
8840 /* If we have already assigned it space, use that space,
8841 not the target that we were passed, as our target
8842 parameter is only a hint. */
8843 if (DECL_RTL_SET_P (slot))
8844 {
8845 target = DECL_RTL (slot);
8846 /* If we have already expanded the slot, don't do
8847 it again. (mrs) */
8848 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8849 return target;
8850 }
8851 else
8852 {
8853 SET_DECL_RTL (slot, target);
8854 /* If we must have an addressable slot, then make sure that
8855 the RTL that we just stored in slot is OK. */
8856 if (TREE_ADDRESSABLE (slot))
8857 put_var_into_stack (slot);
8858 }
8859 }
8860
8861 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8862 /* Mark it as expanded. */
8863 TREE_OPERAND (exp, 1) = NULL_TREE;
8864
8865 store_expr (exp1, target, 0);
8866
8867 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8868
8869 return target;
8870 }
8871
8872 case INIT_EXPR:
8873 {
8874 tree lhs = TREE_OPERAND (exp, 0);
8875 tree rhs = TREE_OPERAND (exp, 1);
8876
8877 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8878 return temp;
8879 }
8880
8881 case MODIFY_EXPR:
8882 {
8883 /* If lhs is complex, expand calls in rhs before computing it.
8884 That's so we don't compute a pointer and save it over a
8885 call. If lhs is simple, compute it first so we can give it
8886 as a target if the rhs is just a call. This avoids an
8887 extra temp and copy, and prevents a partial-subsumption
8888 which makes bad code. Actually we could treat
8889 component_ref's of vars like vars. */
8890
8891 tree lhs = TREE_OPERAND (exp, 0);
8892 tree rhs = TREE_OPERAND (exp, 1);
8893
8894 temp = 0;
8895
8896 /* Check for |= or &= of a bitfield of size one into another bitfield
8897 of size one. In this case (unless we need the result of the
8898 assignment), we can do this more efficiently with a
8899 test followed by an assignment, if necessary.
8900
8901 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8902 things change so we do, this code should be enhanced to
8903 support it. */
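/* Concretely, for one-bit fields `a.b |= c.d;' is expanded as
   if (c.d != 0) a.b = 1;  and `a.b &= c.d;' as  if (c.d == 0) a.b = 0;  */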
8904 if (ignore
8905 && TREE_CODE (lhs) == COMPONENT_REF
8906 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8907 || TREE_CODE (rhs) == BIT_AND_EXPR)
8908 && TREE_OPERAND (rhs, 0) == lhs
8909 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8910 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8911 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8912 {
8913 rtx label = gen_label_rtx ();
8914
8915 do_jump (TREE_OPERAND (rhs, 1),
8916 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8917 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8918 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8919 (TREE_CODE (rhs) == BIT_IOR_EXPR
8920 ? integer_one_node
8921 : integer_zero_node)),
8922 0, 0);
8923 do_pending_stack_adjust ();
8924 emit_label (label);
8925 return const0_rtx;
8926 }
8927
8928 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8929
8930 return temp;
8931 }
8932
8933 case RETURN_EXPR:
8934 if (!TREE_OPERAND (exp, 0))
8935 expand_null_return ();
8936 else
8937 expand_return (TREE_OPERAND (exp, 0));
8938 return const0_rtx;
8939
8940 case PREINCREMENT_EXPR:
8941 case PREDECREMENT_EXPR:
8942 return expand_increment (exp, 0, ignore);
8943
8944 case POSTINCREMENT_EXPR:
8945 case POSTDECREMENT_EXPR:
8946 /* Faster to treat as pre-increment if result is not used. */
8947 return expand_increment (exp, ! ignore, ignore);
8948
8949 case ADDR_EXPR:
8950 /* Are we taking the address of a nested function? */
8951 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8952 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8953 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8954 && ! TREE_STATIC (exp))
8955 {
8956 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8957 op0 = force_operand (op0, target);
8958 }
8959 /* If we are taking the address of something erroneous, just
8960 return a zero. */
8961 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8962 return const0_rtx;
8963 /* If we are taking the address of a constant and are at the
8964 top level, we have to use output_constant_def since we can't
8965 call force_const_mem at top level. */
8966 else if (cfun == 0
8967 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8968 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8969 == 'c')))
8970 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8971 else
8972 {
8973 /* We make sure to pass const0_rtx down if we came in with
8974 ignore set, to avoid doing the cleanups twice for the same expression. */
8975 op0 = expand_expr (TREE_OPERAND (exp, 0),
8976 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8977 (modifier == EXPAND_INITIALIZER
8978 ? modifier : EXPAND_CONST_ADDRESS));
8979
8980 /* If we are going to ignore the result, OP0 will have been set
8981 to const0_rtx, so just return it. Don't get confused and
8982 think we are taking the address of the constant. */
8983 if (ignore)
8984 return op0;
8985
8986 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8987 clever and returns a REG when given a MEM. */
8988 op0 = protect_from_queue (op0, 1);
8989
8990 /* We would like the object in memory. If it is a constant, we can
8991 have it be statically allocated into memory. For a non-constant,
8992 we need to allocate some memory and store the value into it. */
8993
8994 if (CONSTANT_P (op0))
8995 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8996 op0);
8997 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8998 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8999 || GET_CODE (op0) == PARALLEL)
9000 {
9001 /* If the operand is a SAVE_EXPR, we can deal with this by
9002 forcing the SAVE_EXPR into memory. */
9003 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
9004 {
9005 put_var_into_stack (TREE_OPERAND (exp, 0));
9006 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9007 }
9008 else
9009 {
9010 /* If this object is in a register, it can't be BLKmode. */
9011 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9012 rtx memloc = assign_temp (inner_type, 1, 1, 1);
9013
9014 if (GET_CODE (op0) == PARALLEL)
9015 /* Handle calls that pass values in multiple
9016 non-contiguous locations. The Irix 6 ABI has examples
9017 of this. */
9018 emit_group_store (memloc, op0,
9019 int_size_in_bytes (inner_type));
9020 else
9021 emit_move_insn (memloc, op0);
9022
9023 op0 = memloc;
9024 }
9025 }
9026
9027 if (GET_CODE (op0) != MEM)
9028 abort ();
9029
9030 mark_temp_addr_taken (op0);
9031 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9032 {
9033 op0 = XEXP (op0, 0);
9034 #ifdef POINTERS_EXTEND_UNSIGNED
9035 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9036 && mode == ptr_mode)
9037 op0 = convert_memory_address (ptr_mode, op0);
9038 #endif
9039 return op0;
9040 }
9041
9042 /* If OP0 is not aligned at least as much as the type requires, we
9043 need to make a temporary, copy OP0 to it, and take the address of
9044 the temporary. We want to use the alignment of the type, not of
9045 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9046 the test for BLKmode means that can't happen. The test for
9047 BLKmode is because we never make mis-aligned MEMs with
9048 non-BLKmode.
9049
9050 We don't need to do this at all if the machine doesn't have
9051 strict alignment. */
9052 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9053 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9054 > MEM_ALIGN (op0))
9055 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9056 {
9057 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9058 rtx new
9059 = assign_stack_temp_for_type
9060 (TYPE_MODE (inner_type),
9061 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9062 : int_size_in_bytes (inner_type),
9063 1, build_qualified_type (inner_type,
9064 (TYPE_QUALS (inner_type)
9065 | TYPE_QUAL_CONST)));
9066
9067 if (TYPE_ALIGN_OK (inner_type))
9068 abort ();
9069
9070 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9071 BLOCK_OP_NORMAL);
9072 op0 = new;
9073 }
9074
9075 op0 = force_operand (XEXP (op0, 0), target);
9076 }
9077
9078 if (flag_force_addr
9079 && GET_CODE (op0) != REG
9080 && modifier != EXPAND_CONST_ADDRESS
9081 && modifier != EXPAND_INITIALIZER
9082 && modifier != EXPAND_SUM)
9083 op0 = force_reg (Pmode, op0);
9084
9085 if (GET_CODE (op0) == REG
9086 && ! REG_USERVAR_P (op0))
9087 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9088
9089 #ifdef POINTERS_EXTEND_UNSIGNED
9090 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9091 && mode == ptr_mode)
9092 op0 = convert_memory_address (ptr_mode, op0);
9093 #endif
9094
9095 return op0;
9096
9097 case ENTRY_VALUE_EXPR:
9098 abort ();
9099
9100 /* COMPLEX type for Extended Pascal & Fortran */
9101 case COMPLEX_EXPR:
9102 {
9103 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9104 rtx insns;
9105
9106 /* Get the rtx code of the operands. */
9107 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9108 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9109
9110 if (! target)
9111 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9112
9113 start_sequence ();
9114
9115 /* Move the real (op0) and imaginary (op1) parts to their location. */
9116 emit_move_insn (gen_realpart (mode, target), op0);
9117 emit_move_insn (gen_imagpart (mode, target), op1);
9118
9119 insns = get_insns ();
9120 end_sequence ();
9121
9122 /* Complex construction should appear as a single unit. */
9123 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9124 each with a separate pseudo as destination.
9125 It's not correct for flow to treat them as a unit. */
9126 if (GET_CODE (target) != CONCAT)
9127 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9128 else
9129 emit_insn (insns);
9130
9131 return target;
9132 }
9133
9134 case REALPART_EXPR:
9135 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9136 return gen_realpart (mode, op0);
9137
9138 case IMAGPART_EXPR:
9139 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9140 return gen_imagpart (mode, op0);
9141
9142 case CONJ_EXPR:
9143 {
9144 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9145 rtx imag_t;
9146 rtx insns;
9147
9148 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9149
9150 if (! target)
9151 target = gen_reg_rtx (mode);
9152
9153 start_sequence ();
9154
9155 /* Store the realpart and the negated imagpart to target. */
9156 emit_move_insn (gen_realpart (partmode, target),
9157 gen_realpart (partmode, op0));
9158
9159 imag_t = gen_imagpart (partmode, target);
9160 temp = expand_unop (partmode,
9161 ! unsignedp && flag_trapv
9162 && (GET_MODE_CLASS (partmode) == MODE_INT)
9163 ? negv_optab : neg_optab,
9164 gen_imagpart (partmode, op0), imag_t, 0);
9165 if (temp != imag_t)
9166 emit_move_insn (imag_t, temp);
9167
9168 insns = get_insns ();
9169 end_sequence ();
9170
9171 /* Conjugate should appear as a single unit.
9172 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9173 each with a separate pseudo as destination.
9174 It's not correct for flow to treat them as a unit. */
9175 if (GET_CODE (target) != CONCAT)
9176 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9177 else
9178 emit_insn (insns);
9179
9180 return target;
9181 }
9182
9183 case TRY_CATCH_EXPR:
9184 {
9185 tree handler = TREE_OPERAND (exp, 1);
9186
9187 expand_eh_region_start ();
9188
9189 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9190
9191 expand_eh_region_end_cleanup (handler);
9192
9193 return op0;
9194 }
9195
9196 case TRY_FINALLY_EXPR:
9197 {
9198 tree try_block = TREE_OPERAND (exp, 0);
9199 tree finally_block = TREE_OPERAND (exp, 1);
9200
9201 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9202 {
9203 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9204 is not sufficient, so we cannot expand the block twice.
9205 Instead we play games with GOTO_SUBROUTINE_EXPR so that we
9206 expand the thing only once. */
9207 /* When not optimizing, we go ahead with this form since
9208 (1) user breakpoints operate more predictably without
9209 code duplication, and
9210 (2) we're not running any of the global optimizers
9211 that would explode in time/space with the highly
9212 connected CFG created by the indirect branching. */
9213
9214 rtx finally_label = gen_label_rtx ();
9215 rtx done_label = gen_label_rtx ();
9216 rtx return_link = gen_reg_rtx (Pmode);
9217 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9218 (tree) finally_label, (tree) return_link);
9219 TREE_SIDE_EFFECTS (cleanup) = 1;
9220
9221 /* Start a new binding layer that will keep track of all cleanup
9222 actions to be performed. */
9223 expand_start_bindings (2);
9224 target_temp_slot_level = temp_slot_level;
9225
9226 expand_decl_cleanup (NULL_TREE, cleanup);
9227 op0 = expand_expr (try_block, target, tmode, modifier);
9228
9229 preserve_temp_slots (op0);
9230 expand_end_bindings (NULL_TREE, 0, 0);
9231 emit_jump (done_label);
9232 emit_label (finally_label);
9233 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9234 emit_indirect_jump (return_link);
9235 emit_label (done_label);
9236 }
9237 else
9238 {
9239 expand_start_bindings (2);
9240 target_temp_slot_level = temp_slot_level;
9241
9242 expand_decl_cleanup (NULL_TREE, finally_block);
9243 op0 = expand_expr (try_block, target, tmode, modifier);
9244
9245 preserve_temp_slots (op0);
9246 expand_end_bindings (NULL_TREE, 0, 0);
9247 }
9248
9249 return op0;
9250 }
9251
9252 case GOTO_SUBROUTINE_EXPR:
9253 {
9254 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9255 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9256 rtx return_address = gen_label_rtx ();
9257 emit_move_insn (return_link,
9258 gen_rtx_LABEL_REF (Pmode, return_address));
9259 emit_jump (subr);
9260 emit_label (return_address);
9261 return const0_rtx;
9262 }
9263
9264 case VA_ARG_EXPR:
9265 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9266
9267 case EXC_PTR_EXPR:
9268 return get_exception_pointer (cfun);
9269
9270 case FDESC_EXPR:
9271 /* Function descriptors are not valid except as
9272 initialization constants, and should not be expanded. */
9273 abort ();
9274
9275 default:
9276 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9277 }
9278
9279 /* Here to do an ordinary binary operator, generating an instruction
9280 from the optab already placed in `this_optab'. */
9281 binop:
9282 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9283 subtarget = 0;
9284 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9285 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9286 binop2:
9287 temp = expand_binop (mode, this_optab, op0, op1, target,
9288 unsignedp, OPTAB_LIB_WIDEN);
9289 if (temp == 0)
9290 abort ();
9291 return temp;
9292 }
9293 \f
9294 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9295 when applied to the address of EXP produces an address known to be
9296 aligned more than BIGGEST_ALIGNMENT. */
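/* The offset matched here has the form (-&EXP) & (ALIGN - 1), where
   ALIGN is a power of 2 whose mask exceeds BIGGEST_ALIGNMENT; adding
   such an offset to the address of EXP rounds it up to a multiple of
   ALIGN.  */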
9297
9298 static int
9299 is_aligning_offset (offset, exp)
9300 tree offset;
9301 tree exp;
9302 {
9303 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9304 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9305 || TREE_CODE (offset) == NOP_EXPR
9306 || TREE_CODE (offset) == CONVERT_EXPR
9307 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9308 offset = TREE_OPERAND (offset, 0);
9309
9310 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9311 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9312 if (TREE_CODE (offset) != BIT_AND_EXPR
9313 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9314 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9315 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0
9316 return 0;
9317
9318 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9319 It must be NEGATE_EXPR. Then strip any more conversions. */
9320 offset = TREE_OPERAND (offset, 0);
9321 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9322 || TREE_CODE (offset) == NOP_EXPR
9323 || TREE_CODE (offset) == CONVERT_EXPR)
9324 offset = TREE_OPERAND (offset, 0);
9325
9326 if (TREE_CODE (offset) != NEGATE_EXPR)
9327 return 0;
9328
9329 offset = TREE_OPERAND (offset, 0);
9330 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9331 || TREE_CODE (offset) == NOP_EXPR
9332 || TREE_CODE (offset) == CONVERT_EXPR)
9333 offset = TREE_OPERAND (offset, 0);
9334
9335 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9336 whose type is the same as EXP. */
9337 return (TREE_CODE (offset) == ADDR_EXPR
9338 && (TREE_OPERAND (offset, 0) == exp
9339 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9340 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9341 == TREE_TYPE (exp)))));
9342 }
9343 \f
9344 /* Return the tree node if an ARG corresponds to a string constant or zero
9345 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9346 in bytes within the string that ARG is accessing. The type of the
9347 offset will be `sizetype'. */
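/* For example, if ARG is the address of the string "hello" plus the
   constant 2, we return the STRING_CST for "hello" and set *PTR_OFFSET
   to 2.  */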
9348
9349 tree
9350 string_constant (arg, ptr_offset)
9351 tree arg;
9352 tree *ptr_offset;
9353 {
9354 STRIP_NOPS (arg);
9355
9356 if (TREE_CODE (arg) == ADDR_EXPR
9357 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9358 {
9359 *ptr_offset = size_zero_node;
9360 return TREE_OPERAND (arg, 0);
9361 }
9362 else if (TREE_CODE (arg) == PLUS_EXPR)
9363 {
9364 tree arg0 = TREE_OPERAND (arg, 0);
9365 tree arg1 = TREE_OPERAND (arg, 1);
9366
9367 STRIP_NOPS (arg0);
9368 STRIP_NOPS (arg1);
9369
9370 if (TREE_CODE (arg0) == ADDR_EXPR
9371 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9372 {
9373 *ptr_offset = convert (sizetype, arg1);
9374 return TREE_OPERAND (arg0, 0);
9375 }
9376 else if (TREE_CODE (arg1) == ADDR_EXPR
9377 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9378 {
9379 *ptr_offset = convert (sizetype, arg0);
9380 return TREE_OPERAND (arg1, 0);
9381 }
9382 }
9383
9384 return 0;
9385 }
9386 \f
9387 /* Expand code for a post- or pre- increment or decrement
9388 and return the RTX for the result.
9389 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9390
9391 static rtx
9392 expand_increment (exp, post, ignore)
9393 tree exp;
9394 int post, ignore;
9395 {
9396 rtx op0, op1;
9397 rtx temp, value;
9398 tree incremented = TREE_OPERAND (exp, 0);
9399 optab this_optab = add_optab;
9400 int icode;
9401 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9402 int op0_is_copy = 0;
9403 int single_insn = 0;
9404 /* 1 means we can't store into OP0 directly,
9405 because it is a subreg narrower than a word,
9406 and we don't dare clobber the rest of the word. */
9407 int bad_subreg = 0;
9408
9409 /* Stabilize any component ref that might need to be
9410 evaluated more than once below. */
9411 if (!post
9412 || TREE_CODE (incremented) == BIT_FIELD_REF
9413 || (TREE_CODE (incremented) == COMPONENT_REF
9414 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9415 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9416 incremented = stabilize_reference (incremented);
9417 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9418 ones into save exprs so that they don't accidentally get evaluated
9419 more than once by the code below. */
9420 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9421 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9422 incremented = save_expr (incremented);
9423
9424 /* Compute the operands as RTX.
9425 Note whether OP0 is the actual lvalue or a copy of it:
9426 I believe it is a copy iff it is a register or subreg
9427 and insns were generated in computing it. */
9428
9429 temp = get_last_insn ();
9430 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9431
9432 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9433 in place but instead must do sign- or zero-extension during assignment,
9434 so we copy it into a new register and let the code below use it as
9435 a copy.
9436
9437 Note that we can safely modify this SUBREG since it is known not to be
9438 shared (it was made by the expand_expr call above). */
9439
9440 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9441 {
9442 if (post)
9443 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9444 else
9445 bad_subreg = 1;
9446 }
9447 else if (GET_CODE (op0) == SUBREG
9448 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9449 {
9450 /* We cannot increment this SUBREG in place. If we are
9451 post-incrementing, get a copy of the old value. Otherwise,
9452 just mark that we cannot increment in place. */
9453 if (post)
9454 op0 = copy_to_reg (op0);
9455 else
9456 bad_subreg = 1;
9457 }
9458
9459 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9460 && temp != get_last_insn ());
9461 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9462
9463 /* Decide whether incrementing or decrementing. */
9464 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9465 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9466 this_optab = sub_optab;
9467
9468 /* Convert decrement by a constant into a negative increment. */
9469 if (this_optab == sub_optab
9470 && GET_CODE (op1) == CONST_INT)
9471 {
9472 op1 = GEN_INT (-INTVAL (op1));
9473 this_optab = add_optab;
9474 }
9475
9476 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9477 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9478
9479 /* For a preincrement, see if we can do this with a single instruction. */
9480 if (!post)
9481 {
9482 icode = (int) this_optab->handlers[(int) mode].insn_code;
9483 if (icode != (int) CODE_FOR_nothing
9484 /* Make sure that OP0 is valid for operands 0 and 1
9485 of the insn we want to queue. */
9486 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9487 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9488 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9489 single_insn = 1;
9490 }
9491
9492 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9493 then we cannot just increment OP0. We must therefore contrive to
9494 increment the original value. Then, for postincrement, we can return
9495 OP0 since it is a copy of the old value. For preincrement, expand here
9496 unless we can do it with a single insn.
9497
9498 Likewise if storing directly into OP0 would clobber high bits
9499 we need to preserve (bad_subreg). */
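/* For instance, a preincrement that cannot be done in place is handled
   by building an explicit PLUS_EXPR or MINUS_EXPR assignment back into
   INCREMENTED and expanding that; a postincrement does the same but
   returns the saved copy of the old value.  */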
9500 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9501 {
9502 /* This is the easiest way to increment the value wherever it is.
9503 Problems with multiple evaluation of INCREMENTED are prevented
9504 because either (1) it is a component_ref or preincrement,
9505 in which case it was stabilized above, or (2) it is an array_ref
9506 with constant index in an array in a register, which is
9507 safe to reevaluate. */
9508 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9509 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9510 ? MINUS_EXPR : PLUS_EXPR),
9511 TREE_TYPE (exp),
9512 incremented,
9513 TREE_OPERAND (exp, 1));
9514
9515 while (TREE_CODE (incremented) == NOP_EXPR
9516 || TREE_CODE (incremented) == CONVERT_EXPR)
9517 {
9518 newexp = convert (TREE_TYPE (incremented), newexp);
9519 incremented = TREE_OPERAND (incremented, 0);
9520 }
9521
9522 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9523 return post ? op0 : temp;
9524 }
9525
9526 if (post)
9527 {
9528 /* We have a true reference to the value in OP0.
9529 If there is an insn to add or subtract in this mode, queue it.
9530 Queueing the increment insn avoids the register shuffling
9531 that often results if we must increment now and first save
9532 the old value for subsequent use. */
9533
9534 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9535 op0 = stabilize (op0);
9536 #endif
9537
9538 icode = (int) this_optab->handlers[(int) mode].insn_code;
9539 if (icode != (int) CODE_FOR_nothing
9540 /* Make sure that OP0 is valid for operands 0 and 1
9541 of the insn we want to queue. */
9542 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9543 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9544 {
9545 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9546 op1 = force_reg (mode, op1);
9547
9548 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9549 }
9550 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9551 {
9552 rtx addr = (general_operand (XEXP (op0, 0), mode)
9553 ? force_reg (Pmode, XEXP (op0, 0))
9554 : copy_to_reg (XEXP (op0, 0)));
9555 rtx temp, result;
9556
9557 op0 = replace_equiv_address (op0, addr);
9558 temp = force_reg (GET_MODE (op0), op0);
9559 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9560 op1 = force_reg (mode, op1);
9561
9562 /* The increment queue is LIFO, thus we have to `queue'
9563 the instructions in reverse order. */
9564 enqueue_insn (op0, gen_move_insn (op0, temp));
9565 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9566 return result;
9567 }
9568 }
9569
9570 /* Preincrement, or we can't increment with one simple insn. */
9571 if (post)
9572 /* Save a copy of the value before inc or dec, to return it later. */
9573 temp = value = copy_to_reg (op0);
9574 else
9575 /* Arrange to return the incremented value. */
9576 /* Copy the rtx because expand_binop will protect from the queue,
9577 and the results of that would be invalid for us to return
9578 if our caller does emit_queue before using our result. */
9579 temp = copy_rtx (value = op0);
9580
9581 /* Increment however we can. */
9582 op1 = expand_binop (mode, this_optab, value, op1, op0,
9583 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9584
9585 /* Make sure the value is stored into OP0. */
9586 if (op1 != op0)
9587 emit_move_insn (op0, op1);
9588
9589 return temp;
9590 }
9591 \f
9592 /* At the start of a function, record that we have no previously-pushed
9593 arguments waiting to be popped. */
9594
9595 void
9596 init_pending_stack_adjust ()
9597 {
9598 pending_stack_adjust = 0;
9599 }
9600
9601 /* When exiting from function, if safe, clear out any pending stack adjust
9602 so the adjustment won't get done.
9603
9604 Note, if the current function calls alloca, then it must have a
9605 frame pointer regardless of the value of flag_omit_frame_pointer. */
9606
9607 void
9608 clear_pending_stack_adjust ()
9609 {
9610 #ifdef EXIT_IGNORE_STACK
9611 if (optimize > 0
9612 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9613 && EXIT_IGNORE_STACK
9614 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9615 && ! flag_inline_functions)
9616 {
9617 stack_pointer_delta -= pending_stack_adjust;
9618 pending_stack_adjust = 0;
9619 }
9620 #endif
9621 }
9622
9623 /* Pop any previously-pushed arguments that have not been popped yet. */
9624
9625 void
9626 do_pending_stack_adjust ()
9627 {
9628 if (inhibit_defer_pop == 0)
9629 {
9630 if (pending_stack_adjust != 0)
9631 adjust_stack (GEN_INT (pending_stack_adjust));
9632 pending_stack_adjust = 0;
9633 }
9634 }
9635 \f
9636 /* Expand conditional expressions. */
9637
9638 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9639 LABEL is an rtx of code CODE_LABEL, in this function and all the
9640 functions here. */
9641
9642 void
9643 jumpifnot (exp, label)
9644 tree exp;
9645 rtx label;
9646 {
9647 do_jump (exp, label, NULL_RTX);
9648 }
9649
9650 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9651
9652 void
9653 jumpif (exp, label)
9654 tree exp;
9655 rtx label;
9656 {
9657 do_jump (exp, NULL_RTX, label);
9658 }
9659
9660 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9661 the result is zero, or IF_TRUE_LABEL if the result is one.
9662 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9663 meaning fall through in that case.
9664
9665 do_jump always does any pending stack adjust except when it does not
9666 actually perform a jump. An example where there is no jump
9667 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9668
9669 This function is responsible for optimizing cases such as
9670 &&, || and comparison operators in EXP. */
9671
9672 void
9673 do_jump (exp, if_false_label, if_true_label)
9674 tree exp;
9675 rtx if_false_label, if_true_label;
9676 {
9677 enum tree_code code = TREE_CODE (exp);
9678 /* Some cases need to create a label to jump to
9679 in order to properly fall through.
9680 These cases set DROP_THROUGH_LABEL nonzero. */
9681 rtx drop_through_label = 0;
9682 rtx temp;
9683 int i;
9684 tree type;
9685 enum machine_mode mode;
9686
9687 #ifdef MAX_INTEGER_COMPUTATION_MODE
9688 check_max_integer_computation_mode (exp);
9689 #endif
9690
9691 emit_queue ();
9692
9693 switch (code)
9694 {
9695 case ERROR_MARK:
9696 break;
9697
9698 case INTEGER_CST:
9699 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9700 if (temp)
9701 emit_jump (temp);
9702 break;
9703
9704 #if 0
9705 /* This is not true with #pragma weak */
9706 case ADDR_EXPR:
9707 /* The address of something can never be zero. */
9708 if (if_true_label)
9709 emit_jump (if_true_label);
9710 break;
9711 #endif
9712
9713 case NOP_EXPR:
9714 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9715 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9716 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9717 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9718 goto normal;
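/* Otherwise, fall through. */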
9719 case CONVERT_EXPR:
9720 /* If we are narrowing the operand, we have to do the compare in the
9721 narrower mode. */
9722 if ((TYPE_PRECISION (TREE_TYPE (exp))
9723 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9724 goto normal;
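/* Otherwise, fall through. */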
9725 case NON_LVALUE_EXPR:
9726 case REFERENCE_EXPR:
9727 case ABS_EXPR:
9728 case NEGATE_EXPR:
9729 case LROTATE_EXPR:
9730 case RROTATE_EXPR:
9731 /* These cannot change zero->nonzero or vice versa. */
9732 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9733 break;
9734
9735 case WITH_RECORD_EXPR:
9736 /* Put the object on the placeholder list, recurse through our first
9737 operand, and pop the list. */
9738 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9739 placeholder_list);
9740 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9741 placeholder_list = TREE_CHAIN (placeholder_list);
9742 break;
9743
9744 #if 0
9745 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9746 a test and can be longer if the test is eliminated. */
9747 case PLUS_EXPR:
9748 /* Reduce to minus. */
9749 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9750 TREE_OPERAND (exp, 0),
9751 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9752 TREE_OPERAND (exp, 1))));
9753 /* Process as MINUS. */
9754 #endif
9755
9756 case MINUS_EXPR:
9757 /* Nonzero iff operands of minus differ. */
9758 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9759 TREE_OPERAND (exp, 0),
9760 TREE_OPERAND (exp, 1)),
9761 NE, NE, if_false_label, if_true_label);
9762 break;
9763
9764 case BIT_AND_EXPR:
9765 /* If we are AND'ing with a small constant, do this comparison in the
9766 smallest type that fits. If the machine doesn't have comparisons
9767 that small, it will be converted back to the wider comparison.
9768 This helps if we are testing the sign bit of a narrower object.
9769 combine can't do this for us because it can't know whether a
9770 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
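/* For example, `if (x & 0x80)' with X an `int' can be tested entirely
   in QImode, where the masked bit is the sign bit of the low byte.  */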
9771
9772 if (! SLOW_BYTE_ACCESS
9773 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9774 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9775 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9776 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9777 && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
9778 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9779 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9780 != CODE_FOR_nothing))
9781 {
9782 do_jump (convert (type, exp), if_false_label, if_true_label);
9783 break;
9784 }
9785 goto normal;
9786
9787 case TRUTH_NOT_EXPR:
9788 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9789 break;
9790
9791 case TRUTH_ANDIF_EXPR:
9792 if (if_false_label == 0)
9793 if_false_label = drop_through_label = gen_label_rtx ();
9794 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9795 start_cleanup_deferral ();
9796 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9797 end_cleanup_deferral ();
9798 break;
9799
9800 case TRUTH_ORIF_EXPR:
9801 if (if_true_label == 0)
9802 if_true_label = drop_through_label = gen_label_rtx ();
9803 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9804 start_cleanup_deferral ();
9805 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9806 end_cleanup_deferral ();
9807 break;
9808
9809 case COMPOUND_EXPR:
9810 push_temp_slots ();
9811 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9812 preserve_temp_slots (NULL_RTX);
9813 free_temp_slots ();
9814 pop_temp_slots ();
9815 emit_queue ();
9816 do_pending_stack_adjust ();
9817 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9818 break;
9819
9820 case COMPONENT_REF:
9821 case BIT_FIELD_REF:
9822 case ARRAY_REF:
9823 case ARRAY_RANGE_REF:
9824 {
9825 HOST_WIDE_INT bitsize, bitpos;
9826 int unsignedp;
9827 enum machine_mode mode;
9828 tree type;
9829 tree offset;
9830 int volatilep = 0;
9831
9832 /* Get description of this reference. We don't actually care
9833 about the underlying object here. */
9834 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9835 &unsignedp, &volatilep);
9836
9837 type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
9838 if (! SLOW_BYTE_ACCESS
9839 && type != 0 && bitsize >= 0
9840 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9841 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9842 != CODE_FOR_nothing))
9843 {
9844 do_jump (convert (type, exp), if_false_label, if_true_label);
9845 break;
9846 }
9847 goto normal;
9848 }
9849
9850 case COND_EXPR:
9851 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9852 if (integer_onep (TREE_OPERAND (exp, 1))
9853 && integer_zerop (TREE_OPERAND (exp, 2)))
9854 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9855
9856 else if (integer_zerop (TREE_OPERAND (exp, 1))
9857 && integer_onep (TREE_OPERAND (exp, 2)))
9858 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9859
9860 else
9861 {
9862 rtx label1 = gen_label_rtx ();
9863 drop_through_label = gen_label_rtx ();
9864
9865 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9866
9867 start_cleanup_deferral ();
9868 /* Now the THEN-expression. */
9869 do_jump (TREE_OPERAND (exp, 1),
9870 if_false_label ? if_false_label : drop_through_label,
9871 if_true_label ? if_true_label : drop_through_label);
9872 /* In case the do_jump just above never jumps. */
9873 do_pending_stack_adjust ();
9874 emit_label (label1);
9875
9876 /* Now the ELSE-expression. */
9877 do_jump (TREE_OPERAND (exp, 2),
9878 if_false_label ? if_false_label : drop_through_label,
9879 if_true_label ? if_true_label : drop_through_label);
9880 end_cleanup_deferral ();
9881 }
9882 break;
9883
9884 case EQ_EXPR:
9885 {
9886 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9887
9888 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9889 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9890 {
9891 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9892 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9893 do_jump
9894 (fold
9895 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9896 fold (build (EQ_EXPR, TREE_TYPE (exp),
9897 fold (build1 (REALPART_EXPR,
9898 TREE_TYPE (inner_type),
9899 exp0)),
9900 fold (build1 (REALPART_EXPR,
9901 TREE_TYPE (inner_type),
9902 exp1)))),
9903 fold (build (EQ_EXPR, TREE_TYPE (exp),
9904 fold (build1 (IMAGPART_EXPR,
9905 TREE_TYPE (inner_type),
9906 exp0)),
9907 fold (build1 (IMAGPART_EXPR,
9908 TREE_TYPE (inner_type),
9909 exp1)))))),
9910 if_false_label, if_true_label);
9911 }
9912
9913 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9914 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9915
9916 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9917 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9918 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9919 else
9920 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9921 break;
9922 }
9923
9924 case NE_EXPR:
9925 {
9926 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9927
9928 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9929 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9930 {
9931 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9932 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9933 do_jump
9934 (fold
9935 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9936 fold (build (NE_EXPR, TREE_TYPE (exp),
9937 fold (build1 (REALPART_EXPR,
9938 TREE_TYPE (inner_type),
9939 exp0)),
9940 fold (build1 (REALPART_EXPR,
9941 TREE_TYPE (inner_type),
9942 exp1)))),
9943 fold (build (NE_EXPR, TREE_TYPE (exp),
9944 fold (build1 (IMAGPART_EXPR,
9945 TREE_TYPE (inner_type),
9946 exp0)),
9947 fold (build1 (IMAGPART_EXPR,
9948 TREE_TYPE (inner_type),
9949 exp1)))))),
9950 if_false_label, if_true_label);
9951 }
9952
9953 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9954 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9955
9956 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9957 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9958 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9959 else
9960 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9961 break;
9962 }
9963
9964 case LT_EXPR:
9965 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9966 if (GET_MODE_CLASS (mode) == MODE_INT
9967 && ! can_compare_p (LT, mode, ccp_jump))
9968 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9969 else
9970 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9971 break;
9972
9973 case LE_EXPR:
9974 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9975 if (GET_MODE_CLASS (mode) == MODE_INT
9976 && ! can_compare_p (LE, mode, ccp_jump))
9977 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9978 else
9979 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9980 break;
9981
9982 case GT_EXPR:
9983 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9984 if (GET_MODE_CLASS (mode) == MODE_INT
9985 && ! can_compare_p (GT, mode, ccp_jump))
9986 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9987 else
9988 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9989 break;
9990
9991 case GE_EXPR:
9992 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9993 if (GET_MODE_CLASS (mode) == MODE_INT
9994 && ! can_compare_p (GE, mode, ccp_jump))
9995 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9996 else
9997 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9998 break;
9999
10000 case UNORDERED_EXPR:
10001 case ORDERED_EXPR:
10002 {
10003 enum rtx_code cmp, rcmp;
10004 int do_rev;
10005
10006 if (code == UNORDERED_EXPR)
10007 cmp = UNORDERED, rcmp = ORDERED;
10008 else
10009 cmp = ORDERED, rcmp = UNORDERED;
10010 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10011
10012 do_rev = 0;
10013 if (! can_compare_p (cmp, mode, ccp_jump)
10014 && (can_compare_p (rcmp, mode, ccp_jump)
10015 /* If the target doesn't provide either UNORDERED or ORDERED
10016 comparisons, canonicalize on UNORDERED for the library. */
10017 || rcmp == UNORDERED))
10018 do_rev = 1;
10019
10020 if (! do_rev)
10021 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
10022 else
10023 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
10024 }
10025 break;
10026
10027 {
10028 enum rtx_code rcode1;
10029 enum tree_code tcode2;
10030
10031 case UNLT_EXPR:
10032 rcode1 = UNLT;
10033 tcode2 = LT_EXPR;
10034 goto unordered_bcc;
10035 case UNLE_EXPR:
10036 rcode1 = UNLE;
10037 tcode2 = LE_EXPR;
10038 goto unordered_bcc;
10039 case UNGT_EXPR:
10040 rcode1 = UNGT;
10041 tcode2 = GT_EXPR;
10042 goto unordered_bcc;
10043 case UNGE_EXPR:
10044 rcode1 = UNGE;
10045 tcode2 = GE_EXPR;
10046 goto unordered_bcc;
10047 case UNEQ_EXPR:
10048 rcode1 = UNEQ;
10049 tcode2 = EQ_EXPR;
10050 goto unordered_bcc;
10051
10052 unordered_bcc:
10053 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10054 if (can_compare_p (rcode1, mode, ccp_jump))
10055 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
10056 if_true_label);
10057 else
10058 {
10059 tree op0 = save_expr (TREE_OPERAND (exp, 0));
10060 tree op1 = save_expr (TREE_OPERAND (exp, 1));
10061 tree cmp0, cmp1;
10062
10063 /* If the target doesn't support combined unordered
10064 compares, decompose into UNORDERED + comparison. */
10065 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
10066 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
10067 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
10068 do_jump (exp, if_false_label, if_true_label);
10069 }
10070 }
10071 break;
10072
10073 /* Special case:
10074 __builtin_expect (<test>, 0) and
10075 __builtin_expect (<test>, 1)
10076
10077 We need to do this here, so that <test> is not converted to a SCC
10078 operation on machines that use condition code registers and COMPARE
10079 like the PowerPC, and then the jump is done based on whether the SCC
10080 operation produced a 1 or 0. */
10081 case CALL_EXPR:
10082 /* Check for a built-in function. */
10083 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
10084 {
10085 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
10086 tree arglist = TREE_OPERAND (exp, 1);
10087
10088 if (TREE_CODE (fndecl) == FUNCTION_DECL
10089 && DECL_BUILT_IN (fndecl)
10090 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
10091 && arglist != NULL_TREE
10092 && TREE_CHAIN (arglist) != NULL_TREE)
10093 {
10094 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
10095 if_true_label);
10096
10097 if (seq != NULL_RTX)
10098 {
10099 emit_insn (seq);
10100 return;
10101 }
10102 }
10103 }
10104 /* fall through and generate the normal code. */
10105
10106 default:
10107 normal:
10108 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10109 #if 0
10110 /* This is not needed any more and causes poor code since it causes
10111 comparisons and tests from non-SI objects to have different code
10112 sequences. */
10113 /* Copy to register to avoid generating bad insns by cse
10114 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10115 if (!cse_not_expected && GET_CODE (temp) == MEM)
10116 temp = copy_to_reg (temp);
10117 #endif
10118 do_pending_stack_adjust ();
10119 /* Do any postincrements in the expression that was tested. */
10120 emit_queue ();
10121
10122 if (GET_CODE (temp) == CONST_INT
10123 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
10124 || GET_CODE (temp) == LABEL_REF)
10125 {
10126 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
10127 if (target)
10128 emit_jump (target);
10129 }
10130 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10131 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
10132 /* Note swapping the labels gives us not-equal. */
10133 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10134 else if (GET_MODE (temp) != VOIDmode)
10135 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
10136 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10137 GET_MODE (temp), NULL_RTX,
10138 if_false_label, if_true_label);
10139 else
10140 abort ();
10141 }
10142
10143 if (drop_through_label)
10144 {
10145 /* If do_jump produces code that might be jumped around,
10146 do any stack adjusts from that code, before the place
10147 where control merges in. */
10148 do_pending_stack_adjust ();
10149 emit_label (drop_through_label);
10150 }
10151 }
10152 \f
10153 /* Given a comparison expression EXP for values too wide to be compared
10154 with one insn, test the comparison and jump to the appropriate label.
10155 The code of EXP is ignored; we always test GT if SWAP is 0,
10156 and LT if SWAP is 1. */
10157
10158 static void
10159 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10160 tree exp;
10161 int swap;
10162 rtx if_false_label, if_true_label;
10163 {
10164 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10165 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10166 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10167 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10168
10169 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
10170 }
10171
10172 /* Compare OP0 with OP1, word at a time, in mode MODE.
10173 UNSIGNEDP says to do unsigned comparison.
10174 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10175
10176 void
10177 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10178 enum machine_mode mode;
10179 int unsignedp;
10180 rtx op0, op1;
10181 rtx if_false_label, if_true_label;
10182 {
10183 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10184 rtx drop_through_label = 0;
10185 int i;
10186
10187 if (! if_true_label || ! if_false_label)
10188 drop_through_label = gen_label_rtx ();
10189 if (! if_true_label)
10190 if_true_label = drop_through_label;
10191 if (! if_false_label)
10192 if_false_label = drop_through_label;
10193
10194 /* Compare a word at a time, high order first. */
10195 for (i = 0; i < nwords; i++)
10196 {
10197 rtx op0_word, op1_word;
10198
10199 if (WORDS_BIG_ENDIAN)
10200 {
10201 op0_word = operand_subword_force (op0, i, mode);
10202 op1_word = operand_subword_force (op1, i, mode);
10203 }
10204 else
10205 {
10206 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10207 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10208 }
10209
10210 /* All but the high-order word must be compared as unsigned. */
10211 do_compare_rtx_and_jump (op0_word, op1_word, GT,
10212 (unsignedp || i > 0), word_mode, NULL_RTX,
10213 NULL_RTX, if_true_label);
10214
10215 /* Consider lower words only if these are equal. */
10216 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
10217 NULL_RTX, NULL_RTX, if_false_label);
10218 }
10219
10220 if (if_false_label)
10221 emit_jump (if_false_label);
10222 if (drop_through_label)
10223 emit_label (drop_through_label);
10224 }
10225
10226 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10227 with one insn, test the comparison and jump to the appropriate label. */
10228
10229 static void
10230 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10231 tree exp;
10232 rtx if_false_label, if_true_label;
10233 {
10234 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10235 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10236 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10237 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10238 int i;
10239 rtx drop_through_label = 0;
10240
10241 if (! if_false_label)
10242 drop_through_label = if_false_label = gen_label_rtx ();
10243
10244 for (i = 0; i < nwords; i++)
10245 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10246 operand_subword_force (op1, i, mode),
10247 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10248 word_mode, NULL_RTX, if_false_label, NULL_RTX);
10249
10250 if (if_true_label)
10251 emit_jump (if_true_label);
10252 if (drop_through_label)
10253 emit_label (drop_through_label);
10254 }
10255 \f
10256 /* Jump according to whether OP0 is 0.
10257 We assume that OP0 has an integer mode that is too wide
10258 for the available compare insns. */
10259
10260 void
10261 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10262 rtx op0;
10263 rtx if_false_label, if_true_label;
10264 {
10265 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10266 rtx part;
10267 int i;
10268 rtx drop_through_label = 0;
10269
10270 /* The fastest way of doing this comparison on almost any machine is to
10271 "or" all the words and compare the result. If all have to be loaded
10272 from memory and this is a very wide item, it's possible this may
10273 be slower, but that's highly unlikely. */
10274
10275 part = gen_reg_rtx (word_mode);
10276 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10277 for (i = 1; i < nwords && part != 0; i++)
10278 part = expand_binop (word_mode, ior_optab, part,
10279 operand_subword_force (op0, i, GET_MODE (op0)),
10280 part, 1, OPTAB_WIDEN);
10281
10282 if (part != 0)
10283 {
10284 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10285 NULL_RTX, if_false_label, if_true_label);
10286
10287 return;
10288 }
10289
10290 /* If we couldn't do the "or" simply, do this with a series of compares. */
10291 if (! if_false_label)
10292 drop_through_label = if_false_label = gen_label_rtx ();
10293
10294 for (i = 0; i < nwords; i++)
10295 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10296 const0_rtx, EQ, 1, word_mode, NULL_RTX,
10297 if_false_label, NULL_RTX);
10298
10299 if (if_true_label)
10300 emit_jump (if_true_label);
10301
10302 if (drop_through_label)
10303 emit_label (drop_through_label);
10304 }
10305 \f
10306 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
10307 (including code to compute the values to be compared)
10308 and set (CC0) according to the result.
10309 The decision as to signed or unsigned comparison must be made by the caller.
10310
10311 We force a stack adjustment unless there are currently
10312 things pushed on the stack that aren't yet used.
10313
10314 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10315 compared. */
10316
10317 rtx
10318 compare_from_rtx (op0, op1, code, unsignedp, mode, size)
10319 rtx op0, op1;
10320 enum rtx_code code;
10321 int unsignedp;
10322 enum machine_mode mode;
10323 rtx size;
10324 {
10325 enum rtx_code ucode;
10326 rtx tem;
10327
10328 /* If one operand is constant, make it the second one. Only do this
10329 if the other operand is not constant as well. */
10330
10331 if (swap_commutative_operands_p (op0, op1))
10332 {
10333 tem = op0;
10334 op0 = op1;
10335 op1 = tem;
10336 code = swap_condition (code);
10337 }
10338
10339 if (flag_force_mem)
10340 {
10341 op0 = force_not_mem (op0);
10342 op1 = force_not_mem (op1);
10343 }
10344
10345 do_pending_stack_adjust ();
10346
10347 ucode = unsignedp ? unsigned_condition (code) : code;
10348 if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
10349 return tem;
10350
10351 #if 0
10352 /* There's no need to do this now that combine.c can eliminate lots of
10353 sign extensions. This can be less efficient in certain cases on other
10354 machines. */
10355
10356 /* If this is a signed equality comparison, we can do it as an
10357 unsigned comparison since zero-extension is cheaper than sign
10358 extension and comparisons with zero are done as unsigned. This is
10359 the case even on machines that can do fast sign extension, since
10360 zero-extension is easier to combine with other operations than
10361 sign-extension is. If we are comparing against a constant, we must
10362 convert it to what it would look like unsigned. */
10363 if ((code == EQ || code == NE) && ! unsignedp
10364 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10365 {
10366 if (GET_CODE (op1) == CONST_INT
10367 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10368 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10369 unsignedp = 1;
10370 }
10371 #endif
10372
10373 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
10374
10375 #if HAVE_cc0
10376 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10377 #else
10378 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
10379 #endif
10380 }
10381
10382 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10383 The decision as to signed or unsigned comparison must be made by the caller.
10384
10385 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10386 compared. */
10387
10388 void
10389 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
10390 if_false_label, if_true_label)
10391 rtx op0, op1;
10392 enum rtx_code code;
10393 int unsignedp;
10394 enum machine_mode mode;
10395 rtx size;
10396 rtx if_false_label, if_true_label;
10397 {
10398 enum rtx_code ucode;
10399 rtx tem;
10400 int dummy_true_label = 0;
10401
10402 /* Reverse the comparison if that is safe and we want to jump if it is
10403 false. */
10404 if (! if_true_label && ! FLOAT_MODE_P (mode))
10405 {
10406 if_true_label = if_false_label;
10407 if_false_label = 0;
10408 code = reverse_condition (code);
10409 }
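  /* For example, a request to jump to IF_FALSE_LABEL when (a > b) is
     false is handled, for non-floating modes, by jumping to that label
     when (a <= b) holds.  The reversal is not done for IEEE floating
     point, where !(a > b) and (a <= b) differ when an operand is a
     NaN.  */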
10410
10411 /* If one operand is constant, make it the second one. Only do this
10412 if the other operand is not constant as well. */
10413
10414 if (swap_commutative_operands_p (op0, op1))
10415 {
10416 tem = op0;
10417 op0 = op1;
10418 op1 = tem;
10419 code = swap_condition (code);
10420 }
10421
10422 if (flag_force_mem)
10423 {
10424 op0 = force_not_mem (op0);
10425 op1 = force_not_mem (op1);
10426 }
10427
10428 do_pending_stack_adjust ();
10429
10430 ucode = unsignedp ? unsigned_condition (code) : code;
10431 if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
10432 {
10433 if (tem == const_true_rtx)
10434 {
10435 if (if_true_label)
10436 emit_jump (if_true_label);
10437 }
10438 else
10439 {
10440 if (if_false_label)
10441 emit_jump (if_false_label);
10442 }
10443 return;
10444 }
10445
10446 #if 0
10447 /* There's no need to do this now that combine.c can eliminate lots of
10448 sign extensions. This can be less efficient in certain cases on other
10449 machines. */
10450
10451 /* If this is a signed equality comparison, we can do it as an
10452 unsigned comparison since zero-extension is cheaper than sign
10453 extension and comparisons with zero are done as unsigned. This is
10454 the case even on machines that can do fast sign extension, since
10455 zero-extension is easier to combine with other operations than
10456 sign-extension is. If we are comparing against a constant, we must
10457 convert it to what it would look like unsigned. */
10458 if ((code == EQ || code == NE) && ! unsignedp
10459 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10460 {
10461 if (GET_CODE (op1) == CONST_INT
10462 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10463 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10464 unsignedp = 1;
10465 }
10466 #endif
10467
10468 if (! if_true_label)
10469 {
10470 dummy_true_label = 1;
10471 if_true_label = gen_label_rtx ();
10472 }
10473
10474 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
10475 if_true_label);
10476
10477 if (if_false_label)
10478 emit_jump (if_false_label);
10479 if (dummy_true_label)
10480 emit_label (if_true_label);
10481 }
10482
10483 /* Generate code for a comparison expression EXP (including code to compute
10484 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10485 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10486 generated code will drop through.
10487 SIGNED_CODE should be the rtx operation for this comparison for
10488 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10489
10490 We force a stack adjustment unless there are currently
10491 things pushed on the stack that aren't yet used. */
10492
10493 static void
10494 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10495 if_true_label)
10496 tree exp;
10497 enum rtx_code signed_code, unsigned_code;
10498 rtx if_false_label, if_true_label;
10499 {
10500 rtx op0, op1;
10501 tree type;
10502 enum machine_mode mode;
10503 int unsignedp;
10504 enum rtx_code code;
10505
10506 /* Don't crash if the comparison was erroneous. */
10507 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10508 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10509 return;
10510
10511 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10512 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10513 return;
10514
10515 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10516 mode = TYPE_MODE (type);
10517 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10518 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10519 || (GET_MODE_BITSIZE (mode)
10520 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10521 1)))))))
10522 {
10523 /* op0 might have been replaced by a promoted constant, in which
10524 case the type of the second argument should be used. */
10525 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10526 mode = TYPE_MODE (type);
10527 }
10528 unsignedp = TREE_UNSIGNED (type);
10529 code = unsignedp ? unsigned_code : signed_code;
10530
10531 #ifdef HAVE_canonicalize_funcptr_for_compare
10532 /* If function pointers need to be "canonicalized" before they can
10533 be reliably compared, then canonicalize them. */
10534 if (HAVE_canonicalize_funcptr_for_compare
10535 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10536 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10537 == FUNCTION_TYPE))
10538 {
10539 rtx new_op0 = gen_reg_rtx (mode);
10540
10541 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10542 op0 = new_op0;
10543 }
10544
10545 if (HAVE_canonicalize_funcptr_for_compare
10546 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10547 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10548 == FUNCTION_TYPE))
10549 {
10550 rtx new_op1 = gen_reg_rtx (mode);
10551
10552 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10553 op1 = new_op1;
10554 }
10555 #endif
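  /* On targets that define HAVE_canonicalize_funcptr_for_compare, a
     function pointer may not point directly at code (it may, e.g., be a
     pointer to a descriptor), so the canonicalization above is what
     guarantees that two pointers to the same function compare equal
     bit-for-bit.  */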
10556
10557 /* Do any postincrements in the expression that was tested. */
10558 emit_queue ();
10559
10560 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10561 ((mode == BLKmode)
10562 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10563 if_false_label, if_true_label);
10564 }
10565 \f
10566 /* Generate code to calculate EXP using a store-flag instruction
10567 and return an rtx for the result. EXP is either a comparison
10568 or a TRUTH_NOT_EXPR whose operand is a comparison.
10569
10570 If TARGET is nonzero, store the result there if convenient.
10571
10572 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
10573 cheap.
10574
10575 Return zero if there is no suitable set-flag instruction
10576 available on this machine.
10577
10578 Once expand_expr has been called on the arguments of the comparison,
10579 we are committed to doing the store flag, since it is not safe to
10580 re-evaluate the expression. We emit the store-flag insn by calling
10581 emit_store_flag, but only expand the arguments if we have a reason
10582 to believe that emit_store_flag will be successful. If we think that
10583 it will, but it isn't, we have to simulate the store-flag with a
10584 set/jump/set sequence. */
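/* For instance, a use of (a < b) as a value can often be compiled into
   a single store-flag (scc) insn on machines that provide one, instead
   of a compare, a conditional jump and two moves; which form is cheaper
   is, of course, target dependent.  */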
10585
10586 static rtx
10587 do_store_flag (exp, target, mode, only_cheap)
10588 tree exp;
10589 rtx target;
10590 enum machine_mode mode;
10591 int only_cheap;
10592 {
10593 enum rtx_code code;
10594 tree arg0, arg1, type;
10595 tree tem;
10596 enum machine_mode operand_mode;
10597 int invert = 0;
10598 int unsignedp;
10599 rtx op0, op1;
10600 enum insn_code icode;
10601 rtx subtarget = target;
10602 rtx result, label;
10603
10604 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10605 result at the end. We can't simply invert the test since it would
10606 have already been inverted if it were valid. This case occurs for
10607 some floating-point comparisons. */
10608
10609 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10610 invert = 1, exp = TREE_OPERAND (exp, 0);
10611
10612 arg0 = TREE_OPERAND (exp, 0);
10613 arg1 = TREE_OPERAND (exp, 1);
10614
10615 /* Don't crash if the comparison was erroneous. */
10616 if (arg0 == error_mark_node || arg1 == error_mark_node)
10617 return const0_rtx;
10618
10619 type = TREE_TYPE (arg0);
10620 operand_mode = TYPE_MODE (type);
10621 unsignedp = TREE_UNSIGNED (type);
10622
10623 /* We won't bother with BLKmode store-flag operations because it would mean
10624 passing a lot of information to emit_store_flag. */
10625 if (operand_mode == BLKmode)
10626 return 0;
10627
10628 /* We won't bother with store-flag operations involving function pointers
10629 when function pointers must be canonicalized before comparisons. */
10630 #ifdef HAVE_canonicalize_funcptr_for_compare
10631 if (HAVE_canonicalize_funcptr_for_compare
10632 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10633 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10634 == FUNCTION_TYPE))
10635 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10636 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10637 == FUNCTION_TYPE))))
10638 return 0;
10639 #endif
10640
10641 STRIP_NOPS (arg0);
10642 STRIP_NOPS (arg1);
10643
10644 /* Get the rtx comparison code to use. We know that EXP is a comparison
10645 operation of some type. Some comparisons against 1 and -1 can be
10646 converted to comparisons with zero. Do so here so that the tests
10647 below will be aware that we have a comparison with zero. These
10648 tests will not catch constants in the first operand, but constants
10649 are rarely passed as the first operand. */
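  /* Concretely, the conversions made below are: x < 1 becomes x <= 0
     and x >= 1 becomes x > 0 (for either signedness), while for signed
     operands only, x <= -1 becomes x < 0 and x > -1 becomes x >= 0.  */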
10650
10651 switch (TREE_CODE (exp))
10652 {
10653 case EQ_EXPR:
10654 code = EQ;
10655 break;
10656 case NE_EXPR:
10657 code = NE;
10658 break;
10659 case LT_EXPR:
10660 if (integer_onep (arg1))
10661 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10662 else
10663 code = unsignedp ? LTU : LT;
10664 break;
10665 case LE_EXPR:
10666 if (! unsignedp && integer_all_onesp (arg1))
10667 arg1 = integer_zero_node, code = LT;
10668 else
10669 code = unsignedp ? LEU : LE;
10670 break;
10671 case GT_EXPR:
10672 if (! unsignedp && integer_all_onesp (arg1))
10673 arg1 = integer_zero_node, code = GE;
10674 else
10675 code = unsignedp ? GTU : GT;
10676 break;
10677 case GE_EXPR:
10678 if (integer_onep (arg1))
10679 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10680 else
10681 code = unsignedp ? GEU : GE;
10682 break;
10683
10684 case UNORDERED_EXPR:
10685 code = UNORDERED;
10686 break;
10687 case ORDERED_EXPR:
10688 code = ORDERED;
10689 break;
10690 case UNLT_EXPR:
10691 code = UNLT;
10692 break;
10693 case UNLE_EXPR:
10694 code = UNLE;
10695 break;
10696 case UNGT_EXPR:
10697 code = UNGT;
10698 break;
10699 case UNGE_EXPR:
10700 code = UNGE;
10701 break;
10702 case UNEQ_EXPR:
10703 code = UNEQ;
10704 break;
10705
10706 default:
10707 abort ();
10708 }
10709
10710 /* Put a constant second. */
10711 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10712 {
10713 tem = arg0; arg0 = arg1; arg1 = tem;
10714 code = swap_condition (code);
10715 }
10716
10717 /* If this is an equality or inequality test of a single bit, we can
10718 do this by shifting the bit being tested to the low-order bit and
10719 masking the result with the constant 1. If the condition was EQ,
10720 we xor it with 1. This does not require an scc insn and is faster
10721 than an scc insn even if we have it. */
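  /* For example, (x & 0x08) != 0 can be compiled roughly as
     (x >> 3) & 1, and (x & 0x08) == 0 as ((x >> 3) ^ 1) & 1, with the
     shift, XOR and AND each omitted below when they are not needed.  */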
10722
10723 if ((code == NE || code == EQ)
10724 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10725 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10726 {
10727 tree inner = TREE_OPERAND (arg0, 0);
10728 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10729 int ops_unsignedp;
10730
10731 /* If INNER is a right shift of a constant and it plus BITNUM does
10732 not overflow, adjust BITNUM and INNER. */
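    /* For example, a test of bit 1 of (x >> 2) becomes a test of
       bit 3 of x itself.  */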
10733
10734 if (TREE_CODE (inner) == RSHIFT_EXPR
10735 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10736 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10737 && bitnum < TYPE_PRECISION (type)
10738 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10739 bitnum - TYPE_PRECISION (type)))
10740 {
10741 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10742 inner = TREE_OPERAND (inner, 0);
10743 }
10744
10745 /* If we are going to be able to omit the AND below, we must do our
10746 operations as unsigned. If we must use the AND, we have a choice.
10747 Normally unsigned is faster, but for some machines signed is. */
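    /* When the bit tested is the sign bit of the type, a logical
       (unsigned) right shift leaves exactly 0 or 1, so the trailing AND
       can be dropped; an arithmetic shift would leave 0 or -1.  */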
10748 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10749 #ifdef LOAD_EXTEND_OP
10750 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10751 #else
10752 : 1
10753 #endif
10754 );
10755
10756 if (! get_subtarget (subtarget)
10757 || GET_MODE (subtarget) != operand_mode
10758 || ! safe_from_p (subtarget, inner, 1))
10759 subtarget = 0;
10760
10761 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10762
10763 if (bitnum != 0)
10764 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10765 size_int (bitnum), subtarget, ops_unsignedp);
10766
10767 if (GET_MODE (op0) != mode)
10768 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10769
10770 if ((code == EQ && ! invert) || (code == NE && invert))
10771 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10772 ops_unsignedp, OPTAB_LIB_WIDEN);
10773
10774 /* Put the AND last so it can combine with more things. */
10775 if (bitnum != TYPE_PRECISION (type) - 1)
10776 op0 = expand_and (mode, op0, const1_rtx, subtarget);
10777
10778 return op0;
10779 }
10780
10781 /* Now see if we are likely to be able to do this. Return if not. */
10782 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10783 return 0;
10784
10785 icode = setcc_gen_code[(int) code];
10786 if (icode == CODE_FOR_nothing
10787 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10788 {
10789 /* We can only do this if it is one of the special cases that
10790 can be handled without an scc insn. */
10791 if ((code == LT && integer_zerop (arg1))
10792 || (! only_cheap && code == GE && integer_zerop (arg1)))
10793 ;
10794 else if (BRANCH_COST >= 0
10795 && ! only_cheap && (code == NE || code == EQ)
10796 && TREE_CODE (type) != REAL_TYPE
10797 && ((abs_optab->handlers[(int) operand_mode].insn_code
10798 != CODE_FOR_nothing)
10799 || (ffs_optab->handlers[(int) operand_mode].insn_code
10800 != CODE_FOR_nothing)))
10801 ;
10802 else
10803 return 0;
10804 }
10805
10806 if (! get_subtarget (target)
10807 || GET_MODE (subtarget) != operand_mode
10808 || ! safe_from_p (subtarget, arg1, 1))
10809 subtarget = 0;
10810
10811 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10812 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10813
10814 if (target == 0)
10815 target = gen_reg_rtx (mode);
10816
10817 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10818 because, if emit_store_flag does anything, it will succeed and
10819 OP0 and OP1 will not be used subsequently. */
10820
10821 result = emit_store_flag (target, code,
10822 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10823 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10824 operand_mode, unsignedp, 1);
10825
10826 if (result)
10827 {
10828 if (invert)
10829 result = expand_binop (mode, xor_optab, result, const1_rtx,
10830 result, 0, OPTAB_LIB_WIDEN);
10831 return result;
10832 }
10833
10834 /* If this failed, we have to do this with set/compare/jump/set code. */
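  /* Schematically, the insns emitted below are:
         target = 1;    (0 if INVERT)
         if (op0 CODE op1) goto label;
         target = 0;    (1 if INVERT)
      label:
     unless the comparison folds to a constant, in which case the
     constant result is returned directly.  */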
10835 if (GET_CODE (target) != REG
10836 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10837 target = gen_reg_rtx (GET_MODE (target));
10838
10839 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10840 result = compare_from_rtx (op0, op1, code, unsignedp,
10841 operand_mode, NULL_RTX);
10842 if (GET_CODE (result) == CONST_INT)
10843 return (((result == const0_rtx && ! invert)
10844 || (result != const0_rtx && invert))
10845 ? const0_rtx : const1_rtx);
10846
10847 /* The code of RESULT may not match CODE if compare_from_rtx
10848 decided to swap its operands and reverse the original code.
10849
10850 We know that compare_from_rtx returns either a CONST_INT or
10851 a new comparison code, so it is safe to just extract the
10852 code from RESULT. */
10853 code = GET_CODE (result);
10854
10855 label = gen_label_rtx ();
10856 if (bcc_gen_fctn[(int) code] == 0)
10857 abort ();
10858
10859 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10860 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10861 emit_label (label);
10862
10863 return target;
10864 }
10865 \f
10866
10867 /* Stubs in case we haven't got a casesi insn. */
10868 #ifndef HAVE_casesi
10869 # define HAVE_casesi 0
10870 # define gen_casesi(a, b, c, d, e) (0)
10871 # define CODE_FOR_casesi CODE_FOR_nothing
10872 #endif
10873
10874 /* If the machine does not have a case insn that compares the bounds,
10875 this means extra overhead for dispatch tables, which raises the
10876 threshold for using them. */
10877 #ifndef CASE_VALUES_THRESHOLD
10878 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10879 #endif /* CASE_VALUES_THRESHOLD */
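/* With the defaults above, dispatch tables start to be preferred over
   compare/branch chains at roughly four case values when the target has
   a casesi pattern (which does its own bounds check) and roughly five
   otherwise; the exact decision is made by the switch-expansion code
   that calls case_values_threshold.  */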
10880
10881 unsigned int
10882 case_values_threshold ()
10883 {
10884 return CASE_VALUES_THRESHOLD;
10885 }
10886
10887 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10888 0 otherwise (i.e. if there is no casesi instruction). */
10889 int
10890 try_casesi (index_type, index_expr, minval, range,
10891 table_label, default_label)
10892 tree index_type, index_expr, minval, range;
10893 rtx table_label ATTRIBUTE_UNUSED;
10894 rtx default_label;
10895 {
10896 enum machine_mode index_mode = SImode;
10897 int index_bits = GET_MODE_BITSIZE (index_mode);
10898 rtx op1, op2, index;
10899 enum machine_mode op_mode;
10900
10901 if (! HAVE_casesi)
10902 return 0;
10903
10904 /* Convert the index to SImode. */
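  /* When the index is wider than SImode, simply truncating it could
     discard significant bits.  So below we first subtract MINVAL and
     range-check the result in the original mode; only values that pass
     the check, and therefore fit in the table's range, are truncated to
     SImode.  */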
10905 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10906 {
10907 enum machine_mode omode = TYPE_MODE (index_type);
10908 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10909
10910 /* We must handle the endpoints in the original mode. */
10911 index_expr = build (MINUS_EXPR, index_type,
10912 index_expr, minval);
10913 minval = integer_zero_node;
10914 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10915 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10916 omode, 1, default_label);
10917 /* Now we can safely truncate. */
10918 index = convert_to_mode (index_mode, index, 0);
10919 }
10920 else
10921 {
10922 if (TYPE_MODE (index_type) != index_mode)
10923 {
10924 index_expr = convert ((*lang_hooks.types.type_for_size)
10925 (index_bits, 0), index_expr);
10926 index_type = TREE_TYPE (index_expr);
10927 }
10928
10929 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10930 }
10931 emit_queue ();
10932 index = protect_from_queue (index, 0);
10933 do_pending_stack_adjust ();
10934
10935 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10936 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10937 (index, op_mode))
10938 index = copy_to_mode_reg (op_mode, index);
10939
10940 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10941
10942 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10943 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10944 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10945 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10946 (op1, op_mode))
10947 op1 = copy_to_mode_reg (op_mode, op1);
10948
10949 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10950
10951 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10952 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10953 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10954 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10955 (op2, op_mode))
10956 op2 = copy_to_mode_reg (op_mode, op2);
10957
10958 emit_jump_insn (gen_casesi (index, op1, op2,
10959 table_label, default_label));
10960 return 1;
10961 }
10962
10963 /* Attempt to generate a tablejump instruction; same concept. */
10964 #ifndef HAVE_tablejump
10965 #define HAVE_tablejump 0
10966 #define gen_tablejump(x, y) (0)
10967 #endif
10968
10969 /* Subroutine of the next function.
10970
10971 INDEX is the value being switched on, with the lowest value
10972 in the table already subtracted.
10973 MODE is its expected mode (needed if INDEX is constant).
10974 RANGE is the length of the jump table.
10975 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10976
10977 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10978 index value is out of range. */
10979
10980 static void
10981 do_tablejump (index, mode, range, table_label, default_label)
10982 rtx index, range, table_label, default_label;
10983 enum machine_mode mode;
10984 {
10985 rtx temp, vector;
10986
10987 if (INTVAL (range) > cfun->max_jumptable_ents)
10988 cfun->max_jumptable_ents = INTVAL (range);
10989
10990 /* Do an unsigned comparison (in the proper mode) between the index
10991 expression and the value which represents the length of the range.
10992 Since we just finished subtracting the lower bound of the range
10993 from the index expression, this comparison allows us to simultaneously
10994 check that the original index expression value is both greater than
10995 or equal to the minimum value of the range and less than or equal to
10996 the maximum value of the range. */
10997
10998 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10999 default_label);
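  /* For a switch whose cases run from 5 to 20, say, INDEX arrives here
     already reduced by 5 and RANGE is 15, so this single unsigned test
     (INDEX > 15) sends both original values below 5 (which wrapped
     around) and values above 20 to DEFAULT_LABEL.  */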
11000
11001 /* If index is in range, it must fit in Pmode.
11002 Convert to Pmode so we can index with it. */
11003 if (mode != Pmode)
11004 index = convert_to_mode (Pmode, index, 1);
11005
11006 /* Don't let a MEM slip through, because then INDEX that comes
11007 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11008 and break_out_memory_refs will go to work on it and mess it up. */
11009 #ifdef PIC_CASE_VECTOR_ADDRESS
11010 if (flag_pic && GET_CODE (index) != REG)
11011 index = copy_to_mode_reg (Pmode, index);
11012 #endif
11013
11014 /* If flag_force_addr were to affect this address
11015 it could interfere with the tricky assumptions made
11016 about addresses that contain label-refs,
11017 which may be valid only very near the tablejump itself. */
11018 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11019 GET_MODE_SIZE, because this indicates how large insns are. The other
11020 uses should all be Pmode, because they are addresses. This code
11021 could fail if addresses and insns are not the same size. */
11022 index = gen_rtx_PLUS (Pmode,
11023 gen_rtx_MULT (Pmode, index,
11024 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11025 gen_rtx_LABEL_REF (Pmode, table_label));
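  /* That is, the table entry to load lives at roughly
     &TABLE_LABEL + INDEX * GET_MODE_SIZE (CASE_VECTOR_MODE); on PIC
     targets this address may be further rewritten by
     PIC_CASE_VECTOR_ADDRESS just below.  */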
11026 #ifdef PIC_CASE_VECTOR_ADDRESS
11027 if (flag_pic)
11028 index = PIC_CASE_VECTOR_ADDRESS (index);
11029 else
11030 #endif
11031 index = memory_address_noforce (CASE_VECTOR_MODE, index);
11032 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11033 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
11034 RTX_UNCHANGING_P (vector) = 1;
11035 convert_move (temp, vector, 0);
11036
11037 emit_jump_insn (gen_tablejump (temp, table_label));
11038
11039 /* If we are generating PIC code or if the table is PC-relative, the
11040 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11041 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11042 emit_barrier ();
11043 }
11044
11045 int
11046 try_tablejump (index_type, index_expr, minval, range,
11047 table_label, default_label)
11048 tree index_type, index_expr, minval, range;
11049 rtx table_label, default_label;
11050 {
11051 rtx index;
11052
11053 if (! HAVE_tablejump)
11054 return 0;
11055
11056 index_expr = fold (build (MINUS_EXPR, index_type,
11057 convert (index_type, index_expr),
11058 convert (index_type, minval)));
11059 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
11060 emit_queue ();
11061 index = protect_from_queue (index, 0);
11062 do_pending_stack_adjust ();
11063
11064 do_tablejump (index, TYPE_MODE (index_type),
11065 convert_modes (TYPE_MODE (index_type),
11066 TYPE_MODE (TREE_TYPE (range)),
11067 expand_expr (range, NULL_RTX,
11068 VOIDmode, 0),
11069 TREE_UNSIGNED (TREE_TYPE (range))),
11070 table_label, default_label);
11071 return 1;
11072 }
11073
11074 /* Nonzero if the mode is a valid vector mode for this architecture.
11075 This returns nonzero even if there is no hardware support for the
11076 vector mode, but we can emulate with narrower modes. */
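/* For instance, a target with no direct V2DImode support can still
   accept V2DI here provided DImode moves exist, since a V2DI value can
   be handled as a pair of DIs (see the final test below).  */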
11077
11078 int
11079 vector_mode_valid_p (mode)
11080 enum machine_mode mode;
11081 {
11082 enum mode_class class = GET_MODE_CLASS (mode);
11083 enum machine_mode innermode;
11084
11085 /* Doh! What's going on? */
11086 if (class != MODE_VECTOR_INT
11087 && class != MODE_VECTOR_FLOAT)
11088 return 0;
11089
11090 /* Hardware support. Woo hoo! */
11091 if (VECTOR_MODE_SUPPORTED_P (mode))
11092 return 1;
11093
11094 innermode = GET_MODE_INNER (mode);
11095
11096 /* We should probably return 1 if requesting V4DI when we have no DI
11097 but do have V2DI; however, that case is probably very unlikely. */
11098
11099 /* If we have support for the inner mode, we can safely emulate it.
11100 We may not have V2DI, but we can emulate with a pair of DIs. */
11101 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
11102 }
11103
11104 #include "gt-expr.h"