1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-config.h"
34 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
35 #include "expr.h"
36 #include "optabs.h"
37 #include "libfuncs.h"
38 #include "recog.h"
39 #include "reload.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "toplev.h"
43 #include "ggc.h"
44 #include "intl.h"
45 #include "tm_p.h"
46
47 /* Decide whether a function's arguments should be processed
48 from first to last or from last to first.
49
50 They should if the stack and args grow in opposite directions, but
51 only if we have push insns. */
52
53 #ifdef PUSH_ROUNDING
54
55 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
56 #define PUSH_ARGS_REVERSED /* If it's last to first. */
57 #endif
58
59 #endif
60
61 #ifndef STACK_PUSH_CODE
62 #ifdef STACK_GROWS_DOWNWARD
63 #define STACK_PUSH_CODE PRE_DEC
64 #else
65 #define STACK_PUSH_CODE PRE_INC
66 #endif
67 #endif
68
69 /* Assume that case vectors are not pc-relative. */
70 #ifndef CASE_VECTOR_PC_RELATIVE
71 #define CASE_VECTOR_PC_RELATIVE 0
72 #endif
73
74 /* Hook called by safe_from_p for language-specific tree codes. It is
75 up to the language front-end to install a hook if it has any such
76 codes that safe_from_p needs to know about. Since safe_from_p will
77 recursively explore the TREE_OPERANDs of an expression, this hook
78 should not reexamine those pieces. This routine may recursively
79 call safe_from_p; it should always pass `0' as the TOP_P
80 parameter. */
81 int (*lang_safe_from_p) PARAMS ((rtx, tree));
82
83 /* If this is nonzero, we do not bother generating VOLATILE
84 around volatile memory references, and we are willing to
85 output indirect addresses. If cse is to follow, we reject
86 indirect addresses so a useful potential cse is generated;
87 if it is used only once, instruction combination will produce
88 the same indirect address eventually. */
89 int cse_not_expected;
90
91 /* Don't check memory usage, since code is being emitted to check memory
92 usage. Used when current_function_check_memory_usage is true, to avoid
93 infinite recursion. */
94 static int in_check_memory_usage;
95
96 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
97 static tree placeholder_list = 0;
98
99 /* This structure is used by move_by_pieces to describe the move to
100 be performed. */
101 struct move_by_pieces
102 {
103 rtx to;
104 rtx to_addr;
105 int autinc_to;
106 int explicit_inc_to;
107 rtx from;
108 rtx from_addr;
109 int autinc_from;
110 int explicit_inc_from;
111 unsigned HOST_WIDE_INT len;
112 HOST_WIDE_INT offset;
113 int reverse;
114 };
115
116 /* This structure is used by store_by_pieces to describe the store to
117 be performed. */
118
119 struct store_by_pieces
120 {
121 rtx to;
122 rtx to_addr;
123 int autinc_to;
124 int explicit_inc_to;
125 unsigned HOST_WIDE_INT len;
126 HOST_WIDE_INT offset;
127 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
128 PTR constfundata;
129 int reverse;
130 };
131
132 extern struct obstack permanent_obstack;
133
134 static rtx get_push_address PARAMS ((int));
135
136 static rtx enqueue_insn PARAMS ((rtx, rtx));
137 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
138 PARAMS ((unsigned HOST_WIDE_INT,
139 unsigned int));
140 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
141 struct move_by_pieces *));
142 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
143 enum machine_mode));
144 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
145 unsigned int));
146 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
147 unsigned int));
148 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
149 enum machine_mode,
150 struct store_by_pieces *));
151 static rtx get_subtarget PARAMS ((rtx));
152 static int is_zeros_p PARAMS ((tree));
153 static int mostly_zeros_p PARAMS ((tree));
154 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
155 HOST_WIDE_INT, enum machine_mode,
156 tree, tree, unsigned int, int,
157 int));
158 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
159 HOST_WIDE_INT));
160 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
161 HOST_WIDE_INT, enum machine_mode,
162 tree, enum machine_mode, int,
163 unsigned int, HOST_WIDE_INT, int));
164 static enum memory_use_mode
165 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
166 static rtx var_rtx PARAMS ((tree));
167 static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
168 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
169 static rtx expand_increment PARAMS ((tree, int, int));
170 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
171 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
172 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
173 rtx, rtx));
174 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
175 #ifdef PUSH_ROUNDING
176 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
177 #endif
178 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
179
180 /* Record for each mode whether we can move a register directly to or
181 from an object of that mode in memory. If we can't, we won't try
182 to use that mode directly when accessing a field of that mode. */
183
184 static char direct_load[NUM_MACHINE_MODES];
185 static char direct_store[NUM_MACHINE_MODES];
186
187 /* If a memory-to-memory move would take MOVE_RATIO or more simple
188 move-instruction sequences, we will do a movstr or libcall instead. */
189
190 #ifndef MOVE_RATIO
191 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
192 #define MOVE_RATIO 2
193 #else
194 /* If we are optimizing for space (-Os), cut down the default move ratio. */
195 #define MOVE_RATIO (optimize_size ? 3 : 15)
196 #endif
197 #endif
198
199 /* This macro is used to determine whether move_by_pieces should be called
200 to perform a structure copy. */
201 #ifndef MOVE_BY_PIECES_P
202 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
203 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
204 #endif
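
/* Illustrative worked example (hypothetical 32-bit target, not part of the
   original source): copying 16 word-aligned bytes takes 16/4 = 4 SImode
   moves as counted by move_by_pieces_ninsns; 4 is below the default
   MOVE_RATIO of 15 (3 with -Os), so MOVE_BY_PIECES_P is true and
   emit_block_move will expand the copy inline with move_by_pieces rather
   than using a movstr pattern or a library call.  */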
205
206 /* This array records the insn_code of insns to perform block moves. */
207 enum insn_code movstr_optab[NUM_MACHINE_MODES];
208
209 /* This array records the insn_code of insns to perform block clears. */
210 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
211
212 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
213
214 #ifndef SLOW_UNALIGNED_ACCESS
215 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
216 #endif
217 \f
218 /* This is run once per compilation to set up which modes can be used
219 directly in memory and to initialize the block move optab. */
220
221 void
222 init_expr_once ()
223 {
224 rtx insn, pat;
225 enum machine_mode mode;
226 int num_clobbers;
227 rtx mem, mem1;
228
229 start_sequence ();
230
231 /* Try indexing by frame ptr and try by stack ptr.
232 It is known that on the Convex the stack ptr isn't a valid index.
233 With luck, one or the other is valid on any machine. */
234 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
235 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
236
237 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
238 pat = PATTERN (insn);
239
240 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
241 mode = (enum machine_mode) ((int) mode + 1))
242 {
243 int regno;
244 rtx reg;
245
246 direct_load[(int) mode] = direct_store[(int) mode] = 0;
247 PUT_MODE (mem, mode);
248 PUT_MODE (mem1, mode);
249
250 /* See if there is some register that can be used in this mode and
251 directly loaded or stored from memory. */
252
253 if (mode != VOIDmode && mode != BLKmode)
254 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
255 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
256 regno++)
257 {
258 if (! HARD_REGNO_MODE_OK (regno, mode))
259 continue;
260
261 reg = gen_rtx_REG (mode, regno);
262
263 SET_SRC (pat) = mem;
264 SET_DEST (pat) = reg;
265 if (recog (pat, insn, &num_clobbers) >= 0)
266 direct_load[(int) mode] = 1;
267
268 SET_SRC (pat) = mem1;
269 SET_DEST (pat) = reg;
270 if (recog (pat, insn, &num_clobbers) >= 0)
271 direct_load[(int) mode] = 1;
272
273 SET_SRC (pat) = reg;
274 SET_DEST (pat) = mem;
275 if (recog (pat, insn, &num_clobbers) >= 0)
276 direct_store[(int) mode] = 1;
277
278 SET_SRC (pat) = reg;
279 SET_DEST (pat) = mem1;
280 if (recog (pat, insn, &num_clobbers) >= 0)
281 direct_store[(int) mode] = 1;
282 }
283 }
284
285 end_sequence ();
286 }
287
288 /* This is run at the start of compiling a function. */
289
290 void
291 init_expr ()
292 {
293 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
294
295 pending_chain = 0;
296 pending_stack_adjust = 0;
297 stack_pointer_delta = 0;
298 inhibit_defer_pop = 0;
299 saveregs_value = 0;
300 apply_args_value = 0;
301 forced_labels = 0;
302 }
303
304 void
305 mark_expr_status (p)
306 struct expr_status *p;
307 {
308 if (p == NULL)
309 return;
310
311 ggc_mark_rtx (p->x_saveregs_value);
312 ggc_mark_rtx (p->x_apply_args_value);
313 ggc_mark_rtx (p->x_forced_labels);
314 }
315
316 void
317 free_expr_status (f)
318 struct function *f;
319 {
320 free (f->expr);
321 f->expr = NULL;
322 }
323
324 /* Small sanity check that the queue is empty at the end of a function. */
325
326 void
327 finish_expr_for_function ()
328 {
329 if (pending_chain)
330 abort ();
331 }
332 \f
333 /* Manage the queue of increment instructions to be output
334 for POSTINCREMENT_EXPR expressions, etc. */
335
336 /* Queue up to increment (or change) VAR later. BODY says how:
337 BODY should be the same thing you would pass to emit_insn
338 to increment right away. It will go to emit_insn later on.
339
340 The value is a QUEUED expression to be used in place of VAR
341 where you want to guarantee the pre-incrementation value of VAR. */
342
343 static rtx
344 enqueue_insn (var, body)
345 rtx var, body;
346 {
347 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
348 body, pending_chain);
349 return pending_chain;
350 }
351
352 /* Use protect_from_queue to convert a QUEUED expression
353 into something that you can put immediately into an instruction.
354 If the queued incrementation has not happened yet,
355 protect_from_queue returns the variable itself.
356 If the incrementation has happened, protect_from_queue returns a temp
357 that contains a copy of the old value of the variable.
358
359 Any time an rtx which might possibly be a QUEUED is to be put
360 into an instruction, it must be passed through protect_from_queue first.
361 QUEUED expressions are not meaningful in instructions.
362
363 Do not pass a value through protect_from_queue and then hold
364 on to it for a while before putting it in an instruction!
365 If the queue is flushed in between, incorrect code will result. */
366
367 rtx
368 protect_from_queue (x, modify)
369 rtx x;
370 int modify;
371 {
372 RTX_CODE code = GET_CODE (x);
373
374 #if 0 /* A QUEUED can hang around after the queue is forced out. */
375 /* Shortcut for most common case. */
376 if (pending_chain == 0)
377 return x;
378 #endif
379
380 if (code != QUEUED)
381 {
382 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
383 use of autoincrement. Make a copy of the contents of the memory
384 location rather than a copy of the address, but not if the value is
385 of mode BLKmode. Don't modify X in place since it might be
386 shared. */
387 if (code == MEM && GET_MODE (x) != BLKmode
388 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
389 {
390 rtx y = XEXP (x, 0);
391 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
392
393 if (QUEUED_INSN (y))
394 {
395 rtx temp = gen_reg_rtx (GET_MODE (x));
396
397 emit_insn_before (gen_move_insn (temp, new),
398 QUEUED_INSN (y));
399 return temp;
400 }
401
402 /* Copy the address into a pseudo, so that the returned value
403 remains correct across calls to emit_queue. */
404 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
405 }
406
407 /* Otherwise, recursively protect the subexpressions of all
408 the kinds of rtx's that can contain a QUEUED. */
409 if (code == MEM)
410 {
411 rtx tem = protect_from_queue (XEXP (x, 0), 0);
412 if (tem != XEXP (x, 0))
413 {
414 x = copy_rtx (x);
415 XEXP (x, 0) = tem;
416 }
417 }
418 else if (code == PLUS || code == MULT)
419 {
420 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
421 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
422 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
423 {
424 x = copy_rtx (x);
425 XEXP (x, 0) = new0;
426 XEXP (x, 1) = new1;
427 }
428 }
429 return x;
430 }
431 /* If the increment has not happened, use the variable itself. Copy it
432 into a new pseudo so that the value remains correct across calls to
433 emit_queue. */
434 if (QUEUED_INSN (x) == 0)
435 return copy_to_reg (QUEUED_VAR (x));
436 /* If the increment has happened and a pre-increment copy exists,
437 use that copy. */
438 if (QUEUED_COPY (x) != 0)
439 return QUEUED_COPY (x);
440 /* The increment has happened but we haven't set up a pre-increment copy.
441 Set one up now, and use it. */
442 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
443 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
444 QUEUED_INSN (x));
445 return QUEUED_COPY (x);
446 }
447
448 /* Return nonzero if X contains a QUEUED expression:
449 if it contains anything that will be altered by a queued increment.
450 We handle only combinations of MEM, PLUS, MINUS and MULT operators
451 since memory addresses generally contain only those. */
452
453 int
454 queued_subexp_p (x)
455 rtx x;
456 {
457 enum rtx_code code = GET_CODE (x);
458 switch (code)
459 {
460 case QUEUED:
461 return 1;
462 case MEM:
463 return queued_subexp_p (XEXP (x, 0));
464 case MULT:
465 case PLUS:
466 case MINUS:
467 return (queued_subexp_p (XEXP (x, 0))
468 || queued_subexp_p (XEXP (x, 1)));
469 default:
470 return 0;
471 }
472 }
473
474 /* Perform all the pending incrementations. */
475
476 void
477 emit_queue ()
478 {
479 rtx p;
480 while ((p = pending_chain))
481 {
482 rtx body = QUEUED_BODY (p);
483
484 if (GET_CODE (body) == SEQUENCE)
485 {
486 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
487 emit_insn (QUEUED_BODY (p));
488 }
489 else
490 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
491 pending_chain = QUEUED_NEXT (p);
492 }
493 }
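
/* Illustrative sketch (hypothetical, kept under #if 0 so it is never
   compiled): the intended calling pattern for the queue machinery above
   when expanding something like VAR++ whose pre-increment value is still
   needed.  The pseudo VAR and the SImode choice are made up for the
   example.  */
#if 0
{
  rtx var = gen_reg_rtx (SImode);
  /* Queue "var = var + 1" instead of emitting it immediately.  */
  rtx queued = enqueue_insn (var, gen_add2_insn (var, const1_rtx));
  /* Get an rtx guaranteed to hold the pre-increment value of VAR.  */
  rtx old_value = protect_from_queue (queued, 0);
  /* ... use OLD_VALUE in further expansion ... */
  emit_queue ();	/* Now the queued increment is actually emitted.  */
}
#endif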
494 \f
495 /* Copy data from FROM to TO, where the machine modes are not the same.
496 Both modes may be integer, or both may be floating.
497 UNSIGNEDP should be nonzero if FROM is an unsigned type.
498 This causes zero-extension instead of sign-extension. */
499
500 void
501 convert_move (to, from, unsignedp)
502 rtx to, from;
503 int unsignedp;
504 {
505 enum machine_mode to_mode = GET_MODE (to);
506 enum machine_mode from_mode = GET_MODE (from);
507 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
508 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
509 enum insn_code code;
510 rtx libcall;
511
512 /* rtx code for making an equivalent value. */
513 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
514
515 to = protect_from_queue (to, 1);
516 from = protect_from_queue (from, 0);
517
518 if (to_real != from_real)
519 abort ();
520
521 /* If FROM is a SUBREG that indicates that we have already done at least
522 the required extension, strip it. We don't handle such SUBREGs as
523 TO here. */
524
525 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
526 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
527 >= GET_MODE_SIZE (to_mode))
528 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
529 from = gen_lowpart (to_mode, from), from_mode = to_mode;
530
531 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
532 abort ();
533
534 if (to_mode == from_mode
535 || (from_mode == VOIDmode && CONSTANT_P (from)))
536 {
537 emit_move_insn (to, from);
538 return;
539 }
540
541 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
542 {
543 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
544 abort ();
545
546 if (VECTOR_MODE_P (to_mode))
547 from = gen_rtx_SUBREG (to_mode, from, 0);
548 else
549 to = gen_rtx_SUBREG (from_mode, to, 0);
550
551 emit_move_insn (to, from);
552 return;
553 }
554
555 if (to_real != from_real)
556 abort ();
557
558 if (to_real)
559 {
560 rtx value, insns;
561
562 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
563 {
564 /* Try converting directly if the insn is supported. */
565 if ((code = can_extend_p (to_mode, from_mode, 0))
566 != CODE_FOR_nothing)
567 {
568 emit_unop_insn (code, to, from, UNKNOWN);
569 return;
570 }
571 }
572
573 #ifdef HAVE_trunchfqf2
574 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
575 {
576 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
577 return;
578 }
579 #endif
580 #ifdef HAVE_trunctqfqf2
581 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
582 {
583 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
584 return;
585 }
586 #endif
587 #ifdef HAVE_truncsfqf2
588 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
589 {
590 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
591 return;
592 }
593 #endif
594 #ifdef HAVE_truncdfqf2
595 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
596 {
597 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
598 return;
599 }
600 #endif
601 #ifdef HAVE_truncxfqf2
602 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
603 {
604 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
605 return;
606 }
607 #endif
608 #ifdef HAVE_trunctfqf2
609 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
610 {
611 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
612 return;
613 }
614 #endif
615
616 #ifdef HAVE_trunctqfhf2
617 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
618 {
619 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
620 return;
621 }
622 #endif
623 #ifdef HAVE_truncsfhf2
624 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
625 {
626 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
627 return;
628 }
629 #endif
630 #ifdef HAVE_truncdfhf2
631 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
632 {
633 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
634 return;
635 }
636 #endif
637 #ifdef HAVE_truncxfhf2
638 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
639 {
640 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
641 return;
642 }
643 #endif
644 #ifdef HAVE_trunctfhf2
645 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
646 {
647 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
648 return;
649 }
650 #endif
651
652 #ifdef HAVE_truncsftqf2
653 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
654 {
655 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
656 return;
657 }
658 #endif
659 #ifdef HAVE_truncdftqf2
660 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
661 {
662 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
663 return;
664 }
665 #endif
666 #ifdef HAVE_truncxftqf2
667 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
668 {
669 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
670 return;
671 }
672 #endif
673 #ifdef HAVE_trunctftqf2
674 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
675 {
676 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
677 return;
678 }
679 #endif
680
681 #ifdef HAVE_truncdfsf2
682 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
683 {
684 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
685 return;
686 }
687 #endif
688 #ifdef HAVE_truncxfsf2
689 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
690 {
691 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
692 return;
693 }
694 #endif
695 #ifdef HAVE_trunctfsf2
696 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
697 {
698 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
699 return;
700 }
701 #endif
702 #ifdef HAVE_truncxfdf2
703 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
704 {
705 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
706 return;
707 }
708 #endif
709 #ifdef HAVE_trunctfdf2
710 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
711 {
712 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
713 return;
714 }
715 #endif
716
717 libcall = (rtx) 0;
718 switch (from_mode)
719 {
720 case SFmode:
721 switch (to_mode)
722 {
723 case DFmode:
724 libcall = extendsfdf2_libfunc;
725 break;
726
727 case XFmode:
728 libcall = extendsfxf2_libfunc;
729 break;
730
731 case TFmode:
732 libcall = extendsftf2_libfunc;
733 break;
734
735 default:
736 break;
737 }
738 break;
739
740 case DFmode:
741 switch (to_mode)
742 {
743 case SFmode:
744 libcall = truncdfsf2_libfunc;
745 break;
746
747 case XFmode:
748 libcall = extenddfxf2_libfunc;
749 break;
750
751 case TFmode:
752 libcall = extenddftf2_libfunc;
753 break;
754
755 default:
756 break;
757 }
758 break;
759
760 case XFmode:
761 switch (to_mode)
762 {
763 case SFmode:
764 libcall = truncxfsf2_libfunc;
765 break;
766
767 case DFmode:
768 libcall = truncxfdf2_libfunc;
769 break;
770
771 default:
772 break;
773 }
774 break;
775
776 case TFmode:
777 switch (to_mode)
778 {
779 case SFmode:
780 libcall = trunctfsf2_libfunc;
781 break;
782
783 case DFmode:
784 libcall = trunctfdf2_libfunc;
785 break;
786
787 default:
788 break;
789 }
790 break;
791
792 default:
793 break;
794 }
795
796 if (libcall == (rtx) 0)
797 /* This conversion is not implemented yet. */
798 abort ();
799
800 start_sequence ();
801 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
802 1, from, from_mode);
803 insns = get_insns ();
804 end_sequence ();
805 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
806 from));
807 return;
808 }
809
810 /* Now both modes are integers. */
811
812 /* Handle expanding beyond a word. */
813 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
814 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
815 {
816 rtx insns;
817 rtx lowpart;
818 rtx fill_value;
819 rtx lowfrom;
820 int i;
821 enum machine_mode lowpart_mode;
822 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
823
824 /* Try converting directly if the insn is supported. */
825 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
826 != CODE_FOR_nothing)
827 {
828 /* If FROM is a SUBREG, put it into a register. Do this
829 so that we always generate the same set of insns for
830 better cse'ing; if an intermediate assignment occurred,
831 we won't be doing the operation directly on the SUBREG. */
832 if (optimize > 0 && GET_CODE (from) == SUBREG)
833 from = force_reg (from_mode, from);
834 emit_unop_insn (code, to, from, equiv_code);
835 return;
836 }
837 /* Next, try converting via full word. */
838 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
839 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
840 != CODE_FOR_nothing))
841 {
842 if (GET_CODE (to) == REG)
843 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
844 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
845 emit_unop_insn (code, to,
846 gen_lowpart (word_mode, to), equiv_code);
847 return;
848 }
849
850 /* No special multiword conversion insn; do it by hand. */
851 start_sequence ();
852
853 /* Since we will turn this into a no conflict block, we must ensure
854 that the source does not overlap the target. */
855
856 if (reg_overlap_mentioned_p (to, from))
857 from = force_reg (from_mode, from);
858
859 /* Get a copy of FROM widened to a word, if necessary. */
860 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
861 lowpart_mode = word_mode;
862 else
863 lowpart_mode = from_mode;
864
865 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
866
867 lowpart = gen_lowpart (lowpart_mode, to);
868 emit_move_insn (lowpart, lowfrom);
869
870 /* Compute the value to put in each remaining word. */
871 if (unsignedp)
872 fill_value = const0_rtx;
873 else
874 {
875 #ifdef HAVE_slt
876 if (HAVE_slt
877 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
878 && STORE_FLAG_VALUE == -1)
879 {
880 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
881 lowpart_mode, 0, 0);
882 fill_value = gen_reg_rtx (word_mode);
883 emit_insn (gen_slt (fill_value));
884 }
885 else
886 #endif
887 {
888 fill_value
889 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
890 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
891 NULL_RTX, 0);
892 fill_value = convert_to_mode (word_mode, fill_value, 1);
893 }
894 }
895
896 /* Fill the remaining words. */
897 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
898 {
899 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
900 rtx subword = operand_subword (to, index, 1, to_mode);
901
902 if (subword == 0)
903 abort ();
904
905 if (fill_value != subword)
906 emit_move_insn (subword, fill_value);
907 }
908
909 insns = get_insns ();
910 end_sequence ();
911
912 emit_no_conflict_block (insns, to, from, NULL_RTX,
913 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
914 return;
915 }
916
917 /* Truncating multi-word to a word or less. */
918 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
919 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
920 {
921 if (!((GET_CODE (from) == MEM
922 && ! MEM_VOLATILE_P (from)
923 && direct_load[(int) to_mode]
924 && ! mode_dependent_address_p (XEXP (from, 0)))
925 || GET_CODE (from) == REG
926 || GET_CODE (from) == SUBREG))
927 from = force_reg (from_mode, from);
928 convert_move (to, gen_lowpart (word_mode, from), 0);
929 return;
930 }
931
932 /* Handle pointer conversion. */ /* SPEE 900220. */
933 if (to_mode == PQImode)
934 {
935 if (from_mode != QImode)
936 from = convert_to_mode (QImode, from, unsignedp);
937
938 #ifdef HAVE_truncqipqi2
939 if (HAVE_truncqipqi2)
940 {
941 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
942 return;
943 }
944 #endif /* HAVE_truncqipqi2 */
945 abort ();
946 }
947
948 if (from_mode == PQImode)
949 {
950 if (to_mode != QImode)
951 {
952 from = convert_to_mode (QImode, from, unsignedp);
953 from_mode = QImode;
954 }
955 else
956 {
957 #ifdef HAVE_extendpqiqi2
958 if (HAVE_extendpqiqi2)
959 {
960 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
961 return;
962 }
963 #endif /* HAVE_extendpqiqi2 */
964 abort ();
965 }
966 }
967
968 if (to_mode == PSImode)
969 {
970 if (from_mode != SImode)
971 from = convert_to_mode (SImode, from, unsignedp);
972
973 #ifdef HAVE_truncsipsi2
974 if (HAVE_truncsipsi2)
975 {
976 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
977 return;
978 }
979 #endif /* HAVE_truncsipsi2 */
980 abort ();
981 }
982
983 if (from_mode == PSImode)
984 {
985 if (to_mode != SImode)
986 {
987 from = convert_to_mode (SImode, from, unsignedp);
988 from_mode = SImode;
989 }
990 else
991 {
992 #ifdef HAVE_extendpsisi2
993 if (! unsignedp && HAVE_extendpsisi2)
994 {
995 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
996 return;
997 }
998 #endif /* HAVE_extendpsisi2 */
999 #ifdef HAVE_zero_extendpsisi2
1000 if (unsignedp && HAVE_zero_extendpsisi2)
1001 {
1002 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1003 return;
1004 }
1005 #endif /* HAVE_zero_extendpsisi2 */
1006 abort ();
1007 }
1008 }
1009
1010 if (to_mode == PDImode)
1011 {
1012 if (from_mode != DImode)
1013 from = convert_to_mode (DImode, from, unsignedp);
1014
1015 #ifdef HAVE_truncdipdi2
1016 if (HAVE_truncdipdi2)
1017 {
1018 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1019 return;
1020 }
1021 #endif /* HAVE_truncdipdi2 */
1022 abort ();
1023 }
1024
1025 if (from_mode == PDImode)
1026 {
1027 if (to_mode != DImode)
1028 {
1029 from = convert_to_mode (DImode, from, unsignedp);
1030 from_mode = DImode;
1031 }
1032 else
1033 {
1034 #ifdef HAVE_extendpdidi2
1035 if (HAVE_extendpdidi2)
1036 {
1037 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1038 return;
1039 }
1040 #endif /* HAVE_extendpdidi2 */
1041 abort ();
1042 }
1043 }
1044
1045 /* Now follow all the conversions between integers
1046 no more than a word long. */
1047
1048 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1049 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1050 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1051 GET_MODE_BITSIZE (from_mode)))
1052 {
1053 if (!((GET_CODE (from) == MEM
1054 && ! MEM_VOLATILE_P (from)
1055 && direct_load[(int) to_mode]
1056 && ! mode_dependent_address_p (XEXP (from, 0)))
1057 || GET_CODE (from) == REG
1058 || GET_CODE (from) == SUBREG))
1059 from = force_reg (from_mode, from);
1060 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1061 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1062 from = copy_to_reg (from);
1063 emit_move_insn (to, gen_lowpart (to_mode, from));
1064 return;
1065 }
1066
1067 /* Handle extension. */
1068 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1069 {
1070 /* Convert directly if that works. */
1071 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1072 != CODE_FOR_nothing)
1073 {
1074 emit_unop_insn (code, to, from, equiv_code);
1075 return;
1076 }
1077 else
1078 {
1079 enum machine_mode intermediate;
1080 rtx tmp;
1081 tree shift_amount;
1082
1083 /* Search for a mode to convert via. */
1084 for (intermediate = from_mode; intermediate != VOIDmode;
1085 intermediate = GET_MODE_WIDER_MODE (intermediate))
1086 if (((can_extend_p (to_mode, intermediate, unsignedp)
1087 != CODE_FOR_nothing)
1088 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1089 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1090 GET_MODE_BITSIZE (intermediate))))
1091 && (can_extend_p (intermediate, from_mode, unsignedp)
1092 != CODE_FOR_nothing))
1093 {
1094 convert_move (to, convert_to_mode (intermediate, from,
1095 unsignedp), unsignedp);
1096 return;
1097 }
1098
1099 /* No suitable intermediate mode.
1100 Generate what we need with shifts. */
1101 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1102 - GET_MODE_BITSIZE (from_mode), 0);
1103 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1104 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1105 to, unsignedp);
1106 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1107 to, unsignedp);
1108 if (tmp != to)
1109 emit_move_insn (to, tmp);
1110 return;
1111 }
1112 }
1113
1114 /* Support special truncate insns for certain modes. */
1115
1116 if (from_mode == DImode && to_mode == SImode)
1117 {
1118 #ifdef HAVE_truncdisi2
1119 if (HAVE_truncdisi2)
1120 {
1121 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1122 return;
1123 }
1124 #endif
1125 convert_move (to, force_reg (from_mode, from), unsignedp);
1126 return;
1127 }
1128
1129 if (from_mode == DImode && to_mode == HImode)
1130 {
1131 #ifdef HAVE_truncdihi2
1132 if (HAVE_truncdihi2)
1133 {
1134 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1135 return;
1136 }
1137 #endif
1138 convert_move (to, force_reg (from_mode, from), unsignedp);
1139 return;
1140 }
1141
1142 if (from_mode == DImode && to_mode == QImode)
1143 {
1144 #ifdef HAVE_truncdiqi2
1145 if (HAVE_truncdiqi2)
1146 {
1147 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1148 return;
1149 }
1150 #endif
1151 convert_move (to, force_reg (from_mode, from), unsignedp);
1152 return;
1153 }
1154
1155 if (from_mode == SImode && to_mode == HImode)
1156 {
1157 #ifdef HAVE_truncsihi2
1158 if (HAVE_truncsihi2)
1159 {
1160 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1161 return;
1162 }
1163 #endif
1164 convert_move (to, force_reg (from_mode, from), unsignedp);
1165 return;
1166 }
1167
1168 if (from_mode == SImode && to_mode == QImode)
1169 {
1170 #ifdef HAVE_truncsiqi2
1171 if (HAVE_truncsiqi2)
1172 {
1173 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1174 return;
1175 }
1176 #endif
1177 convert_move (to, force_reg (from_mode, from), unsignedp);
1178 return;
1179 }
1180
1181 if (from_mode == HImode && to_mode == QImode)
1182 {
1183 #ifdef HAVE_trunchiqi2
1184 if (HAVE_trunchiqi2)
1185 {
1186 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1187 return;
1188 }
1189 #endif
1190 convert_move (to, force_reg (from_mode, from), unsignedp);
1191 return;
1192 }
1193
1194 if (from_mode == TImode && to_mode == DImode)
1195 {
1196 #ifdef HAVE_trunctidi2
1197 if (HAVE_trunctidi2)
1198 {
1199 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1200 return;
1201 }
1202 #endif
1203 convert_move (to, force_reg (from_mode, from), unsignedp);
1204 return;
1205 }
1206
1207 if (from_mode == TImode && to_mode == SImode)
1208 {
1209 #ifdef HAVE_trunctisi2
1210 if (HAVE_trunctisi2)
1211 {
1212 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1213 return;
1214 }
1215 #endif
1216 convert_move (to, force_reg (from_mode, from), unsignedp);
1217 return;
1218 }
1219
1220 if (from_mode == TImode && to_mode == HImode)
1221 {
1222 #ifdef HAVE_trunctihi2
1223 if (HAVE_trunctihi2)
1224 {
1225 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1226 return;
1227 }
1228 #endif
1229 convert_move (to, force_reg (from_mode, from), unsignedp);
1230 return;
1231 }
1232
1233 if (from_mode == TImode && to_mode == QImode)
1234 {
1235 #ifdef HAVE_trunctiqi2
1236 if (HAVE_trunctiqi2)
1237 {
1238 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1239 return;
1240 }
1241 #endif
1242 convert_move (to, force_reg (from_mode, from), unsignedp);
1243 return;
1244 }
1245
1246 /* Handle truncation of volatile memrefs, and so on;
1247 the things that couldn't be truncated directly,
1248 and for which there was no special instruction. */
1249 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1250 {
1251 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1252 emit_move_insn (to, temp);
1253 return;
1254 }
1255
1256 /* Mode combination is not recognized. */
1257 abort ();
1258 }
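
/* Illustrative sketch (hypothetical, under #if 0): widening a QImode
   pseudo into an SImode one with convert_move.  Passing UNSIGNEDP = 1
   requests zero-extension; 0 would request sign-extension.  */
#if 0
{
  rtx byte_reg = gen_reg_rtx (QImode);
  rtx word_reg = gen_reg_rtx (SImode);
  convert_move (word_reg, byte_reg, 1);
}
#endif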
1259
1260 /* Return an rtx for a value that would result
1261 from converting X to mode MODE.
1262 Both X and MODE may be floating, or both integer.
1263 UNSIGNEDP is nonzero if X is an unsigned value.
1264 This can be done by referring to a part of X in place
1265 or by copying to a new temporary with conversion.
1266
1267 This function *must not* call protect_from_queue
1268 except when putting X into an insn (in which case convert_move does it). */
1269
1270 rtx
1271 convert_to_mode (mode, x, unsignedp)
1272 enum machine_mode mode;
1273 rtx x;
1274 int unsignedp;
1275 {
1276 return convert_modes (mode, VOIDmode, x, unsignedp);
1277 }
1278
1279 /* Return an rtx for a value that would result
1280 from converting X from mode OLDMODE to mode MODE.
1281 Both modes may be floating, or both integer.
1282 UNSIGNEDP is nonzero if X is an unsigned value.
1283
1284 This can be done by referring to a part of X in place
1285 or by copying to a new temporary with conversion.
1286
1287 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1288
1289 This function *must not* call protect_from_queue
1290 except when putting X into an insn (in which case convert_move does it). */
1291
1292 rtx
1293 convert_modes (mode, oldmode, x, unsignedp)
1294 enum machine_mode mode, oldmode;
1295 rtx x;
1296 int unsignedp;
1297 {
1298 rtx temp;
1299
1300 /* If FROM is a SUBREG that indicates that we have already done at least
1301 the required extension, strip it. */
1302
1303 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1304 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1305 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1306 x = gen_lowpart (mode, x);
1307
1308 if (GET_MODE (x) != VOIDmode)
1309 oldmode = GET_MODE (x);
1310
1311 if (mode == oldmode)
1312 return x;
1313
1314 /* There is one case that we must handle specially: If we are converting
1315 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1316 we are to interpret the constant as unsigned, gen_lowpart will do
1317 the wrong thing if the constant appears negative. What we want to do is
1318 make the high-order word of the constant zero, not all ones. */
1319
1320 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1321 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1322 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1323 {
1324 HOST_WIDE_INT val = INTVAL (x);
1325
1326 if (oldmode != VOIDmode
1327 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1328 {
1329 int width = GET_MODE_BITSIZE (oldmode);
1330
1331 /* We need to zero extend VAL. */
1332 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1333 }
1334
1335 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1336 }
1337
1338 /* We can do this with a gen_lowpart if both desired and current modes
1339 are integer, and this is either a constant integer, a register, or a
1340 non-volatile MEM. Except for the constant case where MODE is no
1341 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1342
1343 if ((GET_CODE (x) == CONST_INT
1344 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1345 || (GET_MODE_CLASS (mode) == MODE_INT
1346 && GET_MODE_CLASS (oldmode) == MODE_INT
1347 && (GET_CODE (x) == CONST_DOUBLE
1348 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1349 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1350 && direct_load[(int) mode])
1351 || (GET_CODE (x) == REG
1352 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1353 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1354 {
1355 /* ?? If we don't know OLDMODE, we have to assume here that
1356 X does not need sign- or zero-extension. This may not be
1357 the case, but it's the best we can do. */
1358 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1359 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1360 {
1361 HOST_WIDE_INT val = INTVAL (x);
1362 int width = GET_MODE_BITSIZE (oldmode);
1363
1364 /* We must sign or zero-extend in this case. Start by
1365 zero-extending, then sign extend if we need to. */
1366 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1367 if (! unsignedp
1368 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1369 val |= (HOST_WIDE_INT) (-1) << width;
1370
1371 return GEN_INT (trunc_int_for_mode (val, mode));
1372 }
1373
1374 return gen_lowpart (mode, x);
1375 }
1376
1377 temp = gen_reg_rtx (mode);
1378 convert_move (temp, x, unsignedp);
1379 return temp;
1380 }
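
/* Illustrative sketch (hypothetical, under #if 0): convert_modes can often
   produce the converted value without emitting any insns; truncating a
   CONST_INT, for instance, just yields a new constant via gen_lowpart.  */
#if 0
{
  rtx wide = GEN_INT (0x1234);
  rtx narrow = convert_modes (QImode, SImode, wide, 1);	/* 0x34 */
}
#endif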
1381 \f
1382 /* This macro is used to determine what the largest unit size that
1383 move_by_pieces can use is. */
1384
1385 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1386 move efficiently, as opposed to MOVE_MAX which is the maximum
1387 number of bytes we can move with a single instruction. */
1388
1389 #ifndef MOVE_MAX_PIECES
1390 #define MOVE_MAX_PIECES MOVE_MAX
1391 #endif
1392
1393 /* Generate several move instructions to copy LEN bytes from block FROM to
1394 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1395 and TO through protect_from_queue before calling.
1396
1397 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1398 used to push FROM to the stack.
1399
1400 ALIGN is maximum alignment we can assume. */
1401
1402 void
1403 move_by_pieces (to, from, len, align)
1404 rtx to, from;
1405 unsigned HOST_WIDE_INT len;
1406 unsigned int align;
1407 {
1408 struct move_by_pieces data;
1409 rtx to_addr, from_addr = XEXP (from, 0);
1410 unsigned int max_size = MOVE_MAX_PIECES + 1;
1411 enum machine_mode mode = VOIDmode, tmode;
1412 enum insn_code icode;
1413
1414 data.offset = 0;
1415 data.from_addr = from_addr;
1416 if (to)
1417 {
1418 to_addr = XEXP (to, 0);
1419 data.to = to;
1420 data.autinc_to
1421 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1422 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1423 data.reverse
1424 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1425 }
1426 else
1427 {
1428 to_addr = NULL_RTX;
1429 data.to = NULL_RTX;
1430 data.autinc_to = 1;
1431 #ifdef STACK_GROWS_DOWNWARD
1432 data.reverse = 1;
1433 #else
1434 data.reverse = 0;
1435 #endif
1436 }
1437 data.to_addr = to_addr;
1438 data.from = from;
1439 data.autinc_from
1440 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1441 || GET_CODE (from_addr) == POST_INC
1442 || GET_CODE (from_addr) == POST_DEC);
1443
1444 data.explicit_inc_from = 0;
1445 data.explicit_inc_to = 0;
1446 if (data.reverse) data.offset = len;
1447 data.len = len;
1448
1449 /* If copying requires more than two move insns,
1450 copy addresses to registers (to make displacements shorter)
1451 and use post-increment if available. */
1452 if (!(data.autinc_from && data.autinc_to)
1453 && move_by_pieces_ninsns (len, align) > 2)
1454 {
1455 /* Find the mode of the largest move... */
1456 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1457 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1458 if (GET_MODE_SIZE (tmode) < max_size)
1459 mode = tmode;
1460
1461 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1462 {
1463 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1464 data.autinc_from = 1;
1465 data.explicit_inc_from = -1;
1466 }
1467 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1468 {
1469 data.from_addr = copy_addr_to_reg (from_addr);
1470 data.autinc_from = 1;
1471 data.explicit_inc_from = 1;
1472 }
1473 if (!data.autinc_from && CONSTANT_P (from_addr))
1474 data.from_addr = copy_addr_to_reg (from_addr);
1475 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1476 {
1477 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1478 data.autinc_to = 1;
1479 data.explicit_inc_to = -1;
1480 }
1481 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1482 {
1483 data.to_addr = copy_addr_to_reg (to_addr);
1484 data.autinc_to = 1;
1485 data.explicit_inc_to = 1;
1486 }
1487 if (!data.autinc_to && CONSTANT_P (to_addr))
1488 data.to_addr = copy_addr_to_reg (to_addr);
1489 }
1490
1491 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1492 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1493 align = MOVE_MAX * BITS_PER_UNIT;
1494
1495 /* First move what we can in the largest integer mode, then go to
1496 successively smaller modes. */
1497
1498 while (max_size > 1)
1499 {
1500 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1501 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1502 if (GET_MODE_SIZE (tmode) < max_size)
1503 mode = tmode;
1504
1505 if (mode == VOIDmode)
1506 break;
1507
1508 icode = mov_optab->handlers[(int) mode].insn_code;
1509 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1510 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1511
1512 max_size = GET_MODE_SIZE (mode);
1513 }
1514
1515 /* The code above should have handled everything. */
1516 if (data.len > 0)
1517 abort ();
1518 }
1519
1520 /* Return number of insns required to move L bytes by pieces.
1521 ALIGN (in bits) is maximum alignment we can assume. */
1522
1523 static unsigned HOST_WIDE_INT
1524 move_by_pieces_ninsns (l, align)
1525 unsigned HOST_WIDE_INT l;
1526 unsigned int align;
1527 {
1528 unsigned HOST_WIDE_INT n_insns = 0;
1529 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1530
1531 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1532 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1533 align = MOVE_MAX * BITS_PER_UNIT;
1534
1535 while (max_size > 1)
1536 {
1537 enum machine_mode mode = VOIDmode, tmode;
1538 enum insn_code icode;
1539
1540 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1541 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1542 if (GET_MODE_SIZE (tmode) < max_size)
1543 mode = tmode;
1544
1545 if (mode == VOIDmode)
1546 break;
1547
1548 icode = mov_optab->handlers[(int) mode].insn_code;
1549 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1550 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1551
1552 max_size = GET_MODE_SIZE (mode);
1553 }
1554
1555 if (l)
1556 abort ();
1557 return n_insns;
1558 }
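
/* Illustrative worked example (hypothetical 32-bit target with
   MOVE_MAX == 4 and word-aligned operands): for L == 11 the loop above
   counts 11/4 = 2 SImode moves (3 bytes left), then 3/2 = 1 HImode move
   (1 byte left), then 1 QImode move, returning 4 insns in total.  */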
1559
1560 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1561 with move instructions for mode MODE. GENFUN is the gen_... function
1562 to make a move insn for that mode. DATA has all the other info. */
1563
1564 static void
1565 move_by_pieces_1 (genfun, mode, data)
1566 rtx (*genfun) PARAMS ((rtx, ...));
1567 enum machine_mode mode;
1568 struct move_by_pieces *data;
1569 {
1570 unsigned int size = GET_MODE_SIZE (mode);
1571 rtx to1 = NULL_RTX, from1;
1572
1573 while (data->len >= size)
1574 {
1575 if (data->reverse)
1576 data->offset -= size;
1577
1578 if (data->to)
1579 {
1580 if (data->autinc_to)
1581 {
1582 to1 = replace_equiv_address (data->to, data->to_addr);
1583 to1 = adjust_address (to1, mode, 0);
1584 }
1585 else
1586 to1 = adjust_address (data->to, mode, data->offset);
1587 }
1588
1589 if (data->autinc_from)
1590 {
1591 from1 = replace_equiv_address (data->from, data->from_addr);
1592 from1 = adjust_address (from1, mode, 0);
1593 }
1594 else
1595 from1 = adjust_address (data->from, mode, data->offset);
1596
1597 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1598 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1599 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1600 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1601
1602 if (data->to)
1603 emit_insn ((*genfun) (to1, from1));
1604 else
1605 {
1606 #ifdef PUSH_ROUNDING
1607 emit_single_push_insn (mode, from1, NULL);
1608 #else
1609 abort ();
1610 #endif
1611 }
1612
1613 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1614 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1615 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1616 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1617
1618 if (! data->reverse)
1619 data->offset += size;
1620
1621 data->len -= size;
1622 }
1623 }
1624 \f
1625 /* Emit code to move a block Y to a block X.
1626 This may be done with string-move instructions,
1627 with multiple scalar move instructions, or with a library call.
1628
1629 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1630 with mode BLKmode.
1631 SIZE is an rtx that says how long they are.
1632 ALIGN is the maximum alignment we can assume they have.
1633
1634 Return the address of the new block, if memcpy is called and returns it,
1635 0 otherwise. */
1636
1637 rtx
1638 emit_block_move (x, y, size)
1639 rtx x, y;
1640 rtx size;
1641 {
1642 rtx retval = 0;
1643 #ifdef TARGET_MEM_FUNCTIONS
1644 static tree fn;
1645 tree call_expr, arg_list;
1646 #endif
1647 unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1648
1649 if (GET_MODE (x) != BLKmode)
1650 abort ();
1651
1652 if (GET_MODE (y) != BLKmode)
1653 abort ();
1654
1655 x = protect_from_queue (x, 1);
1656 y = protect_from_queue (y, 0);
1657 size = protect_from_queue (size, 0);
1658
1659 if (GET_CODE (x) != MEM)
1660 abort ();
1661 if (GET_CODE (y) != MEM)
1662 abort ();
1663 if (size == 0)
1664 abort ();
1665
1666 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1667 move_by_pieces (x, y, INTVAL (size), align);
1668 else
1669 {
1670 /* Try the most limited insn first, because there's no point
1671 including more than one in the machine description unless
1672 the more limited one has some advantage. */
1673
1674 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1675 enum machine_mode mode;
1676
1677 /* Since this is a move insn, we don't care about volatility. */
1678 volatile_ok = 1;
1679
1680 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1681 mode = GET_MODE_WIDER_MODE (mode))
1682 {
1683 enum insn_code code = movstr_optab[(int) mode];
1684 insn_operand_predicate_fn pred;
1685
1686 if (code != CODE_FOR_nothing
1687 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1688 here because if SIZE is less than the mode mask, as it is
1689 returned by the macro, it will definitely be less than the
1690 actual mode mask. */
1691 && ((GET_CODE (size) == CONST_INT
1692 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1693 <= (GET_MODE_MASK (mode) >> 1)))
1694 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1695 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1696 || (*pred) (x, BLKmode))
1697 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1698 || (*pred) (y, BLKmode))
1699 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1700 || (*pred) (opalign, VOIDmode)))
1701 {
1702 rtx op2;
1703 rtx last = get_last_insn ();
1704 rtx pat;
1705
1706 op2 = convert_to_mode (mode, size, 1);
1707 pred = insn_data[(int) code].operand[2].predicate;
1708 if (pred != 0 && ! (*pred) (op2, mode))
1709 op2 = copy_to_mode_reg (mode, op2);
1710
1711 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1712 if (pat)
1713 {
1714 emit_insn (pat);
1715 volatile_ok = 0;
1716 return 0;
1717 }
1718 else
1719 delete_insns_since (last);
1720 }
1721 }
1722
1723 volatile_ok = 0;
1724
1725 /* X, Y, or SIZE may have been passed through protect_from_queue.
1726
1727 It is unsafe to save the value generated by protect_from_queue
1728 and reuse it later. Consider what happens if emit_queue is
1729 called before the return value from protect_from_queue is used.
1730
1731 Expansion of the CALL_EXPR below will call emit_queue before
1732 we are finished emitting RTL for argument setup. So if we are
1733 not careful we could get the wrong value for an argument.
1734
1735 To avoid this problem we go ahead and emit code to copy X, Y &
1736 SIZE into new pseudos. We can then place those new pseudos
1737 into an RTL_EXPR and use them later, even after a call to
1738 emit_queue.
1739
1740 Note this is not strictly needed for library calls since they
1741 do not call emit_queue before loading their arguments. However,
1742 we may need to have library calls call emit_queue in the future
1743 since failing to do so could cause problems for targets which
1744 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1745 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1746 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1747
1748 #ifdef TARGET_MEM_FUNCTIONS
1749 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1750 #else
1751 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1752 TREE_UNSIGNED (integer_type_node));
1753 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1754 #endif
1755
1756 #ifdef TARGET_MEM_FUNCTIONS
1757 /* It is incorrect to use the libcall calling conventions to call
1758 memcpy in this context.
1759
1760 This could be a user call to memcpy and the user may wish to
1761 examine the return value from memcpy.
1762
1763 For targets where libcalls and normal calls have different conventions
1764 for returning pointers, we could end up generating incorrect code.
1765
1766 So instead of using a libcall sequence we build up a suitable
1767 CALL_EXPR and expand the call in the normal fashion. */
1768 if (fn == NULL_TREE)
1769 {
1770 tree fntype;
1771
1772 /* This was copied from except.c, I don't know if all this is
1773 necessary in this context or not. */
1774 fn = get_identifier ("memcpy");
1775 fntype = build_pointer_type (void_type_node);
1776 fntype = build_function_type (fntype, NULL_TREE);
1777 fn = build_decl (FUNCTION_DECL, fn, fntype);
1778 ggc_add_tree_root (&fn, 1);
1779 DECL_EXTERNAL (fn) = 1;
1780 TREE_PUBLIC (fn) = 1;
1781 DECL_ARTIFICIAL (fn) = 1;
1782 TREE_NOTHROW (fn) = 1;
1783 make_decl_rtl (fn, NULL);
1784 assemble_external (fn);
1785 }
1786
1787 /* We need to make an argument list for the function call.
1788
1789 memcpy has three arguments, the first two are void * addresses and
1790 the last is a size_t byte count for the copy. */
1791 arg_list
1792 = build_tree_list (NULL_TREE,
1793 make_tree (build_pointer_type (void_type_node), x));
1794 TREE_CHAIN (arg_list)
1795 = build_tree_list (NULL_TREE,
1796 make_tree (build_pointer_type (void_type_node), y));
1797 TREE_CHAIN (TREE_CHAIN (arg_list))
1798 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1799 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1800
1801 /* Now we have to build up the CALL_EXPR itself. */
1802 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1803 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1804 call_expr, arg_list, NULL_TREE);
1805 TREE_SIDE_EFFECTS (call_expr) = 1;
1806
1807 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1808 #else
1809 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1810 VOIDmode, 3, y, Pmode, x, Pmode,
1811 convert_to_mode (TYPE_MODE (integer_type_node), size,
1812 TREE_UNSIGNED (integer_type_node)),
1813 TYPE_MODE (integer_type_node));
1814 #endif
1815 }
1816
1817 return retval;
1818 }
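
/* Illustrative sketch (hypothetical, under #if 0): copying a fixed-size
   block between two BLKmode MEMs.  The address pseudos and the 32-byte
   length are made up; emit_block_move chooses among move_by_pieces, a
   movstr pattern, and a memcpy/bcopy call as described above.  */
#if 0
{
  rtx src_addr = gen_reg_rtx (Pmode);	/* assumed to hold the source address */
  rtx dst_addr = gen_reg_rtx (Pmode);	/* assumed to hold the destination address */
  rtx src = gen_rtx_MEM (BLKmode, src_addr);
  rtx dst = gen_rtx_MEM (BLKmode, dst_addr);
  emit_block_move (dst, src, GEN_INT (32));
}
#endif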
1819 \f
1820 /* Copy all or part of a value X into registers starting at REGNO.
1821 The number of registers to be filled is NREGS. */
1822
1823 void
1824 move_block_to_reg (regno, x, nregs, mode)
1825 int regno;
1826 rtx x;
1827 int nregs;
1828 enum machine_mode mode;
1829 {
1830 int i;
1831 #ifdef HAVE_load_multiple
1832 rtx pat;
1833 rtx last;
1834 #endif
1835
1836 if (nregs == 0)
1837 return;
1838
1839 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1840 x = validize_mem (force_const_mem (mode, x));
1841
1842 /* See if the machine can do this with a load multiple insn. */
1843 #ifdef HAVE_load_multiple
1844 if (HAVE_load_multiple)
1845 {
1846 last = get_last_insn ();
1847 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1848 GEN_INT (nregs));
1849 if (pat)
1850 {
1851 emit_insn (pat);
1852 return;
1853 }
1854 else
1855 delete_insns_since (last);
1856 }
1857 #endif
1858
1859 for (i = 0; i < nregs; i++)
1860 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1861 operand_subword_force (x, i, mode));
1862 }
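
/* Illustrative sketch (hypothetical, under #if 0): loading a two-word
   DImode value from memory into two consecutive hard registers.  The
   starting register number 4 is made up for the example.  */
#if 0
{
  rtx arg = gen_rtx_MEM (DImode, gen_reg_rtx (Pmode));
  move_block_to_reg (4, arg, 2, DImode);
}
#endif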
1863
1864 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1865 The number of registers to be filled is NREGS. SIZE indicates the number
1866 of bytes in the object X. */
1867
1868 void
1869 move_block_from_reg (regno, x, nregs, size)
1870 int regno;
1871 rtx x;
1872 int nregs;
1873 int size;
1874 {
1875 int i;
1876 #ifdef HAVE_store_multiple
1877 rtx pat;
1878 rtx last;
1879 #endif
1880 enum machine_mode mode;
1881
1882 if (nregs == 0)
1883 return;
1884
1885 /* If SIZE is that of a mode no bigger than a word, just use that
1886 mode's store operation. */
1887 if (size <= UNITS_PER_WORD
1888 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1889 {
1890 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
1891 return;
1892 }
1893
1894 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1895 to the left before storing to memory. Note that the previous test
1896 doesn't handle all cases (e.g. SIZE == 3). */
1897 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1898 {
1899 rtx tem = operand_subword (x, 0, 1, BLKmode);
1900 rtx shift;
1901
1902 if (tem == 0)
1903 abort ();
1904
1905 shift = expand_shift (LSHIFT_EXPR, word_mode,
1906 gen_rtx_REG (word_mode, regno),
1907 build_int_2 ((UNITS_PER_WORD - size)
1908 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1909 emit_move_insn (tem, shift);
1910 return;
1911 }
1912
1913 /* See if the machine can do this with a store multiple insn. */
1914 #ifdef HAVE_store_multiple
1915 if (HAVE_store_multiple)
1916 {
1917 last = get_last_insn ();
1918 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1919 GEN_INT (nregs));
1920 if (pat)
1921 {
1922 emit_insn (pat);
1923 return;
1924 }
1925 else
1926 delete_insns_since (last);
1927 }
1928 #endif
1929
1930 for (i = 0; i < nregs; i++)
1931 {
1932 rtx tem = operand_subword (x, i, 1, BLKmode);
1933
1934 if (tem == 0)
1935 abort ();
1936
1937 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1938 }
1939 }
1940
1941 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1942 registers represented by a PARALLEL. SSIZE represents the total size of
1943 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1944 SRC in bits. */
1945 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1946 the balance will be in what would be the low-order memory addresses, i.e.
1947 left justified for big endian, right justified for little endian. This
1948 happens to be true for the targets currently using this support. If this
1949 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1950 would be needed. */
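/* For illustration only (a sketch, not from any particular target): a DST
   describing a value returned in two 64-bit registers might look like

       (parallel [(expr_list (reg:DI 3) (const_int 0))
                  (expr_list (reg:DI 4) (const_int 8))])

   i.e. each element pairs a register with the byte offset it covers in the
   block, which is what the XEXP (XVECEXP (dst, 0, i), 0) and ..., 1)
   accesses below rely on.  */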
1951
1952 void
1953 emit_group_load (dst, orig_src, ssize, align)
1954 rtx dst, orig_src;
1955 unsigned int align;
1956 int ssize;
1957 {
1958 rtx *tmps, src;
1959 int start, i;
1960
1961 if (GET_CODE (dst) != PARALLEL)
1962 abort ();
1963
1964 /* Check for a NULL entry, used to indicate that the parameter goes
1965 both on the stack and in registers. */
1966 if (XEXP (XVECEXP (dst, 0, 0), 0))
1967 start = 0;
1968 else
1969 start = 1;
1970
1971 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1972
1973 /* Process the pieces. */
1974 for (i = start; i < XVECLEN (dst, 0); i++)
1975 {
1976 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1977 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1978 unsigned int bytelen = GET_MODE_SIZE (mode);
1979 int shift = 0;
1980
1981 /* Handle trailing fragments that run over the size of the struct. */
1982 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1983 {
1984 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1985 bytelen = ssize - bytepos;
1986 if (bytelen <= 0)
1987 abort ();
1988 }
1989
1990 /* If we won't be loading directly from memory, protect the real source
1991 from strange tricks we might play; but make sure that the source can
1992 be loaded directly into the destination. */
1993 src = orig_src;
1994 if (GET_CODE (orig_src) != MEM
1995 && (!CONSTANT_P (orig_src)
1996 || (GET_MODE (orig_src) != mode
1997 && GET_MODE (orig_src) != VOIDmode)))
1998 {
1999 if (GET_MODE (orig_src) == VOIDmode)
2000 src = gen_reg_rtx (mode);
2001 else
2002 src = gen_reg_rtx (GET_MODE (orig_src));
2003 emit_move_insn (src, orig_src);
2004 }
2005
2006 /* Optimize the access just a bit. */
2007 if (GET_CODE (src) == MEM
2008 && align >= GET_MODE_ALIGNMENT (mode)
2009 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2010 && bytelen == GET_MODE_SIZE (mode))
2011 {
2012 tmps[i] = gen_reg_rtx (mode);
2013 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2014 }
2015 else if (GET_CODE (src) == CONCAT)
2016 {
2017 if (bytepos == 0
2018 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2019 tmps[i] = XEXP (src, 0);
2020 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2021 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
2022 tmps[i] = XEXP (src, 1);
2023 else
2024 abort ();
2025 }
2026 else if (CONSTANT_P (src)
2027 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2028 tmps[i] = src;
2029 else
2030 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2031 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2032 mode, mode, align, ssize);
2033
2034 if (BYTES_BIG_ENDIAN && shift)
2035 tmps[i] = expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2036 tmps[i], 0, OPTAB_WIDEN);
2037 }
2038
2039 emit_queue ();
2040
2041 /* Copy the extracted pieces into the proper (probable) hard regs. */
2042 for (i = start; i < XVECLEN (dst, 0); i++)
2043 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2044 }
2045
2046 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2047 registers represented by a PARALLEL. SSIZE represents the total size of
2048 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
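/* (SRC here has the same PARALLEL shape sketched above for emit_group_load's
   DST: each element pairs a register with its byte offset in the block.)  */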
2049
2050 void
2051 emit_group_store (orig_dst, src, ssize, align)
2052 rtx orig_dst, src;
2053 int ssize;
2054 unsigned int align;
2055 {
2056 rtx *tmps, dst;
2057 int start, i;
2058
2059 if (GET_CODE (src) != PARALLEL)
2060 abort ();
2061
2062 /* Check for a NULL entry, used to indicate that the parameter goes
2063 both on the stack and in registers. */
2064 if (XEXP (XVECEXP (src, 0, 0), 0))
2065 start = 0;
2066 else
2067 start = 1;
2068
2069 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2070
2071 /* Copy the (probable) hard regs into pseudos. */
2072 for (i = start; i < XVECLEN (src, 0); i++)
2073 {
2074 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2075 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2076 emit_move_insn (tmps[i], reg);
2077 }
2078 emit_queue ();
2079
2080 /* If we won't be storing directly into memory, protect the real destination
2081 from strange tricks we might play. */
2082 dst = orig_dst;
2083 if (GET_CODE (dst) == PARALLEL)
2084 {
2085 rtx temp;
2086
2087 /* We can get a PARALLEL dst if there is a conditional expression in
2088 a return statement. In that case, the dst and src are the same,
2089 so no action is necessary. */
2090 if (rtx_equal_p (dst, src))
2091 return;
2092
2093 /* It is unclear if we can ever reach here, but we may as well handle
2094 it. Allocate a temporary, and split this into a store/load to/from
2095 the temporary. */
2096
2097 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2098 emit_group_store (temp, src, ssize, align);
2099 emit_group_load (dst, temp, ssize, align);
2100 return;
2101 }
2102 else if (GET_CODE (dst) != MEM)
2103 {
2104 dst = gen_reg_rtx (GET_MODE (orig_dst));
2105 /* Make life a bit easier for combine. */
2106 emit_move_insn (dst, const0_rtx);
2107 }
2108
2109 /* Process the pieces. */
2110 for (i = start; i < XVECLEN (src, 0); i++)
2111 {
2112 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2113 enum machine_mode mode = GET_MODE (tmps[i]);
2114 unsigned int bytelen = GET_MODE_SIZE (mode);
2115
2116 /* Handle trailing fragments that run over the size of the struct. */
2117 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2118 {
2119 if (BYTES_BIG_ENDIAN)
2120 {
2121 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2122 tmps[i] = expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2123 tmps[i], 0, OPTAB_WIDEN);
2124 }
2125 bytelen = ssize - bytepos;
2126 }
2127
2128 /* Optimize the access just a bit. */
2129 if (GET_CODE (dst) == MEM
2130 && align >= GET_MODE_ALIGNMENT (mode)
2131 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2132 && bytelen == GET_MODE_SIZE (mode))
2133 emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
2134 else
2135 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2136 mode, tmps[i], align, ssize);
2137 }
2138
2139 emit_queue ();
2140
2141 /* Copy from the pseudo into the (probable) hard reg. */
2142 if (GET_CODE (dst) == REG)
2143 emit_move_insn (orig_dst, dst);
2144 }
2145
2146 /* Generate code to copy a BLKmode object of TYPE out of a
2147 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2148 is null, a stack temporary is created. TGTBLK is returned.
2149
2150 The primary purpose of this routine is to handle functions
2151 that return BLKmode structures in registers. Some machines
2152 (the PA for example) want to return all small structures
2153 in registers regardless of the structure's alignment. */
2154
2155 rtx
2156 copy_blkmode_from_reg (tgtblk, srcreg, type)
2157 rtx tgtblk;
2158 rtx srcreg;
2159 tree type;
2160 {
2161 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2162 rtx src = NULL, dst = NULL;
2163 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2164 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2165
2166 if (tgtblk == 0)
2167 {
2168 tgtblk = assign_temp (build_qualified_type (type,
2169 (TYPE_QUALS (type)
2170 | TYPE_QUAL_CONST)),
2171 0, 1, 1);
2172 preserve_temp_slots (tgtblk);
2173 }
2174
2175 /* This code assumes srcreg is at least a full word. If it isn't,
2176 copy it into a new pseudo which is a full word. */
2177 if (GET_MODE (srcreg) != BLKmode
2178 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2179 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2180
2181 /* Structures whose size is not a multiple of a word are aligned
2182 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2183 machine, this means we must skip the empty high order bytes when
2184 calculating the bit offset. */
2185 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2186 big_endian_correction
2187 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
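/* Illustrative arithmetic only: with 32-bit words and a 6-byte structure,
   bytes % UNITS_PER_WORD == 2, so the correction just computed is
   32 - 2 * 8 == 16 bits of empty high-order space to be skipped.  */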
2188
2189 /* Copy the structure BITSIZE bits at a time.
2190
2191 We could probably emit more efficient code for machines which do not use
2192 strict alignment, but it doesn't seem worth the effort at the current
2193 time. */
2194 for (bitpos = 0, xbitpos = big_endian_correction;
2195 bitpos < bytes * BITS_PER_UNIT;
2196 bitpos += bitsize, xbitpos += bitsize)
2197 {
2198 /* We need a new source operand each time xbitpos is on a
2199 word boundary and when xbitpos == big_endian_correction
2200 (the first time through). */
2201 if (xbitpos % BITS_PER_WORD == 0
2202 || xbitpos == big_endian_correction)
2203 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2204 GET_MODE (srcreg));
2205
2206 /* We need a new destination operand each time bitpos is on
2207 a word boundary. */
2208 if (bitpos % BITS_PER_WORD == 0)
2209 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2210
2211 /* Use xbitpos for the source extraction (right justified) and
2212 bitpos for the destination store (left justified). */
2213 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2214 extract_bit_field (src, bitsize,
2215 xbitpos % BITS_PER_WORD, 1,
2216 NULL_RTX, word_mode, word_mode,
2217 bitsize, BITS_PER_WORD),
2218 bitsize, BITS_PER_WORD);
2219 }
2220
2221 return tgtblk;
2222 }
2223
2224 /* Add a USE expression for REG to the (possibly empty) list pointed
2225 to by CALL_FUSAGE. REG must denote a hard register. */
2226
2227 void
2228 use_reg (call_fusage, reg)
2229 rtx *call_fusage, reg;
2230 {
2231 if (GET_CODE (reg) != REG
2232 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2233 abort ();
2234
2235 *call_fusage
2236 = gen_rtx_EXPR_LIST (VOIDmode,
2237 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2238 }
2239
2240 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2241 starting at REGNO. All of these registers must be hard registers. */
2242
2243 void
2244 use_regs (call_fusage, regno, nregs)
2245 rtx *call_fusage;
2246 int regno;
2247 int nregs;
2248 {
2249 int i;
2250
2251 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2252 abort ();
2253
2254 for (i = 0; i < nregs; i++)
2255 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2256 }
2257
2258 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2259 PARALLEL REGS. This is for calls that pass values in multiple
2260 non-contiguous locations. The Irix 6 ABI has examples of this. */
2261
2262 void
2263 use_group_regs (call_fusage, regs)
2264 rtx *call_fusage;
2265 rtx regs;
2266 {
2267 int i;
2268
2269 for (i = 0; i < XVECLEN (regs, 0); i++)
2270 {
2271 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2272
2273 /* A NULL entry means the parameter goes both on the stack and in
2274 registers. This can also be a MEM for targets that pass values
2275 partially on the stack and partially in registers. */
2276 if (reg != 0 && GET_CODE (reg) == REG)
2277 use_reg (call_fusage, reg);
2278 }
2279 }
2280 \f
2281
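/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call;
   ALIGN is the maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */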
2282 int
2283 can_store_by_pieces (len, constfun, constfundata, align)
2284 unsigned HOST_WIDE_INT len;
2285 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2286 PTR constfundata;
2287 unsigned int align;
2288 {
2289 unsigned HOST_WIDE_INT max_size, l;
2290 HOST_WIDE_INT offset = 0;
2291 enum machine_mode mode, tmode;
2292 enum insn_code icode;
2293 int reverse;
2294 rtx cst;
2295
2296 if (! MOVE_BY_PIECES_P (len, align))
2297 return 0;
2298
2299 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2300 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2301 align = MOVE_MAX * BITS_PER_UNIT;
2302
2303 /* We would first store what we can in the largest integer mode, then go to
2304 successively smaller modes. */
2305
2306 for (reverse = 0;
2307 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2308 reverse++)
2309 {
2310 l = len;
2311 mode = VOIDmode;
2312 max_size = MOVE_MAX_PIECES + 1;
2313 while (max_size > 1)
2314 {
2315 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2316 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2317 if (GET_MODE_SIZE (tmode) < max_size)
2318 mode = tmode;
2319
2320 if (mode == VOIDmode)
2321 break;
2322
2323 icode = mov_optab->handlers[(int) mode].insn_code;
2324 if (icode != CODE_FOR_nothing
2325 && align >= GET_MODE_ALIGNMENT (mode))
2326 {
2327 unsigned int size = GET_MODE_SIZE (mode);
2328
2329 while (l >= size)
2330 {
2331 if (reverse)
2332 offset -= size;
2333
2334 cst = (*constfun) (constfundata, offset, mode);
2335 if (!LEGITIMATE_CONSTANT_P (cst))
2336 return 0;
2337
2338 if (!reverse)
2339 offset += size;
2340
2341 l -= size;
2342 }
2343 }
2344
2345 max_size = GET_MODE_SIZE (mode);
2346 }
2347
2348 /* The code above should have handled everything. */
2349 if (l != 0)
2350 abort ();
2351 }
2352
2353 return 1;
2354 }
2355
2356 /* Generate several move instructions to store LEN bytes generated by
2357 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2358 pointer which will be passed as argument in every CONSTFUN call.
2359 ALIGN is maximum alignment we can assume. */
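/* A sketch of a possible CONSTFUN, for illustration only (the helper named
   here is assumed, not defined in this file):

       static rtx
       read_str_constfun (data, offset, mode)
            PTR data;
            HOST_WIDE_INT offset;
            enum machine_mode mode;
       {
         const char *str = (const char *) data;
         return c_readstr (str + offset, mode);
       }

   c_readstr would build a constant rtx from the MODE-sized chunk of the
   host string starting at OFFSET.  The only contract is that CONSTFUN maps
   (CONSTFUNDATA, OFFSET, MODE) to a constant rtx of mode MODE for every
   offset that will be stored.  */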
2360
2361 void
2362 store_by_pieces (to, len, constfun, constfundata, align)
2363 rtx to;
2364 unsigned HOST_WIDE_INT len;
2365 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2366 PTR constfundata;
2367 unsigned int align;
2368 {
2369 struct store_by_pieces data;
2370
2371 if (! MOVE_BY_PIECES_P (len, align))
2372 abort ();
2373 to = protect_from_queue (to, 1);
2374 data.constfun = constfun;
2375 data.constfundata = constfundata;
2376 data.len = len;
2377 data.to = to;
2378 store_by_pieces_1 (&data, align);
2379 }
2380
2381 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2382 rtx with BLKmode). The caller must pass TO through protect_from_queue
2383 before calling. ALIGN is maximum alignment we can assume. */
2384
2385 static void
2386 clear_by_pieces (to, len, align)
2387 rtx to;
2388 unsigned HOST_WIDE_INT len;
2389 unsigned int align;
2390 {
2391 struct store_by_pieces data;
2392
2393 data.constfun = clear_by_pieces_1;
2394 data.constfundata = NULL;
2395 data.len = len;
2396 data.to = to;
2397 store_by_pieces_1 (&data, align);
2398 }
2399
2400 /* Callback routine for clear_by_pieces.
2401 Return const0_rtx unconditionally. */
2402
2403 static rtx
2404 clear_by_pieces_1 (data, offset, mode)
2405 PTR data ATTRIBUTE_UNUSED;
2406 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2407 enum machine_mode mode ATTRIBUTE_UNUSED;
2408 {
2409 return const0_rtx;
2410 }
2411
2412 /* Subroutine of clear_by_pieces and store_by_pieces.
2413 Generate several move instructions to store LEN bytes of block TO. (A MEM
2414 rtx with BLKmode). The caller must pass TO through protect_from_queue
2415 before calling. ALIGN is maximum alignment we can assume. */
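/* Illustrative example of the mode walk below: with MOVE_MAX_PIECES == 4,
   sufficient alignment and LEN == 7, the loop emits one SImode store, then
   one HImode store, then one QImode store (7 == 4 + 2 + 1).  The numbers
   are only an example; the actual modes come from the target's MODE_INT
   class.  */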
2416
2417 static void
2418 store_by_pieces_1 (data, align)
2419 struct store_by_pieces *data;
2420 unsigned int align;
2421 {
2422 rtx to_addr = XEXP (data->to, 0);
2423 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2424 enum machine_mode mode = VOIDmode, tmode;
2425 enum insn_code icode;
2426
2427 data->offset = 0;
2428 data->to_addr = to_addr;
2429 data->autinc_to
2430 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2431 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2432
2433 data->explicit_inc_to = 0;
2434 data->reverse
2435 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2436 if (data->reverse)
2437 data->offset = data->len;
2438
2439 /* If storing requires more than two move insns,
2440 copy addresses to registers (to make displacements shorter)
2441 and use post-increment if available. */
2442 if (!data->autinc_to
2443 && move_by_pieces_ninsns (data->len, align) > 2)
2444 {
2445 /* Determine the main mode we'll be using. */
2446 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2447 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2448 if (GET_MODE_SIZE (tmode) < max_size)
2449 mode = tmode;
2450
2451 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2452 {
2453 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2454 data->autinc_to = 1;
2455 data->explicit_inc_to = -1;
2456 }
2457
2458 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2459 && ! data->autinc_to)
2460 {
2461 data->to_addr = copy_addr_to_reg (to_addr);
2462 data->autinc_to = 1;
2463 data->explicit_inc_to = 1;
2464 }
2465
2466 if (! data->autinc_to && CONSTANT_P (to_addr))
2467 data->to_addr = copy_addr_to_reg (to_addr);
2468 }
2469
2470 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2471 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2472 align = MOVE_MAX * BITS_PER_UNIT;
2473
2474 /* First store what we can in the largest integer mode, then go to
2475 successively smaller modes. */
2476
2477 while (max_size > 1)
2478 {
2479 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2480 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2481 if (GET_MODE_SIZE (tmode) < max_size)
2482 mode = tmode;
2483
2484 if (mode == VOIDmode)
2485 break;
2486
2487 icode = mov_optab->handlers[(int) mode].insn_code;
2488 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2489 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2490
2491 max_size = GET_MODE_SIZE (mode);
2492 }
2493
2494 /* The code above should have handled everything. */
2495 if (data->len != 0)
2496 abort ();
2497 }
2498
2499 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2500 with move instructions for mode MODE. GENFUN is the gen_... function
2501 to make a move insn for that mode. DATA has all the other info. */
2502
2503 static void
2504 store_by_pieces_2 (genfun, mode, data)
2505 rtx (*genfun) PARAMS ((rtx, ...));
2506 enum machine_mode mode;
2507 struct store_by_pieces *data;
2508 {
2509 unsigned int size = GET_MODE_SIZE (mode);
2510 rtx to1, cst;
2511
2512 while (data->len >= size)
2513 {
2514 if (data->reverse)
2515 data->offset -= size;
2516
2517 if (data->autinc_to)
2518 {
2519 to1 = replace_equiv_address (data->to, data->to_addr);
2520 to1 = adjust_address (to1, mode, 0);
2521 }
2522 else
2523 to1 = adjust_address (data->to, mode, data->offset);
2524
2525 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2526 emit_insn (gen_add2_insn (data->to_addr,
2527 GEN_INT (-(HOST_WIDE_INT) size)));
2528
2529 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2530 emit_insn ((*genfun) (to1, cst));
2531
2532 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2533 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2534
2535 if (! data->reverse)
2536 data->offset += size;
2537
2538 data->len -= size;
2539 }
2540 }
2541 \f
2542 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2543 its length in bytes. */
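/* (The value returned is 0 on every path except the TARGET_MEM_FUNCTIONS
   call to memset below, where the rtx produced by expanding that CALL_EXPR
   is handed back.)  */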
2544
2545 rtx
2546 clear_storage (object, size)
2547 rtx object;
2548 rtx size;
2549 {
2550 #ifdef TARGET_MEM_FUNCTIONS
2551 static tree fn;
2552 tree call_expr, arg_list;
2553 #endif
2554 rtx retval = 0;
2555 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2556 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2557
2558 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2559 just move a zero. Otherwise, do this a piece at a time. */
2560 if (GET_MODE (object) != BLKmode
2561 && GET_CODE (size) == CONST_INT
2562 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2563 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2564 else
2565 {
2566 object = protect_from_queue (object, 1);
2567 size = protect_from_queue (size, 0);
2568
2569 if (GET_CODE (size) == CONST_INT
2570 && MOVE_BY_PIECES_P (INTVAL (size), align))
2571 clear_by_pieces (object, INTVAL (size), align);
2572 else
2573 {
2574 /* Try the most limited insn first, because there's no point
2575 including more than one in the machine description unless
2576 the more limited one has some advantage. */
2577
2578 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2579 enum machine_mode mode;
2580
2581 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2582 mode = GET_MODE_WIDER_MODE (mode))
2583 {
2584 enum insn_code code = clrstr_optab[(int) mode];
2585 insn_operand_predicate_fn pred;
2586
2587 if (code != CODE_FOR_nothing
2588 /* We don't need MODE to be narrower than
2589 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2590 the mode mask, as it is returned by the macro, it will
2591 definitely be less than the actual mode mask. */
2592 && ((GET_CODE (size) == CONST_INT
2593 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2594 <= (GET_MODE_MASK (mode) >> 1)))
2595 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2596 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2597 || (*pred) (object, BLKmode))
2598 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2599 || (*pred) (opalign, VOIDmode)))
2600 {
2601 rtx op1;
2602 rtx last = get_last_insn ();
2603 rtx pat;
2604
2605 op1 = convert_to_mode (mode, size, 1);
2606 pred = insn_data[(int) code].operand[1].predicate;
2607 if (pred != 0 && ! (*pred) (op1, mode))
2608 op1 = copy_to_mode_reg (mode, op1);
2609
2610 pat = GEN_FCN ((int) code) (object, op1, opalign);
2611 if (pat)
2612 {
2613 emit_insn (pat);
2614 return 0;
2615 }
2616 else
2617 delete_insns_since (last);
2618 }
2619 }
2620
2621 /* OBJECT or SIZE may have been passed through protect_from_queue.
2622
2623 It is unsafe to save the value generated by protect_from_queue
2624 and reuse it later. Consider what happens if emit_queue is
2625 called before the return value from protect_from_queue is used.
2626
2627 Expansion of the CALL_EXPR below will call emit_queue before
2628 we are finished emitting RTL for argument setup. So if we are
2629 not careful we could get the wrong value for an argument.
2630
2631 To avoid this problem we go ahead and emit code to copy OBJECT
2632 and SIZE into new pseudos. We can then place those new pseudos
2633 into an RTL_EXPR and use them later, even after a call to
2634 emit_queue.
2635
2636 Note this is not strictly needed for library calls since they
2637 do not call emit_queue before loading their arguments. However,
2638 we may need to have library calls call emit_queue in the future
2639 since failing to do so could cause problems for targets which
2640 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2641 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2642
2643 #ifdef TARGET_MEM_FUNCTIONS
2644 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2645 #else
2646 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2647 TREE_UNSIGNED (integer_type_node));
2648 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2649 #endif
2650
2651 #ifdef TARGET_MEM_FUNCTIONS
2652 /* It is incorrect to use the libcall calling conventions to call
2653 memset in this context.
2654
2655 This could be a user call to memset and the user may wish to
2656 examine the return value from memset.
2657
2658 For targets where libcalls and normal calls have different
2659 conventions for returning pointers, we could end up generating
2660 incorrect code.
2661
2662 So instead of using a libcall sequence we build up a suitable
2663 CALL_EXPR and expand the call in the normal fashion. */
2664 if (fn == NULL_TREE)
2665 {
2666 tree fntype;
2667
2668 /* This was copied from except.c; I don't know if all of this is
2669 necessary in this context or not. */
2670 fn = get_identifier ("memset");
2671 fntype = build_pointer_type (void_type_node);
2672 fntype = build_function_type (fntype, NULL_TREE);
2673 fn = build_decl (FUNCTION_DECL, fn, fntype);
2674 ggc_add_tree_root (&fn, 1);
2675 DECL_EXTERNAL (fn) = 1;
2676 TREE_PUBLIC (fn) = 1;
2677 DECL_ARTIFICIAL (fn) = 1;
2678 TREE_NOTHROW (fn) = 1;
2679 make_decl_rtl (fn, NULL);
2680 assemble_external (fn);
2681 }
2682
2683 /* We need to make an argument list for the function call.
2684
2685 memset has three arguments, the first is a void * address, the
2686 second an integer with the initialization value, and the last is a
2687 size_t byte count. */
2688 arg_list
2689 = build_tree_list (NULL_TREE,
2690 make_tree (build_pointer_type (void_type_node),
2691 object));
2692 TREE_CHAIN (arg_list)
2693 = build_tree_list (NULL_TREE,
2694 make_tree (integer_type_node, const0_rtx));
2695 TREE_CHAIN (TREE_CHAIN (arg_list))
2696 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2697 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2698
2699 /* Now we have to build up the CALL_EXPR itself. */
2700 call_expr = build1 (ADDR_EXPR,
2701 build_pointer_type (TREE_TYPE (fn)), fn);
2702 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2703 call_expr, arg_list, NULL_TREE);
2704 TREE_SIDE_EFFECTS (call_expr) = 1;
2705
2706 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2707 #else
2708 emit_library_call (bzero_libfunc, LCT_NORMAL,
2709 VOIDmode, 2, object, Pmode, size,
2710 TYPE_MODE (integer_type_node));
2711 #endif
2712 }
2713 }
2714
2715 return retval;
2716 }
2717
2718 /* Generate code to copy Y into X.
2719 Both Y and X must have the same mode, except that
2720 Y can be a constant with VOIDmode.
2721 This mode cannot be BLKmode; use emit_block_move for that.
2722
2723 Return the last instruction emitted. */
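/* Typical use, as a sketch only (TARGET_REGNO stands for a register number
   chosen by the caller):

       emit_move_insn (gen_rtx_REG (SImode, target_regno), const0_rtx);

   The destination supplies the mode; a constant source may have VOIDmode.  */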
2724
2725 rtx
2726 emit_move_insn (x, y)
2727 rtx x, y;
2728 {
2729 enum machine_mode mode = GET_MODE (x);
2730 rtx y_cst = NULL_RTX;
2731 rtx last_insn;
2732
2733 x = protect_from_queue (x, 1);
2734 y = protect_from_queue (y, 0);
2735
2736 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2737 abort ();
2738
2739 /* Never force constant_p_rtx to memory. */
2740 if (GET_CODE (y) == CONSTANT_P_RTX)
2741 ;
2742 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2743 {
2744 y_cst = y;
2745 y = force_const_mem (mode, y);
2746 }
2747
2748 /* If X or Y are memory references, verify that their addresses are valid
2749 for the machine. */
2750 if (GET_CODE (x) == MEM
2751 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2752 && ! push_operand (x, GET_MODE (x)))
2753 || (flag_force_addr
2754 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2755 x = validize_mem (x);
2756
2757 if (GET_CODE (y) == MEM
2758 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2759 || (flag_force_addr
2760 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2761 y = validize_mem (y);
2762
2763 if (mode == BLKmode)
2764 abort ();
2765
2766 last_insn = emit_move_insn_1 (x, y);
2767
2768 if (y_cst && GET_CODE (x) == REG)
2769 REG_NOTES (last_insn)
2770 = gen_rtx_EXPR_LIST (REG_EQUAL, y_cst, REG_NOTES (last_insn));
2771
2772 return last_insn;
2773 }
2774
2775 /* Low level part of emit_move_insn.
2776 Called just like emit_move_insn, but assumes X and Y
2777 are basically valid. */
2778
2779 rtx
2780 emit_move_insn_1 (x, y)
2781 rtx x, y;
2782 {
2783 enum machine_mode mode = GET_MODE (x);
2784 enum machine_mode submode;
2785 enum mode_class class = GET_MODE_CLASS (mode);
2786 unsigned int i;
2787
2788 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2789 abort ();
2790
2791 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2792 return
2793 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2794
2795 /* Expand complex moves by moving real part and imag part, if possible. */
2796 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2797 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2798 * BITS_PER_UNIT),
2799 (class == MODE_COMPLEX_INT
2800 ? MODE_INT : MODE_FLOAT),
2801 0))
2802 && (mov_optab->handlers[(int) submode].insn_code
2803 != CODE_FOR_nothing))
2804 {
2805 /* Don't split destination if it is a stack push. */
2806 int stack = push_operand (x, GET_MODE (x));
2807
2808 #ifdef PUSH_ROUNDING
2809 /* In case we output to the stack, but the size is smaller than the machine
2810 can push exactly, we need to use move instructions. */
2811 if (stack
2812 && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
2813 {
2814 rtx temp;
2815 int offset1, offset2;
2816
2817 /* Do not use anti_adjust_stack, since we don't want to update
2818 stack_pointer_delta. */
2819 temp = expand_binop (Pmode,
2820 #ifdef STACK_GROWS_DOWNWARD
2821 sub_optab,
2822 #else
2823 add_optab,
2824 #endif
2825 stack_pointer_rtx,
2826 GEN_INT
2827 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2828 stack_pointer_rtx,
2829 0,
2830 OPTAB_LIB_WIDEN);
2831 if (temp != stack_pointer_rtx)
2832 emit_move_insn (stack_pointer_rtx, temp);
2833 #ifdef STACK_GROWS_DOWNWARD
2834 offset1 = 0;
2835 offset2 = GET_MODE_SIZE (submode);
2836 #else
2837 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2838 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2839 + GET_MODE_SIZE (submode));
2840 #endif
2841 emit_move_insn (change_address (x, submode,
2842 gen_rtx_PLUS (Pmode,
2843 stack_pointer_rtx,
2844 GEN_INT (offset1))),
2845 gen_realpart (submode, y));
2846 emit_move_insn (change_address (x, submode,
2847 gen_rtx_PLUS (Pmode,
2848 stack_pointer_rtx,
2849 GEN_INT (offset2))),
2850 gen_imagpart (submode, y));
2851 }
2852 else
2853 #endif
2854 /* If this is a stack push, push the high part first, so it
2855 will be in the argument order.
2856
2857 In that case, change_address is used only to convert
2858 the mode, not to change the address. */
2859 if (stack)
2860 {
2861 /* Note that the real part always precedes the imag part in memory
2862 regardless of machine's endianness. */
2863 #ifdef STACK_GROWS_DOWNWARD
2864 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2865 (gen_rtx_MEM (submode, XEXP (x, 0)),
2866 gen_imagpart (submode, y)));
2867 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2868 (gen_rtx_MEM (submode, XEXP (x, 0)),
2869 gen_realpart (submode, y)));
2870 #else
2871 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2872 (gen_rtx_MEM (submode, XEXP (x, 0)),
2873 gen_realpart (submode, y)));
2874 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2875 (gen_rtx_MEM (submode, XEXP (x, 0)),
2876 gen_imagpart (submode, y)));
2877 #endif
2878 }
2879 else
2880 {
2881 rtx realpart_x, realpart_y;
2882 rtx imagpart_x, imagpart_y;
2883
2884 /* If this is a complex value with each part being smaller than a
2885 word, the usual calling sequence will likely pack the pieces into
2886 a single register. Unfortunately, SUBREG of hard registers only
2887 deals in terms of words, so we have a problem converting input
2888 arguments to the CONCAT of two registers that is used elsewhere
2889 for complex values. If this is before reload, we can copy it into
2890 memory and reload. FIXME, we should see about using extract and
2891 insert on integer registers, but complex short and complex char
2892 variables should be rarely used. */
2893 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2894 && (reload_in_progress | reload_completed) == 0)
2895 {
2896 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2897 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2898
2899 if (packed_dest_p || packed_src_p)
2900 {
2901 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2902 ? MODE_FLOAT : MODE_INT);
2903
2904 enum machine_mode reg_mode
2905 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2906
2907 if (reg_mode != BLKmode)
2908 {
2909 rtx mem = assign_stack_temp (reg_mode,
2910 GET_MODE_SIZE (mode), 0);
2911 rtx cmem = adjust_address (mem, mode, 0);
2912
2913 cfun->cannot_inline
2914 = N_("function using short complex types cannot be inline");
2915
2916 if (packed_dest_p)
2917 {
2918 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2919 emit_move_insn_1 (cmem, y);
2920 return emit_move_insn_1 (sreg, mem);
2921 }
2922 else
2923 {
2924 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2925 emit_move_insn_1 (mem, sreg);
2926 return emit_move_insn_1 (x, cmem);
2927 }
2928 }
2929 }
2930 }
2931
2932 realpart_x = gen_realpart (submode, x);
2933 realpart_y = gen_realpart (submode, y);
2934 imagpart_x = gen_imagpart (submode, x);
2935 imagpart_y = gen_imagpart (submode, y);
2936
2937 /* Show the output dies here. This is necessary for SUBREGs
2938 of pseudos since we cannot track their lifetimes correctly;
2939 hard regs shouldn't appear here except as return values.
2940 We never want to emit such a clobber after reload. */
2941 if (x != y
2942 && ! (reload_in_progress || reload_completed)
2943 && (GET_CODE (realpart_x) == SUBREG
2944 || GET_CODE (imagpart_x) == SUBREG))
2945 {
2946 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2947 }
2948
2949 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2950 (realpart_x, realpart_y));
2951 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2952 (imagpart_x, imagpart_y));
2953 }
2954
2955 return get_last_insn ();
2956 }
2957
2958 /* This will handle any multi-word mode that lacks a move_insn pattern.
2959 However, you will get better code if you define such patterns,
2960 even if they must turn into multiple assembler instructions. */
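/* For example (illustrative only): a DImode move on a 32-bit target with no
   movdi pattern is split by the loop below into two word_mode moves, one
   for each subword of the source and destination.  */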
2961 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2962 {
2963 rtx last_insn = 0;
2964 rtx seq, inner;
2965 int need_clobber;
2966
2967 #ifdef PUSH_ROUNDING
2968
2969 /* If X is a push on the stack, do the push now and replace
2970 X with a reference to the stack pointer. */
2971 if (push_operand (x, GET_MODE (x)))
2972 {
2973 rtx temp;
2974 enum rtx_code code;
2975
2976 /* Do not use anti_adjust_stack, since we don't want to update
2977 stack_pointer_delta. */
2978 temp = expand_binop (Pmode,
2979 #ifdef STACK_GROWS_DOWNWARD
2980 sub_optab,
2981 #else
2982 add_optab,
2983 #endif
2984 stack_pointer_rtx,
2985 GEN_INT
2986 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2987 stack_pointer_rtx,
2988 0,
2989 OPTAB_LIB_WIDEN);
2990 if (temp != stack_pointer_rtx)
2991 emit_move_insn (stack_pointer_rtx, temp);
2992
2993 code = GET_CODE (XEXP (x, 0));
2994 /* Just hope that small offsets off SP are OK. */
2995 if (code == POST_INC)
2996 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2997 GEN_INT (-(HOST_WIDE_INT)
2998 GET_MODE_SIZE (GET_MODE (x))));
2999 else if (code == POST_DEC)
3000 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3001 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3002 else
3003 temp = stack_pointer_rtx;
3004
3005 x = change_address (x, VOIDmode, temp);
3006 }
3007 #endif
3008
3009 /* If we are in reload, see if either operand is a MEM whose address
3010 is scheduled for replacement. */
3011 if (reload_in_progress && GET_CODE (x) == MEM
3012 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3013 x = replace_equiv_address_nv (x, inner);
3014 if (reload_in_progress && GET_CODE (y) == MEM
3015 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3016 y = replace_equiv_address_nv (y, inner);
3017
3018 start_sequence ();
3019
3020 need_clobber = 0;
3021 for (i = 0;
3022 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3023 i++)
3024 {
3025 rtx xpart = operand_subword (x, i, 1, mode);
3026 rtx ypart = operand_subword (y, i, 1, mode);
3027
3028 /* If we can't get a part of Y, put Y into memory if it is a
3029 constant. Otherwise, force it into a register. If we still
3030 can't get a part of Y, abort. */
3031 if (ypart == 0 && CONSTANT_P (y))
3032 {
3033 y = force_const_mem (mode, y);
3034 ypart = operand_subword (y, i, 1, mode);
3035 }
3036 else if (ypart == 0)
3037 ypart = operand_subword_force (y, i, mode);
3038
3039 if (xpart == 0 || ypart == 0)
3040 abort ();
3041
3042 need_clobber |= (GET_CODE (xpart) == SUBREG);
3043
3044 last_insn = emit_move_insn (xpart, ypart);
3045 }
3046
3047 seq = gen_sequence ();
3048 end_sequence ();
3049
3050 /* Show the output dies here. This is necessary for SUBREGs
3051 of pseudos since we cannot track their lifetimes correctly;
3052 hard regs shouldn't appear here except as return values.
3053 We never want to emit such a clobber after reload. */
3054 if (x != y
3055 && ! (reload_in_progress || reload_completed)
3056 && need_clobber != 0)
3057 {
3058 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3059 }
3060
3061 emit_insn (seq);
3062
3063 return last_insn;
3064 }
3065 else
3066 abort ();
3067 }
3068 \f
3069 /* Pushing data onto the stack. */
3070
3071 /* Push a block of length SIZE (perhaps variable)
3072 and return an rtx to address the beginning of the block.
3073 Note that it is not possible for the value returned to be a QUEUED.
3074 The value may be virtual_outgoing_args_rtx.
3075
3076 EXTRA is the number of bytes of padding to push in addition to SIZE.
3077 BELOW nonzero means this padding comes at low addresses;
3078 otherwise, the padding comes at high addresses. */
3079
3080 rtx
3081 push_block (size, extra, below)
3082 rtx size;
3083 int extra, below;
3084 {
3085 rtx temp;
3086
3087 size = convert_modes (Pmode, ptr_mode, size, 1);
3088 if (CONSTANT_P (size))
3089 anti_adjust_stack (plus_constant (size, extra));
3090 else if (GET_CODE (size) == REG && extra == 0)
3091 anti_adjust_stack (size);
3092 else
3093 {
3094 temp = copy_to_mode_reg (Pmode, size);
3095 if (extra != 0)
3096 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3097 temp, 0, OPTAB_LIB_WIDEN);
3098 anti_adjust_stack (temp);
3099 }
3100
3101 #ifndef STACK_GROWS_DOWNWARD
3102 if (0)
3103 #else
3104 if (1)
3105 #endif
3106 {
3107 temp = virtual_outgoing_args_rtx;
3108 if (extra != 0 && below)
3109 temp = plus_constant (temp, extra);
3110 }
3111 else
3112 {
3113 if (GET_CODE (size) == CONST_INT)
3114 temp = plus_constant (virtual_outgoing_args_rtx,
3115 -INTVAL (size) - (below ? 0 : extra));
3116 else if (extra != 0 && !below)
3117 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3118 negate_rtx (Pmode, plus_constant (size, extra)));
3119 else
3120 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3121 negate_rtx (Pmode, size));
3122 }
3123
3124 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3125 }
3126
3127
3128 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3129 block of SIZE bytes. */
3130
3131 static rtx
3132 get_push_address (size)
3133 int size;
3134 {
3135 rtx temp;
3136
3137 if (STACK_PUSH_CODE == POST_DEC)
3138 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3139 else if (STACK_PUSH_CODE == POST_INC)
3140 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3141 else
3142 temp = stack_pointer_rtx;
3143
3144 return copy_to_reg (temp);
3145 }
3146
3147 #ifdef PUSH_ROUNDING
3148
3149 /* Emit a single push insn for value X of mode MODE; TYPE is its type, or 0.  */
3150
3151 static void
3152 emit_single_push_insn (mode, x, type)
3153 rtx x;
3154 enum machine_mode mode;
3155 tree type;
3156 {
3157 rtx dest_addr;
3158 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3159 rtx dest;
3160 enum insn_code icode;
3161 insn_operand_predicate_fn pred;
3162
3163 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3164 /* If there is a push pattern, use it.  Otherwise fall back to the old way
3165 of handing a MEM representing the push operation to the move expander. */
3166 icode = push_optab->handlers[(int) mode].insn_code;
3167 if (icode != CODE_FOR_nothing)
3168 {
3169 if (((pred = insn_data[(int) icode].operand[0].predicate)
3170 && !((*pred) (x, mode))))
3171 x = force_reg (mode, x);
3172 emit_insn (GEN_FCN (icode) (x));
3173 return;
3174 }
3175 if (GET_MODE_SIZE (mode) == rounded_size)
3176 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3177 else
3178 {
3179 #ifdef STACK_GROWS_DOWNWARD
3180 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3181 GEN_INT (-(HOST_WIDE_INT)rounded_size));
3182 #else
3183 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3184 GEN_INT (rounded_size));
3185 #endif
3186 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3187 }
3188
3189 dest = gen_rtx_MEM (mode, dest_addr);
3190
3191 if (type != 0)
3192 {
3193 set_mem_attributes (dest, type, 1);
3194 /* Function incoming arguments may overlap with sibling call
3195 outgoing arguments and we cannot allow reordering of reads
3196 from function arguments with stores to outgoing arguments
3197 of sibling calls. */
3198 set_mem_alias_set (dest, 0);
3199 }
3200 emit_move_insn (dest, x);
3201 }
3202 #endif
3203
3204 /* Generate code to push X onto the stack, assuming it has mode MODE and
3205 type TYPE.
3206 MODE is redundant except when X is a CONST_INT (since they don't
3207 carry mode info).
3208 SIZE is an rtx for the size of data to be copied (in bytes),
3209 needed only if X is BLKmode.
3210
3211 ALIGN (in bits) is maximum alignment we can assume.
3212
3213 If PARTIAL and REG are both nonzero, then copy that many of the first
3214 words of X into registers starting with REG, and push the rest of X.
3215 The amount of space pushed is decreased by PARTIAL words,
3216 rounded *down* to a multiple of PARM_BOUNDARY.
3217 REG must be a hard register in this case.
3218 If REG is zero but PARTIAL is not, take all other actions for an
3219 argument partially in registers, but do not actually load any
3220 registers.
3221
3222 EXTRA is the amount in bytes of extra space to leave next to this arg.
3223 This is ignored if an argument block has already been allocated.
3224
3225 On a machine that lacks real push insns, ARGS_ADDR is the address of
3226 the bottom of the argument block for this call. We use indexing off there
3227 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3228 argument block has not been preallocated.
3229
3230 ARGS_SO_FAR is the size of args previously pushed for this call.
3231
3232 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3233 for arguments passed in registers. If nonzero, it will be the number
3234 of bytes required. */
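/* A worked example of PARTIAL, purely for illustration: with 32-bit words,
   PARTIAL == 2 and REG nonzero, the first 8 bytes of X are loaded into REG
   and REG+1 and the stack space pushed for X shrinks by those 8 bytes,
   subject to the PARM_BOUNDARY rounding described above.  */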
3235
3236 void
3237 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3238 args_addr, args_so_far, reg_parm_stack_space,
3239 alignment_pad)
3240 rtx x;
3241 enum machine_mode mode;
3242 tree type;
3243 rtx size;
3244 unsigned int align;
3245 int partial;
3246 rtx reg;
3247 int extra;
3248 rtx args_addr;
3249 rtx args_so_far;
3250 int reg_parm_stack_space;
3251 rtx alignment_pad;
3252 {
3253 rtx xinner;
3254 enum direction stack_direction
3255 #ifdef STACK_GROWS_DOWNWARD
3256 = downward;
3257 #else
3258 = upward;
3259 #endif
3260
3261 /* Decide where to pad the argument: `downward' for below,
3262 `upward' for above, or `none' for don't pad it.
3263 Default is below for small data on big-endian machines; else above. */
3264 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3265
3266 /* Invert direction if stack is post-decrement.
3267 FIXME: why? */
3268 if (STACK_PUSH_CODE == POST_DEC)
3269 if (where_pad != none)
3270 where_pad = (where_pad == downward ? upward : downward);
3271
3272 xinner = x = protect_from_queue (x, 0);
3273
3274 if (mode == BLKmode)
3275 {
3276 /* Copy a block into the stack, entirely or partially. */
3277
3278 rtx temp;
3279 int used = partial * UNITS_PER_WORD;
3280 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3281 int skip;
3282
3283 if (size == 0)
3284 abort ();
3285
3286 used -= offset;
3287
3288 /* USED is now the # of bytes we need not copy to the stack
3289 because registers will take care of them. */
3290
3291 if (partial != 0)
3292 xinner = adjust_address (xinner, BLKmode, used);
3293
3294 /* If the partial register-part of the arg counts in its stack size,
3295 skip the part of stack space corresponding to the registers.
3296 Otherwise, start copying to the beginning of the stack space,
3297 by setting SKIP to 0. */
3298 skip = (reg_parm_stack_space == 0) ? 0 : used;
3299
3300 #ifdef PUSH_ROUNDING
3301 /* Do it with several push insns if that doesn't take lots of insns
3302 and if there is no difficulty with push insns that skip bytes
3303 on the stack for alignment purposes. */
3304 if (args_addr == 0
3305 && PUSH_ARGS
3306 && GET_CODE (size) == CONST_INT
3307 && skip == 0
3308 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3309 /* Here we avoid the case of a structure whose weak alignment
3310 forces many pushes of a small amount of data,
3311 since such small pushes do rounding that causes trouble. */
3312 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3313 || align >= BIGGEST_ALIGNMENT
3314 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3315 == (align / BITS_PER_UNIT)))
3316 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3317 {
3318 /* Push padding now if padding above and stack grows down,
3319 or if padding below and stack grows up.
3320 But if space already allocated, this has already been done. */
3321 if (extra && args_addr == 0
3322 && where_pad != none && where_pad != stack_direction)
3323 anti_adjust_stack (GEN_INT (extra));
3324
3325 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3326
3327 if (current_function_check_memory_usage && ! in_check_memory_usage)
3328 {
3329 rtx temp;
3330
3331 in_check_memory_usage = 1;
3332 temp = get_push_address (INTVAL (size) - used);
3333 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3334 emit_library_call (chkr_copy_bitmap_libfunc,
3335 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3336 Pmode, XEXP (xinner, 0), Pmode,
3337 GEN_INT (INTVAL (size) - used),
3338 TYPE_MODE (sizetype));
3339 else
3340 emit_library_call (chkr_set_right_libfunc,
3341 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3342 Pmode, GEN_INT (INTVAL (size) - used),
3343 TYPE_MODE (sizetype),
3344 GEN_INT (MEMORY_USE_RW),
3345 TYPE_MODE (integer_type_node));
3346 in_check_memory_usage = 0;
3347 }
3348 }
3349 else
3350 #endif /* PUSH_ROUNDING */
3351 {
3352 rtx target;
3353
3354 /* Otherwise make space on the stack and copy the data
3355 to the address of that space. */
3356
3357 /* Deduct words put into registers from the size we must copy. */
3358 if (partial != 0)
3359 {
3360 if (GET_CODE (size) == CONST_INT)
3361 size = GEN_INT (INTVAL (size) - used);
3362 else
3363 size = expand_binop (GET_MODE (size), sub_optab, size,
3364 GEN_INT (used), NULL_RTX, 0,
3365 OPTAB_LIB_WIDEN);
3366 }
3367
3368 /* Get the address of the stack space.
3369 In this case, we do not deal with EXTRA separately.
3370 A single stack adjust will do. */
3371 if (! args_addr)
3372 {
3373 temp = push_block (size, extra, where_pad == downward);
3374 extra = 0;
3375 }
3376 else if (GET_CODE (args_so_far) == CONST_INT)
3377 temp = memory_address (BLKmode,
3378 plus_constant (args_addr,
3379 skip + INTVAL (args_so_far)));
3380 else
3381 temp = memory_address (BLKmode,
3382 plus_constant (gen_rtx_PLUS (Pmode,
3383 args_addr,
3384 args_so_far),
3385 skip));
3386 if (current_function_check_memory_usage && ! in_check_memory_usage)
3387 {
3388 in_check_memory_usage = 1;
3389 target = copy_to_reg (temp);
3390 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3391 emit_library_call (chkr_copy_bitmap_libfunc,
3392 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3393 target, Pmode,
3394 XEXP (xinner, 0), Pmode,
3395 size, TYPE_MODE (sizetype));
3396 else
3397 emit_library_call (chkr_set_right_libfunc,
3398 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3399 target, Pmode,
3400 size, TYPE_MODE (sizetype),
3401 GEN_INT (MEMORY_USE_RW),
3402 TYPE_MODE (integer_type_node));
3403 in_check_memory_usage = 0;
3404 }
3405
3406 target = gen_rtx_MEM (BLKmode, temp);
3407
3408 if (type != 0)
3409 {
3410 set_mem_attributes (target, type, 1);
3411 /* Function incoming arguments may overlap with sibling call
3412 outgoing arguments and we cannot allow reordering of reads
3413 from function arguments with stores to outgoing arguments
3414 of sibling calls. */
3415 set_mem_alias_set (target, 0);
3416 }
3417 else
3418 set_mem_align (target, align);
3419
3420 /* TEMP is the address of the block. Copy the data there. */
3421 if (GET_CODE (size) == CONST_INT
3422 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3423 {
3424 move_by_pieces (target, xinner, INTVAL (size), align);
3425 goto ret;
3426 }
3427 else
3428 {
3429 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3430 enum machine_mode mode;
3431
3432 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3433 mode != VOIDmode;
3434 mode = GET_MODE_WIDER_MODE (mode))
3435 {
3436 enum insn_code code = movstr_optab[(int) mode];
3437 insn_operand_predicate_fn pred;
3438
3439 if (code != CODE_FOR_nothing
3440 && ((GET_CODE (size) == CONST_INT
3441 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3442 <= (GET_MODE_MASK (mode) >> 1)))
3443 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3444 && (!(pred = insn_data[(int) code].operand[0].predicate)
3445 || ((*pred) (target, BLKmode)))
3446 && (!(pred = insn_data[(int) code].operand[1].predicate)
3447 || ((*pred) (xinner, BLKmode)))
3448 && (!(pred = insn_data[(int) code].operand[3].predicate)
3449 || ((*pred) (opalign, VOIDmode))))
3450 {
3451 rtx op2 = convert_to_mode (mode, size, 1);
3452 rtx last = get_last_insn ();
3453 rtx pat;
3454
3455 pred = insn_data[(int) code].operand[2].predicate;
3456 if (pred != 0 && ! (*pred) (op2, mode))
3457 op2 = copy_to_mode_reg (mode, op2);
3458
3459 pat = GEN_FCN ((int) code) (target, xinner,
3460 op2, opalign);
3461 if (pat)
3462 {
3463 emit_insn (pat);
3464 goto ret;
3465 }
3466 else
3467 delete_insns_since (last);
3468 }
3469 }
3470 }
3471
3472 if (!ACCUMULATE_OUTGOING_ARGS)
3473 {
3474 /* If the source is referenced relative to the stack pointer,
3475 copy it to another register to stabilize it. We do not need
3476 to do this if we know that we won't be changing sp. */
3477
3478 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3479 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3480 temp = copy_to_reg (temp);
3481 }
3482
3483 /* Make inhibit_defer_pop nonzero around the library call
3484 to force it to pop the bcopy-arguments right away. */
3485 NO_DEFER_POP;
3486 #ifdef TARGET_MEM_FUNCTIONS
3487 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3488 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3489 convert_to_mode (TYPE_MODE (sizetype),
3490 size, TREE_UNSIGNED (sizetype)),
3491 TYPE_MODE (sizetype));
3492 #else
3493 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3494 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3495 convert_to_mode (TYPE_MODE (integer_type_node),
3496 size,
3497 TREE_UNSIGNED (integer_type_node)),
3498 TYPE_MODE (integer_type_node));
3499 #endif
3500 OK_DEFER_POP;
3501 }
3502 }
3503 else if (partial > 0)
3504 {
3505 /* Scalar partly in registers. */
3506
3507 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3508 int i;
3509 int not_stack;
3510 /* # words of start of argument
3511 that we must make space for but need not store. */
3512 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3513 int args_offset = INTVAL (args_so_far);
3514 int skip;
3515
3516 /* Push padding now if padding above and stack grows down,
3517 or if padding below and stack grows up.
3518 But if space already allocated, this has already been done. */
3519 if (extra && args_addr == 0
3520 && where_pad != none && where_pad != stack_direction)
3521 anti_adjust_stack (GEN_INT (extra));
3522
3523 /* If we make space by pushing it, we might as well push
3524 the real data. Otherwise, we can leave OFFSET nonzero
3525 and leave the space uninitialized. */
3526 if (args_addr == 0)
3527 offset = 0;
3528
3529 /* Now NOT_STACK gets the number of words that we don't need to
3530 allocate on the stack. */
3531 not_stack = partial - offset;
3532
3533 /* If the partial register-part of the arg counts in its stack size,
3534 skip the part of stack space corresponding to the registers.
3535 Otherwise, start copying to the beginning of the stack space,
3536 by setting SKIP to 0. */
3537 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3538
3539 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3540 x = validize_mem (force_const_mem (mode, x));
3541
3542 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3543 SUBREGs of such registers are not allowed. */
3544 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3545 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3546 x = copy_to_reg (x);
3547
3548 /* Loop over all the words allocated on the stack for this arg. */
3549 /* We can do it by words, because any scalar bigger than a word
3550 has a size that is a multiple of a word. */
3551 #ifndef PUSH_ARGS_REVERSED
3552 for (i = not_stack; i < size; i++)
3553 #else
3554 for (i = size - 1; i >= not_stack; i--)
3555 #endif
3556 if (i >= not_stack + offset)
3557 emit_push_insn (operand_subword_force (x, i, mode),
3558 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3559 0, args_addr,
3560 GEN_INT (args_offset + ((i - not_stack + skip)
3561 * UNITS_PER_WORD)),
3562 reg_parm_stack_space, alignment_pad);
3563 }
3564 else
3565 {
3566 rtx addr;
3567 rtx target = NULL_RTX;
3568 rtx dest;
3569
3570 /* Push padding now if padding above and stack grows down,
3571 or if padding below and stack grows up.
3572 But if space already allocated, this has already been done. */
3573 if (extra && args_addr == 0
3574 && where_pad != none && where_pad != stack_direction)
3575 anti_adjust_stack (GEN_INT (extra));
3576
3577 #ifdef PUSH_ROUNDING
3578 if (args_addr == 0 && PUSH_ARGS)
3579 emit_single_push_insn (mode, x, type);
3580 else
3581 #endif
3582 {
3583 if (GET_CODE (args_so_far) == CONST_INT)
3584 addr
3585 = memory_address (mode,
3586 plus_constant (args_addr,
3587 INTVAL (args_so_far)));
3588 else
3589 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3590 args_so_far));
3591 target = addr;
3592 dest = gen_rtx_MEM (mode, addr);
3593 if (type != 0)
3594 {
3595 set_mem_attributes (dest, type, 1);
3596 /* Function incoming arguments may overlap with sibling call
3597 outgoing arguments and we cannot allow reordering of reads
3598 from function arguments with stores to outgoing arguments
3599 of sibling calls. */
3600 set_mem_alias_set (dest, 0);
3601 }
3602
3603 emit_move_insn (dest, x);
3604
3605 }
3606
3607 if (current_function_check_memory_usage && ! in_check_memory_usage)
3608 {
3609 in_check_memory_usage = 1;
3610 if (target == 0)
3611 target = get_push_address (GET_MODE_SIZE (mode));
3612
3613 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3614 emit_library_call (chkr_copy_bitmap_libfunc,
3615 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3616 Pmode, XEXP (x, 0), Pmode,
3617 GEN_INT (GET_MODE_SIZE (mode)),
3618 TYPE_MODE (sizetype));
3619 else
3620 emit_library_call (chkr_set_right_libfunc,
3621 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3622 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3623 TYPE_MODE (sizetype),
3624 GEN_INT (MEMORY_USE_RW),
3625 TYPE_MODE (integer_type_node));
3626 in_check_memory_usage = 0;
3627 }
3628 }
3629
3630 ret:
3631 /* If part should go in registers, copy that part
3632 into the appropriate registers. Do this now, at the end,
3633 since mem-to-mem copies above may do function calls. */
3634 if (partial > 0 && reg != 0)
3635 {
3636 /* Handle calls that pass values in multiple non-contiguous locations.
3637 The Irix 6 ABI has examples of this. */
3638 if (GET_CODE (reg) == PARALLEL)
3639 emit_group_load (reg, x, -1, align); /* ??? size? */
3640 else
3641 move_block_to_reg (REGNO (reg), x, partial, mode);
3642 }
3643
3644 if (extra && args_addr == 0 && where_pad == stack_direction)
3645 anti_adjust_stack (GEN_INT (extra));
3646
3647 if (alignment_pad && args_addr == 0)
3648 anti_adjust_stack (alignment_pad);
3649 }
3650 \f
3651 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3652 operations. */
3653
3654 static rtx
3655 get_subtarget (x)
3656 rtx x;
3657 {
3658 return ((x == 0
3659 /* Only registers can be subtargets. */
3660 || GET_CODE (x) != REG
3661 /* If the register is readonly, it can't be set more than once. */
3662 || RTX_UNCHANGING_P (x)
3663 /* Don't use hard regs to avoid extending their life. */
3664 || REGNO (x) < FIRST_PSEUDO_REGISTER
3665 /* Avoid subtargets inside loops,
3666 since they hide some invariant expressions. */
3667 || preserve_subexpressions_p ())
3668 ? 0 : x);
3669 }
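/* Illustrative sketch, not part of the original file: a typical caller
   filters its TARGET through get_subtarget before expanding an operand,
   so that unsafe targets simply degrade to NULL_RTX:

       rtx subtarget = get_subtarget (target);
       op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);

   The exact call sites vary; this only shows the intended usage.  */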
3670
3671 /* Expand an assignment that stores the value of FROM into TO.
3672 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3673 (This may contain a QUEUED rtx;
3674 if the value is constant, this rtx is a constant.)
3675 Otherwise, the returned value is NULL_RTX.
3676
3677 SUGGEST_REG is no longer actually used.
3678 It used to mean, copy the value through a register
3679 and return that register, if that is possible.
3680 We now use WANT_VALUE to decide whether to do this. */
3681
3682 rtx
3683 expand_assignment (to, from, want_value, suggest_reg)
3684 tree to, from;
3685 int want_value;
3686 int suggest_reg ATTRIBUTE_UNUSED;
3687 {
3688 rtx to_rtx = 0;
3689 rtx result;
3690
3691 /* Don't crash if the lhs of the assignment was erroneous. */
3692
3693 if (TREE_CODE (to) == ERROR_MARK)
3694 {
3695 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3696 return want_value ? result : NULL_RTX;
3697 }
3698
3699 /* Assignment of a structure component needs special treatment
3700 if the structure component's rtx is not simply a MEM.
3701 Assignment of an array element at a constant index, and assignment of
3702 an array element in an unaligned packed structure field, has the same
3703 problem. */
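/* Hedged examples, not from the original sources, of assignments that
   take the path below: a COMPONENT_REF such as

       struct { int x : 3; int y : 5; } s;
       s.y = v;

   where the member is a bit-field and needs store_bit_field, or an
   ARRAY_REF such as a[i] = v whose address involves a variable offset.  */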
3704
3705 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3706 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3707 {
3708 enum machine_mode mode1;
3709 HOST_WIDE_INT bitsize, bitpos;
3710 tree offset;
3711 int unsignedp;
3712 int volatilep = 0;
3713 tree tem;
3714 unsigned int alignment;
3715
3716 push_temp_slots ();
3717 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3718 &unsignedp, &volatilep, &alignment);
3719
3720 /* If we are going to use store_bit_field and extract_bit_field,
3721 make sure to_rtx will be safe for multiple use. */
3722
3723 if (mode1 == VOIDmode && want_value)
3724 tem = stabilize_reference (tem);
3725
3726 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3727 if (offset != 0)
3728 {
3729 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3730
3731 if (GET_CODE (to_rtx) != MEM)
3732 abort ();
3733
3734 if (GET_MODE (offset_rtx) != ptr_mode)
3735 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3736
3737 #ifdef POINTERS_EXTEND_UNSIGNED
3738 if (GET_MODE (offset_rtx) != Pmode)
3739 offset_rtx = convert_memory_address (Pmode, offset_rtx);
3740 #endif
3741
3742 /* A constant address in TO_RTX can have VOIDmode; we must not call
3743 force_reg in that case, so avoid it here. */
3744 if (GET_CODE (to_rtx) == MEM
3745 && GET_MODE (to_rtx) == BLKmode
3746 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3747 && bitsize
3748 && (bitpos % bitsize) == 0
3749 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3750 && alignment == GET_MODE_ALIGNMENT (mode1))
3751 {
3752 rtx temp
3753 = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3754
3755 if (GET_CODE (XEXP (temp, 0)) == REG)
3756 to_rtx = temp;
3757 else
3758 to_rtx = (replace_equiv_address
3759 (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
3760 XEXP (temp, 0))));
3761 bitpos = 0;
3762 }
3763
3764 to_rtx = offset_address (to_rtx, offset_rtx,
3765 highest_pow2_factor (offset));
3766 }
3767
3768 if (volatilep)
3769 {
3770 if (GET_CODE (to_rtx) == MEM)
3771 {
3772 /* When the offset is zero, to_rtx is the address of the
3773 structure we are storing into, and hence may be shared.
3774 We must make a new MEM before setting the volatile bit. */
3775 if (offset == 0)
3776 to_rtx = copy_rtx (to_rtx);
3777
3778 MEM_VOLATILE_P (to_rtx) = 1;
3779 }
3780 #if 0 /* This was turned off because, when a field is volatile
3781 in an object which is not volatile, the object may be in a register,
3782 and then we would abort here. */
3783 else
3784 abort ();
3785 #endif
3786 }
3787
3788 if (TREE_CODE (to) == COMPONENT_REF
3789 && TREE_READONLY (TREE_OPERAND (to, 1)))
3790 {
3791 if (offset == 0)
3792 to_rtx = copy_rtx (to_rtx);
3793
3794 RTX_UNCHANGING_P (to_rtx) = 1;
3795 }
3796
3797 /* Check the access. */
3798 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3799 {
3800 rtx to_addr;
3801 int size;
3802 int best_mode_size;
3803 enum machine_mode best_mode;
3804
3805 best_mode = get_best_mode (bitsize, bitpos,
3806 TYPE_ALIGN (TREE_TYPE (tem)),
3807 mode1, volatilep);
3808 if (best_mode == VOIDmode)
3809 best_mode = QImode;
3810
3811 best_mode_size = GET_MODE_BITSIZE (best_mode);
3812 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3813 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3814 size *= GET_MODE_SIZE (best_mode);
3815
3816 /* Check the access right of the pointer. */
3817 in_check_memory_usage = 1;
3818 if (size)
3819 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3820 VOIDmode, 3, to_addr, Pmode,
3821 GEN_INT (size), TYPE_MODE (sizetype),
3822 GEN_INT (MEMORY_USE_WO),
3823 TYPE_MODE (integer_type_node));
3824 in_check_memory_usage = 0;
3825 }
3826
3827 /* If this is a varying-length object, we must get the address of
3828 the source and do an explicit block move. */
3829 if (bitsize < 0)
3830 {
3831 unsigned int from_align;
3832 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3833 rtx inner_to_rtx
3834 = adjust_address (to_rtx, BLKmode, bitpos / BITS_PER_UNIT);
3835
3836 emit_block_move (inner_to_rtx, from_rtx, expr_size (from));
3837
3838 free_temp_slots ();
3839 pop_temp_slots ();
3840 return to_rtx;
3841 }
3842 else
3843 {
3844 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3845 (want_value
3846 /* Spurious cast for HPUX compiler. */
3847 ? ((enum machine_mode)
3848 TYPE_MODE (TREE_TYPE (to)))
3849 : VOIDmode),
3850 unsignedp,
3851 alignment,
3852 int_size_in_bytes (TREE_TYPE (tem)),
3853 get_alias_set (to));
3854
3855 preserve_temp_slots (result);
3856 free_temp_slots ();
3857 pop_temp_slots ();
3858
3859 /* If the value is meaningful, convert RESULT to the proper mode.
3860 Otherwise, return nothing. */
3861 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3862 TYPE_MODE (TREE_TYPE (from)),
3863 result,
3864 TREE_UNSIGNED (TREE_TYPE (to)))
3865 : NULL_RTX);
3866 }
3867 }
3868
3869 /* If the rhs is a function call and its value is not an aggregate,
3870 call the function before we start to compute the lhs.
3871 This is needed for correct code for cases such as
3872 val = setjmp (buf) on machines where reference to val
3873 requires loading up part of an address in a separate insn.
3874
3875 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3876 since it might be a promoted variable where the zero- or sign- extension
3877 needs to be done. Handling this in the normal way is safe because no
3878 computation is done before the call. */
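/* A hedged example of the situation described above, not taken from
   the original sources:

       jmp_buf buf;
       int val;
       val = setjmp (buf);

   If part of VAL's address were loaded into a register before the call,
   that register need not survive the second return from setjmp (via
   longjmp), so the call is expanded before the lhs is touched.  */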
3879 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3880 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3881 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3882 && GET_CODE (DECL_RTL (to)) == REG))
3883 {
3884 rtx value;
3885
3886 push_temp_slots ();
3887 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3888 if (to_rtx == 0)
3889 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3890
3891 /* Handle calls that return values in multiple non-contiguous locations.
3892 The Irix 6 ABI has examples of this. */
3893 if (GET_CODE (to_rtx) == PARALLEL)
3894 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3895 TYPE_ALIGN (TREE_TYPE (from)));
3896 else if (GET_MODE (to_rtx) == BLKmode)
3897 emit_block_move (to_rtx, value, expr_size (from));
3898 else
3899 {
3900 #ifdef POINTERS_EXTEND_UNSIGNED
3901 if (POINTER_TYPE_P (TREE_TYPE (to))
3902 && GET_MODE (to_rtx) != GET_MODE (value))
3903 value = convert_memory_address (GET_MODE (to_rtx), value);
3904 #endif
3905 emit_move_insn (to_rtx, value);
3906 }
3907 preserve_temp_slots (to_rtx);
3908 free_temp_slots ();
3909 pop_temp_slots ();
3910 return want_value ? to_rtx : NULL_RTX;
3911 }
3912
3913 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3914 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3915
3916 if (to_rtx == 0)
3917 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3918
3919 /* Don't move directly into a return register. */
3920 if (TREE_CODE (to) == RESULT_DECL
3921 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3922 {
3923 rtx temp;
3924
3925 push_temp_slots ();
3926 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3927
3928 if (GET_CODE (to_rtx) == PARALLEL)
3929 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3930 TYPE_ALIGN (TREE_TYPE (from)));
3931 else
3932 emit_move_insn (to_rtx, temp);
3933
3934 preserve_temp_slots (to_rtx);
3935 free_temp_slots ();
3936 pop_temp_slots ();
3937 return want_value ? to_rtx : NULL_RTX;
3938 }
3939
3940 /* In case we are returning the contents of an object which overlaps
3941 the place the value is being stored, use a safe function when copying
3942 a value through a pointer into a structure value return block. */
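/* Hedged illustration, not from the original file: in

       struct S f (struct S *p) { return *p; }

   P may point into the very block where the returned structure is
   being built, so the copy below uses memmove/bcopy, which tolerate
   overlapping source and destination.  */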
3943 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3944 && current_function_returns_struct
3945 && !current_function_returns_pcc_struct)
3946 {
3947 rtx from_rtx, size;
3948
3949 push_temp_slots ();
3950 size = expr_size (from);
3951 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3952 EXPAND_MEMORY_USE_DONT);
3953
3954 /* Copy the rights of the bitmap. */
3955 if (current_function_check_memory_usage)
3956 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3957 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3958 XEXP (from_rtx, 0), Pmode,
3959 convert_to_mode (TYPE_MODE (sizetype),
3960 size, TREE_UNSIGNED (sizetype)),
3961 TYPE_MODE (sizetype));
3962
3963 #ifdef TARGET_MEM_FUNCTIONS
3964 emit_library_call (memmove_libfunc, LCT_NORMAL,
3965 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3966 XEXP (from_rtx, 0), Pmode,
3967 convert_to_mode (TYPE_MODE (sizetype),
3968 size, TREE_UNSIGNED (sizetype)),
3969 TYPE_MODE (sizetype));
3970 #else
3971 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3972 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3973 XEXP (to_rtx, 0), Pmode,
3974 convert_to_mode (TYPE_MODE (integer_type_node),
3975 size, TREE_UNSIGNED (integer_type_node)),
3976 TYPE_MODE (integer_type_node));
3977 #endif
3978
3979 preserve_temp_slots (to_rtx);
3980 free_temp_slots ();
3981 pop_temp_slots ();
3982 return want_value ? to_rtx : NULL_RTX;
3983 }
3984
3985 /* Compute FROM and store the value in the rtx we got. */
3986
3987 push_temp_slots ();
3988 result = store_expr (from, to_rtx, want_value);
3989 preserve_temp_slots (result);
3990 free_temp_slots ();
3991 pop_temp_slots ();
3992 return want_value ? result : NULL_RTX;
3993 }
3994
3995 /* Generate code for computing expression EXP,
3996 and storing the value into TARGET.
3997 TARGET may contain a QUEUED rtx.
3998
3999 If WANT_VALUE is nonzero, return a copy of the value
4000 not in TARGET, so that we can be sure to use the proper
4001 value in a containing expression even if TARGET has something
4002 else stored in it. If possible, we copy the value through a pseudo
4003 and return that pseudo. Or, if the value is constant, we try to
4004 return the constant. In some cases, we return a pseudo
4005 copied *from* TARGET.
4006
4007 If the mode is BLKmode then we may return TARGET itself.
4008 It turns out that in BLKmode it doesn't cause a problem,
4009 because C has no operators that could combine two different
4010 assignments into the same BLKmode object with different values
4011 with no sequence point. Will other languages need this to
4012 be more thorough?
4013
4014 If WANT_VALUE is 0, we return NULL, to make sure
4015 to catch quickly any cases where the caller uses the value
4016 and fails to set WANT_VALUE. */
4017
4018 rtx
4019 store_expr (exp, target, want_value)
4020 tree exp;
4021 rtx target;
4022 int want_value;
4023 {
4024 rtx temp;
4025 int dont_return_target = 0;
4026 int dont_store_target = 0;
4027
4028 if (TREE_CODE (exp) == COMPOUND_EXPR)
4029 {
4030 /* Perform first part of compound expression, then assign from second
4031 part. */
4032 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4033 emit_queue ();
4034 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4035 }
4036 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4037 {
4038 /* For conditional expression, get safe form of the target. Then
4039 test the condition, doing the appropriate assignment on either
4040 side. This avoids the creation of unnecessary temporaries.
4041 For non-BLKmode, it is more efficient not to do this. */
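/* As a hedged example, not from the original sources, a BLKmode
   conditional store of the shape

       struct S d, s1, s2;
       d = flag ? s1 : s2;

   is expanded as "if (!flag) goto L1; d = s1; goto L2; L1: d = s2; L2:;"
   rather than building S-sized temporaries for both arms.  */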
4042
4043 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4044
4045 emit_queue ();
4046 target = protect_from_queue (target, 1);
4047
4048 do_pending_stack_adjust ();
4049 NO_DEFER_POP;
4050 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4051 start_cleanup_deferral ();
4052 store_expr (TREE_OPERAND (exp, 1), target, 0);
4053 end_cleanup_deferral ();
4054 emit_queue ();
4055 emit_jump_insn (gen_jump (lab2));
4056 emit_barrier ();
4057 emit_label (lab1);
4058 start_cleanup_deferral ();
4059 store_expr (TREE_OPERAND (exp, 2), target, 0);
4060 end_cleanup_deferral ();
4061 emit_queue ();
4062 emit_label (lab2);
4063 OK_DEFER_POP;
4064
4065 return want_value ? target : NULL_RTX;
4066 }
4067 else if (queued_subexp_p (target))
4068 /* If target contains a postincrement, let's not risk
4069 using it as the place to generate the rhs. */
4070 {
4071 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4072 {
4073 /* Expand EXP into a new pseudo. */
4074 temp = gen_reg_rtx (GET_MODE (target));
4075 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4076 }
4077 else
4078 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4079
4080 /* If target is volatile, ANSI requires accessing the value
4081 *from* the target, if it is accessed. So make that happen.
4082 In no case return the target itself. */
4083 if (! MEM_VOLATILE_P (target) && want_value)
4084 dont_return_target = 1;
4085 }
4086 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4087 && GET_MODE (target) != BLKmode)
4088 /* If target is in memory and caller wants value in a register instead,
4089 arrange that. Pass TARGET as target for expand_expr so that,
4090 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4091 We know expand_expr will not use the target in that case.
4092 Don't do this if TARGET is volatile because we are supposed
4093 to write it and then read it. */
4094 {
4095 temp = expand_expr (exp, target, GET_MODE (target), 0);
4096 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4097 {
4098 /* If TEMP is already in the desired TARGET, only copy it from
4099 memory and don't store it there again. */
4100 if (temp == target
4101 || (rtx_equal_p (temp, target)
4102 && ! side_effects_p (temp) && ! side_effects_p (target)))
4103 dont_store_target = 1;
4104 temp = copy_to_reg (temp);
4105 }
4106 dont_return_target = 1;
4107 }
4108 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4109 /* If this is a scalar in a register that is stored in a wider mode
4110 than the declared mode, compute the result into its declared mode
4111 and then convert to the wider mode. Our value is the computed
4112 expression. */
4113 {
4114 /* If we don't want a value, we can do the conversion inside EXP,
4115 which will often result in some optimizations. Do the conversion
4116 in two steps: first change the signedness, if needed, then
4117 the extend. But don't do this if the type of EXP is a subtype
4118 of something else since then the conversion might involve
4119 more than just converting modes. */
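/* Hedged sketch, not from the original file: if EXP has type
   "signed char" while TARGET is a SUBREG of an SImode pseudo with
   SUBREG_PROMOTED_UNSIGNED_P set, the two steps amount, on a typical
   32-bit target, to

       exp = convert (unsigned_char_type_node, exp);
       exp = convert (unsigned_type_node, exp);

   first matching the signedness recorded on TARGET, then widening to
   the mode of SUBREG_REG (TARGET).  */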
4120 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4121 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4122 {
4123 if (TREE_UNSIGNED (TREE_TYPE (exp))
4124 != SUBREG_PROMOTED_UNSIGNED_P (target))
4125 exp
4126 = convert
4127 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4128 TREE_TYPE (exp)),
4129 exp);
4130
4131 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4132 SUBREG_PROMOTED_UNSIGNED_P (target)),
4133 exp);
4134 }
4135
4136 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4137
4138 /* If TEMP is a volatile MEM and we want a result value, make
4139 the access now so it gets done only once. Likewise if
4140 it contains TARGET. */
4141 if (GET_CODE (temp) == MEM && want_value
4142 && (MEM_VOLATILE_P (temp)
4143 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4144 temp = copy_to_reg (temp);
4145
4146 /* If TEMP is a VOIDmode constant, use convert_modes to make
4147 sure that we properly convert it. */
4148 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4149 {
4150 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4151 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4152 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4153 GET_MODE (target), temp,
4154 SUBREG_PROMOTED_UNSIGNED_P (target));
4155 }
4156
4157 convert_move (SUBREG_REG (target), temp,
4158 SUBREG_PROMOTED_UNSIGNED_P (target));
4159
4160 /* If we promoted a constant, change the mode back down to match
4161 target. Otherwise, the caller might get confused by a result whose
4162 mode is larger than expected. */
4163
4164 if (want_value && GET_MODE (temp) != GET_MODE (target)
4165 && GET_MODE (temp) != VOIDmode)
4166 {
4167 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4168 SUBREG_PROMOTED_VAR_P (temp) = 1;
4169 SUBREG_PROMOTED_UNSIGNED_P (temp)
4170 = SUBREG_PROMOTED_UNSIGNED_P (target);
4171 }
4172
4173 return want_value ? temp : NULL_RTX;
4174 }
4175 else
4176 {
4177 temp = expand_expr (exp, target, GET_MODE (target), 0);
4178 /* Return TARGET if it's a specified hardware register.
4179 If TARGET is a volatile mem ref, either return TARGET
4180 or return a reg copied *from* TARGET; ANSI requires this.
4181
4182 Otherwise, if TEMP is not TARGET, return TEMP
4183 if it is constant (for efficiency),
4184 or if we really want the correct value. */
4185 if (!(target && GET_CODE (target) == REG
4186 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4187 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4188 && ! rtx_equal_p (temp, target)
4189 && (CONSTANT_P (temp) || want_value))
4190 dont_return_target = 1;
4191 }
4192
4193 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4194 the same as that of TARGET, adjust the constant. This is needed, for
4195 example, in case it is a CONST_DOUBLE and we want only a word-sized
4196 value. */
4197 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4198 && TREE_CODE (exp) != ERROR_MARK
4199 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4200 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4201 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4202
4203 if (current_function_check_memory_usage
4204 && GET_CODE (target) == MEM
4205 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4206 {
4207 in_check_memory_usage = 1;
4208 if (GET_CODE (temp) == MEM)
4209 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4210 VOIDmode, 3, XEXP (target, 0), Pmode,
4211 XEXP (temp, 0), Pmode,
4212 expr_size (exp), TYPE_MODE (sizetype));
4213 else
4214 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4215 VOIDmode, 3, XEXP (target, 0), Pmode,
4216 expr_size (exp), TYPE_MODE (sizetype),
4217 GEN_INT (MEMORY_USE_WO),
4218 TYPE_MODE (integer_type_node));
4219 in_check_memory_usage = 0;
4220 }
4221
4222 /* If value was not generated in the target, store it there.
4223 Convert the value to TARGET's type first if necessary. */
4224 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4225 one or both of them are volatile memory refs, we have to distinguish
4226 two cases:
4227 - expand_expr has used TARGET. In this case, we must not generate
4228 another copy. This can be detected by TEMP being the same rtx as
4229 TARGET under pointer equality (==).
4230 - expand_expr has not used TARGET; the source just happens to have
4231 the same RTX form. Since TEMP will have been created afresh by
4232 expand_expr, it will compare unequal under ==.
4233 We must generate a copy in this case, to reach the correct number
4234 of volatile memory references. */
4235
4236 if ((! rtx_equal_p (temp, target)
4237 || (temp != target && (side_effects_p (temp)
4238 || side_effects_p (target))))
4239 && TREE_CODE (exp) != ERROR_MARK
4240 && ! dont_store_target)
4241 {
4242 target = protect_from_queue (target, 1);
4243 if (GET_MODE (temp) != GET_MODE (target)
4244 && GET_MODE (temp) != VOIDmode)
4245 {
4246 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4247 if (dont_return_target)
4248 {
4249 /* In this case, we will return TEMP,
4250 so make sure it has the proper mode.
4251 But don't forget to store the value into TARGET. */
4252 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4253 emit_move_insn (target, temp);
4254 }
4255 else
4256 convert_move (target, temp, unsignedp);
4257 }
4258
4259 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4260 {
4261 /* Handle copying a string constant into an array.
4262 The string constant may be shorter than the array.
4263 So copy just the string's actual length, and clear the rest. */
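/* Hedged example, not from the original sources:

       char buf[16] = "hi";

   copies the 3 bytes of the STRING_CST (including the terminating
   NUL) and then clears the remaining 13 bytes of BUF below.  */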
4264 rtx size;
4265 rtx addr;
4266
4267 /* Get the size of the data type of the string,
4268 which is actually the size of the target. */
4269 size = expr_size (exp);
4270 if (GET_CODE (size) == CONST_INT
4271 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4272 emit_block_move (target, temp, size);
4273 else
4274 {
4275 /* Compute the size of the data to copy from the string. */
4276 tree copy_size
4277 = size_binop (MIN_EXPR,
4278 make_tree (sizetype, size),
4279 size_int (TREE_STRING_LENGTH (exp)));
4280 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4281 VOIDmode, 0);
4282 rtx label = 0;
4283
4284 /* Copy that much. */
4285 emit_block_move (target, temp, copy_size_rtx);
4286
4287 /* Figure out how much is left in TARGET that we have to clear.
4288 Do all calculations in ptr_mode. */
4289
4290 addr = XEXP (target, 0);
4291 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4292
4293 if (GET_CODE (copy_size_rtx) == CONST_INT)
4294 {
4295 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4296 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4297 }
4298 else
4299 {
4300 addr = force_reg (ptr_mode, addr);
4301 addr = expand_binop (ptr_mode, add_optab, addr,
4302 copy_size_rtx, NULL_RTX, 0,
4303 OPTAB_LIB_WIDEN);
4304
4305 size = expand_binop (ptr_mode, sub_optab, size,
4306 copy_size_rtx, NULL_RTX, 0,
4307 OPTAB_LIB_WIDEN);
4308
4309 label = gen_label_rtx ();
4310 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4311 GET_MODE (size), 0, 0, label);
4312 }
4313
4314 if (size != const0_rtx)
4315 {
4316 rtx dest = gen_rtx_MEM (BLKmode, addr);
4317
4318 MEM_COPY_ATTRIBUTES (dest, target);
4319
4320 /* Be sure we can write on ADDR. */
4321 in_check_memory_usage = 1;
4322 if (current_function_check_memory_usage)
4323 emit_library_call (chkr_check_addr_libfunc,
4324 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4325 addr, Pmode,
4326 size, TYPE_MODE (sizetype),
4327 GEN_INT (MEMORY_USE_WO),
4328 TYPE_MODE (integer_type_node));
4329 in_check_memory_usage = 0;
4330 clear_storage (dest, size);
4331 }
4332
4333 if (label)
4334 emit_label (label);
4335 }
4336 }
4337 /* Handle calls that return values in multiple non-contiguous locations.
4338 The Irix 6 ABI has examples of this. */
4339 else if (GET_CODE (target) == PARALLEL)
4340 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4341 TYPE_ALIGN (TREE_TYPE (exp)));
4342 else if (GET_MODE (temp) == BLKmode)
4343 emit_block_move (target, temp, expr_size (exp));
4344 else
4345 emit_move_insn (target, temp);
4346 }
4347
4348 /* If we don't want a value, return NULL_RTX. */
4349 if (! want_value)
4350 return NULL_RTX;
4351
4352 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4353 ??? The latter test doesn't seem to make sense. */
4354 else if (dont_return_target && GET_CODE (temp) != MEM)
4355 return temp;
4356
4357 /* Return TARGET itself if it is a hard register. */
4358 else if (want_value && GET_MODE (target) != BLKmode
4359 && ! (GET_CODE (target) == REG
4360 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4361 return copy_to_reg (target);
4362
4363 else
4364 return target;
4365 }
4366 \f
4367 /* Return 1 if EXP just contains zeros. */
4368
4369 static int
4370 is_zeros_p (exp)
4371 tree exp;
4372 {
4373 tree elt;
4374
4375 switch (TREE_CODE (exp))
4376 {
4377 case CONVERT_EXPR:
4378 case NOP_EXPR:
4379 case NON_LVALUE_EXPR:
4380 return is_zeros_p (TREE_OPERAND (exp, 0));
4381
4382 case INTEGER_CST:
4383 return integer_zerop (exp);
4384
4385 case COMPLEX_CST:
4386 return
4387 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4388
4389 case REAL_CST:
4390 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4391
4392 case CONSTRUCTOR:
4393 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4394 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4395 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4396 if (! is_zeros_p (TREE_VALUE (elt)))
4397 return 0;
4398
4399 return 1;
4400
4401 default:
4402 return 0;
4403 }
4404 }
4405
4406 /* Return 1 if EXP consists mostly (at least 3/4) of zeros. */
4407
4408 static int
4409 mostly_zeros_p (exp)
4410 tree exp;
4411 {
4412 if (TREE_CODE (exp) == CONSTRUCTOR)
4413 {
4414 int elts = 0, zeros = 0;
4415 tree elt = CONSTRUCTOR_ELTS (exp);
4416 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4417 {
4418 /* If there are no ranges of true bits, it is all zero. */
4419 return elt == NULL_TREE;
4420 }
4421 for (; elt; elt = TREE_CHAIN (elt))
4422 {
4423 /* We do not handle the case where the index is a RANGE_EXPR,
4424 so the statistic will be somewhat inaccurate.
4425 We do make a more accurate count in store_constructor itself,
4426 and since this function is only used for nested array elements,
4427 this should be close enough. */
4428 if (mostly_zeros_p (TREE_VALUE (elt)))
4429 zeros++;
4430 elts++;
4431 }
4432
4433 return 4 * zeros >= 3 * elts;
4434 }
4435
4436 return is_zeros_p (exp);
4437 }
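/* A hedged worked example, not part of the original file: for

       static int v[8] = { 0, 0, 7, 0, 0, 0, 0, 9 };

   the CONSTRUCTOR gives elts == 8 and zeros == 6, and 4 * 6 >= 3 * 8
   holds, so mostly_zeros_p returns 1 and callers will prefer to clear
   the whole object before storing the nonzero elements.  */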
4438 \f
4439 /* Helper function for store_constructor.
4440 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4441 TYPE is the type of the CONSTRUCTOR, not the element type.
4442 ALIGN and CLEARED are as for store_constructor.
4443 ALIAS_SET is the alias set to use for any stores.
4444
4445 This provides a recursive shortcut back to store_constructor when it isn't
4446 necessary to go through store_field. This is so that we can pass through
4447 the cleared field to let store_constructor know that we may not have to
4448 clear a substructure if the outer structure has already been cleared. */
4449
4450 static void
4451 store_constructor_field (target, bitsize, bitpos,
4452 mode, exp, type, align, cleared, alias_set)
4453 rtx target;
4454 unsigned HOST_WIDE_INT bitsize;
4455 HOST_WIDE_INT bitpos;
4456 enum machine_mode mode;
4457 tree exp, type;
4458 unsigned int align;
4459 int cleared;
4460 int alias_set;
4461 {
4462 if (TREE_CODE (exp) == CONSTRUCTOR
4463 && bitpos % BITS_PER_UNIT == 0
4464 /* If we have a non-zero bitpos for a register target, then we just
4465 let store_field do the bitfield handling. This is unlikely to
4466 generate unnecessary clear instructions anyway. */
4467 && (bitpos == 0 || GET_CODE (target) == MEM))
4468 {
4469 if (bitpos != 0)
4470 target
4471 = adjust_address (target,
4472 GET_MODE (target) == BLKmode
4473 || 0 != (bitpos
4474 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4475 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4476
4477
4478 /* Show the alignment may no longer be what it was and update the alias
4479 set, if required. */
4480 if (bitpos != 0)
4481 align = MIN (align, (unsigned int) bitpos & - bitpos);
4482 if (GET_CODE (target) == MEM)
4483 set_mem_alias_set (target, alias_set);
4484
4485 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4486 }
4487 else
4488 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4489 int_size_in_bytes (type), alias_set);
4490 }
4491
4492 /* Store the value of constructor EXP into the rtx TARGET.
4493 TARGET is either a REG or a MEM.
4494 ALIGN is the maximum known alignment for TARGET.
4495 CLEARED is true if TARGET is known to have been zeroed.
4496 SIZE is the number of bytes of TARGET we are allowed to modify: this
4497 may not be the same as the size of EXP if we are assigning to a field
4498 which has been packed to exclude padding bits. */
4499
4500 static void
4501 store_constructor (exp, target, align, cleared, size)
4502 tree exp;
4503 rtx target;
4504 unsigned int align;
4505 int cleared;
4506 HOST_WIDE_INT size;
4507 {
4508 tree type = TREE_TYPE (exp);
4509 #ifdef WORD_REGISTER_OPERATIONS
4510 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4511 #endif
4512
4513 /* We know our target cannot conflict, since safe_from_p has been called. */
4514 #if 0
4515 /* Don't try copying piece by piece into a hard register
4516 since that is vulnerable to being clobbered by EXP.
4517 Instead, construct in a pseudo register and then copy it all. */
4518 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4519 {
4520 rtx temp = gen_reg_rtx (GET_MODE (target));
4521 store_constructor (exp, temp, align, cleared, size);
4522 emit_move_insn (target, temp);
4523 return;
4524 }
4525 #endif
4526
4527 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4528 || TREE_CODE (type) == QUAL_UNION_TYPE)
4529 {
4530 tree elt;
4531
4532 /* Inform later passes that the whole union value is dead. */
4533 if ((TREE_CODE (type) == UNION_TYPE
4534 || TREE_CODE (type) == QUAL_UNION_TYPE)
4535 && ! cleared)
4536 {
4537 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4538
4539 /* If the constructor is empty, clear the union. */
4540 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4541 clear_storage (target, expr_size (exp));
4542 }
4543
4544 /* If we are building a static constructor into a register,
4545 set the initial value as zero so we can fold the value into
4546 a constant. But if more than one register is involved,
4547 this probably loses. */
4548 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4549 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4550 {
4551 if (! cleared)
4552 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4553
4554 cleared = 1;
4555 }
4556
4557 /* If the constructor has fewer fields than the structure
4558 or if we are initializing the structure to mostly zeros,
4559 clear the whole structure first. Don't do this if TARGET is a
4560 register whose mode size isn't equal to SIZE since clear_storage
4561 can't handle this case. */
4562 else if (size > 0
4563 && ((list_length (CONSTRUCTOR_ELTS (exp))
4564 != fields_length (type))
4565 || mostly_zeros_p (exp))
4566 && (GET_CODE (target) != REG
4567 || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
4568 {
4569 if (! cleared)
4570 clear_storage (target, GEN_INT (size));
4571
4572 cleared = 1;
4573 }
4574 else if (! cleared)
4575 /* Inform later passes that the old value is dead. */
4576 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4577
4578 /* Store each element of the constructor into
4579 the corresponding field of TARGET. */
4580
4581 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4582 {
4583 tree field = TREE_PURPOSE (elt);
4584 #ifdef WORD_REGISTER_OPERATIONS
4585 tree value = TREE_VALUE (elt);
4586 #endif
4587 enum machine_mode mode;
4588 HOST_WIDE_INT bitsize;
4589 HOST_WIDE_INT bitpos = 0;
4590 int unsignedp;
4591 tree offset;
4592 rtx to_rtx = target;
4593
4594 /* Just ignore missing fields.
4595 We cleared the whole structure, above,
4596 if any fields are missing. */
4597 if (field == 0)
4598 continue;
4599
4600 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4601 continue;
4602
4603 if (host_integerp (DECL_SIZE (field), 1))
4604 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4605 else
4606 bitsize = -1;
4607
4608 unsignedp = TREE_UNSIGNED (field);
4609 mode = DECL_MODE (field);
4610 if (DECL_BIT_FIELD (field))
4611 mode = VOIDmode;
4612
4613 offset = DECL_FIELD_OFFSET (field);
4614 if (host_integerp (offset, 0)
4615 && host_integerp (bit_position (field), 0))
4616 {
4617 bitpos = int_bit_position (field);
4618 offset = 0;
4619 }
4620 else
4621 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4622
4623 if (offset)
4624 {
4625 rtx offset_rtx;
4626
4627 if (contains_placeholder_p (offset))
4628 offset = build (WITH_RECORD_EXPR, sizetype,
4629 offset, make_tree (TREE_TYPE (exp), target));
4630
4631 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4632 if (GET_CODE (to_rtx) != MEM)
4633 abort ();
4634
4635 if (GET_MODE (offset_rtx) != ptr_mode)
4636 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4637
4638 #ifdef POINTERS_EXTEND_UNSIGNED
4639 if (GET_MODE (offset_rtx) != Pmode)
4640 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4641 #endif
4642
4643 to_rtx = offset_address (to_rtx, offset_rtx,
4644 highest_pow2_factor (offset));
4645
4646 align = DECL_OFFSET_ALIGN (field);
4647 }
4648
4649 if (TREE_READONLY (field))
4650 {
4651 if (GET_CODE (to_rtx) == MEM)
4652 to_rtx = copy_rtx (to_rtx);
4653
4654 RTX_UNCHANGING_P (to_rtx) = 1;
4655 }
4656
4657 #ifdef WORD_REGISTER_OPERATIONS
4658 /* If this initializes a field that is smaller than a word, at the
4659 start of a word, try to widen it to a full word.
4660 This special case allows us to output C++ member function
4661 initializations in a form that the optimizers can understand. */
4662 if (GET_CODE (target) == REG
4663 && bitsize < BITS_PER_WORD
4664 && bitpos % BITS_PER_WORD == 0
4665 && GET_MODE_CLASS (mode) == MODE_INT
4666 && TREE_CODE (value) == INTEGER_CST
4667 && exp_size >= 0
4668 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4669 {
4670 tree type = TREE_TYPE (value);
4671 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4672 {
4673 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4674 value = convert (type, value);
4675 }
4676 if (BYTES_BIG_ENDIAN)
4677 value
4678 = fold (build (LSHIFT_EXPR, type, value,
4679 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4680 bitsize = BITS_PER_WORD;
4681 mode = word_mode;
4682 }
4683 #endif
4684 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4685 TREE_VALUE (elt), type, align, cleared,
4686 (DECL_NONADDRESSABLE_P (field)
4687 && GET_CODE (to_rtx) == MEM)
4688 ? MEM_ALIAS_SET (to_rtx)
4689 : get_alias_set (TREE_TYPE (field)));
4690 }
4691 }
4692 else if (TREE_CODE (type) == ARRAY_TYPE)
4693 {
4694 tree elt;
4695 int i;
4696 int need_to_clear;
4697 tree domain = TYPE_DOMAIN (type);
4698 tree elttype = TREE_TYPE (type);
4699 int const_bounds_p = (TYPE_MIN_VALUE (domain)
4700 && TYPE_MAX_VALUE (domain)
4701 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4702 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4703 HOST_WIDE_INT minelt = 0;
4704 HOST_WIDE_INT maxelt = 0;
4705
4706 /* If we have constant bounds for the range of the type, get them. */
4707 if (const_bounds_p)
4708 {
4709 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4710 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4711 }
4712
4713 /* If the constructor has fewer elements than the array,
4714 clear the whole array first. Similarly if this is
4715 a static constructor of a non-BLKmode object. */
4716 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4717 need_to_clear = 1;
4718 else
4719 {
4720 HOST_WIDE_INT count = 0, zero_count = 0;
4721 need_to_clear = ! const_bounds_p;
4722
4723 /* This loop is a more accurate version of the loop in
4724 mostly_zeros_p (it handles RANGE_EXPR in an index).
4725 It is also needed to check for missing elements. */
4726 for (elt = CONSTRUCTOR_ELTS (exp);
4727 elt != NULL_TREE && ! need_to_clear;
4728 elt = TREE_CHAIN (elt))
4729 {
4730 tree index = TREE_PURPOSE (elt);
4731 HOST_WIDE_INT this_node_count;
4732
4733 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4734 {
4735 tree lo_index = TREE_OPERAND (index, 0);
4736 tree hi_index = TREE_OPERAND (index, 1);
4737
4738 if (! host_integerp (lo_index, 1)
4739 || ! host_integerp (hi_index, 1))
4740 {
4741 need_to_clear = 1;
4742 break;
4743 }
4744
4745 this_node_count = (tree_low_cst (hi_index, 1)
4746 - tree_low_cst (lo_index, 1) + 1);
4747 }
4748 else
4749 this_node_count = 1;
4750
4751 count += this_node_count;
4752 if (mostly_zeros_p (TREE_VALUE (elt)))
4753 zero_count += this_node_count;
4754 }
4755
4756 /* Clear the entire array first if there are any missing elements,
4757 or if the incidence of zero elements is >= 75%. */
4758 if (! need_to_clear
4759 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4760 need_to_clear = 1;
4761 }
4762
4763 if (need_to_clear && size > 0)
4764 {
4765 if (! cleared)
4766 clear_storage (target, GEN_INT (size));
4767 cleared = 1;
4768 }
4769 else if (REG_P (target))
4770 /* Inform later passes that the old value is dead. */
4771 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4772
4773 /* Store each element of the constructor into
4774 the corresponding element of TARGET, determined
4775 by counting the elements. */
4776 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4777 elt;
4778 elt = TREE_CHAIN (elt), i++)
4779 {
4780 enum machine_mode mode;
4781 HOST_WIDE_INT bitsize;
4782 HOST_WIDE_INT bitpos;
4783 int unsignedp;
4784 tree value = TREE_VALUE (elt);
4785 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4786 tree index = TREE_PURPOSE (elt);
4787 rtx xtarget = target;
4788
4789 if (cleared && is_zeros_p (value))
4790 continue;
4791
4792 unsignedp = TREE_UNSIGNED (elttype);
4793 mode = TYPE_MODE (elttype);
4794 if (mode == BLKmode)
4795 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4796 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4797 : -1);
4798 else
4799 bitsize = GET_MODE_BITSIZE (mode);
4800
4801 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4802 {
4803 tree lo_index = TREE_OPERAND (index, 0);
4804 tree hi_index = TREE_OPERAND (index, 1);
4805 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4806 struct nesting *loop;
4807 HOST_WIDE_INT lo, hi, count;
4808 tree position;
4809
4810 /* If the range is constant and "small", unroll the loop. */
4811 if (const_bounds_p
4812 && host_integerp (lo_index, 0)
4813 && host_integerp (hi_index, 0)
4814 && (lo = tree_low_cst (lo_index, 0),
4815 hi = tree_low_cst (hi_index, 0),
4816 count = hi - lo + 1,
4817 (GET_CODE (target) != MEM
4818 || count <= 2
4819 || (host_integerp (TYPE_SIZE (elttype), 1)
4820 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4821 <= 40 * 8)))))
4822 {
4823 lo -= minelt; hi -= minelt;
4824 for (; lo <= hi; lo++)
4825 {
4826 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4827 store_constructor_field
4828 (target, bitsize, bitpos, mode, value, type, align,
4829 cleared,
4830 TYPE_NONALIASED_COMPONENT (type)
4831 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
4832 }
4833 }
4834 else
4835 {
4836 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4837 loop_top = gen_label_rtx ();
4838 loop_end = gen_label_rtx ();
4839
4840 unsignedp = TREE_UNSIGNED (domain);
4841
4842 index = build_decl (VAR_DECL, NULL_TREE, domain);
4843
4844 index_r
4845 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4846 &unsignedp, 0));
4847 SET_DECL_RTL (index, index_r);
4848 if (TREE_CODE (value) == SAVE_EXPR
4849 && SAVE_EXPR_RTL (value) == 0)
4850 {
4851 /* Make sure value gets expanded once before the
4852 loop. */
4853 expand_expr (value, const0_rtx, VOIDmode, 0);
4854 emit_queue ();
4855 }
4856 store_expr (lo_index, index_r, 0);
4857 loop = expand_start_loop (0);
4858
4859 /* Assign value to element index. */
4860 position
4861 = convert (ssizetype,
4862 fold (build (MINUS_EXPR, TREE_TYPE (index),
4863 index, TYPE_MIN_VALUE (domain))));
4864 position = size_binop (MULT_EXPR, position,
4865 convert (ssizetype,
4866 TYPE_SIZE_UNIT (elttype)));
4867
4868 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4869 xtarget = offset_address (target, pos_rtx,
4870 highest_pow2_factor (position));
4871 xtarget = adjust_address (xtarget, mode, 0);
4872 if (TREE_CODE (value) == CONSTRUCTOR)
4873 store_constructor (value, xtarget, align, cleared,
4874 bitsize / BITS_PER_UNIT);
4875 else
4876 store_expr (value, xtarget, 0);
4877
4878 expand_exit_loop_if_false (loop,
4879 build (LT_EXPR, integer_type_node,
4880 index, hi_index));
4881
4882 expand_increment (build (PREINCREMENT_EXPR,
4883 TREE_TYPE (index),
4884 index, integer_one_node), 0, 0);
4885 expand_end_loop ();
4886 emit_label (loop_end);
4887 }
4888 }
4889 else if ((index != 0 && ! host_integerp (index, 0))
4890 || ! host_integerp (TYPE_SIZE (elttype), 1))
4891 {
4892 tree position;
4893
4894 if (index == 0)
4895 index = ssize_int (1);
4896
4897 if (minelt)
4898 index = convert (ssizetype,
4899 fold (build (MINUS_EXPR, index,
4900 TYPE_MIN_VALUE (domain))));
4901
4902 position = size_binop (MULT_EXPR, index,
4903 convert (ssizetype,
4904 TYPE_SIZE_UNIT (elttype)));
4905 xtarget = offset_address (target,
4906 expand_expr (position, 0, VOIDmode, 0),
4907 highest_pow2_factor (position));
4908 xtarget = adjust_address (xtarget, mode, 0);
4909 store_expr (value, xtarget, 0);
4910 }
4911 else
4912 {
4913 if (index != 0)
4914 bitpos = ((tree_low_cst (index, 0) - minelt)
4915 * tree_low_cst (TYPE_SIZE (elttype), 1));
4916 else
4917 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4918
4919 store_constructor_field (target, bitsize, bitpos, mode, value,
4920 type, align, cleared,
4921 TYPE_NONALIASED_COMPONENT (type)
4922 && GET_CODE (target) == MEM
4923 ? MEM_ALIAS_SET (target)
4924 : get_alias_set (elttype));
4925
4926 }
4927 }
4928 }
4929
4930 /* Set constructor assignments. */
4931 else if (TREE_CODE (type) == SET_TYPE)
4932 {
4933 tree elt = CONSTRUCTOR_ELTS (exp);
4934 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4935 tree domain = TYPE_DOMAIN (type);
4936 tree domain_min, domain_max, bitlength;
4937
4938 /* The default implementation strategy is to extract the constant
4939 parts of the constructor, use that to initialize the target,
4940 and then "or" in whatever non-constant ranges we need in addition.
4941
4942 If a large set is all zero or all ones, it is
4943 probably better to set it using memset (if available) or bzero.
4944 Also, if a large set has just a single range, it may also be
4945 better to first clear the whole set (using bzero/memset) and
4946 then set the bits we want. */
4947
4948 /* Check for all zeros. */
4949 if (elt == NULL_TREE && size > 0)
4950 {
4951 if (!cleared)
4952 clear_storage (target, GEN_INT (size));
4953 return;
4954 }
4955
4956 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4957 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4958 bitlength = size_binop (PLUS_EXPR,
4959 size_diffop (domain_max, domain_min),
4960 ssize_int (1));
4961
4962 nbits = tree_low_cst (bitlength, 1);
4963
4964 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4965 are "complicated" (more than one range), initialize (the
4966 constant parts) by copying from a constant. */
4967 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4968 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4969 {
4970 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4971 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4972 char *bit_buffer = (char *) alloca (nbits);
4973 HOST_WIDE_INT word = 0;
4974 unsigned int bit_pos = 0;
4975 unsigned int ibit = 0;
4976 unsigned int offset = 0; /* In bytes from beginning of set. */
4977
4978 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4979 for (;;)
4980 {
4981 if (bit_buffer[ibit])
4982 {
4983 if (BYTES_BIG_ENDIAN)
4984 word |= (1 << (set_word_size - 1 - bit_pos));
4985 else
4986 word |= 1 << bit_pos;
4987 }
4988
4989 bit_pos++; ibit++;
4990 if (bit_pos >= set_word_size || ibit == nbits)
4991 {
4992 if (word != 0 || ! cleared)
4993 {
4994 rtx datum = GEN_INT (word);
4995 rtx to_rtx;
4996
4997 /* The assumption here is that it is safe to use
4998 XEXP if the set is multi-word, but not if
4999 it's single-word. */
5000 if (GET_CODE (target) == MEM)
5001 to_rtx = adjust_address (target, mode, offset);
5002 else if (offset == 0)
5003 to_rtx = target;
5004 else
5005 abort ();
5006 emit_move_insn (to_rtx, datum);
5007 }
5008
5009 if (ibit == nbits)
5010 break;
5011 word = 0;
5012 bit_pos = 0;
5013 offset += set_word_size / BITS_PER_UNIT;
5014 }
5015 }
5016 }
5017 else if (!cleared)
5018 /* Don't bother clearing storage if the set is all ones. */
5019 if (TREE_CHAIN (elt) != NULL_TREE
5020 || (TREE_PURPOSE (elt) == NULL_TREE
5021 ? nbits != 1
5022 : ( ! host_integerp (TREE_VALUE (elt), 0)
5023 || ! host_integerp (TREE_PURPOSE (elt), 0)
5024 || (tree_low_cst (TREE_VALUE (elt), 0)
5025 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5026 != (HOST_WIDE_INT) nbits))))
5027 clear_storage (target, expr_size (exp));
5028
5029 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5030 {
5031 /* Start of range of element or NULL. */
5032 tree startbit = TREE_PURPOSE (elt);
5033 /* End of range of element, or element value. */
5034 tree endbit = TREE_VALUE (elt);
5035 #ifdef TARGET_MEM_FUNCTIONS
5036 HOST_WIDE_INT startb, endb;
5037 #endif
5038 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5039
5040 bitlength_rtx = expand_expr (bitlength,
5041 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5042
5043 /* Handle non-range tuple element like [ expr ]. */
5044 if (startbit == NULL_TREE)
5045 {
5046 startbit = save_expr (endbit);
5047 endbit = startbit;
5048 }
5049
5050 startbit = convert (sizetype, startbit);
5051 endbit = convert (sizetype, endbit);
5052 if (! integer_zerop (domain_min))
5053 {
5054 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5055 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5056 }
5057 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5058 EXPAND_CONST_ADDRESS);
5059 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5060 EXPAND_CONST_ADDRESS);
5061
5062 if (REG_P (target))
5063 {
5064 targetx
5065 = assign_temp
5066 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
5067 TYPE_QUAL_CONST)),
5068 0, 1, 1);
5069 emit_move_insn (targetx, target);
5070 }
5071
5072 else if (GET_CODE (target) == MEM)
5073 targetx = target;
5074 else
5075 abort ();
5076
5077 #ifdef TARGET_MEM_FUNCTIONS
5078 /* Optimization: If startbit and endbit are
5079 constants divisible by BITS_PER_UNIT,
5080 call memset instead. */
5081 if (TREE_CODE (startbit) == INTEGER_CST
5082 && TREE_CODE (endbit) == INTEGER_CST
5083 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5084 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5085 {
5086 emit_library_call (memset_libfunc, LCT_NORMAL,
5087 VOIDmode, 3,
5088 plus_constant (XEXP (targetx, 0),
5089 startb / BITS_PER_UNIT),
5090 Pmode,
5091 constm1_rtx, TYPE_MODE (integer_type_node),
5092 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5093 TYPE_MODE (sizetype));
5094 }
5095 else
5096 #endif
5097 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5098 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5099 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5100 startbit_rtx, TYPE_MODE (sizetype),
5101 endbit_rtx, TYPE_MODE (sizetype));
5102
5103 if (REG_P (target))
5104 emit_move_insn (target, targetx);
5105 }
5106 }
5107
5108 else
5109 abort ();
5110 }
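/* Hedged examples, not from the original sources, of constructors that
   reach the branches above:

       struct P { int x, y, z; } p = { 1 };

   is a record constructor with missing fields, so the whole object is
   cleared before the single element is stored;

       int a[100] = { [2 ... 90] = 1 };

   uses a RANGE_EXPR index and, being large, is expanded as a loop.
   SET_TYPE constructors never arise from C; they come from front ends
   with set types (e.g. CHILL) and are built with memset/__setbits.  */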
5111
5112 /* Store the value of EXP (an expression tree)
5113 into a subfield of TARGET which has mode MODE and occupies
5114 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5115 If MODE is VOIDmode, it means that we are storing into a bit-field.
5116
5117 If VALUE_MODE is VOIDmode, return nothing in particular.
5118 UNSIGNEDP is not used in this case.
5119
5120 Otherwise, return an rtx for the value stored. This rtx
5121 has mode VALUE_MODE if that is convenient to do.
5122 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5123
5124 ALIGN is the alignment that TARGET is known to have.
5125 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
5126
5127 ALIAS_SET is the alias set for the destination. This value will
5128 (in general) be different from that for TARGET, since TARGET is a
5129 reference to the containing structure. */
5130
5131 static rtx
5132 store_field (target, bitsize, bitpos, mode, exp, value_mode,
5133 unsignedp, align, total_size, alias_set)
5134 rtx target;
5135 HOST_WIDE_INT bitsize;
5136 HOST_WIDE_INT bitpos;
5137 enum machine_mode mode;
5138 tree exp;
5139 enum machine_mode value_mode;
5140 int unsignedp;
5141 unsigned int align;
5142 HOST_WIDE_INT total_size;
5143 int alias_set;
5144 {
5145 HOST_WIDE_INT width_mask = 0;
5146
5147 if (TREE_CODE (exp) == ERROR_MARK)
5148 return const0_rtx;
5149
5150 /* If we have nothing to store, do nothing unless the expression has
5151 side-effects. */
5152 if (bitsize == 0)
5153 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5154
5155 if (bitsize < HOST_BITS_PER_WIDE_INT)
5156 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5157
5158 /* If we are storing into an unaligned field of an aligned union that is
5159 in a register, we may have the mode of TARGET being an integer mode but
5160 MODE == BLKmode. In that case, get an aligned object whose size and
5161 alignment are the same as TARGET and store TARGET into it (we can avoid
5162 the store if the field being stored is the entire width of TARGET). Then
5163 call ourselves recursively to store the field into a BLKmode version of
5164 that object. Finally, load from the object into TARGET. This is not
5165 very efficient in general, but should only be slightly more expensive
5166 than the otherwise-required unaligned accesses. Perhaps this can be
5167 cleaned up later. */
5168
5169 if (mode == BLKmode
5170 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5171 {
5172 rtx object
5173 = assign_temp
5174 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5175 TYPE_QUAL_CONST),
5176 0, 1, 1);
5177 rtx blk_object = copy_rtx (object);
5178
5179 PUT_MODE (blk_object, BLKmode);
5180
5181 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5182 emit_move_insn (object, target);
5183
5184 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5185 align, total_size, alias_set);
5186
5187 /* Even though we aren't returning target, we need to
5188 give it the updated value. */
5189 emit_move_insn (target, object);
5190
5191 return blk_object;
5192 }
5193
5194 if (GET_CODE (target) == CONCAT)
5195 {
5196 /* We're storing into a struct containing a single __complex. */
5197
5198 if (bitpos != 0)
5199 abort ();
5200 return store_expr (exp, target, 0);
5201 }
5202
5203 /* If the structure is in a register or if the component
5204 is a bit field, we cannot use addressing to access it.
5205 Use bit-field techniques or SUBREG to store in it. */
5206
5207 if (mode == VOIDmode
5208 || (mode != BLKmode && ! direct_store[(int) mode]
5209 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5210 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5211 || GET_CODE (target) == REG
5212 || GET_CODE (target) == SUBREG
5213 /* If the field isn't aligned enough to store as an ordinary memref,
5214 store it as a bit field. */
5215 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5216 && (align < GET_MODE_ALIGNMENT (mode)
5217 || bitpos % GET_MODE_ALIGNMENT (mode)))
5218 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5219 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
5220 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5221 /* If the RHS and field are a constant size and the size of the
5222 RHS isn't the same size as the bitfield, we must use bitfield
5223 operations. */
5224 || (bitsize >= 0
5225 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5226 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5227 {
5228 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5229
5230 /* If BITSIZE is narrower than the size of the type of EXP
5231 we will be narrowing TEMP. Normally, what's wanted are the
5232 low-order bits. However, if EXP's type is a record and this is
5233 a big-endian machine, we want the upper BITSIZE bits. */
5234 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5235 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5236 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5237 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5238 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5239 - bitsize),
5240 temp, 1);
5241
5242 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5243 MODE. */
5244 if (mode != VOIDmode && mode != BLKmode
5245 && mode != TYPE_MODE (TREE_TYPE (exp)))
5246 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5247
5248 /* If the modes of TARGET and TEMP are both BLKmode, both
5249 must be in memory and BITPOS must be aligned on a byte
5250 boundary. If so, we simply do a block copy. */
5251 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5252 {
5253 unsigned int exp_align = expr_align (exp);
5254
5255 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5256 || bitpos % BITS_PER_UNIT != 0)
5257 abort ();
5258
5259 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5260
5261 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5262 align = MIN (exp_align, align);
5263
5264 /* Find an alignment that is consistent with the bit position. */
5265 while ((bitpos % align) != 0)
5266 align >>= 1;
5267
5268 emit_block_move (target, temp,
5269 bitsize == -1 ? expr_size (exp)
5270 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5271 / BITS_PER_UNIT));
5272
5273 return value_mode == VOIDmode ? const0_rtx : target;
5274 }
5275
5276 /* Store the value in the bitfield. */
5277 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5278 if (value_mode != VOIDmode)
5279 {
5280 /* The caller wants an rtx for the value. */
5281 /* If possible, avoid refetching from the bitfield itself. */
5282 if (width_mask != 0
5283 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5284 {
5285 tree count;
5286 enum machine_mode tmode;
5287
5288 if (unsignedp)
5289 return expand_and (temp,
5290 GEN_INT
5291 (trunc_int_for_mode
5292 (width_mask,
5293 GET_MODE (temp) == VOIDmode
5294 ? value_mode
5295 : GET_MODE (temp))), NULL_RTX);
5296 tmode = GET_MODE (temp);
5297 if (tmode == VOIDmode)
5298 tmode = value_mode;
5299 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5300 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5301 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5302 }
5303 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5304 NULL_RTX, value_mode, 0, align,
5305 total_size);
5306 }
5307 return const0_rtx;
5308 }
5309 else
5310 {
5311 rtx addr = XEXP (target, 0);
5312 rtx to_rtx;
5313
5314 /* If a value is wanted, it must be the lhs;
5315 so make the address stable for multiple use. */
5316
5317 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5318 && ! CONSTANT_ADDRESS_P (addr)
5319 /* A frame-pointer reference is already stable. */
5320 && ! (GET_CODE (addr) == PLUS
5321 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5322 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5323 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5324 target = replace_equiv_address (target, copy_to_reg (addr));
5325
5326 /* Now build a reference to just the desired component. */
5327
5328 to_rtx = copy_rtx (adjust_address (target, mode,
5329 bitpos / BITS_PER_UNIT));
5330
5331 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5332 set_mem_alias_set (to_rtx, alias_set);
5333
5334 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5335 }
5336 }
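/* Hedged example, not from the original file: a store such as

       struct { unsigned int f : 3; } s;
       s.f = v;

   arrives here with MODE == VOIDmode and BITSIZE == 3, so the value is
   inserted with store_bit_field rather than a plain emit_move_insn.  */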
5337 \f
5338 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5339 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5340 codes and find the ultimate containing object, which we return.
5341
5342 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5343 bit position, and *PUNSIGNEDP to the signedness of the field.
5344 If the position of the field is variable, we store a tree
5345 giving the variable offset (in units) in *POFFSET.
5346 This offset is in addition to the bit position.
5347 If the position is not variable, we store 0 in *POFFSET.
5348 We set *PALIGNMENT to the alignment of the address that will be
5349 computed. This is the alignment of the thing we return if *POFFSET
5350 is zero, but may be less strictly aligned if *POFFSET is nonzero.
5351
5352 If any of the extraction expressions is volatile,
5353 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5354
5355 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5356 is a mode that can be used to access the field. In that case, *PBITSIZE
5357 is redundant.
5358
5359 If the field describes a variable-sized object, *PMODE is set to
5360 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5361 this case, but the address of the object can be found. */
5362
5363 tree
5364 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5365 punsignedp, pvolatilep, palignment)
5366 tree exp;
5367 HOST_WIDE_INT *pbitsize;
5368 HOST_WIDE_INT *pbitpos;
5369 tree *poffset;
5370 enum machine_mode *pmode;
5371 int *punsignedp;
5372 int *pvolatilep;
5373 unsigned int *palignment;
5374 {
5375 tree size_tree = 0;
5376 enum machine_mode mode = VOIDmode;
5377 tree offset = size_zero_node;
5378 tree bit_offset = bitsize_zero_node;
5379 unsigned int alignment = BIGGEST_ALIGNMENT;
5380 tree placeholder_ptr = 0;
5381 tree tem;
5382
5383 /* First get the mode, signedness, and size. We do this from just the
5384 outermost expression. */
5385 if (TREE_CODE (exp) == COMPONENT_REF)
5386 {
5387 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5388 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5389 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5390
5391 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5392 }
5393 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5394 {
5395 size_tree = TREE_OPERAND (exp, 1);
5396 *punsignedp = TREE_UNSIGNED (exp);
5397 }
5398 else
5399 {
5400 mode = TYPE_MODE (TREE_TYPE (exp));
5401 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5402
5403 if (mode == BLKmode)
5404 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5405 else
5406 *pbitsize = GET_MODE_BITSIZE (mode);
5407 }
5408
5409 if (size_tree != 0)
5410 {
5411 if (! host_integerp (size_tree, 1))
5412 mode = BLKmode, *pbitsize = -1;
5413 else
5414 *pbitsize = tree_low_cst (size_tree, 1);
5415 }
5416
5417 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5418 and find the ultimate containing object. */
5419 while (1)
5420 {
5421 if (TREE_CODE (exp) == BIT_FIELD_REF)
5422 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5423 else if (TREE_CODE (exp) == COMPONENT_REF)
5424 {
5425 tree field = TREE_OPERAND (exp, 1);
5426 tree this_offset = DECL_FIELD_OFFSET (field);
5427
5428 /* If this field hasn't been filled in yet, don't go
5429 past it. This should only happen when folding expressions
5430 made during type construction. */
5431 if (this_offset == 0)
5432 break;
5433 else if (! TREE_CONSTANT (this_offset)
5434 && contains_placeholder_p (this_offset))
5435 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5436
5437 offset = size_binop (PLUS_EXPR, offset, this_offset);
5438 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5439 DECL_FIELD_BIT_OFFSET (field));
5440
5441 if (! host_integerp (offset, 0))
5442 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5443 }
5444
5445 else if (TREE_CODE (exp) == ARRAY_REF
5446 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5447 {
5448 tree index = TREE_OPERAND (exp, 1);
5449 tree array = TREE_OPERAND (exp, 0);
5450 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5451 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5452 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5453
5454 /* We assume all arrays have sizes that are a multiple of a byte.
5455 First subtract the lower bound, if any, in the type of the
5456 index, then convert to sizetype and multiply by the size of the
5457 array element. */
5458 if (low_bound != 0 && ! integer_zerop (low_bound))
5459 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5460 index, low_bound));
5461
5462 /* If the index has a self-referential type, pass it to a
5463 WITH_RECORD_EXPR; if the component size does, pass the
5464 containing array to one. */
5465 if (! TREE_CONSTANT (index)
5466 && contains_placeholder_p (index))
5467 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5468 if (! TREE_CONSTANT (unit_size)
5469 && contains_placeholder_p (unit_size))
5470 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5471
5472 offset = size_binop (PLUS_EXPR, offset,
5473 size_binop (MULT_EXPR,
5474 convert (sizetype, index),
5475 unit_size));
5476 }
5477
5478 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5479 {
5480 exp = find_placeholder (exp, &placeholder_ptr);
5481 continue;
5482 }
5483 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5484 && ! ((TREE_CODE (exp) == NOP_EXPR
5485 || TREE_CODE (exp) == CONVERT_EXPR)
5486 && (TYPE_MODE (TREE_TYPE (exp))
5487 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5488 break;
5489
5490 /* If any reference in the chain is volatile, the effect is volatile. */
5491 if (TREE_THIS_VOLATILE (exp))
5492 *pvolatilep = 1;
5493
5494 /* If the offset is non-constant already, then we can't assume any
5495 alignment more than the alignment here. */
5496 if (! TREE_CONSTANT (offset))
5497 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5498
5499 exp = TREE_OPERAND (exp, 0);
5500 }
5501
5502 if (DECL_P (exp))
5503 alignment = MIN (alignment, DECL_ALIGN (exp));
5504 else if (TREE_TYPE (exp) != 0)
5505 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5506
5507 /* If OFFSET is constant, see if we can return the whole thing as a
5508 constant bit position. Otherwise, split it up. */
5509 if (host_integerp (offset, 0)
5510 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5511 bitsize_unit_node))
5512 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5513 && host_integerp (tem, 0))
5514 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5515 else
5516 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5517
5518 *pmode = mode;
5519 *palignment = alignment;
5520 return exp;
5521 }
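/* Example (an illustrative sketch only): for `s.b' with
   `struct { int a; int b; } s;' on a typical 32-bit target, a caller
   might decompose the reference like this:

     HOST_WIDE_INT bitsize, bitpos;
     tree offset;
     enum machine_mode mode1;
     int unsignedp, volatilep = 0;
     unsigned int alignment;
     tree inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                       &mode1, &unsignedp, &volatilep,
                                       &alignment);

   and would get back the VAR_DECL for `s' with *PBITSIZE == 32,
   *PBITPOS == 32, *POFFSET == 0 and *PMODE == SImode.  */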
5522
5523 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5524
5525 static enum memory_use_mode
5526 get_memory_usage_from_modifier (modifier)
5527 enum expand_modifier modifier;
5528 {
5529 switch (modifier)
5530 {
5531 case EXPAND_NORMAL:
5532 case EXPAND_SUM:
5533 return MEMORY_USE_RO;
5534 break;
5535 case EXPAND_MEMORY_USE_WO:
5536 return MEMORY_USE_WO;
5537 break;
5538 case EXPAND_MEMORY_USE_RW:
5539 return MEMORY_USE_RW;
5540 break;
5541 case EXPAND_MEMORY_USE_DONT:
5542 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5543 MEMORY_USE_DONT, because they are modifiers to a call of
5544 expand_expr in the ADDR_EXPR case of expand_expr. */
5545 case EXPAND_CONST_ADDRESS:
5546 case EXPAND_INITIALIZER:
5547 return MEMORY_USE_DONT;
5548 case EXPAND_MEMORY_USE_BAD:
5549 default:
5550 abort ();
5551 }
5552 }
5553 \f
5554 /* Given an rtx VALUE that may contain additions and multiplications, return
5555 an equivalent value that just refers to a register, memory, or constant.
5556 This is done by generating instructions to perform the arithmetic and
5557 returning a pseudo-register containing the value.
5558
5559 The returned value may be a REG, SUBREG, MEM or constant. */
5560
5561 rtx
5562 force_operand (value, target)
5563 rtx value, target;
5564 {
5565 optab binoptab = 0;
5566 /* Use a temporary to force order of execution of calls to
5567 `force_operand'. */
5568 rtx tmp;
5569 rtx op2;
5570 /* Use subtarget as the target for operand 0 of a binary operation. */
5571 rtx subtarget = get_subtarget (target);
5572
5573 /* Check for a PIC address load. */
5574 if (flag_pic
5575 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5576 && XEXP (value, 0) == pic_offset_table_rtx
5577 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5578 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5579 || GET_CODE (XEXP (value, 1)) == CONST))
5580 {
5581 if (!subtarget)
5582 subtarget = gen_reg_rtx (GET_MODE (value));
5583 emit_move_insn (subtarget, value);
5584 return subtarget;
5585 }
5586
5587 if (GET_CODE (value) == PLUS)
5588 binoptab = add_optab;
5589 else if (GET_CODE (value) == MINUS)
5590 binoptab = sub_optab;
5591 else if (GET_CODE (value) == MULT)
5592 {
5593 op2 = XEXP (value, 1);
5594 if (!CONSTANT_P (op2)
5595 && !(GET_CODE (op2) == REG && op2 != subtarget))
5596 subtarget = 0;
5597 tmp = force_operand (XEXP (value, 0), subtarget);
5598 return expand_mult (GET_MODE (value), tmp,
5599 force_operand (op2, NULL_RTX),
5600 target, 1);
5601 }
5602
5603 if (binoptab)
5604 {
5605 op2 = XEXP (value, 1);
5606 if (!CONSTANT_P (op2)
5607 && !(GET_CODE (op2) == REG && op2 != subtarget))
5608 subtarget = 0;
5609 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5610 {
5611 binoptab = add_optab;
5612 op2 = negate_rtx (GET_MODE (value), op2);
5613 }
5614
5615 /* Check for an addition with OP2 a constant integer and our first
5616 operand a PLUS of a virtual register and something else. In that
5617 case, we want to emit the sum of the virtual register and the
5618 constant first and then add the other value. This allows virtual
5619 register instantiation to simply modify the constant rather than
5620 creating another one around this addition. */
5621 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5622 && GET_CODE (XEXP (value, 0)) == PLUS
5623 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5624 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5625 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5626 {
5627 rtx temp = expand_binop (GET_MODE (value), binoptab,
5628 XEXP (XEXP (value, 0), 0), op2,
5629 subtarget, 0, OPTAB_LIB_WIDEN);
5630 return expand_binop (GET_MODE (value), binoptab, temp,
5631 force_operand (XEXP (XEXP (value, 0), 1), 0),
5632 target, 0, OPTAB_LIB_WIDEN);
5633 }
5634
5635 tmp = force_operand (XEXP (value, 0), subtarget);
5636 return expand_binop (GET_MODE (value), binoptab, tmp,
5637 force_operand (op2, NULL_RTX),
5638 target, 0, OPTAB_LIB_WIDEN);
5639 /* We give UNSIGNEDP = 0 to expand_binop
5640 because the only operations we are expanding here are signed ones. */
5641 }
5642 return value;
5643 }
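/* Example (illustrative sketch): a caller holding an address such as
   (plus:SI (reg:SI 100) (const_int 4)) can flatten it with

     rtx op = force_operand (gen_rtx_PLUS (SImode, addr_reg, GEN_INT (4)),
                             NULL_RTX);

   which emits the addition via expand_binop and returns a pseudo register
   (or the value unchanged when no arithmetic is needed).  */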
5644 \f
5645 /* Subroutine of expand_expr: return nonzero iff there is no way that
5646 EXP can reference X, which is being modified. TOP_P is nonzero if this
5647 call is going to be used to determine whether we need a temporary
5648 for EXP, as opposed to a recursive call to this function.
5649
5650 It is always safe for this routine to return zero since it merely
5651 searches for optimization opportunities. */
5652
5653 int
5654 safe_from_p (x, exp, top_p)
5655 rtx x;
5656 tree exp;
5657 int top_p;
5658 {
5659 rtx exp_rtl = 0;
5660 int i, nops;
5661 static tree save_expr_list;
5662
5663 if (x == 0
5664 /* If EXP has varying size, we MUST use a target since we currently
5665 have no way of allocating temporaries of variable size
5666 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5667 So we assume here that something at a higher level has prevented a
5668 clash. This is somewhat bogus, but the best we can do. Only
5669 do this when X is BLKmode and when we are at the top level. */
5670 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5671 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5672 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5673 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5674 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5675 != INTEGER_CST)
5676 && GET_MODE (x) == BLKmode)
5677 /* If X is in the outgoing argument area, it is always safe. */
5678 || (GET_CODE (x) == MEM
5679 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5680 || (GET_CODE (XEXP (x, 0)) == PLUS
5681 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5682 return 1;
5683
5684 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5685 find the underlying pseudo. */
5686 if (GET_CODE (x) == SUBREG)
5687 {
5688 x = SUBREG_REG (x);
5689 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5690 return 0;
5691 }
5692
5693 /* A SAVE_EXPR might appear many times in the expression passed to the
5694 top-level safe_from_p call, and if it has a complex subexpression,
5695 examining it multiple times could result in a combinatorial explosion.
5696 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5697 with optimization took about 28 minutes to compile -- even though it was
5698 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5699 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5700 we have processed. Note that the only test of top_p was above. */
5701
5702 if (top_p)
5703 {
5704 int rtn;
5705 tree t;
5706
5707 save_expr_list = 0;
5708
5709 rtn = safe_from_p (x, exp, 0);
5710
5711 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5712 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5713
5714 return rtn;
5715 }
5716
5717 /* Now look at our tree code and possibly recurse. */
5718 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5719 {
5720 case 'd':
5721 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5722 break;
5723
5724 case 'c':
5725 return 1;
5726
5727 case 'x':
5728 if (TREE_CODE (exp) == TREE_LIST)
5729 return ((TREE_VALUE (exp) == 0
5730 || safe_from_p (x, TREE_VALUE (exp), 0))
5731 && (TREE_CHAIN (exp) == 0
5732 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5733 else if (TREE_CODE (exp) == ERROR_MARK)
5734 return 1; /* An already-visited SAVE_EXPR? */
5735 else
5736 return 0;
5737
5738 case '1':
5739 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5740
5741 case '2':
5742 case '<':
5743 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5744 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5745
5746 case 'e':
5747 case 'r':
5748 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5749 the expression. If it is set, we conflict iff we are that rtx or
5750 both are in memory. Otherwise, we check all operands of the
5751 expression recursively. */
5752
5753 switch (TREE_CODE (exp))
5754 {
5755 case ADDR_EXPR:
5756 return (staticp (TREE_OPERAND (exp, 0))
5757 || TREE_STATIC (exp)
5758 || safe_from_p (x, TREE_OPERAND (exp, 0), 0));
5759
5760 case INDIRECT_REF:
5761 if (GET_CODE (x) == MEM
5762 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5763 get_alias_set (exp)))
5764 return 0;
5765 break;
5766
5767 case CALL_EXPR:
5768 /* Assume that the call will clobber all hard registers and
5769 all of memory. */
5770 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5771 || GET_CODE (x) == MEM)
5772 return 0;
5773 break;
5774
5775 case RTL_EXPR:
5776 /* If a sequence exists, we would have to scan every instruction
5777 in the sequence to see if it was safe. This is probably not
5778 worthwhile. */
5779 if (RTL_EXPR_SEQUENCE (exp))
5780 return 0;
5781
5782 exp_rtl = RTL_EXPR_RTL (exp);
5783 break;
5784
5785 case WITH_CLEANUP_EXPR:
5786 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5787 break;
5788
5789 case CLEANUP_POINT_EXPR:
5790 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5791
5792 case SAVE_EXPR:
5793 exp_rtl = SAVE_EXPR_RTL (exp);
5794 if (exp_rtl)
5795 break;
5796
5797 /* If we've already scanned this, don't do it again. Otherwise,
5798 show we've scanned it and record it so that the flag can be
5799 cleared when we're done. */
5800 if (TREE_PRIVATE (exp))
5801 return 1;
5802
5803 TREE_PRIVATE (exp) = 1;
5804 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5805 {
5806 TREE_PRIVATE (exp) = 0;
5807 return 0;
5808 }
5809
5810 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5811 return 1;
5812
5813 case BIND_EXPR:
5814 /* The only operand we look at is operand 1. The rest aren't
5815 part of the expression. */
5816 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5817
5818 case METHOD_CALL_EXPR:
5819 /* This takes an rtx argument, but shouldn't appear here. */
5820 abort ();
5821
5822 default:
5823 break;
5824 }
5825
5826 /* If we have an rtx, we do not need to scan our operands. */
5827 if (exp_rtl)
5828 break;
5829
5830 nops = first_rtl_op (TREE_CODE (exp));
5831 for (i = 0; i < nops; i++)
5832 if (TREE_OPERAND (exp, i) != 0
5833 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5834 return 0;
5835
5836 /* If this is a language-specific tree code, it may require
5837 special handling. */
5838 if ((unsigned int) TREE_CODE (exp)
5839 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5840 && lang_safe_from_p
5841 && !(*lang_safe_from_p) (x, exp))
5842 return 0;
5843 }
5844
5845 /* If we have an rtx, find any enclosed object. Then see if we conflict
5846 with it. */
5847 if (exp_rtl)
5848 {
5849 if (GET_CODE (exp_rtl) == SUBREG)
5850 {
5851 exp_rtl = SUBREG_REG (exp_rtl);
5852 if (GET_CODE (exp_rtl) == REG
5853 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5854 return 0;
5855 }
5856
5857 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5858 are memory and they conflict. */
5859 return ! (rtx_equal_p (x, exp_rtl)
5860 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5861 && true_dependence (exp_rtl, GET_MODE (x), x,
5862 rtx_addr_varies_p)));
5863 }
5864
5865 /* If we reach here, it is safe. */
5866 return 1;
5867 }
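/* Example (illustrative): when expanding an assignment such as `*p = f ();'
   with the MEM for `*p' already chosen as the target, safe_from_p (target,
   call_expr, 1) returns 0 because a call is assumed to clobber all of
   memory, so the caller will not compute the call directly into that MEM.  */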
5868
5869 /* Subroutine of expand_expr: return rtx if EXP is a
5870 variable or parameter; else return 0. */
5871
5872 static rtx
5873 var_rtx (exp)
5874 tree exp;
5875 {
5876 STRIP_NOPS (exp);
5877 switch (TREE_CODE (exp))
5878 {
5879 case PARM_DECL:
5880 case VAR_DECL:
5881 return DECL_RTL (exp);
5882 default:
5883 return 0;
5884 }
5885 }
5886
5887 #ifdef MAX_INTEGER_COMPUTATION_MODE
5888
5889 void
5890 check_max_integer_computation_mode (exp)
5891 tree exp;
5892 {
5893 enum tree_code code;
5894 enum machine_mode mode;
5895
5896 /* Strip any NOPs that don't change the mode. */
5897 STRIP_NOPS (exp);
5898 code = TREE_CODE (exp);
5899
5900 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5901 if (code == NOP_EXPR
5902 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5903 return;
5904
5905 /* First check the type of the overall operation. We need only look at
5906 unary, binary and relational operations. */
5907 if (TREE_CODE_CLASS (code) == '1'
5908 || TREE_CODE_CLASS (code) == '2'
5909 || TREE_CODE_CLASS (code) == '<')
5910 {
5911 mode = TYPE_MODE (TREE_TYPE (exp));
5912 if (GET_MODE_CLASS (mode) == MODE_INT
5913 && mode > MAX_INTEGER_COMPUTATION_MODE)
5914 internal_error ("unsupported wide integer operation");
5915 }
5916
5917 /* Check operand of a unary op. */
5918 if (TREE_CODE_CLASS (code) == '1')
5919 {
5920 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5921 if (GET_MODE_CLASS (mode) == MODE_INT
5922 && mode > MAX_INTEGER_COMPUTATION_MODE)
5923 internal_error ("unsupported wide integer operation");
5924 }
5925
5926 /* Check operands of a binary/comparison op. */
5927 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5928 {
5929 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5930 if (GET_MODE_CLASS (mode) == MODE_INT
5931 && mode > MAX_INTEGER_COMPUTATION_MODE)
5932 internal_error ("unsupported wide integer operation");
5933
5934 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5935 if (GET_MODE_CLASS (mode) == MODE_INT
5936 && mode > MAX_INTEGER_COMPUTATION_MODE)
5937 internal_error ("unsupported wide integer operation");
5938 }
5939 }
5940 #endif
5941 \f
5942 /* Return the highest power of two that EXP is known to be a multiple of.
5943 This is used in updating alignment of MEMs in array references. */
5944
5945 static HOST_WIDE_INT
5946 highest_pow2_factor (exp)
5947 tree exp;
5948 {
5949 HOST_WIDE_INT c0, c1;
5950
5951 switch (TREE_CODE (exp))
5952 {
5953 case INTEGER_CST:
5954 /* If the integer is expressible in a HOST_WIDE_INT, we can find
5955 the lowest bit that's a one. If the result is zero or negative,
5956 pessimize by returning 1. This is overly-conservative, but such
5957 things should not happen in the offset expressions that we are
5958 called with. */
5959 if (host_integerp (exp, 0))
5960 {
5961 c0 = tree_low_cst (exp, 0);
5962 return c0 >= 0 ? c0 & -c0 : 1;
5963 }
5964 break;
5965
5966 case PLUS_EXPR: case MINUS_EXPR:
5967 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5968 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5969 return MIN (c0, c1);
5970
5971 case MULT_EXPR:
5972 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5973 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5974 return c0 * c1;
5975
5976 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5977 case CEIL_DIV_EXPR:
5978 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5979 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5980 return MAX (1, c0 / c1);
5981
5982 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5983 case COMPOUND_EXPR: case SAVE_EXPR:
5984 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5985
5986 case COND_EXPR:
5987 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5988 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5989 return MIN (c0, c1);
5990
5991 default:
5992 break;
5993 }
5994
5995 return 1;
5996 }
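/* Example (illustrative): for an offset expression such as `i * 4 + 8',
   the MULT_EXPR contributes 1 * 4 = 4, the PLUS_EXPR takes the minimum of
   that and 8, and the result is 4, so a MEM addressed with that offset can
   be assumed to be 4-byte aligned (relative to its base).  */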
5997 \f
5998 /* Return an object on the placeholder list that matches EXP, a
5999 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6000 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6001 tree.def. If no such object is found, abort. If PLIST is nonzero, it is
6002 a location which initially points to a starting location in the
6003 placeholder list (zero means start of the list) and where a pointer into
6004 the placeholder list at which the object is found is placed. */
6005
6006 tree
6007 find_placeholder (exp, plist)
6008 tree exp;
6009 tree *plist;
6010 {
6011 tree type = TREE_TYPE (exp);
6012 tree placeholder_expr;
6013
6014 for (placeholder_expr
6015 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6016 placeholder_expr != 0;
6017 placeholder_expr = TREE_CHAIN (placeholder_expr))
6018 {
6019 tree need_type = TYPE_MAIN_VARIANT (type);
6020 tree elt;
6021
6022 /* Find the outermost reference that is of the type we want. If none,
6023 see if any object has a type that is a pointer to the type we
6024 want. */
6025 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6026 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6027 || TREE_CODE (elt) == COND_EXPR)
6028 ? TREE_OPERAND (elt, 1)
6029 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6030 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6031 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6032 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6033 ? TREE_OPERAND (elt, 0) : 0))
6034 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6035 {
6036 if (plist)
6037 *plist = placeholder_expr;
6038 return elt;
6039 }
6040
6041 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6042 elt
6043 = ((TREE_CODE (elt) == COMPOUND_EXPR
6044 || TREE_CODE (elt) == COND_EXPR)
6045 ? TREE_OPERAND (elt, 1)
6046 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6047 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6048 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6049 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6050 ? TREE_OPERAND (elt, 0) : 0))
6051 if (POINTER_TYPE_P (TREE_TYPE (elt))
6052 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6053 == need_type))
6054 {
6055 if (plist)
6056 *plist = placeholder_expr;
6057 return build1 (INDIRECT_REF, need_type, elt);
6058 }
6059 }
6060
6061 abort ();
6062 }
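/* Illustrative note (a sketch of the intended use): Ada record types whose
   field sizes depend on a discriminant are laid out with PLACEHOLDER_EXPRs
   standing for the not-yet-known record object.  When expand_expr reaches a
   WITH_RECORD_EXPR it pushes the actual object onto placeholder_list, so
   that this routine can resolve a later PLACEHOLDER_EXPR of the matching
   type (or of a pointer to it) against that object.  */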
6063 \f
6064 /* expand_expr: generate code for computing expression EXP.
6065 An rtx for the computed value is returned. The value is never null.
6066 In the case of a void EXP, const0_rtx is returned.
6067
6068 The value may be stored in TARGET if TARGET is nonzero.
6069 TARGET is just a suggestion; callers must assume that
6070 the rtx returned may not be the same as TARGET.
6071
6072 If TARGET is CONST0_RTX, it means that the value will be ignored.
6073
6074 If TMODE is not VOIDmode, it suggests generating the
6075 result in mode TMODE. But this is done only when convenient.
6076 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6077 TMODE is just a suggestion; callers must assume that
6078 the rtx returned may not have mode TMODE.
6079
6080 Note that TARGET may have neither TMODE nor MODE. In that case, it
6081 probably will not be used.
6082
6083 If MODIFIER is EXPAND_SUM then when EXP is an addition
6084 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6085 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6086 products as above, or REG or MEM, or constant.
6087 Ordinarily in such cases we would output mul or add instructions
6088 and then return a pseudo reg containing the sum.
6089
6090 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6091 it also marks a label as absolutely required (it can't be dead).
6092 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6093 This is used for outputting expressions used in initializers.
6094
6095 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6096 with a constant address even if that address is not normally legitimate.
6097 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
6098
6099 rtx
6100 expand_expr (exp, target, tmode, modifier)
6101 tree exp;
6102 rtx target;
6103 enum machine_mode tmode;
6104 enum expand_modifier modifier;
6105 {
6106 rtx op0, op1, temp;
6107 tree type = TREE_TYPE (exp);
6108 int unsignedp = TREE_UNSIGNED (type);
6109 enum machine_mode mode;
6110 enum tree_code code = TREE_CODE (exp);
6111 optab this_optab;
6112 rtx subtarget, original_target;
6113 int ignore;
6114 tree context;
6115 /* Used by check-memory-usage to make the modifier read-only. */
6116 enum expand_modifier ro_modifier;
6117
6118 /* Handle ERROR_MARK before anybody tries to access its type. */
6119 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6120 {
6121 op0 = CONST0_RTX (tmode);
6122 if (op0 != 0)
6123 return op0;
6124 return const0_rtx;
6125 }
6126
6127 mode = TYPE_MODE (type);
6128 /* Use subtarget as the target for operand 0 of a binary operation. */
6129 subtarget = get_subtarget (target);
6130 original_target = target;
6131 ignore = (target == const0_rtx
6132 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6133 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6134 || code == COND_EXPR)
6135 && TREE_CODE (type) == VOID_TYPE));
6136
6137 /* Make a read-only version of the modifier. */
6138 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
6139 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
6140 ro_modifier = modifier;
6141 else
6142 ro_modifier = EXPAND_NORMAL;
6143
6144 /* If we are going to ignore this result, we need only do something
6145 if there is a side-effect somewhere in the expression. If there
6146 is, short-circuit the most common cases here. Note that we must
6147 not call expand_expr with anything but const0_rtx in case this
6148 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6149
6150 if (ignore)
6151 {
6152 if (! TREE_SIDE_EFFECTS (exp))
6153 return const0_rtx;
6154
6155 /* Ensure we reference a volatile object even if value is ignored, but
6156 don't do this if all we are doing is taking its address. */
6157 if (TREE_THIS_VOLATILE (exp)
6158 && TREE_CODE (exp) != FUNCTION_DECL
6159 && mode != VOIDmode && mode != BLKmode
6160 && modifier != EXPAND_CONST_ADDRESS)
6161 {
6162 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
6163 if (GET_CODE (temp) == MEM)
6164 temp = copy_to_reg (temp);
6165 return const0_rtx;
6166 }
6167
6168 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6169 || code == INDIRECT_REF || code == BUFFER_REF)
6170 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6171 VOIDmode, ro_modifier);
6172 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6173 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6174 {
6175 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6176 ro_modifier);
6177 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6178 ro_modifier);
6179 return const0_rtx;
6180 }
6181 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6182 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6183 /* If the second operand has no side effects, just evaluate
6184 the first. */
6185 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6186 VOIDmode, ro_modifier);
6187 else if (code == BIT_FIELD_REF)
6188 {
6189 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6190 ro_modifier);
6191 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6192 ro_modifier);
6193 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode,
6194 ro_modifier);
6195 return const0_rtx;
6196 }
6197 ;
6198 target = 0;
6199 }
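  /* For instance (illustrative): expanding `(void) (a < b);' with no side
     effects returns const0_rtx at the top of the block above, while
     `(void) *p' for a volatile `*p' recurses through the conversion and
     then hits the TREE_THIS_VOLATILE branch, which emits the load before
     discarding the value.  */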
6200
6201 #ifdef MAX_INTEGER_COMPUTATION_MODE
6202 /* Only check stuff here if the mode we want is different from the mode
6203 of the expression; if it's the same, check_max_integer_computation_mode
6204 will handle it. Do we really need to check this stuff at all? */
6205
6206 if (target
6207 && GET_MODE (target) != mode
6208 && TREE_CODE (exp) != INTEGER_CST
6209 && TREE_CODE (exp) != PARM_DECL
6210 && TREE_CODE (exp) != ARRAY_REF
6211 && TREE_CODE (exp) != ARRAY_RANGE_REF
6212 && TREE_CODE (exp) != COMPONENT_REF
6213 && TREE_CODE (exp) != BIT_FIELD_REF
6214 && TREE_CODE (exp) != INDIRECT_REF
6215 && TREE_CODE (exp) != CALL_EXPR
6216 && TREE_CODE (exp) != VAR_DECL
6217 && TREE_CODE (exp) != RTL_EXPR)
6218 {
6219 enum machine_mode mode = GET_MODE (target);
6220
6221 if (GET_MODE_CLASS (mode) == MODE_INT
6222 && mode > MAX_INTEGER_COMPUTATION_MODE)
6223 internal_error ("unsupported wide integer operation");
6224 }
6225
6226 if (tmode != mode
6227 && TREE_CODE (exp) != INTEGER_CST
6228 && TREE_CODE (exp) != PARM_DECL
6229 && TREE_CODE (exp) != ARRAY_REF
6230 && TREE_CODE (exp) != ARRAY_RANGE_REF
6231 && TREE_CODE (exp) != COMPONENT_REF
6232 && TREE_CODE (exp) != BIT_FIELD_REF
6233 && TREE_CODE (exp) != INDIRECT_REF
6234 && TREE_CODE (exp) != VAR_DECL
6235 && TREE_CODE (exp) != CALL_EXPR
6236 && TREE_CODE (exp) != RTL_EXPR
6237 && GET_MODE_CLASS (tmode) == MODE_INT
6238 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6239 internal_error ("unsupported wide integer operation");
6240
6241 check_max_integer_computation_mode (exp);
6242 #endif
6243
6244 /* If will do cse, generate all results into pseudo registers
6245 since 1) that allows cse to find more things
6246 and 2) otherwise cse could produce an insn the machine
6247 cannot support. */
6248
6249 if (! cse_not_expected && mode != BLKmode && target
6250 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6251 target = subtarget;
6252
6253 switch (code)
6254 {
6255 case LABEL_DECL:
6256 {
6257 tree function = decl_function_context (exp);
6258 /* Handle using a label in a containing function. */
6259 if (function != current_function_decl
6260 && function != inline_function_decl && function != 0)
6261 {
6262 struct function *p = find_function_data (function);
6263 p->expr->x_forced_labels
6264 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6265 p->expr->x_forced_labels);
6266 }
6267 else
6268 {
6269 if (modifier == EXPAND_INITIALIZER)
6270 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6271 label_rtx (exp),
6272 forced_labels);
6273 }
6274
6275 temp = gen_rtx_MEM (FUNCTION_MODE,
6276 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6277 if (function != current_function_decl
6278 && function != inline_function_decl && function != 0)
6279 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6280 return temp;
6281 }
6282
6283 case PARM_DECL:
6284 if (DECL_RTL (exp) == 0)
6285 {
6286 error_with_decl (exp, "prior parameter's size depends on `%s'");
6287 return CONST0_RTX (mode);
6288 }
6289
6290 /* ... fall through ... */
6291
6292 case VAR_DECL:
6293 /* If a static var's type was incomplete when the decl was written,
6294 but the type is complete now, lay out the decl now. */
6295 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6296 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6297 {
6298 layout_decl (exp, 0);
6299 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6300 }
6301
6302 /* Although static-storage variables start off initialized, according to
6303 ANSI C, a memcpy could overwrite them with uninitialized values. So
6304 we check them too. This also lets us check for read-only variables
6305 accessed via a non-const declaration, in case it won't be detected
6306 any other way (e.g., in an embedded system or OS kernel without
6307 memory protection).
6308
6309 Aggregates are not checked here; they're handled elsewhere. */
6310 if (cfun && current_function_check_memory_usage
6311 && code == VAR_DECL
6312 && GET_CODE (DECL_RTL (exp)) == MEM
6313 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6314 {
6315 enum memory_use_mode memory_usage;
6316 memory_usage = get_memory_usage_from_modifier (modifier);
6317
6318 in_check_memory_usage = 1;
6319 if (memory_usage != MEMORY_USE_DONT)
6320 emit_library_call (chkr_check_addr_libfunc,
6321 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6322 XEXP (DECL_RTL (exp), 0), Pmode,
6323 GEN_INT (int_size_in_bytes (type)),
6324 TYPE_MODE (sizetype),
6325 GEN_INT (memory_usage),
6326 TYPE_MODE (integer_type_node));
6327 in_check_memory_usage = 0;
6328 }
6329
6330 /* ... fall through ... */
6331
6332 case FUNCTION_DECL:
6333 case RESULT_DECL:
6334 if (DECL_RTL (exp) == 0)
6335 abort ();
6336
6337 /* Ensure the variable is marked as used even if it doesn't go through
6338 a parser. If it hasn't been used yet, write out an external
6339 definition. */
6340 if (! TREE_USED (exp))
6341 {
6342 assemble_external (exp);
6343 TREE_USED (exp) = 1;
6344 }
6345
6346 /* Show we haven't gotten RTL for this yet. */
6347 temp = 0;
6348
6349 /* Handle variables inherited from containing functions. */
6350 context = decl_function_context (exp);
6351
6352 /* We treat inline_function_decl as an alias for the current function
6353 because that is the inline function whose vars, types, etc.
6354 are being merged into the current function.
6355 See expand_inline_function. */
6356
6357 if (context != 0 && context != current_function_decl
6358 && context != inline_function_decl
6359 /* If var is static, we don't need a static chain to access it. */
6360 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6361 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6362 {
6363 rtx addr;
6364
6365 /* Mark as non-local and addressable. */
6366 DECL_NONLOCAL (exp) = 1;
6367 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6368 abort ();
6369 mark_addressable (exp);
6370 if (GET_CODE (DECL_RTL (exp)) != MEM)
6371 abort ();
6372 addr = XEXP (DECL_RTL (exp), 0);
6373 if (GET_CODE (addr) == MEM)
6374 addr
6375 = replace_equiv_address (addr,
6376 fix_lexical_addr (XEXP (addr, 0), exp));
6377 else
6378 addr = fix_lexical_addr (addr, exp);
6379
6380 temp = replace_equiv_address (DECL_RTL (exp), addr);
6381 }
6382
6383 /* This is the case of an array whose size is to be determined
6384 from its initializer, while the initializer is still being parsed.
6385 See expand_decl. */
6386
6387 else if (GET_CODE (DECL_RTL (exp)) == MEM
6388 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6389 temp = validize_mem (DECL_RTL (exp));
6390
6391 /* If DECL_RTL is memory, we are in the normal case and either
6392 the address is not valid or it is not a register and -fforce-addr
6393 is specified, get the address into a register. */
6394
6395 else if (GET_CODE (DECL_RTL (exp)) == MEM
6396 && modifier != EXPAND_CONST_ADDRESS
6397 && modifier != EXPAND_SUM
6398 && modifier != EXPAND_INITIALIZER
6399 && (! memory_address_p (DECL_MODE (exp),
6400 XEXP (DECL_RTL (exp), 0))
6401 || (flag_force_addr
6402 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6403 temp = replace_equiv_address (DECL_RTL (exp),
6404 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6405
6406 /* If we got something, return it. But first, set the alignment
6407 if the address is a register. */
6408 if (temp != 0)
6409 {
6410 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6411 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6412
6413 return temp;
6414 }
6415
6416 /* If the mode of DECL_RTL does not match that of the decl, it
6417 must be a promoted value. We return a SUBREG of the wanted mode,
6418 but mark it so that we know that it was already extended. */
6419
6420 if (GET_CODE (DECL_RTL (exp)) == REG
6421 && GET_MODE (DECL_RTL (exp)) != mode)
6422 {
6423 /* Get the signedness used for this variable. Ensure we get the
6424 same mode we got when the variable was declared. */
6425 if (GET_MODE (DECL_RTL (exp))
6426 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6427 abort ();
6428
6429 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6430 SUBREG_PROMOTED_VAR_P (temp) = 1;
6431 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6432 return temp;
6433 }
6434
6435 return DECL_RTL (exp);
6436
6437 case INTEGER_CST:
6438 return immed_double_const (TREE_INT_CST_LOW (exp),
6439 TREE_INT_CST_HIGH (exp), mode);
6440
6441 case CONST_DECL:
6442 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6443 EXPAND_MEMORY_USE_BAD);
6444
6445 case REAL_CST:
6446 /* If optimized, generate immediate CONST_DOUBLE
6447 which will be turned into memory by reload if necessary.
6448
6449 We used to force a register so that loop.c could see it. But
6450 this does not allow gen_* patterns to perform optimizations with
6451 the constants. It also produces two insns in cases like "x = 1.0;".
6452 On most machines, floating-point constants are not permitted in
6453 many insns, so we'd end up copying it to a register in any case.
6454
6455 Now, we do the copying in expand_binop, if appropriate. */
6456 return immed_real_const (exp);
6457
6458 case COMPLEX_CST:
6459 case STRING_CST:
6460 if (! TREE_CST_RTL (exp))
6461 output_constant_def (exp, 1);
6462
6463 /* TREE_CST_RTL probably contains a constant address.
6464 On RISC machines where a constant address isn't valid,
6465 make some insns to get that address into a register. */
6466 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6467 && modifier != EXPAND_CONST_ADDRESS
6468 && modifier != EXPAND_INITIALIZER
6469 && modifier != EXPAND_SUM
6470 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6471 || (flag_force_addr
6472 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6473 return replace_equiv_address (TREE_CST_RTL (exp),
6474 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6475 return TREE_CST_RTL (exp);
6476
6477 case EXPR_WITH_FILE_LOCATION:
6478 {
6479 rtx to_return;
6480 const char *saved_input_filename = input_filename;
6481 int saved_lineno = lineno;
6482 input_filename = EXPR_WFL_FILENAME (exp);
6483 lineno = EXPR_WFL_LINENO (exp);
6484 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6485 emit_line_note (input_filename, lineno);
6486 /* Possibly avoid switching back and forth here. */
6487 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6488 input_filename = saved_input_filename;
6489 lineno = saved_lineno;
6490 return to_return;
6491 }
6492
6493 case SAVE_EXPR:
6494 context = decl_function_context (exp);
6495
6496 /* If this SAVE_EXPR was at global context, assume we are an
6497 initialization function and move it into our context. */
6498 if (context == 0)
6499 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6500
6501 /* We treat inline_function_decl as an alias for the current function
6502 because that is the inline function whose vars, types, etc.
6503 are being merged into the current function.
6504 See expand_inline_function. */
6505 if (context == current_function_decl || context == inline_function_decl)
6506 context = 0;
6507
6508 /* If this is non-local, handle it. */
6509 if (context)
6510 {
6511 /* The following call just exists to abort if the context is
6512 not of a containing function. */
6513 find_function_data (context);
6514
6515 temp = SAVE_EXPR_RTL (exp);
6516 if (temp && GET_CODE (temp) == REG)
6517 {
6518 put_var_into_stack (exp);
6519 temp = SAVE_EXPR_RTL (exp);
6520 }
6521 if (temp == 0 || GET_CODE (temp) != MEM)
6522 abort ();
6523 return
6524 replace_equiv_address (temp,
6525 fix_lexical_addr (XEXP (temp, 0), exp));
6526 }
6527 if (SAVE_EXPR_RTL (exp) == 0)
6528 {
6529 if (mode == VOIDmode)
6530 temp = const0_rtx;
6531 else
6532 temp = assign_temp (build_qualified_type (type,
6533 (TYPE_QUALS (type)
6534 | TYPE_QUAL_CONST)),
6535 3, 0, 0);
6536
6537 SAVE_EXPR_RTL (exp) = temp;
6538 if (!optimize && GET_CODE (temp) == REG)
6539 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6540 save_expr_regs);
6541
6542 /* If the mode of TEMP does not match that of the expression, it
6543 must be a promoted value. We pass store_expr a SUBREG of the
6544 wanted mode but mark it so that we know that it was already
6545 extended. Note that `unsignedp' was modified above in
6546 this case. */
6547
6548 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6549 {
6550 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6551 SUBREG_PROMOTED_VAR_P (temp) = 1;
6552 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6553 }
6554
6555 if (temp == const0_rtx)
6556 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6557 EXPAND_MEMORY_USE_BAD);
6558 else
6559 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6560
6561 TREE_USED (exp) = 1;
6562 }
6563
6564 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6565 must be a promoted value. We return a SUBREG of the wanted mode,
6566 but mark it so that we know that it was already extended. */
6567
6568 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6569 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6570 {
6571 /* Compute the signedness and make the proper SUBREG. */
6572 promote_mode (type, mode, &unsignedp, 0);
6573 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6574 SUBREG_PROMOTED_VAR_P (temp) = 1;
6575 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6576 return temp;
6577 }
6578
6579 return SAVE_EXPR_RTL (exp);
6580
6581 case UNSAVE_EXPR:
6582 {
6583 rtx temp;
6584 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6585 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6586 return temp;
6587 }
6588
6589 case PLACEHOLDER_EXPR:
6590 {
6591 tree old_list = placeholder_list;
6592 tree placeholder_expr = 0;
6593
6594 exp = find_placeholder (exp, &placeholder_expr);
6595 placeholder_list = TREE_CHAIN (placeholder_expr);
6596 temp = expand_expr (exp, original_target, tmode, ro_modifier);
6597 placeholder_list = old_list;
6598 return temp;
6599 }
6600
6601 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6602 abort ();
6603
6604 case WITH_RECORD_EXPR:
6605 /* Put the object on the placeholder list, expand our first operand,
6606 and pop the list. */
6607 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6608 placeholder_list);
6609 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6610 tmode, ro_modifier);
6611 placeholder_list = TREE_CHAIN (placeholder_list);
6612 return target;
6613
6614 case GOTO_EXPR:
6615 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6616 expand_goto (TREE_OPERAND (exp, 0));
6617 else
6618 expand_computed_goto (TREE_OPERAND (exp, 0));
6619 return const0_rtx;
6620
6621 case EXIT_EXPR:
6622 expand_exit_loop_if_false (NULL,
6623 invert_truthvalue (TREE_OPERAND (exp, 0)));
6624 return const0_rtx;
6625
6626 case LABELED_BLOCK_EXPR:
6627 if (LABELED_BLOCK_BODY (exp))
6628 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6629 /* Should perhaps use expand_label, but this is simpler and safer. */
6630 do_pending_stack_adjust ();
6631 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6632 return const0_rtx;
6633
6634 case EXIT_BLOCK_EXPR:
6635 if (EXIT_BLOCK_RETURN (exp))
6636 sorry ("returned value in block_exit_expr");
6637 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6638 return const0_rtx;
6639
6640 case LOOP_EXPR:
6641 push_temp_slots ();
6642 expand_start_loop (1);
6643 expand_expr_stmt (TREE_OPERAND (exp, 0));
6644 expand_end_loop ();
6645 pop_temp_slots ();
6646
6647 return const0_rtx;
6648
6649 case BIND_EXPR:
6650 {
6651 tree vars = TREE_OPERAND (exp, 0);
6652 int vars_need_expansion = 0;
6653
6654 /* Need to open a binding contour here because
6655 if there are any cleanups they must be contained here. */
6656 expand_start_bindings (2);
6657
6658 /* Mark the corresponding BLOCK for output in its proper place. */
6659 if (TREE_OPERAND (exp, 2) != 0
6660 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6661 insert_block (TREE_OPERAND (exp, 2));
6662
6663 /* If VARS have not yet been expanded, expand them now. */
6664 while (vars)
6665 {
6666 if (!DECL_RTL_SET_P (vars))
6667 {
6668 vars_need_expansion = 1;
6669 expand_decl (vars);
6670 }
6671 expand_decl_init (vars);
6672 vars = TREE_CHAIN (vars);
6673 }
6674
6675 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6676
6677 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6678
6679 return temp;
6680 }
6681
6682 case RTL_EXPR:
6683 if (RTL_EXPR_SEQUENCE (exp))
6684 {
6685 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6686 abort ();
6687 emit_insns (RTL_EXPR_SEQUENCE (exp));
6688 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6689 }
6690 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6691 free_temps_for_rtl_expr (exp);
6692 return RTL_EXPR_RTL (exp);
6693
6694 case CONSTRUCTOR:
6695 /* If we don't need the result, just ensure we evaluate any
6696 subexpressions. */
6697 if (ignore)
6698 {
6699 tree elt;
6700 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6701 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6702 EXPAND_MEMORY_USE_BAD);
6703 return const0_rtx;
6704 }
6705
6706 /* All elts simple constants => refer to a constant in memory. But
6707 if this is a non-BLKmode mode, let it store a field at a time
6708 since that should make a CONST_INT or CONST_DOUBLE when we
6709 fold. Likewise, if we have a target we can use, it is best to
6710 store directly into the target unless the type is large enough
6711 that memcpy will be used. If we are making an initializer and
6712 all operands are constant, put it in memory as well. */
6713 else if ((TREE_STATIC (exp)
6714 && ((mode == BLKmode
6715 && ! (target != 0 && safe_from_p (target, exp, 1)))
6716 || TREE_ADDRESSABLE (exp)
6717 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6718 && (! MOVE_BY_PIECES_P
6719 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6720 TYPE_ALIGN (type)))
6721 && ! mostly_zeros_p (exp))))
6722 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6723 {
6724 rtx constructor = output_constant_def (exp, 1);
6725
6726 if (modifier != EXPAND_CONST_ADDRESS
6727 && modifier != EXPAND_INITIALIZER
6728 && modifier != EXPAND_SUM)
6729 constructor = validize_mem (constructor);
6730
6731 return constructor;
6732 }
6733 else
6734 {
6735 /* Handle calls that pass values in multiple non-contiguous
6736 locations. The Irix 6 ABI has examples of this. */
6737 if (target == 0 || ! safe_from_p (target, exp, 1)
6738 || GET_CODE (target) == PARALLEL)
6739 target
6740 = assign_temp (build_qualified_type (type,
6741 (TYPE_QUALS (type)
6742 | (TREE_READONLY (exp)
6743 * TYPE_QUAL_CONST))),
6744 TREE_ADDRESSABLE (exp), 1, 1);
6745
6746 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6747 int_size_in_bytes (TREE_TYPE (exp)));
6748 return target;
6749 }
6750
6751 case INDIRECT_REF:
6752 {
6753 tree exp1 = TREE_OPERAND (exp, 0);
6754 tree index;
6755 tree string = string_constant (exp1, &index);
6756
6757 /* Try to optimize reads from const strings. */
6758 if (string
6759 && TREE_CODE (string) == STRING_CST
6760 && TREE_CODE (index) == INTEGER_CST
6761 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6762 && GET_MODE_CLASS (mode) == MODE_INT
6763 && GET_MODE_SIZE (mode) == 1
6764 && modifier != EXPAND_MEMORY_USE_WO)
6765 return
6766 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6767
6768 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6769 op0 = memory_address (mode, op0);
6770
6771 if (cfun && current_function_check_memory_usage
6772 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6773 {
6774 enum memory_use_mode memory_usage;
6775 memory_usage = get_memory_usage_from_modifier (modifier);
6776
6777 if (memory_usage != MEMORY_USE_DONT)
6778 {
6779 in_check_memory_usage = 1;
6780 emit_library_call (chkr_check_addr_libfunc,
6781 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6782 Pmode, GEN_INT (int_size_in_bytes (type)),
6783 TYPE_MODE (sizetype),
6784 GEN_INT (memory_usage),
6785 TYPE_MODE (integer_type_node));
6786 in_check_memory_usage = 0;
6787 }
6788 }
6789
6790 temp = gen_rtx_MEM (mode, op0);
6791 set_mem_attributes (temp, exp, 0);
6792
6793 /* If we are writing to this object and its type is a record with
6794 readonly fields, we must mark it as readonly so it will
6795 conflict with readonly references to those fields. */
6796 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
6797 RTX_UNCHANGING_P (temp) = 1;
6798
6799 return temp;
6800 }
6801
6802 case ARRAY_REF:
6803 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6804 abort ();
6805
6806 {
6807 tree array = TREE_OPERAND (exp, 0);
6808 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6809 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6810 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6811 HOST_WIDE_INT i;
6812
6813 /* Optimize the special-case of a zero lower bound.
6814
6815 We convert the low_bound to sizetype to avoid some problems
6816 with constant folding. (E.g. suppose the lower bound is 1,
6817 and its mode is QI. Without the conversion, (ARRAY
6818 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6819 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6820
6821 if (! integer_zerop (low_bound))
6822 index = size_diffop (index, convert (sizetype, low_bound));
6823
6824 /* Fold an expression like: "foo"[2].
6825 This is not done in fold so it won't happen inside &.
6826 Don't fold if this is for wide characters since it's too
6827 difficult to do correctly and this is a very rare case. */
6828
6829 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6830 && TREE_CODE (array) == STRING_CST
6831 && TREE_CODE (index) == INTEGER_CST
6832 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6833 && GET_MODE_CLASS (mode) == MODE_INT
6834 && GET_MODE_SIZE (mode) == 1)
6835 return
6836 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6837
6838 /* If this is a constant index into a constant array,
6839 just get the value from the array. Handle both the cases when
6840 we have an explicit constructor and when our operand is a variable
6841 that was declared const. */
6842
6843 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6844 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6845 && TREE_CODE (index) == INTEGER_CST
6846 && 0 > compare_tree_int (index,
6847 list_length (CONSTRUCTOR_ELTS
6848 (TREE_OPERAND (exp, 0)))))
6849 {
6850 tree elem;
6851
6852 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6853 i = TREE_INT_CST_LOW (index);
6854 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6855 ;
6856
6857 if (elem)
6858 return expand_expr (fold (TREE_VALUE (elem)), target,
6859 tmode, ro_modifier);
6860 }
6861
6862 else if (optimize >= 1
6863 && modifier != EXPAND_CONST_ADDRESS
6864 && modifier != EXPAND_INITIALIZER
6865 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6866 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6867 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6868 {
6869 if (TREE_CODE (index) == INTEGER_CST)
6870 {
6871 tree init = DECL_INITIAL (array);
6872
6873 if (TREE_CODE (init) == CONSTRUCTOR)
6874 {
6875 tree elem;
6876
6877 for (elem = CONSTRUCTOR_ELTS (init);
6878 (elem
6879 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6880 elem = TREE_CHAIN (elem))
6881 ;
6882
6883 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6884 return expand_expr (fold (TREE_VALUE (elem)), target,
6885 tmode, ro_modifier);
6886 }
6887 else if (TREE_CODE (init) == STRING_CST
6888 && 0 > compare_tree_int (index,
6889 TREE_STRING_LENGTH (init)))
6890 {
6891 tree type = TREE_TYPE (TREE_TYPE (init));
6892 enum machine_mode mode = TYPE_MODE (type);
6893
6894 if (GET_MODE_CLASS (mode) == MODE_INT
6895 && GET_MODE_SIZE (mode) == 1)
6896 return (GEN_INT
6897 (TREE_STRING_POINTER
6898 (init)[TREE_INT_CST_LOW (index)]));
6899 }
6900 }
6901 }
6902 }
6903 /* Fall through. */
6904
6905 case COMPONENT_REF:
6906 case BIT_FIELD_REF:
6907 case ARRAY_RANGE_REF:
6908 /* If the operand is a CONSTRUCTOR, we can just extract the
6909 appropriate field if it is present. Don't do this if we have
6910 already written the data since we want to refer to that copy
6911 and varasm.c assumes that's what we'll do. */
6912 if (code == COMPONENT_REF
6913 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6914 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6915 {
6916 tree elt;
6917
6918 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6919 elt = TREE_CHAIN (elt))
6920 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6921 /* We can normally use the value of the field in the
6922 CONSTRUCTOR. However, if this is a bitfield in
6923 an integral mode that we can fit in a HOST_WIDE_INT,
6924 we must mask only the number of bits in the bitfield,
6925 since this is done implicitly by the constructor. If
6926 the bitfield does not meet either of those conditions,
6927 we can't do this optimization. */
6928 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6929 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6930 == MODE_INT)
6931 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6932 <= HOST_BITS_PER_WIDE_INT))))
6933 {
6934 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6935 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6936 {
6937 HOST_WIDE_INT bitsize
6938 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6939
6940 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6941 {
6942 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6943 op0 = expand_and (op0, op1, target);
6944 }
6945 else
6946 {
6947 enum machine_mode imode
6948 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6949 tree count
6950 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6951 0);
6952
6953 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6954 target, 0);
6955 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6956 target, 0);
6957 }
6958 }
6959
6960 return op0;
6961 }
6962 }
6963
6964 {
6965 enum machine_mode mode1;
6966 HOST_WIDE_INT bitsize, bitpos;
6967 tree offset;
6968 int volatilep = 0;
6969 unsigned int alignment;
6970 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6971 &mode1, &unsignedp, &volatilep,
6972 &alignment);
6973 rtx orig_op0;
6974
6975 /* If we got back the original object, something is wrong. Perhaps
6976 we are evaluating an expression too early. In any event, don't
6977 infinitely recurse. */
6978 if (tem == exp)
6979 abort ();
6980
6981 /* If TEM's type is a union of variable size, pass TARGET to the inner
6982 computation, since it will need a temporary and TARGET is known
6983 to be safe to use for that. This occurs in unchecked conversion in Ada. */
6984
6985 orig_op0 = op0
6986 = expand_expr (tem,
6987 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6988 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6989 != INTEGER_CST)
6990 ? target : NULL_RTX),
6991 VOIDmode,
6992 (modifier == EXPAND_INITIALIZER
6993 || modifier == EXPAND_CONST_ADDRESS)
6994 ? modifier : EXPAND_NORMAL);
6995
6996 /* If this is a constant, put it into a register if it is a
6997 legitimate constant and OFFSET is 0, and into memory if it isn't. */
6998 if (CONSTANT_P (op0))
6999 {
7000 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7001 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7002 && offset == 0)
7003 op0 = force_reg (mode, op0);
7004 else
7005 op0 = validize_mem (force_const_mem (mode, op0));
7006 }
7007
7008 if (offset != 0)
7009 {
7010 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7011
7012 /* If this object is in a register, put it into memory.
7013 This case can't occur in C, but can in Ada if we have
7014 unchecked conversion of an expression from a scalar type to
7015 an array or record type. */
7016 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7017 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7018 {
7019 /* If the operand is a SAVE_EXPR, we can deal with this by
7020 forcing the SAVE_EXPR into memory. */
7021 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7022 {
7023 put_var_into_stack (TREE_OPERAND (exp, 0));
7024 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7025 }
7026 else
7027 {
7028 tree nt
7029 = build_qualified_type (TREE_TYPE (tem),
7030 (TYPE_QUALS (TREE_TYPE (tem))
7031 | TYPE_QUAL_CONST));
7032 rtx memloc = assign_temp (nt, 1, 1, 1);
7033
7034 mark_temp_addr_taken (memloc);
7035 emit_move_insn (memloc, op0);
7036 op0 = memloc;
7037 }
7038 }
7039
7040 if (GET_CODE (op0) != MEM)
7041 abort ();
7042
7043 if (GET_MODE (offset_rtx) != ptr_mode)
7044 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7045
7046 #ifdef POINTERS_EXTEND_UNSIGNED
7047 if (GET_MODE (offset_rtx) != Pmode)
7048 offset_rtx = convert_memory_address (Pmode, offset_rtx);
7049 #endif
7050
7051 /* A constant address in OP0 can have VOIDmode; we must not try
7052 to call force_reg in that case, so avoid it. */
7053 if (GET_CODE (op0) == MEM
7054 && GET_MODE (op0) == BLKmode
7055 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7056 && bitsize != 0
7057 && (bitpos % bitsize) == 0
7058 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7059 && alignment == GET_MODE_ALIGNMENT (mode1))
7060 {
7061 rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7062
7063 if (GET_CODE (XEXP (temp, 0)) == REG)
7064 op0 = temp;
7065 else
7066 op0 = (replace_equiv_address
7067 (op0,
7068 force_reg (GET_MODE (XEXP (temp, 0)),
7069 XEXP (temp, 0))));
7070 bitpos = 0;
7071 }
7072
7073 op0 = offset_address (op0, offset_rtx,
7074 highest_pow2_factor (offset));
7075 }
7076
7077 /* Don't forget about volatility even if this is a bitfield. */
7078 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7079 {
7080 if (op0 == orig_op0)
7081 op0 = copy_rtx (op0);
7082
7083 MEM_VOLATILE_P (op0) = 1;
7084 }
7085
7086 /* Check the access. */
7087 if (cfun != 0 && current_function_check_memory_usage
7088 && GET_CODE (op0) == MEM)
7089 {
7090 enum memory_use_mode memory_usage;
7091 memory_usage = get_memory_usage_from_modifier (modifier);
7092
7093 if (memory_usage != MEMORY_USE_DONT)
7094 {
7095 rtx to;
7096 int size;
7097
7098 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
7099 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
7100
7101 /* Check the access right of the pointer. */
7102 in_check_memory_usage = 1;
7103 if (size > BITS_PER_UNIT)
7104 emit_library_call (chkr_check_addr_libfunc,
7105 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
7106 Pmode, GEN_INT (size / BITS_PER_UNIT),
7107 TYPE_MODE (sizetype),
7108 GEN_INT (memory_usage),
7109 TYPE_MODE (integer_type_node));
7110 in_check_memory_usage = 0;
7111 }
7112 }
7113
7114 /* In cases where an aligned union has an unaligned object
7115 as a field, we might be extracting a BLKmode value from
7116 an integer-mode (e.g., SImode) object. Handle this case
7117 by doing the extract into an object as wide as the field
7118 (which we know to be the width of a basic mode), then
7119 storing into memory, and changing the mode to BLKmode. */
7120 if (mode1 == VOIDmode
7121 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7122 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7123 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7124 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7125 && modifier != EXPAND_CONST_ADDRESS
7126 && modifier != EXPAND_INITIALIZER)
7127 /* If the field isn't aligned enough to fetch as a memref,
7128 fetch it as a bit field. */
7129 || (mode1 != BLKmode
7130 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
7131 && ((TYPE_ALIGN (TREE_TYPE (tem))
7132 < GET_MODE_ALIGNMENT (mode))
7133 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7134 /* If the type and the field are a constant size and the
7135 size of the type isn't the same size as the bitfield,
7136 we must use bitfield operations. */
7137 || (bitsize >= 0
7138 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7139 == INTEGER_CST)
7140 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7141 bitsize))
7142 || (mode == BLKmode
7143 && SLOW_UNALIGNED_ACCESS (mode, alignment)
7144 && (TYPE_ALIGN (type) > alignment
7145 || bitpos % TYPE_ALIGN (type) != 0)))
7146 {
7147 enum machine_mode ext_mode = mode;
7148
7149 if (ext_mode == BLKmode
7150 && ! (target != 0 && GET_CODE (op0) == MEM
7151 && GET_CODE (target) == MEM
7152 && bitpos % BITS_PER_UNIT == 0))
7153 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7154
7155 if (ext_mode == BLKmode)
7156 {
7157 /* In this case, BITPOS must start at a byte boundary and
7158 TARGET, if specified, must be a MEM. */
7159 if (GET_CODE (op0) != MEM
7160 || (target != 0 && GET_CODE (target) != MEM)
7161 || bitpos % BITS_PER_UNIT != 0)
7162 abort ();
7163
7164 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7165 if (target == 0)
7166 target = assign_temp (type, 0, 1, 1);
7167
7168 emit_block_move (target, op0,
7169 bitsize == -1 ? expr_size (exp)
7170 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7171 / BITS_PER_UNIT));
7172
7173 return target;
7174 }
7175
7176 op0 = validize_mem (op0);
7177
7178 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7179 mark_reg_pointer (XEXP (op0, 0), alignment);
7180
7181 op0 = extract_bit_field (op0, bitsize, bitpos,
7182 unsignedp, target, ext_mode, ext_mode,
7183 alignment,
7184 int_size_in_bytes (TREE_TYPE (tem)));
7185
7186 /* If the result is a record type and BITSIZE is narrower than
7187 the mode of OP0, an integral mode, and this is a big endian
7188 machine, we must put the field into the high-order bits. */
7189 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7190 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7191 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7192 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7193 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7194 - bitsize),
7195 op0, 1);
7196
7197 if (mode == BLKmode)
7198 {
7199 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
7200 TYPE_QUAL_CONST);
7201 rtx new = assign_temp (nt, 0, 1, 1);
7202
7203 emit_move_insn (new, op0);
7204 op0 = copy_rtx (new);
7205 PUT_MODE (op0, BLKmode);
7206 }
7207
7208 return op0;
7209 }
7210
7211 /* If the result is BLKmode, use that to access the object
7212 now as well. */
7213 if (mode == BLKmode)
7214 mode1 = BLKmode;
7215
7216 /* Get a reference to just this component. */
7217 if (modifier == EXPAND_CONST_ADDRESS
7218 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7219 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7220 else
7221 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7222
7223 if (op0 == orig_op0)
7224 op0 = copy_rtx (op0);
7225
7226 set_mem_attributes (op0, exp, 0);
7227 if (GET_CODE (XEXP (op0, 0)) == REG)
7228 mark_reg_pointer (XEXP (op0, 0), alignment);
7229
7230 MEM_VOLATILE_P (op0) |= volatilep;
7231 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7232 || modifier == EXPAND_CONST_ADDRESS
7233 || modifier == EXPAND_INITIALIZER)
7234 return op0;
7235 else if (target == 0)
7236 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7237
7238 convert_move (target, op0, unsignedp);
7239 return target;
7240 }
7241
7242 case VTABLE_REF:
7243 {
7244 rtx insn, before = get_last_insn (), vtbl_ref;
7245
7246 /* Evaluate the interior expression. */
7247 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7248 tmode, modifier);
7249
7250 /* Get or create an instruction off which to hang a note. */
7251 if (REG_P (subtarget))
7252 {
7253 target = subtarget;
7254 insn = get_last_insn ();
7255 if (insn == before)
7256 abort ();
7257 if (! INSN_P (insn))
7258 insn = prev_nonnote_insn (insn);
7259 }
7260 else
7261 {
7262 target = gen_reg_rtx (GET_MODE (subtarget));
7263 insn = emit_move_insn (target, subtarget);
7264 }
7265
7266 /* Collect the data for the note. */
7267 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7268 vtbl_ref = plus_constant (vtbl_ref,
7269 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7270 /* Discard the initial CONST that was added. */
7271 vtbl_ref = XEXP (vtbl_ref, 0);
7272
7273 REG_NOTES (insn)
7274 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7275
7276 return target;
7277 }
7278
7279 /* Intended for a reference to a buffer of a file-object in Pascal.
7280 But it's not certain that a special tree code will really be
7281 necessary for these. INDIRECT_REF might work for them. */
7282 case BUFFER_REF:
7283 abort ();
7284
7285 case IN_EXPR:
7286 {
7287 /* Pascal set IN expression.
7288
7289 Algorithm:
7290 rlo = set_low - (set_low%bits_per_word);
7291 the_word = set [ (index - rlo)/bits_per_word ];
7292 bit_index = index % bits_per_word;
7293 bitmask = 1 << bit_index;
7294 return !!(the_word & bitmask); */
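/* For illustration, with set_low == 0 and 8-bit units: testing index 11
   reads the_word = set[(11 - 0) / 8] == set[1] and checks bit
   11 % 8 == 3, so the result is !!(set[1] & (1 << 3)). */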
7295
7296 tree set = TREE_OPERAND (exp, 0);
7297 tree index = TREE_OPERAND (exp, 1);
7298 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7299 tree set_type = TREE_TYPE (set);
7300 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7301 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7302 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7303 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7304 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7305 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7306 rtx setaddr = XEXP (setval, 0);
7307 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7308 rtx rlow;
7309 rtx diff, quo, rem, addr, bit, result;
7310
7311 /* If domain is empty, answer is no. Likewise if index is constant
7312 and out of bounds. */
7313 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7314 && TREE_CODE (set_low_bound) == INTEGER_CST
7315 && tree_int_cst_lt (set_high_bound, set_low_bound))
7316 || (TREE_CODE (index) == INTEGER_CST
7317 && TREE_CODE (set_low_bound) == INTEGER_CST
7318 && tree_int_cst_lt (index, set_low_bound))
7319 || (TREE_CODE (set_high_bound) == INTEGER_CST
7320 && TREE_CODE (index) == INTEGER_CST
7321 && tree_int_cst_lt (set_high_bound, index))))
7322 return const0_rtx;
7323
7324 if (target == 0)
7325 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7326
7327 /* If we get here, we have to generate the code for both cases
7328 (in range and out of range). */
7329
7330 op0 = gen_label_rtx ();
7331 op1 = gen_label_rtx ();
7332
7333 if (! (GET_CODE (index_val) == CONST_INT
7334 && GET_CODE (lo_r) == CONST_INT))
7335 {
7336 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7337 GET_MODE (index_val), iunsignedp, 0, op1);
7338 }
7339
7340 if (! (GET_CODE (index_val) == CONST_INT
7341 && GET_CODE (hi_r) == CONST_INT))
7342 {
7343 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7344 GET_MODE (index_val), iunsignedp, 0, op1);
7345 }
7346
7347 /* Calculate the element number of bit zero in the first word
7348 of the set. */
7349 if (GET_CODE (lo_r) == CONST_INT)
7350 rlow = GEN_INT (INTVAL (lo_r)
7351 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7352 else
7353 rlow = expand_binop (index_mode, and_optab, lo_r,
7354 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7355 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7356
7357 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7358 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7359
7360 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7361 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7362 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7363 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7364
7365 addr = memory_address (byte_mode,
7366 expand_binop (index_mode, add_optab, diff,
7367 setaddr, NULL_RTX, iunsignedp,
7368 OPTAB_LIB_WIDEN));
7369
7370 /* Extract the bit we want to examine. */
7371 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7372 gen_rtx_MEM (byte_mode, addr),
7373 make_tree (TREE_TYPE (index), rem),
7374 NULL_RTX, 1);
7375 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7376 GET_MODE (target) == byte_mode ? target : 0,
7377 1, OPTAB_LIB_WIDEN);
7378
7379 if (result != target)
7380 convert_move (target, result, 1);
7381
7382 /* Output the code to handle the out-of-range case. */
7383 emit_jump (op0);
7384 emit_label (op1);
7385 emit_move_insn (target, const0_rtx);
7386 emit_label (op0);
7387 return target;
7388 }
7389
7390 case WITH_CLEANUP_EXPR:
7391 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7392 {
7393 WITH_CLEANUP_EXPR_RTL (exp)
7394 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7395 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7396
7397 /* That's it for this cleanup. */
7398 TREE_OPERAND (exp, 1) = 0;
7399 }
7400 return WITH_CLEANUP_EXPR_RTL (exp);
7401
7402 case CLEANUP_POINT_EXPR:
7403 {
7404 /* Start a new binding layer that will keep track of all cleanup
7405 actions to be performed. */
7406 expand_start_bindings (2);
7407
7408 target_temp_slot_level = temp_slot_level;
7409
7410 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7411 /* If we're going to use this value, load it up now. */
7412 if (! ignore)
7413 op0 = force_not_mem (op0);
7414 preserve_temp_slots (op0);
7415 expand_end_bindings (NULL_TREE, 0, 0);
7416 }
7417 return op0;
7418
7419 case CALL_EXPR:
7420 /* Check for a built-in function. */
7421 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7422 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7423 == FUNCTION_DECL)
7424 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7425 {
7426 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7427 == BUILT_IN_FRONTEND)
7428 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7429 else
7430 return expand_builtin (exp, target, subtarget, tmode, ignore);
7431 }
7432
7433 return expand_call (exp, target, ignore);
7434
7435 case NON_LVALUE_EXPR:
7436 case NOP_EXPR:
7437 case CONVERT_EXPR:
7438 case REFERENCE_EXPR:
7439 if (TREE_OPERAND (exp, 0) == error_mark_node)
7440 return const0_rtx;
7441
7442 if (TREE_CODE (type) == UNION_TYPE)
7443 {
7444 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7445
7446 /* If both input and output are BLKmode, this conversion
7447 isn't actually doing anything unless we need to make the
7448 alignment stricter. */
7449 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7450 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7451 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7452 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7453 modifier);
7454
7455 if (target == 0)
7456 target = assign_temp (type, 0, 1, 1);
7457
7458 if (GET_CODE (target) == MEM)
7459 /* Store data into beginning of memory target. */
7460 store_expr (TREE_OPERAND (exp, 0),
7461 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7462
7463 else if (GET_CODE (target) == REG)
7464 /* Store this field into a union of the proper type. */
7465 store_field (target,
7466 MIN ((int_size_in_bytes (TREE_TYPE
7467 (TREE_OPERAND (exp, 0)))
7468 * BITS_PER_UNIT),
7469 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7470 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7471 VOIDmode, 0, BITS_PER_UNIT,
7472 int_size_in_bytes (type), 0);
7473 else
7474 abort ();
7475
7476 /* Return the entire union. */
7477 return target;
7478 }
7479
7480 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7481 {
7482 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7483 ro_modifier);
7484
7485 /* If the signedness of the conversion differs and OP0 is
7486 a promoted SUBREG, clear that indication since we now
7487 have to do the proper extension. */
7488 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7489 && GET_CODE (op0) == SUBREG)
7490 SUBREG_PROMOTED_VAR_P (op0) = 0;
7491
7492 return op0;
7493 }
7494
7495 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7496 if (GET_MODE (op0) == mode)
7497 return op0;
7498
7499 /* If OP0 is a constant, just convert it into the proper mode. */
7500 if (CONSTANT_P (op0))
7501 return
7502 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7503 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7504
7505 if (modifier == EXPAND_INITIALIZER)
7506 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7507
7508 if (target == 0)
7509 return
7510 convert_to_mode (mode, op0,
7511 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7512 else
7513 convert_move (target, op0,
7514 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7515 return target;
7516
7517 case PLUS_EXPR:
7518 /* We come here from MINUS_EXPR when the second operand is a
7519 constant. */
7520 plus_expr:
7521 this_optab = ! unsignedp && flag_trapv
7522 && (GET_MODE_CLASS(mode) == MODE_INT)
7523 ? addv_optab : add_optab;
7524
7525 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7526 something else, make sure we add the register to the constant and
7527 then to the other thing. This case can occur during strength
7528 reduction and doing it this way will produce better code if the
7529 frame pointer or argument pointer is eliminated.
7530
7531 fold-const.c will ensure that the constant is always in the inner
7532 PLUS_EXPR, so the only case we need to do anything about is if
7533 sp, ap, or fp is our second argument, in which case we must swap
7534 the innermost first argument and our second argument. */
7535
7536 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7537 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7538 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7539 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7540 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7541 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7542 {
7543 tree t = TREE_OPERAND (exp, 1);
7544
7545 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7546 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7547 }
7548
7549 /* If the result is to be ptr_mode and we are adding an integer to
7550 something, we might be forming a constant. So try to use
7551 plus_constant. If it produces a sum and we can't accept it,
7552 use force_operand. This allows P = &ARR[const] to generate
7553 efficient code on machines where a SYMBOL_REF is not a valid
7554 address.
7555
7556 If this is an EXPAND_SUM call, always return the sum. */
7557 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7558 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7559 {
7560 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7561 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7562 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7563 {
7564 rtx constant_part;
7565
7566 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7567 EXPAND_SUM);
7568 /* Use immed_double_const to ensure that the constant is
7569 truncated according to the mode of OP1, then sign extended
7570 to a HOST_WIDE_INT. Using the constant directly can result
7571 in non-canonical RTL in a 64x32 cross compile. */
7572 constant_part
7573 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7574 (HOST_WIDE_INT) 0,
7575 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7576 op1 = plus_constant (op1, INTVAL (constant_part));
7577 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7578 op1 = force_operand (op1, target);
7579 return op1;
7580 }
7581
7582 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7583 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7584 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7585 {
7586 rtx constant_part;
7587
7588 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7589 EXPAND_SUM);
7590 if (! CONSTANT_P (op0))
7591 {
7592 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7593 VOIDmode, modifier);
7594 /* Don't go to both_summands if modifier
7595 says it's not right to return a PLUS. */
7596 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7597 goto binop2;
7598 goto both_summands;
7599 }
7600 /* Use immed_double_const to ensure that the constant is
7601 truncated according to the mode of OP0, then sign extended
7602 to a HOST_WIDE_INT. Using the constant directly can result
7603 in non-canonical RTL in a 64x32 cross compile. */
7604 constant_part
7605 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7606 (HOST_WIDE_INT) 0,
7607 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7608 op0 = plus_constant (op0, INTVAL (constant_part));
7609 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7610 op0 = force_operand (op0, target);
7611 return op0;
7612 }
7613 }
7614
7615 /* No sense saving up arithmetic to be done
7616 if it's all in the wrong mode to form part of an address.
7617 And force_operand won't know whether to sign-extend or
7618 zero-extend. */
7619 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7620 || mode != ptr_mode)
7621 goto binop;
7622
7623 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7624 subtarget = 0;
7625
7626 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7627 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7628
7629 both_summands:
7630 /* Make sure any term that's a sum with a constant comes last. */
7631 if (GET_CODE (op0) == PLUS
7632 && CONSTANT_P (XEXP (op0, 1)))
7633 {
7634 temp = op0;
7635 op0 = op1;
7636 op1 = temp;
7637 }
7638 /* If adding to a sum including a constant,
7639 associate it to put the constant outside. */
7640 if (GET_CODE (op1) == PLUS
7641 && CONSTANT_P (XEXP (op1, 1)))
7642 {
7643 rtx constant_term = const0_rtx;
7644
7645 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7646 if (temp != 0)
7647 op0 = temp;
7648 /* Ensure that MULT comes first if there is one. */
7649 else if (GET_CODE (op0) == MULT)
7650 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7651 else
7652 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7653
7654 /* Let's also eliminate constants from op0 if possible. */
7655 op0 = eliminate_constant_term (op0, &constant_term);
7656
7657 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7658 their sum should be a constant. Form it into OP1, since the
7659 result we want will then be OP0 + OP1. */
7660
7661 temp = simplify_binary_operation (PLUS, mode, constant_term,
7662 XEXP (op1, 1));
7663 if (temp != 0)
7664 op1 = temp;
7665 else
7666 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7667 }
7668
7669 /* Put a constant term last and put a multiplication first. */
7670 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7671 temp = op1, op1 = op0, op0 = temp;
7672
7673 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7674 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7675
7676 case MINUS_EXPR:
7677 /* For initializers, we are allowed to return a MINUS of two
7678 symbolic constants. Here we handle all cases when both operands
7679 are constant. */
7680 /* Handle difference of two symbolic constants,
7681 for the sake of an initializer. */
7682 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7683 && really_constant_p (TREE_OPERAND (exp, 0))
7684 && really_constant_p (TREE_OPERAND (exp, 1)))
7685 {
7686 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7687 VOIDmode, ro_modifier);
7688 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7689 VOIDmode, ro_modifier);
7690
7691 /* If the last operand is a CONST_INT, use plus_constant of
7692 the negated constant. Else make the MINUS. */
7693 if (GET_CODE (op1) == CONST_INT)
7694 return plus_constant (op0, - INTVAL (op1));
7695 else
7696 return gen_rtx_MINUS (mode, op0, op1);
7697 }
7698 /* Convert A - const to A + (-const). */
7699 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7700 {
7701 tree negated = fold (build1 (NEGATE_EXPR, type,
7702 TREE_OPERAND (exp, 1)));
7703
7704 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7705 /* If we can't negate the constant in TYPE, leave it alone and
7706 expand_binop will negate it for us. We used to try to do it
7707 here in the signed version of TYPE, but that doesn't work
7708 on POINTER_TYPEs. */;
7709 else
7710 {
7711 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7712 goto plus_expr;
7713 }
7714 }
7715 this_optab = ! unsignedp && flag_trapv
7716 && (GET_MODE_CLASS(mode) == MODE_INT)
7717 ? subv_optab : sub_optab;
7718 goto binop;
7719
7720 case MULT_EXPR:
7721 /* If first operand is constant, swap them.
7722 Thus the following special case checks need only
7723 check the second operand. */
7724 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7725 {
7726 tree t1 = TREE_OPERAND (exp, 0);
7727 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7728 TREE_OPERAND (exp, 1) = t1;
7729 }
7730
7731 /* Attempt to return something suitable for generating an
7732 indexed address, for machines that support that. */
7733
7734 if (modifier == EXPAND_SUM && mode == ptr_mode
7735 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7736 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7737 {
7738 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7739 EXPAND_SUM);
7740
7741 /* Apply distributive law if OP0 is x+c. */
7742 if (GET_CODE (op0) == PLUS
7743 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7744 return
7745 gen_rtx_PLUS
7746 (mode,
7747 gen_rtx_MULT
7748 (mode, XEXP (op0, 0),
7749 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7750 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7751 * INTVAL (XEXP (op0, 1))));
7752
7753 if (GET_CODE (op0) != REG)
7754 op0 = force_operand (op0, NULL_RTX);
7755 if (GET_CODE (op0) != REG)
7756 op0 = copy_to_mode_reg (mode, op0);
7757
7758 return
7759 gen_rtx_MULT (mode, op0,
7760 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7761 }
7762
7763 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7764 subtarget = 0;
7765
7766 /* Check for multiplying things that have been extended
7767 from a narrower type. If this machine supports multiplying
7768 in that narrower type with a result in the desired type,
7769 do it that way, and avoid the explicit type-conversion. */
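/* A sketch of the case handled below, assuming a target where short is
   16 bits and int is 32 bits: (int) s1 * (int) s2, with s1 and s2 of
   type short (names purely illustrative), can use a single 16x16->32
   widening multiply instead of extending both operands to int and
   doing a full 32x32 multiply. */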
7770 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7771 && TREE_CODE (type) == INTEGER_TYPE
7772 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7773 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7774 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7775 && int_fits_type_p (TREE_OPERAND (exp, 1),
7776 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7777 /* Don't use a widening multiply if a shift will do. */
7778 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7779 > HOST_BITS_PER_WIDE_INT)
7780 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7781 ||
7782 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7783 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7784 ==
7785 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7786 /* If both operands are extended, they must either both
7787 be zero-extended or both be sign-extended. */
7788 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7789 ==
7790 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7791 {
7792 enum machine_mode innermode
7793 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7794 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7795 ? smul_widen_optab : umul_widen_optab);
7796 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7797 ? umul_widen_optab : smul_widen_optab);
7798 if (mode == GET_MODE_WIDER_MODE (innermode))
7799 {
7800 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7801 {
7802 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7803 NULL_RTX, VOIDmode, 0);
7804 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7805 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7806 VOIDmode, 0);
7807 else
7808 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7809 NULL_RTX, VOIDmode, 0);
7810 goto binop2;
7811 }
7812 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7813 && innermode == word_mode)
7814 {
7815 rtx htem;
7816 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7817 NULL_RTX, VOIDmode, 0);
7818 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7819 op1 = convert_modes (innermode, mode,
7820 expand_expr (TREE_OPERAND (exp, 1),
7821 NULL_RTX, VOIDmode, 0),
7822 unsignedp);
7823 else
7824 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7825 NULL_RTX, VOIDmode, 0);
7826 temp = expand_binop (mode, other_optab, op0, op1, target,
7827 unsignedp, OPTAB_LIB_WIDEN);
7828 htem = expand_mult_highpart_adjust (innermode,
7829 gen_highpart (innermode, temp),
7830 op0, op1,
7831 gen_highpart (innermode, temp),
7832 unsignedp);
7833 emit_move_insn (gen_highpart (innermode, temp), htem);
7834 return temp;
7835 }
7836 }
7837 }
7838 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7839 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7840 return expand_mult (mode, op0, op1, target, unsignedp);
7841
7842 case TRUNC_DIV_EXPR:
7843 case FLOOR_DIV_EXPR:
7844 case CEIL_DIV_EXPR:
7845 case ROUND_DIV_EXPR:
7846 case EXACT_DIV_EXPR:
7847 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7848 subtarget = 0;
7849 /* Possible optimization: compute the dividend with EXPAND_SUM;
7850 then, if the divisor is constant, we can optimize the case
7851 where some terms of the dividend have coefficients divisible by it. */
7852 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7853 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7854 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7855
7856 case RDIV_EXPR:
7857 /* Emit a/b as a*(1/b). Later, CSE may be able to share the reciprocal,
7858 saving an expensive divide. If not, combine will rebuild the original
7859 computation. */
7860 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7861 && !real_onep (TREE_OPERAND (exp, 0)))
7862 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7863 build (RDIV_EXPR, type,
7864 build_real (type, dconst1),
7865 TREE_OPERAND (exp, 1))),
7866 target, tmode, unsignedp);
7867 this_optab = sdiv_optab;
7868 goto binop;
7869
7870 case TRUNC_MOD_EXPR:
7871 case FLOOR_MOD_EXPR:
7872 case CEIL_MOD_EXPR:
7873 case ROUND_MOD_EXPR:
7874 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7875 subtarget = 0;
7876 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7877 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7878 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7879
7880 case FIX_ROUND_EXPR:
7881 case FIX_FLOOR_EXPR:
7882 case FIX_CEIL_EXPR:
7883 abort (); /* Not used for C. */
7884
7885 case FIX_TRUNC_EXPR:
7886 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7887 if (target == 0)
7888 target = gen_reg_rtx (mode);
7889 expand_fix (target, op0, unsignedp);
7890 return target;
7891
7892 case FLOAT_EXPR:
7893 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7894 if (target == 0)
7895 target = gen_reg_rtx (mode);
7896 /* expand_float can't figure out what to do if FROM has VOIDmode.
7897 So give it the correct mode. With -O, cse will optimize this. */
7898 if (GET_MODE (op0) == VOIDmode)
7899 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7900 op0);
7901 expand_float (target, op0,
7902 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7903 return target;
7904
7905 case NEGATE_EXPR:
7906 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7907 temp = expand_unop (mode,
7908 ! unsignedp && flag_trapv
7909 && (GET_MODE_CLASS(mode) == MODE_INT)
7910 ? negv_optab : neg_optab, op0, target, 0);
7911 if (temp == 0)
7912 abort ();
7913 return temp;
7914
7915 case ABS_EXPR:
7916 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7917
7918 /* Handle complex values specially. */
7919 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7920 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7921 return expand_complex_abs (mode, op0, target, unsignedp);
7922
7923 /* Unsigned abs is simply the operand. Testing here means we don't
7924 risk generating incorrect code below. */
7925 if (TREE_UNSIGNED (type))
7926 return op0;
7927
7928 return expand_abs (mode, op0, target, unsignedp,
7929 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7930
7931 case MAX_EXPR:
7932 case MIN_EXPR:
7933 target = original_target;
7934 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7935 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7936 || GET_MODE (target) != mode
7937 || (GET_CODE (target) == REG
7938 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7939 target = gen_reg_rtx (mode);
7940 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7941 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7942
7943 /* First try to do it with a special MIN or MAX instruction.
7944 If that does not win, use a conditional jump to select the proper
7945 value. */
7946 this_optab = (TREE_UNSIGNED (type)
7947 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7948 : (code == MIN_EXPR ? smin_optab : smax_optab));
7949
7950 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7951 OPTAB_WIDEN);
7952 if (temp != 0)
7953 return temp;
7954
7955 /* At this point, a MEM target is no longer useful; we will get better
7956 code without it. */
7957
7958 if (GET_CODE (target) == MEM)
7959 target = gen_reg_rtx (mode);
7960
7961 if (target != op0)
7962 emit_move_insn (target, op0);
7963
7964 op0 = gen_label_rtx ();
7965
7966 /* If this mode is an integer too wide to compare properly,
7967 compare word by word. Rely on cse to optimize constant cases. */
7968 if (GET_MODE_CLASS (mode) == MODE_INT
7969 && ! can_compare_p (GE, mode, ccp_jump))
7970 {
7971 if (code == MAX_EXPR)
7972 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7973 target, op1, NULL_RTX, op0);
7974 else
7975 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7976 op1, target, NULL_RTX, op0);
7977 }
7978 else
7979 {
7980 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7981 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7982 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7983 op0);
7984 }
7985 emit_move_insn (target, op1);
7986 emit_label (op0);
7987 return target;
7988
7989 case BIT_NOT_EXPR:
7990 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7991 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7992 if (temp == 0)
7993 abort ();
7994 return temp;
7995
7996 case FFS_EXPR:
7997 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7998 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7999 if (temp == 0)
8000 abort ();
8001 return temp;
8002
8003 /* ??? Can optimize bitwise operations with one arg constant.
8004 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8005 and (a bitwise1 b) bitwise2 b (etc)
8006 but that is probably not worth while. */
8007
8008 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8009 boolean values when we want in all cases to compute both of them. In
8010 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8011 as actual zero-or-1 values and then bitwise anding. In cases where
8012 there cannot be any side effects, better code would be made by
8013 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8014 how to recognize those cases. */
8015
8016 case TRUTH_AND_EXPR:
8017 case BIT_AND_EXPR:
8018 this_optab = and_optab;
8019 goto binop;
8020
8021 case TRUTH_OR_EXPR:
8022 case BIT_IOR_EXPR:
8023 this_optab = ior_optab;
8024 goto binop;
8025
8026 case TRUTH_XOR_EXPR:
8027 case BIT_XOR_EXPR:
8028 this_optab = xor_optab;
8029 goto binop;
8030
8031 case LSHIFT_EXPR:
8032 case RSHIFT_EXPR:
8033 case LROTATE_EXPR:
8034 case RROTATE_EXPR:
8035 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8036 subtarget = 0;
8037 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8038 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8039 unsignedp);
8040
8041 /* Could determine the answer when only additive constants differ. Also,
8042 the addition of one can be handled by changing the condition. */
8043 case LT_EXPR:
8044 case LE_EXPR:
8045 case GT_EXPR:
8046 case GE_EXPR:
8047 case EQ_EXPR:
8048 case NE_EXPR:
8049 case UNORDERED_EXPR:
8050 case ORDERED_EXPR:
8051 case UNLT_EXPR:
8052 case UNLE_EXPR:
8053 case UNGT_EXPR:
8054 case UNGE_EXPR:
8055 case UNEQ_EXPR:
8056 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8057 if (temp != 0)
8058 return temp;
8059
8060 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8061 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8062 && original_target
8063 && GET_CODE (original_target) == REG
8064 && (GET_MODE (original_target)
8065 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8066 {
8067 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8068 VOIDmode, 0);
8069
8070 if (temp != original_target)
8071 temp = copy_to_reg (temp);
8072
8073 op1 = gen_label_rtx ();
8074 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8075 GET_MODE (temp), unsignedp, 0, op1);
8076 emit_move_insn (temp, const1_rtx);
8077 emit_label (op1);
8078 return temp;
8079 }
8080
8081 /* If no set-flag instruction, must generate a conditional
8082 store into a temporary variable. Drop through
8083 and handle this like && and ||. */
8084
8085 case TRUTH_ANDIF_EXPR:
8086 case TRUTH_ORIF_EXPR:
8087 if (! ignore
8088 && (target == 0 || ! safe_from_p (target, exp, 1)
8089 /* Make sure we don't have a hard reg (such as function's return
8090 value) live across basic blocks, if not optimizing. */
8091 || (!optimize && GET_CODE (target) == REG
8092 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8093 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8094
8095 if (target)
8096 emit_clr_insn (target);
8097
8098 op1 = gen_label_rtx ();
8099 jumpifnot (exp, op1);
8100
8101 if (target)
8102 emit_0_to_1_insn (target);
8103
8104 emit_label (op1);
8105 return ignore ? const0_rtx : target;
8106
8107 case TRUTH_NOT_EXPR:
8108 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8109 /* The parser is careful to generate TRUTH_NOT_EXPR
8110 only with operands that are always zero or one. */
8111 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8112 target, 1, OPTAB_LIB_WIDEN);
8113 if (temp == 0)
8114 abort ();
8115 return temp;
8116
8117 case COMPOUND_EXPR:
8118 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8119 emit_queue ();
8120 return expand_expr (TREE_OPERAND (exp, 1),
8121 (ignore ? const0_rtx : target),
8122 VOIDmode, 0);
8123
8124 case COND_EXPR:
8125 /* If we would have a "singleton" (see below) were it not for a
8126 conversion in each arm, bring that conversion back out. */
8127 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8128 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8129 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8130 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8131 {
8132 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8133 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8134
8135 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8136 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8137 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8138 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8139 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8140 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8141 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8142 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8143 return expand_expr (build1 (NOP_EXPR, type,
8144 build (COND_EXPR, TREE_TYPE (iftrue),
8145 TREE_OPERAND (exp, 0),
8146 iftrue, iffalse)),
8147 target, tmode, modifier);
8148 }
8149
8150 {
8151 /* Note that COND_EXPRs whose type is a structure or union
8152 are required to be constructed to contain assignments of
8153 a temporary variable, so that we can evaluate them here
8154 for side effect only. If type is void, we must do likewise. */
8155
8156 /* If an arm of the branch requires a cleanup,
8157 only that cleanup is performed. */
8158
8159 tree singleton = 0;
8160 tree binary_op = 0, unary_op = 0;
8161
8162 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8163 convert it to our mode, if necessary. */
8164 if (integer_onep (TREE_OPERAND (exp, 1))
8165 && integer_zerop (TREE_OPERAND (exp, 2))
8166 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8167 {
8168 if (ignore)
8169 {
8170 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8171 ro_modifier);
8172 return const0_rtx;
8173 }
8174
8175 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8176 if (GET_MODE (op0) == mode)
8177 return op0;
8178
8179 if (target == 0)
8180 target = gen_reg_rtx (mode);
8181 convert_move (target, op0, unsignedp);
8182 return target;
8183 }
8184
8185 /* Check for X ? A + B : A. If we have this, we can copy A to the
8186 output and conditionally add B. Similarly for unary operations.
8187 Don't do this if X has side-effects because those side effects
8188 might affect A or B and the "?" operation is a sequence point in
8189 ANSI. (operand_equal_p tests for side effects.) */
8190
8191 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8192 && operand_equal_p (TREE_OPERAND (exp, 2),
8193 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8194 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8195 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8196 && operand_equal_p (TREE_OPERAND (exp, 1),
8197 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8198 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8199 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8200 && operand_equal_p (TREE_OPERAND (exp, 2),
8201 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8202 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8203 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8204 && operand_equal_p (TREE_OPERAND (exp, 1),
8205 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8206 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8207
8208 /* If we are not to produce a result, we have no target. Otherwise,
8209 if a target was specified use it; it will not be used as an
8210 intermediate target unless it is safe. If no target, use a
8211 temporary. */
8212
8213 if (ignore)
8214 temp = 0;
8215 else if (original_target
8216 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8217 || (singleton && GET_CODE (original_target) == REG
8218 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8219 && original_target == var_rtx (singleton)))
8220 && GET_MODE (original_target) == mode
8221 #ifdef HAVE_conditional_move
8222 && (! can_conditionally_move_p (mode)
8223 || GET_CODE (original_target) == REG
8224 || TREE_ADDRESSABLE (type))
8225 #endif
8226 && (GET_CODE (original_target) != MEM
8227 || TREE_ADDRESSABLE (type)))
8228 temp = original_target;
8229 else if (TREE_ADDRESSABLE (type))
8230 abort ();
8231 else
8232 temp = assign_temp (type, 0, 0, 1);
8233
8234 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8235 do the test of X as a store-flag operation, do this as
8236 A + ((X != 0) << log C). Similarly for other simple binary
8237 operators. Only do for C == 1 if BRANCH_COST is low. */
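/* For instance, with C == 4, "X ? A + 4 : A" becomes A + ((X != 0) << 2),
   trading the conditional branch for a store-flag and a shift. */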
8238 if (temp && singleton && binary_op
8239 && (TREE_CODE (binary_op) == PLUS_EXPR
8240 || TREE_CODE (binary_op) == MINUS_EXPR
8241 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8242 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8243 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8244 : integer_onep (TREE_OPERAND (binary_op, 1)))
8245 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8246 {
8247 rtx result;
8248 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8249 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8250 ? addv_optab : add_optab)
8251 : TREE_CODE (binary_op) == MINUS_EXPR
8252 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8253 ? subv_optab : sub_optab)
8254 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8255 : xor_optab);
8256
8257 /* If we had X ? A : A + 1, do this as A + (X == 0).
8258
8259 We have to invert the truth value here and then put it
8260 back later if do_store_flag fails. We cannot simply copy
8261 TREE_OPERAND (exp, 0) to another variable and modify that
8262 because invert_truthvalue can modify the tree pointed to
8263 by its argument. */
8264 if (singleton == TREE_OPERAND (exp, 1))
8265 TREE_OPERAND (exp, 0)
8266 = invert_truthvalue (TREE_OPERAND (exp, 0));
8267
8268 result = do_store_flag (TREE_OPERAND (exp, 0),
8269 (safe_from_p (temp, singleton, 1)
8270 ? temp : NULL_RTX),
8271 mode, BRANCH_COST <= 1);
8272
8273 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8274 result = expand_shift (LSHIFT_EXPR, mode, result,
8275 build_int_2 (tree_log2
8276 (TREE_OPERAND
8277 (binary_op, 1)),
8278 0),
8279 (safe_from_p (temp, singleton, 1)
8280 ? temp : NULL_RTX), 0);
8281
8282 if (result)
8283 {
8284 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8285 return expand_binop (mode, boptab, op1, result, temp,
8286 unsignedp, OPTAB_LIB_WIDEN);
8287 }
8288 else if (singleton == TREE_OPERAND (exp, 1))
8289 TREE_OPERAND (exp, 0)
8290 = invert_truthvalue (TREE_OPERAND (exp, 0));
8291 }
8292
8293 do_pending_stack_adjust ();
8294 NO_DEFER_POP;
8295 op0 = gen_label_rtx ();
8296
8297 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8298 {
8299 if (temp != 0)
8300 {
8301 /* If the target conflicts with the other operand of the
8302 binary op, we can't use it. Also, we can't use the target
8303 if it is a hard register, because evaluating the condition
8304 might clobber it. */
8305 if ((binary_op
8306 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8307 || (GET_CODE (temp) == REG
8308 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8309 temp = gen_reg_rtx (mode);
8310 store_expr (singleton, temp, 0);
8311 }
8312 else
8313 expand_expr (singleton,
8314 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8315 if (singleton == TREE_OPERAND (exp, 1))
8316 jumpif (TREE_OPERAND (exp, 0), op0);
8317 else
8318 jumpifnot (TREE_OPERAND (exp, 0), op0);
8319
8320 start_cleanup_deferral ();
8321 if (binary_op && temp == 0)
8322 /* Just touch the other operand. */
8323 expand_expr (TREE_OPERAND (binary_op, 1),
8324 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8325 else if (binary_op)
8326 store_expr (build (TREE_CODE (binary_op), type,
8327 make_tree (type, temp),
8328 TREE_OPERAND (binary_op, 1)),
8329 temp, 0);
8330 else
8331 store_expr (build1 (TREE_CODE (unary_op), type,
8332 make_tree (type, temp)),
8333 temp, 0);
8334 op1 = op0;
8335 }
8336 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8337 comparison operator. If we have one of these cases, set the
8338 output to A, branch on A (cse will merge these two references),
8339 then set the output to FOO. */
8340 else if (temp
8341 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8342 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8343 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8344 TREE_OPERAND (exp, 1), 0)
8345 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8346 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8347 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8348 {
8349 if (GET_CODE (temp) == REG
8350 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8351 temp = gen_reg_rtx (mode);
8352 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8353 jumpif (TREE_OPERAND (exp, 0), op0);
8354
8355 start_cleanup_deferral ();
8356 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8357 op1 = op0;
8358 }
8359 else if (temp
8360 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8361 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8362 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8363 TREE_OPERAND (exp, 2), 0)
8364 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8365 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8366 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8367 {
8368 if (GET_CODE (temp) == REG
8369 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8370 temp = gen_reg_rtx (mode);
8371 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8372 jumpifnot (TREE_OPERAND (exp, 0), op0);
8373
8374 start_cleanup_deferral ();
8375 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8376 op1 = op0;
8377 }
8378 else
8379 {
8380 op1 = gen_label_rtx ();
8381 jumpifnot (TREE_OPERAND (exp, 0), op0);
8382
8383 start_cleanup_deferral ();
8384
8385 /* One branch of the cond can be void, if it never returns. For
8386 example A ? throw : E */
8387 if (temp != 0
8388 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8389 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8390 else
8391 expand_expr (TREE_OPERAND (exp, 1),
8392 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8393 end_cleanup_deferral ();
8394 emit_queue ();
8395 emit_jump_insn (gen_jump (op1));
8396 emit_barrier ();
8397 emit_label (op0);
8398 start_cleanup_deferral ();
8399 if (temp != 0
8400 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8401 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8402 else
8403 expand_expr (TREE_OPERAND (exp, 2),
8404 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8405 }
8406
8407 end_cleanup_deferral ();
8408
8409 emit_queue ();
8410 emit_label (op1);
8411 OK_DEFER_POP;
8412
8413 return temp;
8414 }
8415
8416 case TARGET_EXPR:
8417 {
8418 /* Something needs to be initialized, but we didn't know
8419 where that thing was when building the tree. For example,
8420 it could be the return value of a function, or a parameter
8421 to a function which is laid out on the stack, or a temporary
8422 variable which must be passed by reference.
8423
8424 We guarantee that the expression will either be constructed
8425 or copied into our original target. */
8426
8427 tree slot = TREE_OPERAND (exp, 0);
8428 tree cleanups = NULL_TREE;
8429 tree exp1;
8430
8431 if (TREE_CODE (slot) != VAR_DECL)
8432 abort ();
8433
8434 if (! ignore)
8435 target = original_target;
8436
8437 /* Set this here so that if we get a target that refers to a
8438 register variable that's already been used, put_reg_into_stack
8439 knows that it should fix up those uses. */
8440 TREE_USED (slot) = 1;
8441
8442 if (target == 0)
8443 {
8444 if (DECL_RTL_SET_P (slot))
8445 {
8446 target = DECL_RTL (slot);
8447 /* If we have already expanded the slot, don't do
8448 it again. (mrs) */
8449 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8450 return target;
8451 }
8452 else
8453 {
8454 target = assign_temp (type, 2, 0, 1);
8455 /* All temp slots at this level must not conflict. */
8456 preserve_temp_slots (target);
8457 SET_DECL_RTL (slot, target);
8458 if (TREE_ADDRESSABLE (slot))
8459 put_var_into_stack (slot);
8460
8461 /* Since SLOT is not known to the called function
8462 to belong to its stack frame, we must build an explicit
8463 cleanup. This case occurs when we must build up a reference
8464 to pass the reference as an argument. In this case,
8465 it is very likely that such a reference need not be
8466 built here. */
8467
8468 if (TREE_OPERAND (exp, 2) == 0)
8469 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8470 cleanups = TREE_OPERAND (exp, 2);
8471 }
8472 }
8473 else
8474 {
8475 /* This case does occur, when expanding a parameter which
8476 needs to be constructed on the stack. The target
8477 is the actual stack address that we want to initialize.
8478 The function we call will perform the cleanup in this case. */
8479
8480 /* If we have already assigned it space, use that space,
8481 not the target that we were passed in, as our target
8482 parameter is only a hint. */
8483 if (DECL_RTL_SET_P (slot))
8484 {
8485 target = DECL_RTL (slot);
8486 /* If we have already expanded the slot, don't do
8487 it again. (mrs) */
8488 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8489 return target;
8490 }
8491 else
8492 {
8493 SET_DECL_RTL (slot, target);
8494 /* If we must have an addressable slot, then make sure that
8495 the RTL that we just stored in slot is OK. */
8496 if (TREE_ADDRESSABLE (slot))
8497 put_var_into_stack (slot);
8498 }
8499 }
8500
8501 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8502 /* Mark it as expanded. */
8503 TREE_OPERAND (exp, 1) = NULL_TREE;
8504
8505 store_expr (exp1, target, 0);
8506
8507 expand_decl_cleanup (NULL_TREE, cleanups);
8508
8509 return target;
8510 }
8511
8512 case INIT_EXPR:
8513 {
8514 tree lhs = TREE_OPERAND (exp, 0);
8515 tree rhs = TREE_OPERAND (exp, 1);
8516
8517 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8518 return temp;
8519 }
8520
8521 case MODIFY_EXPR:
8522 {
8523 /* If lhs is complex, expand calls in rhs before computing it.
8524 That's so we don't compute a pointer and save it over a
8525 call. If lhs is simple, compute it first so we can give it
8526 as a target if the rhs is just a call. This avoids an
8527 extra temp and copy and that prevents a partial-subsumption
8528 which makes bad code. Actually we could treat
8529 component_ref's of vars like vars. */
8530
8531 tree lhs = TREE_OPERAND (exp, 0);
8532 tree rhs = TREE_OPERAND (exp, 1);
8533
8534 temp = 0;
8535
8536 /* Check for |= or &= of a bitfield of size one into another bitfield
8537 of size 1. In this case, (unless we need the result of the
8538 assignment) we can do this more efficiently with a
8539 test followed by an assignment, if necessary.
8540
8541 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8542 things change so we do, this code should be enhanced to
8543 support it. */
8544 if (ignore
8545 && TREE_CODE (lhs) == COMPONENT_REF
8546 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8547 || TREE_CODE (rhs) == BIT_AND_EXPR)
8548 && TREE_OPERAND (rhs, 0) == lhs
8549 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8550 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8551 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8552 {
8553 rtx label = gen_label_rtx ();
8554
8555 do_jump (TREE_OPERAND (rhs, 1),
8556 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8557 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8558 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8559 (TREE_CODE (rhs) == BIT_IOR_EXPR
8560 ? integer_one_node
8561 : integer_zero_node)),
8562 0, 0);
8563 do_pending_stack_adjust ();
8564 emit_label (label);
8565 return const0_rtx;
8566 }
8567
8568 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8569
8570 return temp;
8571 }
8572
8573 case RETURN_EXPR:
8574 if (!TREE_OPERAND (exp, 0))
8575 expand_null_return ();
8576 else
8577 expand_return (TREE_OPERAND (exp, 0));
8578 return const0_rtx;
8579
8580 case PREINCREMENT_EXPR:
8581 case PREDECREMENT_EXPR:
8582 return expand_increment (exp, 0, ignore);
8583
8584 case POSTINCREMENT_EXPR:
8585 case POSTDECREMENT_EXPR:
8586 /* Faster to treat as pre-increment if result is not used. */
8587 return expand_increment (exp, ! ignore, ignore);
8588
8589 case ADDR_EXPR:
8590 /* If nonzero, TEMP will be set to the address of something that might
8591 be a MEM corresponding to a stack slot. */
8592 temp = 0;
8593
8594 /* Are we taking the address of a nested function? */
8595 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8596 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8597 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8598 && ! TREE_STATIC (exp))
8599 {
8600 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8601 op0 = force_operand (op0, target);
8602 }
8603 /* If we are taking the address of something erroneous, just
8604 return a zero. */
8605 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8606 return const0_rtx;
8607 else
8608 {
8609 /* We make sure to pass const0_rtx down if we came in with
8610 ignore set, to avoid doing the cleanups for it twice. */
8611 op0 = expand_expr (TREE_OPERAND (exp, 0),
8612 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8613 (modifier == EXPAND_INITIALIZER
8614 ? modifier : EXPAND_CONST_ADDRESS));
8615
8616 /* If we are going to ignore the result, OP0 will have been set
8617 to const0_rtx, so just return it. Don't get confused and
8618 think we are taking the address of the constant. */
8619 if (ignore)
8620 return op0;
8621
8622 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8623 clever and returns a REG when given a MEM. */
8624 op0 = protect_from_queue (op0, 1);
8625
8626 /* We would like the object in memory. If it is a constant, we can
8627 have it be statically allocated into memory. For a non-constant,
8628 we need to allocate some memory and store the value into it. */
8629
8630 if (CONSTANT_P (op0))
8631 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8632 op0);
8633 else if (GET_CODE (op0) == MEM)
8634 {
8635 mark_temp_addr_taken (op0);
8636 temp = XEXP (op0, 0);
8637 }
8638
8639 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8640 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8641 || GET_CODE (op0) == PARALLEL)
8642 {
8643 /* If this object is in a register, it must not
8644 be BLKmode. */
8645 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8646 tree nt = build_qualified_type (inner_type,
8647 (TYPE_QUALS (inner_type)
8648 | TYPE_QUAL_CONST));
8649 rtx memloc = assign_temp (nt, 1, 1, 1);
8650
8651 mark_temp_addr_taken (memloc);
8652 if (GET_CODE (op0) == PARALLEL)
8653 /* Handle calls that pass values in multiple non-contiguous
8654 locations. The Irix 6 ABI has examples of this. */
8655 emit_group_store (memloc, op0,
8656 int_size_in_bytes (inner_type),
8657 TYPE_ALIGN (inner_type));
8658 else
8659 emit_move_insn (memloc, op0);
8660 op0 = memloc;
8661 }
8662
8663 if (GET_CODE (op0) != MEM)
8664 abort ();
8665
8666 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8667 {
8668 temp = XEXP (op0, 0);
8669 #ifdef POINTERS_EXTEND_UNSIGNED
8670 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8671 && mode == ptr_mode)
8672 temp = convert_memory_address (ptr_mode, temp);
8673 #endif
8674 return temp;
8675 }
8676
8677 op0 = force_operand (XEXP (op0, 0), target);
8678 }
8679
8680 if (flag_force_addr && GET_CODE (op0) != REG)
8681 op0 = force_reg (Pmode, op0);
8682
8683 if (GET_CODE (op0) == REG
8684 && ! REG_USERVAR_P (op0))
8685 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8686
8687 /* If we might have had a temp slot, add an equivalent address
8688 for it. */
8689 if (temp != 0)
8690 update_temp_slot_address (temp, op0);
8691
8692 #ifdef POINTERS_EXTEND_UNSIGNED
8693 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8694 && mode == ptr_mode)
8695 op0 = convert_memory_address (ptr_mode, op0);
8696 #endif
8697
8698 return op0;
8699
8700 case ENTRY_VALUE_EXPR:
8701 abort ();
8702
8703 /* COMPLEX type for Extended Pascal & Fortran */
8704 case COMPLEX_EXPR:
8705 {
8706 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8707 rtx insns;
8708
8709 /* Get the rtx code of the operands. */
8710 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8711 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8712
8713 if (! target)
8714 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8715
8716 start_sequence ();
8717
8718 /* Move the real (op0) and imaginary (op1) parts to their locations. */
8719 emit_move_insn (gen_realpart (mode, target), op0);
8720 emit_move_insn (gen_imagpart (mode, target), op1);
8721
8722 insns = get_insns ();
8723 end_sequence ();
8724
8725 /* Complex construction should appear as a single unit. */
8726 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8727 each with a separate pseudo as destination.
8728 It's not correct for flow to treat them as a unit. */
8729 if (GET_CODE (target) != CONCAT)
8730 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8731 else
8732 emit_insns (insns);
8733
8734 return target;
8735 }
8736
8737 case REALPART_EXPR:
8738 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8739 return gen_realpart (mode, op0);
8740
8741 case IMAGPART_EXPR:
8742 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8743 return gen_imagpart (mode, op0);
8744
8745 case CONJ_EXPR:
8746 {
8747 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8748 rtx imag_t;
8749 rtx insns;
8750
8751 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8752
8753 if (! target)
8754 target = gen_reg_rtx (mode);
8755
8756 start_sequence ();
8757
8758 /* Store the realpart and the negated imagpart to target. */
8759 emit_move_insn (gen_realpart (partmode, target),
8760 gen_realpart (partmode, op0));
8761
8762 imag_t = gen_imagpart (partmode, target);
8763 temp = expand_unop (partmode,
8764 ! unsignedp && flag_trapv
8765 && (GET_MODE_CLASS(partmode) == MODE_INT)
8766 ? negv_optab : neg_optab,
8767 gen_imagpart (partmode, op0), imag_t, 0);
8768 if (temp != imag_t)
8769 emit_move_insn (imag_t, temp);
8770
8771 insns = get_insns ();
8772 end_sequence ();
8773
8774 /* Conjugate should appear as a single unit.
8775 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8776 each with a separate pseudo as destination.
8777 It's not correct for flow to treat them as a unit. */
8778 if (GET_CODE (target) != CONCAT)
8779 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8780 else
8781 emit_insns (insns);
8782
8783 return target;
8784 }
8785
8786 case TRY_CATCH_EXPR:
8787 {
8788 tree handler = TREE_OPERAND (exp, 1);
8789
8790 expand_eh_region_start ();
8791
8792 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8793
8794 expand_eh_region_end_cleanup (handler);
8795
8796 return op0;
8797 }
8798
8799 case TRY_FINALLY_EXPR:
8800 {
8801 tree try_block = TREE_OPERAND (exp, 0);
8802 tree finally_block = TREE_OPERAND (exp, 1);
8803 rtx finally_label = gen_label_rtx ();
8804 rtx done_label = gen_label_rtx ();
8805 rtx return_link = gen_reg_rtx (Pmode);
8806 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8807 (tree) finally_label, (tree) return_link);
8808 TREE_SIDE_EFFECTS (cleanup) = 1;
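/* The expansion below produces code of roughly this shape:

     <try-block>
     return_link = &&ret; goto finally; ret:    (from the cleanup)
     goto done;
   finally:
     <finally-block>
     goto *return_link;
   done:

   The GOTO_SUBROUTINE_EXPR cleanup registered here is what routes the
   various exit paths from the try-block through the FINALLY code.  */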
8809
8810 /* Start a new binding layer that will keep track of all cleanup
8811 actions to be performed. */
8812 expand_start_bindings (2);
8813
8814 target_temp_slot_level = temp_slot_level;
8815
8816 expand_decl_cleanup (NULL_TREE, cleanup);
8817 op0 = expand_expr (try_block, target, tmode, modifier);
8818
8819 preserve_temp_slots (op0);
8820 expand_end_bindings (NULL_TREE, 0, 0);
8821 emit_jump (done_label);
8822 emit_label (finally_label);
8823 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8824 emit_indirect_jump (return_link);
8825 emit_label (done_label);
8826 return op0;
8827 }
8828
8829 case GOTO_SUBROUTINE_EXPR:
8830 {
8831 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8832 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8833 rtx return_address = gen_label_rtx ();
8834 emit_move_insn (return_link,
8835 gen_rtx_LABEL_REF (Pmode, return_address));
8836 emit_jump (subr);
8837 emit_label (return_address);
8838 return const0_rtx;
8839 }
8840
8841 case VA_ARG_EXPR:
8842 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8843
8844 case EXC_PTR_EXPR:
8845 return get_exception_pointer (cfun);
8846
8847 case FDESC_EXPR:
8848 /* Function descriptors are not valid except as
8849 initialization constants, and should not be expanded. */
8850 abort ();
8851
8852 default:
8853 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8854 }
8855
8856 /* Here to do an ordinary binary operator, generating an instruction
8857 from the optab already placed in `this_optab'. */
8858 binop:
8859 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8860 subtarget = 0;
8861 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8862 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8863 binop2:
8864 temp = expand_binop (mode, this_optab, op0, op1, target,
8865 unsignedp, OPTAB_LIB_WIDEN);
8866 if (temp == 0)
8867 abort ();
8868 return temp;
8869 }
8870 \f
8871 /* Similar to expand_expr, except that we don't specify a target, target
8872 mode, or modifier and we return the alignment of the inner type. This is
8873 used in cases where it is not necessary to align the result to the
8874 alignment of its type as long as we know the alignment of the result, for
8875 example for comparisons of BLKmode values. */
8876
8877 static rtx
8878 expand_expr_unaligned (exp, palign)
8879 tree exp;
8880 unsigned int *palign;
8881 {
8882 rtx op0;
8883 tree type = TREE_TYPE (exp);
8884 enum machine_mode mode = TYPE_MODE (type);
8885
8886 /* Default the alignment we return to that of the type. */
8887 *palign = TYPE_ALIGN (type);
8888
8889 /* The only case in which we do anything special is when the resulting mode
8890 is BLKmode. */
8891 if (mode != BLKmode)
8892 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8893
8894 switch (TREE_CODE (exp))
8895 {
8896 case CONVERT_EXPR:
8897 case NOP_EXPR:
8898 case NON_LVALUE_EXPR:
8899 /* Conversions between BLKmode values don't change the underlying
8900 alignment or value. */
8901 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8902 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8903 break;
8904
8905 case ARRAY_REF:
8906 /* Much of the code for this case is copied directly from expand_expr.
8907 We need to duplicate it here because we will do something different
8908 in the fall-through case, so we need to handle the same exceptions
8909 it does. */
8910 {
8911 tree array = TREE_OPERAND (exp, 0);
8912 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8913 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8914 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8915 HOST_WIDE_INT i;
8916
8917 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8918 abort ();
8919
8920 /* Optimize the special case of a zero lower bound.
8921
8922 We convert the low_bound to sizetype to avoid some problems
8923 with constant folding. (E.g. suppose the lower bound is 1,
8924 and its mode is QI. Without the conversion, (ARRAY
8925 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8926 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8927
8928 if (! integer_zerop (low_bound))
8929 index = size_diffop (index, convert (sizetype, low_bound));
8930
8931 /* If this is a constant index into a constant array,
8932 just get the value from the array. Handle both the cases when
8933 we have an explicit constructor and when our operand is a variable
8934 that was declared const. */
8935
8936 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8937 && host_integerp (index, 0)
8938 && 0 > compare_tree_int (index,
8939 list_length (CONSTRUCTOR_ELTS
8940 (TREE_OPERAND (exp, 0)))))
8941 {
8942 tree elem;
8943
8944 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8945 i = tree_low_cst (index, 0);
8946 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8947 ;
8948
8949 if (elem)
8950 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8951 }
8952
8953 else if (optimize >= 1
8954 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8955 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8956 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8957 {
8958 if (TREE_CODE (index) == INTEGER_CST)
8959 {
8960 tree init = DECL_INITIAL (array);
8961
8962 if (TREE_CODE (init) == CONSTRUCTOR)
8963 {
8964 tree elem;
8965
8966 for (elem = CONSTRUCTOR_ELTS (init);
8967 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8968 elem = TREE_CHAIN (elem))
8969 ;
8970
8971 if (elem)
8972 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8973 palign);
8974 }
8975 }
8976 }
8977 }
8978 /* Fall through. */
8979
8980 case COMPONENT_REF:
8981 case BIT_FIELD_REF:
8982 case ARRAY_RANGE_REF:
8983 /* If the operand is a CONSTRUCTOR, we can just extract the
8984 appropriate field if it is present. Don't do this if we have
8985 already written the data since we want to refer to that copy
8986 and varasm.c assumes that's what we'll do. */
8987 if (TREE_CODE (exp) == COMPONENT_REF
8988 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8989 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8990 {
8991 tree elt;
8992
8993 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8994 elt = TREE_CHAIN (elt))
8995 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8996 /* Note that unlike the case in expand_expr, we know this is
8997 BLKmode and hence not an integer. */
8998 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8999 }
9000
9001 {
9002 enum machine_mode mode1;
9003 HOST_WIDE_INT bitsize, bitpos;
9004 tree offset;
9005 int volatilep = 0;
9006 unsigned int alignment;
9007 int unsignedp;
9008 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9009 &mode1, &unsignedp, &volatilep,
9010 &alignment);
9011
9012 /* If we got back the original object, something is wrong. Perhaps
9013 we are evaluating an expression too early. In any event, don't
9014 infinitely recurse. */
9015 if (tem == exp)
9016 abort ();
9017
9018 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9019
9020 /* If this is a constant, put it into a register if it is a
9021 legitimate constant and OFFSET is 0, and into memory if it isn't. */
9022 if (CONSTANT_P (op0))
9023 {
9024 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
9025
9026 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
9027 && offset == 0)
9028 op0 = force_reg (inner_mode, op0);
9029 else
9030 op0 = validize_mem (force_const_mem (inner_mode, op0));
9031 }
9032
9033 if (offset != 0)
9034 {
9035 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
9036
9037 /* If this object is in a register, put it into memory.
9038 This case can't occur in C, but can in Ada if we have
9039 unchecked conversion of an expression from a scalar type to
9040 an array or record type. */
9041 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9042 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
9043 {
9044 tree nt = build_qualified_type (TREE_TYPE (tem),
9045 (TYPE_QUALS (TREE_TYPE (tem))
9046 | TYPE_QUAL_CONST));
9047 rtx memloc = assign_temp (nt, 1, 1, 1);
9048
9049 mark_temp_addr_taken (memloc);
9050 emit_move_insn (memloc, op0);
9051 op0 = memloc;
9052 }
9053
9054 if (GET_CODE (op0) != MEM)
9055 abort ();
9056
9057 if (GET_MODE (offset_rtx) != ptr_mode)
9058 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
9059
9060 #ifdef POINTERS_EXTEND_UNSIGNED
9061 if (GET_MODE (offset_rtx) != Pmode)
9062 offset_rtx = convert_memory_address (Pmode, offset_rtx);
9063 #endif
9064
9065 op0 = offset_address (op0, offset_rtx,
9066 highest_pow2_factor (offset));
9067 }
9068
9069 /* Don't forget about volatility even if this is a bitfield. */
9070 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
9071 {
9072 op0 = copy_rtx (op0);
9073 MEM_VOLATILE_P (op0) = 1;
9074 }
9075
9076 /* Check the access. */
9077 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
9078 {
9079 rtx to;
9080 int size;
9081
9082 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
9083 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
9084
9085 /* Check the access right of the pointer. */
9086 in_check_memory_usage = 1;
9087 if (size > BITS_PER_UNIT)
9088 emit_library_call (chkr_check_addr_libfunc,
9089 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
9090 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
9091 TYPE_MODE (sizetype),
9092 GEN_INT (MEMORY_USE_RO),
9093 TYPE_MODE (integer_type_node));
9094 in_check_memory_usage = 0;
9095 }
9096
9097 /* In cases where an aligned union has an unaligned object
9098 as a field, we might be extracting a BLKmode value from
9099 an integer-mode (e.g., SImode) object. Handle this case
9100 by doing the extract into an object as wide as the field
9101 (which we know to be the width of a basic mode), then
9102 storing into memory, and changing the mode to BLKmode.
9103 If we ultimately want the address (EXPAND_CONST_ADDRESS or
9104 EXPAND_INITIALIZER), then we must not copy to a temporary. */
9105 if (mode1 == VOIDmode
9106 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9107 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
9108 && (TYPE_ALIGN (type) > alignment
9109 || bitpos % TYPE_ALIGN (type) != 0)))
9110 {
9111 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9112
9113 if (ext_mode == BLKmode)
9114 {
9115 /* In this case, BITPOS must start at a byte boundary. */
9116 if (GET_CODE (op0) != MEM
9117 || bitpos % BITS_PER_UNIT != 0)
9118 abort ();
9119
9120 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
9121 }
9122 else
9123 {
9124 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
9125 TYPE_QUAL_CONST);
9126 rtx new = assign_temp (nt, 0, 1, 1);
9127
9128 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
9129 unsignedp, NULL_RTX, ext_mode,
9130 ext_mode, alignment,
9131 int_size_in_bytes (TREE_TYPE (tem)));
9132
9133 /* If the result is a record type and BITSIZE is narrower than
9134 the mode of OP0, an integral mode, and this is a big endian
9135 machine, we must put the field into the high-order bits. */
9136 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9137 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9138 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
9139 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9140 size_int (GET_MODE_BITSIZE
9141 (GET_MODE (op0))
9142 - bitsize),
9143 op0, 1);
9144
9145 emit_move_insn (new, op0);
9146 op0 = copy_rtx (new);
9147 PUT_MODE (op0, BLKmode);
9148 }
9149 }
9150 else
9151 /* Get a reference to just this component. */
9152 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9153
9154 set_mem_attributes (op0, exp, 0);
9155
9156 /* Adjust the alignment in case the bit position is not
9157 a multiple of the alignment of the inner object. */
9158 while (bitpos % alignment != 0)
9159 alignment >>= 1;
9160
9161 if (GET_CODE (XEXP (op0, 0)) == REG)
9162 mark_reg_pointer (XEXP (op0, 0), alignment);
9163
9164 MEM_IN_STRUCT_P (op0) = 1;
9165 MEM_VOLATILE_P (op0) |= volatilep;
9166
9167 *palign = alignment;
9168 return op0;
9169 }
9170
9171 default:
9172 break;
9173
9174 }
9175
9176 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9177 }
9178 \f
9179 /* Return the tree node if ARG corresponds to a string constant, or zero
9180 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9181 in bytes within the string that ARG is accessing. The type of the
9182 offset will be `sizetype'. */
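/* For example, given the tree for `"abcdef" + 3' this returns the
   STRING_CST node for "abcdef" and sets *PTR_OFFSET to 3.  */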
9183
9184 tree
9185 string_constant (arg, ptr_offset)
9186 tree arg;
9187 tree *ptr_offset;
9188 {
9189 STRIP_NOPS (arg);
9190
9191 if (TREE_CODE (arg) == ADDR_EXPR
9192 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9193 {
9194 *ptr_offset = size_zero_node;
9195 return TREE_OPERAND (arg, 0);
9196 }
9197 else if (TREE_CODE (arg) == PLUS_EXPR)
9198 {
9199 tree arg0 = TREE_OPERAND (arg, 0);
9200 tree arg1 = TREE_OPERAND (arg, 1);
9201
9202 STRIP_NOPS (arg0);
9203 STRIP_NOPS (arg1);
9204
9205 if (TREE_CODE (arg0) == ADDR_EXPR
9206 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9207 {
9208 *ptr_offset = convert (sizetype, arg1);
9209 return TREE_OPERAND (arg0, 0);
9210 }
9211 else if (TREE_CODE (arg1) == ADDR_EXPR
9212 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9213 {
9214 *ptr_offset = convert (sizetype, arg0);
9215 return TREE_OPERAND (arg1, 0);
9216 }
9217 }
9218
9219 return 0;
9220 }
9221 \f
9222 /* Expand code for a post- or pre- increment or decrement
9223 and return the RTX for the result.
9224 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9225
9226 static rtx
9227 expand_increment (exp, post, ignore)
9228 tree exp;
9229 int post, ignore;
9230 {
9231 rtx op0, op1;
9232 rtx temp, value;
9233 tree incremented = TREE_OPERAND (exp, 0);
9234 optab this_optab = add_optab;
9235 int icode;
9236 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9237 int op0_is_copy = 0;
9238 int single_insn = 0;
9239 /* 1 means we can't store into OP0 directly,
9240 because it is a subreg narrower than a word,
9241 and we don't dare clobber the rest of the word. */
9242 int bad_subreg = 0;
9243
9244 /* Stabilize any component ref that might need to be
9245 evaluated more than once below. */
9246 if (!post
9247 || TREE_CODE (incremented) == BIT_FIELD_REF
9248 || (TREE_CODE (incremented) == COMPONENT_REF
9249 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9250 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9251 incremented = stabilize_reference (incremented);
9252 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9253 ones into save exprs so that they don't accidentally get evaluated
9254 more than once by the code below. */
9255 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9256 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9257 incremented = save_expr (incremented);
9258
9259 /* Compute the operands as RTX.
9260 Note whether OP0 is the actual lvalue or a copy of it:
9261 I believe it is a copy iff it is a register or subreg
9262 and insns were generated in computing it. */
9263
9264 temp = get_last_insn ();
9265 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9266
9267 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9268 in place but instead must do sign- or zero-extension during assignment,
9269 so we copy it into a new register and let the code below use it as
9270 a copy.
9271
9272 Note that we can safely modify this SUBREG since it is known not to be
9273 shared (it was made by the expand_expr call above). */
9274
9275 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9276 {
9277 if (post)
9278 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9279 else
9280 bad_subreg = 1;
9281 }
9282 else if (GET_CODE (op0) == SUBREG
9283 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9284 {
9285 /* We cannot increment this SUBREG in place. If we are
9286 post-incrementing, get a copy of the old value. Otherwise,
9287 just mark that we cannot increment in place. */
9288 if (post)
9289 op0 = copy_to_reg (op0);
9290 else
9291 bad_subreg = 1;
9292 }
9293
9294 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9295 && temp != get_last_insn ());
9296 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9297 EXPAND_MEMORY_USE_BAD);
9298
9299 /* Decide whether incrementing or decrementing. */
9300 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9301 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9302 this_optab = sub_optab;
9303
9304 /* Convert decrement by a constant into a negative increment. */
9305 if (this_optab == sub_optab
9306 && GET_CODE (op1) == CONST_INT)
9307 {
9308 op1 = GEN_INT (-INTVAL (op1));
9309 this_optab = add_optab;
9310 }
9311
9312 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9313 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9314
9315 /* For a preincrement, see if we can do this with a single instruction. */
9316 if (!post)
9317 {
9318 icode = (int) this_optab->handlers[(int) mode].insn_code;
9319 if (icode != (int) CODE_FOR_nothing
9320 /* Make sure that OP0 is valid for operands 0 and 1
9321 of the insn we want to queue. */
9322 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9323 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9324 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9325 single_insn = 1;
9326 }
9327
9328 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9329 then we cannot just increment OP0. We must therefore contrive to
9330 increment the original value. Then, for postincrement, we can return
9331 OP0 since it is a copy of the old value. For preincrement, expand here
9332 unless we can do it with a single insn.
9333
9334 Likewise if storing directly into OP0 would clobber high bits
9335 we need to preserve (bad_subreg). */
9336 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9337 {
9338 /* This is the easiest way to increment the value wherever it is.
9339 Problems with multiple evaluation of INCREMENTED are prevented
9340 because either (1) it is a component_ref or preincrement,
9341 in which case it was stabilized above, or (2) it is an array_ref
9342 with constant index in an array in a register, which is
9343 safe to reevaluate. */
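/* In effect we rewrite the increment as the assignment
   `E = E + N' (or `E - N'), where N is TREE_OPERAND (exp, 1), and
   expand that; for a postincrement the old value is already in OP0,
   which is what we return.  */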
9344 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9345 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9346 ? MINUS_EXPR : PLUS_EXPR),
9347 TREE_TYPE (exp),
9348 incremented,
9349 TREE_OPERAND (exp, 1));
9350
9351 while (TREE_CODE (incremented) == NOP_EXPR
9352 || TREE_CODE (incremented) == CONVERT_EXPR)
9353 {
9354 newexp = convert (TREE_TYPE (incremented), newexp);
9355 incremented = TREE_OPERAND (incremented, 0);
9356 }
9357
9358 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9359 return post ? op0 : temp;
9360 }
9361
9362 if (post)
9363 {
9364 /* We have a true reference to the value in OP0.
9365 If there is an insn to add or subtract in this mode, queue it.
9366 Queueing the increment insn avoids the register shuffling
9367 that often results if we must increment now and first save
9368 the old value for subsequent use. */
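/* E.g. for `y = x++;' the caller can use the current (old) value of X
   directly, and the queued add is only emitted when the queue is
   flushed by emit_queue, after the value has been used.  */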
9369
9370 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9371 op0 = stabilize (op0);
9372 #endif
9373
9374 icode = (int) this_optab->handlers[(int) mode].insn_code;
9375 if (icode != (int) CODE_FOR_nothing
9376 /* Make sure that OP0 is valid for operands 0 and 1
9377 of the insn we want to queue. */
9378 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9379 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9380 {
9381 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9382 op1 = force_reg (mode, op1);
9383
9384 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9385 }
9386 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9387 {
9388 rtx addr = (general_operand (XEXP (op0, 0), mode)
9389 ? force_reg (Pmode, XEXP (op0, 0))
9390 : copy_to_reg (XEXP (op0, 0)));
9391 rtx temp, result;
9392
9393 op0 = replace_equiv_address (op0, addr);
9394 temp = force_reg (GET_MODE (op0), op0);
9395 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9396 op1 = force_reg (mode, op1);
9397
9398 /* The increment queue is LIFO, thus we have to `queue'
9399 the instructions in reverse order. */
9400 enqueue_insn (op0, gen_move_insn (op0, temp));
9401 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9402 return result;
9403 }
9404 }
9405
9406 /* Preincrement, or we can't increment with one simple insn. */
9407 if (post)
9408 /* Save a copy of the value before inc or dec, to return it later. */
9409 temp = value = copy_to_reg (op0);
9410 else
9411 /* Arrange to return the incremented value. */
9412 /* Copy the rtx because expand_binop will protect from the queue,
9413 and the results of that would be invalid for us to return
9414 if our caller does emit_queue before using our result. */
9415 temp = copy_rtx (value = op0);
9416
9417 /* Increment however we can. */
9418 op1 = expand_binop (mode, this_optab, value, op1,
9419 current_function_check_memory_usage ? NULL_RTX : op0,
9420 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9421 /* Make sure the value is stored into OP0. */
9422 if (op1 != op0)
9423 emit_move_insn (op0, op1);
9424
9425 return temp;
9426 }
9427 \f
9428 /* At the start of a function, record that we have no previously-pushed
9429 arguments waiting to be popped. */
9430
9431 void
9432 init_pending_stack_adjust ()
9433 {
9434 pending_stack_adjust = 0;
9435 }
9436
9437 /* When exiting from a function, if safe, clear out any pending stack adjust
9438 so the adjustment won't get done.
9439
9440 Note, if the current function calls alloca, then it must have a
9441 frame pointer regardless of the value of flag_omit_frame_pointer. */
9442
9443 void
9444 clear_pending_stack_adjust ()
9445 {
9446 #ifdef EXIT_IGNORE_STACK
9447 if (optimize > 0
9448 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9449 && EXIT_IGNORE_STACK
9450 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9451 && ! flag_inline_functions)
9452 {
9453 stack_pointer_delta -= pending_stack_adjust,
9454 pending_stack_adjust = 0;
9455 }
9456 #endif
9457 }
9458
9459 /* Pop any previously-pushed arguments that have not been popped yet. */
9460
9461 void
9462 do_pending_stack_adjust ()
9463 {
9464 if (inhibit_defer_pop == 0)
9465 {
9466 if (pending_stack_adjust != 0)
9467 adjust_stack (GEN_INT (pending_stack_adjust));
9468 pending_stack_adjust = 0;
9469 }
9470 }
9471 \f
9472 /* Expand conditional expressions. */
9473
9474 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9475 LABEL is an rtx of code CODE_LABEL, in this function and all the
9476 functions here. */
9477
9478 void
9479 jumpifnot (exp, label)
9480 tree exp;
9481 rtx label;
9482 {
9483 do_jump (exp, label, NULL_RTX);
9484 }
9485
9486 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9487
9488 void
9489 jumpif (exp, label)
9490 tree exp;
9491 rtx label;
9492 {
9493 do_jump (exp, NULL_RTX, label);
9494 }
9495
9496 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9497 the result is zero, or IF_TRUE_LABEL if the result is one.
9498 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9499 meaning fall through in that case.
9500
9501 do_jump always does any pending stack adjust except when it does not
9502 actually perform a jump. An example where there is no jump
9503 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9504
9505 This function is responsible for optimizing cases such as
9506 &&, || and comparison operators in EXP. */
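/* For example, for `if (a && b) ...' jumpifnot generates a jump to
   the false label as soon as A is known to be zero, without
   evaluating B (see the TRUTH_ANDIF_EXPR case below).  */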
9507
9508 void
9509 do_jump (exp, if_false_label, if_true_label)
9510 tree exp;
9511 rtx if_false_label, if_true_label;
9512 {
9513 enum tree_code code = TREE_CODE (exp);
9514 /* Some cases need to create a label to jump to
9515 in order to properly fall through.
9516 These cases set DROP_THROUGH_LABEL nonzero. */
9517 rtx drop_through_label = 0;
9518 rtx temp;
9519 int i;
9520 tree type;
9521 enum machine_mode mode;
9522
9523 #ifdef MAX_INTEGER_COMPUTATION_MODE
9524 check_max_integer_computation_mode (exp);
9525 #endif
9526
9527 emit_queue ();
9528
9529 switch (code)
9530 {
9531 case ERROR_MARK:
9532 break;
9533
9534 case INTEGER_CST:
9535 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9536 if (temp)
9537 emit_jump (temp);
9538 break;
9539
9540 #if 0
9541 /* This is not true with #pragma weak */
9542 case ADDR_EXPR:
9543 /* The address of something can never be zero. */
9544 if (if_true_label)
9545 emit_jump (if_true_label);
9546 break;
9547 #endif
9548
9549 case NOP_EXPR:
9550 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9551 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9552 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9553 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9554 goto normal;
9555 case CONVERT_EXPR:
9556 /* If we are narrowing the operand, we have to do the compare in the
9557 narrower mode. */
9558 if ((TYPE_PRECISION (TREE_TYPE (exp))
9559 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9560 goto normal;
9561 case NON_LVALUE_EXPR:
9562 case REFERENCE_EXPR:
9563 case ABS_EXPR:
9564 case NEGATE_EXPR:
9565 case LROTATE_EXPR:
9566 case RROTATE_EXPR:
9567 /* These cannot change zero->non-zero or vice versa. */
9568 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9569 break;
9570
9571 case WITH_RECORD_EXPR:
9572 /* Put the object on the placeholder list, recurse through our first
9573 operand, and pop the list. */
9574 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9575 placeholder_list);
9576 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9577 placeholder_list = TREE_CHAIN (placeholder_list);
9578 break;
9579
9580 #if 0
9581 /* This never takes fewer insns than evaluating the PLUS_EXPR followed by
9582 a test, and can be longer if the test is eliminated. */
9583 case PLUS_EXPR:
9584 /* Reduce to minus. */
9585 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9586 TREE_OPERAND (exp, 0),
9587 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9588 TREE_OPERAND (exp, 1))));
9589 /* Process as MINUS. */
9590 #endif
9591
9592 case MINUS_EXPR:
9593 /* Non-zero iff operands of minus differ. */
9594 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9595 TREE_OPERAND (exp, 0),
9596 TREE_OPERAND (exp, 1)),
9597 NE, NE, if_false_label, if_true_label);
9598 break;
9599
9600 case BIT_AND_EXPR:
9601 /* If we are AND'ing with a small constant, do this comparison in the
9602 smallest type that fits. If the machine doesn't have comparisons
9603 that small, it will be converted back to the wider comparison.
9604 This helps if we are testing the sign bit of a narrower object.
9605 combine can't do this for us because it can't know whether a
9606 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
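/* E.g. a test of `x & 0x80' on a 32-bit X is converted to an 8-bit
   type here and compared in QImode, provided the target has a QImode
   compare insn and byte access is not slow.  */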
9607
9608 if (! SLOW_BYTE_ACCESS
9609 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9610 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9611 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9612 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9613 && (type = type_for_mode (mode, 1)) != 0
9614 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9615 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9616 != CODE_FOR_nothing))
9617 {
9618 do_jump (convert (type, exp), if_false_label, if_true_label);
9619 break;
9620 }
9621 goto normal;
9622
9623 case TRUTH_NOT_EXPR:
9624 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9625 break;
9626
9627 case TRUTH_ANDIF_EXPR:
9628 if (if_false_label == 0)
9629 if_false_label = drop_through_label = gen_label_rtx ();
9630 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9631 start_cleanup_deferral ();
9632 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9633 end_cleanup_deferral ();
9634 break;
9635
9636 case TRUTH_ORIF_EXPR:
9637 if (if_true_label == 0)
9638 if_true_label = drop_through_label = gen_label_rtx ();
9639 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9640 start_cleanup_deferral ();
9641 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9642 end_cleanup_deferral ();
9643 break;
9644
9645 case COMPOUND_EXPR:
9646 push_temp_slots ();
9647 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9648 preserve_temp_slots (NULL_RTX);
9649 free_temp_slots ();
9650 pop_temp_slots ();
9651 emit_queue ();
9652 do_pending_stack_adjust ();
9653 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9654 break;
9655
9656 case COMPONENT_REF:
9657 case BIT_FIELD_REF:
9658 case ARRAY_REF:
9659 case ARRAY_RANGE_REF:
9660 {
9661 HOST_WIDE_INT bitsize, bitpos;
9662 int unsignedp;
9663 enum machine_mode mode;
9664 tree type;
9665 tree offset;
9666 int volatilep = 0;
9667 unsigned int alignment;
9668
9669 /* Get description of this reference. We don't actually care
9670 about the underlying object here. */
9671 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9672 &unsignedp, &volatilep, &alignment);
9673
9674 type = type_for_size (bitsize, unsignedp);
9675 if (! SLOW_BYTE_ACCESS
9676 && type != 0 && bitsize >= 0
9677 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9678 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9679 != CODE_FOR_nothing))
9680 {
9681 do_jump (convert (type, exp), if_false_label, if_true_label);
9682 break;
9683 }
9684 goto normal;
9685 }
9686
9687 case COND_EXPR:
9688 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9689 if (integer_onep (TREE_OPERAND (exp, 1))
9690 && integer_zerop (TREE_OPERAND (exp, 2)))
9691 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9692
9693 else if (integer_zerop (TREE_OPERAND (exp, 1))
9694 && integer_onep (TREE_OPERAND (exp, 2)))
9695 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9696
9697 else
9698 {
9699 rtx label1 = gen_label_rtx ();
9700 drop_through_label = gen_label_rtx ();
9701
9702 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9703
9704 start_cleanup_deferral ();
9705 /* Now the THEN-expression. */
9706 do_jump (TREE_OPERAND (exp, 1),
9707 if_false_label ? if_false_label : drop_through_label,
9708 if_true_label ? if_true_label : drop_through_label);
9709 /* In case the do_jump just above never jumps. */
9710 do_pending_stack_adjust ();
9711 emit_label (label1);
9712
9713 /* Now the ELSE-expression. */
9714 do_jump (TREE_OPERAND (exp, 2),
9715 if_false_label ? if_false_label : drop_through_label,
9716 if_true_label ? if_true_label : drop_through_label);
9717 end_cleanup_deferral ();
9718 }
9719 break;
9720
9721 case EQ_EXPR:
9722 {
9723 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9724
9725 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9726 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9727 {
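/* Compare complex values as
   REALPART (a) == REALPART (b) && IMAGPART (a) == IMAGPART (b).  */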
9728 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9729 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9730 do_jump
9731 (fold
9732 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9733 fold (build (EQ_EXPR, TREE_TYPE (exp),
9734 fold (build1 (REALPART_EXPR,
9735 TREE_TYPE (inner_type),
9736 exp0)),
9737 fold (build1 (REALPART_EXPR,
9738 TREE_TYPE (inner_type),
9739 exp1)))),
9740 fold (build (EQ_EXPR, TREE_TYPE (exp),
9741 fold (build1 (IMAGPART_EXPR,
9742 TREE_TYPE (inner_type),
9743 exp0)),
9744 fold (build1 (IMAGPART_EXPR,
9745 TREE_TYPE (inner_type),
9746 exp1)))))),
9747 if_false_label, if_true_label);
9748 }
9749
9750 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9751 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9752
9753 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9754 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9755 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9756 else
9757 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9758 break;
9759 }
9760
9761 case NE_EXPR:
9762 {
9763 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9764
9765 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9766 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9767 {
9768 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9769 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9770 do_jump
9771 (fold
9772 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9773 fold (build (NE_EXPR, TREE_TYPE (exp),
9774 fold (build1 (REALPART_EXPR,
9775 TREE_TYPE (inner_type),
9776 exp0)),
9777 fold (build1 (REALPART_EXPR,
9778 TREE_TYPE (inner_type),
9779 exp1)))),
9780 fold (build (NE_EXPR, TREE_TYPE (exp),
9781 fold (build1 (IMAGPART_EXPR,
9782 TREE_TYPE (inner_type),
9783 exp0)),
9784 fold (build1 (IMAGPART_EXPR,
9785 TREE_TYPE (inner_type),
9786 exp1)))))),
9787 if_false_label, if_true_label);
9788 }
9789
9790 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9791 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9792
9793 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9794 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9795 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9796 else
9797 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9798 break;
9799 }
9800
9801 case LT_EXPR:
9802 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9803 if (GET_MODE_CLASS (mode) == MODE_INT
9804 && ! can_compare_p (LT, mode, ccp_jump))
9805 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9806 else
9807 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9808 break;
9809
9810 case LE_EXPR:
9811 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9812 if (GET_MODE_CLASS (mode) == MODE_INT
9813 && ! can_compare_p (LE, mode, ccp_jump))
9814 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9815 else
9816 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9817 break;
9818
9819 case GT_EXPR:
9820 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9821 if (GET_MODE_CLASS (mode) == MODE_INT
9822 && ! can_compare_p (GT, mode, ccp_jump))
9823 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9824 else
9825 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9826 break;
9827
9828 case GE_EXPR:
9829 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9830 if (GET_MODE_CLASS (mode) == MODE_INT
9831 && ! can_compare_p (GE, mode, ccp_jump))
9832 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9833 else
9834 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9835 break;
9836
9837 case UNORDERED_EXPR:
9838 case ORDERED_EXPR:
9839 {
9840 enum rtx_code cmp, rcmp;
9841 int do_rev;
9842
9843 if (code == UNORDERED_EXPR)
9844 cmp = UNORDERED, rcmp = ORDERED;
9845 else
9846 cmp = ORDERED, rcmp = UNORDERED;
9847 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9848
9849 do_rev = 0;
9850 if (! can_compare_p (cmp, mode, ccp_jump)
9851 && (can_compare_p (rcmp, mode, ccp_jump)
9852 /* If the target doesn't provide either UNORDERED or ORDERED
9853 comparisons, canonicalize on UNORDERED for the library. */
9854 || rcmp == UNORDERED))
9855 do_rev = 1;
9856
9857 if (! do_rev)
9858 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9859 else
9860 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9861 }
9862 break;
9863
9864 {
9865 enum rtx_code rcode1;
9866 enum tree_code tcode2;
9867
9868 case UNLT_EXPR:
9869 rcode1 = UNLT;
9870 tcode2 = LT_EXPR;
9871 goto unordered_bcc;
9872 case UNLE_EXPR:
9873 rcode1 = UNLE;
9874 tcode2 = LE_EXPR;
9875 goto unordered_bcc;
9876 case UNGT_EXPR:
9877 rcode1 = UNGT;
9878 tcode2 = GT_EXPR;
9879 goto unordered_bcc;
9880 case UNGE_EXPR:
9881 rcode1 = UNGE;
9882 tcode2 = GE_EXPR;
9883 goto unordered_bcc;
9884 case UNEQ_EXPR:
9885 rcode1 = UNEQ;
9886 tcode2 = EQ_EXPR;
9887 goto unordered_bcc;
9888
9889 unordered_bcc:
9890 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9891 if (can_compare_p (rcode1, mode, ccp_jump))
9892 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9893 if_true_label);
9894 else
9895 {
9896 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9897 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9898 tree cmp0, cmp1;
9899
9900 /* If the target doesn't support combined unordered
9901 compares, decompose into UNORDERED + comparison. */
9902 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9903 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9904 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9905 do_jump (exp, if_false_label, if_true_label);
9906 }
9907 }
9908 break;
9909
9910 /* Special case:
9911 __builtin_expect (<test>, 0) and
9912 __builtin_expect (<test>, 1)
9913
9914 We need to do this here, so that <test> is not converted to a SCC
9915 operation on machines that use condition code registers and COMPARE
9916 like the PowerPC, and then the jump is done based on whether the SCC
9917 operation produced a 1 or 0. */
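/* So, e.g., the test inside `__builtin_expect (x != 0, 1)' is
   branched on directly here instead of first being materialized
   as a 0/1 (SCC) value.  */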
9918 case CALL_EXPR:
9919 /* Check for a built-in function. */
9920 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9921 {
9922 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9923 tree arglist = TREE_OPERAND (exp, 1);
9924
9925 if (TREE_CODE (fndecl) == FUNCTION_DECL
9926 && DECL_BUILT_IN (fndecl)
9927 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9928 && arglist != NULL_TREE
9929 && TREE_CHAIN (arglist) != NULL_TREE)
9930 {
9931 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9932 if_true_label);
9933
9934 if (seq != NULL_RTX)
9935 {
9936 emit_insn (seq);
9937 return;
9938 }
9939 }
9940 }
9941 /* fall through and generate the normal code. */
9942
9943 default:
9944 normal:
9945 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9946 #if 0
9947 /* This is not needed any more and causes poor code since it causes
9948 comparisons and tests from non-SI objects to have different code
9949 sequences. */
9950 /* Copy to register to avoid generating bad insns by cse
9951 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9952 if (!cse_not_expected && GET_CODE (temp) == MEM)
9953 temp = copy_to_reg (temp);
9954 #endif
9955 do_pending_stack_adjust ();
9956 /* Do any postincrements in the expression that was tested. */
9957 emit_queue ();
9958
9959 if (GET_CODE (temp) == CONST_INT
9960 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9961 || GET_CODE (temp) == LABEL_REF)
9962 {
9963 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9964 if (target)
9965 emit_jump (target);
9966 }
9967 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9968 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9969 /* Note swapping the labels gives us not-equal. */
9970 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9971 else if (GET_MODE (temp) != VOIDmode)
9972 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9973 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9974 GET_MODE (temp), NULL_RTX, 0,
9975 if_false_label, if_true_label);
9976 else
9977 abort ();
9978 }
9979
9980 if (drop_through_label)
9981 {
9982 /* If do_jump produces code that might be jumped around,
9983 do any stack adjusts from that code, before the place
9984 where control merges in. */
9985 do_pending_stack_adjust ();
9986 emit_label (drop_through_label);
9987 }
9988 }
9989 \f
9990 /* Given a comparison expression EXP for values too wide to be compared
9991 with one insn, test the comparison and jump to the appropriate label.
9992 The code of EXP is ignored; we always test GT if SWAP is 0,
9993 and LT if SWAP is 1. */
9994
9995 static void
9996 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9997 tree exp;
9998 int swap;
9999 rtx if_false_label, if_true_label;
10000 {
10001 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10002 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10003 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10004 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10005
10006 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
10007 }
10008
10009 /* Compare OP0 with OP1, word at a time, in mode MODE.
10010 UNSIGNEDP says to do unsigned comparison.
10011 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10012
10013 void
10014 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10015 enum machine_mode mode;
10016 int unsignedp;
10017 rtx op0, op1;
10018 rtx if_false_label, if_true_label;
10019 {
10020 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10021 rtx drop_through_label = 0;
10022 int i;
10023
10024 if (! if_true_label || ! if_false_label)
10025 drop_through_label = gen_label_rtx ();
10026 if (! if_true_label)
10027 if_true_label = drop_through_label;
10028 if (! if_false_label)
10029 if_false_label = drop_through_label;
10030
10031 /* Compare a word at a time, high order first. */
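/* E.g. for a two-word comparison this emits, in order: a jump to
   IF_TRUE_LABEL if the high words compare greater, a jump to
   IF_FALSE_LABEL if they compare unequal, the same pair of tests on
   the low words (always unsigned), and finally a jump to
   IF_FALSE_LABEL for the case where all words were equal.  */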
10032 for (i = 0; i < nwords; i++)
10033 {
10034 rtx op0_word, op1_word;
10035
10036 if (WORDS_BIG_ENDIAN)
10037 {
10038 op0_word = operand_subword_force (op0, i, mode);
10039 op1_word = operand_subword_force (op1, i, mode);
10040 }
10041 else
10042 {
10043 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10044 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10045 }
10046
10047 /* All but the high-order word must be compared as unsigned. */
10048 do_compare_rtx_and_jump (op0_word, op1_word, GT,
10049 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
10050 NULL_RTX, if_true_label);
10051
10052 /* Consider lower words only if these are equal. */
10053 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
10054 NULL_RTX, 0, NULL_RTX, if_false_label);
10055 }
10056
10057 if (if_false_label)
10058 emit_jump (if_false_label);
10059 if (drop_through_label)
10060 emit_label (drop_through_label);
10061 }
10062
10063 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10064 with one insn, test the comparison and jump to the appropriate label. */
10065
10066 static void
10067 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10068 tree exp;
10069 rtx if_false_label, if_true_label;
10070 {
10071 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10072 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10073 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10074 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10075 int i;
10076 rtx drop_through_label = 0;
10077
10078 if (! if_false_label)
10079 drop_through_label = if_false_label = gen_label_rtx ();
10080
10081 for (i = 0; i < nwords; i++)
10082 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10083 operand_subword_force (op1, i, mode),
10084 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10085 word_mode, NULL_RTX, 0, if_false_label,
10086 NULL_RTX);
10087
10088 if (if_true_label)
10089 emit_jump (if_true_label);
10090 if (drop_through_label)
10091 emit_label (drop_through_label);
10092 }
10093 \f
10094 /* Jump according to whether OP0 is 0.
10095 We assume that OP0 has an integer mode that is too wide
10096 for the available compare insns. */
10097
10098 void
10099 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10100 rtx op0;
10101 rtx if_false_label, if_true_label;
10102 {
10103 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10104 rtx part;
10105 int i;
10106 rtx drop_through_label = 0;
10107
10108 /* The fastest way of doing this comparison on almost any machine is to
10109 "or" all the words and compare the result. If all have to be loaded
10110 from memory and this is a very wide item, it's possible this may
10111 be slower, but that's highly unlikely. */
10112
10113 part = gen_reg_rtx (word_mode);
10114 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10115 for (i = 1; i < nwords && part != 0; i++)
10116 part = expand_binop (word_mode, ior_optab, part,
10117 operand_subword_force (op0, i, GET_MODE (op0)),
10118 part, 1, OPTAB_WIDEN);
10119
10120 if (part != 0)
10121 {
10122 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10123 NULL_RTX, 0, if_false_label, if_true_label);
10124
10125 return;
10126 }
10127
10128 /* If we couldn't do the "or" simply, do this with a series of compares. */
10129 if (! if_false_label)
10130 drop_through_label = if_false_label = gen_label_rtx ();
10131
10132 for (i = 0; i < nwords; i++)
10133 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10134 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
10135 if_false_label, NULL_RTX);
10136
10137 if (if_true_label)
10138 emit_jump (if_true_label);
10139
10140 if (drop_through_label)
10141 emit_label (drop_through_label);
10142 }
10143 \f
10144 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
10145 (including code to compute the values to be compared)
10146 and set (CC0) according to the result.
10147 The decision as to signed or unsigned comparison must be made by the caller.
10148
10149 We force a stack adjustment unless there are currently
10150 things pushed on the stack that aren't yet used.
10151
10152 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10153 compared.
10154
10155 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10156 size of MODE should be used. */
10157
10158 rtx
10159 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10160 rtx op0, op1;
10161 enum rtx_code code;
10162 int unsignedp;
10163 enum machine_mode mode;
10164 rtx size;
10165 unsigned int align;
10166 {
10167 rtx tem;
10168
10169 /* If one operand is constant, make it the second one. Only do this
10170 if the other operand is not constant as well. */
10171
10172 if (swap_commutative_operands_p (op0, op1))
10173 {
10174 tem = op0;
10175 op0 = op1;
10176 op1 = tem;
10177 code = swap_condition (code);
10178 }
10179
10180 if (flag_force_mem)
10181 {
10182 op0 = force_not_mem (op0);
10183 op1 = force_not_mem (op1);
10184 }
10185
10186 do_pending_stack_adjust ();
10187
10188 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10189 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10190 return tem;
10191
10192 #if 0
10193 /* There's no need to do this now that combine.c can eliminate lots of
10194 sign extensions. This can be less efficient in certain cases on other
10195 machines. */
10196
10197 /* If this is a signed equality comparison, we can do it as an
10198 unsigned comparison since zero-extension is cheaper than sign
10199 extension and comparisons with zero are done as unsigned. This is
10200 the case even on machines that can do fast sign extension, since
10201 zero-extension is easier to combine with other operations than
10202 sign-extension is. If we are comparing against a constant, we must
10203 convert it to what it would look like unsigned. */
10204 if ((code == EQ || code == NE) && ! unsignedp
10205 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10206 {
10207 if (GET_CODE (op1) == CONST_INT
10208 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10209 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10210 unsignedp = 1;
10211 }
10212 #endif
10213
10214 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10215
10216 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10217 }
10218
10219 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10220 The decision as to signed or unsigned comparison must be made by the caller.
10221
10222 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10223 compared.
10224
10225 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10226 size of MODE should be used. */
10227
10228 void
10229 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
10230 if_false_label, if_true_label)
10231 rtx op0, op1;
10232 enum rtx_code code;
10233 int unsignedp;
10234 enum machine_mode mode;
10235 rtx size;
10236 unsigned int align;
10237 rtx if_false_label, if_true_label;
10238 {
10239 rtx tem;
10240 int dummy_true_label = 0;
10241
10242 /* Reverse the comparison if that is safe and we want to jump if it is
10243 false. */
10244 if (! if_true_label && ! FLOAT_MODE_P (mode))
10245 {
10246 if_true_label = if_false_label;
10247 if_false_label = 0;
10248 code = reverse_condition (code);
10249 }
10250
10251 /* If one operand is constant, make it the second one. Only do this
10252 if the other operand is not constant as well. */
10253
10254 if (swap_commutative_operands_p (op0, op1))
10255 {
10256 tem = op0;
10257 op0 = op1;
10258 op1 = tem;
10259 code = swap_condition (code);
10260 }
10261
10262 if (flag_force_mem)
10263 {
10264 op0 = force_not_mem (op0);
10265 op1 = force_not_mem (op1);
10266 }
10267
10268 do_pending_stack_adjust ();
10269
10270 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10271 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10272 {
10273 if (tem == const_true_rtx)
10274 {
10275 if (if_true_label)
10276 emit_jump (if_true_label);
10277 }
10278 else
10279 {
10280 if (if_false_label)
10281 emit_jump (if_false_label);
10282 }
10283 return;
10284 }
10285
10286 #if 0
10287 /* There's no need to do this now that combine.c can eliminate lots of
10288 sign extensions. This can be less efficient in certain cases on other
10289 machines. */
10290
10291 /* If this is a signed equality comparison, we can do it as an
10292 unsigned comparison since zero-extension is cheaper than sign
10293 extension and comparisons with zero are done as unsigned. This is
10294 the case even on machines that can do fast sign extension, since
10295 zero-extension is easier to combine with other operations than
10296 sign-extension is. If we are comparing against a constant, we must
10297 convert it to what it would look like unsigned. */
10298 if ((code == EQ || code == NE) && ! unsignedp
10299 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10300 {
10301 if (GET_CODE (op1) == CONST_INT
10302 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10303 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10304 unsignedp = 1;
10305 }
10306 #endif
10307
10308 if (! if_true_label)
10309 {
10310 dummy_true_label = 1;
10311 if_true_label = gen_label_rtx ();
10312 }
10313
10314 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10315 if_true_label);
10316
10317 if (if_false_label)
10318 emit_jump (if_false_label);
10319 if (dummy_true_label)
10320 emit_label (if_true_label);
10321 }
10322
10323 /* Generate code for a comparison expression EXP (including code to compute
10324 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10325 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10326 generated code will drop through.
10327 SIGNED_CODE should be the rtx operation for this comparison for
10328 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10329
10330 We force a stack adjustment unless there are currently
10331 things pushed on the stack that aren't yet used. */
10332
10333 static void
10334 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10335 if_true_label)
10336 tree exp;
10337 enum rtx_code signed_code, unsigned_code;
10338 rtx if_false_label, if_true_label;
10339 {
10340 unsigned int align0, align1;
10341 rtx op0, op1;
10342 tree type;
10343 enum machine_mode mode;
10344 int unsignedp;
10345 enum rtx_code code;
10346
10347 /* Don't crash if the comparison was erroneous. */
10348 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10349 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10350 return;
10351
10352 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10353 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10354 return;
10355
10356 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10357 mode = TYPE_MODE (type);
10358 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10359 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10360 || (GET_MODE_BITSIZE (mode)
10361 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10362 1)))))))
10363 {
10364 /* op0 might have been replaced by promoted constant, in which
10365 case the type of second argument should be used. */
10366 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10367 mode = TYPE_MODE (type);
10368 }
10369 unsignedp = TREE_UNSIGNED (type);
10370 code = unsignedp ? unsigned_code : signed_code;
10371
10372 #ifdef HAVE_canonicalize_funcptr_for_compare
10373 /* If function pointers need to be "canonicalized" before they can
10374 be reliably compared, then canonicalize them. */
10375 if (HAVE_canonicalize_funcptr_for_compare
10376 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10377 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10378 == FUNCTION_TYPE))
10379 {
10380 rtx new_op0 = gen_reg_rtx (mode);
10381
10382 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10383 op0 = new_op0;
10384 }
10385
10386 if (HAVE_canonicalize_funcptr_for_compare
10387 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10388 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10389 == FUNCTION_TYPE))
10390 {
10391 rtx new_op1 = gen_reg_rtx (mode);
10392
10393 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10394 op1 = new_op1;
10395 }
10396 #endif
10397
10398 /* Do any postincrements in the expression that was tested. */
10399 emit_queue ();
10400
10401 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10402 ((mode == BLKmode)
10403 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10404 MIN (align0, align1),
10405 if_false_label, if_true_label);
10406 }
10407 \f
10408 /* Generate code to calculate EXP using a store-flag instruction
10409 and return an rtx for the result. EXP is either a comparison
10410 or a TRUTH_NOT_EXPR whose operand is a comparison.
10411
10412 If TARGET is nonzero, store the result there if convenient.
10413
10414 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
10415 cheap.
10416
10417 Return zero if there is no suitable set-flag instruction
10418 available on this machine.
10419
10420 Once expand_expr has been called on the arguments of the comparison,
10421 we are committed to doing the store flag, since it is not safe to
10422 re-evaluate the expression. We emit the store-flag insn by calling
10423 emit_store_flag, but only expand the arguments if we have a reason
10424 to believe that emit_store_flag will be successful. If we think that
10425 it will, but it isn't, we have to simulate the store-flag with a
10426 set/jump/set sequence. */
10427
10428 static rtx
10429 do_store_flag (exp, target, mode, only_cheap)
10430 tree exp;
10431 rtx target;
10432 enum machine_mode mode;
10433 int only_cheap;
10434 {
10435 enum rtx_code code;
10436 tree arg0, arg1, type;
10437 tree tem;
10438 enum machine_mode operand_mode;
10439 int invert = 0;
10440 int unsignedp;
10441 rtx op0, op1;
10442 enum insn_code icode;
10443 rtx subtarget = target;
10444 rtx result, label;
10445
10446 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10447 result at the end. We can't simply invert the test since it would
10448 have already been inverted if it were valid. This case occurs for
10449 some floating-point comparisons. */
10450
10451 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10452 invert = 1, exp = TREE_OPERAND (exp, 0);
10453
10454 arg0 = TREE_OPERAND (exp, 0);
10455 arg1 = TREE_OPERAND (exp, 1);
10456
10457 /* Don't crash if the comparison was erroneous. */
10458 if (arg0 == error_mark_node || arg1 == error_mark_node)
10459 return const0_rtx;
10460
10461 type = TREE_TYPE (arg0);
10462 operand_mode = TYPE_MODE (type);
10463 unsignedp = TREE_UNSIGNED (type);
10464
10465 /* We won't bother with BLKmode store-flag operations because it would mean
10466 passing a lot of information to emit_store_flag. */
10467 if (operand_mode == BLKmode)
10468 return 0;
10469
10470 /* We won't bother with store-flag operations involving function pointers
10471 when function pointers must be canonicalized before comparisons. */
10472 #ifdef HAVE_canonicalize_funcptr_for_compare
10473 if (HAVE_canonicalize_funcptr_for_compare
10474 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10475 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10476 == FUNCTION_TYPE))
10477 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10478 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10479 == FUNCTION_TYPE))))
10480 return 0;
10481 #endif
10482
10483 STRIP_NOPS (arg0);
10484 STRIP_NOPS (arg1);
10485
10486 /* Get the rtx comparison code to use. We know that EXP is a comparison
10487 operation of some type. Some comparisons against 1 and -1 can be
10488 converted to comparisons with zero. Do so here so that the tests
10489 below will be aware that we have a comparison with zero. These
10490 tests will not catch constants in the first operand, but constants
10491 are rarely passed as the first operand. */
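  /* Worked examples (added for clarity, not part of the original comment):
     x < 1  becomes  x <= 0  and  x >= 1  becomes  x > 0  whether signed or
     unsigned, while for signed operands  x <= -1  becomes  x < 0  and
     x > -1  becomes  x >= 0.  */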
10492
10493 switch (TREE_CODE (exp))
10494 {
10495 case EQ_EXPR:
10496 code = EQ;
10497 break;
10498 case NE_EXPR:
10499 code = NE;
10500 break;
10501 case LT_EXPR:
10502 if (integer_onep (arg1))
10503 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10504 else
10505 code = unsignedp ? LTU : LT;
10506 break;
10507 case LE_EXPR:
10508 if (! unsignedp && integer_all_onesp (arg1))
10509 arg1 = integer_zero_node, code = LT;
10510 else
10511 code = unsignedp ? LEU : LE;
10512 break;
10513 case GT_EXPR:
10514 if (! unsignedp && integer_all_onesp (arg1))
10515 arg1 = integer_zero_node, code = GE;
10516 else
10517 code = unsignedp ? GTU : GT;
10518 break;
10519 case GE_EXPR:
10520 if (integer_onep (arg1))
10521 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10522 else
10523 code = unsignedp ? GEU : GE;
10524 break;
10525
10526 case UNORDERED_EXPR:
10527 code = UNORDERED;
10528 break;
10529 case ORDERED_EXPR:
10530 code = ORDERED;
10531 break;
10532 case UNLT_EXPR:
10533 code = UNLT;
10534 break;
10535 case UNLE_EXPR:
10536 code = UNLE;
10537 break;
10538 case UNGT_EXPR:
10539 code = UNGT;
10540 break;
10541 case UNGE_EXPR:
10542 code = UNGE;
10543 break;
10544 case UNEQ_EXPR:
10545 code = UNEQ;
10546 break;
10547
10548 default:
10549 abort ();
10550 }
10551
10552 /* Put a constant second. */
10553 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10554 {
10555 tem = arg0; arg0 = arg1; arg1 = tem;
10556 code = swap_condition (code);
10557 }
10558
10559 /* If this is an equality or inequality test of a single bit, we can
10560 do this by shifting the bit being tested to the low-order bit and
10561 masking the result with the constant 1. If the condition was EQ,
10562 we xor it with 1. This does not require an scc insn and is faster
10563 than an scc insn even if we have it. */
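  /* Illustrative example (added for clarity, not part of the original
     comment): a test such as  (x & 0x08) != 0  is expanded below as
     (x >> 3) & 1,  and  (x & 0x08) == 0  as  ((x >> 3) & 1) ^ 1;  the
     trailing AND is omitted when the tested bit is the most significant
     bit of the type, since the (logical) shift then already leaves 0
     or 1.  */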
10564
10565 if ((code == NE || code == EQ)
10566 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10567 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10568 {
10569 tree inner = TREE_OPERAND (arg0, 0);
10570 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10571 int ops_unsignedp;
10572
10573 /* If INNER is a right shift of a constant and it plus BITNUM does
10574 not overflow, adjust BITNUM and INNER.  */
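      /* For example (illustration only, not part of the original comment):
         for  ((x >> 2) & 4) != 0  we have INNER = x >> 2 and BITNUM = 2;
         the adjustment below rewrites this as a test of bit 2 + 2 = 4 of
         x itself, i.e. INNER = x and BITNUM = 4.  */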
10575
10576 if (TREE_CODE (inner) == RSHIFT_EXPR
10577 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10578 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10579 && bitnum < TYPE_PRECISION (type)
10580 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10581 bitnum - TYPE_PRECISION (type)))
10582 {
10583 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10584 inner = TREE_OPERAND (inner, 0);
10585 }
10586
10587 /* If we are going to be able to omit the AND below, we must do our
10588 operations as unsigned. If we must use the AND, we have a choice.
10589 Normally unsigned is faster, but for some machines signed is. */
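      /* For example (illustration only): with a 32-bit operand and
         BITNUM == 31, a logical (unsigned) right shift by 31 already
         leaves 0 or 1, so the AND below can be dropped; an arithmetic
         shift would leave 0 or -1 and would still need the masking.  */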
10590 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10591 #ifdef LOAD_EXTEND_OP
10592 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10593 #else
10594 : 1
10595 #endif
10596 );
10597
10598 if (! get_subtarget (subtarget)
10599 || GET_MODE (subtarget) != operand_mode
10600 || ! safe_from_p (subtarget, inner, 1))
10601 subtarget = 0;
10602
10603 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10604
10605 if (bitnum != 0)
10606 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10607 size_int (bitnum), subtarget, ops_unsignedp);
10608
10609 if (GET_MODE (op0) != mode)
10610 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10611
10612 if ((code == EQ && ! invert) || (code == NE && invert))
10613 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10614 ops_unsignedp, OPTAB_LIB_WIDEN);
10615
10616 /* Put the AND last so it can combine with more things. */
10617 if (bitnum != TYPE_PRECISION (type) - 1)
10618 op0 = expand_and (op0, const1_rtx, subtarget);
10619
10620 return op0;
10621 }
10622
10623 /* Now see if we are likely to be able to do this. Return if not. */
10624 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10625 return 0;
10626
10627 icode = setcc_gen_code[(int) code];
10628 if (icode == CODE_FOR_nothing
10629 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10630 {
10631 /* We can only do this if it is one of the special cases that
10632 can be handled without an scc insn. */
10633 if ((code == LT && integer_zerop (arg1))
10634 || (! only_cheap && code == GE && integer_zerop (arg1)))
10635 ;
10636 else if (BRANCH_COST >= 0
10637 && ! only_cheap && (code == NE || code == EQ)
10638 && TREE_CODE (type) != REAL_TYPE
10639 && ((abs_optab->handlers[(int) operand_mode].insn_code
10640 != CODE_FOR_nothing)
10641 || (ffs_optab->handlers[(int) operand_mode].insn_code
10642 != CODE_FOR_nothing)))
10643 ;
10644 else
10645 return 0;
10646 }
10647
10648 if (! get_subtarget (target)
10649 || GET_MODE (subtarget) != operand_mode
10650 || ! safe_from_p (subtarget, arg1, 1))
10651 subtarget = 0;
10652
10653 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10654 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10655
10656 if (target == 0)
10657 target = gen_reg_rtx (mode);
10658
10659 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10660 because, if emit_store_flag does anything at all it will succeed, and
10661 OP0 and OP1 will not be used subsequently. */
10662
10663 result = emit_store_flag (target, code,
10664 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10665 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10666 operand_mode, unsignedp, 1);
10667
10668 if (result)
10669 {
10670 if (invert)
10671 result = expand_binop (mode, xor_optab, result, const1_rtx,
10672 result, 0, OPTAB_LIB_WIDEN);
10673 return result;
10674 }
10675
10676 /* If this failed, we have to do this with set/compare/jump/set code. */
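  /* Schematically (illustration only), the fallback emitted below is:

         target = 1;                      (0 if INVERT)
         if (OP0 <code> OP1) goto label;
         target = 0;                      (1 if INVERT)
       label:                                                          */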
10677 if (GET_CODE (target) != REG
10678 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10679 target = gen_reg_rtx (GET_MODE (target));
10680
10681 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10682 result = compare_from_rtx (op0, op1, code, unsignedp,
10683 operand_mode, NULL_RTX, 0);
10684 if (GET_CODE (result) == CONST_INT)
10685 return (((result == const0_rtx && ! invert)
10686 || (result != const0_rtx && invert))
10687 ? const0_rtx : const1_rtx);
10688
10689 label = gen_label_rtx ();
10690 if (bcc_gen_fctn[(int) code] == 0)
10691 abort ();
10692
10693 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10694 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10695 emit_label (label);
10696
10697 return target;
10698 }
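
/* Usage note (added for clarity, not part of the original file): a C
   assignment such as  flag = (a < b);  reaches do_store_flag from
   expand_expr.  On a machine whose setcc_gen_code table provides an
   s<cond> pattern for the comparison, a single store-flag insn results;
   otherwise the set/compare/jump/set fallback above is used, or 0 is
   returned and the caller expands the comparison as a branch.  */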
10699 \f
10700
10701 /* Stubs in case we haven't got a casesi insn. */
10702 #ifndef HAVE_casesi
10703 # define HAVE_casesi 0
10704 # define gen_casesi(a, b, c, d, e) (0)
10705 # define CODE_FOR_casesi CODE_FOR_nothing
10706 #endif
10707
10708 /* If the machine does not have a case insn that compares the bounds,
10709 this means extra overhead for dispatch tables, which raises the
10710 threshold for using them. */
10711 #ifndef CASE_VALUES_THRESHOLD
10712 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10713 #endif /* CASE_VALUES_THRESHOLD */
10714
10715 unsigned int
10716 case_values_threshold ()
10717 {
10718 return CASE_VALUES_THRESHOLD;
10719 }
10720
10721 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10722 0 otherwise (i.e. if there is no casesi instruction). */
10723 int
10724 try_casesi (index_type, index_expr, minval, range,
10725 table_label, default_label)
10726 tree index_type, index_expr, minval, range;
10727 rtx table_label ATTRIBUTE_UNUSED;
10728 rtx default_label;
10729 {
10730 enum machine_mode index_mode = SImode;
10731 int index_bits = GET_MODE_BITSIZE (index_mode);
10732 rtx op1, op2, index;
10733 enum machine_mode op_mode;
10734
10735 if (! HAVE_casesi)
10736 return 0;
10737
10738 /* Convert the index to SImode. */
10739 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10740 {
10741 enum machine_mode omode = TYPE_MODE (index_type);
10742 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10743
10744 /* We must handle the endpoints in the original mode. */
10745 index_expr = build (MINUS_EXPR, index_type,
10746 index_expr, minval);
10747 minval = integer_zero_node;
10748 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10749 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10750 omode, 1, 0, default_label);
10751 /* Now we can safely truncate. */
10752 index = convert_to_mode (index_mode, index, 0);
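      /* Rationale example (added for clarity, not part of the original
         source): with a DImode index and an SImode casesi, doing the
         bound check in DImode first means a value such as 0x100000002 is
         sent to DEFAULT_LABEL instead of being truncated to 2 and
         dispatched into the table.  */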
10753 }
10754 else
10755 {
10756 if (TYPE_MODE (index_type) != index_mode)
10757 {
10758 index_expr = convert (type_for_size (index_bits, 0),
10759 index_expr);
10760 index_type = TREE_TYPE (index_expr);
10761 }
10762
10763 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10764 }
10765 emit_queue ();
10766 index = protect_from_queue (index, 0);
10767 do_pending_stack_adjust ();
10768
10769 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10770 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10771 (index, op_mode))
10772 index = copy_to_mode_reg (op_mode, index);
10773
10774 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10775
10776 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10777 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10778 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10779 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10780 (op1, op_mode))
10781 op1 = copy_to_mode_reg (op_mode, op1);
10782
10783 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10784
10785 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10786 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10787 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10788 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10789 (op2, op_mode))
10790 op2 = copy_to_mode_reg (op_mode, op2);
10791
10792 emit_jump_insn (gen_casesi (index, op1, op2,
10793 table_label, default_label));
10794 return 1;
10795 }
10796
10797 /* Attempt to generate a tablejump instruction; same concept. */
10798 #ifndef HAVE_tablejump
10799 #define HAVE_tablejump 0
10800 #define gen_tablejump(x, y) (0)
10801 #endif
10802
10803 /* Subroutine of the next function.
10804
10805 INDEX is the value being switched on, with the lowest value
10806 in the table already subtracted.
10807 MODE is its expected mode (needed if INDEX is constant).
10808 RANGE is the length of the jump table.
10809 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10810
10811 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10812 index value is out of range. */
10813
10814 static void
10815 do_tablejump (index, mode, range, table_label, default_label)
10816 rtx index, range, table_label, default_label;
10817 enum machine_mode mode;
10818 {
10819 rtx temp, vector;
10820
10821 /* Do an unsigned comparison (in the proper mode) between the index
10822 expression and the value which represents the length of the range.
10823 Since we just finished subtracting the lower bound of the range
10824 from the index expression, this comparison allows us to simultaneously
10825 check that the original index expression value is both greater than
10826 or equal to the minimum value of the range and less than or equal to
10827 the maximum value of the range. */
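  /* For example (illustration only, not part of the original comment):
     with case values 3 .. 10, RANGE is 7 and INDEX already has 3
     subtracted.  An original value of 1 becomes the huge unsigned value
     (unsigned) -2 and a value of 12 becomes 9, so the single unsigned
     test  INDEX > 7  (the GTU comparison below) rejects both.  */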
10828
10829 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10830 0, default_label);
10831
10832 /* If index is in range, it must fit in Pmode.
10833 Convert to Pmode so we can index with it. */
10834 if (mode != Pmode)
10835 index = convert_to_mode (Pmode, index, 1);
10836
10837 /* Don't let a MEM slip through, because then INDEX that comes
10838 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10839 and break_out_memory_refs will go to work on it and mess it up. */
10840 #ifdef PIC_CASE_VECTOR_ADDRESS
10841 if (flag_pic && GET_CODE (index) != REG)
10842 index = copy_to_mode_reg (Pmode, index);
10843 #endif
10844
10845 /* If flag_force_addr were to affect this address
10846 it could interfere with the tricky assumptions made
10847 about addresses that contain label-refs,
10848 which may be valid only very near the tablejump itself. */
10849 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10850 GET_MODE_SIZE, because this indicates how large insns are. The other
10851 uses should all be Pmode, because they are addresses. This code
10852 could fail if addresses and insns are not the same size. */
10853 index = gen_rtx_PLUS (Pmode,
10854 gen_rtx_MULT (Pmode, index,
10855 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10856 gen_rtx_LABEL_REF (Pmode, table_label));
10857 #ifdef PIC_CASE_VECTOR_ADDRESS
10858 if (flag_pic)
10859 index = PIC_CASE_VECTOR_ADDRESS (index);
10860 else
10861 #endif
10862 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10863 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10864 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10865 RTX_UNCHANGING_P (vector) = 1;
10866 convert_move (temp, vector, 0);
10867
10868 emit_jump_insn (gen_tablejump (temp, table_label));
10869
10870 /* If we are generating PIC code or if the table is PC-relative, the
10871 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10872 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10873 emit_barrier ();
10874 }
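
/* Illustrative sketch (added for clarity; not part of the original file and
   never compiled): roughly what the insn sequence emitted by do_tablejump
   amounts to, written in GNU C for a table of absolute addresses.  The PIC
   and PC-relative variants adjust the address computation as noted above.  */
#if 0
static void
tablejump_sketch (unsigned long index, unsigned long range,
                  void *const *table, void (*default_case) (void))
{
  if (index > range)            /* unsigned compare covers both bounds */
    {
      default_case ();
      return;
    }
  goto *table[index];           /* GNU C computed goto through the table */
}
#endif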
10875
10876 int
10877 try_tablejump (index_type, index_expr, minval, range,
10878 table_label, default_label)
10879 tree index_type, index_expr, minval, range;
10880 rtx table_label, default_label;
10881 {
10882 rtx index;
10883
10884 if (! HAVE_tablejump)
10885 return 0;
10886
10887 index_expr = fold (build (MINUS_EXPR, index_type,
10888 convert (index_type, index_expr),
10889 convert (index_type, minval)));
10890 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10891 emit_queue ();
10892 index = protect_from_queue (index, 0);
10893 do_pending_stack_adjust ();
10894
10895 do_tablejump (index, TYPE_MODE (index_type),
10896 convert_modes (TYPE_MODE (index_type),
10897 TYPE_MODE (TREE_TYPE (range)),
10898 expand_expr (range, NULL_RTX,
10899 VOIDmode, 0),
10900 TREE_UNSIGNED (TREE_TYPE (range))),
10901 table_label, default_label);
10902 return 1;
10903 }