1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
3 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22
23 #include "config.h"
24 #include "system.h"
25 #include "machmode.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "obstack.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-flags.h"
35 #include "insn-codes.h"
36 #include "insn-config.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "recog.h"
40 #include "reload.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "defaults.h"
44 #include "toplev.h"
45 #include "ggc.h"
46 #include "intl.h"
47 #include "tm_p.h"
48
49 #ifndef ACCUMULATE_OUTGOING_ARGS
50 #define ACCUMULATE_OUTGOING_ARGS 0
51 #endif
52
53 /* Supply a default definition for PUSH_ARGS. */
54 #ifndef PUSH_ARGS
55 #ifdef PUSH_ROUNDING
56 #define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
57 #else
58 #define PUSH_ARGS 0
59 #endif
60 #endif
61
62 /* Decide whether a function's arguments should be processed
63 from first to last or from last to first.
64
65 They should if the stack and args grow in opposite directions, but
66 only if we have push insns. */
67
68 #ifdef PUSH_ROUNDING
69
70 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
71 #define PUSH_ARGS_REVERSED /* If it's last to first */
72 #endif
73
74 #endif
75
76 #ifndef STACK_PUSH_CODE
77 #ifdef STACK_GROWS_DOWNWARD
78 #define STACK_PUSH_CODE PRE_DEC
79 #else
80 #define STACK_PUSH_CODE PRE_INC
81 #endif
82 #endif
83
84 /* Assume that case vectors are not pc-relative. */
85 #ifndef CASE_VECTOR_PC_RELATIVE
86 #define CASE_VECTOR_PC_RELATIVE 0
87 #endif
88
89 /* If this is nonzero, we do not bother generating VOLATILE
90 around volatile memory references, and we are willing to
91 output indirect addresses. If cse is to follow, we reject
92 indirect addresses so a useful potential cse is generated;
93 if it is used only once, instruction combination will produce
94 the same indirect address eventually. */
95 int cse_not_expected;
96
97 /* Nonzero to generate code for all the subroutines within an
98 expression before generating the upper levels of the expression.
99 Nowadays this is never zero. */
100 int do_preexpand_calls = 1;
101
102 /* Don't check memory usage, since code is being emitted to check memory
103 usage. Used when current_function_check_memory_usage is true, to avoid
104 infinite recursion. */
105 static int in_check_memory_usage;
106
107 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
108 static tree placeholder_list = 0;
109
110 /* This structure is used by move_by_pieces to describe the move to
111 be performed. */
112 struct move_by_pieces
113 {
114 rtx to;
115 rtx to_addr;
116 int autinc_to;
117 int explicit_inc_to;
118 rtx from;
119 rtx from_addr;
120 int autinc_from;
121 int explicit_inc_from;
122 unsigned HOST_WIDE_INT len;
123 HOST_WIDE_INT offset;
124 int reverse;
125 };
126
127 /* This structure is used by clear_by_pieces to describe the clear to
128 be performed. */
129
130 struct clear_by_pieces
131 {
132 rtx to;
133 rtx to_addr;
134 int autinc_to;
135 int explicit_inc_to;
136 unsigned HOST_WIDE_INT len;
137 HOST_WIDE_INT offset;
138 int reverse;
139 };
140
141 extern struct obstack permanent_obstack;
142
143 static rtx get_push_address PARAMS ((int));
144
145 static rtx enqueue_insn PARAMS ((rtx, rtx));
146 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
147 PARAMS ((unsigned HOST_WIDE_INT,
148 unsigned int));
149 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
150 struct move_by_pieces *));
151 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
152 unsigned int));
153 static void clear_by_pieces_1 PARAMS ((rtx (*) (rtx, ...),
154 enum machine_mode,
155 struct clear_by_pieces *));
156 static rtx get_subtarget PARAMS ((rtx));
157 static int is_zeros_p PARAMS ((tree));
158 static int mostly_zeros_p PARAMS ((tree));
159 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
160 HOST_WIDE_INT, enum machine_mode,
161 tree, tree, unsigned int, int));
162 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
163 HOST_WIDE_INT));
164 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
165 HOST_WIDE_INT, enum machine_mode,
166 tree, enum machine_mode, int,
167 unsigned int, HOST_WIDE_INT, int));
168 static enum memory_use_mode
169 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
170 static tree save_noncopied_parts PARAMS ((tree, tree));
171 static tree init_noncopied_parts PARAMS ((tree, tree));
172 static int safe_from_p PARAMS ((rtx, tree, int));
173 static int fixed_type_p PARAMS ((tree));
174 static rtx var_rtx PARAMS ((tree));
175 static int readonly_fields_p PARAMS ((tree));
176 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
177 static rtx expand_increment PARAMS ((tree, int, int));
178 static void preexpand_calls PARAMS ((tree));
179 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
180 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
181 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
182 rtx, rtx));
183 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
184
185 /* Record for each mode whether we can move a register directly to or
186 from an object of that mode in memory. If we can't, we won't try
187 to use that mode directly when accessing a field of that mode. */
188
189 static char direct_load[NUM_MACHINE_MODES];
190 static char direct_store[NUM_MACHINE_MODES];
191
192 /* If a memory-to-memory move would take MOVE_RATIO or more simple
193 move-instruction sequences, we will do a movstr or libcall instead. */
194
195 #ifndef MOVE_RATIO
196 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
197 #define MOVE_RATIO 2
198 #else
199 /* If we are optimizing for space (-Os), cut down the default move ratio. */
200 #define MOVE_RATIO (optimize_size ? 3 : 15)
201 #endif
202 #endif
203
204 /* This macro is used to determine whether move_by_pieces should be called
205 to perform a structure copy. */
206 #ifndef MOVE_BY_PIECES_P
207 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
208 (move_by_pieces_ninsns (SIZE, ALIGN) < MOVE_RATIO)
209 #endif
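
/* For instance, with the default MOVE_RATIO of 15 on a hypothetical 32-bit
   target that has no movstr patterns, a word-aligned 16-byte copy needs
   only 4 SImode moves, so MOVE_BY_PIECES_P is true and emit_block_move
   expands the copy inline instead of emitting a block-move pattern or a
   library call.  */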
210
211 /* This array records the insn_code of insns to perform block moves. */
212 enum insn_code movstr_optab[NUM_MACHINE_MODES];
213
214 /* This array records the insn_code of insns to perform block clears. */
215 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
216
217 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
218
219 #ifndef SLOW_UNALIGNED_ACCESS
220 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
221 #endif
222 \f
223 /* This is run once per compilation to set up which modes can be used
224 directly in memory and to initialize the block move optab. */
225
226 void
227 init_expr_once ()
228 {
229 rtx insn, pat;
230 enum machine_mode mode;
231 int num_clobbers;
232 rtx mem, mem1;
233 char *free_point;
234
235 start_sequence ();
236
237 /* Since we are on the permanent obstack, we must be sure we save this
238 spot AFTER we call start_sequence, since it will reuse the rtl it
239 makes. */
240 free_point = (char *) oballoc (0);
241
242 /* Try indexing by frame ptr and try by stack ptr.
243 It is known that on the Convex the stack ptr isn't a valid index.
244 With luck, one or the other is valid on any machine. */
245 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
246 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
247
248 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
249 pat = PATTERN (insn);
250
251 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
252 mode = (enum machine_mode) ((int) mode + 1))
253 {
254 int regno;
255 rtx reg;
256
257 direct_load[(int) mode] = direct_store[(int) mode] = 0;
258 PUT_MODE (mem, mode);
259 PUT_MODE (mem1, mode);
260
261 /* See if there is some register that can be used in this mode and
262 directly loaded or stored from memory. */
263
264 if (mode != VOIDmode && mode != BLKmode)
265 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
266 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
267 regno++)
268 {
269 if (! HARD_REGNO_MODE_OK (regno, mode))
270 continue;
271
272 reg = gen_rtx_REG (mode, regno);
273
274 SET_SRC (pat) = mem;
275 SET_DEST (pat) = reg;
276 if (recog (pat, insn, &num_clobbers) >= 0)
277 direct_load[(int) mode] = 1;
278
279 SET_SRC (pat) = mem1;
280 SET_DEST (pat) = reg;
281 if (recog (pat, insn, &num_clobbers) >= 0)
282 direct_load[(int) mode] = 1;
283
284 SET_SRC (pat) = reg;
285 SET_DEST (pat) = mem;
286 if (recog (pat, insn, &num_clobbers) >= 0)
287 direct_store[(int) mode] = 1;
288
289 SET_SRC (pat) = reg;
290 SET_DEST (pat) = mem1;
291 if (recog (pat, insn, &num_clobbers) >= 0)
292 direct_store[(int) mode] = 1;
293 }
294 }
295
296 end_sequence ();
297 obfree (free_point);
298 }
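
/* The direct_load/direct_store tables computed above are consulted later,
   e.g. by convert_move and convert_modes: a MEM is referenced directly in a
   narrower mode (via gen_lowpart) only when direct_load reports that the
   narrower mode really can be loaded straight from memory; otherwise the
   value is first forced into a register.  */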
299
300 /* This is run at the start of compiling a function. */
301
302 void
303 init_expr ()
304 {
305 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
306
307 pending_chain = 0;
308 pending_stack_adjust = 0;
309 stack_pointer_delta = 0;
310 inhibit_defer_pop = 0;
311 saveregs_value = 0;
312 apply_args_value = 0;
313 forced_labels = 0;
314 }
315
316 void
317 mark_expr_status (p)
318 struct expr_status *p;
319 {
320 if (p == NULL)
321 return;
322
323 ggc_mark_rtx (p->x_saveregs_value);
324 ggc_mark_rtx (p->x_apply_args_value);
325 ggc_mark_rtx (p->x_forced_labels);
326 }
327
328 void
329 free_expr_status (f)
330 struct function *f;
331 {
332 free (f->expr);
333 f->expr = NULL;
334 }
335
336 /* Small sanity check that the queue is empty at the end of a function. */
337
338 void
339 finish_expr_for_function ()
340 {
341 if (pending_chain)
342 abort ();
343 }
344 \f
345 /* Manage the queue of increment instructions to be output
346 for POSTINCREMENT_EXPR expressions, etc. */
347
348 /* Queue up to increment (or change) VAR later. BODY says how:
349 BODY should be the same thing you would pass to emit_insn
350 to increment right away. It will go to emit_insn later on.
351
352 The value is a QUEUED expression to be used in place of VAR
353 where you want to guarantee the pre-incrementation value of VAR. */
354
355 static rtx
356 enqueue_insn (var, body)
357 rtx var, body;
358 {
359 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
360 body, pending_chain);
361 return pending_chain;
362 }
363
364 /* Use protect_from_queue to convert a QUEUED expression
365 into something that you can put immediately into an instruction.
366 If the queued incrementation has not happened yet,
367 protect_from_queue returns the variable itself.
368 If the incrementation has happened, protect_from_queue returns a temp
369 that contains a copy of the old value of the variable.
370
371 Any time an rtx which might possibly be a QUEUED is to be put
372 into an instruction, it must be passed through protect_from_queue first.
373 QUEUED expressions are not meaningful in instructions.
374
375 Do not pass a value through protect_from_queue and then hold
376 on to it for a while before putting it in an instruction!
377 If the queue is flushed in between, incorrect code will result. */
378
379 rtx
380 protect_from_queue (x, modify)
381 register rtx x;
382 int modify;
383 {
384 register RTX_CODE code = GET_CODE (x);
385
386 #if 0 /* A QUEUED can hang around after the queue is forced out. */
387 /* Shortcut for most common case. */
388 if (pending_chain == 0)
389 return x;
390 #endif
391
392 if (code != QUEUED)
393 {
394 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
395 use of autoincrement. Make a copy of the contents of the memory
396 location rather than a copy of the address, but not if the value is
397 of mode BLKmode. Don't modify X in place since it might be
398 shared. */
399 if (code == MEM && GET_MODE (x) != BLKmode
400 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
401 {
402 register rtx y = XEXP (x, 0);
403 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
404
405 MEM_COPY_ATTRIBUTES (new, x);
406
407 if (QUEUED_INSN (y))
408 {
409 register rtx temp = gen_reg_rtx (GET_MODE (new));
410 emit_insn_before (gen_move_insn (temp, new),
411 QUEUED_INSN (y));
412 return temp;
413 }
414 return new;
415 }
416 /* Otherwise, recursively protect the subexpressions of all
417 the kinds of rtx's that can contain a QUEUED. */
418 if (code == MEM)
419 {
420 rtx tem = protect_from_queue (XEXP (x, 0), 0);
421 if (tem != XEXP (x, 0))
422 {
423 x = copy_rtx (x);
424 XEXP (x, 0) = tem;
425 }
426 }
427 else if (code == PLUS || code == MULT)
428 {
429 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
430 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
431 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
432 {
433 x = copy_rtx (x);
434 XEXP (x, 0) = new0;
435 XEXP (x, 1) = new1;
436 }
437 }
438 return x;
439 }
440 /* If the increment has not happened, use the variable itself. */
441 if (QUEUED_INSN (x) == 0)
442 return QUEUED_VAR (x);
443 /* If the increment has happened and a pre-increment copy exists,
444 use that copy. */
445 if (QUEUED_COPY (x) != 0)
446 return QUEUED_COPY (x);
447 /* The increment has happened but we haven't set up a pre-increment copy.
448 Set one up now, and use it. */
449 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
450 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
451 QUEUED_INSN (x));
452 return QUEUED_COPY (x);
453 }
454
455 /* Return nonzero if X contains a QUEUED expression:
456 if it contains anything that will be altered by a queued increment.
457 We handle only combinations of MEM, PLUS, MINUS and MULT operators
458 since memory addresses generally contain only those. */
459
460 int
461 queued_subexp_p (x)
462 rtx x;
463 {
464 register enum rtx_code code = GET_CODE (x);
465 switch (code)
466 {
467 case QUEUED:
468 return 1;
469 case MEM:
470 return queued_subexp_p (XEXP (x, 0));
471 case MULT:
472 case PLUS:
473 case MINUS:
474 return (queued_subexp_p (XEXP (x, 0))
475 || queued_subexp_p (XEXP (x, 1)));
476 default:
477 return 0;
478 }
479 }
480
481 /* Perform all the pending incrementations. */
482
483 void
484 emit_queue ()
485 {
486 register rtx p;
487 while ((p = pending_chain))
488 {
489 rtx body = QUEUED_BODY (p);
490
491 if (GET_CODE (body) == SEQUENCE)
492 {
493 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
494 emit_insn (QUEUED_BODY (p));
495 }
496 else
497 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
498 pending_chain = QUEUED_NEXT (p);
499 }
500 }
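
/* A minimal sketch of the queue protocol described above, kept under #if 0
   because it is purely illustrative; VAR is a hypothetical pseudo register
   and the SET built here is just one plausible body to hand to
   enqueue_insn.  */
#if 0
static void
queued_increment_sketch (var)
     rtx var;
{
  /* Queue "var = var + 1" instead of emitting it right away.  */
  rtx queued = enqueue_insn (var,
			     gen_rtx_SET (VOIDmode, var,
					  gen_rtx_PLUS (GET_MODE (var),
							var, const1_rtx)));

  /* QUEUED rtxs may not appear in insns; protect_from_queue yields VAR
     itself, or a copy of its pre-increment value if the increment has
     already been emitted.  */
  emit_move_insn (gen_reg_rtx (GET_MODE (var)),
		  protect_from_queue (queued, 0));

  /* Flush the queue; the queued increment is emitted here.  */
  emit_queue ();
}
#endif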
501 \f
502 /* Copy data from FROM to TO, where the machine modes are not the same.
503 Both modes may be integer, or both may be floating.
504 UNSIGNEDP should be nonzero if FROM is an unsigned type.
505 This causes zero-extension instead of sign-extension. */
506
507 void
508 convert_move (to, from, unsignedp)
509 register rtx to, from;
510 int unsignedp;
511 {
512 enum machine_mode to_mode = GET_MODE (to);
513 enum machine_mode from_mode = GET_MODE (from);
514 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
515 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
516 enum insn_code code;
517 rtx libcall;
518
519 /* rtx code for making an equivalent value. */
520 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
521
522 to = protect_from_queue (to, 1);
523 from = protect_from_queue (from, 0);
524
525 if (to_real != from_real)
526 abort ();
527
528 /* If FROM is a SUBREG that indicates that we have already done at least
529 the required extension, strip it. We don't handle such SUBREGs as
530 TO here. */
531
532 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
533 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
534 >= GET_MODE_SIZE (to_mode))
535 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
536 from = gen_lowpart (to_mode, from), from_mode = to_mode;
537
538 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
539 abort ();
540
541 if (to_mode == from_mode
542 || (from_mode == VOIDmode && CONSTANT_P (from)))
543 {
544 emit_move_insn (to, from);
545 return;
546 }
547
548 if (to_real)
549 {
550 rtx value;
551
552 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
553 {
554 /* Try converting directly if the insn is supported. */
555 if ((code = can_extend_p (to_mode, from_mode, 0))
556 != CODE_FOR_nothing)
557 {
558 emit_unop_insn (code, to, from, UNKNOWN);
559 return;
560 }
561 }
562
563 #ifdef HAVE_trunchfqf2
564 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
565 {
566 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
567 return;
568 }
569 #endif
570 #ifdef HAVE_trunctqfqf2
571 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
572 {
573 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
574 return;
575 }
576 #endif
577 #ifdef HAVE_truncsfqf2
578 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
579 {
580 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
581 return;
582 }
583 #endif
584 #ifdef HAVE_truncdfqf2
585 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
586 {
587 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
588 return;
589 }
590 #endif
591 #ifdef HAVE_truncxfqf2
592 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
593 {
594 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
595 return;
596 }
597 #endif
598 #ifdef HAVE_trunctfqf2
599 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
600 {
601 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
602 return;
603 }
604 #endif
605
606 #ifdef HAVE_trunctqfhf2
607 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
608 {
609 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
610 return;
611 }
612 #endif
613 #ifdef HAVE_truncsfhf2
614 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
615 {
616 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
617 return;
618 }
619 #endif
620 #ifdef HAVE_truncdfhf2
621 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
622 {
623 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
624 return;
625 }
626 #endif
627 #ifdef HAVE_truncxfhf2
628 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
629 {
630 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
631 return;
632 }
633 #endif
634 #ifdef HAVE_trunctfhf2
635 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
636 {
637 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
638 return;
639 }
640 #endif
641
642 #ifdef HAVE_truncsftqf2
643 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
644 {
645 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
646 return;
647 }
648 #endif
649 #ifdef HAVE_truncdftqf2
650 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
651 {
652 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
653 return;
654 }
655 #endif
656 #ifdef HAVE_truncxftqf2
657 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
658 {
659 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
660 return;
661 }
662 #endif
663 #ifdef HAVE_trunctftqf2
664 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
665 {
666 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
667 return;
668 }
669 #endif
670
671 #ifdef HAVE_truncdfsf2
672 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
673 {
674 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
675 return;
676 }
677 #endif
678 #ifdef HAVE_truncxfsf2
679 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
680 {
681 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
682 return;
683 }
684 #endif
685 #ifdef HAVE_trunctfsf2
686 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
687 {
688 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
689 return;
690 }
691 #endif
692 #ifdef HAVE_truncxfdf2
693 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
694 {
695 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
696 return;
697 }
698 #endif
699 #ifdef HAVE_trunctfdf2
700 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
701 {
702 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
703 return;
704 }
705 #endif
706
707 libcall = (rtx) 0;
708 switch (from_mode)
709 {
710 case SFmode:
711 switch (to_mode)
712 {
713 case DFmode:
714 libcall = extendsfdf2_libfunc;
715 break;
716
717 case XFmode:
718 libcall = extendsfxf2_libfunc;
719 break;
720
721 case TFmode:
722 libcall = extendsftf2_libfunc;
723 break;
724
725 default:
726 break;
727 }
728 break;
729
730 case DFmode:
731 switch (to_mode)
732 {
733 case SFmode:
734 libcall = truncdfsf2_libfunc;
735 break;
736
737 case XFmode:
738 libcall = extenddfxf2_libfunc;
739 break;
740
741 case TFmode:
742 libcall = extenddftf2_libfunc;
743 break;
744
745 default:
746 break;
747 }
748 break;
749
750 case XFmode:
751 switch (to_mode)
752 {
753 case SFmode:
754 libcall = truncxfsf2_libfunc;
755 break;
756
757 case DFmode:
758 libcall = truncxfdf2_libfunc;
759 break;
760
761 default:
762 break;
763 }
764 break;
765
766 case TFmode:
767 switch (to_mode)
768 {
769 case SFmode:
770 libcall = trunctfsf2_libfunc;
771 break;
772
773 case DFmode:
774 libcall = trunctfdf2_libfunc;
775 break;
776
777 default:
778 break;
779 }
780 break;
781
782 default:
783 break;
784 }
785
786 if (libcall == (rtx) 0)
787 /* This conversion is not implemented yet. */
788 abort ();
789
790 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
791 1, from, from_mode);
792 emit_move_insn (to, value);
793 return;
794 }
795
796 /* Now both modes are integers. */
797
798 /* Handle expanding beyond a word. */
799 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
800 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
801 {
802 rtx insns;
803 rtx lowpart;
804 rtx fill_value;
805 rtx lowfrom;
806 int i;
807 enum machine_mode lowpart_mode;
808 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
809
810 /* Try converting directly if the insn is supported. */
811 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
812 != CODE_FOR_nothing)
813 {
814 /* If FROM is a SUBREG, put it into a register. Do this
815 so that we always generate the same set of insns for
816 better cse'ing; if an intermediate assignment occurred,
817 we won't be doing the operation directly on the SUBREG. */
818 if (optimize > 0 && GET_CODE (from) == SUBREG)
819 from = force_reg (from_mode, from);
820 emit_unop_insn (code, to, from, equiv_code);
821 return;
822 }
823 /* Next, try converting via full word. */
824 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
825 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
826 != CODE_FOR_nothing))
827 {
828 if (GET_CODE (to) == REG)
829 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
830 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
831 emit_unop_insn (code, to,
832 gen_lowpart (word_mode, to), equiv_code);
833 return;
834 }
835
836 /* No special multiword conversion insn; do it by hand. */
837 start_sequence ();
838
839 /* Since we will turn this into a no conflict block, we must ensure
840 that the source does not overlap the target. */
841
842 if (reg_overlap_mentioned_p (to, from))
843 from = force_reg (from_mode, from);
844
845 /* Get a copy of FROM widened to a word, if necessary. */
846 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
847 lowpart_mode = word_mode;
848 else
849 lowpart_mode = from_mode;
850
851 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
852
853 lowpart = gen_lowpart (lowpart_mode, to);
854 emit_move_insn (lowpart, lowfrom);
855
856 /* Compute the value to put in each remaining word. */
857 if (unsignedp)
858 fill_value = const0_rtx;
859 else
860 {
861 #ifdef HAVE_slt
862 if (HAVE_slt
863 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
864 && STORE_FLAG_VALUE == -1)
865 {
866 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
867 lowpart_mode, 0, 0);
868 fill_value = gen_reg_rtx (word_mode);
869 emit_insn (gen_slt (fill_value));
870 }
871 else
872 #endif
873 {
874 fill_value
875 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
876 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
877 NULL_RTX, 0);
878 fill_value = convert_to_mode (word_mode, fill_value, 1);
879 }
880 }
881
882 /* Fill the remaining words. */
883 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
884 {
885 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
886 rtx subword = operand_subword (to, index, 1, to_mode);
887
888 if (subword == 0)
889 abort ();
890
891 if (fill_value != subword)
892 emit_move_insn (subword, fill_value);
893 }
894
895 insns = get_insns ();
896 end_sequence ();
897
898 emit_no_conflict_block (insns, to, from, NULL_RTX,
899 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
900 return;
901 }
902
903 /* Truncating multi-word to a word or less. */
904 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
905 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
906 {
907 if (!((GET_CODE (from) == MEM
908 && ! MEM_VOLATILE_P (from)
909 && direct_load[(int) to_mode]
910 && ! mode_dependent_address_p (XEXP (from, 0)))
911 || GET_CODE (from) == REG
912 || GET_CODE (from) == SUBREG))
913 from = force_reg (from_mode, from);
914 convert_move (to, gen_lowpart (word_mode, from), 0);
915 return;
916 }
917
918 /* Handle pointer conversion */ /* SPEE 900220 */
919 if (to_mode == PQImode)
920 {
921 if (from_mode != QImode)
922 from = convert_to_mode (QImode, from, unsignedp);
923
924 #ifdef HAVE_truncqipqi2
925 if (HAVE_truncqipqi2)
926 {
927 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
928 return;
929 }
930 #endif /* HAVE_truncqipqi2 */
931 abort ();
932 }
933
934 if (from_mode == PQImode)
935 {
936 if (to_mode != QImode)
937 {
938 from = convert_to_mode (QImode, from, unsignedp);
939 from_mode = QImode;
940 }
941 else
942 {
943 #ifdef HAVE_extendpqiqi2
944 if (HAVE_extendpqiqi2)
945 {
946 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
947 return;
948 }
949 #endif /* HAVE_extendpqiqi2 */
950 abort ();
951 }
952 }
953
954 if (to_mode == PSImode)
955 {
956 if (from_mode != SImode)
957 from = convert_to_mode (SImode, from, unsignedp);
958
959 #ifdef HAVE_truncsipsi2
960 if (HAVE_truncsipsi2)
961 {
962 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
963 return;
964 }
965 #endif /* HAVE_truncsipsi2 */
966 abort ();
967 }
968
969 if (from_mode == PSImode)
970 {
971 if (to_mode != SImode)
972 {
973 from = convert_to_mode (SImode, from, unsignedp);
974 from_mode = SImode;
975 }
976 else
977 {
978 #ifdef HAVE_extendpsisi2
979 if (HAVE_extendpsisi2)
980 {
981 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
982 return;
983 }
984 #endif /* HAVE_extendpsisi2 */
985 abort ();
986 }
987 }
988
989 if (to_mode == PDImode)
990 {
991 if (from_mode != DImode)
992 from = convert_to_mode (DImode, from, unsignedp);
993
994 #ifdef HAVE_truncdipdi2
995 if (HAVE_truncdipdi2)
996 {
997 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
998 return;
999 }
1000 #endif /* HAVE_truncdipdi2 */
1001 abort ();
1002 }
1003
1004 if (from_mode == PDImode)
1005 {
1006 if (to_mode != DImode)
1007 {
1008 from = convert_to_mode (DImode, from, unsignedp);
1009 from_mode = DImode;
1010 }
1011 else
1012 {
1013 #ifdef HAVE_extendpdidi2
1014 if (HAVE_extendpdidi2)
1015 {
1016 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1017 return;
1018 }
1019 #endif /* HAVE_extendpdidi2 */
1020 abort ();
1021 }
1022 }
1023
1024 /* Now follow all the conversions between integers
1025 no more than a word long. */
1026
1027 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1028 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1029 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1030 GET_MODE_BITSIZE (from_mode)))
1031 {
1032 if (!((GET_CODE (from) == MEM
1033 && ! MEM_VOLATILE_P (from)
1034 && direct_load[(int) to_mode]
1035 && ! mode_dependent_address_p (XEXP (from, 0)))
1036 || GET_CODE (from) == REG
1037 || GET_CODE (from) == SUBREG))
1038 from = force_reg (from_mode, from);
1039 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1040 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1041 from = copy_to_reg (from);
1042 emit_move_insn (to, gen_lowpart (to_mode, from));
1043 return;
1044 }
1045
1046 /* Handle extension. */
1047 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1048 {
1049 /* Convert directly if that works. */
1050 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1051 != CODE_FOR_nothing)
1052 {
1053 emit_unop_insn (code, to, from, equiv_code);
1054 return;
1055 }
1056 else
1057 {
1058 enum machine_mode intermediate;
1059 rtx tmp;
1060 tree shift_amount;
1061
1062 /* Search for a mode to convert via. */
1063 for (intermediate = from_mode; intermediate != VOIDmode;
1064 intermediate = GET_MODE_WIDER_MODE (intermediate))
1065 if (((can_extend_p (to_mode, intermediate, unsignedp)
1066 != CODE_FOR_nothing)
1067 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1068 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1069 GET_MODE_BITSIZE (intermediate))))
1070 && (can_extend_p (intermediate, from_mode, unsignedp)
1071 != CODE_FOR_nothing))
1072 {
1073 convert_move (to, convert_to_mode (intermediate, from,
1074 unsignedp), unsignedp);
1075 return;
1076 }
1077
1078 /* No suitable intermediate mode.
1079 Generate what we need with shifts. */
1080 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1081 - GET_MODE_BITSIZE (from_mode), 0);
1082 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1083 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1084 to, unsignedp);
1085 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1086 to, unsignedp);
1087 if (tmp != to)
1088 emit_move_insn (to, tmp);
1089 return;
1090 }
1091 }
1092
1093 /* Support special truncate insns for certain modes. */
1094
1095 if (from_mode == DImode && to_mode == SImode)
1096 {
1097 #ifdef HAVE_truncdisi2
1098 if (HAVE_truncdisi2)
1099 {
1100 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1101 return;
1102 }
1103 #endif
1104 convert_move (to, force_reg (from_mode, from), unsignedp);
1105 return;
1106 }
1107
1108 if (from_mode == DImode && to_mode == HImode)
1109 {
1110 #ifdef HAVE_truncdihi2
1111 if (HAVE_truncdihi2)
1112 {
1113 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1114 return;
1115 }
1116 #endif
1117 convert_move (to, force_reg (from_mode, from), unsignedp);
1118 return;
1119 }
1120
1121 if (from_mode == DImode && to_mode == QImode)
1122 {
1123 #ifdef HAVE_truncdiqi2
1124 if (HAVE_truncdiqi2)
1125 {
1126 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1127 return;
1128 }
1129 #endif
1130 convert_move (to, force_reg (from_mode, from), unsignedp);
1131 return;
1132 }
1133
1134 if (from_mode == SImode && to_mode == HImode)
1135 {
1136 #ifdef HAVE_truncsihi2
1137 if (HAVE_truncsihi2)
1138 {
1139 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1140 return;
1141 }
1142 #endif
1143 convert_move (to, force_reg (from_mode, from), unsignedp);
1144 return;
1145 }
1146
1147 if (from_mode == SImode && to_mode == QImode)
1148 {
1149 #ifdef HAVE_truncsiqi2
1150 if (HAVE_truncsiqi2)
1151 {
1152 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1153 return;
1154 }
1155 #endif
1156 convert_move (to, force_reg (from_mode, from), unsignedp);
1157 return;
1158 }
1159
1160 if (from_mode == HImode && to_mode == QImode)
1161 {
1162 #ifdef HAVE_trunchiqi2
1163 if (HAVE_trunchiqi2)
1164 {
1165 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1166 return;
1167 }
1168 #endif
1169 convert_move (to, force_reg (from_mode, from), unsignedp);
1170 return;
1171 }
1172
1173 if (from_mode == TImode && to_mode == DImode)
1174 {
1175 #ifdef HAVE_trunctidi2
1176 if (HAVE_trunctidi2)
1177 {
1178 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1179 return;
1180 }
1181 #endif
1182 convert_move (to, force_reg (from_mode, from), unsignedp);
1183 return;
1184 }
1185
1186 if (from_mode == TImode && to_mode == SImode)
1187 {
1188 #ifdef HAVE_trunctisi2
1189 if (HAVE_trunctisi2)
1190 {
1191 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1192 return;
1193 }
1194 #endif
1195 convert_move (to, force_reg (from_mode, from), unsignedp);
1196 return;
1197 }
1198
1199 if (from_mode == TImode && to_mode == HImode)
1200 {
1201 #ifdef HAVE_trunctihi2
1202 if (HAVE_trunctihi2)
1203 {
1204 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1205 return;
1206 }
1207 #endif
1208 convert_move (to, force_reg (from_mode, from), unsignedp);
1209 return;
1210 }
1211
1212 if (from_mode == TImode && to_mode == QImode)
1213 {
1214 #ifdef HAVE_trunctiqi2
1215 if (HAVE_trunctiqi2)
1216 {
1217 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1218 return;
1219 }
1220 #endif
1221 convert_move (to, force_reg (from_mode, from), unsignedp);
1222 return;
1223 }
1224
1225 /* Handle truncation of volatile memrefs, and so on;
1226 the things that couldn't be truncated directly,
1227 and for which there was no special instruction. */
1228 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1229 {
1230 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1231 emit_move_insn (to, temp);
1232 return;
1233 }
1234
1235 /* Mode combination is not recognized. */
1236 abort ();
1237 }
1238
1239 /* Return an rtx for a value that would result
1240 from converting X to mode MODE.
1241 Both X and MODE may be floating, or both integer.
1242 UNSIGNEDP is nonzero if X is an unsigned value.
1243 This can be done by referring to a part of X in place
1244 or by copying to a new temporary with conversion.
1245
1246 This function *must not* call protect_from_queue
1247 except when putting X into an insn (in which case convert_move does it). */
1248
1249 rtx
1250 convert_to_mode (mode, x, unsignedp)
1251 enum machine_mode mode;
1252 rtx x;
1253 int unsignedp;
1254 {
1255 return convert_modes (mode, VOIDmode, x, unsignedp);
1256 }
1257
1258 /* Return an rtx for a value that would result
1259 from converting X from mode OLDMODE to mode MODE.
1260 Both modes may be floating, or both integer.
1261 UNSIGNEDP is nonzero if X is an unsigned value.
1262
1263 This can be done by referring to a part of X in place
1264 or by copying to a new temporary with conversion.
1265
1266 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1267
1268 This function *must not* call protect_from_queue
1269 except when putting X into an insn (in which case convert_move does it). */
1270
1271 rtx
1272 convert_modes (mode, oldmode, x, unsignedp)
1273 enum machine_mode mode, oldmode;
1274 rtx x;
1275 int unsignedp;
1276 {
1277 register rtx temp;
1278
1279 /* If FROM is a SUBREG that indicates that we have already done at least
1280 the required extension, strip it. */
1281
1282 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1283 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1284 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1285 x = gen_lowpart (mode, x);
1286
1287 if (GET_MODE (x) != VOIDmode)
1288 oldmode = GET_MODE (x);
1289
1290 if (mode == oldmode)
1291 return x;
1292
1293 /* There is one case that we must handle specially: If we are converting
1294 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1295 we are to interpret the constant as unsigned, gen_lowpart will do
1296 the wrong thing if the constant appears negative. What we want to do is
1297 make the high-order word of the constant zero, not all ones. */
1298
1299 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1300 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1301 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1302 {
1303 HOST_WIDE_INT val = INTVAL (x);
1304
1305 if (oldmode != VOIDmode
1306 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1307 {
1308 int width = GET_MODE_BITSIZE (oldmode);
1309
1310 /* We need to zero extend VAL. */
1311 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1312 }
1313
1314 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1315 }
1316
1317 /* We can do this with a gen_lowpart if both desired and current modes
1318 are integer, and this is either a constant integer, a register, or a
1319 non-volatile MEM. Except for the constant case where MODE is no
1320 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1321
1322 if ((GET_CODE (x) == CONST_INT
1323 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1324 || (GET_MODE_CLASS (mode) == MODE_INT
1325 && GET_MODE_CLASS (oldmode) == MODE_INT
1326 && (GET_CODE (x) == CONST_DOUBLE
1327 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1328 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1329 && direct_load[(int) mode])
1330 || (GET_CODE (x) == REG
1331 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1332 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1333 {
1334 /* ?? If we don't know OLDMODE, we have to assume here that
1335 X does not need sign- or zero-extension. This may not be
1336 the case, but it's the best we can do. */
1337 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1338 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1339 {
1340 HOST_WIDE_INT val = INTVAL (x);
1341 int width = GET_MODE_BITSIZE (oldmode);
1342
1343 /* We must sign or zero-extend in this case. Start by
1344 zero-extending, then sign extend if we need to. */
1345 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1346 if (! unsignedp
1347 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1348 val |= (HOST_WIDE_INT) (-1) << width;
1349
1350 return GEN_INT (val);
1351 }
1352
1353 return gen_lowpart (mode, x);
1354 }
1355
1356 temp = gen_reg_rtx (mode);
1357 convert_move (temp, x, unsignedp);
1358 return temp;
1359 }
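
/* A minimal sketch of the two entry points above, assuming BYTE_REG is a
   hypothetical QImode pseudo; kept under #if 0 since it is only for
   illustration.  */
#if 0
static rtx
widen_byte_sketch (byte_reg)
     rtx byte_reg;
{
  rtx wide = gen_reg_rtx (SImode);

  /* Zero-extend into an existing SImode register ...  */
  convert_move (wide, byte_reg, 1);

  /* ... or sign-extend, letting convert_to_mode pick the result rtx.  */
  return convert_to_mode (SImode, byte_reg, 0);
}
#endif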
1360 \f
1361
1362 /* This macro is used to determine the largest unit size that
1363 move_by_pieces can use. */
1364
1365 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1366 move efficiently, as opposed to MOVE_MAX which is the maximum
1367 number of bytes we can move with a single instruction. */
1368
1369 #ifndef MOVE_MAX_PIECES
1370 #define MOVE_MAX_PIECES MOVE_MAX
1371 #endif
1372
1373 /* Generate several move instructions to copy LEN bytes
1374 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1375 The caller must pass FROM and TO
1376 through protect_from_queue before calling.
1377 ALIGN is the maximum alignment we can assume, in bits. */
1378
1379 void
1380 move_by_pieces (to, from, len, align)
1381 rtx to, from;
1382 unsigned HOST_WIDE_INT len;
1383 unsigned int align;
1384 {
1385 struct move_by_pieces data;
1386 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1387 unsigned int max_size = MOVE_MAX_PIECES + 1;
1388 enum machine_mode mode = VOIDmode, tmode;
1389 enum insn_code icode;
1390
1391 data.offset = 0;
1392 data.to_addr = to_addr;
1393 data.from_addr = from_addr;
1394 data.to = to;
1395 data.from = from;
1396 data.autinc_to
1397 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1398 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1399 data.autinc_from
1400 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1401 || GET_CODE (from_addr) == POST_INC
1402 || GET_CODE (from_addr) == POST_DEC);
1403
1404 data.explicit_inc_from = 0;
1405 data.explicit_inc_to = 0;
1406 data.reverse
1407 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1408 if (data.reverse) data.offset = len;
1409 data.len = len;
1410
1411 /* If copying requires more than two move insns,
1412 copy addresses to registers (to make displacements shorter)
1413 and use post-increment if available. */
1414 if (!(data.autinc_from && data.autinc_to)
1415 && move_by_pieces_ninsns (len, align) > 2)
1416 {
1417 /* Find the mode of the largest move... */
1418 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1419 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1420 if (GET_MODE_SIZE (tmode) < max_size)
1421 mode = tmode;
1422
1423 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1424 {
1425 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1426 data.autinc_from = 1;
1427 data.explicit_inc_from = -1;
1428 }
1429 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1430 {
1431 data.from_addr = copy_addr_to_reg (from_addr);
1432 data.autinc_from = 1;
1433 data.explicit_inc_from = 1;
1434 }
1435 if (!data.autinc_from && CONSTANT_P (from_addr))
1436 data.from_addr = copy_addr_to_reg (from_addr);
1437 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1438 {
1439 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1440 data.autinc_to = 1;
1441 data.explicit_inc_to = -1;
1442 }
1443 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1444 {
1445 data.to_addr = copy_addr_to_reg (to_addr);
1446 data.autinc_to = 1;
1447 data.explicit_inc_to = 1;
1448 }
1449 if (!data.autinc_to && CONSTANT_P (to_addr))
1450 data.to_addr = copy_addr_to_reg (to_addr);
1451 }
1452
1453 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1454 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1455 align = MOVE_MAX * BITS_PER_UNIT;
1456
1457 /* First move what we can in the largest integer mode, then go to
1458 successively smaller modes. */
1459
1460 while (max_size > 1)
1461 {
1462 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1463 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1464 if (GET_MODE_SIZE (tmode) < max_size)
1465 mode = tmode;
1466
1467 if (mode == VOIDmode)
1468 break;
1469
1470 icode = mov_optab->handlers[(int) mode].insn_code;
1471 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1472 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1473
1474 max_size = GET_MODE_SIZE (mode);
1475 }
1476
1477 /* The code above should have handled everything. */
1478 if (data.len > 0)
1479 abort ();
1480 }
1481
1482 /* Return the number of insns required to move L bytes by pieces.
1483 ALIGN (in bits) is the maximum alignment we can assume. */
1484
1485 static unsigned HOST_WIDE_INT
1486 move_by_pieces_ninsns (l, align)
1487 unsigned HOST_WIDE_INT l;
1488 unsigned int align;
1489 {
1490 unsigned HOST_WIDE_INT n_insns = 0;
1491 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1492
1493 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1494 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1495 align = MOVE_MAX * BITS_PER_UNIT;
1496
1497 while (max_size > 1)
1498 {
1499 enum machine_mode mode = VOIDmode, tmode;
1500 enum insn_code icode;
1501
1502 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1503 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1504 if (GET_MODE_SIZE (tmode) < max_size)
1505 mode = tmode;
1506
1507 if (mode == VOIDmode)
1508 break;
1509
1510 icode = mov_optab->handlers[(int) mode].insn_code;
1511 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1512 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1513
1514 max_size = GET_MODE_SIZE (mode);
1515 }
1516
1517 return n_insns;
1518 }
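
/* As a worked example: on a hypothetical 32-bit target with word-aligned
   operands and cheap unaligned accesses, moving 7 bytes takes one SImode
   move, one HImode move and one QImode move, so this function returns 3.  */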
1519
1520 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1521 with move instructions for mode MODE. GENFUN is the gen_... function
1522 to make a move insn for that mode. DATA has all the other info. */
1523
1524 static void
1525 move_by_pieces_1 (genfun, mode, data)
1526 rtx (*genfun) PARAMS ((rtx, ...));
1527 enum machine_mode mode;
1528 struct move_by_pieces *data;
1529 {
1530 unsigned int size = GET_MODE_SIZE (mode);
1531 rtx to1, from1;
1532
1533 while (data->len >= size)
1534 {
1535 if (data->reverse)
1536 data->offset -= size;
1537
1538 if (data->autinc_to)
1539 {
1540 to1 = gen_rtx_MEM (mode, data->to_addr);
1541 MEM_COPY_ATTRIBUTES (to1, data->to);
1542 }
1543 else
1544 to1 = change_address (data->to, mode,
1545 plus_constant (data->to_addr, data->offset));
1546
1547 if (data->autinc_from)
1548 {
1549 from1 = gen_rtx_MEM (mode, data->from_addr);
1550 MEM_COPY_ATTRIBUTES (from1, data->from);
1551 }
1552 else
1553 from1 = change_address (data->from, mode,
1554 plus_constant (data->from_addr, data->offset));
1555
1556 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1557 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1558 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1559 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1560
1561 emit_insn ((*genfun) (to1, from1));
1562
1563 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1564 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1565 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1566 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1567
1568 if (! data->reverse)
1569 data->offset += size;
1570
1571 data->len -= size;
1572 }
1573 }
1574 \f
1575 /* Emit code to move a block Y to a block X.
1576 This may be done with string-move instructions,
1577 with multiple scalar move instructions, or with a library call.
1578
1579 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1580 with mode BLKmode.
1581 SIZE is an rtx that says how long they are.
1582 ALIGN is the maximum alignment we can assume they have.
1583
1584 Return the address of the new block, if memcpy is called and returns it,
1585 0 otherwise. */
1586
1587 rtx
1588 emit_block_move (x, y, size, align)
1589 rtx x, y;
1590 rtx size;
1591 unsigned int align;
1592 {
1593 rtx retval = 0;
1594 #ifdef TARGET_MEM_FUNCTIONS
1595 static tree fn;
1596 tree call_expr, arg_list;
1597 #endif
1598
1599 if (GET_MODE (x) != BLKmode)
1600 abort ();
1601
1602 if (GET_MODE (y) != BLKmode)
1603 abort ();
1604
1605 x = protect_from_queue (x, 1);
1606 y = protect_from_queue (y, 0);
1607 size = protect_from_queue (size, 0);
1608
1609 if (GET_CODE (x) != MEM)
1610 abort ();
1611 if (GET_CODE (y) != MEM)
1612 abort ();
1613 if (size == 0)
1614 abort ();
1615
1616 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1617 move_by_pieces (x, y, INTVAL (size), align);
1618 else
1619 {
1620 /* Try the most limited insn first, because there's no point
1621 including more than one in the machine description unless
1622 the more limited one has some advantage. */
1623
1624 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1625 enum machine_mode mode;
1626
1627 /* Since this is a move insn, we don't care about volatility. */
1628 volatile_ok = 1;
1629
1630 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1631 mode = GET_MODE_WIDER_MODE (mode))
1632 {
1633 enum insn_code code = movstr_optab[(int) mode];
1634 insn_operand_predicate_fn pred;
1635
1636 if (code != CODE_FOR_nothing
1637 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1638 here because if SIZE is less than the mode mask, as it is
1639 returned by the macro, it will definitely be less than the
1640 actual mode mask. */
1641 && ((GET_CODE (size) == CONST_INT
1642 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1643 <= (GET_MODE_MASK (mode) >> 1)))
1644 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1645 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1646 || (*pred) (x, BLKmode))
1647 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1648 || (*pred) (y, BLKmode))
1649 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1650 || (*pred) (opalign, VOIDmode)))
1651 {
1652 rtx op2;
1653 rtx last = get_last_insn ();
1654 rtx pat;
1655
1656 op2 = convert_to_mode (mode, size, 1);
1657 pred = insn_data[(int) code].operand[2].predicate;
1658 if (pred != 0 && ! (*pred) (op2, mode))
1659 op2 = copy_to_mode_reg (mode, op2);
1660
1661 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1662 if (pat)
1663 {
1664 emit_insn (pat);
1665 volatile_ok = 0;
1666 return 0;
1667 }
1668 else
1669 delete_insns_since (last);
1670 }
1671 }
1672
1673 volatile_ok = 0;
1674
1675 /* X, Y, or SIZE may have been passed through protect_from_queue.
1676
1677 It is unsafe to save the value generated by protect_from_queue
1678 and reuse it later. Consider what happens if emit_queue is
1679 called before the return value from protect_from_queue is used.
1680
1681 Expansion of the CALL_EXPR below will call emit_queue before
1682 we are finished emitting RTL for argument setup. So if we are
1683 not careful we could get the wrong value for an argument.
1684
1685 To avoid this problem we go ahead and emit code to copy X, Y &
1686 SIZE into new pseudos. We can then place those new pseudos
1687 into an RTL_EXPR and use them later, even after a call to
1688 emit_queue.
1689
1690 Note this is not strictly needed for library calls since they
1691 do not call emit_queue before loading their arguments. However,
1692 we may need to have library calls call emit_queue in the future
1693 since failing to do so could cause problems for targets which
1694 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1695 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1696 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1697
1698 #ifdef TARGET_MEM_FUNCTIONS
1699 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1700 #else
1701 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1702 TREE_UNSIGNED (integer_type_node));
1703 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1704 #endif
1705
1706 #ifdef TARGET_MEM_FUNCTIONS
1707 /* It is incorrect to use the libcall calling conventions to call
1708 memcpy in this context.
1709
1710 This could be a user call to memcpy and the user may wish to
1711 examine the return value from memcpy.
1712
1713 For targets where libcalls and normal calls have different conventions
1714 for returning pointers, we could end up generating incorrect code.
1715
1716 So instead of using a libcall sequence we build up a suitable
1717 CALL_EXPR and expand the call in the normal fashion. */
1718 if (fn == NULL_TREE)
1719 {
1720 tree fntype;
1721
1722 /* This was copied from except.c, I don't know if all this is
1723 necessary in this context or not. */
1724 fn = get_identifier ("memcpy");
1725 push_obstacks_nochange ();
1726 end_temporary_allocation ();
1727 fntype = build_pointer_type (void_type_node);
1728 fntype = build_function_type (fntype, NULL_TREE);
1729 fn = build_decl (FUNCTION_DECL, fn, fntype);
1730 ggc_add_tree_root (&fn, 1);
1731 DECL_EXTERNAL (fn) = 1;
1732 TREE_PUBLIC (fn) = 1;
1733 DECL_ARTIFICIAL (fn) = 1;
1734 make_decl_rtl (fn, NULL_PTR, 1);
1735 assemble_external (fn);
1736 pop_obstacks ();
1737 }
1738
1739 /* We need to make an argument list for the function call.
1740
1741 memcpy has three arguments, the first two are void * addresses and
1742 the last is a size_t byte count for the copy. */
1743 arg_list
1744 = build_tree_list (NULL_TREE,
1745 make_tree (build_pointer_type (void_type_node), x));
1746 TREE_CHAIN (arg_list)
1747 = build_tree_list (NULL_TREE,
1748 make_tree (build_pointer_type (void_type_node), y));
1749 TREE_CHAIN (TREE_CHAIN (arg_list))
1750 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1751 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1752
1753 /* Now we have to build up the CALL_EXPR itself. */
1754 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1755 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1756 call_expr, arg_list, NULL_TREE);
1757 TREE_SIDE_EFFECTS (call_expr) = 1;
1758
1759 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1760 #else
1761 emit_library_call (bcopy_libfunc, 0,
1762 VOIDmode, 3, y, Pmode, x, Pmode,
1763 convert_to_mode (TYPE_MODE (integer_type_node), size,
1764 TREE_UNSIGNED (integer_type_node)),
1765 TYPE_MODE (integer_type_node));
1766 #endif
1767 }
1768
1769 return retval;
1770 }
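
/* A minimal sketch of a caller, assuming DST_ADDR and SRC_ADDR are
   hypothetical Pmode address pseudos and the operands are known to be
   32-bit aligned; kept under #if 0 since it is only for illustration.  */
#if 0
static void
block_copy_sketch (dst_addr, src_addr)
     rtx dst_addr, src_addr;
{
  rtx dst = gen_rtx_MEM (BLKmode, dst_addr);
  rtx src = gen_rtx_MEM (BLKmode, src_addr);

  /* Copy 32 bytes; the last argument is the alignment in bits.  Depending
     on MOVE_RATIO this is expanded by move_by_pieces, a movstr pattern,
     or a call to memcpy/bcopy.  */
  emit_block_move (dst, src, GEN_INT (32), 32);
}
#endif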
1771 \f
1772 /* Copy all or part of a value X into registers starting at REGNO.
1773 The number of registers to be filled is NREGS. */
1774
1775 void
1776 move_block_to_reg (regno, x, nregs, mode)
1777 int regno;
1778 rtx x;
1779 int nregs;
1780 enum machine_mode mode;
1781 {
1782 int i;
1783 #ifdef HAVE_load_multiple
1784 rtx pat;
1785 rtx last;
1786 #endif
1787
1788 if (nregs == 0)
1789 return;
1790
1791 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1792 x = validize_mem (force_const_mem (mode, x));
1793
1794 /* See if the machine can do this with a load multiple insn. */
1795 #ifdef HAVE_load_multiple
1796 if (HAVE_load_multiple)
1797 {
1798 last = get_last_insn ();
1799 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1800 GEN_INT (nregs));
1801 if (pat)
1802 {
1803 emit_insn (pat);
1804 return;
1805 }
1806 else
1807 delete_insns_since (last);
1808 }
1809 #endif
1810
1811 for (i = 0; i < nregs; i++)
1812 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1813 operand_subword_force (x, i, mode));
1814 }
1815
1816 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1817 The number of registers to be filled is NREGS. SIZE indicates the number
1818 of bytes in the object X. */
1819
1820
1821 void
1822 move_block_from_reg (regno, x, nregs, size)
1823 int regno;
1824 rtx x;
1825 int nregs;
1826 int size;
1827 {
1828 int i;
1829 #ifdef HAVE_store_multiple
1830 rtx pat;
1831 rtx last;
1832 #endif
1833 enum machine_mode mode;
1834
1835 /* If SIZE is that of a mode no bigger than a word, just use that
1836 mode's store operation. */
1837 if (size <= UNITS_PER_WORD
1838 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1839 {
1840 emit_move_insn (change_address (x, mode, NULL),
1841 gen_rtx_REG (mode, regno));
1842 return;
1843 }
1844
1845 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1846 to the left before storing to memory. Note that the previous test
1847 doesn't handle all cases (e.g. SIZE == 3). */
1848 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1849 {
1850 rtx tem = operand_subword (x, 0, 1, BLKmode);
1851 rtx shift;
1852
1853 if (tem == 0)
1854 abort ();
1855
1856 shift = expand_shift (LSHIFT_EXPR, word_mode,
1857 gen_rtx_REG (word_mode, regno),
1858 build_int_2 ((UNITS_PER_WORD - size)
1859 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1860 emit_move_insn (tem, shift);
1861 return;
1862 }
1863
1864 /* See if the machine can do this with a store multiple insn. */
1865 #ifdef HAVE_store_multiple
1866 if (HAVE_store_multiple)
1867 {
1868 last = get_last_insn ();
1869 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1870 GEN_INT (nregs));
1871 if (pat)
1872 {
1873 emit_insn (pat);
1874 return;
1875 }
1876 else
1877 delete_insns_since (last);
1878 }
1879 #endif
1880
1881 for (i = 0; i < nregs; i++)
1882 {
1883 rtx tem = operand_subword (x, i, 1, BLKmode);
1884
1885 if (tem == 0)
1886 abort ();
1887
1888 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1889 }
1890 }
1891
1892 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1893 registers represented by a PARALLEL. SSIZE represents the total size of
1894 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1895 SRC in bits. */
1896 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1897 the balance will be in what would be the low-order memory addresses, i.e.
1898 left justified for big endian, right justified for little endian. This
1899 happens to be true for the targets currently using this support. If this
1900 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1901 would be needed. */
1902
1903 void
1904 emit_group_load (dst, orig_src, ssize, align)
1905 rtx dst, orig_src;
1906 unsigned int align;
1907 int ssize;
1908 {
1909 rtx *tmps, src;
1910 int start, i;
1911
1912 if (GET_CODE (dst) != PARALLEL)
1913 abort ();
1914
1915 /* Check for a NULL entry, used to indicate that the parameter goes
1916 both on the stack and in registers. */
1917 if (XEXP (XVECEXP (dst, 0, 0), 0))
1918 start = 0;
1919 else
1920 start = 1;
1921
1922 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1923
1924 /* If we won't be loading directly from memory, protect the real source
1925 from strange tricks we might play. */
1926 src = orig_src;
1927 if (GET_CODE (src) != MEM)
1928 {
1929 if (GET_MODE (src) == VOIDmode)
1930 src = gen_reg_rtx (GET_MODE (dst));
1931 else
1932 src = gen_reg_rtx (GET_MODE (orig_src));
1933 emit_move_insn (src, orig_src);
1934 }
1935
1936 /* Process the pieces. */
1937 for (i = start; i < XVECLEN (dst, 0); i++)
1938 {
1939 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1940 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1941 unsigned int bytelen = GET_MODE_SIZE (mode);
1942 int shift = 0;
1943
1944 /* Handle trailing fragments that run over the size of the struct. */
1945 if (ssize >= 0 && bytepos + bytelen > ssize)
1946 {
1947 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1948 bytelen = ssize - bytepos;
1949 if (bytelen <= 0)
1950 abort ();
1951 }
1952
1953 /* Optimize the access just a bit. */
1954 if (GET_CODE (src) == MEM
1955 && align >= GET_MODE_ALIGNMENT (mode)
1956 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1957 && bytelen == GET_MODE_SIZE (mode))
1958 {
1959 tmps[i] = gen_reg_rtx (mode);
1960 emit_move_insn (tmps[i],
1961 change_address (src, mode,
1962 plus_constant (XEXP (src, 0),
1963 bytepos)));
1964 }
1965 else if (GET_CODE (src) == CONCAT)
1966 {
1967 if (bytepos == 0
1968 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1969 tmps[i] = XEXP (src, 0);
1970 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1971 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1972 tmps[i] = XEXP (src, 1);
1973 else
1974 abort ();
1975 }
1976 else
1977 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1978 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1979 mode, mode, align, ssize);
1980
1981 if (BYTES_BIG_ENDIAN && shift)
1982 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1983 tmps[i], 0, OPTAB_WIDEN);
1984 }
1985
1986 emit_queue();
1987
1988 /* Copy the extracted pieces into the proper (probable) hard regs. */
1989 for (i = start; i < XVECLEN (dst, 0); i++)
1990 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1991 }
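#if 0
/* Editor's sketch, not part of the original source: how a caller might
   describe a value that lives in two word-mode hard registers and load it
   from the memory rtx SRC.  R0_REG and R3_REG are hypothetical hard
   registers; each EXPR_LIST pairs a register with its byte offset into the
   value, which is the layout emit_group_load expects.  */
static void
example_group_load (src, r0_reg, r3_reg)
     rtx src, r0_reg, r3_reg;
{
  rtx dst
    = gen_rtx_PARALLEL (VOIDmode,
                        gen_rtvec (2,
                                   gen_rtx_EXPR_LIST (VOIDmode, r0_reg,
                                                      GEN_INT (0)),
                                   gen_rtx_EXPR_LIST (VOIDmode, r3_reg,
                                                      GEN_INT (UNITS_PER_WORD))));

  emit_group_load (dst, src, 2 * UNITS_PER_WORD, BITS_PER_WORD);
  emit_queue ();
}
#endif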
1992
1993 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1994 registers represented by a PARALLEL. SSIZE represents the total size of
1995 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
1996
1997 void
1998 emit_group_store (orig_dst, src, ssize, align)
1999 rtx orig_dst, src;
2000 int ssize;
2001 unsigned int align;
2002 {
2003 rtx *tmps, dst;
2004 int start, i;
2005
2006 if (GET_CODE (src) != PARALLEL)
2007 abort ();
2008
2009 /* Check for a NULL entry, used to indicate that the parameter goes
2010 both on the stack and in registers. */
2011 if (XEXP (XVECEXP (src, 0, 0), 0))
2012 start = 0;
2013 else
2014 start = 1;
2015
2016 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
2017
2018 /* Copy the (probable) hard regs into pseudos. */
2019 for (i = start; i < XVECLEN (src, 0); i++)
2020 {
2021 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2022 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2023 emit_move_insn (tmps[i], reg);
2024 }
2025 emit_queue();
2026
2027 /* If we won't be storing directly into memory, protect the real destination
2028 from strange tricks we might play. */
2029 dst = orig_dst;
2030 if (GET_CODE (dst) == PARALLEL)
2031 {
2032 rtx temp;
2033
2034 /* We can get a PARALLEL dst if there is a conditional expression in
2035 a return statement. In that case, the dst and src are the same,
2036 so no action is necessary. */
2037 if (rtx_equal_p (dst, src))
2038 return;
2039
2040 /* It is unclear if we can ever reach here, but we may as well handle
2041 it. Allocate a temporary, and split this into a store/load to/from
2042 the temporary. */
2043
2044 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2045 emit_group_store (temp, src, ssize, align);
2046 emit_group_load (dst, temp, ssize, align);
2047 return;
2048 }
2049 else if (GET_CODE (dst) != MEM)
2050 {
2051 dst = gen_reg_rtx (GET_MODE (orig_dst));
2052 /* Make life a bit easier for combine. */
2053 emit_move_insn (dst, const0_rtx);
2054 }
2055 else if (! MEM_IN_STRUCT_P (dst))
2056 {
2057 /* store_bit_field requires that memory operations have
2058 mem_in_struct_p set; we might not. */
2059
2060 dst = copy_rtx (orig_dst);
2061 MEM_SET_IN_STRUCT_P (dst, 1);
2062 }
2063
2064 /* Process the pieces. */
2065 for (i = start; i < XVECLEN (src, 0); i++)
2066 {
2067 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2068 enum machine_mode mode = GET_MODE (tmps[i]);
2069 unsigned int bytelen = GET_MODE_SIZE (mode);
2070
2071 /* Handle trailing fragments that run over the size of the struct. */
2072 if (ssize >= 0 && bytepos + bytelen > ssize)
2073 {
2074 if (BYTES_BIG_ENDIAN)
2075 {
2076 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2077 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2078 tmps[i], 0, OPTAB_WIDEN);
2079 }
2080 bytelen = ssize - bytepos;
2081 }
2082
2083 /* Optimize the access just a bit. */
2084 if (GET_CODE (dst) == MEM
2085 && align >= GET_MODE_ALIGNMENT (mode)
2086 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2087 && bytelen == GET_MODE_SIZE (mode))
2088 emit_move_insn (change_address (dst, mode,
2089 plus_constant (XEXP (dst, 0),
2090 bytepos)),
2091 tmps[i]);
2092 else
2093 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2094 mode, tmps[i], align, ssize);
2095 }
2096
2097 emit_queue();
2098
2099 /* Copy from the pseudo into the (probable) hard reg. */
2100 if (GET_CODE (dst) == REG)
2101 emit_move_insn (orig_dst, dst);
2102 }
2103
2104 /* Generate code to copy a BLKmode object of TYPE out of a
2105 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2106 is null, a stack temporary is created. TGTBLK is returned.
2107
2108 The primary purpose of this routine is to handle functions
2109 that return BLKmode structures in registers. Some machines
2110 (the PA for example) want to return all small structures
2111 in registers regardless of the structure's alignment. */
2112
2113 rtx
2114 copy_blkmode_from_reg (tgtblk, srcreg, type)
2115 rtx tgtblk;
2116 rtx srcreg;
2117 tree type;
2118 {
2119 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2120 rtx src = NULL, dst = NULL;
2121 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2122 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2123
2124 if (tgtblk == 0)
2125 {
2126 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2127 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2128 preserve_temp_slots (tgtblk);
2129 }
2130
2131 /* This code assumes srcreg is at least a full word. If it isn't,
2132 copy it into a new pseudo which is a full word. */
2133 if (GET_MODE (srcreg) != BLKmode
2134 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2135 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2136
2137 /* Structures whose size is not a multiple of a word are aligned
2138 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2139 machine, this means we must skip the empty high order bytes when
2140 calculating the bit offset. */
2141 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2142 big_endian_correction
2143 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2144
2145 /* Copy the structure BITSIZE bits at a time.
2146
2147 We could probably emit more efficient code for machines which do not use
2148 strict alignment, but it doesn't seem worth the effort at the current
2149 time. */
2150 for (bitpos = 0, xbitpos = big_endian_correction;
2151 bitpos < bytes * BITS_PER_UNIT;
2152 bitpos += bitsize, xbitpos += bitsize)
2153 {
2154 /* We need a new source operand each time xbitpos is on a
2155 word boundary and when xbitpos == big_endian_correction
2156 (the first time through). */
2157 if (xbitpos % BITS_PER_WORD == 0
2158 || xbitpos == big_endian_correction)
2159 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);
2160
2161 /* We need a new destination operand each time bitpos is on
2162 a word boundary. */
2163 if (bitpos % BITS_PER_WORD == 0)
2164 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2165
2166 /* Use xbitpos for the source extraction (right justified) and
2167 bitpos for the destination store (left justified). */
2168 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2169 extract_bit_field (src, bitsize,
2170 xbitpos % BITS_PER_WORD, 1,
2171 NULL_RTX, word_mode, word_mode,
2172 bitsize, BITS_PER_WORD),
2173 bitsize, BITS_PER_WORD);
2174 }
2175
2176 return tgtblk;
2177 }
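#if 0
/* Editor's sketch, not part of the original source: after a call whose
   BLKmode return value arrives in the hard register RETURN_REG, the value
   can be spilled like this.  Passing a null target makes
   copy_blkmode_from_reg allocate and return a stack temporary of the right
   size.  RETURN_REG and TYPE are hypothetical.  */
static rtx
example_copy_blkmode_return (return_reg, type)
     rtx return_reg;
     tree type;
{
  return copy_blkmode_from_reg (NULL_RTX, return_reg, type);
}
#endif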
2178
2179 /* Add a USE expression for REG to the (possibly empty) list pointed
2180 to by CALL_FUSAGE. REG must denote a hard register. */
2181
2182 void
2183 use_reg (call_fusage, reg)
2184 rtx *call_fusage, reg;
2185 {
2186 if (GET_CODE (reg) != REG
2187 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2188 abort();
2189
2190 *call_fusage
2191 = gen_rtx_EXPR_LIST (VOIDmode,
2192 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2193 }
2194
2195 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2196 starting at REGNO. All of these registers must be hard registers. */
2197
2198 void
2199 use_regs (call_fusage, regno, nregs)
2200 rtx *call_fusage;
2201 int regno;
2202 int nregs;
2203 {
2204 int i;
2205
2206 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2207 abort ();
2208
2209 for (i = 0; i < nregs; i++)
2210 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2211 }
2212
2213 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2214 PARALLEL REGS. This is for calls that pass values in multiple
2215 non-contiguous locations. The Irix 6 ABI has examples of this. */
2216
2217 void
2218 use_group_regs (call_fusage, regs)
2219 rtx *call_fusage;
2220 rtx regs;
2221 {
2222 int i;
2223
2224 for (i = 0; i < XVECLEN (regs, 0); i++)
2225 {
2226 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2227
2228 /* A NULL entry means the parameter goes both on the stack and in
2229 registers. This can also be a MEM for targets that pass values
2230 partially on the stack and partially in registers. */
2231 if (reg != 0 && GET_CODE (reg) == REG)
2232 use_reg (call_fusage, reg);
2233 }
2234 }
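#if 0
/* Editor's sketch, not part of the original source: recording register uses
   for a call whose first argument is passed in the hard register ARG_REG
   and whose second is passed in the PARALLEL ARG_PARALLEL.  The resulting
   list is what goes into CALL_INSN_FUNCTION_USAGE.  Both names are
   hypothetical; ARG_REG must be a hard register or use_reg aborts.  */
static void
example_record_call_usage (call_fusage, arg_reg, arg_parallel)
     rtx *call_fusage;
     rtx arg_reg;
     rtx arg_parallel;
{
  use_reg (call_fusage, arg_reg);
  use_group_regs (call_fusage, arg_parallel);
}
#endif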
2235 \f
2236 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2237 rtx with BLKmode). The caller must pass TO through protect_from_queue
2238 before calling. ALIGN is maximum alignment we can assume. */
2239
2240 static void
2241 clear_by_pieces (to, len, align)
2242 rtx to;
2243 unsigned HOST_WIDE_INT len;
2244 unsigned int align;
2245 {
2246 struct clear_by_pieces data;
2247 rtx to_addr = XEXP (to, 0);
2248 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2249 enum machine_mode mode = VOIDmode, tmode;
2250 enum insn_code icode;
2251
2252 data.offset = 0;
2253 data.to_addr = to_addr;
2254 data.to = to;
2255 data.autinc_to
2256 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2257 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2258
2259 data.explicit_inc_to = 0;
2260 data.reverse
2261 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2262 if (data.reverse) data.offset = len;
2263 data.len = len;
2264
2265 /* If copying requires more than two move insns,
2266 copy addresses to registers (to make displacements shorter)
2267 and use post-increment if available. */
2268 if (!data.autinc_to
2269 && move_by_pieces_ninsns (len, align) > 2)
2270 {
2271 /* Determine the main mode we'll be using. */
2272 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2273 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2274 if (GET_MODE_SIZE (tmode) < max_size)
2275 mode = tmode;
2276
2277 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2278 {
2279 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2280 data.autinc_to = 1;
2281 data.explicit_inc_to = -1;
2282 }
2283
2284 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse
2285 && ! data.autinc_to)
2286 {
2287 data.to_addr = copy_addr_to_reg (to_addr);
2288 data.autinc_to = 1;
2289 data.explicit_inc_to = 1;
2290 }
2291
2292 if ( !data.autinc_to && CONSTANT_P (to_addr))
2293 data.to_addr = copy_addr_to_reg (to_addr);
2294 }
2295
2296 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2297 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2298 align = MOVE_MAX * BITS_PER_UNIT;
2299
2300 /* First move what we can in the largest integer mode, then go to
2301 successively smaller modes. */
2302
2303 while (max_size > 1)
2304 {
2305 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2306 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2307 if (GET_MODE_SIZE (tmode) < max_size)
2308 mode = tmode;
2309
2310 if (mode == VOIDmode)
2311 break;
2312
2313 icode = mov_optab->handlers[(int) mode].insn_code;
2314 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2315 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2316
2317 max_size = GET_MODE_SIZE (mode);
2318 }
2319
2320 /* The code above should have handled everything. */
2321 if (data.len != 0)
2322 abort ();
2323 }
2324
2325 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2326 with move instructions for mode MODE. GENFUN is the gen_... function
2327 to make a move insn for that mode. DATA has all the other info. */
2328
2329 static void
2330 clear_by_pieces_1 (genfun, mode, data)
2331 rtx (*genfun) PARAMS ((rtx, ...));
2332 enum machine_mode mode;
2333 struct clear_by_pieces *data;
2334 {
2335 unsigned int size = GET_MODE_SIZE (mode);
2336 rtx to1;
2337
2338 while (data->len >= size)
2339 {
2340 if (data->reverse)
2341 data->offset -= size;
2342
2343 if (data->autinc_to)
2344 {
2345 to1 = gen_rtx_MEM (mode, data->to_addr);
2346 MEM_COPY_ATTRIBUTES (to1, data->to);
2347 }
2348 else
2349 to1 = change_address (data->to, mode,
2350 plus_constant (data->to_addr, data->offset));
2351
2352 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2353 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2354
2355 emit_insn ((*genfun) (to1, const0_rtx));
2356
2357 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2358 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2359
2360 if (! data->reverse)
2361 data->offset += size;
2362
2363 data->len -= size;
2364 }
2365 }
2366 \f
2367 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2368 its length in bytes and ALIGN is the maximum alignment we can assume it has.
2369
2370 If we call a function that returns the length of the block, return it. */
2371
2372 rtx
2373 clear_storage (object, size, align)
2374 rtx object;
2375 rtx size;
2376 unsigned int align;
2377 {
2378 #ifdef TARGET_MEM_FUNCTIONS
2379 static tree fn;
2380 tree call_expr, arg_list;
2381 #endif
2382 rtx retval = 0;
2383
2384 if (GET_MODE (object) == BLKmode)
2385 {
2386 object = protect_from_queue (object, 1);
2387 size = protect_from_queue (size, 0);
2388
2389 if (GET_CODE (size) == CONST_INT
2390 && MOVE_BY_PIECES_P (INTVAL (size), align))
2391 clear_by_pieces (object, INTVAL (size), align);
2392 else
2393 {
2394 /* Try the most limited insn first, because there's no point
2395 including more than one in the machine description unless
2396 the more limited one has some advantage. */
2397
2398 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2399 enum machine_mode mode;
2400
2401 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2402 mode = GET_MODE_WIDER_MODE (mode))
2403 {
2404 enum insn_code code = clrstr_optab[(int) mode];
2405 insn_operand_predicate_fn pred;
2406
2407 if (code != CODE_FOR_nothing
2408 /* We don't need MODE to be narrower than
2409 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2410 the mode mask, as it is returned by the macro, it will
2411 definitely be less than the actual mode mask. */
2412 && ((GET_CODE (size) == CONST_INT
2413 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2414 <= (GET_MODE_MASK (mode) >> 1)))
2415 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2416 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2417 || (*pred) (object, BLKmode))
2418 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2419 || (*pred) (opalign, VOIDmode)))
2420 {
2421 rtx op1;
2422 rtx last = get_last_insn ();
2423 rtx pat;
2424
2425 op1 = convert_to_mode (mode, size, 1);
2426 pred = insn_data[(int) code].operand[1].predicate;
2427 if (pred != 0 && ! (*pred) (op1, mode))
2428 op1 = copy_to_mode_reg (mode, op1);
2429
2430 pat = GEN_FCN ((int) code) (object, op1, opalign);
2431 if (pat)
2432 {
2433 emit_insn (pat);
2434 return 0;
2435 }
2436 else
2437 delete_insns_since (last);
2438 }
2439 }
2440
2441 /* OBJECT or SIZE may have been passed through protect_from_queue.
2442
2443 It is unsafe to save the value generated by protect_from_queue
2444 and reuse it later. Consider what happens if emit_queue is
2445 called before the return value from protect_from_queue is used.
2446
2447 Expansion of the CALL_EXPR below will call emit_queue before
2448 we are finished emitting RTL for argument setup. So if we are
2449 not careful we could get the wrong value for an argument.
2450
2451 To avoid this problem we go ahead and emit code to copy OBJECT
2452 and SIZE into new pseudos. We can then place those new pseudos
2453 into an RTL_EXPR and use them later, even after a call to
2454 emit_queue.
2455
2456 Note this is not strictly needed for library calls since they
2457 do not call emit_queue before loading their arguments. However,
2458 we may need to have library calls call emit_queue in the future
2459 since failing to do so could cause problems for targets which
2460 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2461 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2462
2463 #ifdef TARGET_MEM_FUNCTIONS
2464 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2465 #else
2466 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2467 TREE_UNSIGNED (integer_type_node));
2468 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2469 #endif
2470
2471
2472 #ifdef TARGET_MEM_FUNCTIONS
2473 /* It is incorrect to use the libcall calling conventions to call
2474 memset in this context.
2475
2476 This could be a user call to memset and the user may wish to
2477 examine the return value from memset.
2478
2479 For targets where libcalls and normal calls have different
2480 conventions for returning pointers, we could end up generating
2481 incorrect code.
2482
2483 So instead of using a libcall sequence we build up a suitable
2484 CALL_EXPR and expand the call in the normal fashion. */
2485 if (fn == NULL_TREE)
2486 {
2487 tree fntype;
2488
2489 /* This was copied from except.c; it is not clear whether all of it is
2490 necessary in this context. */
2491 fn = get_identifier ("memset");
2492 push_obstacks_nochange ();
2493 end_temporary_allocation ();
2494 fntype = build_pointer_type (void_type_node);
2495 fntype = build_function_type (fntype, NULL_TREE);
2496 fn = build_decl (FUNCTION_DECL, fn, fntype);
2497 ggc_add_tree_root (&fn, 1);
2498 DECL_EXTERNAL (fn) = 1;
2499 TREE_PUBLIC (fn) = 1;
2500 DECL_ARTIFICIAL (fn) = 1;
2501 make_decl_rtl (fn, NULL_PTR, 1);
2502 assemble_external (fn);
2503 pop_obstacks ();
2504 }
2505
2506 /* We need to make an argument list for the function call.
2507
2508 memset has three arguments: the first is a void * address, the
2509 second an integer with the initialization value, and the last a
2510 size_t byte count for the copy. */
2511 arg_list
2512 = build_tree_list (NULL_TREE,
2513 make_tree (build_pointer_type (void_type_node),
2514 object));
2515 TREE_CHAIN (arg_list)
2516 = build_tree_list (NULL_TREE,
2517 make_tree (integer_type_node, const0_rtx));
2518 TREE_CHAIN (TREE_CHAIN (arg_list))
2519 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2520 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2521
2522 /* Now we have to build up the CALL_EXPR itself. */
2523 call_expr = build1 (ADDR_EXPR,
2524 build_pointer_type (TREE_TYPE (fn)), fn);
2525 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2526 call_expr, arg_list, NULL_TREE);
2527 TREE_SIDE_EFFECTS (call_expr) = 1;
2528
2529 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2530 #else
2531 emit_library_call (bzero_libfunc, 0,
2532 VOIDmode, 2, object, Pmode, size,
2533 TYPE_MODE (integer_type_node));
2534 #endif
2535 }
2536 }
2537 else
2538 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2539
2540 return retval;
2541 }
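#if 0
/* Editor's sketch, not part of the original source: zeroing a BLKmode MEM
   rtx OBJ of BYTES bytes whose known alignment in bits is ALIGN.  Small
   constant sizes go through clear_by_pieces; larger ones use a clrstr
   pattern or a memset/bzero call.  OBJ, BYTES and ALIGN are hypothetical.  */
static void
example_clear_object (obj, bytes, align)
     rtx obj;
     int bytes;
     unsigned int align;
{
  clear_storage (obj, GEN_INT (bytes), align);
}
#endif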
2542
2543 /* Generate code to copy Y into X.
2544 Both Y and X must have the same mode, except that
2545 Y can be a constant with VOIDmode.
2546 This mode cannot be BLKmode; use emit_block_move for that.
2547
2548 Return the last instruction emitted. */
2549
2550 rtx
2551 emit_move_insn (x, y)
2552 rtx x, y;
2553 {
2554 enum machine_mode mode = GET_MODE (x);
2555
2556 x = protect_from_queue (x, 1);
2557 y = protect_from_queue (y, 0);
2558
2559 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2560 abort ();
2561
2562 /* Never force constant_p_rtx to memory. */
2563 if (GET_CODE (y) == CONSTANT_P_RTX)
2564 ;
2565 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2566 y = force_const_mem (mode, y);
2567
2568 /* If X or Y are memory references, verify that their addresses are valid
2569 for the machine. */
2570 if (GET_CODE (x) == MEM
2571 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2572 && ! push_operand (x, GET_MODE (x)))
2573 || (flag_force_addr
2574 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2575 x = change_address (x, VOIDmode, XEXP (x, 0));
2576
2577 if (GET_CODE (y) == MEM
2578 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2579 || (flag_force_addr
2580 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2581 y = change_address (y, VOIDmode, XEXP (y, 0));
2582
2583 if (mode == BLKmode)
2584 abort ();
2585
2586 return emit_move_insn_1 (x, y);
2587 }
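#if 0
/* Editor's sketch, not part of the original source: copying the value of
   the hypothetical rtx VAL into a fresh SImode pseudo.  VAL must be SImode
   or a VOIDmode constant; emit_move_insn takes care of forcing a constant
   that is not LEGITIMATE_CONSTANT_P into the constant pool.  */
static rtx
example_copy_to_pseudo (val)
     rtx val;
{
  rtx tmp = gen_reg_rtx (SImode);
  emit_move_insn (tmp, val);
  return tmp;
}
#endif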
2588
2589 /* Low level part of emit_move_insn.
2590 Called just like emit_move_insn, but assumes X and Y
2591 are basically valid. */
2592
2593 rtx
2594 emit_move_insn_1 (x, y)
2595 rtx x, y;
2596 {
2597 enum machine_mode mode = GET_MODE (x);
2598 enum machine_mode submode;
2599 enum mode_class class = GET_MODE_CLASS (mode);
2600 unsigned int i;
2601
2602 if (mode >= MAX_MACHINE_MODE)
2603 abort ();
2604
2605 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2606 return
2607 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2608
2609 /* Expand complex moves by moving real part and imag part, if possible. */
2610 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2611 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2612 * BITS_PER_UNIT),
2613 (class == MODE_COMPLEX_INT
2614 ? MODE_INT : MODE_FLOAT),
2615 0))
2616 && (mov_optab->handlers[(int) submode].insn_code
2617 != CODE_FOR_nothing))
2618 {
2619 /* Don't split destination if it is a stack push. */
2620 int stack = push_operand (x, GET_MODE (x));
2621
2622 /* If this is a stack push, push the highpart first, so it
2623 will be in the argument order.
2624
2625 In that case, change_address is used only to convert
2626 the mode, not to change the address. */
2627 if (stack)
2628 {
2629 /* Note that the real part always precedes the imag part in memory
2630 regardless of machine's endianness. */
2631 #ifdef STACK_GROWS_DOWNWARD
2632 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2633 (gen_rtx_MEM (submode, XEXP (x, 0)),
2634 gen_imagpart (submode, y)));
2635 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2636 (gen_rtx_MEM (submode, XEXP (x, 0)),
2637 gen_realpart (submode, y)));
2638 #else
2639 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2640 (gen_rtx_MEM (submode, XEXP (x, 0)),
2641 gen_realpart (submode, y)));
2642 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2643 (gen_rtx_MEM (submode, XEXP (x, 0)),
2644 gen_imagpart (submode, y)));
2645 #endif
2646 }
2647 else
2648 {
2649 rtx realpart_x, realpart_y;
2650 rtx imagpart_x, imagpart_y;
2651
2652 /* If this is a complex value with each part being smaller than a
2653 word, the usual calling sequence will likely pack the pieces into
2654 a single register. Unfortunately, SUBREG of hard registers only
2655 deals in terms of words, so we have a problem converting input
2656 arguments to the CONCAT of two registers that is used elsewhere
2657 for complex values. If this is before reload, we can copy it into
2658 memory and reload. FIXME, we should see about using extract and
2659 insert on integer registers, but complex short and complex char
2660 variables should be rarely used. */
2661 if (GET_MODE_BITSIZE (mode) < 2*BITS_PER_WORD
2662 && (reload_in_progress | reload_completed) == 0)
2663 {
2664 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2665 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2666
2667 if (packed_dest_p || packed_src_p)
2668 {
2669 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2670 ? MODE_FLOAT : MODE_INT);
2671
2672 enum machine_mode reg_mode =
2673 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2674
2675 if (reg_mode != BLKmode)
2676 {
2677 rtx mem = assign_stack_temp (reg_mode,
2678 GET_MODE_SIZE (mode), 0);
2679
2680 rtx cmem = change_address (mem, mode, NULL_RTX);
2681
2682 cfun->cannot_inline = N_("function using short complex types cannot be inline");
2683
2684 if (packed_dest_p)
2685 {
2686 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2687 emit_move_insn_1 (cmem, y);
2688 return emit_move_insn_1 (sreg, mem);
2689 }
2690 else
2691 {
2692 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2693 emit_move_insn_1 (mem, sreg);
2694 return emit_move_insn_1 (x, cmem);
2695 }
2696 }
2697 }
2698 }
2699
2700 realpart_x = gen_realpart (submode, x);
2701 realpart_y = gen_realpart (submode, y);
2702 imagpart_x = gen_imagpart (submode, x);
2703 imagpart_y = gen_imagpart (submode, y);
2704
2705 /* Show the output dies here. This is necessary for SUBREGs
2706 of pseudos since we cannot track their lifetimes correctly;
2707 hard regs shouldn't appear here except as return values.
2708 We never want to emit such a clobber after reload. */
2709 if (x != y
2710 && ! (reload_in_progress || reload_completed)
2711 && (GET_CODE (realpart_x) == SUBREG
2712 || GET_CODE (imagpart_x) == SUBREG))
2713 {
2714 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2715 }
2716
2717 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2718 (realpart_x, realpart_y));
2719 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2720 (imagpart_x, imagpart_y));
2721 }
2722
2723 return get_last_insn ();
2724 }
2725
2726 /* This will handle any multi-word mode that lacks a move_insn pattern.
2727 However, you will get better code if you define such patterns,
2728 even if they must turn into multiple assembler instructions. */
2729 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2730 {
2731 rtx last_insn = 0;
2732 rtx seq, inner;
2733 int need_clobber;
2734
2735 #ifdef PUSH_ROUNDING
2736
2737 /* If X is a push on the stack, do the push now and replace
2738 X with a reference to the stack pointer. */
2739 if (push_operand (x, GET_MODE (x)))
2740 {
2741 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2742 x = change_address (x, VOIDmode, stack_pointer_rtx);
2743 }
2744 #endif
2745
2746 /* If we are in reload, see if either operand is a MEM whose address
2747 is scheduled for replacement. */
2748 if (reload_in_progress && GET_CODE (x) == MEM
2749 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2750 {
2751 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
2752
2753 MEM_COPY_ATTRIBUTES (new, x);
2754 x = new;
2755 }
2756 if (reload_in_progress && GET_CODE (y) == MEM
2757 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2758 {
2759 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
2760
2761 MEM_COPY_ATTRIBUTES (new, y);
2762 y = new;
2763 }
2764
2765 start_sequence ();
2766
2767 need_clobber = 0;
2768 for (i = 0;
2769 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2770 i++)
2771 {
2772 rtx xpart = operand_subword (x, i, 1, mode);
2773 rtx ypart = operand_subword (y, i, 1, mode);
2774
2775 /* If we can't get a part of Y, put Y into memory if it is a
2776 constant. Otherwise, force it into a register. If we still
2777 can't get a part of Y, abort. */
2778 if (ypart == 0 && CONSTANT_P (y))
2779 {
2780 y = force_const_mem (mode, y);
2781 ypart = operand_subword (y, i, 1, mode);
2782 }
2783 else if (ypart == 0)
2784 ypart = operand_subword_force (y, i, mode);
2785
2786 if (xpart == 0 || ypart == 0)
2787 abort ();
2788
2789 need_clobber |= (GET_CODE (xpart) == SUBREG);
2790
2791 last_insn = emit_move_insn (xpart, ypart);
2792 }
2793
2794 seq = gen_sequence ();
2795 end_sequence ();
2796
2797 /* Show the output dies here. This is necessary for SUBREGs
2798 of pseudos since we cannot track their lifetimes correctly;
2799 hard regs shouldn't appear here except as return values.
2800 We never want to emit such a clobber after reload. */
2801 if (x != y
2802 && ! (reload_in_progress || reload_completed)
2803 && need_clobber != 0)
2804 {
2805 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2806 }
2807
2808 emit_insn (seq);
2809
2810 return last_insn;
2811 }
2812 else
2813 abort ();
2814 }
2815 \f
2816 /* Pushing data onto the stack. */
2817
2818 /* Push a block of length SIZE (perhaps variable)
2819 and return an rtx to address the beginning of the block.
2820 Note that it is not possible for the value returned to be a QUEUED.
2821 The value may be virtual_outgoing_args_rtx.
2822
2823 EXTRA is the number of bytes of padding to push in addition to SIZE.
2824 BELOW nonzero means this padding comes at low addresses;
2825 otherwise, the padding comes at high addresses. */
2826
2827 rtx
2828 push_block (size, extra, below)
2829 rtx size;
2830 int extra, below;
2831 {
2832 register rtx temp;
2833
2834 size = convert_modes (Pmode, ptr_mode, size, 1);
2835 if (CONSTANT_P (size))
2836 anti_adjust_stack (plus_constant (size, extra));
2837 else if (GET_CODE (size) == REG && extra == 0)
2838 anti_adjust_stack (size);
2839 else
2840 {
2841 temp = copy_to_mode_reg (Pmode, size);
2842 if (extra != 0)
2843 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2844 temp, 0, OPTAB_LIB_WIDEN);
2845 anti_adjust_stack (temp);
2846 }
2847
2848 #ifndef STACK_GROWS_DOWNWARD
2849 #ifdef ARGS_GROW_DOWNWARD
2850 if (!ACCUMULATE_OUTGOING_ARGS)
2851 #else
2852 if (0)
2853 #endif
2854 #else
2855 if (1)
2856 #endif
2857 {
2858 /* Return the lowest stack address when STACK or ARGS grow downward and
2859 we are not accumulating outgoing arguments (the c4x port uses such
2860 conventions). */
2861 temp = virtual_outgoing_args_rtx;
2862 if (extra != 0 && below)
2863 temp = plus_constant (temp, extra);
2864 }
2865 else
2866 {
2867 if (GET_CODE (size) == CONST_INT)
2868 temp = plus_constant (virtual_outgoing_args_rtx,
2869 - INTVAL (size) - (below ? 0 : extra));
2870 else if (extra != 0 && !below)
2871 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2872 negate_rtx (Pmode, plus_constant (size, extra)));
2873 else
2874 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2875 negate_rtx (Pmode, size));
2876 }
2877
2878 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2879 }
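#if 0
/* Editor's sketch, not part of the original source: making room on the
   stack for an argument block of SIZE bytes (SIZE is an rtx, possibly
   non-constant) and wrapping the returned address in a BLKmode MEM so the
   block can be filled in.  SIZE is hypothetical.  */
static rtx
example_push_block (size)
     rtx size;
{
  rtx addr = push_block (size, 0, 0);
  return gen_rtx_MEM (BLKmode, addr);
}
#endif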
2880
2881 rtx
2882 gen_push_operand ()
2883 {
2884 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2885 }
2886
2887 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2888 block of SIZE bytes. */
2889
2890 static rtx
2891 get_push_address (size)
2892 int size;
2893 {
2894 register rtx temp;
2895
2896 if (STACK_PUSH_CODE == POST_DEC)
2897 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2898 else if (STACK_PUSH_CODE == POST_INC)
2899 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2900 else
2901 temp = stack_pointer_rtx;
2902
2903 return copy_to_reg (temp);
2904 }
2905
2906 /* Generate code to push X onto the stack, assuming it has mode MODE and
2907 type TYPE.
2908 MODE is redundant except when X is a CONST_INT (since they don't
2909 carry mode info).
2910 SIZE is an rtx for the size of data to be copied (in bytes),
2911 needed only if X is BLKmode.
2912
2913 ALIGN is maximum alignment we can assume.
2914
2915 If PARTIAL and REG are both nonzero, then copy that many of the first
2916 words of X into registers starting with REG, and push the rest of X.
2917 The amount of space pushed is decreased by PARTIAL words,
2918 rounded *down* to a multiple of PARM_BOUNDARY.
2919 REG must be a hard register in this case.
2920 If REG is zero but PARTIAL is not, take all other actions for an
2921 argument partially in registers, but do not actually load any
2922 registers.
2923
2924 EXTRA is the amount in bytes of extra space to leave next to this arg.
2925 This is ignored if an argument block has already been allocated.
2926
2927 On a machine that lacks real push insns, ARGS_ADDR is the address of
2928 the bottom of the argument block for this call. We use indexing off there
2929 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2930 argument block has not been preallocated.
2931
2932 ARGS_SO_FAR is the size of args previously pushed for this call.
2933
2934 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2935 for arguments passed in registers. If nonzero, it will be the number
2936 of bytes required. */
2937
2938 void
2939 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2940 args_addr, args_so_far, reg_parm_stack_space,
2941 alignment_pad)
2942 register rtx x;
2943 enum machine_mode mode;
2944 tree type;
2945 rtx size;
2946 unsigned int align;
2947 int partial;
2948 rtx reg;
2949 int extra;
2950 rtx args_addr;
2951 rtx args_so_far;
2952 int reg_parm_stack_space;
2953 rtx alignment_pad;
2954 {
2955 rtx xinner;
2956 enum direction stack_direction
2957 #ifdef STACK_GROWS_DOWNWARD
2958 = downward;
2959 #else
2960 = upward;
2961 #endif
2962
2963 /* Decide where to pad the argument: `downward' for below,
2964 `upward' for above, or `none' for don't pad it.
2965 Default is below for small data on big-endian machines; else above. */
2966 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2967
2968 /* Invert direction if stack is post-update. */
2969 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2970 if (where_pad != none)
2971 where_pad = (where_pad == downward ? upward : downward);
2972
2973 xinner = x = protect_from_queue (x, 0);
2974
2975 if (mode == BLKmode)
2976 {
2977 /* Copy a block into the stack, entirely or partially. */
2978
2979 register rtx temp;
2980 int used = partial * UNITS_PER_WORD;
2981 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2982 int skip;
2983
2984 if (size == 0)
2985 abort ();
2986
2987 used -= offset;
2988
2989 /* USED is now the # of bytes we need not copy to the stack
2990 because registers will take care of them. */
2991
2992 if (partial != 0)
2993 xinner = change_address (xinner, BLKmode,
2994 plus_constant (XEXP (xinner, 0), used));
2995
2996 /* If the partial register-part of the arg counts in its stack size,
2997 skip the part of stack space corresponding to the registers.
2998 Otherwise, start copying to the beginning of the stack space,
2999 by setting SKIP to 0. */
3000 skip = (reg_parm_stack_space == 0) ? 0 : used;
3001
3002 #ifdef PUSH_ROUNDING
3003 /* Do it with several push insns if that doesn't take lots of insns
3004 and if there is no difficulty with push insns that skip bytes
3005 on the stack for alignment purposes. */
3006 if (args_addr == 0
3007 && PUSH_ARGS
3008 && GET_CODE (size) == CONST_INT
3009 && skip == 0
3010 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3011 /* Here we avoid the case of a structure whose weak alignment
3012 forces many pushes of a small amount of data,
3013 and such small pushes do rounding that causes trouble. */
3014 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3015 || align >= BIGGEST_ALIGNMENT
3016 || PUSH_ROUNDING (align) == align)
3017 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3018 {
3019 /* Push padding now if padding above and stack grows down,
3020 or if padding below and stack grows up.
3021 But if space already allocated, this has already been done. */
3022 if (extra && args_addr == 0
3023 && where_pad != none && where_pad != stack_direction)
3024 anti_adjust_stack (GEN_INT (extra));
3025
3026 stack_pointer_delta += INTVAL (size) - used;
3027 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
3028 INTVAL (size) - used, align);
3029
3030 if (current_function_check_memory_usage && ! in_check_memory_usage)
3031 {
3032 rtx temp;
3033
3034 in_check_memory_usage = 1;
3035 temp = get_push_address (INTVAL(size) - used);
3036 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3037 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3038 temp, Pmode,
3039 XEXP (xinner, 0), Pmode,
3040 GEN_INT (INTVAL(size) - used),
3041 TYPE_MODE (sizetype));
3042 else
3043 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3044 temp, Pmode,
3045 GEN_INT (INTVAL(size) - used),
3046 TYPE_MODE (sizetype),
3047 GEN_INT (MEMORY_USE_RW),
3048 TYPE_MODE (integer_type_node));
3049 in_check_memory_usage = 0;
3050 }
3051 }
3052 else
3053 #endif /* PUSH_ROUNDING */
3054 {
3055 /* Otherwise make space on the stack and copy the data
3056 to the address of that space. */
3057
3058 /* Deduct words put into registers from the size we must copy. */
3059 if (partial != 0)
3060 {
3061 if (GET_CODE (size) == CONST_INT)
3062 size = GEN_INT (INTVAL (size) - used);
3063 else
3064 size = expand_binop (GET_MODE (size), sub_optab, size,
3065 GEN_INT (used), NULL_RTX, 0,
3066 OPTAB_LIB_WIDEN);
3067 }
3068
3069 /* Get the address of the stack space.
3070 In this case, we do not deal with EXTRA separately.
3071 A single stack adjust will do. */
3072 if (! args_addr)
3073 {
3074 temp = push_block (size, extra, where_pad == downward);
3075 extra = 0;
3076 }
3077 else if (GET_CODE (args_so_far) == CONST_INT)
3078 temp = memory_address (BLKmode,
3079 plus_constant (args_addr,
3080 skip + INTVAL (args_so_far)));
3081 else
3082 temp = memory_address (BLKmode,
3083 plus_constant (gen_rtx_PLUS (Pmode,
3084 args_addr,
3085 args_so_far),
3086 skip));
3087 if (current_function_check_memory_usage && ! in_check_memory_usage)
3088 {
3089 rtx target;
3090
3091 in_check_memory_usage = 1;
3092 target = copy_to_reg (temp);
3093 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3094 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3095 target, Pmode,
3096 XEXP (xinner, 0), Pmode,
3097 size, TYPE_MODE (sizetype));
3098 else
3099 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3100 target, Pmode,
3101 size, TYPE_MODE (sizetype),
3102 GEN_INT (MEMORY_USE_RW),
3103 TYPE_MODE (integer_type_node));
3104 in_check_memory_usage = 0;
3105 }
3106
3107 /* TEMP is the address of the block. Copy the data there. */
3108 if (GET_CODE (size) == CONST_INT
3109 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3110 {
3111 rtx target = gen_rtx_MEM (BLKmode, temp);
3112
3113 if (type != 0)
3114 set_mem_attributes (target, type, 1);
3115
3116 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
3117 INTVAL (size), align);
3118 goto ret;
3119 }
3120 else
3121 {
3122 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3123 enum machine_mode mode;
3124 rtx target = gen_rtx_MEM (BLKmode, temp);
3125
3126 if (type != 0)
3127 set_mem_attributes (target, type, 1);
3128
3129 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3130 mode != VOIDmode;
3131 mode = GET_MODE_WIDER_MODE (mode))
3132 {
3133 enum insn_code code = movstr_optab[(int) mode];
3134 insn_operand_predicate_fn pred;
3135
3136 if (code != CODE_FOR_nothing
3137 && ((GET_CODE (size) == CONST_INT
3138 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3139 <= (GET_MODE_MASK (mode) >> 1)))
3140 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3141 && (!(pred = insn_data[(int) code].operand[0].predicate)
3142 || ((*pred) (target, BLKmode)))
3143 && (!(pred = insn_data[(int) code].operand[1].predicate)
3144 || ((*pred) (xinner, BLKmode)))
3145 && (!(pred = insn_data[(int) code].operand[3].predicate)
3146 || ((*pred) (opalign, VOIDmode))))
3147 {
3148 rtx op2 = convert_to_mode (mode, size, 1);
3149 rtx last = get_last_insn ();
3150 rtx pat;
3151
3152 pred = insn_data[(int) code].operand[2].predicate;
3153 if (pred != 0 && ! (*pred) (op2, mode))
3154 op2 = copy_to_mode_reg (mode, op2);
3155
3156 pat = GEN_FCN ((int) code) (target, xinner,
3157 op2, opalign);
3158 if (pat)
3159 {
3160 emit_insn (pat);
3161 goto ret;
3162 }
3163 else
3164 delete_insns_since (last);
3165 }
3166 }
3167 }
3168
3169 if (!ACCUMULATE_OUTGOING_ARGS)
3170 {
3171 /* If the source is referenced relative to the stack pointer,
3172 copy it to another register to stabilize it. We do not need
3173 to do this if we know that we won't be changing sp. */
3174
3175 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3176 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3177 temp = copy_to_reg (temp);
3178 }
3179
3180 /* Make inhibit_defer_pop nonzero around the library call
3181 to force it to pop the bcopy-arguments right away. */
3182 NO_DEFER_POP;
3183 #ifdef TARGET_MEM_FUNCTIONS
3184 emit_library_call (memcpy_libfunc, 0,
3185 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3186 convert_to_mode (TYPE_MODE (sizetype),
3187 size, TREE_UNSIGNED (sizetype)),
3188 TYPE_MODE (sizetype));
3189 #else
3190 emit_library_call (bcopy_libfunc, 0,
3191 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3192 convert_to_mode (TYPE_MODE (integer_type_node),
3193 size,
3194 TREE_UNSIGNED (integer_type_node)),
3195 TYPE_MODE (integer_type_node));
3196 #endif
3197 OK_DEFER_POP;
3198 }
3199 }
3200 else if (partial > 0)
3201 {
3202 /* Scalar partly in registers. */
3203
3204 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3205 int i;
3206 int not_stack;
3207 /* # words of start of argument
3208 that we must make space for but need not store. */
3209 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3210 int args_offset = INTVAL (args_so_far);
3211 int skip;
3212
3213 /* Push padding now if padding above and stack grows down,
3214 or if padding below and stack grows up.
3215 But if space already allocated, this has already been done. */
3216 if (extra && args_addr == 0
3217 && where_pad != none && where_pad != stack_direction)
3218 anti_adjust_stack (GEN_INT (extra));
3219
3220 /* If we make space by pushing it, we might as well push
3221 the real data. Otherwise, we can leave OFFSET nonzero
3222 and leave the space uninitialized. */
3223 if (args_addr == 0)
3224 offset = 0;
3225
3226 /* Now NOT_STACK gets the number of words that we don't need to
3227 allocate on the stack. */
3228 not_stack = partial - offset;
3229
3230 /* If the partial register-part of the arg counts in its stack size,
3231 skip the part of stack space corresponding to the registers.
3232 Otherwise, start copying to the beginning of the stack space,
3233 by setting SKIP to 0. */
3234 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3235
3236 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3237 x = validize_mem (force_const_mem (mode, x));
3238
3239 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3240 SUBREGs of such registers are not allowed. */
3241 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3242 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3243 x = copy_to_reg (x);
3244
3245 /* Loop over all the words allocated on the stack for this arg. */
3246 /* We can do it by words, because any scalar bigger than a word
3247 has a size a multiple of a word. */
3248 #ifndef PUSH_ARGS_REVERSED
3249 for (i = not_stack; i < size; i++)
3250 #else
3251 for (i = size - 1; i >= not_stack; i--)
3252 #endif
3253 if (i >= not_stack + offset)
3254 emit_push_insn (operand_subword_force (x, i, mode),
3255 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3256 0, args_addr,
3257 GEN_INT (args_offset + ((i - not_stack + skip)
3258 * UNITS_PER_WORD)),
3259 reg_parm_stack_space, alignment_pad);
3260 }
3261 else
3262 {
3263 rtx addr;
3264 rtx target = NULL_RTX;
3265 rtx dest;
3266
3267 /* Push padding now if padding above and stack grows down,
3268 or if padding below and stack grows up.
3269 But if space already allocated, this has already been done. */
3270 if (extra && args_addr == 0
3271 && where_pad != none && where_pad != stack_direction)
3272 anti_adjust_stack (GEN_INT (extra));
3273
3274 #ifdef PUSH_ROUNDING
3275 if (args_addr == 0 && PUSH_ARGS)
3276 {
3277 addr = gen_push_operand ();
3278 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3279 }
3280 else
3281 #endif
3282 {
3283 if (GET_CODE (args_so_far) == CONST_INT)
3284 addr
3285 = memory_address (mode,
3286 plus_constant (args_addr,
3287 INTVAL (args_so_far)));
3288 else
3289 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3290 args_so_far));
3291 target = addr;
3292 }
3293
3294 dest = gen_rtx_MEM (mode, addr);
3295 if (type != 0)
3296 set_mem_attributes (dest, type, 1);
3297
3298 emit_move_insn (dest, x);
3299
3300 if (current_function_check_memory_usage && ! in_check_memory_usage)
3301 {
3302 in_check_memory_usage = 1;
3303 if (target == 0)
3304 target = get_push_address (GET_MODE_SIZE (mode));
3305
3306 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3307 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3308 target, Pmode,
3309 XEXP (x, 0), Pmode,
3310 GEN_INT (GET_MODE_SIZE (mode)),
3311 TYPE_MODE (sizetype));
3312 else
3313 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3314 target, Pmode,
3315 GEN_INT (GET_MODE_SIZE (mode)),
3316 TYPE_MODE (sizetype),
3317 GEN_INT (MEMORY_USE_RW),
3318 TYPE_MODE (integer_type_node));
3319 in_check_memory_usage = 0;
3320 }
3321 }
3322
3323 ret:
3324 /* If part should go in registers, copy that part
3325 into the appropriate registers. Do this now, at the end,
3326 since mem-to-mem copies above may do function calls. */
3327 if (partial > 0 && reg != 0)
3328 {
3329 /* Handle calls that pass values in multiple non-contiguous locations.
3330 The Irix 6 ABI has examples of this. */
3331 if (GET_CODE (reg) == PARALLEL)
3332 emit_group_load (reg, x, -1, align); /* ??? size? */
3333 else
3334 move_block_to_reg (REGNO (reg), x, partial, mode);
3335 }
3336
3337 if (extra && args_addr == 0 && where_pad == stack_direction)
3338 anti_adjust_stack (GEN_INT (extra));
3339
3340 if (alignment_pad && args_addr == 0)
3341 anti_adjust_stack (alignment_pad);
3342 }
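#if 0
/* Editor's sketch, not part of the original source: pushing a word-sized
   scalar argument VAL entirely on the stack, with no partial-register part
   and no preallocated argument block.  This assumes a target that has push
   instructions (PUSH_ROUNDING defined); VAL is hypothetical.  */
static void
example_push_scalar_arg (val)
     rtx val;
{
  emit_push_insn (val, word_mode, NULL_TREE, NULL_RTX,
                  PARM_BOUNDARY, 0, NULL_RTX, 0, NULL_RTX,
                  const0_rtx, 0, NULL_RTX);
}
#endif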
3343 \f
3344 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3345 operations. */
3346
3347 static rtx
3348 get_subtarget (x)
3349 rtx x;
3350 {
3351 return ((x == 0
3352 /* Only registers can be subtargets. */
3353 || GET_CODE (x) != REG
3354 /* If the register is readonly, it can't be set more than once. */
3355 || RTX_UNCHANGING_P (x)
3356 /* Don't use hard regs to avoid extending their life. */
3357 || REGNO (x) < FIRST_PSEUDO_REGISTER
3358 /* Avoid subtargets inside loops,
3359 since they hide some invariant expressions. */
3360 || preserve_subexpressions_p ())
3361 ? 0 : x);
3362 }
3363
3364 /* Expand an assignment that stores the value of FROM into TO.
3365 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3366 (This may contain a QUEUED rtx;
3367 if the value is constant, this rtx is a constant.)
3368 Otherwise, the returned value is NULL_RTX.
3369
3370 SUGGEST_REG is no longer actually used.
3371 It used to mean, copy the value through a register
3372 and return that register, if that is possible.
3373 We now use WANT_VALUE to decide whether to do this. */
3374
3375 rtx
3376 expand_assignment (to, from, want_value, suggest_reg)
3377 tree to, from;
3378 int want_value;
3379 int suggest_reg ATTRIBUTE_UNUSED;
3380 {
3381 register rtx to_rtx = 0;
3382 rtx result;
3383
3384 /* Don't crash if the lhs of the assignment was erroneous. */
3385
3386 if (TREE_CODE (to) == ERROR_MARK)
3387 {
3388 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3389 return want_value ? result : NULL_RTX;
3390 }
3391
3392 /* Assignment of a structure component needs special treatment
3393 if the structure component's rtx is not simply a MEM.
3394 Assignment of an array element at a constant index, and assignment of
3395 an array element in an unaligned packed structure field, has the same
3396 problem. */
3397
3398 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3399 || TREE_CODE (to) == ARRAY_REF)
3400 {
3401 enum machine_mode mode1;
3402 HOST_WIDE_INT bitsize, bitpos;
3403 tree offset;
3404 int unsignedp;
3405 int volatilep = 0;
3406 tree tem;
3407 unsigned int alignment;
3408
3409 push_temp_slots ();
3410 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3411 &unsignedp, &volatilep, &alignment);
3412
3413 /* If we are going to use store_bit_field and extract_bit_field,
3414 make sure to_rtx will be safe for multiple use. */
3415
3416 if (mode1 == VOIDmode && want_value)
3417 tem = stabilize_reference (tem);
3418
3419 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3420 if (offset != 0)
3421 {
3422 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3423
3424 if (GET_CODE (to_rtx) != MEM)
3425 abort ();
3426
3427 if (GET_MODE (offset_rtx) != ptr_mode)
3428 {
3429 #ifdef POINTERS_EXTEND_UNSIGNED
3430 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3431 #else
3432 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3433 #endif
3434 }
3435
3436 /* A constant address in TO_RTX can have VOIDmode, we must not try
3437 to call force_reg for that case. Avoid that case. */
3438 if (GET_CODE (to_rtx) == MEM
3439 && GET_MODE (to_rtx) == BLKmode
3440 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3441 && bitsize
3442 && (bitpos % bitsize) == 0
3443 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3444 && alignment == GET_MODE_ALIGNMENT (mode1))
3445 {
3446 rtx temp = change_address (to_rtx, mode1,
3447 plus_constant (XEXP (to_rtx, 0),
3448 (bitpos /
3449 BITS_PER_UNIT)));
3450 if (GET_CODE (XEXP (temp, 0)) == REG)
3451 to_rtx = temp;
3452 else
3453 to_rtx = change_address (to_rtx, mode1,
3454 force_reg (GET_MODE (XEXP (temp, 0)),
3455 XEXP (temp, 0)));
3456 bitpos = 0;
3457 }
3458
3459 to_rtx = change_address (to_rtx, VOIDmode,
3460 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3461 force_reg (ptr_mode,
3462 offset_rtx)));
3463 }
3464
3465 if (volatilep)
3466 {
3467 if (GET_CODE (to_rtx) == MEM)
3468 {
3469 /* When the offset is zero, to_rtx is the address of the
3470 structure we are storing into, and hence may be shared.
3471 We must make a new MEM before setting the volatile bit. */
3472 if (offset == 0)
3473 to_rtx = copy_rtx (to_rtx);
3474
3475 MEM_VOLATILE_P (to_rtx) = 1;
3476 }
3477 #if 0 /* This was turned off because, when a field is volatile
3478 in an object which is not volatile, the object may be in a register,
3479 and then we would abort over here. */
3480 else
3481 abort ();
3482 #endif
3483 }
3484
3485 if (TREE_CODE (to) == COMPONENT_REF
3486 && TREE_READONLY (TREE_OPERAND (to, 1)))
3487 {
3488 if (offset == 0)
3489 to_rtx = copy_rtx (to_rtx);
3490
3491 RTX_UNCHANGING_P (to_rtx) = 1;
3492 }
3493
3494 /* Check the access. */
3495 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3496 {
3497 rtx to_addr;
3498 int size;
3499 int best_mode_size;
3500 enum machine_mode best_mode;
3501
3502 best_mode = get_best_mode (bitsize, bitpos,
3503 TYPE_ALIGN (TREE_TYPE (tem)),
3504 mode1, volatilep);
3505 if (best_mode == VOIDmode)
3506 best_mode = QImode;
3507
3508 best_mode_size = GET_MODE_BITSIZE (best_mode);
3509 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3510 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3511 size *= GET_MODE_SIZE (best_mode);
3512
3513 /* Check the access right of the pointer. */
3514 in_check_memory_usage = 1;
3515 if (size)
3516 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3517 to_addr, Pmode,
3518 GEN_INT (size), TYPE_MODE (sizetype),
3519 GEN_INT (MEMORY_USE_WO),
3520 TYPE_MODE (integer_type_node));
3521 in_check_memory_usage = 0;
3522 }
3523
3524 /* If this is a varying-length object, we must get the address of
3525 the source and do an explicit block move. */
3526 if (bitsize < 0)
3527 {
3528 unsigned int from_align;
3529 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3530 rtx inner_to_rtx
3531 = change_address (to_rtx, VOIDmode,
3532 plus_constant (XEXP (to_rtx, 0),
3533 bitpos / BITS_PER_UNIT));
3534
3535 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3536 MIN (alignment, from_align));
3537 free_temp_slots ();
3538 pop_temp_slots ();
3539 return to_rtx;
3540 }
3541 else
3542 {
3543 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3544 (want_value
3545 /* Spurious cast for HPUX compiler. */
3546 ? ((enum machine_mode)
3547 TYPE_MODE (TREE_TYPE (to)))
3548 : VOIDmode),
3549 unsignedp,
3550 alignment,
3551 int_size_in_bytes (TREE_TYPE (tem)),
3552 get_alias_set (to));
3553
3554 preserve_temp_slots (result);
3555 free_temp_slots ();
3556 pop_temp_slots ();
3557
3558 /* If the value is meaningful, convert RESULT to the proper mode.
3559 Otherwise, return nothing. */
3560 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3561 TYPE_MODE (TREE_TYPE (from)),
3562 result,
3563 TREE_UNSIGNED (TREE_TYPE (to)))
3564 : NULL_RTX);
3565 }
3566 }
3567
3568 /* If the rhs is a function call and its value is not an aggregate,
3569 call the function before we start to compute the lhs.
3570 This is needed for correct code for cases such as
3571 val = setjmp (buf) on machines where reference to val
3572 requires loading up part of an address in a separate insn.
3573
3574 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3575 since it might be a promoted variable where the zero- or sign- extension
3576 needs to be done. Handling this in the normal way is safe because no
3577 computation is done before the call. */
3578 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3579 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3580 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3581 && GET_CODE (DECL_RTL (to)) == REG))
3582 {
3583 rtx value;
3584
3585 push_temp_slots ();
3586 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3587 if (to_rtx == 0)
3588 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3589
3590 /* Handle calls that return values in multiple non-contiguous locations.
3591 The Irix 6 ABI has examples of this. */
3592 if (GET_CODE (to_rtx) == PARALLEL)
3593 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3594 TYPE_ALIGN (TREE_TYPE (from)));
3595 else if (GET_MODE (to_rtx) == BLKmode)
3596 emit_block_move (to_rtx, value, expr_size (from),
3597 TYPE_ALIGN (TREE_TYPE (from)));
3598 else
3599 {
3600 #ifdef POINTERS_EXTEND_UNSIGNED
3601 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3602 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3603 value = convert_memory_address (GET_MODE (to_rtx), value);
3604 #endif
3605 emit_move_insn (to_rtx, value);
3606 }
3607 preserve_temp_slots (to_rtx);
3608 free_temp_slots ();
3609 pop_temp_slots ();
3610 return want_value ? to_rtx : NULL_RTX;
3611 }
3612
3613 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3614 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3615
3616 if (to_rtx == 0)
3617 {
3618 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3619 if (GET_CODE (to_rtx) == MEM)
3620 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3621 }
3622
3623 /* Don't move directly into a return register. */
3624 if (TREE_CODE (to) == RESULT_DECL
3625 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3626 {
3627 rtx temp;
3628
3629 push_temp_slots ();
3630 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3631
3632 if (GET_CODE (to_rtx) == PARALLEL)
3633 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3634 TYPE_ALIGN (TREE_TYPE (from)));
3635 else
3636 emit_move_insn (to_rtx, temp);
3637
3638 preserve_temp_slots (to_rtx);
3639 free_temp_slots ();
3640 pop_temp_slots ();
3641 return want_value ? to_rtx : NULL_RTX;
3642 }
3643
3644 /* In case we are returning the contents of an object which overlaps
3645 the place the value is being stored, use a safe function when copying
3646 a value through a pointer into a structure value return block. */
3647 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3648 && current_function_returns_struct
3649 && !current_function_returns_pcc_struct)
3650 {
3651 rtx from_rtx, size;
3652
3653 push_temp_slots ();
3654 size = expr_size (from);
3655 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3656 EXPAND_MEMORY_USE_DONT);
3657
3658 /* Copy the rights of the bitmap. */
3659 if (current_function_check_memory_usage)
3660 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3661 XEXP (to_rtx, 0), Pmode,
3662 XEXP (from_rtx, 0), Pmode,
3663 convert_to_mode (TYPE_MODE (sizetype),
3664 size, TREE_UNSIGNED (sizetype)),
3665 TYPE_MODE (sizetype));
3666
3667 #ifdef TARGET_MEM_FUNCTIONS
3668 emit_library_call (memcpy_libfunc, 0,
3669 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3670 XEXP (from_rtx, 0), Pmode,
3671 convert_to_mode (TYPE_MODE (sizetype),
3672 size, TREE_UNSIGNED (sizetype)),
3673 TYPE_MODE (sizetype));
3674 #else
3675 emit_library_call (bcopy_libfunc, 0,
3676 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3677 XEXP (to_rtx, 0), Pmode,
3678 convert_to_mode (TYPE_MODE (integer_type_node),
3679 size, TREE_UNSIGNED (integer_type_node)),
3680 TYPE_MODE (integer_type_node));
3681 #endif
3682
3683 preserve_temp_slots (to_rtx);
3684 free_temp_slots ();
3685 pop_temp_slots ();
3686 return want_value ? to_rtx : NULL_RTX;
3687 }
3688
3689 /* Compute FROM and store the value in the rtx we got. */
3690
3691 push_temp_slots ();
3692 result = store_expr (from, to_rtx, want_value);
3693 preserve_temp_slots (result);
3694 free_temp_slots ();
3695 pop_temp_slots ();
3696 return want_value ? result : NULL_RTX;
3697 }
3698
3699 /* Generate code for computing expression EXP,
3700 and storing the value into TARGET.
3701 TARGET may contain a QUEUED rtx.
3702
3703 If WANT_VALUE is nonzero, return a copy of the value
3704 not in TARGET, so that we can be sure to use the proper
3705 value in a containing expression even if TARGET has something
3706 else stored in it. If possible, we copy the value through a pseudo
3707 and return that pseudo. Or, if the value is constant, we try to
3708 return the constant. In some cases, we return a pseudo
3709 copied *from* TARGET.
3710
3711 If the mode is BLKmode then we may return TARGET itself.
3712 It turns out that in BLKmode it doesn't cause a problem,
3713 because C has no operators that could combine two different
3714 assignments into the same BLKmode object with different values
3715 with no sequence point. Will other languages need this to
3716 be more thorough?
3717
3718 If WANT_VALUE is 0, we return NULL, to make sure
3719 to catch quickly any cases where the caller uses the value
3720 and fails to set WANT_VALUE. */
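/* Illustration only: for a plain statement such as `x = y + 1;' a caller
   would typically pass WANT_VALUE == 0 and ignore the result, while for
   `z = (x = y + 1);' it would pass WANT_VALUE == 1 and use the returned
   rtx as the value of the inner assignment. */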
3721
3722 rtx
3723 store_expr (exp, target, want_value)
3724 register tree exp;
3725 register rtx target;
3726 int want_value;
3727 {
3728 register rtx temp;
3729 int dont_return_target = 0;
3730
3731 if (TREE_CODE (exp) == COMPOUND_EXPR)
3732 {
3733 /* Perform first part of compound expression, then assign from second
3734 part. */
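/* E.g., for `x = (f (), y);' (illustration only) we expand `f ()' purely
   for its side effects and then store `y' into the target. */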
3735 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3736 emit_queue ();
3737 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3738 }
3739 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3740 {
3741 /* For conditional expression, get safe form of the target. Then
3742 test the condition, doing the appropriate assignment on either
3743 side. This avoids the creation of unnecessary temporaries.
3744 For non-BLKmode, it is more efficient not to do this. */
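/* A rough sketch (illustration only) of what this emits for
   `s = cond ? a : b;' where S has BLKmode:

       jumpifnot COND -> lab1
       <store A into S>;  jump lab2
     lab1:
       <store B into S>
     lab2:  */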
3745
3746 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3747
3748 emit_queue ();
3749 target = protect_from_queue (target, 1);
3750
3751 do_pending_stack_adjust ();
3752 NO_DEFER_POP;
3753 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3754 start_cleanup_deferral ();
3755 store_expr (TREE_OPERAND (exp, 1), target, 0);
3756 end_cleanup_deferral ();
3757 emit_queue ();
3758 emit_jump_insn (gen_jump (lab2));
3759 emit_barrier ();
3760 emit_label (lab1);
3761 start_cleanup_deferral ();
3762 store_expr (TREE_OPERAND (exp, 2), target, 0);
3763 end_cleanup_deferral ();
3764 emit_queue ();
3765 emit_label (lab2);
3766 OK_DEFER_POP;
3767
3768 return want_value ? target : NULL_RTX;
3769 }
3770 else if (queued_subexp_p (target))
3771 /* If target contains a postincrement, let's not risk
3772 using it as the place to generate the rhs. */
3773 {
3774 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3775 {
3776 /* Expand EXP into a new pseudo. */
3777 temp = gen_reg_rtx (GET_MODE (target));
3778 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3779 }
3780 else
3781 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3782
3783 /* If target is volatile, ANSI requires accessing the value
3784 *from* the target, if it is accessed. So make that happen.
3785 In no case return the target itself. */
3786 if (! MEM_VOLATILE_P (target) && want_value)
3787 dont_return_target = 1;
3788 }
3789 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3790 && GET_MODE (target) != BLKmode)
3791 /* If target is in memory and caller wants value in a register instead,
3792 arrange that. Pass TARGET as target for expand_expr so that,
3793 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3794 We know expand_expr will not use the target in that case.
3795 Don't do this if TARGET is volatile because we are supposed
3796 to write it and then read it. */
3797 {
3798 temp = expand_expr (exp, target, GET_MODE (target), 0);
3799 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3800 temp = copy_to_reg (temp);
3801 dont_return_target = 1;
3802 }
3803 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3804 /* If this is a scalar in a register that is stored in a wider mode
3805 than the declared mode, compute the result into its declared mode
3806 and then convert to the wider mode. Our value is the computed
3807 expression. */
3808 {
3809 /* If we don't want a value, we can do the conversion inside EXP,
3810 which will often result in some optimizations. Do the conversion
3811 in two steps: first change the signedness, if needed, then
3812 the extend. But don't do this if the type of EXP is a subtype
3813 of something else since then the conversion might involve
3814 more than just converting modes. */
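/* For instance (illustration only), if EXP has type `signed char' but
   TARGET was promoted to an unsigned SImode register, we first convert
   EXP to `unsigned char' and then to the unsigned type corresponding to
   SImode, letting the tree-level conversions do the work instead of an
   explicit extension afterwards. */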
3815 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3816 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3817 {
3818 if (TREE_UNSIGNED (TREE_TYPE (exp))
3819 != SUBREG_PROMOTED_UNSIGNED_P (target))
3820 exp
3821 = convert
3822 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3823 TREE_TYPE (exp)),
3824 exp);
3825
3826 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3827 SUBREG_PROMOTED_UNSIGNED_P (target)),
3828 exp);
3829 }
3830
3831 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3832
3833 /* If TEMP is a volatile MEM and we want a result value, make
3834 the access now so it gets done only once. Likewise if
3835 it contains TARGET. */
3836 if (GET_CODE (temp) == MEM && want_value
3837 && (MEM_VOLATILE_P (temp)
3838 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3839 temp = copy_to_reg (temp);
3840
3841 /* If TEMP is a VOIDmode constant, use convert_modes to make
3842 sure that we properly convert it. */
3843 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3844 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3845 TYPE_MODE (TREE_TYPE (exp)), temp,
3846 SUBREG_PROMOTED_UNSIGNED_P (target));
3847
3848 convert_move (SUBREG_REG (target), temp,
3849 SUBREG_PROMOTED_UNSIGNED_P (target));
3850
3851 /* If we promoted a constant, change the mode back down to match
3852 target. Otherwise, the caller might get confused by a result whose
3853 mode is larger than expected. */
3854
3855 if (want_value && GET_MODE (temp) != GET_MODE (target)
3856 && GET_MODE (temp) != VOIDmode)
3857 {
3858 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3859 SUBREG_PROMOTED_VAR_P (temp) = 1;
3860 SUBREG_PROMOTED_UNSIGNED_P (temp)
3861 = SUBREG_PROMOTED_UNSIGNED_P (target);
3862 }
3863
3864 return want_value ? temp : NULL_RTX;
3865 }
3866 else
3867 {
3868 temp = expand_expr (exp, target, GET_MODE (target), 0);
3869 /* Return TARGET if it's a specified hardware register.
3870 If TARGET is a volatile mem ref, either return TARGET
3871 or return a reg copied *from* TARGET; ANSI requires this.
3872
3873 Otherwise, if TEMP is not TARGET, return TEMP
3874 if it is constant (for efficiency),
3875 or if we really want the correct value. */
3876 if (!(target && GET_CODE (target) == REG
3877 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3878 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3879 && ! rtx_equal_p (temp, target)
3880 && (CONSTANT_P (temp) || want_value))
3881 dont_return_target = 1;
3882 }
3883
3884 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3885 the same as that of TARGET, adjust the constant. This is needed, for
3886 example, in case it is a CONST_DOUBLE and we want only a word-sized
3887 value. */
3888 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3889 && TREE_CODE (exp) != ERROR_MARK
3890 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3891 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3892 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3893
3894 if (current_function_check_memory_usage
3895 && GET_CODE (target) == MEM
3896 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3897 {
3898 in_check_memory_usage = 1;
3899 if (GET_CODE (temp) == MEM)
3900 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3901 XEXP (target, 0), Pmode,
3902 XEXP (temp, 0), Pmode,
3903 expr_size (exp), TYPE_MODE (sizetype));
3904 else
3905 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3906 XEXP (target, 0), Pmode,
3907 expr_size (exp), TYPE_MODE (sizetype),
3908 GEN_INT (MEMORY_USE_WO),
3909 TYPE_MODE (integer_type_node));
3910 in_check_memory_usage = 0;
3911 }
3912
3913 /* If value was not generated in the target, store it there.
3914 Convert the value to TARGET's type first if necessary. */
3915 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3916 one or both of them are volatile memory refs, we have to distinguish
3917 two cases:
3918 - expand_expr has used TARGET. In this case, we must not generate
3919 another copy. This can be detected by TARGET being equal according
3920 to == .
3921 - expand_expr has not used TARGET - that means that the source just
3922 happens to have the same RTX form. Since temp will have been created
3923 by expand_expr, it will compare unequal according to == .
3924 We must generate a copy in this case, to reach the correct number
3925 of volatile memory references. */
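/* For instance (illustration only), with `volatile int v;' and `v = v;'
   expand_expr may hand back TARGET itself (pointer-equal, so no extra
   copy is wanted), while an rhs that merely expands to an identical
   looking volatile MEM is rtx_equal_p but not ==, and then the copy
   below must still be emitted. */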
3926
3927 if ((! rtx_equal_p (temp, target)
3928 || (temp != target && (side_effects_p (temp)
3929 || side_effects_p (target))))
3930 && TREE_CODE (exp) != ERROR_MARK)
3931 {
3932 target = protect_from_queue (target, 1);
3933 if (GET_MODE (temp) != GET_MODE (target)
3934 && GET_MODE (temp) != VOIDmode)
3935 {
3936 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3937 if (dont_return_target)
3938 {
3939 /* In this case, we will return TEMP,
3940 so make sure it has the proper mode.
3941 But don't forget to store the value into TARGET. */
3942 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3943 emit_move_insn (target, temp);
3944 }
3945 else
3946 convert_move (target, temp, unsignedp);
3947 }
3948
3949 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3950 {
3951 /* Handle copying a string constant into an array.
3952 The string constant may be shorter than the array.
3953 So copy just the string's actual length, and clear the rest. */
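/* E.g., for `char buf[8] = "hi";' (illustration only) the string has
   length 3 counting the terminating nul, so we copy those 3 bytes and
   then clear the remaining 5 bytes of the array. */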
3954 rtx size;
3955 rtx addr;
3956
3957 /* Get the size of the data type of the string,
3958 which is actually the size of the target. */
3959 size = expr_size (exp);
3960 if (GET_CODE (size) == CONST_INT
3961 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3962 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
3963 else
3964 {
3965 /* Compute the size of the data to copy from the string. */
3966 tree copy_size
3967 = size_binop (MIN_EXPR,
3968 make_tree (sizetype, size),
3969 size_int (TREE_STRING_LENGTH (exp)));
3970 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
3971 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3972 VOIDmode, 0);
3973 rtx label = 0;
3974
3975 /* Copy that much. */
3976 emit_block_move (target, temp, copy_size_rtx,
3977 TYPE_ALIGN (TREE_TYPE (exp)));
3978
3979 /* Figure out how much is left in TARGET that we have to clear.
3980 Do all calculations in ptr_mode. */
3981
3982 addr = XEXP (target, 0);
3983 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3984
3985 if (GET_CODE (copy_size_rtx) == CONST_INT)
3986 {
3987 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3988 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3989 align = MIN (align, (BITS_PER_UNIT
3990 * (INTVAL (copy_size_rtx)
3991 & - INTVAL (copy_size_rtx))));
3992 }
3993 else
3994 {
3995 addr = force_reg (ptr_mode, addr);
3996 addr = expand_binop (ptr_mode, add_optab, addr,
3997 copy_size_rtx, NULL_RTX, 0,
3998 OPTAB_LIB_WIDEN);
3999
4000 size = expand_binop (ptr_mode, sub_optab, size,
4001 copy_size_rtx, NULL_RTX, 0,
4002 OPTAB_LIB_WIDEN);
4003
4004 align = BITS_PER_UNIT;
4005 label = gen_label_rtx ();
4006 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4007 GET_MODE (size), 0, 0, label);
4008 }
4009 align = MIN (align, expr_align (copy_size));
4010
4011 if (size != const0_rtx)
4012 {
4013 rtx dest = gen_rtx_MEM (BLKmode, addr);
4014
4015 MEM_COPY_ATTRIBUTES (dest, target);
4016
4017 /* Be sure we can write on ADDR. */
4018 in_check_memory_usage = 1;
4019 if (current_function_check_memory_usage)
4020 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
4021 addr, Pmode,
4022 size, TYPE_MODE (sizetype),
4023 GEN_INT (MEMORY_USE_WO),
4024 TYPE_MODE (integer_type_node));
4025 in_check_memory_usage = 0;
4026 clear_storage (dest, size, align);
4027 }
4028
4029 if (label)
4030 emit_label (label);
4031 }
4032 }
4033 /* Handle calls that return values in multiple non-contiguous locations.
4034 The Irix 6 ABI has examples of this. */
4035 else if (GET_CODE (target) == PARALLEL)
4036 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4037 TYPE_ALIGN (TREE_TYPE (exp)));
4038 else if (GET_MODE (temp) == BLKmode)
4039 emit_block_move (target, temp, expr_size (exp),
4040 TYPE_ALIGN (TREE_TYPE (exp)));
4041 else
4042 emit_move_insn (target, temp);
4043 }
4044
4045 /* If we don't want a value, return NULL_RTX. */
4046 if (! want_value)
4047 return NULL_RTX;
4048
4049 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4050 ??? The latter test doesn't seem to make sense. */
4051 else if (dont_return_target && GET_CODE (temp) != MEM)
4052 return temp;
4053
4054 /* Return TARGET itself if it is a hard register. */
4055 else if (want_value && GET_MODE (target) != BLKmode
4056 && ! (GET_CODE (target) == REG
4057 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4058 return copy_to_reg (target);
4059
4060 else
4061 return target;
4062 }
4063 \f
4064 /* Return 1 if EXP just contains zeros. */
4065
4066 static int
4067 is_zeros_p (exp)
4068 tree exp;
4069 {
4070 tree elt;
4071
4072 switch (TREE_CODE (exp))
4073 {
4074 case CONVERT_EXPR:
4075 case NOP_EXPR:
4076 case NON_LVALUE_EXPR:
4077 return is_zeros_p (TREE_OPERAND (exp, 0));
4078
4079 case INTEGER_CST:
4080 return integer_zerop (exp);
4081
4082 case COMPLEX_CST:
4083 return
4084 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4085
4086 case REAL_CST:
4087 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4088
4089 case CONSTRUCTOR:
4090 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4091 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4092 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4093 if (! is_zeros_p (TREE_VALUE (elt)))
4094 return 0;
4095
4096 return 1;
4097
4098 default:
4099 return 0;
4100 }
4101 }
4102
4103 /* Return 1 if EXP contains mostly (3/4) zeros. */
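/* E.g., a CONSTRUCTOR for `{ 0, 0, 0, 5 }' counts as mostly zero (3 of
   its 4 elements), while `{ 5, 5, 0, 0 }' does not (illustration only). */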
4104
4105 static int
4106 mostly_zeros_p (exp)
4107 tree exp;
4108 {
4109 if (TREE_CODE (exp) == CONSTRUCTOR)
4110 {
4111 int elts = 0, zeros = 0;
4112 tree elt = CONSTRUCTOR_ELTS (exp);
4113 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4114 {
4115 /* If there are no ranges of true bits, it is all zero. */
4116 return elt == NULL_TREE;
4117 }
4118 for (; elt; elt = TREE_CHAIN (elt))
4119 {
4120 /* We do not handle the case where the index is a RANGE_EXPR,
4121 so the statistic will be somewhat inaccurate.
4122 We do make a more accurate count in store_constructor itself,
4123 so since this function is only used for nested array elements,
4124 this should be close enough. */
4125 if (mostly_zeros_p (TREE_VALUE (elt)))
4126 zeros++;
4127 elts++;
4128 }
4129
4130 return 4 * zeros >= 3 * elts;
4131 }
4132
4133 return is_zeros_p (exp);
4134 }
4135 \f
4136 /* Helper function for store_constructor.
4137 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4138 TYPE is the type of the CONSTRUCTOR, not the element type.
4139 ALIGN and CLEARED are as for store_constructor.
4140
4141 This provides a recursive shortcut back to store_constructor when it isn't
4142 necessary to go through store_field. This is so that we can pass through
4143 the cleared field to let store_constructor know that we may not have to
4144 clear a substructure if the outer structure has already been cleared. */
4145
4146 static void
4147 store_constructor_field (target, bitsize, bitpos,
4148 mode, exp, type, align, cleared)
4149 rtx target;
4150 unsigned HOST_WIDE_INT bitsize;
4151 HOST_WIDE_INT bitpos;
4152 enum machine_mode mode;
4153 tree exp, type;
4154 unsigned int align;
4155 int cleared;
4156 {
4157 if (TREE_CODE (exp) == CONSTRUCTOR
4158 && bitpos % BITS_PER_UNIT == 0
4159 /* If we have a non-zero bitpos for a register target, then we just
4160 let store_field do the bitfield handling. This is unlikely to
4161 generate unnecessary clear instructions anyway. */
4162 && (bitpos == 0 || GET_CODE (target) == MEM))
4163 {
4164 if (bitpos != 0)
4165 target
4166 = change_address (target,
4167 GET_MODE (target) == BLKmode
4168 || 0 != (bitpos
4169 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4170 ? BLKmode : VOIDmode,
4171 plus_constant (XEXP (target, 0),
4172 bitpos / BITS_PER_UNIT));
4173 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4174 }
4175 else
4176 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4177 int_size_in_bytes (type), 0);
4178 }
4179
4180 /* Store the value of constructor EXP into the rtx TARGET.
4181 TARGET is either a REG or a MEM.
4182 ALIGN is the maximum known alignment for TARGET.
4183 CLEARED is true if TARGET is known to have been zero'd.
4184 SIZE is the number of bytes of TARGET we are allowed to modify: this
4185 may not be the same as the size of EXP if we are assigning to a field
4186 which has been packed to exclude padding bits. */
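/* For example (illustration only), `struct { int x, y; } p = { 1, 2 };'
   reaches here with EXP the CONSTRUCTOR for { 1, 2 } and TARGET the rtx
   for P; each element is then stored via store_constructor_field. */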
4187
4188 static void
4189 store_constructor (exp, target, align, cleared, size)
4190 tree exp;
4191 rtx target;
4192 unsigned int align;
4193 int cleared;
4194 HOST_WIDE_INT size;
4195 {
4196 tree type = TREE_TYPE (exp);
4197 #ifdef WORD_REGISTER_OPERATIONS
4198 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4199 #endif
4200
4201 /* We know our target cannot conflict, since safe_from_p has been called. */
4202 #if 0
4203 /* Don't try copying piece by piece into a hard register
4204 since that is vulnerable to being clobbered by EXP.
4205 Instead, construct in a pseudo register and then copy it all. */
4206 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4207 {
4208 rtx temp = gen_reg_rtx (GET_MODE (target));
4209 store_constructor (exp, temp, align, cleared, size);
4210 emit_move_insn (target, temp);
4211 return;
4212 }
4213 #endif
4214
4215 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4216 || TREE_CODE (type) == QUAL_UNION_TYPE)
4217 {
4218 register tree elt;
4219
4220 /* Inform later passes that the whole union value is dead. */
4221 if ((TREE_CODE (type) == UNION_TYPE
4222 || TREE_CODE (type) == QUAL_UNION_TYPE)
4223 && ! cleared)
4224 {
4225 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4226
4227 /* If the constructor is empty, clear the union. */
4228 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4229 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4230 }
4231
4232 /* If we are building a static constructor into a register,
4233 set the initial value as zero so we can fold the value into
4234 a constant. But if more than one register is involved,
4235 this probably loses. */
4236 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4237 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4238 {
4239 if (! cleared)
4240 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4241
4242 cleared = 1;
4243 }
4244
4245 /* If the constructor has fewer fields than the structure
4246 or if we are initializing the structure to mostly zeros,
4247 clear the whole structure first. */
4248 else if (size > 0
4249 && ((list_length (CONSTRUCTOR_ELTS (exp))
4250 != fields_length (type))
4251 || mostly_zeros_p (exp)))
4252 {
4253 if (! cleared)
4254 clear_storage (target, GEN_INT (size), align);
4255
4256 cleared = 1;
4257 }
4258 else if (! cleared)
4259 /* Inform later passes that the old value is dead. */
4260 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4261
4262 /* Store each element of the constructor into
4263 the corresponding field of TARGET. */
4264
4265 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4266 {
4267 register tree field = TREE_PURPOSE (elt);
4268 #ifdef WORD_REGISTER_OPERATIONS
4269 tree value = TREE_VALUE (elt);
4270 #endif
4271 register enum machine_mode mode;
4272 HOST_WIDE_INT bitsize;
4273 HOST_WIDE_INT bitpos = 0;
4274 int unsignedp;
4275 tree offset;
4276 rtx to_rtx = target;
4277
4278 /* Just ignore missing fields.
4279 We cleared the whole structure, above,
4280 if any fields are missing. */
4281 if (field == 0)
4282 continue;
4283
4284 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4285 continue;
4286
4287 if (host_integerp (DECL_SIZE (field), 1))
4288 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4289 else
4290 bitsize = -1;
4291
4292 unsignedp = TREE_UNSIGNED (field);
4293 mode = DECL_MODE (field);
4294 if (DECL_BIT_FIELD (field))
4295 mode = VOIDmode;
4296
4297 offset = DECL_FIELD_OFFSET (field);
4298 if (host_integerp (offset, 0)
4299 && host_integerp (bit_position (field), 0))
4300 {
4301 bitpos = int_bit_position (field);
4302 offset = 0;
4303 }
4304 else
4305 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4306
4307 if (offset)
4308 {
4309 rtx offset_rtx;
4310
4311 if (contains_placeholder_p (offset))
4312 offset = build (WITH_RECORD_EXPR, sizetype,
4313 offset, make_tree (TREE_TYPE (exp), target));
4314
4315 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4316 if (GET_CODE (to_rtx) != MEM)
4317 abort ();
4318
4319 if (GET_MODE (offset_rtx) != ptr_mode)
4320 {
4321 #ifdef POINTERS_EXTEND_UNSIGNED
4322 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4323 #else
4324 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4325 #endif
4326 }
4327
4328 to_rtx
4329 = change_address (to_rtx, VOIDmode,
4330 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4331 force_reg (ptr_mode,
4332 offset_rtx)));
4333 align = DECL_OFFSET_ALIGN (field);
4334 }
4335
4336 if (TREE_READONLY (field))
4337 {
4338 if (GET_CODE (to_rtx) == MEM)
4339 to_rtx = copy_rtx (to_rtx);
4340
4341 RTX_UNCHANGING_P (to_rtx) = 1;
4342 }
4343
4344 #ifdef WORD_REGISTER_OPERATIONS
4345 /* If this initializes a field that is smaller than a word, at the
4346 start of a word, try to widen it to a full word.
4347 This special case allows us to output C++ member function
4348 initializations in a form that the optimizers can understand. */
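/* For instance (illustration only), storing the constant 5 into a
   `short' field at bit position 0 of a register-held structure becomes
   a full-word store of 5 (shifted up on big-endian targets) rather than
   a bit-field insertion. */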
4349 if (GET_CODE (target) == REG
4350 && bitsize < BITS_PER_WORD
4351 && bitpos % BITS_PER_WORD == 0
4352 && GET_MODE_CLASS (mode) == MODE_INT
4353 && TREE_CODE (value) == INTEGER_CST
4354 && exp_size >= 0
4355 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4356 {
4357 tree type = TREE_TYPE (value);
4358 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4359 {
4360 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4361 value = convert (type, value);
4362 }
4363 if (BYTES_BIG_ENDIAN)
4364 value
4365 = fold (build (LSHIFT_EXPR, type, value,
4366 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4367 bitsize = BITS_PER_WORD;
4368 mode = word_mode;
4369 }
4370 #endif
4371 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4372 TREE_VALUE (elt), type, align, cleared);
4373 }
4374 }
4375 else if (TREE_CODE (type) == ARRAY_TYPE)
4376 {
4377 register tree elt;
4378 register int i;
4379 int need_to_clear;
4380 tree domain = TYPE_DOMAIN (type);
4381 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4382 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4383 tree elttype = TREE_TYPE (type);
4384
4385 /* If the constructor has fewer elements than the array,
4386 clear the whole array first. Similarly if this is
4387 a static constructor of a non-BLKmode object. */
4388 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4389 need_to_clear = 1;
4390 else
4391 {
4392 HOST_WIDE_INT count = 0, zero_count = 0;
4393 need_to_clear = 0;
4394 /* This loop is a more accurate version of the loop in
4395 mostly_zeros_p (it handles RANGE_EXPR in an index).
4396 It is also needed to check for missing elements. */
4397 for (elt = CONSTRUCTOR_ELTS (exp);
4398 elt != NULL_TREE;
4399 elt = TREE_CHAIN (elt))
4400 {
4401 tree index = TREE_PURPOSE (elt);
4402 HOST_WIDE_INT this_node_count;
4403
4404 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4405 {
4406 tree lo_index = TREE_OPERAND (index, 0);
4407 tree hi_index = TREE_OPERAND (index, 1);
4408
4409 if (! host_integerp (lo_index, 1)
4410 || ! host_integerp (hi_index, 1))
4411 {
4412 need_to_clear = 1;
4413 break;
4414 }
4415
4416 this_node_count = (tree_low_cst (hi_index, 1)
4417 - tree_low_cst (lo_index, 1) + 1);
4418 }
4419 else
4420 this_node_count = 1;
4421 count += this_node_count;
4422 if (mostly_zeros_p (TREE_VALUE (elt)))
4423 zero_count += this_node_count;
4424 }
4425 /* Clear the entire array first if there are any missing elements,
4426 or if the incidence of zero elements is >= 75%. */
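/* E.g., `int a[100] = { 1, 2, 3 };' (illustration only) has 97 missing
   elements, so the whole array is cleared and only the three explicit
   values are stored afterwards. */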
4427 if (count < maxelt - minelt + 1
4428 || 4 * zero_count >= 3 * count)
4429 need_to_clear = 1;
4430 }
4431 if (need_to_clear && size > 0)
4432 {
4433 if (! cleared)
4434 clear_storage (target, GEN_INT (size), align);
4435 cleared = 1;
4436 }
4437 else
4438 /* Inform later passes that the old value is dead. */
4439 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4440
4441 /* Store each element of the constructor into
4442 the corresponding element of TARGET, determined
4443 by counting the elements. */
4444 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4445 elt;
4446 elt = TREE_CHAIN (elt), i++)
4447 {
4448 register enum machine_mode mode;
4449 HOST_WIDE_INT bitsize;
4450 HOST_WIDE_INT bitpos;
4451 int unsignedp;
4452 tree value = TREE_VALUE (elt);
4453 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4454 tree index = TREE_PURPOSE (elt);
4455 rtx xtarget = target;
4456
4457 if (cleared && is_zeros_p (value))
4458 continue;
4459
4460 unsignedp = TREE_UNSIGNED (elttype);
4461 mode = TYPE_MODE (elttype);
4462 if (mode == BLKmode)
4463 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4464 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4465 : -1);
4466 else
4467 bitsize = GET_MODE_BITSIZE (mode);
4468
4469 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4470 {
4471 tree lo_index = TREE_OPERAND (index, 0);
4472 tree hi_index = TREE_OPERAND (index, 1);
4473 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4474 struct nesting *loop;
4475 HOST_WIDE_INT lo, hi, count;
4476 tree position;
4477
4478 /* If the range is constant and "small", unroll the loop. */
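/* E.g., the GNU C range initializer `int a[6] = { [0 ... 3] = 7 };'
   (illustration only) gives a RANGE_EXPR index; with COUNT == 4 we just
   emit four stores at successive bit positions instead of a loop. */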
4479 if (host_integerp (lo_index, 0)
4480 && host_integerp (hi_index, 0)
4481 && (lo = tree_low_cst (lo_index, 0),
4482 hi = tree_low_cst (hi_index, 0),
4483 count = hi - lo + 1,
4484 (GET_CODE (target) != MEM
4485 || count <= 2
4486 || (host_integerp (TYPE_SIZE (elttype), 1)
4487 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4488 <= 40 * 8)))))
4489 {
4490 lo -= minelt; hi -= minelt;
4491 for (; lo <= hi; lo++)
4492 {
4493 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4494 store_constructor_field (target, bitsize, bitpos, mode,
4495 value, type, align, cleared);
4496 }
4497 }
4498 else
4499 {
4500 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4501 loop_top = gen_label_rtx ();
4502 loop_end = gen_label_rtx ();
4503
4504 unsignedp = TREE_UNSIGNED (domain);
4505
4506 index = build_decl (VAR_DECL, NULL_TREE, domain);
4507
4508 DECL_RTL (index) = index_r
4509 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4510 &unsignedp, 0));
4511
4512 if (TREE_CODE (value) == SAVE_EXPR
4513 && SAVE_EXPR_RTL (value) == 0)
4514 {
4515 /* Make sure value gets expanded once before the
4516 loop. */
4517 expand_expr (value, const0_rtx, VOIDmode, 0);
4518 emit_queue ();
4519 }
4520 store_expr (lo_index, index_r, 0);
4521 loop = expand_start_loop (0);
4522
4523 /* Assign value to element index. */
4524 position
4525 = convert (ssizetype,
4526 fold (build (MINUS_EXPR, TREE_TYPE (index),
4527 index, TYPE_MIN_VALUE (domain))));
4528 position = size_binop (MULT_EXPR, position,
4529 convert (ssizetype,
4530 TYPE_SIZE_UNIT (elttype)));
4531
4532 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4533 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4534 xtarget = change_address (target, mode, addr);
4535 if (TREE_CODE (value) == CONSTRUCTOR)
4536 store_constructor (value, xtarget, align, cleared,
4537 bitsize / BITS_PER_UNIT);
4538 else
4539 store_expr (value, xtarget, 0);
4540
4541 expand_exit_loop_if_false (loop,
4542 build (LT_EXPR, integer_type_node,
4543 index, hi_index));
4544
4545 expand_increment (build (PREINCREMENT_EXPR,
4546 TREE_TYPE (index),
4547 index, integer_one_node), 0, 0);
4548 expand_end_loop ();
4549 emit_label (loop_end);
4550 }
4551 }
4552 else if ((index != 0 && ! host_integerp (index, 0))
4553 || ! host_integerp (TYPE_SIZE (elttype), 1))
4554 {
4555 rtx pos_rtx, addr;
4556 tree position;
4557
4558 if (index == 0)
4559 index = ssize_int (1);
4560
4561 if (minelt)
4562 index = convert (ssizetype,
4563 fold (build (MINUS_EXPR, TREE_TYPE (index),
4564 index, TYPE_MIN_VALUE (domain))));
4565
4566 position = size_binop (MULT_EXPR, index,
4567 convert (ssizetype,
4568 TYPE_SIZE_UNIT (elttype)));
4569 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4570 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4571 xtarget = change_address (target, mode, addr);
4572 store_expr (value, xtarget, 0);
4573 }
4574 else
4575 {
4576 if (index != 0)
4577 bitpos = ((tree_low_cst (index, 0) - minelt)
4578 * tree_low_cst (TYPE_SIZE (elttype), 1));
4579 else
4580 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4581
4582 store_constructor_field (target, bitsize, bitpos, mode, value,
4583 type, align, cleared);
4584 }
4585 }
4586 }
4587
4588 /* Set constructor assignments. */
4589 else if (TREE_CODE (type) == SET_TYPE)
4590 {
4591 tree elt = CONSTRUCTOR_ELTS (exp);
4592 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4593 tree domain = TYPE_DOMAIN (type);
4594 tree domain_min, domain_max, bitlength;
4595
4596 /* The default implementation strategy is to extract the constant
4597 parts of the constructor, use that to initialize the target,
4598 and then "or" in whatever non-constant ranges we need in addition.
4599
4600 If a large set is all zero or all ones, it is
4601 probably better to set it using memset (if available) or bzero.
4602 Also, if a large set has just a single range, it may also be
4603 better to first clear the set (using bzero/memset), and then
4604 set the bits we want. */
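/* For instance (illustration only), a Pascal-style set constructor such
   as `[1, 3..5]' has only constant elements, so its bits are assembled
   into words and stored directly below, while a range with variable
   bounds like `[i..j]' falls through to the __setbits library call. */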
4605
4606 /* Check for all zeros. */
4607 if (elt == NULL_TREE && size > 0)
4608 {
4609 if (!cleared)
4610 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4611 return;
4612 }
4613
4614 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4615 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4616 bitlength = size_binop (PLUS_EXPR,
4617 size_diffop (domain_max, domain_min),
4618 ssize_int (1));
4619
4620 nbits = tree_low_cst (bitlength, 1);
4621
4622 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4623 are "complicated" (more than one range), initialize (the
4624 constant parts) by copying from a constant. */
4625 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4626 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4627 {
4628 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4629 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4630 char *bit_buffer = (char *) alloca (nbits);
4631 HOST_WIDE_INT word = 0;
4632 unsigned int bit_pos = 0;
4633 unsigned int ibit = 0;
4634 unsigned int offset = 0; /* In bytes from beginning of set. */
4635
4636 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4637 for (;;)
4638 {
4639 if (bit_buffer[ibit])
4640 {
4641 if (BYTES_BIG_ENDIAN)
4642 word |= (1 << (set_word_size - 1 - bit_pos));
4643 else
4644 word |= 1 << bit_pos;
4645 }
4646
4647 bit_pos++; ibit++;
4648 if (bit_pos >= set_word_size || ibit == nbits)
4649 {
4650 if (word != 0 || ! cleared)
4651 {
4652 rtx datum = GEN_INT (word);
4653 rtx to_rtx;
4654
4655 /* The assumption here is that it is safe to use
4656 XEXP if the set is multi-word, but not if
4657 it's single-word. */
4658 if (GET_CODE (target) == MEM)
4659 {
4660 to_rtx = plus_constant (XEXP (target, 0), offset);
4661 to_rtx = change_address (target, mode, to_rtx);
4662 }
4663 else if (offset == 0)
4664 to_rtx = target;
4665 else
4666 abort ();
4667 emit_move_insn (to_rtx, datum);
4668 }
4669
4670 if (ibit == nbits)
4671 break;
4672 word = 0;
4673 bit_pos = 0;
4674 offset += set_word_size / BITS_PER_UNIT;
4675 }
4676 }
4677 }
4678 else if (!cleared)
4679 /* Don't bother clearing storage if the set is all ones. */
4680 if (TREE_CHAIN (elt) != NULL_TREE
4681 || (TREE_PURPOSE (elt) == NULL_TREE
4682 ? nbits != 1
4683 : ( ! host_integerp (TREE_VALUE (elt), 0)
4684 || ! host_integerp (TREE_PURPOSE (elt), 0)
4685 || (tree_low_cst (TREE_VALUE (elt), 0)
4686 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4687 != (HOST_WIDE_INT) nbits))))
4688 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4689
4690 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4691 {
4692 /* Start of range of element, or NULL. */
4693 tree startbit = TREE_PURPOSE (elt);
4694 /* End of range of element, or element value. */
4695 tree endbit = TREE_VALUE (elt);
4696 #ifdef TARGET_MEM_FUNCTIONS
4697 HOST_WIDE_INT startb, endb;
4698 #endif
4699 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4700
4701 bitlength_rtx = expand_expr (bitlength,
4702 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4703
4704 /* Handle non-range tuple element like [ expr ]. */
4705 if (startbit == NULL_TREE)
4706 {
4707 startbit = save_expr (endbit);
4708 endbit = startbit;
4709 }
4710
4711 startbit = convert (sizetype, startbit);
4712 endbit = convert (sizetype, endbit);
4713 if (! integer_zerop (domain_min))
4714 {
4715 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4716 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4717 }
4718 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4719 EXPAND_CONST_ADDRESS);
4720 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4721 EXPAND_CONST_ADDRESS);
4722
4723 if (REG_P (target))
4724 {
4725 targetx = assign_stack_temp (GET_MODE (target),
4726 GET_MODE_SIZE (GET_MODE (target)),
4727 0);
4728 emit_move_insn (targetx, target);
4729 }
4730
4731 else if (GET_CODE (target) == MEM)
4732 targetx = target;
4733 else
4734 abort ();
4735
4736 #ifdef TARGET_MEM_FUNCTIONS
4737 /* Optimization: If startbit and endbit are
4738 constants divisible by BITS_PER_UNIT,
4739 call memset instead. */
4740 if (TREE_CODE (startbit) == INTEGER_CST
4741 && TREE_CODE (endbit) == INTEGER_CST
4742 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4743 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4744 {
4745 emit_library_call (memset_libfunc, 0,
4746 VOIDmode, 3,
4747 plus_constant (XEXP (targetx, 0),
4748 startb / BITS_PER_UNIT),
4749 Pmode,
4750 constm1_rtx, TYPE_MODE (integer_type_node),
4751 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4752 TYPE_MODE (sizetype));
4753 }
4754 else
4755 #endif
4756 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4757 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4758 bitlength_rtx, TYPE_MODE (sizetype),
4759 startbit_rtx, TYPE_MODE (sizetype),
4760 endbit_rtx, TYPE_MODE (sizetype));
4761
4762 if (REG_P (target))
4763 emit_move_insn (target, targetx);
4764 }
4765 }
4766
4767 else
4768 abort ();
4769 }
4770
4771 /* Store the value of EXP (an expression tree)
4772 into a subfield of TARGET which has mode MODE and occupies
4773 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4774 If MODE is VOIDmode, it means that we are storing into a bit-field.
4775
4776 If VALUE_MODE is VOIDmode, return nothing in particular.
4777 UNSIGNEDP is not used in this case.
4778
4779 Otherwise, return an rtx for the value stored. This rtx
4780 has mode VALUE_MODE if that is convenient to do.
4781 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4782
4783 ALIGN is the alignment that TARGET is known to have.
4784 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4785
4786 ALIAS_SET is the alias set for the destination. This value will
4787 (in general) be different from that for TARGET, since TARGET is a
4788 reference to the containing structure. */
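/* For example (illustration only), an assignment `s.f = x' where F is a
   3-bit field at bit offset 2 arrives here with BITSIZE 3, BITPOS 2 and
   MODE VOIDmode, and is handled by the bit-field path below. */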
4789
4790 static rtx
4791 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4792 unsignedp, align, total_size, alias_set)
4793 rtx target;
4794 HOST_WIDE_INT bitsize;
4795 HOST_WIDE_INT bitpos;
4796 enum machine_mode mode;
4797 tree exp;
4798 enum machine_mode value_mode;
4799 int unsignedp;
4800 unsigned int align;
4801 HOST_WIDE_INT total_size;
4802 int alias_set;
4803 {
4804 HOST_WIDE_INT width_mask = 0;
4805
4806 if (TREE_CODE (exp) == ERROR_MARK)
4807 return const0_rtx;
4808
4809 if (bitsize < HOST_BITS_PER_WIDE_INT)
4810 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4811
4812 /* If we are storing into an unaligned field of an aligned union that is
4813 in a register, we may have the mode of TARGET being an integer mode but
4814 MODE == BLKmode. In that case, get an aligned object whose size and
4815 alignment are the same as TARGET and store TARGET into it (we can avoid
4816 the store if the field being stored is the entire width of TARGET). Then
4817 call ourselves recursively to store the field into a BLKmode version of
4818 that object. Finally, load from the object into TARGET. This is not
4819 very efficient in general, but should only be slightly more expensive
4820 than the otherwise-required unaligned accesses. Perhaps this can be
4821 cleaned up later. */
4822
4823 if (mode == BLKmode
4824 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4825 {
4826 rtx object = assign_stack_temp (GET_MODE (target),
4827 GET_MODE_SIZE (GET_MODE (target)), 0);
4828 rtx blk_object = copy_rtx (object);
4829
4830 MEM_SET_IN_STRUCT_P (object, 1);
4831 MEM_SET_IN_STRUCT_P (blk_object, 1);
4832 PUT_MODE (blk_object, BLKmode);
4833
4834 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4835 emit_move_insn (object, target);
4836
4837 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4838 align, total_size, alias_set);
4839
4840 /* Even though we aren't returning target, we need to
4841 give it the updated value. */
4842 emit_move_insn (target, object);
4843
4844 return blk_object;
4845 }
4846
4847 if (GET_CODE (target) == CONCAT)
4848 {
4849 /* We're storing into a struct containing a single __complex. */
4850
4851 if (bitpos != 0)
4852 abort ();
4853 return store_expr (exp, target, 0);
4854 }
4855
4856 /* If the structure is in a register or if the component
4857 is a bit field, we cannot use addressing to access it.
4858 Use bit-field techniques or SUBREG to store in it. */
4859
4860 if (mode == VOIDmode
4861 || (mode != BLKmode && ! direct_store[(int) mode]
4862 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4863 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4864 || GET_CODE (target) == REG
4865 || GET_CODE (target) == SUBREG
4866 /* If the field isn't aligned enough to store as an ordinary memref,
4867 store it as a bit field. */
4868 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4869 && (align < GET_MODE_ALIGNMENT (mode)
4870 || bitpos % GET_MODE_ALIGNMENT (mode)))
4871 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4872 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
4873 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4874 /* If the RHS and field are a constant size and the size of the
4875 RHS isn't the same size as the bitfield, we must use bitfield
4876 operations. */
4877 || (bitsize >= 0
4878 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
4879 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
4880 {
4881 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4882
4883 /* If BITSIZE is narrower than the size of the type of EXP
4884 we will be narrowing TEMP. Normally, what's wanted are the
4885 low-order bits. However, if EXP's type is a record and this is
4886 a big-endian machine, we want the upper BITSIZE bits.
4887 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4888 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4889 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4890 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4891 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4892 - bitsize),
4893 temp, 1);
4894
4895 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4896 MODE. */
4897 if (mode != VOIDmode && mode != BLKmode
4898 && mode != TYPE_MODE (TREE_TYPE (exp)))
4899 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4900
4901 /* If the modes of TARGET and TEMP are both BLKmode, both
4902 must be in memory and BITPOS must be aligned on a byte
4903 boundary. If so, we simply do a block copy. */
4904 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4905 {
4906 unsigned int exp_align = expr_align (exp);
4907
4908 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4909 || bitpos % BITS_PER_UNIT != 0)
4910 abort ();
4911
4912 target = change_address (target, VOIDmode,
4913 plus_constant (XEXP (target, 0),
4914 bitpos / BITS_PER_UNIT));
4915
4916 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
4917 align = MIN (exp_align, align);
4918
4919 /* Find an alignment that is consistent with the bit position. */
4920 while ((bitpos % align) != 0)
4921 align >>= 1;
4922
4923 emit_block_move (target, temp,
4924 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4925 / BITS_PER_UNIT),
4926 align);
4927
4928 return value_mode == VOIDmode ? const0_rtx : target;
4929 }
4930
4931 /* Store the value in the bitfield. */
4932 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4933 if (value_mode != VOIDmode)
4934 {
4935 /* The caller wants an rtx for the value. */
4936 /* If possible, avoid refetching from the bitfield itself. */
4937 if (width_mask != 0
4938 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4939 {
4940 tree count;
4941 enum machine_mode tmode;
4942
4943 if (unsignedp)
4944 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4945 tmode = GET_MODE (temp);
4946 if (tmode == VOIDmode)
4947 tmode = value_mode;
4948 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4949 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4950 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4951 }
4952 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4953 NULL_RTX, value_mode, 0, align,
4954 total_size);
4955 }
4956 return const0_rtx;
4957 }
4958 else
4959 {
4960 rtx addr = XEXP (target, 0);
4961 rtx to_rtx;
4962
4963 /* If a value is wanted, it must be the lhs;
4964 so make the address stable for multiple use. */
4965
4966 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4967 && ! CONSTANT_ADDRESS_P (addr)
4968 /* A frame-pointer reference is already stable. */
4969 && ! (GET_CODE (addr) == PLUS
4970 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4971 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4972 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4973 addr = copy_to_reg (addr);
4974
4975 /* Now build a reference to just the desired component. */
4976
4977 to_rtx = copy_rtx (change_address (target, mode,
4978 plus_constant (addr,
4979 (bitpos
4980 / BITS_PER_UNIT))));
4981 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4982 MEM_ALIAS_SET (to_rtx) = alias_set;
4983
4984 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4985 }
4986 }
4987 \f
4988 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4989 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4990 ARRAY_REFs and find the ultimate containing object, which we return.
4991
4992 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4993 bit position, and *PUNSIGNEDP to the signedness of the field.
4994 If the position of the field is variable, we store a tree
4995 giving the variable offset (in units) in *POFFSET.
4996 This offset is in addition to the bit position.
4997 If the position is not variable, we store 0 in *POFFSET.
4998 We set *PALIGNMENT to the alignment of the address that will be
4999 computed. This is the alignment of the thing we return if *POFFSET
5000 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5001
5002 If any of the extraction expressions is volatile,
5003 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5004
5005 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5006 is a mode that can be used to access the field. In that case, *PBITSIZE
5007 is redundant.
5008
5009 If the field describes a variable-sized object, *PMODE is set to
5010 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5011 this case, but the address of the object can be found. */
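/* For example (illustration only), for the reference `s.a[i].b' we
   return the decl for S; *PBITPOS accumulates the constant part of the
   position of B, *POFFSET gets a tree for the variable contribution of
   `[i]', and *PMODE describes the mode of B. */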
5012
5013 tree
5014 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5015 punsignedp, pvolatilep, palignment)
5016 tree exp;
5017 HOST_WIDE_INT *pbitsize;
5018 HOST_WIDE_INT *pbitpos;
5019 tree *poffset;
5020 enum machine_mode *pmode;
5021 int *punsignedp;
5022 int *pvolatilep;
5023 unsigned int *palignment;
5024 {
5025 tree size_tree = 0;
5026 enum machine_mode mode = VOIDmode;
5027 tree offset = size_zero_node;
5028 tree bit_offset = bitsize_zero_node;
5029 unsigned int alignment = BIGGEST_ALIGNMENT;
5030 tree tem;
5031
5032 /* First get the mode, signedness, and size. We do this from just the
5033 outermost expression. */
5034 if (TREE_CODE (exp) == COMPONENT_REF)
5035 {
5036 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5037 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5038 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5039
5040 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5041 }
5042 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5043 {
5044 size_tree = TREE_OPERAND (exp, 1);
5045 *punsignedp = TREE_UNSIGNED (exp);
5046 }
5047 else
5048 {
5049 mode = TYPE_MODE (TREE_TYPE (exp));
5050 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5051
5052 if (mode == BLKmode)
5053 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5054 else
5055 *pbitsize = GET_MODE_BITSIZE (mode);
5056 }
5057
5058 if (size_tree != 0)
5059 {
5060 if (! host_integerp (size_tree, 1))
5061 mode = BLKmode, *pbitsize = -1;
5062 else
5063 *pbitsize = tree_low_cst (size_tree, 1);
5064 }
5065
5066 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5067 and find the ultimate containing object. */
5068 while (1)
5069 {
5070 if (TREE_CODE (exp) == BIT_FIELD_REF)
5071 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5072 else if (TREE_CODE (exp) == COMPONENT_REF)
5073 {
5074 tree field = TREE_OPERAND (exp, 1);
5075 tree this_offset = DECL_FIELD_OFFSET (field);
5076
5077 /* If this field hasn't been filled in yet, don't go
5078 past it. This should only happen when folding expressions
5079 made during type construction. */
5080 if (this_offset == 0)
5081 break;
5082 else if (! TREE_CONSTANT (this_offset)
5083 && contains_placeholder_p (this_offset))
5084 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5085
5086 offset = size_binop (PLUS_EXPR, offset, this_offset);
5087 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5088 DECL_FIELD_BIT_OFFSET (field));
5089
5090 if (! host_integerp (offset, 0))
5091 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5092 }
5093
5094 else if (TREE_CODE (exp) == ARRAY_REF)
5095 {
5096 tree index = TREE_OPERAND (exp, 1);
5097 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5098 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5099 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (exp));
5100
5101 /* We assume all arrays have sizes that are a multiple of a byte.
5102 First subtract the lower bound, if any, in the type of the
5103 index, then convert to sizetype and multiply by the size of the
5104 array element. */
5105 if (low_bound != 0 && ! integer_zerop (low_bound))
5106 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5107 index, low_bound));
5108
5109 /* If the index has a self-referential type, pass it to a
5110 WITH_RECORD_EXPR; if the component size is self-referential,
5111 pass our containing object to one. */
5112 if (! TREE_CONSTANT (index)
5113 && contains_placeholder_p (index))
5114 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5115 if (! TREE_CONSTANT (unit_size)
5116 && contains_placeholder_p (unit_size))
5117 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size,
5118 TREE_OPERAND (exp, 0));
5119
5120 offset = size_binop (PLUS_EXPR, offset,
5121 size_binop (MULT_EXPR,
5122 convert (sizetype, index),
5123 unit_size));
5124 }
5125
5126 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5127 && ! ((TREE_CODE (exp) == NOP_EXPR
5128 || TREE_CODE (exp) == CONVERT_EXPR)
5129 && (TYPE_MODE (TREE_TYPE (exp))
5130 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5131 break;
5132
5133 /* If any reference in the chain is volatile, the effect is volatile. */
5134 if (TREE_THIS_VOLATILE (exp))
5135 *pvolatilep = 1;
5136
5137 /* If the offset is non-constant already, then we can't assume any
5138 alignment more than the alignment here. */
5139 if (! TREE_CONSTANT (offset))
5140 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5141
5142 exp = TREE_OPERAND (exp, 0);
5143 }
5144
5145 if (DECL_P (exp))
5146 alignment = MIN (alignment, DECL_ALIGN (exp));
5147 else if (TREE_TYPE (exp) != 0)
5148 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5149
5150 /* If OFFSET is constant, see if we can return the whole thing as a
5151 constant bit position. Otherwise, split it up. */
5152 if (host_integerp (offset, 0)
5153 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5154 bitsize_unit_node))
5155 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5156 && host_integerp (tem, 0))
5157 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5158 else
5159 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5160
5161 *pmode = mode;
5162 *palignment = alignment;
5163 return exp;
5164 }
5165
5166 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5167
5168 static enum memory_use_mode
5169 get_memory_usage_from_modifier (modifier)
5170 enum expand_modifier modifier;
5171 {
5172 switch (modifier)
5173 {
5174 case EXPAND_NORMAL:
5175 case EXPAND_SUM:
5176 return MEMORY_USE_RO;
5177 break;
5178 case EXPAND_MEMORY_USE_WO:
5179 return MEMORY_USE_WO;
5180 break;
5181 case EXPAND_MEMORY_USE_RW:
5182 return MEMORY_USE_RW;
5183 break;
5184 case EXPAND_MEMORY_USE_DONT:
5185 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5186 MEMORY_USE_DONT, because they are modifiers to a call of
5187 expand_expr in the ADDR_EXPR case of expand_expr. */
5188 case EXPAND_CONST_ADDRESS:
5189 case EXPAND_INITIALIZER:
5190 return MEMORY_USE_DONT;
5191 case EXPAND_MEMORY_USE_BAD:
5192 default:
5193 abort ();
5194 }
5195 }
5196 \f
5197 /* Given an rtx VALUE that may contain additions and multiplications,
5198 return an equivalent value that just refers to a register or memory.
5199 This is done by generating instructions to perform the arithmetic
5200 and returning a pseudo-register containing the value.
5201
5202 The returned value may be a REG, SUBREG, MEM or constant. */
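/* For example (illustration only), given (plus:SI (reg:SI 100)
   (const_int 4)) this emits an add into a pseudo and returns that
   pseudo, so the caller sees a plain register rather than the
   arithmetic expression. */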
5203
5204 rtx
5205 force_operand (value, target)
5206 rtx value, target;
5207 {
5208 register optab binoptab = 0;
5209 /* Use a temporary to force order of execution of calls to
5210 `force_operand'. */
5211 rtx tmp;
5212 register rtx op2;
5213 /* Use subtarget as the target for operand 0 of a binary operation. */
5214 register rtx subtarget = get_subtarget (target);
5215
5216 /* Check for a PIC address load. */
5217 if (flag_pic
5218 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5219 && XEXP (value, 0) == pic_offset_table_rtx
5220 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5221 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5222 || GET_CODE (XEXP (value, 1)) == CONST))
5223 {
5224 if (!subtarget)
5225 subtarget = gen_reg_rtx (GET_MODE (value));
5226 emit_move_insn (subtarget, value);
5227 return subtarget;
5228 }
5229
5230 if (GET_CODE (value) == PLUS)
5231 binoptab = add_optab;
5232 else if (GET_CODE (value) == MINUS)
5233 binoptab = sub_optab;
5234 else if (GET_CODE (value) == MULT)
5235 {
5236 op2 = XEXP (value, 1);
5237 if (!CONSTANT_P (op2)
5238 && !(GET_CODE (op2) == REG && op2 != subtarget))
5239 subtarget = 0;
5240 tmp = force_operand (XEXP (value, 0), subtarget);
5241 return expand_mult (GET_MODE (value), tmp,
5242 force_operand (op2, NULL_RTX),
5243 target, 0);
5244 }
5245
5246 if (binoptab)
5247 {
5248 op2 = XEXP (value, 1);
5249 if (!CONSTANT_P (op2)
5250 && !(GET_CODE (op2) == REG && op2 != subtarget))
5251 subtarget = 0;
5252 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5253 {
5254 binoptab = add_optab;
5255 op2 = negate_rtx (GET_MODE (value), op2);
5256 }
5257
5258 /* Check for an addition with OP2 a constant integer and our first
5259 operand a PLUS of a virtual register and something else. In that
5260 case, we want to emit the sum of the virtual register and the
5261 constant first and then add the other value. This allows virtual
5262 register instantiation to simply modify the constant rather than
5263 creating another one around this addition. */
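/* For instance (illustration only), ((virtual-stack-vars + R) + 4) is
   emitted as virtual-stack-vars + 4 followed by the addition of R, so
   instantiation later folds the frame offset into the 4 instead of
   needing a fresh constant. */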
5264 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5265 && GET_CODE (XEXP (value, 0)) == PLUS
5266 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5267 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5268 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5269 {
5270 rtx temp = expand_binop (GET_MODE (value), binoptab,
5271 XEXP (XEXP (value, 0), 0), op2,
5272 subtarget, 0, OPTAB_LIB_WIDEN);
5273 return expand_binop (GET_MODE (value), binoptab, temp,
5274 force_operand (XEXP (XEXP (value, 0), 1), 0),
5275 target, 0, OPTAB_LIB_WIDEN);
5276 }
5277
5278 tmp = force_operand (XEXP (value, 0), subtarget);
5279 return expand_binop (GET_MODE (value), binoptab, tmp,
5280 force_operand (op2, NULL_RTX),
5281 target, 0, OPTAB_LIB_WIDEN);
5282 /* We give UNSIGNEDP = 0 to expand_binop
5283 because the only operations we are expanding here are signed ones. */
5284 }
5285 return value;
5286 }
5287 \f
5288 /* Subroutine of expand_expr:
5289 save the non-copied parts (LIST) of an expr (LHS), and return a list
5290 which can restore these values to their previous values,
5291 should something modify their storage. */
5292
5293 static tree
5294 save_noncopied_parts (lhs, list)
5295 tree lhs;
5296 tree list;
5297 {
5298 tree tail;
5299 tree parts = 0;
5300
5301 for (tail = list; tail; tail = TREE_CHAIN (tail))
5302 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5303 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5304 else
5305 {
5306 tree part = TREE_VALUE (tail);
5307 tree part_type = TREE_TYPE (part);
5308 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5309 rtx target = assign_temp (part_type, 0, 1, 1);
5310 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5311 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5312 parts = tree_cons (to_be_saved,
5313 build (RTL_EXPR, part_type, NULL_TREE,
5314 (tree) target),
5315 parts);
5316 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5317 }
5318 return parts;
5319 }
5320
5321 /* Subroutine of expand_expr:
5322 record the non-copied parts (LIST) of an expr (LHS), and return a list
5323 which specifies the initial values of these parts. */
5324
5325 static tree
5326 init_noncopied_parts (lhs, list)
5327 tree lhs;
5328 tree list;
5329 {
5330 tree tail;
5331 tree parts = 0;
5332
5333 for (tail = list; tail; tail = TREE_CHAIN (tail))
5334 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5335 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5336 else if (TREE_PURPOSE (tail))
5337 {
5338 tree part = TREE_VALUE (tail);
5339 tree part_type = TREE_TYPE (part);
5340 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5341 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5342 }
5343 return parts;
5344 }
5345
5346 /* Subroutine of expand_expr: return nonzero iff there is no way that
5347 EXP can reference X, which is being modified. TOP_P is nonzero if this
5348 call is going to be used to determine whether we need a temporary
5349 for EXP, as opposed to a recursive call to this function.
5350
5351 It is always safe for this routine to return zero since it merely
5352 searches for optimization opportunities. */
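/* Typical use, mirroring callers later in this file (an illustrative
   sketch rather than part of the original comment):

   if (target == 0 || ! safe_from_p (target, exp, 1))
   target = gen_reg_rtx (mode);

   i.e. fall back to a fresh pseudo whenever EXP might reference TARGET.  */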
5353
5354 static int
5355 safe_from_p (x, exp, top_p)
5356 rtx x;
5357 tree exp;
5358 int top_p;
5359 {
5360 rtx exp_rtl = 0;
5361 int i, nops;
5362 static int save_expr_count;
5363 static int save_expr_size = 0;
5364 static tree *save_expr_rewritten;
5365 static tree save_expr_trees[256];
5366
5367 if (x == 0
5368 /* If EXP has varying size, we MUST use a target since we currently
5369 have no way of allocating temporaries of variable size
5370 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5371 So we assume here that something at a higher level has prevented a
5372 clash. This is somewhat bogus, but the best we can do. Only
5373 do this when X is BLKmode and when we are at the top level. */
5374 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5375 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5376 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5377 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5378 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5379 != INTEGER_CST)
5380 && GET_MODE (x) == BLKmode))
5381 return 1;
5382
5383 if (top_p && save_expr_size == 0)
5384 {
5385 int rtn;
5386
5387 save_expr_count = 0;
5388 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5389 save_expr_rewritten = &save_expr_trees[0];
5390
5391 rtn = safe_from_p (x, exp, 1);
5392
5393 for (i = 0; i < save_expr_count; ++i)
5394 {
5395 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5396 abort ();
5397 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5398 }
5399
5400 save_expr_size = 0;
5401
5402 return rtn;
5403 }
5404
5405 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
5406 find the underlying pseudo. */
5407 if (GET_CODE (x) == SUBREG)
5408 {
5409 x = SUBREG_REG (x);
5410 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5411 return 0;
5412 }
5413
5414 /* If X is a location in the outgoing argument area, it is always safe. */
5415 if (GET_CODE (x) == MEM
5416 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5417 || (GET_CODE (XEXP (x, 0)) == PLUS
5418 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5419 return 1;
5420
5421 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5422 {
5423 case 'd':
5424 exp_rtl = DECL_RTL (exp);
5425 break;
5426
5427 case 'c':
5428 return 1;
5429
5430 case 'x':
5431 if (TREE_CODE (exp) == TREE_LIST)
5432 return ((TREE_VALUE (exp) == 0
5433 || safe_from_p (x, TREE_VALUE (exp), 0))
5434 && (TREE_CHAIN (exp) == 0
5435 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5436 else if (TREE_CODE (exp) == ERROR_MARK)
5437 return 1; /* An already-visited SAVE_EXPR? */
5438 else
5439 return 0;
5440
5441 case '1':
5442 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5443
5444 case '2':
5445 case '<':
5446 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5447 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5448
5449 case 'e':
5450 case 'r':
5451 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5452 the expression. If it is set, we conflict iff we are that rtx or
5453 both are in memory. Otherwise, we check all operands of the
5454 expression recursively. */
5455
5456 switch (TREE_CODE (exp))
5457 {
5458 case ADDR_EXPR:
5459 return (staticp (TREE_OPERAND (exp, 0))
5460 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5461 || TREE_STATIC (exp));
5462
5463 case INDIRECT_REF:
5464 if (GET_CODE (x) == MEM)
5465 return 0;
5466 break;
5467
5468 case CALL_EXPR:
5469 exp_rtl = CALL_EXPR_RTL (exp);
5470 if (exp_rtl == 0)
5471 {
5472 /* Assume that the call will clobber all hard registers and
5473 all of memory. */
5474 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5475 || GET_CODE (x) == MEM)
5476 return 0;
5477 }
5478
5479 break;
5480
5481 case RTL_EXPR:
5482 /* If a sequence exists, we would have to scan every instruction
5483 in the sequence to see if it was safe. This is probably not
5484 worthwhile. */
5485 if (RTL_EXPR_SEQUENCE (exp))
5486 return 0;
5487
5488 exp_rtl = RTL_EXPR_RTL (exp);
5489 break;
5490
5491 case WITH_CLEANUP_EXPR:
5492 exp_rtl = RTL_EXPR_RTL (exp);
5493 break;
5494
5495 case CLEANUP_POINT_EXPR:
5496 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5497
5498 case SAVE_EXPR:
5499 exp_rtl = SAVE_EXPR_RTL (exp);
5500 if (exp_rtl)
5501 break;
5502
5503 /* This SAVE_EXPR might appear many times in the top-level
5504 safe_from_p() expression, and if it has a complex
5505 subexpression, examining it multiple times could result
5506 in a combinatorial explosion. E.g. on an Alpha
5507 running at 200MHz or faster, a Fortran test case compiled with
5508 optimization took about 28 minutes to compile -- even though
5509 it was only a few lines long, and the complicated line causing
5510 so much time to be spent in the earlier version of safe_from_p()
5511 had only 293 or so unique nodes.
5512
5513 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5514 where it is so we can turn it back in the top-level safe_from_p()
5515 when we're done. */
5516
5517 /* For now, don't bother re-sizing the array. */
5518 if (save_expr_count >= save_expr_size)
5519 return 0;
5520 save_expr_rewritten[save_expr_count++] = exp;
5521
5522 nops = TREE_CODE_LENGTH (SAVE_EXPR);
5523 for (i = 0; i < nops; i++)
5524 {
5525 tree operand = TREE_OPERAND (exp, i);
5526 if (operand == NULL_TREE)
5527 continue;
5528 TREE_SET_CODE (exp, ERROR_MARK);
5529 if (!safe_from_p (x, operand, 0))
5530 return 0;
5531 TREE_SET_CODE (exp, SAVE_EXPR);
5532 }
5533 TREE_SET_CODE (exp, ERROR_MARK);
5534 return 1;
5535
5536 case BIND_EXPR:
5537 /* The only operand we look at is operand 1. The rest aren't
5538 part of the expression. */
5539 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5540
5541 case METHOD_CALL_EXPR:
5542 /* This takes an rtx argument, but shouldn't appear here. */
5543 abort ();
5544
5545 default:
5546 break;
5547 }
5548
5549 /* If we have an rtx, we do not need to scan our operands. */
5550 if (exp_rtl)
5551 break;
5552
5553 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
5554 for (i = 0; i < nops; i++)
5555 if (TREE_OPERAND (exp, i) != 0
5556 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5557 return 0;
5558 }
5559
5560 /* If we have an rtl, find any enclosed object. Then see if we conflict
5561 with it. */
5562 if (exp_rtl)
5563 {
5564 if (GET_CODE (exp_rtl) == SUBREG)
5565 {
5566 exp_rtl = SUBREG_REG (exp_rtl);
5567 if (GET_CODE (exp_rtl) == REG
5568 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5569 return 0;
5570 }
5571
5572 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5573 are memory and EXP is not readonly. */
5574 return ! (rtx_equal_p (x, exp_rtl)
5575 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5576 && ! TREE_READONLY (exp)));
5577 }
5578
5579 /* If we reach here, it is safe. */
5580 return 1;
5581 }
5582
5583 /* Subroutine of expand_expr: return nonzero iff EXP is an
5584 expression whose type is statically determinable. */
5585
5586 static int
5587 fixed_type_p (exp)
5588 tree exp;
5589 {
5590 if (TREE_CODE (exp) == PARM_DECL
5591 || TREE_CODE (exp) == VAR_DECL
5592 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5593 || TREE_CODE (exp) == COMPONENT_REF
5594 || TREE_CODE (exp) == ARRAY_REF)
5595 return 1;
5596 return 0;
5597 }
5598
5599 /* Subroutine of expand_expr: return rtx if EXP is a
5600 variable or parameter; else return 0. */
5601
5602 static rtx
5603 var_rtx (exp)
5604 tree exp;
5605 {
5606 STRIP_NOPS (exp);
5607 switch (TREE_CODE (exp))
5608 {
5609 case PARM_DECL:
5610 case VAR_DECL:
5611 return DECL_RTL (exp);
5612 default:
5613 return 0;
5614 }
5615 }
5616
5617 #ifdef MAX_INTEGER_COMPUTATION_MODE
5618 void
5619 check_max_integer_computation_mode (exp)
5620 tree exp;
5621 {
5622 enum tree_code code;
5623 enum machine_mode mode;
5624
5625 /* Strip any NOPs that don't change the mode. */
5626 STRIP_NOPS (exp);
5627 code = TREE_CODE (exp);
5628
5629 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5630 if (code == NOP_EXPR
5631 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5632 return;
5633
5634 /* First check the type of the overall operation. We need only look at
5635 unary, binary and relational operations. */
5636 if (TREE_CODE_CLASS (code) == '1'
5637 || TREE_CODE_CLASS (code) == '2'
5638 || TREE_CODE_CLASS (code) == '<')
5639 {
5640 mode = TYPE_MODE (TREE_TYPE (exp));
5641 if (GET_MODE_CLASS (mode) == MODE_INT
5642 && mode > MAX_INTEGER_COMPUTATION_MODE)
5643 fatal ("unsupported wide integer operation");
5644 }
5645
5646 /* Check operand of a unary op. */
5647 if (TREE_CODE_CLASS (code) == '1')
5648 {
5649 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5650 if (GET_MODE_CLASS (mode) == MODE_INT
5651 && mode > MAX_INTEGER_COMPUTATION_MODE)
5652 fatal ("unsupported wide integer operation");
5653 }
5654
5655 /* Check operands of a binary/comparison op. */
5656 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5657 {
5658 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5659 if (GET_MODE_CLASS (mode) == MODE_INT
5660 && mode > MAX_INTEGER_COMPUTATION_MODE)
5661 fatal ("unsupported wide integer operation");
5662
5663 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5664 if (GET_MODE_CLASS (mode) == MODE_INT
5665 && mode > MAX_INTEGER_COMPUTATION_MODE)
5666 fatal ("unsupported wide integer operation");
5667 }
5668 }
5669 #endif
5670
5671 \f
5672 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5673 has any readonly fields. If any of the fields have types that
5674 contain readonly fields, return true as well. */
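/* For example (illustrative only):

   struct s { const int a; };
   struct t { struct s inner; };

   both `struct s' and `struct t' are treated as having readonly fields.  */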
5675
5676 static int
5677 readonly_fields_p (type)
5678 tree type;
5679 {
5680 tree field;
5681
5682 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
5683 if (TREE_CODE (field) == FIELD_DECL
5684 && (TREE_READONLY (field)
5685 || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
5686 && readonly_fields_p (TREE_TYPE (field)))))
5687 return 1;
5688
5689 return 0;
5690 }
5691 \f
5692 /* expand_expr: generate code for computing expression EXP.
5693 An rtx for the computed value is returned. The value is never null.
5694 In the case of a void EXP, const0_rtx is returned.
5695
5696 The value may be stored in TARGET if TARGET is nonzero.
5697 TARGET is just a suggestion; callers must assume that
5698 the rtx returned may not be the same as TARGET.
5699
5700 If TARGET is CONST0_RTX, it means that the value will be ignored.
5701
5702 If TMODE is not VOIDmode, it suggests generating the
5703 result in mode TMODE. But this is done only when convenient.
5704 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5705 TMODE is just a suggestion; callers must assume that
5706 the rtx returned may not have mode TMODE.
5707
5708 Note that TARGET may have neither TMODE nor MODE. In that case, it
5709 probably will not be used.
5710
5711 If MODIFIER is EXPAND_SUM then when EXP is an addition
5712 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5713 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5714 products as above, or REG or MEM, or constant.
5715 Ordinarily in such cases we would output mul or add instructions
5716 and then return a pseudo reg containing the sum.
5717
5718 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5719 it also marks a label as absolutely required (it can't be dead).
5720 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5721 This is used for outputting expressions used in initializers.
5722
5723 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5724 with a constant address even if that address is not normally legitimate.
5725 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
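/* A minimal usage sketch (illustrative, not part of the original
   documentation):

   rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   lets expand_expr choose both the target and the mode, while passing
   const0_rtx as TARGET requests only EXP's side effects.  With EXPAND_SUM
   the result may instead be a symbolic sum such as
   (plus (reg ...) (const_int ...)), which callers typically feed to
   memory_address, as the INDIRECT_REF case below does.  */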
5726
5727 rtx
5728 expand_expr (exp, target, tmode, modifier)
5729 register tree exp;
5730 rtx target;
5731 enum machine_mode tmode;
5732 enum expand_modifier modifier;
5733 {
5734 register rtx op0, op1, temp;
5735 tree type = TREE_TYPE (exp);
5736 int unsignedp = TREE_UNSIGNED (type);
5737 register enum machine_mode mode;
5738 register enum tree_code code = TREE_CODE (exp);
5739 optab this_optab;
5740 rtx subtarget, original_target;
5741 int ignore;
5742 tree context;
5743 /* Used by check-memory-usage to make modifier read only. */
5744 enum expand_modifier ro_modifier;
5745
5746 /* Handle ERROR_MARK before anybody tries to access its type. */
5747 if (TREE_CODE (exp) == ERROR_MARK)
5748 {
5749 op0 = CONST0_RTX (tmode);
5750 if (op0 != 0)
5751 return op0;
5752 return const0_rtx;
5753 }
5754
5755 mode = TYPE_MODE (type);
5756 /* Use subtarget as the target for operand 0 of a binary operation. */
5757 subtarget = get_subtarget (target);
5758 original_target = target;
5759 ignore = (target == const0_rtx
5760 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5761 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5762 || code == COND_EXPR)
5763 && TREE_CODE (type) == VOID_TYPE));
5764
5765 /* Make a read-only version of the modifier. */
5766 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5767 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5768 ro_modifier = modifier;
5769 else
5770 ro_modifier = EXPAND_NORMAL;
5771
5772 /* If we are going to ignore this result, we need only do something
5773 if there is a side-effect somewhere in the expression. If there
5774 is, short-circuit the most common cases here. Note that we must
5775 not call expand_expr with anything but const0_rtx in case this
5776 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5777
5778 if (ignore)
5779 {
5780 if (! TREE_SIDE_EFFECTS (exp))
5781 return const0_rtx;
5782
5783 /* Ensure we reference a volatile object even if value is ignored, but
5784 don't do this if all we are doing is taking its address. */
5785 if (TREE_THIS_VOLATILE (exp)
5786 && TREE_CODE (exp) != FUNCTION_DECL
5787 && mode != VOIDmode && mode != BLKmode
5788 && modifier != EXPAND_CONST_ADDRESS)
5789 {
5790 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5791 if (GET_CODE (temp) == MEM)
5792 temp = copy_to_reg (temp);
5793 return const0_rtx;
5794 }
5795
5796 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5797 || code == INDIRECT_REF || code == BUFFER_REF)
5798 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5799 VOIDmode, ro_modifier);
5800 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5801 || code == ARRAY_REF)
5802 {
5803 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5804 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5805 return const0_rtx;
5806 }
5807 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5808 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5809 /* If the second operand has no side effects, just evaluate
5810 the first. */
5811 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5812 VOIDmode, ro_modifier);
5813 else if (code == BIT_FIELD_REF)
5814 {
5815 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5816 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5817 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
5818 return const0_rtx;
5819 }
5820 ;
5821 target = 0;
5822 }
5823
5824 #ifdef MAX_INTEGER_COMPUTATION_MODE
5825 /* Only check stuff here if the mode we want is different from the mode
5826 of the expression; if it's the same, check_max_integer_computiation_mode
5827 will handle it. Do we really need to check this stuff at all? */
5828
5829 if (target
5830 && GET_MODE (target) != mode
5831 && TREE_CODE (exp) != INTEGER_CST
5832 && TREE_CODE (exp) != PARM_DECL
5833 && TREE_CODE (exp) != ARRAY_REF
5834 && TREE_CODE (exp) != COMPONENT_REF
5835 && TREE_CODE (exp) != BIT_FIELD_REF
5836 && TREE_CODE (exp) != INDIRECT_REF
5837 && TREE_CODE (exp) != CALL_EXPR
5838 && TREE_CODE (exp) != VAR_DECL
5839 && TREE_CODE (exp) != RTL_EXPR)
5840 {
5841 enum machine_mode mode = GET_MODE (target);
5842
5843 if (GET_MODE_CLASS (mode) == MODE_INT
5844 && mode > MAX_INTEGER_COMPUTATION_MODE)
5845 fatal ("unsupported wide integer operation");
5846 }
5847
5848 if (tmode != mode
5849 && TREE_CODE (exp) != INTEGER_CST
5850 && TREE_CODE (exp) != PARM_DECL
5851 && TREE_CODE (exp) != ARRAY_REF
5852 && TREE_CODE (exp) != COMPONENT_REF
5853 && TREE_CODE (exp) != BIT_FIELD_REF
5854 && TREE_CODE (exp) != INDIRECT_REF
5855 && TREE_CODE (exp) != VAR_DECL
5856 && TREE_CODE (exp) != CALL_EXPR
5857 && TREE_CODE (exp) != RTL_EXPR
5858 && GET_MODE_CLASS (tmode) == MODE_INT
5859 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5860 fatal ("unsupported wide integer operation");
5861
5862 check_max_integer_computation_mode (exp);
5863 #endif
5864
5865 /* If will do cse, generate all results into pseudo registers
5866 since 1) that allows cse to find more things
5867 and 2) otherwise cse could produce an insn the machine
5868 cannot support. */
5869
5870 if (! cse_not_expected && mode != BLKmode && target
5871 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5872 target = subtarget;
5873
5874 switch (code)
5875 {
5876 case LABEL_DECL:
5877 {
5878 tree function = decl_function_context (exp);
5879 /* Handle using a label in a containing function. */
5880 if (function != current_function_decl
5881 && function != inline_function_decl && function != 0)
5882 {
5883 struct function *p = find_function_data (function);
5884 /* Allocate in the memory associated with the function
5885 that the label is in. */
5886 push_obstacks (p->function_obstack,
5887 p->function_maybepermanent_obstack);
5888
5889 p->expr->x_forced_labels
5890 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5891 p->expr->x_forced_labels);
5892 pop_obstacks ();
5893 }
5894 else
5895 {
5896 if (modifier == EXPAND_INITIALIZER)
5897 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5898 label_rtx (exp),
5899 forced_labels);
5900 }
5901
5902 temp = gen_rtx_MEM (FUNCTION_MODE,
5903 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5904 if (function != current_function_decl
5905 && function != inline_function_decl && function != 0)
5906 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5907 return temp;
5908 }
5909
5910 case PARM_DECL:
5911 if (DECL_RTL (exp) == 0)
5912 {
5913 error_with_decl (exp, "prior parameter's size depends on `%s'");
5914 return CONST0_RTX (mode);
5915 }
5916
5917 /* ... fall through ... */
5918
5919 case VAR_DECL:
5920 /* If a static var's type was incomplete when the decl was written,
5921 but the type is complete now, lay out the decl now. */
5922 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5923 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5924 {
5925 push_obstacks_nochange ();
5926 end_temporary_allocation ();
5927 layout_decl (exp, 0);
5928 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5929 pop_obstacks ();
5930 }
5931
5932 /* Although static-storage variables start off initialized, according to
5933 ANSI C, a memcpy could overwrite them with uninitialized values. So
5934 we check them too. This also lets us check for read-only variables
5935 accessed via a non-const declaration, in case it won't be detected
5936 any other way (e.g., in an embedded system or OS kernel without
5937 memory protection).
5938
5939 Aggregates are not checked here; they're handled elsewhere. */
5940 if (cfun && current_function_check_memory_usage
5941 && code == VAR_DECL
5942 && GET_CODE (DECL_RTL (exp)) == MEM
5943 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5944 {
5945 enum memory_use_mode memory_usage;
5946 memory_usage = get_memory_usage_from_modifier (modifier);
5947
5948 in_check_memory_usage = 1;
5949 if (memory_usage != MEMORY_USE_DONT)
5950 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5951 XEXP (DECL_RTL (exp), 0), Pmode,
5952 GEN_INT (int_size_in_bytes (type)),
5953 TYPE_MODE (sizetype),
5954 GEN_INT (memory_usage),
5955 TYPE_MODE (integer_type_node));
5956 in_check_memory_usage = 0;
5957 }
5958
5959 /* ... fall through ... */
5960
5961 case FUNCTION_DECL:
5962 case RESULT_DECL:
5963 if (DECL_RTL (exp) == 0)
5964 abort ();
5965
5966 /* Ensure the variable is marked as used even if it doesn't go through
5967 a parser. If it hasn't been used yet, write out an external
5968 definition. */
5969 if (! TREE_USED (exp))
5970 {
5971 assemble_external (exp);
5972 TREE_USED (exp) = 1;
5973 }
5974
5975 /* Show we haven't gotten RTL for this yet. */
5976 temp = 0;
5977
5978 /* Handle variables inherited from containing functions. */
5979 context = decl_function_context (exp);
5980
5981 /* We treat inline_function_decl as an alias for the current function
5982 because that is the inline function whose vars, types, etc.
5983 are being merged into the current function.
5984 See expand_inline_function. */
5985
5986 if (context != 0 && context != current_function_decl
5987 && context != inline_function_decl
5988 /* If var is static, we don't need a static chain to access it. */
5989 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5990 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5991 {
5992 rtx addr;
5993
5994 /* Mark as non-local and addressable. */
5995 DECL_NONLOCAL (exp) = 1;
5996 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5997 abort ();
5998 mark_addressable (exp);
5999 if (GET_CODE (DECL_RTL (exp)) != MEM)
6000 abort ();
6001 addr = XEXP (DECL_RTL (exp), 0);
6002 if (GET_CODE (addr) == MEM)
6003 addr = change_address (addr, Pmode,
6004 fix_lexical_addr (XEXP (addr, 0), exp));
6005 else
6006 addr = fix_lexical_addr (addr, exp);
6007
6008 temp = change_address (DECL_RTL (exp), mode, addr);
6009 }
6010
6011 /* This is the case of an array whose size is to be determined
6012 from its initializer, while the initializer is still being parsed.
6013 See expand_decl. */
6014
6015 else if (GET_CODE (DECL_RTL (exp)) == MEM
6016 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6017 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
6018 XEXP (DECL_RTL (exp), 0));
6019
6020 /* If DECL_RTL is memory, we are in the normal case. If the address
6021 is not valid, or it is not a register and -fforce-addr is
6022 specified, get the address into a register. */
6023
6024 else if (GET_CODE (DECL_RTL (exp)) == MEM
6025 && modifier != EXPAND_CONST_ADDRESS
6026 && modifier != EXPAND_SUM
6027 && modifier != EXPAND_INITIALIZER
6028 && (! memory_address_p (DECL_MODE (exp),
6029 XEXP (DECL_RTL (exp), 0))
6030 || (flag_force_addr
6031 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6032 temp = change_address (DECL_RTL (exp), VOIDmode,
6033 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6034
6035 /* If we got something, return it. But first, set the alignment
6036 if the address is a register. */
6037 if (temp != 0)
6038 {
6039 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6040 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6041
6042 return temp;
6043 }
6044
6045 /* If the mode of DECL_RTL does not match that of the decl, it
6046 must be a promoted value. We return a SUBREG of the wanted mode,
6047 but mark it so that we know that it was already extended. */
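/* For instance (illustrative only): on a target whose PROMOTE_MODE widens
   HImode variables to SImode, DECL_RTL is an SImode pseudo and we return
   (subreg:HI (reg:SI n) 0), with SUBREG_PROMOTED_VAR_P and
   SUBREG_PROMOTED_UNSIGNED_P recording how it was extended.  */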
6048
6049 if (GET_CODE (DECL_RTL (exp)) == REG
6050 && GET_MODE (DECL_RTL (exp)) != mode)
6051 {
6052 /* Get the signedness used for this variable. Ensure we get the
6053 same mode we got when the variable was declared. */
6054 if (GET_MODE (DECL_RTL (exp))
6055 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6056 abort ();
6057
6058 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
6059 SUBREG_PROMOTED_VAR_P (temp) = 1;
6060 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6061 return temp;
6062 }
6063
6064 return DECL_RTL (exp);
6065
6066 case INTEGER_CST:
6067 return immed_double_const (TREE_INT_CST_LOW (exp),
6068 TREE_INT_CST_HIGH (exp), mode);
6069
6070 case CONST_DECL:
6071 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6072 EXPAND_MEMORY_USE_BAD);
6073
6074 case REAL_CST:
6075 /* If optimized, generate immediate CONST_DOUBLE
6076 which will be turned into memory by reload if necessary.
6077
6078 We used to force a register so that loop.c could see it. But
6079 this does not allow gen_* patterns to perform optimizations with
6080 the constants. It also produces two insns in cases like "x = 1.0;".
6081 On most machines, floating-point constants are not permitted in
6082 many insns, so we'd end up copying it to a register in any case.
6083
6084 Now, we do the copying in expand_binop, if appropriate. */
6085 return immed_real_const (exp);
6086
6087 case COMPLEX_CST:
6088 case STRING_CST:
6089 if (! TREE_CST_RTL (exp))
6090 output_constant_def (exp);
6091
6092 /* TREE_CST_RTL probably contains a constant address.
6093 On RISC machines where a constant address isn't valid,
6094 make some insns to get that address into a register. */
6095 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6096 && modifier != EXPAND_CONST_ADDRESS
6097 && modifier != EXPAND_INITIALIZER
6098 && modifier != EXPAND_SUM
6099 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6100 || (flag_force_addr
6101 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6102 return change_address (TREE_CST_RTL (exp), VOIDmode,
6103 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6104 return TREE_CST_RTL (exp);
6105
6106 case EXPR_WITH_FILE_LOCATION:
6107 {
6108 rtx to_return;
6109 const char *saved_input_filename = input_filename;
6110 int saved_lineno = lineno;
6111 input_filename = EXPR_WFL_FILENAME (exp);
6112 lineno = EXPR_WFL_LINENO (exp);
6113 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6114 emit_line_note (input_filename, lineno);
6115 /* Possibly avoid switching back and forth here. */
6116 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6117 input_filename = saved_input_filename;
6118 lineno = saved_lineno;
6119 return to_return;
6120 }
6121
6122 case SAVE_EXPR:
6123 context = decl_function_context (exp);
6124
6125 /* If this SAVE_EXPR was at global context, assume we are an
6126 initialization function and move it into our context. */
6127 if (context == 0)
6128 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6129
6130 /* We treat inline_function_decl as an alias for the current function
6131 because that is the inline function whose vars, types, etc.
6132 are being merged into the current function.
6133 See expand_inline_function. */
6134 if (context == current_function_decl || context == inline_function_decl)
6135 context = 0;
6136
6137 /* If this is non-local, handle it. */
6138 if (context)
6139 {
6140 /* The following call just exists to abort if the context is
6141 not of a containing function. */
6142 find_function_data (context);
6143
6144 temp = SAVE_EXPR_RTL (exp);
6145 if (temp && GET_CODE (temp) == REG)
6146 {
6147 put_var_into_stack (exp);
6148 temp = SAVE_EXPR_RTL (exp);
6149 }
6150 if (temp == 0 || GET_CODE (temp) != MEM)
6151 abort ();
6152 return change_address (temp, mode,
6153 fix_lexical_addr (XEXP (temp, 0), exp));
6154 }
6155 if (SAVE_EXPR_RTL (exp) == 0)
6156 {
6157 if (mode == VOIDmode)
6158 temp = const0_rtx;
6159 else
6160 temp = assign_temp (type, 3, 0, 0);
6161
6162 SAVE_EXPR_RTL (exp) = temp;
6163 if (!optimize && GET_CODE (temp) == REG)
6164 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6165 save_expr_regs);
6166
6167 /* If the mode of TEMP does not match that of the expression, it
6168 must be a promoted value. We pass store_expr a SUBREG of the
6169 wanted mode but mark it so that we know that it was already
6170 extended. Note that `unsignedp' was modified above in
6171 this case. */
6172
6173 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6174 {
6175 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6176 SUBREG_PROMOTED_VAR_P (temp) = 1;
6177 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6178 }
6179
6180 if (temp == const0_rtx)
6181 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6182 EXPAND_MEMORY_USE_BAD);
6183 else
6184 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6185
6186 TREE_USED (exp) = 1;
6187 }
6188
6189 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6190 must be a promoted value. We return a SUBREG of the wanted mode,
6191 but mark it so that we know that it was already extended. */
6192
6193 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6194 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6195 {
6196 /* Compute the signedness and make the proper SUBREG. */
6197 promote_mode (type, mode, &unsignedp, 0);
6198 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6199 SUBREG_PROMOTED_VAR_P (temp) = 1;
6200 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6201 return temp;
6202 }
6203
6204 return SAVE_EXPR_RTL (exp);
6205
6206 case UNSAVE_EXPR:
6207 {
6208 rtx temp;
6209 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6210 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6211 return temp;
6212 }
6213
6214 case PLACEHOLDER_EXPR:
6215 {
6216 tree placeholder_expr;
6217
6218 /* If there is an object on the head of the placeholder list,
6219 see if some object in it is of type TYPE or a pointer to it. For
6220 further information, see tree.def. */
6221 for (placeholder_expr = placeholder_list;
6222 placeholder_expr != 0;
6223 placeholder_expr = TREE_CHAIN (placeholder_expr))
6224 {
6225 tree need_type = TYPE_MAIN_VARIANT (type);
6226 tree object = 0;
6227 tree old_list = placeholder_list;
6228 tree elt;
6229
6230 /* Find the outermost reference that is of the type we want.
6231 If none, see if any object has a type that is a pointer to
6232 the type we want. */
6233 for (elt = TREE_PURPOSE (placeholder_expr);
6234 elt != 0 && object == 0;
6235 elt
6236 = ((TREE_CODE (elt) == COMPOUND_EXPR
6237 || TREE_CODE (elt) == COND_EXPR)
6238 ? TREE_OPERAND (elt, 1)
6239 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6240 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6241 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6242 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6243 ? TREE_OPERAND (elt, 0) : 0))
6244 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6245 object = elt;
6246
6247 for (elt = TREE_PURPOSE (placeholder_expr);
6248 elt != 0 && object == 0;
6249 elt
6250 = ((TREE_CODE (elt) == COMPOUND_EXPR
6251 || TREE_CODE (elt) == COND_EXPR)
6252 ? TREE_OPERAND (elt, 1)
6253 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6254 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6255 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6256 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6257 ? TREE_OPERAND (elt, 0) : 0))
6258 if (POINTER_TYPE_P (TREE_TYPE (elt))
6259 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6260 == need_type))
6261 object = build1 (INDIRECT_REF, need_type, elt);
6262
6263 if (object != 0)
6264 {
6265 /* Expand this object skipping the list entries before
6266 it was found in case it is also a PLACEHOLDER_EXPR.
6267 In that case, we want to translate it using subsequent
6268 entries. */
6269 placeholder_list = TREE_CHAIN (placeholder_expr);
6270 temp = expand_expr (object, original_target, tmode,
6271 ro_modifier);
6272 placeholder_list = old_list;
6273 return temp;
6274 }
6275 }
6276 }
6277
6278 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6279 abort ();
6280
6281 case WITH_RECORD_EXPR:
6282 /* Put the object on the placeholder list, expand our first operand,
6283 and pop the list. */
6284 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6285 placeholder_list);
6286 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6287 tmode, ro_modifier);
6288 placeholder_list = TREE_CHAIN (placeholder_list);
6289 return target;
6290
6291 case GOTO_EXPR:
6292 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6293 expand_goto (TREE_OPERAND (exp, 0));
6294 else
6295 expand_computed_goto (TREE_OPERAND (exp, 0));
6296 return const0_rtx;
6297
6298 case EXIT_EXPR:
6299 expand_exit_loop_if_false (NULL_PTR,
6300 invert_truthvalue (TREE_OPERAND (exp, 0)));
6301 return const0_rtx;
6302
6303 case LABELED_BLOCK_EXPR:
6304 if (LABELED_BLOCK_BODY (exp))
6305 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6306 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6307 return const0_rtx;
6308
6309 case EXIT_BLOCK_EXPR:
6310 if (EXIT_BLOCK_RETURN (exp))
6311 sorry ("returned value in block_exit_expr");
6312 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6313 return const0_rtx;
6314
6315 case LOOP_EXPR:
6316 push_temp_slots ();
6317 expand_start_loop (1);
6318 expand_expr_stmt (TREE_OPERAND (exp, 0));
6319 expand_end_loop ();
6320 pop_temp_slots ();
6321
6322 return const0_rtx;
6323
6324 case BIND_EXPR:
6325 {
6326 tree vars = TREE_OPERAND (exp, 0);
6327 int vars_need_expansion = 0;
6328
6329 /* Need to open a binding contour here because
6330 if there are any cleanups they must be contained here. */
6331 expand_start_bindings (2);
6332
6333 /* Mark the corresponding BLOCK for output in its proper place. */
6334 if (TREE_OPERAND (exp, 2) != 0
6335 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6336 insert_block (TREE_OPERAND (exp, 2));
6337
6338 /* If VARS have not yet been expanded, expand them now. */
6339 while (vars)
6340 {
6341 if (DECL_RTL (vars) == 0)
6342 {
6343 vars_need_expansion = 1;
6344 expand_decl (vars);
6345 }
6346 expand_decl_init (vars);
6347 vars = TREE_CHAIN (vars);
6348 }
6349
6350 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6351
6352 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6353
6354 return temp;
6355 }
6356
6357 case RTL_EXPR:
6358 if (RTL_EXPR_SEQUENCE (exp))
6359 {
6360 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6361 abort ();
6362 emit_insns (RTL_EXPR_SEQUENCE (exp));
6363 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6364 }
6365 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6366 free_temps_for_rtl_expr (exp);
6367 return RTL_EXPR_RTL (exp);
6368
6369 case CONSTRUCTOR:
6370 /* If we don't need the result, just ensure we evaluate any
6371 subexpressions. */
6372 if (ignore)
6373 {
6374 tree elt;
6375 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6376 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6377 EXPAND_MEMORY_USE_BAD);
6378 return const0_rtx;
6379 }
6380
6381 /* All elts simple constants => refer to a constant in memory. But
6382 if this is a non-BLKmode mode, let it store a field at a time
6383 since that should make a CONST_INT or CONST_DOUBLE when we
6384 fold. Likewise, if we have a target we can use, it is best to
6385 store directly into the target unless the type is large enough
6386 that memcpy will be used. If we are making an initializer and
6387 all operands are constant, put it in memory as well. */
6388 else if ((TREE_STATIC (exp)
6389 && ((mode == BLKmode
6390 && ! (target != 0 && safe_from_p (target, exp, 1)))
6391 || TREE_ADDRESSABLE (exp)
6392 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6393 && (! MOVE_BY_PIECES_P
6394 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6395 TYPE_ALIGN (type)))
6396 && ! mostly_zeros_p (exp))))
6397 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6398 {
6399 rtx constructor = output_constant_def (exp);
6400
6401 if (modifier != EXPAND_CONST_ADDRESS
6402 && modifier != EXPAND_INITIALIZER
6403 && modifier != EXPAND_SUM
6404 && (! memory_address_p (GET_MODE (constructor),
6405 XEXP (constructor, 0))
6406 || (flag_force_addr
6407 && GET_CODE (XEXP (constructor, 0)) != REG)))
6408 constructor = change_address (constructor, VOIDmode,
6409 XEXP (constructor, 0));
6410 return constructor;
6411 }
6412
6413 else
6414 {
6415 /* Handle calls that pass values in multiple non-contiguous
6416 locations. The Irix 6 ABI has examples of this. */
6417 if (target == 0 || ! safe_from_p (target, exp, 1)
6418 || GET_CODE (target) == PARALLEL)
6419 {
6420 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6421 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6422 else
6423 target = assign_temp (type, 0, 1, 1);
6424 }
6425
6426 if (TREE_READONLY (exp))
6427 {
6428 if (GET_CODE (target) == MEM)
6429 target = copy_rtx (target);
6430
6431 RTX_UNCHANGING_P (target) = 1;
6432 }
6433
6434 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6435 int_size_in_bytes (TREE_TYPE (exp)));
6436 return target;
6437 }
6438
6439 case INDIRECT_REF:
6440 {
6441 tree exp1 = TREE_OPERAND (exp, 0);
6442 tree index;
6443 tree string = string_constant (exp1, &index);
6444
6445 /* Try to optimize reads from const strings. */
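/* E.g. (an illustrative case): for *("abc" + 1), STRING is the
   STRING_CST "abc" and INDEX is 1, so we return GEN_INT ('b') without
   emitting any load.  */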
6446 if (string
6447 && TREE_CODE (string) == STRING_CST
6448 && TREE_CODE (index) == INTEGER_CST
6449 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6450 && GET_MODE_CLASS (mode) == MODE_INT
6451 && GET_MODE_SIZE (mode) == 1
6452 && modifier != EXPAND_MEMORY_USE_WO)
6453 return
6454 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6455
6456 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6457 op0 = memory_address (mode, op0);
6458
6459 if (cfun && current_function_check_memory_usage
6460 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6461 {
6462 enum memory_use_mode memory_usage;
6463 memory_usage = get_memory_usage_from_modifier (modifier);
6464
6465 if (memory_usage != MEMORY_USE_DONT)
6466 {
6467 in_check_memory_usage = 1;
6468 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6469 op0, Pmode,
6470 GEN_INT (int_size_in_bytes (type)),
6471 TYPE_MODE (sizetype),
6472 GEN_INT (memory_usage),
6473 TYPE_MODE (integer_type_node));
6474 in_check_memory_usage = 0;
6475 }
6476 }
6477
6478 temp = gen_rtx_MEM (mode, op0);
6479 set_mem_attributes (temp, exp, 0);
6480
6481 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6482 here, because, in C and C++, the fact that a location is accessed
6483 through a pointer to const does not mean that the value there can
6484 never change. Languages where it can never change should
6485 also set TREE_STATIC. */
6486 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6487
6488 /* If we are writing to this object and its type is a record with
6489 readonly fields, we must mark it as readonly so it will
6490 conflict with readonly references to those fields. */
6491 if (modifier == EXPAND_MEMORY_USE_WO
6492 && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
6493 RTX_UNCHANGING_P (temp) = 1;
6494
6495 return temp;
6496 }
6497
6498 case ARRAY_REF:
6499 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6500 abort ();
6501
6502 {
6503 tree array = TREE_OPERAND (exp, 0);
6504 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6505 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6506 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6507 HOST_WIDE_INT i;
6508
6509 /* Optimize the special-case of a zero lower bound.
6510
6511 We convert the low_bound to sizetype to avoid some problems
6512 with constant folding. (E.g. suppose the lower bound is 1,
6513 and its mode is QI. Without the conversion, (ARRAY
6514 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6515 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6516
6517 if (! integer_zerop (low_bound))
6518 index = size_diffop (index, convert (sizetype, low_bound));
6519
6520 /* Fold an expression like: "foo"[2].
6521 This is not done in fold so it won't happen inside &.
6522 Don't fold if this is for wide characters since it's too
6523 difficult to do correctly and this is a very rare case. */
6524
6525 if (TREE_CODE (array) == STRING_CST
6526 && TREE_CODE (index) == INTEGER_CST
6527 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6528 && GET_MODE_CLASS (mode) == MODE_INT
6529 && GET_MODE_SIZE (mode) == 1)
6530 return
6531 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6532
6533 /* If this is a constant index into a constant array,
6534 just get the value from the array. Handle both the cases when
6535 we have an explicit constructor and when our operand is a variable
6536 that was declared const. */
6537
6538 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6539 && TREE_CODE (index) == INTEGER_CST
6540 && 0 > compare_tree_int (index,
6541 list_length (CONSTRUCTOR_ELTS
6542 (TREE_OPERAND (exp, 0)))))
6543 {
6544 tree elem;
6545
6546 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6547 i = TREE_INT_CST_LOW (index);
6548 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6549 ;
6550
6551 if (elem)
6552 return expand_expr (fold (TREE_VALUE (elem)), target,
6553 tmode, ro_modifier);
6554 }
6555
6556 else if (optimize >= 1
6557 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6558 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6559 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6560 {
6561 if (TREE_CODE (index) == INTEGER_CST)
6562 {
6563 tree init = DECL_INITIAL (array);
6564
6565 if (TREE_CODE (init) == CONSTRUCTOR)
6566 {
6567 tree elem;
6568
6569 for (elem = CONSTRUCTOR_ELTS (init);
6570 (elem
6571 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6572 elem = TREE_CHAIN (elem))
6573 ;
6574
6575 if (elem)
6576 return expand_expr (fold (TREE_VALUE (elem)), target,
6577 tmode, ro_modifier);
6578 }
6579 else if (TREE_CODE (init) == STRING_CST
6580 && 0 > compare_tree_int (index,
6581 TREE_STRING_LENGTH (init)))
6582 return (GEN_INT
6583 (TREE_STRING_POINTER
6584 (init)[TREE_INT_CST_LOW (index)]));
6585 }
6586 }
6587 }
6588
6589 /* ... fall through ... */
6590
6591 case COMPONENT_REF:
6592 case BIT_FIELD_REF:
6593 /* If the operand is a CONSTRUCTOR, we can just extract the
6594 appropriate field if it is present. Don't do this if we have
6595 already written the data since we want to refer to that copy
6596 and varasm.c assumes that's what we'll do. */
6597 if (code != ARRAY_REF
6598 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6599 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6600 {
6601 tree elt;
6602
6603 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6604 elt = TREE_CHAIN (elt))
6605 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6606 /* We can normally use the value of the field in the
6607 CONSTRUCTOR. However, if this is a bitfield in
6608 an integral mode that we can fit in a HOST_WIDE_INT,
6609 we must mask only the number of bits in the bitfield,
6610 since this is done implicitly by the constructor. If
6611 the bitfield does not meet either of those conditions,
6612 we can't do this optimization. */
6613 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6614 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6615 == MODE_INT)
6616 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6617 <= HOST_BITS_PER_WIDE_INT))))
6618 {
6619 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6620 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6621 {
6622 HOST_WIDE_INT bitsize
6623 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6624
6625 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6626 {
6627 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6628 op0 = expand_and (op0, op1, target);
6629 }
6630 else
6631 {
6632 enum machine_mode imode
6633 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6634 tree count
6635 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6636 0);
6637
6638 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6639 target, 0);
6640 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6641 target, 0);
6642 }
6643 }
6644
6645 return op0;
6646 }
6647 }
6648
6649 {
6650 enum machine_mode mode1;
6651 HOST_WIDE_INT bitsize, bitpos;
6652 tree offset;
6653 int volatilep = 0;
6654 unsigned int alignment;
6655 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6656 &mode1, &unsignedp, &volatilep,
6657 &alignment);
6658
6659 /* If we got back the original object, something is wrong. Perhaps
6660 we are evaluating an expression too early. In any event, don't
6661 infinitely recurse. */
6662 if (tem == exp)
6663 abort ();
6664
6665 /* If TEM's type is a union of variable size, pass TARGET to the inner
6666 computation, since it will need a temporary and TARGET is known
6667 to suffice. This occurs in unchecked conversion in Ada. */
6668
6669 op0 = expand_expr (tem,
6670 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6671 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6672 != INTEGER_CST)
6673 ? target : NULL_RTX),
6674 VOIDmode,
6675 (modifier == EXPAND_INITIALIZER
6676 || modifier == EXPAND_CONST_ADDRESS)
6677 ? modifier : EXPAND_NORMAL);
6678
6679 /* If this is a constant, put it into a register if it is a
6680 legitimate constant and OFFSET is 0, and into memory if it isn't. */
6681 if (CONSTANT_P (op0))
6682 {
6683 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6684 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6685 && offset == 0)
6686 op0 = force_reg (mode, op0);
6687 else
6688 op0 = validize_mem (force_const_mem (mode, op0));
6689 }
6690
6691 if (offset != 0)
6692 {
6693 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6694
6695 /* If this object is in a register, put it into memory.
6696 This case can't occur in C, but can in Ada if we have
6697 unchecked conversion of an expression from a scalar type to
6698 an array or record type. */
6699 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6700 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6701 {
6702 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
6703
6704 mark_temp_addr_taken (memloc);
6705 emit_move_insn (memloc, op0);
6706 op0 = memloc;
6707 }
6708
6709 if (GET_CODE (op0) != MEM)
6710 abort ();
6711
6712 if (GET_MODE (offset_rtx) != ptr_mode)
6713 {
6714 #ifdef POINTERS_EXTEND_UNSIGNED
6715 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6716 #else
6717 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6718 #endif
6719 }
6720
6721 /* A constant address in OP0 can have VOIDmode; we must not try
6722 to call force_reg in that case. Avoid that case. */
6723 if (GET_CODE (op0) == MEM
6724 && GET_MODE (op0) == BLKmode
6725 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6726 && bitsize != 0
6727 && (bitpos % bitsize) == 0
6728 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6729 && alignment == GET_MODE_ALIGNMENT (mode1))
6730 {
6731 rtx temp = change_address (op0, mode1,
6732 plus_constant (XEXP (op0, 0),
6733 (bitpos /
6734 BITS_PER_UNIT)));
6735 if (GET_CODE (XEXP (temp, 0)) == REG)
6736 op0 = temp;
6737 else
6738 op0 = change_address (op0, mode1,
6739 force_reg (GET_MODE (XEXP (temp, 0)),
6740 XEXP (temp, 0)));
6741 bitpos = 0;
6742 }
6743
6744
6745 op0 = change_address (op0, VOIDmode,
6746 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6747 force_reg (ptr_mode,
6748 offset_rtx)));
6749 }
6750
6751 /* Don't forget about volatility even if this is a bitfield. */
6752 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6753 {
6754 op0 = copy_rtx (op0);
6755 MEM_VOLATILE_P (op0) = 1;
6756 }
6757
6758 /* Check the access. */
6759 if (cfun != 0 && current_function_check_memory_usage
6760 && GET_CODE (op0) == MEM)
6761 {
6762 enum memory_use_mode memory_usage;
6763 memory_usage = get_memory_usage_from_modifier (modifier);
6764
6765 if (memory_usage != MEMORY_USE_DONT)
6766 {
6767 rtx to;
6768 int size;
6769
6770 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6771 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6772
6773 /* Check the access right of the pointer. */
6774 in_check_memory_usage = 1;
6775 if (size > BITS_PER_UNIT)
6776 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6777 to, Pmode,
6778 GEN_INT (size / BITS_PER_UNIT),
6779 TYPE_MODE (sizetype),
6780 GEN_INT (memory_usage),
6781 TYPE_MODE (integer_type_node));
6782 in_check_memory_usage = 0;
6783 }
6784 }
6785
6786 /* In cases where an aligned union has an unaligned object
6787 as a field, we might be extracting a BLKmode value from
6788 an integer-mode (e.g., SImode) object. Handle this case
6789 by doing the extract into an object as wide as the field
6790 (which we know to be the width of a basic mode), then
6791 storing into memory, and changing the mode to BLKmode.
6792 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6793 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6794 if (mode1 == VOIDmode
6795 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6796 || (modifier != EXPAND_CONST_ADDRESS
6797 && modifier != EXPAND_INITIALIZER
6798 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6799 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6800 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6801 /* If the field isn't aligned enough to fetch as a memref,
6802 fetch it as a bit field. */
6803 || (mode1 != BLKmode
6804 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
6805 && ((TYPE_ALIGN (TREE_TYPE (tem))
6806 < GET_MODE_ALIGNMENT (mode))
6807 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6808 /* If the type and the field are a constant size and the
6809 size of the type isn't the same size as the bitfield,
6810 we must use bitfield operations. */
6811 || ((bitsize >= 0
6812 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6813 == INTEGER_CST)
6814 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6815 bitsize)))))
6816 || (modifier != EXPAND_CONST_ADDRESS
6817 && modifier != EXPAND_INITIALIZER
6818 && mode == BLKmode
6819 && SLOW_UNALIGNED_ACCESS (mode, alignment)
6820 && (TYPE_ALIGN (type) > alignment
6821 || bitpos % TYPE_ALIGN (type) != 0)))
6822 {
6823 enum machine_mode ext_mode = mode;
6824
6825 if (ext_mode == BLKmode
6826 && ! (target != 0 && GET_CODE (op0) == MEM
6827 && GET_CODE (target) == MEM
6828 && bitpos % BITS_PER_UNIT == 0))
6829 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6830
6831 if (ext_mode == BLKmode)
6832 {
6833 /* In this case, BITPOS must start at a byte boundary and
6834 TARGET, if specified, must be a MEM. */
6835 if (GET_CODE (op0) != MEM
6836 || (target != 0 && GET_CODE (target) != MEM)
6837 || bitpos % BITS_PER_UNIT != 0)
6838 abort ();
6839
6840 op0 = change_address (op0, VOIDmode,
6841 plus_constant (XEXP (op0, 0),
6842 bitpos / BITS_PER_UNIT));
6843 if (target == 0)
6844 target = assign_temp (type, 0, 1, 1);
6845
6846 emit_block_move (target, op0,
6847 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6848 / BITS_PER_UNIT),
6849 BITS_PER_UNIT);
6850
6851 return target;
6852 }
6853
6854 op0 = validize_mem (op0);
6855
6856 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6857 mark_reg_pointer (XEXP (op0, 0), alignment);
6858
6859 op0 = extract_bit_field (op0, bitsize, bitpos,
6860 unsignedp, target, ext_mode, ext_mode,
6861 alignment,
6862 int_size_in_bytes (TREE_TYPE (tem)));
6863
6864 /* If the result is a record type and BITSIZE is narrower than
6865 the mode of OP0, an integral mode, and this is a big endian
6866 machine, we must put the field into the high-order bits. */
6867 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6868 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6869 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6870 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6871 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6872 - bitsize),
6873 op0, 1);
6874
6875 if (mode == BLKmode)
6876 {
6877 rtx new = assign_stack_temp (ext_mode,
6878 bitsize / BITS_PER_UNIT, 0);
6879
6880 emit_move_insn (new, op0);
6881 op0 = copy_rtx (new);
6882 PUT_MODE (op0, BLKmode);
6883 MEM_SET_IN_STRUCT_P (op0, 1);
6884 }
6885
6886 return op0;
6887 }
6888
6889 /* If the result is BLKmode, use that to access the object
6890 now as well. */
6891 if (mode == BLKmode)
6892 mode1 = BLKmode;
6893
6894 /* Get a reference to just this component. */
6895 if (modifier == EXPAND_CONST_ADDRESS
6896 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6897 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6898 (bitpos / BITS_PER_UNIT)));
6899 else
6900 op0 = change_address (op0, mode1,
6901 plus_constant (XEXP (op0, 0),
6902 (bitpos / BITS_PER_UNIT)));
6903
6904 set_mem_attributes (op0, exp, 0);
6905 if (GET_CODE (XEXP (op0, 0)) == REG)
6906 mark_reg_pointer (XEXP (op0, 0), alignment);
6907
6908 MEM_VOLATILE_P (op0) |= volatilep;
6909 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6910 || modifier == EXPAND_CONST_ADDRESS
6911 || modifier == EXPAND_INITIALIZER)
6912 return op0;
6913 else if (target == 0)
6914 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6915
6916 convert_move (target, op0, unsignedp);
6917 return target;
6918 }
6919
6920 /* Intended for a reference to a buffer of a file-object in Pascal.
6921 But it's not certain that a special tree code will really be
6922 necessary for these. INDIRECT_REF might work for them. */
6923 case BUFFER_REF:
6924 abort ();
6925
6926 case IN_EXPR:
6927 {
6928 /* Pascal set IN expression.
6929
6930 Algorithm:
6931 rlo = set_low - (set_low%bits_per_word);
6932 the_word = set [ (index - rlo)/bits_per_word ];
6933 bit_index = index % bits_per_word;
6934 bitmask = 1 << bit_index;
6935 return !!(the_word & bitmask); */
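/* A worked instance of the formulas above (illustrative only, taking
   bits_per_word == 8): for set_low == 3 and index == 10,
   rlo = 3 - (3 % 8) = 0, the_word = set[(10 - 0) / 8] = set[1],
   bit_index = 10 % 8 = 2, bitmask = 1 << 2, and the result is
   !!(set[1] & 4).  */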
6936
6937 tree set = TREE_OPERAND (exp, 0);
6938 tree index = TREE_OPERAND (exp, 1);
6939 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6940 tree set_type = TREE_TYPE (set);
6941 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6942 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6943 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6944 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6945 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6946 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6947 rtx setaddr = XEXP (setval, 0);
6948 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6949 rtx rlow;
6950 rtx diff, quo, rem, addr, bit, result;
6951
6952 preexpand_calls (exp);
6953
6954 /* If domain is empty, answer is no. Likewise if index is constant
6955 and out of bounds. */
6956 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6957 && TREE_CODE (set_low_bound) == INTEGER_CST
6958 && tree_int_cst_lt (set_high_bound, set_low_bound))
6959 || (TREE_CODE (index) == INTEGER_CST
6960 && TREE_CODE (set_low_bound) == INTEGER_CST
6961 && tree_int_cst_lt (index, set_low_bound))
6962 || (TREE_CODE (set_high_bound) == INTEGER_CST
6963 && TREE_CODE (index) == INTEGER_CST
6964 && tree_int_cst_lt (set_high_bound, index))))
6965 return const0_rtx;
6966
6967 if (target == 0)
6968 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6969
6970 /* If we get here, we have to generate the code for both cases
6971 (in range and out of range). */
6972
6973 op0 = gen_label_rtx ();
6974 op1 = gen_label_rtx ();
6975
6976 if (! (GET_CODE (index_val) == CONST_INT
6977 && GET_CODE (lo_r) == CONST_INT))
6978 {
6979 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6980 GET_MODE (index_val), iunsignedp, 0, op1);
6981 }
6982
6983 if (! (GET_CODE (index_val) == CONST_INT
6984 && GET_CODE (hi_r) == CONST_INT))
6985 {
6986 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6987 GET_MODE (index_val), iunsignedp, 0, op1);
6988 }
6989
6990 /* Calculate the element number of bit zero in the first word
6991 of the set. */
6992 if (GET_CODE (lo_r) == CONST_INT)
6993 rlow = GEN_INT (INTVAL (lo_r)
6994 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6995 else
6996 rlow = expand_binop (index_mode, and_optab, lo_r,
6997 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6998 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6999
7000 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7001 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7002
7003 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7004 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7005 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7006 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7007
7008 addr = memory_address (byte_mode,
7009 expand_binop (index_mode, add_optab, diff,
7010 setaddr, NULL_RTX, iunsignedp,
7011 OPTAB_LIB_WIDEN));
7012
7013 /* Extract the bit we want to examine */
7014 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7015 gen_rtx_MEM (byte_mode, addr),
7016 make_tree (TREE_TYPE (index), rem),
7017 NULL_RTX, 1);
7018 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7019 GET_MODE (target) == byte_mode ? target : 0,
7020 1, OPTAB_LIB_WIDEN);
7021
7022 if (result != target)
7023 convert_move (target, result, 1);
7024
7025 /* Output the code to handle the out-of-range case. */
7026 emit_jump (op0);
7027 emit_label (op1);
7028 emit_move_insn (target, const0_rtx);
7029 emit_label (op0);
7030 return target;
7031 }
7032
7033 case WITH_CLEANUP_EXPR:
7034 if (RTL_EXPR_RTL (exp) == 0)
7035 {
7036 RTL_EXPR_RTL (exp)
7037 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7038 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7039
7040 /* That's it for this cleanup. */
7041 TREE_OPERAND (exp, 2) = 0;
7042 }
7043 return RTL_EXPR_RTL (exp);
7044
7045 case CLEANUP_POINT_EXPR:
7046 {
7047 /* Start a new binding layer that will keep track of all cleanup
7048 actions to be performed. */
7049 expand_start_bindings (2);
7050
7051 target_temp_slot_level = temp_slot_level;
7052
7053 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7054 /* If we're going to use this value, load it up now. */
7055 if (! ignore)
7056 op0 = force_not_mem (op0);
7057 preserve_temp_slots (op0);
7058 expand_end_bindings (NULL_TREE, 0, 0);
7059 }
7060 return op0;
7061
7062 case CALL_EXPR:
7063 /* Check for a built-in function. */
7064 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7065 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7066 == FUNCTION_DECL)
7067 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7068 return expand_builtin (exp, target, subtarget, tmode, ignore);
7069
7070 /* If this call was expanded already by preexpand_calls,
7071 just return the result we got. */
7072 if (CALL_EXPR_RTL (exp) != 0)
7073 return CALL_EXPR_RTL (exp);
7074
7075 return expand_call (exp, target, ignore);
7076
7077 case NON_LVALUE_EXPR:
7078 case NOP_EXPR:
7079 case CONVERT_EXPR:
7080 case REFERENCE_EXPR:
7081 if (TREE_CODE (type) == UNION_TYPE)
7082 {
7083 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7084
7085 /* If both input and output are BLKmode, this conversion
7086 isn't actually doing anything unless we need to make the
7087 alignment stricter. */
7088 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7089 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7090 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7091 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7092 modifier);
7093
7094 if (target == 0)
7095 {
7096 if (mode != BLKmode)
7097 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7098 else
7099 target = assign_temp (type, 0, 1, 1);
7100 }
7101
7102 if (GET_CODE (target) == MEM)
7103 /* Store data into beginning of memory target. */
7104 store_expr (TREE_OPERAND (exp, 0),
7105 change_address (target, TYPE_MODE (valtype), 0), 0);
7106
7107 else if (GET_CODE (target) == REG)
7108 /* Store this field into a union of the proper type. */
7109 store_field (target,
7110 MIN ((int_size_in_bytes (TREE_TYPE
7111 (TREE_OPERAND (exp, 0)))
7112 * BITS_PER_UNIT),
7113 GET_MODE_BITSIZE (mode)),
7114 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7115 VOIDmode, 0, BITS_PER_UNIT,
7116 int_size_in_bytes (type), 0);
7117 else
7118 abort ();
7119
7120 /* Return the entire union. */
7121 return target;
7122 }
7123
7124 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7125 {
7126 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7127 ro_modifier);
7128
7129 /* If the signedness of the conversion differs and OP0 is
7130 a promoted SUBREG, clear that indication since we now
7131 have to do the proper extension. */
7132 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7133 && GET_CODE (op0) == SUBREG)
7134 SUBREG_PROMOTED_VAR_P (op0) = 0;
7135
7136 return op0;
7137 }
7138
7139 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7140 if (GET_MODE (op0) == mode)
7141 return op0;
7142
7143 /* If OP0 is a constant, just convert it into the proper mode. */
7144 if (CONSTANT_P (op0))
7145 return
7146 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7147 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7148
7149 if (modifier == EXPAND_INITIALIZER)
7150 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7151
7152 if (target == 0)
7153 return
7154 convert_to_mode (mode, op0,
7155 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7156 else
7157 convert_move (target, op0,
7158 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7159 return target;
7160
7161 case PLUS_EXPR:
7162 /* We come here from MINUS_EXPR when the second operand is a
7163 constant. */
7164 plus_expr:
7165 this_optab = add_optab;
7166
7167 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7168 something else, make sure we add the register to the constant and
7169 then to the other thing. This case can occur during strength
7170 reduction and doing it this way will produce better code if the
7171 frame pointer or argument pointer is eliminated.
7172
7173 fold-const.c will ensure that the constant is always in the inner
7174 PLUS_EXPR, so the only case we need to do anything about is if
7175 sp, ap, or fp is our second argument, in which case we must swap
7176 the innermost first argument and our second argument. */
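      /* For illustration only (hypothetical operands): a tree of the form
	 (PLUS_EXPR (PLUS_EXPR x 4) fp) is rearranged below into
	 (PLUS_EXPR (PLUS_EXPR fp 4) x), so that the pointer register and
	 the constant sit together in the inner sum.  */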
7177
7178 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7179 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7180 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7181 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7182 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7183 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7184 {
7185 tree t = TREE_OPERAND (exp, 1);
7186
7187 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7188 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7189 }
7190
7191 /* If the result is to be ptr_mode and we are adding an integer to
7192 something, we might be forming a constant. So try to use
7193 plus_constant. If it produces a sum and we can't accept it,
7194 use force_operand. This allows P = &ARR[const] to generate
7195 efficient code on machines where a SYMBOL_REF is not a valid
7196 address.
7197
7198 If this is an EXPAND_SUM call, always return the sum. */
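      /* A minimal illustration (ARR is a hypothetical static array, not
	 from the source): for P = &ARR[10] with 4-byte elements the operand
	 expands to (symbol_ref ARR) and plus_constant folds the offset into
	 (const (plus (symbol_ref ARR) (const_int 40))); force_operand is
	 used only when we are not allowed to return that bare sum.  */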
7199 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7200 || mode == ptr_mode)
7201 {
7202 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7203 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7204 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7205 {
7206 rtx constant_part;
7207
7208 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7209 EXPAND_SUM);
7210 /* Use immed_double_const to ensure that the constant is
7211 truncated according to the mode of OP1, then sign extended
7212 to a HOST_WIDE_INT. Using the constant directly can result
7213 in non-canonical RTL in a 64x32 cross compile. */
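	      /* Hedged example of the issue (hypothetical values): with a
		 64-bit HOST_WIDE_INT and an SImode operand, a low word of
		 0xffffffff must become (const_int -1) rather than
		 (const_int 0xffffffff); immed_double_const performs exactly
		 that truncate-then-sign-extend step.  */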
7214 constant_part
7215 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7216 (HOST_WIDE_INT) 0,
7217 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7218 op1 = plus_constant (op1, INTVAL (constant_part));
7219 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7220 op1 = force_operand (op1, target);
7221 return op1;
7222 }
7223
7224 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7225 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7226 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7227 {
7228 rtx constant_part;
7229
7230 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7231 EXPAND_SUM);
7232 if (! CONSTANT_P (op0))
7233 {
7234 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7235 VOIDmode, modifier);
7236 /* Don't go to both_summands if modifier
7237 says it's not right to return a PLUS. */
7238 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7239 goto binop2;
7240 goto both_summands;
7241 }
7242 /* Use immed_double_const to ensure that the constant is
7243 	      truncated according to the mode of OP0, then sign extended
7244 to a HOST_WIDE_INT. Using the constant directly can result
7245 in non-canonical RTL in a 64x32 cross compile. */
7246 constant_part
7247 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7248 (HOST_WIDE_INT) 0,
7249 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7250 op0 = plus_constant (op0, INTVAL (constant_part));
7251 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7252 op0 = force_operand (op0, target);
7253 return op0;
7254 }
7255 }
7256
7257 /* No sense saving up arithmetic to be done
7258 if it's all in the wrong mode to form part of an address.
7259 And force_operand won't know whether to sign-extend or
7260 zero-extend. */
7261 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7262 || mode != ptr_mode)
7263 goto binop;
7264
7265 preexpand_calls (exp);
7266 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7267 subtarget = 0;
7268
7269 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7270 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7271
7272 both_summands:
7273 /* Make sure any term that's a sum with a constant comes last. */
7274 if (GET_CODE (op0) == PLUS
7275 && CONSTANT_P (XEXP (op0, 1)))
7276 {
7277 temp = op0;
7278 op0 = op1;
7279 op1 = temp;
7280 }
7281 /* If adding to a sum including a constant,
7282 associate it to put the constant outside. */
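	  /* Sketch of the reassociation (hypothetical operands): with
	     op0 = (reg A) and op1 = (plus (reg B) (const_int 4)), the code
	     below folds the two registers into OP0 and leaves OP1 as
	     (const_int 4), so the final sum is built with the constant
	     outermost.  */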
7283 if (GET_CODE (op1) == PLUS
7284 && CONSTANT_P (XEXP (op1, 1)))
7285 {
7286 rtx constant_term = const0_rtx;
7287
7288 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7289 if (temp != 0)
7290 op0 = temp;
7291 /* Ensure that MULT comes first if there is one. */
7292 else if (GET_CODE (op0) == MULT)
7293 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7294 else
7295 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7296
7297 /* Let's also eliminate constants from op0 if possible. */
7298 op0 = eliminate_constant_term (op0, &constant_term);
7299
7300 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7301 their sum should be a constant. Form it into OP1, since the
7302 result we want will then be OP0 + OP1. */
7303
7304 temp = simplify_binary_operation (PLUS, mode, constant_term,
7305 XEXP (op1, 1));
7306 if (temp != 0)
7307 op1 = temp;
7308 else
7309 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7310 }
7311
7312 /* Put a constant term last and put a multiplication first. */
7313 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7314 temp = op1, op1 = op0, op0 = temp;
7315
7316 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7317 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7318
7319 case MINUS_EXPR:
7320 /* For initializers, we are allowed to return a MINUS of two
7321 symbolic constants. Here we handle all cases when both operands
7322 are constant. */
7323 /* Handle difference of two symbolic constants,
7324 for the sake of an initializer. */
7325 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7326 && really_constant_p (TREE_OPERAND (exp, 0))
7327 && really_constant_p (TREE_OPERAND (exp, 1)))
7328 {
7329 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7330 VOIDmode, ro_modifier);
7331 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7332 VOIDmode, ro_modifier);
7333
7334 /* If the last operand is a CONST_INT, use plus_constant of
7335 the negated constant. Else make the MINUS. */
7336 if (GET_CODE (op1) == CONST_INT)
7337 return plus_constant (op0, - INTVAL (op1));
7338 else
7339 return gen_rtx_MINUS (mode, op0, op1);
7340 }
7341 /* Convert A - const to A + (-const). */
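      /* For instance (illustrative only), i - 5 is rewritten as i + (-5)
	 and re-dispatched to the PLUS_EXPR code above; when the constant
	 cannot be negated safely (unsigned types or overflow) it is left
	 for expand_binop to handle as a real subtraction.  */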
7342 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7343 {
7344 tree negated = fold (build1 (NEGATE_EXPR, type,
7345 TREE_OPERAND (exp, 1)));
7346
7347 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7348 /* If we can't negate the constant in TYPE, leave it alone and
7349 expand_binop will negate it for us. We used to try to do it
7350 here in the signed version of TYPE, but that doesn't work
7351 on POINTER_TYPEs. */;
7352 else
7353 {
7354 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7355 goto plus_expr;
7356 }
7357 }
7358 this_optab = sub_optab;
7359 goto binop;
7360
7361 case MULT_EXPR:
7362 preexpand_calls (exp);
7363 /* If first operand is constant, swap them.
7364 Thus the following special case checks need only
7365 check the second operand. */
7366 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7367 {
7368 register tree t1 = TREE_OPERAND (exp, 0);
7369 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7370 TREE_OPERAND (exp, 1) = t1;
7371 }
7372
7373 /* Attempt to return something suitable for generating an
7374 indexed address, for machines that support that. */
7375
7376 if (modifier == EXPAND_SUM && mode == ptr_mode
7377 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7378 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7379 {
7380 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7381 EXPAND_SUM);
7382
7383 /* Apply distributive law if OP0 is x+c. */
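	  /* Illustrative case (hypothetical values): if OP0 expanded to
	     (plus (reg X) (const_int 4)) and the multiplier is 3, the result
	     below is (plus (mult (reg X) (const_int 3)) (const_int 12)),
	     i.e. (x + 4) * 3 == x*3 + 12.  */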
7384 if (GET_CODE (op0) == PLUS
7385 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7386 return
7387 gen_rtx_PLUS
7388 (mode,
7389 gen_rtx_MULT
7390 (mode, XEXP (op0, 0),
7391 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7392 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7393 * INTVAL (XEXP (op0, 1))));
7394
7395 if (GET_CODE (op0) != REG)
7396 op0 = force_operand (op0, NULL_RTX);
7397 if (GET_CODE (op0) != REG)
7398 op0 = copy_to_mode_reg (mode, op0);
7399
7400 return
7401 gen_rtx_MULT (mode, op0,
7402 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7403 }
7404
7405 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7406 subtarget = 0;
7407
7408 /* Check for multiplying things that have been extended
7409 from a narrower type. If this machine supports multiplying
7410 in that narrower type with a result in the desired type,
7411 do it that way, and avoid the explicit type-conversion. */
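      /* A hedged example (target support assumed, not guaranteed): for
	 (int) (short) a * (int) (short) b on a machine with an
	 HImode x HImode -> SImode multiply pattern, the operands are
	 expanded in HImode and the widening optab produces the SImode
	 product directly, avoiding the two explicit sign extensions.  */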
7412 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7413 && TREE_CODE (type) == INTEGER_TYPE
7414 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7415 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7416 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7417 && int_fits_type_p (TREE_OPERAND (exp, 1),
7418 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7419 /* Don't use a widening multiply if a shift will do. */
7420 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7421 > HOST_BITS_PER_WIDE_INT)
7422 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7423 ||
7424 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7425 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7426 ==
7427 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7428 /* If both operands are extended, they must either both
7429 be zero-extended or both be sign-extended. */
7430 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7431 ==
7432 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7433 {
7434 enum machine_mode innermode
7435 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7436 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7437 ? smul_widen_optab : umul_widen_optab);
7438 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7439 ? umul_widen_optab : smul_widen_optab);
7440 if (mode == GET_MODE_WIDER_MODE (innermode))
7441 {
7442 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7443 {
7444 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7445 NULL_RTX, VOIDmode, 0);
7446 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7447 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7448 VOIDmode, 0);
7449 else
7450 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7451 NULL_RTX, VOIDmode, 0);
7452 goto binop2;
7453 }
7454 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7455 && innermode == word_mode)
7456 {
7457 rtx htem;
7458 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7459 NULL_RTX, VOIDmode, 0);
7460 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7461 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7462 VOIDmode, 0);
7463 else
7464 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7465 NULL_RTX, VOIDmode, 0);
7466 temp = expand_binop (mode, other_optab, op0, op1, target,
7467 unsignedp, OPTAB_LIB_WIDEN);
7468 htem = expand_mult_highpart_adjust (innermode,
7469 gen_highpart (innermode, temp),
7470 op0, op1,
7471 gen_highpart (innermode, temp),
7472 unsignedp);
7473 emit_move_insn (gen_highpart (innermode, temp), htem);
7474 return temp;
7475 }
7476 }
7477 }
7478 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7479 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7480 return expand_mult (mode, op0, op1, target, unsignedp);
7481
7482 case TRUNC_DIV_EXPR:
7483 case FLOOR_DIV_EXPR:
7484 case CEIL_DIV_EXPR:
7485 case ROUND_DIV_EXPR:
7486 case EXACT_DIV_EXPR:
7487 preexpand_calls (exp);
7488 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7489 subtarget = 0;
7490       /* Possible optimization: compute the dividend with EXPAND_SUM;
7491 	 then, if the divisor is constant, we can optimize the case
7492 	 where some terms of the dividend have coefficients divisible by it. */
7493 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7494 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7495 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7496
7497 case RDIV_EXPR:
7498 this_optab = flodiv_optab;
7499 goto binop;
7500
7501 case TRUNC_MOD_EXPR:
7502 case FLOOR_MOD_EXPR:
7503 case CEIL_MOD_EXPR:
7504 case ROUND_MOD_EXPR:
7505 preexpand_calls (exp);
7506 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7507 subtarget = 0;
7508 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7509 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7510 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7511
7512 case FIX_ROUND_EXPR:
7513 case FIX_FLOOR_EXPR:
7514 case FIX_CEIL_EXPR:
7515 abort (); /* Not used for C. */
7516
7517 case FIX_TRUNC_EXPR:
7518 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7519 if (target == 0)
7520 target = gen_reg_rtx (mode);
7521 expand_fix (target, op0, unsignedp);
7522 return target;
7523
7524 case FLOAT_EXPR:
7525 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7526 if (target == 0)
7527 target = gen_reg_rtx (mode);
7528 /* expand_float can't figure out what to do if FROM has VOIDmode.
7529 So give it the correct mode. With -O, cse will optimize this. */
7530 if (GET_MODE (op0) == VOIDmode)
7531 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7532 op0);
7533 expand_float (target, op0,
7534 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7535 return target;
7536
7537 case NEGATE_EXPR:
7538 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7539 temp = expand_unop (mode, neg_optab, op0, target, 0);
7540 if (temp == 0)
7541 abort ();
7542 return temp;
7543
7544 case ABS_EXPR:
7545 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7546
7547 /* Handle complex values specially. */
7548 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7549 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7550 return expand_complex_abs (mode, op0, target, unsignedp);
7551
7552 /* Unsigned abs is simply the operand. Testing here means we don't
7553 risk generating incorrect code below. */
7554 if (TREE_UNSIGNED (type))
7555 return op0;
7556
7557 return expand_abs (mode, op0, target,
7558 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7559
7560 case MAX_EXPR:
7561 case MIN_EXPR:
7562 target = original_target;
7563 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7564 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7565 || GET_MODE (target) != mode
7566 || (GET_CODE (target) == REG
7567 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7568 target = gen_reg_rtx (mode);
7569 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7570 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7571
7572 /* First try to do it with a special MIN or MAX instruction.
7573 If that does not win, use a conditional jump to select the proper
7574 value. */
7575 this_optab = (TREE_UNSIGNED (type)
7576 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7577 : (code == MIN_EXPR ? smin_optab : smax_optab));
7578
7579 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7580 OPTAB_WIDEN);
7581 if (temp != 0)
7582 return temp;
7583
7584 /* At this point, a MEM target is no longer useful; we will get better
7585 code without it. */
7586
7587 if (GET_CODE (target) == MEM)
7588 target = gen_reg_rtx (mode);
7589
7590 if (target != op0)
7591 emit_move_insn (target, op0);
7592
7593 op0 = gen_label_rtx ();
7594
7595 /* If this mode is an integer too wide to compare properly,
7596 compare word by word. Rely on cse to optimize constant cases. */
7597 if (GET_MODE_CLASS (mode) == MODE_INT
7598 && ! can_compare_p (GE, mode, ccp_jump))
7599 {
7600 if (code == MAX_EXPR)
7601 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7602 target, op1, NULL_RTX, op0);
7603 else
7604 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7605 op1, target, NULL_RTX, op0);
7606 }
7607 else
7608 {
7609 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7610 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7611 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7612 op0);
7613 }
7614 emit_move_insn (target, op1);
7615 emit_label (op0);
7616 return target;
7617
7618 case BIT_NOT_EXPR:
7619 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7620 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7621 if (temp == 0)
7622 abort ();
7623 return temp;
7624
7625 case FFS_EXPR:
7626 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7627 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7628 if (temp == 0)
7629 abort ();
7630 return temp;
7631
7632 /* ??? Can optimize bitwise operations with one arg constant.
7633 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7634 and (a bitwise1 b) bitwise2 b (etc)
7635 	 but that is probably not worthwhile. */
7636
7637 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7638 boolean values when we want in all cases to compute both of them. In
7639 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7640 as actual zero-or-1 values and then bitwise anding. In cases where
7641 there cannot be any side effects, better code would be made by
7642 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7643 how to recognize those cases. */
7644
7645 case TRUTH_AND_EXPR:
7646 case BIT_AND_EXPR:
7647 this_optab = and_optab;
7648 goto binop;
7649
7650 case TRUTH_OR_EXPR:
7651 case BIT_IOR_EXPR:
7652 this_optab = ior_optab;
7653 goto binop;
7654
7655 case TRUTH_XOR_EXPR:
7656 case BIT_XOR_EXPR:
7657 this_optab = xor_optab;
7658 goto binop;
7659
7660 case LSHIFT_EXPR:
7661 case RSHIFT_EXPR:
7662 case LROTATE_EXPR:
7663 case RROTATE_EXPR:
7664 preexpand_calls (exp);
7665 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7666 subtarget = 0;
7667 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7668 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7669 unsignedp);
7670
7671 /* Could determine the answer when only additive constants differ. Also,
7672 the addition of one can be handled by changing the condition. */
7673 case LT_EXPR:
7674 case LE_EXPR:
7675 case GT_EXPR:
7676 case GE_EXPR:
7677 case EQ_EXPR:
7678 case NE_EXPR:
7679 case UNORDERED_EXPR:
7680 case ORDERED_EXPR:
7681 case UNLT_EXPR:
7682 case UNLE_EXPR:
7683 case UNGT_EXPR:
7684 case UNGE_EXPR:
7685 case UNEQ_EXPR:
7686 preexpand_calls (exp);
7687 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7688 if (temp != 0)
7689 return temp;
7690
7691 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
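      /* Roughly the sequence emitted here (illustrative): load foo into a
	 register, compare it with zero, and branch around a "move 1 into
	 that register" when it was already zero, leaving the register
	 holding 0 or 1 without needing a store-flag instruction.  */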
7692 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7693 && original_target
7694 && GET_CODE (original_target) == REG
7695 && (GET_MODE (original_target)
7696 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7697 {
7698 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7699 VOIDmode, 0);
7700
7701 if (temp != original_target)
7702 temp = copy_to_reg (temp);
7703
7704 op1 = gen_label_rtx ();
7705 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7706 GET_MODE (temp), unsignedp, 0, op1);
7707 emit_move_insn (temp, const1_rtx);
7708 emit_label (op1);
7709 return temp;
7710 }
7711
7712 /* If no set-flag instruction, must generate a conditional
7713 store into a temporary variable. Drop through
7714 and handle this like && and ||. */
7715
7716 case TRUTH_ANDIF_EXPR:
7717 case TRUTH_ORIF_EXPR:
7718 if (! ignore
7719 && (target == 0 || ! safe_from_p (target, exp, 1)
7720 /* Make sure we don't have a hard reg (such as function's return
7721 value) live across basic blocks, if not optimizing. */
7722 || (!optimize && GET_CODE (target) == REG
7723 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7724 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7725
7726 if (target)
7727 emit_clr_insn (target);
7728
7729 op1 = gen_label_rtx ();
7730 jumpifnot (exp, op1);
7731
7732 if (target)
7733 emit_0_to_1_insn (target);
7734
7735 emit_label (op1);
7736 return ignore ? const0_rtx : target;
7737
7738 case TRUTH_NOT_EXPR:
7739 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7740 /* The parser is careful to generate TRUTH_NOT_EXPR
7741 only with operands that are always zero or one. */
7742 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7743 target, 1, OPTAB_LIB_WIDEN);
7744 if (temp == 0)
7745 abort ();
7746 return temp;
7747
7748 case COMPOUND_EXPR:
7749 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7750 emit_queue ();
7751 return expand_expr (TREE_OPERAND (exp, 1),
7752 (ignore ? const0_rtx : target),
7753 VOIDmode, 0);
7754
7755 case COND_EXPR:
7756 /* If we would have a "singleton" (see below) were it not for a
7757 conversion in each arm, bring that conversion back out. */
7758 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7759 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7760 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7761 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7762 {
7763 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7764 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7765
7766 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7767 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7768 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7769 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7770 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7771 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7772 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7773 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7774 return expand_expr (build1 (NOP_EXPR, type,
7775 build (COND_EXPR, TREE_TYPE (true),
7776 TREE_OPERAND (exp, 0),
7777 true, false)),
7778 target, tmode, modifier);
7779 }
7780
7781 {
7782 /* Note that COND_EXPRs whose type is a structure or union
7783 are required to be constructed to contain assignments of
7784 a temporary variable, so that we can evaluate them here
7785 for side effect only. If type is void, we must do likewise. */
7786
7787 /* If an arm of the branch requires a cleanup,
7788 only that cleanup is performed. */
7789
7790 tree singleton = 0;
7791 tree binary_op = 0, unary_op = 0;
7792
7793 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7794 convert it to our mode, if necessary. */
7795 if (integer_onep (TREE_OPERAND (exp, 1))
7796 && integer_zerop (TREE_OPERAND (exp, 2))
7797 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7798 {
7799 if (ignore)
7800 {
7801 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7802 ro_modifier);
7803 return const0_rtx;
7804 }
7805
7806 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7807 if (GET_MODE (op0) == mode)
7808 return op0;
7809
7810 if (target == 0)
7811 target = gen_reg_rtx (mode);
7812 convert_move (target, op0, unsignedp);
7813 return target;
7814 }
7815
7816 /* Check for X ? A + B : A. If we have this, we can copy A to the
7817 output and conditionally add B. Similarly for unary operations.
7818 Don't do this if X has side-effects because those side effects
7819 might affect A or B and the "?" operation is a sequence point in
7820 ANSI. (operand_equal_p tests for side effects.) */
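	/* Concrete shape of the transformation (illustrative): for
	   x ? a + 4 : a, the value A is copied to the result first and the
	   "+ 4" is applied only on the branch where X is true; the
	   singleton/binary_op bookkeeping below records which arm is the
	   bare copy of A.  */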
7821
7822 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7823 && operand_equal_p (TREE_OPERAND (exp, 2),
7824 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7825 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7826 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7827 && operand_equal_p (TREE_OPERAND (exp, 1),
7828 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7829 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7830 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7831 && operand_equal_p (TREE_OPERAND (exp, 2),
7832 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7833 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7834 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7835 && operand_equal_p (TREE_OPERAND (exp, 1),
7836 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7837 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7838
7839 /* If we are not to produce a result, we have no target. Otherwise,
7840 if a target was specified use it; it will not be used as an
7841 intermediate target unless it is safe. If no target, use a
7842 temporary. */
7843
7844 if (ignore)
7845 temp = 0;
7846 else if (original_target
7847 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7848 || (singleton && GET_CODE (original_target) == REG
7849 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7850 && original_target == var_rtx (singleton)))
7851 && GET_MODE (original_target) == mode
7852 #ifdef HAVE_conditional_move
7853 && (! can_conditionally_move_p (mode)
7854 || GET_CODE (original_target) == REG
7855 || TREE_ADDRESSABLE (type))
7856 #endif
7857 && ! (GET_CODE (original_target) == MEM
7858 && MEM_VOLATILE_P (original_target)))
7859 temp = original_target;
7860 else if (TREE_ADDRESSABLE (type))
7861 abort ();
7862 else
7863 temp = assign_temp (type, 0, 0, 1);
7864
7865 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7866 do the test of X as a store-flag operation, do this as
7867 A + ((X != 0) << log C). Similarly for other simple binary
7868 operators. Only do for C == 1 if BRANCH_COST is low. */
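	/* Branch-free sketch of that case (illustrative, assuming
	   do_store_flag succeeds): x ? a + 4 : a becomes
	   a + ((x != 0) << 2); the comparison result is shifted into place
	   and added, so no conditional jump is needed.  */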
7869 if (temp && singleton && binary_op
7870 && (TREE_CODE (binary_op) == PLUS_EXPR
7871 || TREE_CODE (binary_op) == MINUS_EXPR
7872 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7873 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7874 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7875 : integer_onep (TREE_OPERAND (binary_op, 1)))
7876 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7877 {
7878 rtx result;
7879 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7880 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7881 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7882 : xor_optab);
7883
7884 /* If we had X ? A : A + 1, do this as A + (X == 0).
7885
7886 We have to invert the truth value here and then put it
7887 back later if do_store_flag fails. We cannot simply copy
7888 TREE_OPERAND (exp, 0) to another variable and modify that
7889 because invert_truthvalue can modify the tree pointed to
7890 by its argument. */
7891 if (singleton == TREE_OPERAND (exp, 1))
7892 TREE_OPERAND (exp, 0)
7893 = invert_truthvalue (TREE_OPERAND (exp, 0));
7894
7895 result = do_store_flag (TREE_OPERAND (exp, 0),
7896 (safe_from_p (temp, singleton, 1)
7897 ? temp : NULL_RTX),
7898 mode, BRANCH_COST <= 1);
7899
7900 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7901 result = expand_shift (LSHIFT_EXPR, mode, result,
7902 build_int_2 (tree_log2
7903 (TREE_OPERAND
7904 (binary_op, 1)),
7905 0),
7906 (safe_from_p (temp, singleton, 1)
7907 ? temp : NULL_RTX), 0);
7908
7909 if (result)
7910 {
7911 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7912 return expand_binop (mode, boptab, op1, result, temp,
7913 unsignedp, OPTAB_LIB_WIDEN);
7914 }
7915 else if (singleton == TREE_OPERAND (exp, 1))
7916 TREE_OPERAND (exp, 0)
7917 = invert_truthvalue (TREE_OPERAND (exp, 0));
7918 }
7919
7920 do_pending_stack_adjust ();
7921 NO_DEFER_POP;
7922 op0 = gen_label_rtx ();
7923
7924 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7925 {
7926 if (temp != 0)
7927 {
7928 /* If the target conflicts with the other operand of the
7929 binary op, we can't use it. Also, we can't use the target
7930 if it is a hard register, because evaluating the condition
7931 might clobber it. */
7932 if ((binary_op
7933 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7934 || (GET_CODE (temp) == REG
7935 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7936 temp = gen_reg_rtx (mode);
7937 store_expr (singleton, temp, 0);
7938 }
7939 else
7940 expand_expr (singleton,
7941 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7942 if (singleton == TREE_OPERAND (exp, 1))
7943 jumpif (TREE_OPERAND (exp, 0), op0);
7944 else
7945 jumpifnot (TREE_OPERAND (exp, 0), op0);
7946
7947 start_cleanup_deferral ();
7948 if (binary_op && temp == 0)
7949 /* Just touch the other operand. */
7950 expand_expr (TREE_OPERAND (binary_op, 1),
7951 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7952 else if (binary_op)
7953 store_expr (build (TREE_CODE (binary_op), type,
7954 make_tree (type, temp),
7955 TREE_OPERAND (binary_op, 1)),
7956 temp, 0);
7957 else
7958 store_expr (build1 (TREE_CODE (unary_op), type,
7959 make_tree (type, temp)),
7960 temp, 0);
7961 op1 = op0;
7962 }
7963 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7964 comparison operator. If we have one of these cases, set the
7965 output to A, branch on A (cse will merge these two references),
7966 then set the output to FOO. */
7967 else if (temp
7968 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7969 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7970 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7971 TREE_OPERAND (exp, 1), 0)
7972 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7973 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7974 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7975 {
7976 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7977 temp = gen_reg_rtx (mode);
7978 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7979 jumpif (TREE_OPERAND (exp, 0), op0);
7980
7981 start_cleanup_deferral ();
7982 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7983 op1 = op0;
7984 }
7985 else if (temp
7986 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7987 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7988 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7989 TREE_OPERAND (exp, 2), 0)
7990 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7991 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7992 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7993 {
7994 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7995 temp = gen_reg_rtx (mode);
7996 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7997 jumpifnot (TREE_OPERAND (exp, 0), op0);
7998
7999 start_cleanup_deferral ();
8000 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8001 op1 = op0;
8002 }
8003 else
8004 {
8005 op1 = gen_label_rtx ();
8006 jumpifnot (TREE_OPERAND (exp, 0), op0);
8007
8008 start_cleanup_deferral ();
8009
8010 /* One branch of the cond can be void, if it never returns. For
8011 	       example, A ? throw : E.  */
8012 if (temp != 0
8013 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8014 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8015 else
8016 expand_expr (TREE_OPERAND (exp, 1),
8017 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8018 end_cleanup_deferral ();
8019 emit_queue ();
8020 emit_jump_insn (gen_jump (op1));
8021 emit_barrier ();
8022 emit_label (op0);
8023 start_cleanup_deferral ();
8024 if (temp != 0
8025 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8026 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8027 else
8028 expand_expr (TREE_OPERAND (exp, 2),
8029 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8030 }
8031
8032 end_cleanup_deferral ();
8033
8034 emit_queue ();
8035 emit_label (op1);
8036 OK_DEFER_POP;
8037
8038 return temp;
8039 }
8040
8041 case TARGET_EXPR:
8042 {
8043 /* Something needs to be initialized, but we didn't know
8044 where that thing was when building the tree. For example,
8045 it could be the return value of a function, or a parameter
8046 	 to a function which is laid out on the stack, or a temporary
8047 variable which must be passed by reference.
8048
8049 We guarantee that the expression will either be constructed
8050 or copied into our original target. */
8051
8052 tree slot = TREE_OPERAND (exp, 0);
8053 tree cleanups = NULL_TREE;
8054 tree exp1;
8055
8056 if (TREE_CODE (slot) != VAR_DECL)
8057 abort ();
8058
8059 if (! ignore)
8060 target = original_target;
8061
8062 /* Set this here so that if we get a target that refers to a
8063 register variable that's already been used, put_reg_into_stack
8064 knows that it should fix up those uses. */
8065 TREE_USED (slot) = 1;
8066
8067 if (target == 0)
8068 {
8069 if (DECL_RTL (slot) != 0)
8070 {
8071 target = DECL_RTL (slot);
8072 	    /* If we have already expanded the slot, don't do
8073 	       it again.  (mrs)  */
8074 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8075 return target;
8076 }
8077 else
8078 {
8079 target = assign_temp (type, 2, 0, 1);
8080 /* All temp slots at this level must not conflict. */
8081 preserve_temp_slots (target);
8082 DECL_RTL (slot) = target;
8083 if (TREE_ADDRESSABLE (slot))
8084 {
8085 TREE_ADDRESSABLE (slot) = 0;
8086 mark_addressable (slot);
8087 }
8088
8089 /* Since SLOT is not known to the called function
8090 to belong to its stack frame, we must build an explicit
8091 cleanup. This case occurs when we must build up a reference
8092 to pass the reference as an argument. In this case,
8093 it is very likely that such a reference need not be
8094 built here. */
8095
8096 if (TREE_OPERAND (exp, 2) == 0)
8097 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8098 cleanups = TREE_OPERAND (exp, 2);
8099 }
8100 }
8101 else
8102 {
8103 	  /* This case does occur when expanding a parameter which
8104 needs to be constructed on the stack. The target
8105 is the actual stack address that we want to initialize.
8106 The function we call will perform the cleanup in this case. */
8107
8108 /* If we have already assigned it space, use that space,
8109 	     not the target that we were passed in, as our target
8110 parameter is only a hint. */
8111 if (DECL_RTL (slot) != 0)
8112 {
8113 target = DECL_RTL (slot);
8114 	      /* If we have already expanded the slot, don't do
8115 		 it again.  (mrs)  */
8116 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8117 return target;
8118 }
8119 else
8120 {
8121 DECL_RTL (slot) = target;
8122 /* If we must have an addressable slot, then make sure that
8123 the RTL that we just stored in slot is OK. */
8124 if (TREE_ADDRESSABLE (slot))
8125 {
8126 TREE_ADDRESSABLE (slot) = 0;
8127 mark_addressable (slot);
8128 }
8129 }
8130 }
8131
8132 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8133 /* Mark it as expanded. */
8134 TREE_OPERAND (exp, 1) = NULL_TREE;
8135
8136 store_expr (exp1, target, 0);
8137
8138 expand_decl_cleanup (NULL_TREE, cleanups);
8139
8140 return target;
8141 }
8142
8143 case INIT_EXPR:
8144 {
8145 tree lhs = TREE_OPERAND (exp, 0);
8146 tree rhs = TREE_OPERAND (exp, 1);
8147 tree noncopied_parts = 0;
8148 tree lhs_type = TREE_TYPE (lhs);
8149
8150 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8151 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8152 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8153 TYPE_NONCOPIED_PARTS (lhs_type));
8154 while (noncopied_parts != 0)
8155 {
8156 expand_assignment (TREE_VALUE (noncopied_parts),
8157 TREE_PURPOSE (noncopied_parts), 0, 0);
8158 noncopied_parts = TREE_CHAIN (noncopied_parts);
8159 }
8160 return temp;
8161 }
8162
8163 case MODIFY_EXPR:
8164 {
8165 /* If lhs is complex, expand calls in rhs before computing it.
8166 That's so we don't compute a pointer and save it over a call.
8167 If lhs is simple, compute it first so we can give it as a
8168 target if the rhs is just a call. This avoids an extra temp and copy
8169 	 and prevents a partial subsumption which makes bad code.
8170 Actually we could treat component_ref's of vars like vars. */
8171
8172 tree lhs = TREE_OPERAND (exp, 0);
8173 tree rhs = TREE_OPERAND (exp, 1);
8174 tree noncopied_parts = 0;
8175 tree lhs_type = TREE_TYPE (lhs);
8176
8177 temp = 0;
8178
8179 if (TREE_CODE (lhs) != VAR_DECL
8180 && TREE_CODE (lhs) != RESULT_DECL
8181 && TREE_CODE (lhs) != PARM_DECL
8182 && ! (TREE_CODE (lhs) == INDIRECT_REF
8183 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8184 preexpand_calls (exp);
8185
8186 /* Check for |= or &= of a bitfield of size one into another bitfield
8187 of size 1. In this case, (unless we need the result of the
8188 assignment) we can do this more efficiently with a
8189 test followed by an assignment, if necessary.
8190
8191 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8192 things change so we do, this code should be enhanced to
8193 support it. */
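	/* In source terms (illustrative): "s.f |= t.g;" with one-bit fields
	   becomes "if (t.g) s.f = 1;", and "s.f &= t.g;" becomes
	   "if (! t.g) s.f = 0;"; the jump emitted below skips the store
	   whenever the right-hand bit would leave the field unchanged.  */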
8194 if (ignore
8195 && TREE_CODE (lhs) == COMPONENT_REF
8196 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8197 || TREE_CODE (rhs) == BIT_AND_EXPR)
8198 && TREE_OPERAND (rhs, 0) == lhs
8199 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8200 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8201 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8202 {
8203 rtx label = gen_label_rtx ();
8204
8205 do_jump (TREE_OPERAND (rhs, 1),
8206 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8207 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8208 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8209 (TREE_CODE (rhs) == BIT_IOR_EXPR
8210 ? integer_one_node
8211 : integer_zero_node)),
8212 0, 0);
8213 do_pending_stack_adjust ();
8214 emit_label (label);
8215 return const0_rtx;
8216 }
8217
8218 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8219 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8220 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8221 TYPE_NONCOPIED_PARTS (lhs_type));
8222
8223 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8224 while (noncopied_parts != 0)
8225 {
8226 expand_assignment (TREE_PURPOSE (noncopied_parts),
8227 TREE_VALUE (noncopied_parts), 0, 0);
8228 noncopied_parts = TREE_CHAIN (noncopied_parts);
8229 }
8230 return temp;
8231 }
8232
8233 case RETURN_EXPR:
8234 if (!TREE_OPERAND (exp, 0))
8235 expand_null_return ();
8236 else
8237 expand_return (TREE_OPERAND (exp, 0));
8238 return const0_rtx;
8239
8240 case PREINCREMENT_EXPR:
8241 case PREDECREMENT_EXPR:
8242 return expand_increment (exp, 0, ignore);
8243
8244 case POSTINCREMENT_EXPR:
8245 case POSTDECREMENT_EXPR:
8246 /* Faster to treat as pre-increment if result is not used. */
8247 return expand_increment (exp, ! ignore, ignore);
8248
8249 case ADDR_EXPR:
8250 /* If nonzero, TEMP will be set to the address of something that might
8251 be a MEM corresponding to a stack slot. */
8252 temp = 0;
8253
8254 /* Are we taking the address of a nested function? */
8255 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8256 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8257 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8258 && ! TREE_STATIC (exp))
8259 {
8260 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8261 op0 = force_operand (op0, target);
8262 }
8263 /* If we are taking the address of something erroneous, just
8264 return a zero. */
8265 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8266 return const0_rtx;
8267 else
8268 {
8269 /* We make sure to pass const0_rtx down if we came in with
8270 	     ignore set, to avoid doing the cleanups twice for the same thing.  */
8271 op0 = expand_expr (TREE_OPERAND (exp, 0),
8272 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8273 (modifier == EXPAND_INITIALIZER
8274 ? modifier : EXPAND_CONST_ADDRESS));
8275
8276 /* If we are going to ignore the result, OP0 will have been set
8277 to const0_rtx, so just return it. Don't get confused and
8278 think we are taking the address of the constant. */
8279 if (ignore)
8280 return op0;
8281
8282 op0 = protect_from_queue (op0, 0);
8283
8284 /* We would like the object in memory. If it is a constant, we can
8285 have it be statically allocated into memory. For a non-constant,
8286 we need to allocate some memory and store the value into it. */
8287
8288 if (CONSTANT_P (op0))
8289 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8290 op0);
8291 else if (GET_CODE (op0) == MEM)
8292 {
8293 mark_temp_addr_taken (op0);
8294 temp = XEXP (op0, 0);
8295 }
8296
8297 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8298 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8299 {
8300 	      /* If this object is in a register, it must not
8301 		 be BLKmode. */
8302 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8303 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8304
8305 mark_temp_addr_taken (memloc);
8306 emit_move_insn (memloc, op0);
8307 op0 = memloc;
8308 }
8309
8310 if (GET_CODE (op0) != MEM)
8311 abort ();
8312
8313 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8314 {
8315 temp = XEXP (op0, 0);
8316 #ifdef POINTERS_EXTEND_UNSIGNED
8317 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8318 && mode == ptr_mode)
8319 temp = convert_memory_address (ptr_mode, temp);
8320 #endif
8321 return temp;
8322 }
8323
8324 op0 = force_operand (XEXP (op0, 0), target);
8325 }
8326
8327 if (flag_force_addr && GET_CODE (op0) != REG)
8328 op0 = force_reg (Pmode, op0);
8329
8330 if (GET_CODE (op0) == REG
8331 && ! REG_USERVAR_P (op0))
8332 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8333
8334 /* If we might have had a temp slot, add an equivalent address
8335 for it. */
8336 if (temp != 0)
8337 update_temp_slot_address (temp, op0);
8338
8339 #ifdef POINTERS_EXTEND_UNSIGNED
8340 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8341 && mode == ptr_mode)
8342 op0 = convert_memory_address (ptr_mode, op0);
8343 #endif
8344
8345 return op0;
8346
8347 case ENTRY_VALUE_EXPR:
8348 abort ();
8349
8350 /* COMPLEX type for Extended Pascal & Fortran */
8351 case COMPLEX_EXPR:
8352 {
8353 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8354 rtx insns;
8355
8356 /* Get the rtx code of the operands. */
8357 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8358 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8359
8360 if (! target)
8361 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8362
8363 start_sequence ();
8364
8365 /* Move the real (op0) and imaginary (op1) parts to their location. */
8366 emit_move_insn (gen_realpart (mode, target), op0);
8367 emit_move_insn (gen_imagpart (mode, target), op1);
8368
8369 insns = get_insns ();
8370 end_sequence ();
8371
8372 /* Complex construction should appear as a single unit. */
8373 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8374 each with a separate pseudo as destination.
8375 It's not correct for flow to treat them as a unit. */
8376 if (GET_CODE (target) != CONCAT)
8377 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8378 else
8379 emit_insns (insns);
8380
8381 return target;
8382 }
8383
8384 case REALPART_EXPR:
8385 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8386 return gen_realpart (mode, op0);
8387
8388 case IMAGPART_EXPR:
8389 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8390 return gen_imagpart (mode, op0);
8391
8392 case CONJ_EXPR:
8393 {
8394 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8395 rtx imag_t;
8396 rtx insns;
8397
8398 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8399
8400 if (! target)
8401 target = gen_reg_rtx (mode);
8402
8403 start_sequence ();
8404
8405 /* Store the realpart and the negated imagpart to target. */
8406 emit_move_insn (gen_realpart (partmode, target),
8407 gen_realpart (partmode, op0));
8408
8409 imag_t = gen_imagpart (partmode, target);
8410 temp = expand_unop (partmode, neg_optab,
8411 gen_imagpart (partmode, op0), imag_t, 0);
8412 if (temp != imag_t)
8413 emit_move_insn (imag_t, temp);
8414
8415 insns = get_insns ();
8416 end_sequence ();
8417
8418 	/* Conjugate should appear as a single unit.
8419 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8420 each with a separate pseudo as destination.
8421 It's not correct for flow to treat them as a unit. */
8422 if (GET_CODE (target) != CONCAT)
8423 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8424 else
8425 emit_insns (insns);
8426
8427 return target;
8428 }
8429
8430 case TRY_CATCH_EXPR:
8431 {
8432 tree handler = TREE_OPERAND (exp, 1);
8433
8434 expand_eh_region_start ();
8435
8436 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8437
8438 expand_eh_region_end (handler);
8439
8440 return op0;
8441 }
8442
8443 case TRY_FINALLY_EXPR:
8444 {
8445 tree try_block = TREE_OPERAND (exp, 0);
8446 tree finally_block = TREE_OPERAND (exp, 1);
8447 rtx finally_label = gen_label_rtx ();
8448 rtx done_label = gen_label_rtx ();
8449 rtx return_link = gen_reg_rtx (Pmode);
8450 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8451 (tree) finally_label, (tree) return_link);
8452 TREE_SIDE_EFFECTS (cleanup) = 1;
8453
8454 /* Start a new binding layer that will keep track of all cleanup
8455 actions to be performed. */
8456 expand_start_bindings (2);
8457
8458 target_temp_slot_level = temp_slot_level;
8459
8460 expand_decl_cleanup (NULL_TREE, cleanup);
8461 op0 = expand_expr (try_block, target, tmode, modifier);
8462
8463 preserve_temp_slots (op0);
8464 expand_end_bindings (NULL_TREE, 0, 0);
8465 emit_jump (done_label);
8466 emit_label (finally_label);
8467 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8468 emit_indirect_jump (return_link);
8469 emit_label (done_label);
8470 return op0;
8471 }
8472
8473 case GOTO_SUBROUTINE_EXPR:
8474 {
8475 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8476 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8477 rtx return_address = gen_label_rtx ();
8478 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8479 emit_jump (subr);
8480 emit_label (return_address);
8481 return const0_rtx;
8482 }
8483
8484 case POPDCC_EXPR:
8485 {
8486 rtx dcc = get_dynamic_cleanup_chain ();
8487 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8488 return const0_rtx;
8489 }
8490
8491 case POPDHC_EXPR:
8492 {
8493 rtx dhc = get_dynamic_handler_chain ();
8494 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8495 return const0_rtx;
8496 }
8497
8498 case VA_ARG_EXPR:
8499 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8500
8501 default:
8502 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8503 }
8504
8505 /* Here to do an ordinary binary operator, generating an instruction
8506 from the optab already placed in `this_optab'. */
8507 binop:
8508 preexpand_calls (exp);
8509 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8510 subtarget = 0;
8511 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8512 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8513 binop2:
8514 temp = expand_binop (mode, this_optab, op0, op1, target,
8515 unsignedp, OPTAB_LIB_WIDEN);
8516 if (temp == 0)
8517 abort ();
8518 return temp;
8519 }
8520 \f
8521 /* Similar to expand_expr, except that we don't specify a target, target
8522 mode, or modifier and we return the alignment of the inner type. This is
8523 used in cases where it is not necessary to align the result to the
8524 alignment of its type as long as we know the alignment of the result, for
8525 example for comparisons of BLKmode values. */
8526
8527 static rtx
8528 expand_expr_unaligned (exp, palign)
8529 register tree exp;
8530 unsigned int *palign;
8531 {
8532 register rtx op0;
8533 tree type = TREE_TYPE (exp);
8534 register enum machine_mode mode = TYPE_MODE (type);
8535
8536 /* Default the alignment we return to that of the type. */
8537 *palign = TYPE_ALIGN (type);
8538
8539 /* The only cases in which we do anything special is if the resulting mode
8540 is BLKmode. */
8541 if (mode != BLKmode)
8542 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8543
8544 switch (TREE_CODE (exp))
8545 {
8546 case CONVERT_EXPR:
8547 case NOP_EXPR:
8548 case NON_LVALUE_EXPR:
8549 /* Conversions between BLKmode values don't change the underlying
8550 alignment or value. */
8551 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8552 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8553 break;
8554
8555 case ARRAY_REF:
8556 /* Much of the code for this case is copied directly from expand_expr.
8557 We need to duplicate it here because we will do something different
8558 in the fall-through case, so we need to handle the same exceptions
8559 it does. */
8560 {
8561 tree array = TREE_OPERAND (exp, 0);
8562 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8563 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8564 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8565 HOST_WIDE_INT i;
8566
8567 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8568 abort ();
8569
8570 /* Optimize the special-case of a zero lower bound.
8571
8572 We convert the low_bound to sizetype to avoid some problems
8573 with constant folding. (E.g. suppose the lower bound is 1,
8574 and its mode is QI. Without the conversion, (ARRAY
8575 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8576 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8577
8578 if (! integer_zerop (low_bound))
8579 index = size_diffop (index, convert (sizetype, low_bound));
8580
8581 /* If this is a constant index into a constant array,
8582 just get the value from the array. Handle both the cases when
8583 we have an explicit constructor and when our operand is a variable
8584 that was declared const. */
8585
8586 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8587 && 0 > compare_tree_int (index,
8588 list_length (CONSTRUCTOR_ELTS
8589 (TREE_OPERAND (exp, 0)))))
8590 {
8591 tree elem;
8592
8593 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8594 i = TREE_INT_CST_LOW (index);
8595 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8596 ;
8597
8598 if (elem)
8599 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8600 }
8601
8602 else if (optimize >= 1
8603 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8604 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8605 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8606 {
8607 if (TREE_CODE (index) == INTEGER_CST)
8608 {
8609 tree init = DECL_INITIAL (array);
8610
8611 if (TREE_CODE (init) == CONSTRUCTOR)
8612 {
8613 tree elem;
8614
8615 for (elem = CONSTRUCTOR_ELTS (init);
8616 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8617 elem = TREE_CHAIN (elem))
8618 ;
8619
8620 if (elem)
8621 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8622 palign);
8623 }
8624 }
8625 }
8626 }
8627
8628 /* ... fall through ... */
8629
8630 case COMPONENT_REF:
8631 case BIT_FIELD_REF:
8632 /* If the operand is a CONSTRUCTOR, we can just extract the
8633 appropriate field if it is present. Don't do this if we have
8634 already written the data since we want to refer to that copy
8635 and varasm.c assumes that's what we'll do. */
8636 if (TREE_CODE (exp) != ARRAY_REF
8637 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8638 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8639 {
8640 tree elt;
8641
8642 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8643 elt = TREE_CHAIN (elt))
8644 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8645 /* Note that unlike the case in expand_expr, we know this is
8646 BLKmode and hence not an integer. */
8647 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8648 }
8649
8650 {
8651 enum machine_mode mode1;
8652 HOST_WIDE_INT bitsize, bitpos;
8653 tree offset;
8654 int volatilep = 0;
8655 unsigned int alignment;
8656 int unsignedp;
8657 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8658 &mode1, &unsignedp, &volatilep,
8659 &alignment);
8660
8661 /* If we got back the original object, something is wrong. Perhaps
8662 we are evaluating an expression too early. In any event, don't
8663 infinitely recurse. */
8664 if (tem == exp)
8665 abort ();
8666
8667 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8668
8669 /* If this is a constant, put it into a register if it is a
8670 legitimate constant and OFFSET is 0 and memory if it isn't. */
8671 if (CONSTANT_P (op0))
8672 {
8673 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8674
8675 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8676 && offset == 0)
8677 op0 = force_reg (inner_mode, op0);
8678 else
8679 op0 = validize_mem (force_const_mem (inner_mode, op0));
8680 }
8681
8682 if (offset != 0)
8683 {
8684 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8685
8686 /* If this object is in a register, put it into memory.
8687 This case can't occur in C, but can in Ada if we have
8688 unchecked conversion of an expression from a scalar type to
8689 an array or record type. */
8690 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8691 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8692 {
8693 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8694
8695 mark_temp_addr_taken (memloc);
8696 emit_move_insn (memloc, op0);
8697 op0 = memloc;
8698 }
8699
8700 if (GET_CODE (op0) != MEM)
8701 abort ();
8702
8703 if (GET_MODE (offset_rtx) != ptr_mode)
8704 {
8705 #ifdef POINTERS_EXTEND_UNSIGNED
8706 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8707 #else
8708 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8709 #endif
8710 }
8711
8712 op0 = change_address (op0, VOIDmode,
8713 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8714 force_reg (ptr_mode,
8715 offset_rtx)));
8716 }
8717
8718 /* Don't forget about volatility even if this is a bitfield. */
8719 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8720 {
8721 op0 = copy_rtx (op0);
8722 MEM_VOLATILE_P (op0) = 1;
8723 }
8724
8725 /* Check the access. */
8726 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8727 {
8728 rtx to;
8729 int size;
8730
8731 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8732 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8733
8734 /* Check the access right of the pointer. */
8735 in_check_memory_usage = 1;
8736 if (size > BITS_PER_UNIT)
8737 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8738 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8739 TYPE_MODE (sizetype),
8740 GEN_INT (MEMORY_USE_RO),
8741 TYPE_MODE (integer_type_node));
8742 in_check_memory_usage = 0;
8743 }
8744
8745 /* In cases where an aligned union has an unaligned object
8746 as a field, we might be extracting a BLKmode value from
8747 an integer-mode (e.g., SImode) object. Handle this case
8748 by doing the extract into an object as wide as the field
8749 (which we know to be the width of a basic mode), then
8750 storing into memory, and changing the mode to BLKmode.
8751 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8752 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8753 if (mode1 == VOIDmode
8754 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8755 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8756 && (TYPE_ALIGN (type) > alignment
8757 || bitpos % TYPE_ALIGN (type) != 0)))
8758 {
8759 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8760
8761 if (ext_mode == BLKmode)
8762 {
8763 /* In this case, BITPOS must start at a byte boundary. */
8764 if (GET_CODE (op0) != MEM
8765 || bitpos % BITS_PER_UNIT != 0)
8766 abort ();
8767
8768 op0 = change_address (op0, VOIDmode,
8769 plus_constant (XEXP (op0, 0),
8770 bitpos / BITS_PER_UNIT));
8771 }
8772 else
8773 {
8774 rtx new = assign_stack_temp (ext_mode,
8775 bitsize / BITS_PER_UNIT, 0);
8776
8777 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8778 unsignedp, NULL_RTX, ext_mode,
8779 ext_mode, alignment,
8780 int_size_in_bytes (TREE_TYPE (tem)));
8781
8782 /* If the result is a record type and BITSIZE is narrower than
8783 the mode of OP0, an integral mode, and this is a big endian
8784 machine, we must put the field into the high-order bits. */
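/* For example, a 3-bit field extracted into an SImode register on a
32-bit big-endian target is shifted left by 29 bits, putting it at the
high-order end of the register, which is where the first bytes of the
BLKmode copy in memory will be taken from. */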
8785 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8786 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8787 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8788 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8789 size_int (GET_MODE_BITSIZE
8790 (GET_MODE (op0))
8791 - bitsize),
8792 op0, 1);
8793
8794
8795 emit_move_insn (new, op0);
8796 op0 = copy_rtx (new);
8797 PUT_MODE (op0, BLKmode);
8798 }
8799 }
8800 else
8801 /* Get a reference to just this component. */
8802 op0 = change_address (op0, mode1,
8803 plus_constant (XEXP (op0, 0),
8804 (bitpos / BITS_PER_UNIT)));
8805
8806 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8807
8808 /* Adjust the alignment in case the bit position is not
8809 a multiple of the alignment of the inner object. */
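/* For example, if the inner object is 32-bit aligned but BITPOS is 8,
ALIGNMENT is halved until it divides BITPOS, leaving byte (8-bit)
alignment for this reference. */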
8810 while (bitpos % alignment != 0)
8811 alignment >>= 1;
8812
8813 if (GET_CODE (XEXP (op0, 0)) == REG)
8814 mark_reg_pointer (XEXP (op0, 0), alignment);
8815
8816 MEM_IN_STRUCT_P (op0) = 1;
8817 MEM_VOLATILE_P (op0) |= volatilep;
8818
8819 *palign = alignment;
8820 return op0;
8821 }
8822
8823 default:
8824 break;
8825
8826 }
8827
8828 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8829 }
8830 \f
8831 /* Return the tree node if ARG corresponds to a string constant, or zero
8832 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8833 in bytes within the string that ARG is accessing. The type of the
8834 offset will be `sizetype'. */
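/* For example, given the address expression for `"hello" + 2', which is
a PLUS_EXPR of an ADDR_EXPR of a STRING_CST and the constant 2, the
STRING_CST node is returned and *PTR_OFFSET is set to 2. */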
8835
8836 tree
8837 string_constant (arg, ptr_offset)
8838 tree arg;
8839 tree *ptr_offset;
8840 {
8841 STRIP_NOPS (arg);
8842
8843 if (TREE_CODE (arg) == ADDR_EXPR
8844 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8845 {
8846 *ptr_offset = size_zero_node;
8847 return TREE_OPERAND (arg, 0);
8848 }
8849 else if (TREE_CODE (arg) == PLUS_EXPR)
8850 {
8851 tree arg0 = TREE_OPERAND (arg, 0);
8852 tree arg1 = TREE_OPERAND (arg, 1);
8853
8854 STRIP_NOPS (arg0);
8855 STRIP_NOPS (arg1);
8856
8857 if (TREE_CODE (arg0) == ADDR_EXPR
8858 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8859 {
8860 *ptr_offset = convert (sizetype, arg1);
8861 return TREE_OPERAND (arg0, 0);
8862 }
8863 else if (TREE_CODE (arg1) == ADDR_EXPR
8864 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8865 {
8866 *ptr_offset = convert (sizetype, arg0);
8867 return TREE_OPERAND (arg1, 0);
8868 }
8869 }
8870
8871 return 0;
8872 }
8873 \f
8874 /* Expand code for a post- or pre- increment or decrement
8875 and return the RTX for the result.
8876 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8877
8878 static rtx
8879 expand_increment (exp, post, ignore)
8880 register tree exp;
8881 int post, ignore;
8882 {
8883 register rtx op0, op1;
8884 register rtx temp, value;
8885 register tree incremented = TREE_OPERAND (exp, 0);
8886 optab this_optab = add_optab;
8887 int icode;
8888 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8889 int op0_is_copy = 0;
8890 int single_insn = 0;
8891 /* 1 means we can't store into OP0 directly,
8892 because it is a subreg narrower than a word,
8893 and we don't dare clobber the rest of the word. */
8894 int bad_subreg = 0;
8895
8896 /* Stabilize any component ref that might need to be
8897 evaluated more than once below. */
8898 if (!post
8899 || TREE_CODE (incremented) == BIT_FIELD_REF
8900 || (TREE_CODE (incremented) == COMPONENT_REF
8901 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8902 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8903 incremented = stabilize_reference (incremented);
8904 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8905 ones into SAVE_EXPRs so that they don't accidentally get evaluated
8906 more than once by the code below. */
8907 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8908 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8909 incremented = save_expr (incremented);
8910
8911 /* Compute the operands as RTX.
8912 Note whether OP0 is the actual lvalue or a copy of it:
8913 I believe it is a copy iff it is a register or subreg
8914 and insns were generated in computing it. */
8915
8916 temp = get_last_insn ();
8917 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
8918
8919 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8920 in place but instead must do sign- or zero-extension during assignment,
8921 so we copy it into a new register and let the code below use it as
8922 a copy.
8923
8924 Note that we can safely modify this SUBREG since it is known not to be
8925 shared (it was made by the expand_expr call above). */
8926
8927 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8928 {
8929 if (post)
8930 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8931 else
8932 bad_subreg = 1;
8933 }
8934 else if (GET_CODE (op0) == SUBREG
8935 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8936 {
8937 /* We cannot increment this SUBREG in place. If we are
8938 post-incrementing, get a copy of the old value. Otherwise,
8939 just mark that we cannot increment in place. */
8940 if (post)
8941 op0 = copy_to_reg (op0);
8942 else
8943 bad_subreg = 1;
8944 }
8945
8946 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8947 && temp != get_last_insn ());
8948 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8949 EXPAND_MEMORY_USE_BAD);
8950
8951 /* Decide whether incrementing or decrementing. */
8952 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8953 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8954 this_optab = sub_optab;
8955
8956 /* Convert decrement by a constant into a negative increment. */
8957 if (this_optab == sub_optab
8958 && GET_CODE (op1) == CONST_INT)
8959 {
8960 op1 = GEN_INT (- INTVAL (op1));
8961 this_optab = add_optab;
8962 }
8963
8964 /* For a preincrement, see if we can do this with a single instruction. */
8965 if (!post)
8966 {
8967 icode = (int) this_optab->handlers[(int) mode].insn_code;
8968 if (icode != (int) CODE_FOR_nothing
8969 /* Make sure that OP0 is valid for operands 0 and 1
8970 of the insn we want to queue. */
8971 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8972 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8973 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8974 single_insn = 1;
8975 }
8976
8977 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8978 then we cannot just increment OP0. We must therefore contrive to
8979 increment the original value. Then, for postincrement, we can return
8980 OP0 since it is a copy of the old value. For preincrement, expand here
8981 unless we can do it with a single insn.
8982
8983 Likewise if storing directly into OP0 would clobber high bits
8984 we need to preserve (bad_subreg). */
8985 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8986 {
8987 /* This is the easiest way to increment the value wherever it is.
8988 Problems with multiple evaluation of INCREMENTED are prevented
8989 because either (1) it is a component_ref or preincrement,
8990 in which case it was stabilized above, or (2) it is an array_ref
8991 with constant index in an array in a register, which is
8992 safe to reevaluate. */
8993 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8994 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8995 ? MINUS_EXPR : PLUS_EXPR),
8996 TREE_TYPE (exp),
8997 incremented,
8998 TREE_OPERAND (exp, 1));
8999
9000 while (TREE_CODE (incremented) == NOP_EXPR
9001 || TREE_CODE (incremented) == CONVERT_EXPR)
9002 {
9003 newexp = convert (TREE_TYPE (incremented), newexp);
9004 incremented = TREE_OPERAND (incremented, 0);
9005 }
9006
9007 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9008 return post ? op0 : temp;
9009 }
9010
9011 if (post)
9012 {
9013 /* We have a true reference to the value in OP0.
9014 If there is an insn to add or subtract in this mode, queue it.
9015 Queueing the increment insn avoids the register shuffling
9016 that often results if we must increment now and first save
9017 the old value for subsequent use. */
9018
9019 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9020 op0 = stabilize (op0);
9021 #endif
9022
9023 icode = (int) this_optab->handlers[(int) mode].insn_code;
9024 if (icode != (int) CODE_FOR_nothing
9025 /* Make sure that OP0 is valid for operands 0 and 1
9026 of the insn we want to queue. */
9027 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9028 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9029 {
9030 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9031 op1 = force_reg (mode, op1);
9032
9033 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9034 }
9035 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9036 {
9037 rtx addr = (general_operand (XEXP (op0, 0), mode)
9038 ? force_reg (Pmode, XEXP (op0, 0))
9039 : copy_to_reg (XEXP (op0, 0)));
9040 rtx temp, result;
9041
9042 op0 = change_address (op0, VOIDmode, addr);
9043 temp = force_reg (GET_MODE (op0), op0);
9044 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9045 op1 = force_reg (mode, op1);
9046
9047 /* The increment queue is LIFO, thus we have to `queue'
9048 the instructions in reverse order. */
9049 enqueue_insn (op0, gen_move_insn (op0, temp));
9050 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9051 return result;
9052 }
9053 }
9054
9055 /* Preincrement, or we can't increment with one simple insn. */
9056 if (post)
9057 /* Save a copy of the value before inc or dec, to return it later. */
9058 temp = value = copy_to_reg (op0);
9059 else
9060 /* Arrange to return the incremented value. */
9061 /* Copy the rtx because expand_binop will protect from the queue,
9062 and the results of that would be invalid for us to return
9063 if our caller does emit_queue before using our result. */
9064 temp = copy_rtx (value = op0);
9065
9066 /* Increment however we can. */
9067 op1 = expand_binop (mode, this_optab, value, op1,
9068 current_function_check_memory_usage ? NULL_RTX : op0,
9069 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9070 /* Make sure the value is stored into OP0. */
9071 if (op1 != op0)
9072 emit_move_insn (op0, op1);
9073
9074 return temp;
9075 }
9076 \f
9077 /* Expand all function calls contained within EXP, innermost ones first.
9078 But don't look within expressions that have sequence points.
9079 For each CALL_EXPR, record the rtx for its value
9080 in the CALL_EXPR_RTL field. */
9081
9082 static void
9083 preexpand_calls (exp)
9084 tree exp;
9085 {
9086 register int nops, i;
9087 int class = TREE_CODE_CLASS (TREE_CODE (exp));
9088
9089 if (! do_preexpand_calls)
9090 return;
9091
9092 /* Only expressions and references can contain calls. */
9093
9094 if (! IS_EXPR_CODE_CLASS (class) && class != 'r')
9095 return;
9096
9097 switch (TREE_CODE (exp))
9098 {
9099 case CALL_EXPR:
9100 /* Do nothing if already expanded. */
9101 if (CALL_EXPR_RTL (exp) != 0
9102 /* Do nothing if the call returns a variable-sized object. */
9103 || (TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE
9104 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9105 /* Do nothing to built-in functions. */
9106 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9107 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9108 == FUNCTION_DECL)
9109 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9110 return;
9111
9112 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9113 return;
9114
9115 case COMPOUND_EXPR:
9116 case COND_EXPR:
9117 case TRUTH_ANDIF_EXPR:
9118 case TRUTH_ORIF_EXPR:
9119 /* If we find one of these, then we can be sure
9120 the adjust will be done for it (since it makes jumps).
9121 Do it now, so that if this is inside an argument
9122 of a function, we don't get the stack adjustment
9123 after some other args have already been pushed. */
9124 do_pending_stack_adjust ();
9125 return;
9126
9127 case BLOCK:
9128 case RTL_EXPR:
9129 case WITH_CLEANUP_EXPR:
9130 case CLEANUP_POINT_EXPR:
9131 case TRY_CATCH_EXPR:
9132 return;
9133
9134 case SAVE_EXPR:
9135 if (SAVE_EXPR_RTL (exp) != 0)
9136 return;
9137
9138 default:
9139 break;
9140 }
9141
9142 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
9143 for (i = 0; i < nops; i++)
9144 if (TREE_OPERAND (exp, i) != 0)
9145 {
9146 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
9147 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
9148 It doesn't happen before the call is made. */
9149 ;
9150 else
9151 {
9152 class = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9153 if (IS_EXPR_CODE_CLASS (class) || class == 'r')
9154 preexpand_calls (TREE_OPERAND (exp, i));
9155 }
9156 }
9157 }
9158 \f
9159 /* At the start of a function, record that we have no previously-pushed
9160 arguments waiting to be popped. */
9161
9162 void
9163 init_pending_stack_adjust ()
9164 {
9165 pending_stack_adjust = 0;
9166 }
9167
9168 /* When exiting from a function, if safe, clear out any pending stack adjust
9169 so the adjustment won't get done.
9170
9171 Note, if the current function calls alloca, then it must have a
9172 frame pointer regardless of the value of flag_omit_frame_pointer. */
9173
9174 void
9175 clear_pending_stack_adjust ()
9176 {
9177 #ifdef EXIT_IGNORE_STACK
9178 if (optimize > 0
9179 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9180 && EXIT_IGNORE_STACK
9181 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9182 && ! flag_inline_functions)
9183 {
9184 stack_pointer_delta -= pending_stack_adjust;
9185 pending_stack_adjust = 0;
9186 }
9187 #endif
9188 }
9189
9190 /* Pop any previously-pushed arguments that have not been popped yet. */
9191
9192 void
9193 do_pending_stack_adjust ()
9194 {
9195 if (inhibit_defer_pop == 0)
9196 {
9197 if (pending_stack_adjust != 0)
9198 adjust_stack (GEN_INT (pending_stack_adjust));
9199 pending_stack_adjust = 0;
9200 }
9201 }
9202 \f
9203 /* Expand conditional expressions. */
9204
9205 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9206 LABEL is an rtx of code CODE_LABEL, in this function and all the
9207 functions here. */
9208
9209 void
9210 jumpifnot (exp, label)
9211 tree exp;
9212 rtx label;
9213 {
9214 do_jump (exp, label, NULL_RTX);
9215 }
9216
9217 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9218
9219 void
9220 jumpif (exp, label)
9221 tree exp;
9222 rtx label;
9223 {
9224 do_jump (exp, NULL_RTX, label);
9225 }
9226
9227 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9228 the result is zero, or IF_TRUE_LABEL if the result is one.
9229 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9230 meaning fall through in that case.
9231
9232 do_jump always does any pending stack adjust except when it does not
9233 actually perform a jump. An example where there is no jump
9234 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9235
9236 This function is responsible for optimizing cases such as
9237 &&, || and comparison operators in EXP. */
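/* For example, `jumpifnot (a && b, label)' emits a jump to LABEL if A is
zero, then evaluates B and jumps to LABEL if B is zero; no boolean
value for the `&&' is ever computed into a register. */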
9238
9239 void
9240 do_jump (exp, if_false_label, if_true_label)
9241 tree exp;
9242 rtx if_false_label, if_true_label;
9243 {
9244 register enum tree_code code = TREE_CODE (exp);
9245 /* Some cases need to create a label to jump to
9246 in order to properly fall through.
9247 These cases set DROP_THROUGH_LABEL nonzero. */
9248 rtx drop_through_label = 0;
9249 rtx temp;
9250 int i;
9251 tree type;
9252 enum machine_mode mode;
9253
9254 #ifdef MAX_INTEGER_COMPUTATION_MODE
9255 check_max_integer_computation_mode (exp);
9256 #endif
9257
9258 emit_queue ();
9259
9260 switch (code)
9261 {
9262 case ERROR_MARK:
9263 break;
9264
9265 case INTEGER_CST:
9266 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9267 if (temp)
9268 emit_jump (temp);
9269 break;
9270
9271 #if 0
9272 /* This is not true with #pragma weak */
9273 case ADDR_EXPR:
9274 /* The address of something can never be zero. */
9275 if (if_true_label)
9276 emit_jump (if_true_label);
9277 break;
9278 #endif
9279
9280 case NOP_EXPR:
9281 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9282 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9283 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9284 goto normal;
9285 case CONVERT_EXPR:
9286 /* If we are narrowing the operand, we have to do the compare in the
9287 narrower mode. */
9288 if ((TYPE_PRECISION (TREE_TYPE (exp))
9289 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9290 goto normal;
9291 case NON_LVALUE_EXPR:
9292 case REFERENCE_EXPR:
9293 case ABS_EXPR:
9294 case NEGATE_EXPR:
9295 case LROTATE_EXPR:
9296 case RROTATE_EXPR:
9297 /* These cannot change zero->non-zero or vice versa. */
9298 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9299 break;
9300
9301 case WITH_RECORD_EXPR:
9302 /* Put the object on the placeholder list, recurse through our first
9303 operand, and pop the list. */
9304 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9305 placeholder_list);
9306 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9307 placeholder_list = TREE_CHAIN (placeholder_list);
9308 break;
9309
9310 #if 0
9311 /* This is never less insns than evaluating the PLUS_EXPR followed by
9312 a test and can be longer if the test is eliminated. */
9313 case PLUS_EXPR:
9314 /* Reduce to minus. */
9315 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9316 TREE_OPERAND (exp, 0),
9317 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9318 TREE_OPERAND (exp, 1))));
9319 /* Process as MINUS. */
9320 #endif
9321
9322 case MINUS_EXPR:
9323 /* Non-zero iff operands of minus differ. */
9324 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9325 TREE_OPERAND (exp, 0),
9326 TREE_OPERAND (exp, 1)),
9327 NE, NE, if_false_label, if_true_label);
9328 break;
9329
9330 case BIT_AND_EXPR:
9331 /* If we are AND'ing with a small constant, do this comparison in the
9332 smallest type that fits. If the machine doesn't have comparisons
9333 that small, it will be converted back to the wider comparison.
9334 This helps if we are testing the sign bit of a narrower object.
9335 combine can't do this for us because it can't know whether a
9336 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
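/* For example, `if (x & 0x80)' with a 32-bit X: the mask fits in 8 bits,
so the test is done in the 8-bit unsigned type (QImode), in effect
testing the sign bit of the low-order byte. */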
9337
9338 if (! SLOW_BYTE_ACCESS
9339 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9340 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9341 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9342 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9343 && (type = type_for_mode (mode, 1)) != 0
9344 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9345 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9346 != CODE_FOR_nothing))
9347 {
9348 do_jump (convert (type, exp), if_false_label, if_true_label);
9349 break;
9350 }
9351 goto normal;
9352
9353 case TRUTH_NOT_EXPR:
9354 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9355 break;
9356
9357 case TRUTH_ANDIF_EXPR:
9358 if (if_false_label == 0)
9359 if_false_label = drop_through_label = gen_label_rtx ();
9360 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9361 start_cleanup_deferral ();
9362 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9363 end_cleanup_deferral ();
9364 break;
9365
9366 case TRUTH_ORIF_EXPR:
9367 if (if_true_label == 0)
9368 if_true_label = drop_through_label = gen_label_rtx ();
9369 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9370 start_cleanup_deferral ();
9371 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9372 end_cleanup_deferral ();
9373 break;
9374
9375 case COMPOUND_EXPR:
9376 push_temp_slots ();
9377 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9378 preserve_temp_slots (NULL_RTX);
9379 free_temp_slots ();
9380 pop_temp_slots ();
9381 emit_queue ();
9382 do_pending_stack_adjust ();
9383 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9384 break;
9385
9386 case COMPONENT_REF:
9387 case BIT_FIELD_REF:
9388 case ARRAY_REF:
9389 {
9390 HOST_WIDE_INT bitsize, bitpos;
9391 int unsignedp;
9392 enum machine_mode mode;
9393 tree type;
9394 tree offset;
9395 int volatilep = 0;
9396 unsigned int alignment;
9397
9398 /* Get description of this reference. We don't actually care
9399 about the underlying object here. */
9400 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9401 &unsignedp, &volatilep, &alignment);
9402
9403 type = type_for_size (bitsize, unsignedp);
9404 if (! SLOW_BYTE_ACCESS
9405 && type != 0 && bitsize >= 0
9406 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9407 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9408 != CODE_FOR_nothing))
9409 {
9410 do_jump (convert (type, exp), if_false_label, if_true_label);
9411 break;
9412 }
9413 goto normal;
9414 }
9415
9416 case COND_EXPR:
9417 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9418 if (integer_onep (TREE_OPERAND (exp, 1))
9419 && integer_zerop (TREE_OPERAND (exp, 2)))
9420 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9421
9422 else if (integer_zerop (TREE_OPERAND (exp, 1))
9423 && integer_onep (TREE_OPERAND (exp, 2)))
9424 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9425
9426 else
9427 {
9428 register rtx label1 = gen_label_rtx ();
9429 drop_through_label = gen_label_rtx ();
9430
9431 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9432
9433 start_cleanup_deferral ();
9434 /* Now the THEN-expression. */
9435 do_jump (TREE_OPERAND (exp, 1),
9436 if_false_label ? if_false_label : drop_through_label,
9437 if_true_label ? if_true_label : drop_through_label);
9438 /* In case the do_jump just above never jumps. */
9439 do_pending_stack_adjust ();
9440 emit_label (label1);
9441
9442 /* Now the ELSE-expression. */
9443 do_jump (TREE_OPERAND (exp, 2),
9444 if_false_label ? if_false_label : drop_through_label,
9445 if_true_label ? if_true_label : drop_through_label);
9446 end_cleanup_deferral ();
9447 }
9448 break;
9449
9450 case EQ_EXPR:
9451 {
9452 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9453
9454 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9455 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9456 {
9457 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9458 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9459 do_jump
9460 (fold
9461 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9462 fold (build (EQ_EXPR, TREE_TYPE (exp),
9463 fold (build1 (REALPART_EXPR,
9464 TREE_TYPE (inner_type),
9465 exp0)),
9466 fold (build1 (REALPART_EXPR,
9467 TREE_TYPE (inner_type),
9468 exp1)))),
9469 fold (build (EQ_EXPR, TREE_TYPE (exp),
9470 fold (build1 (IMAGPART_EXPR,
9471 TREE_TYPE (inner_type),
9472 exp0)),
9473 fold (build1 (IMAGPART_EXPR,
9474 TREE_TYPE (inner_type),
9475 exp1)))))),
9476 if_false_label, if_true_label);
9477 }
9478
9479 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9480 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9481
9482 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9483 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9484 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9485 else
9486 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9487 break;
9488 }
9489
9490 case NE_EXPR:
9491 {
9492 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9493
9494 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9495 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9496 {
9497 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9498 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9499 do_jump
9500 (fold
9501 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9502 fold (build (NE_EXPR, TREE_TYPE (exp),
9503 fold (build1 (REALPART_EXPR,
9504 TREE_TYPE (inner_type),
9505 exp0)),
9506 fold (build1 (REALPART_EXPR,
9507 TREE_TYPE (inner_type),
9508 exp1)))),
9509 fold (build (NE_EXPR, TREE_TYPE (exp),
9510 fold (build1 (IMAGPART_EXPR,
9511 TREE_TYPE (inner_type),
9512 exp0)),
9513 fold (build1 (IMAGPART_EXPR,
9514 TREE_TYPE (inner_type),
9515 exp1)))))),
9516 if_false_label, if_true_label);
9517 }
9518
9519 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9520 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9521
9522 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9523 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9524 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9525 else
9526 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9527 break;
9528 }
9529
9530 case LT_EXPR:
9531 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9532 if (GET_MODE_CLASS (mode) == MODE_INT
9533 && ! can_compare_p (LT, mode, ccp_jump))
9534 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9535 else
9536 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9537 break;
9538
9539 case LE_EXPR:
9540 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9541 if (GET_MODE_CLASS (mode) == MODE_INT
9542 && ! can_compare_p (LE, mode, ccp_jump))
9543 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9544 else
9545 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9546 break;
9547
9548 case GT_EXPR:
9549 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9550 if (GET_MODE_CLASS (mode) == MODE_INT
9551 && ! can_compare_p (GT, mode, ccp_jump))
9552 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9553 else
9554 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9555 break;
9556
9557 case GE_EXPR:
9558 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9559 if (GET_MODE_CLASS (mode) == MODE_INT
9560 && ! can_compare_p (GE, mode, ccp_jump))
9561 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9562 else
9563 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9564 break;
9565
9566 case UNORDERED_EXPR:
9567 case ORDERED_EXPR:
9568 {
9569 enum rtx_code cmp, rcmp;
9570 int do_rev;
9571
9572 if (code == UNORDERED_EXPR)
9573 cmp = UNORDERED, rcmp = ORDERED;
9574 else
9575 cmp = ORDERED, rcmp = UNORDERED;
9576 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9577
9578 do_rev = 0;
9579 if (! can_compare_p (cmp, mode, ccp_jump)
9580 && (can_compare_p (rcmp, mode, ccp_jump)
9581 /* If the target doesn't provide either UNORDERED or ORDERED
9582 comparisons, canonicalize on UNORDERED for the library. */
9583 || rcmp == UNORDERED))
9584 do_rev = 1;
9585
9586 if (! do_rev)
9587 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9588 else
9589 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9590 }
9591 break;
9592
9593 {
9594 enum rtx_code rcode1;
9595 enum tree_code tcode2;
9596
9597 case UNLT_EXPR:
9598 rcode1 = UNLT;
9599 tcode2 = LT_EXPR;
9600 goto unordered_bcc;
9601 case UNLE_EXPR:
9602 rcode1 = UNLE;
9603 tcode2 = LE_EXPR;
9604 goto unordered_bcc;
9605 case UNGT_EXPR:
9606 rcode1 = UNGT;
9607 tcode2 = GT_EXPR;
9608 goto unordered_bcc;
9609 case UNGE_EXPR:
9610 rcode1 = UNGE;
9611 tcode2 = GE_EXPR;
9612 goto unordered_bcc;
9613 case UNEQ_EXPR:
9614 rcode1 = UNEQ;
9615 tcode2 = EQ_EXPR;
9616 goto unordered_bcc;
9617
9618 unordered_bcc:
9619 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9620 if (can_compare_p (rcode1, mode, ccp_jump))
9621 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9622 if_true_label);
9623 else
9624 {
9625 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9626 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9627 tree cmp0, cmp1;
9628
9629 /* If the target doesn't support combined unordered
9630 compares, decompose into UNORDERED + comparison. */
9631 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9632 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9633 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9634 do_jump (exp, if_false_label, if_true_label);
9635 }
9636 }
9637 break;
9638
9639 default:
9640 normal:
9641 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9642 #if 0
9643 /* This is not needed any more and causes poor code since it causes
9644 comparisons and tests from non-SI objects to have different code
9645 sequences. */
9646 /* Copy to register to avoid generating bad insns by cse
9647 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9648 if (!cse_not_expected && GET_CODE (temp) == MEM)
9649 temp = copy_to_reg (temp);
9650 #endif
9651 do_pending_stack_adjust ();
9652 /* Do any postincrements in the expression that was tested. */
9653 emit_queue ();
9654
9655 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9656 {
9657 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9658 if (target)
9659 emit_jump (target);
9660 }
9661 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9662 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9663 /* Note swapping the labels gives us not-equal. */
9664 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9665 else if (GET_MODE (temp) != VOIDmode)
9666 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9667 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9668 GET_MODE (temp), NULL_RTX, 0,
9669 if_false_label, if_true_label);
9670 else
9671 abort ();
9672 }
9673
9674 if (drop_through_label)
9675 {
9676 /* If do_jump produces code that might be jumped around,
9677 do any stack adjusts from that code, before the place
9678 where control merges in. */
9679 do_pending_stack_adjust ();
9680 emit_label (drop_through_label);
9681 }
9682 }
9683 \f
9684 /* Given a comparison expression EXP for values too wide to be compared
9685 with one insn, test the comparison and jump to the appropriate label.
9686 The code of EXP is ignored; we always test GT if SWAP is 0,
9687 and LT if SWAP is 1. */
9688
9689 static void
9690 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9691 tree exp;
9692 int swap;
9693 rtx if_false_label, if_true_label;
9694 {
9695 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9696 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9697 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9698 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9699
9700 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9701 }
9702
9703 /* Compare OP0 with OP1, word at a time, in mode MODE.
9704 UNSIGNEDP says to do unsigned comparison.
9705 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
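/* For example, two DImode values on a 32-bit target are compared one
SImode word at a time, high-order word first: jump to IF_TRUE_LABEL if
OP0's word is greater, to IF_FALSE_LABEL if it is less, and go on to
the (unsigned) comparison of the low-order words only when the high
words are equal. If every word is equal, OP0 is not greater and
IF_FALSE_LABEL is taken. */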
9706
9707 void
9708 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9709 enum machine_mode mode;
9710 int unsignedp;
9711 rtx op0, op1;
9712 rtx if_false_label, if_true_label;
9713 {
9714 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9715 rtx drop_through_label = 0;
9716 int i;
9717
9718 if (! if_true_label || ! if_false_label)
9719 drop_through_label = gen_label_rtx ();
9720 if (! if_true_label)
9721 if_true_label = drop_through_label;
9722 if (! if_false_label)
9723 if_false_label = drop_through_label;
9724
9725 /* Compare a word at a time, high order first. */
9726 for (i = 0; i < nwords; i++)
9727 {
9728 rtx op0_word, op1_word;
9729
9730 if (WORDS_BIG_ENDIAN)
9731 {
9732 op0_word = operand_subword_force (op0, i, mode);
9733 op1_word = operand_subword_force (op1, i, mode);
9734 }
9735 else
9736 {
9737 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9738 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9739 }
9740
9741 /* All but the high-order word must be compared as unsigned. */
9742 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9743 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9744 NULL_RTX, if_true_label);
9745
9746 /* Consider lower words only if these are equal. */
9747 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9748 NULL_RTX, 0, NULL_RTX, if_false_label);
9749 }
9750
9751 if (if_false_label)
9752 emit_jump (if_false_label);
9753 if (drop_through_label)
9754 emit_label (drop_through_label);
9755 }
9756
9757 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9758 with one insn, test the comparison and jump to the appropriate label. */
9759
9760 static void
9761 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9762 tree exp;
9763 rtx if_false_label, if_true_label;
9764 {
9765 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9766 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9767 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9768 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9769 int i;
9770 rtx drop_through_label = 0;
9771
9772 if (! if_false_label)
9773 drop_through_label = if_false_label = gen_label_rtx ();
9774
9775 for (i = 0; i < nwords; i++)
9776 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9777 operand_subword_force (op1, i, mode),
9778 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9779 word_mode, NULL_RTX, 0, if_false_label,
9780 NULL_RTX);
9781
9782 if (if_true_label)
9783 emit_jump (if_true_label);
9784 if (drop_through_label)
9785 emit_label (drop_through_label);
9786 }
9787 \f
9788 /* Jump according to whether OP0 is 0.
9789 We assume that OP0 has an integer mode that is too wide
9790 for the available compare insns. */
9791
9792 void
9793 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9794 rtx op0;
9795 rtx if_false_label, if_true_label;
9796 {
9797 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9798 rtx part;
9799 int i;
9800 rtx drop_through_label = 0;
9801
9802 /* The fastest way of doing this comparison on almost any machine is to
9803 "or" all the words and compare the result. If all have to be loaded
9804 from memory and this is a very wide item, it's possible this may
9805 be slower, but that's highly unlikely. */
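/* For example, a DImode value on a 32-bit target is tested by IORing its
two SImode words into one register and comparing that single result
with zero, instead of branching on each word separately. */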
9806
9807 part = gen_reg_rtx (word_mode);
9808 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9809 for (i = 1; i < nwords && part != 0; i++)
9810 part = expand_binop (word_mode, ior_optab, part,
9811 operand_subword_force (op0, i, GET_MODE (op0)),
9812 part, 1, OPTAB_WIDEN);
9813
9814 if (part != 0)
9815 {
9816 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9817 NULL_RTX, 0, if_false_label, if_true_label);
9818
9819 return;
9820 }
9821
9822 /* If we couldn't do the "or" simply, do this with a series of compares. */
9823 if (! if_false_label)
9824 drop_through_label = if_false_label = gen_label_rtx ();
9825
9826 for (i = 0; i < nwords; i++)
9827 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9828 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9829 if_false_label, NULL_RTX);
9830
9831 if (if_true_label)
9832 emit_jump (if_true_label);
9833
9834 if (drop_through_label)
9835 emit_label (drop_through_label);
9836 }
9837 \f
9838 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9839 (including code to compute the values to be compared)
9840 and set (CC0) according to the result.
9841 The decision as to signed or unsigned comparison must be made by the caller.
9842
9843 We force a stack adjustment unless there are currently
9844 things pushed on the stack that aren't yet used.
9845
9846 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9847 compared.
9848
9849 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9850 size of MODE should be used. */
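/* The value returned is a comparison rtx of the form
(CODE (cc0) (const_int 0)), except that when both operands are
CONST_INTs the comparison may fold to a constant. */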
9851
9852 rtx
9853 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9854 register rtx op0, op1;
9855 enum rtx_code code;
9856 int unsignedp;
9857 enum machine_mode mode;
9858 rtx size;
9859 unsigned int align;
9860 {
9861 rtx tem;
9862
9863 /* If one operand is constant, make it the second one. Only do this
9864 if the other operand is not constant as well. */
9865
9866 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9867 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9868 {
9869 tem = op0;
9870 op0 = op1;
9871 op1 = tem;
9872 code = swap_condition (code);
9873 }
9874
9875 if (flag_force_mem)
9876 {
9877 op0 = force_not_mem (op0);
9878 op1 = force_not_mem (op1);
9879 }
9880
9881 do_pending_stack_adjust ();
9882
9883 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9884 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9885 return tem;
9886
9887 #if 0
9888 /* There's no need to do this now that combine.c can eliminate lots of
9889 sign extensions. This can be less efficient in certain cases on other
9890 machines. */
9891
9892 /* If this is a signed equality comparison, we can do it as an
9893 unsigned comparison since zero-extension is cheaper than sign
9894 extension and comparisons with zero are done as unsigned. This is
9895 the case even on machines that can do fast sign extension, since
9896 zero-extension is easier to combine with other operations than
9897 sign-extension is. If we are comparing against a constant, we must
9898 convert it to what it would look like unsigned. */
9899 if ((code == EQ || code == NE) && ! unsignedp
9900 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9901 {
9902 if (GET_CODE (op1) == CONST_INT
9903 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9904 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9905 unsignedp = 1;
9906 }
9907 #endif
9908
9909 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9910
9911 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9912 }
9913
9914 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9915 The decision as to signed or unsigned comparison must be made by the caller.
9916
9917 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9918 compared.
9919
9920 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9921 size of MODE should be used. */
9922
9923 void
9924 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9925 if_false_label, if_true_label)
9926 register rtx op0, op1;
9927 enum rtx_code code;
9928 int unsignedp;
9929 enum machine_mode mode;
9930 rtx size;
9931 unsigned int align;
9932 rtx if_false_label, if_true_label;
9933 {
9934 rtx tem;
9935 int dummy_true_label = 0;
9936
9937 /* Reverse the comparison if that is safe and we want to jump if it is
9938 false. */
9939 if (! if_true_label && ! FLOAT_MODE_P (mode))
9940 {
9941 if_true_label = if_false_label;
9942 if_false_label = 0;
9943 code = reverse_condition (code);
9944 }
9945
9946 /* If one operand is constant, make it the second one. Only do this
9947 if the other operand is not constant as well. */
9948
9949 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9950 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9951 {
9952 tem = op0;
9953 op0 = op1;
9954 op1 = tem;
9955 code = swap_condition (code);
9956 }
9957
9958 if (flag_force_mem)
9959 {
9960 op0 = force_not_mem (op0);
9961 op1 = force_not_mem (op1);
9962 }
9963
9964 do_pending_stack_adjust ();
9965
9966 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9967 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9968 {
9969 if (tem == const_true_rtx)
9970 {
9971 if (if_true_label)
9972 emit_jump (if_true_label);
9973 }
9974 else
9975 {
9976 if (if_false_label)
9977 emit_jump (if_false_label);
9978 }
9979 return;
9980 }
9981
9982 #if 0
9983 /* There's no need to do this now that combine.c can eliminate lots of
9984 sign extensions. This can be less efficient in certain cases on other
9985 machines. */
9986
9987 /* If this is a signed equality comparison, we can do it as an
9988 unsigned comparison since zero-extension is cheaper than sign
9989 extension and comparisons with zero are done as unsigned. This is
9990 the case even on machines that can do fast sign extension, since
9991 zero-extension is easier to combine with other operations than
9992 sign-extension is. If we are comparing against a constant, we must
9993 convert it to what it would look like unsigned. */
9994 if ((code == EQ || code == NE) && ! unsignedp
9995 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9996 {
9997 if (GET_CODE (op1) == CONST_INT
9998 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9999 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10000 unsignedp = 1;
10001 }
10002 #endif
10003
10004 if (! if_true_label)
10005 {
10006 dummy_true_label = 1;
10007 if_true_label = gen_label_rtx ();
10008 }
10009
10010 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10011 if_true_label);
10012
10013 if (if_false_label)
10014 emit_jump (if_false_label);
10015 if (dummy_true_label)
10016 emit_label (if_true_label);
10017 }
10018
10019 /* Generate code for a comparison expression EXP (including code to compute
10020 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10021 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10022 generated code will drop through.
10023 SIGNED_CODE should be the rtx operation for this comparison for
10024 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10025
10026 We force a stack adjustment unless there are currently
10027 things pushed on the stack that aren't yet used. */
10028
10029 static void
10030 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10031 if_true_label)
10032 register tree exp;
10033 enum rtx_code signed_code, unsigned_code;
10034 rtx if_false_label, if_true_label;
10035 {
10036 unsigned int align0, align1;
10037 register rtx op0, op1;
10038 register tree type;
10039 register enum machine_mode mode;
10040 int unsignedp;
10041 enum rtx_code code;
10042
10043 /* Don't crash if the comparison was erroneous. */
10044 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10045 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10046 return;
10047
10048 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10049 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10050 mode = TYPE_MODE (type);
10051 unsignedp = TREE_UNSIGNED (type);
10052 code = unsignedp ? unsigned_code : signed_code;
10053
10054 #ifdef HAVE_canonicalize_funcptr_for_compare
10055 /* If function pointers need to be "canonicalized" before they can
10056 be reliably compared, then canonicalize them. */
10057 if (HAVE_canonicalize_funcptr_for_compare
10058 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10059 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10060 == FUNCTION_TYPE))
10061 {
10062 rtx new_op0 = gen_reg_rtx (mode);
10063
10064 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10065 op0 = new_op0;
10066 }
10067
10068 if (HAVE_canonicalize_funcptr_for_compare
10069 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10070 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10071 == FUNCTION_TYPE))
10072 {
10073 rtx new_op1 = gen_reg_rtx (mode);
10074
10075 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10076 op1 = new_op1;
10077 }
10078 #endif
10079
10080 /* Do any postincrements in the expression that was tested. */
10081 emit_queue ();
10082
10083 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10084 ((mode == BLKmode)
10085 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10086 MIN (align0, align1),
10087 if_false_label, if_true_label);
10088 }
10089 \f
10090 /* Generate code to calculate EXP using a store-flag instruction
10091 and return an rtx for the result. EXP is either a comparison
10092 or a TRUTH_NOT_EXPR whose operand is a comparison.
10093
10094 If TARGET is nonzero, store the result there if convenient.
10095
10096 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10097 cheap.
10098
10099 Return zero if there is no suitable set-flag instruction
10100 available on this machine.
10101
10102 Once expand_expr has been called on the arguments of the comparison,
10103 we are committed to doing the store flag, since it is not safe to
10104 re-evaluate the expression. We emit the store-flag insn by calling
10105 emit_store_flag, but only expand the arguments if we have a reason
10106 to believe that emit_store_flag will be successful. If we think that
10107 it will, but it isn't, we have to simulate the store-flag with a
10108 set/jump/set sequence. */
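/* For example, if the scc insn for `r = (x > y)' turns out not to exist
after all, the fallback stores 1 in R, compares X with Y, branches
around the next insn when the condition holds, and otherwise stores 0
in R. */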
10109
10110 static rtx
10111 do_store_flag (exp, target, mode, only_cheap)
10112 tree exp;
10113 rtx target;
10114 enum machine_mode mode;
10115 int only_cheap;
10116 {
10117 enum rtx_code code;
10118 tree arg0, arg1, type;
10119 tree tem;
10120 enum machine_mode operand_mode;
10121 int invert = 0;
10122 int unsignedp;
10123 rtx op0, op1;
10124 enum insn_code icode;
10125 rtx subtarget = target;
10126 rtx result, label;
10127
10128 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10129 result at the end. We can't simply invert the test since it would
10130 have already been inverted if it were valid. This case occurs for
10131 some floating-point comparisons. */
10132
10133 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10134 invert = 1, exp = TREE_OPERAND (exp, 0);
10135
10136 arg0 = TREE_OPERAND (exp, 0);
10137 arg1 = TREE_OPERAND (exp, 1);
10138 type = TREE_TYPE (arg0);
10139 operand_mode = TYPE_MODE (type);
10140 unsignedp = TREE_UNSIGNED (type);
10141
10142 /* We won't bother with BLKmode store-flag operations because it would mean
10143 passing a lot of information to emit_store_flag. */
10144 if (operand_mode == BLKmode)
10145 return 0;
10146
10147 /* We won't bother with store-flag operations involving function pointers
10148 when function pointers must be canonicalized before comparisons. */
10149 #ifdef HAVE_canonicalize_funcptr_for_compare
10150 if (HAVE_canonicalize_funcptr_for_compare
10151 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10152 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10153 == FUNCTION_TYPE))
10154 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10155 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10156 == FUNCTION_TYPE))))
10157 return 0;
10158 #endif
10159
10160 STRIP_NOPS (arg0);
10161 STRIP_NOPS (arg1);
10162
10163 /* Get the rtx comparison code to use. We know that EXP is a comparison
10164 operation of some type. Some comparisons against 1 and -1 can be
10165 converted to comparisons with zero. Do so here so that the tests
10166 below will be aware that we have a comparison with zero. These
10167 tests will not catch constants in the first operand, but constants
10168 are rarely passed as the first operand. */
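/* For instance, a signed `x < 1' becomes `x <= 0' and a signed `x > -1'
becomes `x >= 0', so the cheaper compare-with-zero forms are used. */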
10169
10170 switch (TREE_CODE (exp))
10171 {
10172 case EQ_EXPR:
10173 code = EQ;
10174 break;
10175 case NE_EXPR:
10176 code = NE;
10177 break;
10178 case LT_EXPR:
10179 if (integer_onep (arg1))
10180 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10181 else
10182 code = unsignedp ? LTU : LT;
10183 break;
10184 case LE_EXPR:
10185 if (! unsignedp && integer_all_onesp (arg1))
10186 arg1 = integer_zero_node, code = LT;
10187 else
10188 code = unsignedp ? LEU : LE;
10189 break;
10190 case GT_EXPR:
10191 if (! unsignedp && integer_all_onesp (arg1))
10192 arg1 = integer_zero_node, code = GE;
10193 else
10194 code = unsignedp ? GTU : GT;
10195 break;
10196 case GE_EXPR:
10197 if (integer_onep (arg1))
10198 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10199 else
10200 code = unsignedp ? GEU : GE;
10201 break;
10202
10203 case UNORDERED_EXPR:
10204 code = UNORDERED;
10205 break;
10206 case ORDERED_EXPR:
10207 code = ORDERED;
10208 break;
10209 case UNLT_EXPR:
10210 code = UNLT;
10211 break;
10212 case UNLE_EXPR:
10213 code = UNLE;
10214 break;
10215 case UNGT_EXPR:
10216 code = UNGT;
10217 break;
10218 case UNGE_EXPR:
10219 code = UNGE;
10220 break;
10221 case UNEQ_EXPR:
10222 code = UNEQ;
10223 break;
10224
10225 default:
10226 abort ();
10227 }
10228
10229 /* Put a constant second. */
10230 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10231 {
10232 tem = arg0; arg0 = arg1; arg1 = tem;
10233 code = swap_condition (code);
10234 }
10235
10236 /* If this is an equality or inequality test of a single bit, we can
10237 do this by shifting the bit being tested to the low-order bit and
10238 masking the result with the constant 1. If the condition was EQ,
10239 we xor it with 1. This does not require an scc insn and is faster
10240 than an scc insn even if we have it. */
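/* For example, `(x & 8) != 0' becomes `(x >> 3) & 1', and `(x & 8) == 0'
becomes `((x >> 3) & 1) ^ 1'. */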
10241
10242 if ((code == NE || code == EQ)
10243 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10244 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10245 {
10246 tree inner = TREE_OPERAND (arg0, 0);
10247 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10248 int ops_unsignedp;
10249
10250 /* If INNER is a right shift of a constant and it plus BITNUM does
10251 not overflow, adjust BITNUM and INNER. */
10252
10253 if (TREE_CODE (inner) == RSHIFT_EXPR
10254 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10255 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10256 && bitnum < TYPE_PRECISION (type)
10257 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10258 bitnum - TYPE_PRECISION (type)))
10259 {
10260 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10261 inner = TREE_OPERAND (inner, 0);
10262 }
10263
10264 /* If we are going to be able to omit the AND below, we must do our
10265 operations as unsigned. If we must use the AND, we have a choice.
10266 Normally unsigned is faster, but for some machines signed is. */
10267 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10268 #ifdef LOAD_EXTEND_OP
10269 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10270 #else
10271 : 1
10272 #endif
10273 );
10274
10275 if (! get_subtarget (subtarget)
10276 || GET_MODE (subtarget) != operand_mode
10277 || ! safe_from_p (subtarget, inner, 1))
10278 subtarget = 0;
10279
10280 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10281
10282 if (bitnum != 0)
10283 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10284 size_int (bitnum), subtarget, ops_unsignedp);
10285
10286 if (GET_MODE (op0) != mode)
10287 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10288
10289 if ((code == EQ && ! invert) || (code == NE && invert))
10290 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10291 ops_unsignedp, OPTAB_LIB_WIDEN);
10292
10293 /* Put the AND last so it can combine with more things. */
10294 if (bitnum != TYPE_PRECISION (type) - 1)
10295 op0 = expand_and (op0, const1_rtx, subtarget);
10296
10297 return op0;
10298 }
10299
10300 /* Now see if we are likely to be able to do this. Return if not. */
10301 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10302 return 0;
10303
10304 icode = setcc_gen_code[(int) code];
10305 if (icode == CODE_FOR_nothing
10306 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10307 {
10308 /* We can only do this if it is one of the special cases that
10309 can be handled without an scc insn. */
10310 if ((code == LT && integer_zerop (arg1))
10311 || (! only_cheap && code == GE && integer_zerop (arg1)))
10312 ;
10313 else if (BRANCH_COST >= 0
10314 && ! only_cheap && (code == NE || code == EQ)
10315 && TREE_CODE (type) != REAL_TYPE
10316 && ((abs_optab->handlers[(int) operand_mode].insn_code
10317 != CODE_FOR_nothing)
10318 || (ffs_optab->handlers[(int) operand_mode].insn_code
10319 != CODE_FOR_nothing)))
10320 ;
10321 else
10322 return 0;
10323 }
10324
10325 preexpand_calls (exp);
10326 if (! get_subtarget (target)
10327 || GET_MODE (subtarget) != operand_mode
10328 || ! safe_from_p (subtarget, arg1, 1))
10329 subtarget = 0;
10330
10331 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10332 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10333
10334 if (target == 0)
10335 target = gen_reg_rtx (mode);
10336
10337 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10338 because, if emit_store_flag does anything, it will succeed and
10339 OP0 and OP1 will not be used subsequently. */
10340
10341 result = emit_store_flag (target, code,
10342 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10343 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10344 operand_mode, unsignedp, 1);
10345
10346 if (result)
10347 {
10348 if (invert)
10349 result = expand_binop (mode, xor_optab, result, const1_rtx,
10350 result, 0, OPTAB_LIB_WIDEN);
10351 return result;
10352 }
10353
10354 /* If this failed, we have to do this with set/compare/jump/set code. */
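/* That is, emit roughly:
       target = 1;  if (op0 <code> op1) goto label;  target = 0;  label:
   with the two constants swapped when INVERT is set.  */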
10355 if (GET_CODE (target) != REG
10356 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10357 target = gen_reg_rtx (GET_MODE (target));
10358
10359 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10360 result = compare_from_rtx (op0, op1, code, unsignedp,
10361 operand_mode, NULL_RTX, 0);
10362 if (GET_CODE (result) == CONST_INT)
10363 return (((result == const0_rtx && ! invert)
10364 || (result != const0_rtx && invert))
10365 ? const0_rtx : const1_rtx);
10366
10367 label = gen_label_rtx ();
10368 if (bcc_gen_fctn[(int) code] == 0)
10369 abort ();
10370
10371 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10372 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10373 emit_label (label);
10374
10375 return target;
10376 }
10377 \f
10378 /* Generate a tablejump instruction (used for switch statements). */
10379
10380 #ifdef HAVE_tablejump
10381
10382 /* INDEX is the value being switched on, with the lowest value
10383 in the table already subtracted.
10384 MODE is its expected mode (needed if INDEX is constant).
10385 RANGE is the length of the jump table.
10386 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10387
10388 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10389 index value is out of range. */
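/* In outline, the generated code behaves like:

       if ((unsigned) INDEX > RANGE)
         goto DEFAULT_LABEL;
       goto *TABLE_LABEL[INDEX];

   where each table entry holds an absolute or pc-relative label address,
   and the address arithmetic is adjusted for PIC where required.  */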
10390
10391 void
10392 do_tablejump (index, mode, range, table_label, default_label)
10393 rtx index, range, table_label, default_label;
10394 enum machine_mode mode;
10395 {
10396 register rtx temp, vector;
10397
10398 /* Do an unsigned comparison (in the proper mode) between the index
10399 expression and the value which represents the length of the range.
10400 Since we just finished subtracting the lower bound of the range
10401 from the index expression, this comparison allows us to simultaneously
10402 check that the original index expression value is both greater than
10403 or equal to the minimum value of the range and less than or equal to
10404 the maximum value of the range. */
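/* For example, with case values 5 through 12 the caller has already
   subtracted 5 and RANGE is 7; an original value of 3 becomes the huge
   unsigned number (unsigned) -2, so the single GTU test rejects it just
   as it rejects anything above 12.  */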
10405
10406 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10407 0, default_label);
10408
10409 /* If index is in range, it must fit in Pmode.
10410 Convert to Pmode so we can index with it. */
10411 if (mode != Pmode)
10412 index = convert_to_mode (Pmode, index, 1);
10413
10414 /* Don't let a MEM slip through, because then the INDEX that comes
10415 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10416 and break_out_memory_refs will go to work on it and mess it up. */
10417 #ifdef PIC_CASE_VECTOR_ADDRESS
10418 if (flag_pic && GET_CODE (index) != REG)
10419 index = copy_to_mode_reg (Pmode, index);
10420 #endif
10421
10422 /* If flag_force_addr were to affect this address
10423 it could interfere with the tricky assumptions made
10424 about addresses that contain label-refs,
10425 which may be valid only very near the tablejump itself. */
10426 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10427 GET_MODE_SIZE, because this indicates how large insns are. The other
10428 uses should all be Pmode, because they are addresses. This code
10429 could fail if addresses and insns are not the same size. */
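/* The address built here is TABLE_LABEL + INDEX * (size of one table
   entry), i.e. an ordinary scaled index into the dispatch table.  */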
10430 index = gen_rtx_PLUS (Pmode,
10431 gen_rtx_MULT (Pmode, index,
10432 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10433 gen_rtx_LABEL_REF (Pmode, table_label));
10434 #ifdef PIC_CASE_VECTOR_ADDRESS
10435 if (flag_pic)
10436 index = PIC_CASE_VECTOR_ADDRESS (index);
10437 else
10438 #endif
10439 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10440 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10441 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10442 RTX_UNCHANGING_P (vector) = 1;
10443 convert_move (temp, vector, 0);
10444
10445 emit_jump_insn (gen_tablejump (temp, table_label));
10446
10447 /* If we are generating PIC code or if the table is PC-relative, the
10448 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10449 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10450 emit_barrier ();
10451 }
10452
10453 #endif /* HAVE_tablejump */