/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
#include "system.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "defaults.h"
#include "toplev.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))
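/* For example, CEIL (7, 4) is 2: seven bytes occupy two 4-byte words.
   It is used below to count the words needed for a multiword value.  */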

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* Don't check memory usage, since code is being emitted to check memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;

/* Postincrements that still need to be expanded.  */
static rtx pending_chain;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;			/* Destination of the move.  */
  rtx to_addr;			/* Address of the destination.  */
  int autinc_to;		/* Nonzero if TO_ADDR is auto-incremented.  */
  int explicit_inc_to;		/* Sign of explicit adjustment of TO_ADDR.  */
  int to_struct;		/* Nonzero if TO is within a structure.  */
  rtx from;			/* Source of the move.  */
  rtx from_addr;		/* Address of the source.  */
  int autinc_from;		/* Nonzero if FROM_ADDR is auto-incremented.  */
  int explicit_inc_from;	/* Sign of explicit adjustment of FROM_ADDR.  */
  int from_struct;		/* Nonzero if FROM is within a structure.  */
  int len;			/* Bytes remaining to be moved.  */
  int offset;			/* Current byte offset within the blocks.  */
  int reverse;			/* Nonzero to move from high to low addresses.  */
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;			/* Destination of the clear.  */
  rtx to_addr;			/* Address of the destination.  */
  int autinc_to;		/* Nonzero if TO_ADDR is auto-incremented.  */
  int explicit_inc_to;		/* Sign of explicit adjustment of TO_ADDR.  */
  int to_struct;		/* Nonzero if TO is within a structure.  */
  int len;			/* Bytes remaining to be cleared.  */
  int offset;			/* Current byte offset within the block.  */
  int reverse;			/* Nonzero to clear from high to low addresses.  */
};

extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;

static rtx get_push_address	PROTO ((int));

static rtx enqueue_insn		PROTO ((rtx, rtx));
static void init_queue		PROTO ((void));
static int move_by_pieces_ninsns PROTO ((unsigned int, int));
static void move_by_pieces_1	PROTO ((rtx (*) (rtx, ...), enum machine_mode,
					struct move_by_pieces *));
static void clear_by_pieces	PROTO ((rtx, int, int));
static void clear_by_pieces_1	PROTO ((rtx (*) (rtx, ...), enum machine_mode,
					struct clear_by_pieces *));
static int is_zeros_p		PROTO ((tree));
static int mostly_zeros_p	PROTO ((tree));
static void store_constructor_field PROTO ((rtx, int, int, enum machine_mode,
					    tree, tree, int));
static void store_constructor	PROTO ((tree, rtx, int));
static rtx store_field		PROTO ((rtx, int, int, enum machine_mode, tree,
					enum machine_mode, int, int,
					int, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PROTO ((enum expand_modifier));
static tree save_noncopied_parts PROTO ((tree, tree));
static tree init_noncopied_parts PROTO ((tree, tree));
static int safe_from_p		PROTO ((rtx, tree, int));
static int fixed_type_p		PROTO ((tree));
static rtx var_rtx		PROTO ((tree));
static int get_pointer_alignment PROTO ((tree, unsigned));
static tree string_constant	PROTO ((tree, tree *));
static tree c_strlen		PROTO ((tree));
static rtx get_memory_rtx	PROTO ((tree));
static rtx expand_builtin	PROTO ((tree, rtx, rtx,
					enum machine_mode, int));
static int apply_args_size	PROTO ((void));
static int apply_result_size	PROTO ((void));
static rtx result_vector	PROTO ((int, rtx));
static rtx expand_builtin_apply_args PROTO ((void));
static rtx expand_builtin_apply	PROTO ((rtx, rtx, rtx));
static void expand_builtin_return PROTO ((rtx));
static rtx expand_increment	PROTO ((tree, int, int));
static void preexpand_calls	PROTO ((tree));
static void do_jump_by_parts_greater PROTO ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PROTO ((tree, rtx, rtx));
static void do_jump_for_compare	PROTO ((rtx, rtx, rtx));
static rtx compare		PROTO ((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag	PROTO ((tree, rtx, enum machine_mode, int));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns	\
				       (SIZE, ALIGN) < MOVE_RATIO)
#endif
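
/* For example, on a 32-bit target with MOVE_MAX of 4 and the default
   MOVE_RATIO of 15, a word-aligned 32-byte copy costs 8 SImode moves,
   so MOVE_BY_PIECES_P (32, 4) is true and the copy is expanded inline;
   a 128-byte copy would cost 32 moves and falls through to a movstr
   pattern or a library call instead.  (Illustrative numbers; the actual
   cutoff depends on the target's MOVE_RATIO and the alignment.)  */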

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  char *free_point;

  start_sequence ();

  /* Since we are on the permanent obstack, we must be sure we save this
     spot AFTER we call start_sequence, since it will reuse the rtl it
     makes.  */
  free_point = (char *) oballoc (0);

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
  obfree (free_point);
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  p->pending_chain = pending_chain;
  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_chain = NULL_RTX;
  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_chain = p->pending_chain;
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var),
				  var, NULL_RTX, NULL_RTX, body,
				  pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_COPY_ATTRIBUTES (new, x);
	  MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	{
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	}
      else
	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
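
/* Illustrative sketch (comments only, not compiled): code that expands
   a post-increment typically does

     enqueue_insn (var, gen_move_insn (var, plus_constant (var, 1)));
     ...
     op = protect_from_queue (op, 0);	-- just before using OP in an insn
     emit_queue ();			-- flush all pending increments

   so that uses of VAR observe its pre-increment value, as described
   above.  */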
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */		/* SPEE 900220 */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
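
/* For example (a sketch, not compiled code): to zero-extend a QImode
   pseudo into an SImode pseudo one would write

     rtx src = gen_reg_rtx (QImode);
     rtx dst = gen_reg_rtx (SImode);
     convert_move (dst, src, 1);	-- 1 = treat SRC as unsigned

   which emits a zero_extend insn if the target has one, or falls back
   to the word-at-a-time and shift strategies above.  */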

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
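
/* Worked example: with HOST_BITS_PER_WIDE_INT of 32, converting
   (const_int -1) from QImode to unsigned HImode takes the CONST_INT
   branch above: VAL is masked to 0xff and, since UNSIGNEDP is set, no
   sign extension follows, so the result is (const_int 255) rather
   than (const_int -1).  */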
\f

/* This macro determines the largest unit size that move_by_pieces
   can use.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES	MOVE_MAX
#endif

/* Some architectures do not have complete pre/post increment/decrement
   instruction sets, or only move some modes efficiently.  These macros
   allow us to fine-tune move_by_pieces for these targets.  */

#ifndef USE_LOAD_POST_INCREMENT
#define USE_LOAD_POST_INCREMENT(MODE)	HAVE_POST_INCREMENT
#endif

#ifndef USE_LOAD_PRE_DECREMENT
#define USE_LOAD_PRE_DECREMENT(MODE)	HAVE_PRE_DECREMENT
#endif

#ifndef USE_STORE_POST_INCREMENT
#define USE_STORE_POST_INCREMENT(MODE)	HAVE_POST_INCREMENT
#endif

#ifndef USE_STORE_PRE_DECREMENT
#define USE_STORE_PRE_DECREMENT(MODE)	HAVE_PRE_DECREMENT
#endif
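
/* For instance, a target whose post-increment loads are only profitable
   for word-sized accesses might override the default in its target header
   with something like (hypothetical, not from any in-tree target):

     #define USE_LOAD_POST_INCREMENT(MODE) ((MODE) == SImode)

   move_by_pieces tests these macros, with the widest move mode it found,
   before committing to an auto-increment addressing strategy.  */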

/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
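
/* Worked example: on a target with MOVE_MAX of 4 where QImode, HImode
   and SImode all have mov patterns, a 7-byte block with 4-byte alignment
   costs one SImode move (4 bytes), one HImode move (2 bytes) and one
   QImode move (1 byte): 3 insns in all, so such a copy stays below the
   default MOVE_RATIO of 15 and is done by pieces.  */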

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx_MEM (mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

      from1
	= (data->autinc_from
	   ? gen_rtx_MEM (mode, data->from_addr)
	   : copy_rtx (change_address (data->from, mode,
				       plus_constant (data->from_addr,
						      data->offset))));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));

      emit_insn ((*genfun) (to1, from1));
      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= (GET_MODE_MASK (mode) >> 1)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return 0;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
	 memcpy in this context.

	 This could be a user call to memcpy and the user may wish to
	 examine the return value from memcpy.

	 For targets where libcalls and normal calls have different conventions
	 for returning pointers, we could end up generating incorrect code.

	 So instead of using a libcall sequence we build up a suitable
	 CALL_EXPR and expand the call in the normal fashion.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;

	  /* This was copied from except.c, I don't know if all this is
	     necessary in this context or not.  */
	  fn = get_identifier ("memcpy");
	  push_obstacks_nochange ();
	  end_temporary_allocation ();
	  fntype = build_pointer_type (void_type_node);
	  fntype = build_function_type (fntype, NULL_TREE);
	  fn = build_decl (FUNCTION_DECL, fn, fntype);
	  DECL_EXTERNAL (fn) = 1;
	  TREE_PUBLIC (fn) = 1;
	  DECL_ARTIFICIAL (fn) = 1;
	  make_decl_rtl (fn, NULL_PTR, 1);
	  assemble_external (fn);
	  pop_obstacks ();
	}

      /* We need to make an argument list for the function call.

	 memcpy has three arguments, the first two are void * addresses and
	 the last is a size_t byte count for the copy.  */
      arg_list
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node),
				      XEXP (x, 0)));
      TREE_CHAIN (arg_list)
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node),
				      XEXP (y, 0)));
      TREE_CHAIN (TREE_CHAIN (arg_list))
	= build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
			 call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
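
/* Typical use (a sketch; X and Y stand for BLKmode MEMs the caller
   already has):

     emit_block_move (x, y, GEN_INT (nbytes), align);

   copies NBYTES bytes from Y to X.  Small constant-size copies are
   expanded inline via move_by_pieces; larger ones use the target's
   movstr patterns, and failing that a call to memcpy (or bcopy),
   as above.  */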
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
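
/* For instance (register number purely illustrative), passing a DImode
   value X in two word registers starting at hard register 4 would be

     move_block_to_reg (4, x, 2, DImode);

   which uses a load_multiple insn when the target provides one, and
   word-by-word moves otherwise.  */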

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be copied is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (change_address (x, mode, NULL),
		      gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
1909
1910 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1911 registers represented by a PARALLEL. SSIZE represents the total size of
1912 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1913 SRC in bytes. */
1914 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1915 the balance will be in what would be the low-order memory addresses, i.e.
1916 left justified for big endian, right justified for little endian. This
1917 happens to be true for the targets currently using this support. If this
1918 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1919 would be needed. */
1920
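/* For illustration, DST has the shape built for multi-register return
   values and arguments: each element pairs a register with its byte
   offset into the block, e.g. (hypothetical registers and offsets)

     (parallel [(expr_list (reg:DI 4) (const_int 0))
                (expr_list (reg:DI 5) (const_int 8))])

   with a NULL register in the first element meaning part of the value
   lives on the stack, as checked below.  */
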
1921 void
1922 emit_group_load (dst, orig_src, ssize, align)
1923 rtx dst, orig_src;
1924 int align, ssize;
1925 {
1926 rtx *tmps, src;
1927 int start, i;
1928
1929 if (GET_CODE (dst) != PARALLEL)
1930 abort ();
1931
1932 /* Check for a NULL entry, used to indicate that the parameter goes
1933 both on the stack and in registers. */
1934 if (XEXP (XVECEXP (dst, 0, 0), 0))
1935 start = 0;
1936 else
1937 start = 1;
1938
1939 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1940
1941 /* If we won't be loading directly from memory, protect the real source
1942 from strange tricks we might play. */
1943 src = orig_src;
1944 if (GET_CODE (src) != MEM)
1945 {
1946 src = gen_reg_rtx (GET_MODE (orig_src));
1947 emit_move_insn (src, orig_src);
1948 }
1949
1950 /* Process the pieces. */
1951 for (i = start; i < XVECLEN (dst, 0); i++)
1952 {
1953 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1954 int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1955 int bytelen = GET_MODE_SIZE (mode);
1956 int shift = 0;
1957
1958 /* Handle trailing fragments that run over the size of the struct. */
1959 if (ssize >= 0 && bytepos + bytelen > ssize)
1960 {
1961 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1962 bytelen = ssize - bytepos;
1963 if (bytelen <= 0)
1964 abort ();
1965 }
1966
1967 /* Optimize the access just a bit. */
1968 if (GET_CODE (src) == MEM
1969 && align * BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1970 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1971 && bytelen == GET_MODE_SIZE (mode))
1972 {
1973 tmps[i] = gen_reg_rtx (mode);
1974 emit_move_insn (tmps[i],
1975 change_address (src, mode,
1976 plus_constant (XEXP (src, 0),
1977 bytepos)));
1978 }
1979 else
1980 {
1981 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1982 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1983 mode, mode, align, ssize);
1984 }
1985
1986 if (BYTES_BIG_ENDIAN && shift)
1987 {
1988 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1989 tmps[i], 0, OPTAB_WIDEN);
1990 }
1991 }
1992 emit_queue ();
1993
1994 /* Copy the extracted pieces into the proper (probable) hard regs. */
1995 for (i = start; i < XVECLEN (dst, 0); i++)
1996 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1997 }
1998
1999 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2000 registers represented by a PARALLEL. SSIZE represents the total size of
2001 block DST, or -1 if not known. ALIGN is the known alignment of DST in bytes. */
2002
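/* An illustrative use, with hypothetical operands: scattering the pieces
   of a PARALLEL SRC like the one sketched above into a 16-byte,
   word-aligned BLKmode stack slot DST might be requested as

     emit_group_store (dst, src, 16, UNITS_PER_WORD);
*/
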
2003 void
2004 emit_group_store (orig_dst, src, ssize, align)
2005 rtx orig_dst, src;
2006 int ssize, align;
2007 {
2008 rtx *tmps, dst;
2009 int start, i;
2010
2011 if (GET_CODE (src) != PARALLEL)
2012 abort ();
2013
2014 /* Check for a NULL entry, used to indicate that the parameter goes
2015 both on the stack and in registers. */
2016 if (XEXP (XVECEXP (src, 0, 0), 0))
2017 start = 0;
2018 else
2019 start = 1;
2020
2021 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2022
2023 /* Copy the (probable) hard regs into pseudos. */
2024 for (i = start; i < XVECLEN (src, 0); i++)
2025 {
2026 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2027 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2028 emit_move_insn (tmps[i], reg);
2029 }
2030 emit_queue ();
2031
2032 /* If we won't be storing directly into memory, protect the real destination
2033 from strange tricks we might play. */
2034 dst = orig_dst;
2035 if (GET_CODE (dst) == PARALLEL)
2036 {
2037 rtx temp;
2038
2039 /* We can get a PARALLEL dst if there is a conditional expression in
2040 a return statement. In that case, the dst and src are the same,
2041 so no action is necessary. */
2042 if (rtx_equal_p (dst, src))
2043 return;
2044
2045 /* It is unclear if we can ever reach here, but we may as well handle
2046 it. Allocate a temporary, and split this into a store/load to/from
2047 the temporary. */
2048
2049 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2050 emit_group_store (temp, src, ssize, align);
2051 emit_group_load (dst, temp, ssize, align);
2052 return;
2053 }
2054 else if (GET_CODE (dst) != MEM)
2055 {
2056 dst = gen_reg_rtx (GET_MODE (orig_dst));
2057 /* Make life a bit easier for combine. */
2058 emit_move_insn (dst, const0_rtx);
2059 }
2060 else if (! MEM_IN_STRUCT_P (dst))
2061 {
2062 /* store_bit_field requires that memory operations have
2063 mem_in_struct_p set; we might not. */
2064
2065 dst = copy_rtx (orig_dst);
2066 MEM_SET_IN_STRUCT_P (dst, 1);
2067 }
2068
2069 /* Process the pieces. */
2070 for (i = start; i < XVECLEN (src, 0); i++)
2071 {
2072 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2073 enum machine_mode mode = GET_MODE (tmps[i]);
2074 int bytelen = GET_MODE_SIZE (mode);
2075
2076 /* Handle trailing fragments that run over the size of the struct. */
2077 if (ssize >= 0 && bytepos + bytelen > ssize)
2078 {
2079 if (BYTES_BIG_ENDIAN)
2080 {
2081 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2082 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2083 tmps[i], 0, OPTAB_WIDEN);
2084 }
2085 bytelen = ssize - bytepos;
2086 }
2087
2088 /* Optimize the access just a bit. */
2089 if (GET_CODE (dst) == MEM
2090 && align * BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2091 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2092 && bytelen == GET_MODE_SIZE (mode))
2093 {
2094 emit_move_insn (change_address (dst, mode,
2095 plus_constant (XEXP (dst, 0),
2096 bytepos)),
2097 tmps[i]);
2098 }
2099 else
2100 {
2101 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2102 mode, tmps[i], align, ssize);
2103 }
2104 }
2105 emit_queue ();
2106
2107 /* Copy from the pseudo into the (probable) hard reg. */
2108 if (GET_CODE (dst) == REG)
2109 emit_move_insn (orig_dst, dst);
2110 }
2111
2112 /* Generate code to copy a BLKmode object of TYPE out of a
2113 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2114 is null, a stack temporary is created. TGTBLK is returned.
2115
2116 The primary purpose of this routine is to handle functions
2117 that return BLKmode structures in registers. Some machines
2118 (the PA for example) want to return all small structures
2119 in registers regardless of the structure's alignment.
2120 */
2121
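/* An illustrative use, with a hypothetical type: for a 6-byte struct
   TYPE returned in registers, with SRCREG the return-value register,

     result = copy_blkmode_from_reg (NULL_RTX, srcreg, type);

   yields a stack temporary holding the bytes of the struct.  */
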
2122 rtx
2123 copy_blkmode_from_reg (tgtblk, srcreg, type)
2124 rtx tgtblk;
2125 rtx srcreg;
2126 tree type;
2127 {
2128 int bytes = int_size_in_bytes (type);
2129 rtx src = NULL, dst = NULL;
2130 int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD);
2131 int bitpos, xbitpos, big_endian_correction = 0;
2132
2133 if (tgtblk == 0)
2134 {
2135 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2136 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2137 preserve_temp_slots (tgtblk);
2138 }
2139
2140 /* This code assumes srcreg is at least a full word. If it isn't,
2141 copy it into a new pseudo which is a full word. */
2142 if (GET_MODE (srcreg) != BLKmode
2143 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2144 srcreg = convert_to_mode (word_mode, srcreg,
2145 TREE_UNSIGNED (type));
2146
2147 /* Structures whose size is not a multiple of a word are aligned
2148 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2149 machine, this means we must skip the empty high order bytes when
2150 calculating the bit offset. */
2151 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2152 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2153 * BITS_PER_UNIT));
2154
2155 /* Copy the structure BITSIZE bits at a time.
2156
2157 We could probably emit more efficient code for machines
2158 which do not use strict alignment, but it doesn't seem
2159 worth the effort at the current time. */
2160 for (bitpos = 0, xbitpos = big_endian_correction;
2161 bitpos < bytes * BITS_PER_UNIT;
2162 bitpos += bitsize, xbitpos += bitsize)
2163 {
2164
2165 /* We need a new source operand each time xbitpos is on a
2166 word boundary and when xbitpos == big_endian_correction
2167 (the first time through). */
2168 if (xbitpos % BITS_PER_WORD == 0
2169 || xbitpos == big_endian_correction)
2170 src = operand_subword_force (srcreg,
2171 xbitpos / BITS_PER_WORD,
2172 BLKmode);
2173
2174 /* We need a new destination operand each time bitpos is on
2175 a word boundary. */
2176 if (bitpos % BITS_PER_WORD == 0)
2177 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2178
2179 /* Use xbitpos for the source extraction (right justified) and
2180 bitpos for the destination store (left justified). */
2181 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2182 extract_bit_field (src, bitsize,
2183 xbitpos % BITS_PER_WORD, 1,
2184 NULL_RTX, word_mode,
2185 word_mode,
2186 bitsize / BITS_PER_UNIT,
2187 BITS_PER_WORD),
2188 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2189 }
2190 return tgtblk;
2191 }
2192
2193
2194 /* Add a USE expression for REG to the (possibly empty) list pointed
2195 to by CALL_FUSAGE. REG must denote a hard register. */
2196
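/* An illustrative use, with a hypothetical register: a caller
   collecting the registers a call reads might write

     rtx call_fusage = 0;
     use_reg (&call_fusage, gen_rtx_REG (Pmode, 4));

   and later attach CALL_FUSAGE to the call insn's
   CALL_INSN_FUNCTION_USAGE.  */
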
2197 void
2198 use_reg (call_fusage, reg)
2199 rtx *call_fusage, reg;
2200 {
2201 if (GET_CODE (reg) != REG
2202 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2203 abort ();
2204
2205 *call_fusage
2206 = gen_rtx_EXPR_LIST (VOIDmode,
2207 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2208 }
2209
2210 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2211 starting at REGNO. All of these registers must be hard registers. */
2212
2213 void
2214 use_regs (call_fusage, regno, nregs)
2215 rtx *call_fusage;
2216 int regno;
2217 int nregs;
2218 {
2219 int i;
2220
2221 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2222 abort ();
2223
2224 for (i = 0; i < nregs; i++)
2225 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2226 }
2227
2228 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2229 PARALLEL REGS. This is for calls that pass values in multiple
2230 non-contiguous locations. The Irix 6 ABI has examples of this. */
2231
2232 void
2233 use_group_regs (call_fusage, regs)
2234 rtx *call_fusage;
2235 rtx regs;
2236 {
2237 int i;
2238
2239 for (i = 0; i < XVECLEN (regs, 0); i++)
2240 {
2241 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2242
2243 /* A NULL entry means the parameter goes both on the stack and in
2244 registers. This can also be a MEM for targets that pass values
2245 partially on the stack and partially in registers. */
2246 if (reg != 0 && GET_CODE (reg) == REG)
2247 use_reg (call_fusage, reg);
2248 }
2249 }
2250 \f
2251 /* Generate several move instructions to clear LEN bytes of block TO.
2252 (A MEM rtx with BLKmode). The caller must pass TO through
2253 protect_from_queue before calling. ALIGN (in bytes) is the maximum
2254 alignment we can assume. */
2255
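/* For illustration, clear_storage below dispatches here as, e.g.
   (hypothetical operands),

     clear_by_pieces (to, 16, 4);

   zeroing 16 bytes of word-aligned memory with a short sequence of
   integer-mode stores.  */
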
2256 static void
2257 clear_by_pieces (to, len, align)
2258 rtx to;
2259 int len, align;
2260 {
2261 struct clear_by_pieces data;
2262 rtx to_addr = XEXP (to, 0);
2263 int max_size = MOVE_MAX_PIECES + 1;
2264 enum machine_mode mode = VOIDmode, tmode;
2265 enum insn_code icode;
2266
2267 data.offset = 0;
2268 data.to_addr = to_addr;
2269 data.to = to;
2270 data.autinc_to
2271 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2272 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2273
2274 data.explicit_inc_to = 0;
2275 data.reverse
2276 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2277 if (data.reverse) data.offset = len;
2278 data.len = len;
2279
2280 data.to_struct = MEM_IN_STRUCT_P (to);
2281
2282 /* If copying requires more than two move insns,
2283 copy addresses to registers (to make displacements shorter)
2284 and use post-increment if available. */
2285 if (!data.autinc_to
2286 && move_by_pieces_ninsns (len, align) > 2)
2287 {
2288 /* Determine the main mode we'll be using. */
2289 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2290 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2291 if (GET_MODE_SIZE (tmode) < max_size)
2292 mode = tmode;
2293
2294 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2295 {
2296 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2297 data.autinc_to = 1;
2298 data.explicit_inc_to = -1;
2299 }
2300 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2301 {
2302 data.to_addr = copy_addr_to_reg (to_addr);
2303 data.autinc_to = 1;
2304 data.explicit_inc_to = 1;
2305 }
2306 if (!data.autinc_to && CONSTANT_P (to_addr))
2307 data.to_addr = copy_addr_to_reg (to_addr);
2308 }
2309
2310 if (! SLOW_UNALIGNED_ACCESS
2311 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2312 align = MOVE_MAX;
2313
2314 /* First move what we can in the largest integer mode, then go to
2315 successively smaller modes. */
2316
2317 while (max_size > 1)
2318 {
2319 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2320 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2321 if (GET_MODE_SIZE (tmode) < max_size)
2322 mode = tmode;
2323
2324 if (mode == VOIDmode)
2325 break;
2326
2327 icode = mov_optab->handlers[(int) mode].insn_code;
2328 if (icode != CODE_FOR_nothing
2329 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2330 GET_MODE_SIZE (mode)))
2331 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2332
2333 max_size = GET_MODE_SIZE (mode);
2334 }
2335
2336 /* The code above should have handled everything. */
2337 if (data.len != 0)
2338 abort ();
2339 }
2340
2341 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2342 with move instructions for mode MODE. GENFUN is the gen_... function
2343 to make a move insn for that mode. DATA has all the other info. */
2344
2345 static void
2346 clear_by_pieces_1 (genfun, mode, data)
2347 rtx (*genfun) PROTO ((rtx, ...));
2348 enum machine_mode mode;
2349 struct clear_by_pieces *data;
2350 {
2351 register int size = GET_MODE_SIZE (mode);
2352 register rtx to1;
2353
2354 while (data->len >= size)
2355 {
2356 if (data->reverse) data->offset -= size;
2357
2358 to1 = (data->autinc_to
2359 ? gen_rtx_MEM (mode, data->to_addr)
2360 : copy_rtx (change_address (data->to, mode,
2361 plus_constant (data->to_addr,
2362 data->offset))));
2363 MEM_IN_STRUCT_P (to1) = data->to_struct;
2364
2365 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2366 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2367
2368 emit_insn ((*genfun) (to1, const0_rtx));
2369 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2370 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2371
2372 if (! data->reverse) data->offset += size;
2373
2374 data->len -= size;
2375 }
2376 }
2377 \f
2378 /* Write zeros through the storage of OBJECT.
2379 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2380 the maximum alignment we can assume it has, measured in bytes.
2381
2382 If we call a function that returns the length of the block, return it. */
2383
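/* An illustrative use, with hypothetical operands: zeroing a 32-byte,
   word-aligned BLKmode object might be requested as

     clear_storage (object, GEN_INT (32), 4);

   which tries clear_by_pieces, then a clrstr pattern, then a call to
   memset (or bzero), in that order.  */
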
2384 rtx
2385 clear_storage (object, size, align)
2386 rtx object;
2387 rtx size;
2388 int align;
2389 {
2390 #ifdef TARGET_MEM_FUNCTIONS
2391 static tree fn;
2392 tree call_expr, arg_list;
2393 #endif
2394 rtx retval = 0;
2395
2396 if (GET_MODE (object) == BLKmode)
2397 {
2398 object = protect_from_queue (object, 1);
2399 size = protect_from_queue (size, 0);
2400
2401 if (GET_CODE (size) == CONST_INT
2402 && MOVE_BY_PIECES_P (INTVAL (size), align))
2403 clear_by_pieces (object, INTVAL (size), align);
2404
2405 else
2406 {
2407 /* Try the most limited insn first, because there's no point
2408 including more than one in the machine description unless
2409 the more limited one has some advantage. */
2410
2411 rtx opalign = GEN_INT (align);
2412 enum machine_mode mode;
2413
2414 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2415 mode = GET_MODE_WIDER_MODE (mode))
2416 {
2417 enum insn_code code = clrstr_optab[(int) mode];
2418
2419 if (code != CODE_FOR_nothing
2420 /* We don't need MODE to be narrower than
2421 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2422 the mode mask, as it is returned by the macro, it will
2423 definitely be less than the actual mode mask. */
2424 && ((GET_CODE (size) == CONST_INT
2425 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2426 <= (GET_MODE_MASK (mode) >> 1)))
2427 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2428 && (insn_operand_predicate[(int) code][0] == 0
2429 || (*insn_operand_predicate[(int) code][0]) (object,
2430 BLKmode))
2431 && (insn_operand_predicate[(int) code][2] == 0
2432 || (*insn_operand_predicate[(int) code][2]) (opalign,
2433 VOIDmode)))
2434 {
2435 rtx op1;
2436 rtx last = get_last_insn ();
2437 rtx pat;
2438
2439 op1 = convert_to_mode (mode, size, 1);
2440 if (insn_operand_predicate[(int) code][1] != 0
2441 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2442 mode))
2443 op1 = copy_to_mode_reg (mode, op1);
2444
2445 pat = GEN_FCN ((int) code) (object, op1, opalign);
2446 if (pat)
2447 {
2448 emit_insn (pat);
2449 return 0;
2450 }
2451 else
2452 delete_insns_since (last);
2453 }
2454 }
2455
2456
2457 #ifdef TARGET_MEM_FUNCTIONS
2458 /* It is incorrect to use the libcall calling conventions to call
2459 memset in this context.
2460
2461 This could be a user call to memset and the user may wish to
2462 examine the return value from memset.
2463
2464 For targets where libcalls and normal calls have different conventions
2465 for returning pointers, we could end up generating incorrect code.
2466
2467 So instead of using a libcall sequence we build up a suitable
2468 CALL_EXPR and expand the call in the normal fashion. */
2469 if (fn == NULL_TREE)
2470 {
2471 tree fntype;
2472
2473 /* This was copied from except.c; I don't know if all of it is
2474 necessary in this context or not. */
2475 fn = get_identifier ("memset");
2476 push_obstacks_nochange ();
2477 end_temporary_allocation ();
2478 fntype = build_pointer_type (void_type_node);
2479 fntype = build_function_type (fntype, NULL_TREE);
2480 fn = build_decl (FUNCTION_DECL, fn, fntype);
2481 DECL_EXTERNAL (fn) = 1;
2482 TREE_PUBLIC (fn) = 1;
2483 DECL_ARTIFICIAL (fn) = 1;
2484 make_decl_rtl (fn, NULL_PTR, 1);
2485 assemble_external (fn);
2486 pop_obstacks ();
2487 }
2488
2489 /* We need to make an argument list for the function call.
2490
2491 memset has three arguments: the first is a void * address, the
2492 second an integer with the initialization value, and the last is a
2493 size_t byte count for the fill. */
2494 arg_list
2495 = build_tree_list (NULL_TREE,
2496 make_tree (build_pointer_type (void_type_node),
2497 XEXP (object, 0)));
2498 TREE_CHAIN (arg_list)
2499 = build_tree_list (NULL_TREE,
2500 make_tree (integer_type_node, const0_rtx));
2501 TREE_CHAIN (TREE_CHAIN (arg_list))
2502 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2503 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2504
2505 /* Now we have to build up the CALL_EXPR itself. */
2506 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2507 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2508 call_expr, arg_list, NULL_TREE);
2509 TREE_SIDE_EFFECTS (call_expr) = 1;
2510
2511 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2512 #else
2513 emit_library_call (bzero_libfunc, 0,
2514 VOIDmode, 2,
2515 XEXP (object, 0), Pmode,
2516 convert_to_mode
2517 (TYPE_MODE (integer_type_node), size,
2518 TREE_UNSIGNED (integer_type_node)),
2519 TYPE_MODE (integer_type_node));
2520 #endif
2521 }
2522 }
2523 else
2524 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2525
2526 return retval;
2527 }
2528
2529 /* Generate code to copy Y into X.
2530 Both Y and X must have the same mode, except that
2531 Y can be a constant with VOIDmode.
2532 This mode cannot be BLKmode; use emit_block_move for that.
2533
2534 Return the last instruction emitted. */
2535
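/* An illustrative use, with hypothetical operands: the common idiom

     emit_move_insn (target, const0_rtx);

   zeroes TARGET, forcing a constant into memory first when
   LEGITIMATE_CONSTANT_P rejects it.  */
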
2536 rtx
2537 emit_move_insn (x, y)
2538 rtx x, y;
2539 {
2540 enum machine_mode mode = GET_MODE (x);
2541
2542 x = protect_from_queue (x, 1);
2543 y = protect_from_queue (y, 0);
2544
2545 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2546 abort ();
2547
2548 /* Never force constant_p_rtx to memory. */
2549 if (GET_CODE (y) == CONSTANT_P_RTX)
2550 ;
2551 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2552 y = force_const_mem (mode, y);
2553
2554 /* If X or Y are memory references, verify that their addresses are valid
2555 for the machine. */
2556 if (GET_CODE (x) == MEM
2557 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2558 && ! push_operand (x, GET_MODE (x)))
2559 || (flag_force_addr
2560 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2561 x = change_address (x, VOIDmode, XEXP (x, 0));
2562
2563 if (GET_CODE (y) == MEM
2564 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2565 || (flag_force_addr
2566 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2567 y = change_address (y, VOIDmode, XEXP (y, 0));
2568
2569 if (mode == BLKmode)
2570 abort ();
2571
2572 return emit_move_insn_1 (x, y);
2573 }
2574
2575 /* Low level part of emit_move_insn.
2576 Called just like emit_move_insn, but assumes X and Y
2577 are basically valid. */
2578
2579 rtx
2580 emit_move_insn_1 (x, y)
2581 rtx x, y;
2582 {
2583 enum machine_mode mode = GET_MODE (x);
2584 enum machine_mode submode;
2585 enum mode_class class = GET_MODE_CLASS (mode);
2586 int i;
2587
2588 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2589 return
2590 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2591
2592 /* Expand complex moves by moving real part and imag part, if possible. */
2593 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2594 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2595 * BITS_PER_UNIT),
2596 (class == MODE_COMPLEX_INT
2597 ? MODE_INT : MODE_FLOAT),
2598 0))
2599 && (mov_optab->handlers[(int) submode].insn_code
2600 != CODE_FOR_nothing))
2601 {
2602 /* Don't split destination if it is a stack push. */
2603 int stack = push_operand (x, GET_MODE (x));
2604
2605 /* If this is a stack push, push the highpart first, so it
2606 will be in the argument order.
2607
2608 In that case, change_address is used only to convert
2609 the mode, not to change the address. */
2610 if (stack)
2611 {
2612 /* Note that the real part always precedes the imag part in memory
2613 regardless of machine's endianness. */
2614 #ifdef STACK_GROWS_DOWNWARD
2615 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2616 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2617 gen_imagpart (submode, y)));
2618 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2619 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2620 gen_realpart (submode, y)));
2621 #else
2622 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2623 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2624 gen_realpart (submode, y)));
2625 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2626 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2627 gen_imagpart (submode, y)));
2628 #endif
2629 }
2630 else
2631 {
2632 /* Show the output dies here. This is necessary for pseudos;
2633 hard regs shouldn't appear here except as return values.
2634 We never want to emit such a clobber after reload. */
2635 if (x != y
2636 && ! (reload_in_progress || reload_completed))
2637 {
2638 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2639 }
2640
2641 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2642 (gen_realpart (submode, x), gen_realpart (submode, y)));
2643 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2644 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2645 }
2646
2647 return get_last_insn ();
2648 }
2649
2650 /* This will handle any multi-word mode that lacks a move_insn pattern.
2651 However, you will get better code if you define such patterns,
2652 even if they must turn into multiple assembler instructions. */
2653 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2654 {
2655 rtx last_insn = 0;
2656
2657 #ifdef PUSH_ROUNDING
2658
2659 /* If X is a push on the stack, do the push now and replace
2660 X with a reference to the stack pointer. */
2661 if (push_operand (x, GET_MODE (x)))
2662 {
2663 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2664 x = change_address (x, VOIDmode, stack_pointer_rtx);
2665 }
2666 #endif
2667
2668 /* Show the output dies here. This is necessary for pseudos;
2669 hard regs shouldn't appear here except as return values.
2670 We never want to emit such a clobber after reload. */
2671 if (x != y
2672 && ! (reload_in_progress || reload_completed))
2673 {
2674 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2675 }
2676
2677 for (i = 0;
2678 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2679 i++)
2680 {
2681 rtx xpart = operand_subword (x, i, 1, mode);
2682 rtx ypart = operand_subword (y, i, 1, mode);
2683
2684 /* If we can't get a part of Y, put Y into memory if it is a
2685 constant. Otherwise, force it into a register. If we still
2686 can't get a part of Y, abort. */
2687 if (ypart == 0 && CONSTANT_P (y))
2688 {
2689 y = force_const_mem (mode, y);
2690 ypart = operand_subword (y, i, 1, mode);
2691 }
2692 else if (ypart == 0)
2693 ypart = operand_subword_force (y, i, mode);
2694
2695 if (xpart == 0 || ypart == 0)
2696 abort ();
2697
2698 last_insn = emit_move_insn (xpart, ypart);
2699 }
2700
2701 return last_insn;
2702 }
2703 else
2704 abort ();
2705 }
2706 \f
2707 /* Pushing data onto the stack. */
2708
2709 /* Push a block of length SIZE (perhaps variable)
2710 and return an rtx to address the beginning of the block.
2711 Note that it is not possible for the value returned to be a QUEUED.
2712 The value may be virtual_outgoing_args_rtx.
2713
2714 EXTRA is the number of bytes of padding to push in addition to SIZE.
2715 BELOW nonzero means this padding comes at low addresses;
2716 otherwise, the padding comes at high addresses. */
2717
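/* An illustrative use, with hypothetical operands: reserving 16 bytes
   of stack for a BLKmode argument, with no padding, might be

     rtx addr = push_block (GEN_INT (16), 0, 0);

   after which the argument is copied to the memory at ADDR.  */
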
2718 rtx
2719 push_block (size, extra, below)
2720 rtx size;
2721 int extra, below;
2722 {
2723 register rtx temp;
2724
2725 size = convert_modes (Pmode, ptr_mode, size, 1);
2726 if (CONSTANT_P (size))
2727 anti_adjust_stack (plus_constant (size, extra));
2728 else if (GET_CODE (size) == REG && extra == 0)
2729 anti_adjust_stack (size);
2730 else
2731 {
2732 rtx temp = copy_to_mode_reg (Pmode, size);
2733 if (extra != 0)
2734 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2735 temp, 0, OPTAB_LIB_WIDEN);
2736 anti_adjust_stack (temp);
2737 }
2738
2739 #if defined (STACK_GROWS_DOWNWARD) \
2740 || (defined (ARGS_GROW_DOWNWARD) \
2741 && !defined (ACCUMULATE_OUTGOING_ARGS))
2742
2743 /* Return the lowest stack address when STACK or ARGS grow downward and
2744 we are not accumulating outgoing arguments (the c4x port uses such
2745 conventions). */
2746 temp = virtual_outgoing_args_rtx;
2747 if (extra != 0 && below)
2748 temp = plus_constant (temp, extra);
2749 #else
2750 if (GET_CODE (size) == CONST_INT)
2751 temp = plus_constant (virtual_outgoing_args_rtx,
2752 - INTVAL (size) - (below ? 0 : extra));
2753 else if (extra != 0 && !below)
2754 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2755 negate_rtx (Pmode, plus_constant (size, extra)));
2756 else
2757 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2758 negate_rtx (Pmode, size));
2759 #endif
2760
2761 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2762 }
2763
2764 rtx
2765 gen_push_operand ()
2766 {
2767 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2768 }
2769
2770 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2771 block of SIZE bytes. */
2772
2773 static rtx
2774 get_push_address (size)
2775 int size;
2776 {
2777 register rtx temp;
2778
2779 if (STACK_PUSH_CODE == POST_DEC)
2780 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2781 else if (STACK_PUSH_CODE == POST_INC)
2782 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2783 else
2784 temp = stack_pointer_rtx;
2785
2786 return copy_to_reg (temp);
2787 }
2788
2789 /* Generate code to push X onto the stack, assuming it has mode MODE and
2790 type TYPE.
2791 MODE is redundant except when X is a CONST_INT (since they don't
2792 carry mode info).
2793 SIZE is an rtx for the size of data to be copied (in bytes),
2794 needed only if X is BLKmode.
2795
2796 ALIGN (in bytes) is the maximum alignment we can assume.
2797
2798 If PARTIAL and REG are both nonzero, then copy that many of the first
2799 words of X into registers starting with REG, and push the rest of X.
2800 The amount of space pushed is decreased by PARTIAL words,
2801 rounded *down* to a multiple of PARM_BOUNDARY.
2802 REG must be a hard register in this case.
2803 If REG is zero but PARTIAL is not, take all other actions for an
2804 argument partially in registers, but do not actually load any
2805 registers.
2806
2807 EXTRA is the amount in bytes of extra space to leave next to this arg.
2808 This is ignored if an argument block has already been allocated.
2809
2810 On a machine that lacks real push insns, ARGS_ADDR is the address of
2811 the bottom of the argument block for this call. We use indexing off there
2812 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2813 argument block has not been preallocated.
2814
2815 ARGS_SO_FAR is the size of args previously pushed for this call.
2816
2817 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2818 for arguments passed in registers. If nonzero, it will be the number
2819 of bytes required. */
2820
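/* An illustrative use, with hypothetical operands: pushing a word-sized
   SImode value X, wholly on the stack, on a machine with push insns and
   no preallocated argument block, might be

     emit_push_insn (x, SImode, NULL_TREE, NULL_RTX, 4, 0, NULL_RTX,
                     0, NULL_RTX, const0_rtx, 0);

   mirroring the recursive word-by-word calls made below for scalars
   passed partly in registers.  */
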
2821 void
2822 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2823 args_addr, args_so_far, reg_parm_stack_space)
2824 register rtx x;
2825 enum machine_mode mode;
2826 tree type;
2827 rtx size;
2828 int align;
2829 int partial;
2830 rtx reg;
2831 int extra;
2832 rtx args_addr;
2833 rtx args_so_far;
2834 int reg_parm_stack_space;
2835 {
2836 rtx xinner;
2837 enum direction stack_direction
2838 #ifdef STACK_GROWS_DOWNWARD
2839 = downward;
2840 #else
2841 = upward;
2842 #endif
2843
2844 /* Decide where to pad the argument: `downward' for below,
2845 `upward' for above, or `none' for don't pad it.
2846 Default is below for small data on big-endian machines; else above. */
2847 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2848
2849 /* Invert direction if stack is post-update. */
2850 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2851 if (where_pad != none)
2852 where_pad = (where_pad == downward ? upward : downward);
2853
2854 xinner = x = protect_from_queue (x, 0);
2855
2856 if (mode == BLKmode)
2857 {
2858 /* Copy a block into the stack, entirely or partially. */
2859
2860 register rtx temp;
2861 int used = partial * UNITS_PER_WORD;
2862 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2863 int skip;
2864
2865 if (size == 0)
2866 abort ();
2867
2868 used -= offset;
2869
2870 /* USED is now the # of bytes we need not copy to the stack
2871 because registers will take care of them. */
2872
2873 if (partial != 0)
2874 xinner = change_address (xinner, BLKmode,
2875 plus_constant (XEXP (xinner, 0), used));
2876
2877 /* If the partial register-part of the arg counts in its stack size,
2878 skip the part of stack space corresponding to the registers.
2879 Otherwise, start copying to the beginning of the stack space,
2880 by setting SKIP to 0. */
2881 skip = (reg_parm_stack_space == 0) ? 0 : used;
2882
2883 #ifdef PUSH_ROUNDING
2884 /* Do it with several push insns if that doesn't take lots of insns
2885 and if there is no difficulty with push insns that skip bytes
2886 on the stack for alignment purposes. */
2887 if (args_addr == 0
2888 && GET_CODE (size) == CONST_INT
2889 && skip == 0
2890 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
2891 /* Here we avoid the case of a structure whose weak alignment
2892 forces many pushes of a small amount of data,
2893 and such small pushes do rounding that causes trouble. */
2894 && ((! SLOW_UNALIGNED_ACCESS)
2895 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2896 || PUSH_ROUNDING (align) == align)
2897 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2898 {
2899 /* Push padding now if padding above and stack grows down,
2900 or if padding below and stack grows up.
2901 But if space already allocated, this has already been done. */
2902 if (extra && args_addr == 0
2903 && where_pad != none && where_pad != stack_direction)
2904 anti_adjust_stack (GEN_INT (extra));
2905
2906 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2907 INTVAL (size) - used, align);
2908
2909 if (current_function_check_memory_usage && ! in_check_memory_usage)
2910 {
2911 rtx temp;
2912
2913 in_check_memory_usage = 1;
2914 temp = get_push_address (INTVAL (size) - used);
2915 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2916 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2917 temp, ptr_mode,
2918 XEXP (xinner, 0), ptr_mode,
2919 GEN_INT (INTVAL (size) - used),
2920 TYPE_MODE (sizetype));
2921 else
2922 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2923 temp, ptr_mode,
2924 GEN_INT (INTVAL (size) - used),
2925 TYPE_MODE (sizetype),
2926 GEN_INT (MEMORY_USE_RW),
2927 TYPE_MODE (integer_type_node));
2928 in_check_memory_usage = 0;
2929 }
2930 }
2931 else
2932 #endif /* PUSH_ROUNDING */
2933 {
2934 /* Otherwise make space on the stack and copy the data
2935 to the address of that space. */
2936
2937 /* Deduct words put into registers from the size we must copy. */
2938 if (partial != 0)
2939 {
2940 if (GET_CODE (size) == CONST_INT)
2941 size = GEN_INT (INTVAL (size) - used);
2942 else
2943 size = expand_binop (GET_MODE (size), sub_optab, size,
2944 GEN_INT (used), NULL_RTX, 0,
2945 OPTAB_LIB_WIDEN);
2946 }
2947
2948 /* Get the address of the stack space.
2949 In this case, we do not deal with EXTRA separately.
2950 A single stack adjust will do. */
2951 if (! args_addr)
2952 {
2953 temp = push_block (size, extra, where_pad == downward);
2954 extra = 0;
2955 }
2956 else if (GET_CODE (args_so_far) == CONST_INT)
2957 temp = memory_address (BLKmode,
2958 plus_constant (args_addr,
2959 skip + INTVAL (args_so_far)));
2960 else
2961 temp = memory_address (BLKmode,
2962 plus_constant (gen_rtx_PLUS (Pmode,
2963 args_addr,
2964 args_so_far),
2965 skip));
2966 if (current_function_check_memory_usage && ! in_check_memory_usage)
2967 {
2968 rtx target;
2969
2970 in_check_memory_usage = 1;
2971 target = copy_to_reg (temp);
2972 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2973 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2974 target, ptr_mode,
2975 XEXP (xinner, 0), ptr_mode,
2976 size, TYPE_MODE (sizetype));
2977 else
2978 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2979 target, ptr_mode,
2980 size, TYPE_MODE (sizetype),
2981 GEN_INT (MEMORY_USE_RW),
2982 TYPE_MODE (integer_type_node));
2983 in_check_memory_usage = 0;
2984 }
2985
2986 /* TEMP is the address of the block. Copy the data there. */
2987 if (GET_CODE (size) == CONST_INT
2988 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align)))
2989 {
2990 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
2991 INTVAL (size), align);
2992 goto ret;
2993 }
2994 else
2995 {
2996 rtx opalign = GEN_INT (align);
2997 enum machine_mode mode;
2998 rtx target = gen_rtx_MEM (BLKmode, temp);
2999
3000 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3001 mode != VOIDmode;
3002 mode = GET_MODE_WIDER_MODE (mode))
3003 {
3004 enum insn_code code = movstr_optab[(int) mode];
3005
3006 if (code != CODE_FOR_nothing
3007 && ((GET_CODE (size) == CONST_INT
3008 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3009 <= (GET_MODE_MASK (mode) >> 1)))
3010 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3011 && (insn_operand_predicate[(int) code][0] == 0
3012 || ((*insn_operand_predicate[(int) code][0])
3013 (target, BLKmode)))
3014 && (insn_operand_predicate[(int) code][1] == 0
3015 || ((*insn_operand_predicate[(int) code][1])
3016 (xinner, BLKmode)))
3017 && (insn_operand_predicate[(int) code][3] == 0
3018 || ((*insn_operand_predicate[(int) code][3])
3019 (opalign, VOIDmode))))
3020 {
3021 rtx op2 = convert_to_mode (mode, size, 1);
3022 rtx last = get_last_insn ();
3023 rtx pat;
3024
3025 if (insn_operand_predicate[(int) code][2] != 0
3026 && ! ((*insn_operand_predicate[(int) code][2])
3027 (op2, mode)))
3028 op2 = copy_to_mode_reg (mode, op2);
3029
3030 pat = GEN_FCN ((int) code) (target, xinner,
3031 op2, opalign);
3032 if (pat)
3033 {
3034 emit_insn (pat);
3035 goto ret;
3036 }
3037 else
3038 delete_insns_since (last);
3039 }
3040 }
3041 }
3042
3043 #ifndef ACCUMULATE_OUTGOING_ARGS
3044 /* If the source is referenced relative to the stack pointer,
3045 copy it to another register to stabilize it. We do not need
3046 to do this if we know that we won't be changing sp. */
3047
3048 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3049 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3050 temp = copy_to_reg (temp);
3051 #endif
3052
3053 /* Make inhibit_defer_pop nonzero around the library call
3054 to force it to pop the bcopy-arguments right away. */
3055 NO_DEFER_POP;
3056 #ifdef TARGET_MEM_FUNCTIONS
3057 emit_library_call (memcpy_libfunc, 0,
3058 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3059 convert_to_mode (TYPE_MODE (sizetype),
3060 size, TREE_UNSIGNED (sizetype)),
3061 TYPE_MODE (sizetype));
3062 #else
3063 emit_library_call (bcopy_libfunc, 0,
3064 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3065 convert_to_mode (TYPE_MODE (integer_type_node),
3066 size,
3067 TREE_UNSIGNED (integer_type_node)),
3068 TYPE_MODE (integer_type_node));
3069 #endif
3070 OK_DEFER_POP;
3071 }
3072 }
3073 else if (partial > 0)
3074 {
3075 /* Scalar partly in registers. */
3076
3077 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3078 int i;
3079 int not_stack;
3080 /* # words of start of argument
3081 that we must make space for but need not store. */
3082 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3083 int args_offset = INTVAL (args_so_far);
3084 int skip;
3085
3086 /* Push padding now if padding above and stack grows down,
3087 or if padding below and stack grows up.
3088 But if space already allocated, this has already been done. */
3089 if (extra && args_addr == 0
3090 && where_pad != none && where_pad != stack_direction)
3091 anti_adjust_stack (GEN_INT (extra));
3092
3093 /* If we make space by pushing it, we might as well push
3094 the real data. Otherwise, we can leave OFFSET nonzero
3095 and leave the space uninitialized. */
3096 if (args_addr == 0)
3097 offset = 0;
3098
3099 /* Now NOT_STACK gets the number of words that we don't need to
3100 allocate on the stack. */
3101 not_stack = partial - offset;
3102
3103 /* If the partial register-part of the arg counts in its stack size,
3104 skip the part of stack space corresponding to the registers.
3105 Otherwise, start copying to the beginning of the stack space,
3106 by setting SKIP to 0. */
3107 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3108
3109 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3110 x = validize_mem (force_const_mem (mode, x));
3111
3112 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3113 SUBREGs of such registers are not allowed. */
3114 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3115 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3116 x = copy_to_reg (x);
3117
3118 /* Loop over all the words allocated on the stack for this arg. */
3119 /* We can do it by words, because any scalar bigger than a word
3120 has a size a multiple of a word. */
3121 #ifndef PUSH_ARGS_REVERSED
3122 for (i = not_stack; i < size; i++)
3123 #else
3124 for (i = size - 1; i >= not_stack; i--)
3125 #endif
3126 if (i >= not_stack + offset)
3127 emit_push_insn (operand_subword_force (x, i, mode),
3128 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3129 0, args_addr,
3130 GEN_INT (args_offset + ((i - not_stack + skip)
3131 * UNITS_PER_WORD)),
3132 reg_parm_stack_space);
3133 }
3134 else
3135 {
3136 rtx addr;
3137 rtx target = NULL_RTX;
3138
3139 /* Push padding now if padding above and stack grows down,
3140 or if padding below and stack grows up.
3141 But if space already allocated, this has already been done. */
3142 if (extra && args_addr == 0
3143 && where_pad != none && where_pad != stack_direction)
3144 anti_adjust_stack (GEN_INT (extra));
3145
3146 #ifdef PUSH_ROUNDING
3147 if (args_addr == 0)
3148 addr = gen_push_operand ();
3149 else
3150 #endif
3151 {
3152 if (GET_CODE (args_so_far) == CONST_INT)
3153 addr
3154 = memory_address (mode,
3155 plus_constant (args_addr,
3156 INTVAL (args_so_far)));
3157 else
3158 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3159 args_so_far));
3160 target = addr;
3161 }
3162
3163 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3164
3165 if (current_function_check_memory_usage && ! in_check_memory_usage)
3166 {
3167 in_check_memory_usage = 1;
3168 if (target == 0)
3169 target = get_push_address (GET_MODE_SIZE (mode));
3170
3171 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3172 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3173 target, ptr_mode,
3174 XEXP (x, 0), ptr_mode,
3175 GEN_INT (GET_MODE_SIZE (mode)),
3176 TYPE_MODE (sizetype));
3177 else
3178 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3179 target, ptr_mode,
3180 GEN_INT (GET_MODE_SIZE (mode)),
3181 TYPE_MODE (sizetype),
3182 GEN_INT (MEMORY_USE_RW),
3183 TYPE_MODE (integer_type_node));
3184 in_check_memory_usage = 0;
3185 }
3186 }
3187
3188 ret:
3189 /* If part should go in registers, copy that part
3190 into the appropriate registers. Do this now, at the end,
3191 since mem-to-mem copies above may do function calls. */
3192 if (partial > 0 && reg != 0)
3193 {
3194 /* Handle calls that pass values in multiple non-contiguous locations.
3195 The Irix 6 ABI has examples of this. */
3196 if (GET_CODE (reg) == PARALLEL)
3197 emit_group_load (reg, x, -1, align); /* ??? size? */
3198 else
3199 move_block_to_reg (REGNO (reg), x, partial, mode);
3200 }
3201
3202 if (extra && args_addr == 0 && where_pad == stack_direction)
3203 anti_adjust_stack (GEN_INT (extra));
3204 }
3205 \f
3206 /* Expand an assignment that stores the value of FROM into TO.
3207 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3208 (This may contain a QUEUED rtx;
3209 if the value is constant, this rtx is a constant.)
3210 Otherwise, the returned value is NULL_RTX.
3211
3212 SUGGEST_REG is no longer actually used.
3213 It used to mean, copy the value through a register
3214 and return that register, if that is possible.
3215 We now use WANT_VALUE to decide whether to do this. */
3216
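/* For illustration: a C assignment `a.b = c' arrives here with TO a
   COMPONENT_REF and is handled by the store_field path below, while a
   plain `a = c' falls through to store_expr at the end.  (Hypothetical
   source; the trees come from the front end.)  */
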
3217 rtx
3218 expand_assignment (to, from, want_value, suggest_reg)
3219 tree to, from;
3220 int want_value;
3221 int suggest_reg;
3222 {
3223 register rtx to_rtx = 0;
3224 rtx result;
3225
3226 /* Don't crash if the lhs of the assignment was erroneous. */
3227
3228 if (TREE_CODE (to) == ERROR_MARK)
3229 {
3230 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3231 return want_value ? result : NULL_RTX;
3232 }
3233
3234 /* Assignment of a structure component needs special treatment
3235 if the structure component's rtx is not simply a MEM.
3236 Assignment of an array element at a constant index, and assignment of
3237 an array element in an unaligned packed structure field, has the same
3238 problem. */
3239
3240 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3241 || TREE_CODE (to) == ARRAY_REF)
3242 {
3243 enum machine_mode mode1;
3244 int bitsize;
3245 int bitpos;
3246 tree offset;
3247 int unsignedp;
3248 int volatilep = 0;
3249 tree tem;
3250 int alignment;
3251
3252 push_temp_slots ();
3253 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3254 &unsignedp, &volatilep, &alignment);
3255
3256 /* If we are going to use store_bit_field and extract_bit_field,
3257 make sure to_rtx will be safe for multiple use. */
3258
3259 if (mode1 == VOIDmode && want_value)
3260 tem = stabilize_reference (tem);
3261
3262 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3263 if (offset != 0)
3264 {
3265 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3266
3267 if (GET_CODE (to_rtx) != MEM)
3268 abort ();
3269
3270 if (GET_MODE (offset_rtx) != ptr_mode)
3271 {
3272 #ifdef POINTERS_EXTEND_UNSIGNED
3273 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3274 #else
3275 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3276 #endif
3277 }
3278
3279 if (GET_CODE (to_rtx) == MEM
3280 && GET_MODE (to_rtx) == BLKmode
3281 && bitsize
3282 && (bitpos % bitsize) == 0
3283 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3284 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3285 {
3286 rtx temp = change_address (to_rtx, mode1,
3287 plus_constant (XEXP (to_rtx, 0),
3288 (bitpos /
3289 BITS_PER_UNIT)));
3290 if (GET_CODE (XEXP (temp, 0)) == REG)
3291 to_rtx = temp;
3292 else
3293 to_rtx = change_address (to_rtx, mode1,
3294 force_reg (GET_MODE (XEXP (temp, 0)),
3295 XEXP (temp, 0)));
3296 bitpos = 0;
3297 }
3298
3299 to_rtx = change_address (to_rtx, VOIDmode,
3300 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3301 force_reg (ptr_mode, offset_rtx)));
3302 }
3303 if (volatilep)
3304 {
3305 if (GET_CODE (to_rtx) == MEM)
3306 {
3307 /* When the offset is zero, to_rtx is the address of the
3308 structure we are storing into, and hence may be shared.
3309 We must make a new MEM before setting the volatile bit. */
3310 if (offset == 0)
3311 to_rtx = copy_rtx (to_rtx);
3312
3313 MEM_VOLATILE_P (to_rtx) = 1;
3314 }
3315 #if 0 /* This was turned off because, when a field is volatile
3316 in an object which is not volatile, the object may be in a register,
3317 and then we would abort over here. */
3318 else
3319 abort ();
3320 #endif
3321 }
3322
3323 if (TREE_CODE (to) == COMPONENT_REF
3324 && TREE_READONLY (TREE_OPERAND (to, 1)))
3325 {
3326 if (offset == 0)
3327 to_rtx = copy_rtx (to_rtx);
3328
3329 RTX_UNCHANGING_P (to_rtx) = 1;
3330 }
3331
3332 /* Check the access. */
3333 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3334 {
3335 rtx to_addr;
3336 int size;
3337 int best_mode_size;
3338 enum machine_mode best_mode;
3339
3340 best_mode = get_best_mode (bitsize, bitpos,
3341 TYPE_ALIGN (TREE_TYPE (tem)),
3342 mode1, volatilep);
3343 if (best_mode == VOIDmode)
3344 best_mode = QImode;
3345
3346 best_mode_size = GET_MODE_BITSIZE (best_mode);
3347 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3348 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3349 size *= GET_MODE_SIZE (best_mode);
3350
3351 /* Check the access right of the pointer. */
3352 if (size)
3353 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3354 to_addr, ptr_mode,
3355 GEN_INT (size), TYPE_MODE (sizetype),
3356 GEN_INT (MEMORY_USE_WO),
3357 TYPE_MODE (integer_type_node));
3358 }
3359
3360 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3361 (want_value
3362 /* Spurious cast makes HPUX compiler happy. */
3363 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3364 : VOIDmode),
3365 unsignedp,
3366 /* Required alignment of containing datum. */
3367 alignment,
3368 int_size_in_bytes (TREE_TYPE (tem)),
3369 get_alias_set (to));
3370 preserve_temp_slots (result);
3371 free_temp_slots ();
3372 pop_temp_slots ();
3373
3374 /* If the value is meaningful, convert RESULT to the proper mode.
3375 Otherwise, return nothing. */
3376 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3377 TYPE_MODE (TREE_TYPE (from)),
3378 result,
3379 TREE_UNSIGNED (TREE_TYPE (to)))
3380 : NULL_RTX);
3381 }
3382
3383 /* If the rhs is a function call and its value is not an aggregate,
3384 call the function before we start to compute the lhs.
3385 This is needed for correct code for cases such as
3386 val = setjmp (buf) on machines where reference to val
3387 requires loading up part of an address in a separate insn.
3388
3389 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3390 a promoted variable where the zero- or sign- extension needs to be done.
3391 Handling this in the normal way is safe because no computation is done
3392 before the call. */
3393 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3394 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3395 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3396 {
3397 rtx value;
3398
3399 push_temp_slots ();
3400 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3401 if (to_rtx == 0)
3402 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3403
3404 /* Handle calls that return values in multiple non-contiguous locations.
3405 The Irix 6 ABI has examples of this. */
3406 if (GET_CODE (to_rtx) == PARALLEL)
3407 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3408 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3409 else if (GET_MODE (to_rtx) == BLKmode)
3410 emit_block_move (to_rtx, value, expr_size (from),
3411 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3412 else
3413 emit_move_insn (to_rtx, value);
3414 preserve_temp_slots (to_rtx);
3415 free_temp_slots ();
3416 pop_temp_slots ();
3417 return want_value ? to_rtx : NULL_RTX;
3418 }
3419
3420 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3421 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3422
3423 if (to_rtx == 0)
3424 {
3425 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3426 if (GET_CODE (to_rtx) == MEM)
3427 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3428 }
3429
3430 /* Don't move directly into a return register. */
3431 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3432 {
3433 rtx temp;
3434
3435 push_temp_slots ();
3436 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3437 emit_move_insn (to_rtx, temp);
3438 preserve_temp_slots (to_rtx);
3439 free_temp_slots ();
3440 pop_temp_slots ();
3441 return want_value ? to_rtx : NULL_RTX;
3442 }
3443
3444 /* In case we are returning the contents of an object which overlaps
3445 the place the value is being stored, use a safe function when copying
3446 a value through a pointer into a structure value return block. */
3447 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3448 && current_function_returns_struct
3449 && !current_function_returns_pcc_struct)
3450 {
3451 rtx from_rtx, size;
3452
3453 push_temp_slots ();
3454 size = expr_size (from);
3455 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3456 EXPAND_MEMORY_USE_DONT);
3457
3458 /* Copy the rights of the bitmap. */
3459 if (current_function_check_memory_usage)
3460 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3461 XEXP (to_rtx, 0), ptr_mode,
3462 XEXP (from_rtx, 0), ptr_mode,
3463 convert_to_mode (TYPE_MODE (sizetype),
3464 size, TREE_UNSIGNED (sizetype)),
3465 TYPE_MODE (sizetype));
3466
3467 #ifdef TARGET_MEM_FUNCTIONS
3468 emit_library_call (memcpy_libfunc, 0,
3469 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3470 XEXP (from_rtx, 0), Pmode,
3471 convert_to_mode (TYPE_MODE (sizetype),
3472 size, TREE_UNSIGNED (sizetype)),
3473 TYPE_MODE (sizetype));
3474 #else
3475 emit_library_call (bcopy_libfunc, 0,
3476 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3477 XEXP (to_rtx, 0), Pmode,
3478 convert_to_mode (TYPE_MODE (integer_type_node),
3479 size, TREE_UNSIGNED (integer_type_node)),
3480 TYPE_MODE (integer_type_node));
3481 #endif
3482
3483 preserve_temp_slots (to_rtx);
3484 free_temp_slots ();
3485 pop_temp_slots ();
3486 return want_value ? to_rtx : NULL_RTX;
3487 }
3488
3489 /* Compute FROM and store the value in the rtx we got. */
3490
3491 push_temp_slots ();
3492 result = store_expr (from, to_rtx, want_value);
3493 preserve_temp_slots (result);
3494 free_temp_slots ();
3495 pop_temp_slots ();
3496 return want_value ? result : NULL_RTX;
3497 }
3498
3499 /* Generate code for computing expression EXP,
3500 and storing the value into TARGET.
3501 TARGET may contain a QUEUED rtx.
3502
3503 If WANT_VALUE is nonzero, return a copy of the value
3504 not in TARGET, so that we can be sure to use the proper
3505 value in a containing expression even if TARGET has something
3506 else stored in it. If possible, we copy the value through a pseudo
3507 and return that pseudo. Or, if the value is constant, we try to
3508 return the constant. In some cases, we return a pseudo
3509 copied *from* TARGET.
3510
3511 If the mode is BLKmode then we may return TARGET itself.
3512 It turns out that in BLKmode it doesn't cause a problem,
3513 because C has no operators that could combine two different
3514 assignments into the same BLKmode object with different values
3515 with no sequence point. Will other languages need this to
3516 be more thorough?
3517
3518 If WANT_VALUE is 0, we return NULL, to make sure
3519 to catch quickly any cases where the caller uses the value
3520 and fails to set WANT_VALUE. */
3521
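/* For illustration: expand_assignment above finishes with

     store_expr (from, to_rtx, want_value);

   and the special cases below (COMPOUND_EXPR, BLKmode COND_EXPR, queued
   targets, promoted SUBREG targets) either recurse into store_expr or
   store the value directly.  */
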
3522 rtx
3523 store_expr (exp, target, want_value)
3524 register tree exp;
3525 register rtx target;
3526 int want_value;
3527 {
3528 register rtx temp;
3529 int dont_return_target = 0;
3530
3531 if (TREE_CODE (exp) == COMPOUND_EXPR)
3532 {
3533 /* Perform first part of compound expression, then assign from second
3534 part. */
3535 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3536 emit_queue ();
3537 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3538 }
3539 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3540 {
3541 /* For conditional expression, get safe form of the target. Then
3542 test the condition, doing the appropriate assignment on either
3543 side. This avoids the creation of unnecessary temporaries.
3544 For non-BLKmode, it is more efficient not to do this. */
3545
3546 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3547
3548 emit_queue ();
3549 target = protect_from_queue (target, 1);
3550
3551 do_pending_stack_adjust ();
3552 NO_DEFER_POP;
3553 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3554 start_cleanup_deferral ();
3555 store_expr (TREE_OPERAND (exp, 1), target, 0);
3556 end_cleanup_deferral ();
3557 emit_queue ();
3558 emit_jump_insn (gen_jump (lab2));
3559 emit_barrier ();
3560 emit_label (lab1);
3561 start_cleanup_deferral ();
3562 store_expr (TREE_OPERAND (exp, 2), target, 0);
3563 end_cleanup_deferral ();
3564 emit_queue ();
3565 emit_label (lab2);
3566 OK_DEFER_POP;
3567
3568 return want_value ? target : NULL_RTX;
3569 }
3570 else if (queued_subexp_p (target))
3571 /* If target contains a postincrement, let's not risk
3572 using it as the place to generate the rhs. */
3573 {
3574 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3575 {
3576 /* Expand EXP into a new pseudo. */
3577 temp = gen_reg_rtx (GET_MODE (target));
3578 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3579 }
3580 else
3581 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3582
3583 /* If target is volatile, ANSI requires accessing the value
3584 *from* the target, if it is accessed. So make that happen.
3585 In no case return the target itself. */
3586 if (! MEM_VOLATILE_P (target) && want_value)
3587 dont_return_target = 1;
3588 }
3589 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3590 && GET_MODE (target) != BLKmode)
3591 /* If target is in memory and caller wants value in a register instead,
3592 arrange that. Pass TARGET as target for expand_expr so that,
3593 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3594 We know expand_expr will not use the target in that case.
3595 Don't do this if TARGET is volatile because we are supposed
3596 to write it and then read it. */
3597 {
3598 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3599 GET_MODE (target), 0);
3600 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3601 temp = copy_to_reg (temp);
3602 dont_return_target = 1;
3603 }
3604 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3605 /* If this is a scalar in a register that is stored in a wider mode
3606 than the declared mode, compute the result into its declared mode
3607 and then convert to the wider mode. Our value is the computed
3608 expression. */
3609 {
3610 /* If we don't want a value, we can do the conversion inside EXP,
3611 which will often result in some optimizations. Do the conversion
3612 in two steps: first change the signedness, if needed, then
3613 the extend. But don't do this if the type of EXP is a subtype
3614 of something else since then the conversion might involve
3615 more than just converting modes. */
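      /* For instance, if a `short' variable is kept zero-extended in an
         SImode pseudo, the conversions below let the assignment be
         computed as (int) (unsigned short) EXP, so the extension is
         folded into the expression instead of being emitted afterwards.  */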
3616 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3617 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3618 {
3619 if (TREE_UNSIGNED (TREE_TYPE (exp))
3620 != SUBREG_PROMOTED_UNSIGNED_P (target))
3621 exp
3622 = convert
3623 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3624 TREE_TYPE (exp)),
3625 exp);
3626
3627 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3628 SUBREG_PROMOTED_UNSIGNED_P (target)),
3629 exp);
3630 }
3631
3632 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3633
3634 /* If TEMP is a volatile MEM and we want a result value, make
3635 the access now so it gets done only once. Likewise if
3636 it contains TARGET. */
3637 if (GET_CODE (temp) == MEM && want_value
3638 && (MEM_VOLATILE_P (temp)
3639 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3640 temp = copy_to_reg (temp);
3641
3642 /* If TEMP is a VOIDmode constant, use convert_modes to make
3643 sure that we properly convert it. */
3644 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3645 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3646 TYPE_MODE (TREE_TYPE (exp)), temp,
3647 SUBREG_PROMOTED_UNSIGNED_P (target));
3648
3649 convert_move (SUBREG_REG (target), temp,
3650 SUBREG_PROMOTED_UNSIGNED_P (target));
3651 return want_value ? temp : NULL_RTX;
3652 }
3653 else
3654 {
3655 temp = expand_expr (exp, target, GET_MODE (target), 0);
3656 /* Return TARGET if it's a specified hardware register.
3657 If TARGET is a volatile mem ref, either return TARGET
3658 or return a reg copied *from* TARGET; ANSI requires this.
3659
3660 Otherwise, if TEMP is not TARGET, return TEMP
3661 if it is constant (for efficiency),
3662 or if we really want the correct value. */
3663 if (!(target && GET_CODE (target) == REG
3664 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3665 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3666 && ! rtx_equal_p (temp, target)
3667 && (CONSTANT_P (temp) || want_value))
3668 dont_return_target = 1;
3669 }
3670
3671 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3672 the same as that of TARGET, adjust the constant. This is needed, for
3673 example, in case it is a CONST_DOUBLE and we want only a word-sized
3674 value. */
3675 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3676 && TREE_CODE (exp) != ERROR_MARK
3677 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3678 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3679 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3680
3681 if (current_function_check_memory_usage
3682 && GET_CODE (target) == MEM
3683 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3684 {
3685 if (GET_CODE (temp) == MEM)
3686 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3687 XEXP (target, 0), ptr_mode,
3688 XEXP (temp, 0), ptr_mode,
3689 expr_size (exp), TYPE_MODE (sizetype));
3690 else
3691 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3692 XEXP (target, 0), ptr_mode,
3693 expr_size (exp), TYPE_MODE (sizetype),
3694 GEN_INT (MEMORY_USE_WO),
3695 TYPE_MODE (integer_type_node));
3696 }
3697
3698 /* If value was not generated in the target, store it there.
3699 Convert the value to TARGET's type first if necessary. */
3700 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3701 one or both of them are volatile memory refs, we have to distinguish
3702 two cases:
3703 - expand_expr has used TARGET. In this case, we must not generate
3704 another copy. This can be detected by TARGET being equal according
3705 to ==.
3706 - expand_expr has not used TARGET - that means that the source just
3707 happens to have the same RTX form. Since TEMP will have been created
3708 by expand_expr, it will compare unequal according to ==.
3709 We must generate a copy in this case, to preserve the correct number
3710 of volatile memory references. */
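  /* For example, TEMP may be literally TARGET when expand_expr
     reused it (no copy wanted), or a freshly built MEM with the
     same address that merely satisfies rtx_equal_p; only the
     second case needs the copy, so that every volatile reference
     in the source really happens.  */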
3711
3712 if ((! rtx_equal_p (temp, target)
3713 || (temp != target && (side_effects_p (temp)
3714 || side_effects_p (target))))
3715 && TREE_CODE (exp) != ERROR_MARK)
3716 {
3717 target = protect_from_queue (target, 1);
3718 if (GET_MODE (temp) != GET_MODE (target)
3719 && GET_MODE (temp) != VOIDmode)
3720 {
3721 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3722 if (dont_return_target)
3723 {
3724 /* In this case, we will return TEMP,
3725 so make sure it has the proper mode.
3726 But don't forget to store the value into TARGET. */
3727 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3728 emit_move_insn (target, temp);
3729 }
3730 else
3731 convert_move (target, temp, unsignedp);
3732 }
3733
3734 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3735 {
3736 /* Handle copying a string constant into an array.
3737 The string constant may be shorter than the array.
3738 So copy just the string's actual length, and clear the rest. */
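      /* E.g. for `char buf[8] = "hi";' the target is 8 bytes but
         TREE_STRING_LENGTH is 3 ("hi" plus the terminating null), so
         3 bytes are block-copied and the remaining 5 are cleared.  */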
3739 rtx size;
3740 rtx addr;
3741
3742 /* Get the size of the data type of the string,
3743 which is actually the size of the target. */
3744 size = expr_size (exp);
3745 if (GET_CODE (size) == CONST_INT
3746 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3747 emit_block_move (target, temp, size,
3748 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3749 else
3750 {
3751 /* Compute the size of the data to copy from the string. */
3752 tree copy_size
3753 = size_binop (MIN_EXPR,
3754 make_tree (sizetype, size),
3755 convert (sizetype,
3756 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3757 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3758 VOIDmode, 0);
3759 rtx label = 0;
3760
3761 /* Copy that much. */
3762 emit_block_move (target, temp, copy_size_rtx,
3763 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3764
3765 /* Figure out how much is left in TARGET that we have to clear.
3766 Do all calculations in ptr_mode. */
3767
3768 addr = XEXP (target, 0);
3769 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3770
3771 if (GET_CODE (copy_size_rtx) == CONST_INT)
3772 {
3773 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3774 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3775 }
3776 else
3777 {
3778 addr = force_reg (ptr_mode, addr);
3779 addr = expand_binop (ptr_mode, add_optab, addr,
3780 copy_size_rtx, NULL_RTX, 0,
3781 OPTAB_LIB_WIDEN);
3782
3783 size = expand_binop (ptr_mode, sub_optab, size,
3784 copy_size_rtx, NULL_RTX, 0,
3785 OPTAB_LIB_WIDEN);
3786
3787 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3788 GET_MODE (size), 0, 0);
3789 label = gen_label_rtx ();
3790 emit_jump_insn (gen_blt (label));
3791 }
3792
3793 if (size != const0_rtx)
3794 {
3795 /* Be sure we can write on ADDR. */
3796 if (current_function_check_memory_usage)
3797 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3798 addr, ptr_mode,
3799 size, TYPE_MODE (sizetype),
3800 GEN_INT (MEMORY_USE_WO),
3801 TYPE_MODE (integer_type_node));
3802 #ifdef TARGET_MEM_FUNCTIONS
3803 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3804 addr, ptr_mode,
3805 const0_rtx, TYPE_MODE (integer_type_node),
3806 convert_to_mode (TYPE_MODE (sizetype),
3807 size,
3808 TREE_UNSIGNED (sizetype)),
3809 TYPE_MODE (sizetype));
3810 #else
3811 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3812 addr, ptr_mode,
3813 convert_to_mode (TYPE_MODE (integer_type_node),
3814 size,
3815 TREE_UNSIGNED (integer_type_node)),
3816 TYPE_MODE (integer_type_node));
3817 #endif
3818 }
3819
3820 if (label)
3821 emit_label (label);
3822 }
3823 }
3824 /* Handle calls that return values in multiple non-contiguous locations.
3825 The Irix 6 ABI has examples of this. */
3826 else if (GET_CODE (target) == PARALLEL)
3827 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3828 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3829 else if (GET_MODE (temp) == BLKmode)
3830 emit_block_move (target, temp, expr_size (exp),
3831 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3832 else
3833 emit_move_insn (target, temp);
3834 }
3835
3836 /* If we don't want a value, return NULL_RTX. */
3837 if (! want_value)
3838 return NULL_RTX;
3839
3840 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3841 ??? The latter test doesn't seem to make sense. */
3842 else if (dont_return_target && GET_CODE (temp) != MEM)
3843 return temp;
3844
3845 /* Else copy TARGET into a register, unless it is BLKmode or a hard register. */
3846 else if (want_value && GET_MODE (target) != BLKmode
3847 && ! (GET_CODE (target) == REG
3848 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3849 return copy_to_reg (target);
3850
3851 else
3852 return target;
3853 }
3854 \f
3855 /* Return 1 if EXP just contains zeros. */
3856
3857 static int
3858 is_zeros_p (exp)
3859 tree exp;
3860 {
3861 tree elt;
3862
3863 switch (TREE_CODE (exp))
3864 {
3865 case CONVERT_EXPR:
3866 case NOP_EXPR:
3867 case NON_LVALUE_EXPR:
3868 return is_zeros_p (TREE_OPERAND (exp, 0));
3869
3870 case INTEGER_CST:
3871 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3872
3873 case COMPLEX_CST:
3874 return
3875 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3876
3877 case REAL_CST:
3878 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3879
3880 case CONSTRUCTOR:
3881 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3882 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3883 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3884 if (! is_zeros_p (TREE_VALUE (elt)))
3885 return 0;
3886
3887 return 1;
3888
3889 default:
3890 return 0;
3891 }
3892 }
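/* For example, is_zeros_p returns 1 for `0', `0.0', and the
   nested constructor `{ 0, { 0.0, 0 } }', but 0 as soon as any
   element is nonzero.  */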
3893
3894 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
3895
3896 static int
3897 mostly_zeros_p (exp)
3898 tree exp;
3899 {
3900 if (TREE_CODE (exp) == CONSTRUCTOR)
3901 {
3902 int elts = 0, zeros = 0;
3903 tree elt = CONSTRUCTOR_ELTS (exp);
3904 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3905 {
3906 /* If there are no ranges of true bits, it is all zero. */
3907 return elt == NULL_TREE;
3908 }
3909 for (; elt; elt = TREE_CHAIN (elt))
3910 {
3911 /* We do not handle the case where the index is a RANGE_EXPR,
3912 so the statistic will be somewhat inaccurate.
3913 We do make a more accurate count in store_constructor itself,
3914 and since this function is only used for nested array elements,
3915 this should be close enough. */
3916 if (mostly_zeros_p (TREE_VALUE (elt)))
3917 zeros++;
3918 elts++;
3919 }
3920
3921 return 4 * zeros >= 3 * elts;
3922 }
3923
3924 return is_zeros_p (exp);
3925 }
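/* E.g. a constructor with elements { 0, 0, 0, 5 } has zeros == 3
   and elts == 4; since 4 * 3 >= 3 * 4 it counts as mostly zero,
   whereas { 0, 0, 5, 5 } does not.  */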
3926 \f
3927 /* Helper function for store_constructor.
3928 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3929 TYPE is the type of the CONSTRUCTOR, not the element type.
3930 CLEARED is as for store_constructor.
3931
3932 This provides a recursive shortcut back to store_constructor when it isn't
3933 necessary to go through store_field. This is so that we can pass through
3934 the cleared field to let store_constructor know that we may not have to
3935 clear a substructure if the outer structure has already been cleared. */
3936
3937 static void
3938 store_constructor_field (target, bitsize, bitpos,
3939 mode, exp, type, cleared)
3940 rtx target;
3941 int bitsize, bitpos;
3942 enum machine_mode mode;
3943 tree exp, type;
3944 int cleared;
3945 {
3946 if (TREE_CODE (exp) == CONSTRUCTOR
3947 && bitpos % BITS_PER_UNIT == 0
3948 /* If we have a non-zero bitpos for a register target, then we just
3949 let store_field do the bitfield handling. This is unlikely to
3950 generate unnecessary clear instructions anyway. */
3951 && (bitpos == 0 || GET_CODE (target) == MEM))
3952 {
3953 if (bitpos != 0)
3954 target = change_address (target, VOIDmode,
3955 plus_constant (XEXP (target, 0),
3956 bitpos / BITS_PER_UNIT));
3957 store_constructor (exp, target, cleared);
3958 }
3959 else
3960 store_field (target, bitsize, bitpos, mode, exp,
3961 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3962 int_size_in_bytes (type), 0);
3963 }
3964
3965 /* Store the value of constructor EXP into the rtx TARGET.
3966 TARGET is either a REG or a MEM.
3967 CLEARED is true if TARGET is known to have been zeroed. */
3968
3969 static void
3970 store_constructor (exp, target, cleared)
3971 tree exp;
3972 rtx target;
3973 int cleared;
3974 {
3975 tree type = TREE_TYPE (exp);
3976 rtx exp_size = expr_size (exp);
3977
3978 /* We know our target cannot conflict, since safe_from_p has been called. */
3979 #if 0
3980 /* Don't try copying piece by piece into a hard register
3981 since that is vulnerable to being clobbered by EXP.
3982 Instead, construct in a pseudo register and then copy it all. */
3983 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3984 {
3985 rtx temp = gen_reg_rtx (GET_MODE (target));
3986 store_constructor (exp, temp, 0);
3987 emit_move_insn (target, temp);
3988 return;
3989 }
3990 #endif
3991
3992 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3993 || TREE_CODE (type) == QUAL_UNION_TYPE)
3994 {
3995 register tree elt;
3996
3997 /* Inform later passes that the whole union value is dead. */
3998 if (TREE_CODE (type) == UNION_TYPE
3999 || TREE_CODE (type) == QUAL_UNION_TYPE)
4000 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4001
4002 /* If we are building a static constructor into a register,
4003 set the initial value to zero so we can fold the value into
4004 a constant. But if more than one register is involved,
4005 this probably loses. */
4006 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4007 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4008 {
4009 if (! cleared)
4010 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4011
4012 cleared = 1;
4013 }
4014
4015 /* If the constructor has fewer fields than the structure
4016 or if we are initializing the structure to mostly zeros,
4017 clear the whole structure first. */
4018 else if ((list_length (CONSTRUCTOR_ELTS (exp))
4019 != list_length (TYPE_FIELDS (type)))
4020 || mostly_zeros_p (exp))
4021 {
4022 if (! cleared)
4023 clear_storage (target, expr_size (exp),
4024 TYPE_ALIGN (type) / BITS_PER_UNIT);
4025
4026 cleared = 1;
4027 }
4028 else
4029 /* Inform later passes that the old value is dead. */
4030 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
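      /* E.g. for `struct { int a, b, c, d; } s = { 1 };' the
         constructor lists one field but the type has four, so the
         whole structure is cleared first and only `a' is stored
         explicitly below; the other fields need no stores at all.  */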
4031
4032 /* Store each element of the constructor into
4033 the corresponding field of TARGET. */
4034
4035 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4036 {
4037 register tree field = TREE_PURPOSE (elt);
4038 tree value = TREE_VALUE (elt);
4039 register enum machine_mode mode;
4040 int bitsize;
4041 int bitpos = 0;
4042 int unsignedp;
4043 tree pos, constant = 0, offset = 0;
4044 rtx to_rtx = target;
4045
4046 /* Just ignore missing fields.
4047 We cleared the whole structure, above,
4048 if any fields are missing. */
4049 if (field == 0)
4050 continue;
4051
4052 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4053 continue;
4054
4055 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
4056 unsignedp = TREE_UNSIGNED (field);
4057 mode = DECL_MODE (field);
4058 if (DECL_BIT_FIELD (field))
4059 mode = VOIDmode;
4060
4061 pos = DECL_FIELD_BITPOS (field);
4062 if (TREE_CODE (pos) == INTEGER_CST)
4063 constant = pos;
4064 else if (TREE_CODE (pos) == PLUS_EXPR
4065 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4066 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
4067 else
4068 offset = pos;
4069
4070 if (constant)
4071 bitpos = TREE_INT_CST_LOW (constant);
4072
4073 if (offset)
4074 {
4075 rtx offset_rtx;
4076
4077 if (contains_placeholder_p (offset))
4078 offset = build (WITH_RECORD_EXPR, sizetype,
4079 offset, make_tree (TREE_TYPE (exp), target));
4080
4081 offset = size_binop (FLOOR_DIV_EXPR, offset,
4082 size_int (BITS_PER_UNIT));
4083
4084 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4085 if (GET_CODE (to_rtx) != MEM)
4086 abort ();
4087
4088 if (GET_MODE (offset_rtx) != ptr_mode)
4089 {
4090 #ifdef POINTERS_EXTEND_UNSIGNED
4091 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4092 #else
4093 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4094 #endif
4095 }
4096
4097 to_rtx
4098 = change_address (to_rtx, VOIDmode,
4099 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4100 force_reg (ptr_mode, offset_rtx)));
4101 }
4102 if (TREE_READONLY (field))
4103 {
4104 if (GET_CODE (to_rtx) == MEM)
4105 to_rtx = copy_rtx (to_rtx);
4106
4107 RTX_UNCHANGING_P (to_rtx) = 1;
4108 }
4109
4110 #ifdef WORD_REGISTER_OPERATIONS
4111 /* If this initializes a field that is smaller than a word, at the
4112 start of a word, try to widen it to a full word.
4113 This special case allows us to output C++ member function
4114 initializations in a form that the optimizers can understand. */
4115 if (constant
4116 && GET_CODE (target) == REG
4117 && bitsize < BITS_PER_WORD
4118 && bitpos % BITS_PER_WORD == 0
4119 && GET_MODE_CLASS (mode) == MODE_INT
4120 && TREE_CODE (value) == INTEGER_CST
4121 && GET_CODE (exp_size) == CONST_INT
4122 && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4123 {
4124 tree type = TREE_TYPE (value);
4125 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4126 {
4127 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4128 value = convert (type, value);
4129 }
4130 if (BYTES_BIG_ENDIAN)
4131 value
4132 = fold (build (LSHIFT_EXPR, type, value,
4133 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4134 bitsize = BITS_PER_WORD;
4135 mode = word_mode;
4136 }
4137 #endif
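          /* E.g. a constant `char' field at bit 0 of a structure that
             lives in a register is thus stored as a full word_mode
             constant (shifted to the top of the word on big-endian
             targets), which the optimizers handle far better than a
             bit-field insertion.  */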
4138 store_constructor_field (to_rtx, bitsize, bitpos,
4139 mode, value, type, cleared);
4140 }
4141 }
4142 else if (TREE_CODE (type) == ARRAY_TYPE)
4143 {
4144 register tree elt;
4145 register int i;
4146 int need_to_clear;
4147 tree domain = TYPE_DOMAIN (type);
4148 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4149 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4150 tree elttype = TREE_TYPE (type);
4151
4152 /* If the constructor has fewer elements than the array,
4153 clear the whole array first. Similarly if this is a
4154 static constructor of a non-BLKmode object. */
4155 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4156 need_to_clear = 1;
4157 else
4158 {
4159 HOST_WIDE_INT count = 0, zero_count = 0;
4160 need_to_clear = 0;
4161 /* This loop is a more accurate version of the loop in
4162 mostly_zeros_p (it handles RANGE_EXPR in an index).
4163 It is also needed to check for missing elements. */
4164 for (elt = CONSTRUCTOR_ELTS (exp);
4165 elt != NULL_TREE;
4166 elt = TREE_CHAIN (elt))
4167 {
4168 tree index = TREE_PURPOSE (elt);
4169 HOST_WIDE_INT this_node_count;
4170 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4171 {
4172 tree lo_index = TREE_OPERAND (index, 0);
4173 tree hi_index = TREE_OPERAND (index, 1);
4174 if (TREE_CODE (lo_index) != INTEGER_CST
4175 || TREE_CODE (hi_index) != INTEGER_CST)
4176 {
4177 need_to_clear = 1;
4178 break;
4179 }
4180 this_node_count = TREE_INT_CST_LOW (hi_index)
4181 - TREE_INT_CST_LOW (lo_index) + 1;
4182 }
4183 else
4184 this_node_count = 1;
4185 count += this_node_count;
4186 if (mostly_zeros_p (TREE_VALUE (elt)))
4187 zero_count += this_node_count;
4188 }
4189 /* Clear the entire array first if there are any missing elements,
4190 or if the incidence of zero elements is >= 75%. */
4191 if (count < maxelt - minelt + 1
4192 || 4 * zero_count >= 3 * count)
4193 need_to_clear = 1;
4194 }
4195 if (need_to_clear)
4196 {
4197 if (! cleared)
4198 clear_storage (target, expr_size (exp),
4199 TYPE_ALIGN (type) / BITS_PER_UNIT);
4200 cleared = 1;
4201 }
4202 else
4203 /* Inform later passes that the old value is dead. */
4204 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4205
4206 /* Store each element of the constructor into
4207 the corresponding element of TARGET, determined
4208 by counting the elements. */
4209 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4210 elt;
4211 elt = TREE_CHAIN (elt), i++)
4212 {
4213 register enum machine_mode mode;
4214 int bitsize;
4215 int bitpos;
4216 int unsignedp;
4217 tree value = TREE_VALUE (elt);
4218 tree index = TREE_PURPOSE (elt);
4219 rtx xtarget = target;
4220
4221 if (cleared && is_zeros_p (value))
4222 continue;
4223
4224 mode = TYPE_MODE (elttype);
4225 bitsize = GET_MODE_BITSIZE (mode);
4226 unsignedp = TREE_UNSIGNED (elttype);
4227
4228 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4229 {
4230 tree lo_index = TREE_OPERAND (index, 0);
4231 tree hi_index = TREE_OPERAND (index, 1);
4232 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4233 struct nesting *loop;
4234 HOST_WIDE_INT lo, hi, count;
4235 tree position;
4236
4237 /* If the range is constant and "small", unroll the loop. */
4238 if (TREE_CODE (lo_index) == INTEGER_CST
4239 && TREE_CODE (hi_index) == INTEGER_CST
4240 && (lo = TREE_INT_CST_LOW (lo_index),
4241 hi = TREE_INT_CST_LOW (hi_index),
4242 count = hi - lo + 1,
4243 (GET_CODE (target) != MEM
4244 || count <= 2
4245 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4246 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4247 <= 40 * 8))))
4248 {
4249 lo -= minelt; hi -= minelt;
4250 for (; lo <= hi; lo++)
4251 {
4252 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4253 store_constructor_field (target, bitsize, bitpos,
4254 mode, value, type, cleared);
4255 }
4256 }
4257 else
4258 {
4259 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4260 loop_top = gen_label_rtx ();
4261 loop_end = gen_label_rtx ();
4262
4263 unsignedp = TREE_UNSIGNED (domain);
4264
4265 index = build_decl (VAR_DECL, NULL_TREE, domain);
4266
4267 DECL_RTL (index) = index_r
4268 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4269 &unsignedp, 0));
4270
4271 if (TREE_CODE (value) == SAVE_EXPR
4272 && SAVE_EXPR_RTL (value) == 0)
4273 {
4274 /* Make sure value gets expanded once before the
4275 loop. */
4276 expand_expr (value, const0_rtx, VOIDmode, 0);
4277 emit_queue ();
4278 }
4279 store_expr (lo_index, index_r, 0);
4280 loop = expand_start_loop (0);
4281
4282 /* Assign value to element index. */
4283 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4284 size_int (BITS_PER_UNIT));
4285 position = size_binop (MULT_EXPR,
4286 size_binop (MINUS_EXPR, index,
4287 TYPE_MIN_VALUE (domain)),
4288 position);
4289 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4290 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4291 xtarget = change_address (target, mode, addr);
4292 if (TREE_CODE (value) == CONSTRUCTOR)
4293 store_constructor (value, xtarget, cleared);
4294 else
4295 store_expr (value, xtarget, 0);
4296
4297 expand_exit_loop_if_false (loop,
4298 build (LT_EXPR, integer_type_node,
4299 index, hi_index));
4300
4301 expand_increment (build (PREINCREMENT_EXPR,
4302 TREE_TYPE (index),
4303 index, integer_one_node), 0, 0);
4304 expand_end_loop ();
4305 emit_label (loop_end);
4306
4307 /* Needed by stupid register allocation, to extend the
4308 lifetime of pseudo-regs used by TARGET past the end
4309 of the loop. */
4310 emit_insn (gen_rtx_USE (GET_MODE (target), target));
4311 }
4312 }
4313 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4314 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4315 {
4316 rtx pos_rtx, addr;
4317 tree position;
4318
4319 if (index == 0)
4320 index = size_int (i);
4321
4322 if (minelt)
4323 index = size_binop (MINUS_EXPR, index,
4324 TYPE_MIN_VALUE (domain));
4325 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4326 size_int (BITS_PER_UNIT));
4327 position = size_binop (MULT_EXPR, index, position);
4328 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4329 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4330 xtarget = change_address (target, mode, addr);
4331 store_expr (value, xtarget, 0);
4332 }
4333 else
4334 {
4335 if (index != 0)
4336 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4337 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4338 else
4339 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4340 store_constructor_field (target, bitsize, bitpos,
4341 mode, value, type, cleared);
4342 }
4343 }
4344 }
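  /* As an example of the RANGE_EXPR handling above, the GNU C
     initializer `int a[8] = { [0 ... 7] = 1 };' covers, with
     32-bit ints, 8 * 32 = 256 bits <= 40 * 8, so the stores are
     unrolled inline; a much larger range is emitted as a real
     loop over a pseudo holding the index.  */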
4345 /* Set constructor assignments. */
4346 else if (TREE_CODE (type) == SET_TYPE)
4347 {
4348 tree elt = CONSTRUCTOR_ELTS (exp);
4349 int nbytes = int_size_in_bytes (type), nbits;
4350 tree domain = TYPE_DOMAIN (type);
4351 tree domain_min, domain_max, bitlength;
4352
4353 /* The default implementation strategy is to extract the constant
4354 parts of the constructor, use that to initialize the target,
4355 and then "or" in whatever non-constant ranges we need in addition.
4356
4357 If a large set is all zero or all ones, it is
4358 probably better to set it using memset (if available) or bzero.
4359 Also, if a large set has just a single range, it may be
4360 better to first clear the whole set (using bzero/memset)
4361 and then set the bits we want. */
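      /* For example, a Pascal-style `set of 0..31' has nbits == 32,
         at most 2 * BITS_PER_WORD on a 32-bit target, so its constant
         bits are assembled into words below; a set with hundreds of
         members is instead cleared wholesale and then has its ranges
         set one by one.  */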
4362
4363 /* Check for all zeros. */
4364 if (elt == NULL_TREE)
4365 {
4366 if (!cleared)
4367 clear_storage (target, expr_size (exp),
4368 TYPE_ALIGN (type) / BITS_PER_UNIT);
4369 return;
4370 }
4371
4372 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4373 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4374 bitlength = size_binop (PLUS_EXPR,
4375 size_binop (MINUS_EXPR, domain_max, domain_min),
4376 size_one_node);
4377
4378 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4379 abort ();
4380 nbits = TREE_INT_CST_LOW (bitlength);
4381
4382 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4383 are "complicated" (more than one range), initialize (the
4384 constant parts) by copying from a constant. */
4385 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4386 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4387 {
4388 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4389 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4390 char *bit_buffer = (char *) alloca (nbits);
4391 HOST_WIDE_INT word = 0;
4392 int bit_pos = 0;
4393 int ibit = 0;
4394 int offset = 0; /* In bytes from beginning of set. */
4395 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
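          /* Pack the expanded bit vector into words: e.g. with bits 1
             and 3 set and a 32-bit set_word_size on a little-endian
             target, the loop accumulates word == 0xa and stores it
             with a single move.  */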
4396 for (;;)
4397 {
4398 if (bit_buffer[ibit])
4399 {
4400 if (BYTES_BIG_ENDIAN)
4401 word |= (1 << (set_word_size - 1 - bit_pos));
4402 else
4403 word |= 1 << bit_pos;
4404 }
4405 bit_pos++; ibit++;
4406 if (bit_pos >= set_word_size || ibit == nbits)
4407 {
4408 if (word != 0 || ! cleared)
4409 {
4410 rtx datum = GEN_INT (word);
4411 rtx to_rtx;
4412 /* The assumption here is that it is safe to use
4413 XEXP if the set is multi-word, but not if
4414 it's single-word. */
4415 if (GET_CODE (target) == MEM)
4416 {
4417 to_rtx = plus_constant (XEXP (target, 0), offset);
4418 to_rtx = change_address (target, mode, to_rtx);
4419 }
4420 else if (offset == 0)
4421 to_rtx = target;
4422 else
4423 abort ();
4424 emit_move_insn (to_rtx, datum);
4425 }
4426 if (ibit == nbits)
4427 break;
4428 word = 0;
4429 bit_pos = 0;
4430 offset += set_word_size / BITS_PER_UNIT;
4431 }
4432 }
4433 }
4434 else if (!cleared)
4435 {
4436 /* Don't bother clearing storage if the set is all ones. */
4437 if (TREE_CHAIN (elt) != NULL_TREE
4438 || (TREE_PURPOSE (elt) == NULL_TREE
4439 ? nbits != 1
4440 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4441 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4442 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4443 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4444 != nbits))))
4445 clear_storage (target, expr_size (exp),
4446 TYPE_ALIGN (type) / BITS_PER_UNIT);
4447 }
4448
4449 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4450 {
4451 /* start of range of element or NULL */
4452 tree startbit = TREE_PURPOSE (elt);
4453 /* end of range of element, or element value */
4454 tree endbit = TREE_VALUE (elt);
4455 #ifdef TARGET_MEM_FUNCTIONS
4456 HOST_WIDE_INT startb, endb;
4457 #endif
4458 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4459
4460 bitlength_rtx = expand_expr (bitlength,
4461 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4462
4463 /* handle non-range tuple element like [ expr ] */
4464 if (startbit == NULL_TREE)
4465 {
4466 startbit = save_expr (endbit);
4467 endbit = startbit;
4468 }
4469 startbit = convert (sizetype, startbit);
4470 endbit = convert (sizetype, endbit);
4471 if (! integer_zerop (domain_min))
4472 {
4473 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4474 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4475 }
4476 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4477 EXPAND_CONST_ADDRESS);
4478 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4479 EXPAND_CONST_ADDRESS);
4480
4481 if (REG_P (target))
4482 {
4483 targetx = assign_stack_temp (GET_MODE (target),
4484 GET_MODE_SIZE (GET_MODE (target)),
4485 0);
4486 emit_move_insn (targetx, target);
4487 }
4488 else if (GET_CODE (target) == MEM)
4489 targetx = target;
4490 else
4491 abort ();
4492
4493 #ifdef TARGET_MEM_FUNCTIONS
4494 /* Optimization: If startbit and endbit are
4495 constants divisible by BITS_PER_UNIT,
4496 call memset instead. */
4497 if (TREE_CODE (startbit) == INTEGER_CST
4498 && TREE_CODE (endbit) == INTEGER_CST
4499 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4500 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4501 {
4502 emit_library_call (memset_libfunc, 0,
4503 VOIDmode, 3,
4504 plus_constant (XEXP (targetx, 0),
4505 startb / BITS_PER_UNIT),
4506 Pmode,
4507 constm1_rtx, TYPE_MODE (integer_type_node),
4508 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4509 TYPE_MODE (sizetype));
4510 }
4511 else
4512 #endif
4513 {
4514 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4515 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4516 bitlength_rtx, TYPE_MODE (sizetype),
4517 startbit_rtx, TYPE_MODE (sizetype),
4518 endbit_rtx, TYPE_MODE (sizetype));
4519 }
4520 if (REG_P (target))
4521 emit_move_insn (target, targetx);
4522 }
4523 }
4524
4525 else
4526 abort ();
4527 }
4528
4529 /* Store the value of EXP (an expression tree)
4530 into a subfield of TARGET which has mode MODE and occupies
4531 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4532 If MODE is VOIDmode, it means that we are storing into a bit-field.
4533
4534 If VALUE_MODE is VOIDmode, return nothing in particular.
4535 UNSIGNEDP is not used in this case.
4536
4537 Otherwise, return an rtx for the value stored. This rtx
4538 has mode VALUE_MODE if that is convenient to do.
4539 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4540
4541 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4542 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4543
4544 ALIAS_SET is the alias set for the destination. This value will
4545 (in general) be different from that for TARGET, since TARGET is a
4546 reference to the containing structure. */
4547
4548 static rtx
4549 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4550 unsignedp, align, total_size, alias_set)
4551 rtx target;
4552 int bitsize, bitpos;
4553 enum machine_mode mode;
4554 tree exp;
4555 enum machine_mode value_mode;
4556 int unsignedp;
4557 int align;
4558 int total_size;
4559 int alias_set;
4560 {
4561 HOST_WIDE_INT width_mask = 0;
4562
4563 if (TREE_CODE (exp) == ERROR_MARK)
4564 return const0_rtx;
4565
4566 if (bitsize < HOST_BITS_PER_WIDE_INT)
4567 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4568
4569 /* If we are storing into an unaligned field of an aligned union that is
4570 in a register, we may have the mode of TARGET being an integer mode but
4571 MODE == BLKmode. In that case, get an aligned object whose size and
4572 alignment are the same as TARGET and store TARGET into it (we can avoid
4573 the store if the field being stored is the entire width of TARGET). Then
4574 call ourselves recursively to store the field into a BLKmode version of
4575 that object. Finally, load from the object into TARGET. This is not
4576 very efficient in general, but should only be slightly more expensive
4577 than the otherwise-required unaligned accesses. Perhaps this can be
4578 cleaned up later. */
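  /* Schematically, the workaround below is

         object     = TARGET;        (integer-mode copy, if needed)
         blk_object = <the field>;   (recursive BLKmode store_field)
         TARGET     = object;        (copy back)

     where OBJECT and BLK_OBJECT are the same stack temporary viewed
     in TARGET's mode and in BLKmode respectively.  */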
4579
4580 if (mode == BLKmode
4581 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4582 {
4583 rtx object = assign_stack_temp (GET_MODE (target),
4584 GET_MODE_SIZE (GET_MODE (target)), 0);
4585 rtx blk_object = copy_rtx (object);
4586
4587 MEM_SET_IN_STRUCT_P (object, 1);
4588 MEM_SET_IN_STRUCT_P (blk_object, 1);
4589 PUT_MODE (blk_object, BLKmode);
4590
4591 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4592 emit_move_insn (object, target);
4593
4594 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4595 align, total_size, alias_set);
4596
4597 /* Even though we aren't returning target, we need to
4598 give it the updated value. */
4599 emit_move_insn (target, object);
4600
4601 return blk_object;
4602 }
4603
4604 /* If the structure is in a register or if the component
4605 is a bit field, we cannot use addressing to access it.
4606 Use bit-field techniques or SUBREG to store in it. */
4607
4608 if (mode == VOIDmode
4609 || (mode != BLKmode && ! direct_store[(int) mode])
4610 || GET_CODE (target) == REG
4611 || GET_CODE (target) == SUBREG
4612 /* If the field isn't aligned enough to store as an ordinary memref,
4613 store it as a bit field. */
4614 || (SLOW_UNALIGNED_ACCESS
4615 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4616 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4617 {
4618 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4619
4620 /* If BITSIZE is narrower than the size of the type of EXP
4621 we will be narrowing TEMP. Normally, what's wanted are the
4622 low-order bits. However, if EXP's type is a record and this is a
4623 big-endian machine, we want the upper BITSIZE bits. */
4624 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4625 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4626 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4627 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4628 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4629 - bitsize),
4630 temp, 1);
4631
4632 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4633 MODE. */
4634 if (mode != VOIDmode && mode != BLKmode
4635 && mode != TYPE_MODE (TREE_TYPE (exp)))
4636 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4637
4638 /* If the modes of TARGET and TEMP are both BLKmode, both
4639 must be in memory and BITPOS must be aligned on a byte
4640 boundary. If so, we simply do a block copy. */
4641 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4642 {
4643 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4644 || bitpos % BITS_PER_UNIT != 0)
4645 abort ();
4646
4647 target = change_address (target, VOIDmode,
4648 plus_constant (XEXP (target, 0),
4649 bitpos / BITS_PER_UNIT));
4650
4651 emit_block_move (target, temp,
4652 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4653 / BITS_PER_UNIT),
4654 1);
4655
4656 return value_mode == VOIDmode ? const0_rtx : target;
4657 }
4658
4659 /* Store the value in the bitfield. */
4660 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4661 if (value_mode != VOIDmode)
4662 {
4663 /* The caller wants an rtx for the value. */
4664 /* If possible, avoid refetching from the bitfield itself. */
4665 if (width_mask != 0
4666 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4667 {
4668 tree count;
4669 enum machine_mode tmode;
4670
4671 if (unsignedp)
4672 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4673 tmode = GET_MODE (temp);
4674 if (tmode == VOIDmode)
4675 tmode = value_mode;
4676 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4677 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4678 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4679 }
4680 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4681 NULL_RTX, value_mode, 0, align,
4682 total_size);
4683 }
4684 return const0_rtx;
4685 }
4686 else
4687 {
4688 rtx addr = XEXP (target, 0);
4689 rtx to_rtx;
4690
4691 /* If a value is wanted, it must be the lhs;
4692 so make the address stable for multiple use. */
4693
4694 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4695 && ! CONSTANT_ADDRESS_P (addr)
4696 /* A frame-pointer reference is already stable. */
4697 && ! (GET_CODE (addr) == PLUS
4698 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4699 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4700 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4701 addr = copy_to_reg (addr);
4702
4703 /* Now build a reference to just the desired component. */
4704
4705 to_rtx = copy_rtx (change_address (target, mode,
4706 plus_constant (addr,
4707 (bitpos
4708 / BITS_PER_UNIT))));
4709 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4710 MEM_ALIAS_SET (to_rtx) = alias_set;
4711
4712 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4713 }
4714 }
4715 \f
4716 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4717 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4718 ARRAY_REFs and find the ultimate containing object, which we return.
4719
4720 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4721 bit position, and *PUNSIGNEDP to the signedness of the field.
4722 If the position of the field is variable, we store a tree
4723 giving the variable offset (in units) in *POFFSET.
4724 This offset is in addition to the bit position.
4725 If the position is not variable, we store 0 in *POFFSET.
4726 We set *PALIGNMENT to the alignment in bytes of the address that will be
4727 computed. This is the alignment of the thing we return if *POFFSET
4728 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4729
4730 If any of the extraction expressions is volatile,
4731 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4732
4733 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4734 is a mode that can be used to access the field. In that case, *PBITSIZE
4735 is redundant.
4736
4737 If the field describes a variable-sized object, *PMODE is set to
4738 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4739 this case, but the address of the object can be found. */
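/* For example, for `s.a.b[i]' this walks through the ARRAY_REF
   and both COMPONENT_REFs and returns the VAR_DECL for `s', with
   *PBITPOS holding the constant part of the offset in bits and
   *POFFSET the tree for the variable part contributed by the
   index `i'.  */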
4740
4741 tree
4742 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4743 punsignedp, pvolatilep, palignment)
4744 tree exp;
4745 int *pbitsize;
4746 int *pbitpos;
4747 tree *poffset;
4748 enum machine_mode *pmode;
4749 int *punsignedp;
4750 int *pvolatilep;
4751 int *palignment;
4752 {
4753 tree orig_exp = exp;
4754 tree size_tree = 0;
4755 enum machine_mode mode = VOIDmode;
4756 tree offset = integer_zero_node;
4757 unsigned int alignment = BIGGEST_ALIGNMENT;
4758
4759 if (TREE_CODE (exp) == COMPONENT_REF)
4760 {
4761 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4762 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4763 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4764 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4765 }
4766 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4767 {
4768 size_tree = TREE_OPERAND (exp, 1);
4769 *punsignedp = TREE_UNSIGNED (exp);
4770 }
4771 else
4772 {
4773 mode = TYPE_MODE (TREE_TYPE (exp));
4774 if (mode == BLKmode)
4775 size_tree = TYPE_SIZE (TREE_TYPE (exp));
4776
4777 *pbitsize = GET_MODE_BITSIZE (mode);
4778 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4779 }
4780
4781 if (size_tree)
4782 {
4783 if (TREE_CODE (size_tree) != INTEGER_CST)
4784 mode = BLKmode, *pbitsize = -1;
4785 else
4786 *pbitsize = TREE_INT_CST_LOW (size_tree);
4787 }
4788
4789 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4790 and find the ultimate containing object. */
4791
4792 *pbitpos = 0;
4793
4794 while (1)
4795 {
4796 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4797 {
4798 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4799 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4800 : TREE_OPERAND (exp, 2));
4801 tree constant = integer_zero_node, var = pos;
4802
4803 /* If this field hasn't been filled in yet, don't go
4804 past it. This should only happen when folding expressions
4805 made during type construction. */
4806 if (pos == 0)
4807 break;
4808
4809 /* Assume here that the offset is a multiple of a unit.
4810 If not, there should be an explicitly added constant. */
4811 if (TREE_CODE (pos) == PLUS_EXPR
4812 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4813 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4814 else if (TREE_CODE (pos) == INTEGER_CST)
4815 constant = pos, var = integer_zero_node;
4816
4817 *pbitpos += TREE_INT_CST_LOW (constant);
4818 offset = size_binop (PLUS_EXPR, offset,
4819 size_binop (EXACT_DIV_EXPR, var,
4820 size_int (BITS_PER_UNIT)));
4821 }
4822
4823 else if (TREE_CODE (exp) == ARRAY_REF)
4824 {
4825 /* This code is based on the code in case ARRAY_REF in expand_expr
4826 below. We assume here that the size of an array element is
4827 always an integral multiple of BITS_PER_UNIT. */
4828
4829 tree index = TREE_OPERAND (exp, 1);
4830 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4831 tree low_bound
4832 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4833 tree index_type = TREE_TYPE (index);
4834 tree xindex;
4835
4836 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4837 {
4838 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4839 index);
4840 index_type = TREE_TYPE (index);
4841 }
4842
4843 /* Optimize the special-case of a zero lower bound.
4844
4845 We convert the low_bound to sizetype to avoid some problems
4846 with constant folding. (E.g. suppose the lower bound is 1,
4847 and its mode is QI. Without the conversion, (ARRAY
4848 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4849 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4850
4851 But sizetype isn't quite right either (especially if
4852 the lowbound is negative). FIXME */
4853
4854 if (! integer_zerop (low_bound))
4855 index = fold (build (MINUS_EXPR, index_type, index,
4856 convert (sizetype, low_bound)));
4857
4858 if (TREE_CODE (index) == INTEGER_CST)
4859 {
4860 index = convert (sbitsizetype, index);
4861 index_type = TREE_TYPE (index);
4862 }
4863
4864 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
4865 convert (sbitsizetype,
4866 TYPE_SIZE (TREE_TYPE (exp)))));
4867
4868 if (TREE_CODE (xindex) == INTEGER_CST
4869 && TREE_INT_CST_HIGH (xindex) == 0)
4870 *pbitpos += TREE_INT_CST_LOW (xindex);
4871 else
4872 {
4873 /* Either the bit offset calculated above is not constant, or
4874 it overflowed. In either case, redo the multiplication
4875 against the size in units. This is especially important
4876 in the non-constant case to avoid a division at runtime. */
4877 xindex = fold (build (MULT_EXPR, ssizetype, index,
4878 convert (ssizetype,
4879 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
4880
4881 if (contains_placeholder_p (xindex))
4882 xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
4883
4884 offset = size_binop (PLUS_EXPR, offset, xindex);
4885 }
4886 }
4887 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4888 && ! ((TREE_CODE (exp) == NOP_EXPR
4889 || TREE_CODE (exp) == CONVERT_EXPR)
4890 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4891 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4892 != UNION_TYPE))
4893 && (TYPE_MODE (TREE_TYPE (exp))
4894 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4895 break;
4896
4897 /* If any reference in the chain is volatile, the effect is volatile. */
4898 if (TREE_THIS_VOLATILE (exp))
4899 *pvolatilep = 1;
4900
4901 /* If the offset is non-constant already, then we can't assume any
4902 alignment more than the alignment here. */
4903 if (! integer_zerop (offset))
4904 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4905
4906 exp = TREE_OPERAND (exp, 0);
4907 }
4908
4909 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4910 alignment = MIN (alignment, DECL_ALIGN (exp));
4911 else if (TREE_TYPE (exp) != 0)
4912 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4913
4914 if (integer_zerop (offset))
4915 offset = 0;
4916
4917 if (offset != 0 && contains_placeholder_p (offset))
4918 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4919
4920 *pmode = mode;
4921 *poffset = offset;
4922 *palignment = alignment / BITS_PER_UNIT;
4923 return exp;
4924 }
4925
4926 /* Subroutine of expand_expr: compute memory_usage from modifier. */
4927 static enum memory_use_mode
4928 get_memory_usage_from_modifier (modifier)
4929 enum expand_modifier modifier;
4930 {
4931 switch (modifier)
4932 {
4933 case EXPAND_NORMAL:
4934 case EXPAND_SUM:
4935 return MEMORY_USE_RO;
4936 break;
4937 case EXPAND_MEMORY_USE_WO:
4938 return MEMORY_USE_WO;
4939 break;
4940 case EXPAND_MEMORY_USE_RW:
4941 return MEMORY_USE_RW;
4942 break;
4943 case EXPAND_MEMORY_USE_DONT:
4944 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
4945 MEMORY_USE_DONT, because they are modifiers to a call of
4946 expand_expr in the ADDR_EXPR case of expand_expr. */
4947 case EXPAND_CONST_ADDRESS:
4948 case EXPAND_INITIALIZER:
4949 return MEMORY_USE_DONT;
4950 case EXPAND_MEMORY_USE_BAD:
4951 default:
4952 abort ();
4953 }
4954 }
4955 \f
4956 /* Given an rtx VALUE that may contain additions and multiplications,
4957 return an equivalent value that just refers to a register or memory.
4958 This is done by generating instructions to perform the arithmetic
4959 and returning a pseudo-register containing the value.
4960
4961 The returned value may be a REG, SUBREG, MEM or constant. */
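/* E.g. given (plus:SI (reg:SI 100) (const_int 4)), force_operand
   emits the addition and returns the pseudo holding the sum; a
   value that is already a register, memory reference or constant
   is returned unchanged.  */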
4962
4963 rtx
4964 force_operand (value, target)
4965 rtx value, target;
4966 {
4967 register optab binoptab = 0;
4968 /* Use a temporary to force order of execution of calls to
4969 `force_operand'. */
4970 rtx tmp;
4971 register rtx op2;
4972 /* Use subtarget as the target for operand 0 of a binary operation. */
4973 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4974
4975 /* Check for a PIC address load. */
4976 if (flag_pic
4977 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
4978 && XEXP (value, 0) == pic_offset_table_rtx
4979 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
4980 || GET_CODE (XEXP (value, 1)) == LABEL_REF
4981 || GET_CODE (XEXP (value, 1)) == CONST))
4982 {
4983 if (!subtarget)
4984 subtarget = gen_reg_rtx (GET_MODE (value));
4985 emit_move_insn (subtarget, value);
4986 return subtarget;
4987 }
4988
4989 if (GET_CODE (value) == PLUS)
4990 binoptab = add_optab;
4991 else if (GET_CODE (value) == MINUS)
4992 binoptab = sub_optab;
4993 else if (GET_CODE (value) == MULT)
4994 {
4995 op2 = XEXP (value, 1);
4996 if (!CONSTANT_P (op2)
4997 && !(GET_CODE (op2) == REG && op2 != subtarget))
4998 subtarget = 0;
4999 tmp = force_operand (XEXP (value, 0), subtarget);
5000 return expand_mult (GET_MODE (value), tmp,
5001 force_operand (op2, NULL_RTX),
5002 target, 0);
5003 }
5004
5005 if (binoptab)
5006 {
5007 op2 = XEXP (value, 1);
5008 if (!CONSTANT_P (op2)
5009 && !(GET_CODE (op2) == REG && op2 != subtarget))
5010 subtarget = 0;
5011 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5012 {
5013 binoptab = add_optab;
5014 op2 = negate_rtx (GET_MODE (value), op2);
5015 }
5016
5017 /* Check for an addition with OP2 a constant integer and our first
5018 operand a PLUS of a virtual register and something else. In that
5019 case, we want to emit the sum of the virtual register and the
5020 constant first and then add the other value. This allows virtual
5021 register instantiation to simply modify the constant rather than
5022 creating another one around this addition. */
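      /* That is, for (plus (plus (virtual-stack-vars) (reg)) (const_int 8))
         we emit virtual-stack-vars + 8 first and then add the register,
         so instantiation can fold the 8 into the frame offset instead
         of emitting a separate add.  */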
5023 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5024 && GET_CODE (XEXP (value, 0)) == PLUS
5025 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5026 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5027 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5028 {
5029 rtx temp = expand_binop (GET_MODE (value), binoptab,
5030 XEXP (XEXP (value, 0), 0), op2,
5031 subtarget, 0, OPTAB_LIB_WIDEN);
5032 return expand_binop (GET_MODE (value), binoptab, temp,
5033 force_operand (XEXP (XEXP (value, 0), 1), 0),
5034 target, 0, OPTAB_LIB_WIDEN);
5035 }
5036
5037 tmp = force_operand (XEXP (value, 0), subtarget);
5038 return expand_binop (GET_MODE (value), binoptab, tmp,
5039 force_operand (op2, NULL_RTX),
5040 target, 0, OPTAB_LIB_WIDEN);
5041 /* We give UNSIGNEDP = 0 to expand_binop
5042 because the only operations we are expanding here are signed ones. */
5043 }
5044 return value;
5045 }
5046 \f
5047 /* Subroutine of expand_expr:
5048 save the non-copied parts (LIST) of an expr (LHS), and return a list
5049 which can restore these values to their previous values,
5050 should something modify their storage. */
5051
5052 static tree
5053 save_noncopied_parts (lhs, list)
5054 tree lhs;
5055 tree list;
5056 {
5057 tree tail;
5058 tree parts = 0;
5059
5060 for (tail = list; tail; tail = TREE_CHAIN (tail))
5061 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5062 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5063 else
5064 {
5065 tree part = TREE_VALUE (tail);
5066 tree part_type = TREE_TYPE (part);
5067 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5068 rtx target = assign_temp (part_type, 0, 1, 1);
5069 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5070 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5071 parts = tree_cons (to_be_saved,
5072 build (RTL_EXPR, part_type, NULL_TREE,
5073 (tree) target),
5074 parts);
5075 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5076 }
5077 return parts;
5078 }
5079
5080 /* Subroutine of expand_expr:
5081 record the non-copied parts (LIST) of an expr (LHS), and return a list
5082 which specifies the initial values of these parts. */
5083
5084 static tree
5085 init_noncopied_parts (lhs, list)
5086 tree lhs;
5087 tree list;
5088 {
5089 tree tail;
5090 tree parts = 0;
5091
5092 for (tail = list; tail; tail = TREE_CHAIN (tail))
5093 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5094 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5095 else
5096 {
5097 tree part = TREE_VALUE (tail);
5098 tree part_type = TREE_TYPE (part);
5099 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5100 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5101 }
5102 return parts;
5103 }
5104
5105 /* Subroutine of expand_expr: return nonzero iff there is no way that
5106 EXP can reference X, which is being modified. TOP_P is nonzero if this
5107 call is going to be used to determine whether we need a temporary
5108 for EXP, as opposed to a recursive call to this function.
5109
5110 It is always safe for this routine to return zero since it merely
5111 searches for optimization opportunities. */
5112
5113 static int
5114 safe_from_p (x, exp, top_p)
5115 rtx x;
5116 tree exp;
5117 int top_p;
5118 {
5119 rtx exp_rtl = 0;
5120 int i, nops;
5121 static int save_expr_count;
5122 static int save_expr_size = 0;
5123 static tree *save_expr_rewritten;
5124 static tree save_expr_trees[256];
5125
5126 if (x == 0
5127 /* If EXP has varying size, we MUST use a target since we currently
5128 have no way of allocating temporaries of variable size
5129 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5130 So we assume here that something at a higher level has prevented a
5131 clash. This is somewhat bogus, but the best we can do. Only
5132 do this when X is BLKmode and when we are at the top level. */
5133 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5134 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5135 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5136 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5137 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5138 != INTEGER_CST)
5139 && GET_MODE (x) == BLKmode))
5140 return 1;
5141
5142 if (top_p && save_expr_size == 0)
5143 {
5144 int rtn;
5145
5146 save_expr_count = 0;
5147 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5148 save_expr_rewritten = &save_expr_trees[0];
5149
5150 rtn = safe_from_p (x, exp, 1);
5151
5152 for (i = 0; i < save_expr_count; ++i)
5153 {
5154 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5155 abort ();
5156 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5157 }
5158
5159 save_expr_size = 0;
5160
5161 return rtn;
5162 }
5163
5164 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5165 find the underlying pseudo. */
5166 if (GET_CODE (x) == SUBREG)
5167 {
5168 x = SUBREG_REG (x);
5169 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5170 return 0;
5171 }
5172
5173 /* If X is a location in the outgoing argument area, it is always safe. */
5174 if (GET_CODE (x) == MEM
5175 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5176 || (GET_CODE (XEXP (x, 0)) == PLUS
5177 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5178 return 1;
5179
5180 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5181 {
5182 case 'd':
5183 exp_rtl = DECL_RTL (exp);
5184 break;
5185
5186 case 'c':
5187 return 1;
5188
5189 case 'x':
5190 if (TREE_CODE (exp) == TREE_LIST)
5191 return ((TREE_VALUE (exp) == 0
5192 || safe_from_p (x, TREE_VALUE (exp), 0))
5193 && (TREE_CHAIN (exp) == 0
5194 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5195 else if (TREE_CODE (exp) == ERROR_MARK)
5196 return 1; /* An already-visited SAVE_EXPR? */
5197 else
5198 return 0;
5199
5200 case '1':
5201 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5202
5203 case '2':
5204 case '<':
5205 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5206 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5207
5208 case 'e':
5209 case 'r':
5210 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5211 the expression. If it is set, we conflict iff we are that rtx or
5212 both are in memory. Otherwise, we check all operands of the
5213 expression recursively. */
5214
5215 switch (TREE_CODE (exp))
5216 {
5217 case ADDR_EXPR:
5218 return (staticp (TREE_OPERAND (exp, 0))
5219 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5220 || TREE_STATIC (exp));
5221
5222 case INDIRECT_REF:
5223 if (GET_CODE (x) == MEM)
5224 return 0;
5225 break;
5226
5227 case CALL_EXPR:
5228 exp_rtl = CALL_EXPR_RTL (exp);
5229 if (exp_rtl == 0)
5230 {
5231 /* Assume that the call will clobber all hard registers and
5232 all of memory. */
5233 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5234 || GET_CODE (x) == MEM)
5235 return 0;
5236 }
5237
5238 break;
5239
5240 case RTL_EXPR:
5241 /* If a sequence exists, we would have to scan every instruction
5242 in the sequence to see if it was safe. This is probably not
5243 worthwhile. */
5244 if (RTL_EXPR_SEQUENCE (exp))
5245 return 0;
5246
5247 exp_rtl = RTL_EXPR_RTL (exp);
5248 break;
5249
5250 case WITH_CLEANUP_EXPR:
5251 exp_rtl = RTL_EXPR_RTL (exp);
5252 break;
5253
5254 case CLEANUP_POINT_EXPR:
5255 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5256
5257 case SAVE_EXPR:
5258 exp_rtl = SAVE_EXPR_RTL (exp);
5259 if (exp_rtl)
5260 break;
5261
5262 /* This SAVE_EXPR might appear many times in the top-level
5263 safe_from_p() expression, and if it has a complex
5264 subexpression, examining it multiple times could result
5265 in a combinatorial explosion. E.g. on an Alpha
5266 running at least 200MHz, a Fortran test case compiled with
5267 optimization took about 28 minutes to compile -- even though
5268 it was only a few lines long, and the complicated line causing
5269 so much time to be spent in the earlier version of safe_from_p()
5270 had only 293 or so unique nodes.
5271
5272 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5273 where it is so we can turn it back in the top-level safe_from_p()
5274 when we're done. */
5275
5276 /* For now, don't bother re-sizing the array. */
5277 if (save_expr_count >= save_expr_size)
5278 return 0;
5279 save_expr_rewritten[save_expr_count++] = exp;
5280
5281 nops = tree_code_length[(int) SAVE_EXPR];
5282 for (i = 0; i < nops; i++)
5283 {
5284 tree operand = TREE_OPERAND (exp, i);
5285 if (operand == NULL_TREE)
5286 continue;
5287 TREE_SET_CODE (exp, ERROR_MARK);
5288 if (!safe_from_p (x, operand, 0))
5289 return 0;
5290 TREE_SET_CODE (exp, SAVE_EXPR);
5291 }
5292 TREE_SET_CODE (exp, ERROR_MARK);
5293 return 1;
5294
5295 case BIND_EXPR:
5296 /* The only operand we look at is operand 1. The rest aren't
5297 part of the expression. */
5298 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5299
5300 case METHOD_CALL_EXPR:
5301 /* This takes an rtx argument, but shouldn't appear here. */
5302 abort ();
5303
5304 default:
5305 break;
5306 }
5307
5308 /* If we have an rtx, we do not need to scan our operands. */
5309 if (exp_rtl)
5310 break;
5311
5312 nops = tree_code_length[(int) TREE_CODE (exp)];
5313 for (i = 0; i < nops; i++)
5314 if (TREE_OPERAND (exp, i) != 0
5315 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5316 return 0;
5317 }
5318
5319 /* If we have an rtl, find any enclosed object. Then see if we conflict
5320 with it. */
5321 if (exp_rtl)
5322 {
5323 if (GET_CODE (exp_rtl) == SUBREG)
5324 {
5325 exp_rtl = SUBREG_REG (exp_rtl);
5326 if (GET_CODE (exp_rtl) == REG
5327 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5328 return 0;
5329 }
5330
5331 /* If the rtl is X, then it is not safe. Otherwise, it is safe unless
5332 both are memory and EXP is not readonly. */
5333 return ! (rtx_equal_p (x, exp_rtl)
5334 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5335 && ! TREE_READONLY (exp)));
5336 }
5337
5338 /* If we reach here, it is safe. */
5339 return 1;
5340 }
5341
5342 /* Subroutine of expand_expr: return nonzero iff EXP is an
5343 expression whose type is statically determinable. */
5344
5345 static int
5346 fixed_type_p (exp)
5347 tree exp;
5348 {
5349 if (TREE_CODE (exp) == PARM_DECL
5350 || TREE_CODE (exp) == VAR_DECL
5351 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5352 || TREE_CODE (exp) == COMPONENT_REF
5353 || TREE_CODE (exp) == ARRAY_REF)
5354 return 1;
5355 return 0;
5356 }
5357
5358 /* Subroutine of expand_expr: return rtx if EXP is a
5359 variable or parameter; else return 0. */
5360
5361 static rtx
5362 var_rtx (exp)
5363 tree exp;
5364 {
5365 STRIP_NOPS (exp);
5366 switch (TREE_CODE (exp))
5367 {
5368 case PARM_DECL:
5369 case VAR_DECL:
5370 return DECL_RTL (exp);
5371 default:
5372 return 0;
5373 }
5374 }
5375
5376 #ifdef MAX_INTEGER_COMPUTATION_MODE
5377 void
5378 check_max_integer_computation_mode (exp)
5379 tree exp;
5380 {
5381 enum tree_code code = TREE_CODE (exp);
5382 enum machine_mode mode;
5383
5384 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5385 if (code == NOP_EXPR
5386 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5387 return;
5388
5389 /* First check the type of the overall operation. We need only look at
5390 unary, binary and relational operations. */
5391 if (TREE_CODE_CLASS (code) == '1'
5392 || TREE_CODE_CLASS (code) == '2'
5393 || TREE_CODE_CLASS (code) == '<')
5394 {
5395 mode = TYPE_MODE (TREE_TYPE (exp));
5396 if (GET_MODE_CLASS (mode) == MODE_INT
5397 && mode > MAX_INTEGER_COMPUTATION_MODE)
5398 fatal ("unsupported wide integer operation");
5399 }
5400
5401 /* Check operand of a unary op. */
5402 if (TREE_CODE_CLASS (code) == '1')
5403 {
5404 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5405 if (GET_MODE_CLASS (mode) == MODE_INT
5406 && mode > MAX_INTEGER_COMPUTATION_MODE)
5407 fatal ("unsupported wide integer operation");
5408 }
5409
5410 /* Check operands of a binary/comparison op. */
5411 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5412 {
5413 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5414 if (GET_MODE_CLASS (mode) == MODE_INT
5415 && mode > MAX_INTEGER_COMPUTATION_MODE)
5416 fatal ("unsupported wide integer operation");
5417
5418 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5419 if (GET_MODE_CLASS (mode) == MODE_INT
5420 && mode > MAX_INTEGER_COMPUTATION_MODE)
5421 fatal ("unsupported wide integer operation");
5422 }
5423 }
5424 #endif
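/* Editor's sketch (hypothetical target values): if a target's tm.h says

       #define MAX_INTEGER_COMPUTATION_MODE SImode

   then expanding a DImode addition such as

       long long a, b;   ...a + b...

   reaches the binary-op check above with mode == DImode; DImode compares
   greater than SImode in the machine_mode enumeration, so the compiler
   stops with "unsupported wide integer operation" instead of emitting
   insns the target cannot handle.  */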
5425
5426 \f
5427 /* expand_expr: generate code for computing expression EXP.
5428 An rtx for the computed value is returned. The value is never null.
5429 In the case of a void EXP, const0_rtx is returned.
5430
5431 The value may be stored in TARGET if TARGET is nonzero.
5432 TARGET is just a suggestion; callers must assume that
5433 the rtx returned may not be the same as TARGET.
5434
5435 If TARGET is CONST0_RTX, it means that the value will be ignored.
5436
5437 If TMODE is not VOIDmode, it suggests generating the
5438 result in mode TMODE. But this is done only when convenient.
5439 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5440 TMODE is just a suggestion; callers must assume that
5441 the rtx returned may not have mode TMODE.
5442
5443 Note that TARGET may have neither TMODE nor MODE. In that case, it
5444 probably will not be used.
5445
5446 If MODIFIER is EXPAND_SUM then when EXP is an addition
5447 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5448 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5449 products as above, or REG or MEM, or constant.
5450 Ordinarily in such cases we would output mul or add instructions
5451 and then return a pseudo reg containing the sum.
5452
5453 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5454 it also marks a label as absolutely required (it can't be dead).
5455 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5456 This is used for outputting expressions used in initializers.
5457
5458 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5459 with a constant address even if that address is not normally legitimate.
5460 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
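/* Editor's sketch of the EXPAND_SUM contract (modes and names here are
   illustrative, not taken from a particular target): expanding the
   address arithmetic for ARR[I], where ARR is a global array of 4-byte
   elements, may legitimately return the unreduced form

       (plus:SI (mult:SI (reg:SI i) (const_int 4))
                (symbol_ref:SI "ARR"))

   rather than forcing the sum into a pseudo, so that the caller
   (typically memory_address) can fold it into an addressing mode.  */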
5461
5462 rtx
5463 expand_expr (exp, target, tmode, modifier)
5464 register tree exp;
5465 rtx target;
5466 enum machine_mode tmode;
5467 enum expand_modifier modifier;
5468 {
5469 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
5470 This is static so it will be accessible to our recursive callees. */
5471 static tree placeholder_list = 0;
5472 register rtx op0, op1, temp;
5473 tree type = TREE_TYPE (exp);
5474 int unsignedp = TREE_UNSIGNED (type);
5475 register enum machine_mode mode = TYPE_MODE (type);
5476 register enum tree_code code = TREE_CODE (exp);
5477 optab this_optab;
5478 /* Use subtarget as the target for operand 0 of a binary operation. */
5479 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5480 rtx original_target = target;
5481 int ignore = (target == const0_rtx
5482 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5483 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5484 || code == COND_EXPR)
5485 && TREE_CODE (type) == VOID_TYPE));
5486 tree context;
5487 /* Used by check-memory-usage to make modifier read-only. */
5488 enum expand_modifier ro_modifier;
5489
5490 /* Make a read-only version of the modifier. */
5491 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5492 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5493 ro_modifier = modifier;
5494 else
5495 ro_modifier = EXPAND_NORMAL;
5496
5497 /* Don't use hard regs as subtargets, because the combiner
5498 can only handle pseudo regs. */
5499 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5500 subtarget = 0;
5501 /* Avoid subtargets inside loops,
5502 since they hide some invariant expressions. */
5503 if (preserve_subexpressions_p ())
5504 subtarget = 0;
5505
5506 /* If we are going to ignore this result, we need only do something
5507 if there is a side-effect somewhere in the expression. If there
5508 is, short-circuit the most common cases here. Note that we must
5509 not call expand_expr with anything but const0_rtx in case this
5510 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5511
5512 if (ignore)
5513 {
5514 if (! TREE_SIDE_EFFECTS (exp))
5515 return const0_rtx;
5516
5517 /* Ensure we reference a volatile object even if value is ignored. */
5518 if (TREE_THIS_VOLATILE (exp)
5519 && TREE_CODE (exp) != FUNCTION_DECL
5520 && mode != VOIDmode && mode != BLKmode)
5521 {
5522 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5523 if (GET_CODE (temp) == MEM)
5524 temp = copy_to_reg (temp);
5525 return const0_rtx;
5526 }
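/* Editor's example: given "volatile int *p;", the statement "*p;"
   produces a value that is dead, but the load must still happen;
   that is why we expand EXP above and copy a MEM result to a
   register before returning const0_rtx.  */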
5527
5528 if (TREE_CODE_CLASS (code) == '1')
5529 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5530 VOIDmode, ro_modifier);
5531 else if (TREE_CODE_CLASS (code) == '2'
5532 || TREE_CODE_CLASS (code) == '<')
5533 {
5534 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5535 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5536 return const0_rtx;
5537 }
5538 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5539 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5540 /* If the second operand has no side effects, just evaluate
5541 the first. */
5542 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5543 VOIDmode, ro_modifier);
5544
5545 target = 0;
5546 }
5547
5548 #ifdef MAX_INTEGER_COMPUTATION_MODE
5549 if (target
5550 && TREE_CODE (exp) != INTEGER_CST
5551 && TREE_CODE (exp) != PARM_DECL
5552 && TREE_CODE (exp) != ARRAY_REF
5553 && TREE_CODE (exp) != COMPONENT_REF
5554 && TREE_CODE (exp) != BIT_FIELD_REF
5555 && TREE_CODE (exp) != INDIRECT_REF
5556 && TREE_CODE (exp) != VAR_DECL)
5557 {
5558 enum machine_mode mode = GET_MODE (target);
5559
5560 if (GET_MODE_CLASS (mode) == MODE_INT
5561 && mode > MAX_INTEGER_COMPUTATION_MODE)
5562 fatal ("unsupported wide integer operation");
5563 }
5564
5565 if (TREE_CODE (exp) != INTEGER_CST
5566 && TREE_CODE (exp) != PARM_DECL
5567 && TREE_CODE (exp) != ARRAY_REF
5568 && TREE_CODE (exp) != COMPONENT_REF
5569 && TREE_CODE (exp) != BIT_FIELD_REF
5570 && TREE_CODE (exp) != INDIRECT_REF
5571 && TREE_CODE (exp) != VAR_DECL
5572 && GET_MODE_CLASS (tmode) == MODE_INT
5573 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5574 fatal ("unsupported wide integer operation");
5575
5576 check_max_integer_computation_mode (exp);
5577 #endif
5578
5579 /* If will do cse, generate all results into pseudo registers
5580 since 1) that allows cse to find more things
5581 and 2) otherwise cse could produce an insn the machine
5582 cannot support. */
5583
5584 if (! cse_not_expected && mode != BLKmode && target
5585 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5586 target = subtarget;
5587
5588 switch (code)
5589 {
5590 case LABEL_DECL:
5591 {
5592 tree function = decl_function_context (exp);
5593 /* Handle using a label in a containing function. */
5594 if (function != current_function_decl
5595 && function != inline_function_decl && function != 0)
5596 {
5597 struct function *p = find_function_data (function);
5598 /* Allocate in the memory associated with the function
5599 that the label is in. */
5600 push_obstacks (p->function_obstack,
5601 p->function_maybepermanent_obstack);
5602
5603 p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5604 label_rtx (exp),
5605 p->forced_labels);
5606 p->addresses_labels = 1;
5607 pop_obstacks ();
5608 }
5609 else
5610 {
5611 current_function_addresses_labels = 1;
5612 if (modifier == EXPAND_INITIALIZER)
5613 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5614 label_rtx (exp),
5615 forced_labels);
5616 }
5617 temp = gen_rtx_MEM (FUNCTION_MODE,
5618 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5619 if (function != current_function_decl
5620 && function != inline_function_decl && function != 0)
5621 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5622 return temp;
5623 }
5624
5625 case PARM_DECL:
5626 if (DECL_RTL (exp) == 0)
5627 {
5628 error_with_decl (exp, "prior parameter's size depends on `%s'");
5629 return CONST0_RTX (mode);
5630 }
5631
5632 /* ... fall through ... */
5633
5634 case VAR_DECL:
5635 /* If a static var's type was incomplete when the decl was written,
5636 but the type is complete now, lay out the decl now. */
5637 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5638 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5639 {
5640 push_obstacks_nochange ();
5641 end_temporary_allocation ();
5642 layout_decl (exp, 0);
5643 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5644 pop_obstacks ();
5645 }
5646
5647 /* Although static-storage variables start off initialized, according to
5648 ANSI C, a memcpy could overwrite them with uninitialized values. So
5649 we check them too. This also lets us check for read-only variables
5650 accessed via a non-const declaration, in case it won't be detected
5651 any other way (e.g., in an embedded system or OS kernel without
5652 memory protection).
5653
5654 Aggregates are not checked here; they're handled elsewhere. */
5655 if (current_function_check_memory_usage && code == VAR_DECL
5656 && GET_CODE (DECL_RTL (exp)) == MEM
5657 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5658 {
5659 enum memory_use_mode memory_usage;
5660 memory_usage = get_memory_usage_from_modifier (modifier);
5661
5662 if (memory_usage != MEMORY_USE_DONT)
5663 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5664 XEXP (DECL_RTL (exp), 0), ptr_mode,
5665 GEN_INT (int_size_in_bytes (type)),
5666 TYPE_MODE (sizetype),
5667 GEN_INT (memory_usage),
5668 TYPE_MODE (integer_type_node));
5669 }
5670
5671 /* ... fall through ... */
5672
5673 case FUNCTION_DECL:
5674 case RESULT_DECL:
5675 if (DECL_RTL (exp) == 0)
5676 abort ();
5677
5678 /* Ensure the variable is marked as used even if it doesn't go
5679 through a parser. If it hasn't been used yet, write out an
5680 external definition. */
5681 if (! TREE_USED (exp))
5682 {
5683 assemble_external (exp);
5684 TREE_USED (exp) = 1;
5685 }
5686
5687 /* Show we haven't gotten RTL for this yet. */
5688 temp = 0;
5689
5690 /* Handle variables inherited from containing functions. */
5691 context = decl_function_context (exp);
5692
5693 /* We treat inline_function_decl as an alias for the current function
5694 because that is the inline function whose vars, types, etc.
5695 are being merged into the current function.
5696 See expand_inline_function. */
5697
5698 if (context != 0 && context != current_function_decl
5699 && context != inline_function_decl
5700 /* If var is static, we don't need a static chain to access it. */
5701 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5702 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5703 {
5704 rtx addr;
5705
5706 /* Mark as non-local and addressable. */
5707 DECL_NONLOCAL (exp) = 1;
5708 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5709 abort ();
5710 mark_addressable (exp);
5711 if (GET_CODE (DECL_RTL (exp)) != MEM)
5712 abort ();
5713 addr = XEXP (DECL_RTL (exp), 0);
5714 if (GET_CODE (addr) == MEM)
5715 addr = gen_rtx_MEM (Pmode,
5716 fix_lexical_addr (XEXP (addr, 0), exp));
5717 else
5718 addr = fix_lexical_addr (addr, exp);
5719 temp = change_address (DECL_RTL (exp), mode, addr);
5720 }
5721
5722 /* This is the case of an array whose size is to be determined
5723 from its initializer, while the initializer is still being parsed.
5724 See expand_decl. */
5725
5726 else if (GET_CODE (DECL_RTL (exp)) == MEM
5727 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5728 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5729 XEXP (DECL_RTL (exp), 0));
5730
5731 /* If DECL_RTL is memory, we are in the normal case and either
5732 the address is not valid or it is not a register and -fforce-addr
5733 is specified, get the address into a register. */
5734
5735 else if (GET_CODE (DECL_RTL (exp)) == MEM
5736 && modifier != EXPAND_CONST_ADDRESS
5737 && modifier != EXPAND_SUM
5738 && modifier != EXPAND_INITIALIZER
5739 && (! memory_address_p (DECL_MODE (exp),
5740 XEXP (DECL_RTL (exp), 0))
5741 || (flag_force_addr
5742 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5743 temp = change_address (DECL_RTL (exp), VOIDmode,
5744 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5745
5746 /* If we got something, return it. But first, set the alignment
5747 if the address is a register. */
5748 if (temp != 0)
5749 {
5750 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5751 mark_reg_pointer (XEXP (temp, 0),
5752 DECL_ALIGN (exp) / BITS_PER_UNIT);
5753
5754 return temp;
5755 }
5756
5757 /* If the mode of DECL_RTL does not match that of the decl, it
5758 must be a promoted value. We return a SUBREG of the wanted mode,
5759 but mark it so that we know that it was already extended. */
5760
5761 if (GET_CODE (DECL_RTL (exp)) == REG
5762 && GET_MODE (DECL_RTL (exp)) != mode)
5763 {
5764 /* Get the signedness used for this variable. Ensure we get the
5765 same mode we got when the variable was declared. */
5766 if (GET_MODE (DECL_RTL (exp))
5767 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5768 abort ();
5769
5770 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5771 SUBREG_PROMOTED_VAR_P (temp) = 1;
5772 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5773 return temp;
5774 }
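/* Editor's sketch, assuming a promoting target: a 'short' variable
   widened at declaration time to a word-sized pseudo has DECL_RTL
   (reg:SI n) while the decl's own mode is HImode, so the code above
   returns

       (subreg:HI (reg:SI n) 0)

   with SUBREG_PROMOTED_VAR_P set, telling later consumers that the
   upper bits of the register already hold a valid extension.  */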
5775
5776 return DECL_RTL (exp);
5777
5778 case INTEGER_CST:
5779 return immed_double_const (TREE_INT_CST_LOW (exp),
5780 TREE_INT_CST_HIGH (exp),
5781 mode);
5782
5783 case CONST_DECL:
5784 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5785 EXPAND_MEMORY_USE_BAD);
5786
5787 case REAL_CST:
5788 /* If optimized, generate immediate CONST_DOUBLE
5789 which will be turned into memory by reload if necessary.
5790
5791 We used to force a register so that loop.c could see it. But
5792 this does not allow gen_* patterns to perform optimizations with
5793 the constants. It also produces two insns in cases like "x = 1.0;".
5794 On most machines, floating-point constants are not permitted in
5795 many insns, so we'd end up copying it to a register in any case.
5796
5797 Now, we do the copying in expand_binop, if appropriate. */
5798 return immed_real_const (exp);
5799
5800 case COMPLEX_CST:
5801 case STRING_CST:
5802 if (! TREE_CST_RTL (exp))
5803 output_constant_def (exp);
5804
5805 /* TREE_CST_RTL probably contains a constant address.
5806 On RISC machines where a constant address isn't valid,
5807 make some insns to get that address into a register. */
5808 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5809 && modifier != EXPAND_CONST_ADDRESS
5810 && modifier != EXPAND_INITIALIZER
5811 && modifier != EXPAND_SUM
5812 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5813 || (flag_force_addr
5814 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5815 return change_address (TREE_CST_RTL (exp), VOIDmode,
5816 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5817 return TREE_CST_RTL (exp);
5818
5819 case EXPR_WITH_FILE_LOCATION:
5820 {
5821 rtx to_return;
5822 char *saved_input_filename = input_filename;
5823 int saved_lineno = lineno;
5824 input_filename = EXPR_WFL_FILENAME (exp);
5825 lineno = EXPR_WFL_LINENO (exp);
5826 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
5827 emit_line_note (input_filename, lineno);
5828 /* Possibly avoid switching back and forth here. */
5829 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
5830 input_filename = saved_input_filename;
5831 lineno = saved_lineno;
5832 return to_return;
5833 }
5834
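/* Editor's note on SAVE_EXPR: the operand is expanded once and the
   result is reused on every later evaluation.  A front end may wrap
   a variable array bound this way, e.g. for

       int n = f ();  int a[n];  ... sizeof a ...

   so the size computation runs a single time; the first expansion
   below stores into a temporary and subsequent ones simply return
   SAVE_EXPR_RTL.  */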
5835 case SAVE_EXPR:
5836 context = decl_function_context (exp);
5837
5838 /* If this SAVE_EXPR was at global context, assume we are an
5839 initialization function and move it into our context. */
5840 if (context == 0)
5841 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5842
5843 /* We treat inline_function_decl as an alias for the current function
5844 because that is the inline function whose vars, types, etc.
5845 are being merged into the current function.
5846 See expand_inline_function. */
5847 if (context == current_function_decl || context == inline_function_decl)
5848 context = 0;
5849
5850 /* If this is non-local, handle it. */
5851 if (context)
5852 {
5853 /* The following call just exists to abort if the context is
5854 not of a containing function. */
5855 find_function_data (context);
5856
5857 temp = SAVE_EXPR_RTL (exp);
5858 if (temp && GET_CODE (temp) == REG)
5859 {
5860 put_var_into_stack (exp);
5861 temp = SAVE_EXPR_RTL (exp);
5862 }
5863 if (temp == 0 || GET_CODE (temp) != MEM)
5864 abort ();
5865 return change_address (temp, mode,
5866 fix_lexical_addr (XEXP (temp, 0), exp));
5867 }
5868 if (SAVE_EXPR_RTL (exp) == 0)
5869 {
5870 if (mode == VOIDmode)
5871 temp = const0_rtx;
5872 else
5873 temp = assign_temp (type, 3, 0, 0);
5874
5875 SAVE_EXPR_RTL (exp) = temp;
5876 if (!optimize && GET_CODE (temp) == REG)
5877 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
5878 save_expr_regs);
5879
5880 /* If the mode of TEMP does not match that of the expression, it
5881 must be a promoted value. We pass store_expr a SUBREG of the
5882 wanted mode but mark it so that we know that it was already
5883 extended. Note that `unsignedp' was modified above in
5884 this case. */
5885
5886 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5887 {
5888 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5889 SUBREG_PROMOTED_VAR_P (temp) = 1;
5890 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5891 }
5892
5893 if (temp == const0_rtx)
5894 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5895 EXPAND_MEMORY_USE_BAD);
5896 else
5897 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5898
5899 TREE_USED (exp) = 1;
5900 }
5901
5902 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5903 must be a promoted value. We return a SUBREG of the wanted mode,
5904 but mark it so that we know that it was already extended. */
5905
5906 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5907 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5908 {
5909 /* Compute the signedness and make the proper SUBREG. */
5910 promote_mode (type, mode, &unsignedp, 0);
5911 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5912 SUBREG_PROMOTED_VAR_P (temp) = 1;
5913 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5914 return temp;
5915 }
5916
5917 return SAVE_EXPR_RTL (exp);
5918
5919 case UNSAVE_EXPR:
5920 {
5921 rtx temp;
5922 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5923 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5924 return temp;
5925 }
5926
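/* Editor's note on the next two cases: a PLACEHOLDER_EXPR stands for
   "the object at hand" inside the size or position expressions of a
   self-referential type (Ada records use this), and WITH_RECORD_EXPR
   supplies that object.  A sketch, with a hypothetical size tree:
   expanding

       WITH_RECORD_EXPR <size-of-field-expr, rec>

   pushes REC onto placeholder_list, so when the PLACEHOLDER_EXPR
   buried inside the size expression is reached below, we substitute
   REC (or a dereference of a pointer to it) and expand that.  */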
5927 case PLACEHOLDER_EXPR:
5928 {
5929 tree placeholder_expr;
5930
5931 /* If there is an object on the head of the placeholder list,
5932 see if some object in it is of type TYPE or a pointer to it.
5933 For further information, see tree.def. */
5934 for (placeholder_expr = placeholder_list;
5935 placeholder_expr != 0;
5936 placeholder_expr = TREE_CHAIN (placeholder_expr))
5937 {
5938 tree need_type = TYPE_MAIN_VARIANT (type);
5939 tree object = 0;
5940 tree old_list = placeholder_list;
5941 tree elt;
5942
5943 /* Find the outermost reference that is of the type we want.
5944 If none, see if any object has a type that is a pointer to
5945 the type we want. */
5946 for (elt = TREE_PURPOSE (placeholder_expr);
5947 elt != 0 && object == 0;
5948 elt
5949 = ((TREE_CODE (elt) == COMPOUND_EXPR
5950 || TREE_CODE (elt) == COND_EXPR)
5951 ? TREE_OPERAND (elt, 1)
5952 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5953 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5954 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5955 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5956 ? TREE_OPERAND (elt, 0) : 0))
5957 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5958 object = elt;
5959
5960 for (elt = TREE_PURPOSE (placeholder_expr);
5961 elt != 0 && object == 0;
5962 elt
5963 = ((TREE_CODE (elt) == COMPOUND_EXPR
5964 || TREE_CODE (elt) == COND_EXPR)
5965 ? TREE_OPERAND (elt, 1)
5966 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5967 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5968 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5969 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5970 ? TREE_OPERAND (elt, 0) : 0))
5971 if (POINTER_TYPE_P (TREE_TYPE (elt))
5972 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
5973 == need_type))
5974 object = build1 (INDIRECT_REF, need_type, elt);
5975
5976 if (object != 0)
5977 {
5978 /* Expand this object skipping the list entries before
5979 it was found in case it is also a PLACEHOLDER_EXPR.
5980 In that case, we want to translate it using subsequent
5981 entries. */
5982 placeholder_list = TREE_CHAIN (placeholder_expr);
5983 temp = expand_expr (object, original_target, tmode,
5984 ro_modifier);
5985 placeholder_list = old_list;
5986 return temp;
5987 }
5988 }
5989 }
5990
5991 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5992 abort ();
5993
5994 case WITH_RECORD_EXPR:
5995 /* Put the object on the placeholder list, expand our first operand,
5996 and pop the list. */
5997 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5998 placeholder_list);
5999 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6000 tmode, ro_modifier);
6001 placeholder_list = TREE_CHAIN (placeholder_list);
6002 return target;
6003
6004 case GOTO_EXPR:
6005 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6006 expand_goto (TREE_OPERAND (exp, 0));
6007 else
6008 expand_computed_goto (TREE_OPERAND (exp, 0));
6009 return const0_rtx;
6010
6011 case EXIT_EXPR:
6012 expand_exit_loop_if_false (NULL_PTR,
6013 invert_truthvalue (TREE_OPERAND (exp, 0)));
6014 return const0_rtx;
6015
6016 case LABELED_BLOCK_EXPR:
6017 if (LABELED_BLOCK_BODY (exp))
6018 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6019 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6020 return const0_rtx;
6021
6022 case EXIT_BLOCK_EXPR:
6023 if (EXIT_BLOCK_RETURN (exp))
6024 sorry ("returned value in block_exit_expr");
6025 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6026 return const0_rtx;
6027
6028 case LOOP_EXPR:
6029 push_temp_slots ();
6030 expand_start_loop (1);
6031 expand_expr_stmt (TREE_OPERAND (exp, 0));
6032 expand_end_loop ();
6033 pop_temp_slots ();
6034
6035 return const0_rtx;
6036
6037 case BIND_EXPR:
6038 {
6039 tree vars = TREE_OPERAND (exp, 0);
6040 int vars_need_expansion = 0;
6041
6042 /* Need to open a binding contour here because
6043 if there are any cleanups they must be contained here. */
6044 expand_start_bindings (0);
6045
6046 /* Mark the corresponding BLOCK for output in its proper place. */
6047 if (TREE_OPERAND (exp, 2) != 0
6048 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6049 insert_block (TREE_OPERAND (exp, 2));
6050
6051 /* If VARS have not yet been expanded, expand them now. */
6052 while (vars)
6053 {
6054 if (DECL_RTL (vars) == 0)
6055 {
6056 vars_need_expansion = 1;
6057 expand_decl (vars);
6058 }
6059 expand_decl_init (vars);
6060 vars = TREE_CHAIN (vars);
6061 }
6062
6063 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6064
6065 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6066
6067 return temp;
6068 }
6069
6070 case RTL_EXPR:
6071 if (RTL_EXPR_SEQUENCE (exp))
6072 {
6073 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6074 abort ();
6075 emit_insns (RTL_EXPR_SEQUENCE (exp));
6076 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6077 }
6078 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6079 free_temps_for_rtl_expr (exp);
6080 return RTL_EXPR_RTL (exp);
6081
6082 case CONSTRUCTOR:
6083 /* If we don't need the result, just ensure we evaluate any
6084 subexpressions. */
6085 if (ignore)
6086 {
6087 tree elt;
6088 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6089 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6090 EXPAND_MEMORY_USE_BAD);
6091 return const0_rtx;
6092 }
6093
6094 /* All elts simple constants => refer to a constant in memory. But
6095 if this is a non-BLKmode mode, let it store a field at a time
6096 since that should make a CONST_INT or CONST_DOUBLE when we
6097 fold. Likewise, if we have a target we can use, it is best to
6098 store directly into the target unless the type is large enough
6099 that memcpy will be used. If we are making an initializer and
6100 all operands are constant, put it in memory as well. */
6101 else if ((TREE_STATIC (exp)
6102 && ((mode == BLKmode
6103 && ! (target != 0 && safe_from_p (target, exp, 1)))
6104 || TREE_ADDRESSABLE (exp)
6105 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
6106 && (!MOVE_BY_PIECES_P
6107 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
6108 TYPE_ALIGN (type) / BITS_PER_UNIT))
6109 && ! mostly_zeros_p (exp))))
6110 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6111 {
6112 rtx constructor = output_constant_def (exp);
6113 if (modifier != EXPAND_CONST_ADDRESS
6114 && modifier != EXPAND_INITIALIZER
6115 && modifier != EXPAND_SUM
6116 && (! memory_address_p (GET_MODE (constructor),
6117 XEXP (constructor, 0))
6118 || (flag_force_addr
6119 && GET_CODE (XEXP (constructor, 0)) != REG)))
6120 constructor = change_address (constructor, VOIDmode,
6121 XEXP (constructor, 0));
6122 return constructor;
6123 }
6124
6125 else
6126 {
6127 /* Handle calls that pass values in multiple non-contiguous
6128 locations. The Irix 6 ABI has examples of this. */
6129 if (target == 0 || ! safe_from_p (target, exp, 1)
6130 || GET_CODE (target) == PARALLEL)
6131 {
6132 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6133 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6134 else
6135 target = assign_temp (type, 0, 1, 1);
6136 }
6137
6138 if (TREE_READONLY (exp))
6139 {
6140 if (GET_CODE (target) == MEM)
6141 target = copy_rtx (target);
6142
6143 RTX_UNCHANGING_P (target) = 1;
6144 }
6145
6146 store_constructor (exp, target, 0);
6147 return target;
6148 }
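/* Editor's example of the CONSTRUCTOR split above: a static, mostly
   nonzero initializer such as

       static const int t[4] = {1, 2, 3, 4};

   goes to memory once via output_constant_def, while a small
   automatic aggregate like "struct {short a, b;} s = {x, y};" with a
   non-BLK mode is built up field by field in a register or temporary
   by store_constructor.  */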
6149
6150 case INDIRECT_REF:
6151 {
6152 tree exp1 = TREE_OPERAND (exp, 0);
6153 tree exp2;
6154 tree index;
6155 tree string = string_constant (exp1, &index);
6156 int i;
6157
6158 /* Try to optimize reads from const strings. */
6159 if (string
6160 && TREE_CODE (string) == STRING_CST
6161 && TREE_CODE (index) == INTEGER_CST
6162 && !TREE_INT_CST_HIGH (index)
6163 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
6164 && GET_MODE_CLASS (mode) == MODE_INT
6165 && GET_MODE_SIZE (mode) == 1
6166 && modifier != EXPAND_MEMORY_USE_WO)
6167 return GEN_INT (TREE_STRING_POINTER (string)[i]);
6168
6169 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6170 op0 = memory_address (mode, op0);
6171
6172 if (current_function_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6173 {
6174 enum memory_use_mode memory_usage;
6175 memory_usage = get_memory_usage_from_modifier (modifier);
6176
6177 if (memory_usage != MEMORY_USE_DONT)
6178 {
6179 in_check_memory_usage = 1;
6180 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6181 op0, ptr_mode,
6182 GEN_INT (int_size_in_bytes (type)),
6183 TYPE_MODE (sizetype),
6184 GEN_INT (memory_usage),
6185 TYPE_MODE (integer_type_node));
6186 in_check_memory_usage = 0;
6187 }
6188 }
6189
6190 temp = gen_rtx_MEM (mode, op0);
6191 /* If address was computed by addition,
6192 mark this as an element of an aggregate. */
6193 if (TREE_CODE (exp1) == PLUS_EXPR
6194 || (TREE_CODE (exp1) == SAVE_EXPR
6195 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6196 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6197 || (TREE_CODE (exp1) == ADDR_EXPR
6198 && (exp2 = TREE_OPERAND (exp1, 0))
6199 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6200 MEM_SET_IN_STRUCT_P (temp, 1);
6201
6202 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6203 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6204
6205 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6206 here, because, in C and C++, the fact that a location is accessed
6207 through a pointer to const does not mean that the value there can
6208 never change. Languages where it can never change should
6209 also set TREE_STATIC. */
6210 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6211 return temp;
6212 }
6213
6214 case ARRAY_REF:
6215 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6216 abort ();
6217
6218 {
6219 tree array = TREE_OPERAND (exp, 0);
6220 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6221 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6222 tree index = TREE_OPERAND (exp, 1);
6223 tree index_type = TREE_TYPE (index);
6224 HOST_WIDE_INT i;
6225
6226 /* Optimize the special-case of a zero lower bound.
6227
6228 We convert the low_bound to sizetype to avoid some problems
6229 with constant folding. (E.g. suppose the lower bound is 1,
6230 and its mode is QI. Without the conversion, (ARRAY
6231 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6232 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
6233
6234 But sizetype isn't quite right either (especially if
6235 the lowbound is negative). FIXME */
6236
6237 if (! integer_zerop (low_bound))
6238 index = fold (build (MINUS_EXPR, index_type, index,
6239 convert (sizetype, low_bound)));
6240
6241 /* Fold an expression like: "foo"[2].
6242 This is not done in fold so it won't happen inside &.
6243 Don't fold if this is for wide characters since it's too
6244 difficult to do correctly and this is a very rare case. */
6245
6246 if (TREE_CODE (array) == STRING_CST
6247 && TREE_CODE (index) == INTEGER_CST
6248 && !TREE_INT_CST_HIGH (index)
6249 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
6250 && GET_MODE_CLASS (mode) == MODE_INT
6251 && GET_MODE_SIZE (mode) == 1)
6252 return GEN_INT (TREE_STRING_POINTER (array)[i]);
6253
6254 /* If this is a constant index into a constant array,
6255 just get the value from the array. Handle both the cases when
6256 we have an explicit constructor and when our operand is a variable
6257 that was declared const. */
6258
6259 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6260 {
6261 if (TREE_CODE (index) == INTEGER_CST
6262 && TREE_INT_CST_HIGH (index) == 0)
6263 {
6264 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6265
6266 i = TREE_INT_CST_LOW (index);
6267 while (elem && i--)
6268 elem = TREE_CHAIN (elem);
6269 if (elem)
6270 return expand_expr (fold (TREE_VALUE (elem)), target,
6271 tmode, ro_modifier);
6272 }
6273 }
6274
6275 else if (optimize >= 1
6276 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6277 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6278 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6279 {
6280 if (TREE_CODE (index) == INTEGER_CST)
6281 {
6282 tree init = DECL_INITIAL (array);
6283
6284 i = TREE_INT_CST_LOW (index);
6285 if (TREE_CODE (init) == CONSTRUCTOR)
6286 {
6287 tree elem = CONSTRUCTOR_ELTS (init);
6288
6289 while (elem
6290 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
6291 elem = TREE_CHAIN (elem);
6292 if (elem)
6293 return expand_expr (fold (TREE_VALUE (elem)), target,
6294 tmode, ro_modifier);
6295 }
6296 else if (TREE_CODE (init) == STRING_CST
6297 && TREE_INT_CST_HIGH (index) == 0
6298 && (TREE_INT_CST_LOW (index)
6299 < TREE_STRING_LENGTH (init)))
6300 return (GEN_INT
6301 (TREE_STRING_POINTER
6302 (init)[TREE_INT_CST_LOW (index)]));
6303 }
6304 }
6305 }
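/* Editor's example of the constant folds above: given

       static const char msg[] = "hi";

   the reference msg[1] takes the VAR_DECL/DECL_INITIAL path when
   optimizing and expands directly to (const_int 105), i.e. 'i',
   with no memory reference; a constant index into a CONSTRUCTOR
   likewise pulls the matching element straight out of the list.  */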
6306
6307 /* ... fall through ... */
6308
6309 case COMPONENT_REF:
6310 case BIT_FIELD_REF:
6311 /* If the operand is a CONSTRUCTOR, we can just extract the
6312 appropriate field if it is present. Don't do this if we have
6313 already written the data since we want to refer to that copy
6314 and varasm.c assumes that's what we'll do. */
6315 if (code != ARRAY_REF
6316 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6317 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6318 {
6319 tree elt;
6320
6321 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6322 elt = TREE_CHAIN (elt))
6323 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6324 /* We can normally use the value of the field in the
6325 CONSTRUCTOR. However, if this is a bitfield in
6326 an integral mode that we can fit in a HOST_WIDE_INT,
6327 we must mask only the number of bits in the bitfield,
6328 since this is done implicitly by the constructor. If
6329 the bitfield does not meet either of those conditions,
6330 we can't do this optimization. */
6331 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6332 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6333 == MODE_INT)
6334 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6335 <= HOST_BITS_PER_WIDE_INT))))
6336 {
6337 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6338 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6339 {
6340 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
6341
6342 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6343 {
6344 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6345 op0 = expand_and (op0, op1, target);
6346 }
6347 else
6348 {
6349 enum machine_mode imode
6350 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6351 tree count
6352 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6353 0);
6354
6355 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6356 target, 0);
6357 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6358 target, 0);
6359 }
6360 }
6361
6362 return op0;
6363 }
6364 }
6365
6366 {
6367 enum machine_mode mode1;
6368 int bitsize;
6369 int bitpos;
6370 tree offset;
6371 int volatilep = 0;
6372 int alignment;
6373 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6374 &mode1, &unsignedp, &volatilep,
6375 &alignment);
6376
6377 /* If we got back the original object, something is wrong. Perhaps
6378 we are evaluating an expression too early. In any event, don't
6379 infinitely recurse. */
6380 if (tem == exp)
6381 abort ();
6382
6383 /* If TEM's type is a union of variable size, pass TARGET to the inner
6384 computation, since it will need a temporary and TARGET is known
6385 to be big enough. This occurs in unchecked conversion in Ada. */
6386
6387 op0 = expand_expr (tem,
6388 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6389 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6390 != INTEGER_CST)
6391 ? target : NULL_RTX),
6392 VOIDmode,
6393 modifier == EXPAND_INITIALIZER
6394 ? modifier : EXPAND_NORMAL);
6395
6396 /* If this is a constant, put it into a register if it is a
6397 legitimate constant and memory if it isn't. */
6398 if (CONSTANT_P (op0))
6399 {
6400 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6401 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
6402 op0 = force_reg (mode, op0);
6403 else
6404 op0 = validize_mem (force_const_mem (mode, op0));
6405 }
6406
6407 if (offset != 0)
6408 {
6409 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6410
6411 if (GET_CODE (op0) != MEM)
6412 abort ();
6413
6414 if (GET_MODE (offset_rtx) != ptr_mode)
6415 {
6416 #ifdef POINTERS_EXTEND_UNSIGNED
6417 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6418 #else
6419 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6420 #endif
6421 }
6422
6423 if (GET_CODE (op0) == MEM
6424 && GET_MODE (op0) == BLKmode
6425 && bitsize
6426 && (bitpos % bitsize) == 0
6427 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6428 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6429 {
6430 rtx temp = change_address (op0, mode1,
6431 plus_constant (XEXP (op0, 0),
6432 (bitpos /
6433 BITS_PER_UNIT)));
6434 if (GET_CODE (XEXP (temp, 0)) == REG)
6435 op0 = temp;
6436 else
6437 op0 = change_address (op0, mode1,
6438 force_reg (GET_MODE (XEXP (temp, 0)),
6439 XEXP (temp, 0)));
6440 bitpos = 0;
6441 }
6442
6443
6444 op0 = change_address (op0, VOIDmode,
6445 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6446 force_reg (ptr_mode, offset_rtx)));
6447 }
6448
6449 /* Don't forget about volatility even if this is a bitfield. */
6450 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6451 {
6452 op0 = copy_rtx (op0);
6453 MEM_VOLATILE_P (op0) = 1;
6454 }
6455
6456 /* Check the access. */
6457 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6458 {
6459 enum memory_use_mode memory_usage;
6460 memory_usage = get_memory_usage_from_modifier (modifier);
6461
6462 if (memory_usage != MEMORY_USE_DONT)
6463 {
6464 rtx to;
6465 int size;
6466
6467 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6468 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6469
6470 /* Check the access right of the pointer. */
6471 if (size > BITS_PER_UNIT)
6472 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6473 to, ptr_mode,
6474 GEN_INT (size / BITS_PER_UNIT),
6475 TYPE_MODE (sizetype),
6476 GEN_INT (memory_usage),
6477 TYPE_MODE (integer_type_node));
6478 }
6479 }
6480
6481 /* In cases where an aligned union has an unaligned object
6482 as a field, we might be extracting a BLKmode value from
6483 an integer-mode (e.g., SImode) object. Handle this case
6484 by doing the extract into an object as wide as the field
6485 (which we know to be the width of a basic mode), then
6486 storing into memory, and changing the mode to BLKmode.
6487 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6488 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6489 if (mode1 == VOIDmode
6490 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6491 || (modifier != EXPAND_CONST_ADDRESS
6492 && modifier != EXPAND_INITIALIZER
6493 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6494 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6495 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6496 /* If the field isn't aligned enough to fetch as a memref,
6497 fetch it as a bit field. */
6498 || (SLOW_UNALIGNED_ACCESS
6499 && ((TYPE_ALIGN (TREE_TYPE (tem)) < (unsigned int) GET_MODE_ALIGNMENT (mode))
6500 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
6501 {
6502 enum machine_mode ext_mode = mode;
6503
6504 if (ext_mode == BLKmode)
6505 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6506
6507 if (ext_mode == BLKmode)
6508 {
6509 /* In this case, BITPOS must start at a byte boundary and
6510 TARGET, if specified, must be a MEM. */
6511 if (GET_CODE (op0) != MEM
6512 || (target != 0 && GET_CODE (target) != MEM)
6513 || bitpos % BITS_PER_UNIT != 0)
6514 abort ();
6515
6516 op0 = change_address (op0, VOIDmode,
6517 plus_constant (XEXP (op0, 0),
6518 bitpos / BITS_PER_UNIT));
6519 if (target == 0)
6520 target = assign_temp (type, 0, 1, 1);
6521
6522 emit_block_move (target, op0,
6523 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6524 / BITS_PER_UNIT),
6525 1);
6526
6527 return target;
6528 }
6529
6530 op0 = validize_mem (op0);
6531
6532 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6533 mark_reg_pointer (XEXP (op0, 0), alignment);
6534
6535 op0 = extract_bit_field (op0, bitsize, bitpos,
6536 unsignedp, target, ext_mode, ext_mode,
6537 alignment,
6538 int_size_in_bytes (TREE_TYPE (tem)));
6539
6540 /* If the result is a record type and BITSIZE is narrower than
6541 the mode of OP0, an integral mode, and this is a big endian
6542 machine, we must put the field into the high-order bits. */
6543 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6544 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6545 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6546 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6547 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6548 - bitsize),
6549 op0, 1);
6550
6551 if (mode == BLKmode)
6552 {
6553 rtx new = assign_stack_temp (ext_mode,
6554 bitsize / BITS_PER_UNIT, 0);
6555
6556 emit_move_insn (new, op0);
6557 op0 = copy_rtx (new);
6558 PUT_MODE (op0, BLKmode);
6559 MEM_SET_IN_STRUCT_P (op0, 1);
6560 }
6561
6562 return op0;
6563 }
6564
6565 /* If the result is BLKmode, use that to access the object
6566 now as well. */
6567 if (mode == BLKmode)
6568 mode1 = BLKmode;
6569
6570 /* Get a reference to just this component. */
6571 if (modifier == EXPAND_CONST_ADDRESS
6572 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6573 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6574 (bitpos / BITS_PER_UNIT)));
6575 else
6576 op0 = change_address (op0, mode1,
6577 plus_constant (XEXP (op0, 0),
6578 (bitpos / BITS_PER_UNIT)));
6579
6580 if (GET_CODE (op0) == MEM)
6581 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6582
6583 if (GET_CODE (XEXP (op0, 0)) == REG)
6584 mark_reg_pointer (XEXP (op0, 0), alignment);
6585
6586 MEM_SET_IN_STRUCT_P (op0, 1);
6587 MEM_VOLATILE_P (op0) |= volatilep;
6588 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6589 || modifier == EXPAND_CONST_ADDRESS
6590 || modifier == EXPAND_INITIALIZER)
6591 return op0;
6592 else if (target == 0)
6593 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6594
6595 convert_move (target, op0, unsignedp);
6596 return target;
6597 }
6598
6599 /* Intended for a reference to a buffer of a file-object in Pascal.
6600 But it's not certain that a special tree code will really be
6601 necessary for these. INDIRECT_REF might work for them. */
6602 case BUFFER_REF:
6603 abort ();
6604
6605 case IN_EXPR:
6606 {
6607 /* Pascal set IN expression.
6608
6609 Algorithm:
6610 rlo = set_low - (set_low%bits_per_word);
6611 the_word = set [ (index - rlo)/bits_per_word ];
6612 bit_index = index % bits_per_word;
6613 bitmask = 1 << bit_index;
6614 return !!(the_word & bitmask); */
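/* Editor's worked example, using 8 bits per unit for concreteness:
   for a set of 3..25 and index 20,

       rlo       = 3 - (3 % 8)     = 0
       the_word  = set[(20 - 0)/8] = set[2]
       bit_index = 20 % 8          = 4
       bitmask   = 1 << 4          = 0x10

   so the result is nonzero iff bit 4 of the third byte of the set is
   set.  (The code below does this arithmetic in units of
   BITS_PER_UNIT, i.e. bytes.)  */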
6615
6616 tree set = TREE_OPERAND (exp, 0);
6617 tree index = TREE_OPERAND (exp, 1);
6618 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6619 tree set_type = TREE_TYPE (set);
6620 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6621 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6622 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6623 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6624 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6625 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6626 rtx setaddr = XEXP (setval, 0);
6627 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6628 rtx rlow;
6629 rtx diff, quo, rem, addr, bit, result;
6630
6631 preexpand_calls (exp);
6632
6633 /* If domain is empty, answer is no. Likewise if index is constant
6634 and out of bounds. */
6635 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6636 && TREE_CODE (set_low_bound) == INTEGER_CST
6637 && tree_int_cst_lt (set_high_bound, set_low_bound))
6638 || (TREE_CODE (index) == INTEGER_CST
6639 && TREE_CODE (set_low_bound) == INTEGER_CST
6640 && tree_int_cst_lt (index, set_low_bound))
6641 || (TREE_CODE (set_high_bound) == INTEGER_CST
6642 && TREE_CODE (index) == INTEGER_CST
6643 && tree_int_cst_lt (set_high_bound, index))))
6644 return const0_rtx;
6645
6646 if (target == 0)
6647 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6648
6649 /* If we get here, we have to generate the code for both cases
6650 (in range and out of range). */
6651
6652 op0 = gen_label_rtx ();
6653 op1 = gen_label_rtx ();
6654
6655 if (! (GET_CODE (index_val) == CONST_INT
6656 && GET_CODE (lo_r) == CONST_INT))
6657 {
6658 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
6659 GET_MODE (index_val), iunsignedp, 0);
6660 emit_jump_insn (gen_blt (op1));
6661 }
6662
6663 if (! (GET_CODE (index_val) == CONST_INT
6664 && GET_CODE (hi_r) == CONST_INT))
6665 {
6666 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
6667 GET_MODE (index_val), iunsignedp, 0);
6668 emit_jump_insn (gen_bgt (op1));
6669 }
6670
6671 /* Calculate the element number of bit zero in the first word
6672 of the set. */
6673 if (GET_CODE (lo_r) == CONST_INT)
6674 rlow = GEN_INT (INTVAL (lo_r)
6675 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6676 else
6677 rlow = expand_binop (index_mode, and_optab, lo_r,
6678 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6679 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6680
6681 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6682 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6683
6684 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6685 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6686 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6687 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6688
6689 addr = memory_address (byte_mode,
6690 expand_binop (index_mode, add_optab, diff,
6691 setaddr, NULL_RTX, iunsignedp,
6692 OPTAB_LIB_WIDEN));
6693
6694 /* Extract the bit we want to examine. */
6695 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6696 gen_rtx_MEM (byte_mode, addr),
6697 make_tree (TREE_TYPE (index), rem),
6698 NULL_RTX, 1);
6699 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6700 GET_MODE (target) == byte_mode ? target : 0,
6701 1, OPTAB_LIB_WIDEN);
6702
6703 if (result != target)
6704 convert_move (target, result, 1);
6705
6706 /* Output the code to handle the out-of-range case. */
6707 emit_jump (op0);
6708 emit_label (op1);
6709 emit_move_insn (target, const0_rtx);
6710 emit_label (op0);
6711 return target;
6712 }
6713
6714 case WITH_CLEANUP_EXPR:
6715 if (RTL_EXPR_RTL (exp) == 0)
6716 {
6717 RTL_EXPR_RTL (exp)
6718 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6719 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6720
6721 /* That's it for this cleanup. */
6722 TREE_OPERAND (exp, 2) = 0;
6723 }
6724 return RTL_EXPR_RTL (exp);
6725
6726 case CLEANUP_POINT_EXPR:
6727 {
6728 extern int temp_slot_level;
6729 /* Start a new binding layer that will keep track of all cleanup
6730 actions to be performed. */
6731 expand_start_bindings (0);
6732
6733 target_temp_slot_level = temp_slot_level;
6734
6735 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6736 /* If we're going to use this value, load it up now. */
6737 if (! ignore)
6738 op0 = force_not_mem (op0);
6739 preserve_temp_slots (op0);
6740 expand_end_bindings (NULL_TREE, 0, 0);
6741 }
6742 return op0;
6743
6744 case CALL_EXPR:
6745 /* Check for a built-in function. */
6746 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6747 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6748 == FUNCTION_DECL)
6749 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6750 return expand_builtin (exp, target, subtarget, tmode, ignore);
6751
6752 /* If this call was expanded already by preexpand_calls,
6753 just return the result we got. */
6754 if (CALL_EXPR_RTL (exp) != 0)
6755 return CALL_EXPR_RTL (exp);
6756
6757 return expand_call (exp, target, ignore);
6758
6759 case NON_LVALUE_EXPR:
6760 case NOP_EXPR:
6761 case CONVERT_EXPR:
6762 case REFERENCE_EXPR:
6763 if (TREE_CODE (type) == UNION_TYPE)
6764 {
6765 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6766 if (target == 0)
6767 {
6768 if (mode != BLKmode)
6769 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6770 else
6771 target = assign_temp (type, 0, 1, 1);
6772 }
6773
6774 if (GET_CODE (target) == MEM)
6775 /* Store data into beginning of memory target. */
6776 store_expr (TREE_OPERAND (exp, 0),
6777 change_address (target, TYPE_MODE (valtype), 0), 0);
6778
6779 else if (GET_CODE (target) == REG)
6780 /* Store this field into a union of the proper type. */
6781 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6782 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6783 VOIDmode, 0, 1,
6784 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))),
6785 0);
6786 else
6787 abort ();
6788
6789 /* Return the entire union. */
6790 return target;
6791 }
6792
6793 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6794 {
6795 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
6796 ro_modifier);
6797
6798 /* If the signedness of the conversion differs and OP0 is
6799 a promoted SUBREG, clear that indication since we now
6800 have to do the proper extension. */
6801 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6802 && GET_CODE (op0) == SUBREG)
6803 SUBREG_PROMOTED_VAR_P (op0) = 0;
6804
6805 return op0;
6806 }
6807
6808 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6809 if (GET_MODE (op0) == mode)
6810 return op0;
6811
6812 /* If OP0 is a constant, just convert it into the proper mode. */
6813 if (CONSTANT_P (op0))
6814 return
6815 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6816 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6817
6818 if (modifier == EXPAND_INITIALIZER)
6819 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6820
6821 if (target == 0)
6822 return
6823 convert_to_mode (mode, op0,
6824 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6825 else
6826 convert_move (target, op0,
6827 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6828 return target;
6829
6830 case PLUS_EXPR:
6831 /* We come here from MINUS_EXPR when the second operand is a
6832 constant. */
6833 plus_expr:
6834 this_optab = add_optab;
6835
6836 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6837 something else, make sure we add the register to the constant and
6838 then to the other thing. This case can occur during strength
6839 reduction and doing it this way will produce better code if the
6840 frame pointer or argument pointer is eliminated.
6841
6842 fold-const.c will ensure that the constant is always in the inner
6843 PLUS_EXPR, so the only case we need to do anything about is if
6844 sp, ap, or fp is our second argument, in which case we must swap
6845 the innermost first argument and our second argument. */
6846
6847 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6848 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6849 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6850 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6851 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6852 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6853 {
6854 tree t = TREE_OPERAND (exp, 1);
6855
6856 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6857 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6858 }
6859
6860 /* If the result is to be ptr_mode and we are adding an integer to
6861 something, we might be forming a constant. So try to use
6862 plus_constant. If it produces a sum and we can't accept it,
6863 use force_operand. This allows P = &ARR[const] to generate
6864 efficient code on machines where a SYMBOL_REF is not a valid
6865 address.
6866
6867 If this is an EXPAND_SUM call, always return the sum. */
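/* Editor's sketch: for "int ARR[10];" the address &ARR[2] arrives
   here as (symbol_ref "ARR") plus the constant 8; plus_constant
   folds that to

       (const (plus (symbol_ref "ARR") (const_int 8)))

   which is a single legitimate constant on many targets, instead of
   an add insn feeding a pseudo.  */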
6868 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
6869 || mode == ptr_mode)
6870 {
6871 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6872 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6873 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6874 {
6875 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6876 EXPAND_SUM);
6877 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
6878 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6879 op1 = force_operand (op1, target);
6880 return op1;
6881 }
6882
6883 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6884 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6885 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6886 {
6887 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6888 EXPAND_SUM);
6889 if (! CONSTANT_P (op0))
6890 {
6891 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6892 VOIDmode, modifier);
6893 /* Don't go to both_summands if modifier
6894 says it's not right to return a PLUS. */
6895 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6896 goto binop2;
6897 goto both_summands;
6898 }
6899 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
6900 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6901 op0 = force_operand (op0, target);
6902 return op0;
6903 }
6904 }
6905
6906 /* No sense saving up arithmetic to be done
6907 if it's all in the wrong mode to form part of an address.
6908 And force_operand won't know whether to sign-extend or
6909 zero-extend. */
6910 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6911 || mode != ptr_mode)
6912 goto binop;
6913
6914 preexpand_calls (exp);
6915 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6916 subtarget = 0;
6917
6918 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
6919 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
6920
6921 both_summands:
6922 /* Make sure any term that's a sum with a constant comes last. */
6923 if (GET_CODE (op0) == PLUS
6924 && CONSTANT_P (XEXP (op0, 1)))
6925 {
6926 temp = op0;
6927 op0 = op1;
6928 op1 = temp;
6929 }
6930 /* If adding to a sum including a constant,
6931 associate it to put the constant outside. */
6932 if (GET_CODE (op1) == PLUS
6933 && CONSTANT_P (XEXP (op1, 1)))
6934 {
6935 rtx constant_term = const0_rtx;
6936
6937 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6938 if (temp != 0)
6939 op0 = temp;
6940 /* Ensure that MULT comes first if there is one. */
6941 else if (GET_CODE (op0) == MULT)
6942 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
6943 else
6944 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
6945
6946 /* Let's also eliminate constants from op0 if possible. */
6947 op0 = eliminate_constant_term (op0, &constant_term);
6948
6949 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6950 their sum should be a constant. Form it into OP1, since the
6951 result we want will then be OP0 + OP1. */
6952
6953 temp = simplify_binary_operation (PLUS, mode, constant_term,
6954 XEXP (op1, 1));
6955 if (temp != 0)
6956 op1 = temp;
6957 else
6958 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
6959 }
6960
6961 /* Put a constant term last and put a multiplication first. */
6962 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6963 temp = op1, op1 = op0, op0 = temp;
6964
6965 temp = simplify_binary_operation (PLUS, mode, op0, op1);
6966 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
6967
6968 case MINUS_EXPR:
6969 /* For initializers, we are allowed to return a MINUS of two
6970 symbolic constants. Here we handle all cases when both operands
6971 are constant. */
6974 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6975 && really_constant_p (TREE_OPERAND (exp, 0))
6976 && really_constant_p (TREE_OPERAND (exp, 1)))
6977 {
6978 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
6979 VOIDmode, ro_modifier);
6980 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6981 VOIDmode, ro_modifier);
6982
6983 /* If the last operand is a CONST_INT, use plus_constant of
6984 the negated constant. Else make the MINUS. */
6985 if (GET_CODE (op1) == CONST_INT)
6986 return plus_constant (op0, - INTVAL (op1));
6987 else
6988 return gen_rtx_MINUS (mode, op0, op1);
6989 }
6990 /* Convert A - const to A + (-const). */
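/* Editorial example: `i - 5' becomes `i + (-5)' and jumps back to the
   PLUS_EXPR code, which can fold the constant into an address.  When TYPE
   is unsigned (or the negation overflows), the operands are first
   converted to the corresponding signed type below.  */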
6991 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6992 {
6993 tree negated = fold (build1 (NEGATE_EXPR, type,
6994 TREE_OPERAND (exp, 1)));
6995
6996 /* Deal with the case where we can't negate the constant
6997 in TYPE. */
6998 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6999 {
7000 tree newtype = signed_type (type);
7001 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
7002 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
7003 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
7004
7005 if (! TREE_OVERFLOW (newneg))
7006 return expand_expr (convert (type,
7007 build (PLUS_EXPR, newtype,
7008 newop0, newneg)),
7009 target, tmode, ro_modifier);
7010 }
7011 else
7012 {
7013 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7014 goto plus_expr;
7015 }
7016 }
7017 this_optab = sub_optab;
7018 goto binop;
7019
7020 case MULT_EXPR:
7021 preexpand_calls (exp);
7022 /* If the first operand is constant, swap the operands.
7023 Then the special-case checks that follow need only
7024 examine the second operand. */
7025 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7026 {
7027 register tree t1 = TREE_OPERAND (exp, 0);
7028 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7029 TREE_OPERAND (exp, 1) = t1;
7030 }
7031
7032 /* Attempt to return something suitable for generating an
7033 indexed address, for machines that support that. */
7034
7035 if (modifier == EXPAND_SUM && mode == ptr_mode
7036 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7037 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7038 {
7039 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7040 EXPAND_SUM);
7041
7042 /* Apply distributive law if OP0 is x+c. */
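/* Editorial example: if OP0 expanded to (plus x (const_int 2)), then
   `(x + 2) * 12' is returned as (plus (mult x 12) (const_int 24)),
   which fits the base + index * scale + displacement shape of many
   addressing modes.  */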
7043 if (GET_CODE (op0) == PLUS
7044 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7045 return gen_rtx_PLUS (mode,
7046 gen_rtx_MULT (mode, XEXP (op0, 0),
7047 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7048 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7049 * INTVAL (XEXP (op0, 1))));
7050
7051 if (GET_CODE (op0) != REG)
7052 op0 = force_operand (op0, NULL_RTX);
7053 if (GET_CODE (op0) != REG)
7054 op0 = copy_to_mode_reg (mode, op0);
7055
7056 return gen_rtx_MULT (mode, op0,
7057 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7058 }
7059
7060 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7061 subtarget = 0;
7062
7063 /* Check for multiplying things that have been extended
7064 from a narrower type. If this machine supports multiplying
7065 in that narrower type with a result in the desired type,
7066 do it that way, and avoid the explicit type-conversion. */
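/* Editorial sketch, assuming 16-bit HImode shorts and 32-bit SImode ints:
   for `(int) s1 * (int) s2' the test below picks a 16x16->32 widening
   multiply (smul_widen_optab or umul_widen_optab, by the signedness of
   the inner operands) instead of extending both operands to SImode and
   doing a full 32x32 multiply.  */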
7067 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7068 && TREE_CODE (type) == INTEGER_TYPE
7069 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7070 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7071 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7072 && int_fits_type_p (TREE_OPERAND (exp, 1),
7073 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7074 /* Don't use a widening multiply if a shift will do. */
7075 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7076 > HOST_BITS_PER_WIDE_INT)
7077 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7078 ||
7079 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7080 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7081 ==
7082 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7083 /* If both operands are extended, they must either both
7084 be zero-extended or both be sign-extended. */
7085 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7086 ==
7087 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7088 {
7089 enum machine_mode innermode
7090 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7091 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7092 ? smul_widen_optab : umul_widen_optab);
7093 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7094 ? umul_widen_optab : smul_widen_optab);
7095 if (mode == GET_MODE_WIDER_MODE (innermode))
7096 {
7097 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7098 {
7099 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7100 NULL_RTX, VOIDmode, 0);
7101 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7102 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7103 VOIDmode, 0);
7104 else
7105 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7106 NULL_RTX, VOIDmode, 0);
7107 goto binop2;
7108 }
7109 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7110 && innermode == word_mode)
7111 {
7112 rtx htem;
7113 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7114 NULL_RTX, VOIDmode, 0);
7115 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7116 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7117 VOIDmode, 0);
7118 else
7119 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7120 NULL_RTX, VOIDmode, 0);
7121 temp = expand_binop (mode, other_optab, op0, op1, target,
7122 unsignedp, OPTAB_LIB_WIDEN);
7123 htem = expand_mult_highpart_adjust (innermode,
7124 gen_highpart (innermode, temp),
7125 op0, op1,
7126 gen_highpart (innermode, temp),
7127 unsignedp);
7128 emit_move_insn (gen_highpart (innermode, temp), htem);
7129 return temp;
7130 }
7131 }
7132 }
7133 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7134 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7135 return expand_mult (mode, op0, op1, target, unsignedp);
7136
7137 case TRUNC_DIV_EXPR:
7138 case FLOOR_DIV_EXPR:
7139 case CEIL_DIV_EXPR:
7140 case ROUND_DIV_EXPR:
7141 case EXACT_DIV_EXPR:
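/* Editorial note: these codes differ only in how the quotient is
   rounded; e.g. -7 / 2 is -3 for TRUNC_DIV_EXPR, -4 for FLOOR_DIV_EXPR
   and -3 for CEIL_DIV_EXPR, while EXACT_DIV_EXPR asserts there is no
   remainder.  expand_divmod emits whatever adjustment each one needs.  */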
7142 preexpand_calls (exp);
7143 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7144 subtarget = 0;
7145 /* Possible optimization: compute the dividend with EXPAND_SUM;
7146 then, if the divisor is constant, we can optimize the case
7147 where some terms of the dividend have coefficients divisible by it. */
7148 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7149 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7150 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7151
7152 case RDIV_EXPR:
7153 this_optab = flodiv_optab;
7154 goto binop;
7155
7156 case TRUNC_MOD_EXPR:
7157 case FLOOR_MOD_EXPR:
7158 case CEIL_MOD_EXPR:
7159 case ROUND_MOD_EXPR:
7160 preexpand_calls (exp);
7161 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7162 subtarget = 0;
7163 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7164 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7165 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7166
7167 case FIX_ROUND_EXPR:
7168 case FIX_FLOOR_EXPR:
7169 case FIX_CEIL_EXPR:
7170 abort (); /* Not used for C. */
7171
7172 case FIX_TRUNC_EXPR:
7173 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7174 if (target == 0)
7175 target = gen_reg_rtx (mode);
7176 expand_fix (target, op0, unsignedp);
7177 return target;
7178
7179 case FLOAT_EXPR:
7180 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7181 if (target == 0)
7182 target = gen_reg_rtx (mode);
7183 /* expand_float can't figure out what to do if FROM has VOIDmode.
7184 So give it the correct mode. With -O, cse will optimize this. */
7185 if (GET_MODE (op0) == VOIDmode)
7186 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7187 op0);
7188 expand_float (target, op0,
7189 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7190 return target;
7191
7192 case NEGATE_EXPR:
7193 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7194 temp = expand_unop (mode, neg_optab, op0, target, 0);
7195 if (temp == 0)
7196 abort ();
7197 return temp;
7198
7199 case ABS_EXPR:
7200 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7201
7202 /* Handle complex values specially. */
7203 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7204 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7205 return expand_complex_abs (mode, op0, target, unsignedp);
7206
7207 /* Unsigned abs is simply the operand. Testing here means we don't
7208 risk generating incorrect code below. */
7209 if (TREE_UNSIGNED (type))
7210 return op0;
7211
7212 return expand_abs (mode, op0, target, unsignedp,
7213 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7214
7215 case MAX_EXPR:
7216 case MIN_EXPR:
7217 target = original_target;
7218 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7219 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7220 || GET_MODE (target) != mode
7221 || (GET_CODE (target) == REG
7222 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7223 target = gen_reg_rtx (mode);
7224 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7225 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7226
7227 /* First try to do it with a special MIN or MAX instruction.
7228 If that does not win, use a conditional jump to select the proper
7229 value. */
7230 this_optab = (TREE_UNSIGNED (type)
7231 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7232 : (code == MIN_EXPR ? smin_optab : smax_optab));
7233
7234 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7235 OPTAB_WIDEN);
7236 if (temp != 0)
7237 return temp;
7238
7239 /* At this point, a MEM target is no longer useful; we will get better
7240 code without it. */
7241
7242 if (GET_CODE (target) == MEM)
7243 target = gen_reg_rtx (mode);
7244
7245 if (target != op0)
7246 emit_move_insn (target, op0);
7247
7248 op0 = gen_label_rtx ();
7249
7250 /* If this mode is an integer too wide to compare properly,
7251 compare word by word. Rely on cse to optimize constant cases. */
7252 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
7253 {
7254 if (code == MAX_EXPR)
7255 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7256 target, op1, NULL_RTX, op0);
7257 else
7258 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7259 op1, target, NULL_RTX, op0);
7260 emit_move_insn (target, op1);
7261 }
7262 else
7263 {
7264 if (code == MAX_EXPR)
7265 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
7266 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
7267 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
7268 else
7269 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
7270 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
7271 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
7272 if (temp == const0_rtx)
7273 emit_move_insn (target, op1);
7274 else if (temp != const_true_rtx)
7275 {
7276 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
7277 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
7278 else
7279 abort ();
7280 emit_move_insn (target, op1);
7281 }
7282 }
7283 emit_label (op0);
7284 return target;
7285
7286 case BIT_NOT_EXPR:
7287 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7288 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7289 if (temp == 0)
7290 abort ();
7291 return temp;
7292
7293 case FFS_EXPR:
7294 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7295 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7296 if (temp == 0)
7297 abort ();
7298 return temp;
7299
7300 /* ??? Can optimize bitwise operations with one arg constant.
7301 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7302 and (a bitwise1 b) bitwise2 b (etc)
7303 but that is probably not worthwhile. */
7304
7305 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7306 boolean values when we want in all cases to compute both of them. In
7307 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7308 as actual zero-or-1 values and then bitwise anding. In cases where
7309 there cannot be any side effects, better code would be made by
7310 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7311 how to recognize those cases. */
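/* Editorial example: C's `a && b' is TRUTH_ANDIF_EXPR and short-circuits;
   a TRUTH_AND_EXPR instead evaluates both operands as 0-or-1 values and
   falls through to the plain bitwise AND below, which is why it shares
   and_optab with BIT_AND_EXPR.  */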
7312
7313 case TRUTH_AND_EXPR:
7314 case BIT_AND_EXPR:
7315 this_optab = and_optab;
7316 goto binop;
7317
7318 case TRUTH_OR_EXPR:
7319 case BIT_IOR_EXPR:
7320 this_optab = ior_optab;
7321 goto binop;
7322
7323 case TRUTH_XOR_EXPR:
7324 case BIT_XOR_EXPR:
7325 this_optab = xor_optab;
7326 goto binop;
7327
7328 case LSHIFT_EXPR:
7329 case RSHIFT_EXPR:
7330 case LROTATE_EXPR:
7331 case RROTATE_EXPR:
7332 preexpand_calls (exp);
7333 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7334 subtarget = 0;
7335 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7336 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7337 unsignedp);
7338
7339 /* Could determine the answer when only additive constants differ. Also,
7340 the addition of one can be handled by changing the condition. */
7341 case LT_EXPR:
7342 case LE_EXPR:
7343 case GT_EXPR:
7344 case GE_EXPR:
7345 case EQ_EXPR:
7346 case NE_EXPR:
7347 preexpand_calls (exp);
7348 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7349 if (temp != 0)
7350 return temp;
7351
7352 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7353 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7354 && original_target
7355 && GET_CODE (original_target) == REG
7356 && (GET_MODE (original_target)
7357 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7358 {
7359 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7360 VOIDmode, 0);
7361
7362 if (temp != original_target)
7363 temp = copy_to_reg (temp);
7364
7365 op1 = gen_label_rtx ();
7366 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
7367 GET_MODE (temp), unsignedp, 0);
7368 emit_jump_insn (gen_beq (op1));
7369 emit_move_insn (temp, const1_rtx);
7370 emit_label (op1);
7371 return temp;
7372 }
7373
7374 /* If no set-flag instruction, must generate a conditional
7375 store into a temporary variable. Drop through
7376 and handle this like && and ||. */
7377
7378 case TRUTH_ANDIF_EXPR:
7379 case TRUTH_ORIF_EXPR:
7380 if (! ignore
7381 && (target == 0 || ! safe_from_p (target, exp, 1)
7382 /* Make sure we don't have a hard reg (such as function's return
7383 value) live across basic blocks, if not optimizing. */
7384 || (!optimize && GET_CODE (target) == REG
7385 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7386 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7387
7388 if (target)
7389 emit_clr_insn (target);
7390
7391 op1 = gen_label_rtx ();
7392 jumpifnot (exp, op1);
7393
7394 if (target)
7395 emit_0_to_1_insn (target);
7396
7397 emit_label (op1);
7398 return ignore ? const0_rtx : target;
7399
7400 case TRUTH_NOT_EXPR:
7401 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7402 /* The parser is careful to generate TRUTH_NOT_EXPR
7403 only with operands that are always zero or one. */
7404 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7405 target, 1, OPTAB_LIB_WIDEN);
7406 if (temp == 0)
7407 abort ();
7408 return temp;
7409
7410 case COMPOUND_EXPR:
7411 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7412 emit_queue ();
7413 return expand_expr (TREE_OPERAND (exp, 1),
7414 (ignore ? const0_rtx : target),
7415 VOIDmode, 0);
7416
7417 case COND_EXPR:
7418 /* If we would have a "singleton" (see below) were it not for a
7419 conversion in each arm, bring that conversion back out. */
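/* Editorial example: `cond ? (int) c1 : (int) c2', with C1 and C2 of the
   same narrower type, is rebuilt as `(int) (cond ? c1 : c2)' so that the
   singleton detection below can see through the conversions.  */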
7420 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7421 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7422 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7423 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7424 {
7425 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7426 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7427
7428 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7429 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7430 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7431 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7432 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7433 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7434 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7435 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7436 return expand_expr (build1 (NOP_EXPR, type,
7437 build (COND_EXPR, TREE_TYPE (true),
7438 TREE_OPERAND (exp, 0),
7439 true, false)),
7440 target, tmode, modifier);
7441 }
7442
7443 {
7444 /* Note that COND_EXPRs whose type is a structure or union
7445 are required to be constructed to contain assignments of
7446 a temporary variable, so that we can evaluate them here
7447 for side effect only. If type is void, we must do likewise. */
7448
7449 /* If an arm of the branch requires a cleanup,
7450 only that cleanup is performed. */
7451
7452 tree singleton = 0;
7453 tree binary_op = 0, unary_op = 0;
7454
7455 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7456 convert it to our mode, if necessary. */
7457 if (integer_onep (TREE_OPERAND (exp, 1))
7458 && integer_zerop (TREE_OPERAND (exp, 2))
7459 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7460 {
7461 if (ignore)
7462 {
7463 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7464 ro_modifier);
7465 return const0_rtx;
7466 }
7467
7468 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7469 if (GET_MODE (op0) == mode)
7470 return op0;
7471
7472 if (target == 0)
7473 target = gen_reg_rtx (mode);
7474 convert_move (target, op0, unsignedp);
7475 return target;
7476 }
7477
7478 /* Check for X ? A + B : A. If we have this, we can copy A to the
7479 output and conditionally add B. Similarly for unary operations.
7480 Don't do this if X has side-effects because those side effects
7481 might affect A or B and the "?" operation is a sequence point in
7482 ANSI. (operand_equal_p tests for side effects.) */
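/* Editorial example: for `cond ? a + b : a', SINGLETON becomes A and
   BINARY_OP the PLUS_EXPR; A is copied to the output unconditionally and
   B is added only on the path where COND holds.  */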
7483
7484 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7485 && operand_equal_p (TREE_OPERAND (exp, 2),
7486 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7487 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7488 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7489 && operand_equal_p (TREE_OPERAND (exp, 1),
7490 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7491 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7492 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7493 && operand_equal_p (TREE_OPERAND (exp, 2),
7494 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7495 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7496 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7497 && operand_equal_p (TREE_OPERAND (exp, 1),
7498 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7499 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7500
7501 /* If we are not to produce a result, we have no target. Otherwise,
7502 if a target was specified, use it; it will not be used as an
7503 intermediate target unless it is safe. If no target, use a
7504 temporary. */
7505
7506 if (ignore)
7507 temp = 0;
7508 else if (original_target
7509 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7510 || (singleton && GET_CODE (original_target) == REG
7511 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7512 && original_target == var_rtx (singleton)))
7513 && GET_MODE (original_target) == mode
7514 #ifdef HAVE_conditional_move
7515 && (! can_conditionally_move_p (mode)
7516 || GET_CODE (original_target) == REG
7517 || TREE_ADDRESSABLE (type))
7518 #endif
7519 && ! (GET_CODE (original_target) == MEM
7520 && MEM_VOLATILE_P (original_target)))
7521 temp = original_target;
7522 else if (TREE_ADDRESSABLE (type))
7523 abort ();
7524 else
7525 temp = assign_temp (type, 0, 0, 1);
7526
7527 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7528 do the test of X as a store-flag operation, do this as
7529 A + ((X != 0) << log C). Similarly for other simple binary
7530 operators. Only do this for C == 1 if BRANCH_COST is low. */
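/* Editorial example: `cond ? a + 4 : a' becomes `a + ((cond != 0) << 2)'
   when COND can be computed as a store-flag, so no branch is emitted
   at all.  */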
7531 if (temp && singleton && binary_op
7532 && (TREE_CODE (binary_op) == PLUS_EXPR
7533 || TREE_CODE (binary_op) == MINUS_EXPR
7534 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7535 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7536 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7537 : integer_onep (TREE_OPERAND (binary_op, 1)))
7538 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7539 {
7540 rtx result;
7541 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7542 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7543 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7544 : xor_optab);
7545
7546 /* If we had X ? A : A + 1, do this as A + (X == 0).
7547
7548 We have to invert the truth value here and then put it
7549 back later if do_store_flag fails. We cannot simply copy
7550 TREE_OPERAND (exp, 0) to another variable and modify that
7551 because invert_truthvalue can modify the tree pointed to
7552 by its argument. */
7553 if (singleton == TREE_OPERAND (exp, 1))
7554 TREE_OPERAND (exp, 0)
7555 = invert_truthvalue (TREE_OPERAND (exp, 0));
7556
7557 result = do_store_flag (TREE_OPERAND (exp, 0),
7558 (safe_from_p (temp, singleton, 1)
7559 ? temp : NULL_RTX),
7560 mode, BRANCH_COST <= 1);
7561
7562 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7563 result = expand_shift (LSHIFT_EXPR, mode, result,
7564 build_int_2 (tree_log2
7565 (TREE_OPERAND
7566 (binary_op, 1)),
7567 0),
7568 (safe_from_p (temp, singleton, 1)
7569 ? temp : NULL_RTX), 0);
7570
7571 if (result)
7572 {
7573 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7574 return expand_binop (mode, boptab, op1, result, temp,
7575 unsignedp, OPTAB_LIB_WIDEN);
7576 }
7577 else if (singleton == TREE_OPERAND (exp, 1))
7578 TREE_OPERAND (exp, 0)
7579 = invert_truthvalue (TREE_OPERAND (exp, 0));
7580 }
7581
7582 do_pending_stack_adjust ();
7583 NO_DEFER_POP;
7584 op0 = gen_label_rtx ();
7585
7586 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7587 {
7588 if (temp != 0)
7589 {
7590 /* If the target conflicts with the other operand of the
7591 binary op, we can't use it. Also, we can't use the target
7592 if it is a hard register, because evaluating the condition
7593 might clobber it. */
7594 if ((binary_op
7595 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7596 || (GET_CODE (temp) == REG
7597 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7598 temp = gen_reg_rtx (mode);
7599 store_expr (singleton, temp, 0);
7600 }
7601 else
7602 expand_expr (singleton,
7603 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7604 if (singleton == TREE_OPERAND (exp, 1))
7605 jumpif (TREE_OPERAND (exp, 0), op0);
7606 else
7607 jumpifnot (TREE_OPERAND (exp, 0), op0);
7608
7609 start_cleanup_deferral ();
7610 if (binary_op && temp == 0)
7611 /* Just touch the other operand. */
7612 expand_expr (TREE_OPERAND (binary_op, 1),
7613 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7614 else if (binary_op)
7615 store_expr (build (TREE_CODE (binary_op), type,
7616 make_tree (type, temp),
7617 TREE_OPERAND (binary_op, 1)),
7618 temp, 0);
7619 else
7620 store_expr (build1 (TREE_CODE (unary_op), type,
7621 make_tree (type, temp)),
7622 temp, 0);
7623 op1 = op0;
7624 }
7625 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7626 comparison operator. If we have one of these cases, set the
7627 output to A, branch on A (cse will merge these two references),
7628 then set the output to FOO. */
7629 else if (temp
7630 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7631 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7632 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7633 TREE_OPERAND (exp, 1), 0)
7634 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7635 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7636 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7637 {
7638 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7639 temp = gen_reg_rtx (mode);
7640 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7641 jumpif (TREE_OPERAND (exp, 0), op0);
7642
7643 start_cleanup_deferral ();
7644 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7645 op1 = op0;
7646 }
7647 else if (temp
7648 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7649 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7650 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7651 TREE_OPERAND (exp, 2), 0)
7652 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7653 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7654 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7655 {
7656 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7657 temp = gen_reg_rtx (mode);
7658 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7659 jumpifnot (TREE_OPERAND (exp, 0), op0);
7660
7661 start_cleanup_deferral ();
7662 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7663 op1 = op0;
7664 }
7665 else
7666 {
7667 op1 = gen_label_rtx ();
7668 jumpifnot (TREE_OPERAND (exp, 0), op0);
7669
7670 start_cleanup_deferral ();
7671 if (temp != 0)
7672 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7673 else
7674 expand_expr (TREE_OPERAND (exp, 1),
7675 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7676 end_cleanup_deferral ();
7677 emit_queue ();
7678 emit_jump_insn (gen_jump (op1));
7679 emit_barrier ();
7680 emit_label (op0);
7681 start_cleanup_deferral ();
7682 if (temp != 0)
7683 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7684 else
7685 expand_expr (TREE_OPERAND (exp, 2),
7686 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7687 }
7688
7689 end_cleanup_deferral ();
7690
7691 emit_queue ();
7692 emit_label (op1);
7693 OK_DEFER_POP;
7694
7695 return temp;
7696 }
7697
7698 case TARGET_EXPR:
7699 {
7700 /* Something needs to be initialized, but we didn't know
7701 where that thing was when building the tree. For example,
7702 it could be the return value of a function, or a parameter
7703 to a function that is laid out on the stack, or a temporary
7704 variable that must be passed by reference.
7705
7706 We guarantee that the expression will either be constructed
7707 or copied into our original target. */
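/* Editorial example: in C++, `S s = f ();' with S returned in memory can
   be represented as a TARGET_EXPR whose slot is `s', letting f construct
   its result directly in s's storage instead of in a temporary that is
   then copied.  */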
7708
7709 tree slot = TREE_OPERAND (exp, 0);
7710 tree cleanups = NULL_TREE;
7711 tree exp1;
7712
7713 if (TREE_CODE (slot) != VAR_DECL)
7714 abort ();
7715
7716 if (! ignore)
7717 target = original_target;
7718
7719 if (target == 0)
7720 {
7721 if (DECL_RTL (slot) != 0)
7722 {
7723 target = DECL_RTL (slot);
7724 /* We have already expanded the slot, so don't do
7725 it again. (mrs) */
7726 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7727 return target;
7728 }
7729 else
7730 {
7731 target = assign_temp (type, 2, 0, 1);
7732 /* All temp slots at this level must not conflict. */
7733 preserve_temp_slots (target);
7734 DECL_RTL (slot) = target;
7735 if (TREE_ADDRESSABLE (slot))
7736 {
7737 TREE_ADDRESSABLE (slot) = 0;
7738 mark_addressable (slot);
7739 }
7740
7741 /* Since SLOT is not known to the called function
7742 to belong to its stack frame, we must build an explicit
7743 cleanup. This case occurs when we must build up a reference
7744 to pass as an argument. In this case,
7745 it is very likely that such a reference need not be
7746 built here. */
7747
7748 if (TREE_OPERAND (exp, 2) == 0)
7749 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
7750 cleanups = TREE_OPERAND (exp, 2);
7751 }
7752 }
7753 else
7754 {
7755 /* This case does occur when expanding a parameter that
7756 needs to be constructed on the stack. The target
7757 is the actual stack address that we want to initialize.
7758 The function we call will perform the cleanup in this case. */
7759
7760 /* If we have already assigned it space, use that space,
7761 not the target that we were passed, since the target
7762 parameter is only a hint. */
7763 if (DECL_RTL (slot) != 0)
7764 {
7765 target = DECL_RTL (slot);
7766 /* We have already expanded the slot, so don't do
7767 it again. (mrs) */
7768 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7769 return target;
7770 }
7771 else
7772 {
7773 DECL_RTL (slot) = target;
7774 /* If we must have an addressable slot, then make sure that
7775 the RTL that we just stored in slot is OK. */
7776 if (TREE_ADDRESSABLE (slot))
7777 {
7778 TREE_ADDRESSABLE (slot) = 0;
7779 mark_addressable (slot);
7780 }
7781 }
7782 }
7783
7784 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7785 /* Mark it as expanded. */
7786 TREE_OPERAND (exp, 1) = NULL_TREE;
7787
7788 TREE_USED (slot) = 1;
7789 store_expr (exp1, target, 0);
7790
7791 expand_decl_cleanup (NULL_TREE, cleanups);
7792
7793 return target;
7794 }
7795
7796 case INIT_EXPR:
7797 {
7798 tree lhs = TREE_OPERAND (exp, 0);
7799 tree rhs = TREE_OPERAND (exp, 1);
7800 tree noncopied_parts = 0;
7801 tree lhs_type = TREE_TYPE (lhs);
7802
7803 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7804 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7805 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7806 TYPE_NONCOPIED_PARTS (lhs_type));
7807 while (noncopied_parts != 0)
7808 {
7809 expand_assignment (TREE_VALUE (noncopied_parts),
7810 TREE_PURPOSE (noncopied_parts), 0, 0);
7811 noncopied_parts = TREE_CHAIN (noncopied_parts);
7812 }
7813 return temp;
7814 }
7815
7816 case MODIFY_EXPR:
7817 {
7818 /* If the lhs is complex, expand calls in the rhs before computing it.
7819 That way we don't compute a pointer and save it over a call.
7820 If the lhs is simple, compute it first so we can give it as a
7821 target if the rhs is just a call. This avoids an extra temp and copy,
7822 and prevents a partial subsumption that makes bad code.
7823 Actually we could treat component_ref's of vars like vars. */
7824
7825 tree lhs = TREE_OPERAND (exp, 0);
7826 tree rhs = TREE_OPERAND (exp, 1);
7827 tree noncopied_parts = 0;
7828 tree lhs_type = TREE_TYPE (lhs);
7829
7830 temp = 0;
7831
7832 if (TREE_CODE (lhs) != VAR_DECL
7833 && TREE_CODE (lhs) != RESULT_DECL
7834 && TREE_CODE (lhs) != PARM_DECL
7835 && ! (TREE_CODE (lhs) == INDIRECT_REF
7836 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7837 preexpand_calls (exp);
7838
7839 /* Check for |= or &= of a bitfield of size one into another bitfield
7840 of size one. In this case (unless we need the result of the
7841 assignment), we can do this more efficiently with a
7842 test followed by an assignment, if necessary.
7843
7844 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7845 things change so we do, this code should be enhanced to
7846 support it. */
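/* Editorial sketch: with 1-bit fields, `s.a |= t.b;' (result ignored)
   becomes a jump past the store when t.b is clear, followed by a store
   of 1 into s.a -- one test and one conditional store instead of a
   read-modify-write of s.a.  */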
7847 if (ignore
7848 && TREE_CODE (lhs) == COMPONENT_REF
7849 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7850 || TREE_CODE (rhs) == BIT_AND_EXPR)
7851 && TREE_OPERAND (rhs, 0) == lhs
7852 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7853 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7854 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7855 {
7856 rtx label = gen_label_rtx ();
7857
7858 do_jump (TREE_OPERAND (rhs, 1),
7859 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7860 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7861 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7862 (TREE_CODE (rhs) == BIT_IOR_EXPR
7863 ? integer_one_node
7864 : integer_zero_node)),
7865 0, 0);
7866 do_pending_stack_adjust ();
7867 emit_label (label);
7868 return const0_rtx;
7869 }
7870
7871 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7872 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7873 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7874 TYPE_NONCOPIED_PARTS (lhs_type));
7875
7876 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7877 while (noncopied_parts != 0)
7878 {
7879 expand_assignment (TREE_PURPOSE (noncopied_parts),
7880 TREE_VALUE (noncopied_parts), 0, 0);
7881 noncopied_parts = TREE_CHAIN (noncopied_parts);
7882 }
7883 return temp;
7884 }
7885
7886 case RETURN_EXPR:
7887 if (!TREE_OPERAND (exp, 0))
7888 expand_null_return ();
7889 else
7890 expand_return (TREE_OPERAND (exp, 0));
7891 return const0_rtx;
7892
7893 case PREINCREMENT_EXPR:
7894 case PREDECREMENT_EXPR:
7895 return expand_increment (exp, 0, ignore);
7896
7897 case POSTINCREMENT_EXPR:
7898 case POSTDECREMENT_EXPR:
7899 /* Faster to treat as pre-increment if result is not used. */
7900 return expand_increment (exp, ! ignore, ignore);
7901
7902 case ADDR_EXPR:
7903 /* If nonzero, TEMP will be set to the address of something that might
7904 be a MEM corresponding to a stack slot. */
7905 temp = 0;
7906
7907 /* Are we taking the address of a nested function? */
7908 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7909 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7910 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
7911 && ! TREE_STATIC (exp))
7912 {
7913 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7914 op0 = force_operand (op0, target);
7915 }
7916 /* If we are taking the address of something erroneous, just
7917 return a zero. */
7918 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7919 return const0_rtx;
7920 else
7921 {
7922 /* We make sure to pass const0_rtx down if we came in with
7923 ignore set, to avoid running the cleanups twice. */
7924 op0 = expand_expr (TREE_OPERAND (exp, 0),
7925 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7926 (modifier == EXPAND_INITIALIZER
7927 ? modifier : EXPAND_CONST_ADDRESS));
7928
7929 /* If we are going to ignore the result, OP0 will have been set
7930 to const0_rtx, so just return it. Don't get confused and
7931 think we are taking the address of the constant. */
7932 if (ignore)
7933 return op0;
7934
7935 op0 = protect_from_queue (op0, 0);
7936
7937 /* We would like the object in memory. If it is a constant,
7938 we can have it be statically allocated into memory. For
7939 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7940 memory and store the value into it. */
7941
7942 if (CONSTANT_P (op0))
7943 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7944 op0);
7945 else if (GET_CODE (op0) == MEM)
7946 {
7947 mark_temp_addr_taken (op0);
7948 temp = XEXP (op0, 0);
7949 }
7950
7951 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7952 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7953 {
7954 /* If this object is in a register, it must not
7955 be BLKmode. */
7956 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7957 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7958
7959 mark_temp_addr_taken (memloc);
7960 emit_move_insn (memloc, op0);
7961 op0 = memloc;
7962 }
7963
7964 if (GET_CODE (op0) != MEM)
7965 abort ();
7966
7967 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7968 {
7969 temp = XEXP (op0, 0);
7970 #ifdef POINTERS_EXTEND_UNSIGNED
7971 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7972 && mode == ptr_mode)
7973 temp = convert_memory_address (ptr_mode, temp);
7974 #endif
7975 return temp;
7976 }
7977
7978 op0 = force_operand (XEXP (op0, 0), target);
7979 }
7980
7981 if (flag_force_addr && GET_CODE (op0) != REG)
7982 op0 = force_reg (Pmode, op0);
7983
7984 if (GET_CODE (op0) == REG
7985 && ! REG_USERVAR_P (op0))
7986 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7987
7988 /* If we might have had a temp slot, add an equivalent address
7989 for it. */
7990 if (temp != 0)
7991 update_temp_slot_address (temp, op0);
7992
7993 #ifdef POINTERS_EXTEND_UNSIGNED
7994 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7995 && mode == ptr_mode)
7996 op0 = convert_memory_address (ptr_mode, op0);
7997 #endif
7998
7999 return op0;
8000
8001 case ENTRY_VALUE_EXPR:
8002 abort ();
8003
8004 /* COMPLEX type for Extended Pascal & Fortran */
8005 case COMPLEX_EXPR:
8006 {
8007 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8008 rtx insns;
8009
8010 /* Get the rtx for the operands. */
8011 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8012 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8013
8014 if (! target)
8015 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8016
8017 start_sequence ();
8018
8019 /* Move the real (op0) and imaginary (op1) parts to their location. */
8020 emit_move_insn (gen_realpart (mode, target), op0);
8021 emit_move_insn (gen_imagpart (mode, target), op1);
8022
8023 insns = get_insns ();
8024 end_sequence ();
8025
8026 /* Complex construction should appear as a single unit. */
8027 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8028 each with a separate pseudo as destination.
8029 It's not correct for flow to treat them as a unit. */
8030 if (GET_CODE (target) != CONCAT)
8031 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8032 else
8033 emit_insns (insns);
8034
8035 return target;
8036 }
8037
8038 case REALPART_EXPR:
8039 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8040 return gen_realpart (mode, op0);
8041
8042 case IMAGPART_EXPR:
8043 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8044 return gen_imagpart (mode, op0);
8045
8046 case CONJ_EXPR:
8047 {
8048 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8049 rtx imag_t;
8050 rtx insns;
8051
8052 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8053
8054 if (! target)
8055 target = gen_reg_rtx (mode);
8056
8057 start_sequence ();
8058
8059 /* Store the realpart and the negated imagpart to target. */
8060 emit_move_insn (gen_realpart (partmode, target),
8061 gen_realpart (partmode, op0));
8062
8063 imag_t = gen_imagpart (partmode, target);
8064 temp = expand_unop (partmode, neg_optab,
8065 gen_imagpart (partmode, op0), imag_t, 0);
8066 if (temp != imag_t)
8067 emit_move_insn (imag_t, temp);
8068
8069 insns = get_insns ();
8070 end_sequence ();
8071
8072 /* Conjugate should appear as a single unit.
8073 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8074 each with a separate pseudo as destination.
8075 It's not correct for flow to treat them as a unit. */
8076 if (GET_CODE (target) != CONCAT)
8077 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8078 else
8079 emit_insns (insns);
8080
8081 return target;
8082 }
8083
8084 case TRY_CATCH_EXPR:
8085 {
8086 tree handler = TREE_OPERAND (exp, 1);
8087
8088 expand_eh_region_start ();
8089
8090 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8091
8092 expand_eh_region_end (handler);
8093
8094 return op0;
8095 }
8096
8097 case POPDCC_EXPR:
8098 {
8099 rtx dcc = get_dynamic_cleanup_chain ();
8100 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8101 return const0_rtx;
8102 }
8103
8104 case POPDHC_EXPR:
8105 {
8106 rtx dhc = get_dynamic_handler_chain ();
8107 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8108 return const0_rtx;
8109 }
8110
8111 case ERROR_MARK:
8112 op0 = CONST0_RTX (tmode);
8113 if (op0 != 0)
8114 return op0;
8115 return const0_rtx;
8116
8117 default:
8118 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8119 }
8120
8121 /* Here to do an ordinary binary operator, generating an instruction
8122 from the optab already placed in `this_optab'. */
8123 binop:
8124 preexpand_calls (exp);
8125 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8126 subtarget = 0;
8127 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8128 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8129 binop2:
8130 temp = expand_binop (mode, this_optab, op0, op1, target,
8131 unsignedp, OPTAB_LIB_WIDEN);
8132 if (temp == 0)
8133 abort ();
8134 return temp;
8135 }
8136
8137
8138 \f
8139 /* Return the alignment in bits of EXP, a pointer-valued expression.
8140 But don't return more than MAX_ALIGN no matter what.
8141 The alignment returned is, by default, the alignment of the thing that
8142 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
8143
8144 Otherwise, look at the expression to see if we can do better, i.e., if the
8145 expression is actually pointing at an object whose alignment is tighter. */
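/* Editorial example: for `(char *) &x + 2' with `x' 64-bit aligned, the
   PLUS_EXPR case narrows MAX_ALIGN to 16 (the byte offset is only
   2-byte aligned), and the ADDR_EXPR case then returns MIN (64, 16) = 16.  */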
8146
8147 static int
8148 get_pointer_alignment (exp, max_align)
8149 tree exp;
8150 unsigned max_align;
8151 {
8152 unsigned align, inner;
8153
8154 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
8155 return 0;
8156
8157 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8158 align = MIN (align, max_align);
8159
8160 while (1)
8161 {
8162 switch (TREE_CODE (exp))
8163 {
8164 case NOP_EXPR:
8165 case CONVERT_EXPR:
8166 case NON_LVALUE_EXPR:
8167 exp = TREE_OPERAND (exp, 0);
8168 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
8169 return align;
8170 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8171 align = MIN (inner, max_align);
8172 break;
8173
8174 case PLUS_EXPR:
8175 /* If sum of pointer + int, restrict our maximum alignment to that
8176 imposed by the integer. If not, we can't do any better than
8177 ALIGN. */
8178 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
8179 return align;
8180
8181 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
8182 & (max_align - 1))
8183 != 0)
8184 max_align >>= 1;
8185
8186 exp = TREE_OPERAND (exp, 0);
8187 break;
8188
8189 case ADDR_EXPR:
8190 /* See what we are pointing at and look at its alignment. */
8191 exp = TREE_OPERAND (exp, 0);
8192 if (TREE_CODE (exp) == FUNCTION_DECL)
8193 align = FUNCTION_BOUNDARY;
8194 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
8195 align = DECL_ALIGN (exp);
8196 #ifdef CONSTANT_ALIGNMENT
8197 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
8198 align = CONSTANT_ALIGNMENT (exp, align);
8199 #endif
8200 return MIN (align, max_align);
8201
8202 default:
8203 return align;
8204 }
8205 }
8206 }
8207 \f
8208 /* Return the tree node and offset if a given argument corresponds to
8209 a string constant. */
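/* Editorial example: for the argument `"hello" + n' this returns the
   STRING_CST for "hello" and sets *PTR_OFFSET to `n'; for a bare
   `"hello"' the offset is integer_zero_node.  */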
8210
8211 static tree
8212 string_constant (arg, ptr_offset)
8213 tree arg;
8214 tree *ptr_offset;
8215 {
8216 STRIP_NOPS (arg);
8217
8218 if (TREE_CODE (arg) == ADDR_EXPR
8219 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8220 {
8221 *ptr_offset = integer_zero_node;
8222 return TREE_OPERAND (arg, 0);
8223 }
8224 else if (TREE_CODE (arg) == PLUS_EXPR)
8225 {
8226 tree arg0 = TREE_OPERAND (arg, 0);
8227 tree arg1 = TREE_OPERAND (arg, 1);
8228
8229 STRIP_NOPS (arg0);
8230 STRIP_NOPS (arg1);
8231
8232 if (TREE_CODE (arg0) == ADDR_EXPR
8233 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8234 {
8235 *ptr_offset = arg1;
8236 return TREE_OPERAND (arg0, 0);
8237 }
8238 else if (TREE_CODE (arg1) == ADDR_EXPR
8239 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8240 {
8241 *ptr_offset = arg0;
8242 return TREE_OPERAND (arg1, 0);
8243 }
8244 }
8245
8246 return 0;
8247 }
8248
8249 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
8250 answer, because the string could contain a zero byte in the middle;
8251 TREE_STRING_LENGTH is the size of the character array, not the string.
8252 
8253 Unfortunately, string_constant can't access the values of const char
8254 arrays with initializers, so neither can we here. */
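/* Editorial example: c_strlen of `"hello" + 2' is size_int (3).  For a
   non-constant offset into "foo\0bar" it returns 0, since the distance
   to the terminating null depends on which side of the embedded null
   the offset lands.  */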
8255
8256 static tree
8257 c_strlen (src)
8258 tree src;
8259 {
8260 tree offset_node;
8261 int offset, max;
8262 char *ptr;
8263
8264 src = string_constant (src, &offset_node);
8265 if (src == 0)
8266 return 0;
8267 max = TREE_STRING_LENGTH (src);
8268 ptr = TREE_STRING_POINTER (src);
8269 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
8270 {
8271 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
8272 compute the offset to the following null if we don't know where to
8273 start searching for it. */
8274 int i;
8275 for (i = 0; i < max; i++)
8276 if (ptr[i] == 0)
8277 return 0;
8278 /* We don't know the starting offset, but we do know that the string
8279 has no internal zero bytes. We can assume that the offset falls
8280 within the bounds of the string; otherwise, the programmer deserves
8281 what he gets. Subtract the offset from the length of the string,
8282 and return that. */
8283 /* This would perhaps not be valid if we were dealing with named
8284 arrays in addition to literal string constants. */
8285 return size_binop (MINUS_EXPR, size_int (max), offset_node);
8286 }
8287
8288 /* We have a known offset into the string. Start searching there for
8289 a null character. */
8290 if (offset_node == 0)
8291 offset = 0;
8292 else
8293 {
8294 /* Did we get a long long offset? If so, punt. */
8295 if (TREE_INT_CST_HIGH (offset_node) != 0)
8296 return 0;
8297 offset = TREE_INT_CST_LOW (offset_node);
8298 }
8299 /* If the offset is known to be out of bounds, warn, and call strlen at
8300 runtime. */
8301 if (offset < 0 || offset > max)
8302 {
8303 warning ("offset outside bounds of constant string");
8304 return 0;
8305 }
8306 /* Use strlen to search for the first zero byte. Since any strings
8307 constructed with build_string will have nulls appended, we win even
8308 if we get handed something like (char[4])"abcd".
8309
8310 Since OFFSET is our starting index into the string, no further
8311 calculation is needed. */
8312 return size_int (strlen (ptr + offset));
8313 }
8314
8315 rtx
8316 expand_builtin_return_addr (fndecl_code, count, tem)
8317 enum built_in_function fndecl_code;
8318 int count;
8319 rtx tem;
8320 {
8321 int i;
8322
8323 /* Some machines need special handling before we can access
8324 arbitrary frames. For example, on the sparc, we must first flush
8325 all register windows to the stack. */
8326 #ifdef SETUP_FRAME_ADDRESSES
8327 if (count > 0)
8328 SETUP_FRAME_ADDRESSES ();
8329 #endif
8330
8331 /* On the sparc, the return address is not in the frame, it is in a
8332 register. There is no way to access it off of the current frame
8333 pointer, but it can be accessed off the previous frame pointer by
8334 reading the value from the register window save area. */
8335 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
8336 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
8337 count--;
8338 #endif
8339
8340 /* Scan back COUNT frames to the specified frame. */
8341 for (i = 0; i < count; i++)
8342 {
8343 /* Assume the dynamic chain pointer is in the word that the
8344 frame address points to, unless otherwise specified. */
8345 #ifdef DYNAMIC_CHAIN_ADDRESS
8346 tem = DYNAMIC_CHAIN_ADDRESS (tem);
8347 #endif
8348 tem = memory_address (Pmode, tem);
8349 tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
8350 }
8351
8352 /* For __builtin_frame_address, return what we've got. */
8353 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
8354 return tem;
8355
8356 /* For __builtin_return_address, get the return address from that
8357 frame. */
8358 #ifdef RETURN_ADDR_RTX
8359 tem = RETURN_ADDR_RTX (count, tem);
8360 #else
8361 tem = memory_address (Pmode,
8362 plus_constant (tem, GET_MODE_SIZE (Pmode)));
8363 tem = gen_rtx_MEM (Pmode, tem);
8364 #endif
8365 return tem;
8366 }
8367
8368 /* __builtin_setjmp is passed a pointer to an array of five words (not
8369 all will be used on all machines). It operates similarly to the C
8370 library function of the same name, but is more efficient. Much of
8371 the code below (and for longjmp) is copied from the handling of
8372 non-local gotos.
8373
8374 NOTE: This is intended for use by GNAT and the exception handling
8375 scheme in the compiler and will only work in the method used by
8376 them. */
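/* Editorial sketch of the buffer layout assumed by the code below
   (Pmode-sized words):

     word 0       frame pointer (BUILTIN_SETJMP_FRAME_VALUE)
     word 1       address of the receiver label LAB1
     words 2..4   machine-dependent stack save area (sa_mode)

   expand_builtin_longjmp reads the same slots back.  */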
8377
8378 rtx
8379 expand_builtin_setjmp (buf_addr, target, first_label, next_label)
8380 rtx buf_addr;
8381 rtx target;
8382 rtx first_label, next_label;
8383 {
8384 rtx lab1 = gen_label_rtx ();
8385 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
8386 enum machine_mode value_mode;
8387 rtx stack_save;
8388
8389 value_mode = TYPE_MODE (integer_type_node);
8390
8391 #ifdef POINTERS_EXTEND_UNSIGNED
8392 buf_addr = convert_memory_address (Pmode, buf_addr);
8393 #endif
8394
8395 buf_addr = force_reg (Pmode, buf_addr);
8396
8397 if (target == 0 || GET_CODE (target) != REG
8398 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8399 target = gen_reg_rtx (value_mode);
8400
8401 emit_queue ();
8402
8403 /* We store the frame pointer and the address of lab1 in the buffer
8404 and use the rest of it for the stack save area, which is
8405 machine-dependent. */
8406
8407 #ifndef BUILTIN_SETJMP_FRAME_VALUE
8408 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
8409 #endif
8410
8411 emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
8412 BUILTIN_SETJMP_FRAME_VALUE);
8413 emit_move_insn (validize_mem
8414 (gen_rtx_MEM (Pmode,
8415 plus_constant (buf_addr,
8416 GET_MODE_SIZE (Pmode)))),
8417 gen_rtx_LABEL_REF (Pmode, lab1));
8418
8419 stack_save = gen_rtx_MEM (sa_mode,
8420 plus_constant (buf_addr,
8421 2 * GET_MODE_SIZE (Pmode)));
8422 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8423
8424 /* If there is further processing to do, do it. */
8425 #ifdef HAVE_builtin_setjmp_setup
8426 if (HAVE_builtin_setjmp_setup)
8427 emit_insn (gen_builtin_setjmp_setup (buf_addr));
8428 #endif
8429
8430 /* Set TARGET to zero and branch to the first-time-through label. */
8431 emit_move_insn (target, const0_rtx);
8432 emit_jump_insn (gen_jump (first_label));
8433 emit_barrier ();
8434 emit_label (lab1);
8435
8436 /* Tell flow about the strange goings on. */
8437 current_function_has_nonlocal_label = 1;
8438
8439 /* The FP is clobbered when we get here, so we have to make sure
8440 it's marked as used by this function. */
8441 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8442
8443 /* Mark the static chain as clobbered here so life information
8444 doesn't get messed up for it. */
8445 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
8446
8447 /* Now put in the code to restore the frame pointer, and argument
8448 pointer, if needed. The code below is from expand_end_bindings
8449 in stmt.c; see detailed documentation there. */
8450 #ifdef HAVE_nonlocal_goto
8451 if (! HAVE_nonlocal_goto)
8452 #endif
8453 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8454
8455 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8456 if (fixed_regs[ARG_POINTER_REGNUM])
8457 {
8458 #ifdef ELIMINABLE_REGS
8459 size_t i;
8460 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8461
8462 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8463 if (elim_regs[i].from == ARG_POINTER_REGNUM
8464 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8465 break;
8466
8467 if (i == sizeof elim_regs / sizeof elim_regs [0])
8468 #endif
8469 {
8470 /* Now restore our arg pointer from the address at which it
8471 was saved in our stack frame.
8472 If space hasn't been allocated for it yet, allocate
8473 some now. */
8474 if (arg_pointer_save_area == 0)
8475 arg_pointer_save_area
8476 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8477 emit_move_insn (virtual_incoming_args_rtx,
8478 copy_to_reg (arg_pointer_save_area));
8479 }
8480 }
8481 #endif
8482
8483 #ifdef HAVE_builtin_setjmp_receiver
8484 if (HAVE_builtin_setjmp_receiver)
8485 emit_insn (gen_builtin_setjmp_receiver (lab1));
8486 else
8487 #endif
8488 #ifdef HAVE_nonlocal_goto_receiver
8489 if (HAVE_nonlocal_goto_receiver)
8490 emit_insn (gen_nonlocal_goto_receiver ());
8491 else
8492 #endif
8493 {
8494 ; /* Nothing */
8495 }
8496
8497 /* Set TARGET, and branch to the next-time-through label. */
8498 emit_move_insn (target, const1_rtx);
8499 emit_jump_insn (gen_jump (next_label));
8500 emit_barrier ();
8501
8502 return target;
8503 }
8504
8505 void
8506 expand_builtin_longjmp (buf_addr, value)
8507 rtx buf_addr, value;
8508 {
8509 rtx fp, lab, stack;
8510 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
8511
8512 #ifdef POINTERS_EXTEND_UNSIGNED
8513 buf_addr = convert_memory_address (Pmode, buf_addr);
8514 #endif
8515 buf_addr = force_reg (Pmode, buf_addr);
8516
8517 /* We used to store value in static_chain_rtx, but that fails if pointers
8518 are smaller than integers. We instead require that the user must pass
8519 a second argument of 1, because that is what builtin_setjmp will
8520 return. This also makes EH slightly more efficient, since we are no
8521 longer copying around a value that we don't care about. */
8522 if (value != const1_rtx)
8523 abort ();
8524
8525 #ifdef HAVE_builtin_longjmp
8526 if (HAVE_builtin_longjmp)
8527 emit_insn (gen_builtin_longjmp (buf_addr));
8528 else
8529 #endif
8530 {
8531 fp = gen_rtx_MEM (Pmode, buf_addr);
8532 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
8533 GET_MODE_SIZE (Pmode)));
8534
8535 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
8536 2 * GET_MODE_SIZE (Pmode)));
8537
8538 /* Pick up FP, label, and SP from the block and jump. This code is
8539 from expand_goto in stmt.c; see there for detailed comments. */
8540 #ifdef HAVE_nonlocal_goto
8541 if (HAVE_nonlocal_goto)
8542 /* We have to pass a value to the nonlocal_goto pattern that will
8543 get copied into the static_chain pointer, but it does not matter
8544 what that value is, because builtin_setjmp does not use it. */
8545 emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
8546 else
8547 #endif
8548 {
8549 lab = copy_to_reg (lab);
8550
8551 emit_move_insn (hard_frame_pointer_rtx, fp);
8552 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
8553
8554 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8555 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
8556 emit_indirect_jump (lab);
8557 }
8558 }
8559 }
8560
8561 static rtx
8562 get_memory_rtx (exp)
8563 tree exp;
8564 {
8565 rtx mem;
8566 int is_aggregate;
8567
8568 mem = gen_rtx_MEM (BLKmode,
8569 memory_address (BLKmode,
8570 expand_expr (exp, NULL_RTX,
8571 ptr_mode, EXPAND_SUM)));
8572
8573 RTX_UNCHANGING_P (mem) = TREE_READONLY (exp);
8574
8575 /* Figure out the type of the object pointed to. Set MEM_IN_STRUCT_P
8576 if the value is the address of a structure or if the expression is
8577 cast to a pointer to structure type. */
8578 is_aggregate = 0;
8579
8580 while (TREE_CODE (exp) == NOP_EXPR)
8581 {
8582 tree cast_type = TREE_TYPE (exp);
8583 if (TREE_CODE (cast_type) == POINTER_TYPE
8584 && AGGREGATE_TYPE_P (TREE_TYPE (cast_type)))
8585 {
8586 is_aggregate = 1;
8587 break;
8588 }
8589 exp = TREE_OPERAND (exp, 0);
8590 }
8591
8592 if (is_aggregate == 0)
8593 {
8594 tree type;
8595
8596 if (TREE_CODE (exp) == ADDR_EXPR)
8597 /* If this is the address of an object, check whether the
8598 object is an array. */
8599 type = TREE_TYPE (TREE_OPERAND (exp, 0));
8600 else
8601 type = TREE_TYPE (TREE_TYPE (exp));
8602 is_aggregate = AGGREGATE_TYPE_P (type);
8603 }
8604
8605 MEM_SET_IN_STRUCT_P (mem, is_aggregate);
8606 return mem;
8607 }
8608
8609 \f
8610 /* Expand an expression EXP that calls a built-in function,
8611 with result going to TARGET if that's convenient
8612 (and in mode MODE if that's convenient).
8613 SUBTARGET may be used as the target for computing one of EXP's operands.
8614 IGNORE is nonzero if the value is to be ignored. */
8615
8616 #define CALLED_AS_BUILT_IN(NODE) \
8617 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
8618
8619 static rtx
8620 expand_builtin (exp, target, subtarget, mode, ignore)
8621 tree exp;
8622 rtx target;
8623 rtx subtarget;
8624 enum machine_mode mode;
8625 int ignore;
8626 {
8627 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8628 tree arglist = TREE_OPERAND (exp, 1);
8629 rtx op0;
8630 rtx lab1, insns;
8631 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
8632 optab builtin_optab;
8633
8634 switch (DECL_FUNCTION_CODE (fndecl))
8635 {
8636 case BUILT_IN_ABS:
8637 case BUILT_IN_LABS:
8638 case BUILT_IN_FABS:
8639 /* build_function_call changes these into ABS_EXPR. */
8640 abort ();
8641
8642 case BUILT_IN_SIN:
8643 case BUILT_IN_COS:
8644 /* Treat these like sqrt, but only if the user asks for them. */
8645 if (! flag_fast_math)
8646 break;
8647 case BUILT_IN_FSQRT:
8648 /* If not optimizing, call the library function. */
8649 if (! optimize)
8650 break;
8651
8652 if (arglist == 0
8653 /* The arg could have the wrong type if the user redeclared this fcn incorrectly. */
8654 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
8655 break;
8656
8657 /* Stabilize and compute the argument. */
8658 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8659 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8660 {
8661 exp = copy_node (exp);
8662 arglist = copy_node (arglist);
8663 TREE_OPERAND (exp, 1) = arglist;
8664 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8665 }
8666 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8667
8668 /* Make a suitable register to place result in. */
8669 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8670
8671 emit_queue ();
8672 start_sequence ();
8673
8674 switch (DECL_FUNCTION_CODE (fndecl))
8675 {
8676 case BUILT_IN_SIN:
8677 builtin_optab = sin_optab; break;
8678 case BUILT_IN_COS:
8679 builtin_optab = cos_optab; break;
8680 case BUILT_IN_FSQRT:
8681 builtin_optab = sqrt_optab; break;
8682 default:
8683 abort ();
8684 }
8685
8686 /* Compute into TARGET.
8687 Set TARGET to wherever the result comes back. */
8688 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8689 builtin_optab, op0, target, 0);
8690
8691 /* If we were unable to expand via the builtin, stop the
8692 sequence (without outputting the insns) and break, causing
8693 a call to the library function. */
8694 if (target == 0)
8695 {
8696 end_sequence ();
8697 break;
8698 }
8699
8700 /* Check the results by default. But if flag_fast_math is turned on,
8701 then assume sqrt will always be called with valid arguments. */
8702
8703 if (! flag_fast_math)
8704 {
8705 /* Don't define the builtin FP instructions
8706 if your machine is not IEEE. */
8707 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8708 abort ();
8709
8710 lab1 = gen_label_rtx ();
8711
8712 /* Test the result; if it is NaN, set errno=EDOM because
8713 the argument was not in the domain. */
8714 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
8715 emit_jump_insn (gen_beq (lab1));
8716
8717 #ifdef TARGET_EDOM
8718 {
8719 #ifdef GEN_ERRNO_RTX
8720 rtx errno_rtx = GEN_ERRNO_RTX;
8721 #else
8722 rtx errno_rtx
8723 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
8724 #endif
8725
8726 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8727 }
8728 #else
8729 /* We can't set errno=EDOM directly; let the library call do it.
8730 Pop the arguments right away in case the call gets deleted. */
8731 NO_DEFER_POP;
8732 expand_call (exp, target, 0);
8733 OK_DEFER_POP;
8734 #endif
8735
8736 emit_label (lab1);
8737 }
8738
8739 /* Output the entire sequence. */
8740 insns = get_insns ();
8741 end_sequence ();
8742 emit_insns (insns);
8743
8744 return target;
8745
8746 case BUILT_IN_FMOD:
8747 break;
8748
8749 /* __builtin_apply_args returns a block of memory allocated on
8750 the stack into which are stored the arg pointer, structure
8751 value address, static chain, and all the registers that might
8752 possibly be used in performing a function call. The code is
8753 moved to the start of the function so the incoming values are
8754 saved. */
8755 case BUILT_IN_APPLY_ARGS:
8756 /* Don't do __builtin_apply_args more than once in a function.
8757 Save the result of the first call and reuse it. */
8758 if (apply_args_value != 0)
8759 return apply_args_value;
8760 {
8761 /* When this function is called, it means that registers must be
8762 saved on entry to this function. So we migrate the
8763 call to the first insn of this function. */
8764 rtx temp;
8765 rtx seq;
8766
8767 start_sequence ();
8768 temp = expand_builtin_apply_args ();
8769 seq = get_insns ();
8770 end_sequence ();
8771
8772 apply_args_value = temp;
8773
8774 /* Put the sequence after the NOTE that starts the function.
8775 If this is inside a SEQUENCE, make the outer-level insn
8776 chain current, so the code is placed at the start of the
8777 function. */
8778 push_topmost_sequence ();
8779 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8780 pop_topmost_sequence ();
8781 return temp;
8782 }
8783
8784 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8785 FUNCTION with a copy of the parameters described by
8786 ARGUMENTS, and ARGSIZE. It returns a block of memory
8787 allocated on the stack into which are stored all the registers
8788 that might possibly be used for returning the result of a
8789 function. ARGUMENTS is the value returned by
8790 __builtin_apply_args. ARGSIZE is the number of bytes of
8791 arguments that must be copied. ??? How should this value be
8792 computed? We'll also need a safe worst case value for varargs
8793 functions. */
8794 case BUILT_IN_APPLY:
8795 if (arglist == 0
8796 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8797 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
8798 || TREE_CHAIN (arglist) == 0
8799 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8800 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8801 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8802 return const0_rtx;
8803 else
8804 {
8805 int i;
8806 tree t;
8807 rtx ops[3];
8808
8809 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8810 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8811
8812 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8813 }
8814
8815 /* __builtin_return (RESULT) causes the function to return the
8816 value described by RESULT. RESULT is address of the block of
8817 memory returned by __builtin_apply. */
8818 case BUILT_IN_RETURN:
8819 if (arglist
8820 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8821 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8822 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8823 NULL_RTX, VOIDmode, 0));
8824 return const0_rtx;
8825
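/* Illustrative sketch (not part of the compiler): the user-level idiom that
   __builtin_apply_args, __builtin_apply and __builtin_return implement
   together -- forwarding the current arguments to another function and
   returning its result untyped.  `target' and `wrapper' are hypothetical,
   and 64 is only an assumed bound on the argument block size.  */
#if 0
extern double target (int, double);

double
wrapper (int i, double d)
{
  void *args = __builtin_apply_args ();
  void *result = __builtin_apply ((void (*) ()) target, args, 64);
  __builtin_return (result);
}
#endif
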
8826 case BUILT_IN_SAVEREGS:
8827 /* Don't do __builtin_saveregs more than once in a function.
8828 Save the result of the first call and reuse it. */
8829 if (saveregs_value != 0)
8830 return saveregs_value;
8831 {
8832 /* When this function is called, it means that registers must be
8833 saved on entry to this function. So we migrate the
8834 call to the first insn of this function. */
8835 rtx temp;
8836 rtx seq;
8837
8838 /* Now really call the function. `expand_call' does not call
8839 expand_builtin, so there is no danger of infinite recursion here. */
8840 start_sequence ();
8841
8842 #ifdef EXPAND_BUILTIN_SAVEREGS
8843 /* Do whatever the machine needs done in this case. */
8844 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8845 #else
8846 /* The register where the function returns its value
8847 is likely to have something else in it, such as an argument.
8848 So preserve that register around the call. */
8849
8850 if (value_mode != VOIDmode)
8851 {
8852 rtx valreg = hard_libcall_value (value_mode);
8853 rtx saved_valreg = gen_reg_rtx (value_mode);
8854
8855 emit_move_insn (saved_valreg, valreg);
8856 temp = expand_call (exp, target, ignore);
8857 emit_move_insn (valreg, saved_valreg);
8858 }
8859 else
8860 /* Generate the call, putting the value in a pseudo. */
8861 temp = expand_call (exp, target, ignore);
8862 #endif
8863
8864 seq = get_insns ();
8865 end_sequence ();
8866
8867 saveregs_value = temp;
8868
8869 /* Put the sequence after the NOTE that starts the function.
8870 If this is inside a SEQUENCE, make the outer-level insn
8871 chain current, so the code is placed at the start of the
8872 function. */
8873 push_topmost_sequence ();
8874 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8875 pop_topmost_sequence ();
8876 return temp;
8877 }
8878
8879 /* __builtin_args_info (N) returns word N of the arg space info
8880 for the current function. The number and meanings of the words
8881 are controlled by the definition of CUMULATIVE_ARGS. */
8882 case BUILT_IN_ARGS_INFO:
8883 {
8884 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8885 int *word_ptr = (int *) &current_function_args_info;
8886 #if 0
8887 /* These are used by the code below that is if 0'ed away */
8888 int i;
8889 tree type, elts, result;
8890 #endif
8891
8892 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8893 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8894 __FILE__, __LINE__);
8895
8896 if (arglist != 0)
8897 {
8898 tree arg = TREE_VALUE (arglist);
8899 if (TREE_CODE (arg) != INTEGER_CST)
8900 error ("argument of `__builtin_args_info' must be constant");
8901 else
8902 {
8903 int wordnum = TREE_INT_CST_LOW (arg);
8904
8905 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8906 error ("argument of `__builtin_args_info' out of range");
8907 else
8908 return GEN_INT (word_ptr[wordnum]);
8909 }
8910 }
8911 else
8912 error ("missing argument in `__builtin_args_info'");
8913
8914 return const0_rtx;
8915
8916 #if 0
8917 for (i = 0; i < nwords; i++)
8918 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
8919
8920 type = build_array_type (integer_type_node,
8921 build_index_type (build_int_2 (nwords, 0)));
8922 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8923 TREE_CONSTANT (result) = 1;
8924 TREE_STATIC (result) = 1;
8925 result = build (INDIRECT_REF, build_pointer_type (type), result);
8926 TREE_CONSTANT (result) = 1;
8927 return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
8928 #endif
8929 }
8930
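/* Illustrative sketch (not part of the compiler): the argument of
   __builtin_args_info must be a literal word index into the function's
   CUMULATIVE_ARGS block, as checked above.  `g' is hypothetical.  */
#if 0
int
g (int a, int b)
{
  return __builtin_args_info (0);	/* word 0 of the arg space info */
}
#endif
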
8931 /* Return the address of the first anonymous stack arg. */
8932 case BUILT_IN_NEXT_ARG:
8933 {
8934 tree fntype = TREE_TYPE (current_function_decl);
8935
8936 if ((TYPE_ARG_TYPES (fntype) == 0
8937 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8938 == void_type_node))
8939 && ! current_function_varargs)
8940 {
8941 error ("`va_start' used in function with fixed args");
8942 return const0_rtx;
8943 }
8944
8945 if (arglist)
8946 {
8947 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8948 tree arg = TREE_VALUE (arglist);
8949
8950 /* Strip off all nops for the sake of the comparison. This
8951 is not quite the same as STRIP_NOPS. It does more.
8952 We must also strip off INDIRECT_REF for C++ reference
8953 parameters. */
8954 while (TREE_CODE (arg) == NOP_EXPR
8955 || TREE_CODE (arg) == CONVERT_EXPR
8956 || TREE_CODE (arg) == NON_LVALUE_EXPR
8957 || TREE_CODE (arg) == INDIRECT_REF)
8958 arg = TREE_OPERAND (arg, 0);
8959 if (arg != last_parm)
8960 warning ("second parameter of `va_start' not last named argument");
8961 }
8962 else if (! current_function_varargs)
8963 /* Evidently an out of date version of <stdarg.h>; can't validate
8964 va_start's second argument, but can still work as intended. */
8965 warning ("`__builtin_next_arg' called without an argument");
8966 }
8967
8968 return expand_binop (Pmode, add_optab,
8969 current_function_internal_arg_pointer,
8970 current_function_arg_offset_rtx,
8971 NULL_RTX, 0, OPTAB_LIB_WIDEN);
8972
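/* Illustrative sketch (not part of the compiler): va_start in <stdarg.h>
   expands to __builtin_next_arg, whose argument must be the last named
   parameter -- the comparison against `last_parm' above warns otherwise.
   `sum' is hypothetical.  */
#if 0
#include <stdarg.h>

int
sum (int n, ...)
{
  va_list ap;
  int i, s = 0;

  va_start (ap, n);		/* uses __builtin_next_arg (n) */
  for (i = 0; i < n; i++)
    s += va_arg (ap, int);
  va_end (ap);
  return s;
}
#endif
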
8973 case BUILT_IN_CLASSIFY_TYPE:
8974 if (arglist != 0)
8975 {
8976 tree type = TREE_TYPE (TREE_VALUE (arglist));
8977 enum tree_code code = TREE_CODE (type);
8978 if (code == VOID_TYPE)
8979 return GEN_INT (void_type_class);
8980 if (code == INTEGER_TYPE)
8981 return GEN_INT (integer_type_class);
8982 if (code == CHAR_TYPE)
8983 return GEN_INT (char_type_class);
8984 if (code == ENUMERAL_TYPE)
8985 return GEN_INT (enumeral_type_class);
8986 if (code == BOOLEAN_TYPE)
8987 return GEN_INT (boolean_type_class);
8988 if (code == POINTER_TYPE)
8989 return GEN_INT (pointer_type_class);
8990 if (code == REFERENCE_TYPE)
8991 return GEN_INT (reference_type_class);
8992 if (code == OFFSET_TYPE)
8993 return GEN_INT (offset_type_class);
8994 if (code == REAL_TYPE)
8995 return GEN_INT (real_type_class);
8996 if (code == COMPLEX_TYPE)
8997 return GEN_INT (complex_type_class);
8998 if (code == FUNCTION_TYPE)
8999 return GEN_INT (function_type_class);
9000 if (code == METHOD_TYPE)
9001 return GEN_INT (method_type_class);
9002 if (code == RECORD_TYPE)
9003 return GEN_INT (record_type_class);
9004 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
9005 return GEN_INT (union_type_class);
9006 if (code == ARRAY_TYPE)
9007 {
9008 if (TYPE_STRING_FLAG (type))
9009 return GEN_INT (string_type_class);
9010 else
9011 return GEN_INT (array_type_class);
9012 }
9013 if (code == SET_TYPE)
9014 return GEN_INT (set_type_class);
9015 if (code == FILE_TYPE)
9016 return GEN_INT (file_type_class);
9017 if (code == LANG_TYPE)
9018 return GEN_INT (lang_type_class);
9019 }
9020 return GEN_INT (no_type_class);
9021
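/* Illustrative sketch (not part of the compiler): __builtin_classify_type
   folds to one of the type-class constants from typeclass.h at compile
   time.  Using 5 for pointer_type_class is an assumption about that
   enumeration's layout; `is_pointer' is hypothetical.  */
#if 0
int
is_pointer (void)
{
  char *p = 0;
  return __builtin_classify_type (p) == 5;	/* pointer_type_class */
}
#endif
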
9022 case BUILT_IN_CONSTANT_P:
9023 if (arglist == 0)
9024 return const0_rtx;
9025 else
9026 {
9027 tree arg = TREE_VALUE (arglist);
9028 rtx tmp;
9029
9030 /* We return 1 for a numeric type that's known to be a constant
9031 value at compile-time or for an aggregate type that's a
9032 literal constant. */
9033 STRIP_NOPS (arg);
9034
9035 /* If we know this is a constant, return the constant 1. */
9036 if (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
9037 || (TREE_CODE (arg) == CONSTRUCTOR
9038 && TREE_CONSTANT (arg))
9039 || (TREE_CODE (arg) == ADDR_EXPR
9040 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST))
9041 return const1_rtx;
9042
9043 /* If we aren't going to be running CSE or this expression
9044 has side effects, show we don't know it to be a constant.
9045 Likewise if it's a pointer or aggregate type, since in those
9046 cases we only want literals; those are only optimized
9047 when generating RTL, not later. */
9048 if (TREE_SIDE_EFFECTS (arg) || cse_not_expected
9049 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
9050 || POINTER_TYPE_P (TREE_TYPE (arg)))
9051 return const0_rtx;
9052
9053 /* Otherwise, emit (constant_p_rtx (ARG)) and let CSE get a
9054 chance to see if it can deduce whether ARG is constant. */
9055
9056 tmp = expand_expr (arg, NULL_RTX, VOIDmode, 0);
9057 tmp = gen_rtx_CONSTANT_P_RTX (value_mode, tmp);
9058 return tmp;
9059 }
9060
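/* Illustrative sketch (not part of the compiler): the usual use of
   __builtin_constant_p to choose a compile-time path.  When the answer is
   not yet known, the case above defers it by emitting a CONSTANT_P_RTX for
   CSE to resolve.  `DOUBLE' and `runtime_double' are hypothetical.  */
#if 0
extern int runtime_double (int);
#define DOUBLE(x) (__builtin_constant_p (x) ? (x) * 2 : runtime_double (x))
#endif
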
9061 case BUILT_IN_FRAME_ADDRESS:
9062 /* The argument must be a nonnegative integer constant.
9063 It counts the number of frames to scan up the stack.
9064 The value is the address of that frame. */
9065 case BUILT_IN_RETURN_ADDRESS:
9066 /* The argument must be a nonnegative integer constant.
9067 It counts the number of frames to scan up the stack.
9068 The value is the return address saved in that frame. */
9069 if (arglist == 0)
9070 /* Warning about missing arg was already issued. */
9071 return const0_rtx;
9072 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
9073 || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
9074 {
9075 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9076 error ("invalid arg to `__builtin_frame_address'");
9077 else
9078 error ("invalid arg to `__builtin_return_address'");
9079 return const0_rtx;
9080 }
9081 else
9082 {
9083 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
9084 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
9085 hard_frame_pointer_rtx);
9086
9087 /* Some ports cannot access arbitrary stack frames. */
9088 if (tem == NULL)
9089 {
9090 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9091 warning ("unsupported arg to `__builtin_frame_address'");
9092 else
9093 warning ("unsupported arg to `__builtin_return_address'");
9094 return const0_rtx;
9095 }
9096
9097 /* For __builtin_frame_address, return what we've got. */
9098 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9099 return tem;
9100
9101 if (GET_CODE (tem) != REG)
9102 tem = copy_to_reg (tem);
9103 return tem;
9104 }
9105
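/* Illustrative sketch (not part of the compiler): the frame count must be
   a nonnegative literal, as enforced above; 0 names the current frame.
   `who_called_me' is hypothetical.  */
#if 0
void *
who_called_me (void)
{
  return __builtin_return_address (0);
}
#endif
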
9106 /* Returns the address of the area where the structure value is
9107 returned, or 0 if there is none. */
9108 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
9109 if (arglist != 0
9110 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
9111 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
9112 return const0_rtx;
9113 else
9114 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
9115
9116 case BUILT_IN_ALLOCA:
9117 if (arglist == 0
9118 /* Arg could be non-integer if user redeclared this fcn wrong. */
9119 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
9120 break;
9121
9122 /* Compute the argument. */
9123 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
9124
9125 /* Allocate the desired space. */
9126 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
9127
9128 case BUILT_IN_FFS:
9129 /* If not optimizing, call the library function. */
9130 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9131 break;
9132
9133 if (arglist == 0
9134 /* Arg could be non-integer if user redeclared this fcn wrong. */
9135 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
9136 break;
9137
9138 /* Compute the argument. */
9139 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
9140 /* Compute ffs, into TARGET if possible.
9141 Set TARGET to wherever the result comes back. */
9142 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
9143 ffs_optab, op0, target, 1);
9144 if (target == 0)
9145 abort ();
9146 return target;
9147
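/* Illustrative sketch (not part of the compiler): ffs returns one plus the
   index of the least significant set bit, or 0 for a zero argument, so
   __builtin_ffs (8) is 4.  `lowest_bit' is hypothetical.  */
#if 0
int
lowest_bit (int x)
{
  return __builtin_ffs (x);
}
#endif
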
9148 case BUILT_IN_STRLEN:
9149 /* If not optimizing, call the library function. */
9150 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9151 break;
9152
9153 if (arglist == 0
9154 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9155 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9156 break;
9157 else
9158 {
9159 tree src = TREE_VALUE (arglist);
9160 tree len = c_strlen (src);
9161
9162 int align
9163 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9164
9165 rtx result, src_rtx, char_rtx;
9166 enum machine_mode insn_mode = value_mode, char_mode;
9167 enum insn_code icode;
9168
9169 /* If the length is known, just return it. */
9170 if (len != 0)
9171 return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
9172
9173 /* If SRC is not a pointer type, don't do this operation inline. */
9174 if (align == 0)
9175 break;
9176
9177 /* Call a function if we can't compute strlen in the right mode. */
9178
9179 while (insn_mode != VOIDmode)
9180 {
9181 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
9182 if (icode != CODE_FOR_nothing)
9183 break;
9184
9185 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
9186 }
9187 if (insn_mode == VOIDmode)
9188 break;
9189
9190 /* Make a place to write the result of the instruction. */
9191 result = target;
9192 if (! (result != 0
9193 && GET_CODE (result) == REG
9194 && GET_MODE (result) == insn_mode
9195 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9196 result = gen_reg_rtx (insn_mode);
9197
9198 /* Make sure the operands are acceptable to the predicates. */
9199
9200 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
9201 result = gen_reg_rtx (insn_mode);
9202 src_rtx = memory_address (BLKmode,
9203 expand_expr (src, NULL_RTX, ptr_mode,
9204 EXPAND_NORMAL));
9205
9206 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
9207 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
9208
9209 /* Check the string is readable and has an end. */
9210 if (current_function_check_memory_usage)
9211 emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
9212 src_rtx, ptr_mode,
9213 GEN_INT (MEMORY_USE_RO),
9214 TYPE_MODE (integer_type_node));
9215
9216 char_rtx = const0_rtx;
9217 char_mode = insn_operand_mode[(int)icode][2];
9218 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
9219 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
9220
9221 emit_insn (GEN_FCN (icode) (result,
9222 gen_rtx_MEM (BLKmode, src_rtx),
9223 char_rtx, GEN_INT (align)));
9224
9225 /* Return the value in the proper mode for this function. */
9226 if (GET_MODE (result) == value_mode)
9227 return result;
9228 else if (target != 0)
9229 {
9230 convert_move (target, result, 0);
9231 return target;
9232 }
9233 else
9234 return convert_to_mode (value_mode, result, 0);
9235 }
9236
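/* Illustrative sketch (not part of the compiler): a string literal argument
   is folded by c_strlen above, so this compiles to the constant 3;
   otherwise the expander looks for a strlen insn of a suitable mode.
   `three' is hypothetical.  */
#if 0
int
three (void)
{
  return __builtin_strlen ("abc");
}
#endif
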
9237 case BUILT_IN_STRCPY:
9238 /* If not optimizing, call the library function. */
9239 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9240 break;
9241
9242 if (arglist == 0
9243 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9244 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9245 || TREE_CHAIN (arglist) == 0
9246 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9247 break;
9248 else
9249 {
9250 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
9251
9252 if (len == 0)
9253 break;
9254
9255 len = size_binop (PLUS_EXPR, len, integer_one_node);
9256
9257 chainon (arglist, build_tree_list (NULL_TREE, len));
9258 }
9259
9260 /* Falls through. */
9261 case BUILT_IN_MEMCPY:
9262 /* If not optimizing, call the library function. */
9263 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9264 break;
9265
9266 if (arglist == 0
9267 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9268 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9269 || TREE_CHAIN (arglist) == 0
9270 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9271 != POINTER_TYPE)
9272 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9273 || (TREE_CODE (TREE_TYPE (TREE_VALUE
9274 (TREE_CHAIN (TREE_CHAIN (arglist)))))
9275 != INTEGER_TYPE))
9276 break;
9277 else
9278 {
9279 tree dest = TREE_VALUE (arglist);
9280 tree src = TREE_VALUE (TREE_CHAIN (arglist));
9281 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9282
9283 int src_align
9284 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9285 int dest_align
9286 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9287 rtx dest_mem, src_mem, dest_addr, len_rtx;
9288
9289 /* If either SRC or DEST is not a pointer type, don't do
9290 this operation in-line. */
9291 if (src_align == 0 || dest_align == 0)
9292 {
9293 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
9294 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9295 break;
9296 }
9297
9298 dest_mem = get_memory_rtx (dest);
9299 src_mem = get_memory_rtx (src);
9300 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9301
9302 /* Just copy the rights of SRC to the rights of DEST. */
9303 if (current_function_check_memory_usage)
9304 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
9305 XEXP (dest_mem, 0), ptr_mode,
9306 XEXP (src_mem, 0), ptr_mode,
9307 len_rtx, TYPE_MODE (sizetype));
9308
9309 /* Copy word part most expediently. */
9310 dest_addr
9311 = emit_block_move (dest_mem, src_mem, len_rtx,
9312 MIN (src_align, dest_align));
9313
9314 if (dest_addr == 0)
9315 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
9316
9317 return dest_addr;
9318 }
9319
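/* Illustrative sketch (not part of the compiler): when the source length is
   a known constant, the strcpy case above appends len + 1 to the argument
   list and falls into the memcpy code, so this becomes a six-byte block
   move.  `dst' and `h' are hypothetical.  */
#if 0
static char dst[6];

void
h (void)
{
  __builtin_strcpy (dst, "hello");
}
#endif
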
9320 case BUILT_IN_MEMSET:
9321 /* If not optimizing, call the library function. */
9322 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9323 break;
9324
9325 if (arglist == 0
9326 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9327 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9328 || TREE_CHAIN (arglist) == 0
9329 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9330 != INTEGER_TYPE)
9331 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9332 || (TREE_CODE (TREE_TYPE
9333 (TREE_VALUE
9334 (TREE_CHAIN (TREE_CHAIN (arglist)))))
9335 != INTEGER_TYPE))
9336 break;
9337 else
9338 {
9339 tree dest = TREE_VALUE (arglist);
9340 tree val = TREE_VALUE (TREE_CHAIN (arglist));
9341 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9342
9343 int dest_align
9344 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9345 rtx dest_mem, dest_addr, len_rtx;
9346
9347 /* If DEST is not a pointer type, don't do this
9348 operation in-line. */
9349 if (dest_align == 0)
9350 break;
9351
9352 /* If the arguments have side-effects, then we can only evaluate
9353 them at most once. The following code evaluates them twice if
9354 they are not constants because we break out to expand_call
9355 in that case. They can't be constants if they have side-effects
9356 so we can check for that first. Alternatively, we could call
9357 save_expr to make multiple evaluation safe. */
9358 if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len))
9359 break;
9360
9361 /* If VAL is not 0, don't do this operation in-line. */
9362 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
9363 break;
9364
9365 /* If LEN does not expand to a constant, don't do this
9366 operation in-line. */
9367 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9368 if (GET_CODE (len_rtx) != CONST_INT)
9369 break;
9370
9371 dest_mem = get_memory_rtx (dest);
9372
9373 /* Just check that DEST is writable and mark it as readable. */
9374 if (current_function_check_memory_usage)
9375 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
9376 XEXP (dest_mem, 0), ptr_mode,
9377 len_rtx, TYPE_MODE (sizetype),
9378 GEN_INT (MEMORY_USE_WO),
9379 TYPE_MODE (integer_type_node));
9380
9381
9382 dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
9383
9384 if (dest_addr == 0)
9385 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
9386
9387 return dest_addr;
9388 }
9389
9390 /* These comparison functions need an instruction that returns an actual
9391 index. An ordinary compare that just sets the condition codes
9392 is not enough. */
9393 #ifdef HAVE_cmpstrsi
9394 case BUILT_IN_STRCMP:
9395 /* If not optimizing, call the library function. */
9396 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9397 break;
9398
9399 /* If we need to check memory accesses, call the library function. */
9400 if (current_function_check_memory_usage)
9401 break;
9402
9403 if (arglist == 0
9404 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9405 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9406 || TREE_CHAIN (arglist) == 0
9407 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9408 break;
9409 else if (!HAVE_cmpstrsi)
9410 break;
9411 {
9412 tree arg1 = TREE_VALUE (arglist);
9413 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9414 tree len, len2;
9415
9416 len = c_strlen (arg1);
9417 if (len)
9418 len = size_binop (PLUS_EXPR, integer_one_node, len);
9419 len2 = c_strlen (arg2);
9420 if (len2)
9421 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
9422
9423 /* If we don't have a constant length for the first, use the length
9424 of the second, if we know it. We don't require a constant for
9425 this case; some cost analysis could be done if both are available
9426 but neither is constant. For now, assume they're equally cheap.
9427
9428 If both strings have constant lengths, use the smaller. This
9429 could arise if optimization results in strcmp being called with
9430 two fixed strings, or if the code was machine-generated. We should
9431 add some code to the `memcmp' handler below to deal with such
9432 situations, someday. */
9433 if (!len || TREE_CODE (len) != INTEGER_CST)
9434 {
9435 if (len2)
9436 len = len2;
9437 else if (len == 0)
9438 break;
9439 }
9440 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
9441 {
9442 if (tree_int_cst_lt (len2, len))
9443 len = len2;
9444 }
9445
9446 chainon (arglist, build_tree_list (NULL_TREE, len));
9447 }
9448
9449 /* Falls through. */
9450 case BUILT_IN_MEMCMP:
9451 /* If not optimizing, call the library function. */
9452 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9453 break;
9454
9455 /* If we need to check memory accesses, call the library function. */
9456 if (current_function_check_memory_usage)
9457 break;
9458
9459 if (arglist == 0
9460 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9461 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9462 || TREE_CHAIN (arglist) == 0
9463 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
9464 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9465 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
9466 break;
9467 else if (!HAVE_cmpstrsi)
9468 break;
9469 {
9470 tree arg1 = TREE_VALUE (arglist);
9471 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9472 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9473 rtx result;
9474
9475 int arg1_align
9476 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9477 int arg2_align
9478 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9479 enum machine_mode insn_mode
9480 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
9481
9482 /* If we don't have POINTER_TYPE, call the function. */
9483 if (arg1_align == 0 || arg2_align == 0)
9484 {
9485 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
9486 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9487 break;
9488 }
9489
9490 /* Make a place to write the result of the instruction. */
9491 result = target;
9492 if (! (result != 0
9493 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
9494 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9495 result = gen_reg_rtx (insn_mode);
9496
9497 emit_insn (gen_cmpstrsi (result, get_memory_rtx (arg1),
9498 get_memory_rtx (arg2),
9499 expand_expr (len, NULL_RTX, VOIDmode, 0),
9500 GEN_INT (MIN (arg1_align, arg2_align))));
9501
9502 /* Return the value in the proper mode for this function. */
9503 mode = TYPE_MODE (TREE_TYPE (exp));
9504 if (GET_MODE (result) == mode)
9505 return result;
9506 else if (target != 0)
9507 {
9508 convert_move (target, result, 0);
9509 return target;
9510 }
9511 else
9512 return convert_to_mode (mode, result, 0);
9513 }
9514 #else
9515 case BUILT_IN_STRCMP:
9516 case BUILT_IN_MEMCMP:
9517 break;
9518 #endif
9519
9520 case BUILT_IN_SETJMP:
9521 if (arglist == 0
9522 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9523 break;
9524 else
9525 {
9526 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9527 VOIDmode, 0);
9528 rtx lab = gen_label_rtx ();
9529 rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab);
9530 emit_label (lab);
9531 return ret;
9532 }
9533
9534 /* __builtin_longjmp is passed a pointer to an array of five words.
9535 It's similar to the C library longjmp function but works with
9536 __builtin_setjmp above. */
9537 case BUILT_IN_LONGJMP:
9538 if (arglist == 0 || TREE_CHAIN (arglist) == 0
9539 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9540 break;
9541 else
9542 {
9543 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9544 VOIDmode, 0);
9545 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
9546 NULL_RTX, VOIDmode, 0);
9547
9548 if (value != const1_rtx)
9549 {
9550 error ("__builtin_longjmp second argument must be 1");
9551 return const0_rtx;
9552 }
9553
9554 expand_builtin_longjmp (buf_addr, value);
9555 return const0_rtx;
9556 }
9557
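/* Illustrative sketch (not part of the compiler): how the two builtins
   above pair up around a five-word buffer; the second argument of
   __builtin_longjmp must be the literal 1, as enforced above.  `jmpbuf',
   `thrower' and `catcher' are hypothetical.  */
#if 0
static void *jmpbuf[5];

static void
thrower (void)
{
  __builtin_longjmp (jmpbuf, 1);
}

int
catcher (void)
{
  if (__builtin_setjmp (jmpbuf))
    return 1;			/* reached via __builtin_longjmp */
  thrower ();
  return 0;
}
#endif
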
9558 case BUILT_IN_TRAP:
9559 #ifdef HAVE_trap
9560 if (HAVE_trap)
9561 emit_insn (gen_trap ());
9562 else
9563 #endif
9564 error ("__builtin_trap not supported by this target");
9565 emit_barrier ();
9566 return const0_rtx;
9567
9568 /* Various hooks for the DWARF 2 __throw routine. */
9569 case BUILT_IN_UNWIND_INIT:
9570 expand_builtin_unwind_init ();
9571 return const0_rtx;
9572 case BUILT_IN_DWARF_CFA:
9573 return virtual_cfa_rtx;
9574 #ifdef DWARF2_UNWIND_INFO
9575 case BUILT_IN_DWARF_FP_REGNUM:
9576 return expand_builtin_dwarf_fp_regnum ();
9577 case BUILT_IN_DWARF_REG_SIZE:
9578 return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
9579 #endif
9580 case BUILT_IN_FROB_RETURN_ADDR:
9581 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
9582 case BUILT_IN_EXTRACT_RETURN_ADDR:
9583 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
9584 case BUILT_IN_EH_RETURN:
9585 expand_builtin_eh_return (TREE_VALUE (arglist),
9586 TREE_VALUE (TREE_CHAIN (arglist)),
9587 TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))));
9588 return const0_rtx;
9589
9590 default: /* just do library call, if unknown builtin */
9591 error ("built-in function `%s' not currently supported",
9592 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9593 }
9594
9595 /* The switch statement above can drop through to cause the function
9596 to be called normally. */
9597
9598 return expand_call (exp, target, ignore);
9599 }
9600 \f
9601 /* Built-in functions to perform an untyped call and return. */
9602
9603 /* For each register that may be used for calling a function, this
9604 gives a mode used to copy the register's value. VOIDmode indicates
9605 the register is not used for calling a function. If the machine
9606 has register windows, this gives only the outbound registers.
9607 INCOMING_REGNO gives the corresponding inbound register. */
9608 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
9609
9610 /* For each register that may be used for returning values, this gives
9611 a mode used to copy the register's value. VOIDmode indicates the
9612 register is not used for returning values. If the machine has
9613 register windows, this gives only the outbound registers.
9614 INCOMING_REGNO gives the corresponding inbound register. */
9615 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
9616
9617 /* For each register that may be used for calling a function, this
9618 gives the offset of that register into the block returned by
9619 __builtin_apply_args. 0 indicates that the register is not
9620 used for calling a function. */
9621 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9622
9623 /* Return the offset of register REGNO into the block returned by
9624 __builtin_apply_args. This is not declared static, since it is
9625 needed in objc-act.c. */
9626
9627 int
9628 apply_args_register_offset (regno)
9629 int regno;
9630 {
9631 apply_args_size ();
9632
9633 /* Arguments are always put in outgoing registers (in the argument
9634 block) when that makes sense for the target. */
9635 #ifdef OUTGOING_REGNO
9636 regno = OUTGOING_REGNO(regno);
9637 #endif
9638 return apply_args_reg_offset[regno];
9639 }
9640
9641 /* Return the size required for the block returned by __builtin_apply_args,
9642 and initialize apply_args_mode. */
9643
9644 static int
9645 apply_args_size ()
9646 {
9647 static int size = -1;
9648 int align, regno;
9649 enum machine_mode mode;
9650
9651 /* The values computed by this function never change. */
9652 if (size < 0)
9653 {
9654 /* The first value is the incoming arg-pointer. */
9655 size = GET_MODE_SIZE (Pmode);
9656
9657 /* The second value is the structure value address unless this is
9658 passed as an "invisible" first argument. */
9659 if (struct_value_rtx)
9660 size += GET_MODE_SIZE (Pmode);
9661
9662 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9663 if (FUNCTION_ARG_REGNO_P (regno))
9664 {
9665 /* Search for the proper mode for copying this register's
9666 value. I'm not sure this is right, but it works so far. */
9667 enum machine_mode best_mode = VOIDmode;
9668
9669 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9670 mode != VOIDmode;
9671 mode = GET_MODE_WIDER_MODE (mode))
9672 if (HARD_REGNO_MODE_OK (regno, mode)
9673 && HARD_REGNO_NREGS (regno, mode) == 1)
9674 best_mode = mode;
9675
9676 if (best_mode == VOIDmode)
9677 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9678 mode != VOIDmode;
9679 mode = GET_MODE_WIDER_MODE (mode))
9680 if (HARD_REGNO_MODE_OK (regno, mode)
9681 && (mov_optab->handlers[(int) mode].insn_code
9682 != CODE_FOR_nothing))
9683 best_mode = mode;
9684
9685 mode = best_mode;
9686 if (mode == VOIDmode)
9687 abort ();
9688
9689 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9690 if (size % align != 0)
9691 size = CEIL (size, align) * align;
9692 apply_args_reg_offset[regno] = size;
9693 size += GET_MODE_SIZE (mode);
9694 apply_args_mode[regno] = mode;
9695 }
9696 else
9697 {
9698 apply_args_mode[regno] = VOIDmode;
9699 apply_args_reg_offset[regno] = 0;
9700 }
9701 }
9702 return size;
9703 }
9704
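/* Illustrative sketch (not part of the compiler): the layout rule used
   above and again in apply_result_size -- round the running offset up to
   the mode's alignment before assigning the register its slot.  For
   example, an offset of 6 with alignment 4 rounds up to 8.  */
#if 0
static int
round_up_offset (int size, int align)
{
  if (size % align != 0)
    size = CEIL (size, align) * align;
  return size;
}
#endif
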
9705 /* Return the size required for the block returned by __builtin_apply,
9706 and initialize apply_result_mode. */
9707
9708 static int
9709 apply_result_size ()
9710 {
9711 static int size = -1;
9712 int align, regno;
9713 enum machine_mode mode;
9714
9715 /* The values computed by this function never change. */
9716 if (size < 0)
9717 {
9718 size = 0;
9719
9720 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9721 if (FUNCTION_VALUE_REGNO_P (regno))
9722 {
9723 /* Search for the proper mode for copying this register's
9724 value. I'm not sure this is right, but it works so far. */
9725 enum machine_mode best_mode = VOIDmode;
9726
9727 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9728 mode != TImode;
9729 mode = GET_MODE_WIDER_MODE (mode))
9730 if (HARD_REGNO_MODE_OK (regno, mode))
9731 best_mode = mode;
9732
9733 if (best_mode == VOIDmode)
9734 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9735 mode != VOIDmode;
9736 mode = GET_MODE_WIDER_MODE (mode))
9737 if (HARD_REGNO_MODE_OK (regno, mode)
9738 && (mov_optab->handlers[(int) mode].insn_code
9739 != CODE_FOR_nothing))
9740 best_mode = mode;
9741
9742 mode = best_mode;
9743 if (mode == VOIDmode)
9744 abort ();
9745
9746 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9747 if (size % align != 0)
9748 size = CEIL (size, align) * align;
9749 size += GET_MODE_SIZE (mode);
9750 apply_result_mode[regno] = mode;
9751 }
9752 else
9753 apply_result_mode[regno] = VOIDmode;
9754
9755 /* Allow targets that use untyped_call and untyped_return to override
9756 the size so that machine-specific information can be stored here. */
9757 #ifdef APPLY_RESULT_SIZE
9758 size = APPLY_RESULT_SIZE;
9759 #endif
9760 }
9761 return size;
9762 }
9763
9764 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9765 /* Create a vector describing the result block RESULT. If SAVEP is true,
9766 the result block is used to save the values; otherwise it is used to
9767 restore the values. */
9768
9769 static rtx
9770 result_vector (savep, result)
9771 int savep;
9772 rtx result;
9773 {
9774 int regno, size, align, nelts;
9775 enum machine_mode mode;
9776 rtx reg, mem;
9777 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9778
9779 size = nelts = 0;
9780 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9781 if ((mode = apply_result_mode[regno]) != VOIDmode)
9782 {
9783 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9784 if (size % align != 0)
9785 size = CEIL (size, align) * align;
9786 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
9787 mem = change_address (result, mode,
9788 plus_constant (XEXP (result, 0), size));
9789 savevec[nelts++] = (savep
9790 ? gen_rtx_SET (VOIDmode, mem, reg)
9791 : gen_rtx_SET (VOIDmode, reg, mem));
9792 size += GET_MODE_SIZE (mode);
9793 }
9794 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
9795 }
9796 #endif /* HAVE_untyped_call or HAVE_untyped_return */
9797
9798 /* Save the state required to perform an untyped call with the same
9799 arguments as were passed to the current function. */
9800
9801 static rtx
9802 expand_builtin_apply_args ()
9803 {
9804 rtx registers;
9805 int size, align, regno;
9806 enum machine_mode mode;
9807
9808 /* Create a block where the arg-pointer, structure value address,
9809 and argument registers can be saved. */
9810 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9811
9812 /* Walk past the arg-pointer and structure value address. */
9813 size = GET_MODE_SIZE (Pmode);
9814 if (struct_value_rtx)
9815 size += GET_MODE_SIZE (Pmode);
9816
9817 /* Save each register used in calling a function to the block. */
9818 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9819 if ((mode = apply_args_mode[regno]) != VOIDmode)
9820 {
9821 rtx tem;
9822
9823 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9824 if (size % align != 0)
9825 size = CEIL (size, align) * align;
9826
9827 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
9828
9829 #ifdef STACK_REGS
9830 /* For reg-stack.c's stack register housekeeping.
9831 Compare with a similar piece of code in function.c. */
9832
9833 emit_insn (gen_rtx_USE (mode, tem));
9834 #endif
9835
9836 emit_move_insn (change_address (registers, mode,
9837 plus_constant (XEXP (registers, 0),
9838 size)),
9839 tem);
9840 size += GET_MODE_SIZE (mode);
9841 }
9842
9843 /* Save the arg pointer to the block. */
9844 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9845 copy_to_reg (virtual_incoming_args_rtx));
9846 size = GET_MODE_SIZE (Pmode);
9847
9848 /* Save the structure value address unless this is passed as an
9849 "invisible" first argument. */
9850 if (struct_value_incoming_rtx)
9851 {
9852 emit_move_insn (change_address (registers, Pmode,
9853 plus_constant (XEXP (registers, 0),
9854 size)),
9855 copy_to_reg (struct_value_incoming_rtx));
9856 size += GET_MODE_SIZE (Pmode);
9857 }
9858
9859 /* Return the address of the block. */
9860 return copy_addr_to_reg (XEXP (registers, 0));
9861 }
9862
9863 /* Perform an untyped call and save the state required to perform an
9864 untyped return of whatever value was returned by the given function. */
9865
9866 static rtx
9867 expand_builtin_apply (function, arguments, argsize)
9868 rtx function, arguments, argsize;
9869 {
9870 int size, align, regno;
9871 enum machine_mode mode;
9872 rtx incoming_args, result, reg, dest, call_insn;
9873 rtx old_stack_level = 0;
9874 rtx call_fusage = 0;
9875
9876 /* Create a block where the return registers can be saved. */
9877 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9878
9879 /* ??? The argsize value should be adjusted here. */
9880
9881 /* Fetch the arg pointer from the ARGUMENTS block. */
9882 incoming_args = gen_reg_rtx (Pmode);
9883 emit_move_insn (incoming_args,
9884 gen_rtx_MEM (Pmode, arguments));
9885 #ifndef STACK_GROWS_DOWNWARD
9886 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9887 incoming_args, 0, OPTAB_LIB_WIDEN);
9888 #endif
9889
9890 /* Perform postincrements before actually calling the function. */
9891 emit_queue ();
9892
9893 /* Push a new argument block and copy the arguments. */
9894 do_pending_stack_adjust ();
9895
9896 /* Save the stack with the nonlocal mechanism, if available. */
9897 #ifdef HAVE_save_stack_nonlocal
9898 if (HAVE_save_stack_nonlocal)
9899 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
9900 else
9901 #endif
9902 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9903
9904 /* Push a block of memory onto the stack to store the memory arguments.
9905 Save the address in a register, and copy the memory arguments. ??? I
9906 haven't figured out how the calling convention macros affect this,
9907 but it's likely that the source and/or destination addresses in
9908 the block copy will need updating in machine specific ways. */
9909 dest = allocate_dynamic_stack_space (argsize, 0, 0);
9910 emit_block_move (gen_rtx_MEM (BLKmode, dest),
9911 gen_rtx_MEM (BLKmode, incoming_args),
9912 argsize,
9913 PARM_BOUNDARY / BITS_PER_UNIT);
9914
9915 /* Refer to the argument block. */
9916 apply_args_size ();
9917 arguments = gen_rtx_MEM (BLKmode, arguments);
9918
9919 /* Walk past the arg-pointer and structure value address. */
9920 size = GET_MODE_SIZE (Pmode);
9921 if (struct_value_rtx)
9922 size += GET_MODE_SIZE (Pmode);
9923
9924 /* Restore each of the registers previously saved. Make USE insns
9925 for each of these registers for use in making the call. */
9926 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9927 if ((mode = apply_args_mode[regno]) != VOIDmode)
9928 {
9929 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9930 if (size % align != 0)
9931 size = CEIL (size, align) * align;
9932 reg = gen_rtx_REG (mode, regno);
9933 emit_move_insn (reg,
9934 change_address (arguments, mode,
9935 plus_constant (XEXP (arguments, 0),
9936 size)));
9937
9938 use_reg (&call_fusage, reg);
9939 size += GET_MODE_SIZE (mode);
9940 }
9941
9942 /* Restore the structure value address unless this is passed as an
9943 "invisible" first argument. */
9944 size = GET_MODE_SIZE (Pmode);
9945 if (struct_value_rtx)
9946 {
9947 rtx value = gen_reg_rtx (Pmode);
9948 emit_move_insn (value,
9949 change_address (arguments, Pmode,
9950 plus_constant (XEXP (arguments, 0),
9951 size)));
9952 emit_move_insn (struct_value_rtx, value);
9953 if (GET_CODE (struct_value_rtx) == REG)
9954 use_reg (&call_fusage, struct_value_rtx);
9955 size += GET_MODE_SIZE (Pmode);
9956 }
9957
9958 /* All arguments and registers used for the call are set up by now! */
9959 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9960
9961 /* Ensure the address is valid. A SYMBOL_REF is already valid, so nothing is needed,
9962 and we don't want to load it into a register as an optimization,
9963 because prepare_call_address already did it if it should be done. */
9964 if (GET_CODE (function) != SYMBOL_REF)
9965 function = memory_address (FUNCTION_MODE, function);
9966
9967 /* Generate the actual call instruction and save the return value. */
9968 #ifdef HAVE_untyped_call
9969 if (HAVE_untyped_call)
9970 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
9971 result, result_vector (1, result)));
9972 else
9973 #endif
9974 #ifdef HAVE_call_value
9975 if (HAVE_call_value)
9976 {
9977 rtx valreg = 0;
9978
9979 /* Locate the unique return register. It is not possible to
9980 express a call that sets more than one return register using
9981 call_value; use untyped_call for that. In fact, untyped_call
9982 only needs to save the return registers in the given block. */
9983 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9984 if ((mode = apply_result_mode[regno]) != VOIDmode)
9985 {
9986 if (valreg)
9987 abort (); /* HAVE_untyped_call required. */
9988 valreg = gen_rtx_REG (mode, regno);
9989 }
9990
9991 emit_call_insn (gen_call_value (valreg,
9992 gen_rtx_MEM (FUNCTION_MODE, function),
9993 const0_rtx, NULL_RTX, const0_rtx));
9994
9995 emit_move_insn (change_address (result, GET_MODE (valreg),
9996 XEXP (result, 0)),
9997 valreg);
9998 }
9999 else
10000 #endif
10001 abort ();
10002
10003 /* Find the CALL insn we just emitted. */
10004 for (call_insn = get_last_insn ();
10005 call_insn && GET_CODE (call_insn) != CALL_INSN;
10006 call_insn = PREV_INSN (call_insn))
10007 ;
10008
10009 if (! call_insn)
10010 abort ();
10011
10012 /* Put the register usage information on the CALL. If there is already
10013 some usage information, put ours at the end. */
10014 if (CALL_INSN_FUNCTION_USAGE (call_insn))
10015 {
10016 rtx link;
10017
10018 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
10019 link = XEXP (link, 1))
10020 ;
10021
10022 XEXP (link, 1) = call_fusage;
10023 }
10024 else
10025 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
10026
10027 /* Restore the stack. */
10028 #ifdef HAVE_save_stack_nonlocal
10029 if (HAVE_save_stack_nonlocal)
10030 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
10031 else
10032 #endif
10033 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
10034
10035 /* Return the address of the result block. */
10036 return copy_addr_to_reg (XEXP (result, 0));
10037 }
10038
10039 /* Perform an untyped return. */
10040
10041 static void
10042 expand_builtin_return (result)
10043 rtx result;
10044 {
10045 int size, align, regno;
10046 enum machine_mode mode;
10047 rtx reg;
10048 rtx call_fusage = 0;
10049
10050 apply_result_size ();
10051 result = gen_rtx_MEM (BLKmode, result);
10052
10053 #ifdef HAVE_untyped_return
10054 if (HAVE_untyped_return)
10055 {
10056 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
10057 emit_barrier ();
10058 return;
10059 }
10060 #endif
10061
10062 /* Restore the return value and note that each value is used. */
10063 size = 0;
10064 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10065 if ((mode = apply_result_mode[regno]) != VOIDmode)
10066 {
10067 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
10068 if (size % align != 0)
10069 size = CEIL (size, align) * align;
10070 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
10071 emit_move_insn (reg,
10072 change_address (result, mode,
10073 plus_constant (XEXP (result, 0),
10074 size)));
10075
10076 push_to_sequence (call_fusage);
10077 emit_insn (gen_rtx_USE (VOIDmode, reg));
10078 call_fusage = get_insns ();
10079 end_sequence ();
10080 size += GET_MODE_SIZE (mode);
10081 }
10082
10083 /* Put the USE insns before the return. */
10084 emit_insns (call_fusage);
10085
10086 /* Return whatever values were restored by jumping directly to the end
10087 of the function. */
10088 expand_null_return ();
10089 }
10090 \f
10091 /* Expand code for a post- or pre- increment or decrement
10092 and return the RTX for the result.
10093 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
10094
10095 static rtx
10096 expand_increment (exp, post, ignore)
10097 register tree exp;
10098 int post, ignore;
10099 {
10100 register rtx op0, op1;
10101 register rtx temp, value;
10102 register tree incremented = TREE_OPERAND (exp, 0);
10103 optab this_optab = add_optab;
10104 int icode;
10105 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
10106 int op0_is_copy = 0;
10107 int single_insn = 0;
10108 /* 1 means we can't store into OP0 directly,
10109 because it is a subreg narrower than a word,
10110 and we don't dare clobber the rest of the word. */
10111 int bad_subreg = 0;
10112
10113 /* Stabilize any component ref that might need to be
10114 evaluated more than once below. */
10115 if (!post
10116 || TREE_CODE (incremented) == BIT_FIELD_REF
10117 || (TREE_CODE (incremented) == COMPONENT_REF
10118 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
10119 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
10120 incremented = stabilize_reference (incremented);
10121 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
10122 ones into save exprs so that they don't accidentally get evaluated
10123 more than once by the code below. */
10124 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
10125 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
10126 incremented = save_expr (incremented);
10127
10128 /* Compute the operands as RTX.
10129 Note whether OP0 is the actual lvalue or a copy of it:
10130 I believe it is a copy iff it is a register or subreg
10131 and insns were generated in computing it. */
10132
10133 temp = get_last_insn ();
10134 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
10135
10136 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
10137 in place but instead must do sign- or zero-extension during assignment,
10138 so we copy it into a new register and let the code below use it as
10139 a copy.
10140
10141 Note that we can safely modify this SUBREG since it is known not to be
10142 shared (it was made by the expand_expr call above). */
10143
10144 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
10145 {
10146 if (post)
10147 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
10148 else
10149 bad_subreg = 1;
10150 }
10151 else if (GET_CODE (op0) == SUBREG
10152 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
10153 {
10154 /* We cannot increment this SUBREG in place. If we are
10155 post-incrementing, get a copy of the old value. Otherwise,
10156 just mark that we cannot increment in place. */
10157 if (post)
10158 op0 = copy_to_reg (op0);
10159 else
10160 bad_subreg = 1;
10161 }
10162
10163 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
10164 && temp != get_last_insn ());
10165 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
10166 EXPAND_MEMORY_USE_BAD);
10167
10168 /* Decide whether incrementing or decrementing. */
10169 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
10170 || TREE_CODE (exp) == PREDECREMENT_EXPR)
10171 this_optab = sub_optab;
10172
10173 /* Convert decrement by a constant into a negative increment. */
10174 if (this_optab == sub_optab
10175 && GET_CODE (op1) == CONST_INT)
10176 {
10177 op1 = GEN_INT (- INTVAL (op1));
10178 this_optab = add_optab;
10179 }
10180
10181 /* For a preincrement, see if we can do this with a single instruction. */
10182 if (!post)
10183 {
10184 icode = (int) this_optab->handlers[(int) mode].insn_code;
10185 if (icode != (int) CODE_FOR_nothing
10186 /* Make sure that OP0 is valid for operands 0 and 1
10187 of the insn we want to queue. */
10188 && (*insn_operand_predicate[icode][0]) (op0, mode)
10189 && (*insn_operand_predicate[icode][1]) (op0, mode)
10190 && (*insn_operand_predicate[icode][2]) (op1, mode))
10191 single_insn = 1;
10192 }
10193
10194 /* If OP0 is not the actual lvalue, but rather a copy in a register,
10195 then we cannot just increment OP0. We must therefore contrive to
10196 increment the original value. Then, for postincrement, we can return
10197 OP0 since it is a copy of the old value. For preincrement, expand here
10198 unless we can do it with a single insn.
10199
10200 Likewise if storing directly into OP0 would clobber high bits
10201 we need to preserve (bad_subreg). */
10202 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
10203 {
10204 /* This is the easiest way to increment the value wherever it is.
10205 Problems with multiple evaluation of INCREMENTED are prevented
10206 because either (1) it is a component_ref or preincrement,
10207 in which case it was stabilized above, or (2) it is an array_ref
10208 with constant index in an array in a register, which is
10209 safe to reevaluate. */
10210 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
10211 || TREE_CODE (exp) == PREDECREMENT_EXPR)
10212 ? MINUS_EXPR : PLUS_EXPR),
10213 TREE_TYPE (exp),
10214 incremented,
10215 TREE_OPERAND (exp, 1));
10216
10217 while (TREE_CODE (incremented) == NOP_EXPR
10218 || TREE_CODE (incremented) == CONVERT_EXPR)
10219 {
10220 newexp = convert (TREE_TYPE (incremented), newexp);
10221 incremented = TREE_OPERAND (incremented, 0);
10222 }
10223
10224 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
10225 return post ? op0 : temp;
10226 }
10227
10228 if (post)
10229 {
10230 /* We have a true reference to the value in OP0.
10231 If there is an insn to add or subtract in this mode, queue it.
10232 Queueing the increment insn avoids the register shuffling
10233 that often results if we must increment now and first save
10234 the old value for subsequent use. */
10235
10236 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
10237 op0 = stabilize (op0);
10238 #endif
10239
10240 icode = (int) this_optab->handlers[(int) mode].insn_code;
10241 if (icode != (int) CODE_FOR_nothing
10242 /* Make sure that OP0 is valid for operands 0 and 1
10243 of the insn we want to queue. */
10244 && (*insn_operand_predicate[icode][0]) (op0, mode)
10245 && (*insn_operand_predicate[icode][1]) (op0, mode))
10246 {
10247 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10248 op1 = force_reg (mode, op1);
10249
10250 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
10251 }
10252 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
10253 {
10254 rtx addr = (general_operand (XEXP (op0, 0), mode)
10255 ? force_reg (Pmode, XEXP (op0, 0))
10256 : copy_to_reg (XEXP (op0, 0)));
10257 rtx temp, result;
10258
10259 op0 = change_address (op0, VOIDmode, addr);
10260 temp = force_reg (GET_MODE (op0), op0);
10261 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10262 op1 = force_reg (mode, op1);
10263
10264 /* The increment queue is LIFO, thus we have to `queue'
10265 the instructions in reverse order. */
10266 enqueue_insn (op0, gen_move_insn (op0, temp));
10267 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
10268 return result;
10269 }
10270 }
10271
10272 /* Preincrement, or we can't increment with one simple insn. */
10273 if (post)
10274 /* Save a copy of the value before inc or dec, to return it later. */
10275 temp = value = copy_to_reg (op0);
10276 else
10277 /* Arrange to return the incremented value. */
10278 /* Copy the rtx because expand_binop will protect from the queue,
10279 and the results of that would be invalid for us to return
10280 if our caller does emit_queue before using our result. */
10281 temp = copy_rtx (value = op0);
10282
10283 /* Increment however we can. */
10284 op1 = expand_binop (mode, this_optab, value, op1,
10285 current_function_check_memory_usage ? NULL_RTX : op0,
10286 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
10287 /* Make sure the value is stored into OP0. */
10288 if (op1 != op0)
10289 emit_move_insn (op0, op1);
10290
10291 return temp;
10292 }
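
/* Illustrative sketch (not part of the compiler): the two behaviors
   expand_increment implements.  With POST nonzero the old value is copied
   and returned; otherwise the incremented value itself is the result.
   `post_demo' and `pre_demo' are hypothetical.  */
#if 0
int post_demo (int x) { int old = x; x = x + 1; return old; }	/* x++ */
int pre_demo (int x) { x = x + 1; return x; }			/* ++x */
#endif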
10293 \f
10294 /* Expand all function calls contained within EXP, innermost ones first.
10295 But don't look within expressions that have sequence points.
10296 For each CALL_EXPR, record the rtx for its value
10297 in the CALL_EXPR_RTL field. */
10298
10299 static void
10300 preexpand_calls (exp)
10301 tree exp;
10302 {
10303 register int nops, i;
10304 int type = TREE_CODE_CLASS (TREE_CODE (exp));
10305
10306 if (! do_preexpand_calls)
10307 return;
10308
10309 /* Only expressions and references can contain calls. */
10310
10311 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
10312 return;
10313
10314 switch (TREE_CODE (exp))
10315 {
10316 case CALL_EXPR:
10317 /* Do nothing if already expanded. */
10318 if (CALL_EXPR_RTL (exp) != 0
10319 /* Do nothing if the call returns a variable-sized object. */
10320 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
10321 /* Do nothing to built-in functions. */
10322 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
10323 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
10324 == FUNCTION_DECL)
10325 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
10326 return;
10327
10328 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
10329 return;
10330
10331 case COMPOUND_EXPR:
10332 case COND_EXPR:
10333 case TRUTH_ANDIF_EXPR:
10334 case TRUTH_ORIF_EXPR:
10335 /* If we find one of these, then we can be sure
10336 the pending stack adjust will be done for it (since it makes jumps).
10337 Do it now, so that if this is inside an argument
10338 of a function, we don't get the stack adjustment
10339 after some other args have already been pushed. */
10340 do_pending_stack_adjust ();
10341 return;
10342
10343 case BLOCK:
10344 case RTL_EXPR:
10345 case WITH_CLEANUP_EXPR:
10346 case CLEANUP_POINT_EXPR:
10347 case TRY_CATCH_EXPR:
10348 return;
10349
10350 case SAVE_EXPR:
10351 if (SAVE_EXPR_RTL (exp) != 0)
10352 return;
10353
10354 default:
10355 break;
10356 }
10357
10358 nops = tree_code_length[(int) TREE_CODE (exp)];
10359 for (i = 0; i < nops; i++)
10360 if (TREE_OPERAND (exp, i) != 0)
10361 {
10362 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
10363 if (type == 'e' || type == '<' || type == '1' || type == '2'
10364 || type == 'r')
10365 preexpand_calls (TREE_OPERAND (exp, i));
10366 }
10367 }
10368 \f
10369 /* At the start of a function, record that we have no previously-pushed
10370 arguments waiting to be popped. */
10371
10372 void
10373 init_pending_stack_adjust ()
10374 {
10375 pending_stack_adjust = 0;
10376 }
10377
10378 /* When exiting from a function, if safe, clear out any pending stack adjust
10379 so the adjustment won't get done.
10380
10381 Note, if the current function calls alloca, then it must have a
10382 frame pointer regardless of the value of flag_omit_frame_pointer. */
10383
10384 void
10385 clear_pending_stack_adjust ()
10386 {
10387 #ifdef EXIT_IGNORE_STACK
10388 if (optimize > 0
10389 && (! flag_omit_frame_pointer || current_function_calls_alloca)
10390 && EXIT_IGNORE_STACK
10391 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
10392 && ! flag_inline_functions)
10393 pending_stack_adjust = 0;
10394 #endif
10395 }
10396
10397 /* Pop any previously-pushed arguments that have not been popped yet. */
10398
10399 void
10400 do_pending_stack_adjust ()
10401 {
10402 if (inhibit_defer_pop == 0)
10403 {
10404 if (pending_stack_adjust != 0)
10405 adjust_stack (GEN_INT (pending_stack_adjust));
10406 pending_stack_adjust = 0;
10407 }
10408 }
10409 \f
10410 /* Expand conditional expressions. */
10411
10412 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
10413 LABEL is an rtx of code CODE_LABEL, in this function and all the
10414 functions here. */
10415
10416 void
10417 jumpifnot (exp, label)
10418 tree exp;
10419 rtx label;
10420 {
10421 do_jump (exp, label, NULL_RTX);
10422 }
10423
10424 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
10425
10426 void
10427 jumpif (exp, label)
10428 tree exp;
10429 rtx label;
10430 {
10431 do_jump (exp, NULL_RTX, label);
10432 }
10433
10434 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
10435 the result is zero, or IF_TRUE_LABEL if the result is one.
10436 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
10437 meaning fall through in that case.
10438
10439 do_jump always does any pending stack adjust except when it does not
10440 actually perform a jump. An example where there is no jump
10441 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
10442
10443 This function is responsible for optimizing cases such as
10444 &&, || and comparison operators in EXP. */
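
/* For example, do_jump on the tree for `a && b' with only
   IF_FALSE_LABEL set emits roughly

	<jump to IF_FALSE_LABEL if A is zero>
	<jump to IF_FALSE_LABEL if B is zero>

   and falls through when both are nonzero; see the TRUTH_ANDIF_EXPR
   case below.  */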
10445
10446 void
10447 do_jump (exp, if_false_label, if_true_label)
10448 tree exp;
10449 rtx if_false_label, if_true_label;
10450 {
10451 register enum tree_code code = TREE_CODE (exp);
10452 /* Some cases need to create a label to jump to
10453 in order to properly fall through.
10454 These cases set DROP_THROUGH_LABEL nonzero. */
10455 rtx drop_through_label = 0;
10456 rtx temp;
10457 rtx comparison = 0;
10458 int i;
10459 tree type;
10460 enum machine_mode mode;
10461
10462 #ifdef MAX_INTEGER_COMPUTATION_MODE
10463 check_max_integer_computation_mode (exp);
10464 #endif
10465
10466 emit_queue ();
10467
10468 switch (code)
10469 {
10470 case ERROR_MARK:
10471 break;
10472
10473 case INTEGER_CST:
10474 temp = integer_zerop (exp) ? if_false_label : if_true_label;
10475 if (temp)
10476 emit_jump (temp);
10477 break;
10478
10479 #if 0
10480 /* This is not true with #pragma weak */
10481 case ADDR_EXPR:
10482 /* The address of something can never be zero. */
10483 if (if_true_label)
10484 emit_jump (if_true_label);
10485 break;
10486 #endif
10487
10488 case NOP_EXPR:
10489 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
10490 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
10491 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
10492 goto normal;
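      /* ... fall through ...  */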
10493 case CONVERT_EXPR:
10494 /* If we are narrowing the operand, we have to do the compare in the
10495 narrower mode. */
10496 if ((TYPE_PRECISION (TREE_TYPE (exp))
10497 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10498 goto normal;
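      /* ... fall through ...  */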
10499 case NON_LVALUE_EXPR:
10500 case REFERENCE_EXPR:
10501 case ABS_EXPR:
10502 case NEGATE_EXPR:
10503 case LROTATE_EXPR:
10504 case RROTATE_EXPR:
10505 /* These cannot change zero->non-zero or vice versa. */
10506 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10507 break;
10508
10509 #if 0
10510 /* This never takes fewer insns than evaluating the PLUS_EXPR followed by
10511 a test and can be longer if the test is eliminated. */
10512 case PLUS_EXPR:
10513 /* Reduce to minus. */
10514 exp = build (MINUS_EXPR, TREE_TYPE (exp),
10515 TREE_OPERAND (exp, 0),
10516 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
10517 TREE_OPERAND (exp, 1))));
10518 /* Process as MINUS. */
10519 #endif
10520
10521 case MINUS_EXPR:
10522 /* Non-zero iff operands of minus differ. */
10523 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
10524 TREE_OPERAND (exp, 0),
10525 TREE_OPERAND (exp, 1)),
10526 NE, NE);
10527 break;
10528
10529 case BIT_AND_EXPR:
10530 /* If we are AND'ing with a small constant, do this comparison in the
10531 smallest type that fits. If the machine doesn't have comparisons
10532 that small, it will be converted back to the wider comparison.
10533 This helps if we are testing the sign bit of a narrower object.
10534 combine can't do this for us because it can't know whether a
10535 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
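
      /* For example, `if (x & 0x80)' with X a 32-bit int can be done as
	 a QImode test: the mask fits in 8 bits, so converting EXP to an
	 8-bit unsigned type turns this into a sign-bit test of a single
	 byte, provided the machine has a QImode compare.  */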
10536
10537 if (! SLOW_BYTE_ACCESS
10538 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10539 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10540 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10541 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10542 && (type = type_for_mode (mode, 1)) != 0
10543 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10544 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10545 != CODE_FOR_nothing))
10546 {
10547 do_jump (convert (type, exp), if_false_label, if_true_label);
10548 break;
10549 }
10550 goto normal;
10551
10552 case TRUTH_NOT_EXPR:
10553 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10554 break;
10555
10556 case TRUTH_ANDIF_EXPR:
10557 if (if_false_label == 0)
10558 if_false_label = drop_through_label = gen_label_rtx ();
10559 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10560 start_cleanup_deferral ();
10561 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10562 end_cleanup_deferral ();
10563 break;
10564
10565 case TRUTH_ORIF_EXPR:
10566 if (if_true_label == 0)
10567 if_true_label = drop_through_label = gen_label_rtx ();
10568 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10569 start_cleanup_deferral ();
10570 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10571 end_cleanup_deferral ();
10572 break;
10573
10574 case COMPOUND_EXPR:
10575 push_temp_slots ();
10576 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10577 preserve_temp_slots (NULL_RTX);
10578 free_temp_slots ();
10579 pop_temp_slots ();
10580 emit_queue ();
10581 do_pending_stack_adjust ();
10582 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10583 break;
10584
10585 case COMPONENT_REF:
10586 case BIT_FIELD_REF:
10587 case ARRAY_REF:
10588 {
10589 int bitsize, bitpos, unsignedp;
10590 enum machine_mode mode;
10591 tree type;
10592 tree offset;
10593 int volatilep = 0;
10594 int alignment;
10595
10596 /* Get description of this reference. We don't actually care
10597 about the underlying object here. */
10598 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10599 &mode, &unsignedp, &volatilep,
10600 &alignment);
10601
10602 type = type_for_size (bitsize, unsignedp);
10603 if (! SLOW_BYTE_ACCESS
10604 && type != 0 && bitsize >= 0
10605 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10606 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10607 != CODE_FOR_nothing))
10608 {
10609 do_jump (convert (type, exp), if_false_label, if_true_label);
10610 break;
10611 }
10612 goto normal;
10613 }
10614
10615 case COND_EXPR:
10616 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10617 if (integer_onep (TREE_OPERAND (exp, 1))
10618 && integer_zerop (TREE_OPERAND (exp, 2)))
10619 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10620
10621 else if (integer_zerop (TREE_OPERAND (exp, 1))
10622 && integer_onep (TREE_OPERAND (exp, 2)))
10623 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10624
10625 else
10626 {
10627 register rtx label1 = gen_label_rtx ();
10628 drop_through_label = gen_label_rtx ();
10629
10630 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10631
10632 start_cleanup_deferral ();
10633 /* Now the THEN-expression. */
10634 do_jump (TREE_OPERAND (exp, 1),
10635 if_false_label ? if_false_label : drop_through_label,
10636 if_true_label ? if_true_label : drop_through_label);
10637 /* In case the do_jump just above never jumps. */
10638 do_pending_stack_adjust ();
10639 emit_label (label1);
10640
10641 /* Now the ELSE-expression. */
10642 do_jump (TREE_OPERAND (exp, 2),
10643 if_false_label ? if_false_label : drop_through_label,
10644 if_true_label ? if_true_label : drop_through_label);
10645 end_cleanup_deferral ();
10646 }
10647 break;
10648
10649 case EQ_EXPR:
10650 {
10651 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10652
10653 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10654 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10655 {
10656 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10657 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10658 do_jump
10659 (fold
10660 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10661 fold (build (EQ_EXPR, TREE_TYPE (exp),
10662 fold (build1 (REALPART_EXPR,
10663 TREE_TYPE (inner_type),
10664 exp0)),
10665 fold (build1 (REALPART_EXPR,
10666 TREE_TYPE (inner_type),
10667 exp1)))),
10668 fold (build (EQ_EXPR, TREE_TYPE (exp),
10669 fold (build1 (IMAGPART_EXPR,
10670 TREE_TYPE (inner_type),
10671 exp0)),
10672 fold (build1 (IMAGPART_EXPR,
10673 TREE_TYPE (inner_type),
10674 exp1)))))),
10675 if_false_label, if_true_label);
10676 }
10677
10678 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10679 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10680
10681 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10682 && !can_compare_p (TYPE_MODE (inner_type)))
10683 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10684 else
10685 comparison = compare (exp, EQ, EQ);
10686 break;
10687 }
10688
10689 case NE_EXPR:
10690 {
10691 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10692
10693 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10694 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10695 {
10696 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10697 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10698 do_jump
10699 (fold
10700 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10701 fold (build (NE_EXPR, TREE_TYPE (exp),
10702 fold (build1 (REALPART_EXPR,
10703 TREE_TYPE (inner_type),
10704 exp0)),
10705 fold (build1 (REALPART_EXPR,
10706 TREE_TYPE (inner_type),
10707 exp1)))),
10708 fold (build (NE_EXPR, TREE_TYPE (exp),
10709 fold (build1 (IMAGPART_EXPR,
10710 TREE_TYPE (inner_type),
10711 exp0)),
10712 fold (build1 (IMAGPART_EXPR,
10713 TREE_TYPE (inner_type),
10714 exp1)))))),
10715 if_false_label, if_true_label);
10716 }
10717
10718 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10719 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10720
10721 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10722 && !can_compare_p (TYPE_MODE (inner_type)))
10723 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10724 else
10725 comparison = compare (exp, NE, NE);
10726 break;
10727 }
10728
10729 case LT_EXPR:
10730 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10731 == MODE_INT)
10732 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10733 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10734 else
10735 comparison = compare (exp, LT, LTU);
10736 break;
10737
10738 case LE_EXPR:
10739 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10740 == MODE_INT)
10741 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10742 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10743 else
10744 comparison = compare (exp, LE, LEU);
10745 break;
10746
10747 case GT_EXPR:
10748 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10749 == MODE_INT)
10750 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10751 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10752 else
10753 comparison = compare (exp, GT, GTU);
10754 break;
10755
10756 case GE_EXPR:
10757 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10758 == MODE_INT)
10759 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10760 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10761 else
10762 comparison = compare (exp, GE, GEU);
10763 break;
10764
10765 default:
10766 normal:
10767 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10768 #if 0
10769 /* This is not needed any more and causes poor code since it causes
10770 comparisons and tests from non-SI objects to have different code
10771 sequences. */
10772 /* Copy to register to avoid generating bad insns by cse
10773 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10774 if (!cse_not_expected && GET_CODE (temp) == MEM)
10775 temp = copy_to_reg (temp);
10776 #endif
10777 do_pending_stack_adjust ();
10778 if (GET_CODE (temp) == CONST_INT)
10779 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10780 else if (GET_CODE (temp) == LABEL_REF)
10781 comparison = const_true_rtx;
10782 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10783 && !can_compare_p (GET_MODE (temp)))
10784 /* Note that swapping the labels gives us not-equal. */
10785 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10786 else if (GET_MODE (temp) != VOIDmode)
10787 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10788 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10789 GET_MODE (temp), NULL_RTX, 0);
10790 else
10791 abort ();
10792 }
10793
10794 /* Do any postincrements in the expression that was tested. */
10795 emit_queue ();
10796
10797 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10798 straight into a conditional jump instruction as the jump condition.
10799 Otherwise, all the work has been done already. */
10800
10801 if (comparison == const_true_rtx)
10802 {
10803 if (if_true_label)
10804 emit_jump (if_true_label);
10805 }
10806 else if (comparison == const0_rtx)
10807 {
10808 if (if_false_label)
10809 emit_jump (if_false_label);
10810 }
10811 else if (comparison)
10812 do_jump_for_compare (comparison, if_false_label, if_true_label);
10813
10814 if (drop_through_label)
10815 {
10816 /* If do_jump produces code that might be jumped around,
10817 do any stack adjusts from that code, before the place
10818 where control merges in. */
10819 do_pending_stack_adjust ();
10820 emit_label (drop_through_label);
10821 }
10822 }
10823 \f
10824 /* Given a comparison expression EXP for values too wide to be compared
10825 with one insn, test the comparison and jump to the appropriate label.
10826 The code of EXP is ignored; we always test GT if SWAP is 0,
10827 and LT if SWAP is 1. */
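
/* For example, a signed DImode `a > b' on a 32-bit target is emitted,
   high-order word first, as roughly

	if (a.high > b.high) goto if_true_label;	(signed compare)
	if (a.high != b.high) goto if_false_label;
	if (a.low > b.low) goto if_true_label;		(unsigned compare)
	goto if_false_label;

   which is what the word loop below produces.  */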
10828
10829 static void
10830 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10831 tree exp;
10832 int swap;
10833 rtx if_false_label, if_true_label;
10834 {
10835 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10836 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10837 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10838 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10839 rtx drop_through_label = 0;
10840 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10841 int i;
10842
10843 if (! if_true_label || ! if_false_label)
10844 drop_through_label = gen_label_rtx ();
10845 if (! if_true_label)
10846 if_true_label = drop_through_label;
10847 if (! if_false_label)
10848 if_false_label = drop_through_label;
10849
10850 /* Compare a word at a time, high order first. */
10851 for (i = 0; i < nwords; i++)
10852 {
10853 rtx comp;
10854 rtx op0_word, op1_word;
10855
10856 if (WORDS_BIG_ENDIAN)
10857 {
10858 op0_word = operand_subword_force (op0, i, mode);
10859 op1_word = operand_subword_force (op1, i, mode);
10860 }
10861 else
10862 {
10863 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10864 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10865 }
10866
10867 /* All but the high-order word must be compared as unsigned. */
10868 comp = compare_from_rtx (op0_word, op1_word,
10869 (unsignedp || i > 0) ? GTU : GT,
10870 unsignedp, word_mode, NULL_RTX, 0);
10871 if (comp == const_true_rtx)
10872 emit_jump (if_true_label);
10873 else if (comp != const0_rtx)
10874 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10875
10876 /* Consider lower words only if these are equal. */
10877 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10878 NULL_RTX, 0);
10879 if (comp == const_true_rtx)
10880 emit_jump (if_false_label);
10881 else if (comp != const0_rtx)
10882 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10883 }
10884
10885 if (if_false_label)
10886 emit_jump (if_false_label);
10887 if (drop_through_label)
10888 emit_label (drop_through_label);
10889 }
10890
10891 /* Compare OP0 with OP1, word at a time, in mode MODE.
10892 UNSIGNEDP says to do unsigned comparison.
10893 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10894
10895 void
10896 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10897 enum machine_mode mode;
10898 int unsignedp;
10899 rtx op0, op1;
10900 rtx if_false_label, if_true_label;
10901 {
10902 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10903 rtx drop_through_label = 0;
10904 int i;
10905
10906 if (! if_true_label || ! if_false_label)
10907 drop_through_label = gen_label_rtx ();
10908 if (! if_true_label)
10909 if_true_label = drop_through_label;
10910 if (! if_false_label)
10911 if_false_label = drop_through_label;
10912
10913 /* Compare a word at a time, high order first. */
10914 for (i = 0; i < nwords; i++)
10915 {
10916 rtx comp;
10917 rtx op0_word, op1_word;
10918
10919 if (WORDS_BIG_ENDIAN)
10920 {
10921 op0_word = operand_subword_force (op0, i, mode);
10922 op1_word = operand_subword_force (op1, i, mode);
10923 }
10924 else
10925 {
10926 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10927 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10928 }
10929
10930 /* All but the high-order word must be compared as unsigned. */
10931 comp = compare_from_rtx (op0_word, op1_word,
10932 (unsignedp || i > 0) ? GTU : GT,
10933 unsignedp, word_mode, NULL_RTX, 0);
10934 if (comp == const_true_rtx)
10935 emit_jump (if_true_label);
10936 else if (comp != const0_rtx)
10937 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10938
10939 /* Consider lower words only if these are equal. */
10940 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10941 NULL_RTX, 0);
10942 if (comp == const_true_rtx)
10943 emit_jump (if_false_label);
10944 else if (comp != const0_rtx)
10945 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10946 }
10947
10948 if (if_false_label)
10949 emit_jump (if_false_label);
10950 if (drop_through_label)
10951 emit_label (drop_through_label);
10952 }
10953
10954 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10955 with one insn, test the comparison and jump to the appropriate label. */
10956
10957 static void
10958 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10959 tree exp;
10960 rtx if_false_label, if_true_label;
10961 {
10962 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10963 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10964 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10965 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10966 int i;
10967 rtx drop_through_label = 0;
10968
10969 if (! if_false_label)
10970 drop_through_label = if_false_label = gen_label_rtx ();
10971
10972 for (i = 0; i < nwords; i++)
10973 {
10974 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10975 operand_subword_force (op1, i, mode),
10976 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10977 word_mode, NULL_RTX, 0);
10978 if (comp == const_true_rtx)
10979 emit_jump (if_false_label);
10980 else if (comp != const0_rtx)
10981 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10982 }
10983
10984 if (if_true_label)
10985 emit_jump (if_true_label);
10986 if (drop_through_label)
10987 emit_label (drop_through_label);
10988 }
10989 \f
10990 /* Jump according to whether OP0 is 0.
10991 We assume that OP0 has an integer mode that is too wide
10992 for the available compare insns. */
10993
10994 void
10995 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10996 rtx op0;
10997 rtx if_false_label, if_true_label;
10998 {
10999 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
11000 rtx part;
11001 int i;
11002 rtx drop_through_label = 0;
11003
11004 /* The fastest way of doing this comparison on almost any machine is to
11005 "or" all the words and compare the result. If all have to be loaded
11006 from memory and this is a very wide item, it may be slower,
11007 but that's highly unlikely. */
11008
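  /* For example, a DImode zero test on a 32-bit target becomes roughly

	part = x.low | x.high;
	if (part == 0) goto if_false_label;
	goto if_true_label;

     instead of two separate word comparisons.  */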
11009 part = gen_reg_rtx (word_mode);
11010 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
11011 for (i = 1; i < nwords && part != 0; i++)
11012 part = expand_binop (word_mode, ior_optab, part,
11013 operand_subword_force (op0, i, GET_MODE (op0)),
11014 part, 1, OPTAB_WIDEN);
11015
11016 if (part != 0)
11017 {
11018 rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
11019 NULL_RTX, 0);
11020
11021 if (comp == const_true_rtx)
11022 emit_jump (if_false_label);
11023 else if (comp == const0_rtx)
11024 emit_jump (if_true_label);
11025 else
11026 do_jump_for_compare (comp, if_false_label, if_true_label);
11027
11028 return;
11029 }
11030
11031 /* If we couldn't do the "or" simply, do this with a series of compares. */
11032 if (! if_false_label)
11033 drop_through_label = if_false_label = gen_label_rtx ();
11034
11035 for (i = 0; i < nwords; i++)
11036 {
11037 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
11038 GET_MODE (op0)),
11039 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
11040 if (comp == const_true_rtx)
11041 emit_jump (if_false_label);
11042 else if (comp != const0_rtx)
11043 do_jump_for_compare (comp, if_false_label, NULL_RTX);
11044 }
11045
11046 if (if_true_label)
11047 emit_jump (if_true_label);
11048
11049 if (drop_through_label)
11050 emit_label (drop_through_label);
11051 }
11052
11053 /* Given a comparison expression in rtl form, output conditional branches to
11054 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
11055
11056 static void
11057 do_jump_for_compare (comparison, if_false_label, if_true_label)
11058 rtx comparison, if_false_label, if_true_label;
11059 {
11060 if (if_true_label)
11061 {
11062 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
11063 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])
11064 (if_true_label));
11065 else
11066 abort ();
11067
11068 if (if_false_label)
11069 emit_jump (if_false_label);
11070 }
11071 else if (if_false_label)
11072 {
11073 rtx first = get_last_insn (), insn, branch;
11074 int br_count;
11075
11076 /* Output the branch with the opposite condition. Then try to invert
11077 what is generated. If more than one insn is a branch, or if the
11078 branch is not the last insn written, abort. If we can't invert
11079 the branch, make a true label, redirect this jump to that,
11080 emit a jump to the false label, and define the true label. */
11081 /* ??? Note that we wouldn't have to do any of this nonsense if
11082 we passed both labels into a combined compare-and-branch.
11083 Ah well, jump threading does a good job of repairing the damage. */
11084
11085 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
11086 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])
11087 (if_false_label));
11088 else
11089 abort ();
11090
11091 /* Here we get the first insn that was just emitted. It used to be the
11092 case that, on some machines, emitting the branch would discard
11093 the previous compare insn and emit a replacement. This isn't
11094 done anymore, but abort if we see that FIRST is deleted. */
11095
11096 if (first == 0)
11097 first = get_insns ();
11098 else if (INSN_DELETED_P (first))
11099 abort ();
11100 else
11101 first = NEXT_INSN (first);
11102
11103 /* Look for multiple branches in this sequence, as might be generated
11104 for a multi-word integer comparison. */
11105
11106 br_count = 0;
11107 branch = NULL_RTX;
11108 for (insn = first; insn ; insn = NEXT_INSN (insn))
11109 if (GET_CODE (insn) == JUMP_INSN)
11110 {
11111 branch = insn;
11112 br_count += 1;
11113 }
11114
11115 /* If we've got one branch at the end of the sequence,
11116 we can try to reverse it. */
11117
11118 if (br_count == 1 && NEXT_INSN (branch) == NULL_RTX)
11119 {
11120 rtx insn_label;
11121 insn_label = XEXP (condjump_label (branch), 0);
11122 JUMP_LABEL (branch) = insn_label;
11123
11124 if (insn_label != if_false_label)
11125 abort ();
11126
11127 if (invert_jump (branch, if_false_label))
11128 return;
11129 }
11130
11131 /* Multiple branches, or reversal failed. Convert to branches
11132 around an unconditional jump. */
11133
11134 if_true_label = gen_label_rtx ();
11135 for (insn = first; insn; insn = NEXT_INSN (insn))
11136 if (GET_CODE (insn) == JUMP_INSN)
11137 {
11138 rtx insn_label;
11139 insn_label = XEXP (condjump_label (insn), 0);
11140 JUMP_LABEL (insn) = insn_label;
11141
11142 if (insn_label == if_false_label)
11143 redirect_jump (insn, if_true_label);
11144 }
11145 emit_jump (if_false_label);
11146 emit_label (if_true_label);
11147 }
11148 }
11149 \f
11150 /* Generate code for a comparison expression EXP
11151 (including code to compute the values to be compared)
11152 and set (CC0) according to the result.
11153 SIGNED_CODE should be the rtx operation for this comparison for
11154 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
11155
11156 We force a stack adjustment unless there are currently
11157 things pushed on the stack that aren't yet used. */
11158
11159 static rtx
11160 compare (exp, signed_code, unsigned_code)
11161 register tree exp;
11162 enum rtx_code signed_code, unsigned_code;
11163 {
11164 register rtx op0
11165 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
11166 register rtx op1
11167 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
11168 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
11169 register enum machine_mode mode = TYPE_MODE (type);
11170 int unsignedp = TREE_UNSIGNED (type);
11171 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
11172
11173 #ifdef HAVE_canonicalize_funcptr_for_compare
11174 /* If function pointers need to be "canonicalized" before they can
11175 be reliably compared, then canonicalize them. */
11176 if (HAVE_canonicalize_funcptr_for_compare
11177 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
11178 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
11179 == FUNCTION_TYPE))
11180 {
11181 rtx new_op0 = gen_reg_rtx (mode);
11182
11183 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
11184 op0 = new_op0;
11185 }
11186
11187 if (HAVE_canonicalize_funcptr_for_compare
11188 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
11189 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
11190 == FUNCTION_TYPE))
11191 {
11192 rtx new_op1 = gen_reg_rtx (mode);
11193
11194 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
11195 op1 = new_op1;
11196 }
11197 #endif
11198
11199 return compare_from_rtx (op0, op1, code, unsignedp, mode,
11200 ((mode == BLKmode)
11201 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
11202 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
11203 }
11204
11205 /* Like compare but expects the values to compare as two rtx's.
11206 The decision as to signed or unsigned comparison must be made by the caller.
11207
11208 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
11209 compared.
11210
11211 If ALIGN is non-zero, it is the alignment of this type; if zero, the
11212 size of MODE should be used. */
11213
11214 rtx
11215 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
11216 register rtx op0, op1;
11217 enum rtx_code code;
11218 int unsignedp;
11219 enum machine_mode mode;
11220 rtx size;
11221 int align;
11222 {
11223 rtx tem;
11224
11225 /* If one operand is constant, make it the second one. Only do this
11226 if the other operand is not constant as well. */
11227
11228 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
11229 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
11230 {
11231 tem = op0;
11232 op0 = op1;
11233 op1 = tem;
11234 code = swap_condition (code);
11235 }
11236
11237 if (flag_force_mem)
11238 {
11239 op0 = force_not_mem (op0);
11240 op1 = force_not_mem (op1);
11241 }
11242
11243 do_pending_stack_adjust ();
11244
11245 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
11246 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
11247 return tem;
11248
11249 #if 0
11250 /* There's no need to do this now that combine.c can eliminate lots of
11251 sign extensions. This can be less efficient in certain cases on other
11252 machines. */
11253
11254 /* If this is a signed equality comparison, we can do it as an
11255 unsigned comparison since zero-extension is cheaper than sign
11256 extension and comparisons with zero are done as unsigned. This is
11257 the case even on machines that can do fast sign extension, since
11258 zero-extension is easier to combine with other operations than
11259 sign-extension is. If we are comparing against a constant, we must
11260 convert it to what it would look like unsigned. */
11261 if ((code == EQ || code == NE) && ! unsignedp
11262 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
11263 {
11264 if (GET_CODE (op1) == CONST_INT
11265 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
11266 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
11267 unsignedp = 1;
11268 }
11269 #endif
11270
11271 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
11272
11273 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
11274 }
11275 \f
11276 /* Generate code to calculate EXP using a store-flag instruction
11277 and return an rtx for the result. EXP is either a comparison
11278 or a TRUTH_NOT_EXPR whose operand is a comparison.
11279
11280 If TARGET is nonzero, store the result there if convenient.
11281
11282 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
11283 cheap.
11284
11285 Return zero if there is no suitable set-flag instruction
11286 available on this machine.
11287
11288 Once expand_expr has been called on the arguments of the comparison,
11289 we are committed to doing the store flag, since it is not safe to
11290 re-evaluate the expression. We emit the store-flag insn by calling
11291 emit_store_flag, but only expand the arguments if we have a reason
11292 to believe that emit_store_flag will be successful. If we think that
11293 it will, but it isn't, we have to simulate the store-flag with a
11294 set/jump/set sequence. */
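
/* For example, when no scc instruction applies, `target = (x < y)' is
   simulated at the end of this function by the set/jump/set sequence

	target = 1;
	if (x < y) goto label;
	target = 0;
     label:

   (with the constants swapped when INVERT is set).  */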
11295
11296 static rtx
11297 do_store_flag (exp, target, mode, only_cheap)
11298 tree exp;
11299 rtx target;
11300 enum machine_mode mode;
11301 int only_cheap;
11302 {
11303 enum rtx_code code;
11304 tree arg0, arg1, type;
11305 tree tem;
11306 enum machine_mode operand_mode;
11307 int invert = 0;
11308 int unsignedp;
11309 rtx op0, op1;
11310 enum insn_code icode;
11311 rtx subtarget = target;
11312 rtx result, label;
11313
11314 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
11315 result at the end. We can't simply invert the test since it would
11316 have already been inverted if it were valid. This case occurs for
11317 some floating-point comparisons. */
11318
11319 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
11320 invert = 1, exp = TREE_OPERAND (exp, 0);
11321
11322 arg0 = TREE_OPERAND (exp, 0);
11323 arg1 = TREE_OPERAND (exp, 1);
11324 type = TREE_TYPE (arg0);
11325 operand_mode = TYPE_MODE (type);
11326 unsignedp = TREE_UNSIGNED (type);
11327
11328 /* We won't bother with BLKmode store-flag operations because it would mean
11329 passing a lot of information to emit_store_flag. */
11330 if (operand_mode == BLKmode)
11331 return 0;
11332
11333 /* We won't bother with store-flag operations involving function pointers
11334 when function pointers must be canonicalized before comparisons. */
11335 #ifdef HAVE_canonicalize_funcptr_for_compare
11336 if (HAVE_canonicalize_funcptr_for_compare
11337 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
11338 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
11339 == FUNCTION_TYPE))
11340 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
11341 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
11342 == FUNCTION_TYPE))))
11343 return 0;
11344 #endif
11345
11346 STRIP_NOPS (arg0);
11347 STRIP_NOPS (arg1);
11348
11349 /* Get the rtx comparison code to use. We know that EXP is a comparison
11350 operation of some type. Some comparisons against 1 and -1 can be
11351 converted to comparisons with zero. Do so here so that the tests
11352 below will be aware that we have a comparison with zero. These
11353 tests will not catch constants in the first operand, but constants
11354 are rarely passed as the first operand. */
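
  /* For example, `x < 1' becomes `x <= 0', and a signed `x > -1'
     becomes `x >= 0', so the zero-comparison special cases further
     down apply to them as well.  */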
11355
11356 switch (TREE_CODE (exp))
11357 {
11358 case EQ_EXPR:
11359 code = EQ;
11360 break;
11361 case NE_EXPR:
11362 code = NE;
11363 break;
11364 case LT_EXPR:
11365 if (integer_onep (arg1))
11366 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11367 else
11368 code = unsignedp ? LTU : LT;
11369 break;
11370 case LE_EXPR:
11371 if (! unsignedp && integer_all_onesp (arg1))
11372 arg1 = integer_zero_node, code = LT;
11373 else
11374 code = unsignedp ? LEU : LE;
11375 break;
11376 case GT_EXPR:
11377 if (! unsignedp && integer_all_onesp (arg1))
11378 arg1 = integer_zero_node, code = GE;
11379 else
11380 code = unsignedp ? GTU : GT;
11381 break;
11382 case GE_EXPR:
11383 if (integer_onep (arg1))
11384 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11385 else
11386 code = unsignedp ? GEU : GE;
11387 break;
11388 default:
11389 abort ();
11390 }
11391
11392 /* Put a constant second. */
11393 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
11394 {
11395 tem = arg0; arg0 = arg1; arg1 = tem;
11396 code = swap_condition (code);
11397 }
11398
11399 /* If this is an equality or inequality test of a single bit, we can
11400 do this by shifting the bit being tested to the low-order bit and
11401 masking the result with the constant 1. If the condition was EQ,
11402 we xor it with 1. This does not require an scc insn and is faster
11403 than an scc insn even if we have it. */
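
  /* For example, `(x & 8) != 0' becomes `(x >> 3) & 1', and
     `(x & 8) == 0' becomes `((x >> 3) & 1) ^ 1'; the AND is dropped
     entirely when the bit tested is the sign bit.  */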
11404
11405 if ((code == NE || code == EQ)
11406 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
11407 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11408 {
11409 tree inner = TREE_OPERAND (arg0, 0);
11410 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
11411 int ops_unsignedp;
11412
11413 /* If INNER is a right shift by a constant and the shift count plus
11414 BITNUM does not overflow the type's precision, adjust BITNUM and INNER. */
11415
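      /* For example, `((x >> 4) & 1) != 0' tests bit 4 of X directly,
	 so BITNUM becomes 0 + 4 and INNER becomes X.  */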
11416 if (TREE_CODE (inner) == RSHIFT_EXPR
11417 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
11418 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
11419 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
11420 < TYPE_PRECISION (type)))
11421 {
11422 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
11423 inner = TREE_OPERAND (inner, 0);
11424 }
11425
11426 /* If we are going to be able to omit the AND below, we must do our
11427 operations as unsigned. If we must use the AND, we have a choice.
11428 Normally unsigned is faster, but for some machines signed is. */
11429 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
11430 #ifdef LOAD_EXTEND_OP
11431 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
11432 #else
11433 : 1
11434 #endif
11435 );
11436
11437 if (subtarget == 0 || GET_CODE (subtarget) != REG
11438 || GET_MODE (subtarget) != operand_mode
11439 || ! safe_from_p (subtarget, inner, 1))
11440 subtarget = 0;
11441
11442 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
11443
11444 if (bitnum != 0)
11445 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
11446 size_int (bitnum), subtarget, ops_unsignedp);
11447
11448 if (GET_MODE (op0) != mode)
11449 op0 = convert_to_mode (mode, op0, ops_unsignedp);
11450
11451 if ((code == EQ && ! invert) || (code == NE && invert))
11452 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
11453 ops_unsignedp, OPTAB_LIB_WIDEN);
11454
11455 /* Put the AND last so it can combine with more things. */
11456 if (bitnum != TYPE_PRECISION (type) - 1)
11457 op0 = expand_and (op0, const1_rtx, subtarget);
11458
11459 return op0;
11460 }
11461
11462 /* Now see if we are likely to be able to do this. Return if not. */
11463 if (! can_compare_p (operand_mode))
11464 return 0;
11465 icode = setcc_gen_code[(int) code];
11466 if (icode == CODE_FOR_nothing
11467 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
11468 {
11469 /* We can only do this if it is one of the special cases that
11470 can be handled without an scc insn. */
11471 if ((code == LT && integer_zerop (arg1))
11472 || (! only_cheap && code == GE && integer_zerop (arg1)))
11473 ;
11474 else if (BRANCH_COST >= 0
11475 && ! only_cheap && (code == NE || code == EQ)
11476 && TREE_CODE (type) != REAL_TYPE
11477 && ((abs_optab->handlers[(int) operand_mode].insn_code
11478 != CODE_FOR_nothing)
11479 || (ffs_optab->handlers[(int) operand_mode].insn_code
11480 != CODE_FOR_nothing)))
11481 ;
11482 else
11483 return 0;
11484 }
11485
11486 preexpand_calls (exp);
11487 if (subtarget == 0 || GET_CODE (subtarget) != REG
11488 || GET_MODE (subtarget) != operand_mode
11489 || ! safe_from_p (subtarget, arg1, 1))
11490 subtarget = 0;
11491
11492 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
11493 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
11494
11495 if (target == 0)
11496 target = gen_reg_rtx (mode);
11497
11498 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
11499 because, if emit_store_flag does anything, it will succeed and
11500 OP0 and OP1 will not be used subsequently. */
11501
11502 result = emit_store_flag (target, code,
11503 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
11504 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
11505 operand_mode, unsignedp, 1);
11506
11507 if (result)
11508 {
11509 if (invert)
11510 result = expand_binop (mode, xor_optab, result, const1_rtx,
11511 result, 0, OPTAB_LIB_WIDEN);
11512 return result;
11513 }
11514
11515 /* If this failed, we have to do this with set/compare/jump/set code. */
11516 if (GET_CODE (target) != REG
11517 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
11518 target = gen_reg_rtx (GET_MODE (target));
11519
11520 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
11521 result = compare_from_rtx (op0, op1, code, unsignedp,
11522 operand_mode, NULL_RTX, 0);
11523 if (GET_CODE (result) == CONST_INT)
11524 return (((result == const0_rtx && ! invert)
11525 || (result != const0_rtx && invert))
11526 ? const0_rtx : const1_rtx);
11527
11528 label = gen_label_rtx ();
11529 if (bcc_gen_fctn[(int) code] == 0)
11530 abort ();
11531
11532 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
11533 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
11534 emit_label (label);
11535
11536 return target;
11537 }
11538 \f
11539 /* Generate a tablejump instruction (used for switch statements). */
11540
11541 #ifdef HAVE_tablejump
11542
11543 /* INDEX is the value being switched on, with the lowest value
11544 in the table already subtracted.
11545 MODE is its expected mode (needed if INDEX is constant).
11546 RANGE is the length of the jump table.
11547 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11548
11549 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11550 index value is out of range. */
11551
11552 void
11553 do_tablejump (index, mode, range, table_label, default_label)
11554 rtx index, range, table_label, default_label;
11555 enum machine_mode mode;
11556 {
11557 register rtx temp, vector;
11558
11559 /* Do an unsigned comparison (in the proper mode) between the index
11560 expression and the value which represents the length of the range.
11561 Since we just finished subtracting the lower bound of the range
11562 from the index expression, this comparison allows us to simultaneously
11563 check that the original index expression value is both greater than
11564 or equal to the minimum value of the range and less than or equal to
11565 the maximum value of the range. */
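
  /* For example, for `switch (i)' with case labels 5 through 10 the
     caller passes INDEX = i - 5 and RANGE = 5; the single unsigned
     test `INDEX > 5' then rejects both i < 5 and i > 10, because a
     negative difference wraps around to a huge unsigned value.  */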
11566
11567 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
11568 emit_jump_insn (gen_bgtu (default_label));
11569
11570 /* If index is in range, it must fit in Pmode.
11571 Convert to Pmode so we can index with it. */
11572 if (mode != Pmode)
11573 index = convert_to_mode (Pmode, index, 1);
11574
11575 /* Don't let a MEM slip through, because then INDEX that comes
11576 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11577 and break_out_memory_refs will go to work on it and mess it up. */
11578 #ifdef PIC_CASE_VECTOR_ADDRESS
11579 if (flag_pic && GET_CODE (index) != REG)
11580 index = copy_to_mode_reg (Pmode, index);
11581 #endif
11582
11583 /* If flag_force_addr were to affect this address
11584 it could interfere with the tricky assumptions made
11585 about addresses that contain label-refs,
11586 which may be valid only very near the tablejump itself. */
11587 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11588 GET_MODE_SIZE, because this indicates how large insns are. The other
11589 uses should all be Pmode, because they are addresses. This code
11590 could fail if addresses and insns are not the same size. */
11591 index = gen_rtx_PLUS (Pmode,
11592 gen_rtx_MULT (Pmode, index,
11593 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11594 gen_rtx_LABEL_REF (Pmode, table_label));
11595 #ifdef PIC_CASE_VECTOR_ADDRESS
11596 if (flag_pic)
11597 index = PIC_CASE_VECTOR_ADDRESS (index);
11598 else
11599 #endif
11600 index = memory_address_noforce (CASE_VECTOR_MODE, index);
11601 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11602 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
11603 RTX_UNCHANGING_P (vector) = 1;
11604 convert_move (temp, vector, 0);
11605
11606 emit_jump_insn (gen_tablejump (temp, table_label));
11607
11608 /* If we are generating PIC code or if the table is PC-relative, the
11609 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11610 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11611 emit_barrier ();
11612 }
11613
11614 #endif /* HAVE_tablejump */