1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */
24 #include "coretypes.h"
32 #include "hard-reg-set.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
44 #include "typeclass.h"
47 #include "langhooks.h"
52 /* Decide whether a function's arguments should be processed
53 from first to last or from last to first.
55 They should if the stack and args grow in opposite directions, but
56 only if we have push insns. */
60 #ifndef PUSH_ARGS_REVERSED
61 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
62 #define PUSH_ARGS_REVERSED /* If it's last to first. */
68 #ifndef STACK_PUSH_CODE
69 #ifdef STACK_GROWS_DOWNWARD
70 #define STACK_PUSH_CODE PRE_DEC
72 #define STACK_PUSH_CODE PRE_INC
76 /* Convert defined/undefined to boolean. */
77 #ifdef TARGET_MEM_FUNCTIONS
78 #undef TARGET_MEM_FUNCTIONS
79 #define TARGET_MEM_FUNCTIONS 1
81 #define TARGET_MEM_FUNCTIONS 0
85 /* If this is nonzero, we do not bother generating VOLATILE
86 around volatile memory references, and we are willing to
87 output indirect addresses. If cse is to follow, we reject
88 indirect addresses so a useful potential cse is generated;
89 if it is used only once, instruction combination will produce
90 the same indirect address eventually. */
93 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
94 tree placeholder_list
= 0;
96 /* This structure is used by move_by_pieces to describe the move to
107 int explicit_inc_from
;
108 unsigned HOST_WIDE_INT len
;
109 HOST_WIDE_INT offset
;
113 /* This structure is used by store_by_pieces to describe the clear to
116 struct store_by_pieces
122 unsigned HOST_WIDE_INT len
;
123 HOST_WIDE_INT offset
;
124 rtx (*constfun
) (void *, HOST_WIDE_INT
, enum machine_mode
);
129 static rtx
enqueue_insn (rtx
, rtx
);
130 static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT
,
132 static void move_by_pieces_1 (rtx (*) (rtx
, ...), enum machine_mode
,
133 struct move_by_pieces
*);
134 static bool block_move_libcall_safe_for_call_parm (void);
135 static bool emit_block_move_via_movstr (rtx
, rtx
, rtx
, unsigned);
136 static rtx
emit_block_move_via_libcall (rtx
, rtx
, rtx
);
137 static tree
emit_block_move_libcall_fn (int);
138 static void emit_block_move_via_loop (rtx
, rtx
, rtx
, unsigned);
139 static rtx
clear_by_pieces_1 (void *, HOST_WIDE_INT
, enum machine_mode
);
140 static void clear_by_pieces (rtx
, unsigned HOST_WIDE_INT
, unsigned int);
141 static void store_by_pieces_1 (struct store_by_pieces
*, unsigned int);
142 static void store_by_pieces_2 (rtx (*) (rtx
, ...), enum machine_mode
,
143 struct store_by_pieces
*);
144 static bool clear_storage_via_clrstr (rtx
, rtx
, unsigned);
145 static rtx
clear_storage_via_libcall (rtx
, rtx
);
146 static tree
clear_storage_libcall_fn (int);
147 static rtx
compress_float_constant (rtx
, rtx
);
148 static rtx
get_subtarget (rtx
);
149 static int is_zeros_p (tree
);
150 static void store_constructor_field (rtx
, unsigned HOST_WIDE_INT
,
151 HOST_WIDE_INT
, enum machine_mode
,
152 tree
, tree
, int, int);
153 static void store_constructor (tree
, rtx
, int, HOST_WIDE_INT
);
154 static rtx
store_field (rtx
, HOST_WIDE_INT
, HOST_WIDE_INT
, enum machine_mode
,
155 tree
, enum machine_mode
, int, tree
, int);
156 static rtx
var_rtx (tree
);
158 static unsigned HOST_WIDE_INT
highest_pow2_factor (tree
);
159 static unsigned HOST_WIDE_INT
highest_pow2_factor_for_type (tree
, tree
);
161 static int is_aligning_offset (tree
, tree
);
162 static rtx
expand_increment (tree
, int, int);
163 static void expand_operands (tree
, tree
, rtx
, rtx
*, rtx
*,
164 enum expand_modifier
);
165 static rtx
do_store_flag (tree
, rtx
, enum machine_mode
, int);
167 static void emit_single_push_insn (enum machine_mode
, rtx
, tree
);
169 static void do_tablejump (rtx
, enum machine_mode
, rtx
, rtx
, rtx
);
170 static rtx
const_vector_from_tree (tree
);
172 /* Record for each mode whether we can move a register directly to or
173 from an object of that mode in memory. If we can't, we won't try
174 to use that mode directly when accessing a field of that mode. */
176 static char direct_load
[NUM_MACHINE_MODES
];
177 static char direct_store
[NUM_MACHINE_MODES
];
179 /* Record for each mode whether we can float-extend from memory. */
181 static bool float_extend_from_mem
[NUM_MACHINE_MODES
][NUM_MACHINE_MODES
];
183 /* This macro is used to determine whether move_by_pieces should be called
184 to perform a structure copy. */
185 #ifndef MOVE_BY_PIECES_P
186 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
187 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
190 /* This macro is used to determine whether clear_by_pieces should be
191 called to clear storage. */
192 #ifndef CLEAR_BY_PIECES_P
193 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
194 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
197 /* This macro is used to determine whether store_by_pieces should be
198 called to "memset" storage with byte values other than zero, or
199 to "memcpy" storage when the source is a constant string. */
200 #ifndef STORE_BY_PIECES_P
201 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
204 /* This array records the insn_code of insns to perform block moves. */
205 enum insn_code movstr_optab
[NUM_MACHINE_MODES
];
207 /* This array records the insn_code of insns to perform block clears. */
208 enum insn_code clrstr_optab
[NUM_MACHINE_MODES
];
210 /* These arrays record the insn_code of two different kinds of insns
211 to perform block compares. */
212 enum insn_code cmpstr_optab
[NUM_MACHINE_MODES
];
213 enum insn_code cmpmem_optab
[NUM_MACHINE_MODES
];
215 /* Stack of EXPR_WITH_FILE_LOCATION nested expressions. */
216 struct file_stack
*expr_wfl_stack
;
218 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
220 #ifndef SLOW_UNALIGNED_ACCESS
221 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
224 /* This is run once per compilation to set up which modes can be used
225 directly in memory and to initialize the block move optab. */
228 init_expr_once (void)
231 enum machine_mode mode
;
236 /* Try indexing by frame ptr and try by stack ptr.
237 It is known that on the Convex the stack ptr isn't a valid index.
238 With luck, one or the other is valid on any machine. */
239 mem
= gen_rtx_MEM (VOIDmode
, stack_pointer_rtx
);
240 mem1
= gen_rtx_MEM (VOIDmode
, frame_pointer_rtx
);
242 /* A scratch register we can modify in-place below to avoid
243 useless RTL allocations. */
244 reg
= gen_rtx_REG (VOIDmode
, -1);
246 insn
= rtx_alloc (INSN
);
247 pat
= gen_rtx_SET (0, NULL_RTX
, NULL_RTX
);
248 PATTERN (insn
) = pat
;
250 for (mode
= VOIDmode
; (int) mode
< NUM_MACHINE_MODES
;
251 mode
= (enum machine_mode
) ((int) mode
+ 1))
255 direct_load
[(int) mode
] = direct_store
[(int) mode
] = 0;
256 PUT_MODE (mem
, mode
);
257 PUT_MODE (mem1
, mode
);
258 PUT_MODE (reg
, mode
);
260 /* See if there is some register that can be used in this mode and
261 directly loaded or stored from memory. */
263 if (mode
!= VOIDmode
&& mode
!= BLKmode
)
264 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
265 && (direct_load
[(int) mode
] == 0 || direct_store
[(int) mode
] == 0);
268 if (! HARD_REGNO_MODE_OK (regno
, mode
))
274 SET_DEST (pat
) = reg
;
275 if (recog (pat
, insn
, &num_clobbers
) >= 0)
276 direct_load
[(int) mode
] = 1;
278 SET_SRC (pat
) = mem1
;
279 SET_DEST (pat
) = reg
;
280 if (recog (pat
, insn
, &num_clobbers
) >= 0)
281 direct_load
[(int) mode
] = 1;
284 SET_DEST (pat
) = mem
;
285 if (recog (pat
, insn
, &num_clobbers
) >= 0)
286 direct_store
[(int) mode
] = 1;
289 SET_DEST (pat
) = mem1
;
290 if (recog (pat
, insn
, &num_clobbers
) >= 0)
291 direct_store
[(int) mode
] = 1;
295 mem
= gen_rtx_MEM (VOIDmode
, gen_rtx_raw_REG (Pmode
, 10000));
297 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
); mode
!= VOIDmode
;
298 mode
= GET_MODE_WIDER_MODE (mode
))
300 enum machine_mode srcmode
;
301 for (srcmode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
); srcmode
!= mode
;
302 srcmode
= GET_MODE_WIDER_MODE (srcmode
))
306 ic
= can_extend_p (mode
, srcmode
, 0);
307 if (ic
== CODE_FOR_nothing
)
310 PUT_MODE (mem
, srcmode
);
312 if ((*insn_data
[ic
].operand
[1].predicate
) (mem
, srcmode
))
313 float_extend_from_mem
[mode
][srcmode
] = true;
318 /* This is run at the start of compiling a function. */
323 cfun
->expr
= ggc_alloc_cleared (sizeof (struct expr_status
));
326 /* Small sanity check that the queue is empty at the end of a function. */
329 finish_expr_for_function (void)
335 /* Manage the queue of increment instructions to be output
336 for POSTINCREMENT_EXPR expressions, etc. */
338 /* Queue up to increment (or change) VAR later. BODY says how:
339 BODY should be the same thing you would pass to emit_insn
340 to increment right away. It will go to emit_insn later on.
342 The value is a QUEUED expression to be used in place of VAR
343 where you want to guarantee the pre-incrementation value of VAR. */
346 enqueue_insn (rtx var
, rtx body
)
/* NOTE(review): this function's return-type line, braces and blank lines
   were dropped by the extraction; the code tokens below are unchanged.  */
/* Link a new QUEUED node for VAR onto the head of the global
   pending_chain.  The insn and copy slots start out as NULL_RTX;
   QUEUED_INSN is filled in later, when the queued increment BODY is
   actually emitted (see the emit_queue fragment further down).  */
348 pending_chain
= gen_rtx_QUEUED (GET_MODE (var
), var
, NULL_RTX
, NULL_RTX
,
349 body
, pending_chain
);
/* Hand the QUEUED expression back so the caller can use it in place of
   VAR wherever the pre-increment value must be guaranteed.  */
350 return pending_chain
;
353 /* Use protect_from_queue to convert a QUEUED expression
354 into something that you can put immediately into an instruction.
355 If the queued incrementation has not happened yet,
356 protect_from_queue returns the variable itself.
357 If the incrementation has happened, protect_from_queue returns a temp
358 that contains a copy of the old value of the variable.
360 Any time an rtx which might possibly be a QUEUED is to be put
361 into an instruction, it must be passed through protect_from_queue first.
362 QUEUED expressions are not meaningful in instructions.
364 Do not pass a value through protect_from_queue and then hold
365 on to it for a while before putting it in an instruction!
366 If the queue is flushed in between, incorrect code will result. */
369 protect_from_queue (rtx x
, int modify
)
371 RTX_CODE code
= GET_CODE (x
);
373 #if 0 /* A QUEUED can hang around after the queue is forced out. */
374 /* Shortcut for most common case. */
375 if (pending_chain
== 0)
381 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
382 use of autoincrement. Make a copy of the contents of the memory
383 location rather than a copy of the address, but not if the value is
384 of mode BLKmode. Don't modify X in place since it might be
386 if (code
== MEM
&& GET_MODE (x
) != BLKmode
387 && GET_CODE (XEXP (x
, 0)) == QUEUED
&& !modify
)
390 rtx
new = replace_equiv_address_nv (x
, QUEUED_VAR (y
));
394 rtx temp
= gen_reg_rtx (GET_MODE (x
));
396 emit_insn_before (gen_move_insn (temp
, new),
401 /* Copy the address into a pseudo, so that the returned value
402 remains correct across calls to emit_queue. */
403 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
406 /* Otherwise, recursively protect the subexpressions of all
407 the kinds of rtx's that can contain a QUEUED. */
410 rtx tem
= protect_from_queue (XEXP (x
, 0), 0);
411 if (tem
!= XEXP (x
, 0))
417 else if (code
== PLUS
|| code
== MULT
)
419 rtx new0
= protect_from_queue (XEXP (x
, 0), 0);
420 rtx new1
= protect_from_queue (XEXP (x
, 1), 0);
421 if (new0
!= XEXP (x
, 0) || new1
!= XEXP (x
, 1))
430 /* If the increment has not happened, use the variable itself. Copy it
431 into a new pseudo so that the value remains correct across calls to
433 if (QUEUED_INSN (x
) == 0)
434 return copy_to_reg (QUEUED_VAR (x
));
435 /* If the increment has happened and a pre-increment copy exists,
437 if (QUEUED_COPY (x
) != 0)
438 return QUEUED_COPY (x
);
439 /* The increment has happened but we haven't set up a pre-increment copy.
440 Set one up now, and use it. */
441 QUEUED_COPY (x
) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x
)));
442 emit_insn_before (gen_move_insn (QUEUED_COPY (x
), QUEUED_VAR (x
)),
444 return QUEUED_COPY (x
);
447 /* Return nonzero if X contains a QUEUED expression:
448 if it contains anything that will be altered by a queued increment.
449 We handle only combinations of MEM, PLUS, MINUS and MULT operators
450 since memory addresses generally contain only those. */
453 queued_subexp_p (rtx x
)
455 enum rtx_code code
= GET_CODE (x
);
/* NOTE(review): the switch on CODE (its case labels and the base cases)
   was lost in extraction.  From what remains, unary-operand codes recurse
   on operand 0 and binary codes recurse on both operands -- TODO confirm
   against the full source.  */
461 return queued_subexp_p (XEXP (x
, 0));
465 return (queued_subexp_p (XEXP (x
, 0))
466 || queued_subexp_p (XEXP (x
, 1)));
472 /* Perform all the pending incrementations. */
/* NOTE(review): the enclosing function header (emit_queue in the full
   file -- TODO confirm), its braces, the switch's case labels and several
   statements were dropped by the extraction; code tokens below are
   byte-identical to what survived.  */
478 while ((p
= pending_chain
))
/* Fetch the rtl that enqueue_insn recorded for this QUEUED node.  */
480 rtx body
= QUEUED_BODY (p
);
/* Dispatch on what kind of rtl was queued.  */
482 switch (GET_CODE (body
))
/* BODY is already an insn (or similar): record it directly.  */
490 QUEUED_INSN (p
) = body
;
494 #ifdef ENABLE_CHECKING
/* Otherwise emit BODY now and remember the emitted insn.  */
501 QUEUED_INSN (p
) = emit_insn (body
);
/* Pop this entry and continue with the rest of the queue.  */
505 pending_chain
= QUEUED_NEXT (p
);
509 /* Copy data from FROM to TO, where the machine modes are not the same.
510 Both modes may be integer, or both may be floating.
511 UNSIGNEDP should be nonzero if FROM is an unsigned type.
512 This causes zero-extension instead of sign-extension. */
515 convert_move (rtx to
, rtx from
, int unsignedp
)
517 enum machine_mode to_mode
= GET_MODE (to
);
518 enum machine_mode from_mode
= GET_MODE (from
);
519 int to_real
= GET_MODE_CLASS (to_mode
) == MODE_FLOAT
;
520 int from_real
= GET_MODE_CLASS (from_mode
) == MODE_FLOAT
;
524 /* rtx code for making an equivalent value. */
525 enum rtx_code equiv_code
= (unsignedp
< 0 ? UNKNOWN
526 : (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
));
528 to
= protect_from_queue (to
, 1);
529 from
= protect_from_queue (from
, 0);
531 if (to_real
!= from_real
)
534 /* If FROM is a SUBREG that indicates that we have already done at least
535 the required extension, strip it. We don't handle such SUBREGs as
538 if (GET_CODE (from
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (from
)
539 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from
)))
540 >= GET_MODE_SIZE (to_mode
))
541 && SUBREG_PROMOTED_UNSIGNED_P (from
) == unsignedp
)
542 from
= gen_lowpart (to_mode
, from
), from_mode
= to_mode
;
544 if (GET_CODE (to
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (to
))
547 if (to_mode
== from_mode
548 || (from_mode
== VOIDmode
&& CONSTANT_P (from
)))
550 emit_move_insn (to
, from
);
554 if (VECTOR_MODE_P (to_mode
) || VECTOR_MODE_P (from_mode
))
556 if (GET_MODE_BITSIZE (from_mode
) != GET_MODE_BITSIZE (to_mode
))
559 if (VECTOR_MODE_P (to_mode
))
560 from
= simplify_gen_subreg (to_mode
, from
, GET_MODE (from
), 0);
562 to
= simplify_gen_subreg (from_mode
, to
, GET_MODE (to
), 0);
564 emit_move_insn (to
, from
);
568 if (GET_CODE (to
) == CONCAT
&& GET_CODE (from
) == CONCAT
)
570 convert_move (XEXP (to
, 0), XEXP (from
, 0), unsignedp
);
571 convert_move (XEXP (to
, 1), XEXP (from
, 1), unsignedp
);
580 if (GET_MODE_PRECISION (from_mode
) < GET_MODE_PRECISION (to_mode
))
582 else if (GET_MODE_PRECISION (from_mode
) > GET_MODE_PRECISION (to_mode
))
587 /* Try converting directly if the insn is supported. */
589 code
= tab
->handlers
[to_mode
][from_mode
].insn_code
;
590 if (code
!= CODE_FOR_nothing
)
592 emit_unop_insn (code
, to
, from
,
593 tab
== sext_optab
? FLOAT_EXTEND
: FLOAT_TRUNCATE
);
597 /* Otherwise use a libcall. */
598 libcall
= tab
->handlers
[to_mode
][from_mode
].libfunc
;
601 /* This conversion is not implemented yet. */
605 value
= emit_library_call_value (libcall
, NULL_RTX
, LCT_CONST
, to_mode
,
607 insns
= get_insns ();
609 emit_libcall_block (insns
, to
, value
,
610 tab
== trunc_optab
? gen_rtx_FLOAT_TRUNCATE (to_mode
,
612 : gen_rtx_FLOAT_EXTEND (to_mode
, from
));
616 /* Handle pointer conversion. */ /* SPEE 900220. */
617 /* Targets are expected to provide conversion insns between PxImode and
618 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
619 if (GET_MODE_CLASS (to_mode
) == MODE_PARTIAL_INT
)
621 enum machine_mode full_mode
622 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode
), MODE_INT
);
624 if (trunc_optab
->handlers
[to_mode
][full_mode
].insn_code
628 if (full_mode
!= from_mode
)
629 from
= convert_to_mode (full_mode
, from
, unsignedp
);
630 emit_unop_insn (trunc_optab
->handlers
[to_mode
][full_mode
].insn_code
,
634 if (GET_MODE_CLASS (from_mode
) == MODE_PARTIAL_INT
)
636 enum machine_mode full_mode
637 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode
), MODE_INT
);
639 if (sext_optab
->handlers
[full_mode
][from_mode
].insn_code
643 emit_unop_insn (sext_optab
->handlers
[full_mode
][from_mode
].insn_code
,
645 if (to_mode
== full_mode
)
648 /* else proceed to integer conversions below. */
649 from_mode
= full_mode
;
652 /* Now both modes are integers. */
654 /* Handle expanding beyond a word. */
655 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
)
656 && GET_MODE_BITSIZE (to_mode
) > BITS_PER_WORD
)
663 enum machine_mode lowpart_mode
;
664 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
666 /* Try converting directly if the insn is supported. */
667 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
670 /* If FROM is a SUBREG, put it into a register. Do this
671 so that we always generate the same set of insns for
672 better cse'ing; if an intermediate assignment occurred,
673 we won't be doing the operation directly on the SUBREG. */
674 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
675 from
= force_reg (from_mode
, from
);
676 emit_unop_insn (code
, to
, from
, equiv_code
);
679 /* Next, try converting via full word. */
680 else if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
681 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
682 != CODE_FOR_nothing
))
684 if (GET_CODE (to
) == REG
)
685 emit_insn (gen_rtx_CLOBBER (VOIDmode
, to
));
686 convert_move (gen_lowpart (word_mode
, to
), from
, unsignedp
);
687 emit_unop_insn (code
, to
,
688 gen_lowpart (word_mode
, to
), equiv_code
);
692 /* No special multiword conversion insn; do it by hand. */
695 /* Since we will turn this into a no conflict block, we must ensure
696 that the source does not overlap the target. */
698 if (reg_overlap_mentioned_p (to
, from
))
699 from
= force_reg (from_mode
, from
);
701 /* Get a copy of FROM widened to a word, if necessary. */
702 if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
)
703 lowpart_mode
= word_mode
;
705 lowpart_mode
= from_mode
;
707 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
709 lowpart
= gen_lowpart (lowpart_mode
, to
);
710 emit_move_insn (lowpart
, lowfrom
);
712 /* Compute the value to put in each remaining word. */
714 fill_value
= const0_rtx
;
719 && insn_data
[(int) CODE_FOR_slt
].operand
[0].mode
== word_mode
720 && STORE_FLAG_VALUE
== -1)
722 emit_cmp_insn (lowfrom
, const0_rtx
, NE
, NULL_RTX
,
724 fill_value
= gen_reg_rtx (word_mode
);
725 emit_insn (gen_slt (fill_value
));
731 = expand_shift (RSHIFT_EXPR
, lowpart_mode
, lowfrom
,
732 size_int (GET_MODE_BITSIZE (lowpart_mode
) - 1),
734 fill_value
= convert_to_mode (word_mode
, fill_value
, 1);
738 /* Fill the remaining words. */
739 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
741 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
742 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
747 if (fill_value
!= subword
)
748 emit_move_insn (subword
, fill_value
);
751 insns
= get_insns ();
754 emit_no_conflict_block (insns
, to
, from
, NULL_RTX
,
755 gen_rtx_fmt_e (equiv_code
, to_mode
, copy_rtx (from
)));
759 /* Truncating multi-word to a word or less. */
760 if (GET_MODE_BITSIZE (from_mode
) > BITS_PER_WORD
761 && GET_MODE_BITSIZE (to_mode
) <= BITS_PER_WORD
)
763 if (!((GET_CODE (from
) == MEM
764 && ! MEM_VOLATILE_P (from
)
765 && direct_load
[(int) to_mode
]
766 && ! mode_dependent_address_p (XEXP (from
, 0)))
767 || GET_CODE (from
) == REG
768 || GET_CODE (from
) == SUBREG
))
769 from
= force_reg (from_mode
, from
);
770 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
774 /* Now follow all the conversions between integers
775 no more than a word long. */
777 /* For truncation, usually we can just refer to FROM in a narrower mode. */
778 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
779 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
780 GET_MODE_BITSIZE (from_mode
)))
782 if (!((GET_CODE (from
) == MEM
783 && ! MEM_VOLATILE_P (from
)
784 && direct_load
[(int) to_mode
]
785 && ! mode_dependent_address_p (XEXP (from
, 0)))
786 || GET_CODE (from
) == REG
787 || GET_CODE (from
) == SUBREG
))
788 from
= force_reg (from_mode
, from
);
789 if (GET_CODE (from
) == REG
&& REGNO (from
) < FIRST_PSEUDO_REGISTER
790 && ! HARD_REGNO_MODE_OK (REGNO (from
), to_mode
))
791 from
= copy_to_reg (from
);
792 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
796 /* Handle extension. */
797 if (GET_MODE_BITSIZE (to_mode
) > GET_MODE_BITSIZE (from_mode
))
799 /* Convert directly if that works. */
800 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
804 from
= force_not_mem (from
);
806 emit_unop_insn (code
, to
, from
, equiv_code
);
811 enum machine_mode intermediate
;
815 /* Search for a mode to convert via. */
816 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
817 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
818 if (((can_extend_p (to_mode
, intermediate
, unsignedp
)
820 || (GET_MODE_SIZE (to_mode
) < GET_MODE_SIZE (intermediate
)
821 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
822 GET_MODE_BITSIZE (intermediate
))))
823 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
824 != CODE_FOR_nothing
))
826 convert_move (to
, convert_to_mode (intermediate
, from
,
827 unsignedp
), unsignedp
);
831 /* No suitable intermediate mode.
832 Generate what we need with shifts. */
833 shift_amount
= build_int_2 (GET_MODE_BITSIZE (to_mode
)
834 - GET_MODE_BITSIZE (from_mode
), 0);
835 from
= gen_lowpart (to_mode
, force_reg (from_mode
, from
));
836 tmp
= expand_shift (LSHIFT_EXPR
, to_mode
, from
, shift_amount
,
838 tmp
= expand_shift (RSHIFT_EXPR
, to_mode
, tmp
, shift_amount
,
841 emit_move_insn (to
, tmp
);
846 /* Support special truncate insns for certain modes. */
847 if (trunc_optab
->handlers
[to_mode
][from_mode
].insn_code
!= CODE_FOR_nothing
)
849 emit_unop_insn (trunc_optab
->handlers
[to_mode
][from_mode
].insn_code
,
854 /* Handle truncation of volatile memrefs, and so on;
855 the things that couldn't be truncated directly,
856 and for which there was no special instruction.
858 ??? Code above formerly short-circuited this, for most integer
859 mode pairs, with a force_reg in from_mode followed by a recursive
860 call to this routine. Appears always to have been wrong. */
861 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
))
863 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
864 emit_move_insn (to
, temp
);
868 /* Mode combination is not recognized. */
872 /* Return an rtx for a value that would result
873 from converting X to mode MODE.
874 Both X and MODE may be floating, or both integer.
875 UNSIGNEDP is nonzero if X is an unsigned value.
876 This can be done by referring to a part of X in place
877 or by copying to a new temporary with conversion.
879 This function *must not* call protect_from_queue
880 except when putting X into an insn (in which case convert_move does it). */
883 convert_to_mode (enum machine_mode mode
, rtx x
, int unsignedp
)
/* Pass VOIDmode for OLDMODE: convert_modes reads the real mode off X
   itself whenever GET_MODE (x) != VOIDmode, so X's own mode governs.  */
885 return convert_modes (mode
, VOIDmode
, x
, unsignedp
);
888 /* Return an rtx for a value that would result
889 from converting X from mode OLDMODE to mode MODE.
890 Both modes may be floating, or both integer.
891 UNSIGNEDP is nonzero if X is an unsigned value.
893 This can be done by referring to a part of X in place
894 or by copying to a new temporary with conversion.
896 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
898 This function *must not* call protect_from_queue
899 except when putting X into an insn (in which case convert_move does it). */
902 convert_modes (enum machine_mode mode
, enum machine_mode oldmode
, rtx x
, int unsignedp
)
906 /* If FROM is a SUBREG that indicates that we have already done at least
907 the required extension, strip it. */
909 if (GET_CODE (x
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (x
)
910 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) >= GET_MODE_SIZE (mode
)
911 && SUBREG_PROMOTED_UNSIGNED_P (x
) == unsignedp
)
912 x
= gen_lowpart (mode
, x
);
914 if (GET_MODE (x
) != VOIDmode
)
915 oldmode
= GET_MODE (x
);
920 /* There is one case that we must handle specially: If we are converting
921 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
922 we are to interpret the constant as unsigned, gen_lowpart will do
923 the wrong if the constant appears negative. What we want to do is
924 make the high-order word of the constant zero, not all ones. */
926 if (unsignedp
&& GET_MODE_CLASS (mode
) == MODE_INT
927 && GET_MODE_BITSIZE (mode
) == 2 * HOST_BITS_PER_WIDE_INT
928 && GET_CODE (x
) == CONST_INT
&& INTVAL (x
) < 0)
930 HOST_WIDE_INT val
= INTVAL (x
);
932 if (oldmode
!= VOIDmode
933 && HOST_BITS_PER_WIDE_INT
> GET_MODE_BITSIZE (oldmode
))
935 int width
= GET_MODE_BITSIZE (oldmode
);
937 /* We need to zero extend VAL. */
938 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
941 return immed_double_const (val
, (HOST_WIDE_INT
) 0, mode
);
944 /* We can do this with a gen_lowpart if both desired and current modes
945 are integer, and this is either a constant integer, a register, or a
946 non-volatile MEM. Except for the constant case where MODE is no
947 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
949 if ((GET_CODE (x
) == CONST_INT
950 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
951 || (GET_MODE_CLASS (mode
) == MODE_INT
952 && GET_MODE_CLASS (oldmode
) == MODE_INT
953 && (GET_CODE (x
) == CONST_DOUBLE
954 || (GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (oldmode
)
955 && ((GET_CODE (x
) == MEM
&& ! MEM_VOLATILE_P (x
)
956 && direct_load
[(int) mode
])
957 || (GET_CODE (x
) == REG
958 && (! HARD_REGISTER_P (x
)
959 || HARD_REGNO_MODE_OK (REGNO (x
), mode
))
960 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode
),
961 GET_MODE_BITSIZE (GET_MODE (x
)))))))))
963 /* ?? If we don't know OLDMODE, we have to assume here that
964 X does not need sign- or zero-extension. This may not be
965 the case, but it's the best we can do. */
966 if (GET_CODE (x
) == CONST_INT
&& oldmode
!= VOIDmode
967 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (oldmode
))
969 HOST_WIDE_INT val
= INTVAL (x
);
970 int width
= GET_MODE_BITSIZE (oldmode
);
972 /* We must sign or zero-extend in this case. Start by
973 zero-extending, then sign extend if we need to. */
974 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
976 && (val
& ((HOST_WIDE_INT
) 1 << (width
- 1))))
977 val
|= (HOST_WIDE_INT
) (-1) << width
;
979 return gen_int_mode (val
, mode
);
982 return gen_lowpart (mode
, x
);
985 /* Converting from integer constant into mode is always equivalent to an
987 if (VECTOR_MODE_P (mode
) && GET_MODE (x
) == VOIDmode
)
989 if (GET_MODE_BITSIZE (mode
) != GET_MODE_BITSIZE (oldmode
))
991 return simplify_gen_subreg (mode
, x
, oldmode
, 0);
994 temp
= gen_reg_rtx (mode
);
995 convert_move (temp
, x
, unsignedp
);
999 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1000 store efficiently. Due to internal GCC limitations, this is
1001 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1002 for an immediate constant. */
1004 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1006 /* Determine whether the LEN bytes can be moved by using several move
1007 instructions. Return nonzero if a call to move_by_pieces should succeed. */
1011 can_move_by_pieces (unsigned HOST_WIDE_INT len
,
1012 unsigned int align ATTRIBUTE_UNUSED
)
/* MOVE_BY_PIECES_P (given a default definition earlier in this file when
   the target does not override it) compares move_by_pieces_ninsns
   (LEN, ALIGN) against MOVE_RATIO.  ALIGN is marked ATTRIBUTE_UNUSED
   because a target's own MOVE_BY_PIECES_P may ignore it.  */
1014 return MOVE_BY_PIECES_P (len
, align
);
1017 /* Generate several move instructions to copy LEN bytes from block FROM to
1018 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1019 and TO through protect_from_queue before calling.
1021 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1022 used to push FROM to the stack.
1024 ALIGN is maximum stack alignment we can assume.
1026 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1027 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
1031 move_by_pieces (rtx to
, rtx from
, unsigned HOST_WIDE_INT len
,
1032 unsigned int align
, int endp
)
1034 struct move_by_pieces data
;
1035 rtx to_addr
, from_addr
= XEXP (from
, 0);
1036 unsigned int max_size
= MOVE_MAX_PIECES
+ 1;
1037 enum machine_mode mode
= VOIDmode
, tmode
;
1038 enum insn_code icode
;
1040 align
= MIN (to
? MEM_ALIGN (to
) : align
, MEM_ALIGN (from
));
1043 data
.from_addr
= from_addr
;
1046 to_addr
= XEXP (to
, 0);
1049 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
1050 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
1052 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
1059 #ifdef STACK_GROWS_DOWNWARD
1065 data
.to_addr
= to_addr
;
1068 = (GET_CODE (from_addr
) == PRE_INC
|| GET_CODE (from_addr
) == PRE_DEC
1069 || GET_CODE (from_addr
) == POST_INC
1070 || GET_CODE (from_addr
) == POST_DEC
);
1072 data
.explicit_inc_from
= 0;
1073 data
.explicit_inc_to
= 0;
1074 if (data
.reverse
) data
.offset
= len
;
1077 /* If copying requires more than two move insns,
1078 copy addresses to registers (to make displacements shorter)
1079 and use post-increment if available. */
1080 if (!(data
.autinc_from
&& data
.autinc_to
)
1081 && move_by_pieces_ninsns (len
, align
) > 2)
1083 /* Find the mode of the largest move... */
1084 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1085 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1086 if (GET_MODE_SIZE (tmode
) < max_size
)
1089 if (USE_LOAD_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_from
)
1091 data
.from_addr
= copy_addr_to_reg (plus_constant (from_addr
, len
));
1092 data
.autinc_from
= 1;
1093 data
.explicit_inc_from
= -1;
1095 if (USE_LOAD_POST_INCREMENT (mode
) && ! data
.autinc_from
)
1097 data
.from_addr
= copy_addr_to_reg (from_addr
);
1098 data
.autinc_from
= 1;
1099 data
.explicit_inc_from
= 1;
1101 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
1102 data
.from_addr
= copy_addr_to_reg (from_addr
);
1103 if (USE_STORE_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_to
)
1105 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
1107 data
.explicit_inc_to
= -1;
1109 if (USE_STORE_POST_INCREMENT (mode
) && ! data
.reverse
&& ! data
.autinc_to
)
1111 data
.to_addr
= copy_addr_to_reg (to_addr
);
1113 data
.explicit_inc_to
= 1;
1115 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
1116 data
.to_addr
= copy_addr_to_reg (to_addr
);
1119 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1120 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1121 align
= MOVE_MAX
* BITS_PER_UNIT
;
1123 /* First move what we can in the largest integer mode, then go to
1124 successively smaller modes. */
1126 while (max_size
> 1)
1128 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1129 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1130 if (GET_MODE_SIZE (tmode
) < max_size
)
1133 if (mode
== VOIDmode
)
1136 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1137 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1138 move_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
1140 max_size
= GET_MODE_SIZE (mode
);
1143 /* The code above should have handled everything. */
1157 if (HAVE_POST_INCREMENT
&& data
.explicit_inc_to
> 0)
1158 emit_insn (gen_add2_insn (data
.to_addr
, constm1_rtx
));
1160 data
.to_addr
= copy_addr_to_reg (plus_constant (data
.to_addr
,
1163 to1
= adjust_automodify_address (data
.to
, QImode
, data
.to_addr
,
1170 to1
= adjust_address (data
.to
, QImode
, data
.offset
);
1178 /* Return number of insns required to move L bytes by pieces.
1179 ALIGN (in bits) is maximum alignment we can assume. */
1181 static unsigned HOST_WIDE_INT
1182 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l
, unsigned int align
)
1184 unsigned HOST_WIDE_INT n_insns
= 0;
1185 unsigned HOST_WIDE_INT max_size
= MOVE_MAX
+ 1;
1187 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
1188 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
1189 align
= MOVE_MAX
* BITS_PER_UNIT
;
1191 while (max_size
> 1)
1193 enum machine_mode mode
= VOIDmode
, tmode
;
1194 enum insn_code icode
;
1196 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1197 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1198 if (GET_MODE_SIZE (tmode
) < max_size
)
1201 if (mode
== VOIDmode
)
1204 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1205 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1206 n_insns
+= l
/ GET_MODE_SIZE (mode
), l
%= GET_MODE_SIZE (mode
);
1208 max_size
= GET_MODE_SIZE (mode
);
1216 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1217 with move instructions for mode MODE. GENFUN is the gen_... function
1218 to make a move insn for that mode. DATA has all the other info. */
1221 move_by_pieces_1 (rtx (*genfun
) (rtx
, ...), enum machine_mode mode
,
1222 struct move_by_pieces
*data
)
1224 unsigned int size
= GET_MODE_SIZE (mode
);
1225 rtx to1
= NULL_RTX
, from1
;
1227 while (data
->len
>= size
)
1230 data
->offset
-= size
;
1234 if (data
->autinc_to
)
1235 to1
= adjust_automodify_address (data
->to
, mode
, data
->to_addr
,
1238 to1
= adjust_address (data
->to
, mode
, data
->offset
);
1241 if (data
->autinc_from
)
1242 from1
= adjust_automodify_address (data
->from
, mode
, data
->from_addr
,
1245 from1
= adjust_address (data
->from
, mode
, data
->offset
);
1247 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
1248 emit_insn (gen_add2_insn (data
->to_addr
,
1249 GEN_INT (-(HOST_WIDE_INT
)size
)));
1250 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_from
< 0)
1251 emit_insn (gen_add2_insn (data
->from_addr
,
1252 GEN_INT (-(HOST_WIDE_INT
)size
)));
1255 emit_insn ((*genfun
) (to1
, from1
));
1258 #ifdef PUSH_ROUNDING
1259 emit_single_push_insn (mode
, from1
, NULL
);
1265 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
1266 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
1267 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_from
> 0)
1268 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (size
)));
1270 if (! data
->reverse
)
1271 data
->offset
+= size
;
1277 /* Emit code to move a block Y to a block X. This may be done with
1278 string-move instructions, with multiple scalar move instructions,
1279 or with a library call.
1281 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1282 SIZE is an rtx that says how long they are.
1283 ALIGN is the maximum alignment we can assume they have.
1284 METHOD describes what kind of copy this is, and what mechanisms may be used.
1286 Return the address of the new block, if memcpy is called and returns it,
1290 emit_block_move (rtx x
, rtx y
, rtx size
, enum block_op_methods method
)
1298 case BLOCK_OP_NORMAL
:
1299 may_use_call
= true;
1302 case BLOCK_OP_CALL_PARM
:
1303 may_use_call
= block_move_libcall_safe_for_call_parm ();
1305 /* Make inhibit_defer_pop nonzero around the library call
1306 to force it to pop the arguments right away. */
1310 case BLOCK_OP_NO_LIBCALL
:
1311 may_use_call
= false;
1318 align
= MIN (MEM_ALIGN (x
), MEM_ALIGN (y
));
1320 if (GET_MODE (x
) != BLKmode
)
1322 if (GET_MODE (y
) != BLKmode
)
1325 x
= protect_from_queue (x
, 1);
1326 y
= protect_from_queue (y
, 0);
1327 size
= protect_from_queue (size
, 0);
1329 if (GET_CODE (x
) != MEM
)
1331 if (GET_CODE (y
) != MEM
)
1336 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1337 can be incorrect is coming from __builtin_memcpy. */
1338 if (GET_CODE (size
) == CONST_INT
)
1340 if (INTVAL (size
) == 0)
1343 x
= shallow_copy_rtx (x
);
1344 y
= shallow_copy_rtx (y
);
1345 set_mem_size (x
, size
);
1346 set_mem_size (y
, size
);
1349 if (GET_CODE (size
) == CONST_INT
&& MOVE_BY_PIECES_P (INTVAL (size
), align
))
1350 move_by_pieces (x
, y
, INTVAL (size
), align
, 0);
1351 else if (emit_block_move_via_movstr (x
, y
, size
, align
))
1353 else if (may_use_call
)
1354 retval
= emit_block_move_via_libcall (x
, y
, size
);
1356 emit_block_move_via_loop (x
, y
, size
, align
);
1358 if (method
== BLOCK_OP_CALL_PARM
)
1364 /* A subroutine of emit_block_move. Returns true if calling the
1365 block move libcall will not clobber any parameters which may have
1366 already been placed on the stack. */
1369 block_move_libcall_safe_for_call_parm (void)
1371 /* If arguments are pushed on the stack, then they're safe. */
1375 /* If registers go on the stack anyway, any argument is sure to clobber
1376 an outgoing argument. */
1377 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1379 tree fn
= emit_block_move_libcall_fn (false);
1381 if (REG_PARM_STACK_SPACE (fn
) != 0)
1386 /* If any argument goes in memory, then it might clobber an outgoing
1389 CUMULATIVE_ARGS args_so_far
;
1392 fn
= emit_block_move_libcall_fn (false);
1393 INIT_CUMULATIVE_ARGS (args_so_far
, TREE_TYPE (fn
), NULL_RTX
, 0, 3);
1395 arg
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
1396 for ( ; arg
!= void_list_node
; arg
= TREE_CHAIN (arg
))
1398 enum machine_mode mode
= TYPE_MODE (TREE_VALUE (arg
));
1399 rtx tmp
= FUNCTION_ARG (args_so_far
, mode
, NULL_TREE
, 1);
1400 if (!tmp
|| !REG_P (tmp
))
1402 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1403 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far
, mode
,
1407 FUNCTION_ARG_ADVANCE (args_so_far
, mode
, NULL_TREE
, 1);
1413 /* A subroutine of emit_block_move. Expand a movstr pattern;
1414 return true if successful. */
1417 emit_block_move_via_movstr (rtx x
, rtx y
, rtx size
, unsigned int align
)
1419 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
1420 int save_volatile_ok
= volatile_ok
;
1421 enum machine_mode mode
;
1423 /* Since this is a move insn, we don't care about volatility. */
1426 /* Try the most limited insn first, because there's no point
1427 including more than one in the machine description unless
1428 the more limited one has some advantage. */
1430 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1431 mode
= GET_MODE_WIDER_MODE (mode
))
1433 enum insn_code code
= movstr_optab
[(int) mode
];
1434 insn_operand_predicate_fn pred
;
1436 if (code
!= CODE_FOR_nothing
1437 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1438 here because if SIZE is less than the mode mask, as it is
1439 returned by the macro, it will definitely be less than the
1440 actual mode mask. */
1441 && ((GET_CODE (size
) == CONST_INT
1442 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1443 <= (GET_MODE_MASK (mode
) >> 1)))
1444 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
1445 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
1446 || (*pred
) (x
, BLKmode
))
1447 && ((pred
= insn_data
[(int) code
].operand
[1].predicate
) == 0
1448 || (*pred
) (y
, BLKmode
))
1449 && ((pred
= insn_data
[(int) code
].operand
[3].predicate
) == 0
1450 || (*pred
) (opalign
, VOIDmode
)))
1453 rtx last
= get_last_insn ();
1456 op2
= convert_to_mode (mode
, size
, 1);
1457 pred
= insn_data
[(int) code
].operand
[2].predicate
;
1458 if (pred
!= 0 && ! (*pred
) (op2
, mode
))
1459 op2
= copy_to_mode_reg (mode
, op2
);
1461 /* ??? When called via emit_block_move_for_call, it'd be
1462 nice if there were some way to inform the backend, so
1463 that it doesn't fail the expansion because it thinks
1464 emitting the libcall would be more efficient. */
1466 pat
= GEN_FCN ((int) code
) (x
, y
, op2
, opalign
);
1470 volatile_ok
= save_volatile_ok
;
1474 delete_insns_since (last
);
1478 volatile_ok
= save_volatile_ok
;
1482 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1483 Return the return value from memcpy, 0 otherwise. */
1486 emit_block_move_via_libcall (rtx dst
, rtx src
, rtx size
)
1488 rtx dst_addr
, src_addr
;
1489 tree call_expr
, arg_list
, fn
, src_tree
, dst_tree
, size_tree
;
1490 enum machine_mode size_mode
;
1493 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1495 It is unsafe to save the value generated by protect_from_queue and reuse
1496 it later. Consider what happens if emit_queue is called before the
1497 return value from protect_from_queue is used.
1499 Expansion of the CALL_EXPR below will call emit_queue before we are
1500 finished emitting RTL for argument setup. So if we are not careful we
1501 could get the wrong value for an argument.
1503 To avoid this problem we go ahead and emit code to copy the addresses of
1504 DST and SRC and SIZE into new pseudos. We can then place those new
1505 pseudos into an RTL_EXPR and use them later, even after a call to
1508 Note this is not strictly needed for library calls since they do not call
1509 emit_queue before loading their arguments. However, we may need to have
1510 library calls call emit_queue in the future since failing to do so could
1511 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1512 arguments in registers. */
1514 dst_addr
= copy_to_mode_reg (Pmode
, XEXP (dst
, 0));
1515 src_addr
= copy_to_mode_reg (Pmode
, XEXP (src
, 0));
1517 dst_addr
= convert_memory_address (ptr_mode
, dst_addr
);
1518 src_addr
= convert_memory_address (ptr_mode
, src_addr
);
1520 dst_tree
= make_tree (ptr_type_node
, dst_addr
);
1521 src_tree
= make_tree (ptr_type_node
, src_addr
);
1523 if (TARGET_MEM_FUNCTIONS
)
1524 size_mode
= TYPE_MODE (sizetype
);
1526 size_mode
= TYPE_MODE (unsigned_type_node
);
1528 size
= convert_to_mode (size_mode
, size
, 1);
1529 size
= copy_to_mode_reg (size_mode
, size
);
1531 /* It is incorrect to use the libcall calling conventions to call
1532 memcpy in this context. This could be a user call to memcpy and
1533 the user may wish to examine the return value from memcpy. For
1534 targets where libcalls and normal calls have different conventions
1535 for returning pointers, we could end up generating incorrect code.
1537 For convenience, we generate the call to bcopy this way as well. */
1539 if (TARGET_MEM_FUNCTIONS
)
1540 size_tree
= make_tree (sizetype
, size
);
1542 size_tree
= make_tree (unsigned_type_node
, size
);
1544 fn
= emit_block_move_libcall_fn (true);
1545 arg_list
= tree_cons (NULL_TREE
, size_tree
, NULL_TREE
);
1546 if (TARGET_MEM_FUNCTIONS
)
1548 arg_list
= tree_cons (NULL_TREE
, src_tree
, arg_list
);
1549 arg_list
= tree_cons (NULL_TREE
, dst_tree
, arg_list
);
1553 arg_list
= tree_cons (NULL_TREE
, dst_tree
, arg_list
);
1554 arg_list
= tree_cons (NULL_TREE
, src_tree
, arg_list
);
1557 /* Now we have to build up the CALL_EXPR itself. */
1558 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
1559 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
1560 call_expr
, arg_list
, NULL_TREE
);
1562 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
1564 /* If we are initializing a readonly value, show the above call clobbered
1565 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1566 the delay slot scheduler might overlook conflicts and take nasty
1568 if (RTX_UNCHANGING_P (dst
))
1569 add_function_usage_to
1570 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode
,
1571 gen_rtx_CLOBBER (VOIDmode
, dst
),
1574 return TARGET_MEM_FUNCTIONS
? retval
: NULL_RTX
;
1577 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1578 for the function we use for block copies. The first time FOR_CALL
1579 is true, we call assemble_external. */
1581 static GTY(()) tree block_move_fn
;
1584 init_block_move_fn (const char *asmspec
)
1590 if (TARGET_MEM_FUNCTIONS
)
1592 fn
= get_identifier ("memcpy");
1593 args
= build_function_type_list (ptr_type_node
, ptr_type_node
,
1594 const_ptr_type_node
, sizetype
,
1599 fn
= get_identifier ("bcopy");
1600 args
= build_function_type_list (void_type_node
, const_ptr_type_node
,
1601 ptr_type_node
, unsigned_type_node
,
1605 fn
= build_decl (FUNCTION_DECL
, fn
, args
);
1606 DECL_EXTERNAL (fn
) = 1;
1607 TREE_PUBLIC (fn
) = 1;
1608 DECL_ARTIFICIAL (fn
) = 1;
1609 TREE_NOTHROW (fn
) = 1;
1616 SET_DECL_RTL (block_move_fn
, NULL_RTX
);
1617 SET_DECL_ASSEMBLER_NAME (block_move_fn
, get_identifier (asmspec
));
1622 emit_block_move_libcall_fn (int for_call
)
1624 static bool emitted_extern
;
1627 init_block_move_fn (NULL
);
1629 if (for_call
&& !emitted_extern
)
1631 emitted_extern
= true;
1632 make_decl_rtl (block_move_fn
, NULL
);
1633 assemble_external (block_move_fn
);
1636 return block_move_fn
;
1639 /* A subroutine of emit_block_move. Copy the data via an explicit
1640 loop. This is used only when libcalls are forbidden. */
1641 /* ??? It'd be nice to copy in hunks larger than QImode. */
1644 emit_block_move_via_loop (rtx x
, rtx y
, rtx size
,
1645 unsigned int align ATTRIBUTE_UNUSED
)
1647 rtx cmp_label
, top_label
, iter
, x_addr
, y_addr
, tmp
;
1648 enum machine_mode iter_mode
;
1650 iter_mode
= GET_MODE (size
);
1651 if (iter_mode
== VOIDmode
)
1652 iter_mode
= word_mode
;
1654 top_label
= gen_label_rtx ();
1655 cmp_label
= gen_label_rtx ();
1656 iter
= gen_reg_rtx (iter_mode
);
1658 emit_move_insn (iter
, const0_rtx
);
1660 x_addr
= force_operand (XEXP (x
, 0), NULL_RTX
);
1661 y_addr
= force_operand (XEXP (y
, 0), NULL_RTX
);
1662 do_pending_stack_adjust ();
1664 emit_note (NOTE_INSN_LOOP_BEG
);
1666 emit_jump (cmp_label
);
1667 emit_label (top_label
);
1669 tmp
= convert_modes (Pmode
, iter_mode
, iter
, true);
1670 x_addr
= gen_rtx_PLUS (Pmode
, x_addr
, tmp
);
1671 y_addr
= gen_rtx_PLUS (Pmode
, y_addr
, tmp
);
1672 x
= change_address (x
, QImode
, x_addr
);
1673 y
= change_address (y
, QImode
, y_addr
);
1675 emit_move_insn (x
, y
);
1677 tmp
= expand_simple_binop (iter_mode
, PLUS
, iter
, const1_rtx
, iter
,
1678 true, OPTAB_LIB_WIDEN
);
1680 emit_move_insn (iter
, tmp
);
1682 emit_note (NOTE_INSN_LOOP_CONT
);
1683 emit_label (cmp_label
);
1685 emit_cmp_and_jump_insns (iter
, size
, LT
, NULL_RTX
, iter_mode
,
1688 emit_note (NOTE_INSN_LOOP_END
);
1691 /* Copy all or part of a value X into registers starting at REGNO.
1692 The number of registers to be filled is NREGS. */
1695 move_block_to_reg (int regno
, rtx x
, int nregs
, enum machine_mode mode
)
1698 #ifdef HAVE_load_multiple
1706 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
1707 x
= validize_mem (force_const_mem (mode
, x
));
1709 /* See if the machine can do this with a load multiple insn. */
1710 #ifdef HAVE_load_multiple
1711 if (HAVE_load_multiple
)
1713 last
= get_last_insn ();
1714 pat
= gen_load_multiple (gen_rtx_REG (word_mode
, regno
), x
,
1722 delete_insns_since (last
);
1726 for (i
= 0; i
< nregs
; i
++)
1727 emit_move_insn (gen_rtx_REG (word_mode
, regno
+ i
),
1728 operand_subword_force (x
, i
, mode
));
1731 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1732 The number of registers to be filled is NREGS. */
1735 move_block_from_reg (int regno
, rtx x
, int nregs
)
1742 /* See if the machine can do this with a store multiple insn. */
1743 #ifdef HAVE_store_multiple
1744 if (HAVE_store_multiple
)
1746 rtx last
= get_last_insn ();
1747 rtx pat
= gen_store_multiple (x
, gen_rtx_REG (word_mode
, regno
),
1755 delete_insns_since (last
);
1759 for (i
= 0; i
< nregs
; i
++)
1761 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
1766 emit_move_insn (tem
, gen_rtx_REG (word_mode
, regno
+ i
));
1770 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1771 ORIG, where ORIG is a non-consecutive group of registers represented by
1772 a PARALLEL. The clone is identical to the original except in that the
1773 original set of registers is replaced by a new set of pseudo registers.
1774 The new set has the same modes as the original set. */
1777 gen_group_rtx (rtx orig
)
1782 if (GET_CODE (orig
) != PARALLEL
)
1785 length
= XVECLEN (orig
, 0);
1786 tmps
= alloca (sizeof (rtx
) * length
);
1788 /* Skip a NULL entry in first slot. */
1789 i
= XEXP (XVECEXP (orig
, 0, 0), 0) ? 0 : 1;
1794 for (; i
< length
; i
++)
1796 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (orig
, 0, i
), 0));
1797 rtx offset
= XEXP (XVECEXP (orig
, 0, i
), 1);
1799 tmps
[i
] = gen_rtx_EXPR_LIST (VOIDmode
, gen_reg_rtx (mode
), offset
);
1802 return gen_rtx_PARALLEL (GET_MODE (orig
), gen_rtvec_v (length
, tmps
));
1805 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1806 where DST is non-consecutive registers represented by a PARALLEL.
1807 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1811 emit_group_load (rtx dst
, rtx orig_src
, tree type ATTRIBUTE_UNUSED
, int ssize
)
1816 if (GET_CODE (dst
) != PARALLEL
)
1819 /* Check for a NULL entry, used to indicate that the parameter goes
1820 both on the stack and in registers. */
1821 if (XEXP (XVECEXP (dst
, 0, 0), 0))
1826 tmps
= alloca (sizeof (rtx
) * XVECLEN (dst
, 0));
1828 /* Process the pieces. */
1829 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1831 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (dst
, 0, i
), 0));
1832 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (dst
, 0, i
), 1));
1833 unsigned int bytelen
= GET_MODE_SIZE (mode
);
1836 /* Handle trailing fragments that run over the size of the struct. */
1837 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
1839 /* Arrange to shift the fragment to where it belongs.
1840 extract_bit_field loads to the lsb of the reg. */
1842 #ifdef BLOCK_REG_PADDING
1843 BLOCK_REG_PADDING (GET_MODE (orig_src
), type
, i
== start
)
1844 == (BYTES_BIG_ENDIAN
? upward
: downward
)
1849 shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
1850 bytelen
= ssize
- bytepos
;
1855 /* If we won't be loading directly from memory, protect the real source
1856 from strange tricks we might play; but make sure that the source can
1857 be loaded directly into the destination. */
1859 if (GET_CODE (orig_src
) != MEM
1860 && (!CONSTANT_P (orig_src
)
1861 || (GET_MODE (orig_src
) != mode
1862 && GET_MODE (orig_src
) != VOIDmode
)))
1864 if (GET_MODE (orig_src
) == VOIDmode
)
1865 src
= gen_reg_rtx (mode
);
1867 src
= gen_reg_rtx (GET_MODE (orig_src
));
1869 emit_move_insn (src
, orig_src
);
1872 /* Optimize the access just a bit. */
1873 if (GET_CODE (src
) == MEM
1874 && (! SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (src
))
1875 || MEM_ALIGN (src
) >= GET_MODE_ALIGNMENT (mode
))
1876 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
1877 && bytelen
== GET_MODE_SIZE (mode
))
1879 tmps
[i
] = gen_reg_rtx (mode
);
1880 emit_move_insn (tmps
[i
], adjust_address (src
, mode
, bytepos
));
1882 else if (GET_CODE (src
) == CONCAT
)
1884 unsigned int slen
= GET_MODE_SIZE (GET_MODE (src
));
1885 unsigned int slen0
= GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)));
1887 if ((bytepos
== 0 && bytelen
== slen0
)
1888 || (bytepos
!= 0 && bytepos
+ bytelen
<= slen
))
1890 /* The following assumes that the concatenated objects all
1891 have the same size. In this case, a simple calculation
1892 can be used to determine the object and the bit field
1894 tmps
[i
] = XEXP (src
, bytepos
/ slen0
);
1895 if (! CONSTANT_P (tmps
[i
])
1896 && (GET_CODE (tmps
[i
]) != REG
|| GET_MODE (tmps
[i
]) != mode
))
1897 tmps
[i
] = extract_bit_field (tmps
[i
], bytelen
* BITS_PER_UNIT
,
1898 (bytepos
% slen0
) * BITS_PER_UNIT
,
1899 1, NULL_RTX
, mode
, mode
, ssize
);
1901 else if (bytepos
== 0)
1903 rtx mem
= assign_stack_temp (GET_MODE (src
), slen
, 0);
1904 emit_move_insn (mem
, src
);
1905 tmps
[i
] = adjust_address (mem
, mode
, 0);
1910 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1911 SIMD register, which is currently broken. While we get GCC
1912 to emit proper RTL for these cases, let's dump to memory. */
1913 else if (VECTOR_MODE_P (GET_MODE (dst
))
1914 && GET_CODE (src
) == REG
)
1916 int slen
= GET_MODE_SIZE (GET_MODE (src
));
1919 mem
= assign_stack_temp (GET_MODE (src
), slen
, 0);
1920 emit_move_insn (mem
, src
);
1921 tmps
[i
] = adjust_address (mem
, mode
, (int) bytepos
);
1923 else if (CONSTANT_P (src
) && GET_MODE (dst
) != BLKmode
1924 && XVECLEN (dst
, 0) > 1)
1925 tmps
[i
] = simplify_gen_subreg (mode
, src
, GET_MODE(dst
), bytepos
);
1926 else if (CONSTANT_P (src
)
1927 || (GET_CODE (src
) == REG
&& GET_MODE (src
) == mode
))
1930 tmps
[i
] = extract_bit_field (src
, bytelen
* BITS_PER_UNIT
,
1931 bytepos
* BITS_PER_UNIT
, 1, NULL_RTX
,
1935 expand_binop (mode
, ashl_optab
, tmps
[i
], GEN_INT (shift
),
1936 tmps
[i
], 0, OPTAB_WIDEN
);
1941 /* Copy the extracted pieces into the proper (probable) hard regs. */
1942 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1943 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0), tmps
[i
]);
1946 /* Emit code to move a block SRC to block DST, where SRC and DST are
1947 non-consecutive groups of registers, each represented by a PARALLEL. */
1950 emit_group_move (rtx dst
, rtx src
)
1954 if (GET_CODE (src
) != PARALLEL
1955 || GET_CODE (dst
) != PARALLEL
1956 || XVECLEN (src
, 0) != XVECLEN (dst
, 0))
1959 /* Skip first entry if NULL. */
1960 for (i
= XEXP (XVECEXP (src
, 0, 0), 0) ? 0 : 1; i
< XVECLEN (src
, 0); i
++)
1961 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0),
1962 XEXP (XVECEXP (src
, 0, i
), 0));
1965 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1966 where SRC is non-consecutive registers represented by a PARALLEL.
1967 SSIZE represents the total size of block ORIG_DST, or -1 if not
1971 emit_group_store (rtx orig_dst
, rtx src
, tree type ATTRIBUTE_UNUSED
, int ssize
)
1976 if (GET_CODE (src
) != PARALLEL
)
1979 /* Check for a NULL entry, used to indicate that the parameter goes
1980 both on the stack and in registers. */
1981 if (XEXP (XVECEXP (src
, 0, 0), 0))
1986 tmps
= alloca (sizeof (rtx
) * XVECLEN (src
, 0));
1988 /* Copy the (probable) hard regs into pseudos. */
1989 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
1991 rtx reg
= XEXP (XVECEXP (src
, 0, i
), 0);
1992 tmps
[i
] = gen_reg_rtx (GET_MODE (reg
));
1993 emit_move_insn (tmps
[i
], reg
);
1997 /* If we won't be storing directly into memory, protect the real destination
1998 from strange tricks we might play. */
2000 if (GET_CODE (dst
) == PARALLEL
)
2004 /* We can get a PARALLEL dst if there is a conditional expression in
2005 a return statement. In that case, the dst and src are the same,
2006 so no action is necessary. */
2007 if (rtx_equal_p (dst
, src
))
2010 /* It is unclear if we can ever reach here, but we may as well handle
2011 it. Allocate a temporary, and split this into a store/load to/from
2014 temp
= assign_stack_temp (GET_MODE (dst
), ssize
, 0);
2015 emit_group_store (temp
, src
, type
, ssize
);
2016 emit_group_load (dst
, temp
, type
, ssize
);
2019 else if (GET_CODE (dst
) != MEM
&& GET_CODE (dst
) != CONCAT
)
2021 dst
= gen_reg_rtx (GET_MODE (orig_dst
));
2022 /* Make life a bit easier for combine. */
2023 emit_move_insn (dst
, CONST0_RTX (GET_MODE (orig_dst
)));
2026 /* Process the pieces. */
2027 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
2029 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (src
, 0, i
), 1));
2030 enum machine_mode mode
= GET_MODE (tmps
[i
]);
2031 unsigned int bytelen
= GET_MODE_SIZE (mode
);
2034 /* Handle trailing fragments that run over the size of the struct. */
2035 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
2037 /* store_bit_field always takes its value from the lsb.
2038 Move the fragment to the lsb if it's not already there. */
2040 #ifdef BLOCK_REG_PADDING
2041 BLOCK_REG_PADDING (GET_MODE (orig_dst
), type
, i
== start
)
2042 == (BYTES_BIG_ENDIAN
? upward
: downward
)
2048 int shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
2049 expand_binop (mode
, ashr_optab
, tmps
[i
], GEN_INT (shift
),
2050 tmps
[i
], 0, OPTAB_WIDEN
);
2052 bytelen
= ssize
- bytepos
;
2055 if (GET_CODE (dst
) == CONCAT
)
2057 if (bytepos
+ bytelen
<= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0))))
2058 dest
= XEXP (dst
, 0);
2059 else if (bytepos
>= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0))))
2061 bytepos
-= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0)));
2062 dest
= XEXP (dst
, 1);
2064 else if (bytepos
== 0 && XVECLEN (src
, 0))
2066 dest
= assign_stack_temp (GET_MODE (dest
),
2067 GET_MODE_SIZE (GET_MODE (dest
)), 0);
2068 emit_move_insn (adjust_address (dest
, GET_MODE (tmps
[i
]), bytepos
),
2077 /* Optimize the access just a bit. */
2078 if (GET_CODE (dest
) == MEM
2079 && (! SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (dest
))
2080 || MEM_ALIGN (dest
) >= GET_MODE_ALIGNMENT (mode
))
2081 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
2082 && bytelen
== GET_MODE_SIZE (mode
))
2083 emit_move_insn (adjust_address (dest
, mode
, bytepos
), tmps
[i
]);
2085 store_bit_field (dest
, bytelen
* BITS_PER_UNIT
, bytepos
* BITS_PER_UNIT
,
2086 mode
, tmps
[i
], ssize
);
2091 /* Copy from the pseudo into the (probable) hard reg. */
2092 if (orig_dst
!= dst
)
2093 emit_move_insn (orig_dst
, dst
);
2096 /* Generate code to copy a BLKmode object of TYPE out of a
2097 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2098 is null, a stack temporary is created. TGTBLK is returned.
2100 The purpose of this routine is to handle functions that return
2101 BLKmode structures in registers. Some machines (the PA for example)
2102 want to return all small structures in registers regardless of the
2103 structure's alignment. */
2106 copy_blkmode_from_reg (rtx tgtblk
, rtx srcreg
, tree type
)
2108 unsigned HOST_WIDE_INT bytes
= int_size_in_bytes (type
);
2109 rtx src
= NULL
, dst
= NULL
;
2110 unsigned HOST_WIDE_INT bitsize
= MIN (TYPE_ALIGN (type
), BITS_PER_WORD
);
2111 unsigned HOST_WIDE_INT bitpos
, xbitpos
, padding_correction
= 0;
2115 tgtblk
= assign_temp (build_qualified_type (type
,
2117 | TYPE_QUAL_CONST
)),
2119 preserve_temp_slots (tgtblk
);
2122 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2123 into a new pseudo which is a full word. */
2125 if (GET_MODE (srcreg
) != BLKmode
2126 && GET_MODE_SIZE (GET_MODE (srcreg
)) < UNITS_PER_WORD
)
2127 srcreg
= convert_to_mode (word_mode
, srcreg
, TREE_UNSIGNED (type
));
2129 /* If the structure doesn't take up a whole number of words, see whether
2130 SRCREG is padded on the left or on the right. If it's on the left,
2131 set PADDING_CORRECTION to the number of bits to skip.
2133 In most ABIs, the structure will be returned at the least end of
2134 the register, which translates to right padding on little-endian
2135 targets and left padding on big-endian targets. The opposite
2136 holds if the structure is returned at the most significant
2137 end of the register. */
2138 if (bytes
% UNITS_PER_WORD
!= 0
2139 && (targetm
.calls
.return_in_msb (type
)
2141 : BYTES_BIG_ENDIAN
))
2143 = (BITS_PER_WORD
- ((bytes
% UNITS_PER_WORD
) * BITS_PER_UNIT
));
2145 /* Copy the structure BITSIZE bites at a time.
2147 We could probably emit more efficient code for machines which do not use
2148 strict alignment, but it doesn't seem worth the effort at the current
2150 for (bitpos
= 0, xbitpos
= padding_correction
;
2151 bitpos
< bytes
* BITS_PER_UNIT
;
2152 bitpos
+= bitsize
, xbitpos
+= bitsize
)
2154 /* We need a new source operand each time xbitpos is on a
2155 word boundary and when xbitpos == padding_correction
2156 (the first time through). */
2157 if (xbitpos
% BITS_PER_WORD
== 0
2158 || xbitpos
== padding_correction
)
2159 src
= operand_subword_force (srcreg
, xbitpos
/ BITS_PER_WORD
,
2162 /* We need a new destination operand each time bitpos is on
2164 if (bitpos
% BITS_PER_WORD
== 0)
2165 dst
= operand_subword (tgtblk
, bitpos
/ BITS_PER_WORD
, 1, BLKmode
);
2167 /* Use xbitpos for the source extraction (right justified) and
2168 xbitpos for the destination store (left justified). */
2169 store_bit_field (dst
, bitsize
, bitpos
% BITS_PER_WORD
, word_mode
,
2170 extract_bit_field (src
, bitsize
,
2171 xbitpos
% BITS_PER_WORD
, 1,
2172 NULL_RTX
, word_mode
, word_mode
,
2180 /* Add a USE expression for REG to the (possibly empty) list pointed
2181 to by CALL_FUSAGE. REG must denote a hard register. */
2184 use_reg (rtx
*call_fusage
, rtx reg
)
2186 if (GET_CODE (reg
) != REG
2187 || REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)
2191 = gen_rtx_EXPR_LIST (VOIDmode
,
2192 gen_rtx_USE (VOIDmode
, reg
), *call_fusage
);
2195 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2196 starting at REGNO. All of these registers must be hard registers. */
2199 use_regs (rtx
*call_fusage
, int regno
, int nregs
)
2203 if (regno
+ nregs
> FIRST_PSEUDO_REGISTER
)
2206 for (i
= 0; i
< nregs
; i
++)
2207 use_reg (call_fusage
, regno_reg_rtx
[regno
+ i
]);
2210 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2211 PARALLEL REGS. This is for calls that pass values in multiple
2212 non-contiguous locations. The Irix 6 ABI has examples of this. */
2215 use_group_regs (rtx
*call_fusage
, rtx regs
)
2219 for (i
= 0; i
< XVECLEN (regs
, 0); i
++)
2221 rtx reg
= XEXP (XVECEXP (regs
, 0, i
), 0);
2223 /* A NULL entry means the parameter goes both on the stack and in
2224 registers. This can also be a MEM for targets that pass values
2225 partially on the stack and partially in registers. */
2226 if (reg
!= 0 && GET_CODE (reg
) == REG
)
2227 use_reg (call_fusage
, reg
);
2232 /* Determine whether the LEN bytes generated by CONSTFUN can be
2233 stored to memory using several move instructions. CONSTFUNDATA is
2234 a pointer which will be passed as argument in every CONSTFUN call.
2235 ALIGN is maximum alignment we can assume. Return nonzero if a
2236 call to store_by_pieces should succeed. */
2239 can_store_by_pieces (unsigned HOST_WIDE_INT len
,
2240 rtx (*constfun
) (void *, HOST_WIDE_INT
, enum machine_mode
),
2241 void *constfundata
, unsigned int align
)
2243 unsigned HOST_WIDE_INT max_size
, l
;
2244 HOST_WIDE_INT offset
= 0;
2245 enum machine_mode mode
, tmode
;
2246 enum insn_code icode
;
2253 if (! STORE_BY_PIECES_P (len
, align
))
2256 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
2257 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
2258 align
= MOVE_MAX
* BITS_PER_UNIT
;
2260 /* We would first store what we can in the largest integer mode, then go to
2261 successively smaller modes. */
2264 reverse
<= (HAVE_PRE_DECREMENT
|| HAVE_POST_DECREMENT
);
2269 max_size
= STORE_MAX_PIECES
+ 1;
2270 while (max_size
> 1)
2272 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2273 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2274 if (GET_MODE_SIZE (tmode
) < max_size
)
2277 if (mode
== VOIDmode
)
2280 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
2281 if (icode
!= CODE_FOR_nothing
2282 && align
>= GET_MODE_ALIGNMENT (mode
))
2284 unsigned int size
= GET_MODE_SIZE (mode
);
2291 cst
= (*constfun
) (constfundata
, offset
, mode
);
2292 if (!LEGITIMATE_CONSTANT_P (cst
))
2302 max_size
= GET_MODE_SIZE (mode
);
2305 /* The code above should have handled everything. */
2313 /* Generate several move instructions to store LEN bytes generated by
2314 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2315 pointer which will be passed as argument in every CONSTFUN call.
2316 ALIGN is maximum alignment we can assume.
2317 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2318 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
2322 store_by_pieces (rtx to
, unsigned HOST_WIDE_INT len
,
2323 rtx (*constfun
) (void *, HOST_WIDE_INT
, enum machine_mode
),
2324 void *constfundata
, unsigned int align
, int endp
)
2326 struct store_by_pieces data
;
2335 if (! STORE_BY_PIECES_P (len
, align
))
2337 to
= protect_from_queue (to
, 1);
2338 data
.constfun
= constfun
;
2339 data
.constfundata
= constfundata
;
2342 store_by_pieces_1 (&data
, align
);
2353 if (HAVE_POST_INCREMENT
&& data
.explicit_inc_to
> 0)
2354 emit_insn (gen_add2_insn (data
.to_addr
, constm1_rtx
));
2356 data
.to_addr
= copy_addr_to_reg (plus_constant (data
.to_addr
,
2359 to1
= adjust_automodify_address (data
.to
, QImode
, data
.to_addr
,
2366 to1
= adjust_address (data
.to
, QImode
, data
.offset
);
2374 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2375 rtx with BLKmode). The caller must pass TO through protect_from_queue
2376 before calling. ALIGN is maximum alignment we can assume. */
2379 clear_by_pieces (rtx to
, unsigned HOST_WIDE_INT len
, unsigned int align
)
2381 struct store_by_pieces data
;
2386 data
.constfun
= clear_by_pieces_1
;
2387 data
.constfundata
= NULL
;
2390 store_by_pieces_1 (&data
, align
);
2393 /* Callback routine for clear_by_pieces.
2394 Return const0_rtx unconditionally. */
2397 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED
,
2398 HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
2399 enum machine_mode mode ATTRIBUTE_UNUSED
)
2404 /* Subroutine of clear_by_pieces and store_by_pieces.
2405 Generate several move instructions to store LEN bytes of block TO. (A MEM
2406 rtx with BLKmode). The caller must pass TO through protect_from_queue
2407 before calling. ALIGN is maximum alignment we can assume. */
2410 store_by_pieces_1 (struct store_by_pieces
*data ATTRIBUTE_UNUSED
,
2411 unsigned int align ATTRIBUTE_UNUSED
)
2413 rtx to_addr
= XEXP (data
->to
, 0);
2414 unsigned HOST_WIDE_INT max_size
= STORE_MAX_PIECES
+ 1;
2415 enum machine_mode mode
= VOIDmode
, tmode
;
2416 enum insn_code icode
;
2419 data
->to_addr
= to_addr
;
2421 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
2422 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
2424 data
->explicit_inc_to
= 0;
2426 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
2428 data
->offset
= data
->len
;
2430 /* If storing requires more than two move insns,
2431 copy addresses to registers (to make displacements shorter)
2432 and use post-increment if available. */
2433 if (!data
->autinc_to
2434 && move_by_pieces_ninsns (data
->len
, align
) > 2)
2436 /* Determine the main mode we'll be using. */
2437 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2438 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2439 if (GET_MODE_SIZE (tmode
) < max_size
)
2442 if (USE_STORE_PRE_DECREMENT (mode
) && data
->reverse
&& ! data
->autinc_to
)
2444 data
->to_addr
= copy_addr_to_reg (plus_constant (to_addr
, data
->len
));
2445 data
->autinc_to
= 1;
2446 data
->explicit_inc_to
= -1;
2449 if (USE_STORE_POST_INCREMENT (mode
) && ! data
->reverse
2450 && ! data
->autinc_to
)
2452 data
->to_addr
= copy_addr_to_reg (to_addr
);
2453 data
->autinc_to
= 1;
2454 data
->explicit_inc_to
= 1;
2457 if ( !data
->autinc_to
&& CONSTANT_P (to_addr
))
2458 data
->to_addr
= copy_addr_to_reg (to_addr
);
2461 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
2462 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
2463 align
= MOVE_MAX
* BITS_PER_UNIT
;
2465 /* First store what we can in the largest integer mode, then go to
2466 successively smaller modes. */
2468 while (max_size
> 1)
2470 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2471 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2472 if (GET_MODE_SIZE (tmode
) < max_size
)
2475 if (mode
== VOIDmode
)
2478 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
2479 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
2480 store_by_pieces_2 (GEN_FCN (icode
), mode
, data
);
2482 max_size
= GET_MODE_SIZE (mode
);
2485 /* The code above should have handled everything. */
2490 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2491 with move instructions for mode MODE. GENFUN is the gen_... function
2492 to make a move insn for that mode. DATA has all the other info. */
2495 store_by_pieces_2 (rtx (*genfun
) (rtx
, ...), enum machine_mode mode
,
2496 struct store_by_pieces
*data
)
2498 unsigned int size
= GET_MODE_SIZE (mode
);
2501 while (data
->len
>= size
)
2504 data
->offset
-= size
;
2506 if (data
->autinc_to
)
2507 to1
= adjust_automodify_address (data
->to
, mode
, data
->to_addr
,
2510 to1
= adjust_address (data
->to
, mode
, data
->offset
);
2512 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
2513 emit_insn (gen_add2_insn (data
->to_addr
,
2514 GEN_INT (-(HOST_WIDE_INT
) size
)));
2516 cst
= (*data
->constfun
) (data
->constfundata
, data
->offset
, mode
);
2517 emit_insn ((*genfun
) (to1
, cst
));
2519 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
2520 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
2522 if (! data
->reverse
)
2523 data
->offset
+= size
;
2529 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2530 its length in bytes. */
2533 clear_storage (rtx object
, rtx size
)
2536 unsigned int align
= (GET_CODE (object
) == MEM
? MEM_ALIGN (object
)
2537 : GET_MODE_ALIGNMENT (GET_MODE (object
)));
2539 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2540 just move a zero. Otherwise, do this a piece at a time. */
2541 if (GET_MODE (object
) != BLKmode
2542 && GET_CODE (size
) == CONST_INT
2543 && INTVAL (size
) == (HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (object
)))
2544 emit_move_insn (object
, CONST0_RTX (GET_MODE (object
)));
2547 object
= protect_from_queue (object
, 1);
2548 size
= protect_from_queue (size
, 0);
2550 if (size
== const0_rtx
)
2552 else if (GET_CODE (size
) == CONST_INT
2553 && CLEAR_BY_PIECES_P (INTVAL (size
), align
))
2554 clear_by_pieces (object
, INTVAL (size
), align
);
2555 else if (clear_storage_via_clrstr (object
, size
, align
))
2558 retval
= clear_storage_via_libcall (object
, size
);
2564 /* A subroutine of clear_storage. Expand a clrstr pattern;
2565 return true if successful. */
2568 clear_storage_via_clrstr (rtx object
, rtx size
, unsigned int align
)
2570 /* Try the most limited insn first, because there's no point
2571 including more than one in the machine description unless
2572 the more limited one has some advantage. */
2574 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
2575 enum machine_mode mode
;
2577 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
2578 mode
= GET_MODE_WIDER_MODE (mode
))
2580 enum insn_code code
= clrstr_optab
[(int) mode
];
2581 insn_operand_predicate_fn pred
;
2583 if (code
!= CODE_FOR_nothing
2584 /* We don't need MODE to be narrower than
2585 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2586 the mode mask, as it is returned by the macro, it will
2587 definitely be less than the actual mode mask. */
2588 && ((GET_CODE (size
) == CONST_INT
2589 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
2590 <= (GET_MODE_MASK (mode
) >> 1)))
2591 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
2592 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
2593 || (*pred
) (object
, BLKmode
))
2594 && ((pred
= insn_data
[(int) code
].operand
[2].predicate
) == 0
2595 || (*pred
) (opalign
, VOIDmode
)))
2598 rtx last
= get_last_insn ();
2601 op1
= convert_to_mode (mode
, size
, 1);
2602 pred
= insn_data
[(int) code
].operand
[1].predicate
;
2603 if (pred
!= 0 && ! (*pred
) (op1
, mode
))
2604 op1
= copy_to_mode_reg (mode
, op1
);
2606 pat
= GEN_FCN ((int) code
) (object
, op1
, opalign
);
2613 delete_insns_since (last
);
2620 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2621 Return the return value of memset, 0 otherwise. */
2624 clear_storage_via_libcall (rtx object
, rtx size
)
2626 tree call_expr
, arg_list
, fn
, object_tree
, size_tree
;
2627 enum machine_mode size_mode
;
2630 /* OBJECT or SIZE may have been passed through protect_from_queue.
2632 It is unsafe to save the value generated by protect_from_queue
2633 and reuse it later. Consider what happens if emit_queue is
2634 called before the return value from protect_from_queue is used.
2636 Expansion of the CALL_EXPR below will call emit_queue before
2637 we are finished emitting RTL for argument setup. So if we are
2638 not careful we could get the wrong value for an argument.
2640 To avoid this problem we go ahead and emit code to copy OBJECT
2641 and SIZE into new pseudos. We can then place those new pseudos
2642 into an RTL_EXPR and use them later, even after a call to
2645 Note this is not strictly needed for library calls since they
2646 do not call emit_queue before loading their arguments. However,
2647 we may need to have library calls call emit_queue in the future
2648 since failing to do so could cause problems for targets which
2649 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2651 object
= copy_to_mode_reg (Pmode
, XEXP (object
, 0));
2653 if (TARGET_MEM_FUNCTIONS
)
2654 size_mode
= TYPE_MODE (sizetype
);
2656 size_mode
= TYPE_MODE (unsigned_type_node
);
2657 size
= convert_to_mode (size_mode
, size
, 1);
2658 size
= copy_to_mode_reg (size_mode
, size
);
2660 /* It is incorrect to use the libcall calling conventions to call
2661 memset in this context. This could be a user call to memset and
2662 the user may wish to examine the return value from memset. For
2663 targets where libcalls and normal calls have different conventions
2664 for returning pointers, we could end up generating incorrect code.
2666 For convenience, we generate the call to bzero this way as well. */
2668 object_tree
= make_tree (ptr_type_node
, object
);
2669 if (TARGET_MEM_FUNCTIONS
)
2670 size_tree
= make_tree (sizetype
, size
);
2672 size_tree
= make_tree (unsigned_type_node
, size
);
2674 fn
= clear_storage_libcall_fn (true);
2675 arg_list
= tree_cons (NULL_TREE
, size_tree
, NULL_TREE
);
2676 if (TARGET_MEM_FUNCTIONS
)
2677 arg_list
= tree_cons (NULL_TREE
, integer_zero_node
, arg_list
);
2678 arg_list
= tree_cons (NULL_TREE
, object_tree
, arg_list
);
2680 /* Now we have to build up the CALL_EXPR itself. */
2681 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2682 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
2683 call_expr
, arg_list
, NULL_TREE
);
2685 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
2687 /* If we are initializing a readonly value, show the above call
2688 clobbered it. Otherwise, a load from it may erroneously be
2689 hoisted from a loop. */
2690 if (RTX_UNCHANGING_P (object
))
2691 emit_insn (gen_rtx_CLOBBER (VOIDmode
, object
));
2693 return (TARGET_MEM_FUNCTIONS
? retval
: NULL_RTX
);
2696 /* A subroutine of clear_storage_via_libcall. Create the tree node
2697 for the function we use for block clears. The first time FOR_CALL
2698 is true, we call assemble_external. */
2700 static GTY(()) tree block_clear_fn
;
2703 init_block_clear_fn (const char *asmspec
)
2705 if (!block_clear_fn
)
2709 if (TARGET_MEM_FUNCTIONS
)
2711 fn
= get_identifier ("memset");
2712 args
= build_function_type_list (ptr_type_node
, ptr_type_node
,
2713 integer_type_node
, sizetype
,
2718 fn
= get_identifier ("bzero");
2719 args
= build_function_type_list (void_type_node
, ptr_type_node
,
2720 unsigned_type_node
, NULL_TREE
);
2723 fn
= build_decl (FUNCTION_DECL
, fn
, args
);
2724 DECL_EXTERNAL (fn
) = 1;
2725 TREE_PUBLIC (fn
) = 1;
2726 DECL_ARTIFICIAL (fn
) = 1;
2727 TREE_NOTHROW (fn
) = 1;
2729 block_clear_fn
= fn
;
2734 SET_DECL_RTL (block_clear_fn
, NULL_RTX
);
2735 SET_DECL_ASSEMBLER_NAME (block_clear_fn
, get_identifier (asmspec
));
2740 clear_storage_libcall_fn (int for_call
)
2742 static bool emitted_extern
;
2744 if (!block_clear_fn
)
2745 init_block_clear_fn (NULL
);
2747 if (for_call
&& !emitted_extern
)
2749 emitted_extern
= true;
2750 make_decl_rtl (block_clear_fn
, NULL
);
2751 assemble_external (block_clear_fn
);
2754 return block_clear_fn
;
2757 /* Generate code to copy Y into X.
2758 Both Y and X must have the same mode, except that
2759 Y can be a constant with VOIDmode.
2760 This mode cannot be BLKmode; use emit_block_move for that.
2762 Return the last instruction emitted. */
2765 emit_move_insn (rtx x
, rtx y
)
2767 enum machine_mode mode
= GET_MODE (x
);
2768 rtx y_cst
= NULL_RTX
;
2771 x
= protect_from_queue (x
, 1);
2772 y
= protect_from_queue (y
, 0);
2774 if (mode
== BLKmode
|| (GET_MODE (y
) != mode
&& GET_MODE (y
) != VOIDmode
))
2777 /* Never force constant_p_rtx to memory. */
2778 if (GET_CODE (y
) == CONSTANT_P_RTX
)
2780 else if (CONSTANT_P (y
))
2783 && SCALAR_FLOAT_MODE_P (GET_MODE (x
))
2784 && (last_insn
= compress_float_constant (x
, y
)))
2789 if (!LEGITIMATE_CONSTANT_P (y
))
2791 y
= force_const_mem (mode
, y
);
2793 /* If the target's cannot_force_const_mem prevented the spill,
2794 assume that the target's move expanders will also take care
2795 of the non-legitimate constant. */
2801 /* If X or Y are memory references, verify that their addresses are valid
2803 if (GET_CODE (x
) == MEM
2804 && ((! memory_address_p (GET_MODE (x
), XEXP (x
, 0))
2805 && ! push_operand (x
, GET_MODE (x
)))
2807 && CONSTANT_ADDRESS_P (XEXP (x
, 0)))))
2808 x
= validize_mem (x
);
2810 if (GET_CODE (y
) == MEM
2811 && (! memory_address_p (GET_MODE (y
), XEXP (y
, 0))
2813 && CONSTANT_ADDRESS_P (XEXP (y
, 0)))))
2814 y
= validize_mem (y
);
2816 if (mode
== BLKmode
)
2819 last_insn
= emit_move_insn_1 (x
, y
);
2821 if (y_cst
&& GET_CODE (x
) == REG
2822 && (set
= single_set (last_insn
)) != NULL_RTX
2823 && SET_DEST (set
) == x
2824 && ! rtx_equal_p (y_cst
, SET_SRC (set
)))
2825 set_unique_reg_note (last_insn
, REG_EQUAL
, y_cst
);
2830 /* Low level part of emit_move_insn.
2831 Called just like emit_move_insn, but assumes X and Y
2832 are basically valid. */
2835 emit_move_insn_1 (rtx x
, rtx y
)
2837 enum machine_mode mode
= GET_MODE (x
);
2838 enum machine_mode submode
;
2839 enum mode_class
class = GET_MODE_CLASS (mode
);
2841 if ((unsigned int) mode
>= (unsigned int) MAX_MACHINE_MODE
)
2844 if (mov_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
2846 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) mode
].insn_code
) (x
, y
));
2848 /* Expand complex moves by moving real part and imag part, if possible. */
2849 else if ((class == MODE_COMPLEX_FLOAT
|| class == MODE_COMPLEX_INT
)
2850 && BLKmode
!= (submode
= GET_MODE_INNER (mode
))
2851 && (mov_optab
->handlers
[(int) submode
].insn_code
2852 != CODE_FOR_nothing
))
2854 /* Don't split destination if it is a stack push. */
2855 int stack
= push_operand (x
, GET_MODE (x
));
2857 #ifdef PUSH_ROUNDING
2858 /* In case we output to the stack, but the size is smaller than the
2859 machine can push exactly, we need to use move instructions. */
2861 && (PUSH_ROUNDING (GET_MODE_SIZE (submode
))
2862 != GET_MODE_SIZE (submode
)))
2865 HOST_WIDE_INT offset1
, offset2
;
2867 /* Do not use anti_adjust_stack, since we don't want to update
2868 stack_pointer_delta. */
2869 temp
= expand_binop (Pmode
,
2870 #ifdef STACK_GROWS_DOWNWARD
2878 (GET_MODE_SIZE (GET_MODE (x
)))),
2879 stack_pointer_rtx
, 0, OPTAB_LIB_WIDEN
);
2881 if (temp
!= stack_pointer_rtx
)
2882 emit_move_insn (stack_pointer_rtx
, temp
);
2884 #ifdef STACK_GROWS_DOWNWARD
2886 offset2
= GET_MODE_SIZE (submode
);
2888 offset1
= -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)));
2889 offset2
= (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x
)))
2890 + GET_MODE_SIZE (submode
));
2893 emit_move_insn (change_address (x
, submode
,
2894 gen_rtx_PLUS (Pmode
,
2896 GEN_INT (offset1
))),
2897 gen_realpart (submode
, y
));
2898 emit_move_insn (change_address (x
, submode
,
2899 gen_rtx_PLUS (Pmode
,
2901 GEN_INT (offset2
))),
2902 gen_imagpart (submode
, y
));
2906 /* If this is a stack, push the highpart first, so it
2907 will be in the argument order.
2909 In that case, change_address is used only to convert
2910 the mode, not to change the address. */
2913 /* Note that the real part always precedes the imag part in memory
2914 regardless of machine's endianness. */
2915 #ifdef STACK_GROWS_DOWNWARD
2916 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2917 gen_imagpart (submode
, y
));
2918 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2919 gen_realpart (submode
, y
));
2921 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2922 gen_realpart (submode
, y
));
2923 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2924 gen_imagpart (submode
, y
));
2929 rtx realpart_x
, realpart_y
;
2930 rtx imagpart_x
, imagpart_y
;
2932 /* If this is a complex value with each part being smaller than a
2933 word, the usual calling sequence will likely pack the pieces into
2934 a single register. Unfortunately, SUBREG of hard registers only
2935 deals in terms of words, so we have a problem converting input
2936 arguments to the CONCAT of two registers that is used elsewhere
2937 for complex values. If this is before reload, we can copy it into
2938 memory and reload. FIXME, we should see about using extract and
2939 insert on integer registers, but complex short and complex char
2940 variables should be rarely used. */
2941 if (GET_MODE_BITSIZE (mode
) < 2 * BITS_PER_WORD
2942 && (reload_in_progress
| reload_completed
) == 0)
2945 = (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
);
2947 = (REG_P (y
) && REGNO (y
) < FIRST_PSEUDO_REGISTER
);
2949 if (packed_dest_p
|| packed_src_p
)
2951 enum mode_class reg_class
= ((class == MODE_COMPLEX_FLOAT
)
2952 ? MODE_FLOAT
: MODE_INT
);
2954 enum machine_mode reg_mode
2955 = mode_for_size (GET_MODE_BITSIZE (mode
), reg_class
, 1);
2957 if (reg_mode
!= BLKmode
)
2959 rtx mem
= assign_stack_temp (reg_mode
,
2960 GET_MODE_SIZE (mode
), 0);
2961 rtx cmem
= adjust_address (mem
, mode
, 0);
2964 = N_("function using short complex types cannot be inline");
2968 rtx sreg
= gen_rtx_SUBREG (reg_mode
, x
, 0);
2970 emit_move_insn_1 (cmem
, y
);
2971 return emit_move_insn_1 (sreg
, mem
);
2975 rtx sreg
= gen_rtx_SUBREG (reg_mode
, y
, 0);
2977 emit_move_insn_1 (mem
, sreg
);
2978 return emit_move_insn_1 (x
, cmem
);
2984 realpart_x
= gen_realpart (submode
, x
);
2985 realpart_y
= gen_realpart (submode
, y
);
2986 imagpart_x
= gen_imagpart (submode
, x
);
2987 imagpart_y
= gen_imagpart (submode
, y
);
2989 /* Show the output dies here. This is necessary for SUBREGs
2990 of pseudos since we cannot track their lifetimes correctly;
2991 hard regs shouldn't appear here except as return values.
2992 We never want to emit such a clobber after reload. */
2994 && ! (reload_in_progress
|| reload_completed
)
2995 && (GET_CODE (realpart_x
) == SUBREG
2996 || GET_CODE (imagpart_x
) == SUBREG
))
2997 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
2999 emit_move_insn (realpart_x
, realpart_y
);
3000 emit_move_insn (imagpart_x
, imagpart_y
);
3003 return get_last_insn ();
3006 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3007 find a mode to do it in. If we have a movcc, use it. Otherwise,
3008 find the MODE_INT mode of the same width. */
3009 else if (GET_MODE_CLASS (mode
) == MODE_CC
3010 && mov_optab
->handlers
[(int) mode
].insn_code
== CODE_FOR_nothing
)
3012 enum insn_code insn_code
;
3013 enum machine_mode tmode
= VOIDmode
;
3017 && mov_optab
->handlers
[(int) CCmode
].insn_code
!= CODE_FOR_nothing
)
3020 for (tmode
= QImode
; tmode
!= VOIDmode
;
3021 tmode
= GET_MODE_WIDER_MODE (tmode
))
3022 if (GET_MODE_SIZE (tmode
) == GET_MODE_SIZE (mode
))
3025 if (tmode
== VOIDmode
)
3028 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3029 may call change_address which is not appropriate if we were
3030 called when a reload was in progress. We don't have to worry
3031 about changing the address since the size in bytes is supposed to
3032 be the same. Copy the MEM to change the mode and move any
3033 substitutions from the old MEM to the new one. */
3035 if (reload_in_progress
)
3037 x
= gen_lowpart_common (tmode
, x1
);
3038 if (x
== 0 && GET_CODE (x1
) == MEM
)
3040 x
= adjust_address_nv (x1
, tmode
, 0);
3041 copy_replacements (x1
, x
);
3044 y
= gen_lowpart_common (tmode
, y1
);
3045 if (y
== 0 && GET_CODE (y1
) == MEM
)
3047 y
= adjust_address_nv (y1
, tmode
, 0);
3048 copy_replacements (y1
, y
);
3053 x
= gen_lowpart (tmode
, x
);
3054 y
= gen_lowpart (tmode
, y
);
3057 insn_code
= mov_optab
->handlers
[(int) tmode
].insn_code
;
3058 return emit_insn (GEN_FCN (insn_code
) (x
, y
));
3061 /* Try using a move pattern for the corresponding integer mode. This is
3062 only safe when simplify_subreg can convert MODE constants into integer
3063 constants. At present, it can only do this reliably if the value
3064 fits within a HOST_WIDE_INT. */
3065 else if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
3066 && (submode
= int_mode_for_mode (mode
)) != BLKmode
3067 && mov_optab
->handlers
[submode
].insn_code
!= CODE_FOR_nothing
)
3068 return emit_insn (GEN_FCN (mov_optab
->handlers
[submode
].insn_code
)
3069 (simplify_gen_subreg (submode
, x
, mode
, 0),
3070 simplify_gen_subreg (submode
, y
, mode
, 0)));
3072 /* This will handle any multi-word or full-word mode that lacks a move_insn
3073 pattern. However, you will get better code if you define such patterns,
3074 even if they must turn into multiple assembler instructions. */
3075 else if (GET_MODE_SIZE (mode
) >= UNITS_PER_WORD
)
3082 #ifdef PUSH_ROUNDING
3084 /* If X is a push on the stack, do the push now and replace
3085 X with a reference to the stack pointer. */
3086 if (push_operand (x
, GET_MODE (x
)))
3091 /* Do not use anti_adjust_stack, since we don't want to update
3092 stack_pointer_delta. */
3093 temp
= expand_binop (Pmode
,
3094 #ifdef STACK_GROWS_DOWNWARD
3102 (GET_MODE_SIZE (GET_MODE (x
)))),
3103 stack_pointer_rtx
, 0, OPTAB_LIB_WIDEN
);
3105 if (temp
!= stack_pointer_rtx
)
3106 emit_move_insn (stack_pointer_rtx
, temp
);
3108 code
= GET_CODE (XEXP (x
, 0));
3110 /* Just hope that small offsets off SP are OK. */
3111 if (code
== POST_INC
)
3112 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3113 GEN_INT (-((HOST_WIDE_INT
)
3114 GET_MODE_SIZE (GET_MODE (x
)))));
3115 else if (code
== POST_DEC
)
3116 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3117 GEN_INT (GET_MODE_SIZE (GET_MODE (x
))));
3119 temp
= stack_pointer_rtx
;
3121 x
= change_address (x
, VOIDmode
, temp
);
3125 /* If we are in reload, see if either operand is a MEM whose address
3126 is scheduled for replacement. */
3127 if (reload_in_progress
&& GET_CODE (x
) == MEM
3128 && (inner
= find_replacement (&XEXP (x
, 0))) != XEXP (x
, 0))
3129 x
= replace_equiv_address_nv (x
, inner
);
3130 if (reload_in_progress
&& GET_CODE (y
) == MEM
3131 && (inner
= find_replacement (&XEXP (y
, 0))) != XEXP (y
, 0))
3132 y
= replace_equiv_address_nv (y
, inner
);
3138 i
< (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
3141 rtx xpart
= operand_subword (x
, i
, 1, mode
);
3142 rtx ypart
= operand_subword (y
, i
, 1, mode
);
3144 /* If we can't get a part of Y, put Y into memory if it is a
3145 constant. Otherwise, force it into a register. If we still
3146 can't get a part of Y, abort. */
3147 if (ypart
== 0 && CONSTANT_P (y
))
3149 y
= force_const_mem (mode
, y
);
3150 ypart
= operand_subword (y
, i
, 1, mode
);
3152 else if (ypart
== 0)
3153 ypart
= operand_subword_force (y
, i
, mode
);
3155 if (xpart
== 0 || ypart
== 0)
3158 need_clobber
|= (GET_CODE (xpart
) == SUBREG
);
3160 last_insn
= emit_move_insn (xpart
, ypart
);
3166 /* Show the output dies here. This is necessary for SUBREGs
3167 of pseudos since we cannot track their lifetimes correctly;
3168 hard regs shouldn't appear here except as return values.
3169 We never want to emit such a clobber after reload. */
3171 && ! (reload_in_progress
|| reload_completed
)
3172 && need_clobber
!= 0)
3173 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
3183 /* If Y is representable exactly in a narrower mode, and the target can
3184 perform the extension directly from constant or memory, then emit the
3185 move as an extension. */
3188 compress_float_constant (rtx x
, rtx y
)
3190 enum machine_mode dstmode
= GET_MODE (x
);
3191 enum machine_mode orig_srcmode
= GET_MODE (y
);
3192 enum machine_mode srcmode
;
3195 REAL_VALUE_FROM_CONST_DOUBLE (r
, y
);
3197 for (srcmode
= GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode
));
3198 srcmode
!= orig_srcmode
;
3199 srcmode
= GET_MODE_WIDER_MODE (srcmode
))
3202 rtx trunc_y
, last_insn
;
3204 /* Skip if the target can't extend this way. */
3205 ic
= can_extend_p (dstmode
, srcmode
, 0);
3206 if (ic
== CODE_FOR_nothing
)
3209 /* Skip if the narrowed value isn't exact. */
3210 if (! exact_real_truncate (srcmode
, &r
))
3213 trunc_y
= CONST_DOUBLE_FROM_REAL_VALUE (r
, srcmode
);
3215 if (LEGITIMATE_CONSTANT_P (trunc_y
))
3217 /* Skip if the target needs extra instructions to perform
3219 if (! (*insn_data
[ic
].operand
[1].predicate
) (trunc_y
, srcmode
))
3222 else if (float_extend_from_mem
[dstmode
][srcmode
])
3223 trunc_y
= validize_mem (force_const_mem (srcmode
, trunc_y
));
3227 emit_unop_insn (ic
, x
, trunc_y
, UNKNOWN
);
3228 last_insn
= get_last_insn ();
3230 if (GET_CODE (x
) == REG
)
3231 set_unique_reg_note (last_insn
, REG_EQUAL
, y
);
3239 /* Pushing data onto the stack. */
3241 /* Push a block of length SIZE (perhaps variable)
3242 and return an rtx to address the beginning of the block.
3243 Note that it is not possible for the value returned to be a QUEUED.
3244 The value may be virtual_outgoing_args_rtx.
3246 EXTRA is the number of bytes of padding to push in addition to SIZE.
3247 BELOW nonzero means this padding comes at low addresses;
3248 otherwise, the padding comes at high addresses. */
3251 push_block (rtx size
, int extra
, int below
)
3255 size
= convert_modes (Pmode
, ptr_mode
, size
, 1);
3256 if (CONSTANT_P (size
))
3257 anti_adjust_stack (plus_constant (size
, extra
));
3258 else if (GET_CODE (size
) == REG
&& extra
== 0)
3259 anti_adjust_stack (size
);
3262 temp
= copy_to_mode_reg (Pmode
, size
);
3264 temp
= expand_binop (Pmode
, add_optab
, temp
, GEN_INT (extra
),
3265 temp
, 0, OPTAB_LIB_WIDEN
);
3266 anti_adjust_stack (temp
);
3269 #ifndef STACK_GROWS_DOWNWARD
3275 temp
= virtual_outgoing_args_rtx
;
3276 if (extra
!= 0 && below
)
3277 temp
= plus_constant (temp
, extra
);
3281 if (GET_CODE (size
) == CONST_INT
)
3282 temp
= plus_constant (virtual_outgoing_args_rtx
,
3283 -INTVAL (size
) - (below
? 0 : extra
));
3284 else if (extra
!= 0 && !below
)
3285 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3286 negate_rtx (Pmode
, plus_constant (size
, extra
)));
3288 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3289 negate_rtx (Pmode
, size
));
3292 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT
), temp
);
#ifdef PUSH_ROUNDING

/* Emit single push insn.  */

static void
emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;
  insn_operand_predicate_fn pred;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is push pattern, use it.  Otherwise try old way of throwing
     MEM representing push operation to move expander.  */
  icode = push_optab->handlers[(int) mode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      if (((pred = insn_data[(int) icode].operand[0].predicate)
	   && !((*pred) (x, mode))))
	x = force_reg (mode, x);
      emit_insn (GEN_FCN (icode) (x));
      return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  /* If we are to pad downward, adjust the stack pointer first and
     then store X into the stack location using an offset.  This is
     because emit_move_insn does not know how to pad; it does not have
     access to type.  */
  else if (FUNCTION_ARG_PADDING (mode, type) == downward)
    {
      unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
      HOST_WIDE_INT offset;

      emit_move_insn (stack_pointer_rtx,
		      expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
				    sub_optab,
#else
				    add_optab,
#endif
				    stack_pointer_rtx,
				    GEN_INT (rounded_size),
				    NULL_RTX, 0, OPTAB_LIB_WIDEN));

      offset = (HOST_WIDE_INT) padding_size;
#ifdef STACK_GROWS_DOWNWARD
      if (STACK_PUSH_CODE == POST_DEC)
	/* We have already decremented the stack pointer, so get the
	   previous value.  */
	offset += (HOST_WIDE_INT) rounded_size;
#else
      if (STACK_PUSH_CODE == POST_INC)
	/* We have already incremented the stack pointer, so get the
	   previous value.  */
	offset -= (HOST_WIDE_INT) rounded_size;
#endif
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
    }
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				GEN_INT (-(HOST_WIDE_INT) rounded_size));
#else
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				GEN_INT (rounded_size));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (flag_optimize_sibling_calls)
	/* Function incoming arguments may overlap with sibling call
	   outgoing arguments and we cannot allow reordering of reads
	   from function arguments with stores to outgoing arguments
	   of sibling calls.  */
	set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
#endif
3387 /* Generate code to push X onto the stack, assuming it has mode MODE and
3389 MODE is redundant except when X is a CONST_INT (since they don't
3391 SIZE is an rtx for the size of data to be copied (in bytes),
3392 needed only if X is BLKmode.
3394 ALIGN (in bits) is maximum alignment we can assume.
3396 If PARTIAL and REG are both nonzero, then copy that many of the first
3397 words of X into registers starting with REG, and push the rest of X.
3398 The amount of space pushed is decreased by PARTIAL words,
3399 rounded *down* to a multiple of PARM_BOUNDARY.
3400 REG must be a hard register in this case.
3401 If REG is zero but PARTIAL is not, take any all others actions for an
3402 argument partially in registers, but do not actually load any
3405 EXTRA is the amount in bytes of extra space to leave next to this arg.
3406 This is ignored if an argument block has already been allocated.
3408 On a machine that lacks real push insns, ARGS_ADDR is the address of
3409 the bottom of the argument block for this call. We use indexing off there
3410 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
3411 argument block has not been preallocated.
3413 ARGS_SO_FAR is the size of args previously pushed for this call.
3415 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3416 for arguments passed in registers. If nonzero, it will be the number
3417 of bytes required. */
3420 emit_push_insn (rtx x
, enum machine_mode mode
, tree type
, rtx size
,
3421 unsigned int align
, int partial
, rtx reg
, int extra
,
3422 rtx args_addr
, rtx args_so_far
, int reg_parm_stack_space
,
3426 enum direction stack_direction
3427 #ifdef STACK_GROWS_DOWNWARD
3433 /* Decide where to pad the argument: `downward' for below,
3434 `upward' for above, or `none' for don't pad it.
3435 Default is below for small data on big-endian machines; else above. */
3436 enum direction where_pad
= FUNCTION_ARG_PADDING (mode
, type
);
3438 /* Invert direction if stack is post-decrement.
3440 if (STACK_PUSH_CODE
== POST_DEC
)
3441 if (where_pad
!= none
)
3442 where_pad
= (where_pad
== downward
? upward
: downward
);
3444 xinner
= x
= protect_from_queue (x
, 0);
3446 if (mode
== BLKmode
)
3448 /* Copy a block into the stack, entirely or partially. */
3451 int used
= partial
* UNITS_PER_WORD
;
3455 if (reg
&& GET_CODE (reg
) == PARALLEL
)
3457 /* Use the size of the elt to compute offset. */
3458 rtx elt
= XEXP (XVECEXP (reg
, 0, 0), 0);
3459 used
= partial
* GET_MODE_SIZE (GET_MODE (elt
));
3460 offset
= used
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
3463 offset
= used
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
3470 /* USED is now the # of bytes we need not copy to the stack
3471 because registers will take care of them. */
3474 xinner
= adjust_address (xinner
, BLKmode
, used
);
3476 /* If the partial register-part of the arg counts in its stack size,
3477 skip the part of stack space corresponding to the registers.
3478 Otherwise, start copying to the beginning of the stack space,
3479 by setting SKIP to 0. */
3480 skip
= (reg_parm_stack_space
== 0) ? 0 : used
;
3482 #ifdef PUSH_ROUNDING
3483 /* Do it with several push insns if that doesn't take lots of insns
3484 and if there is no difficulty with push insns that skip bytes
3485 on the stack for alignment purposes. */
3488 && GET_CODE (size
) == CONST_INT
3490 && MEM_ALIGN (xinner
) >= align
3491 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size
) - used
, align
))
3492 /* Here we avoid the case of a structure whose weak alignment
3493 forces many pushes of a small amount of data,
3494 and such small pushes do rounding that causes trouble. */
3495 && ((! SLOW_UNALIGNED_ACCESS (word_mode
, align
))
3496 || align
>= BIGGEST_ALIGNMENT
3497 || (PUSH_ROUNDING (align
/ BITS_PER_UNIT
)
3498 == (align
/ BITS_PER_UNIT
)))
3499 && PUSH_ROUNDING (INTVAL (size
)) == INTVAL (size
))
3501 /* Push padding now if padding above and stack grows down,
3502 or if padding below and stack grows up.
3503 But if space already allocated, this has already been done. */
3504 if (extra
&& args_addr
== 0
3505 && where_pad
!= none
&& where_pad
!= stack_direction
)
3506 anti_adjust_stack (GEN_INT (extra
));
3508 move_by_pieces (NULL
, xinner
, INTVAL (size
) - used
, align
, 0);
3511 #endif /* PUSH_ROUNDING */
3515 /* Otherwise make space on the stack and copy the data
3516 to the address of that space. */
3518 /* Deduct words put into registers from the size we must copy. */
3521 if (GET_CODE (size
) == CONST_INT
)
3522 size
= GEN_INT (INTVAL (size
) - used
);
3524 size
= expand_binop (GET_MODE (size
), sub_optab
, size
,
3525 GEN_INT (used
), NULL_RTX
, 0,
3529 /* Get the address of the stack space.
3530 In this case, we do not deal with EXTRA separately.
3531 A single stack adjust will do. */
3534 temp
= push_block (size
, extra
, where_pad
== downward
);
3537 else if (GET_CODE (args_so_far
) == CONST_INT
)
3538 temp
= memory_address (BLKmode
,
3539 plus_constant (args_addr
,
3540 skip
+ INTVAL (args_so_far
)));
3542 temp
= memory_address (BLKmode
,
3543 plus_constant (gen_rtx_PLUS (Pmode
,
3548 if (!ACCUMULATE_OUTGOING_ARGS
)
3550 /* If the source is referenced relative to the stack pointer,
3551 copy it to another register to stabilize it. We do not need
3552 to do this if we know that we won't be changing sp. */
3554 if (reg_mentioned_p (virtual_stack_dynamic_rtx
, temp
)
3555 || reg_mentioned_p (virtual_outgoing_args_rtx
, temp
))
3556 temp
= copy_to_reg (temp
);
3559 target
= gen_rtx_MEM (BLKmode
, temp
);
3563 set_mem_attributes (target
, type
, 1);
3564 /* Function incoming arguments may overlap with sibling call
3565 outgoing arguments and we cannot allow reordering of reads
3566 from function arguments with stores to outgoing arguments
3567 of sibling calls. */
3568 set_mem_alias_set (target
, 0);
3571 /* ALIGN may well be better aligned than TYPE, e.g. due to
3572 PARM_BOUNDARY. Assume the caller isn't lying. */
3573 set_mem_align (target
, align
);
3575 emit_block_move (target
, xinner
, size
, BLOCK_OP_CALL_PARM
);
3578 else if (partial
> 0)
3580 /* Scalar partly in registers. */
3582 int size
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
3585 /* # words of start of argument
3586 that we must make space for but need not store. */
3587 int offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_WORD
);
3588 int args_offset
= INTVAL (args_so_far
);
3591 /* Push padding now if padding above and stack grows down,
3592 or if padding below and stack grows up.
3593 But if space already allocated, this has already been done. */
3594 if (extra
&& args_addr
== 0
3595 && where_pad
!= none
&& where_pad
!= stack_direction
)
3596 anti_adjust_stack (GEN_INT (extra
));
3598 /* If we make space by pushing it, we might as well push
3599 the real data. Otherwise, we can leave OFFSET nonzero
3600 and leave the space uninitialized. */
3604 /* Now NOT_STACK gets the number of words that we don't need to
3605 allocate on the stack. */
3606 not_stack
= partial
- offset
;
3608 /* If the partial register-part of the arg counts in its stack size,
3609 skip the part of stack space corresponding to the registers.
3610 Otherwise, start copying to the beginning of the stack space,
3611 by setting SKIP to 0. */
3612 skip
= (reg_parm_stack_space
== 0) ? 0 : not_stack
;
3614 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
3615 x
= validize_mem (force_const_mem (mode
, x
));
3617 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3618 SUBREGs of such registers are not allowed. */
3619 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
3620 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
))
3621 x
= copy_to_reg (x
);
3623 /* Loop over all the words allocated on the stack for this arg. */
3624 /* We can do it by words, because any scalar bigger than a word
3625 has a size a multiple of a word. */
3626 #ifndef PUSH_ARGS_REVERSED
3627 for (i
= not_stack
; i
< size
; i
++)
3629 for (i
= size
- 1; i
>= not_stack
; i
--)
3631 if (i
>= not_stack
+ offset
)
3632 emit_push_insn (operand_subword_force (x
, i
, mode
),
3633 word_mode
, NULL_TREE
, NULL_RTX
, align
, 0, NULL_RTX
,
3635 GEN_INT (args_offset
+ ((i
- not_stack
+ skip
)
3637 reg_parm_stack_space
, alignment_pad
);
3644 /* Push padding now if padding above and stack grows down,
3645 or if padding below and stack grows up.
3646 But if space already allocated, this has already been done. */
3647 if (extra
&& args_addr
== 0
3648 && where_pad
!= none
&& where_pad
!= stack_direction
)
3649 anti_adjust_stack (GEN_INT (extra
));
3651 #ifdef PUSH_ROUNDING
3652 if (args_addr
== 0 && PUSH_ARGS
)
3653 emit_single_push_insn (mode
, x
, type
);
3657 if (GET_CODE (args_so_far
) == CONST_INT
)
3659 = memory_address (mode
,
3660 plus_constant (args_addr
,
3661 INTVAL (args_so_far
)));
3663 addr
= memory_address (mode
, gen_rtx_PLUS (Pmode
, args_addr
,
3665 dest
= gen_rtx_MEM (mode
, addr
);
3668 set_mem_attributes (dest
, type
, 1);
3669 /* Function incoming arguments may overlap with sibling call
3670 outgoing arguments and we cannot allow reordering of reads
3671 from function arguments with stores to outgoing arguments
3672 of sibling calls. */
3673 set_mem_alias_set (dest
, 0);
3676 emit_move_insn (dest
, x
);
3680 /* If part should go in registers, copy that part
3681 into the appropriate registers. Do this now, at the end,
3682 since mem-to-mem copies above may do function calls. */
3683 if (partial
> 0 && reg
!= 0)
3685 /* Handle calls that pass values in multiple non-contiguous locations.
3686 The Irix 6 ABI has examples of this. */
3687 if (GET_CODE (reg
) == PARALLEL
)
3688 emit_group_load (reg
, x
, type
, -1);
3690 move_block_to_reg (REGNO (reg
), x
, partial
, mode
);
3693 if (extra
&& args_addr
== 0 && where_pad
== stack_direction
)
3694 anti_adjust_stack (GEN_INT (extra
));
3696 if (alignment_pad
&& args_addr
== 0)
3697 anti_adjust_stack (alignment_pad
);
3700 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3704 get_subtarget (rtx x
)
3707 /* Only registers can be subtargets. */
3708 || GET_CODE (x
) != REG
3709 /* If the register is readonly, it can't be set more than once. */
3710 || RTX_UNCHANGING_P (x
)
3711 /* Don't use hard regs to avoid extending their life. */
3712 || REGNO (x
) < FIRST_PSEUDO_REGISTER
3713 /* Avoid subtargets inside loops,
3714 since they hide some invariant expressions. */
3715 || preserve_subexpressions_p ())
3719 /* Expand an assignment that stores the value of FROM into TO.
3720 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3721 (This may contain a QUEUED rtx;
3722 if the value is constant, this rtx is a constant.)
3723 Otherwise, the returned value is NULL_RTX. */
3726 expand_assignment (tree to
, tree from
, int want_value
)
3731 /* Don't crash if the lhs of the assignment was erroneous. */
3733 if (TREE_CODE (to
) == ERROR_MARK
)
3735 result
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3736 return want_value
? result
: NULL_RTX
;
3739 /* Assignment of a structure component needs special treatment
3740 if the structure component's rtx is not simply a MEM.
3741 Assignment of an array element at a constant index, and assignment of
3742 an array element in an unaligned packed structure field, has the same
3745 if (TREE_CODE (to
) == COMPONENT_REF
|| TREE_CODE (to
) == BIT_FIELD_REF
3746 || TREE_CODE (to
) == ARRAY_REF
|| TREE_CODE (to
) == ARRAY_RANGE_REF
3747 || TREE_CODE (TREE_TYPE (to
)) == ARRAY_TYPE
)
3749 enum machine_mode mode1
;
3750 HOST_WIDE_INT bitsize
, bitpos
;
3758 tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
, &mode1
,
3759 &unsignedp
, &volatilep
);
3761 /* If we are going to use store_bit_field and extract_bit_field,
3762 make sure to_rtx will be safe for multiple use. */
3764 if (mode1
== VOIDmode
&& want_value
)
3765 tem
= stabilize_reference (tem
);
3767 orig_to_rtx
= to_rtx
= expand_expr (tem
, NULL_RTX
, VOIDmode
, 0);
3771 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
3773 if (GET_CODE (to_rtx
) != MEM
)
3776 #ifdef POINTERS_EXTEND_UNSIGNED
3777 if (GET_MODE (offset_rtx
) != Pmode
)
3778 offset_rtx
= convert_to_mode (Pmode
, offset_rtx
, 0);
3780 if (GET_MODE (offset_rtx
) != ptr_mode
)
3781 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
3784 /* A constant address in TO_RTX can have VOIDmode, we must not try
3785 to call force_reg for that case. Avoid that case. */
3786 if (GET_CODE (to_rtx
) == MEM
3787 && GET_MODE (to_rtx
) == BLKmode
3788 && GET_MODE (XEXP (to_rtx
, 0)) != VOIDmode
3790 && (bitpos
% bitsize
) == 0
3791 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
3792 && MEM_ALIGN (to_rtx
) == GET_MODE_ALIGNMENT (mode1
))
3794 to_rtx
= adjust_address (to_rtx
, mode1
, bitpos
/ BITS_PER_UNIT
);
3798 to_rtx
= offset_address (to_rtx
, offset_rtx
,
3799 highest_pow2_factor_for_type (TREE_TYPE (to
),
3803 if (GET_CODE (to_rtx
) == MEM
)
3805 /* If the field is at offset zero, we could have been given the
3806 DECL_RTX of the parent struct. Don't munge it. */
3807 to_rtx
= shallow_copy_rtx (to_rtx
);
3809 set_mem_attributes_minus_bitpos (to_rtx
, to
, 0, bitpos
);
3812 /* Deal with volatile and readonly fields. The former is only done
3813 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3814 if (volatilep
&& GET_CODE (to_rtx
) == MEM
)
3816 if (to_rtx
== orig_to_rtx
)
3817 to_rtx
= copy_rtx (to_rtx
);
3818 MEM_VOLATILE_P (to_rtx
) = 1;
3821 if (TREE_CODE (to
) == COMPONENT_REF
3822 && TREE_READONLY (TREE_OPERAND (to
, 1))
3823 /* We can't assert that a MEM won't be set more than once
3824 if the component is not addressable because another
3825 non-addressable component may be referenced by the same MEM. */
3826 && ! (GET_CODE (to_rtx
) == MEM
&& ! can_address_p (to
)))
3828 if (to_rtx
== orig_to_rtx
)
3829 to_rtx
= copy_rtx (to_rtx
);
3830 RTX_UNCHANGING_P (to_rtx
) = 1;
3833 if (GET_CODE (to_rtx
) == MEM
&& ! can_address_p (to
))
3835 if (to_rtx
== orig_to_rtx
)
3836 to_rtx
= copy_rtx (to_rtx
);
3837 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
3840 result
= store_field (to_rtx
, bitsize
, bitpos
, mode1
, from
,
3842 /* Spurious cast for HPUX compiler. */
3843 ? ((enum machine_mode
)
3844 TYPE_MODE (TREE_TYPE (to
)))
3846 unsignedp
, TREE_TYPE (tem
), get_alias_set (to
));
3848 preserve_temp_slots (result
);
3852 /* If the value is meaningful, convert RESULT to the proper mode.
3853 Otherwise, return nothing. */
3854 return (want_value
? convert_modes (TYPE_MODE (TREE_TYPE (to
)),
3855 TYPE_MODE (TREE_TYPE (from
)),
3857 TREE_UNSIGNED (TREE_TYPE (to
)))
3861 /* If the rhs is a function call and its value is not an aggregate,
3862 call the function before we start to compute the lhs.
3863 This is needed for correct code for cases such as
3864 val = setjmp (buf) on machines where reference to val
3865 requires loading up part of an address in a separate insn.
3867 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3868 since it might be a promoted variable where the zero- or sign- extension
3869 needs to be done. Handling this in the normal way is safe because no
3870 computation is done before the call. */
3871 if (TREE_CODE (from
) == CALL_EXPR
&& ! aggregate_value_p (from
, from
)
3872 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from
))) == INTEGER_CST
3873 && ! ((TREE_CODE (to
) == VAR_DECL
|| TREE_CODE (to
) == PARM_DECL
)
3874 && GET_CODE (DECL_RTL (to
)) == REG
))
3879 value
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3881 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
3883 /* Handle calls that return values in multiple non-contiguous locations.
3884 The Irix 6 ABI has examples of this. */
3885 if (GET_CODE (to_rtx
) == PARALLEL
)
3886 emit_group_load (to_rtx
, value
, TREE_TYPE (from
),
3887 int_size_in_bytes (TREE_TYPE (from
)));
3888 else if (GET_MODE (to_rtx
) == BLKmode
)
3889 emit_block_move (to_rtx
, value
, expr_size (from
), BLOCK_OP_NORMAL
);
3892 if (POINTER_TYPE_P (TREE_TYPE (to
)))
3893 value
= convert_memory_address (GET_MODE (to_rtx
), value
);
3894 emit_move_insn (to_rtx
, value
);
3896 preserve_temp_slots (to_rtx
);
3899 return want_value
? to_rtx
: NULL_RTX
;
3902 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3903 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3906 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
3908 /* Don't move directly into a return register. */
3909 if (TREE_CODE (to
) == RESULT_DECL
3910 && (GET_CODE (to_rtx
) == REG
|| GET_CODE (to_rtx
) == PARALLEL
))
3915 temp
= expand_expr (from
, 0, GET_MODE (to_rtx
), 0);
3917 if (GET_CODE (to_rtx
) == PARALLEL
)
3918 emit_group_load (to_rtx
, temp
, TREE_TYPE (from
),
3919 int_size_in_bytes (TREE_TYPE (from
)));
3921 emit_move_insn (to_rtx
, temp
);
3923 preserve_temp_slots (to_rtx
);
3926 return want_value
? to_rtx
: NULL_RTX
;
3929 /* In case we are returning the contents of an object which overlaps
3930 the place the value is being stored, use a safe function when copying
3931 a value through a pointer into a structure value return block. */
3932 if (TREE_CODE (to
) == RESULT_DECL
&& TREE_CODE (from
) == INDIRECT_REF
3933 && current_function_returns_struct
3934 && !current_function_returns_pcc_struct
)
3939 size
= expr_size (from
);
3940 from_rtx
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3942 if (TARGET_MEM_FUNCTIONS
)
3943 emit_library_call (memmove_libfunc
, LCT_NORMAL
,
3944 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
3945 XEXP (from_rtx
, 0), Pmode
,
3946 convert_to_mode (TYPE_MODE (sizetype
),
3947 size
, TREE_UNSIGNED (sizetype
)),
3948 TYPE_MODE (sizetype
));
3950 emit_library_call (bcopy_libfunc
, LCT_NORMAL
,
3951 VOIDmode
, 3, XEXP (from_rtx
, 0), Pmode
,
3952 XEXP (to_rtx
, 0), Pmode
,
3953 convert_to_mode (TYPE_MODE (integer_type_node
),
3955 TREE_UNSIGNED (integer_type_node
)),
3956 TYPE_MODE (integer_type_node
));
3958 preserve_temp_slots (to_rtx
);
3961 return want_value
? to_rtx
: NULL_RTX
;
3964 /* Compute FROM and store the value in the rtx we got. */
3967 result
= store_expr (from
, to_rtx
, want_value
);
3968 preserve_temp_slots (result
);
3971 return want_value
? result
: NULL_RTX
;
3974 /* Generate code for computing expression EXP,
3975 and storing the value into TARGET.
3976 TARGET may contain a QUEUED rtx.
3978 If WANT_VALUE & 1 is nonzero, return a copy of the value
3979 not in TARGET, so that we can be sure to use the proper
3980 value in a containing expression even if TARGET has something
3981 else stored in it. If possible, we copy the value through a pseudo
3982 and return that pseudo. Or, if the value is constant, we try to
3983 return the constant. In some cases, we return a pseudo
3984 copied *from* TARGET.
3986 If the mode is BLKmode then we may return TARGET itself.
3987 It turns out that in BLKmode it doesn't cause a problem.
3988 because C has no operators that could combine two different
3989 assignments into the same BLKmode object with different values
3990 with no sequence point. Will other languages need this to
3993 If WANT_VALUE & 1 is 0, we return NULL, to make sure
3994 to catch quickly any cases where the caller uses the value
3995 and fails to set WANT_VALUE.
3997 If WANT_VALUE & 2 is set, this is a store into a call param on the
3998 stack, and block moves may need to be treated specially. */
4001 store_expr (tree exp
, rtx target
, int want_value
)
4004 rtx alt_rtl
= NULL_RTX
;
4005 int dont_return_target
= 0;
4006 int dont_store_target
= 0;
4008 if (VOID_TYPE_P (TREE_TYPE (exp
)))
4010 /* C++ can generate ?: expressions with a throw expression in one
4011 branch and an rvalue in the other. Here, we resolve attempts to
4012 store the throw expression's nonexistent result. */
4015 expand_expr (exp
, const0_rtx
, VOIDmode
, 0);
4018 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
4020 /* Perform first part of compound expression, then assign from second
4022 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
4023 want_value
& 2 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
4025 return store_expr (TREE_OPERAND (exp
, 1), target
, want_value
);
4027 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
4029 /* For conditional expression, get safe form of the target. Then
4030 test the condition, doing the appropriate assignment on either
4031 side. This avoids the creation of unnecessary temporaries.
4032 For non-BLKmode, it is more efficient not to do this. */
4034 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
4037 target
= protect_from_queue (target
, 1);
4039 do_pending_stack_adjust ();
4041 jumpifnot (TREE_OPERAND (exp
, 0), lab1
);
4042 start_cleanup_deferral ();
4043 store_expr (TREE_OPERAND (exp
, 1), target
, want_value
& 2);
4044 end_cleanup_deferral ();
4046 emit_jump_insn (gen_jump (lab2
));
4049 start_cleanup_deferral ();
4050 store_expr (TREE_OPERAND (exp
, 2), target
, want_value
& 2);
4051 end_cleanup_deferral ();
4056 return want_value
& 1 ? target
: NULL_RTX
;
4058 else if (queued_subexp_p (target
))
4059 /* If target contains a postincrement, let's not risk
4060 using it as the place to generate the rhs. */
4062 if (GET_MODE (target
) != BLKmode
&& GET_MODE (target
) != VOIDmode
)
4064 /* Expand EXP into a new pseudo. */
4065 temp
= gen_reg_rtx (GET_MODE (target
));
4066 temp
= expand_expr (exp
, temp
, GET_MODE (target
),
4068 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
));
4071 temp
= expand_expr (exp
, NULL_RTX
, GET_MODE (target
),
4073 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
));
4075 /* If target is volatile, ANSI requires accessing the value
4076 *from* the target, if it is accessed. So make that happen.
4077 In no case return the target itself. */
4078 if (! MEM_VOLATILE_P (target
) && (want_value
& 1) != 0)
4079 dont_return_target
= 1;
4081 else if ((want_value
& 1) != 0
4082 && GET_CODE (target
) == MEM
4083 && ! MEM_VOLATILE_P (target
)
4084 && GET_MODE (target
) != BLKmode
)
4085 /* If target is in memory and caller wants value in a register instead,
4086 arrange that. Pass TARGET as target for expand_expr so that,
4087 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4088 We know expand_expr will not use the target in that case.
4089 Don't do this if TARGET is volatile because we are supposed
4090 to write it and then read it. */
4092 temp
= expand_expr (exp
, target
, GET_MODE (target
),
4093 want_value
& 2 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
4094 if (GET_MODE (temp
) != BLKmode
&& GET_MODE (temp
) != VOIDmode
)
4096 /* If TEMP is already in the desired TARGET, only copy it from
4097 memory and don't store it there again. */
4099 || (rtx_equal_p (temp
, target
)
4100 && ! side_effects_p (temp
) && ! side_effects_p (target
)))
4101 dont_store_target
= 1;
4102 temp
= copy_to_reg (temp
);
4104 dont_return_target
= 1;
4106 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
4107 /* If this is a scalar in a register that is stored in a wider mode
4108 than the declared mode, compute the result into its declared mode
4109 and then convert to the wider mode. Our value is the computed
4112 rtx inner_target
= 0;
4114 /* If we don't want a value, we can do the conversion inside EXP,
4115 which will often result in some optimizations. Do the conversion
4116 in two steps: first change the signedness, if needed, then
4117 the extend. But don't do this if the type of EXP is a subtype
4118 of something else since then the conversion might involve
4119 more than just converting modes. */
4120 if ((want_value
& 1) == 0
4121 && INTEGRAL_TYPE_P (TREE_TYPE (exp
))
4122 && TREE_TYPE (TREE_TYPE (exp
)) == 0)
4124 if (TREE_UNSIGNED (TREE_TYPE (exp
))
4125 != SUBREG_PROMOTED_UNSIGNED_P (target
))
4127 ((*lang_hooks
.types
.signed_or_unsigned_type
)
4128 (SUBREG_PROMOTED_UNSIGNED_P (target
), TREE_TYPE (exp
)), exp
);
4130 exp
= convert ((*lang_hooks
.types
.type_for_mode
)
4131 (GET_MODE (SUBREG_REG (target
)),
4132 SUBREG_PROMOTED_UNSIGNED_P (target
)),
4135 inner_target
= SUBREG_REG (target
);
4138 temp
= expand_expr (exp
, inner_target
, VOIDmode
,
4139 want_value
& 2 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
4141 /* If TEMP is a MEM and we want a result value, make the access
4142 now so it gets done only once. Strictly speaking, this is
4143 only necessary if the MEM is volatile, or if the address
4144 overlaps TARGET. But not performing the load twice also
4145 reduces the amount of rtl we generate and then have to CSE. */
4146 if (GET_CODE (temp
) == MEM
&& (want_value
& 1) != 0)
4147 temp
= copy_to_reg (temp
);
4149 /* If TEMP is a VOIDmode constant, use convert_modes to make
4150 sure that we properly convert it. */
4151 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
4153 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
4154 temp
, SUBREG_PROMOTED_UNSIGNED_P (target
));
4155 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
4156 GET_MODE (target
), temp
,
4157 SUBREG_PROMOTED_UNSIGNED_P (target
));
4160 convert_move (SUBREG_REG (target
), temp
,
4161 SUBREG_PROMOTED_UNSIGNED_P (target
));
4163 /* If we promoted a constant, change the mode back down to match
4164 target. Otherwise, the caller might get confused by a result whose
4165 mode is larger than expected. */
4167 if ((want_value
& 1) != 0 && GET_MODE (temp
) != GET_MODE (target
))
4169 if (GET_MODE (temp
) != VOIDmode
)
4171 temp
= gen_lowpart_SUBREG (GET_MODE (target
), temp
);
4172 SUBREG_PROMOTED_VAR_P (temp
) = 1;
4173 SUBREG_PROMOTED_UNSIGNED_SET (temp
,
4174 SUBREG_PROMOTED_UNSIGNED_P (target
));
4177 temp
= convert_modes (GET_MODE (target
),
4178 GET_MODE (SUBREG_REG (target
)),
4179 temp
, SUBREG_PROMOTED_UNSIGNED_P (target
));
4182 return want_value
& 1 ? temp
: NULL_RTX
;
4186 temp
= expand_expr_real (exp
, target
, GET_MODE (target
),
4188 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
),
4190 /* Return TARGET if it's a specified hardware register.
4191 If TARGET is a volatile mem ref, either return TARGET
4192 or return a reg copied *from* TARGET; ANSI requires this.
4194 Otherwise, if TEMP is not TARGET, return TEMP
4195 if it is constant (for efficiency),
4196 or if we really want the correct value. */
4197 if (!(target
&& GET_CODE (target
) == REG
4198 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)
4199 && !(GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
4200 && ! rtx_equal_p (temp
, target
)
4201 && (CONSTANT_P (temp
) || (want_value
& 1) != 0))
4202 dont_return_target
= 1;
4205 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4206 the same as that of TARGET, adjust the constant. This is needed, for
4207 example, in case it is a CONST_DOUBLE and we want only a word-sized
4209 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
4210 && TREE_CODE (exp
) != ERROR_MARK
4211 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
4212 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
4213 temp
, TREE_UNSIGNED (TREE_TYPE (exp
)));
4215 /* If value was not generated in the target, store it there.
4216 Convert the value to TARGET's type first if necessary.
4217 If TEMP and TARGET compare equal according to rtx_equal_p, but
4218 one or both of them are volatile memory refs, we have to distinguish
4220 - expand_expr has used TARGET. In this case, we must not generate
4221 another copy. This can be detected by TARGET being equal according
4223 - expand_expr has not used TARGET - that means that the source just
4224 happens to have the same RTX form. Since temp will have been created
4225 by expand_expr, it will compare unequal according to == .
4226 We must generate a copy in this case, to reach the correct number
4227 of volatile memory references. */
4229 if ((! rtx_equal_p (temp
, target
)
4230 || (temp
!= target
&& (side_effects_p (temp
)
4231 || side_effects_p (target
))))
4232 && TREE_CODE (exp
) != ERROR_MARK
4233 && ! dont_store_target
4234 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4235 but TARGET is not valid memory reference, TEMP will differ
4236 from TARGET although it is really the same location. */
4237 && !(alt_rtl
&& rtx_equal_p (alt_rtl
, target
))
4238 /* If there's nothing to copy, don't bother. Don't call expr_size
4239 unless necessary, because some front-ends (C++) expr_size-hook
4240 aborts on objects that are not supposed to be bit-copied or
4242 && expr_size (exp
) != const0_rtx
)
4244 target
= protect_from_queue (target
, 1);
4245 if (GET_MODE (temp
) != GET_MODE (target
)
4246 && GET_MODE (temp
) != VOIDmode
)
4248 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
4249 if (dont_return_target
)
4251 /* In this case, we will return TEMP,
4252 so make sure it has the proper mode.
4253 But don't forget to store the value into TARGET. */
4254 temp
= convert_to_mode (GET_MODE (target
), temp
, unsignedp
);
4255 emit_move_insn (target
, temp
);
4258 convert_move (target
, temp
, unsignedp
);
4261 else if (GET_MODE (temp
) == BLKmode
&& TREE_CODE (exp
) == STRING_CST
)
4263 /* Handle copying a string constant into an array. The string
4264 constant may be shorter than the array. So copy just the string's
4265 actual length, and clear the rest. First get the size of the data
4266 type of the string, which is actually the size of the target. */
4267 rtx size
= expr_size (exp
);
4269 if (GET_CODE (size
) == CONST_INT
4270 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
4271 emit_block_move (target
, temp
, size
,
4273 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
4276 /* Compute the size of the data to copy from the string. */
4278 = size_binop (MIN_EXPR
,
4279 make_tree (sizetype
, size
),
4280 size_int (TREE_STRING_LENGTH (exp
)));
4282 = expand_expr (copy_size
, NULL_RTX
, VOIDmode
,
4284 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
));
4287 /* Copy that much. */
4288 copy_size_rtx
= convert_to_mode (ptr_mode
, copy_size_rtx
,
4289 TREE_UNSIGNED (sizetype
));
4290 emit_block_move (target
, temp
, copy_size_rtx
,
4292 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
4294 /* Figure out how much is left in TARGET that we have to clear.
4295 Do all calculations in ptr_mode. */
4296 if (GET_CODE (copy_size_rtx
) == CONST_INT
)
4298 size
= plus_constant (size
, -INTVAL (copy_size_rtx
));
4299 target
= adjust_address (target
, BLKmode
,
4300 INTVAL (copy_size_rtx
));
4304 size
= expand_binop (TYPE_MODE (sizetype
), sub_optab
, size
,
4305 copy_size_rtx
, NULL_RTX
, 0,
4308 #ifdef POINTERS_EXTEND_UNSIGNED
4309 if (GET_MODE (copy_size_rtx
) != Pmode
)
4310 copy_size_rtx
= convert_to_mode (Pmode
, copy_size_rtx
,
4311 TREE_UNSIGNED (sizetype
));
4314 target
= offset_address (target
, copy_size_rtx
,
4315 highest_pow2_factor (copy_size
));
4316 label
= gen_label_rtx ();
4317 emit_cmp_and_jump_insns (size
, const0_rtx
, LT
, NULL_RTX
,
4318 GET_MODE (size
), 0, label
);
4321 if (size
!= const0_rtx
)
4322 clear_storage (target
, size
);
4328 /* Handle calls that return values in multiple non-contiguous locations.
4329 The Irix 6 ABI has examples of this. */
4330 else if (GET_CODE (target
) == PARALLEL
)
4331 emit_group_load (target
, temp
, TREE_TYPE (exp
),
4332 int_size_in_bytes (TREE_TYPE (exp
)));
4333 else if (GET_MODE (temp
) == BLKmode
)
4334 emit_block_move (target
, temp
, expr_size (exp
),
4336 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
4339 temp
= force_operand (temp
, target
);
4341 emit_move_insn (target
, temp
);
4345 /* If we don't want a value, return NULL_RTX. */
4346 if ((want_value
& 1) == 0)
4349 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4350 ??? The latter test doesn't seem to make sense. */
4351 else if (dont_return_target
&& GET_CODE (temp
) != MEM
)
4354 /* Return TARGET itself if it is a hard register. */
4355 else if ((want_value
& 1) != 0
4356 && GET_MODE (target
) != BLKmode
4357 && ! (GET_CODE (target
) == REG
4358 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
4359 return copy_to_reg (target
);
4365 /* Return 1 if EXP just contains zeros. FIXME merge with initializer_zerop. */
4368 is_zeros_p (tree exp
)
4372 switch (TREE_CODE (exp
))
4376 case NON_LVALUE_EXPR
:
4377 case VIEW_CONVERT_EXPR
:
4378 return is_zeros_p (TREE_OPERAND (exp
, 0));
4381 return integer_zerop (exp
);
4385 is_zeros_p (TREE_REALPART (exp
)) && is_zeros_p (TREE_IMAGPART (exp
));
4388 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp
), dconst0
);
4391 for (elt
= TREE_VECTOR_CST_ELTS (exp
); elt
;
4392 elt
= TREE_CHAIN (elt
))
4393 if (!is_zeros_p (TREE_VALUE (elt
)))
4399 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
4400 return CONSTRUCTOR_ELTS (exp
) == NULL_TREE
;
4401 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4402 if (! is_zeros_p (TREE_VALUE (elt
)))
4412 /* Return 1 if EXP contains mostly (3/4) zeros. */
4415 mostly_zeros_p (tree exp
)
4417 if (TREE_CODE (exp
) == CONSTRUCTOR
)
4419 int elts
= 0, zeros
= 0;
4420 tree elt
= CONSTRUCTOR_ELTS (exp
);
4421 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
4423 /* If there are no ranges of true bits, it is all zero. */
4424 return elt
== NULL_TREE
;
4426 for (; elt
; elt
= TREE_CHAIN (elt
))
4428 /* We do not handle the case where the index is a RANGE_EXPR,
4429 so the statistic will be somewhat inaccurate.
4430 We do make a more accurate count in store_constructor itself,
4431 so since this function is only used for nested array elements,
4432 this should be close enough. */
4433 if (mostly_zeros_p (TREE_VALUE (elt
)))
4438 return 4 * zeros
>= 3 * elts
;
4441 return is_zeros_p (exp
);
4444 /* Helper function for store_constructor.
4445 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4446 TYPE is the type of the CONSTRUCTOR, not the element type.
4447 CLEARED is as for store_constructor.
4448 ALIAS_SET is the alias set to use for any stores.
4450 This provides a recursive shortcut back to store_constructor when it isn't
4451 necessary to go through store_field. This is so that we can pass through
4452 the cleared field to let store_constructor know that we may not have to
4453 clear a substructure if the outer structure has already been cleared. */
4456 store_constructor_field (rtx target
, unsigned HOST_WIDE_INT bitsize
,
4457 HOST_WIDE_INT bitpos
, enum machine_mode mode
,
4458 tree exp
, tree type
, int cleared
, int alias_set
)
4460 if (TREE_CODE (exp
) == CONSTRUCTOR
4461 && bitpos
% BITS_PER_UNIT
== 0
4462 /* If we have a nonzero bitpos for a register target, then we just
4463 let store_field do the bitfield handling. This is unlikely to
4464 generate unnecessary clear instructions anyways. */
4465 && (bitpos
== 0 || GET_CODE (target
) == MEM
))
4467 if (GET_CODE (target
) == MEM
)
4469 = adjust_address (target
,
4470 GET_MODE (target
) == BLKmode
4472 % GET_MODE_ALIGNMENT (GET_MODE (target
)))
4473 ? BLKmode
: VOIDmode
, bitpos
/ BITS_PER_UNIT
);
4476 /* Update the alias set, if required. */
4477 if (GET_CODE (target
) == MEM
&& ! MEM_KEEP_ALIAS_SET_P (target
)
4478 && MEM_ALIAS_SET (target
) != 0)
4480 target
= copy_rtx (target
);
4481 set_mem_alias_set (target
, alias_set
);
4484 store_constructor (exp
, target
, cleared
, bitsize
/ BITS_PER_UNIT
);
4487 store_field (target
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0, type
,
4491 /* Store the value of constructor EXP into the rtx TARGET.
4492 TARGET is either a REG or a MEM; we know it cannot conflict, since
4493 safe_from_p has been called.
4494 CLEARED is true if TARGET is known to have been zero'd.
4495 SIZE is the number of bytes of TARGET we are allowed to modify: this
4496 may not be the same as the size of EXP if we are assigning to a field
4497 which has been packed to exclude padding bits. */
4500 store_constructor (tree exp
, rtx target
, int cleared
, HOST_WIDE_INT size
)
4502 tree type
= TREE_TYPE (exp
);
4503 #ifdef WORD_REGISTER_OPERATIONS
4504 HOST_WIDE_INT exp_size
= int_size_in_bytes (type
);
4507 if (TREE_CODE (type
) == RECORD_TYPE
|| TREE_CODE (type
) == UNION_TYPE
4508 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4512 /* If size is zero or the target is already cleared, do nothing. */
4513 if (size
== 0 || cleared
)
4515 /* We either clear the aggregate or indicate the value is dead. */
4516 else if ((TREE_CODE (type
) == UNION_TYPE
4517 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4518 && ! CONSTRUCTOR_ELTS (exp
))
4519 /* If the constructor is empty, clear the union. */
4521 clear_storage (target
, expr_size (exp
));
4525 /* If we are building a static constructor into a register,
4526 set the initial value as zero so we can fold the value into
4527 a constant. But if more than one register is involved,
4528 this probably loses. */
4529 else if (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)
4530 && GET_MODE_SIZE (GET_MODE (target
)) <= UNITS_PER_WORD
)
4532 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
4536 /* If the constructor has fewer fields than the structure
4537 or if we are initializing the structure to mostly zeros,
4538 clear the whole structure first. Don't do this if TARGET is a
4539 register whose mode size isn't equal to SIZE since clear_storage
4540 can't handle this case. */
4541 else if (((list_length (CONSTRUCTOR_ELTS (exp
)) != fields_length (type
))
4542 || mostly_zeros_p (exp
))
4543 && (GET_CODE (target
) != REG
4544 || ((HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (target
))
4547 rtx xtarget
= target
;
4549 if (readonly_fields_p (type
))
4551 xtarget
= copy_rtx (xtarget
);
4552 RTX_UNCHANGING_P (xtarget
) = 1;
4555 clear_storage (xtarget
, GEN_INT (size
));
4560 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4562 /* Store each element of the constructor into
4563 the corresponding field of TARGET. */
4565 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4567 tree field
= TREE_PURPOSE (elt
);
4568 tree value
= TREE_VALUE (elt
);
4569 enum machine_mode mode
;
4570 HOST_WIDE_INT bitsize
;
4571 HOST_WIDE_INT bitpos
= 0;
4573 rtx to_rtx
= target
;
4575 /* Just ignore missing fields.
4576 We cleared the whole structure, above,
4577 if any fields are missing. */
4581 if (cleared
&& is_zeros_p (value
))
4584 if (host_integerp (DECL_SIZE (field
), 1))
4585 bitsize
= tree_low_cst (DECL_SIZE (field
), 1);
4589 mode
= DECL_MODE (field
);
4590 if (DECL_BIT_FIELD (field
))
4593 offset
= DECL_FIELD_OFFSET (field
);
4594 if (host_integerp (offset
, 0)
4595 && host_integerp (bit_position (field
), 0))
4597 bitpos
= int_bit_position (field
);
4601 bitpos
= tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 0);
4607 if (CONTAINS_PLACEHOLDER_P (offset
))
4608 offset
= build (WITH_RECORD_EXPR
, sizetype
,
4609 offset
, make_tree (TREE_TYPE (exp
), target
));
4611 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
4612 if (GET_CODE (to_rtx
) != MEM
)
4615 #ifdef POINTERS_EXTEND_UNSIGNED
4616 if (GET_MODE (offset_rtx
) != Pmode
)
4617 offset_rtx
= convert_to_mode (Pmode
, offset_rtx
, 0);
4619 if (GET_MODE (offset_rtx
) != ptr_mode
)
4620 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
4623 to_rtx
= offset_address (to_rtx
, offset_rtx
,
4624 highest_pow2_factor (offset
));
4627 if (TREE_READONLY (field
))
4629 if (GET_CODE (to_rtx
) == MEM
)
4630 to_rtx
= copy_rtx (to_rtx
);
4632 RTX_UNCHANGING_P (to_rtx
) = 1;
4635 #ifdef WORD_REGISTER_OPERATIONS
4636 /* If this initializes a field that is smaller than a word, at the
4637 start of a word, try to widen it to a full word.
4638 This special case allows us to output C++ member function
4639 initializations in a form that the optimizers can understand. */
4640 if (GET_CODE (target
) == REG
4641 && bitsize
< BITS_PER_WORD
4642 && bitpos
% BITS_PER_WORD
== 0
4643 && GET_MODE_CLASS (mode
) == MODE_INT
4644 && TREE_CODE (value
) == INTEGER_CST
4646 && bitpos
+ BITS_PER_WORD
<= exp_size
* BITS_PER_UNIT
)
4648 tree type
= TREE_TYPE (value
);
4650 if (TYPE_PRECISION (type
) < BITS_PER_WORD
)
4652 type
= (*lang_hooks
.types
.type_for_size
)
4653 (BITS_PER_WORD
, TREE_UNSIGNED (type
));
4654 value
= convert (type
, value
);
4657 if (BYTES_BIG_ENDIAN
)
4659 = fold (build (LSHIFT_EXPR
, type
, value
,
4660 build_int_2 (BITS_PER_WORD
- bitsize
, 0)));
4661 bitsize
= BITS_PER_WORD
;
4666 if (GET_CODE (to_rtx
) == MEM
&& !MEM_KEEP_ALIAS_SET_P (to_rtx
)
4667 && DECL_NONADDRESSABLE_P (field
))
4669 to_rtx
= copy_rtx (to_rtx
);
4670 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
4673 store_constructor_field (to_rtx
, bitsize
, bitpos
, mode
,
4674 value
, type
, cleared
,
4675 get_alias_set (TREE_TYPE (field
)));
4678 else if (TREE_CODE (type
) == ARRAY_TYPE
4679 || TREE_CODE (type
) == VECTOR_TYPE
)
4684 tree domain
= TYPE_DOMAIN (type
);
4685 tree elttype
= TREE_TYPE (type
);
4687 HOST_WIDE_INT minelt
= 0;
4688 HOST_WIDE_INT maxelt
= 0;
4692 unsigned n_elts
= 0;
4694 /* Vectors are like arrays, but the domain is stored via an array
4696 if (TREE_CODE (type
) == VECTOR_TYPE
)
4698 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4699 the same field as TYPE_DOMAIN, we are not guaranteed that
4701 domain
= TYPE_DEBUG_REPRESENTATION_TYPE (type
);
4702 domain
= TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain
)));
4703 if (REG_P (target
) && VECTOR_MODE_P (GET_MODE (target
)))
4705 enum machine_mode mode
= GET_MODE (target
);
4707 icode
= (int) vec_init_optab
->handlers
[mode
].insn_code
;
4708 if (icode
!= CODE_FOR_nothing
)
4712 elt_size
= GET_MODE_SIZE (GET_MODE_INNER (mode
));
4713 n_elts
= (GET_MODE_SIZE (mode
) / elt_size
);
4714 vector
= alloca (n_elts
);
4715 for (i
= 0; i
< n_elts
; i
++)
4716 vector
[i
] = CONST0_RTX (GET_MODE_INNER (mode
));
4721 const_bounds_p
= (TYPE_MIN_VALUE (domain
)
4722 && TYPE_MAX_VALUE (domain
)
4723 && host_integerp (TYPE_MIN_VALUE (domain
), 0)
4724 && host_integerp (TYPE_MAX_VALUE (domain
), 0));
4726 /* If we have constant bounds for the range of the type, get them. */
4729 minelt
= tree_low_cst (TYPE_MIN_VALUE (domain
), 0);
4730 maxelt
= tree_low_cst (TYPE_MAX_VALUE (domain
), 0);
4733 /* If the constructor has fewer elements than the array,
4734 clear the whole array first. Similarly if this is
4735 static constructor of a non-BLKmode object. */
4736 if (cleared
|| (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)))
4740 HOST_WIDE_INT count
= 0, zero_count
= 0;
4741 need_to_clear
= ! const_bounds_p
;
4743 /* This loop is a more accurate version of the loop in
4744 mostly_zeros_p (it handles RANGE_EXPR in an index).
4745 It is also needed to check for missing elements. */
4746 for (elt
= CONSTRUCTOR_ELTS (exp
);
4747 elt
!= NULL_TREE
&& ! need_to_clear
;
4748 elt
= TREE_CHAIN (elt
))
4750 tree index
= TREE_PURPOSE (elt
);
4751 HOST_WIDE_INT this_node_count
;
4753 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4755 tree lo_index
= TREE_OPERAND (index
, 0);
4756 tree hi_index
= TREE_OPERAND (index
, 1);
4758 if (! host_integerp (lo_index
, 1)
4759 || ! host_integerp (hi_index
, 1))
4765 this_node_count
= (tree_low_cst (hi_index
, 1)
4766 - tree_low_cst (lo_index
, 1) + 1);
4769 this_node_count
= 1;
4771 count
+= this_node_count
;
4772 if (mostly_zeros_p (TREE_VALUE (elt
)))
4773 zero_count
+= this_node_count
;
4776 /* Clear the entire array first if there are any missing elements,
4777 or if the incidence of zero elements is >= 75%. */
4779 && (count
< maxelt
- minelt
+ 1 || 4 * zero_count
>= 3 * count
))
4783 if (need_to_clear
&& size
> 0 && !vector
)
4788 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
4790 clear_storage (target
, GEN_INT (size
));
4794 else if (REG_P (target
))
4795 /* Inform later passes that the old value is dead. */
4796 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4798 /* Store each element of the constructor into
4799 the corresponding element of TARGET, determined
4800 by counting the elements. */
4801 for (elt
= CONSTRUCTOR_ELTS (exp
), i
= 0;
4803 elt
= TREE_CHAIN (elt
), i
++)
4805 enum machine_mode mode
;
4806 HOST_WIDE_INT bitsize
;
4807 HOST_WIDE_INT bitpos
;
4809 tree value
= TREE_VALUE (elt
);
4810 tree index
= TREE_PURPOSE (elt
);
4811 rtx xtarget
= target
;
4813 if (cleared
&& is_zeros_p (value
))
4816 unsignedp
= TREE_UNSIGNED (elttype
);
4817 mode
= TYPE_MODE (elttype
);
4818 if (mode
== BLKmode
)
4819 bitsize
= (host_integerp (TYPE_SIZE (elttype
), 1)
4820 ? tree_low_cst (TYPE_SIZE (elttype
), 1)
4823 bitsize
= GET_MODE_BITSIZE (mode
);
4825 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4827 tree lo_index
= TREE_OPERAND (index
, 0);
4828 tree hi_index
= TREE_OPERAND (index
, 1);
4829 rtx index_r
, pos_rtx
, loop_end
;
4830 struct nesting
*loop
;
4831 HOST_WIDE_INT lo
, hi
, count
;
4837 /* If the range is constant and "small", unroll the loop. */
4839 && host_integerp (lo_index
, 0)
4840 && host_integerp (hi_index
, 0)
4841 && (lo
= tree_low_cst (lo_index
, 0),
4842 hi
= tree_low_cst (hi_index
, 0),
4843 count
= hi
- lo
+ 1,
4844 (GET_CODE (target
) != MEM
4846 || (host_integerp (TYPE_SIZE (elttype
), 1)
4847 && (tree_low_cst (TYPE_SIZE (elttype
), 1) * count
4850 lo
-= minelt
; hi
-= minelt
;
4851 for (; lo
<= hi
; lo
++)
4853 bitpos
= lo
* tree_low_cst (TYPE_SIZE (elttype
), 0);
4855 if (GET_CODE (target
) == MEM
4856 && !MEM_KEEP_ALIAS_SET_P (target
)
4857 && TREE_CODE (type
) == ARRAY_TYPE
4858 && TYPE_NONALIASED_COMPONENT (type
))
4860 target
= copy_rtx (target
);
4861 MEM_KEEP_ALIAS_SET_P (target
) = 1;
4864 store_constructor_field
4865 (target
, bitsize
, bitpos
, mode
, value
, type
, cleared
,
4866 get_alias_set (elttype
));
4871 expand_expr (hi_index
, NULL_RTX
, VOIDmode
, 0);
4872 loop_end
= gen_label_rtx ();
4874 unsignedp
= TREE_UNSIGNED (domain
);
4876 index
= build_decl (VAR_DECL
, NULL_TREE
, domain
);
4879 = gen_reg_rtx (promote_mode (domain
, DECL_MODE (index
),
4881 SET_DECL_RTL (index
, index_r
);
4882 if (TREE_CODE (value
) == SAVE_EXPR
4883 && SAVE_EXPR_RTL (value
) == 0)
4885 /* Make sure value gets expanded once before the
4887 expand_expr (value
, const0_rtx
, VOIDmode
, 0);
4890 store_expr (lo_index
, index_r
, 0);
4891 loop
= expand_start_loop (0);
4893 /* Assign value to element index. */
4895 = convert (ssizetype
,
4896 fold (build (MINUS_EXPR
, TREE_TYPE (index
),
4897 index
, TYPE_MIN_VALUE (domain
))));
4898 position
= size_binop (MULT_EXPR
, position
,
4900 TYPE_SIZE_UNIT (elttype
)));
4902 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
4903 xtarget
= offset_address (target
, pos_rtx
,
4904 highest_pow2_factor (position
));
4905 xtarget
= adjust_address (xtarget
, mode
, 0);
4906 if (TREE_CODE (value
) == CONSTRUCTOR
)
4907 store_constructor (value
, xtarget
, cleared
,
4908 bitsize
/ BITS_PER_UNIT
);
4910 store_expr (value
, xtarget
, 0);
4912 expand_exit_loop_if_false (loop
,
4913 build (LT_EXPR
, integer_type_node
,
4916 expand_increment (build (PREINCREMENT_EXPR
,
4918 index
, integer_one_node
), 0, 0);
4920 emit_label (loop_end
);
4923 else if ((index
!= 0 && ! host_integerp (index
, 0))
4924 || ! host_integerp (TYPE_SIZE (elttype
), 1))
4932 index
= ssize_int (1);
4935 index
= convert (ssizetype
,
4936 fold (build (MINUS_EXPR
, index
,
4937 TYPE_MIN_VALUE (domain
))));
4939 position
= size_binop (MULT_EXPR
, index
,
4941 TYPE_SIZE_UNIT (elttype
)));
4942 xtarget
= offset_address (target
,
4943 expand_expr (position
, 0, VOIDmode
, 0),
4944 highest_pow2_factor (position
));
4945 xtarget
= adjust_address (xtarget
, mode
, 0);
4946 store_expr (value
, xtarget
, 0);
4953 pos
= tree_low_cst (index
, 0) - minelt
;
4956 vector
[pos
] = expand_expr (value
, NULL_RTX
, VOIDmode
, 0);
4961 bitpos
= ((tree_low_cst (index
, 0) - minelt
)
4962 * tree_low_cst (TYPE_SIZE (elttype
), 1));
4964 bitpos
= (i
* tree_low_cst (TYPE_SIZE (elttype
), 1));
4966 if (GET_CODE (target
) == MEM
&& !MEM_KEEP_ALIAS_SET_P (target
)
4967 && TREE_CODE (type
) == ARRAY_TYPE
4968 && TYPE_NONALIASED_COMPONENT (type
))
4970 target
= copy_rtx (target
);
4971 MEM_KEEP_ALIAS_SET_P (target
) = 1;
4973 store_constructor_field (target
, bitsize
, bitpos
, mode
, value
,
4974 type
, cleared
, get_alias_set (elttype
));
4979 emit_insn (GEN_FCN (icode
) (target
,
4980 gen_rtx_PARALLEL (GET_MODE (target
),
4981 gen_rtvec_v (n_elts
, vector
))));
4985 /* Set constructor assignments. */
4986 else if (TREE_CODE (type
) == SET_TYPE
)
4988 tree elt
= CONSTRUCTOR_ELTS (exp
);
4989 unsigned HOST_WIDE_INT nbytes
= int_size_in_bytes (type
), nbits
;
4990 tree domain
= TYPE_DOMAIN (type
);
4991 tree domain_min
, domain_max
, bitlength
;
4993 /* The default implementation strategy is to extract the constant
4994 parts of the constructor, use that to initialize the target,
4995 and then "or" in whatever non-constant ranges we need in addition.
4997 If a large set is all zero or all ones, it is
4998 probably better to set it using memset (if available) or bzero.
4999 Also, if a large set has just a single range, it may also be
5000 better to first clear all the first clear the set (using
5001 bzero/memset), and set the bits we want. */
5003 /* Check for all zeros. */
5004 if (elt
== NULL_TREE
&& size
> 0)
5007 clear_storage (target
, GEN_INT (size
));
5011 domain_min
= convert (sizetype
, TYPE_MIN_VALUE (domain
));
5012 domain_max
= convert (sizetype
, TYPE_MAX_VALUE (domain
));
5013 bitlength
= size_binop (PLUS_EXPR
,
5014 size_diffop (domain_max
, domain_min
),
5017 nbits
= tree_low_cst (bitlength
, 1);
5019 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5020 are "complicated" (more than one range), initialize (the
5021 constant parts) by copying from a constant. */
5022 if (GET_MODE (target
) != BLKmode
|| nbits
<= 2 * BITS_PER_WORD
5023 || (nbytes
<= 32 && TREE_CHAIN (elt
) != NULL_TREE
))
5025 unsigned int set_word_size
= TYPE_ALIGN (TREE_TYPE (exp
));
5026 enum machine_mode mode
= mode_for_size (set_word_size
, MODE_INT
, 1);
5027 char *bit_buffer
= alloca (nbits
);
5028 HOST_WIDE_INT word
= 0;
5029 unsigned int bit_pos
= 0;
5030 unsigned int ibit
= 0;
5031 unsigned int offset
= 0; /* In bytes from beginning of set. */
5033 elt
= get_set_constructor_bits (exp
, bit_buffer
, nbits
);
5036 if (bit_buffer
[ibit
])
5038 if (BYTES_BIG_ENDIAN
)
5039 word
|= (1 << (set_word_size
- 1 - bit_pos
));
5041 word
|= 1 << bit_pos
;
5045 if (bit_pos
>= set_word_size
|| ibit
== nbits
)
5047 if (word
!= 0 || ! cleared
)
5049 rtx datum
= GEN_INT (word
);
5052 /* The assumption here is that it is safe to use
5053 XEXP if the set is multi-word, but not if
5054 it's single-word. */
5055 if (GET_CODE (target
) == MEM
)
5056 to_rtx
= adjust_address (target
, mode
, offset
);
5057 else if (offset
== 0)
5061 emit_move_insn (to_rtx
, datum
);
5068 offset
+= set_word_size
/ BITS_PER_UNIT
;
5073 /* Don't bother clearing storage if the set is all ones. */
5074 if (TREE_CHAIN (elt
) != NULL_TREE
5075 || (TREE_PURPOSE (elt
) == NULL_TREE
5077 : ( ! host_integerp (TREE_VALUE (elt
), 0)
5078 || ! host_integerp (TREE_PURPOSE (elt
), 0)
5079 || (tree_low_cst (TREE_VALUE (elt
), 0)
5080 - tree_low_cst (TREE_PURPOSE (elt
), 0) + 1
5081 != (HOST_WIDE_INT
) nbits
))))
5082 clear_storage (target
, expr_size (exp
));
5084 for (; elt
!= NULL_TREE
; elt
= TREE_CHAIN (elt
))
5086 /* Start of range of element or NULL. */
5087 tree startbit
= TREE_PURPOSE (elt
);
5088 /* End of range of element, or element value. */
5089 tree endbit
= TREE_VALUE (elt
);
5090 HOST_WIDE_INT startb
, endb
;
5091 rtx bitlength_rtx
, startbit_rtx
, endbit_rtx
, targetx
;
5093 bitlength_rtx
= expand_expr (bitlength
,
5094 NULL_RTX
, MEM
, EXPAND_CONST_ADDRESS
);
5096 /* Handle non-range tuple element like [ expr ]. */
5097 if (startbit
== NULL_TREE
)
5099 startbit
= save_expr (endbit
);
5103 startbit
= convert (sizetype
, startbit
);
5104 endbit
= convert (sizetype
, endbit
);
5105 if (! integer_zerop (domain_min
))
5107 startbit
= size_binop (MINUS_EXPR
, startbit
, domain_min
);
5108 endbit
= size_binop (MINUS_EXPR
, endbit
, domain_min
);
5110 startbit_rtx
= expand_expr (startbit
, NULL_RTX
, MEM
,
5111 EXPAND_CONST_ADDRESS
);
5112 endbit_rtx
= expand_expr (endbit
, NULL_RTX
, MEM
,
5113 EXPAND_CONST_ADDRESS
);
5119 ((build_qualified_type ((*lang_hooks
.types
.type_for_mode
)
5120 (GET_MODE (target
), 0),
5123 emit_move_insn (targetx
, target
);
5126 else if (GET_CODE (target
) == MEM
)
5131 /* Optimization: If startbit and endbit are constants divisible
5132 by BITS_PER_UNIT, call memset instead. */
5133 if (TARGET_MEM_FUNCTIONS
5134 && TREE_CODE (startbit
) == INTEGER_CST
5135 && TREE_CODE (endbit
) == INTEGER_CST
5136 && (startb
= TREE_INT_CST_LOW (startbit
)) % BITS_PER_UNIT
== 0
5137 && (endb
= TREE_INT_CST_LOW (endbit
) + 1) % BITS_PER_UNIT
== 0)
5139 emit_library_call (memset_libfunc
, LCT_NORMAL
,
5141 plus_constant (XEXP (targetx
, 0),
5142 startb
/ BITS_PER_UNIT
),
5144 constm1_rtx
, TYPE_MODE (integer_type_node
),
5145 GEN_INT ((endb
- startb
) / BITS_PER_UNIT
),
5146 TYPE_MODE (sizetype
));
5149 emit_library_call (setbits_libfunc
, LCT_NORMAL
,
5150 VOIDmode
, 4, XEXP (targetx
, 0),
5151 Pmode
, bitlength_rtx
, TYPE_MODE (sizetype
),
5152 startbit_rtx
, TYPE_MODE (sizetype
),
5153 endbit_rtx
, TYPE_MODE (sizetype
));
5156 emit_move_insn (target
, targetx
);
5164 /* Store the value of EXP (an expression tree)
5165 into a subfield of TARGET which has mode MODE and occupies
5166 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5167 If MODE is VOIDmode, it means that we are storing into a bit-field.
5169 If VALUE_MODE is VOIDmode, return nothing in particular.
5170 UNSIGNEDP is not used in this case.
5172 Otherwise, return an rtx for the value stored. This rtx
5173 has mode VALUE_MODE if that is convenient to do.
5174 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5176 TYPE is the type of the underlying object,
5178 ALIAS_SET is the alias set for the destination. This value will
5179 (in general) be different from that for TARGET, since TARGET is a
5180 reference to the containing structure. */
5183 store_field (rtx target
, HOST_WIDE_INT bitsize
, HOST_WIDE_INT bitpos
,
5184 enum machine_mode mode
, tree exp
, enum machine_mode value_mode
,
5185 int unsignedp
, tree type
, int alias_set
)
5187 HOST_WIDE_INT width_mask
= 0;
5189 if (TREE_CODE (exp
) == ERROR_MARK
)
5192 /* If we have nothing to store, do nothing unless the expression has
5195 return expand_expr (exp
, const0_rtx
, VOIDmode
, 0);
5196 else if (bitsize
>= 0 && bitsize
< HOST_BITS_PER_WIDE_INT
)
5197 width_mask
= ((HOST_WIDE_INT
) 1 << bitsize
) - 1;
5199 /* If we are storing into an unaligned field of an aligned union that is
5200 in a register, we may have the mode of TARGET being an integer mode but
5201 MODE == BLKmode. In that case, get an aligned object whose size and
5202 alignment are the same as TARGET and store TARGET into it (we can avoid
5203 the store if the field being stored is the entire width of TARGET). Then
5204 call ourselves recursively to store the field into a BLKmode version of
5205 that object. Finally, load from the object into TARGET. This is not
5206 very efficient in general, but should only be slightly more expensive
5207 than the otherwise-required unaligned accesses. Perhaps this can be
5208 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5209 twice, once with emit_move_insn and once via store_field. */
5212 && (GET_CODE (target
) == REG
|| GET_CODE (target
) == SUBREG
))
5214 rtx object
= assign_temp (type
, 0, 1, 1);
5215 rtx blk_object
= adjust_address (object
, BLKmode
, 0);
5217 if (bitsize
!= (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (target
)))
5218 emit_move_insn (object
, target
);
5220 store_field (blk_object
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0, type
,
5223 emit_move_insn (target
, object
);
5225 /* We want to return the BLKmode version of the data. */
5229 if (GET_CODE (target
) == CONCAT
)
5231 /* We're storing into a struct containing a single __complex. */
5235 return store_expr (exp
, target
, 0);
5238 /* If the structure is in a register or if the component
5239 is a bit field, we cannot use addressing to access it.
5240 Use bit-field techniques or SUBREG to store in it. */
5242 if (mode
== VOIDmode
5243 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
]
5244 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
5245 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
5246 || GET_CODE (target
) == REG
5247 || GET_CODE (target
) == SUBREG
5248 /* If the field isn't aligned enough to store as an ordinary memref,
5249 store it as a bit field. */
5251 && ((((MEM_ALIGN (target
) < GET_MODE_ALIGNMENT (mode
))
5252 || bitpos
% GET_MODE_ALIGNMENT (mode
))
5253 && SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (target
)))
5254 || (bitpos
% BITS_PER_UNIT
!= 0)))
5255 /* If the RHS and field are a constant size and the size of the
5256 RHS isn't the same size as the bitfield, we must use bitfield
5259 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
5260 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)), bitsize
) != 0))
5262 rtx temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
5264 /* If BITSIZE is narrower than the size of the type of EXP
5265 we will be narrowing TEMP. Normally, what's wanted are the
5266 low-order bits. However, if EXP's type is a record and this is
5267 big-endian machine, we want the upper BITSIZE bits. */
5268 if (BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
5269 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (temp
))
5270 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
)
5271 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
5272 size_int (GET_MODE_BITSIZE (GET_MODE (temp
))
5276 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5278 if (mode
!= VOIDmode
&& mode
!= BLKmode
5279 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
5280 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
5282 /* If the modes of TARGET and TEMP are both BLKmode, both
5283 must be in memory and BITPOS must be aligned on a byte
5284 boundary. If so, we simply do a block copy. */
5285 if (GET_MODE (target
) == BLKmode
&& GET_MODE (temp
) == BLKmode
)
5287 if (GET_CODE (target
) != MEM
|| GET_CODE (temp
) != MEM
5288 || bitpos
% BITS_PER_UNIT
!= 0)
5291 target
= adjust_address (target
, VOIDmode
, bitpos
/ BITS_PER_UNIT
);
5292 emit_block_move (target
, temp
,
5293 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
5297 return value_mode
== VOIDmode
? const0_rtx
: target
;
5300 /* Store the value in the bitfield. */
5301 store_bit_field (target
, bitsize
, bitpos
, mode
, temp
,
5302 int_size_in_bytes (type
));
5304 if (value_mode
!= VOIDmode
)
5306 /* The caller wants an rtx for the value.
5307 If possible, avoid refetching from the bitfield itself. */
5309 && ! (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
)))
5312 enum machine_mode tmode
;
5314 tmode
= GET_MODE (temp
);
5315 if (tmode
== VOIDmode
)
5319 return expand_and (tmode
, temp
,
5320 gen_int_mode (width_mask
, tmode
),
5323 count
= build_int_2 (GET_MODE_BITSIZE (tmode
) - bitsize
, 0);
5324 temp
= expand_shift (LSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5325 return expand_shift (RSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5328 return extract_bit_field (target
, bitsize
, bitpos
, unsignedp
,
5329 NULL_RTX
, value_mode
, VOIDmode
,
5330 int_size_in_bytes (type
));
5336 rtx addr
= XEXP (target
, 0);
5337 rtx to_rtx
= target
;
5339 /* If a value is wanted, it must be the lhs;
5340 so make the address stable for multiple use. */
5342 if (value_mode
!= VOIDmode
&& GET_CODE (addr
) != REG
5343 && ! CONSTANT_ADDRESS_P (addr
)
5344 /* A frame-pointer reference is already stable. */
5345 && ! (GET_CODE (addr
) == PLUS
5346 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
5347 && (XEXP (addr
, 0) == virtual_incoming_args_rtx
5348 || XEXP (addr
, 0) == virtual_stack_vars_rtx
)))
5349 to_rtx
= replace_equiv_address (to_rtx
, copy_to_reg (addr
));
5351 /* Now build a reference to just the desired component. */
5353 to_rtx
= adjust_address (target
, mode
, bitpos
/ BITS_PER_UNIT
);
5355 if (to_rtx
== target
)
5356 to_rtx
= copy_rtx (to_rtx
);
5358 MEM_SET_IN_STRUCT_P (to_rtx
, 1);
5359 if (!MEM_KEEP_ALIAS_SET_P (to_rtx
) && MEM_ALIAS_SET (to_rtx
) != 0)
5360 set_mem_alias_set (to_rtx
, alias_set
);
5362 return store_expr (exp
, to_rtx
, value_mode
!= VOIDmode
);
5366 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5367 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5368 codes and find the ultimate containing object, which we return.
5370 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5371 bit position, and *PUNSIGNEDP to the signedness of the field.
5372 If the position of the field is variable, we store a tree
5373 giving the variable offset (in units) in *POFFSET.
5374 This offset is in addition to the bit position.
5375 If the position is not variable, we store 0 in *POFFSET.
5377 If any of the extraction expressions is volatile,
5378 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5380 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5381 is a mode that can be used to access the field. In that case, *PBITSIZE
5384 If the field describes a variable-sized object, *PMODE is set to
5385 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5386 this case, but the address of the object can be found. */
5389 get_inner_reference (tree exp
, HOST_WIDE_INT
*pbitsize
,
5390 HOST_WIDE_INT
*pbitpos
, tree
*poffset
,
5391 enum machine_mode
*pmode
, int *punsignedp
,
5395 enum machine_mode mode
= VOIDmode
;
5396 tree offset
= size_zero_node
;
5397 tree bit_offset
= bitsize_zero_node
;
5398 tree placeholder_ptr
= 0;
5401 /* First get the mode, signedness, and size. We do this from just the
5402 outermost expression. */
5403 if (TREE_CODE (exp
) == COMPONENT_REF
)
5405 size_tree
= DECL_SIZE (TREE_OPERAND (exp
, 1));
5406 if (! DECL_BIT_FIELD (TREE_OPERAND (exp
, 1)))
5407 mode
= DECL_MODE (TREE_OPERAND (exp
, 1));
5409 *punsignedp
= TREE_UNSIGNED (TREE_OPERAND (exp
, 1));
5411 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5413 size_tree
= TREE_OPERAND (exp
, 1);
5414 *punsignedp
= TREE_UNSIGNED (exp
);
5418 mode
= TYPE_MODE (TREE_TYPE (exp
));
5419 *punsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
5421 if (mode
== BLKmode
)
5422 size_tree
= TYPE_SIZE (TREE_TYPE (exp
));
5424 *pbitsize
= GET_MODE_BITSIZE (mode
);
5429 if (! host_integerp (size_tree
, 1))
5430 mode
= BLKmode
, *pbitsize
= -1;
5432 *pbitsize
= tree_low_cst (size_tree
, 1);
5435 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5436 and find the ultimate containing object. */
5439 if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5440 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
, TREE_OPERAND (exp
, 2));
5441 else if (TREE_CODE (exp
) == COMPONENT_REF
)
5443 tree field
= TREE_OPERAND (exp
, 1);
5444 tree this_offset
= DECL_FIELD_OFFSET (field
);
5446 /* If this field hasn't been filled in yet, don't go
5447 past it. This should only happen when folding expressions
5448 made during type construction. */
5449 if (this_offset
== 0)
5451 else if (CONTAINS_PLACEHOLDER_P (this_offset
))
5452 this_offset
= build (WITH_RECORD_EXPR
, sizetype
, this_offset
, exp
);
5454 offset
= size_binop (PLUS_EXPR
, offset
, this_offset
);
5455 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
,
5456 DECL_FIELD_BIT_OFFSET (field
));
5458 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5461 else if (TREE_CODE (exp
) == ARRAY_REF
5462 || TREE_CODE (exp
) == ARRAY_RANGE_REF
)
5464 tree index
= TREE_OPERAND (exp
, 1);
5465 tree array
= TREE_OPERAND (exp
, 0);
5466 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
5467 tree low_bound
= (domain
? TYPE_MIN_VALUE (domain
) : 0);
5468 tree unit_size
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array
)));
5470 /* We assume all arrays have sizes that are a multiple of a byte.
5471 First subtract the lower bound, if any, in the type of the
5472 index, then convert to sizetype and multiply by the size of the
5474 if (low_bound
!= 0 && ! integer_zerop (low_bound
))
5475 index
= fold (build (MINUS_EXPR
, TREE_TYPE (index
),
5478 /* If the index has a self-referential type, pass it to a
5479 WITH_RECORD_EXPR; if the component size is, pass our
5480 component to one. */
5481 if (CONTAINS_PLACEHOLDER_P (index
))
5482 index
= build (WITH_RECORD_EXPR
, TREE_TYPE (index
), index
, exp
);
5483 if (CONTAINS_PLACEHOLDER_P (unit_size
))
5484 unit_size
= build (WITH_RECORD_EXPR
, sizetype
, unit_size
, array
);
5486 offset
= size_binop (PLUS_EXPR
, offset
,
5487 size_binop (MULT_EXPR
,
5488 convert (sizetype
, index
),
5492 else if (TREE_CODE (exp
) == PLACEHOLDER_EXPR
)
5494 tree
new = find_placeholder (exp
, &placeholder_ptr
);
5496 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5497 We might have been called from tree optimization where we
5498 haven't set up an object yet. */
5507 /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
5508 conversions that don't change the mode, and all view conversions
5509 except those that need to "step up" the alignment. */
5510 else if (TREE_CODE (exp
) != NON_LVALUE_EXPR
5511 && ! (TREE_CODE (exp
) == VIEW_CONVERT_EXPR
5512 && ! ((TYPE_ALIGN (TREE_TYPE (exp
))
5513 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0))))
5515 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
5516 < BIGGEST_ALIGNMENT
)
5517 && (TYPE_ALIGN_OK (TREE_TYPE (exp
))
5518 || TYPE_ALIGN_OK (TREE_TYPE
5519 (TREE_OPERAND (exp
, 0))))))
5520 && ! ((TREE_CODE (exp
) == NOP_EXPR
5521 || TREE_CODE (exp
) == CONVERT_EXPR
)
5522 && (TYPE_MODE (TREE_TYPE (exp
))
5523 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))))
5526 /* If any reference in the chain is volatile, the effect is volatile. */
5527 if (TREE_THIS_VOLATILE (exp
))
5530 exp
= TREE_OPERAND (exp
, 0);
5533 /* If OFFSET is constant, see if we can return the whole thing as a
5534 constant bit position. Otherwise, split it up. */
5535 if (host_integerp (offset
, 0)
5536 && 0 != (tem
= size_binop (MULT_EXPR
, convert (bitsizetype
, offset
),
5538 && 0 != (tem
= size_binop (PLUS_EXPR
, tem
, bit_offset
))
5539 && host_integerp (tem
, 0))
5540 *pbitpos
= tree_low_cst (tem
, 0), *poffset
= 0;
5542 *pbitpos
= tree_low_cst (bit_offset
, 0), *poffset
= offset
;
5548 /* Return 1 if T is an expression that get_inner_reference handles. */
5551 handled_component_p (tree t
)
5553 switch (TREE_CODE (t
))
5558 case ARRAY_RANGE_REF
:
5559 case NON_LVALUE_EXPR
:
5560 case VIEW_CONVERT_EXPR
:
5563 /* ??? Sure they are handled, but get_inner_reference may return
5564 a different PBITSIZE, depending upon whether the expression is
5565 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5568 return (TYPE_MODE (TREE_TYPE (t
))
5569 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t
, 0))));
5576 /* Given an rtx VALUE that may contain additions and multiplications, return
5577 an equivalent value that just refers to a register, memory, or constant.
5578 This is done by generating instructions to perform the arithmetic and
5579 returning a pseudo-register containing the value.
5581 The returned value may be a REG, SUBREG, MEM or constant. */
5584 force_operand (rtx value
, rtx target
)
5587 /* Use subtarget as the target for operand 0 of a binary operation. */
5588 rtx subtarget
= get_subtarget (target
);
5589 enum rtx_code code
= GET_CODE (value
);
5591 /* Check for subreg applied to an expression produced by loop optimizer. */
5593 && GET_CODE (SUBREG_REG (value
)) != REG
5594 && GET_CODE (SUBREG_REG (value
)) != MEM
)
5596 value
= simplify_gen_subreg (GET_MODE (value
),
5597 force_reg (GET_MODE (SUBREG_REG (value
)),
5598 force_operand (SUBREG_REG (value
),
5600 GET_MODE (SUBREG_REG (value
)),
5601 SUBREG_BYTE (value
));
5602 code
= GET_CODE (value
);
5605 /* Check for a PIC address load. */
5606 if ((code
== PLUS
|| code
== MINUS
)
5607 && XEXP (value
, 0) == pic_offset_table_rtx
5608 && (GET_CODE (XEXP (value
, 1)) == SYMBOL_REF
5609 || GET_CODE (XEXP (value
, 1)) == LABEL_REF
5610 || GET_CODE (XEXP (value
, 1)) == CONST
))
5613 subtarget
= gen_reg_rtx (GET_MODE (value
));
5614 emit_move_insn (subtarget
, value
);
5618 if (code
== ZERO_EXTEND
|| code
== SIGN_EXTEND
)
5621 target
= gen_reg_rtx (GET_MODE (value
));
5622 convert_move (target
, force_operand (XEXP (value
, 0), NULL
),
5623 code
== ZERO_EXTEND
);
5627 if (ARITHMETIC_P (value
))
5629 op2
= XEXP (value
, 1);
5630 if (!CONSTANT_P (op2
) && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
5632 if (code
== MINUS
&& GET_CODE (op2
) == CONST_INT
)
5635 op2
= negate_rtx (GET_MODE (value
), op2
);
5638 /* Check for an addition with OP2 a constant integer and our first
5639 operand a PLUS of a virtual register and something else. In that
5640 case, we want to emit the sum of the virtual register and the
5641 constant first and then add the other value. This allows virtual
5642 register instantiation to simply modify the constant rather than
5643 creating another one around this addition. */
5644 if (code
== PLUS
&& GET_CODE (op2
) == CONST_INT
5645 && GET_CODE (XEXP (value
, 0)) == PLUS
5646 && GET_CODE (XEXP (XEXP (value
, 0), 0)) == REG
5647 && REGNO (XEXP (XEXP (value
, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5648 && REGNO (XEXP (XEXP (value
, 0), 0)) <= LAST_VIRTUAL_REGISTER
)
5650 rtx temp
= expand_simple_binop (GET_MODE (value
), code
,
5651 XEXP (XEXP (value
, 0), 0), op2
,
5652 subtarget
, 0, OPTAB_LIB_WIDEN
);
5653 return expand_simple_binop (GET_MODE (value
), code
, temp
,
5654 force_operand (XEXP (XEXP (value
,
5656 target
, 0, OPTAB_LIB_WIDEN
);
5659 op1
= force_operand (XEXP (value
, 0), subtarget
);
5660 op2
= force_operand (op2
, NULL_RTX
);
5664 return expand_mult (GET_MODE (value
), op1
, op2
, target
, 1);
5666 if (!INTEGRAL_MODE_P (GET_MODE (value
)))
5667 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5668 target
, 1, OPTAB_LIB_WIDEN
);
5670 return expand_divmod (0,
5671 FLOAT_MODE_P (GET_MODE (value
))
5672 ? RDIV_EXPR
: TRUNC_DIV_EXPR
,
5673 GET_MODE (value
), op1
, op2
, target
, 0);
5676 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
5680 return expand_divmod (0, TRUNC_DIV_EXPR
, GET_MODE (value
), op1
, op2
,
5684 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
5688 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5689 target
, 0, OPTAB_LIB_WIDEN
);
5692 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
5693 target
, 1, OPTAB_LIB_WIDEN
);
5696 if (UNARY_P (value
))
5698 op1
= force_operand (XEXP (value
, 0), NULL_RTX
);
5699 return expand_simple_unop (GET_MODE (value
), code
, op1
, target
, 0);
5702 #ifdef INSN_SCHEDULING
5703 /* On machines that have insn scheduling, we want all memory reference to be
5704 explicit, so we need to deal with such paradoxical SUBREGs. */
5705 if (GET_CODE (value
) == SUBREG
&& GET_CODE (SUBREG_REG (value
)) == MEM
5706 && (GET_MODE_SIZE (GET_MODE (value
))
5707 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value
)))))
5709 = simplify_gen_subreg (GET_MODE (value
),
5710 force_reg (GET_MODE (SUBREG_REG (value
)),
5711 force_operand (SUBREG_REG (value
),
5713 GET_MODE (SUBREG_REG (value
)),
5714 SUBREG_BYTE (value
));
5720 /* Subroutine of expand_expr: return nonzero iff there is no way that
5721 EXP can reference X, which is being modified. TOP_P is nonzero if this
5722 call is going to be used to determine whether we need a temporary
5723 for EXP, as opposed to a recursive call to this function.
5725 It is always safe for this routine to return zero since it merely
5726 searches for optimization opportunities. */
5729 safe_from_p (rtx x
, tree exp
, int top_p
)
5733 static tree save_expr_list
;
5736 /* If EXP has varying size, we MUST use a target since we currently
5737 have no way of allocating temporaries of variable size
5738 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5739 So we assume here that something at a higher level has prevented a
5740 clash. This is somewhat bogus, but the best we can do. Only
5741 do this when X is BLKmode and when we are at the top level. */
5742 || (top_p
&& TREE_TYPE (exp
) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
5743 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
5744 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
5745 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
5746 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
5748 && GET_MODE (x
) == BLKmode
)
5749 /* If X is in the outgoing argument area, it is always safe. */
5750 || (GET_CODE (x
) == MEM
5751 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
5752 || (GET_CODE (XEXP (x
, 0)) == PLUS
5753 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
))))
5756 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5757 find the underlying pseudo. */
5758 if (GET_CODE (x
) == SUBREG
)
5761 if (GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5765 /* A SAVE_EXPR might appear many times in the expression passed to the
5766 top-level safe_from_p call, and if it has a complex subexpression,
5767 examining it multiple times could result in a combinatorial explosion.
5768 E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
5769 with optimization took about 28 minutes to compile -- even though it was
5770 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5771 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5772 we have processed. Note that the only test of top_p was above. */
5781 rtn
= safe_from_p (x
, exp
, 0);
5783 for (t
= save_expr_list
; t
!= 0; t
= TREE_CHAIN (t
))
5784 TREE_PRIVATE (TREE_PURPOSE (t
)) = 0;
5789 /* Now look at our tree code and possibly recurse. */
5790 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
5793 exp_rtl
= DECL_RTL_IF_SET (exp
);
5800 if (TREE_CODE (exp
) == TREE_LIST
)
5804 if (TREE_VALUE (exp
) && !safe_from_p (x
, TREE_VALUE (exp
), 0))
5806 exp
= TREE_CHAIN (exp
);
5809 if (TREE_CODE (exp
) != TREE_LIST
)
5810 return safe_from_p (x
, exp
, 0);
5813 else if (TREE_CODE (exp
) == ERROR_MARK
)
5814 return 1; /* An already-visited SAVE_EXPR? */
5820 if (!safe_from_p (x
, TREE_OPERAND (exp
, 1), 0))
5825 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5829 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5830 the expression. If it is set, we conflict iff we are that rtx or
5831 both are in memory. Otherwise, we check all operands of the
5832 expression recursively. */
5834 switch (TREE_CODE (exp
))
5837 /* If the operand is static or we are static, we can't conflict.
5838 Likewise if we don't conflict with the operand at all. */
5839 if (staticp (TREE_OPERAND (exp
, 0))
5840 || TREE_STATIC (exp
)
5841 || safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
5844 /* Otherwise, the only way this can conflict is if we are taking
5845 the address of a DECL a that address if part of X, which is
5847 exp
= TREE_OPERAND (exp
, 0);
5850 if (!DECL_RTL_SET_P (exp
)
5851 || GET_CODE (DECL_RTL (exp
)) != MEM
)
5854 exp_rtl
= XEXP (DECL_RTL (exp
), 0);
5859 if (GET_CODE (x
) == MEM
5860 && alias_sets_conflict_p (MEM_ALIAS_SET (x
),
5861 get_alias_set (exp
)))
5866 /* Assume that the call will clobber all hard registers and
5868 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5869 || GET_CODE (x
) == MEM
)
5874 /* If a sequence exists, we would have to scan every instruction
5875 in the sequence to see if it was safe. This is probably not
5877 if (RTL_EXPR_SEQUENCE (exp
))
5880 exp_rtl
= RTL_EXPR_RTL (exp
);
5883 case WITH_CLEANUP_EXPR
:
5884 exp_rtl
= WITH_CLEANUP_EXPR_RTL (exp
);
5887 case CLEANUP_POINT_EXPR
:
5888 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5891 exp_rtl
= SAVE_EXPR_RTL (exp
);
5895 /* If we've already scanned this, don't do it again. Otherwise,
5896 show we've scanned it and record for clearing the flag if we're
5898 if (TREE_PRIVATE (exp
))
5901 TREE_PRIVATE (exp
) = 1;
5902 if (! safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
5904 TREE_PRIVATE (exp
) = 0;
5908 save_expr_list
= tree_cons (exp
, NULL_TREE
, save_expr_list
);
5912 /* The only operand we look at is operand 1. The rest aren't
5913 part of the expression. */
5914 return safe_from_p (x
, TREE_OPERAND (exp
, 1), 0);
5920 /* If we have an rtx, we do not need to scan our operands. */
5924 nops
= first_rtl_op (TREE_CODE (exp
));
5925 for (i
= 0; i
< nops
; i
++)
5926 if (TREE_OPERAND (exp
, i
) != 0
5927 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
), 0))
5930 /* If this is a language-specific tree code, it may require
5931 special handling. */
5932 if ((unsigned int) TREE_CODE (exp
)
5933 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5934 && !(*lang_hooks
.safe_from_p
) (x
, exp
))
5938 /* If we have an rtl, find any enclosed object. Then see if we conflict
5942 if (GET_CODE (exp_rtl
) == SUBREG
)
5944 exp_rtl
= SUBREG_REG (exp_rtl
);
5945 if (GET_CODE (exp_rtl
) == REG
5946 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
5950 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5951 are memory and they conflict. */
5952 return ! (rtx_equal_p (x
, exp_rtl
)
5953 || (GET_CODE (x
) == MEM
&& GET_CODE (exp_rtl
) == MEM
5954 && true_dependence (exp_rtl
, VOIDmode
, x
,
5955 rtx_addr_varies_p
)));
5958 /* If we reach here, it is safe. */
5962 /* Subroutine of expand_expr: return rtx if EXP is a
5963 variable or parameter; else return 0. */
/* NOTE(review): fragmentary extraction -- the function header, the case
   labels (presumably PARM_DECL/VAR_DECL), and the default return appear to
   have been lost; this looks like the static var_rtx helper.  TODO confirm
   against upstream expr.c before relying on this fragment.  */
5969 switch (TREE_CODE (exp
))
/* Fall-through cases (not visible here) return the decl's assigned RTL.  */
5973 return DECL_RTL (exp
);
5979 /* Return the highest power of two that EXP is known to be a multiple of.
5980 This is used in updating alignment of MEMs in array references. */
5982 static unsigned HOST_WIDE_INT
5983 highest_pow2_factor (tree exp
)
5985 unsigned HOST_WIDE_INT c0
, c1
;
5987 switch (TREE_CODE (exp
))
5990 /* We can find the lowest bit that's a one. If the low
5991 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5992 We need to handle this case since we can find it in a COND_EXPR,
5993 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5994 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5996 if (TREE_CONSTANT_OVERFLOW (exp
))
5997 return BIGGEST_ALIGNMENT
;
6000 /* Note: tree_low_cst is intentionally not used here,
6001 we don't care about the upper bits. */
6002 c0
= TREE_INT_CST_LOW (exp
);
6004 return c0
? c0
: BIGGEST_ALIGNMENT
;
6008 case PLUS_EXPR
: case MINUS_EXPR
: case MIN_EXPR
: case MAX_EXPR
:
6009 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
6010 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
6011 return MIN (c0
, c1
);
6014 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
6015 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
6018 case ROUND_DIV_EXPR
: case TRUNC_DIV_EXPR
: case FLOOR_DIV_EXPR
:
6020 if (integer_pow2p (TREE_OPERAND (exp
, 1))
6021 && host_integerp (TREE_OPERAND (exp
, 1), 1))
6023 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
6024 c1
= tree_low_cst (TREE_OPERAND (exp
, 1), 1);
6025 return MAX (1, c0
/ c1
);
6029 case NON_LVALUE_EXPR
: case NOP_EXPR
: case CONVERT_EXPR
:
6030 case SAVE_EXPR
: case WITH_RECORD_EXPR
:
6031 return highest_pow2_factor (TREE_OPERAND (exp
, 0));
6034 return highest_pow2_factor (TREE_OPERAND (exp
, 1));
6037 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
6038 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 2));
6039 return MIN (c0
, c1
);
6048 /* Similar, except that it is known that the expression must be a multiple
6049 of the alignment of TYPE. */
6051 static unsigned HOST_WIDE_INT
6052 highest_pow2_factor_for_type (tree type
, tree exp
)
6054 unsigned HOST_WIDE_INT type_align
, factor
;
6056 factor
= highest_pow2_factor (exp
);
6057 type_align
= TYPE_ALIGN (type
) / BITS_PER_UNIT
;
6058 return MAX (factor
, type_align
);
6061 /* Return an object on the placeholder list that matches EXP, a
6062 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6063 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6064 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6065 is a location which initially points to a starting location in the
6066 placeholder list (zero means start of the list) and where a pointer into
6067 the placeholder list at which the object is found is placed. */
6070 find_placeholder (tree exp
, tree
*plist
)
6072 tree type
= TREE_TYPE (exp
);
6073 tree placeholder_expr
;
6075 for (placeholder_expr
6076 = plist
&& *plist
? TREE_CHAIN (*plist
) : placeholder_list
;
6077 placeholder_expr
!= 0;
6078 placeholder_expr
= TREE_CHAIN (placeholder_expr
))
6080 tree need_type
= TYPE_MAIN_VARIANT (type
);
6083 /* Find the outermost reference that is of the type we want. If none,
6084 see if any object has a type that is a pointer to the type we
6086 for (elt
= TREE_PURPOSE (placeholder_expr
); elt
!= 0;
6087 elt
= ((TREE_CODE (elt
) == COMPOUND_EXPR
6088 || TREE_CODE (elt
) == COND_EXPR
)
6089 ? TREE_OPERAND (elt
, 1)
6090 : (TREE_CODE_CLASS (TREE_CODE (elt
)) == 'r'
6091 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '1'
6092 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '2'
6093 || TREE_CODE_CLASS (TREE_CODE (elt
)) == 'e')
6094 ? TREE_OPERAND (elt
, 0) : 0))
6095 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt
)) == need_type
)
6098 *plist
= placeholder_expr
;
6102 for (elt
= TREE_PURPOSE (placeholder_expr
); elt
!= 0;
6104 = ((TREE_CODE (elt
) == COMPOUND_EXPR
6105 || TREE_CODE (elt
) == COND_EXPR
)
6106 ? TREE_OPERAND (elt
, 1)
6107 : (TREE_CODE_CLASS (TREE_CODE (elt
)) == 'r'
6108 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '1'
6109 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '2'
6110 || TREE_CODE_CLASS (TREE_CODE (elt
)) == 'e')
6111 ? TREE_OPERAND (elt
, 0) : 0))
6112 if (POINTER_TYPE_P (TREE_TYPE (elt
))
6113 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt
)))
6117 *plist
= placeholder_expr
;
6118 return build1 (INDIRECT_REF
, need_type
, elt
);
6125 /* Subroutine of expand_expr. Expand the two operands of a binary
6126 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6127 The value may be stored in TARGET if TARGET is nonzero. The
6128 MODIFIER argument is as documented by expand_expr. */
6131 expand_operands (tree exp0
, tree exp1
, rtx target
, rtx
*op0
, rtx
*op1
,
6132 enum expand_modifier modifier
)
6134 if (! safe_from_p (target
, exp1
, 1))
6136 if (operand_equal_p (exp0
, exp1
, 0))
6138 *op0
= expand_expr (exp0
, target
, VOIDmode
, modifier
);
6139 *op1
= copy_rtx (*op0
);
6143 /* If we need to preserve evaluation order, copy exp0 into its own
6144 temporary variable so that it can't be clobbered by exp1. */
6145 if (flag_evaluation_order
&& TREE_SIDE_EFFECTS (exp1
))
6146 exp0
= save_expr (exp0
);
6147 *op0
= expand_expr (exp0
, target
, VOIDmode
, modifier
);
6148 *op1
= expand_expr (exp1
, NULL_RTX
, VOIDmode
, modifier
);
6153 /* expand_expr: generate code for computing expression EXP.
6154 An rtx for the computed value is returned. The value is never null.
6155 In the case of a void EXP, const0_rtx is returned.
6157 The value may be stored in TARGET if TARGET is nonzero.
6158 TARGET is just a suggestion; callers must assume that
6159 the rtx returned may not be the same as TARGET.
6161 If TARGET is CONST0_RTX, it means that the value will be ignored.
6163 If TMODE is not VOIDmode, it suggests generating the
6164 result in mode TMODE. But this is done only when convenient.
6165 Otherwise, TMODE is ignored and the value generated in its natural mode.
6166 TMODE is just a suggestion; callers must assume that
6167 the rtx returned may not have mode TMODE.
6169 Note that TARGET may have neither TMODE nor MODE. In that case, it
6170 probably will not be used.
6172 If MODIFIER is EXPAND_SUM then when EXP is an addition
6173 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6174 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6175 products as above, or REG or MEM, or constant.
6176 Ordinarily in such cases we would output mul or add instructions
6177 and then return a pseudo reg containing the sum.
6179 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6180 it also marks a label as absolutely required (it can't be dead).
6181 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6182 This is used for outputting expressions used in initializers.
6184 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6185 with a constant address even if that address is not normally legitimate.
6186 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6188 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6189 a call parameter. Such targets require special care as we haven't yet
6190 marked TARGET so that it's safe from being trashed by libcalls. We
6191 don't want to use TARGET for anything but the final result;
6192 Intermediate values must go elsewhere. Additionally, calls to
6193 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6195 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6196 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6197 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6198 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6202 expand_expr_real (tree exp
, rtx target
, enum machine_mode tmode
,
6203 enum expand_modifier modifier
, rtx
*alt_rtl
)
6206 tree type
= TREE_TYPE (exp
);
6207 int unsignedp
= TREE_UNSIGNED (type
);
6208 enum machine_mode mode
;
6209 enum tree_code code
= TREE_CODE (exp
);
6211 rtx subtarget
, original_target
;
6215 /* Handle ERROR_MARK before anybody tries to access its type. */
6216 if (TREE_CODE (exp
) == ERROR_MARK
|| TREE_CODE (type
) == ERROR_MARK
)
6218 op0
= CONST0_RTX (tmode
);
6224 mode
= TYPE_MODE (type
);
6225 /* Use subtarget as the target for operand 0 of a binary operation. */
6226 subtarget
= get_subtarget (target
);
6227 original_target
= target
;
6228 ignore
= (target
== const0_rtx
6229 || ((code
== NON_LVALUE_EXPR
|| code
== NOP_EXPR
6230 || code
== CONVERT_EXPR
|| code
== REFERENCE_EXPR
6231 || code
== COND_EXPR
|| code
== VIEW_CONVERT_EXPR
)
6232 && TREE_CODE (type
) == VOID_TYPE
));
6234 /* If we are going to ignore this result, we need only do something
6235 if there is a side-effect somewhere in the expression. If there
6236 is, short-circuit the most common cases here. Note that we must
6237 not call expand_expr with anything but const0_rtx in case this
6238 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6242 if (! TREE_SIDE_EFFECTS (exp
))
6245 /* Ensure we reference a volatile object even if value is ignored, but
6246 don't do this if all we are doing is taking its address. */
6247 if (TREE_THIS_VOLATILE (exp
)
6248 && TREE_CODE (exp
) != FUNCTION_DECL
6249 && mode
!= VOIDmode
&& mode
!= BLKmode
6250 && modifier
!= EXPAND_CONST_ADDRESS
)
6252 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, modifier
);
6253 if (GET_CODE (temp
) == MEM
)
6254 temp
= copy_to_reg (temp
);
6258 if (TREE_CODE_CLASS (code
) == '1' || code
== COMPONENT_REF
6259 || code
== INDIRECT_REF
|| code
== BUFFER_REF
)
6260 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
6263 else if (TREE_CODE_CLASS (code
) == '2' || TREE_CODE_CLASS (code
) == '<'
6264 || code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
6266 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, modifier
);
6267 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, modifier
);
6270 else if ((code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
6271 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 1)))
6272 /* If the second operand has no side effects, just evaluate
6274 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
6276 else if (code
== BIT_FIELD_REF
)
6278 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, modifier
);
6279 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, modifier
);
6280 expand_expr (TREE_OPERAND (exp
, 2), const0_rtx
, VOIDmode
, modifier
);
6287 /* If will do cse, generate all results into pseudo registers
6288 since 1) that allows cse to find more things
6289 and 2) otherwise cse could produce an insn the machine
6290 cannot support. An exception is a CONSTRUCTOR into a multi-word
6291 MEM: that's much more likely to be most efficient into the MEM.
6292 Another is a CALL_EXPR which must return in memory. */
6294 if (! cse_not_expected
&& mode
!= BLKmode
&& target
6295 && (GET_CODE (target
) != REG
|| REGNO (target
) < FIRST_PSEUDO_REGISTER
)
6296 && ! (code
== CONSTRUCTOR
&& GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
6297 && ! (code
== CALL_EXPR
&& aggregate_value_p (exp
, exp
)))
6304 tree function
= decl_function_context (exp
);
6305 /* Labels in containing functions, or labels used from initializers,
6307 if (modifier
== EXPAND_INITIALIZER
6308 || (function
!= current_function_decl
6309 && function
!= inline_function_decl
6311 temp
= force_label_rtx (exp
);
6313 temp
= label_rtx (exp
);
6315 temp
= gen_rtx_MEM (FUNCTION_MODE
, gen_rtx_LABEL_REF (Pmode
, temp
));
6316 if (function
!= current_function_decl
6317 && function
!= inline_function_decl
&& function
!= 0)
6318 LABEL_REF_NONLOCAL_P (XEXP (temp
, 0)) = 1;
6323 if (!DECL_RTL_SET_P (exp
))
6325 error ("%Jprior parameter's size depends on '%D'", exp
, exp
);
6326 return CONST0_RTX (mode
);
6329 /* ... fall through ... */
6332 /* If a static var's type was incomplete when the decl was written,
6333 but the type is complete now, lay out the decl now. */
6334 if (DECL_SIZE (exp
) == 0
6335 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp
))
6336 && (TREE_STATIC (exp
) || DECL_EXTERNAL (exp
)))
6337 layout_decl (exp
, 0);
6339 /* ... fall through ... */
6343 if (DECL_RTL (exp
) == 0)
6346 /* Ensure variable marked as used even if it doesn't go through
6347 a parser. If it hasn't be used yet, write out an external
6349 if (! TREE_USED (exp
))
6351 assemble_external (exp
);
6352 TREE_USED (exp
) = 1;
6355 /* Show we haven't gotten RTL for this yet. */
6358 /* Handle variables inherited from containing functions. */
6359 context
= decl_function_context (exp
);
6361 /* We treat inline_function_decl as an alias for the current function
6362 because that is the inline function whose vars, types, etc.
6363 are being merged into the current function.
6364 See expand_inline_function. */
6366 if (context
!= 0 && context
!= current_function_decl
6367 && context
!= inline_function_decl
6368 /* If var is static, we don't need a static chain to access it. */
6369 && ! (GET_CODE (DECL_RTL (exp
)) == MEM
6370 && CONSTANT_P (XEXP (DECL_RTL (exp
), 0))))
6374 /* Mark as non-local and addressable. */
6375 DECL_NONLOCAL (exp
) = 1;
6376 if (DECL_NO_STATIC_CHAIN (current_function_decl
))
6378 (*lang_hooks
.mark_addressable
) (exp
);
6379 if (GET_CODE (DECL_RTL (exp
)) != MEM
)
6381 addr
= XEXP (DECL_RTL (exp
), 0);
6382 if (GET_CODE (addr
) == MEM
)
6384 = replace_equiv_address (addr
,
6385 fix_lexical_addr (XEXP (addr
, 0), exp
));
6387 addr
= fix_lexical_addr (addr
, exp
);
6389 temp
= replace_equiv_address (DECL_RTL (exp
), addr
);
6392 /* This is the case of an array whose size is to be determined
6393 from its initializer, while the initializer is still being parsed.
6396 else if (GET_CODE (DECL_RTL (exp
)) == MEM
6397 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) == REG
)
6398 temp
= validize_mem (DECL_RTL (exp
));
6400 /* If DECL_RTL is memory, we are in the normal case and either
6401 the address is not valid or it is not a register and -fforce-addr
6402 is specified, get the address into a register. */
6404 else if (GET_CODE (DECL_RTL (exp
)) == MEM
6405 && modifier
!= EXPAND_CONST_ADDRESS
6406 && modifier
!= EXPAND_SUM
6407 && modifier
!= EXPAND_INITIALIZER
6408 && (! memory_address_p (DECL_MODE (exp
),
6409 XEXP (DECL_RTL (exp
), 0))
6411 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) != REG
)))
6414 *alt_rtl
= DECL_RTL (exp
);
6415 temp
= replace_equiv_address (DECL_RTL (exp
),
6416 copy_rtx (XEXP (DECL_RTL (exp
), 0)));
6419 /* If we got something, return it. But first, set the alignment
6420 if the address is a register. */
6423 if (GET_CODE (temp
) == MEM
&& GET_CODE (XEXP (temp
, 0)) == REG
)
6424 mark_reg_pointer (XEXP (temp
, 0), DECL_ALIGN (exp
));
6429 /* If the mode of DECL_RTL does not match that of the decl, it
6430 must be a promoted value. We return a SUBREG of the wanted mode,
6431 but mark it so that we know that it was already extended. */
6433 if (GET_CODE (DECL_RTL (exp
)) == REG
6434 && GET_MODE (DECL_RTL (exp
)) != DECL_MODE (exp
))
6436 /* Get the signedness used for this variable. Ensure we get the
6437 same mode we got when the variable was declared. */
6438 if (GET_MODE (DECL_RTL (exp
))
6439 != promote_mode (type
, DECL_MODE (exp
), &unsignedp
,
6440 (TREE_CODE (exp
) == RESULT_DECL
? 1 : 0)))
6443 temp
= gen_lowpart_SUBREG (mode
, DECL_RTL (exp
));
6444 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6445 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6449 return DECL_RTL (exp
);
6452 temp
= immed_double_const (TREE_INT_CST_LOW (exp
),
6453 TREE_INT_CST_HIGH (exp
), mode
);
6455 /* ??? If overflow is set, fold will have done an incomplete job,
6456 which can result in (plus xx (const_int 0)), which can get
6457 simplified by validate_replace_rtx during virtual register
6458 instantiation, which can result in unrecognizable insns.
6459 Avoid this by forcing all overflows into registers. */
6460 if (TREE_CONSTANT_OVERFLOW (exp
)
6461 && modifier
!= EXPAND_INITIALIZER
)
6462 temp
= force_reg (mode
, temp
);
6467 return const_vector_from_tree (exp
);
6470 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
, modifier
);
6473 /* If optimized, generate immediate CONST_DOUBLE
6474 which will be turned into memory by reload if necessary.
6476 We used to force a register so that loop.c could see it. But
6477 this does not allow gen_* patterns to perform optimizations with
6478 the constants. It also produces two insns in cases like "x = 1.0;".
6479 On most machines, floating-point constants are not permitted in
6480 many insns, so we'd end up copying it to a register in any case.
6482 Now, we do the copying in expand_binop, if appropriate. */
6483 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp
),
6484 TYPE_MODE (TREE_TYPE (exp
)));
6487 /* Handle evaluating a complex constant in a CONCAT target. */
6488 if (original_target
&& GET_CODE (original_target
) == CONCAT
)
6490 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
6493 rtarg
= XEXP (original_target
, 0);
6494 itarg
= XEXP (original_target
, 1);
6496 /* Move the real and imaginary parts separately. */
6497 op0
= expand_expr (TREE_REALPART (exp
), rtarg
, mode
, 0);
6498 op1
= expand_expr (TREE_IMAGPART (exp
), itarg
, mode
, 0);
6501 emit_move_insn (rtarg
, op0
);
6503 emit_move_insn (itarg
, op1
);
6505 return original_target
;
6508 /* ... fall through ... */
6511 temp
= output_constant_def (exp
, 1);
6513 /* temp contains a constant address.
6514 On RISC machines where a constant address isn't valid,
6515 make some insns to get that address into a register. */
6516 if (modifier
!= EXPAND_CONST_ADDRESS
6517 && modifier
!= EXPAND_INITIALIZER
6518 && modifier
!= EXPAND_SUM
6519 && (! memory_address_p (mode
, XEXP (temp
, 0))
6520 || flag_force_addr
))
6521 return replace_equiv_address (temp
,
6522 copy_rtx (XEXP (temp
, 0)));
6525 case EXPR_WITH_FILE_LOCATION
:
6528 struct file_stack fs
;
6530 fs
.location
= input_location
;
6531 fs
.next
= expr_wfl_stack
;
6532 input_filename
= EXPR_WFL_FILENAME (exp
);
6533 input_line
= EXPR_WFL_LINENO (exp
);
6534 expr_wfl_stack
= &fs
;
6535 if (EXPR_WFL_EMIT_LINE_NOTE (exp
))
6536 emit_line_note (input_location
);
6537 /* Possibly avoid switching back and forth here. */
6538 to_return
= expand_expr (EXPR_WFL_NODE (exp
),
6539 (ignore
? const0_rtx
: target
),
6541 if (expr_wfl_stack
!= &fs
)
6543 input_location
= fs
.location
;
6544 expr_wfl_stack
= fs
.next
;
6549 context
= decl_function_context (exp
);
6551 /* If this SAVE_EXPR was at global context, assume we are an
6552 initialization function and move it into our context. */
6554 SAVE_EXPR_CONTEXT (exp
) = current_function_decl
;
6556 /* We treat inline_function_decl as an alias for the current function
6557 because that is the inline function whose vars, types, etc.
6558 are being merged into the current function.
6559 See expand_inline_function. */
6560 if (context
== current_function_decl
|| context
== inline_function_decl
)
6563 /* If this is non-local, handle it. */
6566 /* The following call just exists to abort if the context is
6567 not of a containing function. */
6568 find_function_data (context
);
6570 temp
= SAVE_EXPR_RTL (exp
);
6571 if (temp
&& GET_CODE (temp
) == REG
)
6573 put_var_into_stack (exp
, /*rescan=*/true);
6574 temp
= SAVE_EXPR_RTL (exp
);
6576 if (temp
== 0 || GET_CODE (temp
) != MEM
)
6579 replace_equiv_address (temp
,
6580 fix_lexical_addr (XEXP (temp
, 0), exp
));
6582 if (SAVE_EXPR_RTL (exp
) == 0)
6584 if (mode
== VOIDmode
)
6587 temp
= assign_temp (build_qualified_type (type
,
6589 | TYPE_QUAL_CONST
)),
6592 SAVE_EXPR_RTL (exp
) = temp
;
6593 if (!optimize
&& GET_CODE (temp
) == REG
)
6594 save_expr_regs
= gen_rtx_EXPR_LIST (VOIDmode
, temp
,
6597 /* If the mode of TEMP does not match that of the expression, it
6598 must be a promoted value. We pass store_expr a SUBREG of the
6599 wanted mode but mark it so that we know that it was already
6602 if (GET_CODE (temp
) == REG
&& GET_MODE (temp
) != mode
)
6604 temp
= gen_lowpart_SUBREG (mode
, SAVE_EXPR_RTL (exp
));
6605 promote_mode (type
, mode
, &unsignedp
, 0);
6606 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6607 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6610 if (temp
== const0_rtx
)
6611 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
6613 store_expr (TREE_OPERAND (exp
, 0), temp
,
6614 modifier
== EXPAND_STACK_PARM
? 2 : 0);
6616 TREE_USED (exp
) = 1;
6619 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6620 must be a promoted value. We return a SUBREG of the wanted mode,
6621 but mark it so that we know that it was already extended. */
6623 if (GET_CODE (SAVE_EXPR_RTL (exp
)) == REG
6624 && GET_MODE (SAVE_EXPR_RTL (exp
)) != mode
)
6626 /* Compute the signedness and make the proper SUBREG. */
6627 promote_mode (type
, mode
, &unsignedp
, 0);
6628 temp
= gen_lowpart_SUBREG (mode
, SAVE_EXPR_RTL (exp
));
6629 SUBREG_PROMOTED_VAR_P (temp
) = 1;
6630 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
6634 return SAVE_EXPR_RTL (exp
);
6639 temp
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
6640 TREE_OPERAND (exp
, 0)
6641 = (*lang_hooks
.unsave_expr_now
) (TREE_OPERAND (exp
, 0));
6645 case PLACEHOLDER_EXPR
:
6647 tree old_list
= placeholder_list
;
6648 tree placeholder_expr
= 0;
6650 exp
= find_placeholder (exp
, &placeholder_expr
);
6654 placeholder_list
= TREE_CHAIN (placeholder_expr
);
6655 temp
= expand_expr (exp
, original_target
, tmode
, modifier
);
6656 placeholder_list
= old_list
;
6660 case WITH_RECORD_EXPR
:
6661 /* Put the object on the placeholder list, expand our first operand,
6662 and pop the list. */
6663 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
6665 target
= expand_expr (TREE_OPERAND (exp
, 0), original_target
, tmode
,
6667 placeholder_list
= TREE_CHAIN (placeholder_list
);
6671 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == LABEL_DECL
)
6672 expand_goto (TREE_OPERAND (exp
, 0));
6674 expand_computed_goto (TREE_OPERAND (exp
, 0));
6678 expand_exit_loop_if_false (NULL
,
6679 invert_truthvalue (TREE_OPERAND (exp
, 0)));
6682 case LABELED_BLOCK_EXPR
:
6683 if (LABELED_BLOCK_BODY (exp
))
6684 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp
), 0, 1);
6685 /* Should perhaps use expand_label, but this is simpler and safer. */
6686 do_pending_stack_adjust ();
6687 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp
)));
6690 case EXIT_BLOCK_EXPR
:
6691 if (EXIT_BLOCK_RETURN (exp
))
6692 sorry ("returned value in block_exit_expr");
6693 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp
)));
6698 expand_start_loop (1);
6699 expand_expr_stmt_value (TREE_OPERAND (exp
, 0), 0, 1);
6707 tree vars
= TREE_OPERAND (exp
, 0);
6709 /* Need to open a binding contour here because
6710 if there are any cleanups they must be contained here. */
6711 expand_start_bindings (2);
6713 /* Mark the corresponding BLOCK for output in its proper place. */
6714 if (TREE_OPERAND (exp
, 2) != 0
6715 && ! TREE_USED (TREE_OPERAND (exp
, 2)))
6716 (*lang_hooks
.decls
.insert_block
) (TREE_OPERAND (exp
, 2));
6718 /* If VARS have not yet been expanded, expand them now. */
6721 if (!DECL_RTL_SET_P (vars
))
6723 expand_decl_init (vars
);
6724 vars
= TREE_CHAIN (vars
);
6727 temp
= expand_expr (TREE_OPERAND (exp
, 1), target
, tmode
, modifier
);
6729 expand_end_bindings (TREE_OPERAND (exp
, 0), 0, 0);
6735 if (RTL_EXPR_SEQUENCE (exp
))
6737 if (RTL_EXPR_SEQUENCE (exp
) == const0_rtx
)
6739 emit_insn (RTL_EXPR_SEQUENCE (exp
));
6740 RTL_EXPR_SEQUENCE (exp
) = const0_rtx
;
6742 preserve_rtl_expr_result (RTL_EXPR_RTL (exp
));
6743 free_temps_for_rtl_expr (exp
);
6745 *alt_rtl
= RTL_EXPR_ALT_RTL (exp
);
6746 return RTL_EXPR_RTL (exp
);
6749 /* If we don't need the result, just ensure we evaluate any
6755 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
6756 expand_expr (TREE_VALUE (elt
), const0_rtx
, VOIDmode
, 0);
6761 /* All elts simple constants => refer to a constant in memory. But
6762 if this is a non-BLKmode mode, let it store a field at a time
6763 since that should make a CONST_INT or CONST_DOUBLE when we
6764 fold. Likewise, if we have a target we can use, it is best to
6765 store directly into the target unless the type is large enough
6766 that memcpy will be used. If we are making an initializer and
6767 all operands are constant, put it in memory as well.
6769 FIXME: Avoid trying to fill vector constructors piece-meal.
6770 Output them with output_constant_def below unless we're sure
6771 they're zeros. This should go away when vector initializers
6772 are treated like VECTOR_CST instead of arrays.
6774 else if ((TREE_STATIC (exp
)
6775 && ((mode
== BLKmode
6776 && ! (target
!= 0 && safe_from_p (target
, exp
, 1)))
6777 || TREE_ADDRESSABLE (exp
)
6778 || (host_integerp (TYPE_SIZE_UNIT (type
), 1)
6779 && (! MOVE_BY_PIECES_P
6780 (tree_low_cst (TYPE_SIZE_UNIT (type
), 1),
6782 && ((TREE_CODE (type
) == VECTOR_TYPE
6783 && !is_zeros_p (exp
))
6784 || ! mostly_zeros_p (exp
)))))
6785 || ((modifier
== EXPAND_INITIALIZER
6786 || modifier
== EXPAND_CONST_ADDRESS
)
6787 && TREE_CONSTANT (exp
)))
6789 rtx constructor
= output_constant_def (exp
, 1);
6791 if (modifier
!= EXPAND_CONST_ADDRESS
6792 && modifier
!= EXPAND_INITIALIZER
6793 && modifier
!= EXPAND_SUM
)
6794 constructor
= validize_mem (constructor
);
6800 /* Handle calls that pass values in multiple non-contiguous
6801 locations. The Irix 6 ABI has examples of this. */
6802 if (target
== 0 || ! safe_from_p (target
, exp
, 1)
6803 || GET_CODE (target
) == PARALLEL
6804 || modifier
== EXPAND_STACK_PARM
)
6806 = assign_temp (build_qualified_type (type
,
6808 | (TREE_READONLY (exp
)
6809 * TYPE_QUAL_CONST
))),
6810 0, TREE_ADDRESSABLE (exp
), 1);
6812 store_constructor (exp
, target
, 0, int_expr_size (exp
));
6818 tree exp1
= TREE_OPERAND (exp
, 0);
6820 tree string
= string_constant (exp1
, &index
);
6822 /* Try to optimize reads from const strings. */
6824 && TREE_CODE (string
) == STRING_CST
6825 && TREE_CODE (index
) == INTEGER_CST
6826 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
6827 && GET_MODE_CLASS (mode
) == MODE_INT
6828 && GET_MODE_SIZE (mode
) == 1
6829 && modifier
!= EXPAND_WRITE
)
6830 return gen_int_mode (TREE_STRING_POINTER (string
)
6831 [TREE_INT_CST_LOW (index
)], mode
);
6833 op0
= expand_expr (exp1
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
6834 op0
= memory_address (mode
, op0
);
6835 temp
= gen_rtx_MEM (mode
, op0
);
6836 set_mem_attributes (temp
, exp
, 0);
6838 /* If we are writing to this object and its type is a record with
6839 readonly fields, we must mark it as readonly so it will
6840 conflict with readonly references to those fields. */
6841 if (modifier
== EXPAND_WRITE
&& readonly_fields_p (type
))
6842 RTX_UNCHANGING_P (temp
) = 1;
6848 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) != ARRAY_TYPE
)
6852 tree array
= TREE_OPERAND (exp
, 0);
6853 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
6854 tree low_bound
= domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
6855 tree index
= convert (sizetype
, TREE_OPERAND (exp
, 1));
6858 /* Optimize the special-case of a zero lower bound.
6860 We convert the low_bound to sizetype to avoid some problems
6861 with constant folding. (E.g. suppose the lower bound is 1,
6862 and its mode is QI. Without the conversion, (ARRAY
6863 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6864 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6866 if (! integer_zerop (low_bound
))
6867 index
= size_diffop (index
, convert (sizetype
, low_bound
));
6869 /* Fold an expression like: "foo"[2].
6870 This is not done in fold so it won't happen inside &.
6871 Don't fold if this is for wide characters since it's too
6872 difficult to do correctly and this is a very rare case. */
6874 if (modifier
!= EXPAND_CONST_ADDRESS
6875 && modifier
!= EXPAND_INITIALIZER
6876 && modifier
!= EXPAND_MEMORY
6877 && TREE_CODE (array
) == STRING_CST
6878 && TREE_CODE (index
) == INTEGER_CST
6879 && compare_tree_int (index
, TREE_STRING_LENGTH (array
)) < 0
6880 && GET_MODE_CLASS (mode
) == MODE_INT
6881 && GET_MODE_SIZE (mode
) == 1)
6882 return gen_int_mode (TREE_STRING_POINTER (array
)
6883 [TREE_INT_CST_LOW (index
)], mode
);
6885 /* If this is a constant index into a constant array,
6886 just get the value from the array. Handle both the cases when
6887 we have an explicit constructor and when our operand is a variable
6888 that was declared const. */
6890 if (modifier
!= EXPAND_CONST_ADDRESS
6891 && modifier
!= EXPAND_INITIALIZER
6892 && modifier
!= EXPAND_MEMORY
6893 && TREE_CODE (array
) == CONSTRUCTOR
6894 && ! TREE_SIDE_EFFECTS (array
)
6895 && TREE_CODE (index
) == INTEGER_CST
6896 && 0 > compare_tree_int (index
,
6897 list_length (CONSTRUCTOR_ELTS
6898 (TREE_OPERAND (exp
, 0)))))
6902 for (elem
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)),
6903 i
= TREE_INT_CST_LOW (index
);
6904 elem
!= 0 && i
!= 0; i
--, elem
= TREE_CHAIN (elem
))
6908 return expand_expr (fold (TREE_VALUE (elem
)), target
, tmode
,
6912 else if (optimize
>= 1
6913 && modifier
!= EXPAND_CONST_ADDRESS
6914 && modifier
!= EXPAND_INITIALIZER
6915 && modifier
!= EXPAND_MEMORY
6916 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
6917 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
6918 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
6919 && targetm
.binds_local_p (array
))
6921 if (TREE_CODE (index
) == INTEGER_CST
)
6923 tree init
= DECL_INITIAL (array
);
6925 if (TREE_CODE (init
) == CONSTRUCTOR
)
6929 for (elem
= CONSTRUCTOR_ELTS (init
);
6931 && !tree_int_cst_equal (TREE_PURPOSE (elem
), index
));
6932 elem
= TREE_CHAIN (elem
))
6935 if (elem
&& !TREE_SIDE_EFFECTS (TREE_VALUE (elem
)))
6936 return expand_expr (fold (TREE_VALUE (elem
)), target
,
6939 else if (TREE_CODE (init
) == STRING_CST
6940 && 0 > compare_tree_int (index
,
6941 TREE_STRING_LENGTH (init
)))
6943 tree type
= TREE_TYPE (TREE_TYPE (init
));
6944 enum machine_mode mode
= TYPE_MODE (type
);
6946 if (GET_MODE_CLASS (mode
) == MODE_INT
6947 && GET_MODE_SIZE (mode
) == 1)
6948 return gen_int_mode (TREE_STRING_POINTER (init
)
6949 [TREE_INT_CST_LOW (index
)], mode
);
6954 goto normal_inner_ref
;
6957 /* If the operand is a CONSTRUCTOR, we can just extract the
6958 appropriate field if it is present. */
6959 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
)
6963 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
6964 elt
= TREE_CHAIN (elt
))
6965 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1)
6966 /* We can normally use the value of the field in the
6967 CONSTRUCTOR. However, if this is a bitfield in
6968 an integral mode that we can fit in a HOST_WIDE_INT,
6969 we must mask only the number of bits in the bitfield,
6970 since this is done implicitly by the constructor. If
6971 the bitfield does not meet either of those conditions,
6972 we can't do this optimization. */
6973 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt
))
6974 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt
)))
6976 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt
)))
6977 <= HOST_BITS_PER_WIDE_INT
))))
6979 if (DECL_BIT_FIELD (TREE_PURPOSE (elt
))
6980 && modifier
== EXPAND_STACK_PARM
)
6982 op0
= expand_expr (TREE_VALUE (elt
), target
, tmode
, modifier
);
6983 if (DECL_BIT_FIELD (TREE_PURPOSE (elt
)))
6985 HOST_WIDE_INT bitsize
6986 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt
)));
6987 enum machine_mode imode
6988 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt
)));
6990 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt
))))
6992 op1
= GEN_INT (((HOST_WIDE_INT
) 1 << bitsize
) - 1);
6993 op0
= expand_and (imode
, op0
, op1
, target
);
6998 = build_int_2 (GET_MODE_BITSIZE (imode
) - bitsize
,
7001 op0
= expand_shift (LSHIFT_EXPR
, imode
, op0
, count
,
7003 op0
= expand_shift (RSHIFT_EXPR
, imode
, op0
, count
,
7011 goto normal_inner_ref
;
7014 case ARRAY_RANGE_REF
:
7017 enum machine_mode mode1
;
7018 HOST_WIDE_INT bitsize
, bitpos
;
7021 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
7022 &mode1
, &unsignedp
, &volatilep
);
7025 /* If we got back the original object, something is wrong. Perhaps
7026 we are evaluating an expression too early. In any event, don't
7027 infinitely recurse. */
7031 /* If TEM's type is a union of variable size, pass TARGET to the inner
7032 computation, since it will need a temporary and TARGET is known
7033 to have to do. This occurs in unchecked conversion in Ada. */
7037 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
7038 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
7040 && modifier
!= EXPAND_STACK_PARM
7041 ? target
: NULL_RTX
),
7043 (modifier
== EXPAND_INITIALIZER
7044 || modifier
== EXPAND_CONST_ADDRESS
7045 || modifier
== EXPAND_STACK_PARM
)
7046 ? modifier
: EXPAND_NORMAL
);
7048 /* If this is a constant, put it into a register if it is a
7049 legitimate constant and OFFSET is 0 and memory if it isn't. */
7050 if (CONSTANT_P (op0
))
7052 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (tem
));
7053 if (mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
)
7055 op0
= force_reg (mode
, op0
);
7057 op0
= validize_mem (force_const_mem (mode
, op0
));
7060 /* Otherwise, if this object not in memory and we either have an
7061 offset or a BLKmode result, put it there. This case can't occur in
7062 C, but can in Ada if we have unchecked conversion of an expression
7063 from a scalar type to an array or record type or for an
7064 ARRAY_RANGE_REF whose type is BLKmode. */
7065 else if (GET_CODE (op0
) != MEM
7067 || (code
== ARRAY_RANGE_REF
&& mode
== BLKmode
)))
7069 /* If the operand is a SAVE_EXPR, we can deal with this by
7070 forcing the SAVE_EXPR into memory. */
7071 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == SAVE_EXPR
)
7073 put_var_into_stack (TREE_OPERAND (exp
, 0),
7075 op0
= SAVE_EXPR_RTL (TREE_OPERAND (exp
, 0));
7080 = build_qualified_type (TREE_TYPE (tem
),
7081 (TYPE_QUALS (TREE_TYPE (tem
))
7082 | TYPE_QUAL_CONST
));
7083 rtx memloc
= assign_temp (nt
, 1, 1, 1);
7085 emit_move_insn (memloc
, op0
);
7092 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
,
7095 if (GET_CODE (op0
) != MEM
)
7098 #ifdef POINTERS_EXTEND_UNSIGNED
7099 if (GET_MODE (offset_rtx
) != Pmode
)
7100 offset_rtx
= convert_to_mode (Pmode
, offset_rtx
, 0);
7102 if (GET_MODE (offset_rtx
) != ptr_mode
)
7103 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
7106 if (GET_MODE (op0
) == BLKmode
7107 /* A constant address in OP0 can have VOIDmode, we must
7108 not try to call force_reg in that case. */
7109 && GET_MODE (XEXP (op0
, 0)) != VOIDmode
7111 && (bitpos
% bitsize
) == 0
7112 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
7113 && MEM_ALIGN (op0
) == GET_MODE_ALIGNMENT (mode1
))
7115 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7119 op0
= offset_address (op0
, offset_rtx
,
7120 highest_pow2_factor (offset
));
7123 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7124 record its alignment as BIGGEST_ALIGNMENT. */
7125 if (GET_CODE (op0
) == MEM
&& bitpos
== 0 && offset
!= 0
7126 && is_aligning_offset (offset
, tem
))
7127 set_mem_align (op0
, BIGGEST_ALIGNMENT
);
7129 /* Don't forget about volatility even if this is a bitfield. */
7130 if (GET_CODE (op0
) == MEM
&& volatilep
&& ! MEM_VOLATILE_P (op0
))
7132 if (op0
== orig_op0
)
7133 op0
= copy_rtx (op0
);
7135 MEM_VOLATILE_P (op0
) = 1;
7138 /* The following code doesn't handle CONCAT.
7139 Assume only bitpos == 0 can be used for CONCAT, due to
7140 one element arrays having the same mode as its element. */
7141 if (GET_CODE (op0
) == CONCAT
)
7143 if (bitpos
!= 0 || bitsize
!= GET_MODE_BITSIZE (GET_MODE (op0
)))
7148 /* In cases where an aligned union has an unaligned object
7149 as a field, we might be extracting a BLKmode value from
7150 an integer-mode (e.g., SImode) object. Handle this case
7151 by doing the extract into an object as wide as the field
7152 (which we know to be the width of a basic mode), then
7153 storing into memory, and changing the mode to BLKmode. */
7154 if (mode1
== VOIDmode
7155 || GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
7156 || (mode1
!= BLKmode
&& ! direct_load
[(int) mode1
]
7157 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
7158 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
7159 && modifier
!= EXPAND_CONST_ADDRESS
7160 && modifier
!= EXPAND_INITIALIZER
)
7161 /* If the field isn't aligned enough to fetch as a memref,
7162 fetch it as a bit field. */
7163 || (mode1
!= BLKmode
7164 && (((TYPE_ALIGN (TREE_TYPE (tem
)) < GET_MODE_ALIGNMENT (mode
)
7165 || (bitpos
% GET_MODE_ALIGNMENT (mode
) != 0)
7166 || (GET_CODE (op0
) == MEM
7167 && (MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (mode1
)
7168 || (bitpos
% GET_MODE_ALIGNMENT (mode1
) != 0))))
7169 && ((modifier
== EXPAND_CONST_ADDRESS
7170 || modifier
== EXPAND_INITIALIZER
)
7172 : SLOW_UNALIGNED_ACCESS (mode1
, MEM_ALIGN (op0
))))
7173 || (bitpos
% BITS_PER_UNIT
!= 0)))
7174 /* If the type and the field are a constant size and the
7175 size of the type isn't the same size as the bitfield,
7176 we must use bitfield operations. */
7178 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
)))
7180 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)),
7183 enum machine_mode ext_mode
= mode
;
7185 if (ext_mode
== BLKmode
7186 && ! (target
!= 0 && GET_CODE (op0
) == MEM
7187 && GET_CODE (target
) == MEM
7188 && bitpos
% BITS_PER_UNIT
== 0))
7189 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
7191 if (ext_mode
== BLKmode
)
7194 target
= assign_temp (type
, 0, 1, 1);
7199 /* In this case, BITPOS must start at a byte boundary and
7200 TARGET, if specified, must be a MEM. */
7201 if (GET_CODE (op0
) != MEM
7202 || (target
!= 0 && GET_CODE (target
) != MEM
)
7203 || bitpos
% BITS_PER_UNIT
!= 0)
7206 emit_block_move (target
,
7207 adjust_address (op0
, VOIDmode
,
7208 bitpos
/ BITS_PER_UNIT
),
7209 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
7211 (modifier
== EXPAND_STACK_PARM
7212 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
7217 op0
= validize_mem (op0
);
7219 if (GET_CODE (op0
) == MEM
&& GET_CODE (XEXP (op0
, 0)) == REG
)
7220 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7222 op0
= extract_bit_field (op0
, bitsize
, bitpos
, unsignedp
,
7223 (modifier
== EXPAND_STACK_PARM
7224 ? NULL_RTX
: target
),
7226 int_size_in_bytes (TREE_TYPE (tem
)));
7228 /* If the result is a record type and BITSIZE is narrower than
7229 the mode of OP0, an integral mode, and this is a big endian
7230 machine, we must put the field into the high-order bits. */
7231 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
7232 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7233 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (op0
)))
7234 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
7235 size_int (GET_MODE_BITSIZE (GET_MODE (op0
))
7239 if (mode
== BLKmode
)
7241 rtx
new = assign_temp (build_qualified_type
7242 ((*lang_hooks
.types
.type_for_mode
)
7244 TYPE_QUAL_CONST
), 0, 1, 1);
7246 emit_move_insn (new, op0
);
7247 op0
= copy_rtx (new);
7248 PUT_MODE (op0
, BLKmode
);
7249 set_mem_attributes (op0
, exp
, 1);
7255 /* If the result is BLKmode, use that to access the object
7257 if (mode
== BLKmode
)
7260 /* Get a reference to just this component. */
7261 if (modifier
== EXPAND_CONST_ADDRESS
7262 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7263 op0
= adjust_address_nv (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7265 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7267 if (op0
== orig_op0
)
7268 op0
= copy_rtx (op0
);
7270 set_mem_attributes (op0
, exp
, 0);
7271 if (GET_CODE (XEXP (op0
, 0)) == REG
)
7272 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7274 MEM_VOLATILE_P (op0
) |= volatilep
;
7275 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
7276 || modifier
== EXPAND_CONST_ADDRESS
7277 || modifier
== EXPAND_INITIALIZER
)
7279 else if (target
== 0)
7280 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7282 convert_move (target
, op0
, unsignedp
);
7288 rtx insn
, before
= get_last_insn (), vtbl_ref
;
7290 /* Evaluate the interior expression. */
7291 subtarget
= expand_expr (TREE_OPERAND (exp
, 0), target
,
7294 /* Get or create an instruction off which to hang a note. */
7295 if (REG_P (subtarget
))
7298 insn
= get_last_insn ();
7301 if (! INSN_P (insn
))
7302 insn
= prev_nonnote_insn (insn
);
7306 target
= gen_reg_rtx (GET_MODE (subtarget
));
7307 insn
= emit_move_insn (target
, subtarget
);
7310 /* Collect the data for the note. */
7311 vtbl_ref
= XEXP (DECL_RTL (TREE_OPERAND (exp
, 1)), 0);
7312 vtbl_ref
= plus_constant (vtbl_ref
,
7313 tree_low_cst (TREE_OPERAND (exp
, 2), 0));
7314 /* Discard the initial CONST that was added. */
7315 vtbl_ref
= XEXP (vtbl_ref
, 0);
7318 = gen_rtx_EXPR_LIST (REG_VTABLE_REF
, vtbl_ref
, REG_NOTES (insn
));
7323 /* Intended for a reference to a buffer of a file-object in Pascal.
7324 But it's not certain that a special tree code will really be
7325 necessary for these. INDIRECT_REF might work for them. */
7331 /* Pascal set IN expression.
7334 rlo = set_low - (set_low%bits_per_word);
7335 the_word = set [ (index - rlo)/bits_per_word ];
7336 bit_index = index % bits_per_word;
7337 bitmask = 1 << bit_index;
7338 return !!(the_word & bitmask); */
7340 tree set
= TREE_OPERAND (exp
, 0);
7341 tree index
= TREE_OPERAND (exp
, 1);
7342 int iunsignedp
= TREE_UNSIGNED (TREE_TYPE (index
));
7343 tree set_type
= TREE_TYPE (set
);
7344 tree set_low_bound
= TYPE_MIN_VALUE (TYPE_DOMAIN (set_type
));
7345 tree set_high_bound
= TYPE_MAX_VALUE (TYPE_DOMAIN (set_type
));
7346 rtx index_val
= expand_expr (index
, 0, VOIDmode
, 0);
7347 rtx lo_r
= expand_expr (set_low_bound
, 0, VOIDmode
, 0);
7348 rtx hi_r
= expand_expr (set_high_bound
, 0, VOIDmode
, 0);
7349 rtx setval
= expand_expr (set
, 0, VOIDmode
, 0);
7350 rtx setaddr
= XEXP (setval
, 0);
7351 enum machine_mode index_mode
= TYPE_MODE (TREE_TYPE (index
));
7353 rtx diff
, quo
, rem
, addr
, bit
, result
;
7355 /* If domain is empty, answer is no. Likewise if index is constant
7356 and out of bounds. */
7357 if (((TREE_CODE (set_high_bound
) == INTEGER_CST
7358 && TREE_CODE (set_low_bound
) == INTEGER_CST
7359 && tree_int_cst_lt (set_high_bound
, set_low_bound
))
7360 || (TREE_CODE (index
) == INTEGER_CST
7361 && TREE_CODE (set_low_bound
) == INTEGER_CST
7362 && tree_int_cst_lt (index
, set_low_bound
))
7363 || (TREE_CODE (set_high_bound
) == INTEGER_CST
7364 && TREE_CODE (index
) == INTEGER_CST
7365 && tree_int_cst_lt (set_high_bound
, index
))))
7369 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7371 /* If we get here, we have to generate the code for both cases
7372 (in range and out of range). */
7374 op0
= gen_label_rtx ();
7375 op1
= gen_label_rtx ();
7377 if (! (GET_CODE (index_val
) == CONST_INT
7378 && GET_CODE (lo_r
) == CONST_INT
))
7379 emit_cmp_and_jump_insns (index_val
, lo_r
, LT
, NULL_RTX
,
7380 GET_MODE (index_val
), iunsignedp
, op1
);
7382 if (! (GET_CODE (index_val
) == CONST_INT
7383 && GET_CODE (hi_r
) == CONST_INT
))
7384 emit_cmp_and_jump_insns (index_val
, hi_r
, GT
, NULL_RTX
,
7385 GET_MODE (index_val
), iunsignedp
, op1
);
7387 /* Calculate the element number of bit zero in the first word
7389 if (GET_CODE (lo_r
) == CONST_INT
)
7390 rlow
= GEN_INT (INTVAL (lo_r
)
7391 & ~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
));
7393 rlow
= expand_binop (index_mode
, and_optab
, lo_r
,
7394 GEN_INT (~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
)),
7395 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
7397 diff
= expand_binop (index_mode
, sub_optab
, index_val
, rlow
,
7398 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
7400 quo
= expand_divmod (0, TRUNC_DIV_EXPR
, index_mode
, diff
,
7401 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
7402 rem
= expand_divmod (1, TRUNC_MOD_EXPR
, index_mode
, index_val
,
7403 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
7405 addr
= memory_address (byte_mode
,
7406 expand_binop (index_mode
, add_optab
, diff
,
7407 setaddr
, NULL_RTX
, iunsignedp
,
7410 /* Extract the bit we want to examine. */
7411 bit
= expand_shift (RSHIFT_EXPR
, byte_mode
,
7412 gen_rtx_MEM (byte_mode
, addr
),
7413 make_tree (TREE_TYPE (index
), rem
),
7415 result
= expand_binop (byte_mode
, and_optab
, bit
, const1_rtx
,
7416 GET_MODE (target
) == byte_mode
? target
: 0,
7417 1, OPTAB_LIB_WIDEN
);
7419 if (result
!= target
)
7420 convert_move (target
, result
, 1);
7422 /* Output the code to handle the out-of-range case. */
7425 emit_move_insn (target
, const0_rtx
);
7430 case WITH_CLEANUP_EXPR
:
7431 if (WITH_CLEANUP_EXPR_RTL (exp
) == 0)
7433 WITH_CLEANUP_EXPR_RTL (exp
)
7434 = expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
7435 expand_decl_cleanup_eh (NULL_TREE
, TREE_OPERAND (exp
, 1),
7436 CLEANUP_EH_ONLY (exp
));
7438 /* That's it for this cleanup. */
7439 TREE_OPERAND (exp
, 1) = 0;
7441 return WITH_CLEANUP_EXPR_RTL (exp
);
7443 case CLEANUP_POINT_EXPR
:
7445 /* Start a new binding layer that will keep track of all cleanup
7446 actions to be performed. */
7447 expand_start_bindings (2);
7449 target_temp_slot_level
= temp_slot_level
;
7451 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
7452 /* If we're going to use this value, load it up now. */
7454 op0
= force_not_mem (op0
);
7455 preserve_temp_slots (op0
);
7456 expand_end_bindings (NULL_TREE
, 0, 0);
7461 /* Check for a built-in function. */
7462 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
7463 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7465 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7467 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
7468 == BUILT_IN_FRONTEND
)
7469 return (*lang_hooks
.expand_expr
) (exp
, original_target
,
7473 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
7476 return expand_call (exp
, target
, ignore
);
7478 case NON_LVALUE_EXPR
:
7481 case REFERENCE_EXPR
:
7482 if (TREE_OPERAND (exp
, 0) == error_mark_node
)
7485 if (TREE_CODE (type
) == UNION_TYPE
)
7487 tree valtype
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7489 /* If both input and output are BLKmode, this conversion isn't doing
7490 anything except possibly changing memory attribute. */
7491 if (mode
== BLKmode
&& TYPE_MODE (valtype
) == BLKmode
)
7493 rtx result
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
,
7496 result
= copy_rtx (result
);
7497 set_mem_attributes (result
, exp
, 0);
7503 if (TYPE_MODE (type
) != BLKmode
)
7504 target
= gen_reg_rtx (TYPE_MODE (type
));
7506 target
= assign_temp (type
, 0, 1, 1);
7509 if (GET_CODE (target
) == MEM
)
7510 /* Store data into beginning of memory target. */
7511 store_expr (TREE_OPERAND (exp
, 0),
7512 adjust_address (target
, TYPE_MODE (valtype
), 0),
7513 modifier
== EXPAND_STACK_PARM
? 2 : 0);
7515 else if (GET_CODE (target
) == REG
)
7516 /* Store this field into a union of the proper type. */
7517 store_field (target
,
7518 MIN ((int_size_in_bytes (TREE_TYPE
7519 (TREE_OPERAND (exp
, 0)))
7521 (HOST_WIDE_INT
) GET_MODE_BITSIZE (mode
)),
7522 0, TYPE_MODE (valtype
), TREE_OPERAND (exp
, 0),
7523 VOIDmode
, 0, type
, 0);
7527 /* Return the entire union. */
7531 if (mode
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7533 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
,
7536 /* If the signedness of the conversion differs and OP0 is
7537 a promoted SUBREG, clear that indication since we now
7538 have to do the proper extension. */
7539 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))) != unsignedp
7540 && GET_CODE (op0
) == SUBREG
)
7541 SUBREG_PROMOTED_VAR_P (op0
) = 0;
7546 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, modifier
);
7547 if (GET_MODE (op0
) == mode
)
7550 /* If OP0 is a constant, just convert it into the proper mode. */
7551 if (CONSTANT_P (op0
))
7553 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7554 enum machine_mode inner_mode
= TYPE_MODE (inner_type
);
7556 if (modifier
== EXPAND_INITIALIZER
)
7557 return simplify_gen_subreg (mode
, op0
, inner_mode
,
7558 subreg_lowpart_offset (mode
,
7561 return convert_modes (mode
, inner_mode
, op0
,
7562 TREE_UNSIGNED (inner_type
));
7565 if (modifier
== EXPAND_INITIALIZER
)
7566 return gen_rtx_fmt_e (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
7570 convert_to_mode (mode
, op0
,
7571 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7573 convert_move (target
, op0
,
7574 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7577 case VIEW_CONVERT_EXPR
:
7578 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, modifier
);
7580 /* If the input and output modes are both the same, we are done.
7581 Otherwise, if neither mode is BLKmode and both are integral and within
7582 a word, we can use gen_lowpart. If neither is true, make sure the
7583 operand is in memory and convert the MEM to the new mode. */
7584 if (TYPE_MODE (type
) == GET_MODE (op0
))
7586 else if (TYPE_MODE (type
) != BLKmode
&& GET_MODE (op0
) != BLKmode
7587 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7588 && GET_MODE_CLASS (TYPE_MODE (type
)) == MODE_INT
7589 && GET_MODE_SIZE (TYPE_MODE (type
)) <= UNITS_PER_WORD
7590 && GET_MODE_SIZE (GET_MODE (op0
)) <= UNITS_PER_WORD
)
7591 op0
= gen_lowpart (TYPE_MODE (type
), op0
);
7592 else if (GET_CODE (op0
) != MEM
)
7594 /* If the operand is not a MEM, force it into memory. Since we
7595 are going to be be changing the mode of the MEM, don't call
7596 force_const_mem for constants because we don't allow pool
7597 constants to change mode. */
7598 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7600 if (TREE_ADDRESSABLE (exp
))
7603 if (target
== 0 || GET_MODE (target
) != TYPE_MODE (inner_type
))
7605 = assign_stack_temp_for_type
7606 (TYPE_MODE (inner_type
),
7607 GET_MODE_SIZE (TYPE_MODE (inner_type
)), 0, inner_type
);
7609 emit_move_insn (target
, op0
);
7613 /* At this point, OP0 is in the correct mode. If the output type is such
7614 that the operand is known to be aligned, indicate that it is.
7615 Otherwise, we need only be concerned about alignment for non-BLKmode
7617 if (GET_CODE (op0
) == MEM
)
7619 op0
= copy_rtx (op0
);
7621 if (TYPE_ALIGN_OK (type
))
7622 set_mem_align (op0
, MAX (MEM_ALIGN (op0
), TYPE_ALIGN (type
)));
7623 else if (TYPE_MODE (type
) != BLKmode
&& STRICT_ALIGNMENT
7624 && MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (TYPE_MODE (type
)))
7626 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7627 HOST_WIDE_INT temp_size
7628 = MAX (int_size_in_bytes (inner_type
),
7629 (HOST_WIDE_INT
) GET_MODE_SIZE (TYPE_MODE (type
)));
7630 rtx
new = assign_stack_temp_for_type (TYPE_MODE (type
),
7631 temp_size
, 0, type
);
7632 rtx new_with_op0_mode
= adjust_address (new, GET_MODE (op0
), 0);
7634 if (TREE_ADDRESSABLE (exp
))
7637 if (GET_MODE (op0
) == BLKmode
)
7638 emit_block_move (new_with_op0_mode
, op0
,
7639 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type
))),
7640 (modifier
== EXPAND_STACK_PARM
7641 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
7643 emit_move_insn (new_with_op0_mode
, op0
);
7648 op0
= adjust_address (op0
, TYPE_MODE (type
), 0);
7654 this_optab
= ! unsignedp
&& flag_trapv
7655 && (GET_MODE_CLASS (mode
) == MODE_INT
)
7656 ? addv_optab
: add_optab
;
7658 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7659 something else, make sure we add the register to the constant and
7660 then to the other thing. This case can occur during strength
7661 reduction and doing it this way will produce better code if the
7662 frame pointer or argument pointer is eliminated.
7664 fold-const.c will ensure that the constant is always in the inner
7665 PLUS_EXPR, so the only case we need to do anything about is if
7666 sp, ap, or fp is our second argument, in which case we must swap
7667 the innermost first argument and our second argument. */
7669 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
7670 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1)) == INTEGER_CST
7671 && TREE_CODE (TREE_OPERAND (exp
, 1)) == RTL_EXPR
7672 && (RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == frame_pointer_rtx
7673 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == stack_pointer_rtx
7674 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == arg_pointer_rtx
))
7676 tree t
= TREE_OPERAND (exp
, 1);
7678 TREE_OPERAND (exp
, 1) = TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
7679 TREE_OPERAND (TREE_OPERAND (exp
, 0), 0) = t
;
7682 /* If the result is to be ptr_mode and we are adding an integer to
7683 something, we might be forming a constant. So try to use
7684 plus_constant. If it produces a sum and we can't accept it,
7685 use force_operand. This allows P = &ARR[const] to generate
7686 efficient code on machines where a SYMBOL_REF is not a valid
7689 If this is an EXPAND_SUM call, always return the sum. */
7690 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
7691 || (mode
== ptr_mode
&& (unsignedp
|| ! flag_trapv
)))
7693 if (modifier
== EXPAND_STACK_PARM
)
7695 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
7696 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
7697 && TREE_CONSTANT (TREE_OPERAND (exp
, 1)))
7701 op1
= expand_expr (TREE_OPERAND (exp
, 1), subtarget
, VOIDmode
,
7703 /* Use immed_double_const to ensure that the constant is
7704 truncated according to the mode of OP1, then sign extended
7705 to a HOST_WIDE_INT. Using the constant directly can result
7706 in non-canonical RTL in a 64x32 cross compile. */
7708 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 0)),
7710 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))));
7711 op1
= plus_constant (op1
, INTVAL (constant_part
));
7712 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7713 op1
= force_operand (op1
, target
);
7717 else if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7718 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
7719 && TREE_CONSTANT (TREE_OPERAND (exp
, 0)))
7723 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7724 (modifier
== EXPAND_INITIALIZER
7725 ? EXPAND_INITIALIZER
: EXPAND_SUM
));
7726 if (! CONSTANT_P (op0
))
7728 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7729 VOIDmode
, modifier
);
7730 /* Return a PLUS if modifier says it's OK. */
7731 if (modifier
== EXPAND_SUM
7732 || modifier
== EXPAND_INITIALIZER
)
7733 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
7736 /* Use immed_double_const to ensure that the constant is
7737 truncated according to the mode of OP1, then sign extended
7738 to a HOST_WIDE_INT. Using the constant directly can result
7739 in non-canonical RTL in a 64x32 cross compile. */
7741 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)),
7743 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7744 op0
= plus_constant (op0
, INTVAL (constant_part
));
7745 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7746 op0
= force_operand (op0
, target
);
7751 /* No sense saving up arithmetic to be done
7752 if it's all in the wrong mode to form part of an address.
7753 And force_operand won't know whether to sign-extend or
7755 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7756 || mode
!= ptr_mode
)
7758 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7759 subtarget
, &op0
, &op1
, 0);
7760 if (op0
== const0_rtx
)
7762 if (op1
== const0_rtx
)
7767 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7768 subtarget
, &op0
, &op1
, modifier
);
7769 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
7772 /* For initializers, we are allowed to return a MINUS of two
7773 symbolic constants. Here we handle all cases when both operands
7775 /* Handle difference of two symbolic constants,
7776 for the sake of an initializer. */
7777 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7778 && really_constant_p (TREE_OPERAND (exp
, 0))
7779 && really_constant_p (TREE_OPERAND (exp
, 1)))
7781 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7782 NULL_RTX
, &op0
, &op1
, modifier
);
7784 /* If the last operand is a CONST_INT, use plus_constant of
7785 the negated constant. Else make the MINUS. */
7786 if (GET_CODE (op1
) == CONST_INT
)
7787 return plus_constant (op0
, - INTVAL (op1
));
7789 return gen_rtx_MINUS (mode
, op0
, op1
);
7792 this_optab
= ! unsignedp
&& flag_trapv
7793 && (GET_MODE_CLASS(mode
) == MODE_INT
)
7794 ? subv_optab
: sub_optab
;
7796 /* No sense saving up arithmetic to be done
7797 if it's all in the wrong mode to form part of an address.
7798 And force_operand won't know whether to sign-extend or
7800 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7801 || mode
!= ptr_mode
)
7804 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7805 subtarget
, &op0
, &op1
, modifier
);
7807 /* Convert A - const to A + (-const). */
7808 if (GET_CODE (op1
) == CONST_INT
)
7810 op1
= negate_rtx (mode
, op1
);
7811 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
7817 /* If first operand is constant, swap them.
7818 Thus the following special case checks need only
7819 check the second operand. */
7820 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
7822 tree t1
= TREE_OPERAND (exp
, 0);
7823 TREE_OPERAND (exp
, 0) = TREE_OPERAND (exp
, 1);
7824 TREE_OPERAND (exp
, 1) = t1
;
7827 /* Attempt to return something suitable for generating an
7828 indexed address, for machines that support that. */
7830 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
7831 && host_integerp (TREE_OPERAND (exp
, 1), 0))
7833 tree exp1
= TREE_OPERAND (exp
, 1);
7835 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7838 if (GET_CODE (op0
) != REG
)
7839 op0
= force_operand (op0
, NULL_RTX
);
7840 if (GET_CODE (op0
) != REG
)
7841 op0
= copy_to_mode_reg (mode
, op0
);
7843 return gen_rtx_MULT (mode
, op0
,
7844 gen_int_mode (tree_low_cst (exp1
, 0),
7845 TYPE_MODE (TREE_TYPE (exp1
))));
7848 if (modifier
== EXPAND_STACK_PARM
)
7851 /* Check for multiplying things that have been extended
7852 from a narrower type. If this machine supports multiplying
7853 in that narrower type with a result in the desired type,
7854 do it that way, and avoid the explicit type-conversion. */
7855 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == NOP_EXPR
7856 && TREE_CODE (type
) == INTEGER_TYPE
7857 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7858 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7859 && ((TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7860 && int_fits_type_p (TREE_OPERAND (exp
, 1),
7861 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7862 /* Don't use a widening multiply if a shift will do. */
7863 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
7864 > HOST_BITS_PER_WIDE_INT
)
7865 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))) < 0))
7867 (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
7868 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
7870 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))))
7871 /* If both operands are extended, they must either both
7872 be zero-extended or both be sign-extended. */
7873 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
7875 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))))))
7877 tree op0type
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0));
7878 enum machine_mode innermode
= TYPE_MODE (op0type
);
7879 bool zextend_p
= TREE_UNSIGNED (op0type
);
7880 optab other_optab
= zextend_p
? smul_widen_optab
: umul_widen_optab
;
7881 this_optab
= zextend_p
? umul_widen_optab
: smul_widen_optab
;
7883 if (mode
== GET_MODE_WIDER_MODE (innermode
))
7885 if (this_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
7887 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7888 expand_operands (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7889 TREE_OPERAND (exp
, 1),
7890 NULL_RTX
, &op0
, &op1
, 0);
7892 expand_operands (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7893 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7894 NULL_RTX
, &op0
, &op1
, 0);
7897 else if (other_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
7898 && innermode
== word_mode
)
7901 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7902 NULL_RTX
, VOIDmode
, 0);
7903 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7904 op1
= convert_modes (innermode
, mode
,
7905 expand_expr (TREE_OPERAND (exp
, 1),
7906 NULL_RTX
, VOIDmode
, 0),
7909 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7910 NULL_RTX
, VOIDmode
, 0);
7911 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
7912 unsignedp
, OPTAB_LIB_WIDEN
);
7913 hipart
= gen_highpart (innermode
, temp
);
7914 htem
= expand_mult_highpart_adjust (innermode
, hipart
,
7918 emit_move_insn (hipart
, htem
);
7923 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7924 subtarget
, &op0
, &op1
, 0);
7925 return expand_mult (mode
, op0
, op1
, target
, unsignedp
);
7927 case TRUNC_DIV_EXPR
:
7928 case FLOOR_DIV_EXPR
:
7930 case ROUND_DIV_EXPR
:
7931 case EXACT_DIV_EXPR
:
7932 if (modifier
== EXPAND_STACK_PARM
)
7934 /* Possible optimization: compute the dividend with EXPAND_SUM
7935 then if the divisor is constant can optimize the case
7936 where some terms of the dividend have coeffs divisible by it. */
7937 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7938 subtarget
, &op0
, &op1
, 0);
7939 return expand_divmod (0, code
, mode
, op0
, op1
, target
, unsignedp
);
7942 /* Emit a/b as a*(1/b). Later we may manage CSE the reciprocal saving
7943 expensive divide. If not, combine will rebuild the original
7945 if (flag_unsafe_math_optimizations
&& optimize
&& !optimize_size
7946 && TREE_CODE (type
) == REAL_TYPE
7947 && !real_onep (TREE_OPERAND (exp
, 0)))
7948 return expand_expr (build (MULT_EXPR
, type
, TREE_OPERAND (exp
, 0),
7949 build (RDIV_EXPR
, type
,
7950 build_real (type
, dconst1
),
7951 TREE_OPERAND (exp
, 1))),
7952 target
, tmode
, modifier
);
7953 this_optab
= sdiv_optab
;
7956 case TRUNC_MOD_EXPR
:
7957 case FLOOR_MOD_EXPR
:
7959 case ROUND_MOD_EXPR
:
7960 if (modifier
== EXPAND_STACK_PARM
)
7962 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7963 subtarget
, &op0
, &op1
, 0);
7964 return expand_divmod (1, code
, mode
, op0
, op1
, target
, unsignedp
);
7966 case FIX_ROUND_EXPR
:
7967 case FIX_FLOOR_EXPR
:
7969 abort (); /* Not used for C. */
7971 case FIX_TRUNC_EXPR
:
7972 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
7973 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
7974 target
= gen_reg_rtx (mode
);
7975 expand_fix (target
, op0
, unsignedp
);
7979 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
7980 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
7981 target
= gen_reg_rtx (mode
);
7982 /* expand_float can't figure out what to do if FROM has VOIDmode.
7983 So give it the correct mode. With -O, cse will optimize this. */
7984 if (GET_MODE (op0
) == VOIDmode
)
7985 op0
= copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
7987 expand_float (target
, op0
,
7988 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7992 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7993 if (modifier
== EXPAND_STACK_PARM
)
7995 temp
= expand_unop (mode
,
7996 ! unsignedp
&& flag_trapv
7997 && (GET_MODE_CLASS(mode
) == MODE_INT
)
7998 ? negv_optab
: neg_optab
, op0
, target
, 0);
8004 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8005 if (modifier
== EXPAND_STACK_PARM
)
8008 /* ABS_EXPR is not valid for complex arguments. */
8009 if (GET_MODE_CLASS (mode
) == MODE_COMPLEX_INT
8010 || GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
)
8013 /* Unsigned abs is simply the operand. Testing here means we don't
8014 risk generating incorrect code below. */
8015 if (TREE_UNSIGNED (type
))
8018 return expand_abs (mode
, op0
, target
, unsignedp
,
8019 safe_from_p (target
, TREE_OPERAND (exp
, 0), 1));
8023 target
= original_target
;
8025 || modifier
== EXPAND_STACK_PARM
8026 || (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
8027 || GET_MODE (target
) != mode
8028 || (GET_CODE (target
) == REG
8029 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
8030 target
= gen_reg_rtx (mode
);
8031 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
8032 target
, &op0
, &op1
, 0);
8034 /* First try to do it with a special MIN or MAX instruction.
8035 If that does not win, use a conditional jump to select the proper
8037 this_optab
= (unsignedp
8038 ? (code
== MIN_EXPR
? umin_optab
: umax_optab
)
8039 : (code
== MIN_EXPR
? smin_optab
: smax_optab
));
8041 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
8046 /* At this point, a MEM target is no longer useful; we will get better
8049 if (GET_CODE (target
) == MEM
)
8050 target
= gen_reg_rtx (mode
);
8052 /* If op1 was placed in target, swap op0 and op1. */
8053 if (target
!= op0
&& target
== op1
)
8061 emit_move_insn (target
, op0
);
8063 op0
= gen_label_rtx ();
8065 /* If this mode is an integer too wide to compare properly,
8066 compare word by word. Rely on cse to optimize constant cases. */
8067 if (GET_MODE_CLASS (mode
) == MODE_INT
8068 && ! can_compare_p (GE
, mode
, ccp_jump
))
8070 if (code
== MAX_EXPR
)
8071 do_jump_by_parts_greater_rtx (mode
, unsignedp
, target
, op1
,
8074 do_jump_by_parts_greater_rtx (mode
, unsignedp
, op1
, target
,
8079 do_compare_rtx_and_jump (target
, op1
, code
== MAX_EXPR
? GE
: LE
,
8080 unsignedp
, mode
, NULL_RTX
, NULL_RTX
, op0
);
8082 emit_move_insn (target
, op1
);
8087 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8088 if (modifier
== EXPAND_STACK_PARM
)
8090 temp
= expand_unop (mode
, one_cmpl_optab
, op0
, target
, 1);
8095 /* ??? Can optimize bitwise operations with one arg constant.
8096 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8097 and (a bitwise1 b) bitwise2 b (etc)
8098 but that is probably not worth while. */
8100 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8101 boolean values when we want in all cases to compute both of them. In
8102 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8103 as actual zero-or-1 values and then bitwise anding. In cases where
8104 there cannot be any side effects, better code would be made by
8105 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8106 how to recognize those cases. */
8108 case TRUTH_AND_EXPR
:
8110 this_optab
= and_optab
;
8115 this_optab
= ior_optab
;
8118 case TRUTH_XOR_EXPR
:
8120 this_optab
= xor_optab
;
8127 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
8129 if (modifier
== EXPAND_STACK_PARM
)
8131 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8132 return expand_shift (code
, mode
, op0
, TREE_OPERAND (exp
, 1), target
,
8135 /* Could determine the answer when only additive constants differ. Also,
8136 the addition of one can be handled by changing the condition. */
8143 case UNORDERED_EXPR
:
8150 temp
= do_store_flag (exp
,
8151 modifier
!= EXPAND_STACK_PARM
? target
: NULL_RTX
,
8152 tmode
!= VOIDmode
? tmode
: mode
, 0);
8156 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8157 if (code
== NE_EXPR
&& integer_zerop (TREE_OPERAND (exp
, 1))
8159 && GET_CODE (original_target
) == REG
8160 && (GET_MODE (original_target
)
8161 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
8163 temp
= expand_expr (TREE_OPERAND (exp
, 0), original_target
,
8166 /* If temp is constant, we can just compute the result. */
8167 if (GET_CODE (temp
) == CONST_INT
)
8169 if (INTVAL (temp
) != 0)
8170 emit_move_insn (target
, const1_rtx
);
8172 emit_move_insn (target
, const0_rtx
);
8177 if (temp
!= original_target
)
8179 enum machine_mode mode1
= GET_MODE (temp
);
8180 if (mode1
== VOIDmode
)
8181 mode1
= tmode
!= VOIDmode
? tmode
: mode
;
8183 temp
= copy_to_mode_reg (mode1
, temp
);
8186 op1
= gen_label_rtx ();
8187 emit_cmp_and_jump_insns (temp
, const0_rtx
, EQ
, NULL_RTX
,
8188 GET_MODE (temp
), unsignedp
, op1
);
8189 emit_move_insn (temp
, const1_rtx
);
8194 /* If no set-flag instruction, must generate a conditional
8195 store into a temporary variable. Drop through
8196 and handle this like && and ||. */
8198 case TRUTH_ANDIF_EXPR
:
8199 case TRUTH_ORIF_EXPR
:
8202 || modifier
== EXPAND_STACK_PARM
8203 || ! safe_from_p (target
, exp
, 1)
8204 /* Make sure we don't have a hard reg (such as function's return
8205 value) live across basic blocks, if not optimizing. */
8206 || (!optimize
&& GET_CODE (target
) == REG
8207 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)))
8208 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
8211 emit_clr_insn (target
);
8213 op1
= gen_label_rtx ();
8214 jumpifnot (exp
, op1
);
8217 emit_0_to_1_insn (target
);
8220 return ignore
? const0_rtx
: target
;
8222 case TRUTH_NOT_EXPR
:
8223 if (modifier
== EXPAND_STACK_PARM
)
8225 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
8226 /* The parser is careful to generate TRUTH_NOT_EXPR
8227 only with operands that are always zero or one. */
8228 temp
= expand_binop (mode
, xor_optab
, op0
, const1_rtx
,
8229 target
, 1, OPTAB_LIB_WIDEN
);
8235 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
8237 return expand_expr_real (TREE_OPERAND (exp
, 1),
8238 (ignore
? const0_rtx
: target
),
8239 VOIDmode
, modifier
, alt_rtl
);
8242 /* If we would have a "singleton" (see below) were it not for a
8243 conversion in each arm, bring that conversion back out. */
8244 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
8245 && TREE_CODE (TREE_OPERAND (exp
, 2)) == NOP_EXPR
8246 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0))
8247 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 2), 0))))
8249 tree iftrue
= TREE_OPERAND (TREE_OPERAND (exp
, 1), 0);
8250 tree iffalse
= TREE_OPERAND (TREE_OPERAND (exp
, 2), 0);
8252 if ((TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '2'
8253 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
8254 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '2'
8255 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0))
8256 || (TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '1'
8257 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
8258 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '1'
8259 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0)))
8260 return expand_expr (build1 (NOP_EXPR
, type
,
8261 build (COND_EXPR
, TREE_TYPE (iftrue
),
8262 TREE_OPERAND (exp
, 0),
8264 target
, tmode
, modifier
);
8268 /* Note that COND_EXPRs whose type is a structure or union
8269 are required to be constructed to contain assignments of
8270 a temporary variable, so that we can evaluate them here
8271 for side effect only. If type is void, we must do likewise. */
8273 /* If an arm of the branch requires a cleanup,
8274 only that cleanup is performed. */
8277 tree binary_op
= 0, unary_op
= 0;
8279 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8280 convert it to our mode, if necessary. */
8281 if (integer_onep (TREE_OPERAND (exp
, 1))
8282 && integer_zerop (TREE_OPERAND (exp
, 2))
8283 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8287 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
8292 if (modifier
== EXPAND_STACK_PARM
)
8294 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, mode
, modifier
);
8295 if (GET_MODE (op0
) == mode
)
8299 target
= gen_reg_rtx (mode
);
8300 convert_move (target
, op0
, unsignedp
);
8304 /* Check for X ? A + B : A. If we have this, we can copy A to the
8305 output and conditionally add B. Similarly for unary operations.
8306 Don't do this if X has side-effects because those side effects
8307 might affect A or B and the "?" operation is a sequence point in
8308 ANSI. (operand_equal_p tests for side effects.) */
8310 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '2'
8311 && operand_equal_p (TREE_OPERAND (exp
, 2),
8312 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
8313 singleton
= TREE_OPERAND (exp
, 2), binary_op
= TREE_OPERAND (exp
, 1);
8314 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '2'
8315 && operand_equal_p (TREE_OPERAND (exp
, 1),
8316 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
8317 singleton
= TREE_OPERAND (exp
, 1), binary_op
= TREE_OPERAND (exp
, 2);
8318 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '1'
8319 && operand_equal_p (TREE_OPERAND (exp
, 2),
8320 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
8321 singleton
= TREE_OPERAND (exp
, 2), unary_op
= TREE_OPERAND (exp
, 1);
8322 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '1'
8323 && operand_equal_p (TREE_OPERAND (exp
, 1),
8324 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
8325 singleton
= TREE_OPERAND (exp
, 1), unary_op
= TREE_OPERAND (exp
, 2);
8327 /* If we are not to produce a result, we have no target. Otherwise,
8328 if a target was specified use it; it will not be used as an
8329 intermediate target unless it is safe. If no target, use a
8334 else if (modifier
== EXPAND_STACK_PARM
)
8335 temp
= assign_temp (type
, 0, 0, 1);
8336 else if (original_target
8337 && (safe_from_p (original_target
, TREE_OPERAND (exp
, 0), 1)
8338 || (singleton
&& GET_CODE (original_target
) == REG
8339 && REGNO (original_target
) >= FIRST_PSEUDO_REGISTER
8340 && original_target
== var_rtx (singleton
)))
8341 && GET_MODE (original_target
) == mode
8342 #ifdef HAVE_conditional_move
8343 && (! can_conditionally_move_p (mode
)
8344 || GET_CODE (original_target
) == REG
8345 || TREE_ADDRESSABLE (type
))
8347 && (GET_CODE (original_target
) != MEM
8348 || TREE_ADDRESSABLE (type
)))
8349 temp
= original_target
;
8350 else if (TREE_ADDRESSABLE (type
))
8353 temp
= assign_temp (type
, 0, 0, 1);
8355 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8356 do the test of X as a store-flag operation, do this as
8357 A + ((X != 0) << log C). Similarly for other simple binary
8358 operators. Only do for C == 1 if BRANCH_COST is low. */
8359 if (temp
&& singleton
&& binary_op
8360 && (TREE_CODE (binary_op
) == PLUS_EXPR
8361 || TREE_CODE (binary_op
) == MINUS_EXPR
8362 || TREE_CODE (binary_op
) == BIT_IOR_EXPR
8363 || TREE_CODE (binary_op
) == BIT_XOR_EXPR
)
8364 && (BRANCH_COST
>= 3 ? integer_pow2p (TREE_OPERAND (binary_op
, 1))
8365 : integer_onep (TREE_OPERAND (binary_op
, 1)))
8366 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8370 optab boptab
= (TREE_CODE (binary_op
) == PLUS_EXPR
8371 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8372 ? addv_optab
: add_optab
)
8373 : TREE_CODE (binary_op
) == MINUS_EXPR
8374 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8375 ? subv_optab
: sub_optab
)
8376 : TREE_CODE (binary_op
) == BIT_IOR_EXPR
? ior_optab
8379 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8380 if (singleton
== TREE_OPERAND (exp
, 1))
8381 cond
= invert_truthvalue (TREE_OPERAND (exp
, 0));
8383 cond
= TREE_OPERAND (exp
, 0);
8385 result
= do_store_flag (cond
, (safe_from_p (temp
, singleton
, 1)
8387 mode
, BRANCH_COST
<= 1);
8389 if (result
!= 0 && ! integer_onep (TREE_OPERAND (binary_op
, 1)))
8390 result
= expand_shift (LSHIFT_EXPR
, mode
, result
,
8391 build_int_2 (tree_log2
8395 (safe_from_p (temp
, singleton
, 1)
8396 ? temp
: NULL_RTX
), 0);
8400 op1
= expand_expr (singleton
, NULL_RTX
, VOIDmode
, 0);
8401 return expand_binop (mode
, boptab
, op1
, result
, temp
,
8402 unsignedp
, OPTAB_LIB_WIDEN
);
8406 do_pending_stack_adjust ();
8408 op0
= gen_label_rtx ();
8410 if (singleton
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0)))
8414 /* If the target conflicts with the other operand of the
8415 binary op, we can't use it. Also, we can't use the target
8416 if it is a hard register, because evaluating the condition
8417 might clobber it. */
8419 && ! safe_from_p (temp
, TREE_OPERAND (binary_op
, 1), 1))
8420 || (GET_CODE (temp
) == REG
8421 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
))
8422 temp
= gen_reg_rtx (mode
);
8423 store_expr (singleton
, temp
,
8424 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8427 expand_expr (singleton
,
8428 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8429 if (singleton
== TREE_OPERAND (exp
, 1))
8430 jumpif (TREE_OPERAND (exp
, 0), op0
);
8432 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8434 start_cleanup_deferral ();
8435 if (binary_op
&& temp
== 0)
8436 /* Just touch the other operand. */
8437 expand_expr (TREE_OPERAND (binary_op
, 1),
8438 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8440 store_expr (build (TREE_CODE (binary_op
), type
,
8441 make_tree (type
, temp
),
8442 TREE_OPERAND (binary_op
, 1)),
8443 temp
, modifier
== EXPAND_STACK_PARM
? 2 : 0);
8445 store_expr (build1 (TREE_CODE (unary_op
), type
,
8446 make_tree (type
, temp
)),
8447 temp
, modifier
== EXPAND_STACK_PARM
? 2 : 0);
8450 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8451 comparison operator. If we have one of these cases, set the
8452 output to A, branch on A (cse will merge these two references),
8453 then set the output to FOO. */
8455 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8456 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8457 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8458 TREE_OPERAND (exp
, 1), 0)
8459 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8460 || TREE_CODE (TREE_OPERAND (exp
, 1)) == SAVE_EXPR
)
8461 && safe_from_p (temp
, TREE_OPERAND (exp
, 2), 1))
8463 if (GET_CODE (temp
) == REG
8464 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8465 temp
= gen_reg_rtx (mode
);
8466 store_expr (TREE_OPERAND (exp
, 1), temp
,
8467 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8468 jumpif (TREE_OPERAND (exp
, 0), op0
);
8470 start_cleanup_deferral ();
8471 if (TREE_TYPE (TREE_OPERAND (exp
, 2)) != void_type_node
)
8472 store_expr (TREE_OPERAND (exp
, 2), temp
,
8473 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8475 expand_expr (TREE_OPERAND (exp
, 2),
8476 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8480 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8481 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8482 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8483 TREE_OPERAND (exp
, 2), 0)
8484 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8485 || TREE_CODE (TREE_OPERAND (exp
, 2)) == SAVE_EXPR
)
8486 && safe_from_p (temp
, TREE_OPERAND (exp
, 1), 1))
8488 if (GET_CODE (temp
) == REG
8489 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8490 temp
= gen_reg_rtx (mode
);
8491 store_expr (TREE_OPERAND (exp
, 2), temp
,
8492 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8493 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8495 start_cleanup_deferral ();
8496 if (TREE_TYPE (TREE_OPERAND (exp
, 1)) != void_type_node
)
8497 store_expr (TREE_OPERAND (exp
, 1), temp
,
8498 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8500 expand_expr (TREE_OPERAND (exp
, 1),
8501 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8506 op1
= gen_label_rtx ();
8507 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8509 start_cleanup_deferral ();
8511 /* One branch of the cond can be void, if it never returns. For
8512 example A ? throw : E */
8514 && TREE_TYPE (TREE_OPERAND (exp
, 1)) != void_type_node
)
8515 store_expr (TREE_OPERAND (exp
, 1), temp
,
8516 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8518 expand_expr (TREE_OPERAND (exp
, 1),
8519 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8520 end_cleanup_deferral ();
8522 emit_jump_insn (gen_jump (op1
));
8525 start_cleanup_deferral ();
8527 && TREE_TYPE (TREE_OPERAND (exp
, 2)) != void_type_node
)
8528 store_expr (TREE_OPERAND (exp
, 2), temp
,
8529 modifier
== EXPAND_STACK_PARM
? 2 : 0);
8531 expand_expr (TREE_OPERAND (exp
, 2),
8532 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8535 end_cleanup_deferral ();
8546 /* Something needs to be initialized, but we didn't know
8547 where that thing was when building the tree. For example,
8548 it could be the return value of a function, or a parameter
8549 to a function which lays down in the stack, or a temporary
8550 variable which must be passed by reference.
8552 We guarantee that the expression will either be constructed
8553 or copied into our original target. */
8555 tree slot
= TREE_OPERAND (exp
, 0);
8556 tree cleanups
= NULL_TREE
;
8559 if (TREE_CODE (slot
) != VAR_DECL
)
8563 target
= original_target
;
8565 /* Set this here so that if we get a target that refers to a
8566 register variable that's already been used, put_reg_into_stack
8567 knows that it should fix up those uses. */
8568 TREE_USED (slot
) = 1;
8572 if (DECL_RTL_SET_P (slot
))
8574 target
= DECL_RTL (slot
);
8575 /* If we have already expanded the slot, so don't do
8577 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8582 target
= assign_temp (type
, 2, 0, 1);
8583 /* All temp slots at this level must not conflict. */
8584 preserve_temp_slots (target
);
8585 SET_DECL_RTL (slot
, target
);
8586 if (TREE_ADDRESSABLE (slot
))
8587 put_var_into_stack (slot
, /*rescan=*/false);
8589 /* Since SLOT is not known to the called function
8590 to belong to its stack frame, we must build an explicit
8591 cleanup. This case occurs when we must build up a reference
8592 to pass the reference as an argument. In this case,
8593 it is very likely that such a reference need not be
8596 if (TREE_OPERAND (exp
, 2) == 0)
8597 TREE_OPERAND (exp
, 2)
8598 = (*lang_hooks
.maybe_build_cleanup
) (slot
);
8599 cleanups
= TREE_OPERAND (exp
, 2);
8604 /* This case does occur, when expanding a parameter which
8605 needs to be constructed on the stack. The target
8606 is the actual stack address that we want to initialize.
8607 The function we call will perform the cleanup in this case. */
8609 /* If we have already assigned it space, use that space,
8610 not target that we were passed in, as our target
8611 parameter is only a hint. */
8612 if (DECL_RTL_SET_P (slot
))
8614 target
= DECL_RTL (slot
);
8615 /* If we have already expanded the slot, so don't do
8617 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8622 SET_DECL_RTL (slot
, target
);
8623 /* If we must have an addressable slot, then make sure that
8624 the RTL that we just stored in slot is OK. */
8625 if (TREE_ADDRESSABLE (slot
))
8626 put_var_into_stack (slot
, /*rescan=*/true);
8630 exp1
= TREE_OPERAND (exp
, 3) = TREE_OPERAND (exp
, 1);
8631 /* Mark it as expanded. */
8632 TREE_OPERAND (exp
, 1) = NULL_TREE
;
8634 store_expr (exp1
, target
, modifier
== EXPAND_STACK_PARM
? 2 : 0);
8636 expand_decl_cleanup_eh (NULL_TREE
, cleanups
, CLEANUP_EH_ONLY (exp
));
8643 tree lhs
= TREE_OPERAND (exp
, 0);
8644 tree rhs
= TREE_OPERAND (exp
, 1);
8646 temp
= expand_assignment (lhs
, rhs
, ! ignore
);
8652 /* If lhs is complex, expand calls in rhs before computing it.
8653 That's so we don't compute a pointer and save it over a
8654 call. If lhs is simple, compute it first so we can give it
8655 as a target if the rhs is just a call. This avoids an
8656 extra temp and copy and that prevents a partial-subsumption
8657 which makes bad code. Actually we could treat
8658 component_ref's of vars like vars. */
8660 tree lhs
= TREE_OPERAND (exp
, 0);
8661 tree rhs
= TREE_OPERAND (exp
, 1);
8665 /* Check for |= or &= of a bitfield of size one into another bitfield
8666 of size 1. In this case, (unless we need the result of the
8667 assignment) we can do this more efficiently with a
8668 test followed by an assignment, if necessary.
8670 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8671 things change so we do, this code should be enhanced to
8674 && TREE_CODE (lhs
) == COMPONENT_REF
8675 && (TREE_CODE (rhs
) == BIT_IOR_EXPR
8676 || TREE_CODE (rhs
) == BIT_AND_EXPR
)
8677 && TREE_OPERAND (rhs
, 0) == lhs
8678 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == COMPONENT_REF
8679 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs
, 1)))
8680 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs
, 1), 1))))
8682 rtx label
= gen_label_rtx ();
8684 do_jump (TREE_OPERAND (rhs
, 1),
8685 TREE_CODE (rhs
) == BIT_IOR_EXPR
? label
: 0,
8686 TREE_CODE (rhs
) == BIT_AND_EXPR
? label
: 0);
8687 expand_assignment (lhs
, convert (TREE_TYPE (rhs
),
8688 (TREE_CODE (rhs
) == BIT_IOR_EXPR
8690 : integer_zero_node
)),
8692 do_pending_stack_adjust ();
8697 temp
= expand_assignment (lhs
, rhs
, ! ignore
);
8703 if (!TREE_OPERAND (exp
, 0))
8704 expand_null_return ();
8706 expand_return (TREE_OPERAND (exp
, 0));
8709 case PREINCREMENT_EXPR
:
8710 case PREDECREMENT_EXPR
:
8711 return expand_increment (exp
, 0, ignore
);
8713 case POSTINCREMENT_EXPR
:
8714 case POSTDECREMENT_EXPR
:
8715 /* Faster to treat as pre-increment if result is not used. */
8716 return expand_increment (exp
, ! ignore
, ignore
);
8719 if (modifier
== EXPAND_STACK_PARM
)
8721 /* Are we taking the address of a nested function? */
8722 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == FUNCTION_DECL
8723 && decl_function_context (TREE_OPERAND (exp
, 0)) != 0
8724 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp
, 0))
8725 && ! TREE_STATIC (exp
))
8727 op0
= trampoline_address (TREE_OPERAND (exp
, 0));
8728 op0
= force_operand (op0
, target
);
8730 /* If we are taking the address of something erroneous, just
8732 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ERROR_MARK
)
8734 /* If we are taking the address of a constant and are at the
8735 top level, we have to use output_constant_def since we can't
8736 call force_const_mem at top level. */
8738 && (TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
8739 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0)))
8741 op0
= XEXP (output_constant_def (TREE_OPERAND (exp
, 0), 0), 0);
8744 /* We make sure to pass const0_rtx down if we came in with
8745 ignore set, to avoid doing the cleanups twice for something. */
8746 op0
= expand_expr (TREE_OPERAND (exp
, 0),
8747 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
,
8748 (modifier
== EXPAND_INITIALIZER
8749 ? modifier
: EXPAND_CONST_ADDRESS
));
8751 /* If we are going to ignore the result, OP0 will have been set
8752 to const0_rtx, so just return it. Don't get confused and
8753 think we are taking the address of the constant. */
8757 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8758 clever and returns a REG when given a MEM. */
8759 op0
= protect_from_queue (op0
, 1);
8761 /* We would like the object in memory. If it is a constant, we can
8762 have it be statically allocated into memory. For a non-constant,
8763 we need to allocate some memory and store the value into it. */
8765 if (CONSTANT_P (op0
))
8766 op0
= force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
8768 else if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
8769 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
8770 || GET_CODE (op0
) == PARALLEL
|| GET_CODE (op0
) == LO_SUM
)
8772 /* If the operand is a SAVE_EXPR, we can deal with this by
8773 forcing the SAVE_EXPR into memory. */
8774 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == SAVE_EXPR
)
8776 put_var_into_stack (TREE_OPERAND (exp
, 0),
8778 op0
= SAVE_EXPR_RTL (TREE_OPERAND (exp
, 0));
8782 /* If this object is in a register, it can't be BLKmode. */
8783 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
8784 rtx memloc
= assign_temp (inner_type
, 1, 1, 1);
8786 if (GET_CODE (op0
) == PARALLEL
)
8787 /* Handle calls that pass values in multiple
8788 non-contiguous locations. The Irix 6 ABI has examples
8790 emit_group_store (memloc
, op0
, inner_type
,
8791 int_size_in_bytes (inner_type
));
8793 emit_move_insn (memloc
, op0
);
8799 if (GET_CODE (op0
) != MEM
)
8802 mark_temp_addr_taken (op0
);
8803 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
8805 op0
= XEXP (op0
, 0);
8806 if (GET_MODE (op0
) == Pmode
&& mode
== ptr_mode
)
8807 op0
= convert_memory_address (ptr_mode
, op0
);
8811 /* If OP0 is not aligned as least as much as the type requires, we
8812 need to make a temporary, copy OP0 to it, and take the address of
8813 the temporary. We want to use the alignment of the type, not of
8814 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8815 the test for BLKmode means that can't happen. The test for
8816 BLKmode is because we never make mis-aligned MEMs with
8819 We don't need to do this at all if the machine doesn't have
8820 strict alignment. */
8821 if (STRICT_ALIGNMENT
&& GET_MODE (op0
) == BLKmode
8822 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
8824 && MEM_ALIGN (op0
) < BIGGEST_ALIGNMENT
)
8826 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
8829 if (TYPE_ALIGN_OK (inner_type
))
8832 if (TREE_ADDRESSABLE (inner_type
))
8834 /* We can't make a bitwise copy of this object, so fail. */
8835 error ("cannot take the address of an unaligned member");
8839 new = assign_stack_temp_for_type
8840 (TYPE_MODE (inner_type
),
8841 MEM_SIZE (op0
) ? INTVAL (MEM_SIZE (op0
))
8842 : int_size_in_bytes (inner_type
),
8843 1, build_qualified_type (inner_type
,
8844 (TYPE_QUALS (inner_type
)
8845 | TYPE_QUAL_CONST
)));
8847 emit_block_move (new, op0
, expr_size (TREE_OPERAND (exp
, 0)),
8848 (modifier
== EXPAND_STACK_PARM
8849 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
8854 op0
= force_operand (XEXP (op0
, 0), target
);
8858 && GET_CODE (op0
) != REG
8859 && modifier
!= EXPAND_CONST_ADDRESS
8860 && modifier
!= EXPAND_INITIALIZER
8861 && modifier
!= EXPAND_SUM
)
8862 op0
= force_reg (Pmode
, op0
);
8864 if (GET_CODE (op0
) == REG
8865 && ! REG_USERVAR_P (op0
))
8866 mark_reg_pointer (op0
, TYPE_ALIGN (TREE_TYPE (type
)));
8868 if (GET_MODE (op0
) == Pmode
&& mode
== ptr_mode
)
8869 op0
= convert_memory_address (ptr_mode
, op0
);
8873 case ENTRY_VALUE_EXPR
:
8876 /* COMPLEX type for Extended Pascal & Fortran */
8879 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
8882 /* Get the rtx code of the operands. */
8883 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8884 op1
= expand_expr (TREE_OPERAND (exp
, 1), 0, VOIDmode
, 0);
8887 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
8891 /* Move the real (op0) and imaginary (op1) parts to their location. */
8892 emit_move_insn (gen_realpart (mode
, target
), op0
);
8893 emit_move_insn (gen_imagpart (mode
, target
), op1
);
8895 insns
= get_insns ();
8898 /* Complex construction should appear as a single unit. */
8899 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8900 each with a separate pseudo as destination.
8901 It's not correct for flow to treat them as a unit. */
8902 if (GET_CODE (target
) != CONCAT
)
8903 emit_no_conflict_block (insns
, target
, op0
, op1
, NULL_RTX
);
8911 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8912 return gen_realpart (mode
, op0
);
8915 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8916 return gen_imagpart (mode
, op0
);
8920 enum machine_mode partmode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
8924 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8927 target
= gen_reg_rtx (mode
);
8931 /* Store the realpart and the negated imagpart to target. */
8932 emit_move_insn (gen_realpart (partmode
, target
),
8933 gen_realpart (partmode
, op0
));
8935 imag_t
= gen_imagpart (partmode
, target
);
8936 temp
= expand_unop (partmode
,
8937 ! unsignedp
&& flag_trapv
8938 && (GET_MODE_CLASS(partmode
) == MODE_INT
)
8939 ? negv_optab
: neg_optab
,
8940 gen_imagpart (partmode
, op0
), imag_t
, 0);
8942 emit_move_insn (imag_t
, temp
);
8944 insns
= get_insns ();
8947 /* Conjugate should appear as a single unit
8948 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8949 each with a separate pseudo as destination.
8950 It's not correct for flow to treat them as a unit. */
8951 if (GET_CODE (target
) != CONCAT
)
8952 emit_no_conflict_block (insns
, target
, op0
, NULL_RTX
, NULL_RTX
);
8959 case TRY_CATCH_EXPR
:
8961 tree handler
= TREE_OPERAND (exp
, 1);
8963 expand_eh_region_start ();
8965 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8967 expand_eh_region_end_cleanup (handler
);
8972 case TRY_FINALLY_EXPR
:
8974 tree try_block
= TREE_OPERAND (exp
, 0);
8975 tree finally_block
= TREE_OPERAND (exp
, 1);
8977 if (!optimize
|| unsafe_for_reeval (finally_block
) > 1)
8979 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
8980 is not sufficient, so we cannot expand the block twice.
8981 So we play games with GOTO_SUBROUTINE_EXPR to let us
8982 expand the thing only once. */
8983 /* When not optimizing, we go ahead with this form since
8984 (1) user breakpoints operate more predictably without
8985 code duplication, and
8986 (2) we're not running any of the global optimizers
8987 that would explode in time/space with the highly
8988 connected CFG created by the indirect branching. */
8990 rtx finally_label
= gen_label_rtx ();
8991 rtx done_label
= gen_label_rtx ();
8992 rtx return_link
= gen_reg_rtx (Pmode
);
8993 tree cleanup
= build (GOTO_SUBROUTINE_EXPR
, void_type_node
,
8994 (tree
) finally_label
, (tree
) return_link
);
8995 TREE_SIDE_EFFECTS (cleanup
) = 1;
8997 /* Start a new binding layer that will keep track of all cleanup
8998 actions to be performed. */
8999 expand_start_bindings (2);
9000 target_temp_slot_level
= temp_slot_level
;
9002 expand_decl_cleanup (NULL_TREE
, cleanup
);
9003 op0
= expand_expr (try_block
, target
, tmode
, modifier
);
9005 preserve_temp_slots (op0
);
9006 expand_end_bindings (NULL_TREE
, 0, 0);
9007 emit_jump (done_label
);
9008 emit_label (finally_label
);
9009 expand_expr (finally_block
, const0_rtx
, VOIDmode
, 0);
9010 emit_indirect_jump (return_link
);
9011 emit_label (done_label
);
9015 expand_start_bindings (2);
9016 target_temp_slot_level
= temp_slot_level
;
9018 expand_decl_cleanup (NULL_TREE
, finally_block
);
9019 op0
= expand_expr (try_block
, target
, tmode
, modifier
);
9021 preserve_temp_slots (op0
);
9022 expand_end_bindings (NULL_TREE
, 0, 0);
9028 case GOTO_SUBROUTINE_EXPR
:
9030 rtx subr
= (rtx
) TREE_OPERAND (exp
, 0);
9031 rtx return_link
= *(rtx
*) &TREE_OPERAND (exp
, 1);
9032 rtx return_address
= gen_label_rtx ();
9033 emit_move_insn (return_link
,
9034 gen_rtx_LABEL_REF (Pmode
, return_address
));
9036 emit_label (return_address
);
9041 return expand_builtin_va_arg (TREE_OPERAND (exp
, 0), type
);
9044 return get_exception_pointer (cfun
);
9047 /* Function descriptors are not valid except for as
9048 initialization constants, and should not be expanded. */
9052 return (*lang_hooks
.expand_expr
) (exp
, original_target
, tmode
, modifier
,
9056 /* Here to do an ordinary binary operator, generating an instruction
9057 from the optab already placed in `this_optab'. */
9059 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
9060 subtarget
, &op0
, &op1
, 0);
9062 if (modifier
== EXPAND_STACK_PARM
)
9064 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
,
9065 unsignedp
, OPTAB_LIB_WIDEN
);
9071 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9072 when applied to the address of EXP produces an address known to be
9073 aligned more than BIGGEST_ALIGNMENT. */
9076 is_aligning_offset (tree offset
, tree exp
)
9078 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9079 while (TREE_CODE (offset
) == NON_LVALUE_EXPR
9080 || TREE_CODE (offset
) == NOP_EXPR
9081 || TREE_CODE (offset
) == CONVERT_EXPR
9082 || TREE_CODE (offset
) == WITH_RECORD_EXPR
)
9083 offset
= TREE_OPERAND (offset
, 0);
9085 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9086 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9087 if (TREE_CODE (offset
) != BIT_AND_EXPR
9088 || !host_integerp (TREE_OPERAND (offset
, 1), 1)
9089 || compare_tree_int (TREE_OPERAND (offset
, 1),
9090 BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
) <= 0
9091 || !exact_log2 (tree_low_cst (TREE_OPERAND (offset
, 1), 1) + 1) < 0)
9094 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9095 It must be NEGATE_EXPR. Then strip any more conversions. */
9096 offset
= TREE_OPERAND (offset
, 0);
9097 while (TREE_CODE (offset
) == NON_LVALUE_EXPR
9098 || TREE_CODE (offset
) == NOP_EXPR
9099 || TREE_CODE (offset
) == CONVERT_EXPR
)
9100 offset
= TREE_OPERAND (offset
, 0);
9102 if (TREE_CODE (offset
) != NEGATE_EXPR
)
9105 offset
= TREE_OPERAND (offset
, 0);
9106 while (TREE_CODE (offset
) == NON_LVALUE_EXPR
9107 || TREE_CODE (offset
) == NOP_EXPR
9108 || TREE_CODE (offset
) == CONVERT_EXPR
)
9109 offset
= TREE_OPERAND (offset
, 0);
9111 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9112 whose type is the same as EXP. */
9113 return (TREE_CODE (offset
) == ADDR_EXPR
9114 && (TREE_OPERAND (offset
, 0) == exp
9115 || (TREE_CODE (TREE_OPERAND (offset
, 0)) == PLACEHOLDER_EXPR
9116 && (TREE_TYPE (TREE_OPERAND (offset
, 0))
9117 == TREE_TYPE (exp
)))));
9120 /* Return the tree node if an ARG corresponds to a string constant or zero
9121 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9122 in bytes within the string that ARG is accessing. The type of the
9123 offset will be `sizetype'. */
9126 string_constant (tree arg
, tree
*ptr_offset
)
9130 if (TREE_CODE (arg
) == ADDR_EXPR
9131 && TREE_CODE (TREE_OPERAND (arg
, 0)) == STRING_CST
)
9133 *ptr_offset
= size_zero_node
;
9134 return TREE_OPERAND (arg
, 0);
9136 else if (TREE_CODE (arg
) == PLUS_EXPR
)
9138 tree arg0
= TREE_OPERAND (arg
, 0);
9139 tree arg1
= TREE_OPERAND (arg
, 1);
9144 if (TREE_CODE (arg0
) == ADDR_EXPR
9145 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == STRING_CST
)
9147 *ptr_offset
= convert (sizetype
, arg1
);
9148 return TREE_OPERAND (arg0
, 0);
9150 else if (TREE_CODE (arg1
) == ADDR_EXPR
9151 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == STRING_CST
)
9153 *ptr_offset
= convert (sizetype
, arg0
);
9154 return TREE_OPERAND (arg1
, 0);
9161 /* Expand code for a post- or pre- increment or decrement
9162 and return the RTX for the result.
9163 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9166 expand_increment (tree exp
, int post
, int ignore
)
9170 tree incremented
= TREE_OPERAND (exp
, 0);
9171 optab this_optab
= add_optab
;
9173 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
9174 int op0_is_copy
= 0;
9175 int single_insn
= 0;
9176 /* 1 means we can't store into OP0 directly,
9177 because it is a subreg narrower than a word,
9178 and we don't dare clobber the rest of the word. */
9181 /* Stabilize any component ref that might need to be
9182 evaluated more than once below. */
9184 || TREE_CODE (incremented
) == BIT_FIELD_REF
9185 || (TREE_CODE (incremented
) == COMPONENT_REF
9186 && (TREE_CODE (TREE_OPERAND (incremented
, 0)) != INDIRECT_REF
9187 || DECL_BIT_FIELD (TREE_OPERAND (incremented
, 1)))))
9188 incremented
= stabilize_reference (incremented
);
9189 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9190 ones into save exprs so that they don't accidentally get evaluated
9191 more than once by the code below. */
9192 if (TREE_CODE (incremented
) == PREINCREMENT_EXPR
9193 || TREE_CODE (incremented
) == PREDECREMENT_EXPR
)
9194 incremented
= save_expr (incremented
);
9196 /* Compute the operands as RTX.
9197 Note whether OP0 is the actual lvalue or a copy of it:
9198 I believe it is a copy iff it is a register or subreg
9199 and insns were generated in computing it. */
9201 temp
= get_last_insn ();
9202 op0
= expand_expr (incremented
, NULL_RTX
, VOIDmode
, 0);
9204 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9205 in place but instead must do sign- or zero-extension during assignment,
9206 so we copy it into a new register and let the code below use it as
9209 Note that we can safely modify this SUBREG since it is know not to be
9210 shared (it was made by the expand_expr call above). */
9212 if (GET_CODE (op0
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (op0
))
9215 SUBREG_REG (op0
) = copy_to_reg (SUBREG_REG (op0
));
9219 else if (GET_CODE (op0
) == SUBREG
9220 && GET_MODE_BITSIZE (GET_MODE (op0
)) < BITS_PER_WORD
)
9222 /* We cannot increment this SUBREG in place. If we are
9223 post-incrementing, get a copy of the old value. Otherwise,
9224 just mark that we cannot increment in place. */
9226 op0
= copy_to_reg (op0
);
9231 op0_is_copy
= ((GET_CODE (op0
) == SUBREG
|| GET_CODE (op0
) == REG
)
9232 && temp
!= get_last_insn ());
9233 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
9235 /* Decide whether incrementing or decrementing. */
9236 if (TREE_CODE (exp
) == POSTDECREMENT_EXPR
9237 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9238 this_optab
= sub_optab
;
9240 /* Convert decrement by a constant into a negative increment. */
9241 if (this_optab
== sub_optab
9242 && GET_CODE (op1
) == CONST_INT
)
9244 op1
= GEN_INT (-INTVAL (op1
));
9245 this_optab
= add_optab
;
9248 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp
)))
9249 this_optab
= this_optab
== add_optab
? addv_optab
: subv_optab
;
9251 /* For a preincrement, see if we can do this with a single instruction. */
9254 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9255 if (icode
!= (int) CODE_FOR_nothing
9256 /* Make sure that OP0 is valid for operands 0 and 1
9257 of the insn we want to queue. */
9258 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9259 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
)
9260 && (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9264 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9265 then we cannot just increment OP0. We must therefore contrive to
9266 increment the original value. Then, for postincrement, we can return
9267 OP0 since it is a copy of the old value. For preincrement, expand here
9268 unless we can do it with a single insn.
9270 Likewise if storing directly into OP0 would clobber high bits
9271 we need to preserve (bad_subreg). */
9272 if (op0_is_copy
|| (!post
&& !single_insn
) || bad_subreg
)
9274 /* This is the easiest way to increment the value wherever it is.
9275 Problems with multiple evaluation of INCREMENTED are prevented
9276 because either (1) it is a component_ref or preincrement,
9277 in which case it was stabilized above, or (2) it is an array_ref
9278 with constant index in an array in a register, which is
9279 safe to reevaluate. */
9280 tree newexp
= build (((TREE_CODE (exp
) == POSTDECREMENT_EXPR
9281 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9282 ? MINUS_EXPR
: PLUS_EXPR
),
9285 TREE_OPERAND (exp
, 1));
9287 while (TREE_CODE (incremented
) == NOP_EXPR
9288 || TREE_CODE (incremented
) == CONVERT_EXPR
)
9290 newexp
= convert (TREE_TYPE (incremented
), newexp
);
9291 incremented
= TREE_OPERAND (incremented
, 0);
9294 temp
= expand_assignment (incremented
, newexp
, ! post
&& ! ignore
);
9295 return post
? op0
: temp
;
9300 /* We have a true reference to the value in OP0.
9301 If there is an insn to add or subtract in this mode, queue it.
9302 Queuing the increment insn avoids the register shuffling
9303 that often results if we must increment now and first save
9304 the old value for subsequent use. */
9306 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9307 op0
= stabilize (op0
);
9310 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9311 if (icode
!= (int) CODE_FOR_nothing
9312 /* Make sure that OP0 is valid for operands 0 and 1
9313 of the insn we want to queue. */
9314 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9315 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
))
9317 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9318 op1
= force_reg (mode
, op1
);
9320 return enqueue_insn (op0
, GEN_FCN (icode
) (op0
, op0
, op1
));
9322 if (icode
!= (int) CODE_FOR_nothing
&& GET_CODE (op0
) == MEM
)
9324 rtx addr
= (general_operand (XEXP (op0
, 0), mode
)
9325 ? force_reg (Pmode
, XEXP (op0
, 0))
9326 : copy_to_reg (XEXP (op0
, 0)));
9329 op0
= replace_equiv_address (op0
, addr
);
9330 temp
= force_reg (GET_MODE (op0
), op0
);
9331 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9332 op1
= force_reg (mode
, op1
);
9334 /* The increment queue is LIFO, thus we have to `queue'
9335 the instructions in reverse order. */
9336 enqueue_insn (op0
, gen_move_insn (op0
, temp
));
9337 result
= enqueue_insn (temp
, GEN_FCN (icode
) (temp
, temp
, op1
));
9342 /* Preincrement, or we can't increment with one simple insn. */
9344 /* Save a copy of the value before inc or dec, to return it later. */
9345 temp
= value
= copy_to_reg (op0
);
9347 /* Arrange to return the incremented value. */
9348 /* Copy the rtx because expand_binop will protect from the queue,
9349 and the results of that would be invalid for us to return
9350 if our caller does emit_queue before using our result. */
9351 temp
= copy_rtx (value
= op0
);
9353 /* Increment however we can. */
9354 op1
= expand_binop (mode
, this_optab
, value
, op1
, op0
,
9355 TREE_UNSIGNED (TREE_TYPE (exp
)), OPTAB_LIB_WIDEN
);
9357 /* Make sure the value is stored into OP0. */
9359 emit_move_insn (op0
, op1
);
9364 /* Generate code to calculate EXP using a store-flag instruction
9365 and return an rtx for the result. EXP is either a comparison
9366 or a TRUTH_NOT_EXPR whose operand is a comparison.
9368 If TARGET is nonzero, store the result there if convenient.
9370 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9373 Return zero if there is no suitable set-flag instruction
9374 available on this machine.
9376 Once expand_expr has been called on the arguments of the comparison,
9377 we are committed to doing the store flag, since it is not safe to
9378 re-evaluate the expression. We emit the store-flag insn by calling
9379 emit_store_flag, but only expand the arguments if we have a reason
9380 to believe that emit_store_flag will be successful. If we think that
9381 it will, but it isn't, we have to simulate the store-flag with a
9382 set/jump/set sequence. */
9385 do_store_flag (tree exp
, rtx target
, enum machine_mode mode
, int only_cheap
)
9388 tree arg0
, arg1
, type
;
9390 enum machine_mode operand_mode
;
9394 enum insn_code icode
;
9395 rtx subtarget
= target
;
9398 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9399 result at the end. We can't simply invert the test since it would
9400 have already been inverted if it were valid. This case occurs for
9401 some floating-point comparisons. */
9403 if (TREE_CODE (exp
) == TRUTH_NOT_EXPR
)
9404 invert
= 1, exp
= TREE_OPERAND (exp
, 0);
9406 arg0
= TREE_OPERAND (exp
, 0);
9407 arg1
= TREE_OPERAND (exp
, 1);
9409 /* Don't crash if the comparison was erroneous. */
9410 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
9413 type
= TREE_TYPE (arg0
);
9414 operand_mode
= TYPE_MODE (type
);
9415 unsignedp
= TREE_UNSIGNED (type
);
9417 /* We won't bother with BLKmode store-flag operations because it would mean
9418 passing a lot of information to emit_store_flag. */
9419 if (operand_mode
== BLKmode
)
9422 /* We won't bother with store-flag operations involving function pointers
9423 when function pointers must be canonicalized before comparisons. */
9424 #ifdef HAVE_canonicalize_funcptr_for_compare
9425 if (HAVE_canonicalize_funcptr_for_compare
9426 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == POINTER_TYPE
9427 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
9429 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 1))) == POINTER_TYPE
9430 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
9431 == FUNCTION_TYPE
))))
9438 /* Get the rtx comparison code to use. We know that EXP is a comparison
9439 operation of some type. Some comparisons against 1 and -1 can be
9440 converted to comparisons with zero. Do so here so that the tests
9441 below will be aware that we have a comparison with zero. These
9442 tests will not catch constants in the first operand, but constants
9443 are rarely passed as the first operand. */
9445 switch (TREE_CODE (exp
))
9454 if (integer_onep (arg1
))
9455 arg1
= integer_zero_node
, code
= unsignedp
? LEU
: LE
;
9457 code
= unsignedp
? LTU
: LT
;
9460 if (! unsignedp
&& integer_all_onesp (arg1
))
9461 arg1
= integer_zero_node
, code
= LT
;
9463 code
= unsignedp
? LEU
: LE
;
9466 if (! unsignedp
&& integer_all_onesp (arg1
))
9467 arg1
= integer_zero_node
, code
= GE
;
9469 code
= unsignedp
? GTU
: GT
;
9472 if (integer_onep (arg1
))
9473 arg1
= integer_zero_node
, code
= unsignedp
? GTU
: GT
;
9475 code
= unsignedp
? GEU
: GE
;
9478 case UNORDERED_EXPR
:
9504 /* Put a constant second. */
9505 if (TREE_CODE (arg0
) == REAL_CST
|| TREE_CODE (arg0
) == INTEGER_CST
)
9507 tem
= arg0
; arg0
= arg1
; arg1
= tem
;
9508 code
= swap_condition (code
);
9511 /* If this is an equality or inequality test of a single bit, we can
9512 do this by shifting the bit being tested to the low-order bit and
9513 masking the result with the constant 1. If the condition was EQ,
9514 we xor it with 1. This does not require an scc insn and is faster
9515 than an scc insn even if we have it.
9517 The code to make this transformation was moved into fold_single_bit_test,
9518 so we just call into the folder and expand its result. */
9520 if ((code
== NE
|| code
== EQ
)
9521 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
9522 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
9524 tree type
= (*lang_hooks
.types
.type_for_mode
) (mode
, unsignedp
);
9525 return expand_expr (fold_single_bit_test (code
== NE
? NE_EXPR
: EQ_EXPR
,
9527 target
, VOIDmode
, EXPAND_NORMAL
);
9530 /* Now see if we are likely to be able to do this. Return if not. */
9531 if (! can_compare_p (code
, operand_mode
, ccp_store_flag
))
9534 icode
= setcc_gen_code
[(int) code
];
9535 if (icode
== CODE_FOR_nothing
9536 || (only_cheap
&& insn_data
[(int) icode
].operand
[0].mode
!= mode
))
9538 /* We can only do this if it is one of the special cases that
9539 can be handled without an scc insn. */
9540 if ((code
== LT
&& integer_zerop (arg1
))
9541 || (! only_cheap
&& code
== GE
&& integer_zerop (arg1
)))
9543 else if (BRANCH_COST
>= 0
9544 && ! only_cheap
&& (code
== NE
|| code
== EQ
)
9545 && TREE_CODE (type
) != REAL_TYPE
9546 && ((abs_optab
->handlers
[(int) operand_mode
].insn_code
9547 != CODE_FOR_nothing
)
9548 || (ffs_optab
->handlers
[(int) operand_mode
].insn_code
9549 != CODE_FOR_nothing
)))
9555 if (! get_subtarget (target
)
9556 || GET_MODE (subtarget
) != operand_mode
)
9559 expand_operands (arg0
, arg1
, subtarget
, &op0
, &op1
, 0);
9562 target
= gen_reg_rtx (mode
);
9564 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9565 because, if the emit_store_flag does anything it will succeed and
9566 OP0 and OP1 will not be used subsequently. */
9568 result
= emit_store_flag (target
, code
,
9569 queued_subexp_p (op0
) ? copy_rtx (op0
) : op0
,
9570 queued_subexp_p (op1
) ? copy_rtx (op1
) : op1
,
9571 operand_mode
, unsignedp
, 1);
9576 result
= expand_binop (mode
, xor_optab
, result
, const1_rtx
,
9577 result
, 0, OPTAB_LIB_WIDEN
);
9581 /* If this failed, we have to do this with set/compare/jump/set code. */
9582 if (GET_CODE (target
) != REG
9583 || reg_mentioned_p (target
, op0
) || reg_mentioned_p (target
, op1
))
9584 target
= gen_reg_rtx (GET_MODE (target
));
9586 emit_move_insn (target
, invert
? const0_rtx
: const1_rtx
);
9587 result
= compare_from_rtx (op0
, op1
, code
, unsignedp
,
9588 operand_mode
, NULL_RTX
);
9589 if (GET_CODE (result
) == CONST_INT
)
9590 return (((result
== const0_rtx
&& ! invert
)
9591 || (result
!= const0_rtx
&& invert
))
9592 ? const0_rtx
: const1_rtx
);
9594 /* The code of RESULT may not match CODE if compare_from_rtx
9595 decided to swap its operands and reverse the original code.
9597 We know that compare_from_rtx returns either a CONST_INT or
9598 a new comparison code, so it is safe to just extract the
9599 code from RESULT. */
9600 code
= GET_CODE (result
);
9602 label
= gen_label_rtx ();
9603 if (bcc_gen_fctn
[(int) code
] == 0)
9606 emit_jump_insn ((*bcc_gen_fctn
[(int) code
]) (label
));
9607 emit_move_insn (target
, invert
? const1_rtx
: const0_rtx
);
9614 /* Stubs in case we haven't got a casesi insn. */
9616 # define HAVE_casesi 0
9617 # define gen_casesi(a, b, c, d, e) (0)
9618 # define CODE_FOR_casesi CODE_FOR_nothing
9621 /* If the machine does not have a case insn that compares the bounds,
9622 this means extra overhead for dispatch tables, which raises the
9623 threshold for using them. */
9624 #ifndef CASE_VALUES_THRESHOLD
9625 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9626 #endif /* CASE_VALUES_THRESHOLD */
9629 case_values_threshold (void)
9631 return CASE_VALUES_THRESHOLD
;
9634 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9635 0 otherwise (i.e. if there is no casesi instruction). */
9637 try_casesi (tree index_type
, tree index_expr
, tree minval
, tree range
,
9638 rtx table_label ATTRIBUTE_UNUSED
, rtx default_label
)
9640 enum machine_mode index_mode
= SImode
;
9641 int index_bits
= GET_MODE_BITSIZE (index_mode
);
9642 rtx op1
, op2
, index
;
9643 enum machine_mode op_mode
;
9648 /* Convert the index to SImode. */
9649 if (GET_MODE_BITSIZE (TYPE_MODE (index_type
)) > GET_MODE_BITSIZE (index_mode
))
9651 enum machine_mode omode
= TYPE_MODE (index_type
);
9652 rtx rangertx
= expand_expr (range
, NULL_RTX
, VOIDmode
, 0);
9654 /* We must handle the endpoints in the original mode. */
9655 index_expr
= build (MINUS_EXPR
, index_type
,
9656 index_expr
, minval
);
9657 minval
= integer_zero_node
;
9658 index
= expand_expr (index_expr
, NULL_RTX
, VOIDmode
, 0);
9659 emit_cmp_and_jump_insns (rangertx
, index
, LTU
, NULL_RTX
,
9660 omode
, 1, default_label
);
9661 /* Now we can safely truncate. */
9662 index
= convert_to_mode (index_mode
, index
, 0);
9666 if (TYPE_MODE (index_type
) != index_mode
)
9668 index_expr
= convert ((*lang_hooks
.types
.type_for_size
)
9669 (index_bits
, 0), index_expr
);
9670 index_type
= TREE_TYPE (index_expr
);
9673 index
= expand_expr (index_expr
, NULL_RTX
, VOIDmode
, 0);
9676 index
= protect_from_queue (index
, 0);
9677 do_pending_stack_adjust ();
9679 op_mode
= insn_data
[(int) CODE_FOR_casesi
].operand
[0].mode
;
9680 if (! (*insn_data
[(int) CODE_FOR_casesi
].operand
[0].predicate
)
9682 index
= copy_to_mode_reg (op_mode
, index
);
9684 op1
= expand_expr (minval
, NULL_RTX
, VOIDmode
, 0);
9686 op_mode
= insn_data
[(int) CODE_FOR_casesi
].operand
[1].mode
;
9687 op1
= convert_modes (op_mode
, TYPE_MODE (TREE_TYPE (minval
)),
9688 op1
, TREE_UNSIGNED (TREE_TYPE (minval
)));
9689 if (! (*insn_data
[(int) CODE_FOR_casesi
].operand
[1].predicate
)
9691 op1
= copy_to_mode_reg (op_mode
, op1
);
9693 op2
= expand_expr (range
, NULL_RTX
, VOIDmode
, 0);
9695 op_mode
= insn_data
[(int) CODE_FOR_casesi
].operand
[2].mode
;
9696 op2
= convert_modes (op_mode
, TYPE_MODE (TREE_TYPE (range
)),
9697 op2
, TREE_UNSIGNED (TREE_TYPE (range
)));
9698 if (! (*insn_data
[(int) CODE_FOR_casesi
].operand
[2].predicate
)
9700 op2
= copy_to_mode_reg (op_mode
, op2
);
9702 emit_jump_insn (gen_casesi (index
, op1
, op2
,
9703 table_label
, default_label
));
9707 /* Attempt to generate a tablejump instruction; same concept. */
9708 #ifndef HAVE_tablejump
9709 #define HAVE_tablejump 0
9710 #define gen_tablejump(x, y) (0)
9713 /* Subroutine of the next function.
9715 INDEX is the value being switched on, with the lowest value
9716 in the table already subtracted.
9717 MODE is its expected mode (needed if INDEX is constant).
9718 RANGE is the length of the jump table.
9719 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9721 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9722 index value is out of range. */
9725 do_tablejump (rtx index
, enum machine_mode mode
, rtx range
, rtx table_label
,
9730 if (INTVAL (range
) > cfun
->max_jumptable_ents
)
9731 cfun
->max_jumptable_ents
= INTVAL (range
);
9733 /* Do an unsigned comparison (in the proper mode) between the index
9734 expression and the value which represents the length of the range.
9735 Since we just finished subtracting the lower bound of the range
9736 from the index expression, this comparison allows us to simultaneously
9737 check that the original index expression value is both greater than
9738 or equal to the minimum value of the range and less than or equal to
9739 the maximum value of the range. */
9741 emit_cmp_and_jump_insns (index
, range
, GTU
, NULL_RTX
, mode
, 1,
9744 /* If index is in range, it must fit in Pmode.
9745 Convert to Pmode so we can index with it. */
9747 index
= convert_to_mode (Pmode
, index
, 1);
9749 /* Don't let a MEM slip through, because then INDEX that comes
9750 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9751 and break_out_memory_refs will go to work on it and mess it up. */
9752 #ifdef PIC_CASE_VECTOR_ADDRESS
9753 if (flag_pic
&& GET_CODE (index
) != REG
)
9754 index
= copy_to_mode_reg (Pmode
, index
);
9757 /* If flag_force_addr were to affect this address
9758 it could interfere with the tricky assumptions made
9759 about addresses that contain label-refs,
9760 which may be valid only very near the tablejump itself. */
9761 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9762 GET_MODE_SIZE, because this indicates how large insns are. The other
9763 uses should all be Pmode, because they are addresses. This code
9764 could fail if addresses and insns are not the same size. */
9765 index
= gen_rtx_PLUS (Pmode
,
9766 gen_rtx_MULT (Pmode
, index
,
9767 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE
))),
9768 gen_rtx_LABEL_REF (Pmode
, table_label
));
9769 #ifdef PIC_CASE_VECTOR_ADDRESS
9771 index
= PIC_CASE_VECTOR_ADDRESS (index
);
9774 index
= memory_address_noforce (CASE_VECTOR_MODE
, index
);
9775 temp
= gen_reg_rtx (CASE_VECTOR_MODE
);
9776 vector
= gen_rtx_MEM (CASE_VECTOR_MODE
, index
);
9777 RTX_UNCHANGING_P (vector
) = 1;
9778 MEM_NOTRAP_P (vector
) = 1;
9779 convert_move (temp
, vector
, 0);
9781 emit_jump_insn (gen_tablejump (temp
, table_label
));
9783 /* If we are generating PIC code or if the table is PC-relative, the
9784 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9785 if (! CASE_VECTOR_PC_RELATIVE
&& ! flag_pic
)
9790 try_tablejump (tree index_type
, tree index_expr
, tree minval
, tree range
,
9791 rtx table_label
, rtx default_label
)
9795 if (! HAVE_tablejump
)
9798 index_expr
= fold (build (MINUS_EXPR
, index_type
,
9799 convert (index_type
, index_expr
),
9800 convert (index_type
, minval
)));
9801 index
= expand_expr (index_expr
, NULL_RTX
, VOIDmode
, 0);
9803 index
= protect_from_queue (index
, 0);
9804 do_pending_stack_adjust ();
9806 do_tablejump (index
, TYPE_MODE (index_type
),
9807 convert_modes (TYPE_MODE (index_type
),
9808 TYPE_MODE (TREE_TYPE (range
)),
9809 expand_expr (range
, NULL_RTX
,
9811 TREE_UNSIGNED (TREE_TYPE (range
))),
9812 table_label
, default_label
);
9816 /* Nonzero if the mode is a valid vector mode for this architecture.
9817 This returns nonzero even if there is no hardware support for the
9818 vector mode, but we can emulate with narrower modes. */
9821 vector_mode_valid_p (enum machine_mode mode
)
9823 enum mode_class
class = GET_MODE_CLASS (mode
);
9824 enum machine_mode innermode
;
9826 /* Doh! What's going on? */
9827 if (class != MODE_VECTOR_INT
9828 && class != MODE_VECTOR_FLOAT
)
9831 /* Hardware support. Woo hoo! */
9832 if (VECTOR_MODE_SUPPORTED_P (mode
))
9835 innermode
= GET_MODE_INNER (mode
);
9837 /* We should probably return 1 if requesting V4DI and we have no DI,
9838 but we have V2DI, but this is probably very unlikely. */
9840 /* If we have support for the inner mode, we can safely emulate it.
9841 We may not have V2DI, but me can emulate with a pair of DIs. */
9842 return mov_optab
->handlers
[innermode
].insn_code
!= CODE_FOR_nothing
;
9845 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9847 const_vector_from_tree (tree exp
)
9852 enum machine_mode inner
, mode
;
9854 mode
= TYPE_MODE (TREE_TYPE (exp
));
9856 if (is_zeros_p (exp
))
9857 return CONST0_RTX (mode
);
9859 units
= GET_MODE_NUNITS (mode
);
9860 inner
= GET_MODE_INNER (mode
);
9862 v
= rtvec_alloc (units
);
9864 link
= TREE_VECTOR_CST_ELTS (exp
);
9865 for (i
= 0; link
; link
= TREE_CHAIN (link
), ++i
)
9867 elt
= TREE_VALUE (link
);
9869 if (TREE_CODE (elt
) == REAL_CST
)
9870 RTVEC_ELT (v
, i
) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt
),
9873 RTVEC_ELT (v
, i
) = immed_double_const (TREE_INT_CST_LOW (elt
),
9874 TREE_INT_CST_HIGH (elt
),
9878 /* Initialize remaining elements to 0. */
9879 for (; i
< units
; ++i
)
9880 RTVEC_ELT (v
, i
) = CONST0_RTX (inner
);
9882 return gen_rtx_raw_CONST_VECTOR (mode
, v
);
9885 #include "gt-expr.h"