]>
Commit | Line | Data |
---|---|---|
10f307d9 | 1 | /* Convert tree expression to rtl instructions, for GNU compiler. |
e1439bcb | 2 | Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, |
d513ec2f | 3 | 2000, 2001, 2002 Free Software Foundation, Inc. |
10f307d9 | 4 | |
f12b58b3 | 5 | This file is part of GCC. |
10f307d9 | 6 | |
f12b58b3 | 7 | GCC is free software; you can redistribute it and/or modify it under |
8 | the terms of the GNU General Public License as published by the Free | |
9 | Software Foundation; either version 2, or (at your option) any later | |
10 | version. | |
10f307d9 | 11 | |
f12b58b3 | 12 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
13 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
14 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
15 | for more details. | |
10f307d9 | 16 | |
17 | You should have received a copy of the GNU General Public License | |
f12b58b3 | 18 | along with GCC; see the file COPYING. If not, write to the Free |
19 | Software Foundation, 59 Temple Place - Suite 330, Boston, MA | |
20 | 02111-1307, USA. */ | |
10f307d9 | 21 | |
10f307d9 | 22 | #include "config.h" |
405711de | 23 | #include "system.h" |
805e22b2 | 24 | #include "coretypes.h" |
25 | #include "tm.h" | |
649d8da6 | 26 | #include "machmode.h" |
ef258422 | 27 | #include "real.h" |
10f307d9 | 28 | #include "rtl.h" |
29 | #include "tree.h" | |
30 | #include "flags.h" | |
09994a52 | 31 | #include "regs.h" |
261db321 | 32 | #include "hard-reg-set.h" |
037a5228 | 33 | #include "except.h" |
10f307d9 | 34 | #include "function.h" |
10f307d9 | 35 | #include "insn-config.h" |
3084721c | 36 | #include "insn-attr.h" |
fa56dc1d | 37 | /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */ |
cd03a192 | 38 | #include "expr.h" |
d8fc4d0b | 39 | #include "optabs.h" |
40 | #include "libfuncs.h" | |
10f307d9 | 41 | #include "recog.h" |
6702c250 | 42 | #include "reload.h" |
10f307d9 | 43 | #include "output.h" |
10f307d9 | 44 | #include "typeclass.h" |
12874aaf | 45 | #include "toplev.h" |
521dd524 | 46 | #include "ggc.h" |
b3187c7c | 47 | #include "langhooks.h" |
a3c49299 | 48 | #include "intl.h" |
075136a2 | 49 | #include "tm_p.h" |
10f307d9 | 50 | |
10f307d9 | 51 | /* Decide whether a function's arguments should be processed |
7473731d | 52 | from first to last or from last to first. |
53 | ||
54 | They should if the stack and args grow in opposite directions, but | |
55 | only if we have push insns. */ | |
10f307d9 | 56 | |
10f307d9 | 57 | #ifdef PUSH_ROUNDING |
7473731d | 58 | |
2a8e54a4 | 59 | #ifndef PUSH_ARGS_REVERSED |
cd5f9545 | 60 | #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD) |
fa56dc1d | 61 | #define PUSH_ARGS_REVERSED /* If it's last to first. */ |
10f307d9 | 62 | #endif |
2a8e54a4 | 63 | #endif |
7473731d | 64 | |
10f307d9 | 65 | #endif |
66 | ||
67 | #ifndef STACK_PUSH_CODE | |
68 | #ifdef STACK_GROWS_DOWNWARD | |
69 | #define STACK_PUSH_CODE PRE_DEC | |
70 | #else | |
71 | #define STACK_PUSH_CODE PRE_INC | |
72 | #endif | |
73 | #endif | |
74 | ||
25d1d1e9 | 75 | /* Assume that case vectors are not pc-relative. */ |
76 | #ifndef CASE_VECTOR_PC_RELATIVE | |
77 | #define CASE_VECTOR_PC_RELATIVE 0 | |
78 | #endif | |
79 | ||
c0bfc78e | 80 | /* Convert defined/undefined to boolean. */ |
81 | #ifdef TARGET_MEM_FUNCTIONS | |
82 | #undef TARGET_MEM_FUNCTIONS | |
83 | #define TARGET_MEM_FUNCTIONS 1 | |
84 | #else | |
85 | #define TARGET_MEM_FUNCTIONS 0 | |
86 | #endif | |
87 | ||
88 | ||
10f307d9 | 89 | /* If this is nonzero, we do not bother generating VOLATILE |
90 | around volatile memory references, and we are willing to | |
91 | output indirect addresses. If cse is to follow, we reject | |
92 | indirect addresses so a useful potential cse is generated; | |
93 | if it is used only once, instruction combination will produce | |
94 | the same indirect address eventually. */ | |
95 | int cse_not_expected; | |
96 | ||
155b05dc | 97 | /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */ |
98 | static tree placeholder_list = 0; | |
99 | ||
3ebd94bd | 100 | /* This structure is used by move_by_pieces to describe the move to |
101 | be performed. */ | |
3ebd94bd | 102 | struct move_by_pieces |
103 | { | |
104 | rtx to; | |
105 | rtx to_addr; | |
106 | int autinc_to; | |
107 | int explicit_inc_to; | |
108 | rtx from; | |
109 | rtx from_addr; | |
110 | int autinc_from; | |
111 | int explicit_inc_from; | |
f7c44134 | 112 | unsigned HOST_WIDE_INT len; |
113 | HOST_WIDE_INT offset; | |
3ebd94bd | 114 | int reverse; |
115 | }; | |
116 | ||
6840589f | 117 | /* This structure is used by store_by_pieces to describe the clear to |
dbd14dc5 | 118 | be performed. */ |
119 | ||
6840589f | 120 | struct store_by_pieces |
dbd14dc5 | 121 | { |
122 | rtx to; | |
123 | rtx to_addr; | |
124 | int autinc_to; | |
125 | int explicit_inc_to; | |
f7c44134 | 126 | unsigned HOST_WIDE_INT len; |
127 | HOST_WIDE_INT offset; | |
6840589f | 128 | rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode)); |
129 | PTR constfundata; | |
dbd14dc5 | 130 | int reverse; |
131 | }; | |
132 | ||
621f6678 | 133 | static rtx enqueue_insn PARAMS ((rtx, rtx)); |
f7c44134 | 134 | static unsigned HOST_WIDE_INT move_by_pieces_ninsns |
135 | PARAMS ((unsigned HOST_WIDE_INT, | |
136 | unsigned int)); | |
621f6678 | 137 | static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode, |
138 | struct move_by_pieces *)); | |
0378dbdc | 139 | static bool block_move_libcall_safe_for_call_parm PARAMS ((void)); |
c0bfc78e | 140 | static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned)); |
141 | static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx)); | |
142 | static tree emit_block_move_libcall_fn PARAMS ((int)); | |
0378dbdc | 143 | static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned)); |
6840589f | 144 | static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT, |
145 | enum machine_mode)); | |
f7c44134 | 146 | static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT, |
147 | unsigned int)); | |
6840589f | 148 | static void store_by_pieces_1 PARAMS ((struct store_by_pieces *, |
149 | unsigned int)); | |
150 | static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...), | |
621f6678 | 151 | enum machine_mode, |
6840589f | 152 | struct store_by_pieces *)); |
c0bfc78e | 153 | static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned)); |
154 | static rtx clear_storage_via_libcall PARAMS ((rtx, rtx)); | |
155 | static tree clear_storage_libcall_fn PARAMS ((int)); | |
c0c4a46d | 156 | static rtx compress_float_constant PARAMS ((rtx, rtx)); |
d8e5b213 | 157 | static rtx get_subtarget PARAMS ((rtx)); |
621f6678 | 158 | static int is_zeros_p PARAMS ((tree)); |
159 | static int mostly_zeros_p PARAMS ((tree)); | |
02e7a332 | 160 | static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT, |
161 | HOST_WIDE_INT, enum machine_mode, | |
2c269e73 | 162 | tree, tree, int, int)); |
163 | static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT)); | |
02e7a332 | 164 | static rtx store_field PARAMS ((rtx, HOST_WIDE_INT, |
165 | HOST_WIDE_INT, enum machine_mode, | |
2b96c5f6 | 166 | tree, enum machine_mode, int, tree, |
167 | int)); | |
621f6678 | 168 | static rtx var_rtx PARAMS ((tree)); |
fcdc122e | 169 | static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree)); |
5b965633 | 170 | static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree)); |
67c68e45 | 171 | static int is_aligning_offset PARAMS ((tree, tree)); |
621f6678 | 172 | static rtx expand_increment PARAMS ((tree, int, int)); |
621f6678 | 173 | static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx)); |
174 | static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx)); | |
02e7a332 | 175 | static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code, |
176 | rtx, rtx)); | |
621f6678 | 177 | static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int)); |
fad4a30c | 178 | #ifdef PUSH_ROUNDING |
ef7dc4b4 | 179 | static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree)); |
fad4a30c | 180 | #endif |
539a3a92 | 181 | static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx)); |
10f307d9 | 182 | |
07edfa02 | 183 | /* Record for each mode whether we can move a register directly to or |
184 | from an object of that mode in memory. If we can't, we won't try | |
185 | to use that mode directly when accessing a field of that mode. */ | |
186 | ||
187 | static char direct_load[NUM_MACHINE_MODES]; | |
188 | static char direct_store[NUM_MACHINE_MODES]; | |
189 | ||
c0c4a46d | 190 | /* Record for each mode whether we can float-extend from memory. */ |
191 | ||
192 | static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES]; | |
193 | ||
cbdc0179 | 194 | /* If a memory-to-memory move would take MOVE_RATIO or more simple |
195 | move-instruction sequences, we will do a movstr or libcall instead. */ | |
10f307d9 | 196 | |
197 | #ifndef MOVE_RATIO | |
a5fd5157 | 198 | #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti) |
10f307d9 | 199 | #define MOVE_RATIO 2 |
200 | #else | |
fa56dc1d | 201 | /* If we are optimizing for space (-Os), cut down the default move ratio. */ |
abf8a363 | 202 | #define MOVE_RATIO (optimize_size ? 3 : 15) |
10f307d9 | 203 | #endif |
204 | #endif | |
35f44ac1 | 205 | |
53bd09ab | 206 | /* This macro is used to determine whether move_by_pieces should be called |
fa56dc1d | 207 | to perform a structure copy. */ |
53bd09ab | 208 | #ifndef MOVE_BY_PIECES_P |
325d1c45 | 209 | #define MOVE_BY_PIECES_P(SIZE, ALIGN) \ |
e1439bcb | 210 | (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO) |
53bd09ab | 211 | #endif |
212 | ||
310d3ec9 | 213 | /* If a clear memory operation would take CLEAR_RATIO or more simple |
214 | move-instruction sequences, we will do a clrstr or libcall instead. */ | |
215 | ||
216 | #ifndef CLEAR_RATIO | |
217 | #if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti) | |
218 | #define CLEAR_RATIO 2 | |
219 | #else | |
220 | /* If we are optimizing for space, cut down the default clear ratio. */ | |
221 | #define CLEAR_RATIO (optimize_size ? 3 : 15) | |
222 | #endif | |
223 | #endif | |
224 | ||
225 | /* This macro is used to determine whether clear_by_pieces should be | |
226 | called to clear storage. */ | |
227 | #ifndef CLEAR_BY_PIECES_P | |
228 | #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \ | |
229 | (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO) | |
230 | #endif | |
231 | ||
805e22b2 | 232 | /* This macro is used to determine whether store_by_pieces should be |
233 | called to "memset" storage with byte values other than zero, or | |
234 | to "memcpy" storage when the source is a constant string. */ | |
235 | #ifndef STORE_BY_PIECES_P | |
236 | #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN) | |
237 | #endif | |
238 | ||
a5fd5157 | 239 | /* This array records the insn_code of insns to perform block moves. */ |
a42d921d | 240 | enum insn_code movstr_optab[NUM_MACHINE_MODES]; |
a5fd5157 | 241 | |
dbd14dc5 | 242 | /* This array records the insn_code of insns to perform block clears. */ |
243 | enum insn_code clrstr_optab[NUM_MACHINE_MODES]; | |
244 | ||
6ef828f9 | 245 | /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */ |
35f44ac1 | 246 | |
247 | #ifndef SLOW_UNALIGNED_ACCESS | |
9439ebf7 | 248 | #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT |
35f44ac1 | 249 | #endif |
10f307d9 | 250 | \f |
07edfa02 | 251 | /* This is run once per compilation to set up which modes can be used |
a5fd5157 | 252 | directly in memory and to initialize the block move optab. */ |
07edfa02 | 253 | |
254 | void | |
255 | init_expr_once () | |
256 | { | |
257 | rtx insn, pat; | |
258 | enum machine_mode mode; | |
6fa98783 | 259 | int num_clobbers; |
9e042f31 | 260 | rtx mem, mem1; |
0c7f5242 | 261 | rtx reg; |
9e042f31 | 262 | |
a97fcedd | 263 | /* Try indexing by frame ptr and try by stack ptr. |
264 | It is known that on the Convex the stack ptr isn't a valid index. | |
265 | With luck, one or the other is valid on any machine. */ | |
9e042f31 | 266 | mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx); |
267 | mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx); | |
07edfa02 | 268 | |
0c7f5242 | 269 | /* A scratch register we can modify in-place below to avoid |
270 | useless RTL allocations. */ | |
271 | reg = gen_rtx_REG (VOIDmode, -1); | |
272 | ||
7a5749cc | 273 | insn = rtx_alloc (INSN); |
274 | pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX); | |
275 | PATTERN (insn) = pat; | |
07edfa02 | 276 | |
277 | for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES; | |
278 | mode = (enum machine_mode) ((int) mode + 1)) | |
279 | { | |
280 | int regno; | |
07edfa02 | 281 | |
282 | direct_load[(int) mode] = direct_store[(int) mode] = 0; | |
283 | PUT_MODE (mem, mode); | |
a97fcedd | 284 | PUT_MODE (mem1, mode); |
0c7f5242 | 285 | PUT_MODE (reg, mode); |
07edfa02 | 286 | |
3c209fda | 287 | /* See if there is some register that can be used in this mode and |
288 | directly loaded or stored from memory. */ | |
289 | ||
b63679d2 | 290 | if (mode != VOIDmode && mode != BLKmode) |
291 | for (regno = 0; regno < FIRST_PSEUDO_REGISTER | |
292 | && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0); | |
293 | regno++) | |
294 | { | |
295 | if (! HARD_REGNO_MODE_OK (regno, mode)) | |
296 | continue; | |
3c209fda | 297 | |
0c7f5242 | 298 | REGNO (reg) = regno; |
3c209fda | 299 | |
b63679d2 | 300 | SET_SRC (pat) = mem; |
301 | SET_DEST (pat) = reg; | |
302 | if (recog (pat, insn, &num_clobbers) >= 0) | |
303 | direct_load[(int) mode] = 1; | |
3c209fda | 304 | |
a97fcedd | 305 | SET_SRC (pat) = mem1; |
306 | SET_DEST (pat) = reg; | |
307 | if (recog (pat, insn, &num_clobbers) >= 0) | |
308 | direct_load[(int) mode] = 1; | |
309 | ||
b63679d2 | 310 | SET_SRC (pat) = reg; |
311 | SET_DEST (pat) = mem; | |
312 | if (recog (pat, insn, &num_clobbers) >= 0) | |
313 | direct_store[(int) mode] = 1; | |
a97fcedd | 314 | |
315 | SET_SRC (pat) = reg; | |
316 | SET_DEST (pat) = mem1; | |
317 | if (recog (pat, insn, &num_clobbers) >= 0) | |
318 | direct_store[(int) mode] = 1; | |
b63679d2 | 319 | } |
07edfa02 | 320 | } |
321 | ||
c0c4a46d | 322 | mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000)); |
323 | ||
324 | for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode; | |
325 | mode = GET_MODE_WIDER_MODE (mode)) | |
326 | { | |
327 | enum machine_mode srcmode; | |
328 | for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode; | |
ff385626 | 329 | srcmode = GET_MODE_WIDER_MODE (srcmode)) |
c0c4a46d | 330 | { |
331 | enum insn_code ic; | |
332 | ||
333 | ic = can_extend_p (mode, srcmode, 0); | |
334 | if (ic == CODE_FOR_nothing) | |
335 | continue; | |
336 | ||
337 | PUT_MODE (mem, srcmode); | |
ff385626 | 338 | |
c0c4a46d | 339 | if ((*insn_data[ic].operand[1].predicate) (mem, srcmode)) |
340 | float_extend_from_mem[mode][srcmode] = true; | |
341 | } | |
342 | } | |
07edfa02 | 343 | } |
6fa98783 | 344 | |
10f307d9 | 345 | /* This is run at the start of compiling a function. */ |
346 | ||
347 | void | |
348 | init_expr () | |
349 | { | |
1f3233d1 | 350 | cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status)); |
10f307d9 | 351 | |
0a893c29 | 352 | pending_chain = 0; |
10f307d9 | 353 | pending_stack_adjust = 0; |
91b70175 | 354 | stack_pointer_delta = 0; |
10f307d9 | 355 | inhibit_defer_pop = 0; |
10f307d9 | 356 | saveregs_value = 0; |
a7f0d7fb | 357 | apply_args_value = 0; |
35f44ac1 | 358 | forced_labels = 0; |
10f307d9 | 359 | } |
360 | ||
0a893c29 | 361 | /* Small sanity check that the queue is empty at the end of a function. */ |
d8e5b213 | 362 | |
10f307d9 | 363 | void |
0a893c29 | 364 | finish_expr_for_function () |
10f307d9 | 365 | { |
0a893c29 | 366 | if (pending_chain) |
367 | abort (); | |
10f307d9 | 368 | } |
369 | \f | |
370 | /* Manage the queue of increment instructions to be output | |
371 | for POSTINCREMENT_EXPR expressions, etc. */ | |
372 | ||
10f307d9 | 373 | /* Queue up to increment (or change) VAR later. BODY says how: |
374 | BODY should be the same thing you would pass to emit_insn | |
375 | to increment right away. It will go to emit_insn later on. | |
376 | ||
377 | The value is a QUEUED expression to be used in place of VAR | |
378 | where you want to guarantee the pre-incrementation value of VAR. */ | |
379 | ||
380 | static rtx | |
381 | enqueue_insn (var, body) | |
382 | rtx var, body; | |
383 | { | |
7014838c | 384 | pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX, |
385 | body, pending_chain); | |
10f307d9 | 386 | return pending_chain; |
387 | } | |
388 | ||
389 | /* Use protect_from_queue to convert a QUEUED expression | |
390 | into something that you can put immediately into an instruction. | |
391 | If the queued incrementation has not happened yet, | |
392 | protect_from_queue returns the variable itself. | |
393 | If the incrementation has happened, protect_from_queue returns a temp | |
394 | that contains a copy of the old value of the variable. | |
395 | ||
396 | Any time an rtx which might possibly be a QUEUED is to be put | |
397 | into an instruction, it must be passed through protect_from_queue first. | |
398 | QUEUED expressions are not meaningful in instructions. | |
399 | ||
400 | Do not pass a value through protect_from_queue and then hold | |
401 | on to it for a while before putting it in an instruction! | |
402 | If the queue is flushed in between, incorrect code will result. */ | |
403 | ||
404 | rtx | |
405 | protect_from_queue (x, modify) | |
19cb6b50 | 406 | rtx x; |
10f307d9 | 407 | int modify; |
408 | { | |
19cb6b50 | 409 | RTX_CODE code = GET_CODE (x); |
10f307d9 | 410 | |
411 | #if 0 /* A QUEUED can hang around after the queue is forced out. */ | |
412 | /* Shortcut for most common case. */ | |
413 | if (pending_chain == 0) | |
414 | return x; | |
415 | #endif | |
416 | ||
417 | if (code != QUEUED) | |
418 | { | |
2f6a905f | 419 | /* A special hack for read access to (MEM (QUEUED ...)) to facilitate |
420 | use of autoincrement. Make a copy of the contents of the memory | |
421 | location rather than a copy of the address, but not if the value is | |
422 | of mode BLKmode. Don't modify X in place since it might be | |
423 | shared. */ | |
10f307d9 | 424 | if (code == MEM && GET_MODE (x) != BLKmode |
425 | && GET_CODE (XEXP (x, 0)) == QUEUED && !modify) | |
426 | { | |
e4e86ec5 | 427 | rtx y = XEXP (x, 0); |
428 | rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y)); | |
2f6a905f | 429 | |
10f307d9 | 430 | if (QUEUED_INSN (y)) |
431 | { | |
e4e86ec5 | 432 | rtx temp = gen_reg_rtx (GET_MODE (x)); |
433 | ||
2f6a905f | 434 | emit_insn_before (gen_move_insn (temp, new), |
10f307d9 | 435 | QUEUED_INSN (y)); |
436 | return temp; | |
437 | } | |
e4e86ec5 | 438 | |
f9636a66 | 439 | /* Copy the address into a pseudo, so that the returned value |
440 | remains correct across calls to emit_queue. */ | |
e4e86ec5 | 441 | return replace_equiv_address (new, copy_to_reg (XEXP (new, 0))); |
10f307d9 | 442 | } |
e4e86ec5 | 443 | |
10f307d9 | 444 | /* Otherwise, recursively protect the subexpressions of all |
445 | the kinds of rtx's that can contain a QUEUED. */ | |
446 | if (code == MEM) | |
c0377bb2 | 447 | { |
448 | rtx tem = protect_from_queue (XEXP (x, 0), 0); | |
449 | if (tem != XEXP (x, 0)) | |
450 | { | |
451 | x = copy_rtx (x); | |
452 | XEXP (x, 0) = tem; | |
453 | } | |
454 | } | |
10f307d9 | 455 | else if (code == PLUS || code == MULT) |
456 | { | |
c0377bb2 | 457 | rtx new0 = protect_from_queue (XEXP (x, 0), 0); |
458 | rtx new1 = protect_from_queue (XEXP (x, 1), 0); | |
459 | if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)) | |
460 | { | |
461 | x = copy_rtx (x); | |
462 | XEXP (x, 0) = new0; | |
463 | XEXP (x, 1) = new1; | |
464 | } | |
10f307d9 | 465 | } |
466 | return x; | |
467 | } | |
f9636a66 | 468 | /* If the increment has not happened, use the variable itself. Copy it |
469 | into a new pseudo so that the value remains correct across calls to | |
470 | emit_queue. */ | |
10f307d9 | 471 | if (QUEUED_INSN (x) == 0) |
f9636a66 | 472 | return copy_to_reg (QUEUED_VAR (x)); |
10f307d9 | 473 | /* If the increment has happened and a pre-increment copy exists, |
474 | use that copy. */ | |
475 | if (QUEUED_COPY (x) != 0) | |
476 | return QUEUED_COPY (x); | |
477 | /* The increment has happened but we haven't set up a pre-increment copy. | |
478 | Set one up now, and use it. */ | |
479 | QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x))); | |
480 | emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)), | |
481 | QUEUED_INSN (x)); | |
482 | return QUEUED_COPY (x); | |
483 | } | |
484 | ||
485 | /* Return nonzero if X contains a QUEUED expression: | |
486 | if it contains anything that will be altered by a queued increment. | |
487 | We handle only combinations of MEM, PLUS, MINUS and MULT operators | |
488 | since memory addresses generally contain only those. */ | |
489 | ||
6d2e66f1 | 490 | int |
10f307d9 | 491 | queued_subexp_p (x) |
492 | rtx x; | |
493 | { | |
19cb6b50 | 494 | enum rtx_code code = GET_CODE (x); |
10f307d9 | 495 | switch (code) |
496 | { | |
497 | case QUEUED: | |
498 | return 1; | |
499 | case MEM: | |
500 | return queued_subexp_p (XEXP (x, 0)); | |
501 | case MULT: | |
502 | case PLUS: | |
503 | case MINUS: | |
0dbd1c74 | 504 | return (queued_subexp_p (XEXP (x, 0)) |
505 | || queued_subexp_p (XEXP (x, 1))); | |
506 | default: | |
507 | return 0; | |
10f307d9 | 508 | } |
10f307d9 | 509 | } |
510 | ||
511 | /* Perform all the pending incrementations. */ | |
512 | ||
513 | void | |
514 | emit_queue () | |
515 | { | |
19cb6b50 | 516 | rtx p; |
0c22b90f | 517 | while ((p = pending_chain)) |
10f307d9 | 518 | { |
ec91253b | 519 | rtx body = QUEUED_BODY (p); |
520 | ||
31d3e01c | 521 | switch (GET_CODE (body)) |
522 | { | |
523 | case INSN: | |
524 | case JUMP_INSN: | |
525 | case CALL_INSN: | |
526 | case CODE_LABEL: | |
527 | case BARRIER: | |
528 | case NOTE: | |
529 | QUEUED_INSN (p) = body; | |
530 | emit_insn (body); | |
531 | break; | |
532 | ||
533 | #ifdef ENABLE_CHECKING | |
534 | case SEQUENCE: | |
535 | abort (); | |
536 | break; | |
537 | #endif | |
538 | ||
539 | default: | |
540 | QUEUED_INSN (p) = emit_insn (body); | |
541 | break; | |
ec91253b | 542 | } |
31d3e01c | 543 | |
10f307d9 | 544 | pending_chain = QUEUED_NEXT (p); |
545 | } | |
546 | } | |
10f307d9 | 547 | \f |
548 | /* Copy data from FROM to TO, where the machine modes are not the same. | |
549 | Both modes may be integer, or both may be floating. | |
550 | UNSIGNEDP should be nonzero if FROM is an unsigned type. | |
551 | This causes zero-extension instead of sign-extension. */ | |
552 | ||
553 | void | |
554 | convert_move (to, from, unsignedp) | |
19cb6b50 | 555 | rtx to, from; |
10f307d9 | 556 | int unsignedp; |
557 | { | |
558 | enum machine_mode to_mode = GET_MODE (to); | |
559 | enum machine_mode from_mode = GET_MODE (from); | |
560 | int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT; | |
561 | int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT; | |
562 | enum insn_code code; | |
563 | rtx libcall; | |
564 | ||
565 | /* rtx code for making an equivalent value. */ | |
65923445 | 566 | enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN |
567 | : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND)); | |
10f307d9 | 568 | |
569 | to = protect_from_queue (to, 1); | |
570 | from = protect_from_queue (from, 0); | |
571 | ||
572 | if (to_real != from_real) | |
573 | abort (); | |
574 | ||
acfb31e5 | 575 | /* If FROM is a SUBREG that indicates that we have already done at least |
576 | the required extension, strip it. We don't handle such SUBREGs as | |
577 | TO here. */ | |
578 | ||
579 | if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from) | |
580 | && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from))) | |
581 | >= GET_MODE_SIZE (to_mode)) | |
582 | && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp) | |
583 | from = gen_lowpart (to_mode, from), from_mode = to_mode; | |
584 | ||
585 | if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to)) | |
586 | abort (); | |
587 | ||
10f307d9 | 588 | if (to_mode == from_mode |
589 | || (from_mode == VOIDmode && CONSTANT_P (from))) | |
590 | { | |
591 | emit_move_insn (to, from); | |
592 | return; | |
593 | } | |
594 | ||
8a95ab85 | 595 | if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode)) |
596 | { | |
597 | if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode)) | |
598 | abort (); | |
fa56dc1d | 599 | |
8a95ab85 | 600 | if (VECTOR_MODE_P (to_mode)) |
1c0d4c2c | 601 | from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0); |
8a95ab85 | 602 | else |
1c0d4c2c | 603 | to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0); |
8a95ab85 | 604 | |
605 | emit_move_insn (to, from); | |
606 | return; | |
607 | } | |
608 | ||
609 | if (to_real != from_real) | |
610 | abort (); | |
611 | ||
10f307d9 | 612 | if (to_real) |
613 | { | |
542baf17 | 614 | rtx value, insns; |
ece3ba9a | 615 | |
dd8a4c60 | 616 | if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)) |
8e9a8d6d | 617 | { |
dd8a4c60 | 618 | /* Try converting directly if the insn is supported. */ |
619 | if ((code = can_extend_p (to_mode, from_mode, 0)) | |
620 | != CODE_FOR_nothing) | |
621 | { | |
622 | emit_unop_insn (code, to, from, UNKNOWN); | |
623 | return; | |
624 | } | |
10f307d9 | 625 | } |
fa56dc1d | 626 | |
8e9a8d6d | 627 | #ifdef HAVE_trunchfqf2 |
628 | if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode) | |
629 | { | |
630 | emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN); | |
631 | return; | |
632 | } | |
633 | #endif | |
de253fa3 | 634 | #ifdef HAVE_trunctqfqf2 |
635 | if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode) | |
636 | { | |
637 | emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN); | |
638 | return; | |
639 | } | |
640 | #endif | |
8e9a8d6d | 641 | #ifdef HAVE_truncsfqf2 |
642 | if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode) | |
643 | { | |
644 | emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN); | |
645 | return; | |
646 | } | |
647 | #endif | |
648 | #ifdef HAVE_truncdfqf2 | |
649 | if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode) | |
650 | { | |
651 | emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN); | |
652 | return; | |
653 | } | |
654 | #endif | |
655 | #ifdef HAVE_truncxfqf2 | |
656 | if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode) | |
657 | { | |
658 | emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN); | |
659 | return; | |
660 | } | |
661 | #endif | |
662 | #ifdef HAVE_trunctfqf2 | |
663 | if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode) | |
664 | { | |
665 | emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN); | |
666 | return; | |
667 | } | |
668 | #endif | |
10d75470 | 669 | |
670 | #ifdef HAVE_trunctqfhf2 | |
671 | if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode) | |
672 | { | |
673 | emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN); | |
674 | return; | |
675 | } | |
676 | #endif | |
8e9a8d6d | 677 | #ifdef HAVE_truncsfhf2 |
678 | if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode) | |
679 | { | |
680 | emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN); | |
681 | return; | |
682 | } | |
683 | #endif | |
684 | #ifdef HAVE_truncdfhf2 | |
685 | if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode) | |
686 | { | |
687 | emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN); | |
688 | return; | |
689 | } | |
690 | #endif | |
691 | #ifdef HAVE_truncxfhf2 | |
692 | if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode) | |
693 | { | |
694 | emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN); | |
695 | return; | |
696 | } | |
697 | #endif | |
698 | #ifdef HAVE_trunctfhf2 | |
699 | if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode) | |
700 | { | |
701 | emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN); | |
702 | return; | |
703 | } | |
704 | #endif | |
dd8a4c60 | 705 | |
706 | #ifdef HAVE_truncsftqf2 | |
707 | if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode) | |
708 | { | |
709 | emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN); | |
710 | return; | |
711 | } | |
712 | #endif | |
713 | #ifdef HAVE_truncdftqf2 | |
714 | if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode) | |
715 | { | |
716 | emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN); | |
717 | return; | |
718 | } | |
719 | #endif | |
720 | #ifdef HAVE_truncxftqf2 | |
721 | if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode) | |
722 | { | |
723 | emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN); | |
724 | return; | |
725 | } | |
726 | #endif | |
727 | #ifdef HAVE_trunctftqf2 | |
728 | if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode) | |
729 | { | |
730 | emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN); | |
731 | return; | |
732 | } | |
733 | #endif | |
734 | ||
10f307d9 | 735 | #ifdef HAVE_truncdfsf2 |
736 | if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode) | |
737 | { | |
738 | emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN); | |
739 | return; | |
740 | } | |
741 | #endif | |
c2a91a88 | 742 | #ifdef HAVE_truncxfsf2 |
743 | if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode) | |
744 | { | |
745 | emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN); | |
746 | return; | |
747 | } | |
748 | #endif | |
10f307d9 | 749 | #ifdef HAVE_trunctfsf2 |
750 | if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode) | |
751 | { | |
752 | emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN); | |
753 | return; | |
754 | } | |
755 | #endif | |
c2a91a88 | 756 | #ifdef HAVE_truncxfdf2 |
757 | if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode) | |
758 | { | |
759 | emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN); | |
760 | return; | |
761 | } | |
762 | #endif | |
10f307d9 | 763 | #ifdef HAVE_trunctfdf2 |
764 | if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode) | |
765 | { | |
766 | emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN); | |
767 | return; | |
768 | } | |
769 | #endif | |
770 | ||
c2a91a88 | 771 | libcall = (rtx) 0; |
772 | switch (from_mode) | |
773 | { | |
774 | case SFmode: | |
775 | switch (to_mode) | |
776 | { | |
777 | case DFmode: | |
778 | libcall = extendsfdf2_libfunc; | |
779 | break; | |
780 | ||
781 | case XFmode: | |
782 | libcall = extendsfxf2_libfunc; | |
783 | break; | |
784 | ||
785 | case TFmode: | |
786 | libcall = extendsftf2_libfunc; | |
787 | break; | |
fa56dc1d | 788 | |
0dbd1c74 | 789 | default: |
790 | break; | |
c2a91a88 | 791 | } |
792 | break; | |
793 | ||
794 | case DFmode: | |
795 | switch (to_mode) | |
796 | { | |
797 | case SFmode: | |
798 | libcall = truncdfsf2_libfunc; | |
799 | break; | |
800 | ||
801 | case XFmode: | |
802 | libcall = extenddfxf2_libfunc; | |
803 | break; | |
804 | ||
805 | case TFmode: | |
806 | libcall = extenddftf2_libfunc; | |
807 | break; | |
fa56dc1d | 808 | |
0dbd1c74 | 809 | default: |
810 | break; | |
c2a91a88 | 811 | } |
812 | break; | |
813 | ||
814 | case XFmode: | |
815 | switch (to_mode) | |
816 | { | |
817 | case SFmode: | |
818 | libcall = truncxfsf2_libfunc; | |
819 | break; | |
820 | ||
821 | case DFmode: | |
822 | libcall = truncxfdf2_libfunc; | |
823 | break; | |
fa56dc1d | 824 | |
0dbd1c74 | 825 | default: |
826 | break; | |
c2a91a88 | 827 | } |
828 | break; | |
829 | ||
830 | case TFmode: | |
831 | switch (to_mode) | |
832 | { | |
833 | case SFmode: | |
834 | libcall = trunctfsf2_libfunc; | |
835 | break; | |
836 | ||
837 | case DFmode: | |
838 | libcall = trunctfdf2_libfunc; | |
839 | break; | |
fa56dc1d | 840 | |
0dbd1c74 | 841 | default: |
842 | break; | |
c2a91a88 | 843 | } |
844 | break; | |
fa56dc1d | 845 | |
0dbd1c74 | 846 | default: |
847 | break; | |
c2a91a88 | 848 | } |
849 | ||
850 | if (libcall == (rtx) 0) | |
851 | /* This conversion is not implemented yet. */ | |
10f307d9 | 852 | abort (); |
853 | ||
542baf17 | 854 | start_sequence (); |
2c5d421b | 855 | value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode, |
ece3ba9a | 856 | 1, from, from_mode); |
542baf17 | 857 | insns = get_insns (); |
858 | end_sequence (); | |
859 | emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode, | |
860 | from)); | |
10f307d9 | 861 | return; |
862 | } | |
863 | ||
864 | /* Now both modes are integers. */ | |
865 | ||
866 | /* Handle expanding beyond a word. */ | |
867 | if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode) | |
868 | && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD) | |
869 | { | |
870 | rtx insns; | |
871 | rtx lowpart; | |
872 | rtx fill_value; | |
873 | rtx lowfrom; | |
874 | int i; | |
875 | enum machine_mode lowpart_mode; | |
876 | int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD); | |
877 | ||
878 | /* Try converting directly if the insn is supported. */ | |
879 | if ((code = can_extend_p (to_mode, from_mode, unsignedp)) | |
880 | != CODE_FOR_nothing) | |
881 | { | |
6a0b5011 | 882 | /* If FROM is a SUBREG, put it into a register. Do this |
883 | so that we always generate the same set of insns for | |
884 | better cse'ing; if an intermediate assignment occurred, | |
885 | we won't be doing the operation directly on the SUBREG. */ | |
886 | if (optimize > 0 && GET_CODE (from) == SUBREG) | |
887 | from = force_reg (from_mode, from); | |
10f307d9 | 888 | emit_unop_insn (code, to, from, equiv_code); |
889 | return; | |
890 | } | |
891 | /* Next, try converting via full word. */ | |
892 | else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD | |
893 | && ((code = can_extend_p (to_mode, word_mode, unsignedp)) | |
894 | != CODE_FOR_nothing)) | |
895 | { | |
676a4833 | 896 | if (GET_CODE (to) == REG) |
941522d6 | 897 | emit_insn (gen_rtx_CLOBBER (VOIDmode, to)); |
10f307d9 | 898 | convert_move (gen_lowpart (word_mode, to), from, unsignedp); |
899 | emit_unop_insn (code, to, | |
900 | gen_lowpart (word_mode, to), equiv_code); | |
901 | return; | |
902 | } | |
903 | ||
904 | /* No special multiword conversion insn; do it by hand. */ | |
905 | start_sequence (); | |
906 | ||
ab72e117 | 907 | /* Since we will turn this into a no conflict block, we must ensure |
908 | that the source does not overlap the target. */ | |
909 | ||
910 | if (reg_overlap_mentioned_p (to, from)) | |
911 | from = force_reg (from_mode, from); | |
912 | ||
10f307d9 | 913 | /* Get a copy of FROM widened to a word, if necessary. */ |
914 | if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD) | |
915 | lowpart_mode = word_mode; | |
916 | else | |
917 | lowpart_mode = from_mode; | |
918 | ||
919 | lowfrom = convert_to_mode (lowpart_mode, from, unsignedp); | |
920 | ||
921 | lowpart = gen_lowpart (lowpart_mode, to); | |
922 | emit_move_insn (lowpart, lowfrom); | |
923 | ||
924 | /* Compute the value to put in each remaining word. */ | |
925 | if (unsignedp) | |
926 | fill_value = const0_rtx; | |
927 | else | |
928 | { | |
929 | #ifdef HAVE_slt | |
930 | if (HAVE_slt | |
6357eaae | 931 | && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode |
10f307d9 | 932 | && STORE_FLAG_VALUE == -1) |
933 | { | |
b572011e | 934 | emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX, |
2b96c5f6 | 935 | lowpart_mode, 0); |
10f307d9 | 936 | fill_value = gen_reg_rtx (word_mode); |
937 | emit_insn (gen_slt (fill_value)); | |
938 | } | |
939 | else | |
940 | #endif | |
941 | { | |
942 | fill_value | |
943 | = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom, | |
944 | size_int (GET_MODE_BITSIZE (lowpart_mode) - 1), | |
b572011e | 945 | NULL_RTX, 0); |
10f307d9 | 946 | fill_value = convert_to_mode (word_mode, fill_value, 1); |
947 | } | |
948 | } | |
949 | ||
950 | /* Fill the remaining words. */ | |
951 | for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++) | |
952 | { | |
953 | int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i); | |
954 | rtx subword = operand_subword (to, index, 1, to_mode); | |
955 | ||
956 | if (subword == 0) | |
957 | abort (); | |
958 | ||
959 | if (fill_value != subword) | |
960 | emit_move_insn (subword, fill_value); | |
961 | } | |
962 | ||
963 | insns = get_insns (); | |
964 | end_sequence (); | |
965 | ||
b572011e | 966 | emit_no_conflict_block (insns, to, from, NULL_RTX, |
941522d6 | 967 | gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from))); |
10f307d9 | 968 | return; |
969 | } | |
970 | ||
5602c36d | 971 | /* Truncating multi-word to a word or less. */ |
972 | if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD | |
973 | && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD) | |
10f307d9 | 974 | { |
d5601bb1 | 975 | if (!((GET_CODE (from) == MEM |
976 | && ! MEM_VOLATILE_P (from) | |
977 | && direct_load[(int) to_mode] | |
978 | && ! mode_dependent_address_p (XEXP (from, 0))) | |
979 | || GET_CODE (from) == REG | |
980 | || GET_CODE (from) == SUBREG)) | |
981 | from = force_reg (from_mode, from); | |
10f307d9 | 982 | convert_move (to, gen_lowpart (word_mode, from), 0); |
983 | return; | |
984 | } | |
985 | ||
fa56dc1d | 986 | /* Handle pointer conversion. */ /* SPEE 900220. */ |
997d68fe | 987 | if (to_mode == PQImode) |
988 | { | |
989 | if (from_mode != QImode) | |
990 | from = convert_to_mode (QImode, from, unsignedp); | |
991 | ||
992 | #ifdef HAVE_truncqipqi2 | |
993 | if (HAVE_truncqipqi2) | |
994 | { | |
995 | emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN); | |
996 | return; | |
997 | } | |
998 | #endif /* HAVE_truncqipqi2 */ | |
999 | abort (); | |
1000 | } | |
1001 | ||
1002 | if (from_mode == PQImode) | |
1003 | { | |
1004 | if (to_mode != QImode) | |
1005 | { | |
1006 | from = convert_to_mode (QImode, from, unsignedp); | |
1007 | from_mode = QImode; | |
1008 | } | |
1009 | else | |
1010 | { | |
1011 | #ifdef HAVE_extendpqiqi2 | |
1012 | if (HAVE_extendpqiqi2) | |
1013 | { | |
1014 | emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN); | |
1015 | return; | |
1016 | } | |
1017 | #endif /* HAVE_extendpqiqi2 */ | |
1018 | abort (); | |
1019 | } | |
1020 | } | |
1021 | ||
10f307d9 | 1022 | if (to_mode == PSImode) |
1023 | { | |
1024 | if (from_mode != SImode) | |
1025 | from = convert_to_mode (SImode, from, unsignedp); | |
1026 | ||
8b8ce612 | 1027 | #ifdef HAVE_truncsipsi2 |
1028 | if (HAVE_truncsipsi2) | |
10f307d9 | 1029 | { |
8b8ce612 | 1030 | emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN); |
10f307d9 | 1031 | return; |
1032 | } | |
8b8ce612 | 1033 | #endif /* HAVE_truncsipsi2 */ |
10f307d9 | 1034 | abort (); |
1035 | } | |
1036 | ||
1037 | if (from_mode == PSImode) | |
1038 | { | |
1039 | if (to_mode != SImode) | |
1040 | { | |
1041 | from = convert_to_mode (SImode, from, unsignedp); | |
1042 | from_mode = SImode; | |
1043 | } | |
1044 | else | |
1045 | { | |
8b8ce612 | 1046 | #ifdef HAVE_extendpsisi2 |
12882c87 | 1047 | if (! unsignedp && HAVE_extendpsisi2) |
10f307d9 | 1048 | { |
8b8ce612 | 1049 | emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN); |
10f307d9 | 1050 | return; |
1051 | } | |
8b8ce612 | 1052 | #endif /* HAVE_extendpsisi2 */ |
12882c87 | 1053 | #ifdef HAVE_zero_extendpsisi2 |
1054 | if (unsignedp && HAVE_zero_extendpsisi2) | |
1055 | { | |
1056 | emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN); | |
1057 | return; | |
1058 | } | |
1059 | #endif /* HAVE_zero_extendpsisi2 */ | |
10f307d9 | 1060 | abort (); |
1061 | } | |
1062 | } | |
1063 | ||
9b0c8597 | 1064 | if (to_mode == PDImode) |
1065 | { | |
1066 | if (from_mode != DImode) | |
1067 | from = convert_to_mode (DImode, from, unsignedp); | |
1068 | ||
1069 | #ifdef HAVE_truncdipdi2 | |
1070 | if (HAVE_truncdipdi2) | |
1071 | { | |
1072 | emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN); | |
1073 | return; | |
1074 | } | |
1075 | #endif /* HAVE_truncdipdi2 */ | |
1076 | abort (); | |
1077 | } | |
1078 | ||
1079 | if (from_mode == PDImode) | |
1080 | { | |
1081 | if (to_mode != DImode) | |
1082 | { | |
1083 | from = convert_to_mode (DImode, from, unsignedp); | |
1084 | from_mode = DImode; | |
1085 | } | |
1086 | else | |
1087 | { | |
1088 | #ifdef HAVE_extendpdidi2 | |
1089 | if (HAVE_extendpdidi2) | |
1090 | { | |
1091 | emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN); | |
1092 | return; | |
1093 | } | |
1094 | #endif /* HAVE_extendpdidi2 */ | |
1095 | abort (); | |
1096 | } | |
1097 | } | |
1098 | ||
10f307d9 | 1099 | /* Now follow all the conversions between integers |
1100 | no more than a word long. */ | |
1101 | ||
1102 | /* For truncation, usually we can just refer to FROM in a narrower mode. */ | |
1103 | if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode) | |
1104 | && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode), | |
5602c36d | 1105 | GET_MODE_BITSIZE (from_mode))) |
10f307d9 | 1106 | { |
5602c36d | 1107 | if (!((GET_CODE (from) == MEM |
1108 | && ! MEM_VOLATILE_P (from) | |
1109 | && direct_load[(int) to_mode] | |
1110 | && ! mode_dependent_address_p (XEXP (from, 0))) | |
1111 | || GET_CODE (from) == REG | |
1112 | || GET_CODE (from) == SUBREG)) | |
1113 | from = force_reg (from_mode, from); | |
7de79a05 | 1114 | if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER |
1115 | && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode)) | |
1116 | from = copy_to_reg (from); | |
10f307d9 | 1117 | emit_move_insn (to, gen_lowpart (to_mode, from)); |
1118 | return; | |
1119 | } | |
1120 | ||
5602c36d | 1121 | /* Handle extension. */ |
10f307d9 | 1122 | if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode)) |
1123 | { | |
1124 | /* Convert directly if that works. */ | |
1125 | if ((code = can_extend_p (to_mode, from_mode, unsignedp)) | |
1126 | != CODE_FOR_nothing) | |
1127 | { | |
d243ee41 | 1128 | if (flag_force_mem) |
1129 | from = force_not_mem (from); | |
1130 | ||
10f307d9 | 1131 | emit_unop_insn (code, to, from, equiv_code); |
1132 | return; | |
1133 | } | |
1134 | else | |
1135 | { | |
1136 | enum machine_mode intermediate; | |
851e6849 | 1137 | rtx tmp; |
1138 | tree shift_amount; | |
10f307d9 | 1139 | |
1140 | /* Search for a mode to convert via. */ | |
1141 | for (intermediate = from_mode; intermediate != VOIDmode; | |
1142 | intermediate = GET_MODE_WIDER_MODE (intermediate)) | |
0f22a35c | 1143 | if (((can_extend_p (to_mode, intermediate, unsignedp) |
1144 | != CODE_FOR_nothing) | |
1145 | || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate) | |
fc10bba7 | 1146 | && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode), |
1147 | GET_MODE_BITSIZE (intermediate)))) | |
10f307d9 | 1148 | && (can_extend_p (intermediate, from_mode, unsignedp) |
1149 | != CODE_FOR_nothing)) | |
1150 | { | |
1151 | convert_move (to, convert_to_mode (intermediate, from, | |
1152 | unsignedp), unsignedp); | |
1153 | return; | |
1154 | } | |
1155 | ||
851e6849 | 1156 | /* No suitable intermediate mode. |
fa56dc1d | 1157 | Generate what we need with shifts. */ |
851e6849 | 1158 | shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode) |
1159 | - GET_MODE_BITSIZE (from_mode), 0); | |
1160 | from = gen_lowpart (to_mode, force_reg (from_mode, from)); | |
1161 | tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount, | |
1162 | to, unsignedp); | |
fa56dc1d | 1163 | tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount, |
851e6849 | 1164 | to, unsignedp); |
1165 | if (tmp != to) | |
1166 | emit_move_insn (to, tmp); | |
1167 | return; | |
10f307d9 | 1168 | } |
1169 | } | |
1170 | ||
fa56dc1d | 1171 | /* Support special truncate insns for certain modes. */ |
10f307d9 | 1172 | |
1173 | if (from_mode == DImode && to_mode == SImode) | |
1174 | { | |
1175 | #ifdef HAVE_truncdisi2 | |
1176 | if (HAVE_truncdisi2) | |
1177 | { | |
1178 | emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN); | |
1179 | return; | |
1180 | } | |
1181 | #endif | |
1182 | convert_move (to, force_reg (from_mode, from), unsignedp); | |
1183 | return; | |
1184 | } | |
1185 | ||
1186 | if (from_mode == DImode && to_mode == HImode) | |
1187 | { | |
1188 | #ifdef HAVE_truncdihi2 | |
1189 | if (HAVE_truncdihi2) | |
1190 | { | |
1191 | emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN); | |
1192 | return; | |
1193 | } | |
1194 | #endif | |
1195 | convert_move (to, force_reg (from_mode, from), unsignedp); | |
1196 | return; | |
1197 | } | |
1198 | ||
1199 | if (from_mode == DImode && to_mode == QImode) | |
1200 | { | |
1201 | #ifdef HAVE_truncdiqi2 | |
1202 | if (HAVE_truncdiqi2) | |
1203 | { | |
1204 | emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN); | |
1205 | return; | |
1206 | } | |
1207 | #endif | |
1208 | convert_move (to, force_reg (from_mode, from), unsignedp); | |
1209 | return; | |
1210 | } | |
1211 | ||
1212 | if (from_mode == SImode && to_mode == HImode) | |
1213 | { | |
1214 | #ifdef HAVE_truncsihi2 | |
1215 | if (HAVE_truncsihi2) | |
1216 | { | |
1217 | emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN); | |
1218 | return; | |
1219 | } | |
1220 | #endif | |
1221 | convert_move (to, force_reg (from_mode, from), unsignedp); | |
1222 | return; | |
1223 | } | |
1224 | ||
1225 | if (from_mode == SImode && to_mode == QImode) | |
1226 | { | |
1227 | #ifdef HAVE_truncsiqi2 | |
1228 | if (HAVE_truncsiqi2) | |
1229 | { | |
1230 | emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN); | |
1231 | return; | |
1232 | } | |
1233 | #endif | |
1234 | convert_move (to, force_reg (from_mode, from), unsignedp); | |
1235 | return; | |
1236 | } | |
1237 | ||
1238 | if (from_mode == HImode && to_mode == QImode) | |
1239 | { | |
1240 | #ifdef HAVE_trunchiqi2 | |
1241 | if (HAVE_trunchiqi2) | |
1242 | { | |
1243 | emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN); | |
1244 | return; | |
1245 | } | |
1246 | #endif | |
1247 | convert_move (to, force_reg (from_mode, from), unsignedp); | |
1248 | return; | |
1249 | } | |
1250 | ||
cd0fdd24 | 1251 | if (from_mode == TImode && to_mode == DImode) |
1252 | { | |
1253 | #ifdef HAVE_trunctidi2 | |
1254 | if (HAVE_trunctidi2) | |
1255 | { | |
1256 | emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN); | |
1257 | return; | |
1258 | } | |
1259 | #endif | |
1260 | convert_move (to, force_reg (from_mode, from), unsignedp); | |
1261 | return; | |
1262 | } | |
1263 | ||
1264 | if (from_mode == TImode && to_mode == SImode) | |
1265 | { | |
1266 | #ifdef HAVE_trunctisi2 | |
1267 | if (HAVE_trunctisi2) | |
1268 | { | |
1269 | emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN); | |
1270 | return; | |
1271 | } | |
1272 | #endif | |
1273 | convert_move (to, force_reg (from_mode, from), unsignedp); | |
1274 | return; | |
1275 | } | |
1276 | ||
1277 | if (from_mode == TImode && to_mode == HImode) | |
1278 | { | |
1279 | #ifdef HAVE_trunctihi2 | |
1280 | if (HAVE_trunctihi2) | |
1281 | { | |
1282 | emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN); | |
1283 | return; | |
1284 | } | |
1285 | #endif | |
1286 | convert_move (to, force_reg (from_mode, from), unsignedp); | |
1287 | return; | |
1288 | } | |
1289 | ||
1290 | if (from_mode == TImode && to_mode == QImode) | |
1291 | { | |
1292 | #ifdef HAVE_trunctiqi2 | |
1293 | if (HAVE_trunctiqi2) | |
1294 | { | |
1295 | emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN); | |
1296 | return; | |
1297 | } | |
1298 | #endif | |
1299 | convert_move (to, force_reg (from_mode, from), unsignedp); | |
1300 | return; | |
1301 | } | |
1302 | ||
10f307d9 | 1303 | /* Handle truncation of volatile memrefs, and so on; |
1304 | the things that couldn't be truncated directly, | |
1305 | and for which there was no special instruction. */ | |
1306 | if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)) | |
1307 | { | |
1308 | rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from)); | |
1309 | emit_move_insn (to, temp); | |
1310 | return; | |
1311 | } | |
1312 | ||
1313 | /* Mode combination is not recognized. */ | |
1314 | abort (); | |
1315 | } | |
1316 | ||
1317 | /* Return an rtx for a value that would result | |
1318 | from converting X to mode MODE. | |
1319 | Both X and MODE may be floating, or both integer. | |
1320 | UNSIGNEDP is nonzero if X is an unsigned value. | |
1321 | This can be done by referring to a part of X in place | |
2e906c1b | 1322 | or by copying to a new temporary with conversion. |
1323 | ||
1324 | This function *must not* call protect_from_queue | |
1325 | except when putting X into an insn (in which case convert_move does it). */ | |
10f307d9 | 1326 | |
1327 | rtx | |
1328 | convert_to_mode (mode, x, unsignedp) | |
1329 | enum machine_mode mode; | |
1330 | rtx x; | |
1331 | int unsignedp; | |
a63e1c46 | 1332 | { |
1333 | return convert_modes (mode, VOIDmode, x, unsignedp); | |
1334 | } | |
1335 | ||
1336 | /* Return an rtx for a value that would result | |
1337 | from converting X from mode OLDMODE to mode MODE. | |
1338 | Both modes may be floating, or both integer. | |
1339 | UNSIGNEDP is nonzero if X is an unsigned value. | |
1340 | ||
1341 | This can be done by referring to a part of X in place | |
1342 | or by copying to a new temporary with conversion. | |
1343 | ||
1344 | You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. | |
1345 | ||
1346 | This function *must not* call protect_from_queue | |
1347 | except when putting X into an insn (in which case convert_move does it). */ | |
1348 | ||
1349 | rtx | |
1350 | convert_modes (mode, oldmode, x, unsignedp) | |
1351 | enum machine_mode mode, oldmode; | |
1352 | rtx x; | |
1353 | int unsignedp; | |
10f307d9 | 1354 | { |
19cb6b50 | 1355 | rtx temp; |
a63e1c46 | 1356 | |
acfb31e5 | 1357 | /* If FROM is a SUBREG that indicates that we have already done at least |
1358 | the required extension, strip it. */ | |
1359 | ||
1360 | if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x) | |
1361 | && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode) | |
1362 | && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp) | |
1363 | x = gen_lowpart (mode, x); | |
10f307d9 | 1364 | |
e4d9bbd7 | 1365 | if (GET_MODE (x) != VOIDmode) |
1366 | oldmode = GET_MODE (x); | |
fa56dc1d | 1367 | |
a63e1c46 | 1368 | if (mode == oldmode) |
10f307d9 | 1369 | return x; |
1370 | ||
1371 | /* There is one case that we must handle specially: If we are converting | |
b572011e | 1372 | a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and |
10f307d9 | 1373 | we are to interpret the constant as unsigned, gen_lowpart will do |
1374 | the wrong if the constant appears negative. What we want to do is | |
1375 | make the high-order word of the constant zero, not all ones. */ | |
1376 | ||
1377 | if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT | |
b572011e | 1378 | && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT |
10f307d9 | 1379 | && GET_CODE (x) == CONST_INT && INTVAL (x) < 0) |
e92ac421 | 1380 | { |
1381 | HOST_WIDE_INT val = INTVAL (x); | |
1382 | ||
1383 | if (oldmode != VOIDmode | |
1384 | && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode)) | |
1385 | { | |
1386 | int width = GET_MODE_BITSIZE (oldmode); | |
1387 | ||
1388 | /* We need to zero extend VAL. */ | |
1389 | val &= ((HOST_WIDE_INT) 1 << width) - 1; | |
1390 | } | |
1391 | ||
1392 | return immed_double_const (val, (HOST_WIDE_INT) 0, mode); | |
1393 | } | |
10f307d9 | 1394 | |
1395 | /* We can do this with a gen_lowpart if both desired and current modes | |
1396 | are integer, and this is either a constant integer, a register, or a | |
d3177667 | 1397 | non-volatile MEM. Except for the constant case where MODE is no |
1398 | wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */ | |
10f307d9 | 1399 | |
d3177667 | 1400 | if ((GET_CODE (x) == CONST_INT |
1401 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) | |
10f307d9 | 1402 | || (GET_MODE_CLASS (mode) == MODE_INT |
a63e1c46 | 1403 | && GET_MODE_CLASS (oldmode) == MODE_INT |
10f307d9 | 1404 | && (GET_CODE (x) == CONST_DOUBLE |
a63e1c46 | 1405 | || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode) |
e2f3d3c1 | 1406 | && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x) |
1407 | && direct_load[(int) mode]) | |
ec557174 | 1408 | || (GET_CODE (x) == REG |
1409 | && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode), | |
1410 | GET_MODE_BITSIZE (GET_MODE (x))))))))) | |
d3177667 | 1411 | { |
1412 | /* ?? If we don't know OLDMODE, we have to assume here that | |
1413 | X does not need sign- or zero-extension. This may not be | |
1414 | the case, but it's the best we can do. */ | |
1415 | if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode | |
1416 | && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode)) | |
1417 | { | |
1418 | HOST_WIDE_INT val = INTVAL (x); | |
1419 | int width = GET_MODE_BITSIZE (oldmode); | |
1420 | ||
1421 | /* We must sign or zero-extend in this case. Start by | |
1422 | zero-extending, then sign extend if we need to. */ | |
1423 | val &= ((HOST_WIDE_INT) 1 << width) - 1; | |
1424 | if (! unsignedp | |
1425 | && (val & ((HOST_WIDE_INT) 1 << (width - 1)))) | |
1426 | val |= (HOST_WIDE_INT) (-1) << width; | |
1427 | ||
2d232d05 | 1428 | return gen_int_mode (val, mode); |
d3177667 | 1429 | } |
1430 | ||
1431 | return gen_lowpart (mode, x); | |
1432 | } | |
10f307d9 | 1433 | |
1434 | temp = gen_reg_rtx (mode); | |
1435 | convert_move (temp, x, unsignedp); | |
1436 | return temp; | |
1437 | } | |
1438 | \f | |
53bd09ab | 1439 | /* This macro is used to determine what the largest unit size that |
fa56dc1d | 1440 | move_by_pieces can use is. */ |
53bd09ab | 1441 | |
1442 | /* MOVE_MAX_PIECES is the number of bytes at a time which we can | |
1443 | move efficiently, as opposed to MOVE_MAX which is the maximum | |
fa56dc1d | 1444 | number of bytes we can move with a single instruction. */ |
53bd09ab | 1445 | |
1446 | #ifndef MOVE_MAX_PIECES | |
1447 | #define MOVE_MAX_PIECES MOVE_MAX | |
1448 | #endif | |
1449 | ||
d1f6ae0c | 1450 | /* STORE_MAX_PIECES is the number of bytes at a time that we can |
1451 | store efficiently. Due to internal GCC limitations, this is | |
1452 | MOVE_MAX_PIECES limited by the number of bytes GCC can represent | |
1453 | for an immediate constant. */ | |
1454 | ||
1455 | #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT)) | |
1456 | ||
fad4a30c | 1457 | /* Generate several move instructions to copy LEN bytes from block FROM to |
1458 | block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM | |
1459 | and TO through protect_from_queue before calling. | |
ef7dc4b4 | 1460 | |
fad4a30c | 1461 | If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is |
1462 | used to push FROM to the stack. | |
ef7dc4b4 | 1463 | |
325d1c45 | 1464 | ALIGN is maximum alignment we can assume. */ |
10f307d9 | 1465 | |
9d7facc8 | 1466 | void |
10f307d9 | 1467 | move_by_pieces (to, from, len, align) |
1468 | rtx to, from; | |
f7c44134 | 1469 | unsigned HOST_WIDE_INT len; |
fe352cf1 | 1470 | unsigned int align; |
10f307d9 | 1471 | { |
1472 | struct move_by_pieces data; | |
ef7dc4b4 | 1473 | rtx to_addr, from_addr = XEXP (from, 0); |
02e7a332 | 1474 | unsigned int max_size = MOVE_MAX_PIECES + 1; |
53bd09ab | 1475 | enum machine_mode mode = VOIDmode, tmode; |
1476 | enum insn_code icode; | |
10f307d9 | 1477 | |
1478 | data.offset = 0; | |
10f307d9 | 1479 | data.from_addr = from_addr; |
ef7dc4b4 | 1480 | if (to) |
1481 | { | |
1482 | to_addr = XEXP (to, 0); | |
1483 | data.to = to; | |
1484 | data.autinc_to | |
1485 | = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC | |
1486 | || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC); | |
1487 | data.reverse | |
1488 | = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC); | |
1489 | } | |
1490 | else | |
1491 | { | |
1492 | to_addr = NULL_RTX; | |
1493 | data.to = NULL_RTX; | |
1494 | data.autinc_to = 1; | |
1495 | #ifdef STACK_GROWS_DOWNWARD | |
1496 | data.reverse = 1; | |
1497 | #else | |
1498 | data.reverse = 0; | |
1499 | #endif | |
1500 | } | |
1501 | data.to_addr = to_addr; | |
10f307d9 | 1502 | data.from = from; |
10f307d9 | 1503 | data.autinc_from |
1504 | = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC | |
1505 | || GET_CODE (from_addr) == POST_INC | |
1506 | || GET_CODE (from_addr) == POST_DEC); | |
1507 | ||
1508 | data.explicit_inc_from = 0; | |
1509 | data.explicit_inc_to = 0; | |
10f307d9 | 1510 | if (data.reverse) data.offset = len; |
1511 | data.len = len; | |
1512 | ||
1513 | /* If copying requires more than two move insns, | |
1514 | copy addresses to registers (to make displacements shorter) | |
1515 | and use post-increment if available. */ | |
1516 | if (!(data.autinc_from && data.autinc_to) | |
1517 | && move_by_pieces_ninsns (len, align) > 2) | |
1518 | { | |
fa56dc1d | 1519 | /* Find the mode of the largest move... */ |
53bd09ab | 1520 | for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); |
1521 | tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode)) | |
1522 | if (GET_MODE_SIZE (tmode) < max_size) | |
1523 | mode = tmode; | |
1524 | ||
1525 | if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from) | |
10f307d9 | 1526 | { |
1527 | data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len)); | |
1528 | data.autinc_from = 1; | |
1529 | data.explicit_inc_from = -1; | |
1530 | } | |
53bd09ab | 1531 | if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from) |
10f307d9 | 1532 | { |
1533 | data.from_addr = copy_addr_to_reg (from_addr); | |
1534 | data.autinc_from = 1; | |
1535 | data.explicit_inc_from = 1; | |
1536 | } | |
10f307d9 | 1537 | if (!data.autinc_from && CONSTANT_P (from_addr)) |
1538 | data.from_addr = copy_addr_to_reg (from_addr); | |
53bd09ab | 1539 | if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to) |
10f307d9 | 1540 | { |
1541 | data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len)); | |
1542 | data.autinc_to = 1; | |
1543 | data.explicit_inc_to = -1; | |
1544 | } | |
53bd09ab | 1545 | if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to) |
10f307d9 | 1546 | { |
1547 | data.to_addr = copy_addr_to_reg (to_addr); | |
1548 | data.autinc_to = 1; | |
1549 | data.explicit_inc_to = 1; | |
1550 | } | |
10f307d9 | 1551 | if (!data.autinc_to && CONSTANT_P (to_addr)) |
1552 | data.to_addr = copy_addr_to_reg (to_addr); | |
1553 | } | |
1554 | ||
9439ebf7 | 1555 | if (! SLOW_UNALIGNED_ACCESS (word_mode, align) |
325d1c45 | 1556 | || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT) |
1557 | align = MOVE_MAX * BITS_PER_UNIT; | |
10f307d9 | 1558 | |
1559 | /* First move what we can in the largest integer mode, then go to | |
1560 | successively smaller modes. */ | |
1561 | ||
1562 | while (max_size > 1) | |
1563 | { | |
01ab6370 | 1564 | for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); |
1565 | tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode)) | |
1566 | if (GET_MODE_SIZE (tmode) < max_size) | |
10f307d9 | 1567 | mode = tmode; |
1568 | ||
1569 | if (mode == VOIDmode) | |
1570 | break; | |
1571 | ||
1572 | icode = mov_optab->handlers[(int) mode].insn_code; | |
325d1c45 | 1573 | if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode)) |
10f307d9 | 1574 | move_by_pieces_1 (GEN_FCN (icode), mode, &data); |
1575 | ||
1576 | max_size = GET_MODE_SIZE (mode); | |
1577 | } | |
1578 | ||
1579 | /* The code above should have handled everything. */ | |
f9675788 | 1580 | if (data.len > 0) |
10f307d9 | 1581 | abort (); |
1582 | } | |
1583 | ||
1584 | /* Return number of insns required to move L bytes by pieces. | |
decd7a45 | 1585 | ALIGN (in bits) is maximum alignment we can assume. */ |
10f307d9 | 1586 | |
f7c44134 | 1587 | static unsigned HOST_WIDE_INT |
10f307d9 | 1588 | move_by_pieces_ninsns (l, align) |
f7c44134 | 1589 | unsigned HOST_WIDE_INT l; |
fe352cf1 | 1590 | unsigned int align; |
10f307d9 | 1591 | { |
f7c44134 | 1592 | unsigned HOST_WIDE_INT n_insns = 0; |
1593 | unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1; | |
10f307d9 | 1594 | |
9439ebf7 | 1595 | if (! SLOW_UNALIGNED_ACCESS (word_mode, align) |
325d1c45 | 1596 | || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT) |
a0eeb1e3 | 1597 | align = MOVE_MAX * BITS_PER_UNIT; |
10f307d9 | 1598 | |
1599 | while (max_size > 1) | |
1600 | { | |
1601 | enum machine_mode mode = VOIDmode, tmode; | |
1602 | enum insn_code icode; | |
1603 | ||
01ab6370 | 1604 | for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); |
1605 | tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode)) | |
1606 | if (GET_MODE_SIZE (tmode) < max_size) | |
10f307d9 | 1607 | mode = tmode; |
1608 | ||
1609 | if (mode == VOIDmode) | |
1610 | break; | |
1611 | ||
1612 | icode = mov_optab->handlers[(int) mode].insn_code; | |
325d1c45 | 1613 | if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode)) |
10f307d9 | 1614 | n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode); |
1615 | ||
1616 | max_size = GET_MODE_SIZE (mode); | |
1617 | } | |
1618 | ||
f9b86811 | 1619 | if (l) |
1620 | abort (); | |
10f307d9 | 1621 | return n_insns; |
1622 | } | |
1623 | ||
1624 | /* Subroutine of move_by_pieces. Move as many bytes as appropriate | |
1625 | with move instructions for mode MODE. GENFUN is the gen_... function | |
1626 | to make a move insn for that mode. DATA has all the other info. */ | |
1627 | ||
1628 | static void | |
1629 | move_by_pieces_1 (genfun, mode, data) | |
621f6678 | 1630 | rtx (*genfun) PARAMS ((rtx, ...)); |
10f307d9 | 1631 | enum machine_mode mode; |
1632 | struct move_by_pieces *data; | |
1633 | { | |
f7c44134 | 1634 | unsigned int size = GET_MODE_SIZE (mode); |
97b330ca | 1635 | rtx to1 = NULL_RTX, from1; |
10f307d9 | 1636 | |
1637 | while (data->len >= size) | |
1638 | { | |
f7c44134 | 1639 | if (data->reverse) |
1640 | data->offset -= size; | |
1641 | ||
ef7dc4b4 | 1642 | if (data->to) |
f7c44134 | 1643 | { |
ef7dc4b4 | 1644 | if (data->autinc_to) |
bf42c62d | 1645 | to1 = adjust_automodify_address (data->to, mode, data->to_addr, |
1646 | data->offset); | |
ef7dc4b4 | 1647 | else |
e513d163 | 1648 | to1 = adjust_address (data->to, mode, data->offset); |
f7c44134 | 1649 | } |
f7c44134 | 1650 | |
1651 | if (data->autinc_from) | |
bf42c62d | 1652 | from1 = adjust_automodify_address (data->from, mode, data->from_addr, |
1653 | data->offset); | |
f7c44134 | 1654 | else |
e513d163 | 1655 | from1 = adjust_address (data->from, mode, data->offset); |
10f307d9 | 1656 | |
e4e498cf | 1657 | if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0) |
e1855348 | 1658 | emit_insn (gen_add2_insn (data->to_addr, |
1659 | GEN_INT (-(HOST_WIDE_INT)size))); | |
e4e498cf | 1660 | if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0) |
e1855348 | 1661 | emit_insn (gen_add2_insn (data->from_addr, |
1662 | GEN_INT (-(HOST_WIDE_INT)size))); | |
10f307d9 | 1663 | |
ef7dc4b4 | 1664 | if (data->to) |
1665 | emit_insn ((*genfun) (to1, from1)); | |
1666 | else | |
fad4a30c | 1667 | { |
1668 | #ifdef PUSH_ROUNDING | |
1669 | emit_single_push_insn (mode, from1, NULL); | |
1670 | #else | |
1671 | abort (); | |
1672 | #endif | |
1673 | } | |
f7c44134 | 1674 | |
e4e498cf | 1675 | if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0) |
b572011e | 1676 | emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size))); |
e4e498cf | 1677 | if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0) |
b572011e | 1678 | emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size))); |
10f307d9 | 1679 | |
f7c44134 | 1680 | if (! data->reverse) |
1681 | data->offset += size; | |
10f307d9 | 1682 | |
1683 | data->len -= size; | |
1684 | } | |
1685 | } | |
1686 | \f | |
c0bfc78e | 1687 | /* Emit code to move a block Y to a block X. This may be done with |
1688 | string-move instructions, with multiple scalar move instructions, | |
1689 | or with a library call. | |
10f307d9 | 1690 | |
c0bfc78e | 1691 | Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode. |
10f307d9 | 1692 | SIZE is an rtx that says how long they are. |
325d1c45 | 1693 | ALIGN is the maximum alignment we can assume they have. |
0378dbdc | 1694 | METHOD describes what kind of copy this is, and what mechanisms may be used. |
10f307d9 | 1695 | |
0dbd1c74 | 1696 | Return the address of the new block, if memcpy is called and returns it, |
1697 | 0 otherwise. */ | |
1698 | ||
1699 | rtx | |
0378dbdc | 1700 | emit_block_move (x, y, size, method) |
c0bfc78e | 1701 | rtx x, y, size; |
0378dbdc | 1702 | enum block_op_methods method; |
10f307d9 | 1703 | { |
0378dbdc | 1704 | bool may_use_call; |
0dbd1c74 | 1705 | rtx retval = 0; |
0378dbdc | 1706 | unsigned int align; |
1707 | ||
1708 | switch (method) | |
1709 | { | |
1710 | case BLOCK_OP_NORMAL: | |
1711 | may_use_call = true; | |
1712 | break; | |
1713 | ||
1714 | case BLOCK_OP_CALL_PARM: | |
1715 | may_use_call = block_move_libcall_safe_for_call_parm (); | |
1716 | ||
1717 | /* Make inhibit_defer_pop nonzero around the library call | |
1718 | to force it to pop the arguments right away. */ | |
1719 | NO_DEFER_POP; | |
1720 | break; | |
1721 | ||
1722 | case BLOCK_OP_NO_LIBCALL: | |
1723 | may_use_call = false; | |
1724 | break; | |
1725 | ||
1726 | default: | |
1727 | abort (); | |
1728 | } | |
1729 | ||
1730 | align = MIN (MEM_ALIGN (x), MEM_ALIGN (y)); | |
0dbd1c74 | 1731 | |
10f307d9 | 1732 | if (GET_MODE (x) != BLKmode) |
1733 | abort (); | |
10f307d9 | 1734 | if (GET_MODE (y) != BLKmode) |
1735 | abort (); | |
1736 | ||
1737 | x = protect_from_queue (x, 1); | |
1738 | y = protect_from_queue (y, 0); | |
2e906c1b | 1739 | size = protect_from_queue (size, 0); |
10f307d9 | 1740 | |
1741 | if (GET_CODE (x) != MEM) | |
1742 | abort (); | |
1743 | if (GET_CODE (y) != MEM) | |
1744 | abort (); | |
1745 | if (size == 0) | |
1746 | abort (); | |
1747 | ||
e83ff88b | 1748 | /* Set MEM_SIZE as appropriate for this block copy. The main place this |
1749 | can be incorrect is coming from __builtin_memcpy. */ | |
1750 | if (GET_CODE (size) == CONST_INT) | |
1751 | { | |
1752 | x = shallow_copy_rtx (x); | |
1753 | y = shallow_copy_rtx (y); | |
1754 | set_mem_size (x, size); | |
1755 | set_mem_size (y, size); | |
1756 | } | |
1757 | ||
53bd09ab | 1758 | if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align)) |
10f307d9 | 1759 | move_by_pieces (x, y, INTVAL (size), align); |
c0bfc78e | 1760 | else if (emit_block_move_via_movstr (x, y, size, align)) |
1761 | ; | |
0378dbdc | 1762 | else if (may_use_call) |
c0bfc78e | 1763 | retval = emit_block_move_via_libcall (x, y, size); |
0378dbdc | 1764 | else |
1765 | emit_block_move_via_loop (x, y, size, align); | |
1766 | ||
1767 | if (method == BLOCK_OP_CALL_PARM) | |
1768 | OK_DEFER_POP; | |
a5fd5157 | 1769 | |
c0bfc78e | 1770 | return retval; |
1771 | } | |
a5fd5157 | 1772 | |
0378dbdc | 1773 | /* A subroutine of emit_block_move. Returns true if calling the |
1774 | block move libcall will not clobber any parameters which may have | |
1775 | already been placed on the stack. */ | |
1776 | ||
1777 | static bool | |
1778 | block_move_libcall_safe_for_call_parm () | |
1779 | { | |
1780 | if (PUSH_ARGS) | |
1781 | return true; | |
1782 | else | |
1783 | { | |
1784 | /* Check to see whether memcpy takes all register arguments. */ | |
1785 | static enum { | |
1786 | takes_regs_uninit, takes_regs_no, takes_regs_yes | |
1787 | } takes_regs = takes_regs_uninit; | |
1788 | ||
1789 | switch (takes_regs) | |
1790 | { | |
1791 | case takes_regs_uninit: | |
1792 | { | |
1793 | CUMULATIVE_ARGS args_so_far; | |
1794 | tree fn, arg; | |
1795 | ||
1796 | fn = emit_block_move_libcall_fn (false); | |
1797 | INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0); | |
1798 | ||
1799 | arg = TYPE_ARG_TYPES (TREE_TYPE (fn)); | |
1800 | for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg)) | |
1801 | { | |
719c3cf4 | 1802 | enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg)); |
0378dbdc | 1803 | rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1); |
1804 | if (!tmp || !REG_P (tmp)) | |
1805 | goto fail_takes_regs; | |
1806 | #ifdef FUNCTION_ARG_PARTIAL_NREGS | |
1807 | if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, | |
1808 | NULL_TREE, 1)) | |
1809 | goto fail_takes_regs; | |
1810 | #endif | |
1811 | FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1); | |
1812 | } | |
1813 | } | |
1814 | takes_regs = takes_regs_yes; | |
1815 | /* FALLTHRU */ | |
1816 | ||
1817 | case takes_regs_yes: | |
1818 | return true; | |
1819 | ||
1820 | fail_takes_regs: | |
1821 | takes_regs = takes_regs_no; | |
1822 | /* FALLTHRU */ | |
1823 | case takes_regs_no: | |
1824 | return false; | |
1825 | ||
1826 | default: | |
1827 | abort (); | |
1828 | } | |
1829 | } | |
1830 | } | |
1831 | ||
c0bfc78e | 1832 | /* A subroutine of emit_block_move. Expand a movstr pattern; |
1833 | return true if successful. */ | |
6702c250 | 1834 | |
c0bfc78e | 1835 | static bool |
1836 | emit_block_move_via_movstr (x, y, size, align) | |
1837 | rtx x, y, size; | |
1838 | unsigned int align; | |
1839 | { | |
1840 | /* Try the most limited insn first, because there's no point | |
1841 | including more than one in the machine description unless | |
1842 | the more limited one has some advantage. */ | |
a5fd5157 | 1843 | |
c0bfc78e | 1844 | rtx opalign = GEN_INT (align / BITS_PER_UNIT); |
1845 | enum machine_mode mode; | |
a5fd5157 | 1846 | |
c0bfc78e | 1847 | /* Since this is a move insn, we don't care about volatility. */ |
1848 | volatile_ok = 1; | |
1849 | ||
1850 | for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode; | |
1851 | mode = GET_MODE_WIDER_MODE (mode)) | |
1852 | { | |
1853 | enum insn_code code = movstr_optab[(int) mode]; | |
1854 | insn_operand_predicate_fn pred; | |
1855 | ||
1856 | if (code != CODE_FOR_nothing | |
1857 | /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT | |
1858 | here because if SIZE is less than the mode mask, as it is | |
1859 | returned by the macro, it will definitely be less than the | |
1860 | actual mode mask. */ | |
1861 | && ((GET_CODE (size) == CONST_INT | |
1862 | && ((unsigned HOST_WIDE_INT) INTVAL (size) | |
1863 | <= (GET_MODE_MASK (mode) >> 1))) | |
1864 | || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD) | |
1865 | && ((pred = insn_data[(int) code].operand[0].predicate) == 0 | |
1866 | || (*pred) (x, BLKmode)) | |
1867 | && ((pred = insn_data[(int) code].operand[1].predicate) == 0 | |
1868 | || (*pred) (y, BLKmode)) | |
1869 | && ((pred = insn_data[(int) code].operand[3].predicate) == 0 | |
1870 | || (*pred) (opalign, VOIDmode))) | |
1871 | { | |
1872 | rtx op2; | |
1873 | rtx last = get_last_insn (); | |
1874 | rtx pat; | |
1875 | ||
1876 | op2 = convert_to_mode (mode, size, 1); | |
1877 | pred = insn_data[(int) code].operand[2].predicate; | |
1878 | if (pred != 0 && ! (*pred) (op2, mode)) | |
1879 | op2 = copy_to_mode_reg (mode, op2); | |
1880 | ||
1881 | /* ??? When called via emit_block_move_for_call, it'd be | |
1882 | nice if there were some way to inform the backend, so | |
1883 | that it doesn't fail the expansion because it thinks | |
1884 | emitting the libcall would be more efficient. */ | |
1885 | ||
1886 | pat = GEN_FCN ((int) code) (x, y, op2, opalign); | |
1887 | if (pat) | |
1888 | { | |
1889 | emit_insn (pat); | |
1890 | volatile_ok = 0; | |
1891 | return true; | |
10f307d9 | 1892 | } |
c0bfc78e | 1893 | else |
1894 | delete_insns_since (last); | |
10f307d9 | 1895 | } |
c0bfc78e | 1896 | } |
10f307d9 | 1897 | |
c0bfc78e | 1898 | volatile_ok = 0; |
1899 | return false; | |
1900 | } | |
6702c250 | 1901 | |
c0bfc78e | 1902 | /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy. |
1903 | Return the return value from memcpy, 0 otherwise. */ | |
06b8e3db | 1904 | |
c0bfc78e | 1905 | static rtx |
1906 | emit_block_move_via_libcall (dst, src, size) | |
1907 | rtx dst, src, size; | |
1908 | { | |
1909 | tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree; | |
1910 | enum machine_mode size_mode; | |
1911 | rtx retval; | |
06b8e3db | 1912 | |
c0bfc78e | 1913 | /* DST, SRC, or SIZE may have been passed through protect_from_queue. |
06b8e3db | 1914 | |
c0bfc78e | 1915 | It is unsafe to save the value generated by protect_from_queue |
1916 | and reuse it later. Consider what happens if emit_queue is | |
1917 | called before the return value from protect_from_queue is used. | |
06b8e3db | 1918 | |
c0bfc78e | 1919 | Expansion of the CALL_EXPR below will call emit_queue before |
1920 | we are finished emitting RTL for argument setup. So if we are | |
1921 | not careful we could get the wrong value for an argument. | |
06b8e3db | 1922 | |
c0bfc78e | 1923 | To avoid this problem we go ahead and emit code to copy X, Y & |
1924 | SIZE into new pseudos. We can then place those new pseudos | |
1925 | into an RTL_EXPR and use them later, even after a call to | |
1926 | emit_queue. | |
06b8e3db | 1927 | |
c0bfc78e | 1928 | Note this is not strictly needed for library calls since they |
1929 | do not call emit_queue before loading their arguments. However, | |
1930 | we may need to have library calls call emit_queue in the future | |
1931 | since failing to do so could cause problems for targets which | |
1932 | define SMALL_REGISTER_CLASSES and pass arguments in registers. */ | |
1933 | ||
1934 | dst = copy_to_mode_reg (Pmode, XEXP (dst, 0)); | |
1935 | src = copy_to_mode_reg (Pmode, XEXP (src, 0)); | |
1936 | ||
1937 | if (TARGET_MEM_FUNCTIONS) | |
1938 | size_mode = TYPE_MODE (sizetype); | |
1939 | else | |
1940 | size_mode = TYPE_MODE (unsigned_type_node); | |
1941 | size = convert_to_mode (size_mode, size, 1); | |
1942 | size = copy_to_mode_reg (size_mode, size); | |
1943 | ||
1944 | /* It is incorrect to use the libcall calling conventions to call | |
1945 | memcpy in this context. This could be a user call to memcpy and | |
1946 | the user may wish to examine the return value from memcpy. For | |
1947 | targets where libcalls and normal calls have different conventions | |
1948 | for returning pointers, we could end up generating incorrect code. | |
1949 | ||
1950 | For convenience, we generate the call to bcopy this way as well. */ | |
1951 | ||
1952 | dst_tree = make_tree (ptr_type_node, dst); | |
1953 | src_tree = make_tree (ptr_type_node, src); | |
1954 | if (TARGET_MEM_FUNCTIONS) | |
1955 | size_tree = make_tree (sizetype, size); | |
1956 | else | |
1957 | size_tree = make_tree (unsigned_type_node, size); | |
1958 | ||
1959 | fn = emit_block_move_libcall_fn (true); | |
1960 | arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE); | |
1961 | if (TARGET_MEM_FUNCTIONS) | |
1962 | { | |
1963 | arg_list = tree_cons (NULL_TREE, src_tree, arg_list); | |
1964 | arg_list = tree_cons (NULL_TREE, dst_tree, arg_list); | |
1965 | } | |
1966 | else | |
1967 | { | |
1968 | arg_list = tree_cons (NULL_TREE, dst_tree, arg_list); | |
1969 | arg_list = tree_cons (NULL_TREE, src_tree, arg_list); | |
1970 | } | |
1971 | ||
1972 | /* Now we have to build up the CALL_EXPR itself. */ | |
1973 | call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn); | |
1974 | call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)), | |
1975 | call_expr, arg_list, NULL_TREE); | |
1976 | TREE_SIDE_EFFECTS (call_expr) = 1; | |
1977 | ||
1978 | retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0); | |
1979 | ||
1980 | /* If we are initializing a readonly value, show the above call | |
1981 | clobbered it. Otherwise, a load from it may erroneously be | |
1982 | hoisted from a loop. */ | |
1983 | if (RTX_UNCHANGING_P (dst)) | |
1984 | emit_insn (gen_rtx_CLOBBER (VOIDmode, dst)); | |
1985 | ||
1986 | return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX); | |
1987 | } | |
f708f8fd | 1988 | |
c0bfc78e | 1989 | /* A subroutine of emit_block_move_via_libcall. Create the tree node |
1990 | for the function we use for block copies. The first time FOR_CALL | |
1991 | is true, we call assemble_external. */ | |
f708f8fd | 1992 | |
c0bfc78e | 1993 | static GTY(()) tree block_move_fn; |
1994 | ||
1995 | static tree | |
1996 | emit_block_move_libcall_fn (for_call) | |
1997 | int for_call; | |
1998 | { | |
1999 | static bool emitted_extern; | |
2000 | tree fn = block_move_fn, args; | |
f708f8fd | 2001 | |
c0bfc78e | 2002 | if (!fn) |
2003 | { | |
2004 | if (TARGET_MEM_FUNCTIONS) | |
f708f8fd | 2005 | { |
c0bfc78e | 2006 | fn = get_identifier ("memcpy"); |
2007 | args = build_function_type_list (ptr_type_node, ptr_type_node, | |
2008 | const_ptr_type_node, sizetype, | |
2009 | NULL_TREE); | |
2010 | } | |
2011 | else | |
2012 | { | |
2013 | fn = get_identifier ("bcopy"); | |
2014 | args = build_function_type_list (void_type_node, const_ptr_type_node, | |
2015 | ptr_type_node, unsigned_type_node, | |
2016 | NULL_TREE); | |
f708f8fd | 2017 | } |
2018 | ||
c0bfc78e | 2019 | fn = build_decl (FUNCTION_DECL, fn, args); |
2020 | DECL_EXTERNAL (fn) = 1; | |
2021 | TREE_PUBLIC (fn) = 1; | |
2022 | DECL_ARTIFICIAL (fn) = 1; | |
2023 | TREE_NOTHROW (fn) = 1; | |
8ca560c1 | 2024 | |
c0bfc78e | 2025 | block_move_fn = fn; |
10f307d9 | 2026 | } |
0dbd1c74 | 2027 | |
c0bfc78e | 2028 | if (for_call && !emitted_extern) |
2029 | { | |
2030 | emitted_extern = true; | |
2031 | make_decl_rtl (fn, NULL); | |
2032 | assemble_external (fn); | |
2033 | } | |
2034 | ||
2035 | return fn; | |
10f307d9 | 2036 | } |
0378dbdc | 2037 | |
2038 | /* A subroutine of emit_block_move. Copy the data via an explicit | |
2039 | loop. This is used only when libcalls are forbidden. */ | |
2040 | /* ??? It'd be nice to copy in hunks larger than QImode. */ | |
2041 | ||
2042 | static void | |
2043 | emit_block_move_via_loop (x, y, size, align) | |
2044 | rtx x, y, size; | |
2045 | unsigned int align ATTRIBUTE_UNUSED; | |
2046 | { | |
2047 | rtx cmp_label, top_label, iter, x_addr, y_addr, tmp; | |
2048 | enum machine_mode iter_mode; | |
2049 | ||
2050 | iter_mode = GET_MODE (size); | |
2051 | if (iter_mode == VOIDmode) | |
2052 | iter_mode = word_mode; | |
2053 | ||
2054 | top_label = gen_label_rtx (); | |
2055 | cmp_label = gen_label_rtx (); | |
2056 | iter = gen_reg_rtx (iter_mode); | |
2057 | ||
2058 | emit_move_insn (iter, const0_rtx); | |
2059 | ||
2060 | x_addr = force_operand (XEXP (x, 0), NULL_RTX); | |
2061 | y_addr = force_operand (XEXP (y, 0), NULL_RTX); | |
2062 | do_pending_stack_adjust (); | |
2063 | ||
2064 | emit_note (NULL, NOTE_INSN_LOOP_BEG); | |
2065 | ||
2066 | emit_jump (cmp_label); | |
2067 | emit_label (top_label); | |
2068 | ||
2069 | tmp = convert_modes (Pmode, iter_mode, iter, true); | |
2070 | x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp); | |
2071 | y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp); | |
2072 | x = change_address (x, QImode, x_addr); | |
2073 | y = change_address (y, QImode, y_addr); | |
2074 | ||
2075 | emit_move_insn (x, y); | |
2076 | ||
2077 | tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter, | |
2078 | true, OPTAB_LIB_WIDEN); | |
2079 | if (tmp != iter) | |
2080 | emit_move_insn (iter, tmp); | |
2081 | ||
2082 | emit_note (NULL, NOTE_INSN_LOOP_CONT); | |
2083 | emit_label (cmp_label); | |
2084 | ||
2085 | emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode, | |
2086 | true, top_label); | |
2087 | ||
2088 | emit_note (NULL, NOTE_INSN_LOOP_END); | |
2089 | } | |
10f307d9 | 2090 | \f |
2091 | /* Copy all or part of a value X into registers starting at REGNO. | |
2092 | The number of registers to be filled is NREGS. */ | |
2093 | ||
2094 | void | |
2095 | move_block_to_reg (regno, x, nregs, mode) | |
2096 | int regno; | |
2097 | rtx x; | |
2098 | int nregs; | |
2099 | enum machine_mode mode; | |
2100 | { | |
2101 | int i; | |
0c22b90f | 2102 | #ifdef HAVE_load_multiple |
fa56dc1d | 2103 | rtx pat; |
0c22b90f | 2104 | rtx last; |
2105 | #endif | |
10f307d9 | 2106 | |
c9750f6d | 2107 | if (nregs == 0) |
2108 | return; | |
2109 | ||
10f307d9 | 2110 | if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x)) |
2111 | x = validize_mem (force_const_mem (mode, x)); | |
2112 | ||
2113 | /* See if the machine can do this with a load multiple insn. */ | |
2114 | #ifdef HAVE_load_multiple | |
d3afc10f | 2115 | if (HAVE_load_multiple) |
10f307d9 | 2116 | { |
d3afc10f | 2117 | last = get_last_insn (); |
941522d6 | 2118 | pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x, |
d3afc10f | 2119 | GEN_INT (nregs)); |
2120 | if (pat) | |
2121 | { | |
2122 | emit_insn (pat); | |
2123 | return; | |
2124 | } | |
2125 | else | |
2126 | delete_insns_since (last); | |
10f307d9 | 2127 | } |
10f307d9 | 2128 | #endif |
2129 | ||
2130 | for (i = 0; i < nregs; i++) | |
941522d6 | 2131 | emit_move_insn (gen_rtx_REG (word_mode, regno + i), |
10f307d9 | 2132 | operand_subword_force (x, i, mode)); |
2133 | } | |
2134 | ||
2135 | /* Copy all or part of a BLKmode value X out of registers starting at REGNO. | |
db7bca86 | 2136 | The number of registers to be filled is NREGS. SIZE indicates the number |
2137 | of bytes in the object X. */ | |
2138 | ||
10f307d9 | 2139 | void |
db7bca86 | 2140 | move_block_from_reg (regno, x, nregs, size) |
10f307d9 | 2141 | int regno; |
2142 | rtx x; | |
2143 | int nregs; | |
db7bca86 | 2144 | int size; |
10f307d9 | 2145 | { |
2146 | int i; | |
0c22b90f | 2147 | #ifdef HAVE_store_multiple |
fa56dc1d | 2148 | rtx pat; |
0c22b90f | 2149 | rtx last; |
2150 | #endif | |
42ca9c04 | 2151 | enum machine_mode mode; |
10f307d9 | 2152 | |
cc119c14 | 2153 | if (nregs == 0) |
2154 | return; | |
2155 | ||
42ca9c04 | 2156 | /* If SIZE is that of a mode no bigger than a word, just use that |
2157 | mode's store operation. */ | |
2158 | if (size <= UNITS_PER_WORD | |
f4a0a478 | 2159 | && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode) |
42ca9c04 | 2160 | { |
537ffcfc | 2161 | emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno)); |
42ca9c04 | 2162 | return; |
2163 | } | |
fa56dc1d | 2164 | |
db7bca86 | 2165 | /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned |
42ca9c04 | 2166 | to the left before storing to memory. Note that the previous test |
2167 | doesn't handle all cases (e.g. SIZE == 3). */ | |
f4a0a478 | 2168 | if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN) |
db7bca86 | 2169 | { |
2170 | rtx tem = operand_subword (x, 0, 1, BLKmode); | |
2171 | rtx shift; | |
2172 | ||
2173 | if (tem == 0) | |
2174 | abort (); | |
2175 | ||
2176 | shift = expand_shift (LSHIFT_EXPR, word_mode, | |
941522d6 | 2177 | gen_rtx_REG (word_mode, regno), |
db7bca86 | 2178 | build_int_2 ((UNITS_PER_WORD - size) |
2179 | * BITS_PER_UNIT, 0), NULL_RTX, 0); | |
2180 | emit_move_insn (tem, shift); | |
2181 | return; | |
2182 | } | |
2183 | ||
10f307d9 | 2184 | /* See if the machine can do this with a store multiple insn. */ |
2185 | #ifdef HAVE_store_multiple | |
d3afc10f | 2186 | if (HAVE_store_multiple) |
10f307d9 | 2187 | { |
d3afc10f | 2188 | last = get_last_insn (); |
941522d6 | 2189 | pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno), |
d3afc10f | 2190 | GEN_INT (nregs)); |
2191 | if (pat) | |
2192 | { | |
2193 | emit_insn (pat); | |
2194 | return; | |
2195 | } | |
2196 | else | |
2197 | delete_insns_since (last); | |
10f307d9 | 2198 | } |
10f307d9 | 2199 | #endif |
2200 | ||
2201 | for (i = 0; i < nregs; i++) | |
2202 | { | |
2203 | rtx tem = operand_subword (x, i, 1, BLKmode); | |
2204 | ||
2205 | if (tem == 0) | |
2206 | abort (); | |
2207 | ||
941522d6 | 2208 | emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i)); |
10f307d9 | 2209 | } |
2210 | } | |
2211 | ||
b566e2e5 | 2212 | /* Generate a PARALLEL rtx for a new non-consecutive group of registers from |
2213 | ORIG, where ORIG is a non-consecutive group of registers represented by | |
2214 | a PARALLEL. The clone is identical to the original except in that the | |
2215 | original set of registers is replaced by a new set of pseudo registers. | |
2216 | The new set has the same modes as the original set. */ | |
2217 | ||
2218 | rtx | |
2219 | gen_group_rtx (orig) | |
2220 | rtx orig; | |
2221 | { | |
2222 | int i, length; | |
2223 | rtx *tmps; | |
2224 | ||
2225 | if (GET_CODE (orig) != PARALLEL) | |
2226 | abort (); | |
2227 | ||
2228 | length = XVECLEN (orig, 0); | |
2229 | tmps = (rtx *) alloca (sizeof (rtx) * length); | |
2230 | ||
2231 | /* Skip a NULL entry in first slot. */ | |
2232 | i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1; | |
2233 | ||
2234 | if (i) | |
2235 | tmps[0] = 0; | |
2236 | ||
2237 | for (; i < length; i++) | |
2238 | { | |
2239 | enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0)); | |
2240 | rtx offset = XEXP (XVECEXP (orig, 0, i), 1); | |
2241 | ||
2242 | tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset); | |
2243 | } | |
2244 | ||
2245 | return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps)); | |
2246 | } | |
2247 | ||
6ede8018 | 2248 | /* Emit code to move a block SRC to a block DST, where DST is non-consecutive |
2249 | registers represented by a PARALLEL. SSIZE represents the total size of | |
2c269e73 | 2250 | block SRC in bytes, or -1 if not known. */ |
cb0ccc1e | 2251 | /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that |
6ede8018 | 2252 | the balance will be in what would be the low-order memory addresses, i.e. |
2253 | left justified for big endian, right justified for little endian. This | |
2254 | happens to be true for the targets currently using this support. If this | |
2255 | ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING | |
2256 | would be needed. */ | |
ce739127 | 2257 | |
2258 | void | |
2c269e73 | 2259 | emit_group_load (dst, orig_src, ssize) |
6ede8018 | 2260 | rtx dst, orig_src; |
fe352cf1 | 2261 | int ssize; |
ce739127 | 2262 | { |
6ede8018 | 2263 | rtx *tmps, src; |
2264 | int start, i; | |
ce739127 | 2265 | |
6ede8018 | 2266 | if (GET_CODE (dst) != PARALLEL) |
ce739127 | 2267 | abort (); |
2268 | ||
2269 | /* Check for a NULL entry, used to indicate that the parameter goes | |
2270 | both on the stack and in registers. */ | |
6ede8018 | 2271 | if (XEXP (XVECEXP (dst, 0, 0), 0)) |
2272 | start = 0; | |
ce739127 | 2273 | else |
6ede8018 | 2274 | start = 1; |
2275 | ||
fa56dc1d | 2276 | tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0)); |
6ede8018 | 2277 | |
6ede8018 | 2278 | /* Process the pieces. */ |
2279 | for (i = start; i < XVECLEN (dst, 0); i++) | |
2280 | { | |
2281 | enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0)); | |
02e7a332 | 2282 | HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1)); |
2283 | unsigned int bytelen = GET_MODE_SIZE (mode); | |
6ede8018 | 2284 | int shift = 0; |
2285 | ||
2286 | /* Handle trailing fragments that run over the size of the struct. */ | |
e1439bcb | 2287 | if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize) |
6ede8018 | 2288 | { |
2289 | shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT; | |
2290 | bytelen = ssize - bytepos; | |
2291 | if (bytelen <= 0) | |
fe352cf1 | 2292 | abort (); |
6ede8018 | 2293 | } |
2294 | ||
c037cba7 | 2295 | /* If we won't be loading directly from memory, protect the real source |
2296 | from strange tricks we might play; but make sure that the source can | |
2297 | be loaded directly into the destination. */ | |
2298 | src = orig_src; | |
2299 | if (GET_CODE (orig_src) != MEM | |
2300 | && (!CONSTANT_P (orig_src) | |
2301 | || (GET_MODE (orig_src) != mode | |
2302 | && GET_MODE (orig_src) != VOIDmode))) | |
2303 | { | |
2304 | if (GET_MODE (orig_src) == VOIDmode) | |
2305 | src = gen_reg_rtx (mode); | |
2306 | else | |
2307 | src = gen_reg_rtx (GET_MODE (orig_src)); | |
2c269e73 | 2308 | |
c037cba7 | 2309 | emit_move_insn (src, orig_src); |
2310 | } | |
2311 | ||
6ede8018 | 2312 | /* Optimize the access just a bit. */ |
2313 | if (GET_CODE (src) == MEM | |
2c269e73 | 2314 | && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode) |
fe352cf1 | 2315 | && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0 |
6ede8018 | 2316 | && bytelen == GET_MODE_SIZE (mode)) |
2317 | { | |
2318 | tmps[i] = gen_reg_rtx (mode); | |
e513d163 | 2319 | emit_move_insn (tmps[i], adjust_address (src, mode, bytepos)); |
ce739127 | 2320 | } |
a1000ec6 | 2321 | else if (GET_CODE (src) == CONCAT) |
2322 | { | |
2a075f91 | 2323 | unsigned int slen = GET_MODE_SIZE (GET_MODE (src)); |
2324 | unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0))); | |
2325 | ||
2326 | if ((bytepos == 0 && bytelen == slen0) | |
2327 | || (bytepos != 0 && bytepos + bytelen <= slen)) | |
4c183732 | 2328 | { |
2a075f91 | 2329 | /* The following assumes that the concatenated objects all |
2330 | have the same size. In this case, a simple calculation | |
2331 | can be used to determine the object and the bit field | |
2332 | to be extracted. */ | |
2333 | tmps[i] = XEXP (src, bytepos / slen0); | |
4c183732 | 2334 | if (! CONSTANT_P (tmps[i]) |
2335 | && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode)) | |
2336 | tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT, | |
2a075f91 | 2337 | (bytepos % slen0) * BITS_PER_UNIT, |
2338 | 1, NULL_RTX, mode, mode, ssize); | |
4c183732 | 2339 | } |
10d075b5 | 2340 | else if (bytepos == 0) |
2341 | { | |
2a075f91 | 2342 | rtx mem = assign_stack_temp (GET_MODE (src), slen, 0); |
10d075b5 | 2343 | emit_move_insn (mem, src); |
2c269e73 | 2344 | tmps[i] = adjust_address (mem, mode, 0); |
10d075b5 | 2345 | } |
a1000ec6 | 2346 | else |
2347 | abort (); | |
2348 | } | |
c037cba7 | 2349 | else if (CONSTANT_P (src) |
73645c13 | 2350 | || (GET_CODE (src) == REG && GET_MODE (src) == mode)) |
2351 | tmps[i] = src; | |
ce739127 | 2352 | else |
325d1c45 | 2353 | tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT, |
2354 | bytepos * BITS_PER_UNIT, 1, NULL_RTX, | |
2c269e73 | 2355 | mode, mode, ssize); |
ce739127 | 2356 | |
6ede8018 | 2357 | if (BYTES_BIG_ENDIAN && shift) |
325d1c45 | 2358 | expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift), |
2359 | tmps[i], 0, OPTAB_WIDEN); | |
ce739127 | 2360 | } |
325d1c45 | 2361 | |
fa56dc1d | 2362 | emit_queue (); |
6ede8018 | 2363 | |
2364 | /* Copy the extracted pieces into the proper (probable) hard regs. */ | |
2365 | for (i = start; i < XVECLEN (dst, 0); i++) | |
2366 | emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]); | |
ce739127 | 2367 | } |
2368 | ||
b566e2e5 | 2369 | /* Emit code to move a block SRC to block DST, where SRC and DST are |
2370 | non-consecutive groups of registers, each represented by a PARALLEL. */ | |
2371 | ||
2372 | void | |
2373 | emit_group_move (dst, src) | |
2374 | rtx dst, src; | |
2375 | { | |
2376 | int i; | |
2377 | ||
2378 | if (GET_CODE (src) != PARALLEL | |
2379 | || GET_CODE (dst) != PARALLEL | |
2380 | || XVECLEN (src, 0) != XVECLEN (dst, 0)) | |
2381 | abort (); | |
2382 | ||
2383 | /* Skip first entry if NULL. */ | |
2384 | for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++) | |
2385 | emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), | |
2386 | XEXP (XVECEXP (src, 0, i), 0)); | |
2387 | } | |
2388 | ||
6ede8018 | 2389 | /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive |
2390 | registers represented by a PARALLEL. SSIZE represents the total size of | |
2c269e73 | 2391 | block DST, or -1 if not known. */ |
ce739127 | 2392 | |
2393 | void | |
2c269e73 | 2394 | emit_group_store (orig_dst, src, ssize) |
6ede8018 | 2395 | rtx orig_dst, src; |
fe352cf1 | 2396 | int ssize; |
ce739127 | 2397 | { |
6ede8018 | 2398 | rtx *tmps, dst; |
2399 | int start, i; | |
ce739127 | 2400 | |
6ede8018 | 2401 | if (GET_CODE (src) != PARALLEL) |
ce739127 | 2402 | abort (); |
2403 | ||
2404 | /* Check for a NULL entry, used to indicate that the parameter goes | |
2405 | both on the stack and in registers. */ | |
6ede8018 | 2406 | if (XEXP (XVECEXP (src, 0, 0), 0)) |
2407 | start = 0; | |
ce739127 | 2408 | else |
6ede8018 | 2409 | start = 1; |
2410 | ||
fa56dc1d | 2411 | tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0)); |
ce739127 | 2412 | |
6ede8018 | 2413 | /* Copy the (probable) hard regs into pseudos. */ |
2414 | for (i = start; i < XVECLEN (src, 0); i++) | |
ce739127 | 2415 | { |
6ede8018 | 2416 | rtx reg = XEXP (XVECEXP (src, 0, i), 0); |
2417 | tmps[i] = gen_reg_rtx (GET_MODE (reg)); | |
2418 | emit_move_insn (tmps[i], reg); | |
2419 | } | |
fa56dc1d | 2420 | emit_queue (); |
ce739127 | 2421 | |
6ede8018 | 2422 | /* If we won't be storing directly into memory, protect the real destination |
2423 | from strange tricks we might play. */ | |
2424 | dst = orig_dst; | |
723d3639 | 2425 | if (GET_CODE (dst) == PARALLEL) |
2426 | { | |
2427 | rtx temp; | |
2428 | ||
2429 | /* We can get a PARALLEL dst if there is a conditional expression in | |
2430 | a return statement. In that case, the dst and src are the same, | |
2431 | so no action is necessary. */ | |
2432 | if (rtx_equal_p (dst, src)) | |
2433 | return; | |
2434 | ||
2435 | /* It is unclear if we can ever reach here, but we may as well handle | |
2436 | it. Allocate a temporary, and split this into a store/load to/from | |
2437 | the temporary. */ | |
2438 | ||
2439 | temp = assign_stack_temp (GET_MODE (dst), ssize, 0); | |
2c269e73 | 2440 | emit_group_store (temp, src, ssize); |
2441 | emit_group_load (dst, temp, ssize); | |
723d3639 | 2442 | return; |
2443 | } | |
98f4973a | 2444 | else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT) |
6ede8018 | 2445 | { |
2446 | dst = gen_reg_rtx (GET_MODE (orig_dst)); | |
2447 | /* Make life a bit easier for combine. */ | |
2448 | emit_move_insn (dst, const0_rtx); | |
2449 | } | |
6ede8018 | 2450 | |
2451 | /* Process the pieces. */ | |
2452 | for (i = start; i < XVECLEN (src, 0); i++) | |
2453 | { | |
02e7a332 | 2454 | HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1)); |
6ede8018 | 2455 | enum machine_mode mode = GET_MODE (tmps[i]); |
02e7a332 | 2456 | unsigned int bytelen = GET_MODE_SIZE (mode); |
463e3bf7 | 2457 | rtx dest = dst; |
6ede8018 | 2458 | |
2459 | /* Handle trailing fragments that run over the size of the struct. */ | |
e1439bcb | 2460 | if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize) |
fe08fc1b | 2461 | { |
6ede8018 | 2462 | if (BYTES_BIG_ENDIAN) |
2463 | { | |
2464 | int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT; | |
2465 | expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift), | |
2466 | tmps[i], 0, OPTAB_WIDEN); | |
2467 | } | |
2468 | bytelen = ssize - bytepos; | |
fe08fc1b | 2469 | } |
ce739127 | 2470 | |
463e3bf7 | 2471 | if (GET_CODE (dst) == CONCAT) |
2472 | { | |
2473 | if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)))) | |
2474 | dest = XEXP (dst, 0); | |
2475 | else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)))) | |
2476 | { | |
2477 | bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))); | |
2478 | dest = XEXP (dst, 1); | |
2479 | } | |
2480 | else | |
2481 | abort (); | |
2482 | } | |
2483 | ||
6ede8018 | 2484 | /* Optimize the access just a bit. */ |
463e3bf7 | 2485 | if (GET_CODE (dest) == MEM |
2486 | && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode) | |
fe352cf1 | 2487 | && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0 |
6ede8018 | 2488 | && bytelen == GET_MODE_SIZE (mode)) |
463e3bf7 | 2489 | emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]); |
6ede8018 | 2490 | else |
463e3bf7 | 2491 | store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT, |
2c269e73 | 2492 | mode, tmps[i], ssize); |
ce739127 | 2493 | } |
fe352cf1 | 2494 | |
fa56dc1d | 2495 | emit_queue (); |
6ede8018 | 2496 | |
2497 | /* Copy from the pseudo into the (probable) hard reg. */ | |
2498 | if (GET_CODE (dst) == REG) | |
2499 | emit_move_insn (orig_dst, dst); | |
ce739127 | 2500 | } |
2501 | ||
25eb0f59 | 2502 | /* Generate code to copy a BLKmode object of TYPE out of a |
2503 | set of registers starting with SRCREG into TGTBLK. If TGTBLK | |
2504 | is null, a stack temporary is created. TGTBLK is returned. | |
2505 | ||
2506 | The primary purpose of this routine is to handle functions | |
2507 | that return BLKmode structures in registers. Some machines | |
2508 | (the PA for example) want to return all small structures | |
fa56dc1d | 2509 | in registers regardless of the structure's alignment. */ |
25eb0f59 | 2510 | |
2511 | rtx | |
325d1c45 | 2512 | copy_blkmode_from_reg (tgtblk, srcreg, type) |
25eb0f59 | 2513 | rtx tgtblk; |
2514 | rtx srcreg; | |
2515 | tree type; | |
2516 | { | |
325d1c45 | 2517 | unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type); |
2518 | rtx src = NULL, dst = NULL; | |
2519 | unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD); | |
2520 | unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0; | |
2521 | ||
2522 | if (tgtblk == 0) | |
2523 | { | |
387bc205 | 2524 | tgtblk = assign_temp (build_qualified_type (type, |
2525 | (TYPE_QUALS (type) | |
2526 | | TYPE_QUAL_CONST)), | |
2527 | 0, 1, 1); | |
325d1c45 | 2528 | preserve_temp_slots (tgtblk); |
2529 | } | |
fa56dc1d | 2530 | |
a689a61a | 2531 | /* This code assumes srcreg is at least a full word. If it isn't, copy it |
f4a0a478 | 2532 | into a new pseudo which is a full word. */ |
23551094 | 2533 | |
325d1c45 | 2534 | if (GET_MODE (srcreg) != BLKmode |
2535 | && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD) | |
f4a0a478 | 2536 | srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type)); |
325d1c45 | 2537 | |
2538 | /* Structures whose size is not a multiple of a word are aligned | |
2539 | to the least significant byte (to the right). On a BYTES_BIG_ENDIAN | |
2540 | machine, this means we must skip the empty high order bytes when | |
2541 | calculating the bit offset. */ | |
23551094 | 2542 | if (BYTES_BIG_ENDIAN |
23551094 | 2543 | && bytes % UNITS_PER_WORD) |
325d1c45 | 2544 | big_endian_correction |
2545 | = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT)); | |
2546 | ||
2547 | /* Copy the structure BITSIZE bites at a time. | |
fa56dc1d | 2548 | |
325d1c45 | 2549 | We could probably emit more efficient code for machines which do not use |
2550 | strict alignment, but it doesn't seem worth the effort at the current | |
2551 | time. */ | |
2552 | for (bitpos = 0, xbitpos = big_endian_correction; | |
2553 | bitpos < bytes * BITS_PER_UNIT; | |
2554 | bitpos += bitsize, xbitpos += bitsize) | |
2555 | { | |
fa56dc1d | 2556 | /* We need a new source operand each time xbitpos is on a |
325d1c45 | 2557 | word boundary and when xbitpos == big_endian_correction |
2558 | (the first time through). */ | |
2559 | if (xbitpos % BITS_PER_WORD == 0 | |
2560 | || xbitpos == big_endian_correction) | |
c502077e | 2561 | src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, |
2562 | GET_MODE (srcreg)); | |
325d1c45 | 2563 | |
2564 | /* We need a new destination operand each time bitpos is on | |
2565 | a word boundary. */ | |
2566 | if (bitpos % BITS_PER_WORD == 0) | |
2567 | dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode); | |
fa56dc1d | 2568 | |
325d1c45 | 2569 | /* Use xbitpos for the source extraction (right justified) and |
2570 | xbitpos for the destination store (left justified). */ | |
2571 | store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode, | |
2572 | extract_bit_field (src, bitsize, | |
2573 | xbitpos % BITS_PER_WORD, 1, | |
2574 | NULL_RTX, word_mode, word_mode, | |
2c269e73 | 2575 | BITS_PER_WORD), |
2576 | BITS_PER_WORD); | |
325d1c45 | 2577 | } |
2578 | ||
2579 | return tgtblk; | |
25eb0f59 | 2580 | } |
2581 | ||
07409b3a | 2582 | /* Add a USE expression for REG to the (possibly empty) list pointed |
2583 | to by CALL_FUSAGE. REG must denote a hard register. */ | |
10f307d9 | 2584 | |
2585 | void | |
7e2ca70b | 2586 | use_reg (call_fusage, reg) |
2587 | rtx *call_fusage, reg; | |
2588 | { | |
f2799de7 | 2589 | if (GET_CODE (reg) != REG |
2590 | || REGNO (reg) >= FIRST_PSEUDO_REGISTER) | |
fa56dc1d | 2591 | abort (); |
7e2ca70b | 2592 | |
2593 | *call_fusage | |
941522d6 | 2594 | = gen_rtx_EXPR_LIST (VOIDmode, |
2595 | gen_rtx_USE (VOIDmode, reg), *call_fusage); | |
7e2ca70b | 2596 | } |
2597 | ||
07409b3a | 2598 | /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs, |
2599 | starting at REGNO. All of these registers must be hard registers. */ | |
7e2ca70b | 2600 | |
2601 | void | |
f2799de7 | 2602 | use_regs (call_fusage, regno, nregs) |
2603 | rtx *call_fusage; | |
10f307d9 | 2604 | int regno; |
2605 | int nregs; | |
2606 | { | |
f2799de7 | 2607 | int i; |
10f307d9 | 2608 | |
f2799de7 | 2609 | if (regno + nregs > FIRST_PSEUDO_REGISTER) |
2610 | abort (); | |
2611 | ||
2612 | for (i = 0; i < nregs; i++) | |
936082bb | 2613 | use_reg (call_fusage, regno_reg_rtx[regno + i]); |
10f307d9 | 2614 | } |
ce739127 | 2615 | |
2616 | /* Add USE expressions to *CALL_FUSAGE for each REG contained in the | |
2617 | PARALLEL REGS. This is for calls that pass values in multiple | |
2618 | non-contiguous locations. The Irix 6 ABI has examples of this. */ | |
2619 | ||
2620 | void | |
2621 | use_group_regs (call_fusage, regs) | |
2622 | rtx *call_fusage; | |
2623 | rtx regs; | |
2624 | { | |
2625 | int i; | |
2626 | ||
2f373e5d | 2627 | for (i = 0; i < XVECLEN (regs, 0); i++) |
2628 | { | |
2629 | rtx reg = XEXP (XVECEXP (regs, 0, i), 0); | |
ce739127 | 2630 | |
2f373e5d | 2631 | /* A NULL entry means the parameter goes both on the stack and in |
2632 | registers. This can also be a MEM for targets that pass values | |
2633 | partially on the stack and partially in registers. */ | |
0dbd1c74 | 2634 | if (reg != 0 && GET_CODE (reg) == REG) |
2f373e5d | 2635 | use_reg (call_fusage, reg); |
2636 | } | |
ce739127 | 2637 | } |
10f307d9 | 2638 | \f |
6840589f | 2639 | |
d1f6ae0c | 2640 | /* Determine whether the LEN bytes generated by CONSTFUN can be |
2641 | stored to memory using several move instructions. CONSTFUNDATA is | |
2642 | a pointer which will be passed as argument in every CONSTFUN call. | |
2643 | ALIGN is maximum alignment we can assume. Return nonzero if a | |
2644 | call to store_by_pieces should succeed. */ | |
2645 | ||
6840589f | 2646 | int |
2647 | can_store_by_pieces (len, constfun, constfundata, align) | |
2648 | unsigned HOST_WIDE_INT len; | |
2649 | rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode)); | |
2650 | PTR constfundata; | |
2651 | unsigned int align; | |
2652 | { | |
9acfe138 | 2653 | unsigned HOST_WIDE_INT max_size, l; |
6840589f | 2654 | HOST_WIDE_INT offset = 0; |
2655 | enum machine_mode mode, tmode; | |
2656 | enum insn_code icode; | |
2657 | int reverse; | |
2658 | rtx cst; | |
2659 | ||
805e22b2 | 2660 | if (! STORE_BY_PIECES_P (len, align)) |
6840589f | 2661 | return 0; |
2662 | ||
2663 | if (! SLOW_UNALIGNED_ACCESS (word_mode, align) | |
2664 | || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT) | |
2665 | align = MOVE_MAX * BITS_PER_UNIT; | |
2666 | ||
2667 | /* We would first store what we can in the largest integer mode, then go to | |
2668 | successively smaller modes. */ | |
2669 | ||
2670 | for (reverse = 0; | |
2671 | reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT); | |
2672 | reverse++) | |
2673 | { | |
2674 | l = len; | |
2675 | mode = VOIDmode; | |
d1f6ae0c | 2676 | max_size = STORE_MAX_PIECES + 1; |
6840589f | 2677 | while (max_size > 1) |
2678 | { | |
2679 | for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); | |
2680 | tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode)) | |
2681 | if (GET_MODE_SIZE (tmode) < max_size) | |
2682 | mode = tmode; | |
2683 | ||
2684 | if (mode == VOIDmode) | |
2685 | break; | |
2686 | ||
2687 | icode = mov_optab->handlers[(int) mode].insn_code; | |
2688 | if (icode != CODE_FOR_nothing | |
2689 | && align >= GET_MODE_ALIGNMENT (mode)) | |
2690 | { | |
2691 | unsigned int size = GET_MODE_SIZE (mode); | |
2692 | ||
2693 | while (l >= size) | |
2694 | { | |
2695 | if (reverse) | |
2696 | offset -= size; | |
2697 | ||
2698 | cst = (*constfun) (constfundata, offset, mode); | |
2699 | if (!LEGITIMATE_CONSTANT_P (cst)) | |
2700 | return 0; | |
2701 | ||
2702 | if (!reverse) | |
2703 | offset += size; | |
2704 | ||
2705 | l -= size; | |
2706 | } | |
2707 | } | |
2708 | ||
2709 | max_size = GET_MODE_SIZE (mode); | |
2710 | } | |
2711 | ||
2712 | /* The code above should have handled everything. */ | |
2713 | if (l != 0) | |
2714 | abort (); | |
2715 | } | |
2716 | ||
2717 | return 1; | |
2718 | } | |
2719 | ||
2720 | /* Generate several move instructions to store LEN bytes generated by | |
2721 | CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a | |
2722 | pointer which will be passed as argument in every CONSTFUN call. | |
2723 | ALIGN is maximum alignment we can assume. */ | |
2724 | ||
2725 | void | |
2726 | store_by_pieces (to, len, constfun, constfundata, align) | |
2727 | rtx to; | |
2728 | unsigned HOST_WIDE_INT len; | |
2729 | rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode)); | |
2730 | PTR constfundata; | |
2731 | unsigned int align; | |
2732 | { | |
2733 | struct store_by_pieces data; | |
2734 | ||
805e22b2 | 2735 | if (! STORE_BY_PIECES_P (len, align)) |
6840589f | 2736 | abort (); |
2737 | to = protect_from_queue (to, 1); | |
2738 | data.constfun = constfun; | |
2739 | data.constfundata = constfundata; | |
2740 | data.len = len; | |
2741 | data.to = to; | |
2742 | store_by_pieces_1 (&data, align); | |
2743 | } | |
2744 | ||
325d1c45 | 2745 | /* Generate several move instructions to clear LEN bytes of block TO. (A MEM |
2746 | rtx with BLKmode). The caller must pass TO through protect_from_queue | |
2747 | before calling. ALIGN is maximum alignment we can assume. */ | |
dbd14dc5 | 2748 | |
2749 | static void | |
2750 | clear_by_pieces (to, len, align) | |
2751 | rtx to; | |
f7c44134 | 2752 | unsigned HOST_WIDE_INT len; |
fe352cf1 | 2753 | unsigned int align; |
dbd14dc5 | 2754 | { |
6840589f | 2755 | struct store_by_pieces data; |
2756 | ||
2757 | data.constfun = clear_by_pieces_1; | |
2571646d | 2758 | data.constfundata = NULL; |
6840589f | 2759 | data.len = len; |
2760 | data.to = to; | |
2761 | store_by_pieces_1 (&data, align); | |
2762 | } | |
2763 | ||
2764 | /* Callback routine for clear_by_pieces. | |
2765 | Return const0_rtx unconditionally. */ | |
2766 | ||
2767 | static rtx | |
2768 | clear_by_pieces_1 (data, offset, mode) | |
2769 | PTR data ATTRIBUTE_UNUSED; | |
2770 | HOST_WIDE_INT offset ATTRIBUTE_UNUSED; | |
2771 | enum machine_mode mode ATTRIBUTE_UNUSED; | |
2772 | { | |
2773 | return const0_rtx; | |
2774 | } | |
2775 | ||
2776 | /* Subroutine of clear_by_pieces and store_by_pieces. | |
2777 | Generate several move instructions to store LEN bytes of block TO. (A MEM | |
2778 | rtx with BLKmode). The caller must pass TO through protect_from_queue | |
2779 | before calling. ALIGN is maximum alignment we can assume. */ | |
2780 | ||
2781 | static void | |
2782 | store_by_pieces_1 (data, align) | |
2783 | struct store_by_pieces *data; | |
2784 | unsigned int align; | |
2785 | { | |
2786 | rtx to_addr = XEXP (data->to, 0); | |
d1f6ae0c | 2787 | unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1; |
53bd09ab | 2788 | enum machine_mode mode = VOIDmode, tmode; |
2789 | enum insn_code icode; | |
dbd14dc5 | 2790 | |
6840589f | 2791 | data->offset = 0; |
2792 | data->to_addr = to_addr; | |
2793 | data->autinc_to | |
dbd14dc5 | 2794 | = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC |
2795 | || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC); | |
2796 | ||
6840589f | 2797 | data->explicit_inc_to = 0; |
2798 | data->reverse | |
dbd14dc5 | 2799 | = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC); |
6840589f | 2800 | if (data->reverse) |
2801 | data->offset = data->len; | |
dbd14dc5 | 2802 | |
6840589f | 2803 | /* If storing requires more than two move insns, |
dbd14dc5 | 2804 | copy addresses to registers (to make displacements shorter) |
2805 | and use post-increment if available. */ | |
6840589f | 2806 | if (!data->autinc_to |
2807 | && move_by_pieces_ninsns (data->len, align) > 2) | |
dbd14dc5 | 2808 | { |
fa56dc1d | 2809 | /* Determine the main mode we'll be using. */ |
53bd09ab | 2810 | for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); |
2811 | tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode)) | |
2812 | if (GET_MODE_SIZE (tmode) < max_size) | |
2813 | mode = tmode; | |
2814 | ||
6840589f | 2815 | if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to) |
dbd14dc5 | 2816 | { |
6840589f | 2817 | data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len)); |
2818 | data->autinc_to = 1; | |
2819 | data->explicit_inc_to = -1; | |
dbd14dc5 | 2820 | } |
f7c44134 | 2821 | |
6840589f | 2822 | if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse |
2823 | && ! data->autinc_to) | |
dbd14dc5 | 2824 | { |
6840589f | 2825 | data->to_addr = copy_addr_to_reg (to_addr); |
2826 | data->autinc_to = 1; | |
2827 | data->explicit_inc_to = 1; | |
dbd14dc5 | 2828 | } |
f7c44134 | 2829 | |
6840589f | 2830 | if ( !data->autinc_to && CONSTANT_P (to_addr)) |
2831 | data->to_addr = copy_addr_to_reg (to_addr); | |
dbd14dc5 | 2832 | } |
2833 | ||
9439ebf7 | 2834 | if (! SLOW_UNALIGNED_ACCESS (word_mode, align) |
325d1c45 | 2835 | || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT) |
80909c64 | 2836 | align = MOVE_MAX * BITS_PER_UNIT; |
dbd14dc5 | 2837 | |
6840589f | 2838 | /* First store what we can in the largest integer mode, then go to |
dbd14dc5 | 2839 | successively smaller modes. */ |
2840 | ||
2841 | while (max_size > 1) | |
2842 | { | |
dbd14dc5 | 2843 | for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); |
2844 | tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode)) | |
2845 | if (GET_MODE_SIZE (tmode) < max_size) | |
2846 | mode = tmode; | |
2847 | ||
2848 | if (mode == VOIDmode) | |
2849 | break; | |
2850 | ||
2851 | icode = mov_optab->handlers[(int) mode].insn_code; | |
325d1c45 | 2852 | if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode)) |
6840589f | 2853 | store_by_pieces_2 (GEN_FCN (icode), mode, data); |
dbd14dc5 | 2854 | |
2855 | max_size = GET_MODE_SIZE (mode); | |
2856 | } | |
2857 | ||
2858 | /* The code above should have handled everything. */ | |
6840589f | 2859 | if (data->len != 0) |
dbd14dc5 | 2860 | abort (); |
2861 | } | |
2862 | ||
6840589f | 2863 | /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate |
dbd14dc5 | 2864 | with move instructions for mode MODE. GENFUN is the gen_... function |
2865 | to make a move insn for that mode. DATA has all the other info. */ | |
2866 | ||
2867 | static void | |
6840589f | 2868 | store_by_pieces_2 (genfun, mode, data) |
621f6678 | 2869 | rtx (*genfun) PARAMS ((rtx, ...)); |
dbd14dc5 | 2870 | enum machine_mode mode; |
6840589f | 2871 | struct store_by_pieces *data; |
dbd14dc5 | 2872 | { |
f7c44134 | 2873 | unsigned int size = GET_MODE_SIZE (mode); |
6840589f | 2874 | rtx to1, cst; |
dbd14dc5 | 2875 | |
2876 | while (data->len >= size) | |
2877 | { | |
f7c44134 | 2878 | if (data->reverse) |
2879 | data->offset -= size; | |
dbd14dc5 | 2880 | |
f7c44134 | 2881 | if (data->autinc_to) |
bf42c62d | 2882 | to1 = adjust_automodify_address (data->to, mode, data->to_addr, |
2883 | data->offset); | |
fa56dc1d | 2884 | else |
e513d163 | 2885 | to1 = adjust_address (data->to, mode, data->offset); |
dbd14dc5 | 2886 | |
e4e498cf | 2887 | if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0) |
6840589f | 2888 | emit_insn (gen_add2_insn (data->to_addr, |
2889 | GEN_INT (-(HOST_WIDE_INT) size))); | |
dbd14dc5 | 2890 | |
6840589f | 2891 | cst = (*data->constfun) (data->constfundata, data->offset, mode); |
2892 | emit_insn ((*genfun) (to1, cst)); | |
f7c44134 | 2893 | |
e4e498cf | 2894 | if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0) |
dbd14dc5 | 2895 | emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size))); |
dbd14dc5 | 2896 | |
f7c44134 | 2897 | if (! data->reverse) |
2898 | data->offset += size; | |
dbd14dc5 | 2899 | |
2900 | data->len -= size; | |
2901 | } | |
2902 | } | |
2903 | \f | |
325d1c45 | 2904 | /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is |
2a631e19 | 2905 | its length in bytes. */ |
0dbd1c74 | 2906 | |
2907 | rtx | |
2a631e19 | 2908 | clear_storage (object, size) |
10f307d9 | 2909 | rtx object; |
169d1054 | 2910 | rtx size; |
10f307d9 | 2911 | { |
0dbd1c74 | 2912 | rtx retval = 0; |
2a631e19 | 2913 | unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object) |
2914 | : GET_MODE_ALIGNMENT (GET_MODE (object))); | |
0dbd1c74 | 2915 | |
20c377c2 | 2916 | /* If OBJECT is not BLKmode and SIZE is the same size as its mode, |
2917 | just move a zero. Otherwise, do this a piece at a time. */ | |
886cfd4f | 2918 | if (GET_MODE (object) != BLKmode |
20c377c2 | 2919 | && GET_CODE (size) == CONST_INT |
c0bfc78e | 2920 | && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object))) |
20c377c2 | 2921 | emit_move_insn (object, CONST0_RTX (GET_MODE (object))); |
2922 | else | |
10f307d9 | 2923 | { |
dbd14dc5 | 2924 | object = protect_from_queue (object, 1); |
2925 | size = protect_from_queue (size, 0); | |
2926 | ||
2927 | if (GET_CODE (size) == CONST_INT | |
310d3ec9 | 2928 | && CLEAR_BY_PIECES_P (INTVAL (size), align)) |
dbd14dc5 | 2929 | clear_by_pieces (object, INTVAL (size), align); |
c0bfc78e | 2930 | else if (clear_storage_via_clrstr (object, size, align)) |
2931 | ; | |
dbd14dc5 | 2932 | else |
c0bfc78e | 2933 | retval = clear_storage_via_libcall (object, size); |
2934 | } | |
2935 | ||
2936 | return retval; | |
2937 | } | |
2938 | ||
2939 | /* A subroutine of clear_storage. Expand a clrstr pattern; | |
2940 | return true if successful. */ | |
2941 | ||
2942 | static bool | |
2943 | clear_storage_via_clrstr (object, size, align) | |
2944 | rtx object, size; | |
2945 | unsigned int align; | |
2946 | { | |
2947 | /* Try the most limited insn first, because there's no point | |
2948 | including more than one in the machine description unless | |
2949 | the more limited one has some advantage. */ | |
2950 | ||
2951 | rtx opalign = GEN_INT (align / BITS_PER_UNIT); | |
2952 | enum machine_mode mode; | |
2953 | ||
2954 | for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode; | |
2955 | mode = GET_MODE_WIDER_MODE (mode)) | |
2956 | { | |
2957 | enum insn_code code = clrstr_optab[(int) mode]; | |
2958 | insn_operand_predicate_fn pred; | |
2959 | ||
2960 | if (code != CODE_FOR_nothing | |
2961 | /* We don't need MODE to be narrower than | |
2962 | BITS_PER_HOST_WIDE_INT here because if SIZE is less than | |
2963 | the mode mask, as it is returned by the macro, it will | |
2964 | definitely be less than the actual mode mask. */ | |
2965 | && ((GET_CODE (size) == CONST_INT | |
2966 | && ((unsigned HOST_WIDE_INT) INTVAL (size) | |
2967 | <= (GET_MODE_MASK (mode) >> 1))) | |
2968 | || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD) | |
2969 | && ((pred = insn_data[(int) code].operand[0].predicate) == 0 | |
2970 | || (*pred) (object, BLKmode)) | |
2971 | && ((pred = insn_data[(int) code].operand[2].predicate) == 0 | |
2972 | || (*pred) (opalign, VOIDmode))) | |
dbd14dc5 | 2973 | { |
c0bfc78e | 2974 | rtx op1; |
2975 | rtx last = get_last_insn (); | |
2976 | rtx pat; | |
dbd14dc5 | 2977 | |
c0bfc78e | 2978 | op1 = convert_to_mode (mode, size, 1); |
2979 | pred = insn_data[(int) code].operand[1].predicate; | |
2980 | if (pred != 0 && ! (*pred) (op1, mode)) | |
2981 | op1 = copy_to_mode_reg (mode, op1); | |
dbd14dc5 | 2982 | |
c0bfc78e | 2983 | pat = GEN_FCN ((int) code) (object, op1, opalign); |
2984 | if (pat) | |
dbd14dc5 | 2985 | { |
c0bfc78e | 2986 | emit_insn (pat); |
2987 | return true; | |
2988 | } | |
2989 | else | |
2990 | delete_insns_since (last); | |
2991 | } | |
2992 | } | |
dbd14dc5 | 2993 | |
c0bfc78e | 2994 | return false; |
2995 | } | |
dbd14dc5 | 2996 | |
c0bfc78e | 2997 | /* A subroutine of clear_storage. Expand a call to memset or bzero. |
2998 | Return the return value of memset, 0 otherwise. */ | |
dbd14dc5 | 2999 | |
c0bfc78e | 3000 | static rtx |
3001 | clear_storage_via_libcall (object, size) | |
3002 | rtx object, size; | |
3003 | { | |
3004 | tree call_expr, arg_list, fn, object_tree, size_tree; | |
3005 | enum machine_mode size_mode; | |
3006 | rtx retval; | |
dbd14dc5 | 3007 | |
c0bfc78e | 3008 | /* OBJECT or SIZE may have been passed through protect_from_queue. |
f708f8fd | 3009 | |
c0bfc78e | 3010 | It is unsafe to save the value generated by protect_from_queue |
3011 | and reuse it later. Consider what happens if emit_queue is | |
3012 | called before the return value from protect_from_queue is used. | |
f708f8fd | 3013 | |
c0bfc78e | 3014 | Expansion of the CALL_EXPR below will call emit_queue before |
3015 | we are finished emitting RTL for argument setup. So if we are | |
3016 | not careful we could get the wrong value for an argument. | |
f708f8fd | 3017 | |
c0bfc78e | 3018 | To avoid this problem we go ahead and emit code to copy OBJECT |
3019 | and SIZE into new pseudos. We can then place those new pseudos | |
3020 | into an RTL_EXPR and use them later, even after a call to | |
3021 | emit_queue. | |
f708f8fd | 3022 | |
c0bfc78e | 3023 | Note this is not strictly needed for library calls since they |
3024 | do not call emit_queue before loading their arguments. However, | |
3025 | we may need to have library calls call emit_queue in the future | |
3026 | since failing to do so could cause problems for targets which | |
3027 | define SMALL_REGISTER_CLASSES and pass arguments in registers. */ | |
f708f8fd | 3028 | |
c0bfc78e | 3029 | object = copy_to_mode_reg (Pmode, XEXP (object, 0)); |
f708f8fd | 3030 | |
c0bfc78e | 3031 | if (TARGET_MEM_FUNCTIONS) |
3032 | size_mode = TYPE_MODE (sizetype); | |
3033 | else | |
3034 | size_mode = TYPE_MODE (unsigned_type_node); | |
3035 | size = convert_to_mode (size_mode, size, 1); | |
3036 | size = copy_to_mode_reg (size_mode, size); | |
f708f8fd | 3037 | |
c0bfc78e | 3038 | /* It is incorrect to use the libcall calling conventions to call |
3039 | memset in this context. This could be a user call to memset and | |
3040 | the user may wish to examine the return value from memset. For | |
3041 | targets where libcalls and normal calls have different conventions | |
3042 | for returning pointers, we could end up generating incorrect code. | |
06b8e3db | 3043 | |
c0bfc78e | 3044 | For convenience, we generate the call to bzero this way as well. */ |
06b8e3db | 3045 | |
c0bfc78e | 3046 | object_tree = make_tree (ptr_type_node, object); |
3047 | if (TARGET_MEM_FUNCTIONS) | |
3048 | size_tree = make_tree (sizetype, size); | |
3049 | else | |
3050 | size_tree = make_tree (unsigned_type_node, size); | |
3051 | ||
3052 | fn = clear_storage_libcall_fn (true); | |
3053 | arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE); | |
3054 | if (TARGET_MEM_FUNCTIONS) | |
3055 | arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list); | |
3056 | arg_list = tree_cons (NULL_TREE, object_tree, arg_list); | |
3057 | ||
3058 | /* Now we have to build up the CALL_EXPR itself. */ | |
3059 | call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn); | |
3060 | call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)), | |
3061 | call_expr, arg_list, NULL_TREE); | |
3062 | TREE_SIDE_EFFECTS (call_expr) = 1; | |
3063 | ||
3064 | retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0); | |
3065 | ||
3066 | /* If we are initializing a readonly value, show the above call | |
3067 | clobbered it. Otherwise, a load from it may erroneously be | |
3068 | hoisted from a loop. */ | |
3069 | if (RTX_UNCHANGING_P (object)) | |
3070 | emit_insn (gen_rtx_CLOBBER (VOIDmode, object)); | |
3071 | ||
3072 | return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX); | |
3073 | } | |
3074 | ||
3075 | /* A subroutine of clear_storage_via_libcall. Create the tree node | |
3076 | for the function we use for block clears. The first time FOR_CALL | |
3077 | is true, we call assemble_external. */ | |
3078 | ||
3079 | static GTY(()) tree block_clear_fn; | |
8ca560c1 | 3080 | |
c0bfc78e | 3081 | static tree |
3082 | clear_storage_libcall_fn (for_call) | |
3083 | int for_call; | |
3084 | { | |
3085 | static bool emitted_extern; | |
3086 | tree fn = block_clear_fn, args; | |
3087 | ||
3088 | if (!fn) | |
3089 | { | |
3090 | if (TARGET_MEM_FUNCTIONS) | |
3091 | { | |
3092 | fn = get_identifier ("memset"); | |
3093 | args = build_function_type_list (ptr_type_node, ptr_type_node, | |
3094 | integer_type_node, sizetype, | |
3095 | NULL_TREE); | |
3096 | } | |
3097 | else | |
3098 | { | |
3099 | fn = get_identifier ("bzero"); | |
3100 | args = build_function_type_list (void_type_node, ptr_type_node, | |
3101 | unsigned_type_node, NULL_TREE); | |
dbd14dc5 | 3102 | } |
c0bfc78e | 3103 | |
3104 | fn = build_decl (FUNCTION_DECL, fn, args); | |
3105 | DECL_EXTERNAL (fn) = 1; | |
3106 | TREE_PUBLIC (fn) = 1; | |
3107 | DECL_ARTIFICIAL (fn) = 1; | |
3108 | TREE_NOTHROW (fn) = 1; | |
3109 | ||
3110 | block_clear_fn = fn; | |
10f307d9 | 3111 | } |
0dbd1c74 | 3112 | |
c0bfc78e | 3113 | if (for_call && !emitted_extern) |
3114 | { | |
3115 | emitted_extern = true; | |
3116 | make_decl_rtl (fn, NULL); | |
3117 | assemble_external (fn); | |
3118 | } | |
10f307d9 | 3119 | |
c0bfc78e | 3120 | return fn; |
3121 | } | |
3122 | \f | |
10f307d9 | 3123 | /* Generate code to copy Y into X. |
3124 | Both Y and X must have the same mode, except that | |
3125 | Y can be a constant with VOIDmode. | |
3126 | This mode cannot be BLKmode; use emit_block_move for that. | |
3127 | ||
3128 | Return the last instruction emitted. */ | |
3129 | ||
3130 | rtx | |
3131 | emit_move_insn (x, y) | |
3132 | rtx x, y; | |
3133 | { | |
3134 | enum machine_mode mode = GET_MODE (x); | |
94580317 | 3135 | rtx y_cst = NULL_RTX; |
3136 | rtx last_insn; | |
10f307d9 | 3137 | |
3138 | x = protect_from_queue (x, 1); | |
3139 | y = protect_from_queue (y, 0); | |
3140 | ||
3141 | if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode)) | |
3142 | abort (); | |
3143 | ||
dd5ff96d | 3144 | /* Never force constant_p_rtx to memory. */ |
3145 | if (GET_CODE (y) == CONSTANT_P_RTX) | |
3146 | ; | |
c0c4a46d | 3147 | else if (CONSTANT_P (y)) |
94580317 | 3148 | { |
c0c4a46d | 3149 | if (optimize |
248c3c28 | 3150 | && SCALAR_FLOAT_MODE_P (GET_MODE (x)) |
c0c4a46d | 3151 | && (last_insn = compress_float_constant (x, y))) |
3152 | return last_insn; | |
3153 | ||
3154 | if (!LEGITIMATE_CONSTANT_P (y)) | |
3155 | { | |
3156 | y_cst = y; | |
3157 | y = force_const_mem (mode, y); | |
a6bbccc1 | 3158 | |
3159 | /* If the target's cannot_force_const_mem prevented the spill, | |
3160 | assume that the target's move expanders will also take care | |
3161 | of the non-legitimate constant. */ | |
3162 | if (!y) | |
3163 | y = y_cst; | |
c0c4a46d | 3164 | } |
94580317 | 3165 | } |
10f307d9 | 3166 | |
3167 | /* If X or Y are memory references, verify that their addresses are valid | |
3168 | for the machine. */ | |
3169 | if (GET_CODE (x) == MEM | |
3170 | && ((! memory_address_p (GET_MODE (x), XEXP (x, 0)) | |
3171 | && ! push_operand (x, GET_MODE (x))) | |
3172 | || (flag_force_addr | |
3173 | && CONSTANT_ADDRESS_P (XEXP (x, 0))))) | |
537ffcfc | 3174 | x = validize_mem (x); |
10f307d9 | 3175 | |
3176 | if (GET_CODE (y) == MEM | |
3177 | && (! memory_address_p (GET_MODE (y), XEXP (y, 0)) | |
3178 | || (flag_force_addr | |
3179 | && CONSTANT_ADDRESS_P (XEXP (y, 0))))) | |
537ffcfc | 3180 | y = validize_mem (y); |
10f307d9 | 3181 | |
3182 | if (mode == BLKmode) | |
3183 | abort (); | |
3184 | ||
94580317 | 3185 | last_insn = emit_move_insn_1 (x, y); |
3186 | ||
3187 | if (y_cst && GET_CODE (x) == REG) | |
c080d8f0 | 3188 | set_unique_reg_note (last_insn, REG_EQUAL, y_cst); |
94580317 | 3189 | |
3190 | return last_insn; | |
aaad03e5 | 3191 | } |
3192 | ||
3193 | /* Low level part of emit_move_insn. | |
3194 | Called just like emit_move_insn, but assumes X and Y | |
3195 | are basically valid. */ | |
3196 | ||
3197 | rtx | |
3198 | emit_move_insn_1 (x, y) | |
3199 | rtx x, y; | |
3200 | { | |
3201 | enum machine_mode mode = GET_MODE (x); | |
3202 | enum machine_mode submode; | |
3203 | enum mode_class class = GET_MODE_CLASS (mode); | |
aaad03e5 | 3204 | |
0fd4500a | 3205 | if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE) |
fa56dc1d | 3206 | abort (); |
1203f673 | 3207 | |
10f307d9 | 3208 | if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing) |
3209 | return | |
3210 | emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y)); | |
3211 | ||
d3938eaa | 3212 | /* Expand complex moves by moving real part and imag part, if possible. */ |
b63679d2 | 3213 | else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT) |
e9e12845 | 3214 | && BLKmode != (submode = GET_MODE_INNER (mode)) |
b63679d2 | 3215 | && (mov_optab->handlers[(int) submode].insn_code |
3216 | != CODE_FOR_nothing)) | |
3217 | { | |
3218 | /* Don't split destination if it is a stack push. */ | |
3219 | int stack = push_operand (x, GET_MODE (x)); | |
b63679d2 | 3220 | |
4ed008e7 | 3221 | #ifdef PUSH_ROUNDING |
a8d8b962 | 3222 | /* In case we output to the stack, but the size is smaller machine can |
3223 | push exactly, we need to use move instructions. */ | |
3224 | if (stack | |
76ab50f8 | 3225 | && (PUSH_ROUNDING (GET_MODE_SIZE (submode)) |
3226 | != GET_MODE_SIZE (submode))) | |
a8d8b962 | 3227 | { |
3228 | rtx temp; | |
76ab50f8 | 3229 | HOST_WIDE_INT offset1, offset2; |
a8d8b962 | 3230 | |
3231 | /* Do not use anti_adjust_stack, since we don't want to update | |
3232 | stack_pointer_delta. */ | |
3233 | temp = expand_binop (Pmode, | |
3234 | #ifdef STACK_GROWS_DOWNWARD | |
3235 | sub_optab, | |
3236 | #else | |
3237 | add_optab, | |
3238 | #endif | |
3239 | stack_pointer_rtx, | |
3240 | GEN_INT | |
76ab50f8 | 3241 | (PUSH_ROUNDING |
3242 | (GET_MODE_SIZE (GET_MODE (x)))), | |
3243 | stack_pointer_rtx, 0, OPTAB_LIB_WIDEN); | |
3244 | ||
a8d8b962 | 3245 | if (temp != stack_pointer_rtx) |
3246 | emit_move_insn (stack_pointer_rtx, temp); | |
76ab50f8 | 3247 | |
a8d8b962 | 3248 | #ifdef STACK_GROWS_DOWNWARD |
3249 | offset1 = 0; | |
3250 | offset2 = GET_MODE_SIZE (submode); | |
3251 | #else | |
3252 | offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x))); | |
3253 | offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x))) | |
3254 | + GET_MODE_SIZE (submode)); | |
3255 | #endif | |
76ab50f8 | 3256 | |
a8d8b962 | 3257 | emit_move_insn (change_address (x, submode, |
3258 | gen_rtx_PLUS (Pmode, | |
3259 | stack_pointer_rtx, | |
3260 | GEN_INT (offset1))), | |
3261 | gen_realpart (submode, y)); | |
3262 | emit_move_insn (change_address (x, submode, | |
3263 | gen_rtx_PLUS (Pmode, | |
3264 | stack_pointer_rtx, | |
3265 | GEN_INT (offset2))), | |
3266 | gen_imagpart (submode, y)); | |
3267 | } | |
e3fe8c3b | 3268 | else |
4ed008e7 | 3269 | #endif |
b63679d2 | 3270 | /* If this is a stack, push the highpart first, so it |
3271 | will be in the argument order. | |
3272 | ||
3273 | In that case, change_address is used only to convert | |
3274 | the mode, not to change the address. */ | |
e3fe8c3b | 3275 | if (stack) |
90524033 | 3276 | { |
55997042 | 3277 | /* Note that the real part always precedes the imag part in memory |
3278 | regardless of machine's endianness. */ | |
90524033 | 3279 | #ifdef STACK_GROWS_DOWNWARD |
3280 | emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code) | |
f7c44134 | 3281 | (gen_rtx_MEM (submode, XEXP (x, 0)), |
55997042 | 3282 | gen_imagpart (submode, y))); |
90524033 | 3283 | emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code) |
f7c44134 | 3284 | (gen_rtx_MEM (submode, XEXP (x, 0)), |
55997042 | 3285 | gen_realpart (submode, y))); |
90524033 | 3286 | #else |
3287 | emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code) | |
f7c44134 | 3288 | (gen_rtx_MEM (submode, XEXP (x, 0)), |
55997042 | 3289 | gen_realpart (submode, y))); |
90524033 | 3290 | emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code) |
f7c44134 | 3291 | (gen_rtx_MEM (submode, XEXP (x, 0)), |
55997042 | 3292 | gen_imagpart (submode, y))); |
90524033 | 3293 | #endif |
3294 | } | |
3295 | else | |
3296 | { | |
7f964718 | 3297 | rtx realpart_x, realpart_y; |
3298 | rtx imagpart_x, imagpart_y; | |
3299 | ||
5b5abf88 | 3300 | /* If this is a complex value with each part being smaller than a |
3301 | word, the usual calling sequence will likely pack the pieces into | |
3302 | a single register. Unfortunately, SUBREG of hard registers only | |
3303 | deals in terms of words, so we have a problem converting input | |
3304 | arguments to the CONCAT of two registers that is used elsewhere | |
3305 | for complex values. If this is before reload, we can copy it into | |
3306 | memory and reload. FIXME, we should see about using extract and | |
3307 | insert on integer registers, but complex short and complex char | |
3308 | variables should be rarely used. */ | |
fa56dc1d | 3309 | if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD |
5b5abf88 | 3310 | && (reload_in_progress | reload_completed) == 0) |
3311 | { | |
76ab50f8 | 3312 | int packed_dest_p |
3313 | = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER); | |
3314 | int packed_src_p | |
3315 | = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER); | |
5b5abf88 | 3316 | |
3317 | if (packed_dest_p || packed_src_p) | |
3318 | { | |
3319 | enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT) | |
3320 | ? MODE_FLOAT : MODE_INT); | |
3321 | ||
387bc205 | 3322 | enum machine_mode reg_mode |
3323 | = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1); | |
5b5abf88 | 3324 | |
3325 | if (reg_mode != BLKmode) | |
3326 | { | |
3327 | rtx mem = assign_stack_temp (reg_mode, | |
3328 | GET_MODE_SIZE (mode), 0); | |
e513d163 | 3329 | rtx cmem = adjust_address (mem, mode, 0); |
5b5abf88 | 3330 | |
387bc205 | 3331 | cfun->cannot_inline |
3332 | = N_("function using short complex types cannot be inline"); | |
5b5abf88 | 3333 | |
3334 | if (packed_dest_p) | |
3335 | { | |
3336 | rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0); | |
76ab50f8 | 3337 | |
5b5abf88 | 3338 | emit_move_insn_1 (cmem, y); |
3339 | return emit_move_insn_1 (sreg, mem); | |
3340 | } | |
3341 | else | |
3342 | { | |
3343 | rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0); | |
76ab50f8 | 3344 | |
5b5abf88 | 3345 | emit_move_insn_1 (mem, sreg); |
3346 | return emit_move_insn_1 (x, cmem); | |
3347 | } | |
3348 | } | |
3349 | } | |
3350 | } | |
3351 | ||
7f964718 | 3352 | realpart_x = gen_realpart (submode, x); |
3353 | realpart_y = gen_realpart (submode, y); | |
3354 | imagpart_x = gen_imagpart (submode, x); | |
3355 | imagpart_y = gen_imagpart (submode, y); | |
3356 | ||
3357 | /* Show the output dies here. This is necessary for SUBREGs | |
3358 | of pseudos since we cannot track their lifetimes correctly; | |
c6abf2b8 | 3359 | hard regs shouldn't appear here except as return values. |
3360 | We never want to emit such a clobber after reload. */ | |
3361 | if (x != y | |
7f964718 | 3362 | && ! (reload_in_progress || reload_completed) |
3363 | && (GET_CODE (realpart_x) == SUBREG | |
3364 | || GET_CODE (imagpart_x) == SUBREG)) | |
76ab50f8 | 3365 | emit_insn (gen_rtx_CLOBBER (VOIDmode, x)); |
7908d3b3 | 3366 | |
90524033 | 3367 | emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code) |
7f964718 | 3368 | (realpart_x, realpart_y)); |
90524033 | 3369 | emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code) |
7f964718 | 3370 | (imagpart_x, imagpart_y)); |
90524033 | 3371 | } |
b63679d2 | 3372 | |
bc82d91b | 3373 | return get_last_insn (); |
b63679d2 | 3374 | } |
3375 | ||
78defff5 | 3376 | /* This will handle any multi-word or full-word mode that lacks a move_insn |
3377 | pattern. However, you will get better code if you define such patterns, | |
10f307d9 | 3378 | even if they must turn into multiple assembler instructions. */ |
78defff5 | 3379 | else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD) |
10f307d9 | 3380 | { |
3381 | rtx last_insn = 0; | |
6702c250 | 3382 | rtx seq, inner; |
7f964718 | 3383 | int need_clobber; |
76ab50f8 | 3384 | int i; |
fa56dc1d | 3385 | |
498aec4e | 3386 | #ifdef PUSH_ROUNDING |
3387 | ||
3388 | /* If X is a push on the stack, do the push now and replace | |
3389 | X with a reference to the stack pointer. */ | |
3390 | if (push_operand (x, GET_MODE (x))) | |
3391 | { | |
07c143fb | 3392 | rtx temp; |
3393 | enum rtx_code code; | |
ff385626 | 3394 | |
07c143fb | 3395 | /* Do not use anti_adjust_stack, since we don't want to update |
3396 | stack_pointer_delta. */ | |
3397 | temp = expand_binop (Pmode, | |
3398 | #ifdef STACK_GROWS_DOWNWARD | |
3399 | sub_optab, | |
3400 | #else | |
3401 | add_optab, | |
3402 | #endif | |
3403 | stack_pointer_rtx, | |
3404 | GEN_INT | |
76ab50f8 | 3405 | (PUSH_ROUNDING |
3406 | (GET_MODE_SIZE (GET_MODE (x)))), | |
92b7c66a | 3407 | stack_pointer_rtx, 0, OPTAB_LIB_WIDEN); |
76ab50f8 | 3408 | |
ff385626 | 3409 | if (temp != stack_pointer_rtx) |
3410 | emit_move_insn (stack_pointer_rtx, temp); | |
07c143fb | 3411 | |
3412 | code = GET_CODE (XEXP (x, 0)); | |
76ab50f8 | 3413 | |
07c143fb | 3414 | /* Just hope that small offsets off SP are OK. */ |
3415 | if (code == POST_INC) | |
ff385626 | 3416 | temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, |
76ab50f8 | 3417 | GEN_INT (-((HOST_WIDE_INT) |
3418 | GET_MODE_SIZE (GET_MODE (x))))); | |
07c143fb | 3419 | else if (code == POST_DEC) |
ff385626 | 3420 | temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, |
07c143fb | 3421 | GEN_INT (GET_MODE_SIZE (GET_MODE (x)))); |
3422 | else | |
3423 | temp = stack_pointer_rtx; | |
3424 | ||
3425 | x = change_address (x, VOIDmode, temp); | |
498aec4e | 3426 | } |
3427 | #endif | |
fa56dc1d | 3428 | |
6702c250 | 3429 | /* If we are in reload, see if either operand is a MEM whose address |
3430 | is scheduled for replacement. */ | |
3431 | if (reload_in_progress && GET_CODE (x) == MEM | |
3432 | && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0)) | |
e4e86ec5 | 3433 | x = replace_equiv_address_nv (x, inner); |
6702c250 | 3434 | if (reload_in_progress && GET_CODE (y) == MEM |
3435 | && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0)) | |
e4e86ec5 | 3436 | y = replace_equiv_address_nv (y, inner); |
6702c250 | 3437 | |
7f964718 | 3438 | start_sequence (); |
9cb64ebc | 3439 | |
7f964718 | 3440 | need_clobber = 0; |
10f307d9 | 3441 | for (i = 0; |
fa56dc1d | 3442 | i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD; |
10f307d9 | 3443 | i++) |
3444 | { | |
3445 | rtx xpart = operand_subword (x, i, 1, mode); | |
3446 | rtx ypart = operand_subword (y, i, 1, mode); | |
3447 | ||
3448 | /* If we can't get a part of Y, put Y into memory if it is a | |
3449 | constant. Otherwise, force it into a register. If we still | |
3450 | can't get a part of Y, abort. */ | |
3451 | if (ypart == 0 && CONSTANT_P (y)) | |
3452 | { | |
3453 | y = force_const_mem (mode, y); | |
3454 | ypart = operand_subword (y, i, 1, mode); | |
3455 | } | |
3456 | else if (ypart == 0) | |
3457 | ypart = operand_subword_force (y, i, mode); | |
3458 | ||
3459 | if (xpart == 0 || ypart == 0) | |
3460 | abort (); | |
3461 | ||
7f964718 | 3462 | need_clobber |= (GET_CODE (xpart) == SUBREG); |
3463 | ||
10f307d9 | 3464 | last_insn = emit_move_insn (xpart, ypart); |
3465 | } | |
dd0d17cd | 3466 | |
31d3e01c | 3467 | seq = get_insns (); |
7f964718 | 3468 | end_sequence (); |
3469 | ||
3470 | /* Show the output dies here. This is necessary for SUBREGs | |
3471 | of pseudos since we cannot track their lifetimes correctly; | |
3472 | hard regs shouldn't appear here except as return values. | |
3473 | We never want to emit such a clobber after reload. */ | |
3474 | if (x != y | |
3475 | && ! (reload_in_progress || reload_completed) | |
3476 | && need_clobber != 0) | |
76ab50f8 | 3477 | emit_insn (gen_rtx_CLOBBER (VOIDmode, x)); |
7f964718 | 3478 | |
3479 | emit_insn (seq); | |
3480 | ||
10f307d9 | 3481 | return last_insn; |
3482 | } | |
3483 | else | |
3484 | abort (); | |
3485 | } | |
c0c4a46d | 3486 | |
3487 | /* If Y is representable exactly in a narrower mode, and the target can | |
3488 | perform the extension directly from constant or memory, then emit the | |
3489 | move as an extension. */ | |
3490 | ||
3491 | static rtx | |
3492 | compress_float_constant (x, y) | |
3493 | rtx x, y; | |
3494 | { | |
3495 | enum machine_mode dstmode = GET_MODE (x); | |
3496 | enum machine_mode orig_srcmode = GET_MODE (y); | |
3497 | enum machine_mode srcmode; | |
3498 | REAL_VALUE_TYPE r; | |
3499 | ||
3500 | REAL_VALUE_FROM_CONST_DOUBLE (r, y); | |
3501 | ||
3502 | for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode)); | |
3503 | srcmode != orig_srcmode; | |
3504 | srcmode = GET_MODE_WIDER_MODE (srcmode)) | |
3505 | { | |
3506 | enum insn_code ic; | |
3507 | rtx trunc_y, last_insn; | |
3508 | ||
3509 | /* Skip if the target can't extend this way. */ | |
3510 | ic = can_extend_p (dstmode, srcmode, 0); | |
3511 | if (ic == CODE_FOR_nothing) | |
3512 | continue; | |
3513 | ||
3514 | /* Skip if the narrowed value isn't exact. */ | |
3515 | if (! exact_real_truncate (srcmode, &r)) | |
3516 | continue; | |
3517 | ||
3518 | trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode); | |
3519 | ||
3520 | if (LEGITIMATE_CONSTANT_P (trunc_y)) | |
3521 | { | |
3522 | /* Skip if the target needs extra instructions to perform | |
3523 | the extension. */ | |
3524 | if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode)) | |
3525 | continue; | |
3526 | } | |
3527 | else if (float_extend_from_mem[dstmode][srcmode]) | |
3528 | trunc_y = validize_mem (force_const_mem (srcmode, trunc_y)); | |
3529 | else | |
3530 | continue; | |
3531 | ||
3532 | emit_unop_insn (ic, x, trunc_y, UNKNOWN); | |
3533 | last_insn = get_last_insn (); | |
3534 | ||
3535 | if (GET_CODE (x) == REG) | |
3536 | REG_NOTES (last_insn) | |
3537 | = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn)); | |
3538 | ||
3539 | return last_insn; | |
3540 | } | |
3541 | ||
3542 | return NULL_RTX; | |
3543 | } | |
10f307d9 | 3544 | \f |
3545 | /* Pushing data onto the stack. */ | |
3546 | ||
3547 | /* Push a block of length SIZE (perhaps variable) | |
3548 | and return an rtx to address the beginning of the block. | |
3549 | Note that it is not possible for the value returned to be a QUEUED. | |
3550 | The value may be virtual_outgoing_args_rtx. | |
3551 | ||
3552 | EXTRA is the number of bytes of padding to push in addition to SIZE. | |
3553 | BELOW nonzero means this padding comes at low addresses; | |
3554 | otherwise, the padding comes at high addresses. */ | |
3555 | ||
3556 | rtx | |
3557 | push_block (size, extra, below) | |
3558 | rtx size; | |
3559 | int extra, below; | |
3560 | { | |
19cb6b50 | 3561 | rtx temp; |
ed8d3eee | 3562 | |
3563 | size = convert_modes (Pmode, ptr_mode, size, 1); | |
10f307d9 | 3564 | if (CONSTANT_P (size)) |
3565 | anti_adjust_stack (plus_constant (size, extra)); | |
3566 | else if (GET_CODE (size) == REG && extra == 0) | |
3567 | anti_adjust_stack (size); | |
3568 | else | |
3569 | { | |
481feae3 | 3570 | temp = copy_to_mode_reg (Pmode, size); |
10f307d9 | 3571 | if (extra != 0) |
b572011e | 3572 | temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra), |
10f307d9 | 3573 | temp, 0, OPTAB_LIB_WIDEN); |
3574 | anti_adjust_stack (temp); | |
3575 | } | |
3576 | ||
4448f543 | 3577 | #ifndef STACK_GROWS_DOWNWARD |
4448f543 | 3578 | if (0) |
4448f543 | 3579 | #else |
3580 | if (1) | |
10f307d9 | 3581 | #endif |
4448f543 | 3582 | { |
4448f543 | 3583 | temp = virtual_outgoing_args_rtx; |
3584 | if (extra != 0 && below) | |
3585 | temp = plus_constant (temp, extra); | |
3586 | } | |
3587 | else | |
3588 | { | |
3589 | if (GET_CODE (size) == CONST_INT) | |
3590 | temp = plus_constant (virtual_outgoing_args_rtx, | |
fa56dc1d | 3591 | -INTVAL (size) - (below ? 0 : extra)); |
4448f543 | 3592 | else if (extra != 0 && !below) |
3593 | temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx, | |
f7c44134 | 3594 | negate_rtx (Pmode, plus_constant (size, extra))); |
4448f543 | 3595 | else |
3596 | temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx, | |
3597 | negate_rtx (Pmode, size)); | |
3598 | } | |
10f307d9 | 3599 | |
3600 | return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp); | |
3601 | } | |
3602 | ||
fad4a30c | 3603 | #ifdef PUSH_ROUNDING |
3604 | ||
ef7dc4b4 | 3605 | /* Emit single push insn. */ |
fad4a30c | 3606 | |
ef7dc4b4 | 3607 | static void |
3608 | emit_single_push_insn (mode, x, type) | |
3609 | rtx x; | |
3610 | enum machine_mode mode; | |
3611 | tree type; | |
3612 | { | |
ef7dc4b4 | 3613 | rtx dest_addr; |
07c143fb | 3614 | unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode)); |
ef7dc4b4 | 3615 | rtx dest; |
675b92cc | 3616 | enum insn_code icode; |
3617 | insn_operand_predicate_fn pred; | |
ef7dc4b4 | 3618 | |
675b92cc | 3619 | stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode)); |
3620 | /* If there is push pattern, use it. Otherwise try old way of throwing | |
3621 | MEM representing push operation to move expander. */ | |
3622 | icode = push_optab->handlers[(int) mode].insn_code; | |
3623 | if (icode != CODE_FOR_nothing) | |
3624 | { | |
3625 | if (((pred = insn_data[(int) icode].operand[0].predicate) | |
e17f5b23 | 3626 | && !((*pred) (x, mode)))) |
675b92cc | 3627 | x = force_reg (mode, x); |
3628 | emit_insn (GEN_FCN (icode) (x)); | |
3629 | return; | |
3630 | } | |
ef7dc4b4 | 3631 | if (GET_MODE_SIZE (mode) == rounded_size) |
3632 | dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx); | |
3633 | else | |
3634 | { | |
3635 | #ifdef STACK_GROWS_DOWNWARD | |
3636 | dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, | |
e17f5b23 | 3637 | GEN_INT (-(HOST_WIDE_INT) rounded_size)); |
ef7dc4b4 | 3638 | #else |
3639 | dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, | |
3640 | GEN_INT (rounded_size)); | |
3641 | #endif | |
3642 | dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr); | |
3643 | } | |
3644 | ||
3645 | dest = gen_rtx_MEM (mode, dest_addr); | |
3646 | ||
ef7dc4b4 | 3647 | if (type != 0) |
3648 | { | |
3649 | set_mem_attributes (dest, type, 1); | |
a9d9ab08 | 3650 | |
3651 | if (flag_optimize_sibling_calls) | |
3652 | /* Function incoming arguments may overlap with sibling call | |
3653 | outgoing arguments and we cannot allow reordering of reads | |
3654 | from function arguments with stores to outgoing arguments | |
3655 | of sibling calls. */ | |
3656 | set_mem_alias_set (dest, 0); | |
ef7dc4b4 | 3657 | } |
3658 | emit_move_insn (dest, x); | |
ef7dc4b4 | 3659 | } |
fad4a30c | 3660 | #endif |
ef7dc4b4 | 3661 | |
10f307d9 | 3662 | /* Generate code to push X onto the stack, assuming it has mode MODE and |
3663 | type TYPE. | |
3664 | MODE is redundant except when X is a CONST_INT (since they don't | |
3665 | carry mode info). | |
3666 | SIZE is an rtx for the size of data to be copied (in bytes), | |
3667 | needed only if X is BLKmode. | |
3668 | ||
decd7a45 | 3669 | ALIGN (in bits) is maximum alignment we can assume. |
10f307d9 | 3670 | |
a984cc1e | 3671 | If PARTIAL and REG are both nonzero, then copy that many of the first |
3672 | words of X into registers starting with REG, and push the rest of X. | |
10f307d9 | 3673 | The amount of space pushed is decreased by PARTIAL words, |
3674 | rounded *down* to a multiple of PARM_BOUNDARY. | |
3675 | REG must be a hard register in this case. | |
a984cc1e | 3676 | If REG is zero but PARTIAL is not, take any all others actions for an |
3677 | argument partially in registers, but do not actually load any | |
3678 | registers. | |
10f307d9 | 3679 | |
3680 | EXTRA is the amount in bytes of extra space to leave next to this arg. | |
4bbea254 | 3681 | This is ignored if an argument block has already been allocated. |
10f307d9 | 3682 | |
3683 | On a machine that lacks real push insns, ARGS_ADDR is the address of | |
3684 | the bottom of the argument block for this call. We use indexing off there | |
3685 | to store the arg. On machines with push insns, ARGS_ADDR is 0 when a | |
3686 | argument block has not been preallocated. | |
3687 | ||
997d68fe | 3688 | ARGS_SO_FAR is the size of args previously pushed for this call. |
3689 | ||
3690 | REG_PARM_STACK_SPACE is nonzero if functions require stack space | |
3691 | for arguments passed in registers. If nonzero, it will be the number | |
3692 | of bytes required. */ | |
10f307d9 | 3693 | |
3694 | void | |
3695 | emit_push_insn (x, mode, type, size, align, partial, reg, extra, | |
9d855d2f | 3696 | args_addr, args_so_far, reg_parm_stack_space, |
ff385626 | 3697 | alignment_pad) |
19cb6b50 | 3698 | rtx x; |
10f307d9 | 3699 | enum machine_mode mode; |
3700 | tree type; | |
3701 | rtx size; | |
fe352cf1 | 3702 | unsigned int align; |
10f307d9 | 3703 | int partial; |
3704 | rtx reg; | |
3705 | int extra; | |
3706 | rtx args_addr; | |
3707 | rtx args_so_far; | |
997d68fe | 3708 | int reg_parm_stack_space; |
9d855d2f | 3709 | rtx alignment_pad; |
10f307d9 | 3710 | { |
3711 | rtx xinner; | |
3712 | enum direction stack_direction | |
3713 | #ifdef STACK_GROWS_DOWNWARD | |
3714 | = downward; | |
3715 | #else | |
3716 | = upward; | |
3717 | #endif | |
3718 | ||
3719 | /* Decide where to pad the argument: `downward' for below, | |
3720 | `upward' for above, or `none' for don't pad it. | |
3721 | Default is below for small data on big-endian machines; else above. */ | |
3722 | enum direction where_pad = FUNCTION_ARG_PADDING (mode, type); | |
3723 | ||
ff385626 | 3724 | /* Invert direction if stack is post-decrement. |
12a97a04 | 3725 | FIXME: why? */ |
3726 | if (STACK_PUSH_CODE == POST_DEC) | |
10f307d9 | 3727 | if (where_pad != none) |
3728 | where_pad = (where_pad == downward ? upward : downward); | |
3729 | ||
3730 | xinner = x = protect_from_queue (x, 0); | |
3731 | ||
3732 | if (mode == BLKmode) | |
3733 | { | |
3734 | /* Copy a block into the stack, entirely or partially. */ | |
3735 | ||
19cb6b50 | 3736 | rtx temp; |
10f307d9 | 3737 | int used = partial * UNITS_PER_WORD; |
3738 | int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT); | |
3739 | int skip; | |
fa56dc1d | 3740 | |
10f307d9 | 3741 | if (size == 0) |
3742 | abort (); | |
3743 | ||
3744 | used -= offset; | |
3745 | ||
3746 | /* USED is now the # of bytes we need not copy to the stack | |
3747 | because registers will take care of them. */ | |
3748 | ||
3749 | if (partial != 0) | |
e513d163 | 3750 | xinner = adjust_address (xinner, BLKmode, used); |
10f307d9 | 3751 | |
3752 | /* If the partial register-part of the arg counts in its stack size, | |
3753 | skip the part of stack space corresponding to the registers. | |
3754 | Otherwise, start copying to the beginning of the stack space, | |
3755 | by setting SKIP to 0. */ | |
997d68fe | 3756 | skip = (reg_parm_stack_space == 0) ? 0 : used; |
10f307d9 | 3757 | |
3758 | #ifdef PUSH_ROUNDING | |
3759 | /* Do it with several push insns if that doesn't take lots of insns | |
3760 | and if there is no difficulty with push insns that skip bytes | |
3761 | on the stack for alignment purposes. */ | |
3762 | if (args_addr == 0 | |
4448f543 | 3763 | && PUSH_ARGS |
10f307d9 | 3764 | && GET_CODE (size) == CONST_INT |
3765 | && skip == 0 | |
928a6bdc | 3766 | && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align)) |
10f307d9 | 3767 | /* Here we avoid the case of a structure whose weak alignment |
3768 | forces many pushes of a small amount of data, | |
3769 | and such small pushes do rounding that causes trouble. */ | |
9439ebf7 | 3770 | && ((! SLOW_UNALIGNED_ACCESS (word_mode, align)) |
325d1c45 | 3771 | || align >= BIGGEST_ALIGNMENT |
decd7a45 | 3772 | || (PUSH_ROUNDING (align / BITS_PER_UNIT) |
3773 | == (align / BITS_PER_UNIT))) | |
10f307d9 | 3774 | && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size)) |
3775 | { | |
3776 | /* Push padding now if padding above and stack grows down, | |
3777 | or if padding below and stack grows up. | |
3778 | But if space already allocated, this has already been done. */ | |
3779 | if (extra && args_addr == 0 | |
3780 | && where_pad != none && where_pad != stack_direction) | |
b572011e | 3781 | anti_adjust_stack (GEN_INT (extra)); |
10f307d9 | 3782 | |
ef7dc4b4 | 3783 | move_by_pieces (NULL, xinner, INTVAL (size) - used, align); |
10f307d9 | 3784 | } |
3785 | else | |
fa56dc1d | 3786 | #endif /* PUSH_ROUNDING */ |
10f307d9 | 3787 | { |
a9f2963b | 3788 | rtx target; |
3789 | ||
10f307d9 | 3790 | /* Otherwise make space on the stack and copy the data |
3791 | to the address of that space. */ | |
3792 | ||
3793 | /* Deduct words put into registers from the size we must copy. */ | |
3794 | if (partial != 0) | |
3795 | { | |
3796 | if (GET_CODE (size) == CONST_INT) | |
b572011e | 3797 | size = GEN_INT (INTVAL (size) - used); |
10f307d9 | 3798 | else |
3799 | size = expand_binop (GET_MODE (size), sub_optab, size, | |
b572011e | 3800 | GEN_INT (used), NULL_RTX, 0, |
3801 | OPTAB_LIB_WIDEN); | |
10f307d9 | 3802 | } |
3803 | ||
3804 | /* Get the address of the stack space. | |
3805 | In this case, we do not deal with EXTRA separately. | |
3806 | A single stack adjust will do. */ | |
3807 | if (! args_addr) | |
3808 | { | |
3809 | temp = push_block (size, extra, where_pad == downward); | |
3810 | extra = 0; | |
3811 | } | |
3812 | else if (GET_CODE (args_so_far) == CONST_INT) | |
3813 | temp = memory_address (BLKmode, | |
3814 | plus_constant (args_addr, | |
3815 | skip + INTVAL (args_so_far))); | |
3816 | else | |
3817 | temp = memory_address (BLKmode, | |
941522d6 | 3818 | plus_constant (gen_rtx_PLUS (Pmode, |
3819 | args_addr, | |
3820 | args_so_far), | |
10f307d9 | 3821 | skip)); |
c0bfc78e | 3822 | |
3823 | if (!ACCUMULATE_OUTGOING_ARGS) | |
3824 | { | |
3825 | /* If the source is referenced relative to the stack pointer, | |
3826 | copy it to another register to stabilize it. We do not need | |
3827 | to do this if we know that we won't be changing sp. */ | |
3828 | ||
3829 | if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp) | |
3830 | || reg_mentioned_p (virtual_outgoing_args_rtx, temp)) | |
3831 | temp = copy_to_reg (temp); | |
3832 | } | |
3833 | ||
fa56dc1d | 3834 | target = gen_rtx_MEM (BLKmode, temp); |
a9f2963b | 3835 | |
fa56dc1d | 3836 | if (type != 0) |
3837 | { | |
3838 | set_mem_attributes (target, type, 1); | |
3839 | /* Function incoming arguments may overlap with sibling call | |
3840 | outgoing arguments and we cannot allow reordering of reads | |
3841 | from function arguments with stores to outgoing arguments | |
3842 | of sibling calls. */ | |
ab6ab77e | 3843 | set_mem_alias_set (target, 0); |
fa56dc1d | 3844 | } |
c0bfc78e | 3845 | |
0378dbdc | 3846 | /* ALIGN may well be better aligned than TYPE, e.g. due to |
3847 | PARM_BOUNDARY. Assume the caller isn't lying. */ | |
3848 | set_mem_align (target, align); | |
c0bfc78e | 3849 | |
0378dbdc | 3850 | emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM); |
10f307d9 | 3851 | } |
3852 | } | |
3853 | else if (partial > 0) | |
3854 | { | |
3855 | /* Scalar partly in registers. */ | |
3856 | ||
3857 | int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD; | |
3858 | int i; | |
3859 | int not_stack; | |
3860 | /* # words of start of argument | |
3861 | that we must make space for but need not store. */ | |
3862 | int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD); | |
3863 | int args_offset = INTVAL (args_so_far); | |
3864 | int skip; | |
3865 | ||
3866 | /* Push padding now if padding above and stack grows down, | |
3867 | or if padding below and stack grows up. | |
3868 | But if space already allocated, this has already been done. */ | |
3869 | if (extra && args_addr == 0 | |
3870 | && where_pad != none && where_pad != stack_direction) | |
b572011e | 3871 | anti_adjust_stack (GEN_INT (extra)); |
10f307d9 | 3872 | |
3873 | /* If we make space by pushing it, we might as well push | |
3874 | the real data. Otherwise, we can leave OFFSET nonzero | |
3875 | and leave the space uninitialized. */ | |
3876 | if (args_addr == 0) | |
3877 | offset = 0; | |
3878 | ||
3879 | /* Now NOT_STACK gets the number of words that we don't need to | |
3880 | allocate on the stack. */ | |
3881 | not_stack = partial - offset; | |
3882 | ||
3883 | /* If the partial register-part of the arg counts in its stack size, | |
3884 | skip the part of stack space corresponding to the registers. | |
3885 | Otherwise, start copying to the beginning of the stack space, | |
3886 | by setting SKIP to 0. */ | |
997d68fe | 3887 | skip = (reg_parm_stack_space == 0) ? 0 : not_stack; |
10f307d9 | 3888 | |
3889 | if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x)) | |
3890 | x = validize_mem (force_const_mem (mode, x)); | |
3891 | ||
3892 | /* If X is a hard register in a non-integer mode, copy it into a pseudo; | |
3893 | SUBREGs of such registers are not allowed. */ | |
3894 | if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER | |
3895 | && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT)) | |
3896 | x = copy_to_reg (x); | |
3897 | ||
3898 | /* Loop over all the words allocated on the stack for this arg. */ | |
3899 | /* We can do it by words, because any scalar bigger than a word | |
3900 | has a size a multiple of a word. */ | |
3901 | #ifndef PUSH_ARGS_REVERSED | |
3902 | for (i = not_stack; i < size; i++) | |
3903 | #else | |
3904 | for (i = size - 1; i >= not_stack; i--) | |
3905 | #endif | |
3906 | if (i >= not_stack + offset) | |
3907 | emit_push_insn (operand_subword_force (x, i, mode), | |
b572011e | 3908 | word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX, |
3909 | 0, args_addr, | |
3910 | GEN_INT (args_offset + ((i - not_stack + skip) | |
997d68fe | 3911 | * UNITS_PER_WORD)), |
9d855d2f | 3912 | reg_parm_stack_space, alignment_pad); |
10f307d9 | 3913 | } |
3914 | else | |
3915 | { | |
3916 | rtx addr; | |
f7c44134 | 3917 | rtx dest; |
10f307d9 | 3918 | |
3919 | /* Push padding now if padding above and stack grows down, | |
3920 | or if padding below and stack grows up. | |
3921 | But if space already allocated, this has already been done. */ | |
3922 | if (extra && args_addr == 0 | |
3923 | && where_pad != none && where_pad != stack_direction) | |
b572011e | 3924 | anti_adjust_stack (GEN_INT (extra)); |
10f307d9 | 3925 | |
3926 | #ifdef PUSH_ROUNDING | |
4448f543 | 3927 | if (args_addr == 0 && PUSH_ARGS) |
ef7dc4b4 | 3928 | emit_single_push_insn (mode, x, type); |
10f307d9 | 3929 | else |
3930 | #endif | |
eb4b06b6 | 3931 | { |
3932 | if (GET_CODE (args_so_far) == CONST_INT) | |
3933 | addr | |
3934 | = memory_address (mode, | |
fa56dc1d | 3935 | plus_constant (args_addr, |
eb4b06b6 | 3936 | INTVAL (args_so_far))); |
fa56dc1d | 3937 | else |
941522d6 | 3938 | addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr, |
3939 | args_so_far)); | |
ef7dc4b4 | 3940 | dest = gen_rtx_MEM (mode, addr); |
3941 | if (type != 0) | |
3942 | { | |
3943 | set_mem_attributes (dest, type, 1); | |
3944 | /* Function incoming arguments may overlap with sibling call | |
3945 | outgoing arguments and we cannot allow reordering of reads | |
3946 | from function arguments with stores to outgoing arguments | |
3947 | of sibling calls. */ | |
ab6ab77e | 3948 | set_mem_alias_set (dest, 0); |
ef7dc4b4 | 3949 | } |
10f307d9 | 3950 | |
ef7dc4b4 | 3951 | emit_move_insn (dest, x); |
ef7dc4b4 | 3952 | } |
10f307d9 | 3953 | } |
3954 | ||
10f307d9 | 3955 | /* If part should go in registers, copy that part |
3956 | into the appropriate registers. Do this now, at the end, | |
3957 | since mem-to-mem copies above may do function calls. */ | |
a984cc1e | 3958 | if (partial > 0 && reg != 0) |
ce739127 | 3959 | { |
3960 | /* Handle calls that pass values in multiple non-contiguous locations. | |
3961 | The Irix 6 ABI has examples of this. */ | |
3962 | if (GET_CODE (reg) == PARALLEL) | |
2c269e73 | 3963 | emit_group_load (reg, x, -1); /* ??? size? */ |
ce739127 | 3964 | else |
3965 | move_block_to_reg (REGNO (reg), x, partial, mode); | |
3966 | } | |
10f307d9 | 3967 | |
3968 | if (extra && args_addr == 0 && where_pad == stack_direction) | |
b572011e | 3969 | anti_adjust_stack (GEN_INT (extra)); |
fa56dc1d | 3970 | |
364a85bd | 3971 | if (alignment_pad && args_addr == 0) |
9d855d2f | 3972 | anti_adjust_stack (alignment_pad); |
10f307d9 | 3973 | } |
3974 | \f | |
d8e5b213 | 3975 | /* Return X if X can be used as a subtarget in a sequence of arithmetic |
3976 | operations. */ | |
3977 | ||
3978 | static rtx | |
3979 | get_subtarget (x) | |
3980 | rtx x; | |
3981 | { | |
3982 | return ((x == 0 | |
3983 | /* Only registers can be subtargets. */ | |
3984 | || GET_CODE (x) != REG | |
3985 | /* If the register is readonly, it can't be set more than once. */ | |
3986 | || RTX_UNCHANGING_P (x) | |
3987 | /* Don't use hard regs to avoid extending their life. */ | |
3988 | || REGNO (x) < FIRST_PSEUDO_REGISTER | |
3989 | /* Avoid subtargets inside loops, | |
3990 | since they hide some invariant expressions. */ | |
3991 | || preserve_subexpressions_p ()) | |
3992 | ? 0 : x); | |
3993 | } | |
3994 | ||
10f307d9 | 3995 | /* Expand an assignment that stores the value of FROM into TO. |
3996 | If WANT_VALUE is nonzero, return an rtx for the value of TO. | |
9282409c | 3997 | (This may contain a QUEUED rtx; |
3998 | if the value is constant, this rtx is a constant.) | |
3999 | Otherwise, the returned value is NULL_RTX. | |
10f307d9 | 4000 | |
4001 | SUGGEST_REG is no longer actually used. | |
4002 | It used to mean, copy the value through a register | |
4003 | and return that register, if that is possible. | |
9282409c | 4004 | We now use WANT_VALUE to decide whether to do this. */ |
10f307d9 | 4005 | |
4006 | rtx | |
4007 | expand_assignment (to, from, want_value, suggest_reg) | |
4008 | tree to, from; | |
4009 | int want_value; | |
7014838c | 4010 | int suggest_reg ATTRIBUTE_UNUSED; |
10f307d9 | 4011 | { |
19cb6b50 | 4012 | rtx to_rtx = 0; |
10f307d9 | 4013 | rtx result; |
4014 | ||
4015 | /* Don't crash if the lhs of the assignment was erroneous. */ | |
4016 | ||
4017 | if (TREE_CODE (to) == ERROR_MARK) | |
9282409c | 4018 | { |
4019 | result = expand_expr (from, NULL_RTX, VOIDmode, 0); | |
4020 | return want_value ? result : NULL_RTX; | |
4021 | } | |
10f307d9 | 4022 | |
4023 | /* Assignment of a structure component needs special treatment | |
4024 | if the structure component's rtx is not simply a MEM. | |
e3a8913c | 4025 | Assignment of an array element at a constant index, and assignment of |
4026 | an array element in an unaligned packed structure field, has the same | |
4027 | problem. */ | |
10f307d9 | 4028 | |
26e80911 | 4029 | if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF |
ba04d9d5 | 4030 | || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF) |
10f307d9 | 4031 | { |
4032 | enum machine_mode mode1; | |
02e7a332 | 4033 | HOST_WIDE_INT bitsize, bitpos; |
2b96c5f6 | 4034 | rtx orig_to_rtx; |
954bdcb1 | 4035 | tree offset; |
10f307d9 | 4036 | int unsignedp; |
4037 | int volatilep = 0; | |
88ac3f7f | 4038 | tree tem; |
4039 | ||
4040 | push_temp_slots (); | |
7fce34be | 4041 | tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1, |
2b96c5f6 | 4042 | &unsignedp, &volatilep); |
10f307d9 | 4043 | |
4044 | /* If we are going to use store_bit_field and extract_bit_field, | |
4045 | make sure to_rtx will be safe for multiple use. */ | |
4046 | ||
4047 | if (mode1 == VOIDmode && want_value) | |
4048 | tem = stabilize_reference (tem); | |
4049 | ||
a689a61a | 4050 | orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0); |
4051 | ||
954bdcb1 | 4052 | if (offset != 0) |
4053 | { | |
fac6aae6 | 4054 | rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM); |
954bdcb1 | 4055 | |
4056 | if (GET_CODE (to_rtx) != MEM) | |
4057 | abort (); | |
33ef2f52 | 4058 | |
33ef2f52 | 4059 | #ifdef POINTERS_EXTEND_UNSIGNED |
479e4d5e | 4060 | if (GET_MODE (offset_rtx) != Pmode) |
4061 | offset_rtx = convert_memory_address (Pmode, offset_rtx); | |
4a836698 | 4062 | #else |
4063 | if (GET_MODE (offset_rtx) != ptr_mode) | |
4064 | offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0); | |
33ef2f52 | 4065 | #endif |
33ef2f52 | 4066 | |
d89d783c | 4067 | /* A constant address in TO_RTX can have VOIDmode, we must not try |
4068 | to call force_reg for that case. Avoid that case. */ | |
25d55d72 | 4069 | if (GET_CODE (to_rtx) == MEM |
4070 | && GET_MODE (to_rtx) == BLKmode | |
d89d783c | 4071 | && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode |
2b96c5f6 | 4072 | && bitsize > 0 |
fa56dc1d | 4073 | && (bitpos % bitsize) == 0 |
25d55d72 | 4074 | && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0 |
2b96c5f6 | 4075 | && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1)) |
25d55d72 | 4076 | { |
fac6aae6 | 4077 | to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT); |
25d55d72 | 4078 | bitpos = 0; |
4079 | } | |
4080 | ||
fcdc122e | 4081 | to_rtx = offset_address (to_rtx, offset_rtx, |
5b965633 | 4082 | highest_pow2_factor_for_type (TREE_TYPE (to), |
4083 | offset)); | |
954bdcb1 | 4084 | } |
7014838c | 4085 | |
b10dbbca | 4086 | if (GET_CODE (to_rtx) == MEM) |
4087 | { | |
b10dbbca | 4088 | /* If the field is at offset zero, we could have been given the |
4089 | DECL_RTX of the parent struct. Don't munge it. */ | |
4090 | to_rtx = shallow_copy_rtx (to_rtx); | |
4091 | ||
6f717f77 | 4092 | set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos); |
b10dbbca | 4093 | } |
46652181 | 4094 | |
2b96c5f6 | 4095 | /* Deal with volatile and readonly fields. The former is only done |
4096 | for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */ | |
4097 | if (volatilep && GET_CODE (to_rtx) == MEM) | |
4098 | { | |
4099 | if (to_rtx == orig_to_rtx) | |
4100 | to_rtx = copy_rtx (to_rtx); | |
4101 | MEM_VOLATILE_P (to_rtx) = 1; | |
10f307d9 | 4102 | } |
4103 | ||
ad87de1e | 4104 | if (TREE_CODE (to) == COMPONENT_REF |
4105 | && TREE_READONLY (TREE_OPERAND (to, 1))) | |
4106 | { | |
2b96c5f6 | 4107 | if (to_rtx == orig_to_rtx) |
ad87de1e | 4108 | to_rtx = copy_rtx (to_rtx); |
ad87de1e | 4109 | RTX_UNCHANGING_P (to_rtx) = 1; |
4110 | } | |
4111 | ||
666954ce | 4112 | if (GET_CODE (to_rtx) == MEM && ! can_address_p (to)) |
2b96c5f6 | 4113 | { |
4114 | if (to_rtx == orig_to_rtx) | |
4115 | to_rtx = copy_rtx (to_rtx); | |
4116 | MEM_KEEP_ALIAS_SET_P (to_rtx) = 1; | |
4117 | } | |
4118 | ||
2b96c5f6 | 4119 | result = store_field (to_rtx, bitsize, bitpos, mode1, from, |
4120 | (want_value | |
4121 | /* Spurious cast for HPUX compiler. */ | |
4122 | ? ((enum machine_mode) | |
4123 | TYPE_MODE (TREE_TYPE (to))) | |
4124 | : VOIDmode), | |
4125 | unsignedp, TREE_TYPE (tem), get_alias_set (to)); | |
2e918804 | 4126 | |
2b96c5f6 | 4127 | preserve_temp_slots (result); |
4128 | free_temp_slots (); | |
4129 | pop_temp_slots (); | |
2e918804 | 4130 | |
2b96c5f6 | 4131 | /* If the value is meaningful, convert RESULT to the proper mode. |
4132 | Otherwise, return nothing. */ | |
4133 | return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)), | |
4134 | TYPE_MODE (TREE_TYPE (from)), | |
4135 | result, | |
4136 | TREE_UNSIGNED (TREE_TYPE (to))) | |
4137 | : NULL_RTX); | |
10f307d9 | 4138 | } |
4139 | ||
a2e044a5 | 4140 | /* If the rhs is a function call and its value is not an aggregate, |
4141 | call the function before we start to compute the lhs. | |
4142 | This is needed for correct code for cases such as | |
4143 | val = setjmp (buf) on machines where reference to val | |
e767499e | 4144 | requires loading up part of an address in a separate insn. |
4145 | ||
16a8193d | 4146 | Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG |
4147 | since it might be a promoted variable where the zero- or sign- extension | |
4148 | needs to be done. Handling this in the normal way is safe because no | |
4149 | computation is done before the call. */ | |
e767499e | 4150 | if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from) |
61b44857 | 4151 | && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST |
16a8193d | 4152 | && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL) |
4153 | && GET_CODE (DECL_RTL (to)) == REG)) | |
a2e044a5 | 4154 | { |
88ac3f7f | 4155 | rtx value; |
4156 | ||
4157 | push_temp_slots (); | |
4158 | value = expand_expr (from, NULL_RTX, VOIDmode, 0); | |
a2e044a5 | 4159 | if (to_rtx == 0) |
8a06f2d4 | 4160 | to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE); |
ac263f88 | 4161 | |
ce739127 | 4162 | /* Handle calls that return values in multiple non-contiguous locations. |
4163 | The Irix 6 ABI has examples of this. */ | |
4164 | if (GET_CODE (to_rtx) == PARALLEL) | |
2c269e73 | 4165 | emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from))); |
ce739127 | 4166 | else if (GET_MODE (to_rtx) == BLKmode) |
0378dbdc | 4167 | emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL); |
ac263f88 | 4168 | else |
5471b3be | 4169 | { |
4170 | #ifdef POINTERS_EXTEND_UNSIGNED | |
fcdc122e | 4171 | if (POINTER_TYPE_P (TREE_TYPE (to)) |
4172 | && GET_MODE (to_rtx) != GET_MODE (value)) | |
5471b3be | 4173 | value = convert_memory_address (GET_MODE (to_rtx), value); |
4174 | #endif | |
4175 | emit_move_insn (to_rtx, value); | |
4176 | } | |
a2e044a5 | 4177 | preserve_temp_slots (to_rtx); |
4178 | free_temp_slots (); | |
88ac3f7f | 4179 | pop_temp_slots (); |
9282409c | 4180 | return want_value ? to_rtx : NULL_RTX; |
a2e044a5 | 4181 | } |
4182 | ||
10f307d9 | 4183 | /* Ordinary treatment. Expand TO to get a REG or MEM rtx. |
4184 | Don't re-expand if it was expanded already (in COMPONENT_REF case). */ | |
4185 | ||
4186 | if (to_rtx == 0) | |
8a06f2d4 | 4187 | to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE); |
10f307d9 | 4188 | |
addbe7ac | 4189 | /* Don't move directly into a return register. */ |
155b05dc | 4190 | if (TREE_CODE (to) == RESULT_DECL |
4191 | && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL)) | |
addbe7ac | 4192 | { |
88ac3f7f | 4193 | rtx temp; |
4194 | ||
4195 | push_temp_slots (); | |
4196 | temp = expand_expr (from, 0, GET_MODE (to_rtx), 0); | |
155b05dc | 4197 | |
4198 | if (GET_CODE (to_rtx) == PARALLEL) | |
2c269e73 | 4199 | emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from))); |
155b05dc | 4200 | else |
4201 | emit_move_insn (to_rtx, temp); | |
4202 | ||
addbe7ac | 4203 | preserve_temp_slots (to_rtx); |
4204 | free_temp_slots (); | |
88ac3f7f | 4205 | pop_temp_slots (); |
9282409c | 4206 | return want_value ? to_rtx : NULL_RTX; |
addbe7ac | 4207 | } |
4208 | ||
10f307d9 | 4209 | /* In case we are returning the contents of an object which overlaps |
4210 | the place the value is being stored, use a safe function when copying | |
4211 | a value through a pointer into a structure value return block. */ | |
4212 | if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF | |
4213 | && current_function_returns_struct | |
4214 | && !current_function_returns_pcc_struct) | |
4215 | { | |
88ac3f7f | 4216 | rtx from_rtx, size; |
4217 | ||
4218 | push_temp_slots (); | |
eaf7767e | 4219 | size = expr_size (from); |
8a06f2d4 | 4220 | from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0); |
10f307d9 | 4221 | |
c0bfc78e | 4222 | if (TARGET_MEM_FUNCTIONS) |
4223 | emit_library_call (memmove_libfunc, LCT_NORMAL, | |
4224 | VOIDmode, 3, XEXP (to_rtx, 0), Pmode, | |
4225 | XEXP (from_rtx, 0), Pmode, | |
4226 | convert_to_mode (TYPE_MODE (sizetype), | |
4227 | size, TREE_UNSIGNED (sizetype)), | |
4228 | TYPE_MODE (sizetype)); | |
4229 | else | |
4230 | emit_library_call (bcopy_libfunc, LCT_NORMAL, | |
4231 | VOIDmode, 3, XEXP (from_rtx, 0), Pmode, | |
4232 | XEXP (to_rtx, 0), Pmode, | |
4233 | convert_to_mode (TYPE_MODE (integer_type_node), | |
4234 | size, | |
4235 | TREE_UNSIGNED (integer_type_node)), | |
4236 | TYPE_MODE (integer_type_node)); | |
10f307d9 | 4237 | |
4238 | preserve_temp_slots (to_rtx); | |
4239 | free_temp_slots (); | |
88ac3f7f | 4240 | pop_temp_slots (); |
9282409c | 4241 | return want_value ? to_rtx : NULL_RTX; |
10f307d9 | 4242 | } |
4243 | ||
4244 | /* Compute FROM and store the value in the rtx we got. */ | |
4245 | ||
88ac3f7f | 4246 | push_temp_slots (); |
10f307d9 | 4247 | result = store_expr (from, to_rtx, want_value); |
4248 | preserve_temp_slots (result); | |
4249 | free_temp_slots (); | |
88ac3f7f | 4250 | pop_temp_slots (); |
9282409c | 4251 | return want_value ? result : NULL_RTX; |
10f307d9 | 4252 | } |
4253 | ||
4254 | /* Generate code for computing expression EXP, | |
4255 | and storing the value into TARGET. | |
10f307d9 | 4256 | TARGET may contain a QUEUED rtx. |
4257 | ||
9282409c | 4258 | If WANT_VALUE is nonzero, return a copy of the value |
4259 | not in TARGET, so that we can be sure to use the proper | |
4260 | value in a containing expression even if TARGET has something | |
4261 | else stored in it. If possible, we copy the value through a pseudo | |
4262 | and return that pseudo. Or, if the value is constant, we try to | |
4263 | return the constant. In some cases, we return a pseudo | |
4264 | copied *from* TARGET. | |
4265 | ||
4266 | If the mode is BLKmode then we may return TARGET itself. | |
4267 | It turns out that in BLKmode it doesn't cause a problem. | |
4268 | because C has no operators that could combine two different | |
4269 | assignments into the same BLKmode object with different values | |
4270 | with no sequence point. Will other languages need this to | |
4271 | be more thorough? | |
4272 | ||
4273 | If WANT_VALUE is 0, we return NULL, to make sure | |
4274 | to catch quickly any cases where the caller uses the value | |
4275 | and fails to set WANT_VALUE. */ | |
10f307d9 | 4276 | |
4277 | rtx | |
9282409c | 4278 | store_expr (exp, target, want_value) |
19cb6b50 | 4279 | tree exp; |
4280 | rtx target; | |
9282409c | 4281 | int want_value; |
10f307d9 | 4282 | { |
19cb6b50 | 4283 | rtx temp; |
10f307d9 | 4284 | int dont_return_target = 0; |
afadb0ab | 4285 | int dont_store_target = 0; |
10f307d9 | 4286 | |
4287 | if (TREE_CODE (exp) == COMPOUND_EXPR) | |
4288 | { | |
4289 | /* Perform first part of compound expression, then assign from second | |
4290 | part. */ | |
4291 | expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0); | |
4292 | emit_queue (); | |
9282409c | 4293 | return store_expr (TREE_OPERAND (exp, 1), target, want_value); |
10f307d9 | 4294 | } |
4295 | else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode) | |
4296 | { | |
4297 | /* For conditional expression, get safe form of the target. Then | |
4298 | test the condition, doing the appropriate assignment on either | |
4299 | side. This avoids the creation of unnecessary temporaries. | |
4300 | For non-BLKmode, it is more efficient not to do this. */ | |
4301 | ||
4302 | rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx (); | |
4303 | ||
4304 | emit_queue (); | |
4305 | target = protect_from_queue (target, 1); | |
4306 | ||
d07f1b1f | 4307 | do_pending_stack_adjust (); |
10f307d9 | 4308 | NO_DEFER_POP; |
4309 | jumpifnot (TREE_OPERAND (exp, 0), lab1); | |
ad87de1e | 4310 | start_cleanup_deferral (); |
9282409c | 4311 | store_expr (TREE_OPERAND (exp, 1), target, 0); |
ad87de1e | 4312 | end_cleanup_deferral (); |
10f307d9 | 4313 | emit_queue (); |
4314 | emit_jump_insn (gen_jump (lab2)); | |
4315 | emit_barrier (); | |
4316 | emit_label (lab1); | |
ad87de1e | 4317 | start_cleanup_deferral (); |
9282409c | 4318 | store_expr (TREE_OPERAND (exp, 2), target, 0); |
ad87de1e | 4319 | end_cleanup_deferral (); |
10f307d9 | 4320 | emit_queue (); |
4321 | emit_label (lab2); | |
4322 | OK_DEFER_POP; | |
9012f57d | 4323 | |
9282409c | 4324 | return want_value ? target : NULL_RTX; |
10f307d9 | 4325 | } |
10f307d9 | 4326 | else if (queued_subexp_p (target)) |
9282409c | 4327 | /* If target contains a postincrement, let's not risk |
4328 | using it as the place to generate the rhs. */ | |
10f307d9 | 4329 | { |
4330 | if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode) | |
4331 | { | |
4332 | /* Expand EXP into a new pseudo. */ | |
4333 | temp = gen_reg_rtx (GET_MODE (target)); | |
4334 | temp = expand_expr (exp, temp, GET_MODE (target), 0); | |
4335 | } | |
4336 | else | |
b572011e | 4337 | temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0); |
9282409c | 4338 | |
4339 | /* If target is volatile, ANSI requires accessing the value | |
4340 | *from* the target, if it is accessed. So make that happen. | |
4341 | In no case return the target itself. */ | |
4342 | if (! MEM_VOLATILE_P (target) && want_value) | |
4343 | dont_return_target = 1; | |
10f307d9 | 4344 | } |
bb11bacb | 4345 | else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target) |
4346 | && GET_MODE (target) != BLKmode) | |
4347 | /* If target is in memory and caller wants value in a register instead, | |
4348 | arrange that. Pass TARGET as target for expand_expr so that, | |
4349 | if EXP is another assignment, WANT_VALUE will be nonzero for it. | |
4350 | We know expand_expr will not use the target in that case. | |
4351 | Don't do this if TARGET is volatile because we are supposed | |
4352 | to write it and then read it. */ | |
4353 | { | |
2741767d | 4354 | temp = expand_expr (exp, target, GET_MODE (target), 0); |
bb11bacb | 4355 | if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode) |
afadb0ab | 4356 | { |
4357 | /* If TEMP is already in the desired TARGET, only copy it from | |
4358 | memory and don't store it there again. */ | |
4359 | if (temp == target | |
4360 | || (rtx_equal_p (temp, target) | |
4361 | && ! side_effects_p (temp) && ! side_effects_p (target))) | |
4362 | dont_store_target = 1; | |
4363 | temp = copy_to_reg (temp); | |
4364 | } | |
bb11bacb | 4365 | dont_return_target = 1; |
4366 | } | |
acfb31e5 | 4367 | else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target)) |
edc2a478 | 4368 | /* If this is a scalar in a register that is stored in a wider mode |
acfb31e5 | 4369 | than the declared mode, compute the result into its declared mode |
4370 | and then convert to the wider mode. Our value is the computed | |
4371 | expression. */ | |
4372 | { | |
d2422fc2 | 4373 | rtx inner_target = 0; |
4374 | ||
256749c3 | 4375 | /* If we don't want a value, we can do the conversion inside EXP, |
8d426db9 | 4376 | which will often result in some optimizations. Do the conversion |
4377 | in two steps: first change the signedness, if needed, then | |
74086fde | 4378 | the extend. But don't do this if the type of EXP is a subtype |
4379 | of something else since then the conversion might involve | |
4380 | more than just converting modes. */ | |
4381 | if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp)) | |
4382 | && TREE_TYPE (TREE_TYPE (exp)) == 0) | |
8d426db9 | 4383 | { |
4384 | if (TREE_UNSIGNED (TREE_TYPE (exp)) | |
4385 | != SUBREG_PROMOTED_UNSIGNED_P (target)) | |
4070745f | 4386 | exp = convert |
4387 | ((*lang_hooks.types.signed_or_unsigned_type) | |
4388 | (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp); | |
8d426db9 | 4389 | |
771d21fa | 4390 | exp = convert ((*lang_hooks.types.type_for_mode) |
4391 | (GET_MODE (SUBREG_REG (target)), | |
4392 | SUBREG_PROMOTED_UNSIGNED_P (target)), | |
8d426db9 | 4393 | exp); |
d2422fc2 | 4394 | |
4395 | inner_target = SUBREG_REG (target); | |
8d426db9 | 4396 | } |
fa56dc1d | 4397 | |
d2422fc2 | 4398 | temp = expand_expr (exp, inner_target, VOIDmode, 0); |
ceefa980 | 4399 | |
eb9d8626 | 4400 | /* If TEMP is a volatile MEM and we want a result value, make |
b8ba61e5 | 4401 | the access now so it gets done only once. Likewise if |
4402 | it contains TARGET. */ | |
4403 | if (GET_CODE (temp) == MEM && want_value | |
4404 | && (MEM_VOLATILE_P (temp) | |
4405 | || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0)))) | |
eb9d8626 | 4406 | temp = copy_to_reg (temp); |
4407 | ||
ceefa980 | 4408 | /* If TEMP is a VOIDmode constant, use convert_modes to make |
4409 | sure that we properly convert it. */ | |
4410 | if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode) | |
c3ba908e | 4411 | { |
4412 | temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)), | |
4413 | temp, SUBREG_PROMOTED_UNSIGNED_P (target)); | |
4414 | temp = convert_modes (GET_MODE (SUBREG_REG (target)), | |
4415 | GET_MODE (target), temp, | |
4416 | SUBREG_PROMOTED_UNSIGNED_P (target)); | |
4417 | } | |
ceefa980 | 4418 | |
acfb31e5 | 4419 | convert_move (SUBREG_REG (target), temp, |
4420 | SUBREG_PROMOTED_UNSIGNED_P (target)); | |
28ad8d33 | 4421 | |
4422 | /* If we promoted a constant, change the mode back down to match | |
4423 | target. Otherwise, the caller might get confused by a result whose | |
4424 | mode is larger than expected. */ | |
4425 | ||
f06d0bb1 | 4426 | if (want_value && GET_MODE (temp) != GET_MODE (target)) |
28ad8d33 | 4427 | { |
f06d0bb1 | 4428 | if (GET_MODE (temp) != VOIDmode) |
4429 | { | |
4430 | temp = gen_lowpart_SUBREG (GET_MODE (target), temp); | |
4431 | SUBREG_PROMOTED_VAR_P (temp) = 1; | |
ff385626 | 4432 | SUBREG_PROMOTED_UNSIGNED_SET (temp, |
bfd242e8 | 4433 | SUBREG_PROMOTED_UNSIGNED_P (target)); |
f06d0bb1 | 4434 | } |
4435 | else | |
4436 | temp = convert_modes (GET_MODE (target), | |
4437 | GET_MODE (SUBREG_REG (target)), | |
4438 | temp, SUBREG_PROMOTED_UNSIGNED_P (target)); | |
28ad8d33 | 4439 | } |
4440 | ||
9282409c | 4441 | return want_value ? temp : NULL_RTX; |
acfb31e5 | 4442 | } |
10f307d9 | 4443 | else |
4444 | { | |
4445 | temp = expand_expr (exp, target, GET_MODE (target), 0); | |
eb9d8626 | 4446 | /* Return TARGET if it's a specified hardware register. |
9282409c | 4447 | If TARGET is a volatile mem ref, either return TARGET |
4448 | or return a reg copied *from* TARGET; ANSI requires this. | |
4449 | ||
4450 | Otherwise, if TEMP is not TARGET, return TEMP | |
4451 | if it is constant (for efficiency), | |
4452 | or if we really want the correct value. */ | |
10f307d9 | 4453 | if (!(target && GET_CODE (target) == REG |
4454 | && REGNO (target) < FIRST_PSEUDO_REGISTER) | |
9282409c | 4455 | && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target)) |
46652181 | 4456 | && ! rtx_equal_p (temp, target) |
9282409c | 4457 | && (CONSTANT_P (temp) || want_value)) |
10f307d9 | 4458 | dont_return_target = 1; |
4459 | } | |
4460 | ||
ceefa980 | 4461 | /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not |
4462 | the same as that of TARGET, adjust the constant. This is needed, for | |
4463 | example, in case it is a CONST_DOUBLE and we want only a word-sized | |
4464 | value. */ | |
4465 | if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode | |
43769aba | 4466 | && TREE_CODE (exp) != ERROR_MARK |
ceefa980 | 4467 | && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))) |
4468 | temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)), | |
4469 | temp, TREE_UNSIGNED (TREE_TYPE (exp))); | |
4470 | ||
10f307d9 | 4471 | /* If value was not generated in the target, store it there. |
8a06f2d4 | 4472 | Convert the value to TARGET's type first if necessary. |
4473 | If TEMP and TARGET compare equal according to rtx_equal_p, but | |
14e396bb | 4474 | one or both of them are volatile memory refs, we have to distinguish |
4475 | two cases: | |
4476 | - expand_expr has used TARGET. In this case, we must not generate | |
4477 | another copy. This can be detected by TARGET being equal according | |
4478 | to == . | |
4479 | - expand_expr has not used TARGET - that means that the source just | |
4480 | happens to have the same RTX form. Since temp will have been created | |
4481 | by expand_expr, it will compare unequal according to == . | |
4482 | We must generate a copy in this case, to reach the correct number | |
4483 | of volatile memory references. */ | |
10f307d9 | 4484 | |
b1ba8c8b | 4485 | if ((! rtx_equal_p (temp, target) |
14e396bb | 4486 | || (temp != target && (side_effects_p (temp) |
4487 | || side_effects_p (target)))) | |
afadb0ab | 4488 | && TREE_CODE (exp) != ERROR_MARK |
6db2b7ab | 4489 | && ! dont_store_target |
4490 | /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET, | |
4491 | but TARGET is not valid memory reference, TEMP will differ | |
4492 | from TARGET although it is really the same location. */ | |
4493 | && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd' | |
d18d957a | 4494 | || target != DECL_RTL_IF_SET (exp)) |
4495 | /* If there's nothing to copy, don't bother. Don't call expr_size | |
4496 | unless necessary, because some front-ends (C++) expr_size-hook | |
4497 | aborts on objects that are not supposed to be bit-copied or | |
4498 | bit-initialized. */ | |
4499 | && expr_size (exp) != const0_rtx) | |
10f307d9 | 4500 | { |
4501 | target = protect_from_queue (target, 1); | |
4502 | if (GET_MODE (temp) != GET_MODE (target) | |
d0ddddf7 | 4503 | && GET_MODE (temp) != VOIDmode) |
10f307d9 | 4504 | { |
4505 | int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp)); | |
4506 | if (dont_return_target) | |
4507 | { | |
4508 | /* In this case, we will return TEMP, | |
4509 | so make sure it has the proper mode. | |
4510 | But don't forget to store the value into TARGET. */ | |
4511 | temp = convert_to_mode (GET_MODE (target), temp, unsignedp); | |
4512 | emit_move_insn (target, temp); | |
4513 | } | |
4514 | else | |
4515 | convert_move (target, temp, unsignedp); | |
4516 | } | |
4517 | ||
4518 | else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST) | |
4519 | { | |
18279aee | 4520 | /* Handle copying a string constant into an array. The string |
4521 | constant may be shorter than the array. So copy just the string's | |
4522 | actual length, and clear the rest. First get the size of the data | |
4523 | type of the string, which is actually the size of the target. */ | |
4524 | rtx size = expr_size (exp); | |
10f307d9 | 4525 | |
35f44ac1 | 4526 | if (GET_CODE (size) == CONST_INT |
4527 | && INTVAL (size) < TREE_STRING_LENGTH (exp)) | |
0378dbdc | 4528 | emit_block_move (target, temp, size, BLOCK_OP_NORMAL); |
35f44ac1 | 4529 | else |
10f307d9 | 4530 | { |
35f44ac1 | 4531 | /* Compute the size of the data to copy from the string. */ |
4532 | tree copy_size | |
3586684a | 4533 | = size_binop (MIN_EXPR, |
c869557a | 4534 | make_tree (sizetype, size), |
902de8ed | 4535 | size_int (TREE_STRING_LENGTH (exp))); |
b572011e | 4536 | rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX, |
4537 | VOIDmode, 0); | |
35f44ac1 | 4538 | rtx label = 0; |
4539 | ||
4540 | /* Copy that much. */ | |
18279aee | 4541 | copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0); |
0378dbdc | 4542 | emit_block_move (target, temp, copy_size_rtx, BLOCK_OP_NORMAL); |
35f44ac1 | 4543 | |
ed8d3eee | 4544 | /* Figure out how much is left in TARGET that we have to clear. |
4545 | Do all calculations in ptr_mode. */ | |
35f44ac1 | 4546 | if (GET_CODE (copy_size_rtx) == CONST_INT) |
4547 | { | |
18279aee | 4548 | size = plus_constant (size, -INTVAL (copy_size_rtx)); |
4549 | target = adjust_address (target, BLKmode, | |
4550 | INTVAL (copy_size_rtx)); | |
35f44ac1 | 4551 | } |
4552 | else | |
4553 | { | |
4a836698 | 4554 | size = expand_binop (TYPE_MODE (sizetype), sub_optab, size, |
b572011e | 4555 | copy_size_rtx, NULL_RTX, 0, |
4556 | OPTAB_LIB_WIDEN); | |
35f44ac1 | 4557 | |
18279aee | 4558 | #ifdef POINTERS_EXTEND_UNSIGNED |
4559 | if (GET_MODE (copy_size_rtx) != Pmode) | |
4560 | copy_size_rtx = convert_memory_address (Pmode, | |
4561 | copy_size_rtx); | |
4562 | #endif | |
4563 | ||
4564 | target = offset_address (target, copy_size_rtx, | |
4565 | highest_pow2_factor (copy_size)); | |
35f44ac1 | 4566 | label = gen_label_rtx (); |
5a894bc6 | 4567 | emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX, |
2b96c5f6 | 4568 | GET_MODE (size), 0, label); |
35f44ac1 | 4569 | } |
4570 | ||
4571 | if (size != const0_rtx) | |
8a06f2d4 | 4572 | clear_storage (target, size); |
bdf60b71 | 4573 | |
35f44ac1 | 4574 | if (label) |
4575 | emit_label (label); | |
10f307d9 | 4576 | } |
4577 | } | |
ce739127 | 4578 | /* Handle calls that return values in multiple non-contiguous locations. |
4579 | The Irix 6 ABI has examples of this. */ | |
4580 | else if (GET_CODE (target) == PARALLEL) | |
2c269e73 | 4581 | emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp))); |
10f307d9 | 4582 | else if (GET_MODE (temp) == BLKmode) |
0378dbdc | 4583 | emit_block_move (target, temp, expr_size (exp), BLOCK_OP_NORMAL); |
10f307d9 | 4584 | else |
4585 | emit_move_insn (target, temp); | |
4586 | } | |
9282409c | 4587 | |
eb9d8626 | 4588 | /* If we don't want a value, return NULL_RTX. */ |
4589 | if (! want_value) | |
4590 | return NULL_RTX; | |
4591 | ||
4592 | /* If we are supposed to return TEMP, do so as long as it isn't a MEM. | |
4593 | ??? The latter test doesn't seem to make sense. */ | |
4594 | else if (dont_return_target && GET_CODE (temp) != MEM) | |
10f307d9 | 4595 | return temp; |
eb9d8626 | 4596 | |
4597 | /* Return TARGET itself if it is a hard register. */ | |
4598 | else if (want_value && GET_MODE (target) != BLKmode | |
4599 | && ! (GET_CODE (target) == REG | |
4600 | && REGNO (target) < FIRST_PSEUDO_REGISTER)) | |
9282409c | 4601 | return copy_to_reg (target); |
fa56dc1d | 4602 | |
eb9d8626 | 4603 | else |
9282409c | 4604 | return target; |
10f307d9 | 4605 | } |
4606 | \f | |
dbd14dc5 | 4607 | /* Return 1 if EXP just contains zeros. */ |
4608 | ||
4609 | static int | |
4610 | is_zeros_p (exp) | |
4611 | tree exp; | |
4612 | { | |
4613 | tree elt; | |
4614 | ||
4615 | switch (TREE_CODE (exp)) | |
4616 | { | |
4617 | case CONVERT_EXPR: | |
4618 | case NOP_EXPR: | |
4619 | case NON_LVALUE_EXPR: | |
f96c43fb | 4620 | case VIEW_CONVERT_EXPR: |
dbd14dc5 | 4621 | return is_zeros_p (TREE_OPERAND (exp, 0)); |
4622 | ||
4623 | case INTEGER_CST: | |
a0c2c45b | 4624 | return integer_zerop (exp); |
dbd14dc5 | 4625 | |
4626 | case COMPLEX_CST: | |
4627 | return | |
4628 | is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp)); | |
4629 | ||
4630 | case REAL_CST: | |
62aa7862 | 4631 | return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0); |
dbd14dc5 | 4632 | |
886cfd4f | 4633 | case VECTOR_CST: |
4634 | for (elt = TREE_VECTOR_CST_ELTS (exp); elt; | |
4635 | elt = TREE_CHAIN (elt)) | |
4636 | if (!is_zeros_p (TREE_VALUE (elt))) | |
4637 | return 0; | |
4638 | ||
4639 | return 1; | |
4640 | ||
dbd14dc5 | 4641 | case CONSTRUCTOR: |
e7ef3ff2 | 4642 | if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE) |
4643 | return CONSTRUCTOR_ELTS (exp) == NULL_TREE; | |
dbd14dc5 | 4644 | for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt)) |
4645 | if (! is_zeros_p (TREE_VALUE (elt))) | |
4646 | return 0; | |
4647 | ||
4648 | return 1; | |
fa56dc1d | 4649 | |
0dbd1c74 | 4650 | default: |
4651 | return 0; | |
dbd14dc5 | 4652 | } |
dbd14dc5 | 4653 | } |
4654 | ||
4655 | /* Return 1 if EXP contains mostly (3/4) zeros. */ | |
4656 | ||
4657 | static int | |
4658 | mostly_zeros_p (exp) | |
4659 | tree exp; | |
4660 | { | |
dbd14dc5 | 4661 | if (TREE_CODE (exp) == CONSTRUCTOR) |
4662 | { | |
e7ef3ff2 | 4663 | int elts = 0, zeros = 0; |
4664 | tree elt = CONSTRUCTOR_ELTS (exp); | |
4665 | if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE) | |
4666 | { | |
4667 | /* If there are no ranges of true bits, it is all zero. */ | |
4668 | return elt == NULL_TREE; | |
4669 | } | |
4670 | for (; elt; elt = TREE_CHAIN (elt)) | |
4671 | { | |
4672 | /* We do not handle the case where the index is a RANGE_EXPR, | |
4673 | so the statistic will be somewhat inaccurate. | |
4674 | We do make a more accurate count in store_constructor itself, | |
4675 | so since this function is only used for nested array elements, | |
a92771b8 | 4676 | this should be close enough. */ |
e7ef3ff2 | 4677 | if (mostly_zeros_p (TREE_VALUE (elt))) |
4678 | zeros++; | |
4679 | elts++; | |
4680 | } | |
dbd14dc5 | 4681 | |
4682 | return 4 * zeros >= 3 * elts; | |
4683 | } | |
4684 | ||
4685 | return is_zeros_p (exp); | |
4686 | } | |
4687 | \f | |
e7ef3ff2 | 4688 | /* Helper function for store_constructor. |
4689 | TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field. | |
4690 | TYPE is the type of the CONSTRUCTOR, not the element type. | |
2c269e73 | 4691 | CLEARED is as for store_constructor. |
1179a68b | 4692 | ALIAS_SET is the alias set to use for any stores. |
a5b7fc8b | 4693 | |
4694 | This provides a recursive shortcut back to store_constructor when it isn't | |
4695 | necessary to go through store_field. This is so that we can pass through | |
4696 | the cleared field to let store_constructor know that we may not have to | |
4697 | clear a substructure if the outer structure has already been cleared. */ | |
e7ef3ff2 | 4698 | |
4699 | static void | |
2c269e73 | 4700 | store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared, |
4701 | alias_set) | |
e7ef3ff2 | 4702 | rtx target; |
02e7a332 | 4703 | unsigned HOST_WIDE_INT bitsize; |
4704 | HOST_WIDE_INT bitpos; | |
e7ef3ff2 | 4705 | enum machine_mode mode; |
4706 | tree exp, type; | |
4707 | int cleared; | |
1179a68b | 4708 | int alias_set; |
e7ef3ff2 | 4709 | { |
4710 | if (TREE_CODE (exp) == CONSTRUCTOR | |
a5b7fc8b | 4711 | && bitpos % BITS_PER_UNIT == 0 |
6ef828f9 | 4712 | /* If we have a nonzero bitpos for a register target, then we just |
a5b7fc8b | 4713 | let store_field do the bitfield handling. This is unlikely to |
4714 | generate unnecessary clear instructions anyways. */ | |
4715 | && (bitpos == 0 || GET_CODE (target) == MEM)) | |
e7ef3ff2 | 4716 | { |
459b8611 | 4717 | if (GET_CODE (target) == MEM) |
4718 | target | |
4719 | = adjust_address (target, | |
4720 | GET_MODE (target) == BLKmode | |
4721 | || 0 != (bitpos | |
4722 | % GET_MODE_ALIGNMENT (GET_MODE (target))) | |
4723 | ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT); | |
1179a68b | 4724 | |
5b90bb08 | 4725 | |
2c269e73 | 4726 | /* Update the alias set, if required. */ |
5cc193e7 | 4727 | if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target) |
4728 | && MEM_ALIAS_SET (target) != 0) | |
86ce88aa | 4729 | { |
4730 | target = copy_rtx (target); | |
4731 | set_mem_alias_set (target, alias_set); | |
4732 | } | |
5b90bb08 | 4733 | |
2c269e73 | 4734 | store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT); |
e7ef3ff2 | 4735 | } |
4736 | else | |
2b96c5f6 | 4737 | store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type, |
4738 | alias_set); | |
e7ef3ff2 | 4739 | } |
4740 | ||
10f307d9 | 4741 | /* Store the value of constructor EXP into the rtx TARGET. |
2c269e73 | 4742 | TARGET is either a REG or a MEM; we know it cannot conflict, since |
4743 | safe_from_p has been called. | |
a316ea6a | 4744 | CLEARED is true if TARGET is known to have been zero'd. |
4745 | SIZE is the number of bytes of TARGET we are allowed to modify: this | |
4746 | may not be the same as the size of EXP if we are assigning to a field | |
4747 | which has been packed to exclude padding bits. */ | |
10f307d9 | 4748 | |
4749 | static void | |
2c269e73 | 4750 | store_constructor (exp, target, cleared, size) |
10f307d9 | 4751 | tree exp; |
4752 | rtx target; | |
e7ef3ff2 | 4753 | int cleared; |
3a6656ad | 4754 | HOST_WIDE_INT size; |
10f307d9 | 4755 | { |
2ef1e405 | 4756 | tree type = TREE_TYPE (exp); |
0bf16c4a | 4757 | #ifdef WORD_REGISTER_OPERATIONS |
3a6656ad | 4758 | HOST_WIDE_INT exp_size = int_size_in_bytes (type); |
0bf16c4a | 4759 | #endif |
2ef1e405 | 4760 | |
34f17b00 | 4761 | if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE |
4762 | || TREE_CODE (type) == QUAL_UNION_TYPE) | |
10f307d9 | 4763 | { |
19cb6b50 | 4764 | tree elt; |
10f307d9 | 4765 | |
2c269e73 | 4766 | /* We either clear the aggregate or indicate the value is dead. */ |
c4b2c771 | 4767 | if ((TREE_CODE (type) == UNION_TYPE |
4768 | || TREE_CODE (type) == QUAL_UNION_TYPE) | |
2c269e73 | 4769 | && ! cleared |
4770 | && ! CONSTRUCTOR_ELTS (exp)) | |
4771 | /* If the constructor is empty, clear the union. */ | |
226c8875 | 4772 | { |
2c269e73 | 4773 | clear_storage (target, expr_size (exp)); |
4774 | cleared = 1; | |
226c8875 | 4775 | } |
2ef1e405 | 4776 | |
4777 | /* If we are building a static constructor into a register, | |
4778 | set the initial value as zero so we can fold the value into | |
dfbad8f1 | 4779 | a constant. But if more than one register is involved, |
4780 | this probably loses. */ | |
2c269e73 | 4781 | else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp) |
dfbad8f1 | 4782 | && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD) |
dbd14dc5 | 4783 | { |
2c269e73 | 4784 | emit_move_insn (target, CONST0_RTX (GET_MODE (target))); |
dbd14dc5 | 4785 | cleared = 1; |
4786 | } | |
4787 | ||
4788 | /* If the constructor has fewer fields than the structure | |
4789 | or if we are initializing the structure to mostly zeros, | |
80ac742d | 4790 | clear the whole structure first. Don't do this if TARGET is a |
20c377c2 | 4791 | register whose mode size isn't equal to SIZE since clear_storage |
4792 | can't handle this case. */ | |
2c269e73 | 4793 | else if (! cleared && size > 0 |
6c84d8f3 | 4794 | && ((list_length (CONSTRUCTOR_ELTS (exp)) |
efd3939c | 4795 | != fields_length (type)) |
20c377c2 | 4796 | || mostly_zeros_p (exp)) |
4797 | && (GET_CODE (target) != REG | |
2c269e73 | 4798 | || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) |
4799 | == size))) | |
dbd14dc5 | 4800 | { |
2c269e73 | 4801 | clear_storage (target, GEN_INT (size)); |
dbd14dc5 | 4802 | cleared = 1; |
4803 | } | |
2c269e73 | 4804 | |
4805 | if (! cleared) | |
941522d6 | 4806 | emit_insn (gen_rtx_CLOBBER (VOIDmode, target)); |
10f307d9 | 4807 | |
4808 | /* Store each element of the constructor into | |
4809 | the corresponding field of TARGET. */ | |
4810 | ||
4811 | for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt)) | |
4812 | { | |
19cb6b50 | 4813 | tree field = TREE_PURPOSE (elt); |
e6860d27 | 4814 | tree value = TREE_VALUE (elt); |
19cb6b50 | 4815 | enum machine_mode mode; |
02e7a332 | 4816 | HOST_WIDE_INT bitsize; |
4817 | HOST_WIDE_INT bitpos = 0; | |
02e7a332 | 4818 | tree offset; |
c869557a | 4819 | rtx to_rtx = target; |
10f307d9 | 4820 | |
52a618b4 | 4821 | /* Just ignore missing fields. |
4822 | We cleared the whole structure, above, | |
4823 | if any fields are missing. */ | |
4824 | if (field == 0) | |
4825 | continue; | |
4826 | ||
297d4daf | 4827 | if (cleared && is_zeros_p (value)) |
e7ef3ff2 | 4828 | continue; |
dbd14dc5 | 4829 | |
02e7a332 | 4830 | if (host_integerp (DECL_SIZE (field), 1)) |
4831 | bitsize = tree_low_cst (DECL_SIZE (field), 1); | |
155b05dc | 4832 | else |
4833 | bitsize = -1; | |
4834 | ||
10f307d9 | 4835 | mode = DECL_MODE (field); |
4836 | if (DECL_BIT_FIELD (field)) | |
4837 | mode = VOIDmode; | |
4838 | ||
02e7a332 | 4839 | offset = DECL_FIELD_OFFSET (field); |
4840 | if (host_integerp (offset, 0) | |
4841 | && host_integerp (bit_position (field), 0)) | |
4842 | { | |
4843 | bitpos = int_bit_position (field); | |
4844 | offset = 0; | |
4845 | } | |
c869557a | 4846 | else |
02e7a332 | 4847 | bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0); |
fa56dc1d | 4848 | |
c869557a | 4849 | if (offset) |
4850 | { | |
4851 | rtx offset_rtx; | |
4852 | ||
4853 | if (contains_placeholder_p (offset)) | |
768e985a | 4854 | offset = build (WITH_RECORD_EXPR, sizetype, |
ad87de1e | 4855 | offset, make_tree (TREE_TYPE (exp), target)); |
10f307d9 | 4856 | |
c869557a | 4857 | offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0); |
4858 | if (GET_CODE (to_rtx) != MEM) | |
4859 | abort (); | |
4860 | ||
33ef2f52 | 4861 | #ifdef POINTERS_EXTEND_UNSIGNED |
479e4d5e | 4862 | if (GET_MODE (offset_rtx) != Pmode) |
4863 | offset_rtx = convert_memory_address (Pmode, offset_rtx); | |
4a836698 | 4864 | #else |
4865 | if (GET_MODE (offset_rtx) != ptr_mode) | |
4866 | offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0); | |
33ef2f52 | 4867 | #endif |
33ef2f52 | 4868 | |
fcdc122e | 4869 | to_rtx = offset_address (to_rtx, offset_rtx, |
4870 | highest_pow2_factor (offset)); | |
c869557a | 4871 | } |
7014838c | 4872 | |
4ba58fd4 | 4873 | if (TREE_READONLY (field)) |
4874 | { | |
f18e8e6c | 4875 | if (GET_CODE (to_rtx) == MEM) |
46652181 | 4876 | to_rtx = copy_rtx (to_rtx); |
4877 | ||
4ba58fd4 | 4878 | RTX_UNCHANGING_P (to_rtx) = 1; |
4879 | } | |
4880 | ||
e6860d27 | 4881 | #ifdef WORD_REGISTER_OPERATIONS |
4882 | /* If this initializes a field that is smaller than a word, at the | |
4883 | start of a word, try to widen it to a full word. | |
4884 | This special case allows us to output C++ member function | |
4885 | initializations in a form that the optimizers can understand. */ | |
02e7a332 | 4886 | if (GET_CODE (target) == REG |
e6860d27 | 4887 | && bitsize < BITS_PER_WORD |
4888 | && bitpos % BITS_PER_WORD == 0 | |
4889 | && GET_MODE_CLASS (mode) == MODE_INT | |
4890 | && TREE_CODE (value) == INTEGER_CST | |
3a6656ad | 4891 | && exp_size >= 0 |
4892 | && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT) | |
e6860d27 | 4893 | { |
4894 | tree type = TREE_TYPE (value); | |
2c269e73 | 4895 | |
e6860d27 | 4896 | if (TYPE_PRECISION (type) < BITS_PER_WORD) |
4897 | { | |
771d21fa | 4898 | type = (*lang_hooks.types.type_for_size) |
4899 | (BITS_PER_WORD, TREE_UNSIGNED (type)); | |
e6860d27 | 4900 | value = convert (type, value); |
4901 | } | |
2c269e73 | 4902 | |
e6860d27 | 4903 | if (BYTES_BIG_ENDIAN) |
4904 | value | |
4905 | = fold (build (LSHIFT_EXPR, type, value, | |
4906 | build_int_2 (BITS_PER_WORD - bitsize, 0))); | |
4907 | bitsize = BITS_PER_WORD; | |
4908 | mode = word_mode; | |
4909 | } | |
4910 | #endif | |
5cc193e7 | 4911 | |
4912 | if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx) | |
4913 | && DECL_NONADDRESSABLE_P (field)) | |
4914 | { | |
4915 | to_rtx = copy_rtx (to_rtx); | |
4916 | MEM_KEEP_ALIAS_SET_P (to_rtx) = 1; | |
4917 | } | |
4918 | ||
7014838c | 4919 | store_constructor_field (to_rtx, bitsize, bitpos, mode, |
297d4daf | 4920 | value, type, cleared, |
5cc193e7 | 4921 | get_alias_set (TREE_TYPE (field))); |
10f307d9 | 4922 | } |
4923 | } | |
0ad236c2 | 4924 | else if (TREE_CODE (type) == ARRAY_TYPE |
4925 | || TREE_CODE (type) == VECTOR_TYPE) | |
10f307d9 | 4926 | { |
19cb6b50 | 4927 | tree elt; |
4928 | int i; | |
e7ef3ff2 | 4929 | int need_to_clear; |
2ef1e405 | 4930 | tree domain = TYPE_DOMAIN (type); |
2ef1e405 | 4931 | tree elttype = TREE_TYPE (type); |
0ad236c2 | 4932 | int const_bounds_p; |
97b330ca | 4933 | HOST_WIDE_INT minelt = 0; |
4934 | HOST_WIDE_INT maxelt = 0; | |
84554bf9 | 4935 | |
0ad236c2 | 4936 | /* Vectors are like arrays, but the domain is stored via an array |
4937 | type indirectly. */ | |
4938 | if (TREE_CODE (type) == VECTOR_TYPE) | |
4939 | { | |
4940 | /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses | |
4941 | the same field as TYPE_DOMAIN, we are not guaranteed that | |
4942 | it always will. */ | |
4943 | domain = TYPE_DEBUG_REPRESENTATION_TYPE (type); | |
4944 | domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain))); | |
4945 | } | |
4946 | ||
4947 | const_bounds_p = (TYPE_MIN_VALUE (domain) | |
4948 | && TYPE_MAX_VALUE (domain) | |
4949 | && host_integerp (TYPE_MIN_VALUE (domain), 0) | |
4950 | && host_integerp (TYPE_MAX_VALUE (domain), 0)); | |
4951 | ||
84554bf9 | 4952 | /* If we have constant bounds for the range of the type, get them. */ |
4953 | if (const_bounds_p) | |
4954 | { | |
4955 | minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0); | |
4956 | maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0); | |
4957 | } | |
10f307d9 | 4958 | |
e7ef3ff2 | 4959 | /* If the constructor has fewer elements than the array, |
3398e91d | 4960 | clear the whole array first. Similarly if this is |
e7ef3ff2 | 4961 | static constructor of a non-BLKmode object. */ |
4962 | if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp))) | |
4963 | need_to_clear = 1; | |
4964 | else | |
4965 | { | |
4966 | HOST_WIDE_INT count = 0, zero_count = 0; | |
84554bf9 | 4967 | need_to_clear = ! const_bounds_p; |
4968 | ||
e7ef3ff2 | 4969 | /* This loop is a more accurate version of the loop in |
4970 | mostly_zeros_p (it handles RANGE_EXPR in an index). | |
4971 | It is also needed to check for missing elements. */ | |
4972 | for (elt = CONSTRUCTOR_ELTS (exp); | |
84554bf9 | 4973 | elt != NULL_TREE && ! need_to_clear; |
a6b6a56f | 4974 | elt = TREE_CHAIN (elt)) |
e7ef3ff2 | 4975 | { |
4976 | tree index = TREE_PURPOSE (elt); | |
4977 | HOST_WIDE_INT this_node_count; | |
325d1c45 | 4978 | |
e7ef3ff2 | 4979 | if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR) |
4980 | { | |
4981 | tree lo_index = TREE_OPERAND (index, 0); | |
4982 | tree hi_index = TREE_OPERAND (index, 1); | |
a0c2c45b | 4983 | |
325d1c45 | 4984 | if (! host_integerp (lo_index, 1) |
4985 | || ! host_integerp (hi_index, 1)) | |
e7ef3ff2 | 4986 | { |
4987 | need_to_clear = 1; | |
4988 | break; | |
4989 | } | |
325d1c45 | 4990 | |
4991 | this_node_count = (tree_low_cst (hi_index, 1) | |
4992 | - tree_low_cst (lo_index, 1) + 1); | |
e7ef3ff2 | 4993 | } |
4994 | else | |
4995 | this_node_count = 1; | |
84554bf9 | 4996 | |
e7ef3ff2 | 4997 | count += this_node_count; |
4998 | if (mostly_zeros_p (TREE_VALUE (elt))) | |
4999 | zero_count += this_node_count; | |
5000 | } | |
84554bf9 | 5001 | |
028c2cf2 | 5002 | /* Clear the entire array first if there are any missing elements, |
a92771b8 | 5003 | or if the incidence of zero elements is >= 75%. */ |
84554bf9 | 5004 | if (! need_to_clear |
5005 | && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count)) | |
e7ef3ff2 | 5006 | need_to_clear = 1; |
5007 | } | |
84554bf9 | 5008 | |
6c84d8f3 | 5009 | if (need_to_clear && size > 0) |
dbd14dc5 | 5010 | { |
5011 | if (! cleared) | |
cbbc8e6a | 5012 | { |
5013 | if (REG_P (target)) | |
5014 | emit_move_insn (target, CONST0_RTX (GET_MODE (target))); | |
5015 | else | |
5016 | clear_storage (target, GEN_INT (size)); | |
5017 | } | |
dbd14dc5 | 5018 | cleared = 1; |
5019 | } | |
c6e2ff20 | 5020 | else if (REG_P (target)) |
10f307d9 | 5021 | /* Inform later passes that the old value is dead. */ |
941522d6 | 5022 | emit_insn (gen_rtx_CLOBBER (VOIDmode, target)); |
10f307d9 | 5023 | |
5024 | /* Store each element of the constructor into | |
5025 | the corresponding element of TARGET, determined | |
5026 | by counting the elements. */ | |
5027 | for (elt = CONSTRUCTOR_ELTS (exp), i = 0; | |
5028 | elt; | |
5029 | elt = TREE_CHAIN (elt), i++) | |
5030 | { | |
19cb6b50 | 5031 | enum machine_mode mode; |
325d1c45 | 5032 | HOST_WIDE_INT bitsize; |
5033 | HOST_WIDE_INT bitpos; | |
10f307d9 | 5034 | int unsignedp; |
e7ef3ff2 | 5035 | tree value = TREE_VALUE (elt); |
c4492f76 | 5036 | tree index = TREE_PURPOSE (elt); |
5037 | rtx xtarget = target; | |
10f307d9 | 5038 | |
e7ef3ff2 | 5039 | if (cleared && is_zeros_p (value)) |
5040 | continue; | |
dbd14dc5 | 5041 | |
10f307d9 | 5042 | unsignedp = TREE_UNSIGNED (elttype); |
155b05dc | 5043 | mode = TYPE_MODE (elttype); |
5044 | if (mode == BLKmode) | |
325d1c45 | 5045 | bitsize = (host_integerp (TYPE_SIZE (elttype), 1) |
5046 | ? tree_low_cst (TYPE_SIZE (elttype), 1) | |
5047 | : -1); | |
155b05dc | 5048 | else |
5049 | bitsize = GET_MODE_BITSIZE (mode); | |
10f307d9 | 5050 | |
e7ef3ff2 | 5051 | if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR) |
5052 | { | |
5053 | tree lo_index = TREE_OPERAND (index, 0); | |
5054 | tree hi_index = TREE_OPERAND (index, 1); | |
805e22b2 | 5055 | rtx index_r, pos_rtx, loop_end; |
e7ef3ff2 | 5056 | struct nesting *loop; |
997a08e0 | 5057 | HOST_WIDE_INT lo, hi, count; |
5058 | tree position; | |
e7ef3ff2 | 5059 | |
a92771b8 | 5060 | /* If the range is constant and "small", unroll the loop. */ |
84554bf9 | 5061 | if (const_bounds_p |
5062 | && host_integerp (lo_index, 0) | |
325d1c45 | 5063 | && host_integerp (hi_index, 0) |
5064 | && (lo = tree_low_cst (lo_index, 0), | |
5065 | hi = tree_low_cst (hi_index, 0), | |
997a08e0 | 5066 | count = hi - lo + 1, |
5067 | (GET_CODE (target) != MEM | |
5068 | || count <= 2 | |
325d1c45 | 5069 | || (host_integerp (TYPE_SIZE (elttype), 1) |
5070 | && (tree_low_cst (TYPE_SIZE (elttype), 1) * count | |
5071 | <= 40 * 8))))) | |
e7ef3ff2 | 5072 | { |
997a08e0 | 5073 | lo -= minelt; hi -= minelt; |
5074 | for (; lo <= hi; lo++) | |
e7ef3ff2 | 5075 | { |
325d1c45 | 5076 | bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0); |
5cc193e7 | 5077 | |
5078 | if (GET_CODE (target) == MEM | |
5079 | && !MEM_KEEP_ALIAS_SET_P (target) | |
0ad236c2 | 5080 | && TREE_CODE (type) == ARRAY_TYPE |
5cc193e7 | 5081 | && TYPE_NONALIASED_COMPONENT (type)) |
5082 | { | |
5083 | target = copy_rtx (target); | |
5084 | MEM_KEEP_ALIAS_SET_P (target) = 1; | |
5085 | } | |
5086 | ||
1179a68b | 5087 | store_constructor_field |
2c269e73 | 5088 | (target, bitsize, bitpos, mode, value, type, cleared, |
5089 | get_alias_set (elttype)); | |
e7ef3ff2 | 5090 | } |
5091 | } | |
5092 | else | |
5093 | { | |
805e22b2 | 5094 | expand_expr (hi_index, NULL_RTX, VOIDmode, 0); |
e7ef3ff2 | 5095 | loop_end = gen_label_rtx (); |
5096 | ||
5097 | unsignedp = TREE_UNSIGNED (domain); | |
5098 | ||
5099 | index = build_decl (VAR_DECL, NULL_TREE, domain); | |
5100 | ||
0e8e37b2 | 5101 | index_r |
e7ef3ff2 | 5102 | = gen_reg_rtx (promote_mode (domain, DECL_MODE (index), |
5103 | &unsignedp, 0)); | |
0e8e37b2 | 5104 | SET_DECL_RTL (index, index_r); |
e7ef3ff2 | 5105 | if (TREE_CODE (value) == SAVE_EXPR |
5106 | && SAVE_EXPR_RTL (value) == 0) | |
5107 | { | |
a92771b8 | 5108 | /* Make sure value gets expanded once before the |
5109 | loop. */ | |
e7ef3ff2 | 5110 | expand_expr (value, const0_rtx, VOIDmode, 0); |
5111 | emit_queue (); | |
5112 | } | |
5113 | store_expr (lo_index, index_r, 0); | |
5114 | loop = expand_start_loop (0); | |
5115 | ||
a92771b8 | 5116 | /* Assign value to element index. */ |
902de8ed | 5117 | position |
5118 | = convert (ssizetype, | |
5119 | fold (build (MINUS_EXPR, TREE_TYPE (index), | |
5120 | index, TYPE_MIN_VALUE (domain)))); | |
5121 | position = size_binop (MULT_EXPR, position, | |
5122 | convert (ssizetype, | |
5123 | TYPE_SIZE_UNIT (elttype))); | |
5124 | ||
e7ef3ff2 | 5125 | pos_rtx = expand_expr (position, 0, VOIDmode, 0); |
fcdc122e | 5126 | xtarget = offset_address (target, pos_rtx, |
5127 | highest_pow2_factor (position)); | |
5128 | xtarget = adjust_address (xtarget, mode, 0); | |
e7ef3ff2 | 5129 | if (TREE_CODE (value) == CONSTRUCTOR) |
2c269e73 | 5130 | store_constructor (value, xtarget, cleared, |
a316ea6a | 5131 | bitsize / BITS_PER_UNIT); |
e7ef3ff2 | 5132 | else |
5133 | store_expr (value, xtarget, 0); | |
5134 | ||
5135 | expand_exit_loop_if_false (loop, | |
5136 | build (LT_EXPR, integer_type_node, | |
5137 | index, hi_index)); | |
5138 | ||
5139 | expand_increment (build (PREINCREMENT_EXPR, | |
5140 | TREE_TYPE (index), | |
37e76d7d | 5141 | index, integer_one_node), 0, 0); |
e7ef3ff2 | 5142 | expand_end_loop (); |
5143 | emit_label (loop_end); | |
e7ef3ff2 | 5144 | } |
5145 | } | |
325d1c45 | 5146 | else if ((index != 0 && ! host_integerp (index, 0)) |
5147 | || ! host_integerp (TYPE_SIZE (elttype), 1)) | |
c4492f76 | 5148 | { |
c4492f76 | 5149 | tree position; |
5150 | ||
845a6957 | 5151 | if (index == 0) |
902de8ed | 5152 | index = ssize_int (1); |
845a6957 | 5153 | |
e7ef3ff2 | 5154 | if (minelt) |
902de8ed | 5155 | index = convert (ssizetype, |
5156 | fold (build (MINUS_EXPR, index, | |
5157 | TYPE_MIN_VALUE (domain)))); | |
325d1c45 | 5158 | |
902de8ed | 5159 | position = size_binop (MULT_EXPR, index, |
5160 | convert (ssizetype, | |
5161 | TYPE_SIZE_UNIT (elttype))); | |
fcdc122e | 5162 | xtarget = offset_address (target, |
5163 | expand_expr (position, 0, VOIDmode, 0), | |
5164 | highest_pow2_factor (position)); | |
5165 | xtarget = adjust_address (xtarget, mode, 0); | |
e7ef3ff2 | 5166 | store_expr (value, xtarget, 0); |
c4492f76 | 5167 | } |
5168 | else | |
5169 | { | |
5170 | if (index != 0) | |
325d1c45 | 5171 | bitpos = ((tree_low_cst (index, 0) - minelt) |
5172 | * tree_low_cst (TYPE_SIZE (elttype), 1)); | |
c4492f76 | 5173 | else |
325d1c45 | 5174 | bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1)); |
5175 | ||
5cc193e7 | 5176 | if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target) |
0ad236c2 | 5177 | && TREE_CODE (type) == ARRAY_TYPE |
5cc193e7 | 5178 | && TYPE_NONALIASED_COMPONENT (type)) |
5179 | { | |
5180 | target = copy_rtx (target); | |
5181 | MEM_KEEP_ALIAS_SET_P (target) = 1; | |
5182 | } | |
5183 | ||
7014838c | 5184 | store_constructor_field (target, bitsize, bitpos, mode, value, |
2c269e73 | 5185 | type, cleared, get_alias_set (elttype)); |
1179a68b | 5186 | |
c4492f76 | 5187 | } |
10f307d9 | 5188 | } |
5189 | } | |
325d1c45 | 5190 | |
fa56dc1d | 5191 | /* Set constructor assignments. */ |
97b2af42 | 5192 | else if (TREE_CODE (type) == SET_TYPE) |
5193 | { | |
e7ef3ff2 | 5194 | tree elt = CONSTRUCTOR_ELTS (exp); |
325d1c45 | 5195 | unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits; |
97b2af42 | 5196 | tree domain = TYPE_DOMAIN (type); |
5197 | tree domain_min, domain_max, bitlength; | |
5198 | ||
c3418f42 | 5199 | /* The default implementation strategy is to extract the constant |
97b2af42 | 5200 | parts of the constructor, use that to initialize the target, |
5201 | and then "or" in whatever non-constant ranges we need in addition. | |
5202 | ||
5203 | If a large set is all zero or all ones, it is | |
5204 | probably better to set it using memset (if available) or bzero. | |
5205 | Also, if a large set has just a single range, it may also be | |
5206 | better to first clear all the first clear the set (using | |
a92771b8 | 5207 | bzero/memset), and set the bits we want. */ |
fa56dc1d | 5208 | |
a92771b8 | 5209 | /* Check for all zeros. */ |
6c84d8f3 | 5210 | if (elt == NULL_TREE && size > 0) |
97b2af42 | 5211 | { |
e7ef3ff2 | 5212 | if (!cleared) |
2a631e19 | 5213 | clear_storage (target, GEN_INT (size)); |
97b2af42 | 5214 | return; |
5215 | } | |
5216 | ||
97b2af42 | 5217 | domain_min = convert (sizetype, TYPE_MIN_VALUE (domain)); |
5218 | domain_max = convert (sizetype, TYPE_MAX_VALUE (domain)); | |
5219 | bitlength = size_binop (PLUS_EXPR, | |
902de8ed | 5220 | size_diffop (domain_max, domain_min), |
5221 | ssize_int (1)); | |
97b2af42 | 5222 | |
325d1c45 | 5223 | nbits = tree_low_cst (bitlength, 1); |
e7ef3ff2 | 5224 | |
5225 | /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that | |
5226 | are "complicated" (more than one range), initialize (the | |
fa56dc1d | 5227 | constant parts) by copying from a constant. */ |
e7ef3ff2 | 5228 | if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD |
5229 | || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE)) | |
97b2af42 | 5230 | { |
325d1c45 | 5231 | unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp)); |
b599eea5 | 5232 | enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1); |
a92771b8 | 5233 | char *bit_buffer = (char *) alloca (nbits); |
b599eea5 | 5234 | HOST_WIDE_INT word = 0; |
325d1c45 | 5235 | unsigned int bit_pos = 0; |
5236 | unsigned int ibit = 0; | |
5237 | unsigned int offset = 0; /* In bytes from beginning of set. */ | |
5238 | ||
e7ef3ff2 | 5239 | elt = get_set_constructor_bits (exp, bit_buffer, nbits); |
b599eea5 | 5240 | for (;;) |
97b2af42 | 5241 | { |
b599eea5 | 5242 | if (bit_buffer[ibit]) |
5243 | { | |
117a9033 | 5244 | if (BYTES_BIG_ENDIAN) |
b599eea5 | 5245 | word |= (1 << (set_word_size - 1 - bit_pos)); |
5246 | else | |
5247 | word |= 1 << bit_pos; | |
5248 | } | |
325d1c45 | 5249 | |
b599eea5 | 5250 | bit_pos++; ibit++; |
5251 | if (bit_pos >= set_word_size || ibit == nbits) | |
97b2af42 | 5252 | { |
e7ef3ff2 | 5253 | if (word != 0 || ! cleared) |
5254 | { | |
5255 | rtx datum = GEN_INT (word); | |
5256 | rtx to_rtx; | |
325d1c45 | 5257 | |
a92771b8 | 5258 | /* The assumption here is that it is safe to use |
5259 | XEXP if the set is multi-word, but not if | |
5260 | it's single-word. */ | |
e7ef3ff2 | 5261 | if (GET_CODE (target) == MEM) |
e513d163 | 5262 | to_rtx = adjust_address (target, mode, offset); |
fa56dc1d | 5263 | else if (offset == 0) |
e7ef3ff2 | 5264 | to_rtx = target; |
5265 | else | |
5266 | abort (); | |
5267 | emit_move_insn (to_rtx, datum); | |
5268 | } | |
325d1c45 | 5269 | |
b599eea5 | 5270 | if (ibit == nbits) |
5271 | break; | |
5272 | word = 0; | |
5273 | bit_pos = 0; | |
5274 | offset += set_word_size / BITS_PER_UNIT; | |
97b2af42 | 5275 | } |
5276 | } | |
97b2af42 | 5277 | } |
e7ef3ff2 | 5278 | else if (!cleared) |
325d1c45 | 5279 | /* Don't bother clearing storage if the set is all ones. */ |
5280 | if (TREE_CHAIN (elt) != NULL_TREE | |
5281 | || (TREE_PURPOSE (elt) == NULL_TREE | |
5282 | ? nbits != 1 | |
5283 | : ( ! host_integerp (TREE_VALUE (elt), 0) | |
5284 | || ! host_integerp (TREE_PURPOSE (elt), 0) | |
5285 | || (tree_low_cst (TREE_VALUE (elt), 0) | |
5286 | - tree_low_cst (TREE_PURPOSE (elt), 0) + 1 | |
5287 | != (HOST_WIDE_INT) nbits)))) | |
2a631e19 | 5288 | clear_storage (target, expr_size (exp)); |
fa56dc1d | 5289 | |
e7ef3ff2 | 5290 | for (; elt != NULL_TREE; elt = TREE_CHAIN (elt)) |
97b2af42 | 5291 | { |
fa56dc1d | 5292 | /* Start of range of element or NULL. */ |
97b2af42 | 5293 | tree startbit = TREE_PURPOSE (elt); |
fa56dc1d | 5294 | /* End of range of element, or element value. */ |
97b2af42 | 5295 | tree endbit = TREE_VALUE (elt); |
5296 | HOST_WIDE_INT startb, endb; | |
325d1c45 | 5297 | rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx; |
97b2af42 | 5298 | |
5299 | bitlength_rtx = expand_expr (bitlength, | |
325d1c45 | 5300 | NULL_RTX, MEM, EXPAND_CONST_ADDRESS); |
97b2af42 | 5301 | |
fa56dc1d | 5302 | /* Handle non-range tuple element like [ expr ]. */ |
97b2af42 | 5303 | if (startbit == NULL_TREE) |
5304 | { | |
5305 | startbit = save_expr (endbit); | |
5306 | endbit = startbit; | |
5307 | } | |
325d1c45 | 5308 | |
97b2af42 | 5309 | startbit = convert (sizetype, startbit); |
5310 | endbit = convert (sizetype, endbit); | |
5311 | if (! integer_zerop (domain_min)) | |
5312 | { | |
5313 | startbit = size_binop (MINUS_EXPR, startbit, domain_min); | |
5314 | endbit = size_binop (MINUS_EXPR, endbit, domain_min); | |
5315 | } | |
fa56dc1d | 5316 | startbit_rtx = expand_expr (startbit, NULL_RTX, MEM, |
97b2af42 | 5317 | EXPAND_CONST_ADDRESS); |
fa56dc1d | 5318 | endbit_rtx = expand_expr (endbit, NULL_RTX, MEM, |
97b2af42 | 5319 | EXPAND_CONST_ADDRESS); |
5320 | ||
5321 | if (REG_P (target)) | |
5322 | { | |
387bc205 | 5323 | targetx |
5324 | = assign_temp | |
771d21fa | 5325 | ((build_qualified_type ((*lang_hooks.types.type_for_mode) |
5326 | (GET_MODE (target), 0), | |
387bc205 | 5327 | TYPE_QUAL_CONST)), |
5328 | 0, 1, 1); | |
97b2af42 | 5329 | emit_move_insn (targetx, target); |
5330 | } | |
325d1c45 | 5331 | |
97b2af42 | 5332 | else if (GET_CODE (target) == MEM) |
5333 | targetx = target; | |
5334 | else | |
5335 | abort (); | |
5336 | ||
c0bfc78e | 5337 | /* Optimization: If startbit and endbit are constants divisible |
5338 | by BITS_PER_UNIT, call memset instead. */ | |
5339 | if (TARGET_MEM_FUNCTIONS | |
5340 | && TREE_CODE (startbit) == INTEGER_CST | |
97b2af42 | 5341 | && TREE_CODE (endbit) == INTEGER_CST |
5342 | && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0 | |
e7ef3ff2 | 5343 | && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0) |
97b2af42 | 5344 | { |
2c5d421b | 5345 | emit_library_call (memset_libfunc, LCT_NORMAL, |
97b2af42 | 5346 | VOIDmode, 3, |
e7ef3ff2 | 5347 | plus_constant (XEXP (targetx, 0), |
5348 | startb / BITS_PER_UNIT), | |
97b2af42 | 5349 | Pmode, |
36c8378b | 5350 | constm1_rtx, TYPE_MODE (integer_type_node), |
97b2af42 | 5351 | GEN_INT ((endb - startb) / BITS_PER_UNIT), |
36c8378b | 5352 | TYPE_MODE (sizetype)); |
97b2af42 | 5353 | } |
5354 | else | |
325d1c45 | 5355 | emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"), |
2c5d421b | 5356 | LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0), |
5357 | Pmode, bitlength_rtx, TYPE_MODE (sizetype), | |
325d1c45 | 5358 | startbit_rtx, TYPE_MODE (sizetype), |
5359 | endbit_rtx, TYPE_MODE (sizetype)); | |
5360 | ||
97b2af42 | 5361 | if (REG_P (target)) |
5362 | emit_move_insn (target, targetx); | |
5363 | } | |
5364 | } | |
10f307d9 | 5365 | |
5366 | else | |
5367 | abort (); | |
5368 | } | |
5369 | ||
5370 | /* Store the value of EXP (an expression tree) | |
5371 | into a subfield of TARGET which has mode MODE and occupies | |
5372 | BITSIZE bits, starting BITPOS bits from the start of TARGET. | |
5373 | If MODE is VOIDmode, it means that we are storing into a bit-field. | |
5374 | ||
5375 | If VALUE_MODE is VOIDmode, return nothing in particular. | |
5376 | UNSIGNEDP is not used in this case. | |
5377 | ||
5378 | Otherwise, return an rtx for the value stored. This rtx | |
5379 | has mode VALUE_MODE if that is convenient to do. | |
5380 | In this case, UNSIGNEDP must be nonzero if the value is an unsigned type. | |
5381 | ||
2b96c5f6 | 5382 | TYPE is the type of the underlying object, |
1e2513d9 | 5383 | |
5384 | ALIAS_SET is the alias set for the destination. This value will | |
5385 | (in general) be different from that for TARGET, since TARGET is a | |
5386 | reference to the containing structure. */ | |
10f307d9 | 5387 | |
5388 | static rtx | |
2b96c5f6 | 5389 | store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type, |
5390 | alias_set) | |
10f307d9 | 5391 | rtx target; |
02e7a332 | 5392 | HOST_WIDE_INT bitsize; |
5393 | HOST_WIDE_INT bitpos; | |
10f307d9 | 5394 | enum machine_mode mode; |
5395 | tree exp; | |
5396 | enum machine_mode value_mode; | |
5397 | int unsignedp; | |
2b96c5f6 | 5398 | tree type; |
1e2513d9 | 5399 | int alias_set; |
10f307d9 | 5400 | { |
b572011e | 5401 | HOST_WIDE_INT width_mask = 0; |
10f307d9 | 5402 | |
0dbd1c74 | 5403 | if (TREE_CODE (exp) == ERROR_MARK) |
5404 | return const0_rtx; | |
5405 | ||
55e9836d | 5406 | /* If we have nothing to store, do nothing unless the expression has |
5407 | side-effects. */ | |
5408 | if (bitsize == 0) | |
5409 | return expand_expr (exp, const0_rtx, VOIDmode, 0); | |
2b96c5f6 | 5410 | else if (bitsize >=0 && bitsize < HOST_BITS_PER_WIDE_INT) |
b572011e | 5411 | width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1; |
10f307d9 | 5412 | |
5413 | /* If we are storing into an unaligned field of an aligned union that is | |
5414 | in a register, we may have the mode of TARGET being an integer mode but | |
5415 | MODE == BLKmode. In that case, get an aligned object whose size and | |
5416 | alignment are the same as TARGET and store TARGET into it (we can avoid | |
5417 | the store if the field being stored is the entire width of TARGET). Then | |
5418 | call ourselves recursively to store the field into a BLKmode version of | |
5419 | that object. Finally, load from the object into TARGET. This is not | |
5420 | very efficient in general, but should only be slightly more expensive | |
5421 | than the otherwise-required unaligned accesses. Perhaps this can be | |
5422 | cleaned up later. */ | |
5423 | ||
5424 | if (mode == BLKmode | |
5425 | && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG)) | |
5426 | { | |
387bc205 | 5427 | rtx object |
5428 | = assign_temp | |
2b96c5f6 | 5429 | (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST), |
387bc205 | 5430 | 0, 1, 1); |
7a827396 | 5431 | rtx blk_object = adjust_address (object, BLKmode, 0); |
10f307d9 | 5432 | |
e1439bcb | 5433 | if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target))) |
10f307d9 | 5434 | emit_move_insn (object, target); |
5435 | ||
2b96c5f6 | 5436 | store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type, |
5437 | alias_set); | |
10f307d9 | 5438 | |
5439 | emit_move_insn (target, object); | |
5440 | ||
2b96c5f6 | 5441 | /* We want to return the BLKmode version of the data. */ |
559a13ea | 5442 | return blk_object; |
10f307d9 | 5443 | } |
efd3939c | 5444 | |
5445 | if (GET_CODE (target) == CONCAT) | |
5446 | { | |
5447 | /* We're storing into a struct containing a single __complex. */ | |
5448 | ||
5449 | if (bitpos != 0) | |
5450 | abort (); | |
5451 | return store_expr (exp, target, 0); | |
5452 | } | |
10f307d9 | 5453 | |
5454 | /* If the structure is in a register or if the component | |
5455 | is a bit field, we cannot use addressing to access it. | |
5456 | Use bit-field techniques or SUBREG to store in it. */ | |
5457 | ||
07edfa02 | 5458 | if (mode == VOIDmode |
03519f22 | 5459 | || (mode != BLKmode && ! direct_store[(int) mode] |
5460 | && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT | |
5461 | && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT) | |
07edfa02 | 5462 | || GET_CODE (target) == REG |
66aa258b | 5463 | || GET_CODE (target) == SUBREG |
4e05e574 | 5464 | /* If the field isn't aligned enough to store as an ordinary memref, |
5465 | store it as a bit field. */ | |
2c269e73 | 5466 | || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)) |
5467 | && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode) | |
155b05dc | 5468 | || bitpos % GET_MODE_ALIGNMENT (mode))) |
155b05dc | 5469 | /* If the RHS and field are a constant size and the size of the |
5470 | RHS isn't the same size as the bitfield, we must use bitfield | |
5471 | operations. */ | |
a0c2c45b | 5472 | || (bitsize >= 0 |
5473 | && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST | |
5474 | && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)) | |
10f307d9 | 5475 | { |
b572011e | 5476 | rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0); |
97d7f645 | 5477 | |
0aa5cbcc | 5478 | /* If BITSIZE is narrower than the size of the type of EXP |
5479 | we will be narrowing TEMP. Normally, what's wanted are the | |
5480 | low-order bits. However, if EXP's type is a record and this is | |
5481 | big-endian machine, we want the upper BITSIZE bits. */ | |
5482 | if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT | |
cce8da2f | 5483 | && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp)) |
0aa5cbcc | 5484 | && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE) |
5485 | temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp, | |
5486 | size_int (GET_MODE_BITSIZE (GET_MODE (temp)) | |
5487 | - bitsize), | |
5488 | temp, 1); | |
5489 | ||
97d7f645 | 5490 | /* Unless MODE is VOIDmode or BLKmode, convert TEMP to |
5491 | MODE. */ | |
5492 | if (mode != VOIDmode && mode != BLKmode | |
5493 | && mode != TYPE_MODE (TREE_TYPE (exp))) | |
5494 | temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1); | |
5495 | ||
0e20f9fb | 5496 | /* If the modes of TARGET and TEMP are both BLKmode, both |
5497 | must be in memory and BITPOS must be aligned on a byte | |
5498 | boundary. If so, we simply do a block copy. */ | |
5499 | if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode) | |
5500 | { | |
5501 | if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM | |
5502 | || bitpos % BITS_PER_UNIT != 0) | |
5503 | abort (); | |
5504 | ||
e513d163 | 5505 | target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT); |
0e20f9fb | 5506 | emit_block_move (target, temp, |
2b96c5f6 | 5507 | GEN_INT ((bitsize + BITS_PER_UNIT - 1) |
0378dbdc | 5508 | / BITS_PER_UNIT), |
5509 | BLOCK_OP_NORMAL); | |
0e20f9fb | 5510 | |
5511 | return value_mode == VOIDmode ? const0_rtx : target; | |
5512 | } | |
5513 | ||
10f307d9 | 5514 | /* Store the value in the bitfield. */ |
2b96c5f6 | 5515 | store_bit_field (target, bitsize, bitpos, mode, temp, |
5516 | int_size_in_bytes (type)); | |
5517 | ||
10f307d9 | 5518 | if (value_mode != VOIDmode) |
5519 | { | |
2c269e73 | 5520 | /* The caller wants an rtx for the value. |
5521 | If possible, avoid refetching from the bitfield itself. */ | |
10f307d9 | 5522 | if (width_mask != 0 |
5523 | && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))) | |
ba281428 | 5524 | { |
7e716022 | 5525 | tree count; |
ba281428 | 5526 | enum machine_mode tmode; |
04e2d822 | 5527 | |
ba281428 | 5528 | tmode = GET_MODE (temp); |
04e2d822 | 5529 | if (tmode == VOIDmode) |
5530 | tmode = value_mode; | |
6de9716c | 5531 | |
5532 | if (unsignedp) | |
5533 | return expand_and (tmode, temp, | |
2d232d05 | 5534 | gen_int_mode (width_mask, tmode), |
6de9716c | 5535 | NULL_RTX); |
5536 | ||
ba281428 | 5537 | count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0); |
5538 | temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0); | |
5539 | return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0); | |
5540 | } | |
2c269e73 | 5541 | |
10f307d9 | 5542 | return extract_bit_field (target, bitsize, bitpos, unsignedp, |
2c269e73 | 5543 | NULL_RTX, value_mode, VOIDmode, |
2b96c5f6 | 5544 | int_size_in_bytes (type)); |
10f307d9 | 5545 | } |
5546 | return const0_rtx; | |
5547 | } | |
5548 | else | |
5549 | { | |
5550 | rtx addr = XEXP (target, 0); | |
2b96c5f6 | 5551 | rtx to_rtx = target; |
10f307d9 | 5552 | |
5553 | /* If a value is wanted, it must be the lhs; | |
5554 | so make the address stable for multiple use. */ | |
5555 | ||
5556 | if (value_mode != VOIDmode && GET_CODE (addr) != REG | |
5557 | && ! CONSTANT_ADDRESS_P (addr) | |
5558 | /* A frame-pointer reference is already stable. */ | |
5559 | && ! (GET_CODE (addr) == PLUS | |
5560 | && GET_CODE (XEXP (addr, 1)) == CONST_INT | |
5561 | && (XEXP (addr, 0) == virtual_incoming_args_rtx | |
5562 | || XEXP (addr, 0) == virtual_stack_vars_rtx))) | |
2b96c5f6 | 5563 | to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr)); |
10f307d9 | 5564 | |
5565 | /* Now build a reference to just the desired component. */ | |
5566 | ||
2b96c5f6 | 5567 | to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT); |
5568 | ||
5569 | if (to_rtx == target) | |
5570 | to_rtx = copy_rtx (to_rtx); | |
537ffcfc | 5571 | |
6a0934dd | 5572 | MEM_SET_IN_STRUCT_P (to_rtx, 1); |
5cc193e7 | 5573 | if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0) |
2b96c5f6 | 5574 | set_mem_alias_set (to_rtx, alias_set); |
10f307d9 | 5575 | |
5576 | return store_expr (exp, to_rtx, value_mode != VOIDmode); | |
5577 | } | |
5578 | } | |
5579 | \f | |
5580 | /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF, | |
ba04d9d5 | 5581 | an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these |
5582 | codes and find the ultimate containing object, which we return. | |
10f307d9 | 5583 | |
5584 | We set *PBITSIZE to the size in bits that we want, *PBITPOS to the | |
5585 | bit position, and *PUNSIGNEDP to the signedness of the field. | |
954bdcb1 | 5586 | If the position of the field is variable, we store a tree |
5587 | giving the variable offset (in units) in *POFFSET. | |
5588 | This offset is in addition to the bit position. | |
5589 | If the position is not variable, we store 0 in *POFFSET. | |
10f307d9 | 5590 | |
5591 | If any of the extraction expressions is volatile, | |
5592 | we store 1 in *PVOLATILEP. Otherwise we don't change that. | |
5593 | ||
5594 | If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it | |
5595 | is a mode that can be used to access the field. In that case, *PBITSIZE | |
01ab6370 | 5596 | is redundant. |
5597 | ||
5598 | If the field describes a variable-sized object, *PMODE is set to | |
5599 | VOIDmode and *PBITSIZE is set to -1. An access cannot be made in | |
1e625a2e | 5600 | this case, but the address of the object can be found. */ |
10f307d9 | 5601 | |
5602 | tree | |
3ebd94bd | 5603 | get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, |
2b96c5f6 | 5604 | punsignedp, pvolatilep) |
10f307d9 | 5605 | tree exp; |
02e7a332 | 5606 | HOST_WIDE_INT *pbitsize; |
5607 | HOST_WIDE_INT *pbitpos; | |
954bdcb1 | 5608 | tree *poffset; |
10f307d9 | 5609 | enum machine_mode *pmode; |
5610 | int *punsignedp; | |
5611 | int *pvolatilep; | |
5612 | { | |
5613 | tree size_tree = 0; | |
5614 | enum machine_mode mode = VOIDmode; | |
902de8ed | 5615 | tree offset = size_zero_node; |
02e7a332 | 5616 | tree bit_offset = bitsize_zero_node; |
96216d37 | 5617 | tree placeholder_ptr = 0; |
02e7a332 | 5618 | tree tem; |
10f307d9 | 5619 | |
02e7a332 | 5620 | /* First get the mode, signedness, and size. We do this from just the |
5621 | outermost expression. */ | |
10f307d9 | 5622 | if (TREE_CODE (exp) == COMPONENT_REF) |
5623 | { | |
5624 | size_tree = DECL_SIZE (TREE_OPERAND (exp, 1)); | |
5625 | if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1))) | |
5626 | mode = DECL_MODE (TREE_OPERAND (exp, 1)); | |
02e7a332 | 5627 | |
10f307d9 | 5628 | *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1)); |
5629 | } | |
5630 | else if (TREE_CODE (exp) == BIT_FIELD_REF) | |
5631 | { | |
5632 | size_tree = TREE_OPERAND (exp, 1); | |
5633 | *punsignedp = TREE_UNSIGNED (exp); | |
5634 | } | |
5635 | else | |
5636 | { | |
5637 | mode = TYPE_MODE (TREE_TYPE (exp)); | |
02e7a332 | 5638 | *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp)); |
5639 | ||
be2828ce | 5640 | if (mode == BLKmode) |
5641 | size_tree = TYPE_SIZE (TREE_TYPE (exp)); | |
02e7a332 | 5642 | else |
5643 | *pbitsize = GET_MODE_BITSIZE (mode); | |
10f307d9 | 5644 | } |
fa56dc1d | 5645 | |
02e7a332 | 5646 | if (size_tree != 0) |
10f307d9 | 5647 | { |
02e7a332 | 5648 | if (! host_integerp (size_tree, 1)) |
01ab6370 | 5649 | mode = BLKmode, *pbitsize = -1; |
5650 | else | |
02e7a332 | 5651 | *pbitsize = tree_low_cst (size_tree, 1); |
10f307d9 | 5652 | } |
5653 | ||
5654 | /* Compute cumulative bit-offset for nested component-refs and array-refs, | |
5655 | and find the ultimate containing object. */ | |
10f307d9 | 5656 | while (1) |
5657 | { | |
02e7a332 | 5658 | if (TREE_CODE (exp) == BIT_FIELD_REF) |
5659 | bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2)); | |
5660 | else if (TREE_CODE (exp) == COMPONENT_REF) | |
10f307d9 | 5661 | { |
02e7a332 | 5662 | tree field = TREE_OPERAND (exp, 1); |
5663 | tree this_offset = DECL_FIELD_OFFSET (field); | |
10f307d9 | 5664 | |
227bf826 | 5665 | /* If this field hasn't been filled in yet, don't go |
5666 | past it. This should only happen when folding expressions | |
5667 | made during type construction. */ | |
02e7a332 | 5668 | if (this_offset == 0) |
227bf826 | 5669 | break; |
02e7a332 | 5670 | else if (! TREE_CONSTANT (this_offset) |
5671 | && contains_placeholder_p (this_offset)) | |
5672 | this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp); | |
227bf826 | 5673 | |
7114c815 | 5674 | offset = size_binop (PLUS_EXPR, offset, this_offset); |
02e7a332 | 5675 | bit_offset = size_binop (PLUS_EXPR, bit_offset, |
5676 | DECL_FIELD_BIT_OFFSET (field)); | |
75f7b24f | 5677 | |
2b96c5f6 | 5678 | /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */ |
10f307d9 | 5679 | } |
7114c815 | 5680 | |
ba04d9d5 | 5681 | else if (TREE_CODE (exp) == ARRAY_REF |
5682 | || TREE_CODE (exp) == ARRAY_RANGE_REF) | |
10f307d9 | 5683 | { |
cf389750 | 5684 | tree index = TREE_OPERAND (exp, 1); |
ba04d9d5 | 5685 | tree array = TREE_OPERAND (exp, 0); |
5686 | tree domain = TYPE_DOMAIN (TREE_TYPE (array)); | |
02e7a332 | 5687 | tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0); |
ba04d9d5 | 5688 | tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array))); |
cf389750 | 5689 | |
02e7a332 | 5690 | /* We assume all arrays have sizes that are a multiple of a byte. |
5691 | First subtract the lower bound, if any, in the type of the | |
5692 | index, then convert to sizetype and multiply by the size of the | |
5693 | array element. */ | |
5694 | if (low_bound != 0 && ! integer_zerop (low_bound)) | |
5695 | index = fold (build (MINUS_EXPR, TREE_TYPE (index), | |
5696 | index, low_bound)); | |
30384dcf | 5697 | |
7114c815 | 5698 | /* If the index has a self-referential type, pass it to a |
5699 | WITH_RECORD_EXPR; if the component size is, pass our | |
5700 | component to one. */ | |
02e7a332 | 5701 | if (! TREE_CONSTANT (index) |
5702 | && contains_placeholder_p (index)) | |
5703 | index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp); | |
7114c815 | 5704 | if (! TREE_CONSTANT (unit_size) |
5705 | && contains_placeholder_p (unit_size)) | |
ba04d9d5 | 5706 | unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array); |
cf389750 | 5707 | |
02e7a332 | 5708 | offset = size_binop (PLUS_EXPR, offset, |
5709 | size_binop (MULT_EXPR, | |
5710 | convert (sizetype, index), | |
7114c815 | 5711 | unit_size)); |
10f307d9 | 5712 | } |
7114c815 | 5713 | |
96216d37 | 5714 | else if (TREE_CODE (exp) == PLACEHOLDER_EXPR) |
5715 | { | |
86ce88aa | 5716 | tree new = find_placeholder (exp, &placeholder_ptr); |
5717 | ||
5718 | /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR. | |
5719 | We might have been called from tree optimization where we | |
5720 | haven't set up an object yet. */ | |
5721 | if (new == 0) | |
5722 | break; | |
5723 | else | |
5724 | exp = new; | |
5725 | ||
96216d37 | 5726 | continue; |
5727 | } | |
10f307d9 | 5728 | else if (TREE_CODE (exp) != NON_LVALUE_EXPR |
f96c43fb | 5729 | && TREE_CODE (exp) != VIEW_CONVERT_EXPR |
10f307d9 | 5730 | && ! ((TREE_CODE (exp) == NOP_EXPR |
5731 | || TREE_CODE (exp) == CONVERT_EXPR) | |
5732 | && (TYPE_MODE (TREE_TYPE (exp)) | |
5733 | == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))) | |
5734 | break; | |
954bdcb1 | 5735 | |
5736 | /* If any reference in the chain is volatile, the effect is volatile. */ | |
5737 | if (TREE_THIS_VOLATILE (exp)) | |
5738 | *pvolatilep = 1; | |
7fce34be | 5739 | |
10f307d9 | 5740 | exp = TREE_OPERAND (exp, 0); |
5741 | } | |
5742 | ||
02e7a332 | 5743 | /* If OFFSET is constant, see if we can return the whole thing as a |
5744 | constant bit position. Otherwise, split it up. */ | |
5745 | if (host_integerp (offset, 0) | |
5746 | && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset), | |
5747 | bitsize_unit_node)) | |
5748 | && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset)) | |
5749 | && host_integerp (tem, 0)) | |
5750 | *pbitpos = tree_low_cst (tem, 0), *poffset = 0; | |
5751 | else | |
5752 | *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset; | |
c869557a | 5753 | |
10f307d9 | 5754 | *pmode = mode; |
10f307d9 | 5755 | return exp; |
5756 | } | |
eb4b06b6 | 5757 | |
f96c43fb | 5758 | /* Return 1 if T is an expression that get_inner_reference handles. */ |
5759 | ||
5760 | int | |
5761 | handled_component_p (t) | |
5762 | tree t; | |
5763 | { | |
5764 | switch (TREE_CODE (t)) | |
5765 | { | |
5766 | case BIT_FIELD_REF: | |
5767 | case COMPONENT_REF: | |
5768 | case ARRAY_REF: | |
5769 | case ARRAY_RANGE_REF: | |
5770 | case NON_LVALUE_EXPR: | |
5771 | case VIEW_CONVERT_EXPR: | |
5772 | return 1; | |
5773 | ||
5774 | case NOP_EXPR: | |
5775 | case CONVERT_EXPR: | |
5776 | return (TYPE_MODE (TREE_TYPE (t)) | |
5777 | == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0)))); | |
5778 | ||
5779 | default: | |
5780 | return 0; | |
5781 | } | |
5782 | } | |
10f307d9 | 5783 | \f |
dc183975 | 5784 | /* Given an rtx VALUE that may contain additions and multiplications, return |
5785 | an equivalent value that just refers to a register, memory, or constant. | |
5786 | This is done by generating instructions to perform the arithmetic and | |
5787 | returning a pseudo-register containing the value. | |
c4f1a887 | 5788 | |
5789 | The returned value may be a REG, SUBREG, MEM or constant. */ | |
10f307d9 | 5790 | |
5791 | rtx | |
5792 | force_operand (value, target) | |
5793 | rtx value, target; | |
5794 | { | |
fef8467d | 5795 | rtx op1, op2; |
10f307d9 | 5796 | /* Use subtarget as the target for operand 0 of a binary operation. */ |
19cb6b50 | 5797 | rtx subtarget = get_subtarget (target); |
fef8467d | 5798 | enum rtx_code code = GET_CODE (value); |
10f307d9 | 5799 | |
8b59469a | 5800 | /* Check for a PIC address load. */ |
fef8467d | 5801 | if ((code == PLUS || code == MINUS) |
8b59469a | 5802 | && XEXP (value, 0) == pic_offset_table_rtx |
5803 | && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF | |
5804 | || GET_CODE (XEXP (value, 1)) == LABEL_REF | |
5805 | || GET_CODE (XEXP (value, 1)) == CONST)) | |
5806 | { | |
5807 | if (!subtarget) | |
5808 | subtarget = gen_reg_rtx (GET_MODE (value)); | |
5809 | emit_move_insn (subtarget, value); | |
5810 | return subtarget; | |
5811 | } | |
5812 | ||
fef8467d | 5813 | if (code == ZERO_EXTEND || code == SIGN_EXTEND) |
10f307d9 | 5814 | { |
fef8467d | 5815 | if (!target) |
5816 | target = gen_reg_rtx (GET_MODE (value)); | |
ef8396bd | 5817 | convert_move (target, force_operand (XEXP (value, 0), NULL), |
fef8467d | 5818 | code == ZERO_EXTEND); |
5819 | return target; | |
10f307d9 | 5820 | } |
5821 | ||
fef8467d | 5822 | if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c') |
10f307d9 | 5823 | { |
5824 | op2 = XEXP (value, 1); | |
fef8467d | 5825 | if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget)) |
10f307d9 | 5826 | subtarget = 0; |
fef8467d | 5827 | if (code == MINUS && GET_CODE (op2) == CONST_INT) |
10f307d9 | 5828 | { |
fef8467d | 5829 | code = PLUS; |
10f307d9 | 5830 | op2 = negate_rtx (GET_MODE (value), op2); |
5831 | } | |
5832 | ||
5833 | /* Check for an addition with OP2 a constant integer and our first | |
fef8467d | 5834 | operand a PLUS of a virtual register and something else. In that |
5835 | case, we want to emit the sum of the virtual register and the | |
5836 | constant first and then add the other value. This allows virtual | |
5837 | register instantiation to simply modify the constant rather than | |
5838 | creating another one around this addition. */ | |
5839 | if (code == PLUS && GET_CODE (op2) == CONST_INT | |
10f307d9 | 5840 | && GET_CODE (XEXP (value, 0)) == PLUS |
5841 | && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG | |
5842 | && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER | |
5843 | && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER) | |
5844 | { | |
fef8467d | 5845 | rtx temp = expand_simple_binop (GET_MODE (value), code, |
5846 | XEXP (XEXP (value, 0), 0), op2, | |
5847 | subtarget, 0, OPTAB_LIB_WIDEN); | |
5848 | return expand_simple_binop (GET_MODE (value), code, temp, | |
5849 | force_operand (XEXP (XEXP (value, | |
5850 | 0), 1), 0), | |
5851 | target, 0, OPTAB_LIB_WIDEN); | |
10f307d9 | 5852 | } |
fa56dc1d | 5853 | |
fef8467d | 5854 | op1 = force_operand (XEXP (value, 0), subtarget); |
5855 | op2 = force_operand (op2, NULL_RTX); | |
5856 | switch (code) | |
5857 | { | |
5858 | case MULT: | |
5859 | return expand_mult (GET_MODE (value), op1, op2, target, 1); | |
5860 | case DIV: | |
5861 | if (!INTEGRAL_MODE_P (GET_MODE (value))) | |
5862 | return expand_simple_binop (GET_MODE (value), code, op1, op2, | |
5863 | target, 1, OPTAB_LIB_WIDEN); | |
5864 | else | |
5865 | return expand_divmod (0, | |
5866 | FLOAT_MODE_P (GET_MODE (value)) | |
5867 | ? RDIV_EXPR : TRUNC_DIV_EXPR, | |
5868 | GET_MODE (value), op1, op2, target, 0); | |
5869 | break; | |
5870 | case MOD: | |
5871 | return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2, | |
5872 | target, 0); | |
5873 | break; | |
5874 | case UDIV: | |
5875 | return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2, | |
5876 | target, 1); | |
5877 | break; | |
5878 | case UMOD: | |
5879 | return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2, | |
5880 | target, 1); | |
5881 | break; | |
5882 | case ASHIFTRT: | |
5883 | return expand_simple_binop (GET_MODE (value), code, op1, op2, | |
5884 | target, 0, OPTAB_LIB_WIDEN); | |
5885 | break; | |
5886 | default: | |
5887 | return expand_simple_binop (GET_MODE (value), code, op1, op2, | |
5888 | target, 1, OPTAB_LIB_WIDEN); | |
5889 | } | |
5890 | } | |
5891 | if (GET_RTX_CLASS (code) == '1') | |
5892 | { | |
5893 | op1 = force_operand (XEXP (value, 0), NULL_RTX); | |
5894 | return expand_simple_unop (GET_MODE (value), code, op1, target, 0); | |
10f307d9 | 5895 | } |
3084721c | 5896 | |
5897 | #ifdef INSN_SCHEDULING | |
5898 | /* On machines that have insn scheduling, we want all memory reference to be | |
5899 | explicit, so we need to deal with such paradoxical SUBREGs. */ | |
5900 | if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM | |
5901 | && (GET_MODE_SIZE (GET_MODE (value)) | |
5902 | > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value))))) | |
5903 | value | |
5904 | = simplify_gen_subreg (GET_MODE (value), | |
5905 | force_reg (GET_MODE (SUBREG_REG (value)), | |
5906 | force_operand (SUBREG_REG (value), | |
5907 | NULL_RTX)), | |
5908 | GET_MODE (SUBREG_REG (value)), | |
5909 | SUBREG_BYTE (value)); | |
5910 | #endif | |
5911 | ||
10f307d9 | 5912 | return value; |
5913 | } | |
5914 | \f | |
10f307d9 | 5915 | /* Subroutine of expand_expr: return nonzero iff there is no way that |
997d68fe | 5916 | EXP can reference X, which is being modified. TOP_P is nonzero if this |
5917 | call is going to be used to determine whether we need a temporary | |
67e40adc | 5918 | for EXP, as opposed to a recursive call to this function. |
5919 | ||
5920 | It is always safe for this routine to return zero since it merely | |
5921 | searches for optimization opportunities. */ | |
10f307d9 | 5922 | |
e41f0d80 | 5923 | int |
997d68fe | 5924 | safe_from_p (x, exp, top_p) |
10f307d9 | 5925 | rtx x; |
5926 | tree exp; | |
997d68fe | 5927 | int top_p; |
10f307d9 | 5928 | { |
5929 | rtx exp_rtl = 0; | |
5930 | int i, nops; | |
387bc205 | 5931 | static tree save_expr_list; |
10f307d9 | 5932 | |
a71ba0b1 | 5933 | if (x == 0 |
5934 | /* If EXP has varying size, we MUST use a target since we currently | |
62d8c952 | 5935 | have no way of allocating temporaries of variable size |
5936 | (except for arrays that have TYPE_ARRAY_MAX_SIZE set). | |
5937 | So we assume here that something at a higher level has prevented a | |
b9438b95 | 5938 | clash. This is somewhat bogus, but the best we can do. Only |
997d68fe | 5939 | do this when X is BLKmode and when we are at the top level. */ |
4b72716d | 5940 | || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp)) |
b9438b95 | 5941 | && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST |
62d8c952 | 5942 | && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE |
5943 | || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE | |
5944 | || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp))) | |
5945 | != INTEGER_CST) | |
387bc205 | 5946 | && GET_MODE (x) == BLKmode) |
5947 | /* If X is in the outgoing argument area, it is always safe. */ | |
5948 | || (GET_CODE (x) == MEM | |
5949 | && (XEXP (x, 0) == virtual_outgoing_args_rtx | |
5950 | || (GET_CODE (XEXP (x, 0)) == PLUS | |
5951 | && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))) | |
10f307d9 | 5952 | return 1; |
5953 | ||
5954 | /* If this is a subreg of a hard register, declare it unsafe, otherwise, | |
5955 | find the underlying pseudo. */ | |
5956 | if (GET_CODE (x) == SUBREG) | |
5957 | { | |
5958 | x = SUBREG_REG (x); | |
5959 | if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER) | |
5960 | return 0; | |
5961 | } | |
5962 | ||
387bc205 | 5963 | /* A SAVE_EXPR might appear many times in the expression passed to the |
5964 | top-level safe_from_p call, and if it has a complex subexpression, | |
5965 | examining it multiple times could result in a combinatorial explosion. | |
5966 | E.g. on an Alpha running at least 200MHz, a Fortran test case compiled | |
5967 | with optimization took about 28 minutes to compile -- even though it was | |
5968 | only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE | |
5969 | and turn that off when we are done. We keep a list of the SAVE_EXPRs | |
5970 | we have processed. Note that the only test of top_p was above. */ | |
5971 | ||
5972 | if (top_p) | |
5973 | { | |
5974 | int rtn; | |
5975 | tree t; | |
5976 | ||
5977 | save_expr_list = 0; | |
5978 | ||
5979 | rtn = safe_from_p (x, exp, 0); | |
5980 | ||
5981 | for (t = save_expr_list; t != 0; t = TREE_CHAIN (t)) | |
5982 | TREE_PRIVATE (TREE_PURPOSE (t)) = 0; | |
5983 | ||
5984 | return rtn; | |
5985 | } | |
10f307d9 | 5986 | |
387bc205 | 5987 | /* Now look at our tree code and possibly recurse. */ |
10f307d9 | 5988 | switch (TREE_CODE_CLASS (TREE_CODE (exp))) |
5989 | { | |
5990 | case 'd': | |
6db2b7ab | 5991 | exp_rtl = DECL_RTL_IF_SET (exp); |
10f307d9 | 5992 | break; |
5993 | ||
5994 | case 'c': | |
5995 | return 1; | |
5996 | ||
5997 | case 'x': | |
5998 | if (TREE_CODE (exp) == TREE_LIST) | |
52a618b4 | 5999 | return ((TREE_VALUE (exp) == 0 |
997d68fe | 6000 | || safe_from_p (x, TREE_VALUE (exp), 0)) |
10f307d9 | 6001 | && (TREE_CHAIN (exp) == 0 |
997d68fe | 6002 | || safe_from_p (x, TREE_CHAIN (exp), 0))); |
67e40adc | 6003 | else if (TREE_CODE (exp) == ERROR_MARK) |
6004 | return 1; /* An already-visited SAVE_EXPR? */ | |
10f307d9 | 6005 | else |
6006 | return 0; | |
6007 | ||
6008 | case '1': | |
997d68fe | 6009 | return safe_from_p (x, TREE_OPERAND (exp, 0), 0); |
10f307d9 | 6010 | |
6011 | case '2': | |
6012 | case '<': | |
997d68fe | 6013 | return (safe_from_p (x, TREE_OPERAND (exp, 0), 0) |
6014 | && safe_from_p (x, TREE_OPERAND (exp, 1), 0)); | |
10f307d9 | 6015 | |
6016 | case 'e': | |
6017 | case 'r': | |
6018 | /* Now do code-specific tests. EXP_RTL is set to any rtx we find in | |
6019 | the expression. If it is set, we conflict iff we are that rtx or | |
6020 | both are in memory. Otherwise, we check all operands of the | |
6021 | expression recursively. */ | |
6022 | ||
6023 | switch (TREE_CODE (exp)) | |
6024 | { | |
6025 | case ADDR_EXPR: | |
86ce88aa | 6026 | /* If the operand is static or we are static, we can't conflict. |
6027 | Likewise if we don't conflict with the operand at all. */ | |
6028 | if (staticp (TREE_OPERAND (exp, 0)) | |
6029 | || TREE_STATIC (exp) | |
6030 | || safe_from_p (x, TREE_OPERAND (exp, 0), 0)) | |
6031 | return 1; | |
6032 | ||
6033 | /* Otherwise, the only way this can conflict is if we are taking | |
6034 | the address of a DECL a that address if part of X, which is | |
6035 | very rare. */ | |
6036 | exp = TREE_OPERAND (exp, 0); | |
6037 | if (DECL_P (exp)) | |
6038 | { | |
6039 | if (!DECL_RTL_SET_P (exp) | |
6040 | || GET_CODE (DECL_RTL (exp)) != MEM) | |
6041 | return 0; | |
6042 | else | |
6043 | exp_rtl = XEXP (DECL_RTL (exp), 0); | |
6044 | } | |
6045 | break; | |
10f307d9 | 6046 | |
6047 | case INDIRECT_REF: | |
387bc205 | 6048 | if (GET_CODE (x) == MEM |
6049 | && alias_sets_conflict_p (MEM_ALIAS_SET (x), | |
6050 | get_alias_set (exp))) | |
10f307d9 | 6051 | return 0; |
6052 | break; | |
6053 | ||
6054 | case CALL_EXPR: | |
bc33ff05 | 6055 | /* Assume that the call will clobber all hard registers and |
6056 | all of memory. */ | |
6057 | if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER) | |
6058 | || GET_CODE (x) == MEM) | |
6059 | return 0; | |
10f307d9 | 6060 | break; |
6061 | ||
6062 | case RTL_EXPR: | |
2d9d2e10 | 6063 | /* If a sequence exists, we would have to scan every instruction |
6064 | in the sequence to see if it was safe. This is probably not | |
6065 | worthwhile. */ | |
6066 | if (RTL_EXPR_SEQUENCE (exp)) | |
10f307d9 | 6067 | return 0; |
6068 | ||
2d9d2e10 | 6069 | exp_rtl = RTL_EXPR_RTL (exp); |
10f307d9 | 6070 | break; |
6071 | ||
6072 | case WITH_CLEANUP_EXPR: | |
5929001a | 6073 | exp_rtl = WITH_CLEANUP_EXPR_RTL (exp); |
10f307d9 | 6074 | break; |
6075 | ||
34e2ddcd | 6076 | case CLEANUP_POINT_EXPR: |
997d68fe | 6077 | return safe_from_p (x, TREE_OPERAND (exp, 0), 0); |
34e2ddcd | 6078 | |
10f307d9 | 6079 | case SAVE_EXPR: |
6080 | exp_rtl = SAVE_EXPR_RTL (exp); | |
67e40adc | 6081 | if (exp_rtl) |
6082 | break; | |
6083 | ||
387bc205 | 6084 | /* If we've already scanned this, don't do it again. Otherwise, |
6085 | show we've scanned it and record for clearing the flag if we're | |
6086 | going on. */ | |
6087 | if (TREE_PRIVATE (exp)) | |
6088 | return 1; | |
67e40adc | 6089 | |
387bc205 | 6090 | TREE_PRIVATE (exp) = 1; |
6091 | if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0)) | |
84791d69 | 6092 | { |
387bc205 | 6093 | TREE_PRIVATE (exp) = 0; |
6094 | return 0; | |
84791d69 | 6095 | } |
387bc205 | 6096 | |
6097 | save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list); | |
67e40adc | 6098 | return 1; |
10f307d9 | 6099 | |
4e0ff571 | 6100 | case BIND_EXPR: |
6101 | /* The only operand we look at is operand 1. The rest aren't | |
6102 | part of the expression. */ | |
997d68fe | 6103 | return safe_from_p (x, TREE_OPERAND (exp, 1), 0); |
4e0ff571 | 6104 | |
10f307d9 | 6105 | case METHOD_CALL_EXPR: |
20dd417a | 6106 | /* This takes an rtx argument, but shouldn't appear here. */ |
10f307d9 | 6107 | abort (); |
fa56dc1d | 6108 | |
0dbd1c74 | 6109 | default: |
6110 | break; | |
10f307d9 | 6111 | } |
6112 | ||
6113 | /* If we have an rtx, we do not need to scan our operands. */ | |
6114 | if (exp_rtl) | |
6115 | break; | |
6116 | ||
e41f0d80 | 6117 | nops = first_rtl_op (TREE_CODE (exp)); |
10f307d9 | 6118 | for (i = 0; i < nops; i++) |
6119 | if (TREE_OPERAND (exp, i) != 0 | |
997d68fe | 6120 | && ! safe_from_p (x, TREE_OPERAND (exp, i), 0)) |
10f307d9 | 6121 | return 0; |
e41f0d80 | 6122 | |
6123 | /* If this is a language-specific tree code, it may require | |
6124 | special handling. */ | |
0fd4500a | 6125 | if ((unsigned int) TREE_CODE (exp) |
6126 | >= (unsigned int) LAST_AND_UNUSED_TREE_CODE | |
b3187c7c | 6127 | && !(*lang_hooks.safe_from_p) (x, exp)) |
e41f0d80 | 6128 | return 0; |
10f307d9 | 6129 | } |
6130 | ||
6131 | /* If we have an rtl, find any enclosed object. Then see if we conflict | |
6132 | with it. */ | |
6133 | if (exp_rtl) | |
6134 | { | |
6135 | if (GET_CODE (exp_rtl) == SUBREG) | |
6136 | { | |
6137 | exp_rtl = SUBREG_REG (exp_rtl); | |
6138 | if (GET_CODE (exp_rtl) == REG | |
6139 | && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER) | |
6140 | return 0; | |
6141 | } | |
6142 | ||
6143 | /* If the rtl is X, then it is not safe. Otherwise, it is unless both | |
387bc205 | 6144 | are memory and they conflict. */ |
10f307d9 | 6145 | return ! (rtx_equal_p (x, exp_rtl) |
6146 | || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM | |
c5e81ca5 | 6147 | && true_dependence (exp_rtl, VOIDmode, x, |
387bc205 | 6148 | rtx_addr_varies_p))); |
10f307d9 | 6149 | } |
6150 | ||
6151 | /* If we reach here, it is safe. */ | |
6152 | return 1; | |
6153 | } | |
6154 | ||
46e62598 | 6155 | /* Subroutine of expand_expr: return rtx if EXP is a |
6156 | variable or parameter; else return 0. */ | |
6157 | ||
6158 | static rtx | |
6159 | var_rtx (exp) | |
6160 | tree exp; | |
6161 | { | |
6162 | STRIP_NOPS (exp); | |
6163 | switch (TREE_CODE (exp)) | |
6164 | { | |
6165 | case PARM_DECL: | |
6166 | case VAR_DECL: | |
6167 | return DECL_RTL (exp); | |
6168 | default: | |
6169 | return 0; | |
6170 | } | |
6171 | } | |
32a0589f | 6172 | |
6173 | #ifdef MAX_INTEGER_COMPUTATION_MODE | |
f060a027 | 6174 | |
32a0589f | 6175 | void |
6176 | check_max_integer_computation_mode (exp) | |
fa56dc1d | 6177 | tree exp; |
32a0589f | 6178 | { |
5b98bbe7 | 6179 | enum tree_code code; |
32a0589f | 6180 | enum machine_mode mode; |
6181 | ||
5b98bbe7 | 6182 | /* Strip any NOPs that don't change the mode. */ |
6183 | STRIP_NOPS (exp); | |
6184 | code = TREE_CODE (exp); | |
6185 | ||
a1db3221 | 6186 | /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */ |
6187 | if (code == NOP_EXPR | |
6188 | && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST) | |
6189 | return; | |
6190 | ||
32a0589f | 6191 | /* First check the type of the overall operation. We need only look at |
6192 | unary, binary and relational operations. */ | |
6193 | if (TREE_CODE_CLASS (code) == '1' | |
6194 | || TREE_CODE_CLASS (code) == '2' | |
6195 | || TREE_CODE_CLASS (code) == '<') | |
6196 | { | |
6197 | mode = TYPE_MODE (TREE_TYPE (exp)); | |
6198 | if (GET_MODE_CLASS (mode) == MODE_INT | |
6199 | && mode > MAX_INTEGER_COMPUTATION_MODE) | |
f060a027 | 6200 | internal_error ("unsupported wide integer operation"); |
32a0589f | 6201 | } |
6202 | ||
6203 | /* Check operand of a unary op. */ | |
6204 | if (TREE_CODE_CLASS (code) == '1') | |
6205 | { | |
6206 | mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); | |
6207 | if (GET_MODE_CLASS (mode) == MODE_INT | |
6208 | && mode > MAX_INTEGER_COMPUTATION_MODE) | |
f060a027 | 6209 | internal_error ("unsupported wide integer operation"); |
32a0589f | 6210 | } |
fa56dc1d | 6211 | |
32a0589f | 6212 | /* Check operands of a binary/comparison op. */ |
6213 | if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<') | |
6214 | { | |
6215 | mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); | |
6216 | if (GET_MODE_CLASS (mode) == MODE_INT | |
6217 | && mode > MAX_INTEGER_COMPUTATION_MODE) | |
f060a027 | 6218 | internal_error ("unsupported wide integer operation"); |
32a0589f | 6219 | |
6220 | mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))); | |
6221 | if (GET_MODE_CLASS (mode) == MODE_INT | |
6222 | && mode > MAX_INTEGER_COMPUTATION_MODE) | |
f060a027 | 6223 | internal_error ("unsupported wide integer operation"); |
32a0589f | 6224 | } |
6225 | } | |
6226 | #endif | |
155b05dc | 6227 | \f |
fcdc122e | 6228 | /* Return the highest power of two that EXP is known to be a multiple of. |
6229 | This is used in updating alignment of MEMs in array references. */ | |
6230 | ||
6231 | static HOST_WIDE_INT | |
6232 | highest_pow2_factor (exp) | |
6233 | tree exp; | |
6234 | { | |
6235 | HOST_WIDE_INT c0, c1; | |
6236 | ||
6237 | switch (TREE_CODE (exp)) | |
6238 | { | |
6239 | case INTEGER_CST: | |
fe93cfe6 | 6240 | /* We can find the lowest bit that's a one. If the low |
6241 | HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT. | |
6242 | We need to handle this case since we can find it in a COND_EXPR, | |
6243 | a MIN_EXPR, or a MAX_EXPR. If the constant overlows, we have an | |
6244 | erroneous program, so return BIGGEST_ALIGNMENT to avoid any | |
e62299bd | 6245 | later ICE. */ |
fe93cfe6 | 6246 | if (TREE_CONSTANT_OVERFLOW (exp)) |
a689a61a | 6247 | return BIGGEST_ALIGNMENT; |
fe93cfe6 | 6248 | else |
fcdc122e | 6249 | { |
fe93cfe6 | 6250 | /* Note: tree_low_cst is intentionally not used here, |
6251 | we don't care about the upper bits. */ | |
6252 | c0 = TREE_INT_CST_LOW (exp); | |
6253 | c0 &= -c0; | |
6254 | return c0 ? c0 : BIGGEST_ALIGNMENT; | |
fcdc122e | 6255 | } |
6256 | break; | |
6257 | ||
cce8da2f | 6258 | case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR: |
fcdc122e | 6259 | c0 = highest_pow2_factor (TREE_OPERAND (exp, 0)); |
6260 | c1 = highest_pow2_factor (TREE_OPERAND (exp, 1)); | |
6261 | return MIN (c0, c1); | |
6262 | ||
6263 | case MULT_EXPR: | |
6264 | c0 = highest_pow2_factor (TREE_OPERAND (exp, 0)); | |
6265 | c1 = highest_pow2_factor (TREE_OPERAND (exp, 1)); | |
6266 | return c0 * c1; | |
6267 | ||
6268 | case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR: | |
6269 | case CEIL_DIV_EXPR: | |
cce8da2f | 6270 | if (integer_pow2p (TREE_OPERAND (exp, 1)) |
6271 | && host_integerp (TREE_OPERAND (exp, 1), 1)) | |
6272 | { | |
6273 | c0 = highest_pow2_factor (TREE_OPERAND (exp, 0)); | |
6274 | c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1); | |
6275 | return MAX (1, c0 / c1); | |
6276 | } | |
6277 | break; | |
fcdc122e | 6278 | |
6279 | case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR: | |
cce8da2f | 6280 | case SAVE_EXPR: case WITH_RECORD_EXPR: |
fcdc122e | 6281 | return highest_pow2_factor (TREE_OPERAND (exp, 0)); |
6282 | ||
cce8da2f | 6283 | case COMPOUND_EXPR: |
6284 | return highest_pow2_factor (TREE_OPERAND (exp, 1)); | |
6285 | ||
fcdc122e | 6286 | case COND_EXPR: |
6287 | c0 = highest_pow2_factor (TREE_OPERAND (exp, 1)); | |
6288 | c1 = highest_pow2_factor (TREE_OPERAND (exp, 2)); | |
6289 | return MIN (c0, c1); | |
6290 | ||
6291 | default: | |
6292 | break; | |
6293 | } | |
6294 | ||
6295 | return 1; | |
6296 | } | |
5b965633 | 6297 | |
6298 | /* Similar, except that it is known that the expression must be a multiple | |
6299 | of the alignment of TYPE. */ | |
6300 | ||
6301 | static HOST_WIDE_INT | |
6302 | highest_pow2_factor_for_type (type, exp) | |
6303 | tree type; | |
6304 | tree exp; | |
6305 | { | |
6306 | HOST_WIDE_INT type_align, factor; | |
6307 | ||
6308 | factor = highest_pow2_factor (exp); | |
6309 | type_align = TYPE_ALIGN (type) / BITS_PER_UNIT; | |
6310 | return MAX (factor, type_align); | |
6311 | } | |
fcdc122e | 6312 | \f |
c3a9c149 | 6313 | /* Return an object on the placeholder list that matches EXP, a |
6314 | PLACEHOLDER_EXPR. An object "matches" if it is of the type of the | |
96216d37 | 6315 | PLACEHOLDER_EXPR or a pointer type to it. For further information, see |
86ce88aa | 6316 | tree.def. If no such object is found, return 0. If PLIST is nonzero, it |
6317 | is a location which initially points to a starting location in the | |
96216d37 | 6318 | placeholder list (zero means start of the list) and where a pointer into |
6319 | the placeholder list at which the object is found is placed. */ | |
c3a9c149 | 6320 | |
6321 | tree | |
6322 | find_placeholder (exp, plist) | |
6323 | tree exp; | |
6324 | tree *plist; | |
6325 | { | |
6326 | tree type = TREE_TYPE (exp); | |
6327 | tree placeholder_expr; | |
6328 | ||
96216d37 | 6329 | for (placeholder_expr |
6330 | = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list; | |
6331 | placeholder_expr != 0; | |
c3a9c149 | 6332 | placeholder_expr = TREE_CHAIN (placeholder_expr)) |
6333 | { | |
6334 | tree need_type = TYPE_MAIN_VARIANT (type); | |
6335 | tree elt; | |
6336 | ||
6337 | /* Find the outermost reference that is of the type we want. If none, | |
6338 | see if any object has a type that is a pointer to the type we | |
6339 | want. */ | |
6340 | for (elt = TREE_PURPOSE (placeholder_expr); elt != 0; | |
6341 | elt = ((TREE_CODE (elt) == COMPOUND_EXPR | |
6342 | || TREE_CODE (elt) == COND_EXPR) | |
6343 | ? TREE_OPERAND (elt, 1) | |
6344 | : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r' | |
6345 | || TREE_CODE_CLASS (TREE_CODE (elt)) == '1' | |
6346 | || TREE_CODE_CLASS (TREE_CODE (elt)) == '2' | |
6347 | || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e') | |
6348 | ? TREE_OPERAND (elt, 0) : 0)) | |
6349 | if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type) | |
6350 | { | |
6351 | if (plist) | |
6352 | *plist = placeholder_expr; | |
6353 | return elt; | |
6354 | } | |
6355 | ||
6356 | for (elt = TREE_PURPOSE (placeholder_expr); elt != 0; | |
6357 | elt | |
6358 | = ((TREE_CODE (elt) == COMPOUND_EXPR | |
6359 | || TREE_CODE (elt) == COND_EXPR) | |
6360 | ? TREE_OPERAND (elt, 1) | |
6361 | : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r' | |
6362 | || TREE_CODE_CLASS (TREE_CODE (elt)) == '1' | |
6363 | || TREE_CODE_CLASS (TREE_CODE (elt)) == '2' | |
6364 | || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e') | |
6365 | ? TREE_OPERAND (elt, 0) : 0)) | |
6366 | if (POINTER_TYPE_P (TREE_TYPE (elt)) | |
6367 | && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt))) | |
6368 | == need_type)) | |
6369 | { | |
6370 | if (plist) | |
6371 | *plist = placeholder_expr; | |
6372 | return build1 (INDIRECT_REF, need_type, elt); | |
6373 | } | |
6374 | } | |
6375 | ||
86ce88aa | 6376 | return 0; |
c3a9c149 | 6377 | } |
6378 | \f | |
10f307d9 | 6379 | /* expand_expr: generate code for computing expression EXP. |
6380 | An rtx for the computed value is returned. The value is never null. | |
6381 | In the case of a void EXP, const0_rtx is returned. | |
6382 | ||
6383 | The value may be stored in TARGET if TARGET is nonzero. | |
6384 | TARGET is just a suggestion; callers must assume that | |
6385 | the rtx returned may not be the same as TARGET. | |
6386 | ||
6387 | If TARGET is CONST0_RTX, it means that the value will be ignored. | |
6388 | ||
6389 | If TMODE is not VOIDmode, it suggests generating the | |
6390 | result in mode TMODE. But this is done only when convenient. | |
6391 | Otherwise, TMODE is ignored and the value generated in its natural mode. | |
6392 | TMODE is just a suggestion; callers must assume that | |
6393 | the rtx returned may not have mode TMODE. | |
6394 | ||
d2ae1b1e | 6395 | Note that TARGET may have neither TMODE nor MODE. In that case, it |
6396 | probably will not be used. | |
10f307d9 | 6397 | |
6398 | If MODIFIER is EXPAND_SUM then when EXP is an addition | |
6399 | we can return an rtx of the form (MULT (REG ...) (CONST_INT ...)) | |
6400 | or a nest of (PLUS ...) and (MINUS ...) where the terms are | |
6401 | products as above, or REG or MEM, or constant. | |
6402 | Ordinarily in such cases we would output mul or add instructions | |
6403 | and then return a pseudo reg containing the sum. | |
6404 | ||
6405 | EXPAND_INITIALIZER is much like EXPAND_SUM except that | |
6406 | it also marks a label as absolutely required (it can't be dead). | |
1aaabd2e | 6407 | It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns. |
d2ae1b1e | 6408 | This is used for outputting expressions used in initializers. |
6409 | ||
6410 | EXPAND_CONST_ADDRESS says that it is okay to return a MEM | |
6411 | with a constant address even if that address is not normally legitimate. | |
6412 | EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */ | |
10f307d9 | 6413 | |
6414 | rtx | |
6415 | expand_expr (exp, target, tmode, modifier) | |
19cb6b50 | 6416 | tree exp; |
10f307d9 | 6417 | rtx target; |
6418 | enum machine_mode tmode; | |
6419 | enum expand_modifier modifier; | |
6420 | { | |
19cb6b50 | 6421 | rtx op0, op1, temp; |
10f307d9 | 6422 | tree type = TREE_TYPE (exp); |
6423 | int unsignedp = TREE_UNSIGNED (type); | |
19cb6b50 | 6424 | enum machine_mode mode; |
6425 | enum tree_code code = TREE_CODE (exp); | |
10f307d9 | 6426 | optab this_optab; |
32b3a273 | 6427 | rtx subtarget, original_target; |
6428 | int ignore; | |
10f307d9 | 6429 | tree context; |
6430 | ||
fa56dc1d | 6431 | /* Handle ERROR_MARK before anybody tries to access its type. */ |
84554bf9 | 6432 | if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK) |
32b3a273 | 6433 | { |
6434 | op0 = CONST0_RTX (tmode); | |
6435 | if (op0 != 0) | |
6436 | return op0; | |
6437 | return const0_rtx; | |
6438 | } | |
6439 | ||
6440 | mode = TYPE_MODE (type); | |
6441 | /* Use subtarget as the target for operand 0 of a binary operation. */ | |
d8e5b213 | 6442 | subtarget = get_subtarget (target); |
32b3a273 | 6443 | original_target = target; |
6444 | ignore = (target == const0_rtx | |
6445 | || ((code == NON_LVALUE_EXPR || code == NOP_EXPR | |
6446 | || code == CONVERT_EXPR || code == REFERENCE_EXPR | |
b3187c7c | 6447 | || code == COND_EXPR || code == VIEW_CONVERT_EXPR) |
32b3a273 | 6448 | && TREE_CODE (type) == VOID_TYPE)); |
6449 | ||
f75fb6ae | 6450 | /* If we are going to ignore this result, we need only do something |
6451 | if there is a side-effect somewhere in the expression. If there | |
c869557a | 6452 | is, short-circuit the most common cases here. Note that we must |
6453 | not call expand_expr with anything but const0_rtx in case this | |
6454 | is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */ | |
10f307d9 | 6455 | |
f75fb6ae | 6456 | if (ignore) |
6457 | { | |
6458 | if (! TREE_SIDE_EFFECTS (exp)) | |
6459 | return const0_rtx; | |
6460 | ||
155b05dc | 6461 | /* Ensure we reference a volatile object even if value is ignored, but |
6462 | don't do this if all we are doing is taking its address. */ | |
f75fb6ae | 6463 | if (TREE_THIS_VOLATILE (exp) |
6464 | && TREE_CODE (exp) != FUNCTION_DECL | |
155b05dc | 6465 | && mode != VOIDmode && mode != BLKmode |
6466 | && modifier != EXPAND_CONST_ADDRESS) | |
f75fb6ae | 6467 | { |
8a06f2d4 | 6468 | temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier); |
f75fb6ae | 6469 | if (GET_CODE (temp) == MEM) |
6470 | temp = copy_to_reg (temp); | |
6471 | return const0_rtx; | |
6472 | } | |
6473 | ||
155b05dc | 6474 | if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF |
6475 | || code == INDIRECT_REF || code == BUFFER_REF) | |
8a06f2d4 | 6476 | return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, |
6477 | modifier); | |
6478 | ||
155b05dc | 6479 | else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<' |
ba04d9d5 | 6480 | || code == ARRAY_REF || code == ARRAY_RANGE_REF) |
f75fb6ae | 6481 | { |
8a06f2d4 | 6482 | expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier); |
6483 | expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier); | |
f75fb6ae | 6484 | return const0_rtx; |
6485 | } | |
6486 | else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR) | |
6487 | && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1))) | |
6488 | /* If the second operand has no side effects, just evaluate | |
a92771b8 | 6489 | the first. */ |
8a06f2d4 | 6490 | return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, |
6491 | modifier); | |
155b05dc | 6492 | else if (code == BIT_FIELD_REF) |
6493 | { | |
8a06f2d4 | 6494 | expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier); |
6495 | expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier); | |
6496 | expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier); | |
155b05dc | 6497 | return const0_rtx; |
6498 | } | |
8a06f2d4 | 6499 | |
6dae9dfa | 6500 | target = 0; |
f75fb6ae | 6501 | } |
10f307d9 | 6502 | |
32a0589f | 6503 | #ifdef MAX_INTEGER_COMPUTATION_MODE |
5b98bbe7 | 6504 | /* Only check stuff here if the mode we want is different from the mode |
6505 | of the expression; if it's the same, check_max_integer_computiation_mode | |
6506 | will handle it. Do we really need to check this stuff at all? */ | |
6507 | ||
c6349b56 | 6508 | if (target |
5b98bbe7 | 6509 | && GET_MODE (target) != mode |
c6349b56 | 6510 | && TREE_CODE (exp) != INTEGER_CST |
6511 | && TREE_CODE (exp) != PARM_DECL | |
b0464da8 | 6512 | && TREE_CODE (exp) != ARRAY_REF |
ba04d9d5 | 6513 | && TREE_CODE (exp) != ARRAY_RANGE_REF |
b0464da8 | 6514 | && TREE_CODE (exp) != COMPONENT_REF |
6515 | && TREE_CODE (exp) != BIT_FIELD_REF | |
6516 | && TREE_CODE (exp) != INDIRECT_REF | |
0a3ec02a | 6517 | && TREE_CODE (exp) != CALL_EXPR |
4e96f61d | 6518 | && TREE_CODE (exp) != VAR_DECL |
6519 | && TREE_CODE (exp) != RTL_EXPR) | |
32a0589f | 6520 | { |
6521 | enum machine_mode mode = GET_MODE (target); | |
6522 | ||
6523 | if (GET_MODE_CLASS (mode) == MODE_INT | |
6524 | && mode > MAX_INTEGER_COMPUTATION_MODE) | |
f060a027 | 6525 | internal_error ("unsupported wide integer operation"); |
32a0589f | 6526 | } |
6527 | ||
5b98bbe7 | 6528 | if (tmode != mode |
6529 | && TREE_CODE (exp) != INTEGER_CST | |
c6349b56 | 6530 | && TREE_CODE (exp) != PARM_DECL |
b0464da8 | 6531 | && TREE_CODE (exp) != ARRAY_REF |
ba04d9d5 | 6532 | && TREE_CODE (exp) != ARRAY_RANGE_REF |
b0464da8 | 6533 | && TREE_CODE (exp) != COMPONENT_REF |
6534 | && TREE_CODE (exp) != BIT_FIELD_REF | |
6535 | && TREE_CODE (exp) != INDIRECT_REF | |
c6349b56 | 6536 | && TREE_CODE (exp) != VAR_DECL |
0a3ec02a | 6537 | && TREE_CODE (exp) != CALL_EXPR |
4e96f61d | 6538 | && TREE_CODE (exp) != RTL_EXPR |
a1db3221 | 6539 | && GET_MODE_CLASS (tmode) == MODE_INT |
32a0589f | 6540 | && tmode > MAX_INTEGER_COMPUTATION_MODE) |
f060a027 | 6541 | internal_error ("unsupported wide integer operation"); |
32a0589f | 6542 | |
6543 | check_max_integer_computation_mode (exp); | |
6544 | #endif | |
6545 | ||
34f17b00 | 6546 | /* If will do cse, generate all results into pseudo registers |
6547 | since 1) that allows cse to find more things | |
6548 | and 2) otherwise cse could produce an insn the machine | |
805e22b2 | 6549 | cannot support. An exception is a CONSTRUCTOR into a multi-word |
6550 | MEM: that's much more likely to be most efficient into the MEM. | |
6551 | Another is a CALL_EXPR which must return in memory. */ | |
34f17b00 | 6552 | |
10f307d9 | 6553 | if (! cse_not_expected && mode != BLKmode && target |
18279aee | 6554 | && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER) |
805e22b2 | 6555 | && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD) |
6556 | && ! (code == CALL_EXPR && aggregate_value_p (exp))) | |
10f307d9 | 6557 | target = subtarget; |
6558 | ||
10f307d9 | 6559 | switch (code) |
6560 | { | |
6561 | case LABEL_DECL: | |
bb5fd95e | 6562 | { |
6563 | tree function = decl_function_context (exp); | |
6564 | /* Handle using a label in a containing function. */ | |
f94a4a77 | 6565 | if (function != current_function_decl |
6566 | && function != inline_function_decl && function != 0) | |
bb5fd95e | 6567 | { |
6568 | struct function *p = find_function_data (function); | |
0a893c29 | 6569 | p->expr->x_forced_labels |
6570 | = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp), | |
6571 | p->expr->x_forced_labels); | |
bb5fd95e | 6572 | } |
be2828ce | 6573 | else |
6574 | { | |
be2828ce | 6575 | if (modifier == EXPAND_INITIALIZER) |
6576 | forced_labels = gen_rtx_EXPR_LIST (VOIDmode, | |
6577 | label_rtx (exp), | |
6578 | forced_labels); | |
6579 | } | |
7014838c | 6580 | |
941522d6 | 6581 | temp = gen_rtx_MEM (FUNCTION_MODE, |
6582 | gen_rtx_LABEL_REF (Pmode, label_rtx (exp))); | |
f94a4a77 | 6583 | if (function != current_function_decl |
6584 | && function != inline_function_decl && function != 0) | |
1aaabd2e | 6585 | LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1; |
6586 | return temp; | |
bb5fd95e | 6587 | } |
10f307d9 | 6588 | |
6589 | case PARM_DECL: | |
71a455ac | 6590 | if (!DECL_RTL_SET_P (exp)) |
10f307d9 | 6591 | { |
6592 | error_with_decl (exp, "prior parameter's size depends on `%s'"); | |
2ef1e405 | 6593 | return CONST0_RTX (mode); |
10f307d9 | 6594 | } |
6595 | ||
a92771b8 | 6596 | /* ... fall through ... */ |
d2ae1b1e | 6597 | |
10f307d9 | 6598 | case VAR_DECL: |
f8c3511b | 6599 | /* If a static var's type was incomplete when the decl was written, |
6600 | but the type is complete now, lay out the decl now. */ | |
4b72716d | 6601 | if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp)) |
f8c3511b | 6602 | && (TREE_STATIC (exp) || DECL_EXTERNAL (exp))) |
6603 | { | |
f96c43fb | 6604 | rtx value = DECL_RTL_IF_SET (exp); |
6605 | ||
f8c3511b | 6606 | layout_decl (exp, 0); |
f96c43fb | 6607 | |
6608 | /* If the RTL was already set, update its mode and memory | |
6609 | attributes. */ | |
6610 | if (value != 0) | |
6611 | { | |
6612 | PUT_MODE (value, DECL_MODE (exp)); | |
6613 | SET_DECL_RTL (exp, 0); | |
6614 | set_mem_attributes (value, exp, 1); | |
6615 | SET_DECL_RTL (exp, value); | |
6616 | } | |
e17f5b23 | 6617 | } |
eb4b06b6 | 6618 | |
a92771b8 | 6619 | /* ... fall through ... */ |
d2ae1b1e | 6620 | |
f8c3511b | 6621 | case FUNCTION_DECL: |
10f307d9 | 6622 | case RESULT_DECL: |
6623 | if (DECL_RTL (exp) == 0) | |
6624 | abort (); | |
d2ae1b1e | 6625 | |
34f17b00 | 6626 | /* Ensure variable marked as used even if it doesn't go through |
6627 | a parser. If it hasn't be used yet, write out an external | |
6628 | definition. */ | |
6629 | if (! TREE_USED (exp)) | |
6630 | { | |
6631 | assemble_external (exp); | |
6632 | TREE_USED (exp) = 1; | |
6633 | } | |
6634 | ||
6e6b4174 | 6635 | /* Show we haven't gotten RTL for this yet. */ |
6636 | temp = 0; | |
6637 | ||
10f307d9 | 6638 | /* Handle variables inherited from containing functions. */ |
6639 | context = decl_function_context (exp); | |
6640 | ||
6641 | /* We treat inline_function_decl as an alias for the current function | |
6642 | because that is the inline function whose vars, types, etc. | |
6643 | are being merged into the current function. | |
6644 | See expand_inline_function. */ | |
d2ae1b1e | 6645 | |
10f307d9 | 6646 | if (context != 0 && context != current_function_decl |
6647 | && context != inline_function_decl | |
6648 | /* If var is static, we don't need a static chain to access it. */ | |
6649 | && ! (GET_CODE (DECL_RTL (exp)) == MEM | |
6650 | && CONSTANT_P (XEXP (DECL_RTL (exp), 0)))) | |
6651 | { | |
6652 | rtx addr; | |
6653 | ||
6654 | /* Mark as non-local and addressable. */ | |
bc417a8f | 6655 | DECL_NONLOCAL (exp) = 1; |
8fd50fe1 | 6656 | if (DECL_NO_STATIC_CHAIN (current_function_decl)) |
6657 | abort (); | |
9b86eec0 | 6658 | (*lang_hooks.mark_addressable) (exp); |
10f307d9 | 6659 | if (GET_CODE (DECL_RTL (exp)) != MEM) |
6660 | abort (); | |
6661 | addr = XEXP (DECL_RTL (exp), 0); | |
6662 | if (GET_CODE (addr) == MEM) | |
537ffcfc | 6663 | addr |
6664 | = replace_equiv_address (addr, | |
6665 | fix_lexical_addr (XEXP (addr, 0), exp)); | |
10f307d9 | 6666 | else |
6667 | addr = fix_lexical_addr (addr, exp); | |
f7c44134 | 6668 | |
537ffcfc | 6669 | temp = replace_equiv_address (DECL_RTL (exp), addr); |
10f307d9 | 6670 | } |
2ef1e405 | 6671 | |
10f307d9 | 6672 | /* This is the case of an array whose size is to be determined |
6673 | from its initializer, while the initializer is still being parsed. | |
6674 | See expand_decl. */ | |
d2ae1b1e | 6675 | |
6e6b4174 | 6676 | else if (GET_CODE (DECL_RTL (exp)) == MEM |
6677 | && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG) | |
537ffcfc | 6678 | temp = validize_mem (DECL_RTL (exp)); |
d2ae1b1e | 6679 | |
6680 | /* If DECL_RTL is memory, we are in the normal case and either | |
6681 | the address is not valid or it is not a register and -fforce-addr | |
6682 | is specified, get the address into a register. */ | |
6683 | ||
6e6b4174 | 6684 | else if (GET_CODE (DECL_RTL (exp)) == MEM |
6685 | && modifier != EXPAND_CONST_ADDRESS | |
6686 | && modifier != EXPAND_SUM | |
6687 | && modifier != EXPAND_INITIALIZER | |
6688 | && (! memory_address_p (DECL_MODE (exp), | |
6689 | XEXP (DECL_RTL (exp), 0)) | |
6690 | || (flag_force_addr | |
6691 | && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG))) | |
537ffcfc | 6692 | temp = replace_equiv_address (DECL_RTL (exp), |
6693 | copy_rtx (XEXP (DECL_RTL (exp), 0))); | |
acfb31e5 | 6694 | |
6e6b4174 | 6695 | /* If we got something, return it. But first, set the alignment |
5ac439f3 | 6696 | if the address is a register. */ |
6e6b4174 | 6697 | if (temp != 0) |
6698 | { | |
6699 | if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG) | |
80909c64 | 6700 | mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp)); |
6e6b4174 | 6701 | |
6702 | return temp; | |
6703 | } | |
6704 | ||
acfb31e5 | 6705 | /* If the mode of DECL_RTL does not match that of the decl, it |
6706 | must be a promoted value. We return a SUBREG of the wanted mode, | |
6707 | but mark it so that we know that it was already extended. */ | |
6708 | ||
6709 | if (GET_CODE (DECL_RTL (exp)) == REG | |
ac85e396 | 6710 | && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp)) |
acfb31e5 | 6711 | { |
acfb31e5 | 6712 | /* Get the signedness used for this variable. Ensure we get the |
6713 | same mode we got when the variable was declared. */ | |
54fa89c6 | 6714 | if (GET_MODE (DECL_RTL (exp)) |
ff385626 | 6715 | != promote_mode (type, DECL_MODE (exp), &unsignedp, |
d20c8e15 | 6716 | (TREE_CODE (exp) == RESULT_DECL ? 1 : 0))) |
acfb31e5 | 6717 | abort (); |
6718 | ||
701e46d0 | 6719 | temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp)); |
acfb31e5 | 6720 | SUBREG_PROMOTED_VAR_P (temp) = 1; |
bfd242e8 | 6721 | SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp); |
acfb31e5 | 6722 | return temp; |
6723 | } | |
6724 | ||
10f307d9 | 6725 | return DECL_RTL (exp); |
6726 | ||
6727 | case INTEGER_CST: | |
9998b6a9 | 6728 | temp = immed_double_const (TREE_INT_CST_LOW (exp), |
a0c2c45b | 6729 | TREE_INT_CST_HIGH (exp), mode); |
10f307d9 | 6730 | |
9998b6a9 | 6731 | /* ??? If overflow is set, fold will have done an incomplete job, |
6732 | which can result in (plus xx (const_int 0)), which can get | |
6733 | simplified by validate_replace_rtx during virtual register | |
6734 | instantiation, which can result in unrecognizable insns. | |
6735 | Avoid this by forcing all overflows into registers. */ | |
f64482cc | 6736 | if (TREE_CONSTANT_OVERFLOW (exp) |
6737 | && modifier != EXPAND_INITIALIZER) | |
9998b6a9 | 6738 | temp = force_reg (mode, temp); |
6739 | ||
6740 | return temp; | |
6741 | ||
10f307d9 | 6742 | case CONST_DECL: |
8a06f2d4 | 6743 | return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0); |
10f307d9 | 6744 | |
6745 | case REAL_CST: | |
6746 | /* If optimized, generate immediate CONST_DOUBLE | |
fa56dc1d | 6747 | which will be turned into memory by reload if necessary. |
6748 | ||
10f307d9 | 6749 | We used to force a register so that loop.c could see it. But |
6750 | this does not allow gen_* patterns to perform optimizations with | |
6751 | the constants. It also produces two insns in cases like "x = 1.0;". | |
6752 | On most machines, floating-point constants are not permitted in | |
6753 | many insns, so we'd end up copying it to a register in any case. | |
6754 | ||
6755 | Now, we do the copying in expand_binop, if appropriate. */ | |
2ff23ed0 | 6756 | return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp), |
6757 | TYPE_MODE (TREE_TYPE (exp))); | |
10f307d9 | 6758 | |
6759 | case COMPLEX_CST: | |
6760 | case STRING_CST: | |
6761 | if (! TREE_CST_RTL (exp)) | |
abf74c5b | 6762 | output_constant_def (exp, 1); |
10f307d9 | 6763 | |
6764 | /* TREE_CST_RTL probably contains a constant address. | |
6765 | On RISC machines where a constant address isn't valid, | |
6766 | make some insns to get that address into a register. */ | |
6767 | if (GET_CODE (TREE_CST_RTL (exp)) == MEM | |
6768 | && modifier != EXPAND_CONST_ADDRESS | |
6769 | && modifier != EXPAND_INITIALIZER | |
6770 | && modifier != EXPAND_SUM | |
d2ae1b1e | 6771 | && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)) |
6772 | || (flag_force_addr | |
6773 | && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG))) | |
537ffcfc | 6774 | return replace_equiv_address (TREE_CST_RTL (exp), |
6775 | copy_rtx (XEXP (TREE_CST_RTL (exp), 0))); | |
10f307d9 | 6776 | return TREE_CST_RTL (exp); |
6777 | ||
dae7d8ad | 6778 | case EXPR_WITH_FILE_LOCATION: |
e8832c23 | 6779 | { |
6780 | rtx to_return; | |
e772a198 | 6781 | const char *saved_input_filename = input_filename; |
e8832c23 | 6782 | int saved_lineno = lineno; |
6783 | input_filename = EXPR_WFL_FILENAME (exp); | |
6784 | lineno = EXPR_WFL_LINENO (exp); | |
6785 | if (EXPR_WFL_EMIT_LINE_NOTE (exp)) | |
6786 | emit_line_note (input_filename, lineno); | |
5929001a | 6787 | /* Possibly avoid switching back and forth here. */ |
1084d1c7 | 6788 | to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier); |
e8832c23 | 6789 | input_filename = saved_input_filename; |
6790 | lineno = saved_lineno; | |
6791 | return to_return; | |
6792 | } | |
dae7d8ad | 6793 | |
10f307d9 | 6794 | case SAVE_EXPR: |
6795 | context = decl_function_context (exp); | |
d2ae1b1e | 6796 | |
f94a4a77 | 6797 | /* If this SAVE_EXPR was at global context, assume we are an |
6798 | initialization function and move it into our context. */ | |
6799 | if (context == 0) | |
6800 | SAVE_EXPR_CONTEXT (exp) = current_function_decl; | |
6801 | ||
10f307d9 | 6802 | /* We treat inline_function_decl as an alias for the current function |
6803 | because that is the inline function whose vars, types, etc. | |
6804 | are being merged into the current function. | |
6805 | See expand_inline_function. */ | |
6806 | if (context == current_function_decl || context == inline_function_decl) | |
6807 | context = 0; | |
6808 | ||
6809 | /* If this is non-local, handle it. */ | |
6810 | if (context) | |
6811 | { | |
f94a4a77 | 6812 | /* The following call just exists to abort if the context is |
6813 | not of a containing function. */ | |
6814 | find_function_data (context); | |
6815 | ||
10f307d9 | 6816 | temp = SAVE_EXPR_RTL (exp); |
6817 | if (temp && GET_CODE (temp) == REG) | |
6818 | { | |
6819 | put_var_into_stack (exp); | |
6820 | temp = SAVE_EXPR_RTL (exp); | |
6821 | } | |
6822 | if (temp == 0 || GET_CODE (temp) != MEM) | |
6823 | abort (); | |
537ffcfc | 6824 | return |
6825 | replace_equiv_address (temp, | |
6826 | fix_lexical_addr (XEXP (temp, 0), exp)); | |
10f307d9 | 6827 | } |
6828 | if (SAVE_EXPR_RTL (exp) == 0) | |
6829 | { | |
7f99bf0e | 6830 | if (mode == VOIDmode) |
6831 | temp = const0_rtx; | |
6832 | else | |
387bc205 | 6833 | temp = assign_temp (build_qualified_type (type, |
6834 | (TYPE_QUALS (type) | |
6835 | | TYPE_QUAL_CONST)), | |
6836 | 3, 0, 0); | |
acfb31e5 | 6837 | |
10f307d9 | 6838 | SAVE_EXPR_RTL (exp) = temp; |
10f307d9 | 6839 | if (!optimize && GET_CODE (temp) == REG) |
941522d6 | 6840 | save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp, |
6841 | save_expr_regs); | |
063e6b85 | 6842 | |
6843 | /* If the mode of TEMP does not match that of the expression, it | |
6844 | must be a promoted value. We pass store_expr a SUBREG of the | |
6845 | wanted mode but mark it so that we know that it was already | |
6846 | extended. Note that `unsignedp' was modified above in | |
6847 | this case. */ | |
6848 | ||
6849 | if (GET_CODE (temp) == REG && GET_MODE (temp) != mode) | |
6850 | { | |
701e46d0 | 6851 | temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp)); |
063e6b85 | 6852 | SUBREG_PROMOTED_VAR_P (temp) = 1; |
bfd242e8 | 6853 | SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp); |
063e6b85 | 6854 | } |
6855 | ||
1b77f1df | 6856 | if (temp == const0_rtx) |
8a06f2d4 | 6857 | expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0); |
1b77f1df | 6858 | else |
6859 | store_expr (TREE_OPERAND (exp, 0), temp, 0); | |
997d68fe | 6860 | |
6861 | TREE_USED (exp) = 1; | |
10f307d9 | 6862 | } |
acfb31e5 | 6863 | |
6864 | /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it | |
6865 | must be a promoted value. We return a SUBREG of the wanted mode, | |
a92771b8 | 6866 | but mark it so that we know that it was already extended. */ |
acfb31e5 | 6867 | |
6868 | if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG | |
6869 | && GET_MODE (SAVE_EXPR_RTL (exp)) != mode) | |
6870 | { | |
16ea2f41 | 6871 | /* Compute the signedness and make the proper SUBREG. */ |
6872 | promote_mode (type, mode, &unsignedp, 0); | |
701e46d0 | 6873 | temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp)); |
acfb31e5 | 6874 | SUBREG_PROMOTED_VAR_P (temp) = 1; |
bfd242e8 | 6875 | SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp); |
acfb31e5 | 6876 | return temp; |
6877 | } | |
6878 | ||
10f307d9 | 6879 | return SAVE_EXPR_RTL (exp); |
6880 | ||
0e676ec9 | 6881 | case UNSAVE_EXPR: |
6882 | { | |
6883 | rtx temp; | |
6884 | temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier); | |
1d347c23 | 6885 | TREE_OPERAND (exp, 0) |
6886 | = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0)); | |
0e676ec9 | 6887 | return temp; |
6888 | } | |
6889 | ||
c869557a | 6890 | case PLACEHOLDER_EXPR: |
0dbd1c74 | 6891 | { |
c3a9c149 | 6892 | tree old_list = placeholder_list; |
96216d37 | 6893 | tree placeholder_expr = 0; |
0dbd1c74 | 6894 | |
c3a9c149 | 6895 | exp = find_placeholder (exp, &placeholder_expr); |
86ce88aa | 6896 | if (exp == 0) |
6897 | abort (); | |
6898 | ||
c3a9c149 | 6899 | placeholder_list = TREE_CHAIN (placeholder_expr); |
8a06f2d4 | 6900 | temp = expand_expr (exp, original_target, tmode, modifier); |
c3a9c149 | 6901 | placeholder_list = old_list; |
6902 | return temp; | |
0dbd1c74 | 6903 | } |
c869557a | 6904 | |
c869557a | 6905 | case WITH_RECORD_EXPR: |
6906 | /* Put the object on the placeholder list, expand our first operand, | |
6907 | and pop the list. */ | |
6908 | placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE, | |
6909 | placeholder_list); | |
8a06f2d4 | 6910 | target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode, |
6911 | modifier); | |
c869557a | 6912 | placeholder_list = TREE_CHAIN (placeholder_list); |
6913 | return target; | |
6914 | ||
c19f64ba | 6915 | case GOTO_EXPR: |
6916 | if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL) | |
6917 | expand_goto (TREE_OPERAND (exp, 0)); | |
6918 | else | |
6919 | expand_computed_goto (TREE_OPERAND (exp, 0)); | |
6920 | return const0_rtx; | |
6921 | ||
10f307d9 | 6922 | case EXIT_EXPR: |
2571646d | 6923 | expand_exit_loop_if_false (NULL, |
34f17b00 | 6924 | invert_truthvalue (TREE_OPERAND (exp, 0))); |
10f307d9 | 6925 | return const0_rtx; |
6926 | ||
d0b30cc7 | 6927 | case LABELED_BLOCK_EXPR: |
6928 | if (LABELED_BLOCK_BODY (exp)) | |
4e3da239 | 6929 | expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1); |
6312a35e | 6930 | /* Should perhaps use expand_label, but this is simpler and safer. */ |
18df45ce | 6931 | do_pending_stack_adjust (); |
d0b30cc7 | 6932 | emit_label (label_rtx (LABELED_BLOCK_LABEL (exp))); |
6933 | return const0_rtx; | |
6934 | ||
6935 | case EXIT_BLOCK_EXPR: | |
6936 | if (EXIT_BLOCK_RETURN (exp)) | |
be2828ce | 6937 | sorry ("returned value in block_exit_expr"); |
d0b30cc7 | 6938 | expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp))); |
6939 | return const0_rtx; | |
6940 | ||
10f307d9 | 6941 | case LOOP_EXPR: |
88ac3f7f | 6942 | push_temp_slots (); |
10f307d9 | 6943 | expand_start_loop (1); |
4e3da239 | 6944 | expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1); |
10f307d9 | 6945 | expand_end_loop (); |
88ac3f7f | 6946 | pop_temp_slots (); |
10f307d9 | 6947 | |
6948 | return const0_rtx; | |
6949 | ||
6950 | case BIND_EXPR: | |
6951 | { | |
6952 | tree vars = TREE_OPERAND (exp, 0); | |
10f307d9 | 6953 | |
6954 | /* Need to open a binding contour here because | |
694ec519 | 6955 | if there are any cleanups they must be contained here. */ |
87a9ad11 | 6956 | expand_start_bindings (2); |
10f307d9 | 6957 | |
16203a1f | 6958 | /* Mark the corresponding BLOCK for output in its proper place. */ |
6959 | if (TREE_OPERAND (exp, 2) != 0 | |
6960 | && ! TREE_USED (TREE_OPERAND (exp, 2))) | |
20325f61 | 6961 | (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2)); |
10f307d9 | 6962 | |
6963 | /* If VARS have not yet been expanded, expand them now. */ | |
6964 | while (vars) | |
6965 | { | |
0e8e37b2 | 6966 | if (!DECL_RTL_SET_P (vars)) |
805e22b2 | 6967 | expand_decl (vars); |
10f307d9 | 6968 | expand_decl_init (vars); |
6969 | vars = TREE_CHAIN (vars); | |
6970 | } | |
6971 | ||
8a06f2d4 | 6972 | temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier); |
10f307d9 | 6973 | |
6974 | expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0); | |
6975 | ||
6976 | return temp; | |
6977 | } | |
6978 | ||
6979 | case RTL_EXPR: | |
1e06f4a9 | 6980 | if (RTL_EXPR_SEQUENCE (exp)) |
6981 | { | |
6982 | if (RTL_EXPR_SEQUENCE (exp) == const0_rtx) | |
6983 | abort (); | |
31d3e01c | 6984 | emit_insn (RTL_EXPR_SEQUENCE (exp)); |
1e06f4a9 | 6985 | RTL_EXPR_SEQUENCE (exp) = const0_rtx; |
6986 | } | |
e94410ad | 6987 | preserve_rtl_expr_result (RTL_EXPR_RTL (exp)); |
6988 | free_temps_for_rtl_expr (exp); | |
10f307d9 | 6989 | return RTL_EXPR_RTL (exp); |
6990 | ||
6991 | case CONSTRUCTOR: | |
f75fb6ae | 6992 | /* If we don't need the result, just ensure we evaluate any |
6993 | subexpressions. */ | |
6994 | if (ignore) | |
6995 | { | |
6996 | tree elt; | |
8a06f2d4 | 6997 | |
f75fb6ae | 6998 | for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt)) |
8a06f2d4 | 6999 | expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0); |
7000 | ||
f75fb6ae | 7001 | return const0_rtx; |
7002 | } | |
603c4ee1 | 7003 | |
2ef1e405 | 7004 | /* All elts simple constants => refer to a constant in memory. But |
7005 | if this is a non-BLKmode mode, let it store a field at a time | |
7006 | since that should make a CONST_INT or CONST_DOUBLE when we | |
603c4ee1 | 7007 | fold. Likewise, if we have a target we can use, it is best to |
8cb5b99d | 7008 | store directly into the target unless the type is large enough |
7009 | that memcpy will be used. If we are making an initializer and | |
a43fa0dd | 7010 | all operands are constant, put it in memory as well. |
7011 | ||
7012 | FIXME: Avoid trying to fill vector constructors piece-meal. | |
7013 | Output them with output_constant_def below unless we're sure | |
7014 | they're zeros. This should go away when vector initializers | |
7015 | are treated like VECTOR_CST instead of arrays. | |
7016 | */ | |
f75fb6ae | 7017 | else if ((TREE_STATIC (exp) |
603c4ee1 | 7018 | && ((mode == BLKmode |
997d68fe | 7019 | && ! (target != 0 && safe_from_p (target, exp, 1))) |
8cb5b99d | 7020 | || TREE_ADDRESSABLE (exp) |
325d1c45 | 7021 | || (host_integerp (TYPE_SIZE_UNIT (type), 1) |
fa56dc1d | 7022 | && (! MOVE_BY_PIECES_P |
325d1c45 | 7023 | (tree_low_cst (TYPE_SIZE_UNIT (type), 1), |
7024 | TYPE_ALIGN (type))) | |
ff385626 | 7025 | && ((TREE_CODE (type) == VECTOR_TYPE |
7026 | && !is_zeros_p (exp)) | |
7027 | || ! mostly_zeros_p (exp))))) | |
f75fb6ae | 7028 | || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp))) |
10f307d9 | 7029 | { |
abf74c5b | 7030 | rtx constructor = output_constant_def (exp, 1); |
325d1c45 | 7031 | |
bb5fd95e | 7032 | if (modifier != EXPAND_CONST_ADDRESS |
7033 | && modifier != EXPAND_INITIALIZER | |
537ffcfc | 7034 | && modifier != EXPAND_SUM) |
7035 | constructor = validize_mem (constructor); | |
7036 | ||
10f307d9 | 7037 | return constructor; |
7038 | } | |
10f307d9 | 7039 | else |
7040 | { | |
c359e3f7 | 7041 | /* Handle calls that pass values in multiple non-contiguous |
7042 | locations. The Irix 6 ABI has examples of this. */ | |
997d68fe | 7043 | if (target == 0 || ! safe_from_p (target, exp, 1) |
c359e3f7 | 7044 | || GET_CODE (target) == PARALLEL) |
387bc205 | 7045 | target |
7046 | = assign_temp (build_qualified_type (type, | |
7047 | (TYPE_QUALS (type) | |
7048 | | (TREE_READONLY (exp) | |
7049 | * TYPE_QUAL_CONST))), | |
18279aee | 7050 | 0, TREE_ADDRESSABLE (exp), 1); |
6703a20a | 7051 | |
e7971c00 | 7052 | store_constructor (exp, target, 0, int_expr_size (exp)); |
10f307d9 | 7053 | return target; |
7054 | } | |
7055 | ||
7056 | case INDIRECT_REF: | |
7057 | { | |
7058 | tree exp1 = TREE_OPERAND (exp, 0); | |
bc7e8dbe | 7059 | tree index; |
fa56dc1d | 7060 | tree string = string_constant (exp1, &index); |
7061 | ||
981ac425 | 7062 | /* Try to optimize reads from const strings. */ |
ff385626 | 7063 | if (string |
7064 | && TREE_CODE (string) == STRING_CST | |
7065 | && TREE_CODE (index) == INTEGER_CST | |
a0c2c45b | 7066 | && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0 |
ff385626 | 7067 | && GET_MODE_CLASS (mode) == MODE_INT |
7068 | && GET_MODE_SIZE (mode) == 1 | |
8a06f2d4 | 7069 | && modifier != EXPAND_WRITE) |
ff385626 | 7070 | return gen_int_mode (TREE_STRING_POINTER (string) |
506664d9 | 7071 | [TREE_INT_CST_LOW (index)], mode); |
10f307d9 | 7072 | |
1128c34c | 7073 | op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM); |
7074 | op0 = memory_address (mode, op0); | |
941522d6 | 7075 | temp = gen_rtx_MEM (mode, op0); |
f7c44134 | 7076 | set_mem_attributes (temp, exp, 0); |
37749825 | 7077 | |
155b05dc | 7078 | /* If we are writing to this object and its type is a record with |
7079 | readonly fields, we must mark it as readonly so it will | |
7080 | conflict with readonly references to those fields. */ | |
8a06f2d4 | 7081 | if (modifier == EXPAND_WRITE && readonly_fields_p (type)) |
155b05dc | 7082 | RTX_UNCHANGING_P (temp) = 1; |
7083 | ||
2a8921a2 | 7084 | return temp; |
7085 | } | |
10f307d9 | 7086 | |
7087 | case ARRAY_REF: | |
cf389750 | 7088 | if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE) |
7089 | abort (); | |
10f307d9 | 7090 | |
10f307d9 | 7091 | { |
cf389750 | 7092 | tree array = TREE_OPERAND (exp, 0); |
7093 | tree domain = TYPE_DOMAIN (TREE_TYPE (array)); | |
7094 | tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node; | |
902de8ed | 7095 | tree index = convert (sizetype, TREE_OPERAND (exp, 1)); |
26e80911 | 7096 | HOST_WIDE_INT i; |
c869557a | 7097 | |
ecef77f6 | 7098 | /* Optimize the special-case of a zero lower bound. |
7099 | ||
7100 | We convert the low_bound to sizetype to avoid some problems | |
7101 | with constant folding. (E.g. suppose the lower bound is 1, | |
7102 | and its mode is QI. Without the conversion, (ARRAY | |
7103 | +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1)) | |
902de8ed | 7104 | +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */ |
ecef77f6 | 7105 | |
cf389750 | 7106 | if (! integer_zerop (low_bound)) |
902de8ed | 7107 | index = size_diffop (index, convert (sizetype, low_bound)); |
cf389750 | 7108 | |
cf389750 | 7109 | /* Fold an expression like: "foo"[2]. |
8169404e | 7110 | This is not done in fold so it won't happen inside &. |
7111 | Don't fold if this is for wide characters since it's too | |
7112 | difficult to do correctly and this is a very rare case. */ | |
cf389750 | 7113 | |
b1ff8ab1 | 7114 | if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER |
7115 | && TREE_CODE (array) == STRING_CST | |
cf389750 | 7116 | && TREE_CODE (index) == INTEGER_CST |
a0c2c45b | 7117 | && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0 |
8169404e | 7118 | && GET_MODE_CLASS (mode) == MODE_INT |
7119 | && GET_MODE_SIZE (mode) == 1) | |
506664d9 | 7120 | return gen_int_mode (TREE_STRING_POINTER (array) |
7121 | [TREE_INT_CST_LOW (index)], mode); | |
10f307d9 | 7122 | |
cf389750 | 7123 | /* If this is a constant index into a constant array, |
7124 | just get the value from the array. Handle both the cases when | |
7125 | we have an explicit constructor and when our operand is a variable | |
7126 | that was declared const. */ | |
2ef1e405 | 7127 | |
b1ff8ab1 | 7128 | if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER |
7129 | && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array) | |
a0c2c45b | 7130 | && TREE_CODE (index) == INTEGER_CST |
fa56dc1d | 7131 | && 0 > compare_tree_int (index, |
a0c2c45b | 7132 | list_length (CONSTRUCTOR_ELTS |
7133 | (TREE_OPERAND (exp, 0))))) | |
cf389750 | 7134 | { |
a0c2c45b | 7135 | tree elem; |
7136 | ||
7137 | for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)), | |
7138 | i = TREE_INT_CST_LOW (index); | |
7139 | elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem)) | |
7140 | ; | |
7141 | ||
7142 | if (elem) | |
8a06f2d4 | 7143 | return expand_expr (fold (TREE_VALUE (elem)), target, tmode, |
7144 | modifier); | |
cf389750 | 7145 | } |
fa56dc1d | 7146 | |
cf389750 | 7147 | else if (optimize >= 1 |
b1ff8ab1 | 7148 | && modifier != EXPAND_CONST_ADDRESS |
7149 | && modifier != EXPAND_INITIALIZER | |
cf389750 | 7150 | && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array) |
7151 | && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array) | |
7152 | && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK) | |
7153 | { | |
26e80911 | 7154 | if (TREE_CODE (index) == INTEGER_CST) |
cf389750 | 7155 | { |
7156 | tree init = DECL_INITIAL (array); | |
7157 | ||
cf389750 | 7158 | if (TREE_CODE (init) == CONSTRUCTOR) |
7159 | { | |
5d844ba2 | 7160 | tree elem; |
cf389750 | 7161 | |
a0c2c45b | 7162 | for (elem = CONSTRUCTOR_ELTS (init); |
7f01d61c | 7163 | (elem |
7164 | && !tree_int_cst_equal (TREE_PURPOSE (elem), index)); | |
a0c2c45b | 7165 | elem = TREE_CHAIN (elem)) |
7166 | ; | |
7167 | ||
a6d6d374 | 7168 | if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem))) |
cf389750 | 7169 | return expand_expr (fold (TREE_VALUE (elem)), target, |
8a06f2d4 | 7170 | tmode, modifier); |
cf389750 | 7171 | } |
7172 | else if (TREE_CODE (init) == STRING_CST | |
a0c2c45b | 7173 | && 0 > compare_tree_int (index, |
7174 | TREE_STRING_LENGTH (init))) | |
bdb729f9 | 7175 | { |
7176 | tree type = TREE_TYPE (TREE_TYPE (init)); | |
7177 | enum machine_mode mode = TYPE_MODE (type); | |
7178 | ||
7179 | if (GET_MODE_CLASS (mode) == MODE_INT | |
7180 | && GET_MODE_SIZE (mode) == 1) | |
506664d9 | 7181 | return gen_int_mode (TREE_STRING_POINTER (init) |
7182 | [TREE_INT_CST_LOW (index)], mode); | |
bdb729f9 | 7183 | } |
cf389750 | 7184 | } |
7185 | } | |
7186 | } | |
fa56dc1d | 7187 | /* Fall through. */ |
10f307d9 | 7188 | |
7189 | case COMPONENT_REF: | |
7190 | case BIT_FIELD_REF: | |
ba04d9d5 | 7191 | case ARRAY_RANGE_REF: |
2ef1e405 | 7192 | /* If the operand is a CONSTRUCTOR, we can just extract the |
71baa5fb | 7193 | appropriate field if it is present. Don't do this if we have |
7194 | already written the data since we want to refer to that copy | |
7195 | and varasm.c assumes that's what we'll do. */ | |
ba04d9d5 | 7196 | if (code == COMPONENT_REF |
71baa5fb | 7197 | && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR |
7198 | && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0) | |
2ef1e405 | 7199 | { |
7200 | tree elt; | |
7201 | ||
7202 | for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt; | |
7203 | elt = TREE_CHAIN (elt)) | |
c30615f6 | 7204 | if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1) |
7205 | /* We can normally use the value of the field in the | |
7206 | CONSTRUCTOR. However, if this is a bitfield in | |
7207 | an integral mode that we can fit in a HOST_WIDE_INT, | |
7208 | we must mask only the number of bits in the bitfield, | |
7209 | since this is done implicitly by the constructor. If | |
7210 | the bitfield does not meet either of those conditions, | |
7211 | we can't do this optimization. */ | |
7212 | && (! DECL_BIT_FIELD (TREE_PURPOSE (elt)) | |
7213 | || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt))) | |
7214 | == MODE_INT) | |
7215 | && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt))) | |
7216 | <= HOST_BITS_PER_WIDE_INT)))) | |
7217 | { | |
fa56dc1d | 7218 | op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier); |
c30615f6 | 7219 | if (DECL_BIT_FIELD (TREE_PURPOSE (elt))) |
7220 | { | |
ab7943b9 | 7221 | HOST_WIDE_INT bitsize |
7222 | = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt))); | |
6de9716c | 7223 | enum machine_mode imode |
7224 | = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt))); | |
c30615f6 | 7225 | |
7226 | if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt)))) | |
7227 | { | |
7228 | op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1); | |
6de9716c | 7229 | op0 = expand_and (imode, op0, op1, target); |
c30615f6 | 7230 | } |
7231 | else | |
7232 | { | |
7233 | tree count | |
997d68fe | 7234 | = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize, |
7235 | 0); | |
c30615f6 | 7236 | |
7237 | op0 = expand_shift (LSHIFT_EXPR, imode, op0, count, | |
7238 | target, 0); | |
7239 | op0 = expand_shift (RSHIFT_EXPR, imode, op0, count, | |
7240 | target, 0); | |
7241 | } | |
7242 | } | |
7243 | ||
7244 | return op0; | |
7245 | } | |
2ef1e405 | 7246 | } |
7247 | ||
10f307d9 | 7248 | { |
7249 | enum machine_mode mode1; | |
02e7a332 | 7250 | HOST_WIDE_INT bitsize, bitpos; |
954bdcb1 | 7251 | tree offset; |
10f307d9 | 7252 | int volatilep = 0; |
7fce34be | 7253 | tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset, |
2b96c5f6 | 7254 | &mode1, &unsignedp, &volatilep); |
c3a9c149 | 7255 | rtx orig_op0; |
10f307d9 | 7256 | |
227bf826 | 7257 | /* If we got back the original object, something is wrong. Perhaps |
7258 | we are evaluating an expression too early. In any event, don't | |
7259 | infinitely recurse. */ | |
7260 | if (tem == exp) | |
7261 | abort (); | |
7262 | ||
5dfe36ec | 7263 | /* If TEM's type is a union of variable size, pass TARGET to the inner |
00039714 | 7264 | computation, since it will need a temporary and TARGET is known |
7265 | to have to do. This occurs in unchecked conversion in Ada. */ | |
fa56dc1d | 7266 | |
c3a9c149 | 7267 | orig_op0 = op0 |
7268 | = expand_expr (tem, | |
7269 | (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE | |
7270 | && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) | |
7271 | != INTEGER_CST) | |
7272 | ? target : NULL_RTX), | |
7273 | VOIDmode, | |
7274 | (modifier == EXPAND_INITIALIZER | |
7275 | || modifier == EXPAND_CONST_ADDRESS) | |
7276 | ? modifier : EXPAND_NORMAL); | |
10f307d9 | 7277 | |
2a8921a2 | 7278 | /* If this is a constant, put it into a register if it is a |
155b05dc | 7279 | legitimate constant and OFFSET is 0 and memory if it isn't. */ |
2a8921a2 | 7280 | if (CONSTANT_P (op0)) |
7281 | { | |
7282 | enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem)); | |
155b05dc | 7283 | if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0) |
7284 | && offset == 0) | |
2a8921a2 | 7285 | op0 = force_reg (mode, op0); |
7286 | else | |
7287 | op0 = validize_mem (force_const_mem (mode, op0)); | |
7288 | } | |
7289 | ||
954bdcb1 | 7290 | if (offset != 0) |
7291 | { | |
fac6aae6 | 7292 | rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM); |
954bdcb1 | 7293 | |
78ccc5b4 | 7294 | /* If this object is in a register, put it into memory. |
155b05dc | 7295 | This case can't occur in C, but can in Ada if we have |
7296 | unchecked conversion of an expression from a scalar type to | |
7297 | an array or record type. */ | |
7298 | if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG | |
7299 | || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF) | |
7300 | { | |
99c75e88 | 7301 | /* If the operand is a SAVE_EXPR, we can deal with this by |
7302 | forcing the SAVE_EXPR into memory. */ | |
7303 | if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR) | |
1ec36ac9 | 7304 | { |
7305 | put_var_into_stack (TREE_OPERAND (exp, 0)); | |
7306 | op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0)); | |
7307 | } | |
99c75e88 | 7308 | else |
7309 | { | |
7310 | tree nt | |
7311 | = build_qualified_type (TREE_TYPE (tem), | |
7312 | (TYPE_QUALS (TREE_TYPE (tem)) | |
7313 | | TYPE_QUAL_CONST)); | |
7314 | rtx memloc = assign_temp (nt, 1, 1, 1); | |
7315 | ||
99c75e88 | 7316 | emit_move_insn (memloc, op0); |
7317 | op0 = memloc; | |
7318 | } | |
155b05dc | 7319 | } |
7320 | ||
954bdcb1 | 7321 | if (GET_CODE (op0) != MEM) |
7322 | abort (); | |
5785f96f | 7323 | |
5785f96f | 7324 | #ifdef POINTERS_EXTEND_UNSIGNED |
479e4d5e | 7325 | if (GET_MODE (offset_rtx) != Pmode) |
7326 | offset_rtx = convert_memory_address (Pmode, offset_rtx); | |
4a836698 | 7327 | #else |
7328 | if (GET_MODE (offset_rtx) != ptr_mode) | |
7329 | offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0); | |
5785f96f | 7330 | #endif |
7331 | ||
155b05dc | 7332 | /* A constant address in OP0 can have VOIDmode, we must not try |
f2eca2c2 | 7333 | to call force_reg for that case. Avoid that case. */ |
25d55d72 | 7334 | if (GET_CODE (op0) == MEM |
7335 | && GET_MODE (op0) == BLKmode | |
f2eca2c2 | 7336 | && GET_MODE (XEXP (op0, 0)) != VOIDmode |
155b05dc | 7337 | && bitsize != 0 |
fa56dc1d | 7338 | && (bitpos % bitsize) == 0 |
25d55d72 | 7339 | && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0 |
2b96c5f6 | 7340 | && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1)) |
25d55d72 | 7341 | { |
fac6aae6 | 7342 | op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT); |
25d55d72 | 7343 | bitpos = 0; |
7344 | } | |
7345 | ||
fcdc122e | 7346 | op0 = offset_address (op0, offset_rtx, |
7347 | highest_pow2_factor (offset)); | |
954bdcb1 | 7348 | } |
7349 | ||
67c68e45 | 7350 | /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT, |
7351 | record its alignment as BIGGEST_ALIGNMENT. */ | |
7352 | if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0 | |
7353 | && is_aligning_offset (offset, tem)) | |
7354 | set_mem_align (op0, BIGGEST_ALIGNMENT); | |
7355 | ||
10f307d9 | 7356 | /* Don't forget about volatility even if this is a bitfield. */ |
7357 | if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0)) | |
7358 | { | |
c3a9c149 | 7359 | if (op0 == orig_op0) |
7360 | op0 = copy_rtx (op0); | |
7361 | ||
10f307d9 | 7362 | MEM_VOLATILE_P (op0) = 1; |
7363 | } | |
7364 | ||
963043a6 | 7365 | /* The following code doesn't handle CONCAT. |
7366 | Assume only bitpos == 0 can be used for CONCAT, due to | |
7367 | one element arrays having the same mode as its element. */ | |
7368 | if (GET_CODE (op0) == CONCAT) | |
7369 | { | |
7370 | if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0))) | |
7371 | abort (); | |
7372 | return op0; | |
7373 | } | |
7374 | ||
4e05e574 | 7375 | /* In cases where an aligned union has an unaligned object |
7376 | as a field, we might be extracting a BLKmode value from | |
7377 | an integer-mode (e.g., SImode) object. Handle this case | |
7378 | by doing the extract into an object as wide as the field | |
7379 | (which we know to be the width of a basic mode), then | |
b1ff8ab1 | 7380 | storing into memory, and changing the mode to BLKmode. */ |
10f307d9 | 7381 | if (mode1 == VOIDmode |
4e05e574 | 7382 | || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG |
b1ff8ab1 | 7383 | || (mode1 != BLKmode && ! direct_load[(int) mode1] |
7384 | && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT | |
fb2d4326 | 7385 | && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT |
7386 | && modifier != EXPAND_CONST_ADDRESS | |
7387 | && modifier != EXPAND_INITIALIZER) | |
b1ff8ab1 | 7388 | /* If the field isn't aligned enough to fetch as a memref, |
7389 | fetch it as a bit field. */ | |
7390 | || (mode1 != BLKmode | |
2e0245a6 | 7391 | && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0)) |
b1ff8ab1 | 7392 | && ((TYPE_ALIGN (TREE_TYPE (tem)) |
7393 | < GET_MODE_ALIGNMENT (mode)) | |
7394 | || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))) | |
7395 | /* If the type and the field are a constant size and the | |
7396 | size of the type isn't the same size as the bitfield, | |
7397 | we must use bitfield operations. */ | |
7398 | || (bitsize >= 0 | |
7399 | && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) | |
7400 | == INTEGER_CST) | |
7401 | && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), | |
2b96c5f6 | 7402 | bitsize))) |
10f307d9 | 7403 | { |
10f307d9 | 7404 | enum machine_mode ext_mode = mode; |
7405 | ||
155b05dc | 7406 | if (ext_mode == BLKmode |
7407 | && ! (target != 0 && GET_CODE (op0) == MEM | |
7408 | && GET_CODE (target) == MEM | |
7409 | && bitpos % BITS_PER_UNIT == 0)) | |
10f307d9 | 7410 | ext_mode = mode_for_size (bitsize, MODE_INT, 1); |
7411 | ||
7412 | if (ext_mode == BLKmode) | |
0e20f9fb | 7413 | { |
7414 | /* In this case, BITPOS must start at a byte boundary and | |
7415 | TARGET, if specified, must be a MEM. */ | |
7416 | if (GET_CODE (op0) != MEM | |
7417 | || (target != 0 && GET_CODE (target) != MEM) | |
7418 | || bitpos % BITS_PER_UNIT != 0) | |
7419 | abort (); | |
7420 | ||
e513d163 | 7421 | op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT); |
0e20f9fb | 7422 | if (target == 0) |
7423 | target = assign_temp (type, 0, 1, 1); | |
7424 | ||
7425 | emit_block_move (target, op0, | |
2b96c5f6 | 7426 | GEN_INT ((bitsize + BITS_PER_UNIT - 1) |
0378dbdc | 7427 | / BITS_PER_UNIT), |
7428 | BLOCK_OP_NORMAL); | |
fa56dc1d | 7429 | |
0e20f9fb | 7430 | return target; |
7431 | } | |
10f307d9 | 7432 | |
6e6b4174 | 7433 | op0 = validize_mem (op0); |
7434 | ||
7435 | if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG) | |
2c269e73 | 7436 | mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0)); |
6e6b4174 | 7437 | |
7438 | op0 = extract_bit_field (op0, bitsize, bitpos, | |
10f307d9 | 7439 | unsignedp, target, ext_mode, ext_mode, |
10f307d9 | 7440 | int_size_in_bytes (TREE_TYPE (tem))); |
0aa5cbcc | 7441 | |
7442 | /* If the result is a record type and BITSIZE is narrower than | |
7443 | the mode of OP0, an integral mode, and this is a big endian | |
7444 | machine, we must put the field into the high-order bits. */ | |
7445 | if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN | |
7446 | && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT | |
cce8da2f | 7447 | && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0))) |
0aa5cbcc | 7448 | op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0, |
7449 | size_int (GET_MODE_BITSIZE (GET_MODE (op0)) | |
7450 | - bitsize), | |
7451 | op0, 1); | |
7452 | ||
10f307d9 | 7453 | if (mode == BLKmode) |
7454 | { | |
a9d9ab08 | 7455 | rtx new = assign_temp (build_qualified_type |
771d21fa | 7456 | ((*lang_hooks.types.type_for_mode) |
7457 | (ext_mode, 0), | |
a9d9ab08 | 7458 | TYPE_QUAL_CONST), 0, 1, 1); |
10f307d9 | 7459 | |
7460 | emit_move_insn (new, op0); | |
7461 | op0 = copy_rtx (new); | |
7462 | PUT_MODE (op0, BLKmode); | |
a9d9ab08 | 7463 | set_mem_attributes (op0, exp, 1); |
10f307d9 | 7464 | } |
7465 | ||
7466 | return op0; | |
7467 | } | |
7468 | ||
f8ca8b77 | 7469 | /* If the result is BLKmode, use that to access the object |
7470 | now as well. */ | |
7471 | if (mode == BLKmode) | |
7472 | mode1 = BLKmode; | |
7473 | ||
10f307d9 | 7474 | /* Get a reference to just this component. */ |
7475 | if (modifier == EXPAND_CONST_ADDRESS | |
7476 | || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER) | |
e4e86ec5 | 7477 | op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT); |
10f307d9 | 7478 | else |
e513d163 | 7479 | op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT); |
b5ba9f3a | 7480 | |
c3a9c149 | 7481 | if (op0 == orig_op0) |
7482 | op0 = copy_rtx (op0); | |
7483 | ||
f7c44134 | 7484 | set_mem_attributes (op0, exp, 0); |
6e6b4174 | 7485 | if (GET_CODE (XEXP (op0, 0)) == REG) |
2b96c5f6 | 7486 | mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0)); |
6e6b4174 | 7487 | |
10f307d9 | 7488 | MEM_VOLATILE_P (op0) |= volatilep; |
1c9f9aa6 | 7489 | if (mode == mode1 || mode1 == BLKmode || mode1 == tmode |
0909656b | 7490 | || modifier == EXPAND_CONST_ADDRESS |
1c9f9aa6 | 7491 | || modifier == EXPAND_INITIALIZER) |
10f307d9 | 7492 | return op0; |
1c9f9aa6 | 7493 | else if (target == 0) |
10f307d9 | 7494 | target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode); |
1c9f9aa6 | 7495 | |
10f307d9 | 7496 | convert_move (target, op0, unsignedp); |
7497 | return target; | |
7498 | } | |
7499 | ||
cef0c6a0 | 7500 | case VTABLE_REF: |
7501 | { | |
7502 | rtx insn, before = get_last_insn (), vtbl_ref; | |
7503 | ||
7504 | /* Evaluate the interior expression. */ | |
7505 | subtarget = expand_expr (TREE_OPERAND (exp, 0), target, | |
7506 | tmode, modifier); | |
7507 | ||
7508 | /* Get or create an instruction off which to hang a note. */ | |
7509 | if (REG_P (subtarget)) | |
7510 | { | |
7511 | target = subtarget; | |
7512 | insn = get_last_insn (); | |
7513 | if (insn == before) | |
7514 | abort (); | |
7515 | if (! INSN_P (insn)) | |
7516 | insn = prev_nonnote_insn (insn); | |
7517 | } | |
7518 | else | |
7519 | { | |
7520 | target = gen_reg_rtx (GET_MODE (subtarget)); | |
7521 | insn = emit_move_insn (target, subtarget); | |
7522 | } | |
7523 | ||
7524 | /* Collect the data for the note. */ | |
7525 | vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0); | |
7526 | vtbl_ref = plus_constant (vtbl_ref, | |
7527 | tree_low_cst (TREE_OPERAND (exp, 2), 0)); | |
7528 | /* Discard the initial CONST that was added. */ | |
7529 | vtbl_ref = XEXP (vtbl_ref, 0); | |
7530 | ||
7531 | REG_NOTES (insn) | |
7532 | = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn)); | |
7533 | ||
7534 | return target; | |
7535 | } | |
7536 | ||
10f307d9 | 7537 | /* Intended for a reference to a buffer of a file-object in Pascal. |
7538 | But it's not certain that a special tree code will really be | |
7539 | necessary for these. INDIRECT_REF might work for them. */ | |
7540 | case BUFFER_REF: | |
7541 | abort (); | |
7542 | ||
b63679d2 | 7543 | case IN_EXPR: |
b63679d2 | 7544 | { |
d2ae1b1e | 7545 | /* Pascal set IN expression. |
7546 | ||
7547 | Algorithm: | |
7548 | rlo = set_low - (set_low%bits_per_word); | |
7549 | the_word = set [ (index - rlo)/bits_per_word ]; | |
7550 | bit_index = index % bits_per_word; | |
7551 | bitmask = 1 << bit_index; | |
7552 | return !!(the_word & bitmask); */ | |
7553 | ||
b63679d2 | 7554 | tree set = TREE_OPERAND (exp, 0); |
7555 | tree index = TREE_OPERAND (exp, 1); | |
d2ae1b1e | 7556 | int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index)); |
b63679d2 | 7557 | tree set_type = TREE_TYPE (set); |
b63679d2 | 7558 | tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type)); |
7559 | tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type)); | |
d2ae1b1e | 7560 | rtx index_val = expand_expr (index, 0, VOIDmode, 0); |
7561 | rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0); | |
7562 | rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0); | |
7563 | rtx setval = expand_expr (set, 0, VOIDmode, 0); | |
7564 | rtx setaddr = XEXP (setval, 0); | |
7565 | enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index)); | |
b63679d2 | 7566 | rtx rlow; |
7567 | rtx diff, quo, rem, addr, bit, result; | |
b63679d2 | 7568 | |
d2ae1b1e | 7569 | /* If domain is empty, answer is no. Likewise if index is constant |
7570 | and out of bounds. */ | |
7d27e4c9 | 7571 | if (((TREE_CODE (set_high_bound) == INTEGER_CST |
d2ae1b1e | 7572 | && TREE_CODE (set_low_bound) == INTEGER_CST |
7d27e4c9 | 7573 | && tree_int_cst_lt (set_high_bound, set_low_bound)) |
d2ae1b1e | 7574 | || (TREE_CODE (index) == INTEGER_CST |
7575 | && TREE_CODE (set_low_bound) == INTEGER_CST | |
7576 | && tree_int_cst_lt (index, set_low_bound)) | |
7577 | || (TREE_CODE (set_high_bound) == INTEGER_CST | |
7578 | && TREE_CODE (index) == INTEGER_CST | |
7579 | && tree_int_cst_lt (set_high_bound, index)))) | |
b63679d2 | 7580 | return const0_rtx; |
7581 | ||
d2ae1b1e | 7582 | if (target == 0) |
7583 | target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode); | |
b63679d2 | 7584 | |
7585 | /* If we get here, we have to generate the code for both cases | |
7586 | (in range and out of range). */ | |
7587 | ||
7588 | op0 = gen_label_rtx (); | |
7589 | op1 = gen_label_rtx (); | |
7590 | ||
7591 | if (! (GET_CODE (index_val) == CONST_INT | |
7592 | && GET_CODE (lo_r) == CONST_INT)) | |
2b96c5f6 | 7593 | emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX, |
7594 | GET_MODE (index_val), iunsignedp, op1); | |
b63679d2 | 7595 | |
7596 | if (! (GET_CODE (index_val) == CONST_INT | |
7597 | && GET_CODE (hi_r) == CONST_INT)) | |
2b96c5f6 | 7598 | emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX, |
7599 | GET_MODE (index_val), iunsignedp, op1); | |
b63679d2 | 7600 | |
7601 | /* Calculate the element number of bit zero in the first word | |
7602 | of the set. */ | |
7603 | if (GET_CODE (lo_r) == CONST_INT) | |
17cc6bd4 | 7604 | rlow = GEN_INT (INTVAL (lo_r) |
fa56dc1d | 7605 | & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)); |
b63679d2 | 7606 | else |
17cc6bd4 | 7607 | rlow = expand_binop (index_mode, and_optab, lo_r, |
7608 | GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)), | |
d2ae1b1e | 7609 | NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN); |
b63679d2 | 7610 | |
d2ae1b1e | 7611 | diff = expand_binop (index_mode, sub_optab, index_val, rlow, |
7612 | NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN); | |
b63679d2 | 7613 | |
7614 | quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff, | |
d2ae1b1e | 7615 | GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp); |
b63679d2 | 7616 | rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val, |
d2ae1b1e | 7617 | GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp); |
7618 | ||
b63679d2 | 7619 | addr = memory_address (byte_mode, |
d2ae1b1e | 7620 | expand_binop (index_mode, add_optab, diff, |
7621 | setaddr, NULL_RTX, iunsignedp, | |
17cc6bd4 | 7622 | OPTAB_LIB_WIDEN)); |
d2ae1b1e | 7623 | |
fa56dc1d | 7624 | /* Extract the bit we want to examine. */ |
b63679d2 | 7625 | bit = expand_shift (RSHIFT_EXPR, byte_mode, |
941522d6 | 7626 | gen_rtx_MEM (byte_mode, addr), |
17cc6bd4 | 7627 | make_tree (TREE_TYPE (index), rem), |
7628 | NULL_RTX, 1); | |
7629 | result = expand_binop (byte_mode, and_optab, bit, const1_rtx, | |
7630 | GET_MODE (target) == byte_mode ? target : 0, | |
b63679d2 | 7631 | 1, OPTAB_LIB_WIDEN); |
17cc6bd4 | 7632 | |
7633 | if (result != target) | |
7634 | convert_move (target, result, 1); | |
b63679d2 | 7635 | |
7636 | /* Output the code to handle the out-of-range case. */ | |
7637 | emit_jump (op0); | |
7638 | emit_label (op1); | |
7639 | emit_move_insn (target, const0_rtx); | |
7640 | emit_label (op0); | |
7641 | return target; | |
7642 | } | |
7643 | ||
10f307d9 | 7644 | case WITH_CLEANUP_EXPR: |
5929001a | 7645 | if (WITH_CLEANUP_EXPR_RTL (exp) == 0) |
10f307d9 | 7646 | { |
5929001a | 7647 | WITH_CLEANUP_EXPR_RTL (exp) |
8a06f2d4 | 7648 | = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier); |
a9bc793b | 7649 | expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1), |
7650 | CLEANUP_EH_ONLY (exp)); | |
694ec519 | 7651 | |
10f307d9 | 7652 | /* That's it for this cleanup. */ |
5929001a | 7653 | TREE_OPERAND (exp, 1) = 0; |
10f307d9 | 7654 | } |
5929001a | 7655 | return WITH_CLEANUP_EXPR_RTL (exp); |
10f307d9 | 7656 | |
34e2ddcd | 7657 | case CLEANUP_POINT_EXPR: |
7658 | { | |
694ec519 | 7659 | /* Start a new binding layer that will keep track of all cleanup |
7660 | actions to be performed. */ | |
87a9ad11 | 7661 | expand_start_bindings (2); |
694ec519 | 7662 | |
6fd6341f | 7663 | target_temp_slot_level = temp_slot_level; |
694ec519 | 7664 | |
8a06f2d4 | 7665 | op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier); |
80036564 | 7666 | /* If we're going to use this value, load it up now. */ |
7667 | if (! ignore) | |
7668 | op0 = force_not_mem (op0); | |
6fd6341f | 7669 | preserve_temp_slots (op0); |
694ec519 | 7670 | expand_end_bindings (NULL_TREE, 0, 0); |
34e2ddcd | 7671 | } |
7672 | return op0; | |
7673 | ||
10f307d9 | 7674 | case CALL_EXPR: |
7675 | /* Check for a built-in function. */ | |
7676 | if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR | |
d2ae1b1e | 7677 | && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) |
7678 | == FUNCTION_DECL) | |
10f307d9 | 7679 | && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))) |
ff385626 | 7680 | { |
edbbe5ca | 7681 | if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) |
7682 | == BUILT_IN_FRONTEND) | |
b467ecc1 | 7683 | return (*lang_hooks.expand_expr) |
7684 | (exp, original_target, tmode, modifier); | |
edbbe5ca | 7685 | else |
7686 | return expand_builtin (exp, target, subtarget, tmode, ignore); | |
7687 | } | |
d2ae1b1e | 7688 | |
4e0ff571 | 7689 | return expand_call (exp, target, ignore); |
10f307d9 | 7690 | |
7691 | case NON_LVALUE_EXPR: | |
7692 | case NOP_EXPR: | |
7693 | case CONVERT_EXPR: | |
7694 | case REFERENCE_EXPR: | |
87ec3f77 | 7695 | if (TREE_OPERAND (exp, 0) == error_mark_node) |
f30669db | 7696 | return const0_rtx; |
87ec3f77 | 7697 | |
10f307d9 | 7698 | if (TREE_CODE (type) == UNION_TYPE) |
7699 | { | |
7700 | tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0)); | |
155b05dc | 7701 | |
a9d9ab08 | 7702 | /* If both input and output are BLKmode, this conversion isn't doing |
7703 | anything except possibly changing memory attribute. */ | |
7704 | if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode) | |
7705 | { | |
7706 | rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode, | |
7707 | modifier); | |
7708 | ||
7709 | result = copy_rtx (result); | |
7710 | set_mem_attributes (result, exp, 0); | |
7711 | return result; | |
7712 | } | |
155b05dc | 7713 | |
10f307d9 | 7714 | if (target == 0) |
387bc205 | 7715 | target = assign_temp (type, 0, 1, 1); |
d2ae1b1e | 7716 | |
10f307d9 | 7717 | if (GET_CODE (target) == MEM) |
7718 | /* Store data into beginning of memory target. */ | |
7719 | store_expr (TREE_OPERAND (exp, 0), | |
e513d163 | 7720 | adjust_address (target, TYPE_MODE (valtype), 0), 0); |
acfb31e5 | 7721 | |
10f307d9 | 7722 | else if (GET_CODE (target) == REG) |
7723 | /* Store this field into a union of the proper type. */ | |
155b05dc | 7724 | store_field (target, |
7725 | MIN ((int_size_in_bytes (TREE_TYPE | |
7726 | (TREE_OPERAND (exp, 0))) | |
7727 | * BITS_PER_UNIT), | |
e1439bcb | 7728 | (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)), |
155b05dc | 7729 | 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0), |
2b96c5f6 | 7730 | VOIDmode, 0, type, 0); |
10f307d9 | 7731 | else |
7732 | abort (); | |
7733 | ||
7734 | /* Return the entire union. */ | |
7735 | return target; | |
7736 | } | |
d2ae1b1e | 7737 | |
d324678b | 7738 | if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) |
7739 | { | |
7740 | op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, | |
8a06f2d4 | 7741 | modifier); |
d324678b | 7742 | |
7743 | /* If the signedness of the conversion differs and OP0 is | |
7744 | a promoted SUBREG, clear that indication since we now | |
7745 | have to do the proper extension. */ | |
7746 | if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp | |
7747 | && GET_CODE (op0) == SUBREG) | |
7748 | SUBREG_PROMOTED_VAR_P (op0) = 0; | |
7749 | ||
7750 | return op0; | |
7751 | } | |
7752 | ||
5b1bb114 | 7753 | op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier); |
dda75192 | 7754 | if (GET_MODE (op0) == mode) |
7755 | return op0; | |
dda75192 | 7756 | |
d2ae1b1e | 7757 | /* If OP0 is a constant, just convert it into the proper mode. */ |
7758 | if (CONSTANT_P (op0)) | |
5b1bb114 | 7759 | { |
7760 | tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0)); | |
7761 | enum machine_mode inner_mode = TYPE_MODE (inner_type); | |
7762 | ||
ff385626 | 7763 | if (modifier == EXPAND_INITIALIZER) |
5b1bb114 | 7764 | return simplify_gen_subreg (mode, op0, inner_mode, |
7765 | subreg_lowpart_offset (mode, | |
7766 | inner_mode)); | |
7767 | else | |
7768 | return convert_modes (mode, inner_mode, op0, | |
7769 | TREE_UNSIGNED (inner_type)); | |
7770 | } | |
dda75192 | 7771 | |
1aaabd2e | 7772 | if (modifier == EXPAND_INITIALIZER) |
941522d6 | 7773 | return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0); |
d2ae1b1e | 7774 | |
10f307d9 | 7775 | if (target == 0) |
d2ae1b1e | 7776 | return |
7777 | convert_to_mode (mode, op0, | |
7778 | TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))); | |
10f307d9 | 7779 | else |
d2ae1b1e | 7780 | convert_move (target, op0, |
7781 | TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))); | |
10f307d9 | 7782 | return target; |
7783 | ||
f96c43fb | 7784 | case VIEW_CONVERT_EXPR: |
8a06f2d4 | 7785 | op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier); |
f96c43fb | 7786 | |
7787 | /* If the input and output modes are both the same, we are done. | |
7788 | Otherwise, if neither mode is BLKmode and both are within a word, we | |
e58d0f17 | 7789 | can use gen_lowpart. If neither is true, make sure the operand is |
7790 | in memory and convert the MEM to the new mode. */ | |
f96c43fb | 7791 | if (TYPE_MODE (type) == GET_MODE (op0)) |
7792 | ; | |
7793 | else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode | |
7794 | && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD | |
7795 | && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD) | |
7796 | op0 = gen_lowpart (TYPE_MODE (type), op0); | |
e58d0f17 | 7797 | else if (GET_CODE (op0) != MEM) |
f96c43fb | 7798 | { |
e58d0f17 | 7799 | /* If the operand is not a MEM, force it into memory. Since we |
7800 | are going to be be changing the mode of the MEM, don't call | |
7801 | force_const_mem for constants because we don't allow pool | |
7802 | constants to change mode. */ | |
f96c43fb | 7803 | tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0)); |
f96c43fb | 7804 | |
e58d0f17 | 7805 | if (TREE_ADDRESSABLE (exp)) |
7806 | abort (); | |
f96c43fb | 7807 | |
e58d0f17 | 7808 | if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type)) |
7809 | target | |
7810 | = assign_stack_temp_for_type | |
7811 | (TYPE_MODE (inner_type), | |
7812 | GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type); | |
f96c43fb | 7813 | |
e58d0f17 | 7814 | emit_move_insn (target, op0); |
7815 | op0 = target; | |
f96c43fb | 7816 | } |
7817 | ||
e58d0f17 | 7818 | /* At this point, OP0 is in the correct mode. If the output type is such |
7819 | that the operand is known to be aligned, indicate that it is. | |
7820 | Otherwise, we need only be concerned about alignment for non-BLKmode | |
7821 | results. */ | |
f96c43fb | 7822 | if (GET_CODE (op0) == MEM) |
7823 | { | |
7824 | op0 = copy_rtx (op0); | |
7825 | ||
f96c43fb | 7826 | if (TYPE_ALIGN_OK (type)) |
7827 | set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type))); | |
7828 | else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT | |
7829 | && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type))) | |
7830 | { | |
7831 | tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0)); | |
cce8da2f | 7832 | HOST_WIDE_INT temp_size |
7833 | = MAX (int_size_in_bytes (inner_type), | |
7834 | (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type))); | |
f96c43fb | 7835 | rtx new = assign_stack_temp_for_type (TYPE_MODE (type), |
7836 | temp_size, 0, type); | |
7a827396 | 7837 | rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0); |
f96c43fb | 7838 | |
e58d0f17 | 7839 | if (TREE_ADDRESSABLE (exp)) |
7840 | abort (); | |
7841 | ||
f96c43fb | 7842 | if (GET_MODE (op0) == BLKmode) |
7843 | emit_block_move (new_with_op0_mode, op0, | |
0378dbdc | 7844 | GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))), |
7845 | BLOCK_OP_NORMAL); | |
f96c43fb | 7846 | else |
7847 | emit_move_insn (new_with_op0_mode, op0); | |
7848 | ||
7849 | op0 = new; | |
7850 | } | |
ff385626 | 7851 | |
7a827396 | 7852 | op0 = adjust_address (op0, TYPE_MODE (type), 0); |
f96c43fb | 7853 | } |
7854 | ||
7855 | return op0; | |
7856 | ||
10f307d9 | 7857 | case PLUS_EXPR: |
bec2d490 | 7858 | this_optab = ! unsignedp && flag_trapv |
a2c7420e | 7859 | && (GET_MODE_CLASS (mode) == MODE_INT) |
bec2d490 | 7860 | ? addv_optab : add_optab; |
10f307d9 | 7861 | |
7862 | /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and | |
7863 | something else, make sure we add the register to the constant and | |
7864 | then to the other thing. This case can occur during strength | |
7865 | reduction and doing it this way will produce better code if the | |
7866 | frame pointer or argument pointer is eliminated. | |
7867 | ||
7868 | fold-const.c will ensure that the constant is always in the inner | |
7869 | PLUS_EXPR, so the only case we need to do anything about is if | |
7870 | sp, ap, or fp is our second argument, in which case we must swap | |
7871 | the innermost first argument and our second argument. */ | |
7872 | ||
7873 | if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR | |
7874 | && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST | |
7875 | && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR | |
7876 | && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx | |
7877 | || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx | |
7878 | || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx)) | |
7879 | { | |
7880 | tree t = TREE_OPERAND (exp, 1); | |
7881 | ||
7882 | TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0); | |
7883 | TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t; | |
7884 | } | |
7885 | ||
ed8d3eee | 7886 | /* If the result is to be ptr_mode and we are adding an integer to |
10f307d9 | 7887 | something, we might be forming a constant. So try to use |
7888 | plus_constant. If it produces a sum and we can't accept it, | |
7889 | use force_operand. This allows P = &ARR[const] to generate | |
7890 | efficient code on machines where a SYMBOL_REF is not a valid | |
7891 | address. | |
7892 | ||
7893 | If this is an EXPAND_SUM call, always return the sum. */ | |
66aa258b | 7894 | if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER |
ff385626 | 7895 | || (mode == ptr_mode && (unsignedp || ! flag_trapv))) |
10f307d9 | 7896 | { |
66aa258b | 7897 | if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST |
7898 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | |
7899 | && TREE_CONSTANT (TREE_OPERAND (exp, 1))) | |
7900 | { | |
2c551bbe | 7901 | rtx constant_part; |
7902 | ||
66aa258b | 7903 | op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode, |
7904 | EXPAND_SUM); | |
2c551bbe | 7905 | /* Use immed_double_const to ensure that the constant is |
7906 | truncated according to the mode of OP1, then sign extended | |
7907 | to a HOST_WIDE_INT. Using the constant directly can result | |
7908 | in non-canonical RTL in a 64x32 cross compile. */ | |
7909 | constant_part | |
7910 | = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)), | |
7911 | (HOST_WIDE_INT) 0, | |
0bf16c4a | 7912 | TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)))); |
79d5c3ba | 7913 | op1 = plus_constant (op1, INTVAL (constant_part)); |
66aa258b | 7914 | if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER) |
7915 | op1 = force_operand (op1, target); | |
7916 | return op1; | |
7917 | } | |
10f307d9 | 7918 | |
66aa258b | 7919 | else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST |
7920 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT | |
7921 | && TREE_CONSTANT (TREE_OPERAND (exp, 0))) | |
7922 | { | |
2c551bbe | 7923 | rtx constant_part; |
7924 | ||
66aa258b | 7925 | op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, |
00e465b1 | 7926 | (modifier == EXPAND_INITIALIZER |
7927 | ? EXPAND_INITIALIZER : EXPAND_SUM)); | |
66aa258b | 7928 | if (! CONSTANT_P (op0)) |
7929 | { | |
7930 | op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, | |
7931 | VOIDmode, modifier); | |
9282409c | 7932 | /* Don't go to both_summands if modifier |
7933 | says it's not right to return a PLUS. */ | |
7934 | if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER) | |
7935 | goto binop2; | |
66aa258b | 7936 | goto both_summands; |
7937 | } | |
2c551bbe | 7938 | /* Use immed_double_const to ensure that the constant is |
7939 | truncated according to the mode of OP1, then sign extended | |
7940 | to a HOST_WIDE_INT. Using the constant directly can result | |
7941 | in non-canonical RTL in a 64x32 cross compile. */ | |
7942 | constant_part | |
7943 | = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)), | |
7944 | (HOST_WIDE_INT) 0, | |
f2761a0e | 7945 | TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))); |
79d5c3ba | 7946 | op0 = plus_constant (op0, INTVAL (constant_part)); |
66aa258b | 7947 | if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER) |
7948 | op0 = force_operand (op0, target); | |
7949 | return op0; | |
7950 | } | |
10f307d9 | 7951 | } |
7952 | ||
a60e4107 | 7953 | if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1)) |
7954 | subtarget = 0; | |
7955 | ||
10f307d9 | 7956 | /* No sense saving up arithmetic to be done |
7957 | if it's all in the wrong mode to form part of an address. | |
7958 | And force_operand won't know whether to sign-extend or | |
7959 | zero-extend. */ | |
7960 | if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER) | |
ed8d3eee | 7961 | || mode != ptr_mode) |
a60e4107 | 7962 | { |
7963 | op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0); | |
7964 | op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0); | |
8c1d1299 | 7965 | if (op0 == const0_rtx) |
7966 | return op1; | |
7967 | if (op1 == const0_rtx) | |
7968 | return op0; | |
a60e4107 | 7969 | goto binop2; |
7970 | } | |
10f307d9 | 7971 | |
8a06f2d4 | 7972 | op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier); |
7973 | op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier); | |
10f307d9 | 7974 | |
acd367d1 | 7975 | /* We come here from MINUS_EXPR when the second operand is a |
7976 | constant. */ | |
66aa258b | 7977 | both_summands: |
10f307d9 | 7978 | /* Make sure any term that's a sum with a constant comes last. */ |
7979 | if (GET_CODE (op0) == PLUS | |
7980 | && CONSTANT_P (XEXP (op0, 1))) | |
7981 | { | |
7982 | temp = op0; | |
7983 | op0 = op1; | |
7984 | op1 = temp; | |
7985 | } | |
7986 | /* If adding to a sum including a constant, | |
7987 | associate it to put the constant outside. */ | |
7988 | if (GET_CODE (op1) == PLUS | |
7989 | && CONSTANT_P (XEXP (op1, 1))) | |
7990 | { | |
7991 | rtx constant_term = const0_rtx; | |
7992 | ||
7993 | temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0); | |
7994 | if (temp != 0) | |
7995 | op0 = temp; | |
2d7187c2 | 7996 | /* Ensure that MULT comes first if there is one. */ |
7997 | else if (GET_CODE (op0) == MULT) | |
941522d6 | 7998 | op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0)); |
10f307d9 | 7999 | else |
941522d6 | 8000 | op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0); |
10f307d9 | 8001 | |
8002 | /* Let's also eliminate constants from op0 if possible. */ | |
8003 | op0 = eliminate_constant_term (op0, &constant_term); | |
8004 | ||
8005 | /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so | |
fa56dc1d | 8006 | their sum should be a constant. Form it into OP1, since the |
10f307d9 | 8007 | result we want will then be OP0 + OP1. */ |
8008 | ||
8009 | temp = simplify_binary_operation (PLUS, mode, constant_term, | |
8010 | XEXP (op1, 1)); | |
8011 | if (temp != 0) | |
8012 | op1 = temp; | |
8013 | else | |
941522d6 | 8014 | op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1)); |
10f307d9 | 8015 | } |
8016 | ||
8017 | /* Put a constant term last and put a multiplication first. */ | |
8018 | if (CONSTANT_P (op0) || GET_CODE (op1) == MULT) | |
8019 | temp = op1, op1 = op0, op0 = temp; | |
8020 | ||
8021 | temp = simplify_binary_operation (PLUS, mode, op0, op1); | |
941522d6 | 8022 | return temp ? temp : gen_rtx_PLUS (mode, op0, op1); |
10f307d9 | 8023 | |
8024 | case MINUS_EXPR: | |
94d01330 | 8025 | /* For initializers, we are allowed to return a MINUS of two |
8026 | symbolic constants. Here we handle all cases when both operands | |
8027 | are constant. */ | |
10f307d9 | 8028 | /* Handle difference of two symbolic constants, |
8029 | for the sake of an initializer. */ | |
8030 | if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER) | |
8031 | && really_constant_p (TREE_OPERAND (exp, 0)) | |
8032 | && really_constant_p (TREE_OPERAND (exp, 1))) | |
8033 | { | |
8a06f2d4 | 8034 | rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, |
8035 | modifier); | |
8036 | rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, | |
8037 | modifier); | |
94d01330 | 8038 | |
94d01330 | 8039 | /* If the last operand is a CONST_INT, use plus_constant of |
8040 | the negated constant. Else make the MINUS. */ | |
8041 | if (GET_CODE (op1) == CONST_INT) | |
8042 | return plus_constant (op0, - INTVAL (op1)); | |
8043 | else | |
941522d6 | 8044 | return gen_rtx_MINUS (mode, op0, op1); |
10f307d9 | 8045 | } |
a02b3586 | 8046 | |
bec2d490 | 8047 | this_optab = ! unsignedp && flag_trapv |
8048 | && (GET_MODE_CLASS(mode) == MODE_INT) | |
8049 | ? subv_optab : sub_optab; | |
acd367d1 | 8050 | |
8051 | /* No sense saving up arithmetic to be done | |
8052 | if it's all in the wrong mode to form part of an address. | |
8053 | And force_operand won't know whether to sign-extend or | |
8054 | zero-extend. */ | |
8055 | if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER) | |
8056 | || mode != ptr_mode) | |
8057 | goto binop; | |
8058 | ||
8059 | if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1)) | |
8060 | subtarget = 0; | |
8061 | ||
8062 | op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier); | |
8063 | op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier); | |
8064 | ||
8065 | /* Convert A - const to A + (-const). */ | |
8066 | if (GET_CODE (op1) == CONST_INT) | |
8067 | { | |
8068 | op1 = negate_rtx (mode, op1); | |
8069 | goto both_summands; | |
8070 | } | |
8071 | ||
8072 | goto binop2; | |
10f307d9 | 8073 | |
8074 | case MULT_EXPR: | |
10f307d9 | 8075 | /* If first operand is constant, swap them. |
8076 | Thus the following special case checks need only | |
8077 | check the second operand. */ | |
8078 | if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST) | |
8079 | { | |
19cb6b50 | 8080 | tree t1 = TREE_OPERAND (exp, 0); |
10f307d9 | 8081 | TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1); |
8082 | TREE_OPERAND (exp, 1) = t1; | |
8083 | } | |
8084 | ||
8085 | /* Attempt to return something suitable for generating an | |
8086 | indexed address, for machines that support that. */ | |
8087 | ||
ed8d3eee | 8088 | if (modifier == EXPAND_SUM && mode == ptr_mode |
78e5555a | 8089 | && host_integerp (TREE_OPERAND (exp, 1), 0)) |
10f307d9 | 8090 | { |
eb4b06b6 | 8091 | op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, |
8092 | EXPAND_SUM); | |
10f307d9 | 8093 | |
78e5555a | 8094 | /* If we knew for certain that this is arithmetic for an array |
8095 | reference, and we knew the bounds of the array, then we could | |
8096 | apply the distributive law across (PLUS X C) for constant C. | |
8097 | Without such knowledge, we risk overflowing the computation | |
8098 | when both X and C are large, but X+C isn't. */ | |
8099 | /* ??? Could perhaps special-case EXP being unsigned and C being | |
8100 | positive. In that case we are certain that X+C is no smaller | |
8101 | than X and so the transformed expression will overflow iff the | |
8102 | original would have. */ | |
10f307d9 | 8103 | |
8104 | if (GET_CODE (op0) != REG) | |
b572011e | 8105 | op0 = force_operand (op0, NULL_RTX); |
10f307d9 | 8106 | if (GET_CODE (op0) != REG) |
8107 | op0 = copy_to_mode_reg (mode, op0); | |
8108 | ||
7014838c | 8109 | return |
8110 | gen_rtx_MULT (mode, op0, | |
78e5555a | 8111 | GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0))); |
10f307d9 | 8112 | } |
8113 | ||
997d68fe | 8114 | if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1)) |
10f307d9 | 8115 | subtarget = 0; |
8116 | ||
8117 | /* Check for multiplying things that have been extended | |
8118 | from a narrower type. If this machine supports multiplying | |
8119 | in that narrower type with a result in the desired type, | |
8120 | do it that way, and avoid the explicit type-conversion. */ | |
8121 | if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR | |
8122 | && TREE_CODE (type) == INTEGER_TYPE | |
8123 | && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))) | |
8124 | < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))) | |
8125 | && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST | |
8126 | && int_fits_type_p (TREE_OPERAND (exp, 1), | |
8127 | TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))) | |
8128 | /* Don't use a widening multiply if a shift will do. */ | |
8129 | && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)))) | |
b572011e | 8130 | > HOST_BITS_PER_WIDE_INT) |
10f307d9 | 8131 | || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0)) |
8132 | || | |
8133 | (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR | |
8134 | && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))) | |
8135 | == | |
8136 | TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))) | |
8137 | /* If both operands are extended, they must either both | |
8138 | be zero-extended or both be sign-extended. */ | |
8139 | && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))) | |
8140 | == | |
8141 | TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))))) | |
8142 | { | |
8143 | enum machine_mode innermode | |
8144 | = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))); | |
10b58489 | 8145 | optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))) |
8146 | ? smul_widen_optab : umul_widen_optab); | |
10f307d9 | 8147 | this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))) |
8148 | ? umul_widen_optab : smul_widen_optab); | |
10b58489 | 8149 | if (mode == GET_MODE_WIDER_MODE (innermode)) |
10f307d9 | 8150 | { |
10b58489 | 8151 | if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing) |
8152 | { | |
8153 | op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0), | |
8154 | NULL_RTX, VOIDmode, 0); | |
8155 | if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST) | |
8156 | op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, | |
8157 | VOIDmode, 0); | |
8158 | else | |
8159 | op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0), | |
8160 | NULL_RTX, VOIDmode, 0); | |
8161 | goto binop2; | |
8162 | } | |
8163 | else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing | |
8164 | && innermode == word_mode) | |
8165 | { | |
8166 | rtx htem; | |
8167 | op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0), | |
8168 | NULL_RTX, VOIDmode, 0); | |
8169 | if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST) | |
15324f8c | 8170 | op1 = convert_modes (innermode, mode, |
8171 | expand_expr (TREE_OPERAND (exp, 1), | |
8172 | NULL_RTX, VOIDmode, 0), | |
8173 | unsignedp); | |
10b58489 | 8174 | else |
8175 | op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0), | |
8176 | NULL_RTX, VOIDmode, 0); | |
8177 | temp = expand_binop (mode, other_optab, op0, op1, target, | |
8178 | unsignedp, OPTAB_LIB_WIDEN); | |
8179 | htem = expand_mult_highpart_adjust (innermode, | |
8180 | gen_highpart (innermode, temp), | |
8181 | op0, op1, | |
8182 | gen_highpart (innermode, temp), | |
8183 | unsignedp); | |
8184 | emit_move_insn (gen_highpart (innermode, temp), htem); | |
8185 | return temp; | |
8186 | } | |
10f307d9 | 8187 | } |
8188 | } | |
8189 | op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0); | |
b572011e | 8190 | op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0); |
10f307d9 | 8191 | return expand_mult (mode, op0, op1, target, unsignedp); |
8192 | ||
8193 | case TRUNC_DIV_EXPR: | |
8194 | case FLOOR_DIV_EXPR: | |
8195 | case CEIL_DIV_EXPR: | |
8196 | case ROUND_DIV_EXPR: | |
8197 | case EXACT_DIV_EXPR: | |
997d68fe | 8198 | if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1)) |
10f307d9 | 8199 | subtarget = 0; |
8200 | /* Possible optimization: compute the dividend with EXPAND_SUM | |
8201 | then if the divisor is constant can optimize the case | |
8202 | where some terms of the dividend have coeffs divisible by it. */ | |
8203 | op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0); | |
b572011e | 8204 | op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0); |
10f307d9 | 8205 | return expand_divmod (0, code, mode, op0, op1, target, unsignedp); |
8206 | ||
8207 | case RDIV_EXPR: | |
3623b3f6 | 8208 | /* Emit a/b as a*(1/b). Later we may manage CSE the reciprocal saving |
8209 | expensive divide. If not, combine will rebuild the original | |
8210 | computation. */ | |
8211 | if (flag_unsafe_math_optimizations && optimize && !optimize_size | |
fe1b1167 | 8212 | && TREE_CODE (type) == REAL_TYPE |
3623b3f6 | 8213 | && !real_onep (TREE_OPERAND (exp, 0))) |
8214 | return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0), | |
8215 | build (RDIV_EXPR, type, | |
8216 | build_real (type, dconst1), | |
8217 | TREE_OPERAND (exp, 1))), | |
8218 | target, tmode, unsignedp); | |
ad99e708 | 8219 | this_optab = sdiv_optab; |
10f307d9 | 8220 | goto binop; |
8221 | ||
8222 | case TRUNC_MOD_EXPR: | |
8223 | case FLOOR_MOD_EXPR: | |
8224 | case CEIL_MOD_EXPR: | |
8225 | case ROUND_MOD_EXPR: | |
997d68fe | 8226 | if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1)) |
10f307d9 | 8227 | subtarget = 0; |
8228 | op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0); | |
b572011e | 8229 | op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0); |
10f307d9 | 8230 | return expand_divmod (1, code, mode, op0, op1, target, unsignedp); |
8231 | ||
8232 | case FIX_ROUND_EXPR: | |
8233 | case FIX_FLOOR_EXPR: | |
8234 | case FIX_CEIL_EXPR: | |
8235 | abort (); /* Not used for C. */ | |
8236 | ||
8237 | case FIX_TRUNC_EXPR: | |
b572011e | 8238 | op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0); |
10f307d9 | 8239 | if (target == 0) |
8240 | target = gen_reg_rtx (mode); | |
8241 | expand_fix (target, op0, unsignedp); | |
8242 | return target; | |
8243 | ||
8244 | case FLOAT_EXPR: | |
b572011e | 8245 | op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0); |
10f307d9 | 8246 | if (target == 0) |
8247 | target = gen_reg_rtx (mode); | |
8248 | /* expand_float can't figure out what to do if FROM has VOIDmode. | |
8249 | So give it the correct mode. With -O, cse will optimize this. */ | |
8250 | if (GET_MODE (op0) == VOIDmode) | |
8251 | op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))), | |
8252 | op0); | |
8253 | expand_float (target, op0, | |
8254 | TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))); | |
8255 | return target; | |
8256 | ||
8257 | case NEGATE_EXPR: | |
1cb59c60 | 8258 | op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0); |
bec2d490 | 8259 | temp = expand_unop (mode, |
ff385626 | 8260 | ! unsignedp && flag_trapv |
8261 | && (GET_MODE_CLASS(mode) == MODE_INT) | |
8262 | ? negv_optab : neg_optab, op0, target, 0); | |
10f307d9 | 8263 | if (temp == 0) |
8264 | abort (); | |
8265 | return temp; | |
8266 | ||
8267 | case ABS_EXPR: | |
8268 | op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0); | |
8269 | ||
5db186f1 | 8270 | /* Handle complex values specially. */ |
d2ae1b1e | 8271 | if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT |
8272 | || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT) | |
8273 | return expand_complex_abs (mode, op0, target, unsignedp); | |
5db186f1 | 8274 | |
10f307d9 | 8275 | /* Unsigned abs is simply the operand. Testing here means we don't |
8276 | risk generating incorrect code below. */ | |
8277 | if (TREE_UNSIGNED (type)) | |
8278 | return op0; | |
8279 | ||
bec2d490 | 8280 | return expand_abs (mode, op0, target, unsignedp, |
997d68fe | 8281 | safe_from_p (target, TREE_OPERAND (exp, 0), 1)); |
10f307d9 | 8282 | |
8283 | case MAX_EXPR: | |
8284 | case MIN_EXPR: | |
8285 | target = original_target; | |
997d68fe | 8286 | if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1) |
716cadbd | 8287 | || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)) |
d2ae1b1e | 8288 | || GET_MODE (target) != mode |
10f307d9 | 8289 | || (GET_CODE (target) == REG |
8290 | && REGNO (target) < FIRST_PSEUDO_REGISTER)) | |
8291 | target = gen_reg_rtx (mode); | |
b572011e | 8292 | op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0); |
10f307d9 | 8293 | op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0); |
8294 | ||
8295 | /* First try to do it with a special MIN or MAX instruction. | |
8296 | If that does not win, use a conditional jump to select the proper | |
8297 | value. */ | |
8298 | this_optab = (TREE_UNSIGNED (type) | |
8299 | ? (code == MIN_EXPR ? umin_optab : umax_optab) | |
8300 | : (code == MIN_EXPR ? smin_optab : smax_optab)); | |
8301 | ||
8302 | temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp, | |
8303 | OPTAB_WIDEN); | |
8304 | if (temp != 0) | |
8305 | return temp; | |
8306 | ||
446a42ee | 8307 | /* At this point, a MEM target is no longer useful; we will get better |
8308 | code without it. */ | |
fa56dc1d | 8309 | |
446a42ee | 8310 | if (GET_CODE (target) == MEM) |
8311 | target = gen_reg_rtx (mode); | |
8312 | ||
1145b168 | 8313 | if (target != op0) |
8314 | emit_move_insn (target, op0); | |
d2ae1b1e | 8315 | |
10f307d9 | 8316 | op0 = gen_label_rtx (); |
d2ae1b1e | 8317 | |
228661d1 | 8318 | /* If this mode is an integer too wide to compare properly, |
8319 | compare word by word. Rely on cse to optimize constant cases. */ | |
a4110d9a | 8320 | if (GET_MODE_CLASS (mode) == MODE_INT |
8321 | && ! can_compare_p (GE, mode, ccp_jump)) | |
10f307d9 | 8322 | { |
228661d1 | 8323 | if (code == MAX_EXPR) |
d2ae1b1e | 8324 | do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), |
8325 | target, op1, NULL_RTX, op0); | |
10f307d9 | 8326 | else |
d2ae1b1e | 8327 | do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), |
8328 | op1, target, NULL_RTX, op0); | |
10f307d9 | 8329 | } |
228661d1 | 8330 | else |
8331 | { | |
1a29b174 | 8332 | int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))); |
8333 | do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE, | |
2b96c5f6 | 8334 | unsignedp, mode, NULL_RTX, NULL_RTX, |
1a29b174 | 8335 | op0); |
228661d1 | 8336 | } |
1a29b174 | 8337 | emit_move_insn (target, op1); |
10f307d9 | 8338 | emit_label (op0); |
8339 | return target; | |
8340 | ||
10f307d9 | 8341 | case BIT_NOT_EXPR: |
8342 | op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0); | |
8343 | temp = expand_unop (mode, one_cmpl_optab, op0, target, 1); | |
8344 | if (temp == 0) | |
8345 | abort (); | |
8346 | return temp; | |
8347 | ||
8348 | case FFS_EXPR: | |
8349 | op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0); | |
8350 | temp = expand_unop (mode, ffs_optab, op0, target, 1); | |
8351 | if (temp == 0) | |
8352 | abort (); | |
8353 | return temp; | |
8354 | ||
d2ae1b1e | 8355 | /* ??? Can optimize bitwise operations with one arg constant. |
8356 | Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b) | |
8357 | and (a bitwise1 b) bitwise2 b (etc) | |
8358 | but that is probably not worth while. */ | |
8359 | ||
8360 | /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two | |
8361 | boolean values when we want in all cases to compute both of them. In | |
8362 | general it is fastest to do TRUTH_AND_EXPR by computing both operands | |
8363 | as actual zero-or-1 values and then bitwise anding. In cases where | |
8364 | there cannot be any side effects, better code would be made by | |
8365 | treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is | |
8366 | how to recognize those cases. */ | |
8367 | ||
10f307d9 | 8368 | case TRUTH_AND_EXPR: |
8369 | case BIT_AND_EXPR: | |
8370 | this_optab = and_optab; | |
8371 | goto binop; | |
8372 | ||
10f307d9 | 8373 | case TRUTH_OR_EXPR: |
8374 | case BIT_IOR_EXPR: | |
8375 | this_optab = ior_optab; | |
8376 | goto binop; | |
8377 | ||
c43d8fab | 8378 | case TRUTH_XOR_EXPR: |
10f307d9 | 8379 | case BIT_XOR_EXPR: |
8380 | this_optab = xor_optab; | |
8381 | goto binop; | |
8382 | ||
8383 | case LSHIFT_EXPR: | |
8384 | case RSHIFT_EXPR: | |
8385 | case LROTATE_EXPR: | |
8386 | case RROTATE_EXPR: | |
997d68fe | 8387 | if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1)) |
10f307d9 | 8388 | subtarget = 0; |
8389 | op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0); | |
8390 | return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target, | |
8391 | unsignedp); | |
8392 | ||
d2ae1b1e | 8393 | /* Could determine the answer when only additive constants differ. Also, |
8394 | the addition of one can be handled by changing the condition. */ | |
10f307d9 | 8395 | case LT_EXPR: |
8396 | case LE_EXPR: | |
8397 | case GT_EXPR: | |
8398 | case GE_EXPR: | |
8399 | case EQ_EXPR: | |
8400 | case NE_EXPR: | |
a4110d9a | 8401 | case UNORDERED_EXPR: |
8402 | case ORDERED_EXPR: | |
8403 | case UNLT_EXPR: | |
8404 | case UNLE_EXPR: | |
8405 | case UNGT_EXPR: | |
8406 | case UNGE_EXPR: | |
8407 | case UNEQ_EXPR: | |
10f307d9 | 8408 | temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0); |
8409 | if (temp != 0) | |
8410 | return temp; | |
d2ae1b1e | 8411 | |
a92771b8 | 8412 | /* For foo != 0, load foo, and if it is nonzero load 1 instead. */ |
10f307d9 | 8413 | if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1)) |
8414 | && original_target | |
8415 | && GET_CODE (original_target) == REG | |
8416 | && (GET_MODE (original_target) | |
8417 | == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))) | |
8418 | { | |
d2ae1b1e | 8419 | temp = expand_expr (TREE_OPERAND (exp, 0), original_target, |
8420 | VOIDmode, 0); | |
8421 | ||
701cbdec | 8422 | /* If temp is constant, we can just compute the result. */ |
8423 | if (GET_CODE (temp) == CONST_INT) | |
8424 | { | |
8425 | if (INTVAL (temp) != 0) | |
8426 | emit_move_insn (target, const1_rtx); | |
8427 | else | |
8428 | emit_move_insn (target, const0_rtx); | |
8429 | ||
8430 | return target; | |
8431 | } | |
8432 | ||
10f307d9 | 8433 | if (temp != original_target) |
701cbdec | 8434 | { |
8435 | enum machine_mode mode1 = GET_MODE (temp); | |
8436 | if (mode1 == VOIDmode) | |
8437 | mode1 = tmode != VOIDmode ? tmode : mode; | |
ff385626 | 8438 | |
701cbdec | 8439 | temp = copy_to_mode_reg (mode1, temp); |
8440 | } | |
d2ae1b1e | 8441 | |
10f307d9 | 8442 | op1 = gen_label_rtx (); |
5a894bc6 | 8443 | emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX, |
2b96c5f6 | 8444 | GET_MODE (temp), unsignedp, op1); |
10f307d9 | 8445 | emit_move_insn (temp, const1_rtx); |
8446 | emit_label (op1); | |
8447 | return temp; | |
8448 | } | |
d2ae1b1e | 8449 | |
10f307d9 | 8450 | /* If no set-flag instruction, must generate a conditional |
8451 | store into a temporary variable. Drop through | |
8452 | and handle this like && and ||. */ | |
8453 | ||
8454 | case TRUTH_ANDIF_EXPR: | |
8455 | case TRUTH_ORIF_EXPR: | |
34f17b00 | 8456 | if (! ignore |
997d68fe | 8457 | && (target == 0 || ! safe_from_p (target, exp, 1) |
34f17b00 | 8458 | /* Make sure we don't have a hard reg (such as function's return |
8459 | value) live across basic blocks, if not optimizing. */ | |
8460 | || (!optimize && GET_CODE (target) == REG | |
8461 | && REGNO (target) < FIRST_PSEUDO_REGISTER))) | |
10f307d9 | 8462 | target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode); |
34f17b00 | 8463 | |
8464 | if (target) | |
8465 | emit_clr_insn (target); | |
8466 | ||
10f307d9 | 8467 | op1 = gen_label_rtx (); |
8468 | jumpifnot (exp, op1); | |
34f17b00 | 8469 | |
8470 | if (target) | |
8471 | emit_0_to_1_insn (target); | |
8472 | ||
10f307d9 | 8473 | emit_label (op1); |
34f17b00 | 8474 | return ignore ? const0_rtx : target; |
10f307d9 | 8475 | |
8476 | case TRUTH_NOT_EXPR: | |
8477 | op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0); | |
8478 | /* The parser is careful to generate TRUTH_NOT_EXPR | |
8479 | only with operands that are always zero or one. */ | |
b572011e | 8480 | temp = expand_binop (mode, xor_optab, op0, const1_rtx, |
10f307d9 | 8481 | target, 1, OPTAB_LIB_WIDEN); |
8482 | if (temp == 0) | |
8483 | abort (); | |
8484 | return temp; | |
8485 | ||
8486 | case COMPOUND_EXPR: | |
8487 | expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0); | |
8488 | emit_queue (); | |
8489 | return expand_expr (TREE_OPERAND (exp, 1), | |
8490 | (ignore ? const0_rtx : target), | |
8491 | VOIDmode, 0); | |
8492 | ||
8493 | case COND_EXPR: | |
4035eace | 8494 | /* If we would have a "singleton" (see below) were it not for a |
8495 | conversion in each arm, bring that conversion back out. */ | |
8496 | if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR | |
8497 | && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR | |
8498 | && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)) | |
8499 | == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0)))) | |
8500 | { | |
9c811526 | 8501 | tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0); |
8502 | tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0); | |
8503 | ||
8504 | if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2' | |
8505 | && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0)) | |
8506 | || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2' | |
8507 | && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)) | |
8508 | || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1' | |
8509 | && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0)) | |
8510 | || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1' | |
8511 | && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))) | |
4035eace | 8512 | return expand_expr (build1 (NOP_EXPR, type, |
9c811526 | 8513 | build (COND_EXPR, TREE_TYPE (iftrue), |
4035eace | 8514 | TREE_OPERAND (exp, 0), |
9c811526 | 8515 | iftrue, iffalse)), |
4035eace | 8516 | target, tmode, modifier); |
8517 | } | |
8518 | ||
10f307d9 | 8519 | { |
8520 | /* Note that COND_EXPRs whose type is a structure or union | |
8521 | are required to be constructed to contain assignments of | |
8522 | a temporary variable, so that we can evaluate them here | |
8523 | for side effect only. If type is void, we must do likewise. */ | |
8524 | ||
8525 | /* If an arm of the branch requires a cleanup, | |
8526 | only that cleanup is performed. */ | |
8527 | ||
8528 | tree singleton = 0; | |
8529 | tree binary_op = 0, unary_op = 0; | |
10f307d9 | 8530 | |
8531 | /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and | |
8532 | convert it to our mode, if necessary. */ | |
8533 | if (integer_onep (TREE_OPERAND (exp, 1)) | |
8534 | && integer_zerop (TREE_OPERAND (exp, 2)) | |
8535 | && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<') | |
8536 | { | |
f75fb6ae | 8537 | if (ignore) |
8538 | { | |
8539 | expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, | |
8a06f2d4 | 8540 | modifier); |
f75fb6ae | 8541 | return const0_rtx; |
8542 | } | |
8543 | ||
8a06f2d4 | 8544 | op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier); |
10f307d9 | 8545 | if (GET_MODE (op0) == mode) |
8546 | return op0; | |
d2ae1b1e | 8547 | |
10f307d9 | 8548 | if (target == 0) |
8549 | target = gen_reg_rtx (mode); | |
8550 | convert_move (target, op0, unsignedp); | |
8551 | return target; | |
8552 | } | |
8553 | ||
4035eace | 8554 | /* Check for X ? A + B : A. If we have this, we can copy A to the |
8555 | output and conditionally add B. Similarly for unary operations. | |
8556 | Don't do this if X has side-effects because those side effects | |
8557 | might affect A or B and the "?" operation is a sequence point in | |
8558 | ANSI. (operand_equal_p tests for side effects.) */ | |
10f307d9 | 8559 | |
8560 | if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2' | |
8561 | && operand_equal_p (TREE_OPERAND (exp, 2), | |
8562 | TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0)) | |
8563 | singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1); | |
8564 | else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2' | |
8565 | && operand_equal_p (TREE_OPERAND (exp, 1), | |
8566 | TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0)) | |
8567 | singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2); | |
8568 | else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1' | |
8569 | && operand_equal_p (TREE_OPERAND (exp, 2), | |
8570 | TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0)) | |
8571 | singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1); | |
8572 | else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1' | |
8573 | && operand_equal_p (TREE_OPERAND (exp, 1), | |
8574 | TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0)) | |
8575 | singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2); | |
8576 | ||
46e62598 | 8577 | /* If we are not to produce a result, we have no target. Otherwise, |
8578 | if a target was specified use it; it will not be used as an | |
fa56dc1d | 8579 | intermediate target unless it is safe. If no target, use a |
46e62598 | 8580 | temporary. */ |
8581 | ||
8582 | if (ignore) | |
8583 | temp = 0; | |
8584 | else if (original_target | |
997d68fe | 8585 | && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1) |
46e62598 | 8586 | || (singleton && GET_CODE (original_target) == REG |
8587 | && REGNO (original_target) >= FIRST_PSEUDO_REGISTER | |
8588 | && original_target == var_rtx (singleton))) | |
8589 | && GET_MODE (original_target) == mode | |
e500743d | 8590 | #ifdef HAVE_conditional_move |
8591 | && (! can_conditionally_move_p (mode) | |
8592 | || GET_CODE (original_target) == REG | |
8593 | || TREE_ADDRESSABLE (type)) | |
8594 | #endif | |
9c0e6d90 | 8595 | && (GET_CODE (original_target) != MEM |
8596 | || TREE_ADDRESSABLE (type))) | |
46e62598 | 8597 | temp = original_target; |
8598 | else if (TREE_ADDRESSABLE (type)) | |
8599 | abort (); | |
8600 | else | |
8601 | temp = assign_temp (type, 0, 0, 1); | |
8602 | ||
4035eace | 8603 | /* If we had X ? A + C : A, with C a constant power of 2, and we can |
8604 | do the test of X as a store-flag operation, do this as | |
8605 | A + ((X != 0) << log C). Similarly for other simple binary | |
8606 | operators. Only do for C == 1 if BRANCH_COST is low. */ | |
f75fb6ae | 8607 | if (temp && singleton && binary_op |
10f307d9 | 8608 | && (TREE_CODE (binary_op) == PLUS_EXPR |
8609 | || TREE_CODE (binary_op) == MINUS_EXPR | |
8610 | || TREE_CODE (binary_op) == BIT_IOR_EXPR | |
b35321d6 | 8611 | || TREE_CODE (binary_op) == BIT_XOR_EXPR) |
4035eace | 8612 | && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1)) |
8613 | : integer_onep (TREE_OPERAND (binary_op, 1))) | |
10f307d9 | 8614 | && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<') |
8615 | { | |
8616 | rtx result; | |
b4f00eec | 8617 | tree cond; |
bec2d490 | 8618 | optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR |
ff385626 | 8619 | ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op)) |
8620 | ? addv_optab : add_optab) | |
8621 | : TREE_CODE (binary_op) == MINUS_EXPR | |
8622 | ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op)) | |
8623 | ? subv_optab : sub_optab) | |
8624 | : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab | |
8625 | : xor_optab); | |
10f307d9 | 8626 | |
b4f00eec | 8627 | /* If we had X ? A : A + 1, do this as A + (X == 0). */ |
10f307d9 | 8628 | if (singleton == TREE_OPERAND (exp, 1)) |
b4f00eec | 8629 | cond = invert_truthvalue (TREE_OPERAND (exp, 0)); |
8630 | else | |
8631 | cond = TREE_OPERAND (exp, 0); | |
10f307d9 | 8632 | |
b4f00eec | 8633 | result = do_store_flag (cond, (safe_from_p (temp, singleton, 1) |
8634 | ? temp : NULL_RTX), | |
10f307d9 | 8635 | mode, BRANCH_COST <= 1); |
8636 | ||
4035eace | 8637 | if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1))) |
8638 | result = expand_shift (LSHIFT_EXPR, mode, result, | |
8639 | build_int_2 (tree_log2 | |
8640 | (TREE_OPERAND | |
8641 | (binary_op, 1)), | |
8642 | 0), | |
997d68fe | 8643 | (safe_from_p (temp, singleton, 1) |
4035eace | 8644 | ? temp : NULL_RTX), 0); |
8645 | ||
10f307d9 | 8646 | if (result) |
8647 | { | |
b572011e | 8648 | op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0); |
10f307d9 | 8649 | return expand_binop (mode, boptab, op1, result, temp, |
8650 | unsignedp, OPTAB_LIB_WIDEN); | |
8651 | } | |
10f307d9 | 8652 | } |
fa56dc1d | 8653 | |
d07f1b1f | 8654 | do_pending_stack_adjust (); |
10f307d9 | 8655 | NO_DEFER_POP; |
8656 | op0 = gen_label_rtx (); | |
8657 | ||
8658 | if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))) | |
8659 | { | |
8660 | if (temp != 0) | |
8661 | { | |
8662 | /* If the target conflicts with the other operand of the | |
8663 | binary op, we can't use it. Also, we can't use the target | |
8664 | if it is a hard register, because evaluating the condition | |
8665 | might clobber it. */ | |
8666 | if ((binary_op | |
997d68fe | 8667 | && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1)) |
10f307d9 | 8668 | || (GET_CODE (temp) == REG |
8669 | && REGNO (temp) < FIRST_PSEUDO_REGISTER)) | |
8670 | temp = gen_reg_rtx (mode); | |
8671 | store_expr (singleton, temp, 0); | |
8672 | } | |
8673 | else | |
b572011e | 8674 | expand_expr (singleton, |
637e11f7 | 8675 | ignore ? const0_rtx : NULL_RTX, VOIDmode, 0); |
10f307d9 | 8676 | if (singleton == TREE_OPERAND (exp, 1)) |
8677 | jumpif (TREE_OPERAND (exp, 0), op0); | |
8678 | else | |
8679 | jumpifnot (TREE_OPERAND (exp, 0), op0); | |
8680 | ||
ad87de1e | 8681 | start_cleanup_deferral (); |
10f307d9 | 8682 | if (binary_op && temp == 0) |
8683 | /* Just touch the other operand. */ | |
8684 | expand_expr (TREE_OPERAND (binary_op, 1), | |
b572011e | 8685 | ignore ? const0_rtx : NULL_RTX, VOIDmode, 0); |
10f307d9 | 8686 | else if (binary_op) |
8687 | store_expr (build (TREE_CODE (binary_op), type, | |
8688 | make_tree (type, temp), | |
8689 | TREE_OPERAND (binary_op, 1)), | |
8690 | temp, 0); | |
8691 | else | |
8692 | store_expr (build1 (TREE_CODE (unary_op), type, | |
8693 | make_tree (type, temp)), | |
8694 | temp, 0); | |
8695 | op1 = op0; | |
10f307d9 | 8696 | } |
10f307d9 | 8697 | /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any |
8698 | comparison operator. If we have one of these cases, set the | |
8699 | output to A, branch on A (cse will merge these two references), | |
8700 | then set the output to FOO. */ | |
8701 | else if (temp | |
8702 | && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<' | |
8703 | && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) | |
8704 | && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0), | |
8705 | TREE_OPERAND (exp, 1), 0) | |
0dbd1c74 | 8706 | && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)) |
8707 | || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR) | |
997d68fe | 8708 | && safe_from_p (temp, TREE_OPERAND (exp, 2), 1)) |
10f307d9 | 8709 | { |
fa56dc1d | 8710 | if (GET_CODE (temp) == REG |
8711 | && REGNO (temp) < FIRST_PSEUDO_REGISTER) | |
10f307d9 | 8712 | temp = gen_reg_rtx (mode); |
8713 | store_expr (TREE_OPERAND (exp, 1), temp, 0); | |
8714 | jumpif (TREE_OPERAND (exp, 0), op0); | |
34e2ddcd | 8715 | |
ad87de1e | 8716 | start_cleanup_deferral (); |
10f307d9 | 8717 | store_expr (TREE_OPERAND (exp, 2), temp, 0); |
8718 | op1 = op0; | |
8719 | } | |
8720 | else if (temp | |
8721 | && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<' | |
8722 | && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) | |
8723 | && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0), | |
8724 | TREE_OPERAND (exp, 2), 0) | |
0dbd1c74 | 8725 | && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)) |
8726 | || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR) | |
997d68fe | 8727 | && safe_from_p (temp, TREE_OPERAND (exp, 1), 1)) |
10f307d9 | 8728 | { |
fa56dc1d | 8729 | if (GET_CODE (temp) == REG |
8730 | && REGNO (temp) < FIRST_PSEUDO_REGISTER) | |
10f307d9 | 8731 | temp = gen_reg_rtx (mode); |
8732 | store_expr (TREE_OPERAND (exp, 2), temp, 0); | |
8733 | jumpifnot (TREE_OPERAND (exp, 0), op0); | |
34e2ddcd | 8734 | |
ad87de1e | 8735 | start_cleanup_deferral (); |
10f307d9 | 8736 | store_expr (TREE_OPERAND (exp, 1), temp, 0); |
8737 | op1 = op0; | |
8738 | } | |
8739 | else | |
8740 | { | |
8741 | op1 = gen_label_rtx (); | |
8742 | jumpifnot (TREE_OPERAND (exp, 0), op0); | |
34e2ddcd | 8743 | |
ad87de1e | 8744 | start_cleanup_deferral (); |
fa56dc1d | 8745 | |
d5b495d7 | 8746 | /* One branch of the cond can be void, if it never returns. For |
fa56dc1d | 8747 | example A ? throw : E */ |
d5b495d7 | 8748 | if (temp != 0 |
fa56dc1d | 8749 | && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node) |
10f307d9 | 8750 | store_expr (TREE_OPERAND (exp, 1), temp, 0); |
8751 | else | |
b572011e | 8752 | expand_expr (TREE_OPERAND (exp, 1), |
8753 | ignore ? const0_rtx : NULL_RTX, VOIDmode, 0); | |
ad87de1e | 8754 | end_cleanup_deferral (); |
10f307d9 | 8755 | emit_queue (); |
8756 | emit_jump_insn (gen_jump (op1)); | |
8757 | emit_barrier (); | |
8758 | emit_label (op0); | |
ad87de1e | 8759 | start_cleanup_deferral (); |
d5b495d7 | 8760 | if (temp != 0 |
fa56dc1d | 8761 | && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node) |
10f307d9 | 8762 | store_expr (TREE_OPERAND (exp, 2), temp, 0); |
8763 | else | |
b572011e | 8764 | expand_expr (TREE_OPERAND (exp, 2), |
8765 | ignore ? const0_rtx : NULL_RTX, VOIDmode, 0); | |
10f307d9 | 8766 | } |
8767 | ||
ad87de1e | 8768 | end_cleanup_deferral (); |
10f307d9 | 8769 | |
8770 | emit_queue (); | |
8771 | emit_label (op1); | |
8772 | OK_DEFER_POP; | |
34e2ddcd | 8773 | |
10f307d9 | 8774 | return temp; |
8775 | } | |
8776 | ||
8777 | case TARGET_EXPR: | |
8778 | { | |
8779 | /* Something needs to be initialized, but we didn't know | |
8780 | where that thing was when building the tree. For example, | |
8781 | it could be the return value of a function, or a parameter | |
8782 | to a function which lays down in the stack, or a temporary | |
8783 | variable which must be passed by reference. | |
8784 | ||
8785 | We guarantee that the expression will either be constructed | |
8786 | or copied into our original target. */ | |
8787 | ||
8788 | tree slot = TREE_OPERAND (exp, 0); | |
55e5e99d | 8789 | tree cleanups = NULL_TREE; |
382a84fe | 8790 | tree exp1; |
10f307d9 | 8791 | |
8792 | if (TREE_CODE (slot) != VAR_DECL) | |
8793 | abort (); | |
8794 | ||
4ebbf7e7 | 8795 | if (! ignore) |
8796 | target = original_target; | |
8797 | ||
813a136d | 8798 | /* Set this here so that if we get a target that refers to a |
8799 | register variable that's already been used, put_reg_into_stack | |
fa56dc1d | 8800 | knows that it should fix up those uses. */ |
813a136d | 8801 | TREE_USED (slot) = 1; |
8802 | ||
10f307d9 | 8803 | if (target == 0) |
8804 | { | |
0e8e37b2 | 8805 | if (DECL_RTL_SET_P (slot)) |
254deafd | 8806 | { |
8807 | target = DECL_RTL (slot); | |
382a84fe | 8808 | /* If we have already expanded the slot, so don't do |
254deafd | 8809 | it again. (mrs) */ |
382a84fe | 8810 | if (TREE_OPERAND (exp, 1) == NULL_TREE) |
8811 | return target; | |
254deafd | 8812 | } |
10f307d9 | 8813 | else |
8814 | { | |
0dbd1c74 | 8815 | target = assign_temp (type, 2, 0, 1); |
10f307d9 | 8816 | /* All temp slots at this level must not conflict. */ |
8817 | preserve_temp_slots (target); | |
0e8e37b2 | 8818 | SET_DECL_RTL (slot, target); |
0dbd1c74 | 8819 | if (TREE_ADDRESSABLE (slot)) |
930f0e87 | 8820 | put_var_into_stack (slot); |
10f307d9 | 8821 | |
52c30396 | 8822 | /* Since SLOT is not known to the called function |
8823 | to belong to its stack frame, we must build an explicit | |
8824 | cleanup. This case occurs when we must build up a reference | |
8825 | to pass the reference as an argument. In this case, | |
8826 | it is very likely that such a reference need not be | |
8827 | built here. */ | |
8828 | ||
8829 | if (TREE_OPERAND (exp, 2) == 0) | |
04745efb | 8830 | TREE_OPERAND (exp, 2) |
8831 | = (*lang_hooks.maybe_build_cleanup) (slot); | |
55e5e99d | 8832 | cleanups = TREE_OPERAND (exp, 2); |
52c30396 | 8833 | } |
10f307d9 | 8834 | } |
8835 | else | |
8836 | { | |
8837 | /* This case does occur, when expanding a parameter which | |
8838 | needs to be constructed on the stack. The target | |
8839 | is the actual stack address that we want to initialize. | |
8840 | The function we call will perform the cleanup in this case. */ | |
8841 | ||
5bf72397 | 8842 | /* If we have already assigned it space, use that space, |
8843 | not target that we were passed in, as our target | |
8844 | parameter is only a hint. */ | |
0e8e37b2 | 8845 | if (DECL_RTL_SET_P (slot)) |
fa56dc1d | 8846 | { |
8847 | target = DECL_RTL (slot); | |
8848 | /* If we have already expanded the slot, so don't do | |
5bf72397 | 8849 | it again. (mrs) */ |
fa56dc1d | 8850 | if (TREE_OPERAND (exp, 1) == NULL_TREE) |
8851 | return target; | |
5bf72397 | 8852 | } |
4d22520d | 8853 | else |
8854 | { | |
0e8e37b2 | 8855 | SET_DECL_RTL (slot, target); |
4d22520d | 8856 | /* If we must have an addressable slot, then make sure that |
8857 | the RTL that we just stored in slot is OK. */ | |
8858 | if (TREE_ADDRESSABLE (slot)) | |
930f0e87 | 8859 | put_var_into_stack (slot); |
4d22520d | 8860 | } |
10f307d9 | 8861 | } |
8862 | ||
836b9503 | 8863 | exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1); |
382a84fe | 8864 | /* Mark it as expanded. */ |
8865 | TREE_OPERAND (exp, 1) = NULL_TREE; | |
8866 | ||
483f9fca | 8867 | store_expr (exp1, target, 0); |
5f019534 | 8868 | |
a9bc793b | 8869 | expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp)); |
fa56dc1d | 8870 | |
483f9fca | 8871 | return target; |
10f307d9 | 8872 | } |
8873 | ||
8874 | case INIT_EXPR: | |
8875 | { | |
8876 | tree lhs = TREE_OPERAND (exp, 0); | |
8877 | tree rhs = TREE_OPERAND (exp, 1); | |
10f307d9 | 8878 | |
8879 | temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0); | |
10f307d9 | 8880 | return temp; |
8881 | } | |
8882 | ||
8883 | case MODIFY_EXPR: | |
8884 | { | |
8885 | /* If lhs is complex, expand calls in rhs before computing it. | |
8cbe13ee | 8886 | That's so we don't compute a pointer and save it over a |
8887 | call. If lhs is simple, compute it first so we can give it | |
8888 | as a target if the rhs is just a call. This avoids an | |
8889 | extra temp and copy and that prevents a partial-subsumption | |
8890 | which makes bad code. Actually we could treat | |
8891 | component_ref's of vars like vars. */ | |
10f307d9 | 8892 | |
8893 | tree lhs = TREE_OPERAND (exp, 0); | |
8894 | tree rhs = TREE_OPERAND (exp, 1); | |
10f307d9 | 8895 | |
8896 | temp = 0; | |
8897 | ||
10f307d9 | 8898 | /* Check for |= or &= of a bitfield of size one into another bitfield |
8899 | of size 1. In this case, (unless we need the result of the | |
8900 | assignment) we can do this more efficiently with a | |
8901 | test followed by an assignment, if necessary. | |
8902 | ||
8903 | ??? At this point, we can't get a BIT_FIELD_REF here. But if | |
8904 | things change so we do, this code should be enhanced to | |
8905 | support it. */ | |
8906 | if (ignore | |
8907 | && TREE_CODE (lhs) == COMPONENT_REF | |
8908 | && (TREE_CODE (rhs) == BIT_IOR_EXPR | |
8909 | || TREE_CODE (rhs) == BIT_AND_EXPR) | |
8910 | && TREE_OPERAND (rhs, 0) == lhs | |
8911 | && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF | |
a0c2c45b | 8912 | && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1))) |
8913 | && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1)))) | |
10f307d9 | 8914 | { |
8915 | rtx label = gen_label_rtx (); | |
8916 | ||
8917 | do_jump (TREE_OPERAND (rhs, 1), | |
8918 | TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0, | |
8919 | TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0); | |
8920 | expand_assignment (lhs, convert (TREE_TYPE (rhs), | |
8921 | (TREE_CODE (rhs) == BIT_IOR_EXPR | |
8922 | ? integer_one_node | |
8923 | : integer_zero_node)), | |
8924 | 0, 0); | |
01ab6370 | 8925 | do_pending_stack_adjust (); |
10f307d9 | 8926 | emit_label (label); |
8927 | return const0_rtx; | |
8928 | } | |
8929 | ||
10f307d9 | 8930 | temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0); |
ff385626 | 8931 | |
10f307d9 | 8932 | return temp; |
8933 | } | |
8934 | ||
2be2ba3c | 8935 | case RETURN_EXPR: |
8936 | if (!TREE_OPERAND (exp, 0)) | |
8937 | expand_null_return (); | |
8938 | else | |
8939 | expand_return (TREE_OPERAND (exp, 0)); | |
8940 | return const0_rtx; | |
8941 | ||
10f307d9 | 8942 | case PREINCREMENT_EXPR: |
8943 | case PREDECREMENT_EXPR: | |
37e76d7d | 8944 | return expand_increment (exp, 0, ignore); |
10f307d9 | 8945 | |
8946 | case POSTINCREMENT_EXPR: | |
8947 | case POSTDECREMENT_EXPR: | |
8948 | /* Faster to treat as pre-increment if result is not used. */ | |
37e76d7d | 8949 | return expand_increment (exp, ! ignore, ignore); |
10f307d9 | 8950 | |
8951 | case ADDR_EXPR: | |
8952 | /* Are we taking the address of a nested function? */ | |
8953 | if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL | |
8fd50fe1 | 8954 | && decl_function_context (TREE_OPERAND (exp, 0)) != 0 |
997d68fe | 8955 | && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0)) |
8956 | && ! TREE_STATIC (exp)) | |
10f307d9 | 8957 | { |
8958 | op0 = trampoline_address (TREE_OPERAND (exp, 0)); | |
8959 | op0 = force_operand (op0, target); | |
8960 | } | |
fbd92dcf | 8961 | /* If we are taking the address of something erroneous, just |
8962 | return a zero. */ | |
8963 | else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK) | |
8964 | return const0_rtx; | |
606a59ec | 8965 | /* If we are taking the address of a constant and are at the |
8966 | top level, we have to use output_constant_def since we can't | |
8967 | call force_const_mem at top level. */ | |
8968 | else if (cfun == 0 | |
8969 | && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR | |
8970 | || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) | |
8971 | == 'c'))) | |
8972 | op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0); | |
10f307d9 | 8973 | else |
8974 | { | |
52c30396 | 8975 | /* We make sure to pass const0_rtx down if we came in with |
8976 | ignore set, to avoid doing the cleanups twice for something. */ | |
8977 | op0 = expand_expr (TREE_OPERAND (exp, 0), | |
8978 | ignore ? const0_rtx : NULL_RTX, VOIDmode, | |
10f307d9 | 8979 | (modifier == EXPAND_INITIALIZER |
8980 | ? modifier : EXPAND_CONST_ADDRESS)); | |
3f9387ea | 8981 | |
f17fe086 | 8982 | /* If we are going to ignore the result, OP0 will have been set |
8983 | to const0_rtx, so just return it. Don't get confused and | |
8984 | think we are taking the address of the constant. */ | |
8985 | if (ignore) | |
8986 | return op0; | |
8987 | ||
f9636a66 | 8988 | /* Pass 1 for MODIFY, so that protect_from_queue doesn't get |
8989 | clever and returns a REG when given a MEM. */ | |
8990 | op0 = protect_from_queue (op0, 1); | |
d0bb4e31 | 8991 | |
7014838c | 8992 | /* We would like the object in memory. If it is a constant, we can |
8993 | have it be statically allocated into memory. For a non-constant, | |
8994 | we need to allocate some memory and store the value into it. */ | |
3f9387ea | 8995 | |
8996 | if (CONSTANT_P (op0)) | |
8997 | op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))), | |
8998 | op0); | |
fbd92dcf | 8999 | else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG |
ddf4ad75 | 9000 | || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF |
9001 | || GET_CODE (op0) == PARALLEL) | |
3f9387ea | 9002 | { |
009eb56e | 9003 | /* If the operand is a SAVE_EXPR, we can deal with this by |
9004 | forcing the SAVE_EXPR into memory. */ | |
9005 | if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR) | |
9006 | { | |
9007 | put_var_into_stack (TREE_OPERAND (exp, 0)); | |
9008 | op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0)); | |
9009 | } | |
ddf4ad75 | 9010 | else |
009eb56e | 9011 | { |
9012 | /* If this object is in a register, it can't be BLKmode. */ | |
9013 | tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0)); | |
d9d2eee0 | 9014 | rtx memloc = assign_temp (inner_type, 1, 1, 1); |
009eb56e | 9015 | |
9016 | if (GET_CODE (op0) == PARALLEL) | |
9017 | /* Handle calls that pass values in multiple | |
9018 | non-contiguous locations. The Irix 6 ABI has examples | |
9019 | of this. */ | |
ff385626 | 9020 | emit_group_store (memloc, op0, |
009eb56e | 9021 | int_size_in_bytes (inner_type)); |
9022 | else | |
9023 | emit_move_insn (memloc, op0); | |
ff385626 | 9024 | |
009eb56e | 9025 | op0 = memloc; |
9026 | } | |
3f9387ea | 9027 | } |
9028 | ||
10f307d9 | 9029 | if (GET_CODE (op0) != MEM) |
9030 | abort (); | |
fa56dc1d | 9031 | |
3084721c | 9032 | mark_temp_addr_taken (op0); |
10f307d9 | 9033 | if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER) |
ed8d3eee | 9034 | { |
3084721c | 9035 | op0 = XEXP (op0, 0); |
ed8d3eee | 9036 | #ifdef POINTERS_EXTEND_UNSIGNED |
3084721c | 9037 | if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode |
ed8d3eee | 9038 | && mode == ptr_mode) |
3084721c | 9039 | op0 = convert_memory_address (ptr_mode, op0); |
ed8d3eee | 9040 | #endif |
3084721c | 9041 | return op0; |
ed8d3eee | 9042 | } |
4aa0b850 | 9043 | |
1ed6227f | 9044 | /* If OP0 is not aligned as least as much as the type requires, we |
9045 | need to make a temporary, copy OP0 to it, and take the address of | |
9046 | the temporary. We want to use the alignment of the type, not of | |
9047 | the operand. Note that this is incorrect for FUNCTION_TYPE, but | |
9048 | the test for BLKmode means that can't happen. The test for | |
9049 | BLKmode is because we never make mis-aligned MEMs with | |
9050 | non-BLKmode. | |
9051 | ||
9052 | We don't need to do this at all if the machine doesn't have | |
9053 | strict alignment. */ | |
9054 | if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode | |
9055 | && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))) | |
f96c43fb | 9056 | > MEM_ALIGN (op0)) |
9057 | && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT) | |
2b96c5f6 | 9058 | { |
9059 | tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0)); | |
9060 | rtx new | |
9061 | = assign_stack_temp_for_type | |
9062 | (TYPE_MODE (inner_type), | |
9063 | MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0)) | |
30098777 | 9064 | : int_size_in_bytes (inner_type), |
2b96c5f6 | 9065 | 1, build_qualified_type (inner_type, |
9066 | (TYPE_QUALS (inner_type) | |
9067 | | TYPE_QUAL_CONST))); | |
9068 | ||
a9d9ab08 | 9069 | if (TYPE_ALIGN_OK (inner_type)) |
9070 | abort (); | |
9071 | ||
0378dbdc | 9072 | emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)), |
9073 | BLOCK_OP_NORMAL); | |
2b96c5f6 | 9074 | op0 = new; |
9075 | } | |
9076 | ||
10f307d9 | 9077 | op0 = force_operand (XEXP (op0, 0), target); |
9078 | } | |
4aa0b850 | 9079 | |
cc84ca71 | 9080 | if (flag_force_addr |
9081 | && GET_CODE (op0) != REG | |
9082 | && modifier != EXPAND_CONST_ADDRESS | |
9083 | && modifier != EXPAND_INITIALIZER | |
9084 | && modifier != EXPAND_SUM) | |
4aa0b850 | 9085 | op0 = force_reg (Pmode, op0); |
9086 | ||
6e6b4174 | 9087 | if (GET_CODE (op0) == REG |
9088 | && ! REG_USERVAR_P (op0)) | |
80909c64 | 9089 | mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type))); |
4aa0b850 | 9090 | |
ed8d3eee | 9091 | #ifdef POINTERS_EXTEND_UNSIGNED |
9092 | if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode | |
9093 | && mode == ptr_mode) | |
d621c33d | 9094 | op0 = convert_memory_address (ptr_mode, op0); |
ed8d3eee | 9095 | #endif |
9096 | ||
10f307d9 | 9097 | return op0; |
9098 | ||
9099 | case ENTRY_VALUE_EXPR: | |
9100 | abort (); | |
9101 | ||
b63679d2 | 9102 | /* COMPLEX type for Extended Pascal & Fortran */ |
9103 | case COMPLEX_EXPR: | |
9104 | { | |
9105 | enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp))); | |
dd0d17cd | 9106 | rtx insns; |
b63679d2 | 9107 | |
9108 | /* Get the rtx code of the operands. */ | |
9109 | op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0); | |
9110 | op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0); | |
9111 | ||
9112 | if (! target) | |
9113 | target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp))); | |
9114 | ||
dd0d17cd | 9115 | start_sequence (); |
b63679d2 | 9116 | |
9117 | /* Move the real (op0) and imaginary (op1) parts to their location. */ | |
5db186f1 | 9118 | emit_move_insn (gen_realpart (mode, target), op0); |
9119 | emit_move_insn (gen_imagpart (mode, target), op1); | |
b63679d2 | 9120 | |
dd0d17cd | 9121 | insns = get_insns (); |
9122 | end_sequence (); | |
9123 | ||
b63679d2 | 9124 | /* Complex construction should appear as a single unit. */ |
dd0d17cd | 9125 | /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS, |
9126 | each with a separate pseudo as destination. | |
9127 | It's not correct for flow to treat them as a unit. */ | |
8ba8185c | 9128 | if (GET_CODE (target) != CONCAT) |
dd0d17cd | 9129 | emit_no_conflict_block (insns, target, op0, op1, NULL_RTX); |
9130 | else | |
31d3e01c | 9131 | emit_insn (insns); |
b63679d2 | 9132 | |
9133 | return target; | |
9134 | } | |
9135 | ||
9136 | case REALPART_EXPR: | |
5db186f1 | 9137 | op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0); |
9138 | return gen_realpart (mode, op0); | |
fa56dc1d | 9139 | |
b63679d2 | 9140 | case IMAGPART_EXPR: |
5db186f1 | 9141 | op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0); |
9142 | return gen_imagpart (mode, op0); | |
b63679d2 | 9143 | |
9144 | case CONJ_EXPR: | |
9145 | { | |
1b77ec5f | 9146 | enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp))); |
b63679d2 | 9147 | rtx imag_t; |
dd0d17cd | 9148 | rtx insns; |
fa56dc1d | 9149 | |
9150 | op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0); | |
b63679d2 | 9151 | |
9152 | if (! target) | |
d2ae1b1e | 9153 | target = gen_reg_rtx (mode); |
fa56dc1d | 9154 | |
dd0d17cd | 9155 | start_sequence (); |
b63679d2 | 9156 | |
9157 | /* Store the realpart and the negated imagpart to target. */ | |
1b77ec5f | 9158 | emit_move_insn (gen_realpart (partmode, target), |
9159 | gen_realpart (partmode, op0)); | |
b63679d2 | 9160 | |
1b77ec5f | 9161 | imag_t = gen_imagpart (partmode, target); |
bec2d490 | 9162 | temp = expand_unop (partmode, |
ff385626 | 9163 | ! unsignedp && flag_trapv |
9164 | && (GET_MODE_CLASS(partmode) == MODE_INT) | |
9165 | ? negv_optab : neg_optab, | |
fa56dc1d | 9166 | gen_imagpart (partmode, op0), imag_t, 0); |
b63679d2 | 9167 | if (temp != imag_t) |
9168 | emit_move_insn (imag_t, temp); | |
9169 | ||
dd0d17cd | 9170 | insns = get_insns (); |
9171 | end_sequence (); | |
9172 | ||
fa56dc1d | 9173 | /* Conjugate should appear as a single unit |
d2ae1b1e | 9174 | If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS, |
dd0d17cd | 9175 | each with a separate pseudo as destination. |
9176 | It's not correct for flow to treat them as a unit. */ | |
8ba8185c | 9177 | if (GET_CODE (target) != CONCAT) |
dd0d17cd | 9178 | emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX); |
9179 | else | |
31d3e01c | 9180 | emit_insn (insns); |
b63679d2 | 9181 | |
9182 | return target; | |
9183 | } | |
9184 | ||
694ec519 | 9185 | case TRY_CATCH_EXPR: |
9186 | { | |
9187 | tree handler = TREE_OPERAND (exp, 1); | |
9188 | ||
9189 | expand_eh_region_start (); | |
9190 | ||
9191 | op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0); | |
9192 | ||
df4b504c | 9193 | expand_eh_region_end_cleanup (handler); |
694ec519 | 9194 | |
9195 | return op0; | |
9196 | } | |
9197 | ||
f0c211a3 | 9198 | case TRY_FINALLY_EXPR: |
9199 | { | |
9200 | tree try_block = TREE_OPERAND (exp, 0); | |
9201 | tree finally_block = TREE_OPERAND (exp, 1); | |
f0c211a3 | 9202 | |
e62d4117 | 9203 | if (!optimize || unsafe_for_reeval (finally_block) > 1) |
6fc87840 | 9204 | { |
9205 | /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR | |
9206 | is not sufficient, so we cannot expand the block twice. | |
9207 | So we play games with GOTO_SUBROUTINE_EXPR to let us | |
9208 | expand the thing only once. */ | |
e62d4117 | 9209 | /* When not optimizing, we go ahead with this form since |
9210 | (1) user breakpoints operate more predictably without | |
9211 | code duplication, and | |
9212 | (2) we're not running any of the global optimizers | |
9213 | that would explode in time/space with the highly | |
9214 | connected CFG created by the indirect branching. */ | |
6fc87840 | 9215 | |
9216 | rtx finally_label = gen_label_rtx (); | |
9217 | rtx done_label = gen_label_rtx (); | |
9218 | rtx return_link = gen_reg_rtx (Pmode); | |
9219 | tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node, | |
9220 | (tree) finally_label, (tree) return_link); | |
9221 | TREE_SIDE_EFFECTS (cleanup) = 1; | |
9222 | ||
9223 | /* Start a new binding layer that will keep track of all cleanup | |
9224 | actions to be performed. */ | |
9225 | expand_start_bindings (2); | |
9226 | target_temp_slot_level = temp_slot_level; | |
9227 | ||
9228 | expand_decl_cleanup (NULL_TREE, cleanup); | |
9229 | op0 = expand_expr (try_block, target, tmode, modifier); | |
9230 | ||
9231 | preserve_temp_slots (op0); | |
9232 | expand_end_bindings (NULL_TREE, 0, 0); | |
9233 | emit_jump (done_label); | |
9234 | emit_label (finally_label); | |
9235 | expand_expr (finally_block, const0_rtx, VOIDmode, 0); | |
9236 | emit_indirect_jump (return_link); | |
9237 | emit_label (done_label); | |
9238 | } | |
9239 | else | |
9240 | { | |
9241 | expand_start_bindings (2); | |
9242 | target_temp_slot_level = temp_slot_level; | |
f0c211a3 | 9243 | |
6fc87840 | 9244 | expand_decl_cleanup (NULL_TREE, finally_block); |
9245 | op0 = expand_expr (try_block, target, tmode, modifier); | |
f0c211a3 | 9246 | |
6fc87840 | 9247 | preserve_temp_slots (op0); |
9248 | expand_end_bindings (NULL_TREE, 0, 0); | |
9249 | } | |
f0c211a3 | 9250 | |
f0c211a3 | 9251 | return op0; |
9252 | } | |
9253 | ||
fa56dc1d | 9254 | case GOTO_SUBROUTINE_EXPR: |
f0c211a3 | 9255 | { |
9256 | rtx subr = (rtx) TREE_OPERAND (exp, 0); | |
9257 | rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1); | |
9258 | rtx return_address = gen_label_rtx (); | |
fa56dc1d | 9259 | emit_move_insn (return_link, |
9260 | gen_rtx_LABEL_REF (Pmode, return_address)); | |
f0c211a3 | 9261 | emit_jump (subr); |
9262 | emit_label (return_address); | |
9263 | return const0_rtx; | |
9264 | } | |
9265 | ||
a66c9326 | 9266 | case VA_ARG_EXPR: |
9267 | return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type); | |
9268 | ||
df4b504c | 9269 | case EXC_PTR_EXPR: |
572fdaa3 | 9270 | return get_exception_pointer (cfun); |
df4b504c | 9271 | |
6bfa2cc1 | 9272 | case FDESC_EXPR: |
9273 | /* Function descriptors are not valid except for as | |
9274 | initialization constants, and should not be expanded. */ | |
9275 | abort (); | |
9276 | ||
10f307d9 | 9277 | default: |
b467ecc1 | 9278 | return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier); |
10f307d9 | 9279 | } |
9280 | ||
9281 | /* Here to do an ordinary binary operator, generating an instruction | |
9282 | from the optab already placed in `this_optab'. */ | |
9283 | binop: | |
997d68fe | 9284 | if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1)) |
10f307d9 | 9285 | subtarget = 0; |
9286 | op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0); | |
b572011e | 9287 | op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0); |
10f307d9 | 9288 | binop2: |
9289 | temp = expand_binop (mode, this_optab, op0, op1, target, | |
9290 | unsignedp, OPTAB_LIB_WIDEN); | |
9291 | if (temp == 0) | |
9292 | abort (); | |
9293 | return temp; | |
9294 | } | |
b54842d8 | 9295 | \f |
67c68e45 | 9296 | /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that |
9297 | when applied to the address of EXP produces an address known to be | |
9298 | aligned more than BIGGEST_ALIGNMENT. */ | |
9299 | ||
9300 | static int | |
9301 | is_aligning_offset (offset, exp) | |
9302 | tree offset; | |
9303 | tree exp; | |
9304 | { | |
9305 | /* Strip off any conversions and WITH_RECORD_EXPR nodes. */ | |
9306 | while (TREE_CODE (offset) == NON_LVALUE_EXPR | |
9307 | || TREE_CODE (offset) == NOP_EXPR | |
9308 | || TREE_CODE (offset) == CONVERT_EXPR | |
9309 | || TREE_CODE (offset) == WITH_RECORD_EXPR) | |
9310 | offset = TREE_OPERAND (offset, 0); | |
9311 | ||
9312 | /* We must now have a BIT_AND_EXPR with a constant that is one less than | |
9313 | power of 2 and which is larger than BIGGEST_ALIGNMENT. */ | |
9314 | if (TREE_CODE (offset) != BIT_AND_EXPR | |
9315 | || !host_integerp (TREE_OPERAND (offset, 1), 1) | |
9316 | || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0 | |
9317 | || !exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0) | |
9318 | return 0; | |
9319 | ||
9320 | /* Look at the first operand of BIT_AND_EXPR and strip any conversion. | |
9321 | It must be NEGATE_EXPR. Then strip any more conversions. */ | |
9322 | offset = TREE_OPERAND (offset, 0); | |
9323 | while (TREE_CODE (offset) == NON_LVALUE_EXPR | |
9324 | || TREE_CODE (offset) == NOP_EXPR | |
9325 | || TREE_CODE (offset) == CONVERT_EXPR) | |
9326 | offset = TREE_OPERAND (offset, 0); | |
9327 | ||
9328 | if (TREE_CODE (offset) != NEGATE_EXPR) | |
9329 | return 0; | |
9330 | ||
9331 | offset = TREE_OPERAND (offset, 0); | |
9332 | while (TREE_CODE (offset) == NON_LVALUE_EXPR | |
9333 | || TREE_CODE (offset) == NOP_EXPR | |
9334 | || TREE_CODE (offset) == CONVERT_EXPR) | |
9335 | offset = TREE_OPERAND (offset, 0); | |
9336 | ||
9337 | /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR | |
9338 | whose type is the same as EXP. */ | |
9339 | return (TREE_CODE (offset) == ADDR_EXPR | |
9340 | && (TREE_OPERAND (offset, 0) == exp | |
9341 | || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR | |
9342 | && (TREE_TYPE (TREE_OPERAND (offset, 0)) | |
9343 | == TREE_TYPE (exp))))); | |
9344 | } | |
9345 | \f | |
dafdd1c8 | 9346 | /* Return the tree node if an ARG corresponds to a string constant or zero |
6ef828f9 | 9347 | if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset |
902de8ed | 9348 | in bytes within the string that ARG is accessing. The type of the |
9349 | offset will be `sizetype'. */ | |
b54842d8 | 9350 | |
53800dbe | 9351 | tree |
b54842d8 | 9352 | string_constant (arg, ptr_offset) |
9353 | tree arg; | |
9354 | tree *ptr_offset; | |
9355 | { | |
9356 | STRIP_NOPS (arg); | |
9357 | ||
9358 | if (TREE_CODE (arg) == ADDR_EXPR | |
9359 | && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST) | |
9360 | { | |
902de8ed | 9361 | *ptr_offset = size_zero_node; |
b54842d8 | 9362 | return TREE_OPERAND (arg, 0); |
9363 | } | |
9364 | else if (TREE_CODE (arg) == PLUS_EXPR) | |
9365 | { | |
9366 | tree arg0 = TREE_OPERAND (arg, 0); | |
9367 | tree arg1 = TREE_OPERAND (arg, 1); | |
9368 | ||
9369 | STRIP_NOPS (arg0); | |
9370 | STRIP_NOPS (arg1); | |
9371 | ||
9372 | if (TREE_CODE (arg0) == ADDR_EXPR | |
9373 | && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST) | |
10f307d9 | 9374 | { |
902de8ed | 9375 | *ptr_offset = convert (sizetype, arg1); |
b54842d8 | 9376 | return TREE_OPERAND (arg0, 0); |
10f307d9 | 9377 | } |
b54842d8 | 9378 | else if (TREE_CODE (arg1) == ADDR_EXPR |
9379 | && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST) | |
10f307d9 | 9380 | { |
902de8ed | 9381 | *ptr_offset = convert (sizetype, arg0); |
b54842d8 | 9382 | return TREE_OPERAND (arg1, 0); |
10f307d9 | 9383 | } |
b54842d8 | 9384 | } |
649d8da6 | 9385 | |
b54842d8 | 9386 | return 0; |
9387 | } | |
649d8da6 | 9388 | \f |
b54842d8 | 9389 | /* Expand code for a post- or pre- increment or decrement |
9390 | and return the RTX for the result. | |
9391 | POST is 1 for postinc/decrements and 0 for preinc/decrements. */ | |
acfb31e5 | 9392 | |
b54842d8 | 9393 | static rtx |
9394 | expand_increment (exp, post, ignore) | |
19cb6b50 | 9395 | tree exp; |
b54842d8 | 9396 | int post, ignore; |
649d8da6 | 9397 | { |
19cb6b50 | 9398 | rtx op0, op1; |
9399 | rtx temp, value; | |
9400 | tree incremented = TREE_OPERAND (exp, 0); | |
b54842d8 | 9401 | optab this_optab = add_optab; |
9402 | int icode; | |
9403 | enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp)); | |
9404 | int op0_is_copy = 0; | |
9405 | int single_insn = 0; | |
9406 | /* 1 means we can't store into OP0 directly, | |
9407 | because it is a subreg narrower than a word, | |
9408 | and we don't dare clobber the rest of the word. */ | |
9409 | int bad_subreg = 0; | |
acfb31e5 | 9410 | |
b54842d8 | 9411 | /* Stabilize any component ref that might need to be |
9412 | evaluated more than once below. */ | |
9413 | if (!post | |
9414 | || TREE_CODE (incremented) == BIT_FIELD_REF | |
9415 | || (TREE_CODE (incremented) == COMPONENT_REF | |
9416 | && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF | |
9417 | || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1))))) | |
9418 | incremented = stabilize_reference (incremented); | |
9419 | /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost | |
9420 | ones into save exprs so that they don't accidentally get evaluated | |
9421 | more than once by the code below. */ | |
9422 | if (TREE_CODE (incremented) == PREINCREMENT_EXPR | |
9423 | || TREE_CODE (incremented) == PREDECREMENT_EXPR) | |
9424 | incremented = save_expr (incremented); | |
0dbd1c74 | 9425 | |
b54842d8 | 9426 | /* Compute the operands as RTX. |
9427 | Note whether OP0 is the actual lvalue or a copy of it: | |
9428 | I believe it is a copy iff it is a register or subreg | |
1e625a2e | 9429 | and insns were generated in computing it. */ |
0dbd1c74 | 9430 | |
b54842d8 | 9431 | temp = get_last_insn (); |
8a06f2d4 | 9432 | op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0); |
0dbd1c74 | 9433 | |
b54842d8 | 9434 | /* If OP0 is a SUBREG made for a promoted variable, we cannot increment |
9435 | in place but instead must do sign- or zero-extension during assignment, | |
9436 | so we copy it into a new register and let the code below use it as | |
9437 | a copy. | |
0dbd1c74 | 9438 | |
b54842d8 | 9439 | Note that we can safely modify this SUBREG since it is know not to be |
9440 | shared (it was made by the expand_expr call above). */ | |
9441 | ||
9442 | if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0)) | |
9443 | { | |
9444 | if (post) | |
9445 | SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0)); | |
9446 | else | |
9447 | bad_subreg = 1; | |
9448 | } | |
9449 | else if (GET_CODE (op0) == SUBREG | |
9450 | && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD) | |
9451 | { | |
9452 | /* We cannot increment this SUBREG in place. If we are | |
9453 | post-incrementing, get a copy of the old value. Otherwise, | |
9454 | just mark that we cannot increment in place. */ | |
9455 | if (post) | |
9456 | op0 = copy_to_reg (op0); | |
9457 | else | |
9458 | bad_subreg = 1; | |
0dbd1c74 | 9459 | } |
9460 | ||
b54842d8 | 9461 | op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG) |
9462 | && temp != get_last_insn ()); | |
8a06f2d4 | 9463 | op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0); |
acfb31e5 | 9464 | |
b54842d8 | 9465 | /* Decide whether incrementing or decrementing. */ |
9466 | if (TREE_CODE (exp) == POSTDECREMENT_EXPR | |
9467 | || TREE_CODE (exp) == PREDECREMENT_EXPR) | |
9468 | this_optab = sub_optab; | |
9469 | ||
9470 | /* Convert decrement by a constant into a negative increment. */ | |
9471 | if (this_optab == sub_optab | |
9472 | && GET_CODE (op1) == CONST_INT) | |
649d8da6 | 9473 | { |
fa56dc1d | 9474 | op1 = GEN_INT (-INTVAL (op1)); |
b54842d8 | 9475 | this_optab = add_optab; |
649d8da6 | 9476 | } |
acfb31e5 | 9477 | |
bec2d490 | 9478 | if (TYPE_TRAP_SIGNED (TREE_TYPE (exp))) |
e17f5b23 | 9479 | this_optab = this_optab == add_optab ? addv_optab : subv_optab; |
bec2d490 | 9480 | |
b54842d8 | 9481 | /* For a preincrement, see if we can do this with a single instruction. */ |
9482 | if (!post) | |
9483 | { | |
9484 | icode = (int) this_optab->handlers[(int) mode].insn_code; | |
9485 | if (icode != (int) CODE_FOR_nothing | |
9486 | /* Make sure that OP0 is valid for operands 0 and 1 | |
9487 | of the insn we want to queue. */ | |
6357eaae | 9488 | && (*insn_data[icode].operand[0].predicate) (op0, mode) |
9489 | && (*insn_data[icode].operand[1].predicate) (op0, mode) | |
9490 | && (*insn_data[icode].operand[2].predicate) (op1, mode)) | |
b54842d8 | 9491 | single_insn = 1; |
9492 | } | |
10f307d9 | 9493 | |
b54842d8 | 9494 | /* If OP0 is not the actual lvalue, but rather a copy in a register, |
9495 | then we cannot just increment OP0. We must therefore contrive to | |
9496 | increment the original value. Then, for postincrement, we can return | |
9497 | OP0 since it is a copy of the old value. For preincrement, expand here | |
9498 | unless we can do it with a single insn. | |
10f307d9 | 9499 | |
b54842d8 | 9500 | Likewise if storing directly into OP0 would clobber high bits |
9501 | we need to preserve (bad_subreg). */ | |
9502 | if (op0_is_copy || (!post && !single_insn) || bad_subreg) | |
36ce2511 | 9503 | { |
b54842d8 | 9504 | /* This is the easiest way to increment the value wherever it is. |
9505 | Problems with multiple evaluation of INCREMENTED are prevented | |
9506 | because either (1) it is a component_ref or preincrement, | |
9507 | in which case it was stabilized above, or (2) it is an array_ref | |
9508 | with constant index in an array in a register, which is | |
9509 | safe to reevaluate. */ | |
9510 | tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR | |
9511 | || TREE_CODE (exp) == PREDECREMENT_EXPR) | |
9512 | ? MINUS_EXPR : PLUS_EXPR), | |
9513 | TREE_TYPE (exp), | |
9514 | incremented, | |
9515 | TREE_OPERAND (exp, 1)); | |
36ce2511 | 9516 | |
b54842d8 | 9517 | while (TREE_CODE (incremented) == NOP_EXPR |
9518 | || TREE_CODE (incremented) == CONVERT_EXPR) | |
9519 | { | |
9520 | newexp = convert (TREE_TYPE (incremented), newexp); | |
9521 | incremented = TREE_OPERAND (incremented, 0); | |
9522 | } | |
10f307d9 | 9523 | |
b54842d8 | 9524 | temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0); |
9525 | return post ? op0 : temp; | |
9526 | } | |
10f307d9 | 9527 | |
b54842d8 | 9528 | if (post) |
9529 | { | |
9530 | /* We have a true reference to the value in OP0. | |
9531 | If there is an insn to add or subtract in this mode, queue it. | |
9532 | Queueing the increment insn avoids the register shuffling | |
9533 | that often results if we must increment now and first save | |
9534 | the old value for subsequent use. */ | |
10f307d9 | 9535 | |
b54842d8 | 9536 | #if 0 /* Turned off to avoid making extra insn for indexed memref. */ |
9537 | op0 = stabilize (op0); | |
9538 | #endif | |
3674f4b7 | 9539 | |
b54842d8 | 9540 | icode = (int) this_optab->handlers[(int) mode].insn_code; |
9541 | if (icode != (int) CODE_FOR_nothing | |
9542 | /* Make sure that OP0 is valid for operands 0 and 1 | |
9543 | of the insn we want to queue. */ | |
6357eaae | 9544 | && (*insn_data[icode].operand[0].predicate) (op0, mode) |
9545 | && (*insn_data[icode].operand[1].predicate) (op0, mode)) | |
b54842d8 | 9546 | { |
6357eaae | 9547 | if (! (*insn_data[icode].operand[2].predicate) (op1, mode)) |
b54842d8 | 9548 | op1 = force_reg (mode, op1); |
10f307d9 | 9549 | |
b54842d8 | 9550 | return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1)); |
9551 | } | |
9552 | if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM) | |
9553 | { | |
9554 | rtx addr = (general_operand (XEXP (op0, 0), mode) | |
9555 | ? force_reg (Pmode, XEXP (op0, 0)) | |
9556 | : copy_to_reg (XEXP (op0, 0))); | |
9557 | rtx temp, result; | |
649d8da6 | 9558 | |
537ffcfc | 9559 | op0 = replace_equiv_address (op0, addr); |
b54842d8 | 9560 | temp = force_reg (GET_MODE (op0), op0); |
6357eaae | 9561 | if (! (*insn_data[icode].operand[2].predicate) (op1, mode)) |
b54842d8 | 9562 | op1 = force_reg (mode, op1); |
649d8da6 | 9563 | |
b54842d8 | 9564 | /* The increment queue is LIFO, thus we have to `queue' |
9565 | the instructions in reverse order. */ | |
9566 | enqueue_insn (op0, gen_move_insn (op0, temp)); | |
9567 | result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1)); | |
9568 | return result; | |
10f307d9 | 9569 | } |
9570 | } | |
649d8da6 | 9571 | |
b54842d8 | 9572 | /* Preincrement, or we can't increment with one simple insn. */ |
9573 | if (post) | |
9574 | /* Save a copy of the value before inc or dec, to return it later. */ | |
9575 | temp = value = copy_to_reg (op0); | |
9576 | else | |
9577 | /* Arrange to return the incremented value. */ | |
9578 | /* Copy the rtx because expand_binop will protect from the queue, | |
9579 | and the results of that would be invalid for us to return | |
9580 | if our caller does emit_queue before using our result. */ | |
9581 | temp = copy_rtx (value = op0); | |
10f307d9 | 9582 | |
b54842d8 | 9583 | /* Increment however we can. */ |
8a06f2d4 | 9584 | op1 = expand_binop (mode, this_optab, value, op1, op0, |
b54842d8 | 9585 | TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN); |
8a06f2d4 | 9586 | |
b54842d8 | 9587 | /* Make sure the value is stored into OP0. */ |
9588 | if (op1 != op0) | |
9589 | emit_move_insn (op0, op1); | |
7214c9d7 | 9590 | |
b54842d8 | 9591 | return temp; |
9592 | } | |
9593 | \f | |
b54842d8 | 9594 | /* At the start of a function, record that we have no previously-pushed |
9595 | arguments waiting to be popped. */ | |
10f307d9 | 9596 | |
b54842d8 | 9597 | void |
9598 | init_pending_stack_adjust () | |
9599 | { | |
9600 | pending_stack_adjust = 0; | |
9601 | } | |
10f307d9 | 9602 | |
b54842d8 | 9603 | /* When exiting from function, if safe, clear out any pending stack adjust |
c68850d6 | 9604 | so the adjustment won't get done. |
9605 | ||
9606 | Note, if the current function calls alloca, then it must have a | |
9607 | frame pointer regardless of the value of flag_omit_frame_pointer. */ | |
10f307d9 | 9608 | |
b54842d8 | 9609 | void |
9610 | clear_pending_stack_adjust () | |
9611 | { | |
9612 | #ifdef EXIT_IGNORE_STACK | |
9613 | if (optimize > 0 | |
c68850d6 | 9614 | && (! flag_omit_frame_pointer || current_function_calls_alloca) |
9615 | && EXIT_IGNORE_STACK | |
b54842d8 | 9616 | && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline) |
9617 | && ! flag_inline_functions) | |
91b70175 | 9618 | { |
9619 | stack_pointer_delta -= pending_stack_adjust, | |
9620 | pending_stack_adjust = 0; | |
9621 | } | |
b54842d8 | 9622 | #endif |
9623 | } | |
10f307d9 | 9624 | |
b54842d8 | 9625 | /* Pop any previously-pushed arguments that have not been popped yet. */ |
9626 | ||
9627 | void | |
9628 | do_pending_stack_adjust () | |
9629 | { | |
9630 | if (inhibit_defer_pop == 0) | |
649d8da6 | 9631 | { |
b54842d8 | 9632 | if (pending_stack_adjust != 0) |
9633 | adjust_stack (GEN_INT (pending_stack_adjust)); | |
9634 | pending_stack_adjust = 0; | |
10f307d9 | 9635 | } |
10f307d9 | 9636 | } |
9637 | \f | |
b54842d8 | 9638 | /* Expand conditional expressions. */ |
10f307d9 | 9639 | |
b54842d8 | 9640 | /* Generate code to evaluate EXP and jump to LABEL if the value is zero. |
9641 | LABEL is an rtx of code CODE_LABEL, in this function and all the | |
9642 | functions here. */ | |
10f307d9 | 9643 | |
b54842d8 | 9644 | void |
9645 | jumpifnot (exp, label) | |
649d8da6 | 9646 | tree exp; |
b54842d8 | 9647 | rtx label; |
10f307d9 | 9648 | { |
b54842d8 | 9649 | do_jump (exp, label, NULL_RTX); |
9650 | } | |
10f307d9 | 9651 | |
b54842d8 | 9652 | /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */ |
649d8da6 | 9653 | |
b54842d8 | 9654 | void |
9655 | jumpif (exp, label) | |
9656 | tree exp; | |
9657 | rtx label; | |
9658 | { | |
9659 | do_jump (exp, NULL_RTX, label); | |
9660 | } | |
649d8da6 | 9661 | |
b54842d8 | 9662 | /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if |
9663 | the result is zero, or IF_TRUE_LABEL if the result is one. | |
9664 | Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero, | |
9665 | meaning fall through in that case. | |
649d8da6 | 9666 | |
b54842d8 | 9667 | do_jump always does any pending stack adjust except when it does not |
9668 | actually perform a jump. An example where there is no jump | |
9669 | is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null. | |
649d8da6 | 9670 | |
b54842d8 | 9671 | This function is responsible for optimizing cases such as |
9672 | &&, || and comparison operators in EXP. */ | |
7214c9d7 | 9673 | |
b54842d8 | 9674 | void |
9675 | do_jump (exp, if_false_label, if_true_label) | |
9676 | tree exp; | |
9677 | rtx if_false_label, if_true_label; | |
9678 | { | |
19cb6b50 | 9679 | enum tree_code code = TREE_CODE (exp); |
b54842d8 | 9680 | /* Some cases need to create a label to jump to |
9681 | in order to properly fall through. | |
9682 | These cases set DROP_THROUGH_LABEL nonzero. */ | |
9683 | rtx drop_through_label = 0; | |
9684 | rtx temp; | |
b54842d8 | 9685 | int i; |
9686 | tree type; | |
9687 | enum machine_mode mode; | |
649d8da6 | 9688 | |
32a0589f | 9689 | #ifdef MAX_INTEGER_COMPUTATION_MODE |
9690 | check_max_integer_computation_mode (exp); | |
9691 | #endif | |
9692 | ||
b54842d8 | 9693 | emit_queue (); |
649d8da6 | 9694 | |
b54842d8 | 9695 | switch (code) |
649d8da6 | 9696 | { |
b54842d8 | 9697 | case ERROR_MARK: |
649d8da6 | 9698 | break; |
10f307d9 | 9699 | |
b54842d8 | 9700 | case INTEGER_CST: |
9701 | temp = integer_zerop (exp) ? if_false_label : if_true_label; | |
9702 | if (temp) | |
9703 | emit_jump (temp); | |
9704 | break; | |
10f307d9 | 9705 | |
b54842d8 | 9706 | #if 0 |
9707 | /* This is not true with #pragma weak */ | |
9708 | case ADDR_EXPR: | |
9709 | /* The address of something can never be zero. */ | |
9710 | if (if_true_label) | |
9711 | emit_jump (if_true_label); | |
9712 | break; | |
9713 | #endif | |
10f307d9 | 9714 | |
b54842d8 | 9715 | case NOP_EXPR: |
9716 | if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF | |
9717 | || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF | |
ba04d9d5 | 9718 | || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF |
9719 | || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF) | |
b54842d8 | 9720 | goto normal; |
9721 | case CONVERT_EXPR: | |
9722 | /* If we are narrowing the operand, we have to do the compare in the | |
9723 | narrower mode. */ | |
9724 | if ((TYPE_PRECISION (TREE_TYPE (exp)) | |
9725 | < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))) | |
9726 | goto normal; | |
9727 | case NON_LVALUE_EXPR: | |
9728 | case REFERENCE_EXPR: | |
9729 | case ABS_EXPR: | |
9730 | case NEGATE_EXPR: | |
9731 | case LROTATE_EXPR: | |
9732 | case RROTATE_EXPR: | |
6ef828f9 | 9733 | /* These cannot change zero->nonzero or vice versa. */ |
b54842d8 | 9734 | do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label); |
9735 | break; | |
10f307d9 | 9736 | |
155b05dc | 9737 | case WITH_RECORD_EXPR: |
9738 | /* Put the object on the placeholder list, recurse through our first | |
9739 | operand, and pop the list. */ | |
9740 | placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE, | |
9741 | placeholder_list); | |
9742 | do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label); | |
9743 | placeholder_list = TREE_CHAIN (placeholder_list); | |
9744 | break; | |
9745 | ||
b54842d8 | 9746 | #if 0 |
9747 | /* This is never less insns than evaluating the PLUS_EXPR followed by | |
9748 | a test and can be longer if the test is eliminated. */ | |
9749 | case PLUS_EXPR: | |
9750 | /* Reduce to minus. */ | |
9751 | exp = build (MINUS_EXPR, TREE_TYPE (exp), | |
9752 | TREE_OPERAND (exp, 0), | |
9753 | fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)), | |
9754 | TREE_OPERAND (exp, 1)))); | |
9755 | /* Process as MINUS. */ | |
649d8da6 | 9756 | #endif |
10f307d9 | 9757 | |
b54842d8 | 9758 | case MINUS_EXPR: |
6ef828f9 | 9759 | /* Nonzero iff operands of minus differ. */ |
1a29b174 | 9760 | do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp), |
9761 | TREE_OPERAND (exp, 0), | |
9762 | TREE_OPERAND (exp, 1)), | |
9763 | NE, NE, if_false_label, if_true_label); | |
b54842d8 | 9764 | break; |
10f307d9 | 9765 | |
b54842d8 | 9766 | case BIT_AND_EXPR: |
9767 | /* If we are AND'ing with a small constant, do this comparison in the | |
9768 | smallest type that fits. If the machine doesn't have comparisons | |
9769 | that small, it will be converted back to the wider comparison. | |
9770 | This helps if we are testing the sign bit of a narrower object. | |
9771 | combine can't do this for us because it can't know whether a | |
9772 | ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */ | |
10f307d9 | 9773 | |
b54842d8 | 9774 | if (! SLOW_BYTE_ACCESS |
9775 | && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST | |
9776 | && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT | |
a0c2c45b | 9777 | && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0 |
b54842d8 | 9778 | && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode |
771d21fa | 9779 | && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0 |
b54842d8 | 9780 | && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp)) |
9781 | && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code | |
9782 | != CODE_FOR_nothing)) | |
9783 | { | |
9784 | do_jump (convert (type, exp), if_false_label, if_true_label); | |
9785 | break; | |
9786 | } | |
9787 | goto normal; | |
10f307d9 | 9788 | |
b54842d8 | 9789 | case TRUTH_NOT_EXPR: |
9790 | do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label); | |
9791 | break; | |
10f307d9 | 9792 | |
b54842d8 | 9793 | case TRUTH_ANDIF_EXPR: |
9794 | if (if_false_label == 0) | |
9795 | if_false_label = drop_through_label = gen_label_rtx (); | |
9796 | do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX); | |
9797 | start_cleanup_deferral (); | |
9798 | do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label); | |
9799 | end_cleanup_deferral (); | |
9800 | break; | |
10f307d9 | 9801 | |
b54842d8 | 9802 | case TRUTH_ORIF_EXPR: |
9803 | if (if_true_label == 0) | |
9804 | if_true_label = drop_through_label = gen_label_rtx (); | |
9805 | do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label); | |
9806 | start_cleanup_deferral (); | |
9807 | do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label); | |
9808 | end_cleanup_deferral (); | |
9809 | break; | |
10f307d9 | 9810 | |
b54842d8 | 9811 | case COMPOUND_EXPR: |
9812 | push_temp_slots (); | |
9813 | expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0); | |
9814 | preserve_temp_slots (NULL_RTX); | |
9815 | free_temp_slots (); | |
9816 | pop_temp_slots (); | |
9817 | emit_queue (); | |
9818 | do_pending_stack_adjust (); | |
9819 | do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label); | |
9820 | break; | |
10f307d9 | 9821 | |
b54842d8 | 9822 | case COMPONENT_REF: |
9823 | case BIT_FIELD_REF: | |
9824 | case ARRAY_REF: | |
ba04d9d5 | 9825 | case ARRAY_RANGE_REF: |
b54842d8 | 9826 | { |
02e7a332 | 9827 | HOST_WIDE_INT bitsize, bitpos; |
9828 | int unsignedp; | |
b54842d8 | 9829 | enum machine_mode mode; |
9830 | tree type; | |
9831 | tree offset; | |
9832 | int volatilep = 0; | |
10f307d9 | 9833 | |
b54842d8 | 9834 | /* Get description of this reference. We don't actually care |
9835 | about the underlying object here. */ | |
325d1c45 | 9836 | get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode, |
2b96c5f6 | 9837 | &unsignedp, &volatilep); |
10f307d9 | 9838 | |
771d21fa | 9839 | type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp); |
b54842d8 | 9840 | if (! SLOW_BYTE_ACCESS |
9841 | && type != 0 && bitsize >= 0 | |
9842 | && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp)) | |
9843 | && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code | |
9844 | != CODE_FOR_nothing)) | |
9845 | { | |
9846 | do_jump (convert (type, exp), if_false_label, if_true_label); | |
9847 | break; | |
9848 | } | |
9849 | goto normal; | |
9850 | } | |
10f307d9 | 9851 | |
b54842d8 | 9852 | case COND_EXPR: |
9853 | /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */ | |
9854 | if (integer_onep (TREE_OPERAND (exp, 1)) | |
9855 | && integer_zerop (TREE_OPERAND (exp, 2))) | |
9856 | do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label); | |
10f307d9 | 9857 | |
b54842d8 | 9858 | else if (integer_zerop (TREE_OPERAND (exp, 1)) |
9859 | && integer_onep (TREE_OPERAND (exp, 2))) | |
9860 | do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label); | |
10f307d9 | 9861 | |
b54842d8 | 9862 | else |
9863 | { | |
19cb6b50 | 9864 | rtx label1 = gen_label_rtx (); |
b54842d8 | 9865 | drop_through_label = gen_label_rtx (); |
10f307d9 | 9866 | |
b54842d8 | 9867 | do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX); |
10f307d9 | 9868 | |
b54842d8 | 9869 | start_cleanup_deferral (); |
9870 | /* Now the THEN-expression. */ | |
9871 | do_jump (TREE_OPERAND (exp, 1), | |
9872 | if_false_label ? if_false_label : drop_through_label, | |
9873 | if_true_label ? if_true_label : drop_through_label); | |
9874 | /* In case the do_jump just above never jumps. */ | |
9875 | do_pending_stack_adjust (); | |
9876 | emit_label (label1); | |
10f307d9 | 9877 | |
b54842d8 | 9878 | /* Now the ELSE-expression. */ |
9879 | do_jump (TREE_OPERAND (exp, 2), | |
9880 | if_false_label ? if_false_label : drop_through_label, | |
9881 | if_true_label ? if_true_label : drop_through_label); | |
9882 | end_cleanup_deferral (); | |
9883 | } | |
9884 | break; | |
10f307d9 | 9885 | |
b54842d8 | 9886 | case EQ_EXPR: |
9887 | { | |
9888 | tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0)); | |
10f307d9 | 9889 | |
9e042f31 | 9890 | if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT |
9891 | || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT) | |
c89676f9 | 9892 | { |
9893 | tree exp0 = save_expr (TREE_OPERAND (exp, 0)); | |
9894 | tree exp1 = save_expr (TREE_OPERAND (exp, 1)); | |
9895 | do_jump | |
9896 | (fold | |
9897 | (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp), | |
9898 | fold (build (EQ_EXPR, TREE_TYPE (exp), | |
9899 | fold (build1 (REALPART_EXPR, | |
9900 | TREE_TYPE (inner_type), | |
9901 | exp0)), | |
9902 | fold (build1 (REALPART_EXPR, | |
9903 | TREE_TYPE (inner_type), | |
9904 | exp1)))), | |
9905 | fold (build (EQ_EXPR, TREE_TYPE (exp), | |
9906 | fold (build1 (IMAGPART_EXPR, | |
9907 | TREE_TYPE (inner_type), | |
9908 | exp0)), | |
9909 | fold (build1 (IMAGPART_EXPR, | |
9910 | TREE_TYPE (inner_type), | |
9911 | exp1)))))), | |
9912 | if_false_label, if_true_label); | |
9913 | } | |
9e042f31 | 9914 | |
9915 | else if (integer_zerop (TREE_OPERAND (exp, 1))) | |
9916 | do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label); | |
9917 | ||
b54842d8 | 9918 | else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT |
a4110d9a | 9919 | && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump)) |
b54842d8 | 9920 | do_jump_by_parts_equality (exp, if_false_label, if_true_label); |
9921 | else | |
1a29b174 | 9922 | do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label); |
b54842d8 | 9923 | break; |
9924 | } | |
10f307d9 | 9925 | |
b54842d8 | 9926 | case NE_EXPR: |
9927 | { | |
9928 | tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0)); | |
10f307d9 | 9929 | |
9e042f31 | 9930 | if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT |
9931 | || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT) | |
c89676f9 | 9932 | { |
9933 | tree exp0 = save_expr (TREE_OPERAND (exp, 0)); | |
9934 | tree exp1 = save_expr (TREE_OPERAND (exp, 1)); | |
9935 | do_jump | |
9936 | (fold | |
9937 | (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), | |
9938 | fold (build (NE_EXPR, TREE_TYPE (exp), | |
9939 | fold (build1 (REALPART_EXPR, | |
9940 | TREE_TYPE (inner_type), | |
9941 | exp0)), | |
9942 | fold (build1 (REALPART_EXPR, | |
9943 | TREE_TYPE (inner_type), | |
9944 | exp1)))), | |
9945 | fold (build (NE_EXPR, TREE_TYPE (exp), | |
9946 | fold (build1 (IMAGPART_EXPR, | |
9947 | TREE_TYPE (inner_type), | |
9948 | exp0)), | |
9949 | fold (build1 (IMAGPART_EXPR, | |
9950 | TREE_TYPE (inner_type), | |
9951 | exp1)))))), | |
9952 | if_false_label, if_true_label); | |
9953 | } | |
9e042f31 | 9954 | |
9955 | else if (integer_zerop (TREE_OPERAND (exp, 1))) | |
9956 | do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label); | |
9957 | ||
b54842d8 | 9958 | else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT |
a4110d9a | 9959 | && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump)) |
b54842d8 | 9960 | do_jump_by_parts_equality (exp, if_true_label, if_false_label); |
9961 | else | |
1a29b174 | 9962 | do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label); |
b54842d8 | 9963 | break; |
9964 | } | |
10f307d9 | 9965 | |
b54842d8 | 9966 | case LT_EXPR: |
10ee3e0f | 9967 | mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); |
9968 | if (GET_MODE_CLASS (mode) == MODE_INT | |
a4110d9a | 9969 | && ! can_compare_p (LT, mode, ccp_jump)) |
b54842d8 | 9970 | do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label); |
9971 | else | |
1a29b174 | 9972 | do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label); |
b54842d8 | 9973 | break; |
10f307d9 | 9974 | |
b54842d8 | 9975 | case LE_EXPR: |
10ee3e0f | 9976 | mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); |
9977 | if (GET_MODE_CLASS (mode) == MODE_INT | |
a4110d9a | 9978 | && ! can_compare_p (LE, mode, ccp_jump)) |
b54842d8 | 9979 | do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label); |
9980 | else | |
1a29b174 | 9981 | do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label); |
b54842d8 | 9982 | break; |
10f307d9 | 9983 | |
b54842d8 | 9984 | case GT_EXPR: |
10ee3e0f | 9985 | mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); |
9986 | if (GET_MODE_CLASS (mode) == MODE_INT | |
a4110d9a | 9987 | && ! can_compare_p (GT, mode, ccp_jump)) |
b54842d8 | 9988 | do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label); |
9989 | else | |
1a29b174 | 9990 | do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label); |
b54842d8 | 9991 | break; |
10f307d9 | 9992 | |
b54842d8 | 9993 | case GE_EXPR: |
10ee3e0f | 9994 | mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); |
9995 | if (GET_MODE_CLASS (mode) == MODE_INT | |
a4110d9a | 9996 | && ! can_compare_p (GE, mode, ccp_jump)) |
b54842d8 | 9997 | do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label); |
9998 | else | |
1a29b174 | 9999 | do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label); |
b54842d8 | 10000 | break; |
10f307d9 | 10001 | |
a4110d9a | 10002 | case UNORDERED_EXPR: |
10003 | case ORDERED_EXPR: | |
10004 | { | |
10005 | enum rtx_code cmp, rcmp; | |
10006 | int do_rev; | |
10007 | ||
10008 | if (code == UNORDERED_EXPR) | |
10009 | cmp = UNORDERED, rcmp = ORDERED; | |
10010 | else | |
10011 | cmp = ORDERED, rcmp = UNORDERED; | |
fa56dc1d | 10012 | mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); |
a4110d9a | 10013 | |
10014 | do_rev = 0; | |
10015 | if (! can_compare_p (cmp, mode, ccp_jump) | |
10016 | && (can_compare_p (rcmp, mode, ccp_jump) | |
10017 | /* If the target doesn't provide either UNORDERED or ORDERED | |
10018 | comparisons, canonicalize on UNORDERED for the library. */ | |
10019 | || rcmp == UNORDERED)) | |
10020 | do_rev = 1; | |
10021 | ||
ff385626 | 10022 | if (! do_rev) |
a4110d9a | 10023 | do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label); |
10024 | else | |
10025 | do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label); | |
10026 | } | |
10027 | break; | |
10028 | ||
10029 | { | |
10030 | enum rtx_code rcode1; | |
10031 | enum tree_code tcode2; | |
10032 | ||
10033 | case UNLT_EXPR: | |
10034 | rcode1 = UNLT; | |
10035 | tcode2 = LT_EXPR; | |
10036 | goto unordered_bcc; | |
10037 | case UNLE_EXPR: | |
10038 | rcode1 = UNLE; | |
10039 | tcode2 = LE_EXPR; | |
10040 | goto unordered_bcc; | |
10041 | case UNGT_EXPR: | |
10042 | rcode1 = UNGT; | |
10043 | tcode2 = GT_EXPR; | |
10044 | goto unordered_bcc; | |
10045 | case UNGE_EXPR: | |
10046 | rcode1 = UNGE; | |
10047 | tcode2 = GE_EXPR; | |
10048 | goto unordered_bcc; | |
10049 | case UNEQ_EXPR: | |
10050 | rcode1 = UNEQ; | |
10051 | tcode2 = EQ_EXPR; | |
10052 | goto unordered_bcc; | |
79777bad | 10053 | |
a4110d9a | 10054 | unordered_bcc: |
ff385626 | 10055 | mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); |
a4110d9a | 10056 | if (can_compare_p (rcode1, mode, ccp_jump)) |
10057 | do_compare_and_jump (exp, rcode1, rcode1, if_false_label, | |
10058 | if_true_label); | |
10059 | else | |
10060 | { | |
10061 | tree op0 = save_expr (TREE_OPERAND (exp, 0)); | |
10062 | tree op1 = save_expr (TREE_OPERAND (exp, 1)); | |
10063 | tree cmp0, cmp1; | |
10064 | ||
fa56dc1d | 10065 | /* If the target doesn't support combined unordered |
a4110d9a | 10066 | compares, decompose into UNORDERED + comparison. */ |
10067 | cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1)); | |
10068 | cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1)); | |
10069 | exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1); | |
10070 | do_jump (exp, if_false_label, if_true_label); | |
10071 | } | |
10072 | } | |
10073 | break; | |
10074 | ||
689df48e | 10075 | /* Special case: |
10076 | __builtin_expect (<test>, 0) and | |
10077 | __builtin_expect (<test>, 1) | |
10078 | ||
10079 | We need to do this here, so that <test> is not converted to a SCC | |
10080 | operation on machines that use condition code registers and COMPARE | |
10081 | like the PowerPC, and then the jump is done based on whether the SCC | |
10082 | operation produced a 1 or 0. */ | |
10083 | case CALL_EXPR: | |
10084 | /* Check for a built-in function. */ | |
10085 | if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR) | |
10086 | { | |
10087 | tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0); | |
10088 | tree arglist = TREE_OPERAND (exp, 1); | |
10089 | ||
10090 | if (TREE_CODE (fndecl) == FUNCTION_DECL | |
10091 | && DECL_BUILT_IN (fndecl) | |
10092 | && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT | |
10093 | && arglist != NULL_TREE | |
10094 | && TREE_CHAIN (arglist) != NULL_TREE) | |
10095 | { | |
10096 | rtx seq = expand_builtin_expect_jump (exp, if_false_label, | |
10097 | if_true_label); | |
10098 | ||
10099 | if (seq != NULL_RTX) | |
10100 | { | |
10101 | emit_insn (seq); | |
10102 | return; | |
10103 | } | |
10104 | } | |
10105 | } | |
10106 | /* fall through and generate the normal code. */ | |
10107 | ||
b54842d8 | 10108 | default: |
10109 | normal: | |
10110 | temp = expand_expr (exp, NULL_RTX, VOIDmode, 0); | |
10111 | #if 0 | |
10112 | /* This is not needed any more and causes poor code since it causes | |
10113 | comparisons and tests from non-SI objects to have different code | |
10114 | sequences. */ | |
10115 | /* Copy to register to avoid generating bad insns by cse | |
10116 | from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */ | |
10117 | if (!cse_not_expected && GET_CODE (temp) == MEM) | |
10118 | temp = copy_to_reg (temp); | |
649d8da6 | 10119 | #endif |
b54842d8 | 10120 | do_pending_stack_adjust (); |
1a29b174 | 10121 | /* Do any postincrements in the expression that was tested. */ |
10122 | emit_queue (); | |
10123 | ||
ff385626 | 10124 | if (GET_CODE (temp) == CONST_INT |
c17aca7a | 10125 | || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode) |
10126 | || GET_CODE (temp) == LABEL_REF) | |
1a29b174 | 10127 | { |
10128 | rtx target = temp == const0_rtx ? if_false_label : if_true_label; | |
10129 | if (target) | |
10130 | emit_jump (target); | |
10131 | } | |
b54842d8 | 10132 | else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT |
a4110d9a | 10133 | && ! can_compare_p (NE, GET_MODE (temp), ccp_jump)) |
b54842d8 | 10134 | /* Note swapping the labels gives us not-equal. */ |
10135 | do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label); | |
10136 | else if (GET_MODE (temp) != VOIDmode) | |
1a29b174 | 10137 | do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)), |
10138 | NE, TREE_UNSIGNED (TREE_TYPE (exp)), | |
2b96c5f6 | 10139 | GET_MODE (temp), NULL_RTX, |
1a29b174 | 10140 | if_false_label, if_true_label); |
b54842d8 | 10141 | else |
10142 | abort (); | |
10143 | } | |
10f307d9 | 10144 | |
b54842d8 | 10145 | if (drop_through_label) |
10146 | { | |
10147 | /* If do_jump produces code that might be jumped around, | |
10148 | do any stack adjusts from that code, before the place | |
10149 | where control merges in. */ | |
10150 | do_pending_stack_adjust (); | |
10151 | emit_label (drop_through_label); | |
10152 | } | |
10f307d9 | 10153 | } |
b54842d8 | 10154 | \f |
10155 | /* Given a comparison expression EXP for values too wide to be compared | |
10156 | with one insn, test the comparison and jump to the appropriate label. | |
10157 | The code of EXP is ignored; we always test GT if SWAP is 0, | |
10158 | and LT if SWAP is 1. */ | |
10f307d9 | 10159 | |
b54842d8 | 10160 | static void |
10161 | do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label) | |
10162 | tree exp; | |
10163 | int swap; | |
10164 | rtx if_false_label, if_true_label; | |
10165 | { | |
10166 | rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0); | |
10167 | rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0); | |
10168 | enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); | |
b54842d8 | 10169 | int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))); |
10f307d9 | 10170 | |
1a29b174 | 10171 | do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label); |
228661d1 | 10172 | } |
10173 | ||
b54842d8 | 10174 | /* Compare OP0 with OP1, word at a time, in mode MODE. |
10175 | UNSIGNEDP says to do unsigned comparison. | |
10176 | Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */ | |
228661d1 | 10177 | |
b54842d8 | 10178 | void |
10179 | do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label) | |
10180 | enum machine_mode mode; | |
10181 | int unsignedp; | |
10182 | rtx op0, op1; | |
10183 | rtx if_false_label, if_true_label; | |
228661d1 | 10184 | { |
b54842d8 | 10185 | int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD); |
10186 | rtx drop_through_label = 0; | |
10187 | int i; | |
228661d1 | 10188 | |
b54842d8 | 10189 | if (! if_true_label || ! if_false_label) |
10190 | drop_through_label = gen_label_rtx (); | |
10191 | if (! if_true_label) | |
10192 | if_true_label = drop_through_label; | |
10193 | if (! if_false_label) | |
10194 | if_false_label = drop_through_label; | |
228661d1 | 10195 | |
b54842d8 | 10196 | /* Compare a word at a time, high order first. */ |
10197 | for (i = 0; i < nwords; i++) | |
10198 | { | |
b54842d8 | 10199 | rtx op0_word, op1_word; |
10f307d9 | 10200 | |
b54842d8 | 10201 | if (WORDS_BIG_ENDIAN) |
10202 | { | |
10203 | op0_word = operand_subword_force (op0, i, mode); | |
10204 | op1_word = operand_subword_force (op1, i, mode); | |
10205 | } | |
10206 | else | |
10207 | { | |
10208 | op0_word = operand_subword_force (op0, nwords - 1 - i, mode); | |
10209 | op1_word = operand_subword_force (op1, nwords - 1 - i, mode); | |
10210 | } | |
10f307d9 | 10211 | |
b54842d8 | 10212 | /* All but high-order word must be compared as unsigned. */ |
1a29b174 | 10213 | do_compare_rtx_and_jump (op0_word, op1_word, GT, |
2b96c5f6 | 10214 | (unsignedp || i > 0), word_mode, NULL_RTX, |
1a29b174 | 10215 | NULL_RTX, if_true_label); |
10f307d9 | 10216 | |
b54842d8 | 10217 | /* Consider lower words only if these are equal. */ |
1a29b174 | 10218 | do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode, |
2b96c5f6 | 10219 | NULL_RTX, NULL_RTX, if_false_label); |
b54842d8 | 10220 | } |
10f307d9 | 10221 | |
b54842d8 | 10222 | if (if_false_label) |
10223 | emit_jump (if_false_label); | |
10224 | if (drop_through_label) | |
10225 | emit_label (drop_through_label); | |
10f307d9 | 10226 | } |
10227 | ||
b54842d8 | 10228 | /* Given an EQ_EXPR expression EXP for values too wide to be compared |
10229 | with one insn, test the comparison and jump to the appropriate label. */ | |
10f307d9 | 10230 | |
b54842d8 | 10231 | static void |
10232 | do_jump_by_parts_equality (exp, if_false_label, if_true_label) | |
10233 | tree exp; | |
10234 | rtx if_false_label, if_true_label; | |
10f307d9 | 10235 | { |
b54842d8 | 10236 | rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0); |
10237 | rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0); | |
10238 | enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); | |
10239 | int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD); | |
10240 | int i; | |
10241 | rtx drop_through_label = 0; | |
10f307d9 | 10242 | |
b54842d8 | 10243 | if (! if_false_label) |
10244 | drop_through_label = if_false_label = gen_label_rtx (); | |
10f307d9 | 10245 | |
b54842d8 | 10246 | for (i = 0; i < nwords; i++) |
1a29b174 | 10247 | do_compare_rtx_and_jump (operand_subword_force (op0, i, mode), |
10248 | operand_subword_force (op1, i, mode), | |
10249 | EQ, TREE_UNSIGNED (TREE_TYPE (exp)), | |
2b96c5f6 | 10250 | word_mode, NULL_RTX, if_false_label, NULL_RTX); |
10f307d9 | 10251 | |
b54842d8 | 10252 | if (if_true_label) |
10253 | emit_jump (if_true_label); | |
10254 | if (drop_through_label) | |
10255 | emit_label (drop_through_label); | |
10f307d9 | 10256 | } |
b54842d8 | 10257 | \f |
10258 | /* Jump according to whether OP0 is 0. | |
10259 | We assume that OP0 has an integer mode that is too wide | |
10260 | for the available compare insns. */ | |
10f307d9 | 10261 | |
c5aa1e92 | 10262 | void |
b54842d8 | 10263 | do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label) |
10264 | rtx op0; | |
10265 | rtx if_false_label, if_true_label; | |
649d8da6 | 10266 | { |
b54842d8 | 10267 | int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD; |
10268 | rtx part; | |
10269 | int i; | |
10270 | rtx drop_through_label = 0; | |
10f307d9 | 10271 | |
b54842d8 | 10272 | /* The fastest way of doing this comparison on almost any machine is to |
10273 | "or" all the words and compare the result. If all have to be loaded | |
10274 | from memory and this is a very wide item, it's possible this may | |
10275 | be slower, but that's highly unlikely. */ | |
10f307d9 | 10276 | |
b54842d8 | 10277 | part = gen_reg_rtx (word_mode); |
10278 | emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0))); | |
10279 | for (i = 1; i < nwords && part != 0; i++) | |
10280 | part = expand_binop (word_mode, ior_optab, part, | |
10281 | operand_subword_force (op0, i, GET_MODE (op0)), | |
10282 | part, 1, OPTAB_WIDEN); | |
10f307d9 | 10283 | |
b54842d8 | 10284 | if (part != 0) |
10285 | { | |
1a29b174 | 10286 | do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode, |
2b96c5f6 | 10287 | NULL_RTX, if_false_label, if_true_label); |
10f307d9 | 10288 | |
b54842d8 | 10289 | return; |
10290 | } | |
10f307d9 | 10291 | |
b54842d8 | 10292 | /* If we couldn't do the "or" simply, do this with a series of compares. */ |
10293 | if (! if_false_label) | |
10294 | drop_through_label = if_false_label = gen_label_rtx (); | |
10f307d9 | 10295 | |
b54842d8 | 10296 | for (i = 0; i < nwords; i++) |
1a29b174 | 10297 | do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)), |
2b96c5f6 | 10298 | const0_rtx, EQ, 1, word_mode, NULL_RTX, |
1a29b174 | 10299 | if_false_label, NULL_RTX); |
10f307d9 | 10300 | |
b54842d8 | 10301 | if (if_true_label) |
10302 | emit_jump (if_true_label); | |
a92771b8 | 10303 | |
b54842d8 | 10304 | if (drop_through_label) |
10305 | emit_label (drop_through_label); | |
10f307d9 | 10306 | } |
b54842d8 | 10307 | \f |
1a29b174 | 10308 | /* Generate code for a comparison of OP0 and OP1 with rtx code CODE. |
b54842d8 | 10309 | (including code to compute the values to be compared) |
10310 | and set (CC0) according to the result. | |
1a29b174 | 10311 | The decision as to signed or unsigned comparison must be made by the caller. |
10f307d9 | 10312 | |
b54842d8 | 10313 | We force a stack adjustment unless there are currently |
1a29b174 | 10314 | things pushed on the stack that aren't yet used. |
649d8da6 | 10315 | |
1a29b174 | 10316 | If MODE is BLKmode, SIZE is an RTX giving the size of the objects being |
2b96c5f6 | 10317 | compared. */ |
1a29b174 | 10318 | |
10319 | rtx | |
2b96c5f6 | 10320 | compare_from_rtx (op0, op1, code, unsignedp, mode, size) |
19cb6b50 | 10321 | rtx op0, op1; |
1a29b174 | 10322 | enum rtx_code code; |
10323 | int unsignedp; | |
10324 | enum machine_mode mode; | |
10325 | rtx size; | |
b54842d8 | 10326 | { |
74690aaa | 10327 | enum rtx_code ucode; |
1a29b174 | 10328 | rtx tem; |
1203f673 | 10329 | |
1a29b174 | 10330 | /* If one operand is constant, make it the second one. Only do this |
10331 | if the other operand is not constant as well. */ | |
649d8da6 | 10332 | |
f5ef1390 | 10333 | if (swap_commutative_operands_p (op0, op1)) |
10f307d9 | 10334 | { |
1a29b174 | 10335 | tem = op0; |
10336 | op0 = op1; | |
10337 | op1 = tem; | |
10338 | code = swap_condition (code); | |
649d8da6 | 10339 | } |
10f307d9 | 10340 | |
1a29b174 | 10341 | if (flag_force_mem) |
b54842d8 | 10342 | { |
1a29b174 | 10343 | op0 = force_not_mem (op0); |
10344 | op1 = force_not_mem (op1); | |
10345 | } | |
10f307d9 | 10346 | |
1a29b174 | 10347 | do_pending_stack_adjust (); |
10348 | ||
74690aaa | 10349 | ucode = unsignedp ? unsigned_condition (code) : code; |
10350 | if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0) | |
1a29b174 | 10351 | return tem; |
10352 | ||
10353 | #if 0 | |
10354 | /* There's no need to do this now that combine.c can eliminate lots of | |
10355 | sign extensions. This can be less efficient in certain cases on other | |
10356 | machines. */ | |
10357 | ||
10358 | /* If this is a signed equality comparison, we can do it as an | |
10359 | unsigned comparison since zero-extension is cheaper than sign | |
10360 | extension and comparisons with zero are done as unsigned. This is | |
10361 | the case even on machines that can do fast sign extension, since | |
10362 | zero-extension is easier to combine with other operations than | |
10363 | sign-extension is. If we are comparing against a constant, we must | |
10364 | convert it to what it would look like unsigned. */ | |
10365 | if ((code == EQ || code == NE) && ! unsignedp | |
10366 | && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT) | |
10367 | { | |
10368 | if (GET_CODE (op1) == CONST_INT | |
10369 | && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1)) | |
10370 | op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))); | |
10371 | unsignedp = 1; | |
b54842d8 | 10372 | } |
10373 | #endif | |
fa56dc1d | 10374 | |
2b96c5f6 | 10375 | emit_cmp_insn (op0, op1, code, size, mode, unsignedp); |
a92771b8 | 10376 | |
ff11430a | 10377 | #if HAVE_cc0 |
1a29b174 | 10378 | return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx); |
ff11430a | 10379 | #else |
10380 | return gen_rtx_fmt_ee (code, VOIDmode, op0, op1); | |
10381 | #endif | |
649d8da6 | 10382 | } |
10f307d9 | 10383 | |
1a29b174 | 10384 | /* Like do_compare_and_jump but expects the values to compare as two rtx's. |
b54842d8 | 10385 | The decision as to signed or unsigned comparison must be made by the caller. |
10f307d9 | 10386 | |
b54842d8 | 10387 | If MODE is BLKmode, SIZE is an RTX giving the size of the objects being |
2b96c5f6 | 10388 | compared. */ |
649d8da6 | 10389 | |
1a29b174 | 10390 | void |
2b96c5f6 | 10391 | do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, |
1a29b174 | 10392 | if_false_label, if_true_label) |
19cb6b50 | 10393 | rtx op0, op1; |
b54842d8 | 10394 | enum rtx_code code; |
10395 | int unsignedp; | |
10396 | enum machine_mode mode; | |
10397 | rtx size; | |
1a29b174 | 10398 | rtx if_false_label, if_true_label; |
10f307d9 | 10399 | { |
74690aaa | 10400 | enum rtx_code ucode; |
b54842d8 | 10401 | rtx tem; |
1a29b174 | 10402 | int dummy_true_label = 0; |
10403 | ||
10404 | /* Reverse the comparison if that is safe and we want to jump if it is | |
10405 | false. */ | |
10406 | if (! if_true_label && ! FLOAT_MODE_P (mode)) | |
10407 | { | |
10408 | if_true_label = if_false_label; | |
10409 | if_false_label = 0; | |
10410 | code = reverse_condition (code); | |
10411 | } | |
10f307d9 | 10412 | |
b54842d8 | 10413 | /* If one operand is constant, make it the second one. Only do this |
10414 | if the other operand is not constant as well. */ | |
01ab6370 | 10415 | |
f5ef1390 | 10416 | if (swap_commutative_operands_p (op0, op1)) |
649d8da6 | 10417 | { |
b54842d8 | 10418 | tem = op0; |
10419 | op0 = op1; | |
10420 | op1 = tem; | |
10421 | code = swap_condition (code); | |
10422 | } | |
10f307d9 | 10423 | |
b54842d8 | 10424 | if (flag_force_mem) |
10425 | { | |
10426 | op0 = force_not_mem (op0); | |
10427 | op1 = force_not_mem (op1); | |
10428 | } | |
10f307d9 | 10429 | |
b54842d8 | 10430 | do_pending_stack_adjust (); |
649d8da6 | 10431 | |
74690aaa | 10432 | ucode = unsignedp ? unsigned_condition (code) : code; |
10433 | if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0) | |
1a29b174 | 10434 | { |
10435 | if (tem == const_true_rtx) | |
10436 | { | |
10437 | if (if_true_label) | |
10438 | emit_jump (if_true_label); | |
10439 | } | |
10440 | else | |
10441 | { | |
10442 | if (if_false_label) | |
10443 | emit_jump (if_false_label); | |
10444 | } | |
10445 | return; | |
10446 | } | |
649d8da6 | 10447 | |
b54842d8 | 10448 | #if 0 |
10449 | /* There's no need to do this now that combine.c can eliminate lots of | |
10450 | sign extensions. This can be less efficient in certain cases on other | |
10451 | machines. */ | |
649d8da6 | 10452 | |
b54842d8 | 10453 | /* If this is a signed equality comparison, we can do it as an |
10454 | unsigned comparison since zero-extension is cheaper than sign | |
10455 | extension and comparisons with zero are done as unsigned. This is | |
10456 | the case even on machines that can do fast sign extension, since | |
10457 | zero-extension is easier to combine with other operations than | |
10458 | sign-extension is. If we are comparing against a constant, we must | |
10459 | convert it to what it would look like unsigned. */ | |
10460 | if ((code == EQ || code == NE) && ! unsignedp | |
10461 | && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT) | |
10462 | { | |
10463 | if (GET_CODE (op1) == CONST_INT | |
10464 | && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1)) | |
10465 | op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))); | |
10466 | unsignedp = 1; | |
10467 | } | |
10468 | #endif | |
649d8da6 | 10469 | |
1a29b174 | 10470 | if (! if_true_label) |
10471 | { | |
10472 | dummy_true_label = 1; | |
10473 | if_true_label = gen_label_rtx (); | |
10474 | } | |
10475 | ||
2b96c5f6 | 10476 | emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, |
1a29b174 | 10477 | if_true_label); |
10478 | ||
10479 | if (if_false_label) | |
10480 | emit_jump (if_false_label); | |
10481 | if (dummy_true_label) | |
10482 | emit_label (if_true_label); | |
10483 | } | |
10484 | ||
10485 | /* Generate code for a comparison expression EXP (including code to compute | |
10486 | the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or | |
10487 | IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the | |
10488 | generated code will drop through. | |
10489 | SIGNED_CODE should be the rtx operation for this comparison for | |
10490 | signed data; UNSIGNED_CODE, likewise for use if data is unsigned. | |
10491 | ||
10492 | We force a stack adjustment unless there are currently | |
10493 | things pushed on the stack that aren't yet used. */ | |
10494 | ||
10495 | static void | |
10496 | do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label, | |
10497 | if_true_label) | |
19cb6b50 | 10498 | tree exp; |
1a29b174 | 10499 | enum rtx_code signed_code, unsigned_code; |
10500 | rtx if_false_label, if_true_label; | |
10501 | { | |
19cb6b50 | 10502 | rtx op0, op1; |
10503 | tree type; | |
10504 | enum machine_mode mode; | |
1a29b174 | 10505 | int unsignedp; |
10506 | enum rtx_code code; | |
10507 | ||
10508 | /* Don't crash if the comparison was erroneous. */ | |
2b96c5f6 | 10509 | op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0); |
1a29b174 | 10510 | if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK) |
10511 | return; | |
10512 | ||
2b96c5f6 | 10513 | op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0); |
7f69ec60 | 10514 | if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK) |
10515 | return; | |
10516 | ||
1a29b174 | 10517 | type = TREE_TYPE (TREE_OPERAND (exp, 0)); |
10518 | mode = TYPE_MODE (type); | |
7f69ec60 | 10519 | if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST |
10520 | && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST | |
10521 | || (GET_MODE_BITSIZE (mode) | |
1be84d5a | 10522 | > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, |
10523 | 1))))))) | |
7f69ec60 | 10524 | { |
10525 | /* op0 might have been replaced by promoted constant, in which | |
10526 | case the type of second argument should be used. */ | |
10527 | type = TREE_TYPE (TREE_OPERAND (exp, 1)); | |
10528 | mode = TYPE_MODE (type); | |
10529 | } | |
1a29b174 | 10530 | unsignedp = TREE_UNSIGNED (type); |
10531 | code = unsignedp ? unsigned_code : signed_code; | |
10532 | ||
10533 | #ifdef HAVE_canonicalize_funcptr_for_compare | |
10534 | /* If function pointers need to be "canonicalized" before they can | |
10535 | be reliably compared, then canonicalize them. */ | |
10536 | if (HAVE_canonicalize_funcptr_for_compare | |
10537 | && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE | |
10538 | && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))) | |
10539 | == FUNCTION_TYPE)) | |
10540 | { | |
10541 | rtx new_op0 = gen_reg_rtx (mode); | |
10542 | ||
10543 | emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0)); | |
10544 | op0 = new_op0; | |
10545 | } | |
10546 | ||
10547 | if (HAVE_canonicalize_funcptr_for_compare | |
10548 | && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE | |
10549 | && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1)))) | |
10550 | == FUNCTION_TYPE)) | |
10551 | { | |
10552 | rtx new_op1 = gen_reg_rtx (mode); | |
10553 | ||
10554 | emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1)); | |
10555 | op1 = new_op1; | |
10556 | } | |
10557 | #endif | |
10558 | ||
10559 | /* Do any postincrements in the expression that was tested. */ | |
10560 | emit_queue (); | |
10561 | ||
10562 | do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, | |
10563 | ((mode == BLKmode) | |
10564 | ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX), | |
1a29b174 | 10565 | if_false_label, if_true_label); |
b54842d8 | 10566 | } |
10567 | \f | |
10568 | /* Generate code to calculate EXP using a store-flag instruction | |
10569 | and return an rtx for the result. EXP is either a comparison | |
10570 | or a TRUTH_NOT_EXPR whose operand is a comparison. | |
649d8da6 | 10571 | |
b54842d8 | 10572 | If TARGET is nonzero, store the result there if convenient. |
649d8da6 | 10573 | |
6ef828f9 | 10574 | If ONLY_CHEAP is nonzero, only do this if it is likely to be very |
b54842d8 | 10575 | cheap. |
649d8da6 | 10576 | |
b54842d8 | 10577 | Return zero if there is no suitable set-flag instruction |
10578 | available on this machine. | |
649d8da6 | 10579 | |
b54842d8 | 10580 | Once expand_expr has been called on the arguments of the comparison, |
10581 | we are committed to doing the store flag, since it is not safe to | |
10582 | re-evaluate the expression. We emit the store-flag insn by calling | |
10583 | emit_store_flag, but only expand the arguments if we have a reason | |
10584 | to believe that emit_store_flag will be successful. If we think that | |
10585 | it will, but it isn't, we have to simulate the store-flag with a | |
10586 | set/jump/set sequence. */ | |
649d8da6 | 10587 | |
b54842d8 | 10588 | static rtx |
10589 | do_store_flag (exp, target, mode, only_cheap) | |
10590 | tree exp; | |
10591 | rtx target; | |
10592 | enum machine_mode mode; | |
10593 | int only_cheap; | |
10594 | { | |
10595 | enum rtx_code code; | |
10596 | tree arg0, arg1, type; | |
10597 | tree tem; | |
10598 | enum machine_mode operand_mode; | |
10599 | int invert = 0; | |
10600 | int unsignedp; | |
10601 | rtx op0, op1; | |
10602 | enum insn_code icode; | |
10603 | rtx subtarget = target; | |
0c22b90f | 10604 | rtx result, label; |
649d8da6 | 10605 | |
b54842d8 | 10606 | /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the |
10607 | result at the end. We can't simply invert the test since it would | |
10608 | have already been inverted if it were valid. This case occurs for | |
10609 | some floating-point comparisons. */ | |
649d8da6 | 10610 | |
b54842d8 | 10611 | if (TREE_CODE (exp) == TRUTH_NOT_EXPR) |
10612 | invert = 1, exp = TREE_OPERAND (exp, 0); | |
649d8da6 | 10613 | |
b54842d8 | 10614 | arg0 = TREE_OPERAND (exp, 0); |
10615 | arg1 = TREE_OPERAND (exp, 1); | |
fc80e4dd | 10616 | |
10617 | /* Don't crash if the comparison was erroneous. */ | |
10618 | if (arg0 == error_mark_node || arg1 == error_mark_node) | |
10619 | return const0_rtx; | |
10620 | ||
b54842d8 | 10621 | type = TREE_TYPE (arg0); |
10622 | operand_mode = TYPE_MODE (type); | |
10623 | unsignedp = TREE_UNSIGNED (type); | |
649d8da6 | 10624 | |
b54842d8 | 10625 | /* We won't bother with BLKmode store-flag operations because it would mean |
10626 | passing a lot of information to emit_store_flag. */ | |
10627 | if (operand_mode == BLKmode) | |
10628 | return 0; | |
649d8da6 | 10629 | |
b54842d8 | 10630 | /* We won't bother with store-flag operations involving function pointers |
10631 | when function pointers must be canonicalized before comparisons. */ | |
10632 | #ifdef HAVE_canonicalize_funcptr_for_compare | |
10633 | if (HAVE_canonicalize_funcptr_for_compare | |
10634 | && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE | |
10635 | && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))) | |
10636 | == FUNCTION_TYPE)) | |
10637 | || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE | |
10638 | && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1)))) | |
10639 | == FUNCTION_TYPE)))) | |
10640 | return 0; | |
649d8da6 | 10641 | #endif |
10642 | ||
b54842d8 | 10643 | STRIP_NOPS (arg0); |
10644 | STRIP_NOPS (arg1); | |
649d8da6 | 10645 | |
b54842d8 | 10646 | /* Get the rtx comparison code to use. We know that EXP is a comparison |
10647 | operation of some type. Some comparisons against 1 and -1 can be | |
10648 | converted to comparisons with zero. Do so here so that the tests | |
10649 | below will be aware that we have a comparison with zero. These | |
10650 | tests will not catch constants in the first operand, but constants | |
10651 | are rarely passed as the first operand. */ | |
649d8da6 | 10652 | |
b54842d8 | 10653 | switch (TREE_CODE (exp)) |
10654 | { | |
10655 | case EQ_EXPR: | |
10656 | code = EQ; | |
10f307d9 | 10657 | break; |
b54842d8 | 10658 | case NE_EXPR: |
10659 | code = NE; | |
10f307d9 | 10660 | break; |
b54842d8 | 10661 | case LT_EXPR: |
10662 | if (integer_onep (arg1)) | |
10663 | arg1 = integer_zero_node, code = unsignedp ? LEU : LE; | |
10664 | else | |
10665 | code = unsignedp ? LTU : LT; | |
649d8da6 | 10666 | break; |
b54842d8 | 10667 | case LE_EXPR: |
10668 | if (! unsignedp && integer_all_onesp (arg1)) | |
10669 | arg1 = integer_zero_node, code = LT; | |
10670 | else | |
10671 | code = unsignedp ? LEU : LE; | |
649d8da6 | 10672 | break; |
b54842d8 | 10673 | case GT_EXPR: |
10674 | if (! unsignedp && integer_all_onesp (arg1)) | |
10675 | arg1 = integer_zero_node, code = GE; | |
10676 | else | |
10677 | code = unsignedp ? GTU : GT; | |
10678 | break; | |
10679 | case GE_EXPR: | |
10680 | if (integer_onep (arg1)) | |
10681 | arg1 = integer_zero_node, code = unsignedp ? GTU : GT; | |
10682 | else | |
10683 | code = unsignedp ? GEU : GE; | |
649d8da6 | 10684 | break; |
a4110d9a | 10685 | |
10686 | case UNORDERED_EXPR: | |
10687 | code = UNORDERED; | |
10688 | break; | |
10689 | case ORDERED_EXPR: | |
10690 | code = ORDERED; | |
10691 | break; | |
10692 | case UNLT_EXPR: | |
10693 | code = UNLT; | |
10694 | break; | |
10695 | case UNLE_EXPR: | |
10696 | code = UNLE; | |
10697 | break; | |
10698 | case UNGT_EXPR: | |
10699 | code = UNGT; | |
10700 | break; | |
10701 | case UNGE_EXPR: | |
10702 | code = UNGE; | |
10703 | break; | |
10704 | case UNEQ_EXPR: | |
10705 | code = UNEQ; | |
10706 | break; | |
a4110d9a | 10707 | |
649d8da6 | 10708 | default: |
b54842d8 | 10709 | abort (); |
10f307d9 | 10710 | } |
10f307d9 | 10711 | |
b54842d8 | 10712 | /* Put a constant second. */ |
10713 | if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST) | |
10714 | { | |
10715 | tem = arg0; arg0 = arg1; arg1 = tem; | |
10716 | code = swap_condition (code); | |
649d8da6 | 10717 | } |
10f307d9 | 10718 | |
b54842d8 | 10719 | /* If this is an equality or inequality test of a single bit, we can |
10720 | do this by shifting the bit being tested to the low-order bit and | |
10721 | masking the result with the constant 1. If the condition was EQ, | |
10722 | we xor it with 1. This does not require an scc insn and is faster | |
10723 | than an scc insn even if we have it. */ | |
3218a49d | 10724 | |
b54842d8 | 10725 | if ((code == NE || code == EQ) |
10726 | && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1) | |
10727 | && integer_pow2p (TREE_OPERAND (arg0, 1))) | |
10728 | { | |
10729 | tree inner = TREE_OPERAND (arg0, 0); | |
10730 | int bitnum = tree_log2 (TREE_OPERAND (arg0, 1)); | |
10731 | int ops_unsignedp; | |
10f307d9 | 10732 | |
b54842d8 | 10733 | /* If INNER is a right shift of a constant and it plus BITNUM does |
10734 | not overflow, adjust BITNUM and INNER. */ | |
649d8da6 | 10735 | |
b54842d8 | 10736 | if (TREE_CODE (inner) == RSHIFT_EXPR |
10737 | && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST | |
10738 | && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0 | |
a0c2c45b | 10739 | && bitnum < TYPE_PRECISION (type) |
10740 | && 0 > compare_tree_int (TREE_OPERAND (inner, 1), | |
10741 | bitnum - TYPE_PRECISION (type))) | |
649d8da6 | 10742 | { |
b54842d8 | 10743 | bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1)); |
10744 | inner = TREE_OPERAND (inner, 0); | |
649d8da6 | 10745 | } |
649d8da6 | 10746 | |
b54842d8 | 10747 | /* If we are going to be able to omit the AND below, we must do our |
10748 | operations as unsigned. If we must use the AND, we have a choice. | |
10749 | Normally unsigned is faster, but for some machines signed is. */ | |
10750 | ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1 | |
10751 | #ifdef LOAD_EXTEND_OP | |
10752 | : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1) | |
10753 | #else | |
10754 | : 1 | |
10755 | #endif | |
10756 | ); | |
10f307d9 | 10757 | |
d8e5b213 | 10758 | if (! get_subtarget (subtarget) |
d1676939 | 10759 | || GET_MODE (subtarget) != operand_mode |
997d68fe | 10760 | || ! safe_from_p (subtarget, inner, 1)) |
b54842d8 | 10761 | subtarget = 0; |
10f307d9 | 10762 | |
b54842d8 | 10763 | op0 = expand_expr (inner, subtarget, VOIDmode, 0); |
10f307d9 | 10764 | |
b54842d8 | 10765 | if (bitnum != 0) |
3af7dce6 | 10766 | op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0, |
b54842d8 | 10767 | size_int (bitnum), subtarget, ops_unsignedp); |
10f307d9 | 10768 | |
b54842d8 | 10769 | if (GET_MODE (op0) != mode) |
10770 | op0 = convert_to_mode (mode, op0, ops_unsignedp); | |
10f307d9 | 10771 | |
b54842d8 | 10772 | if ((code == EQ && ! invert) || (code == NE && invert)) |
10773 | op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget, | |
10774 | ops_unsignedp, OPTAB_LIB_WIDEN); | |
10f307d9 | 10775 | |
b54842d8 | 10776 | /* Put the AND last so it can combine with more things. */ |
10777 | if (bitnum != TYPE_PRECISION (type) - 1) | |
6de9716c | 10778 | op0 = expand_and (mode, op0, const1_rtx, subtarget); |
10f307d9 | 10779 | |
b54842d8 | 10780 | return op0; |
10781 | } | |
10f307d9 | 10782 | |
b54842d8 | 10783 | /* Now see if we are likely to be able to do this. Return if not. */ |
a4110d9a | 10784 | if (! can_compare_p (code, operand_mode, ccp_store_flag)) |
b54842d8 | 10785 | return 0; |
a4110d9a | 10786 | |
b54842d8 | 10787 | icode = setcc_gen_code[(int) code]; |
10788 | if (icode == CODE_FOR_nothing | |
6357eaae | 10789 | || (only_cheap && insn_data[(int) icode].operand[0].mode != mode)) |
649d8da6 | 10790 | { |
b54842d8 | 10791 | /* We can only do this if it is one of the special cases that |
10792 | can be handled without an scc insn. */ | |
10793 | if ((code == LT && integer_zerop (arg1)) | |
10794 | || (! only_cheap && code == GE && integer_zerop (arg1))) | |
10795 | ; | |
10796 | else if (BRANCH_COST >= 0 | |
10797 | && ! only_cheap && (code == NE || code == EQ) | |
10798 | && TREE_CODE (type) != REAL_TYPE | |
10799 | && ((abs_optab->handlers[(int) operand_mode].insn_code | |
10800 | != CODE_FOR_nothing) | |
10801 | || (ffs_optab->handlers[(int) operand_mode].insn_code | |
10802 | != CODE_FOR_nothing))) | |
10803 | ; | |
10804 | else | |
10805 | return 0; | |
649d8da6 | 10806 | } |
fa56dc1d | 10807 | |
d8e5b213 | 10808 | if (! get_subtarget (target) |
d1676939 | 10809 | || GET_MODE (subtarget) != operand_mode |
997d68fe | 10810 | || ! safe_from_p (subtarget, arg1, 1)) |
b54842d8 | 10811 | subtarget = 0; |
10812 | ||
10813 | op0 = expand_expr (arg0, subtarget, VOIDmode, 0); | |
10814 | op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0); | |
10815 | ||
10816 | if (target == 0) | |
10817 | target = gen_reg_rtx (mode); | |
10818 | ||
10819 | /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe | |
10820 | because, if the emit_store_flag does anything it will succeed and | |
10821 | OP0 and OP1 will not be used subsequently. */ | |
649d8da6 | 10822 | |
b54842d8 | 10823 | result = emit_store_flag (target, code, |
10824 | queued_subexp_p (op0) ? copy_rtx (op0) : op0, | |
10825 | queued_subexp_p (op1) ? copy_rtx (op1) : op1, | |
10826 | operand_mode, unsignedp, 1); | |
649d8da6 | 10827 | |
b54842d8 | 10828 | if (result) |
10829 | { | |
10830 | if (invert) | |
10831 | result = expand_binop (mode, xor_optab, result, const1_rtx, | |
10832 | result, 0, OPTAB_LIB_WIDEN); | |
10833 | return result; | |
649d8da6 | 10834 | } |
10f307d9 | 10835 | |
b54842d8 | 10836 | /* If this failed, we have to do this with set/compare/jump/set code. */ |
10837 | if (GET_CODE (target) != REG | |
10838 | || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1)) | |
10839 | target = gen_reg_rtx (GET_MODE (target)); | |
10840 | ||
10841 | emit_move_insn (target, invert ? const0_rtx : const1_rtx); | |
10842 | result = compare_from_rtx (op0, op1, code, unsignedp, | |
2b96c5f6 | 10843 | operand_mode, NULL_RTX); |
b54842d8 | 10844 | if (GET_CODE (result) == CONST_INT) |
10845 | return (((result == const0_rtx && ! invert) | |
10846 | || (result != const0_rtx && invert)) | |
10847 | ? const0_rtx : const1_rtx); | |
649d8da6 | 10848 | |
66ac11b2 | 10849 | /* The code of RESULT may not match CODE if compare_from_rtx |
10850 | decided to swap its operands and reverse the original code. | |
10851 | ||
10852 | We know that compare_from_rtx returns either a CONST_INT or | |
10853 | a new comparison code, so it is safe to just extract the | |
10854 | code from RESULT. */ | |
10855 | code = GET_CODE (result); | |
10856 | ||
b54842d8 | 10857 | label = gen_label_rtx (); |
10858 | if (bcc_gen_fctn[(int) code] == 0) | |
10859 | abort (); | |
a92771b8 | 10860 | |
b54842d8 | 10861 | emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label)); |
10862 | emit_move_insn (target, invert ? const1_rtx : const0_rtx); | |
10863 | emit_label (label); | |
10f307d9 | 10864 | |
b54842d8 | 10865 | return target; |
649d8da6 | 10866 | } |
b54842d8 | 10867 | \f |
b54842d8 | 10868 | |
539a3a92 | 10869 | /* Stubs in case we haven't got a casesi insn. */ |
10870 | #ifndef HAVE_casesi | |
10871 | # define HAVE_casesi 0 | |
10872 | # define gen_casesi(a, b, c, d, e) (0) | |
10873 | # define CODE_FOR_casesi CODE_FOR_nothing | |
10874 | #endif | |
10875 | ||
10876 | /* If the machine does not have a case insn that compares the bounds, | |
10877 | this means extra overhead for dispatch tables, which raises the | |
10878 | threshold for using them. */ | |
10879 | #ifndef CASE_VALUES_THRESHOLD | |
10880 | #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5) | |
10881 | #endif /* CASE_VALUES_THRESHOLD */ | |
10882 | ||
10883 | unsigned int | |
10884 | case_values_threshold () | |
10885 | { | |
10886 | return CASE_VALUES_THRESHOLD; | |
10887 | } | |
10888 | ||
10889 | /* Attempt to generate a casesi instruction. Returns 1 if successful, | |
10890 | 0 otherwise (i.e. if there is no casesi instruction). */ | |
10891 | int | |
10892 | try_casesi (index_type, index_expr, minval, range, | |
10893 | table_label, default_label) | |
10894 | tree index_type, index_expr, minval, range; | |
10895 | rtx table_label ATTRIBUTE_UNUSED; | |
10896 | rtx default_label; | |
10897 | { | |
10898 | enum machine_mode index_mode = SImode; | |
10899 | int index_bits = GET_MODE_BITSIZE (index_mode); | |
10900 | rtx op1, op2, index; | |
10901 | enum machine_mode op_mode; | |
10902 | ||
10903 | if (! HAVE_casesi) | |
10904 | return 0; | |
10905 | ||
10906 | /* Convert the index to SImode. */ | |
10907 | if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode)) | |
10908 | { | |
10909 | enum machine_mode omode = TYPE_MODE (index_type); | |
10910 | rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0); | |
10911 | ||
10912 | /* We must handle the endpoints in the original mode. */ | |
10913 | index_expr = build (MINUS_EXPR, index_type, | |
10914 | index_expr, minval); | |
10915 | minval = integer_zero_node; | |
10916 | index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0); | |
10917 | emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX, | |
2b96c5f6 | 10918 | omode, 1, default_label); |
539a3a92 | 10919 | /* Now we can safely truncate. */ |
10920 | index = convert_to_mode (index_mode, index, 0); | |
10921 | } | |
10922 | else | |
10923 | { | |
10924 | if (TYPE_MODE (index_type) != index_mode) | |
10925 | { | |
771d21fa | 10926 | index_expr = convert ((*lang_hooks.types.type_for_size) |
10927 | (index_bits, 0), index_expr); | |
539a3a92 | 10928 | index_type = TREE_TYPE (index_expr); |
10929 | } | |
10930 | ||
10931 | index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0); | |
10932 | } | |
10933 | emit_queue (); | |
10934 | index = protect_from_queue (index, 0); | |
10935 | do_pending_stack_adjust (); | |
10936 | ||
10937 | op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode; | |
10938 | if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate) | |
10939 | (index, op_mode)) | |
10940 | index = copy_to_mode_reg (op_mode, index); | |
35f44ac1 | 10941 | |
539a3a92 | 10942 | op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0); |
10943 | ||
10944 | op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode; | |
10945 | op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)), | |
10946 | op1, TREE_UNSIGNED (TREE_TYPE (minval))); | |
10947 | if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate) | |
10948 | (op1, op_mode)) | |
10949 | op1 = copy_to_mode_reg (op_mode, op1); | |
10950 | ||
10951 | op2 = expand_expr (range, NULL_RTX, VOIDmode, 0); | |
10952 | ||
10953 | op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode; | |
10954 | op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)), | |
10955 | op2, TREE_UNSIGNED (TREE_TYPE (range))); | |
10956 | if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate) | |
10957 | (op2, op_mode)) | |
10958 | op2 = copy_to_mode_reg (op_mode, op2); | |
10959 | ||
10960 | emit_jump_insn (gen_casesi (index, op1, op2, | |
10961 | table_label, default_label)); | |
10962 | return 1; | |
10963 | } | |
10964 | ||
10965 | /* Attempt to generate a tablejump instruction; same concept. */ | |
10966 | #ifndef HAVE_tablejump | |
10967 | #define HAVE_tablejump 0 | |
10968 | #define gen_tablejump(x, y) (0) | |
10969 | #endif | |
10970 | ||
10971 | /* Subroutine of the next function. | |
10972 | ||
10973 | INDEX is the value being switched on, with the lowest value | |
b54842d8 | 10974 | in the table already subtracted. |
10975 | MODE is its expected mode (needed if INDEX is constant). | |
10976 | RANGE is the length of the jump table. | |
10977 | TABLE_LABEL is a CODE_LABEL rtx for the table itself. | |
1ccc1a7e | 10978 | |
b54842d8 | 10979 | DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the |
10980 | index value is out of range. */ | |
a92771b8 | 10981 | |
539a3a92 | 10982 | static void |
b54842d8 | 10983 | do_tablejump (index, mode, range, table_label, default_label) |
10984 | rtx index, range, table_label, default_label; | |
10985 | enum machine_mode mode; | |
649d8da6 | 10986 | { |
19cb6b50 | 10987 | rtx temp, vector; |
1ccc1a7e | 10988 | |
88d866dd | 10989 | if (INTVAL (range) > cfun->max_jumptable_ents) |
10990 | cfun->max_jumptable_ents = INTVAL (range); | |
71a455ac | 10991 | |
b54842d8 | 10992 | /* Do an unsigned comparison (in the proper mode) between the index |
10993 | expression and the value which represents the length of the range. | |
10994 | Since we just finished subtracting the lower bound of the range | |
10995 | from the index expression, this comparison allows us to simultaneously | |
10996 | check that the original index expression value is both greater than | |
10997 | or equal to the minimum value of the range and less than or equal to | |
10998 | the maximum value of the range. */ | |
9282409c | 10999 | |
5a894bc6 | 11000 | emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1, |
2b96c5f6 | 11001 | default_label); |
10f307d9 | 11002 | |
b54842d8 | 11003 | /* If index is in range, it must fit in Pmode. |
11004 | Convert to Pmode so we can index with it. */ | |
11005 | if (mode != Pmode) | |
11006 | index = convert_to_mode (Pmode, index, 1); | |
10f307d9 | 11007 | |
b54842d8 | 11008 | /* Don't let a MEM slip thru, because then INDEX that comes |
11009 | out of PIC_CASE_VECTOR_ADDRESS won't be a valid address, | |
11010 | and break_out_memory_refs will go to work on it and mess it up. */ | |
11011 | #ifdef PIC_CASE_VECTOR_ADDRESS | |
11012 | if (flag_pic && GET_CODE (index) != REG) | |
11013 | index = copy_to_mode_reg (Pmode, index); | |
11014 | #endif | |
649d8da6 | 11015 | |
b54842d8 | 11016 | /* If flag_force_addr were to affect this address |
11017 | it could interfere with the tricky assumptions made | |
11018 | about addresses that contain label-refs, | |
11019 | which may be valid only very near the tablejump itself. */ | |
11020 | /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the | |
11021 | GET_MODE_SIZE, because this indicates how large insns are. The other | |
11022 | uses should all be Pmode, because they are addresses. This code | |
11023 | could fail if addresses and insns are not the same size. */ | |
11024 | index = gen_rtx_PLUS (Pmode, | |
11025 | gen_rtx_MULT (Pmode, index, | |
11026 | GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))), | |
11027 | gen_rtx_LABEL_REF (Pmode, table_label)); | |
11028 | #ifdef PIC_CASE_VECTOR_ADDRESS | |
11029 | if (flag_pic) | |
11030 | index = PIC_CASE_VECTOR_ADDRESS (index); | |
11031 | else | |
10f307d9 | 11032 | #endif |
b54842d8 | 11033 | index = memory_address_noforce (CASE_VECTOR_MODE, index); |
11034 | temp = gen_reg_rtx (CASE_VECTOR_MODE); | |
11035 | vector = gen_rtx_MEM (CASE_VECTOR_MODE, index); | |
11036 | RTX_UNCHANGING_P (vector) = 1; | |
11037 | convert_move (temp, vector, 0); | |
11038 | ||
11039 | emit_jump_insn (gen_tablejump (temp, table_label)); | |
11040 | ||
11041 | /* If we are generating PIC code or if the table is PC-relative, the | |
11042 | table and JUMP_INSN must be adjacent, so don't output a BARRIER. */ | |
11043 | if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic) | |
11044 | emit_barrier (); | |
10f307d9 | 11045 | } |
b54842d8 | 11046 | |
539a3a92 | 11047 | int |
11048 | try_tablejump (index_type, index_expr, minval, range, | |
11049 | table_label, default_label) | |
11050 | tree index_type, index_expr, minval, range; | |
11051 | rtx table_label, default_label; | |
11052 | { | |
11053 | rtx index; | |
11054 | ||
11055 | if (! HAVE_tablejump) | |
11056 | return 0; | |
11057 | ||
11058 | index_expr = fold (build (MINUS_EXPR, index_type, | |
11059 | convert (index_type, index_expr), | |
11060 | convert (index_type, minval))); | |
11061 | index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0); | |
11062 | emit_queue (); | |
11063 | index = protect_from_queue (index, 0); | |
11064 | do_pending_stack_adjust (); | |
11065 | ||
11066 | do_tablejump (index, TYPE_MODE (index_type), | |
11067 | convert_modes (TYPE_MODE (index_type), | |
11068 | TYPE_MODE (TREE_TYPE (range)), | |
11069 | expand_expr (range, NULL_RTX, | |
11070 | VOIDmode, 0), | |
11071 | TREE_UNSIGNED (TREE_TYPE (range))), | |
11072 | table_label, default_label); | |
11073 | return 1; | |
11074 | } | |
1f3233d1 | 11075 | |
ead34f59 | 11076 | /* Nonzero if the mode is a valid vector mode for this architecture. |
11077 | This returns nonzero even if there is no hardware support for the | |
11078 | vector mode, but we can emulate with narrower modes. */ | |
11079 | ||
11080 | int | |
11081 | vector_mode_valid_p (mode) | |
11082 | enum machine_mode mode; | |
11083 | { | |
11084 | enum mode_class class = GET_MODE_CLASS (mode); | |
11085 | enum machine_mode innermode; | |
11086 | ||
11087 | /* Doh! What's going on? */ | |
11088 | if (class != MODE_VECTOR_INT | |
11089 | && class != MODE_VECTOR_FLOAT) | |
11090 | return 0; | |
11091 | ||
11092 | /* Hardware support. Woo hoo! */ | |
11093 | if (VECTOR_MODE_SUPPORTED_P (mode)) | |
11094 | return 1; | |
11095 | ||
11096 | innermode = GET_MODE_INNER (mode); | |
11097 | ||
11098 | /* We should probably return 1 if requesting V4DI and we have no DI, | |
11099 | but we have V2DI, but this is probably very unlikely. */ | |
11100 | ||
11101 | /* If we have support for the inner mode, we can safely emulate it. | |
11102 | We may not have V2DI, but me can emulate with a pair of DIs. */ | |
11103 | return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing; | |
11104 | } | |
11105 | ||
1f3233d1 | 11106 | #include "gt-expr.h" |