Commit | Line | Data |
---|---|---|
bbf6f052 | 1 | /* Convert tree expression to rtl instructions, for GNU compiler. |
8752c357 | 2 | Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, |
40e90eac | 3 | 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 |
455f14dd | 4 | Free Software Foundation, Inc. |
bbf6f052 | 5 | |
1322177d | 6 | This file is part of GCC. |
bbf6f052 | 7 | |
1322177d LB |
8 | GCC is free software; you can redistribute it and/or modify it under |
9 | the terms of the GNU General Public License as published by the Free | |
9dcd6f09 | 10 | Software Foundation; either version 3, or (at your option) any later |
1322177d | 11 | version. |
bbf6f052 | 12 | |
1322177d LB |
13 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
14 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
15 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
16 | for more details. | |
bbf6f052 RK |
17 | |
18 | You should have received a copy of the GNU General Public License | |
9dcd6f09 NC |
19 | along with GCC; see the file COPYING3. If not see |
20 | <http://www.gnu.org/licenses/>. */ | |
bbf6f052 | 21 | |
bbf6f052 | 22 | #include "config.h" |
670ee920 | 23 | #include "system.h" |
4977bab6 ZW |
24 | #include "coretypes.h" |
25 | #include "tm.h" | |
ca695ac9 | 26 | #include "machmode.h" |
bbf6f052 RK |
27 | #include "rtl.h" |
28 | #include "tree.h" | |
29 | #include "flags.h" | |
bf76bb5a | 30 | #include "regs.h" |
4ed67205 | 31 | #include "hard-reg-set.h" |
3d195391 | 32 | #include "except.h" |
bbf6f052 | 33 | #include "function.h" |
bbf6f052 | 34 | #include "insn-config.h" |
34e81b5a | 35 | #include "insn-attr.h" |
3a94c984 | 36 | /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */ |
d6f4ec51 | 37 | #include "expr.h" |
e78d8e51 ZW |
38 | #include "optabs.h" |
39 | #include "libfuncs.h" | |
bbf6f052 | 40 | #include "recog.h" |
3ef1eef4 | 41 | #include "reload.h" |
bbf6f052 | 42 | #include "output.h" |
bbf6f052 | 43 | #include "typeclass.h" |
10f0ad3d | 44 | #include "toplev.h" |
ac79cd5a | 45 | #include "langhooks.h" |
e2c49ac2 | 46 | #include "intl.h" |
b1474bb7 | 47 | #include "tm_p.h" |
6de9cd9a | 48 | #include "tree-iterator.h" |
2f8e398b PB |
49 | #include "tree-pass.h" |
50 | #include "tree-flow.h" | |
c988af2b | 51 | #include "target.h" |
2f8e398b | 52 | #include "timevar.h" |
6fb5fa3c | 53 | #include "df.h" |
a509ebb5 | 54 | #include "diagnostic.h" |
4e3825db | 55 | #include "ssaexpand.h" |
bbf6f052 | 56 | |
bbf6f052 | 57 | /* Decide whether a function's arguments should be processed |
bbc8a071 RK |
58 | from first to last or from last to first. |
59 | ||
60 | They should if the stack and args grow in opposite directions, but | |
61 | only if we have push insns. */ | |
bbf6f052 | 62 | |
bbf6f052 | 63 | #ifdef PUSH_ROUNDING |
bbc8a071 | 64 | |
2da4124d | 65 | #ifndef PUSH_ARGS_REVERSED |
3319a347 | 66 | #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD) |
3a94c984 | 67 | #define PUSH_ARGS_REVERSED /* If it's last to first. */ |
bbf6f052 | 68 | #endif |
2da4124d | 69 | #endif |
bbc8a071 | 70 | |
bbf6f052 RK |
71 | #endif |
72 | ||
73 | #ifndef STACK_PUSH_CODE | |
74 | #ifdef STACK_GROWS_DOWNWARD | |
75 | #define STACK_PUSH_CODE PRE_DEC | |
76 | #else | |
77 | #define STACK_PUSH_CODE PRE_INC | |
78 | #endif | |
79 | #endif | |
80 | ||
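A worked illustration of the decision above may help; this is an editorial sketch, not part of expr.c, and the target described is hypothetical. On a machine whose stack grows downward while argument offsets grow upward, exactly one of the two feature macros is defined, so the defined()-inequality holds and both conditional definitions take effect:

#if 0  /* illustration only */
#define STACK_GROWS_DOWNWARD        /* provided by the target header */
/* ARGS_GROW_DOWNWARD is left undefined, so the test above is true.  */
#define PUSH_ARGS_REVERSED          /* push the last argument first  */
#define STACK_PUSH_CODE PRE_DEC     /* each push pre-decrements sp   */
#endif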
4ca79136 | 81 | |
bbf6f052 RK |
82 | /* If this is nonzero, we do not bother generating VOLATILE |
83 | around volatile memory references, and we are willing to | |
84 | output indirect addresses. If cse is to follow, we reject | |
85 | indirect addresses so a useful potential cse is generated; | |
86 | if it is used only once, instruction combination will produce | |
87 | the same indirect address eventually. */ | |
88 | int cse_not_expected; | |
89 | ||
4969d05d RK |
90 | /* This structure is used by move_by_pieces to describe the move to |
91 | be performed. */ | |
b0f43ca0 | 92 | struct move_by_pieces_d |
4969d05d RK |
93 | { |
94 | rtx to; | |
95 | rtx to_addr; | |
96 | int autinc_to; | |
97 | int explicit_inc_to; | |
98 | rtx from; | |
99 | rtx from_addr; | |
100 | int autinc_from; | |
101 | int explicit_inc_from; | |
3bdf5ad1 RK |
102 | unsigned HOST_WIDE_INT len; |
103 | HOST_WIDE_INT offset; | |
4969d05d RK |
104 | int reverse; |
105 | }; | |
106 | ||
57814e5e | 107 | /* This structure is used by store_by_pieces to describe the store to |
9de08200 RK |
108 | be performed. */ |
109 | ||
b0f43ca0 | 110 | struct store_by_pieces_d |
9de08200 RK |
111 | { |
112 | rtx to; | |
113 | rtx to_addr; | |
114 | int autinc_to; | |
115 | int explicit_inc_to; | |
3bdf5ad1 RK |
116 | unsigned HOST_WIDE_INT len; |
117 | HOST_WIDE_INT offset; | |
502b8322 | 118 | rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode); |
fad205ff | 119 | void *constfundata; |
9de08200 RK |
120 | int reverse; |
121 | }; | |
122 | ||
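As a brief aside (an editorial sketch, not part of expr.c): the constfun field above is the callback through which store_by_pieces learns which constant to store at each byte offset. A minimal callback of the expected shape, which ignores its opaque data and offset and simply produces a zero of the requested mode (essentially what the clear_by_pieces path relies on), could look like this; the name example_zero_constfun is made up:

static rtx
example_zero_constfun (void *data ATTRIBUTE_UNUSED,
                       HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                       enum machine_mode mode)
{
  /* Return the canonical zero rtx for MODE, whatever MODE's class is.  */
  return CONST0_RTX (mode);
}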
502b8322 | 123 | static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT, |
45d78e7f | 124 | unsigned int, |
502b8322 AJ |
125 | unsigned int); |
126 | static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode, | |
b0f43ca0 | 127 | struct move_by_pieces_d *); |
502b8322 | 128 | static bool block_move_libcall_safe_for_call_parm (void); |
079a182e | 129 | static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT); |
502b8322 AJ |
130 | static tree emit_block_move_libcall_fn (int); |
131 | static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned); | |
132 | static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode); | |
133 | static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int); | |
b0f43ca0 | 134 | static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int); |
502b8322 | 135 | static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode, |
b0f43ca0 | 136 | struct store_by_pieces_d *); |
502b8322 AJ |
137 | static tree clear_storage_libcall_fn (int); |
138 | static rtx compress_float_constant (rtx, rtx); | |
139 | static rtx get_subtarget (rtx); | |
502b8322 AJ |
140 | static void store_constructor_field (rtx, unsigned HOST_WIDE_INT, |
141 | HOST_WIDE_INT, enum machine_mode, | |
4862826d | 142 | tree, tree, int, alias_set_type); |
502b8322 AJ |
143 | static void store_constructor (tree, rtx, int, HOST_WIDE_INT); |
144 | static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode, | |
4862826d | 145 | tree, tree, alias_set_type, bool); |
502b8322 | 146 | |
fa233e34 | 147 | static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree); |
502b8322 | 148 | |
22ea9ec0 | 149 | static int is_aligning_offset (const_tree, const_tree); |
eb698c58 RS |
150 | static void expand_operands (tree, tree, rtx, rtx*, rtx*, |
151 | enum expand_modifier); | |
bc15d0ef | 152 | static rtx reduce_to_bit_field_precision (rtx, rtx, tree); |
8e7aa1f9 | 153 | static rtx do_store_flag (sepops, rtx, enum machine_mode); |
21d93687 | 154 | #ifdef PUSH_ROUNDING |
502b8322 | 155 | static void emit_single_push_insn (enum machine_mode, rtx, tree); |
21d93687 | 156 | #endif |
502b8322 AJ |
157 | static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx); |
158 | static rtx const_vector_from_tree (tree); | |
57aaef66 | 159 | static void write_complex_part (rtx, rtx, bool); |
bbf6f052 | 160 | |
4fa52007 RK |
161 | /* Record for each mode whether we can move a register directly to or |
162 | from an object of that mode in memory. If we can't, we won't try | |
163 | to use that mode directly when accessing a field of that mode. */ | |
164 | ||
165 | static char direct_load[NUM_MACHINE_MODES]; | |
166 | static char direct_store[NUM_MACHINE_MODES]; | |
167 | ||
51286de6 RH |
168 | /* Record for each mode whether we can float-extend from memory. */ |
169 | ||
170 | static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES]; | |
171 | ||
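An editorial sketch (not part of expr.c) of how these tables are consulted; convert_move below performs this same test before referring to a MEM in a narrower mode. The helper name and its MEM operand are hypothetical:

static rtx
example_operand_for_mode (rtx mem_x, enum machine_mode mode)
{
  /* Use the memory reference directly only if init_expr_target recorded
     that a plain load in MODE is recognized and the reference is not
     volatile; otherwise copy it through a pseudo register first.  */
  if (MEM_P (mem_x) && ! MEM_VOLATILE_P (mem_x) && direct_load[(int) mode])
    return mem_x;
  return force_reg (mode, mem_x);
}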
fbe1758d | 172 | /* This macro is used to determine whether move_by_pieces should be called |
3a94c984 | 173 | to perform a structure copy. */ |
fbe1758d | 174 | #ifndef MOVE_BY_PIECES_P |
19caa751 | 175 | #define MOVE_BY_PIECES_P(SIZE, ALIGN) \ |
45d78e7f | 176 | (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \ |
e04ad03d | 177 | < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ())) |
fbe1758d AM |
178 | #endif |
179 | ||
78762e3b RS |
180 | /* This macro is used to determine whether clear_by_pieces should be |
181 | called to clear storage. */ | |
182 | #ifndef CLEAR_BY_PIECES_P | |
183 | #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \ | |
45d78e7f | 184 | (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \ |
e04ad03d | 185 | < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ())) |
78762e3b RS |
186 | #endif |
187 | ||
4977bab6 | 188 | /* This macro is used to determine whether store_by_pieces should be |
cfa31150 SL |
189 | called to "memset" storage with byte values other than zero. */ |
190 | #ifndef SET_BY_PIECES_P | |
191 | #define SET_BY_PIECES_P(SIZE, ALIGN) \ | |
192 | (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \ | |
e04ad03d | 193 | < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ())) |
cfa31150 SL |
194 | #endif |
195 | ||
196 | /* This macro is used to determine whether store_by_pieces should be | |
197 | called to "memcpy" storage when the source is a constant string. */ | |
4977bab6 | 198 | #ifndef STORE_BY_PIECES_P |
45d78e7f JJ |
199 | #define STORE_BY_PIECES_P(SIZE, ALIGN) \ |
200 | (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \ | |
e04ad03d | 201 | < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ())) |
4977bab6 ZW |
202 | #endif |
203 | ||
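To make the role of these predicates concrete, here is an editorial sketch (not part of expr.c) of the typical caller-side pattern, mirroring how emit_block_move_hints later in this file guards its call to move_by_pieces; dst and src stand for hypothetical BLKmode MEM operands, 16 is the byte count and 32 the alignment in bits:

static void
example_copy_16_bytes (rtx dst, rtx src)
{
  /* Inline the copy as a few scalar moves only when the ninsns/ratio
     heuristic says that is cheaper than the general mechanism.  */
  if (MOVE_BY_PIECES_P (16, 32))
    move_by_pieces (dst, src, 16, 32, 0);
  else
    emit_block_move (dst, src, GEN_INT (16), BLOCK_OP_NORMAL);
}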
266007a7 | 204 | /* This array records the insn_code of insns to perform block moves. */ |
70128ad9 | 205 | enum insn_code movmem_optab[NUM_MACHINE_MODES]; |
266007a7 | 206 | |
57e84f18 AS |
207 | /* This array records the insn_code of insns to perform block sets. */ |
208 | enum insn_code setmem_optab[NUM_MACHINE_MODES]; | |
9de08200 | 209 | |
40c1d5f8 | 210 | /* These arrays record the insn_code of three different kinds of insns |
118355a0 ZW |
211 | to perform block compares. */ |
212 | enum insn_code cmpstr_optab[NUM_MACHINE_MODES]; | |
40c1d5f8 | 213 | enum insn_code cmpstrn_optab[NUM_MACHINE_MODES]; |
118355a0 ZW |
214 | enum insn_code cmpmem_optab[NUM_MACHINE_MODES]; |
215 | ||
48ae6c13 RH |
216 | /* Synchronization primitives. */ |
217 | enum insn_code sync_add_optab[NUM_MACHINE_MODES]; | |
218 | enum insn_code sync_sub_optab[NUM_MACHINE_MODES]; | |
219 | enum insn_code sync_ior_optab[NUM_MACHINE_MODES]; | |
220 | enum insn_code sync_and_optab[NUM_MACHINE_MODES]; | |
221 | enum insn_code sync_xor_optab[NUM_MACHINE_MODES]; | |
222 | enum insn_code sync_nand_optab[NUM_MACHINE_MODES]; | |
223 | enum insn_code sync_old_add_optab[NUM_MACHINE_MODES]; | |
224 | enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES]; | |
225 | enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES]; | |
226 | enum insn_code sync_old_and_optab[NUM_MACHINE_MODES]; | |
227 | enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES]; | |
228 | enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES]; | |
229 | enum insn_code sync_new_add_optab[NUM_MACHINE_MODES]; | |
230 | enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES]; | |
231 | enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES]; | |
232 | enum insn_code sync_new_and_optab[NUM_MACHINE_MODES]; | |
233 | enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES]; | |
234 | enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES]; | |
235 | enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES]; | |
48ae6c13 RH |
236 | enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES]; |
237 | enum insn_code sync_lock_release[NUM_MACHINE_MODES]; | |
238 | ||
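An editorial sketch (not part of expr.c) of how such a table is queried; emit_block_move_via_movmem further down does the real work, including operand-predicate checks that this sketch omits. X, Y, OP2 and OPALIGN stand for the operands a four-operand movmem pattern expects:

static bool
example_try_movmem (rtx x, rtx y, rtx op2, rtx opalign, enum machine_mode mode)
{
  /* CODE_FOR_nothing means the target defines no movmem pattern for MODE.  */
  enum insn_code code = movmem_optab[(int) mode];
  rtx pat;

  if (code == CODE_FOR_nothing)
    return false;
  /* Ask the target's generator for the insn and emit it.  */
  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
  if (!pat)
    return false;
  emit_insn (pat);
  return true;
}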
cc2902df | 239 | /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */ |
e87b4f3f RS |
240 | |
241 | #ifndef SLOW_UNALIGNED_ACCESS | |
e1565e65 | 242 | #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT |
e87b4f3f | 243 | #endif |
bbf6f052 | 244 | \f |
b5deb7b6 SL |
245 | /* This is run to set up which modes can be used |
246 | directly in memory and to initialize the block move optab. It is run | |
247 | at the beginning of compilation and when the target is reinitialized. */ | |
4fa52007 RK |
248 | |
249 | void | |
b5deb7b6 | 250 | init_expr_target (void) |
4fa52007 RK |
251 | { |
252 | rtx insn, pat; | |
253 | enum machine_mode mode; | |
cff48d8f | 254 | int num_clobbers; |
9ec36da5 | 255 | rtx mem, mem1; |
bf1660a6 | 256 | rtx reg; |
9ec36da5 | 257 | |
e2549997 RS |
258 | /* Try indexing by frame ptr and try by stack ptr. |
259 | It is known that on the Convex the stack ptr isn't a valid index. | |
260 | With luck, one or the other is valid on any machine. */ | |
9ec36da5 JL |
261 | mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx); |
262 | mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx); | |
4fa52007 | 263 | |
bf1660a6 JL |
264 | /* A scratch register we can modify in-place below to avoid |
265 | useless RTL allocations. */ | |
266 | reg = gen_rtx_REG (VOIDmode, -1); | |
267 | ||
1f8c3c5b | 268 | insn = rtx_alloc (INSN); |
bbbbb16a | 269 | pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX); |
1f8c3c5b | 270 | PATTERN (insn) = pat; |
4fa52007 RK |
271 | |
272 | for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES; | |
273 | mode = (enum machine_mode) ((int) mode + 1)) | |
274 | { | |
275 | int regno; | |
4fa52007 RK |
276 | |
277 | direct_load[(int) mode] = direct_store[(int) mode] = 0; | |
278 | PUT_MODE (mem, mode); | |
e2549997 | 279 | PUT_MODE (mem1, mode); |
bf1660a6 | 280 | PUT_MODE (reg, mode); |
4fa52007 | 281 | |
e6fe56a4 RK |
282 | /* See if there is some register that can be used in this mode and |
283 | directly loaded or stored from memory. */ | |
284 | ||
7308a047 RS |
285 | if (mode != VOIDmode && mode != BLKmode) |
286 | for (regno = 0; regno < FIRST_PSEUDO_REGISTER | |
287 | && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0); | |
288 | regno++) | |
289 | { | |
290 | if (! HARD_REGNO_MODE_OK (regno, mode)) | |
291 | continue; | |
e6fe56a4 | 292 | |
6fb5fa3c | 293 | SET_REGNO (reg, regno); |
e6fe56a4 | 294 | |
7308a047 RS |
295 | SET_SRC (pat) = mem; |
296 | SET_DEST (pat) = reg; | |
297 | if (recog (pat, insn, &num_clobbers) >= 0) | |
298 | direct_load[(int) mode] = 1; | |
e6fe56a4 | 299 | |
e2549997 RS |
300 | SET_SRC (pat) = mem1; |
301 | SET_DEST (pat) = reg; | |
302 | if (recog (pat, insn, &num_clobbers) >= 0) | |
303 | direct_load[(int) mode] = 1; | |
304 | ||
7308a047 RS |
305 | SET_SRC (pat) = reg; |
306 | SET_DEST (pat) = mem; | |
307 | if (recog (pat, insn, &num_clobbers) >= 0) | |
308 | direct_store[(int) mode] = 1; | |
e2549997 RS |
309 | |
310 | SET_SRC (pat) = reg; | |
311 | SET_DEST (pat) = mem1; | |
312 | if (recog (pat, insn, &num_clobbers) >= 0) | |
313 | direct_store[(int) mode] = 1; | |
7308a047 | 314 | } |
4fa52007 RK |
315 | } |
316 | ||
51286de6 RH |
317 | mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000)); |
318 | ||
319 | for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode; | |
320 | mode = GET_MODE_WIDER_MODE (mode)) | |
321 | { | |
322 | enum machine_mode srcmode; | |
323 | for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode; | |
0fb7aeda | 324 | srcmode = GET_MODE_WIDER_MODE (srcmode)) |
51286de6 RH |
325 | { |
326 | enum insn_code ic; | |
327 | ||
328 | ic = can_extend_p (mode, srcmode, 0); | |
329 | if (ic == CODE_FOR_nothing) | |
330 | continue; | |
331 | ||
332 | PUT_MODE (mem, srcmode); | |
0fb7aeda | 333 | |
51286de6 RH |
334 | if ((*insn_data[ic].operand[1].predicate) (mem, srcmode)) |
335 | float_extend_from_mem[mode][srcmode] = true; | |
336 | } | |
337 | } | |
4fa52007 | 338 | } |
cff48d8f | 339 | |
bbf6f052 RK |
340 | /* This is run at the start of compiling a function. */ |
341 | ||
342 | void | |
502b8322 | 343 | init_expr (void) |
bbf6f052 | 344 | { |
3e029763 | 345 | memset (&crtl->expr, 0, sizeof (crtl->expr)); |
bbf6f052 | 346 | } |
bbf6f052 RK |
347 | \f |
348 | /* Copy data from FROM to TO, where the machine modes are not the same. | |
0f996086 CF |
349 | Both modes may be integer, or both may be floating, or both may be |
350 | fixed-point. | |
bbf6f052 RK |
351 | UNSIGNEDP should be nonzero if FROM is an unsigned type. |
352 | This causes zero-extension instead of sign-extension. */ | |
353 | ||
354 | void | |
502b8322 | 355 | convert_move (rtx to, rtx from, int unsignedp) |
bbf6f052 RK |
356 | { |
357 | enum machine_mode to_mode = GET_MODE (to); | |
358 | enum machine_mode from_mode = GET_MODE (from); | |
3d8bf70f BE |
359 | int to_real = SCALAR_FLOAT_MODE_P (to_mode); |
360 | int from_real = SCALAR_FLOAT_MODE_P (from_mode); | |
bbf6f052 RK |
361 | enum insn_code code; |
362 | rtx libcall; | |
363 | ||
364 | /* rtx code for making an equivalent value. */ | |
37d0b254 SE |
365 | enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN |
366 | : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND)); | |
bbf6f052 | 367 | |
bbf6f052 | 368 | |
5b0264cb | 369 | gcc_assert (to_real == from_real); |
ed1223ba EC |
370 | gcc_assert (to_mode != BLKmode); |
371 | gcc_assert (from_mode != BLKmode); | |
bbf6f052 | 372 | |
6de9cd9a DN |
373 | /* If the source and destination are already the same, then there's |
374 | nothing to do. */ | |
375 | if (to == from) | |
376 | return; | |
377 | ||
1499e0a8 RK |
378 | /* If FROM is a SUBREG that indicates that we have already done at least |
379 | the required extension, strip it. We don't handle such SUBREGs as | |
380 | TO here. */ | |
381 | ||
382 | if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from) | |
383 | && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from))) | |
384 | >= GET_MODE_SIZE (to_mode)) | |
385 | && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp) | |
386 | from = gen_lowpart (to_mode, from), from_mode = to_mode; | |
387 | ||
5b0264cb | 388 | gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to)); |
1499e0a8 | 389 | |
bbf6f052 RK |
390 | if (to_mode == from_mode |
391 | || (from_mode == VOIDmode && CONSTANT_P (from))) | |
392 | { | |
393 | emit_move_insn (to, from); | |
394 | return; | |
395 | } | |
396 | ||
0b4565c9 BS |
397 | if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode)) |
398 | { | |
5b0264cb | 399 | gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode)); |
3a94c984 | 400 | |
0b4565c9 | 401 | if (VECTOR_MODE_P (to_mode)) |
bafe341a | 402 | from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0); |
0b4565c9 | 403 | else |
bafe341a | 404 | to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0); |
0b4565c9 BS |
405 | |
406 | emit_move_insn (to, from); | |
407 | return; | |
408 | } | |
409 | ||
06765df1 R |
410 | if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT) |
411 | { | |
412 | convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp); | |
413 | convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp); | |
414 | return; | |
415 | } | |
416 | ||
bbf6f052 RK |
417 | if (to_real) |
418 | { | |
642dfa8b | 419 | rtx value, insns; |
85363ca0 | 420 | convert_optab tab; |
81d79e2c | 421 | |
15ed7b52 JG |
422 | gcc_assert ((GET_MODE_PRECISION (from_mode) |
423 | != GET_MODE_PRECISION (to_mode)) | |
424 | || (DECIMAL_FLOAT_MODE_P (from_mode) | |
425 | != DECIMAL_FLOAT_MODE_P (to_mode))); | |
ed1223ba | 426 | |
15ed7b52 JG |
427 | if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode)) |
428 | /* Conversion between decimal float and binary float, same size. */ | |
429 | tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab; | |
430 | else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)) | |
85363ca0 | 431 | tab = sext_optab; |
85363ca0 | 432 | else |
5b0264cb | 433 | tab = trunc_optab; |
2b01c326 | 434 | |
85363ca0 | 435 | /* Try converting directly if the insn is supported. */ |
2b01c326 | 436 | |
166cdb08 | 437 | code = convert_optab_handler (tab, to_mode, from_mode)->insn_code; |
85363ca0 | 438 | if (code != CODE_FOR_nothing) |
b092b471 | 439 | { |
85363ca0 ZW |
440 | emit_unop_insn (code, to, from, |
441 | tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE); | |
b092b471 JW |
442 | return; |
443 | } | |
b092b471 | 444 | |
85363ca0 | 445 | /* Otherwise use a libcall. */ |
8a33f100 | 446 | libcall = convert_optab_libfunc (tab, to_mode, from_mode); |
3a94c984 | 447 | |
5b0264cb NS |
448 | /* Is this conversion implemented yet? */ |
449 | gcc_assert (libcall); | |
bbf6f052 | 450 | |
642dfa8b | 451 | start_sequence (); |
ebb1b59a | 452 | value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode, |
81d79e2c | 453 | 1, from, from_mode); |
642dfa8b BS |
454 | insns = get_insns (); |
455 | end_sequence (); | |
450b1728 EC |
456 | emit_libcall_block (insns, to, value, |
457 | tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode, | |
458 | from) | |
459 | : gen_rtx_FLOAT_EXTEND (to_mode, from)); | |
bbf6f052 RK |
460 | return; |
461 | } | |
462 | ||
85363ca0 ZW |
463 | /* Handle pointer conversion. */ /* SPEE 900220. */ |
464 | /* Targets are expected to provide conversion insns between PxImode and | |
465 | xImode for all MODE_PARTIAL_INT modes they use, but no others. */ | |
466 | if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT) | |
467 | { | |
468 | enum machine_mode full_mode | |
469 | = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT); | |
470 | ||
166cdb08 | 471 | gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code |
5b0264cb | 472 | != CODE_FOR_nothing); |
85363ca0 ZW |
473 | |
474 | if (full_mode != from_mode) | |
475 | from = convert_to_mode (full_mode, from, unsignedp); | |
166cdb08 | 476 | emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code, |
85363ca0 ZW |
477 | to, from, UNKNOWN); |
478 | return; | |
479 | } | |
480 | if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT) | |
481 | { | |
d2348bd5 | 482 | rtx new_from; |
85363ca0 ZW |
483 | enum machine_mode full_mode |
484 | = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT); | |
485 | ||
166cdb08 | 486 | gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code |
5b0264cb | 487 | != CODE_FOR_nothing); |
85363ca0 | 488 | |
85363ca0 | 489 | if (to_mode == full_mode) |
d2348bd5 | 490 | { |
166cdb08 | 491 | emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code, |
d2348bd5 DD |
492 | to, from, UNKNOWN); |
493 | return; | |
494 | } | |
495 | ||
496 | new_from = gen_reg_rtx (full_mode); | |
166cdb08 | 497 | emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code, |
d2348bd5 | 498 | new_from, from, UNKNOWN); |
85363ca0 | 499 | |
a1105617 | 500 | /* else proceed to integer conversions below. */ |
85363ca0 | 501 | from_mode = full_mode; |
d2348bd5 | 502 | from = new_from; |
85363ca0 ZW |
503 | } |
504 | ||
0f996086 CF |
505 | /* Make sure both are fixed-point modes or both are not. */ |
506 | gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) == | |
507 | ALL_SCALAR_FIXED_POINT_MODE_P (to_mode)); | |
508 | if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode)) | |
509 | { | |
510 | /* If we widen from_mode to to_mode and they are in the same class, | |
511 | we won't saturate the result. | |
512 | Otherwise, always saturate the result to play safe. */ | |
513 | if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode) | |
514 | && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode)) | |
515 | expand_fixed_convert (to, from, 0, 0); | |
516 | else | |
517 | expand_fixed_convert (to, from, 0, 1); | |
518 | return; | |
519 | } | |
520 | ||
bbf6f052 RK |
521 | /* Now both modes are integers. */ |
522 | ||
523 | /* Handle expanding beyond a word. */ | |
524 | if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode) | |
525 | && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD) | |
526 | { | |
527 | rtx insns; | |
528 | rtx lowpart; | |
529 | rtx fill_value; | |
530 | rtx lowfrom; | |
531 | int i; | |
532 | enum machine_mode lowpart_mode; | |
533 | int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD); | |
534 | ||
535 | /* Try converting directly if the insn is supported. */ | |
536 | if ((code = can_extend_p (to_mode, from_mode, unsignedp)) | |
537 | != CODE_FOR_nothing) | |
538 | { | |
cd1b4b44 RK |
539 | /* If FROM is a SUBREG, put it into a register. Do this |
540 | so that we always generate the same set of insns for | |
541 | better cse'ing; if an intermediate assignment occurred, | |
542 | we won't be doing the operation directly on the SUBREG. */ | |
543 | if (optimize > 0 && GET_CODE (from) == SUBREG) | |
544 | from = force_reg (from_mode, from); | |
bbf6f052 RK |
545 | emit_unop_insn (code, to, from, equiv_code); |
546 | return; | |
547 | } | |
548 | /* Next, try converting via full word. */ | |
549 | else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD | |
550 | && ((code = can_extend_p (to_mode, word_mode, unsignedp)) | |
551 | != CODE_FOR_nothing)) | |
552 | { | |
2f6025a1 | 553 | rtx word_to = gen_reg_rtx (word_mode); |
f8cfc6aa | 554 | if (REG_P (to)) |
6a2d136b EB |
555 | { |
556 | if (reg_overlap_mentioned_p (to, from)) | |
557 | from = force_reg (from_mode, from); | |
c41c1387 | 558 | emit_clobber (to); |
6a2d136b | 559 | } |
2f6025a1 PB |
560 | convert_move (word_to, from, unsignedp); |
561 | emit_unop_insn (code, to, word_to, equiv_code); | |
bbf6f052 RK |
562 | return; |
563 | } | |
564 | ||
565 | /* No special multiword conversion insn; do it by hand. */ | |
566 | start_sequence (); | |
567 | ||
5c5033c3 RK |
568 | /* Since we will turn this into a no conflict block, we must ensure |
569 | that the source does not overlap the target. */ | |
570 | ||
571 | if (reg_overlap_mentioned_p (to, from)) | |
572 | from = force_reg (from_mode, from); | |
573 | ||
bbf6f052 RK |
574 | /* Get a copy of FROM widened to a word, if necessary. */ |
575 | if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD) | |
576 | lowpart_mode = word_mode; | |
577 | else | |
578 | lowpart_mode = from_mode; | |
579 | ||
580 | lowfrom = convert_to_mode (lowpart_mode, from, unsignedp); | |
581 | ||
582 | lowpart = gen_lowpart (lowpart_mode, to); | |
583 | emit_move_insn (lowpart, lowfrom); | |
584 | ||
585 | /* Compute the value to put in each remaining word. */ | |
586 | if (unsignedp) | |
587 | fill_value = const0_rtx; | |
588 | else | |
9a53bc17 PB |
589 | fill_value = emit_store_flag (gen_reg_rtx (word_mode), |
590 | LT, lowfrom, const0_rtx, | |
591 | VOIDmode, 0, -1); | |
bbf6f052 RK |
592 | |
593 | /* Fill the remaining words. */ | |
594 | for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++) | |
595 | { | |
596 | int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i); | |
597 | rtx subword = operand_subword (to, index, 1, to_mode); | |
598 | ||
5b0264cb | 599 | gcc_assert (subword); |
bbf6f052 RK |
600 | |
601 | if (fill_value != subword) | |
602 | emit_move_insn (subword, fill_value); | |
603 | } | |
604 | ||
605 | insns = get_insns (); | |
606 | end_sequence (); | |
607 | ||
d70dcf29 | 608 | emit_insn (insns); |
bbf6f052 RK |
609 | return; |
610 | } | |
611 | ||
d3c64ee3 RS |
612 | /* Truncating multi-word to a word or less. */ |
613 | if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD | |
614 | && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD) | |
bbf6f052 | 615 | { |
3c0cb5de | 616 | if (!((MEM_P (from) |
431a6eca JW |
617 | && ! MEM_VOLATILE_P (from) |
618 | && direct_load[(int) to_mode] | |
619 | && ! mode_dependent_address_p (XEXP (from, 0))) | |
f8cfc6aa | 620 | || REG_P (from) |
431a6eca JW |
621 | || GET_CODE (from) == SUBREG)) |
622 | from = force_reg (from_mode, from); | |
bbf6f052 RK |
623 | convert_move (to, gen_lowpart (word_mode, from), 0); |
624 | return; | |
625 | } | |
626 | ||
bbf6f052 RK |
627 | /* Now follow all the conversions between integers |
628 | no more than a word long. */ | |
629 | ||
630 | /* For truncation, usually we can just refer to FROM in a narrower mode. */ | |
631 | if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode) | |
632 | && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode), | |
d3c64ee3 | 633 | GET_MODE_BITSIZE (from_mode))) |
bbf6f052 | 634 | { |
3c0cb5de | 635 | if (!((MEM_P (from) |
d3c64ee3 RS |
636 | && ! MEM_VOLATILE_P (from) |
637 | && direct_load[(int) to_mode] | |
638 | && ! mode_dependent_address_p (XEXP (from, 0))) | |
f8cfc6aa | 639 | || REG_P (from) |
d3c64ee3 RS |
640 | || GET_CODE (from) == SUBREG)) |
641 | from = force_reg (from_mode, from); | |
f8cfc6aa | 642 | if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER |
34aa3599 RK |
643 | && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode)) |
644 | from = copy_to_reg (from); | |
bbf6f052 RK |
645 | emit_move_insn (to, gen_lowpart (to_mode, from)); |
646 | return; | |
647 | } | |
648 | ||
d3c64ee3 | 649 | /* Handle extension. */ |
bbf6f052 RK |
650 | if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode)) |
651 | { | |
652 | /* Convert directly if that works. */ | |
653 | if ((code = can_extend_p (to_mode, from_mode, unsignedp)) | |
654 | != CODE_FOR_nothing) | |
655 | { | |
656 | emit_unop_insn (code, to, from, equiv_code); | |
657 | return; | |
658 | } | |
659 | else | |
660 | { | |
661 | enum machine_mode intermediate; | |
2b28d92e NC |
662 | rtx tmp; |
663 | tree shift_amount; | |
bbf6f052 RK |
664 | |
665 | /* Search for a mode to convert via. */ | |
666 | for (intermediate = from_mode; intermediate != VOIDmode; | |
667 | intermediate = GET_MODE_WIDER_MODE (intermediate)) | |
930b4e39 RK |
668 | if (((can_extend_p (to_mode, intermediate, unsignedp) |
669 | != CODE_FOR_nothing) | |
670 | || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate) | |
d60eaeff JL |
671 | && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode), |
672 | GET_MODE_BITSIZE (intermediate)))) | |
bbf6f052 RK |
673 | && (can_extend_p (intermediate, from_mode, unsignedp) |
674 | != CODE_FOR_nothing)) | |
675 | { | |
676 | convert_move (to, convert_to_mode (intermediate, from, | |
677 | unsignedp), unsignedp); | |
678 | return; | |
679 | } | |
680 | ||
2b28d92e | 681 | /* No suitable intermediate mode. |
3a94c984 | 682 | Generate what we need with shifts. */ |
4a90aeeb NS |
683 | shift_amount = build_int_cst (NULL_TREE, |
684 | GET_MODE_BITSIZE (to_mode) | |
7d60be94 | 685 | - GET_MODE_BITSIZE (from_mode)); |
2b28d92e NC |
686 | from = gen_lowpart (to_mode, force_reg (from_mode, from)); |
687 | tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount, | |
688 | to, unsignedp); | |
3a94c984 | 689 | tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount, |
2b28d92e NC |
690 | to, unsignedp); |
691 | if (tmp != to) | |
692 | emit_move_insn (to, tmp); | |
693 | return; | |
bbf6f052 RK |
694 | } |
695 | } | |
696 | ||
3a94c984 | 697 | /* Support special truncate insns for certain modes. */ |
166cdb08 | 698 | if (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code != CODE_FOR_nothing) |
bbf6f052 | 699 | { |
166cdb08 | 700 | emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code, |
85363ca0 | 701 | to, from, UNKNOWN); |
b9bcad65 RK |
702 | return; |
703 | } | |
704 | ||
bbf6f052 RK |
705 | /* Handle truncation of volatile memrefs, and so on; |
706 | the things that couldn't be truncated directly, | |
85363ca0 ZW |
707 | and for which there was no special instruction. |
708 | ||
709 | ??? Code above formerly short-circuited this, for most integer | |
710 | mode pairs, with a force_reg in from_mode followed by a recursive | |
711 | call to this routine. Appears always to have been wrong. */ | |
bbf6f052 RK |
712 | if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)) |
713 | { | |
714 | rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from)); | |
715 | emit_move_insn (to, temp); | |
716 | return; | |
717 | } | |
718 | ||
719 | /* Mode combination is not recognized. */ | |
5b0264cb | 720 | gcc_unreachable (); |
bbf6f052 RK |
721 | } |
722 | ||
723 | /* Return an rtx for a value that would result | |
724 | from converting X to mode MODE. | |
725 | Both X and MODE may be floating, or both integer. | |
726 | UNSIGNEDP is nonzero if X is an unsigned value. | |
727 | This can be done by referring to a part of X in place | |
ad76cef8 | 728 | or by copying to a new temporary with conversion. */ |
bbf6f052 RK |
729 | |
730 | rtx | |
502b8322 | 731 | convert_to_mode (enum machine_mode mode, rtx x, int unsignedp) |
5ffe63ed RS |
732 | { |
733 | return convert_modes (mode, VOIDmode, x, unsignedp); | |
734 | } | |
735 | ||
736 | /* Return an rtx for a value that would result | |
737 | from converting X from mode OLDMODE to mode MODE. | |
738 | Both modes may be floating, or both integer. | |
739 | UNSIGNEDP is nonzero if X is an unsigned value. | |
740 | ||
741 | This can be done by referring to a part of X in place | |
742 | or by copying to a new temporary with conversion. | |
743 | ||
ad76cef8 | 744 | You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */ |
5ffe63ed RS |
745 | |
746 | rtx | |
502b8322 | 747 | convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp) |
bbf6f052 | 748 | { |
b3694847 | 749 | rtx temp; |
5ffe63ed | 750 | |
1499e0a8 RK |
751 | /* If FROM is a SUBREG that indicates that we have already done at least |
752 | the required extension, strip it. */ | |
753 | ||
754 | if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x) | |
755 | && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode) | |
756 | && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp) | |
757 | x = gen_lowpart (mode, x); | |
bbf6f052 | 758 | |
64791b18 RK |
759 | if (GET_MODE (x) != VOIDmode) |
760 | oldmode = GET_MODE (x); | |
3a94c984 | 761 | |
3a3d54f2 UB |
762 | if (mode == oldmode) |
763 | return x; | |
764 | ||
bbf6f052 | 765 | /* There is one case that we must handle specially: If we are converting |
906c4e36 | 766 | a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and |
bbf6f052 RK |
767 | we are to interpret the constant as unsigned, gen_lowpart will do |
768 | the wrong thing if the constant appears negative. What we want to do is |
769 | make the high-order word of the constant zero, not all ones. */ | |
770 | ||
771 | if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT | |
906c4e36 | 772 | && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT |
481683e1 | 773 | && CONST_INT_P (x) && INTVAL (x) < 0) |
96ff8a16 | 774 | { |
54fb1ae0 | 775 | double_int val = uhwi_to_double_int (INTVAL (x)); |
96ff8a16 | 776 | |
54fb1ae0 AS |
777 | /* We need to zero extend VAL. */ |
778 | if (oldmode != VOIDmode) | |
779 | val = double_int_zext (val, GET_MODE_BITSIZE (oldmode)); | |
96ff8a16 | 780 | |
54fb1ae0 | 781 | return immed_double_int_const (val, mode); |
96ff8a16 | 782 | } |
bbf6f052 RK |
783 | |
784 | /* We can do this with a gen_lowpart if both desired and current modes | |
785 | are integer, and this is either a constant integer, a register, or a | |
ba2e110c RK |
786 | non-volatile MEM. Except for the constant case where MODE is no |
787 | wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */ | |
bbf6f052 | 788 | |
481683e1 | 789 | if ((CONST_INT_P (x) |
ba2e110c | 790 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) |
bbf6f052 | 791 | || (GET_MODE_CLASS (mode) == MODE_INT |
5ffe63ed | 792 | && GET_MODE_CLASS (oldmode) == MODE_INT |
bbf6f052 | 793 | && (GET_CODE (x) == CONST_DOUBLE |
5ffe63ed | 794 | || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode) |
3c0cb5de | 795 | && ((MEM_P (x) && ! MEM_VOLATILE_P (x) |
d57c66da | 796 | && direct_load[(int) mode]) |
f8cfc6aa | 797 | || (REG_P (x) |
006c9f4a SE |
798 | && (! HARD_REGISTER_P (x) |
799 | || HARD_REGNO_MODE_OK (REGNO (x), mode)) | |
2bf29316 JW |
800 | && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode), |
801 | GET_MODE_BITSIZE (GET_MODE (x))))))))) | |
ba2e110c RK |
802 | { |
803 | /* ?? If we don't know OLDMODE, we have to assume here that | |
804 | X does not need sign- or zero-extension. This may not be | |
805 | the case, but it's the best we can do. */ | |
481683e1 | 806 | if (CONST_INT_P (x) && oldmode != VOIDmode |
ba2e110c RK |
807 | && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode)) |
808 | { | |
809 | HOST_WIDE_INT val = INTVAL (x); | |
810 | int width = GET_MODE_BITSIZE (oldmode); | |
811 | ||
812 | /* We must sign or zero-extend in this case. Start by | |
813 | zero-extending, then sign extend if we need to. */ | |
814 | val &= ((HOST_WIDE_INT) 1 << width) - 1; | |
815 | if (! unsignedp | |
816 | && (val & ((HOST_WIDE_INT) 1 << (width - 1)))) | |
817 | val |= (HOST_WIDE_INT) (-1) << width; | |
818 | ||
2496c7bd | 819 | return gen_int_mode (val, mode); |
ba2e110c RK |
820 | } |
821 | ||
822 | return gen_lowpart (mode, x); | |
823 | } | |
bbf6f052 | 824 | |
ebe75517 JH |
825 | /* Converting an integer constant into MODE is always equivalent to a |
826 | subreg operation. */ | |
827 | if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode) | |
828 | { | |
5b0264cb | 829 | gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode)); |
ebe75517 JH |
830 | return simplify_gen_subreg (mode, x, oldmode, 0); |
831 | } | |
832 | ||
bbf6f052 RK |
833 | temp = gen_reg_rtx (mode); |
834 | convert_move (temp, x, unsignedp); | |
835 | return temp; | |
836 | } | |
837 | \f | |
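A small editorial sketch (not part of expr.c) of how the two conversion entry points above are used; example_widen_to_word and example_widen_into are made-up names, and word_mode/QImode are the usual GCC machine modes:

static rtx
example_widen_to_word (rtx x, int unsignedp)
{
  /* Widen the QImode value X to a full word.  With UNSIGNEDP nonzero the
     conversion zero-extends, otherwise it sign-extends; the result may be
     X itself, a converted constant, or a fresh pseudo.  */
  return convert_modes (word_mode, QImode, x, unsignedp);
}

static void
example_widen_into (rtx word_reg, rtx x, int unsignedp)
{
  /* The in-place variant: emit whatever insns are needed so that the
     existing word_mode register WORD_REG ends up holding X, converted.  */
  convert_move (word_reg, x, unsignedp);
}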
cf5124f6 RS |
838 | /* STORE_MAX_PIECES is the number of bytes at a time that we can |
839 | store efficiently. Due to internal GCC limitations, this is | |
840 | MOVE_MAX_PIECES limited by the number of bytes GCC can represent | |
841 | for an immediate constant. */ | |
842 | ||
843 | #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT)) | |
844 | ||
8fd3cf4e JJ |
845 | /* Determine whether the LEN bytes can be moved by using several move |
846 | instructions. Return nonzero if a call to move_by_pieces should | |
847 | succeed. */ | |
848 | ||
849 | int | |
502b8322 AJ |
850 | can_move_by_pieces (unsigned HOST_WIDE_INT len, |
851 | unsigned int align ATTRIBUTE_UNUSED) | |
8fd3cf4e JJ |
852 | { |
853 | return MOVE_BY_PIECES_P (len, align); | |
854 | } | |
855 | ||
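An editorial sketch (not part of expr.c) of the caller-side protocol for can_move_by_pieces and move_by_pieces, roughly what a memcpy/mempcpy builtin expander does; the function name and its MEM arguments are hypothetical:

static rtx
example_expand_mempcpy (rtx dest_mem, rtx src_mem,
                        unsigned HOST_WIDE_INT len, unsigned int align)
{
  /* Commit to the inline expansion only when the heuristic agrees.
     ENDP == 1 asks move_by_pieces to hand back the mempcpy-style
     "one past the last byte stored" address.  */
  if (can_move_by_pieces (len, align))
    return move_by_pieces (dest_mem, src_mem, len, align, 1);
  return NULL_RTX;
}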
21d93687 | 856 | /* Generate several move instructions to copy LEN bytes from block FROM to |
ad76cef8 | 857 | block TO. (These are MEM rtx's with BLKmode). |
566aa174 | 858 | |
21d93687 RK |
859 | If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is |
860 | used to push FROM to the stack. | |
566aa174 | 861 | |
8fd3cf4e | 862 | ALIGN is maximum stack alignment we can assume. |
bbf6f052 | 863 | |
8fd3cf4e JJ |
864 | If ENDP is 0 return TO, if ENDP is 1 return memory at the end a la |
865 | mempcpy, and if ENDP is 2 return memory at the end minus one byte a la |
866 | stpcpy. */ |
867 | ||
868 | rtx | |
502b8322 AJ |
869 | move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len, |
870 | unsigned int align, int endp) | |
bbf6f052 | 871 | { |
b0f43ca0 | 872 | struct move_by_pieces_d data; |
d4ebfa65 BE |
873 | enum machine_mode to_addr_mode, from_addr_mode |
874 | = targetm.addr_space.address_mode (MEM_ADDR_SPACE (from)); | |
566aa174 | 875 | rtx to_addr, from_addr = XEXP (from, 0); |
770ae6cc | 876 | unsigned int max_size = MOVE_MAX_PIECES + 1; |
fbe1758d AM |
877 | enum machine_mode mode = VOIDmode, tmode; |
878 | enum insn_code icode; | |
bbf6f052 | 879 | |
f26aca6d DD |
880 | align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from)); |
881 | ||
bbf6f052 | 882 | data.offset = 0; |
bbf6f052 | 883 | data.from_addr = from_addr; |
566aa174 JH |
884 | if (to) |
885 | { | |
d4ebfa65 | 886 | to_addr_mode = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to)); |
566aa174 JH |
887 | to_addr = XEXP (to, 0); |
888 | data.to = to; | |
889 | data.autinc_to | |
890 | = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC | |
891 | || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC); | |
892 | data.reverse | |
893 | = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC); | |
894 | } | |
895 | else | |
896 | { | |
d4ebfa65 | 897 | to_addr_mode = VOIDmode; |
566aa174 JH |
898 | to_addr = NULL_RTX; |
899 | data.to = NULL_RTX; | |
900 | data.autinc_to = 1; | |
901 | #ifdef STACK_GROWS_DOWNWARD | |
902 | data.reverse = 1; | |
903 | #else | |
904 | data.reverse = 0; | |
905 | #endif | |
906 | } | |
907 | data.to_addr = to_addr; | |
bbf6f052 | 908 | data.from = from; |
bbf6f052 RK |
909 | data.autinc_from |
910 | = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC | |
911 | || GET_CODE (from_addr) == POST_INC | |
912 | || GET_CODE (from_addr) == POST_DEC); | |
913 | ||
914 | data.explicit_inc_from = 0; | |
915 | data.explicit_inc_to = 0; | |
bbf6f052 RK |
916 | if (data.reverse) data.offset = len; |
917 | data.len = len; | |
918 | ||
919 | /* If copying requires more than two move insns, | |
920 | copy addresses to registers (to make displacements shorter) | |
921 | and use post-increment if available. */ | |
922 | if (!(data.autinc_from && data.autinc_to) | |
45d78e7f | 923 | && move_by_pieces_ninsns (len, align, max_size) > 2) |
bbf6f052 | 924 | { |
3a94c984 | 925 | /* Find the mode of the largest move... */ |
fbe1758d AM |
926 | for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); |
927 | tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode)) | |
928 | if (GET_MODE_SIZE (tmode) < max_size) | |
929 | mode = tmode; | |
930 | ||
931 | if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from) | |
bbf6f052 | 932 | { |
d4ebfa65 BE |
933 | data.from_addr = copy_to_mode_reg (from_addr_mode, |
934 | plus_constant (from_addr, len)); | |
bbf6f052 RK |
935 | data.autinc_from = 1; |
936 | data.explicit_inc_from = -1; | |
937 | } | |
fbe1758d | 938 | if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from) |
bbf6f052 | 939 | { |
d4ebfa65 | 940 | data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr); |
bbf6f052 RK |
941 | data.autinc_from = 1; |
942 | data.explicit_inc_from = 1; | |
943 | } | |
bbf6f052 | 944 | if (!data.autinc_from && CONSTANT_P (from_addr)) |
d4ebfa65 | 945 | data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr); |
fbe1758d | 946 | if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to) |
bbf6f052 | 947 | { |
d4ebfa65 BE |
948 | data.to_addr = copy_to_mode_reg (to_addr_mode, |
949 | plus_constant (to_addr, len)); | |
bbf6f052 RK |
950 | data.autinc_to = 1; |
951 | data.explicit_inc_to = -1; | |
952 | } | |
fbe1758d | 953 | if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to) |
bbf6f052 | 954 | { |
d4ebfa65 | 955 | data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr); |
bbf6f052 RK |
956 | data.autinc_to = 1; |
957 | data.explicit_inc_to = 1; | |
958 | } | |
bbf6f052 | 959 | if (!data.autinc_to && CONSTANT_P (to_addr)) |
d4ebfa65 | 960 | data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr); |
bbf6f052 RK |
961 | } |
962 | ||
f64d6991 DE |
963 | tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1); |
964 | if (align >= GET_MODE_ALIGNMENT (tmode)) | |
965 | align = GET_MODE_ALIGNMENT (tmode); | |
966 | else | |
967 | { | |
968 | enum machine_mode xmode; | |
969 | ||
970 | for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode; | |
971 | tmode != VOIDmode; | |
972 | xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode)) | |
973 | if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES | |
974 | || SLOW_UNALIGNED_ACCESS (tmode, align)) | |
975 | break; | |
976 | ||
977 | align = MAX (align, GET_MODE_ALIGNMENT (xmode)); | |
978 | } | |
bbf6f052 RK |
979 | |
980 | /* First move what we can in the largest integer mode, then go to | |
981 | successively smaller modes. */ | |
982 | ||
983 | while (max_size > 1) | |
984 | { | |
e7c33f54 RK |
985 | for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); |
986 | tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode)) | |
987 | if (GET_MODE_SIZE (tmode) < max_size) | |
bbf6f052 RK |
988 | mode = tmode; |
989 | ||
990 | if (mode == VOIDmode) | |
991 | break; | |
992 | ||
166cdb08 | 993 | icode = optab_handler (mov_optab, mode)->insn_code; |
19caa751 | 994 | if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode)) |
bbf6f052 RK |
995 | move_by_pieces_1 (GEN_FCN (icode), mode, &data); |
996 | ||
997 | max_size = GET_MODE_SIZE (mode); | |
998 | } | |
999 | ||
1000 | /* The code above should have handled everything. */ | |
5b0264cb | 1001 | gcc_assert (!data.len); |
8fd3cf4e JJ |
1002 | |
1003 | if (endp) | |
1004 | { | |
1005 | rtx to1; | |
1006 | ||
5b0264cb | 1007 | gcc_assert (!data.reverse); |
8fd3cf4e JJ |
1008 | if (data.autinc_to) |
1009 | { | |
1010 | if (endp == 2) | |
1011 | { | |
1012 | if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0) | |
1013 | emit_insn (gen_add2_insn (data.to_addr, constm1_rtx)); | |
1014 | else | |
d4ebfa65 BE |
1015 | data.to_addr = copy_to_mode_reg (to_addr_mode, |
1016 | plus_constant (data.to_addr, | |
8fd3cf4e JJ |
1017 | -1)); |
1018 | } | |
1019 | to1 = adjust_automodify_address (data.to, QImode, data.to_addr, | |
1020 | data.offset); | |
1021 | } | |
1022 | else | |
1023 | { | |
1024 | if (endp == 2) | |
1025 | --data.offset; | |
1026 | to1 = adjust_address (data.to, QImode, data.offset); | |
1027 | } | |
1028 | return to1; | |
1029 | } | |
1030 | else | |
1031 | return data.to; | |
bbf6f052 RK |
1032 | } |
1033 | ||
1034 | /* Return number of insns required to move L bytes by pieces. | |
f1eaaf73 | 1035 | ALIGN (in bits) is maximum alignment we can assume. */ |
bbf6f052 | 1036 | |
3bdf5ad1 | 1037 | static unsigned HOST_WIDE_INT |
45d78e7f JJ |
1038 | move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align, |
1039 | unsigned int max_size) | |
bbf6f052 | 1040 | { |
3bdf5ad1 | 1041 | unsigned HOST_WIDE_INT n_insns = 0; |
f64d6991 | 1042 | enum machine_mode tmode; |
bbf6f052 | 1043 | |
f64d6991 DE |
1044 | tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1); |
1045 | if (align >= GET_MODE_ALIGNMENT (tmode)) | |
1046 | align = GET_MODE_ALIGNMENT (tmode); | |
1047 | else | |
1048 | { | |
1049 | enum machine_mode tmode, xmode; | |
1050 | ||
1051 | for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode; | |
1052 | tmode != VOIDmode; | |
1053 | xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode)) | |
1054 | if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES | |
1055 | || SLOW_UNALIGNED_ACCESS (tmode, align)) | |
1056 | break; | |
1057 | ||
1058 | align = MAX (align, GET_MODE_ALIGNMENT (xmode)); | |
1059 | } | |
bbf6f052 RK |
1060 | |
1061 | while (max_size > 1) | |
1062 | { | |
f64d6991 | 1063 | enum machine_mode mode = VOIDmode; |
bbf6f052 RK |
1064 | enum insn_code icode; |
1065 | ||
e7c33f54 RK |
1066 | for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); |
1067 | tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode)) | |
1068 | if (GET_MODE_SIZE (tmode) < max_size) | |
bbf6f052 RK |
1069 | mode = tmode; |
1070 | ||
1071 | if (mode == VOIDmode) | |
1072 | break; | |
1073 | ||
166cdb08 | 1074 | icode = optab_handler (mov_optab, mode)->insn_code; |
19caa751 | 1075 | if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode)) |
bbf6f052 RK |
1076 | n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode); |
1077 | ||
1078 | max_size = GET_MODE_SIZE (mode); | |
1079 | } | |
1080 | ||
5b0264cb | 1081 | gcc_assert (!l); |
bbf6f052 RK |
1082 | return n_insns; |
1083 | } | |
1084 | ||
1085 | /* Subroutine of move_by_pieces. Move as many bytes as appropriate | |
1086 | with move instructions for mode MODE. GENFUN is the gen_... function | |
1087 | to make a move insn for that mode. DATA has all the other info. */ | |
1088 | ||
1089 | static void | |
502b8322 | 1090 | move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode, |
b0f43ca0 | 1091 | struct move_by_pieces_d *data) |
bbf6f052 | 1092 | { |
3bdf5ad1 | 1093 | unsigned int size = GET_MODE_SIZE (mode); |
ae0ed63a | 1094 | rtx to1 = NULL_RTX, from1; |
bbf6f052 RK |
1095 | |
1096 | while (data->len >= size) | |
1097 | { | |
3bdf5ad1 RK |
1098 | if (data->reverse) |
1099 | data->offset -= size; | |
1100 | ||
566aa174 | 1101 | if (data->to) |
3bdf5ad1 | 1102 | { |
566aa174 | 1103 | if (data->autinc_to) |
630036c6 JJ |
1104 | to1 = adjust_automodify_address (data->to, mode, data->to_addr, |
1105 | data->offset); | |
566aa174 | 1106 | else |
f4ef873c | 1107 | to1 = adjust_address (data->to, mode, data->offset); |
3bdf5ad1 | 1108 | } |
3bdf5ad1 RK |
1109 | |
1110 | if (data->autinc_from) | |
630036c6 JJ |
1111 | from1 = adjust_automodify_address (data->from, mode, data->from_addr, |
1112 | data->offset); | |
3bdf5ad1 | 1113 | else |
f4ef873c | 1114 | from1 = adjust_address (data->from, mode, data->offset); |
bbf6f052 | 1115 | |
940da324 | 1116 | if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0) |
3d709fd3 RH |
1117 | emit_insn (gen_add2_insn (data->to_addr, |
1118 | GEN_INT (-(HOST_WIDE_INT)size))); | |
940da324 | 1119 | if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0) |
3d709fd3 RH |
1120 | emit_insn (gen_add2_insn (data->from_addr, |
1121 | GEN_INT (-(HOST_WIDE_INT)size))); | |
bbf6f052 | 1122 | |
566aa174 JH |
1123 | if (data->to) |
1124 | emit_insn ((*genfun) (to1, from1)); | |
1125 | else | |
21d93687 RK |
1126 | { |
1127 | #ifdef PUSH_ROUNDING | |
1128 | emit_single_push_insn (mode, from1, NULL); | |
1129 | #else | |
5b0264cb | 1130 | gcc_unreachable (); |
21d93687 RK |
1131 | #endif |
1132 | } | |
3bdf5ad1 | 1133 | |
940da324 | 1134 | if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0) |
906c4e36 | 1135 | emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size))); |
940da324 | 1136 | if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0) |
906c4e36 | 1137 | emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size))); |
bbf6f052 | 1138 | |
3bdf5ad1 RK |
1139 | if (! data->reverse) |
1140 | data->offset += size; | |
bbf6f052 RK |
1141 | |
1142 | data->len -= size; | |
1143 | } | |
1144 | } | |
1145 | \f | |
4ca79136 RH |
1146 | /* Emit code to move a block Y to a block X. This may be done with |
1147 | string-move instructions, with multiple scalar move instructions, | |
1148 | or with a library call. | |
bbf6f052 | 1149 | |
4ca79136 | 1150 | Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode. |
bbf6f052 | 1151 | SIZE is an rtx that says how long they are. |
19caa751 | 1152 | ALIGN is the maximum alignment we can assume they have. |
44bb111a | 1153 | METHOD describes what kind of copy this is, and what mechanisms may be used. |
bbf6f052 | 1154 | |
e9a25f70 JL |
1155 | Return the address of the new block, if memcpy is called and returns it, |
1156 | 0 otherwise. */ | |
1157 | ||
1158 | rtx | |
079a182e JH |
1159 | emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method, |
1160 | unsigned int expected_align, HOST_WIDE_INT expected_size) | |
bbf6f052 | 1161 | { |
44bb111a | 1162 | bool may_use_call; |
e9a25f70 | 1163 | rtx retval = 0; |
44bb111a RH |
1164 | unsigned int align; |
1165 | ||
1166 | switch (method) | |
1167 | { | |
1168 | case BLOCK_OP_NORMAL: | |
8148fe65 | 1169 | case BLOCK_OP_TAILCALL: |
44bb111a RH |
1170 | may_use_call = true; |
1171 | break; | |
1172 | ||
1173 | case BLOCK_OP_CALL_PARM: | |
1174 | may_use_call = block_move_libcall_safe_for_call_parm (); | |
1175 | ||
1176 | /* Make inhibit_defer_pop nonzero around the library call | |
1177 | to force it to pop the arguments right away. */ | |
1178 | NO_DEFER_POP; | |
1179 | break; | |
1180 | ||
1181 | case BLOCK_OP_NO_LIBCALL: | |
1182 | may_use_call = false; | |
1183 | break; | |
1184 | ||
1185 | default: | |
5b0264cb | 1186 | gcc_unreachable (); |
44bb111a RH |
1187 | } |
1188 | ||
1189 | align = MIN (MEM_ALIGN (x), MEM_ALIGN (y)); | |
e100f395 | 1190 | gcc_assert (align >= BITS_PER_UNIT); |
e9a25f70 | 1191 | |
5b0264cb NS |
1192 | gcc_assert (MEM_P (x)); |
1193 | gcc_assert (MEM_P (y)); | |
1194 | gcc_assert (size); | |
bbf6f052 | 1195 | |
82c82743 RH |
1196 | /* Make sure we've got BLKmode addresses; store_one_arg can decide that |
1197 | block copy is more efficient for other large modes, e.g. DCmode. */ | |
1198 | x = adjust_address (x, BLKmode, 0); | |
1199 | y = adjust_address (y, BLKmode, 0); | |
1200 | ||
cb38fd88 RH |
1201 | /* Set MEM_SIZE as appropriate for this block copy. The main place this |
1202 | can be incorrect is coming from __builtin_memcpy. */ | |
481683e1 | 1203 | if (CONST_INT_P (size)) |
cb38fd88 | 1204 | { |
6972c506 JJ |
1205 | if (INTVAL (size) == 0) |
1206 | return 0; | |
1207 | ||
cb38fd88 RH |
1208 | x = shallow_copy_rtx (x); |
1209 | y = shallow_copy_rtx (y); | |
1210 | set_mem_size (x, size); | |
1211 | set_mem_size (y, size); | |
1212 | } | |
1213 | ||
481683e1 | 1214 | if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align)) |
8fd3cf4e | 1215 | move_by_pieces (x, y, INTVAL (size), align, 0); |
079a182e JH |
1216 | else if (emit_block_move_via_movmem (x, y, size, align, |
1217 | expected_align, expected_size)) | |
4ca79136 | 1218 | ; |
09e881c9 BE |
1219 | else if (may_use_call |
1220 | && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)) | |
1221 | && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y))) | |
8148fe65 JJ |
1222 | retval = emit_block_move_via_libcall (x, y, size, |
1223 | method == BLOCK_OP_TAILCALL); | |
44bb111a RH |
1224 | else |
1225 | emit_block_move_via_loop (x, y, size, align); | |
1226 | ||
1227 | if (method == BLOCK_OP_CALL_PARM) | |
1228 | OK_DEFER_POP; | |
266007a7 | 1229 | |
4ca79136 RH |
1230 | return retval; |
1231 | } | |
266007a7 | 1232 | |
079a182e JH |
1233 | rtx |
1234 | emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method) | |
1235 | { | |
1236 | return emit_block_move_hints (x, y, size, method, 0, -1); | |
1237 | } | |
1238 | ||
502b8322 | 1239 | /* A subroutine of emit_block_move. Returns true if calling the |
44bb111a RH |
1240 | block move libcall will not clobber any parameters which may have |
1241 | already been placed on the stack. */ | |
1242 | ||
1243 | static bool | |
502b8322 | 1244 | block_move_libcall_safe_for_call_parm (void) |
44bb111a | 1245 | { |
81464b2c KT |
1246 | #if defined (REG_PARM_STACK_SPACE) |
1247 | tree fn; | |
1248 | #endif | |
1249 | ||
a357a6d4 | 1250 | /* If arguments are pushed on the stack, then they're safe. */ |
44bb111a RH |
1251 | if (PUSH_ARGS) |
1252 | return true; | |
44bb111a | 1253 | |
450b1728 | 1254 | /* If registers go on the stack anyway, any argument is sure to clobber |
a357a6d4 | 1255 | an outgoing argument. */ |
ac294f0b | 1256 | #if defined (REG_PARM_STACK_SPACE) |
81464b2c | 1257 | fn = emit_block_move_libcall_fn (false); |
5a905a2b JJ |
1258 | /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't |
1259 | depend on its argument. */ | |
1260 | (void) fn; | |
81464b2c KT |
1261 | if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn))) |
1262 | && REG_PARM_STACK_SPACE (fn) != 0) | |
1263 | return false; | |
44bb111a | 1264 | #endif |
44bb111a | 1265 | |
a357a6d4 GK |
1266 | /* If any argument goes in memory, then it might clobber an outgoing |
1267 | argument. */ | |
1268 | { | |
1269 | CUMULATIVE_ARGS args_so_far; | |
1270 | tree fn, arg; | |
450b1728 | 1271 | |
a357a6d4 | 1272 | fn = emit_block_move_libcall_fn (false); |
0f6937fe | 1273 | INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3); |
450b1728 | 1274 | |
a357a6d4 GK |
1275 | arg = TYPE_ARG_TYPES (TREE_TYPE (fn)); |
1276 | for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg)) | |
1277 | { | |
1278 | enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg)); | |
3c07301f NF |
1279 | rtx tmp = targetm.calls.function_arg (&args_so_far, mode, |
1280 | NULL_TREE, true); | |
a357a6d4 | 1281 | if (!tmp || !REG_P (tmp)) |
44bb111a | 1282 | return false; |
78a52f11 | 1283 | if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1)) |
a357a6d4 | 1284 | return false; |
3c07301f NF |
1285 | targetm.calls.function_arg_advance (&args_so_far, mode, |
1286 | NULL_TREE, true); | |
a357a6d4 GK |
1287 | } |
1288 | } | |
1289 | return true; | |
44bb111a RH |
1290 | } |
1291 | ||
70128ad9 | 1292 | /* A subroutine of emit_block_move. Expand a movmem pattern; |
4ca79136 | 1293 | return true if successful. */ |
3ef1eef4 | 1294 | |
4ca79136 | 1295 | static bool |
079a182e JH |
1296 | emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align, |
1297 | unsigned int expected_align, HOST_WIDE_INT expected_size) | |
4ca79136 | 1298 | { |
4ca79136 | 1299 | rtx opalign = GEN_INT (align / BITS_PER_UNIT); |
a5e9c810 | 1300 | int save_volatile_ok = volatile_ok; |
4ca79136 | 1301 | enum machine_mode mode; |
266007a7 | 1302 | |
079a182e JH |
1303 | if (expected_align < align) |
1304 | expected_align = align; | |
1305 | ||
4ca79136 RH |
1306 | /* Since this is a move insn, we don't care about volatility. */ |
1307 | volatile_ok = 1; | |
1308 | ||
ee960939 OH |
1309 | /* Try the most limited insn first, because there's no point |
1310 | including more than one in the machine description unless | |
1311 | the more limited one has some advantage. */ | |
1312 | ||
4ca79136 RH |
1313 | for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode; |
1314 | mode = GET_MODE_WIDER_MODE (mode)) | |
1315 | { | |
70128ad9 | 1316 | enum insn_code code = movmem_optab[(int) mode]; |
4ca79136 RH |
1317 | insn_operand_predicate_fn pred; |
1318 | ||
1319 | if (code != CODE_FOR_nothing | |
1320 | /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT | |
1321 | here because if SIZE is less than the mode mask, as it is | |
1322 | returned by the macro, it will definitely be less than the | |
1323 | actual mode mask. */ | |
481683e1 | 1324 | && ((CONST_INT_P (size) |
4ca79136 RH |
1325 | && ((unsigned HOST_WIDE_INT) INTVAL (size) |
1326 | <= (GET_MODE_MASK (mode) >> 1))) | |
1327 | || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD) | |
1328 | && ((pred = insn_data[(int) code].operand[0].predicate) == 0 | |
1329 | || (*pred) (x, BLKmode)) | |
1330 | && ((pred = insn_data[(int) code].operand[1].predicate) == 0 | |
1331 | || (*pred) (y, BLKmode)) | |
1332 | && ((pred = insn_data[(int) code].operand[3].predicate) == 0 | |
1333 | || (*pred) (opalign, VOIDmode))) | |
1334 | { | |
1335 | rtx op2; | |
1336 | rtx last = get_last_insn (); | |
1337 | rtx pat; | |
1338 | ||
1339 | op2 = convert_to_mode (mode, size, 1); | |
1340 | pred = insn_data[(int) code].operand[2].predicate; | |
1341 | if (pred != 0 && ! (*pred) (op2, mode)) | |
1342 | op2 = copy_to_mode_reg (mode, op2); | |
1343 | ||
1344 | /* ??? When called via emit_block_move_for_call, it'd be | |
1345 | nice if there were some way to inform the backend, so | |
1346 | that it doesn't fail the expansion because it thinks | |
1347 | emitting the libcall would be more efficient. */ | |
1348 | ||
079a182e JH |
1349 | if (insn_data[(int) code].n_operands == 4) |
1350 | pat = GEN_FCN ((int) code) (x, y, op2, opalign); | |
1351 | else | |
1352 | pat = GEN_FCN ((int) code) (x, y, op2, opalign, | |
9946ca2d RA |
1353 | GEN_INT (expected_align |
1354 | / BITS_PER_UNIT), | |
079a182e | 1355 | GEN_INT (expected_size)); |
4ca79136 RH |
1356 | if (pat) |
1357 | { | |
1358 | emit_insn (pat); | |
a5e9c810 | 1359 | volatile_ok = save_volatile_ok; |
4ca79136 | 1360 | return true; |
bbf6f052 | 1361 | } |
4ca79136 RH |
1362 | else |
1363 | delete_insns_since (last); | |
bbf6f052 | 1364 | } |
4ca79136 | 1365 | } |
bbf6f052 | 1366 | |
a5e9c810 | 1367 | volatile_ok = save_volatile_ok; |
4ca79136 RH |
1368 | return false; |
1369 | } | |
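/* The loop above walks the integer modes from narrowest to widest and
   tries each movmem_optab entry whose operand predicates accept X, Y and
   the alignment, provided the mode is wide enough to represent SIZE.
   When a pattern fails to expand, delete_insns_since removes anything it
   emitted, so the caller can still fall back to the libcall or to the
   explicit loop.  */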
3ef1eef4 | 1370 | |
8f99553f | 1371 | /* A subroutine of emit_block_move. Expand a call to memcpy. |
4ca79136 | 1372 | Return the return value from memcpy, 0 otherwise. */ |
4bc973ae | 1373 | |
8c996513 | 1374 | rtx |
8148fe65 | 1375 | emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall) |
4ca79136 | 1376 | { |
ee960939 | 1377 | rtx dst_addr, src_addr; |
5039610b | 1378 | tree call_expr, fn, src_tree, dst_tree, size_tree; |
4ca79136 RH |
1379 | enum machine_mode size_mode; |
1380 | rtx retval; | |
4bc973ae | 1381 | |
ad76cef8 PB |
1382 | /* Emit code to copy the addresses of DST and SRC and SIZE into new |
1383 | pseudos. We can then place those new pseudos into a VAR_DECL and | |
1384 | use them later. */ | |
ee960939 OH |
1385 | |
1386 | dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0)); | |
1387 | src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0)); | |
4ca79136 | 1388 | |
ee960939 OH |
1389 | dst_addr = convert_memory_address (ptr_mode, dst_addr); |
1390 | src_addr = convert_memory_address (ptr_mode, src_addr); | |
ee960939 OH |
1391 | |
1392 | dst_tree = make_tree (ptr_type_node, dst_addr); | |
1393 | src_tree = make_tree (ptr_type_node, src_addr); | |
4ca79136 | 1394 | |
8f99553f | 1395 | size_mode = TYPE_MODE (sizetype); |
ee960939 | 1396 | |
4ca79136 RH |
1397 | size = convert_to_mode (size_mode, size, 1); |
1398 | size = copy_to_mode_reg (size_mode, size); | |
1399 | ||
1400 | /* It is incorrect to use the libcall calling conventions to call | |
1401 | memcpy in this context. This could be a user call to memcpy and | |
1402 | the user may wish to examine the return value from memcpy. For | |
1403 | targets where libcalls and normal calls have different conventions | |
8f99553f | 1404 | for returning pointers, we could end up generating incorrect code. */ |
4ca79136 | 1405 | |
8f99553f | 1406 | size_tree = make_tree (sizetype, size); |
4ca79136 RH |
1407 | |
1408 | fn = emit_block_move_libcall_fn (true); | |
5039610b | 1409 | call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree); |
8148fe65 | 1410 | CALL_EXPR_TAILCALL (call_expr) = tailcall; |
4ca79136 | 1411 | |
84217346 | 1412 | retval = expand_normal (call_expr); |
4ca79136 | 1413 | |
8f99553f | 1414 | return retval; |
4ca79136 | 1415 | } |
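/* Note that a real CALL_EXPR to the memcpy declaration is built and
   expanded here instead of using emit_library_call: the call then
   follows the normal calling conventions and its return value (the
   destination pointer) remains available to the caller.  */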
52cf7115 | 1416 | |
4ca79136 RH |
1417 | /* A subroutine of emit_block_move_via_libcall. Create the tree node |
1418 | for the function we use for block copies. The first time FOR_CALL | |
1419 | is true, we call assemble_external. */ | |
52cf7115 | 1420 | |
4ca79136 RH |
1421 | static GTY(()) tree block_move_fn; |
1422 | ||
9661b15f | 1423 | void |
502b8322 | 1424 | init_block_move_fn (const char *asmspec) |
4ca79136 | 1425 | { |
9661b15f | 1426 | if (!block_move_fn) |
4ca79136 | 1427 | { |
8fd3cf4e | 1428 | tree args, fn; |
9661b15f | 1429 | |
8f99553f JM |
1430 | fn = get_identifier ("memcpy"); |
1431 | args = build_function_type_list (ptr_type_node, ptr_type_node, | |
1432 | const_ptr_type_node, sizetype, | |
1433 | NULL_TREE); | |
52cf7115 | 1434 | |
c2255bc4 | 1435 | fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args); |
4ca79136 RH |
1436 | DECL_EXTERNAL (fn) = 1; |
1437 | TREE_PUBLIC (fn) = 1; | |
1438 | DECL_ARTIFICIAL (fn) = 1; | |
1439 | TREE_NOTHROW (fn) = 1; | |
5b5cba1f JM |
1440 | DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT; |
1441 | DECL_VISIBILITY_SPECIFIED (fn) = 1; | |
66c60e67 | 1442 | |
4ca79136 | 1443 | block_move_fn = fn; |
bbf6f052 | 1444 | } |
e9a25f70 | 1445 | |
9661b15f | 1446 | if (asmspec) |
0e6df31e | 1447 | set_user_assembler_name (block_move_fn, asmspec); |
9661b15f JJ |
1448 | } |
1449 | ||
1450 | static tree | |
502b8322 | 1451 | emit_block_move_libcall_fn (int for_call) |
9661b15f JJ |
1452 | { |
1453 | static bool emitted_extern; | |
1454 | ||
1455 | if (!block_move_fn) | |
1456 | init_block_move_fn (NULL); | |
1457 | ||
4ca79136 RH |
1458 | if (for_call && !emitted_extern) |
1459 | { | |
1460 | emitted_extern = true; | |
0e6df31e | 1461 | make_decl_rtl (block_move_fn); |
9661b15f | 1462 | assemble_external (block_move_fn); |
4ca79136 RH |
1463 | } |
1464 | ||
9661b15f | 1465 | return block_move_fn; |
bbf6f052 | 1466 | } |
44bb111a RH |
1467 | |
1468 | /* A subroutine of emit_block_move. Copy the data via an explicit | |
1469 | loop. This is used only when libcalls are forbidden. */ | |
1470 | /* ??? It'd be nice to copy in hunks larger than QImode. */ | |
1471 | ||
1472 | static void | |
502b8322 AJ |
1473 | emit_block_move_via_loop (rtx x, rtx y, rtx size, |
1474 | unsigned int align ATTRIBUTE_UNUSED) | |
44bb111a RH |
1475 | { |
1476 | rtx cmp_label, top_label, iter, x_addr, y_addr, tmp; | |
d4ebfa65 BE |
1477 | enum machine_mode x_addr_mode |
1478 | = targetm.addr_space.address_mode (MEM_ADDR_SPACE (x)); | |
1479 | enum machine_mode y_addr_mode | |
1480 | = targetm.addr_space.address_mode (MEM_ADDR_SPACE (y)); | |
44bb111a RH |
1481 | enum machine_mode iter_mode; |
1482 | ||
1483 | iter_mode = GET_MODE (size); | |
1484 | if (iter_mode == VOIDmode) | |
1485 | iter_mode = word_mode; | |
1486 | ||
1487 | top_label = gen_label_rtx (); | |
1488 | cmp_label = gen_label_rtx (); | |
1489 | iter = gen_reg_rtx (iter_mode); | |
1490 | ||
1491 | emit_move_insn (iter, const0_rtx); | |
1492 | ||
1493 | x_addr = force_operand (XEXP (x, 0), NULL_RTX); | |
1494 | y_addr = force_operand (XEXP (y, 0), NULL_RTX); | |
1495 | do_pending_stack_adjust (); | |
1496 | ||
44bb111a RH |
1497 | emit_jump (cmp_label); |
1498 | emit_label (top_label); | |
1499 | ||
d4ebfa65 BE |
1500 | tmp = convert_modes (x_addr_mode, iter_mode, iter, true); |
1501 | x_addr = gen_rtx_PLUS (x_addr_mode, x_addr, tmp); | |
1502 | ||
1503 | if (x_addr_mode != y_addr_mode) | |
1504 | tmp = convert_modes (y_addr_mode, iter_mode, iter, true); | |
1505 | y_addr = gen_rtx_PLUS (y_addr_mode, y_addr, tmp); | |
1506 | ||
44bb111a RH |
1507 | x = change_address (x, QImode, x_addr); |
1508 | y = change_address (y, QImode, y_addr); | |
1509 | ||
1510 | emit_move_insn (x, y); | |
1511 | ||
1512 | tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter, | |
1513 | true, OPTAB_LIB_WIDEN); | |
1514 | if (tmp != iter) | |
1515 | emit_move_insn (iter, tmp); | |
1516 | ||
44bb111a RH |
1517 | emit_label (cmp_label); |
1518 | ||
1519 | emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode, | |
1520 | true, top_label); | |
44bb111a | 1521 | } |
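/* The RTL emitted above corresponds roughly to this sketch (pseudo-C,
   for illustration only):

     iter = 0;
     goto cmp;
   top:
     *((char *) x + iter) = *((char *) y + iter);    -- one QImode move
     iter += 1;
   cmp:
     if (iter < size) goto top;

   i.e. the block is copied one byte per iteration, as the ??? comment
   before the function notes.  */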
bbf6f052 RK |
1522 | \f |
1523 | /* Copy all or part of a value X into registers starting at REGNO. | |
1524 | The number of registers to be filled is NREGS. */ | |
1525 | ||
1526 | void | |
502b8322 | 1527 | move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode) |
bbf6f052 RK |
1528 | { |
1529 | int i; | |
381127e8 | 1530 | #ifdef HAVE_load_multiple |
3a94c984 | 1531 | rtx pat; |
381127e8 RL |
1532 | rtx last; |
1533 | #endif | |
bbf6f052 | 1534 | |
72bb9717 RK |
1535 | if (nregs == 0) |
1536 | return; | |
1537 | ||
bbf6f052 RK |
1538 | if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x)) |
1539 | x = validize_mem (force_const_mem (mode, x)); | |
1540 | ||
1541 | /* See if the machine can do this with a load multiple insn. */ | |
1542 | #ifdef HAVE_load_multiple | |
c3a02afe | 1543 | if (HAVE_load_multiple) |
bbf6f052 | 1544 | { |
c3a02afe | 1545 | last = get_last_insn (); |
38a448ca | 1546 | pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x, |
c3a02afe RK |
1547 | GEN_INT (nregs)); |
1548 | if (pat) | |
1549 | { | |
1550 | emit_insn (pat); | |
1551 | return; | |
1552 | } | |
1553 | else | |
1554 | delete_insns_since (last); | |
bbf6f052 | 1555 | } |
bbf6f052 RK |
1556 | #endif |
1557 | ||
1558 | for (i = 0; i < nregs; i++) | |
38a448ca | 1559 | emit_move_insn (gen_rtx_REG (word_mode, regno + i), |
bbf6f052 RK |
1560 | operand_subword_force (x, i, mode)); |
1561 | } | |
1562 | ||
1563 | /* Copy all or part of a BLKmode value X out of registers starting at REGNO. | |
c6b97fac | 1564 | The number of registers to be filled is NREGS. */ |
0040593d | 1565 | |
bbf6f052 | 1566 | void |
502b8322 | 1567 | move_block_from_reg (int regno, rtx x, int nregs) |
bbf6f052 RK |
1568 | { |
1569 | int i; | |
bbf6f052 | 1570 | |
2954d7db RK |
1571 | if (nregs == 0) |
1572 | return; | |
1573 | ||
bbf6f052 RK |
1574 | /* See if the machine can do this with a store multiple insn. */ |
1575 | #ifdef HAVE_store_multiple | |
c3a02afe | 1576 | if (HAVE_store_multiple) |
bbf6f052 | 1577 | { |
c6b97fac AM |
1578 | rtx last = get_last_insn (); |
1579 | rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno), | |
1580 | GEN_INT (nregs)); | |
c3a02afe RK |
1581 | if (pat) |
1582 | { | |
1583 | emit_insn (pat); | |
1584 | return; | |
1585 | } | |
1586 | else | |
1587 | delete_insns_since (last); | |
bbf6f052 | 1588 | } |
bbf6f052 RK |
1589 | #endif |
1590 | ||
1591 | for (i = 0; i < nregs; i++) | |
1592 | { | |
1593 | rtx tem = operand_subword (x, i, 1, BLKmode); | |
1594 | ||
5b0264cb | 1595 | gcc_assert (tem); |
bbf6f052 | 1596 | |
38a448ca | 1597 | emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i)); |
bbf6f052 RK |
1598 | } |
1599 | } | |
1600 | ||
084a1106 JDA |
1601 | /* Generate a PARALLEL rtx for a new non-consecutive group of registers from |
1602 | ORIG, where ORIG is a non-consecutive group of registers represented by | |
1603 | a PARALLEL. The clone is identical to the original except in that the | |
1604 | original set of registers is replaced by a new set of pseudo registers. | |
1605 | The new set has the same modes as the original set. */ | |
1606 | ||
1607 | rtx | |
502b8322 | 1608 | gen_group_rtx (rtx orig) |
084a1106 JDA |
1609 | { |
1610 | int i, length; | |
1611 | rtx *tmps; | |
1612 | ||
5b0264cb | 1613 | gcc_assert (GET_CODE (orig) == PARALLEL); |
084a1106 JDA |
1614 | |
1615 | length = XVECLEN (orig, 0); | |
1b4572a8 | 1616 | tmps = XALLOCAVEC (rtx, length); |
084a1106 JDA |
1617 | |
1618 | /* Skip a NULL entry in first slot. */ | |
1619 | i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1; | |
1620 | ||
1621 | if (i) | |
1622 | tmps[0] = 0; | |
1623 | ||
1624 | for (; i < length; i++) | |
1625 | { | |
1626 | enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0)); | |
1627 | rtx offset = XEXP (XVECEXP (orig, 0, i), 1); | |
1628 | ||
1629 | tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset); | |
1630 | } | |
1631 | ||
1632 | return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps)); | |
1633 | } | |
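/* A group PARALLEL of this kind holds one (EXPR_LIST reg offset) element
   per piece, where the offset is the byte position of that piece within
   the whole value; a null register in slot 0 marks a value that also
   lives partly on the stack.  gen_group_rtx keeps the offsets and modes
   but replaces each hard register with a fresh pseudo.  */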
1634 | ||
27e29549 RH |
1635 | /* A subroutine of emit_group_load. Arguments as for emit_group_load, |
1636 | except that values are placed in TMPS[i], and must later be moved | |
daa956d0 | 1637 | into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */ |
fffa9c1d | 1638 | |
27e29549 RH |
1639 | static void |
1640 | emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize) | |
fffa9c1d | 1641 | { |
27e29549 | 1642 | rtx src; |
aac5cc16 | 1643 | int start, i; |
7ef7000b | 1644 | enum machine_mode m = GET_MODE (orig_src); |
fffa9c1d | 1645 | |
5b0264cb | 1646 | gcc_assert (GET_CODE (dst) == PARALLEL); |
fffa9c1d | 1647 | |
f2978871 AM |
1648 | if (m != VOIDmode |
1649 | && !SCALAR_INT_MODE_P (m) | |
1650 | && !MEM_P (orig_src) | |
1651 | && GET_CODE (orig_src) != CONCAT) | |
782fa603 AH |
1652 | { |
1653 | enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src)); | |
1654 | if (imode == BLKmode) | |
1655 | src = assign_stack_temp (GET_MODE (orig_src), ssize, 0); | |
1656 | else | |
1657 | src = gen_reg_rtx (imode); | |
1658 | if (imode != BLKmode) | |
1659 | src = gen_lowpart (GET_MODE (orig_src), src); | |
1660 | emit_move_insn (src, orig_src); | |
1661 | /* ...and back again. */ | |
1662 | if (imode != BLKmode) | |
1663 | src = gen_lowpart (imode, src); | |
27e29549 | 1664 | emit_group_load_1 (tmps, dst, src, type, ssize); |
782fa603 AH |
1665 | return; |
1666 | } | |
1667 | ||
fffa9c1d JW |
1668 | /* Check for a NULL entry, used to indicate that the parameter goes |
1669 | both on the stack and in registers. */ | |
aac5cc16 RH |
1670 | if (XEXP (XVECEXP (dst, 0, 0), 0)) |
1671 | start = 0; | |
fffa9c1d | 1672 | else |
aac5cc16 RH |
1673 | start = 1; |
1674 | ||
aac5cc16 RH |
1675 | /* Process the pieces. */ |
1676 | for (i = start; i < XVECLEN (dst, 0); i++) | |
1677 | { | |
1678 | enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0)); | |
770ae6cc RK |
1679 | HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1)); |
1680 | unsigned int bytelen = GET_MODE_SIZE (mode); | |
aac5cc16 RH |
1681 | int shift = 0; |
1682 | ||
1683 | /* Handle trailing fragments that run over the size of the struct. */ | |
8752c357 | 1684 | if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize) |
aac5cc16 | 1685 | { |
6e985040 AM |
1686 | /* Arrange to shift the fragment to where it belongs. |
1687 | extract_bit_field loads to the lsb of the reg. */ | |
1688 | if ( | |
1689 | #ifdef BLOCK_REG_PADDING | |
1690 | BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start) | |
1691 | == (BYTES_BIG_ENDIAN ? upward : downward) | |
1692 | #else | |
1693 | BYTES_BIG_ENDIAN | |
1694 | #endif | |
1695 | ) | |
1696 | shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT; | |
aac5cc16 | 1697 | bytelen = ssize - bytepos; |
5b0264cb | 1698 | gcc_assert (bytelen > 0); |
aac5cc16 RH |
1699 | } |
1700 | ||
f3ce87a9 DE |
1701 | /* If we won't be loading directly from memory, protect the real source |
1702 | from strange tricks we might play; but make sure that the source can | |
1703 | be loaded directly into the destination. */ | |
1704 | src = orig_src; | |
3c0cb5de | 1705 | if (!MEM_P (orig_src) |
f3ce87a9 DE |
1706 | && (!CONSTANT_P (orig_src) |
1707 | || (GET_MODE (orig_src) != mode | |
1708 | && GET_MODE (orig_src) != VOIDmode))) | |
1709 | { | |
1710 | if (GET_MODE (orig_src) == VOIDmode) | |
1711 | src = gen_reg_rtx (mode); | |
1712 | else | |
1713 | src = gen_reg_rtx (GET_MODE (orig_src)); | |
04050c69 | 1714 | |
f3ce87a9 DE |
1715 | emit_move_insn (src, orig_src); |
1716 | } | |
1717 | ||
aac5cc16 | 1718 | /* Optimize the access just a bit. */ |
3c0cb5de | 1719 | if (MEM_P (src) |
6e985040 AM |
1720 | && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src)) |
1721 | || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)) | |
729a2125 | 1722 | && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0 |
aac5cc16 RH |
1723 | && bytelen == GET_MODE_SIZE (mode)) |
1724 | { | |
1725 | tmps[i] = gen_reg_rtx (mode); | |
f4ef873c | 1726 | emit_move_insn (tmps[i], adjust_address (src, mode, bytepos)); |
fffa9c1d | 1727 | } |
d20b1190 EB |
1728 | else if (COMPLEX_MODE_P (mode) |
1729 | && GET_MODE (src) == mode | |
1730 | && bytelen == GET_MODE_SIZE (mode)) | |
1731 | /* Let emit_move_complex do the bulk of the work. */ | |
1732 | tmps[i] = src; | |
7c4a6db0 JW |
1733 | else if (GET_CODE (src) == CONCAT) |
1734 | { | |
015b1ad1 JDA |
1735 | unsigned int slen = GET_MODE_SIZE (GET_MODE (src)); |
1736 | unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0))); | |
1737 | ||
1738 | if ((bytepos == 0 && bytelen == slen0) | |
1739 | || (bytepos != 0 && bytepos + bytelen <= slen)) | |
cbb92744 | 1740 | { |
015b1ad1 JDA |
1741 | /* The following assumes that the concatenated objects all |
1742 | have the same size. In this case, a simple calculation | |
1743 | can be used to determine the object and the bit field | |
1744 | to be extracted. */ | |
1745 | tmps[i] = XEXP (src, bytepos / slen0); | |
cbb92744 | 1746 | if (! CONSTANT_P (tmps[i]) |
f8cfc6aa | 1747 | && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode)) |
cbb92744 | 1748 | tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT, |
015b1ad1 | 1749 | (bytepos % slen0) * BITS_PER_UNIT, |
b3520980 | 1750 | 1, NULL_RTX, mode, mode); |
cbb92744 | 1751 | } |
5b0264cb | 1752 | else |
58f69841 | 1753 | { |
5b0264cb | 1754 | rtx mem; |
f58c00e3 | 1755 | |
5b0264cb NS |
1756 | gcc_assert (!bytepos); |
1757 | mem = assign_stack_temp (GET_MODE (src), slen, 0); | |
58f69841 | 1758 | emit_move_insn (mem, src); |
f58c00e3 EB |
1759 | tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT, |
1760 | 0, 1, NULL_RTX, mode, mode); | |
58f69841 | 1761 | } |
7c4a6db0 | 1762 | } |
9c0631a7 AH |
1763 | /* FIXME: A SIMD parallel will eventually lead to a subreg of a |
1764 | SIMD register, which is currently broken. While we get GCC | |
1765 | to emit proper RTL for these cases, let's dump to memory. */ | |
1766 | else if (VECTOR_MODE_P (GET_MODE (dst)) | |
f8cfc6aa | 1767 | && REG_P (src)) |
9c0631a7 AH |
1768 | { |
1769 | int slen = GET_MODE_SIZE (GET_MODE (src)); | |
1770 | rtx mem; | |
1771 | ||
1772 | mem = assign_stack_temp (GET_MODE (src), slen, 0); | |
1773 | emit_move_insn (mem, src); | |
1774 | tmps[i] = adjust_address (mem, mode, (int) bytepos); | |
1775 | } | |
d3a16cbd FJ |
1776 | else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode |
1777 | && XVECLEN (dst, 0) > 1) | |
1778 | tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos); |
7cefcade JDA |
1779 | else if (CONSTANT_P (src)) |
1780 | { | |
1781 | HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen; | |
1782 | ||
1783 | if (len == ssize) | |
1784 | tmps[i] = src; | |
1785 | else | |
1786 | { | |
1787 | rtx first, second; | |
1788 | ||
1789 | gcc_assert (2 * len == ssize); | |
1790 | split_double (src, &first, &second); | |
1791 | if (i) | |
1792 | tmps[i] = second; | |
1793 | else | |
1794 | tmps[i] = first; | |
1795 | } | |
1796 | } | |
1797 | else if (REG_P (src) && GET_MODE (src) == mode) | |
2ee5437b | 1798 | tmps[i] = src; |
fffa9c1d | 1799 | else |
19caa751 RK |
1800 | tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT, |
1801 | bytepos * BITS_PER_UNIT, 1, NULL_RTX, | |
b3520980 | 1802 | mode, mode); |
fffa9c1d | 1803 | |
6e985040 | 1804 | if (shift) |
09b52670 | 1805 | tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i], |
7d60be94 | 1806 | build_int_cst (NULL_TREE, shift), tmps[i], 0); |
fffa9c1d | 1807 | } |
27e29549 RH |
1808 | } |
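/* For each piece of DST, the code above tries in turn: a direct aligned
   load from memory, reusing a matching complex value or CONCAT part,
   spilling awkward CONCAT or vector sources to a stack temporary, taking
   a subreg of (or splitting) a constant source, reusing a register
   source of the right mode, and finally a generic extract_bit_field.
   Trailing fragments that run past SSIZE are truncated and, depending on
   BLOCK_REG_PADDING and endianness, shifted left so the fragment lands
   where the ABI expects it.  */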
1809 | ||
1810 | /* Emit code to move a block SRC of type TYPE to a block DST, | |
1811 | where DST is non-consecutive registers represented by a PARALLEL. | |
1812 | SSIZE represents the total size of block ORIG_SRC in bytes, or -1 | |
1813 | if not known. */ | |
1814 | ||
1815 | void | |
1816 | emit_group_load (rtx dst, rtx src, tree type, int ssize) | |
1817 | { | |
1818 | rtx *tmps; | |
1819 | int i; | |
1820 | ||
1b4572a8 | 1821 | tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0)); |
27e29549 | 1822 | emit_group_load_1 (tmps, dst, src, type, ssize); |
19caa751 | 1823 | |
aac5cc16 | 1824 | /* Copy the extracted pieces into the proper (probable) hard regs. */ |
27e29549 RH |
1825 | for (i = 0; i < XVECLEN (dst, 0); i++) |
1826 | { | |
1827 | rtx d = XEXP (XVECEXP (dst, 0, i), 0); | |
1828 | if (d == NULL) | |
1829 | continue; | |
1830 | emit_move_insn (d, tmps[i]); | |
1831 | } | |
1832 | } | |
1833 | ||
1834 | /* Similar, but load SRC into new pseudos in a format that looks like | |
1835 | PARALLEL. This can later be fed to emit_group_move to get things | |
1836 | in the right place. */ | |
1837 | ||
1838 | rtx | |
1839 | emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize) | |
1840 | { | |
1841 | rtvec vec; | |
1842 | int i; | |
1843 | ||
1844 | vec = rtvec_alloc (XVECLEN (parallel, 0)); | |
1845 | emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize); | |
1846 | ||
1847 | /* Convert the vector to look just like the original PARALLEL, except | |
1848 | with the computed values. */ | |
1849 | for (i = 0; i < XVECLEN (parallel, 0); i++) | |
1850 | { | |
1851 | rtx e = XVECEXP (parallel, 0, i); | |
1852 | rtx d = XEXP (e, 0); | |
1853 | ||
1854 | if (d) | |
1855 | { | |
1856 | d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i)); | |
1857 | e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1)); | |
1858 | } | |
1859 | RTVEC_ELT (vec, i) = e; | |
1860 | } | |
1861 | ||
1862 | return gen_rtx_PARALLEL (GET_MODE (parallel), vec); | |
fffa9c1d JW |
1863 | } |
1864 | ||
084a1106 JDA |
1865 | /* Emit code to move a block SRC to block DST, where SRC and DST are |
1866 | non-consecutive groups of registers, each represented by a PARALLEL. */ | |
1867 | ||
1868 | void | |
502b8322 | 1869 | emit_group_move (rtx dst, rtx src) |
084a1106 JDA |
1870 | { |
1871 | int i; | |
1872 | ||
5b0264cb NS |
1873 | gcc_assert (GET_CODE (src) == PARALLEL |
1874 | && GET_CODE (dst) == PARALLEL | |
1875 | && XVECLEN (src, 0) == XVECLEN (dst, 0)); | |
084a1106 JDA |
1876 | |
1877 | /* Skip first entry if NULL. */ | |
1878 | for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++) | |
1879 | emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), | |
1880 | XEXP (XVECEXP (src, 0, i), 0)); | |
1881 | } | |
1882 | ||
27e29549 RH |
1883 | /* Move a group of registers represented by a PARALLEL into pseudos. */ |
1884 | ||
1885 | rtx | |
1886 | emit_group_move_into_temps (rtx src) | |
1887 | { | |
1888 | rtvec vec = rtvec_alloc (XVECLEN (src, 0)); | |
1889 | int i; | |
1890 | ||
1891 | for (i = 0; i < XVECLEN (src, 0); i++) | |
1892 | { | |
1893 | rtx e = XVECEXP (src, 0, i); | |
1894 | rtx d = XEXP (e, 0); | |
1895 | ||
1896 | if (d) | |
1897 | e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1)); | |
1898 | RTVEC_ELT (vec, i) = e; | |
1899 | } | |
1900 | ||
1901 | return gen_rtx_PARALLEL (GET_MODE (src), vec); | |
1902 | } | |
1903 | ||
6e985040 AM |
1904 | /* Emit code to move a block SRC to a block ORIG_DST of type TYPE, |
1905 | where SRC is non-consecutive registers represented by a PARALLEL. | |
1906 | SSIZE represents the total size of block ORIG_DST, or -1 if not | |
1907 | known. */ | |
fffa9c1d JW |
1908 | |
1909 | void | |
6e985040 | 1910 | emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize) |
fffa9c1d | 1911 | { |
aac5cc16 | 1912 | rtx *tmps, dst; |
79edfde8 | 1913 | int start, finish, i; |
7ef7000b | 1914 | enum machine_mode m = GET_MODE (orig_dst); |
fffa9c1d | 1915 | |
5b0264cb | 1916 | gcc_assert (GET_CODE (src) == PARALLEL); |
fffa9c1d | 1917 | |
0da34ce4 RH |
1918 | if (!SCALAR_INT_MODE_P (m) |
1919 | && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT) | |
782fa603 AH |
1920 | { |
1921 | enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst)); | |
1922 | if (imode == BLKmode) | |
1923 | dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0); | |
1924 | else | |
1925 | dst = gen_reg_rtx (imode); | |
1926 | emit_group_store (dst, src, type, ssize); | |
1927 | if (imode != BLKmode) | |
1928 | dst = gen_lowpart (GET_MODE (orig_dst), dst); | |
1929 | emit_move_insn (orig_dst, dst); | |
1930 | return; | |
1931 | } | |
1932 | ||
fffa9c1d JW |
1933 | /* Check for a NULL entry, used to indicate that the parameter goes |
1934 | both on the stack and in registers. */ | |
aac5cc16 RH |
1935 | if (XEXP (XVECEXP (src, 0, 0), 0)) |
1936 | start = 0; | |
fffa9c1d | 1937 | else |
aac5cc16 | 1938 | start = 1; |
79edfde8 | 1939 | finish = XVECLEN (src, 0); |
aac5cc16 | 1940 | |
1b4572a8 | 1941 | tmps = XALLOCAVEC (rtx, finish); |
fffa9c1d | 1942 | |
aac5cc16 | 1943 | /* Copy the (probable) hard regs into pseudos. */ |
79edfde8 | 1944 | for (i = start; i < finish; i++) |
fffa9c1d | 1945 | { |
aac5cc16 | 1946 | rtx reg = XEXP (XVECEXP (src, 0, i), 0); |
5ac60669 RS |
1947 | if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER) |
1948 | { | |
1949 | tmps[i] = gen_reg_rtx (GET_MODE (reg)); | |
1950 | emit_move_insn (tmps[i], reg); | |
1951 | } | |
1952 | else | |
1953 | tmps[i] = reg; | |
aac5cc16 | 1954 | } |
fffa9c1d | 1955 | |
aac5cc16 RH |
1956 | /* If we won't be storing directly into memory, protect the real destination |
1957 | from strange tricks we might play. */ | |
1958 | dst = orig_dst; | |
10a9f2be JW |
1959 | if (GET_CODE (dst) == PARALLEL) |
1960 | { | |
1961 | rtx temp; | |
1962 | ||
1963 | /* We can get a PARALLEL dst if there is a conditional expression in | |
1964 | a return statement. In that case, the dst and src are the same, | |
1965 | so no action is necessary. */ | |
1966 | if (rtx_equal_p (dst, src)) | |
1967 | return; | |
1968 | ||
1969 | /* It is unclear if we can ever reach here, but we may as well handle | |
1970 | it. Allocate a temporary, and split this into a store/load to/from | |
1971 | the temporary. */ | |
1972 | ||
1973 | temp = assign_stack_temp (GET_MODE (dst), ssize, 0); | |
6e985040 AM |
1974 | emit_group_store (temp, src, type, ssize); |
1975 | emit_group_load (dst, temp, type, ssize); | |
10a9f2be JW |
1976 | return; |
1977 | } | |
3c0cb5de | 1978 | else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT) |
aac5cc16 | 1979 | { |
79edfde8 RS |
1980 | enum machine_mode outer = GET_MODE (dst); |
1981 | enum machine_mode inner; | |
5650dfbd | 1982 | HOST_WIDE_INT bytepos; |
79edfde8 RS |
1983 | bool done = false; |
1984 | rtx temp; | |
1985 | ||
5ac60669 | 1986 | if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER) |
79edfde8 RS |
1987 | dst = gen_reg_rtx (outer); |
1988 | ||
aac5cc16 | 1989 | /* Make life a bit easier for combine. */ |
79edfde8 RS |
1990 | /* If the first element of the vector is the low part |
1991 | of the destination mode, use a paradoxical subreg to | |
1992 | initialize the destination. */ | |
1993 | if (start < finish) | |
1994 | { | |
1995 | inner = GET_MODE (tmps[start]); | |
7488662d | 1996 | bytepos = subreg_lowpart_offset (inner, outer); |
79edfde8 RS |
1997 | if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos) |
1998 | { | |
1999 | temp = simplify_gen_subreg (outer, tmps[start], | |
7488662d | 2000 | inner, 0); |
9fd20553 RS |
2001 | if (temp) |
2002 | { | |
2003 | emit_move_insn (dst, temp); | |
2004 | done = true; | |
2005 | start++; | |
2006 | } | |
79edfde8 RS |
2007 | } |
2008 | } | |
2009 | ||
2010 | /* If the first element wasn't the low part, try the last. */ | |
2011 | if (!done | |
2012 | && start < finish - 1) | |
2013 | { | |
2014 | inner = GET_MODE (tmps[finish - 1]); | |
7488662d | 2015 | bytepos = subreg_lowpart_offset (inner, outer); |
79edfde8 RS |
2016 | if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos) |
2017 | { | |
2018 | temp = simplify_gen_subreg (outer, tmps[finish - 1], | |
7488662d | 2019 | inner, 0); |
9fd20553 RS |
2020 | if (temp) |
2021 | { | |
2022 | emit_move_insn (dst, temp); | |
2023 | done = true; | |
2024 | finish--; | |
2025 | } | |
79edfde8 RS |
2026 | } |
2027 | } | |
2028 | ||
2029 | /* Otherwise, simply initialize the result to zero. */ | |
2030 | if (!done) | |
2031 | emit_move_insn (dst, CONST0_RTX (outer)); | |
aac5cc16 | 2032 | } |
aac5cc16 RH |
2033 | |
2034 | /* Process the pieces. */ | |
79edfde8 | 2035 | for (i = start; i < finish; i++) |
aac5cc16 | 2036 | { |
770ae6cc | 2037 | HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1)); |
aac5cc16 | 2038 | enum machine_mode mode = GET_MODE (tmps[i]); |
770ae6cc | 2039 | unsigned int bytelen = GET_MODE_SIZE (mode); |
38c4df0b | 2040 | unsigned int adj_bytelen = bytelen; |
6ddae612 | 2041 | rtx dest = dst; |
aac5cc16 RH |
2042 | |
2043 | /* Handle trailing fragments that run over the size of the struct. */ | |
8752c357 | 2044 | if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize) |
38c4df0b | 2045 | adj_bytelen = ssize - bytepos; |
fffa9c1d | 2046 | |
6ddae612 JJ |
2047 | if (GET_CODE (dst) == CONCAT) |
2048 | { | |
38c4df0b JM |
2049 | if (bytepos + adj_bytelen |
2050 | <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)))) | |
6ddae612 JJ |
2051 | dest = XEXP (dst, 0); |
2052 | else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)))) | |
2053 | { | |
2054 | bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))); | |
2055 | dest = XEXP (dst, 1); | |
2056 | } | |
5b0264cb | 2057 | else |
0d446150 | 2058 | { |
6cd7942d L |
2059 | enum machine_mode dest_mode = GET_MODE (dest); |
2060 | enum machine_mode tmp_mode = GET_MODE (tmps[i]); | |
6cd7942d | 2061 | |
e0978eba | 2062 | gcc_assert (bytepos == 0 && XVECLEN (src, 0)); |
6cd7942d L |
2063 | |
2064 | if (GET_MODE_ALIGNMENT (dest_mode) | |
2065 | >= GET_MODE_ALIGNMENT (tmp_mode)) | |
2066 | { | |
e0978eba L |
2067 | dest = assign_stack_temp (dest_mode, |
2068 | GET_MODE_SIZE (dest_mode), | |
2069 | 0); | |
6cd7942d L |
2070 | emit_move_insn (adjust_address (dest, |
2071 | tmp_mode, | |
2072 | bytepos), | |
2073 | tmps[i]); | |
2074 | dst = dest; | |
2075 | } | |
2076 | else | |
2077 | { | |
e0978eba L |
2078 | dest = assign_stack_temp (tmp_mode, |
2079 | GET_MODE_SIZE (tmp_mode), | |
2080 | 0); | |
6cd7942d L |
2081 | emit_move_insn (dest, tmps[i]); |
2082 | dst = adjust_address (dest, dest_mode, bytepos); | |
2083 | } | |
0d446150 JH |
2084 | break; |
2085 | } | |
6ddae612 JJ |
2086 | } |
2087 | ||
38c4df0b JM |
2088 | if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize) |
2089 | { | |
2090 | /* store_bit_field always takes its value from the lsb. | |
2091 | Move the fragment to the lsb if it's not already there. */ | |
2092 | if ( | |
2093 | #ifdef BLOCK_REG_PADDING | |
2094 | BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start) | |
2095 | == (BYTES_BIG_ENDIAN ? upward : downward) | |
2096 | #else | |
2097 | BYTES_BIG_ENDIAN | |
2098 | #endif | |
2099 | ) | |
2100 | { | |
2101 | int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT; | |
2102 | tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i], | |
2103 | build_int_cst (NULL_TREE, shift), | |
2104 | tmps[i], 0); | |
2105 | } | |
2106 | bytelen = adj_bytelen; | |
2107 | } | |
2108 | ||
aac5cc16 | 2109 | /* Optimize the access just a bit. */ |
3c0cb5de | 2110 | if (MEM_P (dest) |
6e985040 AM |
2111 | && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest)) |
2112 | || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)) | |
729a2125 | 2113 | && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0 |
aac5cc16 | 2114 | && bytelen == GET_MODE_SIZE (mode)) |
6ddae612 | 2115 | emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]); |
aac5cc16 | 2116 | else |
6ddae612 | 2117 | store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT, |
b3520980 | 2118 | mode, tmps[i]); |
fffa9c1d | 2119 | } |
729a2125 | 2120 | |
aac5cc16 | 2121 | /* Copy from the pseudo into the (probable) hard reg. */ |
0d446150 | 2122 | if (orig_dst != dst) |
aac5cc16 | 2123 | emit_move_insn (orig_dst, dst); |
fffa9c1d JW |
2124 | } |
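/* This is the mirror image of emit_group_load: the pieces are first
   copied out of the (probable) hard registers into pseudos, the
   destination is protected (or, for a non-memory scalar, rebuilt via a
   paradoxical subreg of the first or last piece, else zero-initialized),
   and each piece is then written back with a plain move when aligned and
   full-width, or with store_bit_field otherwise.  */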
2125 | ||
c36fce9a GRK |
2126 | /* Generate code to copy a BLKmode object of TYPE out of a |
2127 | set of registers starting with SRCREG into TGTBLK. If TGTBLK | |
2128 | is null, a stack temporary is created. TGTBLK is returned. | |
2129 | ||
c988af2b RS |
2130 | The purpose of this routine is to handle functions that return |
2131 | BLKmode structures in registers. Some machines (the PA for example) | |
2132 | want to return all small structures in registers regardless of the | |
2133 | structure's alignment. */ | |
c36fce9a GRK |
2134 | |
2135 | rtx | |
502b8322 | 2136 | copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type) |
c36fce9a | 2137 | { |
19caa751 RK |
2138 | unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type); |
2139 | rtx src = NULL, dst = NULL; | |
2140 | unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD); | |
c988af2b | 2141 | unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0; |
15af420d | 2142 | enum machine_mode copy_mode; |
19caa751 RK |
2143 | |
2144 | if (tgtblk == 0) | |
2145 | { | |
1da68f56 RK |
2146 | tgtblk = assign_temp (build_qualified_type (type, |
2147 | (TYPE_QUALS (type) | |
2148 | | TYPE_QUAL_CONST)), | |
2149 | 0, 1, 1); | |
19caa751 RK |
2150 | preserve_temp_slots (tgtblk); |
2151 | } | |
3a94c984 | 2152 | |
1ed1b4fb | 2153 | /* This code assumes srcreg is at least a full word. If it isn't, copy it |
9ac3e73b | 2154 | into a new pseudo which is a full word. */ |
0d7839da | 2155 | |
19caa751 RK |
2156 | if (GET_MODE (srcreg) != BLKmode |
2157 | && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD) | |
8df83eae | 2158 | srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type)); |
19caa751 | 2159 | |
c988af2b RS |
2160 | /* If the structure doesn't take up a whole number of words, see whether |
2161 | SRCREG is padded on the left or on the right. If it's on the left, | |
2162 | set PADDING_CORRECTION to the number of bits to skip. | |
2163 | ||
2164 | In most ABIs, the structure will be returned at the least significant end of |
2165 | the register, which translates to right padding on little-endian | |
2166 | targets and left padding on big-endian targets. The opposite | |
2167 | holds if the structure is returned at the most significant | |
2168 | end of the register. */ | |
2169 | if (bytes % UNITS_PER_WORD != 0 | |
2170 | && (targetm.calls.return_in_msb (type) | |
2171 | ? !BYTES_BIG_ENDIAN | |
2172 | : BYTES_BIG_ENDIAN)) | |
2173 | padding_correction | |
19caa751 RK |
2174 | = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT)); |
2175 | ||
15af420d EB |
2176 | /* Copy the structure BITSIZE bits at a time. If the target lives in |
2177 | memory, take care of not reading/writing past its end by selecting | |
2178 | a copy mode suited to BITSIZE. This should always be possible given | |
2179 | how it is computed. | |
3a94c984 | 2180 | |
19caa751 RK |
2181 | We could probably emit more efficient code for machines which do not use |
2182 | strict alignment, but it doesn't seem worth the effort at the current | |
2183 | time. */ | |
15af420d EB |
2184 | |
2185 | copy_mode = word_mode; | |
2186 | if (MEM_P (tgtblk)) | |
2187 | { | |
2188 | enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1); | |
2189 | if (mem_mode != BLKmode) | |
2190 | copy_mode = mem_mode; | |
2191 | } | |
2192 | ||
c988af2b | 2193 | for (bitpos = 0, xbitpos = padding_correction; |
19caa751 RK |
2194 | bitpos < bytes * BITS_PER_UNIT; |
2195 | bitpos += bitsize, xbitpos += bitsize) | |
2196 | { | |
3a94c984 | 2197 | /* We need a new source operand each time xbitpos is on a |
c988af2b | 2198 | word boundary and when xbitpos == padding_correction |
19caa751 RK |
2199 | (the first time through). */ |
2200 | if (xbitpos % BITS_PER_WORD == 0 | |
c988af2b | 2201 | || xbitpos == padding_correction) |
b47f8cfc JH |
2202 | src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, |
2203 | GET_MODE (srcreg)); | |
19caa751 RK |
2204 | |
2205 | /* We need a new destination operand each time bitpos is on | |
2206 | a word boundary. */ | |
2207 | if (bitpos % BITS_PER_WORD == 0) | |
2208 | dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode); | |
3a94c984 | 2209 | |
19caa751 | 2210 | /* Use xbitpos for the source extraction (right justified) and |
15af420d EB |
2211 | bitpos for the destination store (left justified). */ |
2212 | store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, copy_mode, | |
19caa751 RK |
2213 | extract_bit_field (src, bitsize, |
2214 | xbitpos % BITS_PER_WORD, 1, | |
15af420d | 2215 | NULL_RTX, copy_mode, copy_mode)); |
19caa751 RK |
2216 | } |
2217 | ||
2218 | return tgtblk; | |
c36fce9a GRK |
2219 | } |
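/* As an illustration of padding_correction (numbers made up): with
   32-bit words, a 6-byte structure returned in registers on a big-endian
   target that does not return values in the most significant end gives
   padding_correction = 32 - (6 % 4) * 8 = 16, so the first 16 bits of
   the source register are skipped and extraction starts at the first bit
   that actually belongs to the structure.  */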
2220 | ||
94b25f81 RK |
2221 | /* Add a USE expression for REG to the (possibly empty) list pointed |
2222 | to by CALL_FUSAGE. REG must denote a hard register. */ | |
bbf6f052 RK |
2223 | |
2224 | void | |
502b8322 | 2225 | use_reg (rtx *call_fusage, rtx reg) |
b3f8cf4a | 2226 | { |
5b0264cb | 2227 | gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER); |
ed1223ba | 2228 | |
b3f8cf4a | 2229 | *call_fusage |
38a448ca RH |
2230 | = gen_rtx_EXPR_LIST (VOIDmode, |
2231 | gen_rtx_USE (VOIDmode, reg), *call_fusage); | |
b3f8cf4a RK |
2232 | } |
2233 | ||
94b25f81 RK |
2234 | /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs, |
2235 | starting at REGNO. All of these registers must be hard registers. */ | |
b3f8cf4a RK |
2236 | |
2237 | void | |
502b8322 | 2238 | use_regs (rtx *call_fusage, int regno, int nregs) |
bbf6f052 | 2239 | { |
0304dfbb | 2240 | int i; |
bbf6f052 | 2241 | |
5b0264cb | 2242 | gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER); |
0304dfbb DE |
2243 | |
2244 | for (i = 0; i < nregs; i++) | |
e50126e8 | 2245 | use_reg (call_fusage, regno_reg_rtx[regno + i]); |
bbf6f052 | 2246 | } |
fffa9c1d JW |
2247 | |
2248 | /* Add USE expressions to *CALL_FUSAGE for each REG contained in the | |
2249 | PARALLEL REGS. This is for calls that pass values in multiple | |
2250 | non-contiguous locations. The Irix 6 ABI has examples of this. */ | |
2251 | ||
2252 | void | |
502b8322 | 2253 | use_group_regs (rtx *call_fusage, rtx regs) |
fffa9c1d JW |
2254 | { |
2255 | int i; | |
2256 | ||
6bd35f86 DE |
2257 | for (i = 0; i < XVECLEN (regs, 0); i++) |
2258 | { | |
2259 | rtx reg = XEXP (XVECEXP (regs, 0, i), 0); | |
fffa9c1d | 2260 | |
6bd35f86 DE |
2261 | /* A NULL entry means the parameter goes both on the stack and in |
2262 | registers. This can also be a MEM for targets that pass values | |
2263 | partially on the stack and partially in registers. */ | |
f8cfc6aa | 2264 | if (reg != 0 && REG_P (reg)) |
6bd35f86 DE |
2265 | use_reg (call_fusage, reg); |
2266 | } | |
fffa9c1d | 2267 | } |
641cac0b AN |
2268 | |
2269 | /* Return the defining gimple statement for SSA_NAME NAME if it is an | |
2270 | assignment and the code of the expression on the RHS is CODE. Return |
2271 | NULL otherwise. */ | |
2272 | ||
2273 | static gimple | |
2274 | get_def_for_expr (tree name, enum tree_code code) | |
2275 | { | |
2276 | gimple def_stmt; | |
2277 | ||
2278 | if (TREE_CODE (name) != SSA_NAME) | |
2279 | return NULL; | |
2280 | ||
2281 | def_stmt = get_gimple_for_ssa_name (name); | |
2282 | if (!def_stmt | |
2283 | || gimple_assign_rhs_code (def_stmt) != code) | |
2284 | return NULL; | |
2285 | ||
2286 | return def_stmt; | |
2287 | } | |
bbf6f052 | 2288 | \f |
57814e5e | 2289 | |
cf5124f6 RS |
2290 | /* Determine whether the LEN bytes generated by CONSTFUN can be |
2291 | stored to memory using several move instructions. CONSTFUNDATA is | |
2292 | a pointer which will be passed as argument in every CONSTFUN call. | |
cfa31150 SL |
2293 | ALIGN is maximum alignment we can assume. MEMSETP is true if this is |
2294 | a memset operation and false if it's a copy of a constant string. | |
2295 | Return nonzero if a call to store_by_pieces should succeed. */ | |
cf5124f6 | 2296 | |
57814e5e | 2297 | int |
502b8322 AJ |
2298 | can_store_by_pieces (unsigned HOST_WIDE_INT len, |
2299 | rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode), | |
cfa31150 | 2300 | void *constfundata, unsigned int align, bool memsetp) |
57814e5e | 2301 | { |
45d78e7f JJ |
2302 | unsigned HOST_WIDE_INT l; |
2303 | unsigned int max_size; | |
57814e5e JJ |
2304 | HOST_WIDE_INT offset = 0; |
2305 | enum machine_mode mode, tmode; | |
2306 | enum insn_code icode; | |
2307 | int reverse; | |
2308 | rtx cst; | |
2309 | ||
2c430630 RS |
2310 | if (len == 0) |
2311 | return 1; | |
2312 | ||
b8698a0f | 2313 | if (! (memsetp |
cfa31150 SL |
2314 | ? SET_BY_PIECES_P (len, align) |
2315 | : STORE_BY_PIECES_P (len, align))) | |
57814e5e JJ |
2316 | return 0; |
2317 | ||
f64d6991 DE |
2318 | tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1); |
2319 | if (align >= GET_MODE_ALIGNMENT (tmode)) | |
2320 | align = GET_MODE_ALIGNMENT (tmode); | |
2321 | else | |
2322 | { | |
2323 | enum machine_mode xmode; | |
2324 | ||
2325 | for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode; | |
2326 | tmode != VOIDmode; | |
2327 | xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode)) | |
2328 | if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES | |
2329 | || SLOW_UNALIGNED_ACCESS (tmode, align)) | |
2330 | break; | |
2331 | ||
2332 | align = MAX (align, GET_MODE_ALIGNMENT (xmode)); | |
2333 | } | |
57814e5e JJ |
2334 | |
2335 | /* We would first store what we can in the largest integer mode, then go to | |
2336 | successively smaller modes. */ | |
2337 | ||
2338 | for (reverse = 0; | |
2339 | reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT); | |
2340 | reverse++) | |
2341 | { | |
2342 | l = len; | |
2343 | mode = VOIDmode; | |
cf5124f6 | 2344 | max_size = STORE_MAX_PIECES + 1; |
57814e5e JJ |
2345 | while (max_size > 1) |
2346 | { | |
2347 | for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); | |
2348 | tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode)) | |
2349 | if (GET_MODE_SIZE (tmode) < max_size) | |
2350 | mode = tmode; | |
2351 | ||
2352 | if (mode == VOIDmode) | |
2353 | break; | |
2354 | ||
166cdb08 | 2355 | icode = optab_handler (mov_optab, mode)->insn_code; |
57814e5e JJ |
2356 | if (icode != CODE_FOR_nothing |
2357 | && align >= GET_MODE_ALIGNMENT (mode)) | |
2358 | { | |
2359 | unsigned int size = GET_MODE_SIZE (mode); | |
2360 | ||
2361 | while (l >= size) | |
2362 | { | |
2363 | if (reverse) | |
2364 | offset -= size; | |
2365 | ||
2366 | cst = (*constfun) (constfundata, offset, mode); | |
2367 | if (!LEGITIMATE_CONSTANT_P (cst)) | |
2368 | return 0; | |
2369 | ||
2370 | if (!reverse) | |
2371 | offset += size; | |
2372 | ||
2373 | l -= size; | |
2374 | } | |
2375 | } | |
2376 | ||
2377 | max_size = GET_MODE_SIZE (mode); | |
2378 | } | |
2379 | ||
2380 | /* The code above should have handled everything. */ | |
5b0264cb | 2381 | gcc_assert (!l); |
57814e5e JJ |
2382 | } |
2383 | ||
2384 | return 1; | |
2385 | } | |
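/* This is in effect a dry run of store_by_pieces: it follows the same
   "widest usable integer mode first" schedule (trying both the forward
   and the reverse direction when pre/post-decrement addressing exists)
   and asks CONSTFUN for every constant that would be stored, failing if
   any of them is not LEGITIMATE_CONSTANT_P.  No RTL is emitted.  */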
2386 | ||
2387 | /* Generate several move instructions to store LEN bytes generated by | |
2388 | CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a | |
2389 | pointer which will be passed as argument in every CONSTFUN call. | |
cfa31150 SL |
2390 | ALIGN is maximum alignment we can assume. MEMSETP is true if this is |
2391 | a memset operation and false if it's a copy of a constant string. | |
8fd3cf4e JJ |
2392 | If ENDP is 0 return to, if ENDP is 1 return memory at the end ala |
2393 | mempcpy, and if ENDP is 2 return memory the end minus one byte ala | |
2394 | stpcpy. */ | |
57814e5e | 2395 | |
8fd3cf4e | 2396 | rtx |
502b8322 AJ |
2397 | store_by_pieces (rtx to, unsigned HOST_WIDE_INT len, |
2398 | rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode), | |
cfa31150 | 2399 | void *constfundata, unsigned int align, bool memsetp, int endp) |
57814e5e | 2400 | { |
d4ebfa65 BE |
2401 | enum machine_mode to_addr_mode |
2402 | = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to)); | |
b0f43ca0 | 2403 | struct store_by_pieces_d data; |
57814e5e | 2404 | |
2c430630 RS |
2405 | if (len == 0) |
2406 | { | |
5b0264cb | 2407 | gcc_assert (endp != 2); |
2c430630 RS |
2408 | return to; |
2409 | } | |
2410 | ||
cfa31150 SL |
2411 | gcc_assert (memsetp |
2412 | ? SET_BY_PIECES_P (len, align) | |
2413 | : STORE_BY_PIECES_P (len, align)); | |
57814e5e JJ |
2414 | data.constfun = constfun; |
2415 | data.constfundata = constfundata; | |
2416 | data.len = len; | |
2417 | data.to = to; | |
2418 | store_by_pieces_1 (&data, align); | |
8fd3cf4e JJ |
2419 | if (endp) |
2420 | { | |
2421 | rtx to1; | |
2422 | ||
5b0264cb | 2423 | gcc_assert (!data.reverse); |
8fd3cf4e JJ |
2424 | if (data.autinc_to) |
2425 | { | |
2426 | if (endp == 2) | |
2427 | { | |
2428 | if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0) | |
2429 | emit_insn (gen_add2_insn (data.to_addr, constm1_rtx)); | |
2430 | else | |
d4ebfa65 BE |
2431 | data.to_addr = copy_to_mode_reg (to_addr_mode, |
2432 | plus_constant (data.to_addr, | |
8fd3cf4e JJ |
2433 | -1)); |
2434 | } | |
2435 | to1 = adjust_automodify_address (data.to, QImode, data.to_addr, | |
2436 | data.offset); | |
2437 | } | |
2438 | else | |
2439 | { | |
2440 | if (endp == 2) | |
2441 | --data.offset; | |
2442 | to1 = adjust_address (data.to, QImode, data.offset); | |
2443 | } | |
2444 | return to1; | |
2445 | } | |
2446 | else | |
2447 | return data.to; | |
57814e5e JJ |
2448 | } |
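/* The ENDP argument selects the return value: 0 returns TO itself
   (memcpy-style), 1 returns the address just past the last byte written
   (mempcpy-style), and 2 returns the address of the last byte written
   (stpcpy-style); the code above adjusts the auto-incremented address or
   the offset accordingly before building the result.  */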
2449 | ||
19caa751 | 2450 | /* Generate several move instructions to clear LEN bytes of block TO. (A MEM |
ad76cef8 | 2451 | rtx with BLKmode). ALIGN is maximum alignment we can assume. */ |
9de08200 RK |
2452 | |
2453 | static void | |
342e2b74 | 2454 | clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align) |
9de08200 | 2455 | { |
b0f43ca0 | 2456 | struct store_by_pieces_d data; |
57814e5e | 2457 | |
2c430630 RS |
2458 | if (len == 0) |
2459 | return; | |
2460 | ||
57814e5e | 2461 | data.constfun = clear_by_pieces_1; |
df4ae160 | 2462 | data.constfundata = NULL; |
57814e5e JJ |
2463 | data.len = len; |
2464 | data.to = to; | |
2465 | store_by_pieces_1 (&data, align); | |
2466 | } | |
2467 | ||
2468 | /* Callback routine for clear_by_pieces. | |
2469 | Return const0_rtx unconditionally. */ | |
2470 | ||
2471 | static rtx | |
502b8322 AJ |
2472 | clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED, |
2473 | HOST_WIDE_INT offset ATTRIBUTE_UNUSED, | |
2474 | enum machine_mode mode ATTRIBUTE_UNUSED) | |
57814e5e JJ |
2475 | { |
2476 | return const0_rtx; | |
2477 | } | |
2478 | ||
2479 | /* Subroutine of clear_by_pieces and store_by_pieces. | |
2480 | Generate several move instructions to store LEN bytes of block TO. (A MEM | |
ad76cef8 | 2481 | rtx with BLKmode). ALIGN is maximum alignment we can assume. */ |
57814e5e JJ |
2482 | |
2483 | static void | |
b0f43ca0 | 2484 | store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED, |
502b8322 | 2485 | unsigned int align ATTRIBUTE_UNUSED) |
57814e5e | 2486 | { |
d4ebfa65 BE |
2487 | enum machine_mode to_addr_mode |
2488 | = targetm.addr_space.address_mode (MEM_ADDR_SPACE (data->to)); | |
57814e5e | 2489 | rtx to_addr = XEXP (data->to, 0); |
45d78e7f | 2490 | unsigned int max_size = STORE_MAX_PIECES + 1; |
fbe1758d AM |
2491 | enum machine_mode mode = VOIDmode, tmode; |
2492 | enum insn_code icode; | |
9de08200 | 2493 | |
57814e5e JJ |
2494 | data->offset = 0; |
2495 | data->to_addr = to_addr; | |
2496 | data->autinc_to | |
9de08200 RK |
2497 | = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC |
2498 | || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC); | |
2499 | ||
57814e5e JJ |
2500 | data->explicit_inc_to = 0; |
2501 | data->reverse | |
9de08200 | 2502 | = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC); |
57814e5e JJ |
2503 | if (data->reverse) |
2504 | data->offset = data->len; | |
9de08200 | 2505 | |
57814e5e | 2506 | /* If storing requires more than two move insns, |
9de08200 RK |
2507 | copy addresses to registers (to make displacements shorter) |
2508 | and use post-increment if available. */ | |
57814e5e | 2509 | if (!data->autinc_to |
45d78e7f | 2510 | && move_by_pieces_ninsns (data->len, align, max_size) > 2) |
9de08200 | 2511 | { |
3a94c984 | 2512 | /* Determine the main mode we'll be using. */ |
fbe1758d AM |
2513 | for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); |
2514 | tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode)) | |
2515 | if (GET_MODE_SIZE (tmode) < max_size) | |
2516 | mode = tmode; | |
2517 | ||
57814e5e | 2518 | if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to) |
9de08200 | 2519 | { |
d4ebfa65 BE |
2520 | data->to_addr = copy_to_mode_reg (to_addr_mode, |
2521 | plus_constant (to_addr, data->len)); | |
57814e5e JJ |
2522 | data->autinc_to = 1; |
2523 | data->explicit_inc_to = -1; | |
9de08200 | 2524 | } |
3bdf5ad1 | 2525 | |
57814e5e JJ |
2526 | if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse |
2527 | && ! data->autinc_to) | |
9de08200 | 2528 | { |
d4ebfa65 | 2529 | data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr); |
57814e5e JJ |
2530 | data->autinc_to = 1; |
2531 | data->explicit_inc_to = 1; | |
9de08200 | 2532 | } |
3bdf5ad1 | 2533 | |
57814e5e | 2534 | if ( !data->autinc_to && CONSTANT_P (to_addr)) |
d4ebfa65 | 2535 | data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr); |
9de08200 RK |
2536 | } |
2537 | ||
f64d6991 DE |
2538 | tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1); |
2539 | if (align >= GET_MODE_ALIGNMENT (tmode)) | |
2540 | align = GET_MODE_ALIGNMENT (tmode); | |
2541 | else | |
2542 | { | |
2543 | enum machine_mode xmode; | |
2544 | ||
2545 | for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode; | |
2546 | tmode != VOIDmode; | |
2547 | xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode)) | |
2548 | if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES | |
2549 | || SLOW_UNALIGNED_ACCESS (tmode, align)) | |
2550 | break; | |
2551 | ||
2552 | align = MAX (align, GET_MODE_ALIGNMENT (xmode)); | |
2553 | } | |
9de08200 | 2554 | |
57814e5e | 2555 | /* First store what we can in the largest integer mode, then go to |
9de08200 RK |
2556 | successively smaller modes. */ |
2557 | ||
2558 | while (max_size > 1) | |
2559 | { | |
9de08200 RK |
2560 | for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); |
2561 | tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode)) | |
2562 | if (GET_MODE_SIZE (tmode) < max_size) | |
2563 | mode = tmode; | |
2564 | ||
2565 | if (mode == VOIDmode) | |
2566 | break; | |
2567 | ||
166cdb08 | 2568 | icode = optab_handler (mov_optab, mode)->insn_code; |
19caa751 | 2569 | if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode)) |
57814e5e | 2570 | store_by_pieces_2 (GEN_FCN (icode), mode, data); |
9de08200 RK |
2571 | |
2572 | max_size = GET_MODE_SIZE (mode); | |
2573 | } | |
2574 | ||
2575 | /* The code above should have handled everything. */ | |
5b0264cb | 2576 | gcc_assert (!data->len); |
9de08200 RK |
2577 | } |
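/* As an illustration (sizes made up): storing 11 well-aligned bytes on a
   target whose widest by-pieces move is DImode is emitted as one 8-byte
   DImode store, then one 2-byte HImode store, then one 1-byte QImode
   store, store_by_pieces_2 fetching the constant for each offset from
   DATA->CONSTFUN.  */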
2578 | ||
57814e5e | 2579 | /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate |
9de08200 RK |
2580 | with move instructions for mode MODE. GENFUN is the gen_... function |
2581 | to make a move insn for that mode. DATA has all the other info. */ | |
2582 | ||
2583 | static void | |
502b8322 | 2584 | store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode, |
b0f43ca0 | 2585 | struct store_by_pieces_d *data) |
9de08200 | 2586 | { |
3bdf5ad1 | 2587 | unsigned int size = GET_MODE_SIZE (mode); |
57814e5e | 2588 | rtx to1, cst; |
9de08200 RK |
2589 | |
2590 | while (data->len >= size) | |
2591 | { | |
3bdf5ad1 RK |
2592 | if (data->reverse) |
2593 | data->offset -= size; | |
9de08200 | 2594 | |
3bdf5ad1 | 2595 | if (data->autinc_to) |
630036c6 JJ |
2596 | to1 = adjust_automodify_address (data->to, mode, data->to_addr, |
2597 | data->offset); | |
3a94c984 | 2598 | else |
f4ef873c | 2599 | to1 = adjust_address (data->to, mode, data->offset); |
9de08200 | 2600 | |
940da324 | 2601 | if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0) |
57814e5e JJ |
2602 | emit_insn (gen_add2_insn (data->to_addr, |
2603 | GEN_INT (-(HOST_WIDE_INT) size))); | |
9de08200 | 2604 | |
57814e5e JJ |
2605 | cst = (*data->constfun) (data->constfundata, data->offset, mode); |
2606 | emit_insn ((*genfun) (to1, cst)); | |
3bdf5ad1 | 2607 | |
940da324 | 2608 | if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0) |
9de08200 | 2609 | emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size))); |
9de08200 | 2610 | |
3bdf5ad1 RK |
2611 | if (! data->reverse) |
2612 | data->offset += size; | |
9de08200 RK |
2613 | |
2614 | data->len -= size; | |
2615 | } | |
2616 | } | |
2617 | \f | |
19caa751 | 2618 | /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is |
8ac61af7 | 2619 | its length in bytes. */ |
e9a25f70 JL |
2620 | |
2621 | rtx | |
079a182e JH |
2622 | clear_storage_hints (rtx object, rtx size, enum block_op_methods method, |
2623 | unsigned int expected_align, HOST_WIDE_INT expected_size) | |
bbf6f052 | 2624 | { |
57aaef66 RH |
2625 | enum machine_mode mode = GET_MODE (object); |
2626 | unsigned int align; | |
e9a25f70 | 2627 | |
8148fe65 JJ |
2628 | gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL); |
2629 | ||
fcf1b822 RK |
2630 | /* If OBJECT is not BLKmode and SIZE is the same size as its mode, |
2631 | just move a zero. Otherwise, do this a piece at a time. */ | |
57aaef66 | 2632 | if (mode != BLKmode |
481683e1 | 2633 | && CONST_INT_P (size) |
57aaef66 | 2634 | && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode)) |
bbf6f052 | 2635 | { |
57aaef66 RH |
2636 | rtx zero = CONST0_RTX (mode); |
2637 | if (zero != NULL) | |
2638 | { | |
2639 | emit_move_insn (object, zero); | |
2640 | return NULL; | |
2641 | } | |
2642 | ||
2643 | if (COMPLEX_MODE_P (mode)) | |
2644 | { | |
2645 | zero = CONST0_RTX (GET_MODE_INNER (mode)); | |
2646 | if (zero != NULL) | |
2647 | { | |
2648 | write_complex_part (object, zero, 0); | |
2649 | write_complex_part (object, zero, 1); | |
2650 | return NULL; | |
2651 | } | |
2652 | } | |
4ca79136 RH |
2653 | } |
2654 | ||
57aaef66 RH |
2655 | if (size == const0_rtx) |
2656 | return NULL; | |
2657 | ||
2658 | align = MEM_ALIGN (object); | |
2659 | ||
481683e1 | 2660 | if (CONST_INT_P (size) |
57aaef66 RH |
2661 | && CLEAR_BY_PIECES_P (INTVAL (size), align)) |
2662 | clear_by_pieces (object, INTVAL (size), align); | |
079a182e JH |
2663 | else if (set_storage_via_setmem (object, size, const0_rtx, align, |
2664 | expected_align, expected_size)) | |
57aaef66 | 2665 | ; |
09e881c9 | 2666 | else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object))) |
8c996513 JH |
2667 | return set_storage_via_libcall (object, size, const0_rtx, |
2668 | method == BLOCK_OP_TAILCALL); | |
09e881c9 BE |
2669 | else |
2670 | gcc_unreachable (); | |
57aaef66 RH |
2671 | |
2672 | return NULL; | |
4ca79136 RH |
2673 | } |
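/* clear_storage_hints picks the cheapest available way to zero OBJECT:
   a single move of CONST0_RTX when the mode matches SIZE, writing both
   parts of a complex value, clear_by_pieces for small constant sizes, a
   target setmem pattern via set_storage_via_setmem, and finally a memset
   call through set_storage_via_libcall (only for the generic address
   space).  */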
2674 | ||
079a182e JH |
2675 | rtx |
2676 | clear_storage (rtx object, rtx size, enum block_op_methods method) | |
2677 | { | |
2678 | return clear_storage_hints (object, size, method, 0, -1); | |
2679 | } | |
2680 | ||
2681 | ||
8f99553f | 2682 | /* A subroutine of clear_storage. Expand a call to memset. |
4ca79136 | 2683 | Return the return value of memset, 0 otherwise. */ |
9de08200 | 2684 | |
8c996513 JH |
2685 | rtx |
2686 | set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall) | |
4ca79136 | 2687 | { |
5039610b | 2688 | tree call_expr, fn, object_tree, size_tree, val_tree; |
4ca79136 RH |
2689 | enum machine_mode size_mode; |
2690 | rtx retval; | |
9de08200 | 2691 | |
ad76cef8 PB |
2692 | /* Emit code to copy OBJECT and SIZE into new pseudos. We can then |
2693 | place those new pseudos into a VAR_DECL and use them later. */ |
52cf7115 | 2694 | |
4ca79136 | 2695 | object = copy_to_mode_reg (Pmode, XEXP (object, 0)); |
52cf7115 | 2696 | |
8f99553f | 2697 | size_mode = TYPE_MODE (sizetype); |
4ca79136 RH |
2698 | size = convert_to_mode (size_mode, size, 1); |
2699 | size = copy_to_mode_reg (size_mode, size); | |
52cf7115 | 2700 | |
4ca79136 RH |
2701 | /* It is incorrect to use the libcall calling conventions to call |
2702 | memset in this context. This could be a user call to memset and | |
2703 | the user may wish to examine the return value from memset. For | |
2704 | targets where libcalls and normal calls have different conventions | |
8f99553f | 2705 | for returning pointers, we could end up generating incorrect code. */ |
4bc973ae | 2706 | |
4ca79136 | 2707 | object_tree = make_tree (ptr_type_node, object); |
481683e1 | 2708 | if (!CONST_INT_P (val)) |
8c996513 | 2709 | val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1); |
8f99553f | 2710 | size_tree = make_tree (sizetype, size); |
8c996513 | 2711 | val_tree = make_tree (integer_type_node, val); |
4ca79136 RH |
2712 | |
2713 | fn = clear_storage_libcall_fn (true); | |
038dc49a | 2714 | call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree); |
8148fe65 | 2715 | CALL_EXPR_TAILCALL (call_expr) = tailcall; |
4ca79136 | 2716 | |
84217346 | 2717 | retval = expand_normal (call_expr); |
4ca79136 | 2718 | |
8f99553f | 2719 | return retval; |
4ca79136 RH |
2720 | } |
2721 | ||
8c996513 | 2722 | /* A subroutine of set_storage_via_libcall. Create the tree node |
4ca79136 RH |
2723 | for the function we use for block clears. The first time FOR_CALL |
2724 | is true, we call assemble_external. */ | |
2725 | ||
8dd5516b | 2726 | tree block_clear_fn; |
66c60e67 | 2727 | |
9661b15f | 2728 | void |
502b8322 | 2729 | init_block_clear_fn (const char *asmspec) |
4ca79136 | 2730 | { |
9661b15f | 2731 | if (!block_clear_fn) |
4ca79136 | 2732 | { |
9661b15f JJ |
2733 | tree fn, args; |
2734 | ||
8f99553f JM |
2735 | fn = get_identifier ("memset"); |
2736 | args = build_function_type_list (ptr_type_node, ptr_type_node, | |
2737 | integer_type_node, sizetype, | |
2738 | NULL_TREE); | |
4ca79136 | 2739 | |
c2255bc4 | 2740 | fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args); |
4ca79136 RH |
2741 | DECL_EXTERNAL (fn) = 1; |
2742 | TREE_PUBLIC (fn) = 1; | |
2743 | DECL_ARTIFICIAL (fn) = 1; | |
2744 | TREE_NOTHROW (fn) = 1; | |
5b5cba1f JM |
2745 | DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT; |
2746 | DECL_VISIBILITY_SPECIFIED (fn) = 1; | |
4ca79136 RH |
2747 | |
2748 | block_clear_fn = fn; | |
bbf6f052 | 2749 | } |
e9a25f70 | 2750 | |
9661b15f | 2751 | if (asmspec) |
0e6df31e | 2752 | set_user_assembler_name (block_clear_fn, asmspec); |
9661b15f JJ |
2753 | } |
2754 | ||
2755 | static tree | |
502b8322 | 2756 | clear_storage_libcall_fn (int for_call) |
9661b15f JJ |
2757 | { |
2758 | static bool emitted_extern; | |
2759 | ||
2760 | if (!block_clear_fn) | |
2761 | init_block_clear_fn (NULL); | |
2762 | ||
4ca79136 RH |
2763 | if (for_call && !emitted_extern) |
2764 | { | |
2765 | emitted_extern = true; | |
0e6df31e | 2766 | make_decl_rtl (block_clear_fn); |
9661b15f | 2767 | assemble_external (block_clear_fn); |
4ca79136 | 2768 | } |
bbf6f052 | 2769 | |
9661b15f | 2770 | return block_clear_fn; |
4ca79136 | 2771 | } |
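
/* Hypothetical sketch: when the user gives memset a different assembler
   name, the decl built above can be redirected to it.  The name
   "_my_memset" is invented for the example.  */

static void
example_rename_block_clear (void)
{
  init_block_clear_fn ("_my_memset");
}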
57e84f18 AS |
2772 | \f |
2773 | /* Expand a setmem pattern; return true if successful. */ | |
2774 | ||
2775 | bool | |
079a182e JH |
2776 | set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align, |
2777 | unsigned int expected_align, HOST_WIDE_INT expected_size) | |
57e84f18 AS |
2778 | { |
2779 | /* Try the most limited insn first, because there's no point | |
2780 | including more than one in the machine description unless | |
2781 | the more limited one has some advantage. */ | |
2782 | ||
2783 | rtx opalign = GEN_INT (align / BITS_PER_UNIT); | |
2784 | enum machine_mode mode; | |
2785 | ||
079a182e JH |
2786 | if (expected_align < align) |
2787 | expected_align = align; | |
2788 | ||
57e84f18 AS |
2789 | for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode; |
2790 | mode = GET_MODE_WIDER_MODE (mode)) | |
2791 | { | |
2792 | enum insn_code code = setmem_optab[(int) mode]; | |
2793 | insn_operand_predicate_fn pred; | |
2794 | ||
2795 | if (code != CODE_FOR_nothing | |
2796 | /* We don't need MODE to be narrower than | |
2797 | BITS_PER_HOST_WIDE_INT here because if SIZE is less than | |
2798 | the mode mask, as it is returned by the macro, it will | |
2799 | definitely be less than the actual mode mask. */ | |
481683e1 | 2800 | && ((CONST_INT_P (size) |
57e84f18 AS |
2801 | && ((unsigned HOST_WIDE_INT) INTVAL (size) |
2802 | <= (GET_MODE_MASK (mode) >> 1))) | |
2803 | || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD) | |
2804 | && ((pred = insn_data[(int) code].operand[0].predicate) == 0 | |
2805 | || (*pred) (object, BLKmode)) | |
2806 | && ((pred = insn_data[(int) code].operand[3].predicate) == 0 | |
2807 | || (*pred) (opalign, VOIDmode))) | |
2808 | { | |
9ed92901 AS |
2809 | rtx opsize, opchar; |
2810 | enum machine_mode char_mode; | |
57e84f18 AS |
2811 | rtx last = get_last_insn (); |
2812 | rtx pat; | |
2813 | ||
2814 | opsize = convert_to_mode (mode, size, 1); | |
2815 | pred = insn_data[(int) code].operand[1].predicate; | |
2816 | if (pred != 0 && ! (*pred) (opsize, mode)) | |
2817 | opsize = copy_to_mode_reg (mode, opsize); | |
ed1223ba | 2818 | |
9ed92901 AS |
2819 | opchar = val; |
2820 | char_mode = insn_data[(int) code].operand[2].mode; | |
2821 | if (char_mode != VOIDmode) | |
2822 | { | |
2823 | opchar = convert_to_mode (char_mode, opchar, 1); | |
2824 | pred = insn_data[(int) code].operand[2].predicate; | |
2825 | if (pred != 0 && ! (*pred) (opchar, char_mode)) | |
2826 | opchar = copy_to_mode_reg (char_mode, opchar); | |
2827 | } | |
57e84f18 | 2828 | |
079a182e JH |
2829 | if (insn_data[(int) code].n_operands == 4) |
2830 | pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign); | |
2831 | else | |
2832 | pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign, | |
9946ca2d RA |
2833 | GEN_INT (expected_align |
2834 | / BITS_PER_UNIT), | |
079a182e | 2835 | GEN_INT (expected_size)); |
57e84f18 AS |
2836 | if (pat) |
2837 | { | |
2838 | emit_insn (pat); | |
2839 | return true; | |
2840 | } | |
2841 | else | |
2842 | delete_insns_since (last); | |
2843 | } | |
2844 | } | |
2845 | ||
2846 | return false; | |
2847 | } | |
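
/* Hypothetical sketch: try to fill a 64-byte block with the byte 0xff
   using the target's setmem pattern.  The constants (size 64, 32-bit
   alignment, matching expected hints) are invented for the example; a
   false return means the caller must fall back to another strategy.  */

static bool
example_fill_block (rtx mem)
{
  return set_storage_via_setmem (mem, GEN_INT (64), GEN_INT (0xff),
                                 32, 32, 64);
}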
2848 | ||
4ca79136 | 2849 | \f |
1466e387 RH |
2850 | /* Write to one of the components of the complex value CPLX. Write VAL to |
2851 | the real part if IMAG_P is false, and the imaginary part if it's true. */ |
bbf6f052 | 2852 | |
1466e387 RH |
2853 | static void |
2854 | write_complex_part (rtx cplx, rtx val, bool imag_p) | |
2855 | { | |
ddf4e03f RH |
2856 | enum machine_mode cmode; |
2857 | enum machine_mode imode; | |
2858 | unsigned ibitsize; | |
2859 | ||
1466e387 | 2860 | if (GET_CODE (cplx) == CONCAT) |
1466e387 | 2861 | { |
ddf4e03f RH |
2862 | emit_move_insn (XEXP (cplx, imag_p), val); |
2863 | return; | |
2864 | } | |
2865 | ||
2866 | cmode = GET_MODE (cplx); | |
2867 | imode = GET_MODE_INNER (cmode); | |
2868 | ibitsize = GET_MODE_BITSIZE (imode); | |
bbf6f052 | 2869 | |
7a31c801 DE |
2870 | /* For MEMs simplify_gen_subreg may generate an invalid new address |
2871 | because, e.g., the original address is considered mode-dependent | |
2872 | by the target, which restricts simplify_subreg from invoking | |
2873 | adjust_address_nv. Instead of preparing fallback support for an | |
2874 | invalid address, we call adjust_address_nv directly. */ | |
2875 | if (MEM_P (cplx)) | |
22469409 BW |
2876 | { |
2877 | emit_move_insn (adjust_address_nv (cplx, imode, | |
2878 | imag_p ? GET_MODE_SIZE (imode) : 0), | |
2879 | val); | |
2880 | return; | |
2881 | } | |
7a31c801 | 2882 | |
ddf4e03f RH |
2883 | /* If the sub-object is at least word sized, then we know that subregging |
2884 | will work. This special case is important, since store_bit_field | |
2885 | wants to operate on integer modes, and there's rarely an OImode to | |
2886 | correspond to TCmode. */ | |
36d7571c EB |
2887 | if (ibitsize >= BITS_PER_WORD |
2888 | /* For hard regs we have exact predicates. Assume we can split | |
2889 | the original object if it spans an even number of hard regs. | |
2890 | This special case is important for SCmode on 64-bit platforms | |
2891 | where the natural size of floating-point regs is 32-bit. */ | |
2ca202e7 | 2892 | || (REG_P (cplx) |
36d7571c | 2893 | && REGNO (cplx) < FIRST_PSEUDO_REGISTER |
7a31c801 | 2894 | && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0)) |
ddf4e03f RH |
2895 | { |
2896 | rtx part = simplify_gen_subreg (imode, cplx, cmode, | |
2897 | imag_p ? GET_MODE_SIZE (imode) : 0); | |
36d7571c EB |
2898 | if (part) |
2899 | { | |
2900 | emit_move_insn (part, val); | |
2901 | return; | |
2902 | } | |
2903 | else | |
2904 | /* simplify_gen_subreg may fail for sub-word MEMs. */ | |
2905 | gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD); | |
1466e387 | 2906 | } |
36d7571c EB |
2907 | |
2908 | store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val); | |
1466e387 RH |
2909 | } |
2910 | ||
2911 | /* Extract one of the components of the complex value CPLX. Extract the | |
2912 | real part if IMAG_P is false, and the imaginary part if it's true. */ | |
2913 | ||
2914 | static rtx | |
2915 | read_complex_part (rtx cplx, bool imag_p) | |
bbf6f052 | 2916 | { |
1466e387 RH |
2917 | enum machine_mode cmode, imode; |
2918 | unsigned ibitsize; | |
bbf6f052 | 2919 | |
1466e387 RH |
2920 | if (GET_CODE (cplx) == CONCAT) |
2921 | return XEXP (cplx, imag_p); | |
bbf6f052 | 2922 | |
1466e387 RH |
2923 | cmode = GET_MODE (cplx); |
2924 | imode = GET_MODE_INNER (cmode); | |
2925 | ibitsize = GET_MODE_BITSIZE (imode); | |
2926 | ||
2927 | /* Special case reads from complex constants that got spilled to memory. */ | |
2928 | if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF) | |
de1b33dd | 2929 | { |
1466e387 RH |
2930 | tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0)); |
2931 | if (decl && TREE_CODE (decl) == COMPLEX_CST) | |
2932 | { | |
2933 | tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl); | |
2934 | if (CONSTANT_CLASS_P (part)) | |
2935 | return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL); | |
2936 | } | |
2937 | } | |
51286de6 | 2938 | |
7a31c801 DE |
2939 | /* For MEMs simplify_gen_subreg may generate an invalid new address |
2940 | because, e.g., the original address is considered mode-dependent | |
2941 | by the target, which restricts simplify_subreg from invoking | |
2942 | adjust_address_nv. Instead of preparing fallback support for an | |
2943 | invalid address, we call adjust_address_nv directly. */ | |
2944 | if (MEM_P (cplx)) | |
2945 | return adjust_address_nv (cplx, imode, | |
2946 | imag_p ? GET_MODE_SIZE (imode) : 0); | |
2947 | ||
ddf4e03f RH |
2948 | /* If the sub-object is at least word sized, then we know that subregging |
2949 | will work. This special case is important, since extract_bit_field | |
2950 | wants to operate on integer modes, and there's rarely an OImode to | |
2951 | correspond to TCmode. */ | |
36d7571c EB |
2952 | if (ibitsize >= BITS_PER_WORD |
2953 | /* For hard regs we have exact predicates. Assume we can split | |
2954 | the original object if it spans an even number of hard regs. | |
2955 | This special case is important for SCmode on 64-bit platforms | |
2956 | where the natural size of floating-point regs is 32-bit. */ | |
2ca202e7 | 2957 | || (REG_P (cplx) |
36d7571c | 2958 | && REGNO (cplx) < FIRST_PSEUDO_REGISTER |
7a31c801 | 2959 | && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0)) |
ddf4e03f RH |
2960 | { |
2961 | rtx ret = simplify_gen_subreg (imode, cplx, cmode, | |
2962 | imag_p ? GET_MODE_SIZE (imode) : 0); | |
36d7571c EB |
2963 | if (ret) |
2964 | return ret; | |
2965 | else | |
2966 | /* simplify_gen_subreg may fail for sub-word MEMs. */ | |
2967 | gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD); | |
ddf4e03f RH |
2968 | } |
2969 | ||
1466e387 RH |
2970 | return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, |
2971 | true, NULL_RTX, imode, imode); | |
2972 | } | |
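
/* Hypothetical sketch (usable only inside this file, since the helpers
   are static): copy the real part of Y into the imaginary part of X.
   Both rtxes are assumed to have the same complex mode.  */

static void
example_copy_real_to_imag (rtx x, rtx y)
{
  write_complex_part (x, read_complex_part (y, false), true);
}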
2973 | \f | |
539eaa3a | 2974 | /* A subroutine of emit_move_insn_1. Yet another lowpart generator. |
074e6d01 | 2975 | NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be |
539eaa3a RH |
2976 | represented in NEW_MODE. If FORCE is true, this will never happen, as |
2977 | we'll force-create a SUBREG if needed. */ | |
0c19a26f | 2978 | |
1466e387 | 2979 | static rtx |
074e6d01 | 2980 | emit_move_change_mode (enum machine_mode new_mode, |
539eaa3a | 2981 | enum machine_mode old_mode, rtx x, bool force) |
1466e387 | 2982 | { |
074e6d01 | 2983 | rtx ret; |
1466e387 | 2984 | |
4bcc9de5 UB |
2985 | if (push_operand (x, GET_MODE (x))) |
2986 | { | |
2987 | ret = gen_rtx_MEM (new_mode, XEXP (x, 0)); | |
2988 | MEM_COPY_ATTRIBUTES (ret, x); | |
2989 | } | |
2990 | else if (MEM_P (x)) | |
1466e387 | 2991 | { |
ef7befe0 BE |
2992 | /* We don't have to worry about changing the address since the |
2993 | size in bytes is supposed to be the same. */ | |
2994 | if (reload_in_progress) | |
2995 | { | |
2996 | /* Copy the MEM to change the mode and move any | |
2997 | substitutions from the old MEM to the new one. */ | |
2998 | ret = adjust_address_nv (x, new_mode, 0); | |
2999 | copy_replacements (x, ret); | |
3000 | } | |
3001 | else | |
3002 | ret = adjust_address (x, new_mode, 0); | |
de1b33dd | 3003 | } |
1466e387 RH |
3004 | else |
3005 | { | |
35fd3193 | 3006 | /* Note that we do want simplify_subreg's behavior of validating |
074e6d01 RH |
3007 | that the new mode is ok for a hard register. If we were to use |
3008 | simplify_gen_subreg, we would create the subreg, but would | |
3009 | probably run into the target not being able to implement it. */ | |
539eaa3a RH |
3010 | /* Except, of course, when FORCE is true, when this is exactly what |
3011 | we want. Which is needed for CCmodes on some targets. */ | |
3012 | if (force) | |
3013 | ret = simplify_gen_subreg (new_mode, x, old_mode, 0); | |
3014 | else | |
3015 | ret = simplify_subreg (new_mode, x, old_mode, 0); | |
1466e387 | 3016 | } |
bbf6f052 | 3017 | |
074e6d01 RH |
3018 | return ret; |
3019 | } | |
3020 | ||
1466e387 RH |
3021 | /* A subroutine of emit_move_insn_1. Generate a move from Y into X using |
3022 | an integer mode of the same size as MODE. Returns the instruction | |
3023 | emitted, or NULL if such a move could not be generated. */ | |
bbf6f052 | 3024 | |
1466e387 | 3025 | static rtx |
652b0932 | 3026 | emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force) |
1466e387 RH |
3027 | { |
3028 | enum machine_mode imode; | |
3029 | enum insn_code code; | |
bbf6f052 | 3030 | |
1466e387 RH |
3031 | /* There must exist a mode of the exact size we require. */ |
3032 | imode = int_mode_for_mode (mode); | |
3033 | if (imode == BLKmode) | |
3034 | return NULL_RTX; | |
de1b33dd | 3035 | |
1466e387 | 3036 | /* The target must support moves in this mode. */ |
166cdb08 | 3037 | code = optab_handler (mov_optab, imode)->insn_code; |
1466e387 RH |
3038 | if (code == CODE_FOR_nothing) |
3039 | return NULL_RTX; | |
de1b33dd | 3040 | |
652b0932 | 3041 | x = emit_move_change_mode (imode, mode, x, force); |
539eaa3a RH |
3042 | if (x == NULL_RTX) |
3043 | return NULL_RTX; | |
652b0932 | 3044 | y = emit_move_change_mode (imode, mode, y, force); |
539eaa3a RH |
3045 | if (y == NULL_RTX) |
3046 | return NULL_RTX; | |
3047 | return emit_insn (GEN_FCN (code) (x, y)); | |
261c4230 RS |
3048 | } |
3049 | ||
1466e387 RH |
3050 | /* A subroutine of emit_move_insn_1. X is a push_operand in MODE. |
3051 | Return an equivalent MEM that does not use an auto-increment. */ | |
261c4230 | 3052 | |
1466e387 RH |
3053 | static rtx |
3054 | emit_move_resolve_push (enum machine_mode mode, rtx x) | |
261c4230 | 3055 | { |
1466e387 RH |
3056 | enum rtx_code code = GET_CODE (XEXP (x, 0)); |
3057 | HOST_WIDE_INT adjust; | |
3058 | rtx temp; | |
261c4230 | 3059 | |
1466e387 RH |
3060 | adjust = GET_MODE_SIZE (mode); |
3061 | #ifdef PUSH_ROUNDING | |
3062 | adjust = PUSH_ROUNDING (adjust); | |
3063 | #endif | |
3064 | if (code == PRE_DEC || code == POST_DEC) | |
3065 | adjust = -adjust; | |
6541fe75 JJ |
3066 | else if (code == PRE_MODIFY || code == POST_MODIFY) |
3067 | { | |
3068 | rtx expr = XEXP (XEXP (x, 0), 1); | |
3069 | HOST_WIDE_INT val; | |
3070 | ||
3071 | gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS); | |
481683e1 | 3072 | gcc_assert (CONST_INT_P (XEXP (expr, 1))); |
6541fe75 JJ |
3073 | val = INTVAL (XEXP (expr, 1)); |
3074 | if (GET_CODE (expr) == MINUS) | |
3075 | val = -val; | |
3076 | gcc_assert (adjust == val || adjust == -val); | |
3077 | adjust = val; | |
3078 | } | |
76bbe028 | 3079 | |
1466e387 RH |
3080 | /* Do not use anti_adjust_stack, since we don't want to update |
3081 | stack_pointer_delta. */ | |
3082 | temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx, | |
3083 | GEN_INT (adjust), stack_pointer_rtx, | |
3084 | 0, OPTAB_LIB_WIDEN); | |
3085 | if (temp != stack_pointer_rtx) | |
3086 | emit_move_insn (stack_pointer_rtx, temp); | |
bbf6f052 | 3087 | |
1466e387 | 3088 | switch (code) |
7308a047 | 3089 | { |
1466e387 RH |
3090 | case PRE_INC: |
3091 | case PRE_DEC: | |
6541fe75 | 3092 | case PRE_MODIFY: |
1466e387 RH |
3093 | temp = stack_pointer_rtx; |
3094 | break; | |
3095 | case POST_INC: | |
1466e387 | 3096 | case POST_DEC: |
6541fe75 JJ |
3097 | case POST_MODIFY: |
3098 | temp = plus_constant (stack_pointer_rtx, -adjust); | |
1466e387 RH |
3099 | break; |
3100 | default: | |
3101 | gcc_unreachable (); | |
3102 | } | |
7308a047 | 3103 | |
1466e387 RH |
3104 | return replace_equiv_address (x, temp); |
3105 | } | |
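
/* Worked illustration (hypothetical operands): for X equal to
   (mem:SI (pre_dec (reg sp))) on a target that pushes 4 bytes at a time,
   the code above emits the sp -= 4 adjustment itself and returns
   (mem:SI (reg sp)), so the caller can store through a plain,
   non-autoincrement address.  */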
1a06f5fe | 3106 | |
1466e387 RH |
3107 | /* A subroutine of emit_move_complex. Generate a move from Y into X. |
3108 | X is known to satisfy push_operand, and MODE is known to be complex. | |
3109 | Returns the last instruction emitted. */ | |
bb93b973 | 3110 | |
ceca734e | 3111 | rtx |
1466e387 RH |
3112 | emit_move_complex_push (enum machine_mode mode, rtx x, rtx y) |
3113 | { | |
3114 | enum machine_mode submode = GET_MODE_INNER (mode); | |
3115 | bool imag_first; | |
bb93b973 | 3116 | |
1466e387 RH |
3117 | #ifdef PUSH_ROUNDING |
3118 | unsigned int submodesize = GET_MODE_SIZE (submode); | |
bb93b973 | 3119 | |
1466e387 RH |
3120 | /* If we are pushing to the stack but the size is smaller than what the |
3121 | machine can push exactly, we must use move instructions instead. */ |
3122 | if (PUSH_ROUNDING (submodesize) != submodesize) | |
3123 | { | |
3124 | x = emit_move_resolve_push (mode, x); | |
3125 | return emit_move_insn (x, y); | |
3126 | } | |
79ce92d7 | 3127 | #endif |
7308a047 | 3128 | |
1466e387 RH |
3129 | /* Note that the real part always precedes the imag part in memory |
3130 | regardless of machine's endianness. */ | |
3131 | switch (GET_CODE (XEXP (x, 0))) | |
3132 | { | |
3133 | case PRE_DEC: | |
3134 | case POST_DEC: | |
3135 | imag_first = true; | |
3136 | break; | |
3137 | case PRE_INC: | |
3138 | case POST_INC: | |
3139 | imag_first = false; | |
3140 | break; | |
3141 | default: | |
3142 | gcc_unreachable (); | |
3143 | } | |
beb72684 | 3144 | |
1466e387 RH |
3145 | emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)), |
3146 | read_complex_part (y, imag_first)); | |
3147 | return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)), | |
3148 | read_complex_part (y, !imag_first)); | |
3149 | } | |
405f63da | 3150 | |
ceca734e RH |
3151 | /* A subroutine of emit_move_complex. Perform the move from Y to X |
3152 | via two moves of the parts. Returns the last instruction emitted. */ | |
3153 | ||
3154 | rtx | |
3155 | emit_move_complex_parts (rtx x, rtx y) | |
3156 | { | |
3157 | /* Show the output dies here. This is necessary for SUBREGs | |
3158 | of pseudos since we cannot track their lifetimes correctly; | |
3159 | hard regs shouldn't appear here except as return values. */ | |
3160 | if (!reload_completed && !reload_in_progress | |
3161 | && REG_P (x) && !reg_overlap_mentioned_p (x, y)) | |
c41c1387 | 3162 | emit_clobber (x); |
ceca734e RH |
3163 | |
3164 | write_complex_part (x, read_complex_part (y, false), false); | |
3165 | write_complex_part (x, read_complex_part (y, true), true); | |
3166 | ||
3167 | return get_last_insn (); | |
3168 | } | |
3169 | ||
1466e387 RH |
3170 | /* A subroutine of emit_move_insn_1. Generate a move from Y into X. |
3171 | MODE is known to be complex. Returns the last instruction emitted. */ | |
beb72684 | 3172 | |
1466e387 RH |
3173 | static rtx |
3174 | emit_move_complex (enum machine_mode mode, rtx x, rtx y) | |
3175 | { | |
3176 | bool try_int; | |
405f63da | 3177 | |
1466e387 RH |
3178 | /* Need to take special care for pushes, to maintain proper ordering |
3179 | of the data, and possibly extra padding. */ | |
3180 | if (push_operand (x, mode)) | |
3181 | return emit_move_complex_push (mode, x, y); | |
7308a047 | 3182 | |
1466e387 RH |
3183 | /* See if we can coerce the target into moving both values at once. */ |
3184 | ||
c6506442 DE |
3185 | /* Move floating point as parts. */ |
3186 | if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT | |
166cdb08 | 3187 | && optab_handler (mov_optab, GET_MODE_INNER (mode))->insn_code != CODE_FOR_nothing) |
c6506442 | 3188 | try_int = false; |
1466e387 | 3189 | /* Not possible if the values are inherently not adjacent. */ |
c6506442 | 3190 | else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT) |
1466e387 RH |
3191 | try_int = false; |
3192 | /* Is possible if both are registers (or subregs of registers). */ | |
3193 | else if (register_operand (x, mode) && register_operand (y, mode)) | |
3194 | try_int = true; | |
3195 | /* If one of the operands is a memory, and alignment constraints | |
3196 | are friendly enough, we may be able to do combined memory operations. | |
3197 | We do not attempt this if Y is a constant because that combination is | |
3198 | usually better with the by-parts thing below. */ | |
3199 | else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y)) | |
3200 | && (!STRICT_ALIGNMENT | |
3201 | || get_mode_alignment (mode) == BIGGEST_ALIGNMENT)) | |
3202 | try_int = true; | |
3203 | else | |
3204 | try_int = false; | |
3205 | ||
3206 | if (try_int) | |
a3600c71 | 3207 | { |
c6506442 DE |
3208 | rtx ret; |
3209 | ||
3210 | /* For memory to memory moves, optimal behavior can be had with the | |
3211 | existing block move logic. */ | |
3212 | if (MEM_P (x) && MEM_P (y)) | |
3213 | { | |
3214 | emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)), | |
3215 | BLOCK_OP_NO_LIBCALL); | |
3216 | return get_last_insn (); | |
3217 | } | |
3218 | ||
652b0932 | 3219 | ret = emit_move_via_integer (mode, x, y, true); |
1466e387 RH |
3220 | if (ret) |
3221 | return ret; | |
3222 | } | |
a3600c71 | 3223 | |
ceca734e | 3224 | return emit_move_complex_parts (x, y); |
1466e387 | 3225 | } |
a3600c71 | 3226 | |
1466e387 RH |
3227 | /* A subroutine of emit_move_insn_1. Generate a move from Y into X. |
3228 | MODE is known to be MODE_CC. Returns the last instruction emitted. */ | |
a3600c71 | 3229 | |
1466e387 RH |
3230 | static rtx |
3231 | emit_move_ccmode (enum machine_mode mode, rtx x, rtx y) | |
3232 | { | |
3233 | rtx ret; | |
a3600c71 | 3234 | |
1466e387 RH |
3235 | /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */ |
3236 | if (mode != CCmode) | |
3237 | { | |
166cdb08 | 3238 | enum insn_code code = optab_handler (mov_optab, CCmode)->insn_code; |
1466e387 | 3239 | if (code != CODE_FOR_nothing) |
539eaa3a RH |
3240 | { |
3241 | x = emit_move_change_mode (CCmode, mode, x, true); | |
3242 | y = emit_move_change_mode (CCmode, mode, y, true); | |
3243 | return emit_insn (GEN_FCN (code) (x, y)); | |
3244 | } | |
1466e387 RH |
3245 | } |
3246 | ||
3247 | /* Otherwise, find the MODE_INT mode of the same width. */ | |
652b0932 | 3248 | ret = emit_move_via_integer (mode, x, y, false); |
1466e387 RH |
3249 | gcc_assert (ret != NULL); |
3250 | return ret; | |
3251 | } | |
3252 | ||
550ab0c6 JM |
3253 | /* Return true if word I of OP lies entirely in the |
3254 | undefined bits of a paradoxical subreg. */ | |
3255 | ||
3256 | static bool | |
22ea9ec0 | 3257 | undefined_operand_subword_p (const_rtx op, int i) |
550ab0c6 JM |
3258 | { |
3259 | enum machine_mode innermode, innermostmode; | |
3260 | int offset; | |
3261 | if (GET_CODE (op) != SUBREG) | |
3262 | return false; | |
3263 | innermode = GET_MODE (op); | |
3264 | innermostmode = GET_MODE (SUBREG_REG (op)); | |
3265 | offset = i * UNITS_PER_WORD + SUBREG_BYTE (op); | |
3266 | /* The SUBREG_BYTE represents offset, as if the value were stored in | |
3267 | memory, except for a paradoxical subreg where we define | |
3268 | SUBREG_BYTE to be 0; undo this exception as in | |
3269 | simplify_subreg. */ | |
3270 | if (SUBREG_BYTE (op) == 0 | |
3271 | && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode)) | |
3272 | { | |
3273 | int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode)); | |
3274 | if (WORDS_BIG_ENDIAN) | |
3275 | offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD; | |
3276 | if (BYTES_BIG_ENDIAN) | |
3277 | offset += difference % UNITS_PER_WORD; | |
3278 | } | |
3279 | if (offset >= GET_MODE_SIZE (innermostmode) | |
3280 | || offset <= -GET_MODE_SIZE (word_mode)) | |
3281 | return true; | |
3282 | return false; | |
3283 | } | |
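
/* Hypothetical illustration: on a 32-bit little-endian target,
   (subreg:DI (reg:SI R) 0) is a paradoxical subreg; word 0 comes from R
   while word 1 lies entirely in the undefined bits, so the predicate
   above returns true for I == 1 and emit_move_multi_word skips that
   word.  The helper below is invented for the example.  */

static bool
example_high_word_undefined (void)
{
  rtx inner = gen_reg_rtx (SImode);
  rtx para = gen_rtx_SUBREG (DImode, inner, 0);

  return undefined_operand_subword_p (para, 1);
}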
3284 | ||
1466e387 RH |
3285 | /* A subroutine of emit_move_insn_1. Generate a move from Y into X. |
3286 | MODE is any multi-word or full-word mode that lacks a move_insn | |
3287 | pattern. Note that you will get better code if you define such | |
3288 | patterns, even if they must turn into multiple assembler instructions. */ | |
3289 | ||
3290 | static rtx | |
3291 | emit_move_multi_word (enum machine_mode mode, rtx x, rtx y) | |
3292 | { | |
3293 | rtx last_insn = 0; | |
3294 | rtx seq, inner; | |
3295 | bool need_clobber; | |
3296 | int i; | |
ed1223ba | 3297 | |
1466e387 | 3298 | gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD); |
ed1223ba | 3299 | |
1466e387 RH |
3300 | /* If X is a push on the stack, do the push now and replace |
3301 | X with a reference to the stack pointer. */ | |
3302 | if (push_operand (x, mode)) | |
3303 | x = emit_move_resolve_push (mode, x); | |
3304 | ||
3305 | /* If we are in reload, see if either operand is a MEM whose address | |
3306 | is scheduled for replacement. */ | |
3307 | if (reload_in_progress && MEM_P (x) | |
3308 | && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0)) | |
3309 | x = replace_equiv_address_nv (x, inner); | |
3310 | if (reload_in_progress && MEM_P (y) | |
3311 | && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0)) | |
3312 | y = replace_equiv_address_nv (y, inner); | |
3313 | ||
3314 | start_sequence (); | |
3315 | ||
3316 | need_clobber = false; | |
3317 | for (i = 0; | |
3318 | i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD; | |
3319 | i++) | |
3320 | { | |
3321 | rtx xpart = operand_subword (x, i, 1, mode); | |
550ab0c6 JM |
3322 | rtx ypart; |
3323 | ||
3324 | /* Do not generate code for a move if it would come entirely | |
3325 | from the undefined bits of a paradoxical subreg. */ | |
3326 | if (undefined_operand_subword_p (y, i)) | |
3327 | continue; | |
3328 | ||
3329 | ypart = operand_subword (y, i, 1, mode); | |
1466e387 RH |
3330 | |
3331 | /* If we can't get a part of Y, put Y into memory if it is a | |
535a42b1 NS |
3332 | constant. Otherwise, force it into a register. Then we must |
3333 | be able to get a part of Y. */ | |
1466e387 | 3334 | if (ypart == 0 && CONSTANT_P (y)) |
a3600c71 | 3335 | { |
aacd3885 | 3336 | y = use_anchored_address (force_const_mem (mode, y)); |
1466e387 | 3337 | ypart = operand_subword (y, i, 1, mode); |
a3600c71 | 3338 | } |
1466e387 RH |
3339 | else if (ypart == 0) |
3340 | ypart = operand_subword_force (y, i, mode); | |
3341 | ||
3342 | gcc_assert (xpart && ypart); | |
3343 | ||
3344 | need_clobber |= (GET_CODE (xpart) == SUBREG); | |
502b8322 | 3345 | |
1466e387 | 3346 | last_insn = emit_move_insn (xpart, ypart); |
a3600c71 HPN |
3347 | } |
3348 | ||
1466e387 RH |
3349 | seq = get_insns (); |
3350 | end_sequence (); | |
3351 | ||
3352 | /* Show the output dies here. This is necessary for SUBREGs | |
3353 | of pseudos since we cannot track their lifetimes correctly; | |
3354 | hard regs shouldn't appear here except as return values. | |
3355 | We never want to emit such a clobber after reload. */ | |
3356 | if (x != y | |
3357 | && ! (reload_in_progress || reload_completed) | |
3358 | && need_clobber != 0) | |
c41c1387 | 3359 | emit_clobber (x); |
1466e387 RH |
3360 | |
3361 | emit_insn (seq); | |
3362 | ||
3363 | return last_insn; | |
3364 | } | |
3365 | ||
3366 | /* Low level part of emit_move_insn. | |
3367 | Called just like emit_move_insn, but assumes X and Y | |
3368 | are basically valid. */ | |
3369 | ||
3370 | rtx | |
3371 | emit_move_insn_1 (rtx x, rtx y) | |
3372 | { | |
3373 | enum machine_mode mode = GET_MODE (x); | |
3374 | enum insn_code code; | |
3375 | ||
3376 | gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE); | |
3377 | ||
166cdb08 | 3378 | code = optab_handler (mov_optab, mode)->insn_code; |
1466e387 RH |
3379 | if (code != CODE_FOR_nothing) |
3380 | return emit_insn (GEN_FCN (code) (x, y)); | |
3381 | ||
3382 | /* Expand complex moves by moving real part and imag part. */ | |
3383 | if (COMPLEX_MODE_P (mode)) | |
3384 | return emit_move_complex (mode, x, y); | |
3385 | ||
0f996086 CF |
3386 | if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT |
3387 | || ALL_FIXED_POINT_MODE_P (mode)) | |
ef7befe0 BE |
3388 | { |
3389 | rtx result = emit_move_via_integer (mode, x, y, true); | |
3390 | ||
3391 | /* If we can't find an integer mode, use multi words. */ | |
3392 | if (result) | |
3393 | return result; | |
3394 | else | |
3395 | return emit_move_multi_word (mode, x, y); | |
3396 | } | |
3397 | ||
1466e387 RH |
3398 | if (GET_MODE_CLASS (mode) == MODE_CC) |
3399 | return emit_move_ccmode (mode, x, y); | |
3400 | ||
5581fc91 RS |
3401 | /* Try using a move pattern for the corresponding integer mode. This is |
3402 | only safe when simplify_subreg can convert MODE constants into integer | |
3403 | constants. At present, it can only do this reliably if the value | |
3404 | fits within a HOST_WIDE_INT. */ | |
1466e387 | 3405 | if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) |
bbf6f052 | 3406 | { |
652b0932 | 3407 | rtx ret = emit_move_via_integer (mode, x, y, false); |
1466e387 RH |
3408 | if (ret) |
3409 | return ret; | |
3410 | } | |
0fb7aeda | 3411 | |
1466e387 RH |
3412 | return emit_move_multi_word (mode, x, y); |
3413 | } | |
918a6124 | 3414 | |
1466e387 RH |
3415 | /* Generate code to copy Y into X. |
3416 | Both Y and X must have the same mode, except that | |
3417 | Y can be a constant with VOIDmode. | |
3418 | This mode cannot be BLKmode; use emit_block_move for that. | |
3a94c984 | 3419 | |
1466e387 | 3420 | Return the last instruction emitted. */ |
3ef1eef4 | 3421 | |
1466e387 RH |
3422 | rtx |
3423 | emit_move_insn (rtx x, rtx y) | |
3424 | { | |
3425 | enum machine_mode mode = GET_MODE (x); | |
3426 | rtx y_cst = NULL_RTX; | |
3427 | rtx last_insn, set; | |
15a7a8ec | 3428 | |
1466e387 RH |
3429 | gcc_assert (mode != BLKmode |
3430 | && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode)); | |
bbf6f052 | 3431 | |
1466e387 RH |
3432 | if (CONSTANT_P (y)) |
3433 | { | |
3434 | if (optimize | |
3435 | && SCALAR_FLOAT_MODE_P (GET_MODE (x)) | |
3436 | && (last_insn = compress_float_constant (x, y))) | |
3437 | return last_insn; | |
bbf6f052 | 3438 | |
1466e387 | 3439 | y_cst = y; |
bbf6f052 | 3440 | |
1466e387 RH |
3441 | if (!LEGITIMATE_CONSTANT_P (y)) |
3442 | { | |
3443 | y = force_const_mem (mode, y); | |
235ae7be | 3444 | |
1466e387 RH |
3445 | /* If the target's cannot_force_const_mem prevented the spill, |
3446 | assume that the target's move expanders will also take care | |
3447 | of the non-legitimate constant. */ | |
3448 | if (!y) | |
3449 | y = y_cst; | |
aacd3885 RS |
3450 | else |
3451 | y = use_anchored_address (y); | |
bbf6f052 | 3452 | } |
1466e387 | 3453 | } |
6551fa4d | 3454 | |
1466e387 RH |
3455 | /* If X or Y are memory references, verify that their addresses are valid |
3456 | for the machine. */ | |
3457 | if (MEM_P (x) | |
09e881c9 BE |
3458 | && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0), |
3459 | MEM_ADDR_SPACE (x)) | |
3de5e93a | 3460 | && ! push_operand (x, GET_MODE (x)))) |
1466e387 | 3461 | x = validize_mem (x); |
235ae7be | 3462 | |
1466e387 | 3463 | if (MEM_P (y) |
09e881c9 BE |
3464 | && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0), |
3465 | MEM_ADDR_SPACE (y))) | |
1466e387 | 3466 | y = validize_mem (y); |
235ae7be | 3467 | |
1466e387 | 3468 | gcc_assert (mode != BLKmode); |
235ae7be | 3469 | |
1466e387 RH |
3470 | last_insn = emit_move_insn_1 (x, y); |
3471 | ||
3472 | if (y_cst && REG_P (x) | |
3473 | && (set = single_set (last_insn)) != NULL_RTX | |
3474 | && SET_DEST (set) == x | |
3475 | && ! rtx_equal_p (y_cst, SET_SRC (set))) | |
3476 | set_unique_reg_note (last_insn, REG_EQUAL, y_cst); | |
3477 | ||
3478 | return last_insn; | |
bbf6f052 | 3479 | } |
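
/* Hypothetical usage sketch: load the constant 42 into a fresh SImode
   pseudo.  The constant and the mode are invented for the example.  */

static rtx
example_load_constant (void)
{
  rtx reg = gen_reg_rtx (SImode);

  /* emit_move_insn validates both operands and then lets
     emit_move_insn_1 pick a mov pattern or split the move.  */
  emit_move_insn (reg, GEN_INT (42));
  return reg;
}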
51286de6 RH |
3480 | |
3481 | /* If Y is representable exactly in a narrower mode, and the target can | |
3482 | perform the extension directly from constant or memory, then emit the | |
3483 | move as an extension. */ | |
3484 | ||
3485 | static rtx | |
502b8322 | 3486 | compress_float_constant (rtx x, rtx y) |
51286de6 RH |
3487 | { |
3488 | enum machine_mode dstmode = GET_MODE (x); | |
3489 | enum machine_mode orig_srcmode = GET_MODE (y); | |
3490 | enum machine_mode srcmode; | |
3491 | REAL_VALUE_TYPE r; | |
e4541b7a | 3492 | int oldcost, newcost; |
f40751dd | 3493 | bool speed = optimize_insn_for_speed_p (); |
51286de6 RH |
3494 | |
3495 | REAL_VALUE_FROM_CONST_DOUBLE (r, y); | |
3496 | ||
e4541b7a | 3497 | if (LEGITIMATE_CONSTANT_P (y)) |
f40751dd | 3498 | oldcost = rtx_cost (y, SET, speed); |
e4541b7a | 3499 | else |
f40751dd | 3500 | oldcost = rtx_cost (force_const_mem (dstmode, y), SET, speed); |
e4541b7a | 3501 | |
51286de6 RH |
3502 | for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode)); |
3503 | srcmode != orig_srcmode; | |
3504 | srcmode = GET_MODE_WIDER_MODE (srcmode)) | |
3505 | { | |
3506 | enum insn_code ic; | |
3507 | rtx trunc_y, last_insn; | |
3508 | ||
3509 | /* Skip if the target can't extend this way. */ | |
3510 | ic = can_extend_p (dstmode, srcmode, 0); | |
3511 | if (ic == CODE_FOR_nothing) | |
3512 | continue; | |
3513 | ||
3514 | /* Skip if the narrowed value isn't exact. */ | |
3515 | if (! exact_real_truncate (srcmode, &r)) | |
3516 | continue; | |
3517 | ||
3518 | trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode); | |
3519 | ||
3520 | if (LEGITIMATE_CONSTANT_P (trunc_y)) | |
3521 | { | |
3522 | /* Skip if the target needs extra instructions to perform | |
3523 | the extension. */ | |
3524 | if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode)) | |
3525 | continue; | |
e4541b7a | 3526 | /* This is valid, but may not be cheaper than the original. */ |
f40751dd | 3527 | newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed); |
e4541b7a DJ |
3528 | if (oldcost < newcost) |
3529 | continue; | |
51286de6 RH |
3530 | } |
3531 | else if (float_extend_from_mem[dstmode][srcmode]) | |
e4541b7a DJ |
3532 | { |
3533 | trunc_y = force_const_mem (srcmode, trunc_y); | |
3534 | /* This is valid, but may not be cheaper than the original. */ | |
f40751dd | 3535 | newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed); |
e4541b7a DJ |
3536 | if (oldcost < newcost) |
3537 | continue; | |
3538 | trunc_y = validize_mem (trunc_y); | |
3539 | } | |
51286de6 RH |
3540 | else |
3541 | continue; | |
d763e130 RS |
3542 | |
3543 | /* For CSE's benefit, force the compressed constant pool entry | |
3544 | into a new pseudo. This constant may be used in different modes, | |
3545 | and if not, combine will put things back together for us. */ | |
3546 | trunc_y = force_reg (srcmode, trunc_y); | |
51286de6 RH |
3547 | emit_unop_insn (ic, x, trunc_y, UNKNOWN); |
3548 | last_insn = get_last_insn (); | |
3549 | ||
f8cfc6aa | 3550 | if (REG_P (x)) |
0c19a26f | 3551 | set_unique_reg_note (last_insn, REG_EQUAL, y); |
51286de6 RH |
3552 | |
3553 | return last_insn; | |
3554 | } | |
3555 | ||
3556 | return NULL_RTX; | |
3557 | } | |
bbf6f052 RK |
3558 | \f |
3559 | /* Pushing data onto the stack. */ | |
3560 | ||
3561 | /* Push a block of length SIZE (perhaps variable) | |
3562 | and return an rtx to address the beginning of the block. | |
bbf6f052 RK |
3563 | The value may be virtual_outgoing_args_rtx. |
3564 | ||
3565 | EXTRA is the number of bytes of padding to push in addition to SIZE. | |
3566 | BELOW nonzero means this padding comes at low addresses; | |
3567 | otherwise, the padding comes at high addresses. */ | |
3568 | ||
3569 | rtx | |
502b8322 | 3570 | push_block (rtx size, int extra, int below) |
bbf6f052 | 3571 | { |
b3694847 | 3572 | rtx temp; |
88f63c77 RK |
3573 | |
3574 | size = convert_modes (Pmode, ptr_mode, size, 1); | |
bbf6f052 RK |
3575 | if (CONSTANT_P (size)) |
3576 | anti_adjust_stack (plus_constant (size, extra)); | |
f8cfc6aa | 3577 | else if (REG_P (size) && extra == 0) |
bbf6f052 RK |
3578 | anti_adjust_stack (size); |
3579 | else | |
3580 | { | |
ce48579b | 3581 | temp = copy_to_mode_reg (Pmode, size); |
bbf6f052 | 3582 | if (extra != 0) |
906c4e36 | 3583 | temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra), |
bbf6f052 RK |
3584 | temp, 0, OPTAB_LIB_WIDEN); |
3585 | anti_adjust_stack (temp); | |
3586 | } | |
3587 | ||
f73ad30e | 3588 | #ifndef STACK_GROWS_DOWNWARD |
f73ad30e | 3589 | if (0) |
f73ad30e JH |
3590 | #else |
3591 | if (1) | |
bbf6f052 | 3592 | #endif |
f73ad30e | 3593 | { |
f73ad30e JH |
3594 | temp = virtual_outgoing_args_rtx; |
3595 | if (extra != 0 && below) | |
3596 | temp = plus_constant (temp, extra); | |
3597 | } | |
3598 | else | |
3599 | { | |
481683e1 | 3600 | if (CONST_INT_P (size)) |
f73ad30e | 3601 | temp = plus_constant (virtual_outgoing_args_rtx, |
3a94c984 | 3602 | -INTVAL (size) - (below ? 0 : extra)); |
f73ad30e JH |
3603 | else if (extra != 0 && !below) |
3604 | temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx, | |
3bdf5ad1 | 3605 | negate_rtx (Pmode, plus_constant (size, extra))); |
f73ad30e JH |
3606 | else |
3607 | temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx, | |
3608 | negate_rtx (Pmode, size)); | |
3609 | } | |
bbf6f052 RK |
3610 | |
3611 | return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp); | |
3612 | } | |
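
/* Hypothetical sketch: reserve 16 bytes of argument space plus 4 bytes
   of padding at low addresses and return the address of the block.  The
   sizes are invented for the example.  */

static rtx
example_reserve_arg_block (void)
{
  return push_block (GEN_INT (16), 4, 1);
}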
3613 | ||
21d93687 RK |
3614 | #ifdef PUSH_ROUNDING |
3615 | ||
566aa174 | 3616 | /* Emit single push insn. */ |
21d93687 | 3617 | |
566aa174 | 3618 | static void |
502b8322 | 3619 | emit_single_push_insn (enum machine_mode mode, rtx x, tree type) |
566aa174 | 3620 | { |
566aa174 | 3621 | rtx dest_addr; |
918a6124 | 3622 | unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode)); |
566aa174 | 3623 | rtx dest; |
371b8fc0 JH |
3624 | enum insn_code icode; |
3625 | insn_operand_predicate_fn pred; | |
566aa174 | 3626 | |
371b8fc0 JH |
3627 | stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode)); |
3628 | /* If there is a push pattern, use it. Otherwise fall back to the old |
3629 | way of handing a MEM that represents the push to the move expander. */ |
166cdb08 | 3630 | icode = optab_handler (push_optab, mode)->insn_code; |
371b8fc0 JH |
3631 | if (icode != CODE_FOR_nothing) |
3632 | { | |
3633 | if (((pred = insn_data[(int) icode].operand[0].predicate) | |
505ddab6 | 3634 | && !((*pred) (x, mode)))) |
371b8fc0 JH |
3635 | x = force_reg (mode, x); |
3636 | emit_insn (GEN_FCN (icode) (x)); | |
3637 | return; | |
3638 | } | |
566aa174 JH |
3639 | if (GET_MODE_SIZE (mode) == rounded_size) |
3640 | dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx); | |
329d586f KH |
3641 | /* If we are to pad downward, adjust the stack pointer first and |
3642 | then store X into the stack location using an offset. This is | |
3643 | because emit_move_insn does not know how to pad; it does not have | |
3644 | access to type. */ | |
3645 | else if (FUNCTION_ARG_PADDING (mode, type) == downward) | |
3646 | { | |
3647 | unsigned padding_size = rounded_size - GET_MODE_SIZE (mode); | |
3648 | HOST_WIDE_INT offset; | |
3649 | ||
3650 | emit_move_insn (stack_pointer_rtx, | |
3651 | expand_binop (Pmode, | |
3652 | #ifdef STACK_GROWS_DOWNWARD | |
3653 | sub_optab, | |
3654 | #else | |
3655 | add_optab, | |
3656 | #endif | |
3657 | stack_pointer_rtx, | |
3658 | GEN_INT (rounded_size), | |
3659 | NULL_RTX, 0, OPTAB_LIB_WIDEN)); | |
3660 | ||
3661 | offset = (HOST_WIDE_INT) padding_size; | |
3662 | #ifdef STACK_GROWS_DOWNWARD | |
3663 | if (STACK_PUSH_CODE == POST_DEC) | |
3664 | /* We have already decremented the stack pointer, so get the | |
3665 | previous value. */ | |
3666 | offset += (HOST_WIDE_INT) rounded_size; | |
3667 | #else | |
3668 | if (STACK_PUSH_CODE == POST_INC) | |
3669 | /* We have already incremented the stack pointer, so get the | |
3670 | previous value. */ | |
3671 | offset -= (HOST_WIDE_INT) rounded_size; | |
3672 | #endif | |
3673 | dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset)); | |
3674 | } | |
566aa174 JH |
3675 | else |
3676 | { | |
3677 | #ifdef STACK_GROWS_DOWNWARD | |
329d586f | 3678 | /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */ |
566aa174 | 3679 | dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, |
505ddab6 | 3680 | GEN_INT (-(HOST_WIDE_INT) rounded_size)); |
566aa174 | 3681 | #else |
329d586f | 3682 | /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */ |
566aa174 JH |
3683 | dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, |
3684 | GEN_INT (rounded_size)); | |
3685 | #endif | |
3686 | dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr); | |
3687 | } | |
3688 | ||
3689 | dest = gen_rtx_MEM (mode, dest_addr); | |
3690 | ||
566aa174 JH |
3691 | if (type != 0) |
3692 | { | |
3693 | set_mem_attributes (dest, type, 1); | |
c3d32120 RK |
3694 | |
3695 | if (flag_optimize_sibling_calls) | |
3696 | /* Function incoming arguments may overlap with sibling call | |
3697 | outgoing arguments and we cannot allow reordering of reads | |
3698 | from function arguments with stores to outgoing arguments | |
3699 | of sibling calls. */ | |
3700 | set_mem_alias_set (dest, 0); | |
566aa174 JH |
3701 | } |
3702 | emit_move_insn (dest, x); | |
566aa174 | 3703 | } |
21d93687 | 3704 | #endif |
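
#ifdef PUSH_ROUNDING
/* Hypothetical sketch (push targets only): push one SImode value, using
   the target's push pattern when it exists and an autoincrement MEM
   otherwise.  The helper name is invented for the example.  */

static void
example_push_one_word (rtx val)
{
  emit_single_push_insn (SImode, val, integer_type_node);
}
#endif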
566aa174 | 3705 | |
bbf6f052 RK |
3706 | /* Generate code to push X onto the stack, assuming it has mode MODE and |
3707 | type TYPE. | |
3708 | MODE is redundant except when X is a CONST_INT (since they don't | |
3709 | carry mode info). | |
3710 | SIZE is an rtx for the size of data to be copied (in bytes), | |
3711 | needed only if X is BLKmode. | |
3712 | ||
f1eaaf73 | 3713 | ALIGN (in bits) is maximum alignment we can assume. |
bbf6f052 | 3714 | |
cd048831 | 3715 | If PARTIAL and REG are both nonzero, then copy that many of the first |
78a52f11 RH |
3716 | bytes of X into registers starting with REG, and push the rest of X. |
3717 | The amount of space pushed is decreased by PARTIAL bytes. | |
bbf6f052 | 3718 | REG must be a hard register in this case. |
cd048831 RK |
3719 | If REG is zero but PARTIAL is not, take all other actions for an |
3720 | argument partially in registers, but do not actually load any | |
3721 | registers. | |
bbf6f052 RK |
3722 | |
3723 | EXTRA is the amount in bytes of extra space to leave next to this arg. | |
6dc42e49 | 3724 | This is ignored if an argument block has already been allocated. |
bbf6f052 RK |
3725 | |
3726 | On a machine that lacks real push insns, ARGS_ADDR is the address of | |
3727 | the bottom of the argument block for this call. We use indexing off there | |
3728 | to store the arg. On machines with push insns, ARGS_ADDR is 0 when a | |
3729 | argument block has not been preallocated. | |
3730 | ||
e5e809f4 JL |
3731 | ARGS_SO_FAR is the size of args previously pushed for this call. |
3732 | ||
3733 | REG_PARM_STACK_SPACE is nonzero if functions require stack space | |
3734 | for arguments passed in registers. If nonzero, it will be the number | |
3735 | of bytes required. */ | |
bbf6f052 RK |
3736 | |
3737 | void | |
502b8322 AJ |
3738 | emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size, |
3739 | unsigned int align, int partial, rtx reg, int extra, | |
3740 | rtx args_addr, rtx args_so_far, int reg_parm_stack_space, | |
3741 | rtx alignment_pad) | |
bbf6f052 RK |
3742 | { |
3743 | rtx xinner; | |
3744 | enum direction stack_direction | |
3745 | #ifdef STACK_GROWS_DOWNWARD | |
3746 | = downward; | |
3747 | #else | |
3748 | = upward; | |
3749 | #endif | |
3750 | ||
3751 | /* Decide where to pad the argument: `downward' for below, | |
3752 | `upward' for above, or `none' for don't pad it. | |
3753 | Default is below for small data on big-endian machines; else above. */ | |
3754 | enum direction where_pad = FUNCTION_ARG_PADDING (mode, type); | |
3755 | ||
0fb7aeda | 3756 | /* Invert direction if stack is post-decrement. |
9e0e11bf GK |
3757 | FIXME: why? */ |
3758 | if (STACK_PUSH_CODE == POST_DEC) | |
bbf6f052 RK |
3759 | if (where_pad != none) |
3760 | where_pad = (where_pad == downward ? upward : downward); | |
3761 | ||
ad76cef8 | 3762 | xinner = x; |
bbf6f052 | 3763 | |
46bd2bee JM |
3764 | if (mode == BLKmode |
3765 | || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode))) | |
bbf6f052 RK |
3766 | { |
3767 | /* Copy a block into the stack, entirely or partially. */ | |
3768 | ||
b3694847 | 3769 | rtx temp; |
78a52f11 | 3770 | int used; |
531547e9 | 3771 | int offset; |
bbf6f052 | 3772 | int skip; |
3a94c984 | 3773 | |
78a52f11 RH |
3774 | offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT); |
3775 | used = partial - offset; | |
531547e9 | 3776 | |
46bd2bee JM |
3777 | if (mode != BLKmode) |
3778 | { | |
3779 | /* A value is to be stored in an insufficiently aligned | |
3780 | stack slot; copy via a suitably aligned slot if | |
3781 | necessary. */ | |
3782 | size = GEN_INT (GET_MODE_SIZE (mode)); | |
3783 | if (!MEM_P (xinner)) | |
3784 | { | |
3785 | temp = assign_temp (type, 0, 1, 1); | |
3786 | emit_move_insn (temp, xinner); | |
3787 | xinner = temp; | |
3788 | } | |
3789 | } | |
3790 | ||
5b0264cb | 3791 | gcc_assert (size); |
bbf6f052 | 3792 | |
bbf6f052 RK |
3793 | /* USED is now the # of bytes we need not copy to the stack |
3794 | because registers will take care of them. */ | |
3795 | ||
3796 | if (partial != 0) | |
f4ef873c | 3797 | xinner = adjust_address (xinner, BLKmode, used); |
bbf6f052 RK |
3798 | |
3799 | /* If the partial register-part of the arg counts in its stack size, | |
3800 | skip the part of stack space corresponding to the registers. | |
3801 | Otherwise, start copying to the beginning of the stack space, | |
3802 | by setting SKIP to 0. */ | |
e5e809f4 | 3803 | skip = (reg_parm_stack_space == 0) ? 0 : used; |
bbf6f052 RK |
3804 | |
3805 | #ifdef PUSH_ROUNDING | |
3806 | /* Do it with several push insns if that doesn't take lots of insns | |
3807 | and if there is no difficulty with push insns that skip bytes | |
3808 | on the stack for alignment purposes. */ | |
3809 | if (args_addr == 0 | |
f73ad30e | 3810 | && PUSH_ARGS |
481683e1 | 3811 | && CONST_INT_P (size) |
bbf6f052 | 3812 | && skip == 0 |
f26aca6d | 3813 | && MEM_ALIGN (xinner) >= align |
15914757 | 3814 | && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align)) |
bbf6f052 RK |
3815 | /* Here we avoid the case of a structure whose weak alignment |
3816 | forces many pushes of a small amount of data, | |
3817 | and such small pushes do rounding that causes trouble. */ | |
e1565e65 | 3818 | && ((! SLOW_UNALIGNED_ACCESS (word_mode, align)) |
19caa751 | 3819 | || align >= BIGGEST_ALIGNMENT |
f1eaaf73 DE |
3820 | || (PUSH_ROUNDING (align / BITS_PER_UNIT) |
3821 | == (align / BITS_PER_UNIT))) | |
bbf6f052 RK |
3822 | && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size)) |
3823 | { | |
3824 | /* Push padding now if padding above and stack grows down, | |
3825 | or if padding below and stack grows up. | |
3826 | But if space already allocated, this has already been done. */ | |
3827 | if (extra && args_addr == 0 | |
3828 | && where_pad != none && where_pad != stack_direction) | |
906c4e36 | 3829 | anti_adjust_stack (GEN_INT (extra)); |
bbf6f052 | 3830 | |
8fd3cf4e | 3831 | move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0); |
bbf6f052 RK |
3832 | } |
3833 | else | |
3a94c984 | 3834 | #endif /* PUSH_ROUNDING */ |
bbf6f052 | 3835 | { |
7ab923cc JJ |
3836 | rtx target; |
3837 | ||
bbf6f052 RK |
3838 | /* Otherwise make space on the stack and copy the data |
3839 | to the address of that space. */ | |
3840 | ||
3841 | /* Deduct words put into registers from the size we must copy. */ | |
3842 | if (partial != 0) | |
3843 | { | |
481683e1 | 3844 | if (CONST_INT_P (size)) |
906c4e36 | 3845 | size = GEN_INT (INTVAL (size) - used); |
bbf6f052 RK |
3846 | else |
3847 | size = expand_binop (GET_MODE (size), sub_optab, size, | |
906c4e36 RK |
3848 | GEN_INT (used), NULL_RTX, 0, |
3849 | OPTAB_LIB_WIDEN); | |
bbf6f052 RK |
3850 | } |
3851 | ||
3852 | /* Get the address of the stack space. | |
3853 | In this case, we do not deal with EXTRA separately. | |
3854 | A single stack adjust will do. */ | |
3855 | if (! args_addr) | |
3856 | { | |
3857 | temp = push_block (size, extra, where_pad == downward); | |
3858 | extra = 0; | |
3859 | } | |
481683e1 | 3860 | else if (CONST_INT_P (args_so_far)) |
bbf6f052 RK |
3861 | temp = memory_address (BLKmode, |
3862 | plus_constant (args_addr, | |
3863 | skip + INTVAL (args_so_far))); | |
3864 | else | |
3865 | temp = memory_address (BLKmode, | |
38a448ca RH |
3866 | plus_constant (gen_rtx_PLUS (Pmode, |
3867 | args_addr, | |
3868 | args_so_far), | |
bbf6f052 | 3869 | skip)); |
4ca79136 RH |
3870 | |
3871 | if (!ACCUMULATE_OUTGOING_ARGS) | |
3872 | { | |
3873 | /* If the source is referenced relative to the stack pointer, | |
3874 | copy it to another register to stabilize it. We do not need | |
3875 | to do this if we know that we won't be changing sp. */ | |
3876 | ||
3877 | if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp) | |
3878 | || reg_mentioned_p (virtual_outgoing_args_rtx, temp)) | |
3879 | temp = copy_to_reg (temp); | |
3880 | } | |
3881 | ||
3a94c984 | 3882 | target = gen_rtx_MEM (BLKmode, temp); |
7ab923cc | 3883 | |
2bb16349 RH |
3884 | /* We do *not* set_mem_attributes here, because incoming arguments |
3885 | may overlap with sibling call outgoing arguments and we cannot | |
3886 | allow reordering of reads from function arguments with stores | |
3887 | to outgoing arguments of sibling calls. We do, however, want | |
3888 | to record the alignment of the stack slot. */ | |
44bb111a RH |
3889 | /* ALIGN may well be better aligned than TYPE, e.g. due to |
3890 | PARM_BOUNDARY. Assume the caller isn't lying. */ | |
3891 | set_mem_align (target, align); | |
4ca79136 | 3892 | |
44bb111a | 3893 | emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM); |
bbf6f052 RK |
3894 | } |
3895 | } | |
3896 | else if (partial > 0) | |
3897 | { | |
3898 | /* Scalar partly in registers. */ | |
3899 | ||
3900 | int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD; | |
3901 | int i; | |
3902 | int not_stack; | |
78a52f11 | 3903 | /* # bytes of start of argument |
bbf6f052 | 3904 | that we must make space for but need not store. */ |
ac7e839c | 3905 | int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT); |
bbf6f052 RK |
3906 | int args_offset = INTVAL (args_so_far); |
3907 | int skip; | |
3908 | ||
3909 | /* Push padding now if padding above and stack grows down, | |
3910 | or if padding below and stack grows up. | |
3911 | But if space already allocated, this has already been done. */ | |
3912 | if (extra && args_addr == 0 | |
3913 | && where_pad != none && where_pad != stack_direction) | |
906c4e36 | 3914 | anti_adjust_stack (GEN_INT (extra)); |
bbf6f052 RK |
3915 | |
3916 | /* If we make space by pushing it, we might as well push | |
3917 | the real data. Otherwise, we can leave OFFSET nonzero | |
3918 | and leave the space uninitialized. */ | |
3919 | if (args_addr == 0) | |
3920 | offset = 0; | |
3921 | ||
3922 | /* Now NOT_STACK gets the number of words that we don't need to | |
40b0345d | 3923 | allocate on the stack. Convert OFFSET to words too. */ |
78a52f11 | 3924 | not_stack = (partial - offset) / UNITS_PER_WORD; |
ac7e839c | 3925 | offset /= UNITS_PER_WORD; |
bbf6f052 RK |
3926 | |
3927 | /* If the partial register-part of the arg counts in its stack size, | |
3928 | skip the part of stack space corresponding to the registers. | |
3929 | Otherwise, start copying to the beginning of the stack space, | |
3930 | by setting SKIP to 0. */ | |
e5e809f4 | 3931 | skip = (reg_parm_stack_space == 0) ? 0 : not_stack; |
bbf6f052 RK |
3932 | |
3933 | if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x)) | |
3934 | x = validize_mem (force_const_mem (mode, x)); | |
3935 | ||
3936 | /* If X is a hard register in a non-integer mode, copy it into a pseudo; | |
3937 | SUBREGs of such registers are not allowed. */ | |
f8cfc6aa | 3938 | if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER |
bbf6f052 RK |
3939 | && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT)) |
3940 | x = copy_to_reg (x); | |
3941 | ||
3942 | /* Loop over all the words allocated on the stack for this arg. */ | |
3943 | /* We can do it by words, because any scalar bigger than a word | |
3944 | has a size that is a multiple of a word. */ | |
3945 | #ifndef PUSH_ARGS_REVERSED | |
3946 | for (i = not_stack; i < size; i++) | |
3947 | #else | |
3948 | for (i = size - 1; i >= not_stack; i--) | |
3949 | #endif | |
3950 | if (i >= not_stack + offset) | |
3951 | emit_push_insn (operand_subword_force (x, i, mode), | |
906c4e36 RK |
3952 | word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX, |
3953 | 0, args_addr, | |
3954 | GEN_INT (args_offset + ((i - not_stack + skip) | |
e5e809f4 | 3955 | * UNITS_PER_WORD)), |
4fc026cd | 3956 | reg_parm_stack_space, alignment_pad); |
bbf6f052 RK |
3957 | } |
3958 | else | |
3959 | { | |
3960 | rtx addr; | |
3bdf5ad1 | 3961 | rtx dest; |
bbf6f052 RK |
3962 | |
3963 | /* Push padding now if padding above and stack grows down, | |
3964 | or if padding below and stack grows up. | |
3965 | But if space already allocated, this has already been done. */ | |
3966 | if (extra && args_addr == 0 | |
3967 | && where_pad != none && where_pad != stack_direction) | |
906c4e36 | 3968 | anti_adjust_stack (GEN_INT (extra)); |
bbf6f052 RK |
3969 | |
3970 | #ifdef PUSH_ROUNDING | |
f73ad30e | 3971 | if (args_addr == 0 && PUSH_ARGS) |
566aa174 | 3972 | emit_single_push_insn (mode, x, type); |
bbf6f052 RK |
3973 | else |
3974 | #endif | |
921b3427 | 3975 | { |
481683e1 | 3976 | if (CONST_INT_P (args_so_far)) |
921b3427 RK |
3977 | addr |
3978 | = memory_address (mode, | |
3a94c984 | 3979 | plus_constant (args_addr, |
921b3427 | 3980 | INTVAL (args_so_far))); |
3a94c984 | 3981 | else |
38a448ca RH |
3982 | addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr, |
3983 | args_so_far)); | |
566aa174 | 3984 | dest = gen_rtx_MEM (mode, addr); |
2bb16349 RH |
3985 | |
3986 | /* We do *not* set_mem_attributes here, because incoming arguments | |
3987 | may overlap with sibling call outgoing arguments and we cannot | |
3988 | allow reordering of reads from function arguments with stores | |
3989 | to outgoing arguments of sibling calls. We do, however, want | |
3990 | to record the alignment of the stack slot. */ | |
3991 | /* ALIGN may well be better aligned than TYPE, e.g. due to | |
3992 | PARM_BOUNDARY. Assume the caller isn't lying. */ | |
3993 | set_mem_align (dest, align); | |
bbf6f052 | 3994 | |
566aa174 | 3995 | emit_move_insn (dest, x); |
566aa174 | 3996 | } |
bbf6f052 RK |
3997 | } |
3998 | ||
bbf6f052 RK |
3999 | /* If part should go in registers, copy that part |
4000 | into the appropriate registers. Do this now, at the end, | |
4001 | since mem-to-mem copies above may do function calls. */ | |
cd048831 | 4002 | if (partial > 0 && reg != 0) |
fffa9c1d JW |
4003 | { |
4004 | /* Handle calls that pass values in multiple non-contiguous locations. | |
4005 | The Irix 6 ABI has examples of this. */ | |
4006 | if (GET_CODE (reg) == PARALLEL) | |
6e985040 | 4007 | emit_group_load (reg, x, type, -1); |
fffa9c1d | 4008 | else |
78a52f11 RH |
4009 | { |
4010 | gcc_assert (partial % UNITS_PER_WORD == 0); | |
4011 | move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode); | |
4012 | } | |
fffa9c1d | 4013 | } |
bbf6f052 RK |
4014 | |
4015 | if (extra && args_addr == 0 && where_pad == stack_direction) | |
906c4e36 | 4016 | anti_adjust_stack (GEN_INT (extra)); |
3a94c984 | 4017 | |
3ea2292a | 4018 | if (alignment_pad && args_addr == 0) |
4fc026cd | 4019 | anti_adjust_stack (alignment_pad); |
bbf6f052 RK |
4020 | } |
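
/* Hypothetical sketch (push targets, no preallocated argument block):
   push VAL as an SImode argument entirely on the stack, with no partial
   register part and no extra padding.  The zero/NULL arguments and the
   use of PARM_BOUNDARY are assumptions made for the example.  */

static void
example_push_word_arg (rtx val)
{
  emit_push_insn (val, SImode, integer_type_node, NULL_RTX,
                  PARM_BOUNDARY, 0, NULL_RTX, 0,
                  NULL_RTX, const0_rtx, 0, NULL_RTX);
}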
4021 | \f | |
296b4ed9 RK |
4022 | /* Return X if X can be used as a subtarget in a sequence of arithmetic |
4023 | operations. */ | |
4024 | ||
4025 | static rtx | |
502b8322 | 4026 | get_subtarget (rtx x) |
296b4ed9 | 4027 | { |
7c27e184 PB |
4028 | return (optimize |
4029 | || x == 0 | |
296b4ed9 | 4030 | /* Only registers can be subtargets. */ |
f8cfc6aa | 4031 | || !REG_P (x) |
296b4ed9 RK |
4032 | /* Don't use hard regs to avoid extending their life. */ |
4033 | || REGNO (x) < FIRST_PSEUDO_REGISTER | |
296b4ed9 RK |
4034 | ? 0 : x); |
4035 | } | |
4036 | ||
8c1cfd5a RH |
4037 | /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where |
4038 | FIELD is a bitfield. Returns true if the optimization was successful, | |
4039 | and there's nothing else to do. */ | |
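/* Illustrative example (added annotation, not part of the original file):
   given a bitfield update such as

     struct S { unsigned int dirty : 1; unsigned int rest : 31; } s;
     void mark (void) { s.dirty |= 1; }

   the BIT_IOR_EXPR case below can rewrite the update as an OR on the word
   containing the field, instead of extracting, modifying, and re-inserting
   the bitfield.  */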
4040 | ||
4041 | static bool | |
4042 | optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize, | |
4043 | unsigned HOST_WIDE_INT bitpos, | |
4044 | enum machine_mode mode1, rtx str_rtx, | |
4045 | tree to, tree src) | |
4046 | { | |
4047 | enum machine_mode str_mode = GET_MODE (str_rtx); | |
4048 | unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode); | |
4049 | tree op0, op1; | |
4050 | rtx value, result; | |
4051 | optab binop; | |
4052 | ||
4053 | if (mode1 != VOIDmode | |
4054 | || bitsize >= BITS_PER_WORD | |
4055 | || str_bitsize > BITS_PER_WORD | |
4056 | || TREE_SIDE_EFFECTS (to) | |
4057 | || TREE_THIS_VOLATILE (to)) | |
4058 | return false; | |
4059 | ||
4060 | STRIP_NOPS (src); | |
4061 | if (!BINARY_CLASS_P (src) | |
4062 | || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE) | |
4063 | return false; | |
4064 | ||
4065 | op0 = TREE_OPERAND (src, 0); | |
4066 | op1 = TREE_OPERAND (src, 1); | |
4067 | STRIP_NOPS (op0); | |
4068 | ||
4069 | if (!operand_equal_p (to, op0, 0)) | |
4070 | return false; | |
4071 | ||
4072 | if (MEM_P (str_rtx)) | |
4073 | { | |
4074 | unsigned HOST_WIDE_INT offset1; | |
4075 | ||
4076 | if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD) | |
4077 | str_mode = word_mode; | |
4078 | str_mode = get_best_mode (bitsize, bitpos, | |
4079 | MEM_ALIGN (str_rtx), str_mode, 0); | |
4080 | if (str_mode == VOIDmode) | |
4081 | return false; | |
4082 | str_bitsize = GET_MODE_BITSIZE (str_mode); | |
4083 | ||
4084 | offset1 = bitpos; | |
4085 | bitpos %= str_bitsize; | |
4086 | offset1 = (offset1 - bitpos) / BITS_PER_UNIT; | |
4087 | str_rtx = adjust_address (str_rtx, str_mode, offset1); | |
4088 | } | |
4089 | else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG) | |
4090 | return false; | |
4091 | ||
4092 | /* If the bit field covers the whole REG/MEM, store_field | |
4093 | will likely generate better code. */ | |
4094 | if (bitsize >= str_bitsize) | |
4095 | return false; | |
4096 | ||
4097 | /* We can't handle fields split across multiple entities. */ | |
4098 | if (bitpos + bitsize > str_bitsize) | |
4099 | return false; | |
4100 | ||
4101 | if (BYTES_BIG_ENDIAN) | |
4102 | bitpos = str_bitsize - bitpos - bitsize; | |
4103 | ||
4104 | switch (TREE_CODE (src)) | |
4105 | { | |
4106 | case PLUS_EXPR: | |
4107 | case MINUS_EXPR: | |
4108 | /* For now, just optimize the case of the topmost bitfield | |
4109 | where we don't need to do any masking and also | |
4110 | 1 bit bitfields where xor can be used. | |
4111 | We might win by one instruction for the other bitfields | |
4112 | too if insv/extv instructions aren't used, so that | |
4113 | can be added later. */ | |
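	/* Added note (not in the original file): for a 1-bit field, adding a
	   constant flips the bit only when the constant is odd, so the code
	   below masks the addend to its low bit and performs the add or
	   subtract as an XOR on the word that holds the field.  */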
4114 | if (bitpos + bitsize != str_bitsize | |
4115 | && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST)) | |
4116 | break; | |
4117 | ||
49452c07 | 4118 | value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL); |
8c1cfd5a RH |
4119 | value = convert_modes (str_mode, |
4120 | TYPE_MODE (TREE_TYPE (op1)), value, | |
4121 | TYPE_UNSIGNED (TREE_TYPE (op1))); | |
4122 | ||
4123 | /* We may be accessing data outside the field, which means | |
4124 | we can alias adjacent data. */ | |
4125 | if (MEM_P (str_rtx)) | |
4126 | { | |
4127 | str_rtx = shallow_copy_rtx (str_rtx); | |
4128 | set_mem_alias_set (str_rtx, 0); | |
4129 | set_mem_expr (str_rtx, 0); | |
4130 | } | |
4131 | ||
4132 | binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab; | |
4133 | if (bitsize == 1 && bitpos + bitsize != str_bitsize) | |
4134 | { | |
4135 | value = expand_and (str_mode, value, const1_rtx, NULL); | |
4136 | binop = xor_optab; | |
4137 | } | |
4138 | value = expand_shift (LSHIFT_EXPR, str_mode, value, | |
4139 | build_int_cst (NULL_TREE, bitpos), | |
4140 | NULL_RTX, 1); | |
4141 | result = expand_binop (str_mode, binop, str_rtx, | |
4142 | value, str_rtx, 1, OPTAB_WIDEN); | |
4143 | if (result != str_rtx) | |
4144 | emit_move_insn (str_rtx, result); | |
4145 | return true; | |
4146 | ||
92fb2d32 KH |
4147 | case BIT_IOR_EXPR: |
4148 | case BIT_XOR_EXPR: | |
4149 | if (TREE_CODE (op1) != INTEGER_CST) | |
4150 | break; | |
49452c07 | 4151 | value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL); |
92fb2d32 KH |
4152 | value = convert_modes (GET_MODE (str_rtx), |
4153 | TYPE_MODE (TREE_TYPE (op1)), value, | |
4154 | TYPE_UNSIGNED (TREE_TYPE (op1))); | |
4155 | ||
4156 | /* We may be accessing data outside the field, which means | |
4157 | we can alias adjacent data. */ | |
4158 | if (MEM_P (str_rtx)) | |
4159 | { | |
4160 | str_rtx = shallow_copy_rtx (str_rtx); | |
4161 | set_mem_alias_set (str_rtx, 0); | |
4162 | set_mem_expr (str_rtx, 0); | |
4163 | } | |
4164 | ||
4165 | binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab; | |
4166 | if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx))) | |
4167 | { | |
4168 | rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize) | |
4169 | - 1); | |
4170 | value = expand_and (GET_MODE (str_rtx), value, mask, | |
4171 | NULL_RTX); | |
4172 | } | |
4173 | value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value, | |
4174 | build_int_cst (NULL_TREE, bitpos), | |
4175 | NULL_RTX, 1); | |
4176 | result = expand_binop (GET_MODE (str_rtx), binop, str_rtx, | |
4177 | value, str_rtx, 1, OPTAB_WIDEN); | |
4178 | if (result != str_rtx) | |
4179 | emit_move_insn (str_rtx, result); | |
4180 | return true; | |
4181 | ||
8c1cfd5a RH |
4182 | default: |
4183 | break; | |
4184 | } | |
4185 | ||
4186 | return false; | |
4187 | } | |
4188 | ||
4189 | ||
79f5e442 ZD |
4190 | /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL |
4191 | is true, try generating a nontemporal store. */ | |
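/* Illustrative note (added annotation, not part of the original file):
   a component store such as

     struct S { int x : 3; int y; } s;
     void f (int v) { s.x = v; }

   takes the handled_component_p path below: get_inner_reference yields the
   bit position and size of X within S, and the value is written with
   store_field (or optimize_bitfield_assignment_op) rather than a plain
   move.  */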
bbf6f052 | 4192 | |
e836a5a2 | 4193 | void |
79f5e442 | 4194 | expand_assignment (tree to, tree from, bool nontemporal) |
bbf6f052 | 4195 | { |
b3694847 | 4196 | rtx to_rtx = 0; |
bbf6f052 RK |
4197 | rtx result; |
4198 | ||
4199 | /* Don't crash if the lhs of the assignment was erroneous. */ | |
bbf6f052 | 4200 | if (TREE_CODE (to) == ERROR_MARK) |
709f5be1 | 4201 | { |
84217346 | 4202 | result = expand_normal (from); |
e836a5a2 | 4203 | return; |
709f5be1 | 4204 | } |
bbf6f052 | 4205 | |
6cc1d694 RS |
4206 | /* Optimize away no-op moves without side-effects. */ |
4207 | if (operand_equal_p (to, from, 0)) | |
4208 | return; | |
4209 | ||
bbf6f052 RK |
4210 | /* Assignment of a structure component needs special treatment |
4211 | if the structure component's rtx is not simply a MEM. | |
6be58303 JW |
4212 | Assignment of an array element at a constant index, and assignment of |
4213 | an array element in an unaligned packed structure field, have the same |
4214 | problem. */ | |
8c1cfd5a | 4215 | if (handled_component_p (to) |
70f34814 RG |
4216 | /* ??? We only need to handle MEM_REF here if the access is not |
4217 | a full access of the base object. */ | |
4218 | || (TREE_CODE (to) == MEM_REF | |
4219 | && TREE_CODE (TREE_OPERAND (to, 0)) == ADDR_EXPR) | |
7c02ae17 | 4220 | || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE) |
bbf6f052 RK |
4221 | { |
4222 | enum machine_mode mode1; | |
770ae6cc | 4223 | HOST_WIDE_INT bitsize, bitpos; |
7bb0943f | 4224 | tree offset; |
bbf6f052 RK |
4225 | int unsignedp; |
4226 | int volatilep = 0; | |
0088fcb1 RK |
4227 | tree tem; |
4228 | ||
4229 | push_temp_slots (); | |
839c4796 | 4230 | tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1, |
2614034e | 4231 | &unsignedp, &volatilep, true); |
bbf6f052 RK |
4232 | |
4233 | /* If we are going to use store_bit_field and extract_bit_field, | |
4234 | make sure to_rtx will be safe for multiple use. */ | |
4235 | ||
84217346 | 4236 | to_rtx = expand_normal (tem); |
1ed1b4fb | 4237 | |
6a78b724 DD |
4238 | /* If the bitfield is volatile, we want to access it in the |
4239 | field's mode, not the computed mode. */ | |
4240 | if (volatilep | |
4241 | && GET_CODE (to_rtx) == MEM | |
4242 | && flag_strict_volatile_bitfields > 0) | |
4243 | to_rtx = adjust_address (to_rtx, mode1, 0); | |
4244 | ||
7bb0943f RS |
4245 | if (offset != 0) |
4246 | { | |
d4ebfa65 | 4247 | enum machine_mode address_mode; |
1e188d1e | 4248 | rtx offset_rtx; |
7bb0943f | 4249 | |
1e188d1e RH |
4250 | if (!MEM_P (to_rtx)) |
4251 | { | |
4252 | /* We can get constant negative offsets into arrays with broken | |
4253 | user code. Translate this to a trap instead of ICEing. */ | |
4254 | gcc_assert (TREE_CODE (offset) == INTEGER_CST); | |
4255 | expand_builtin_trap (); | |
4256 | to_rtx = gen_rtx_MEM (BLKmode, const0_rtx); | |
4257 | } | |
bd070e1a | 4258 | |
1e188d1e | 4259 | offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM); |
d4ebfa65 BE |
4260 | address_mode |
4261 | = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx)); | |
4262 | if (GET_MODE (offset_rtx) != address_mode) | |
4263 | offset_rtx = convert_to_mode (address_mode, offset_rtx, 0); | |
bd070e1a | 4264 | |
9a7b9f4f JL |
4265 | /* A constant address in TO_RTX can have VOIDmode, we must not try |
4266 | to call force_reg for that case. Avoid that case. */ | |
3c0cb5de | 4267 | if (MEM_P (to_rtx) |
89752202 | 4268 | && GET_MODE (to_rtx) == BLKmode |
9a7b9f4f | 4269 | && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode |
a06ef755 | 4270 | && bitsize > 0 |
3a94c984 | 4271 | && (bitpos % bitsize) == 0 |
89752202 | 4272 | && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0 |
a06ef755 | 4273 | && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1)) |
89752202 | 4274 | { |
e3c8ea67 | 4275 | to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT); |
89752202 HB |
4276 | bitpos = 0; |
4277 | } | |
4278 | ||
0d4903b8 | 4279 | to_rtx = offset_address (to_rtx, offset_rtx, |
d50a16c4 EB |
4280 | highest_pow2_factor_for_target (to, |
4281 | offset)); | |
7bb0943f | 4282 | } |
c5c76735 | 4283 | |
6a866023 JJ |
4284 | /* No action is needed if the target is not a memory and the field |
4285 | lies completely outside that target. This can occur if the source | |
4286 | code contains an out-of-bounds access to a small array. */ | |
4287 | if (!MEM_P (to_rtx) | |
4288 | && GET_MODE (to_rtx) != BLKmode | |
4289 | && (unsigned HOST_WIDE_INT) bitpos | |
4290 | >= GET_MODE_BITSIZE (GET_MODE (to_rtx))) | |
4291 | { | |
4292 | expand_normal (from); | |
4293 | result = NULL; | |
4294 | } | |
8c1cfd5a | 4295 | /* Handle expand_expr of a complex value returning a CONCAT. */ |
6a866023 | 4296 | else if (GET_CODE (to_rtx) == CONCAT) |
a06ef755 | 4297 | { |
a1a65f89 | 4298 | if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))) |
0becc986 RH |
4299 | { |
4300 | gcc_assert (bitpos == 0); | |
79f5e442 | 4301 | result = store_expr (from, to_rtx, false, nontemporal); |
0becc986 RH |
4302 | } |
4303 | else | |
4304 | { | |
4305 | gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1)); | |
79f5e442 ZD |
4306 | result = store_expr (from, XEXP (to_rtx, bitpos != 0), false, |
4307 | nontemporal); | |
0becc986 | 4308 | } |
bbf6f052 | 4309 | } |
8c1cfd5a | 4310 | else |
df62f18a | 4311 | { |
8c1cfd5a | 4312 | if (MEM_P (to_rtx)) |
b8b139c7 | 4313 | { |
8c1cfd5a RH |
4314 | /* If the field is at offset zero, we could have been given the |
4315 | DECL_RTX of the parent struct. Don't munge it. */ | |
4316 | to_rtx = shallow_copy_rtx (to_rtx); | |
b8b139c7 | 4317 | |
8c1cfd5a | 4318 | set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos); |
b8b139c7 | 4319 | |
8c1cfd5a RH |
4320 | /* Deal with volatile and readonly fields. The former is only |
4321 | done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */ | |
4322 | if (volatilep) | |
4323 | MEM_VOLATILE_P (to_rtx) = 1; | |
2039d7aa | 4324 | if (component_uses_parent_alias_set (to)) |
8c1cfd5a | 4325 | MEM_KEEP_ALIAS_SET_P (to_rtx) = 1; |
df62f18a | 4326 | } |
60ba25bf | 4327 | |
8c1cfd5a RH |
4328 | if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1, |
4329 | to_rtx, to, from)) | |
4330 | result = NULL; | |
4331 | else | |
4332 | result = store_field (to_rtx, bitsize, bitpos, mode1, from, | |
79f5e442 ZD |
4333 | TREE_TYPE (tem), get_alias_set (to), |
4334 | nontemporal); | |
df62f18a JJ |
4335 | } |
4336 | ||
8c1cfd5a RH |
4337 | if (result) |
4338 | preserve_temp_slots (result); | |
a06ef755 RK |
4339 | free_temp_slots (); |
4340 | pop_temp_slots (); | |
e836a5a2 | 4341 | return; |
bbf6f052 RK |
4342 | } |
4343 | ||
8f439681 RE |
4344 | else if (TREE_CODE (to) == MISALIGNED_INDIRECT_REF) |
4345 | { | |
09e881c9 | 4346 | addr_space_t as = ADDR_SPACE_GENERIC; |
8f439681 RE |
4347 | enum machine_mode mode, op_mode1; |
4348 | enum insn_code icode; | |
4349 | rtx reg, addr, mem, insn; | |
4350 | ||
09e881c9 BE |
4351 | if (POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (to, 0)))) |
4352 | as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (to, 0)))); | |
4353 | ||
8f439681 RE |
4354 | reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL); |
4355 | reg = force_not_mem (reg); | |
4356 | ||
4357 | mode = TYPE_MODE (TREE_TYPE (to)); | |
4358 | addr = expand_expr (TREE_OPERAND (to, 0), NULL_RTX, VOIDmode, | |
4359 | EXPAND_SUM); | |
09e881c9 | 4360 | addr = memory_address_addr_space (mode, addr, as); |
8f439681 RE |
4361 | mem = gen_rtx_MEM (mode, addr); |
4362 | ||
4363 | set_mem_attributes (mem, to, 0); | |
09e881c9 | 4364 | set_mem_addr_space (mem, as); |
8f439681 RE |
4365 | |
4366 | icode = movmisalign_optab->handlers[mode].insn_code; | |
4367 | gcc_assert (icode != CODE_FOR_nothing); | |
4368 | ||
4369 | op_mode1 = insn_data[icode].operand[1].mode; | |
4370 | if (! (*insn_data[icode].operand[1].predicate) (reg, op_mode1) | |
4371 | && op_mode1 != VOIDmode) | |
4372 | reg = copy_to_mode_reg (op_mode1, reg); | |
4373 | ||
4374 | insn = GEN_FCN (icode) (mem, reg); | |
4375 | emit_insn (insn); | |
4376 | return; | |
4377 | } | |
4378 | ||
cd1db108 RS |
4379 | /* If the rhs is a function call and its value is not an aggregate, |
4380 | call the function before we start to compute the lhs. | |
4381 | This is needed for correct code for cases such as | |
4382 | val = setjmp (buf) on machines where reference to val | |
1ad87b63 RK |
4383 | requires loading up part of an address in a separate insn. |
4384 | ||
1858863b JW |
4385 | Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG |
4386 | since it might be a promoted variable where the zero- or sign- extension | |
4387 | needs to be done. Handling this in the normal way is safe because no | |
4e3825db | 4388 | computation is done before the call. The same is true for SSA names. */ |
61f71b34 | 4389 | if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from) |
90d245c5 | 4390 | && COMPLETE_TYPE_P (TREE_TYPE (from)) |
b35cd3c1 | 4391 | && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST |
4e3825db MM |
4392 | && ! (((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL) |
4393 | && REG_P (DECL_RTL (to))) | |
4394 | || TREE_CODE (to) == SSA_NAME)) | |
cd1db108 | 4395 | { |
0088fcb1 RK |
4396 | rtx value; |
4397 | ||
4398 | push_temp_slots (); | |
84217346 | 4399 | value = expand_normal (from); |
cd1db108 | 4400 | if (to_rtx == 0) |
37a08a29 | 4401 | to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE); |
aaf87c45 | 4402 | |
fffa9c1d JW |
4403 | /* Handle calls that return values in multiple non-contiguous locations. |
4404 | The Irix 6 ABI has examples of this. */ | |
4405 | if (GET_CODE (to_rtx) == PARALLEL) | |
6e985040 AM |
4406 | emit_group_load (to_rtx, value, TREE_TYPE (from), |
4407 | int_size_in_bytes (TREE_TYPE (from))); | |
fffa9c1d | 4408 | else if (GET_MODE (to_rtx) == BLKmode) |
44bb111a | 4409 | emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL); |
aaf87c45 | 4410 | else |
6419e5b0 | 4411 | { |
5ae6cd0d | 4412 | if (POINTER_TYPE_P (TREE_TYPE (to))) |
d4ebfa65 BE |
4413 | value = convert_memory_address_addr_space |
4414 | (GET_MODE (to_rtx), value, | |
4415 | TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to)))); | |
4416 | ||
6419e5b0 DT |
4417 | emit_move_insn (to_rtx, value); |
4418 | } | |
cd1db108 RS |
4419 | preserve_temp_slots (to_rtx); |
4420 | free_temp_slots (); | |
0088fcb1 | 4421 | pop_temp_slots (); |
e836a5a2 | 4422 | return; |
cd1db108 RS |
4423 | } |
4424 | ||
bbf6f052 RK |
4425 | /* Ordinary treatment. Expand TO to get a REG or MEM rtx. |
4426 | Don't re-expand if it was expanded already (in COMPONENT_REF case). */ | |
4427 | ||
4428 | if (to_rtx == 0) | |
37a08a29 | 4429 | to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE); |
bbf6f052 | 4430 | |
86d38d25 | 4431 | /* Don't move directly into a return register. */ |
14a774a9 | 4432 | if (TREE_CODE (to) == RESULT_DECL |
f8cfc6aa | 4433 | && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL)) |
86d38d25 | 4434 | { |
0088fcb1 RK |
4435 | rtx temp; |
4436 | ||
4437 | push_temp_slots (); | |
49452c07 | 4438 | temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL); |
14a774a9 RK |
4439 | |
4440 | if (GET_CODE (to_rtx) == PARALLEL) | |
6e985040 AM |
4441 | emit_group_load (to_rtx, temp, TREE_TYPE (from), |
4442 | int_size_in_bytes (TREE_TYPE (from))); | |
14a774a9 RK |
4443 | else |
4444 | emit_move_insn (to_rtx, temp); | |
4445 | ||
86d38d25 RS |
4446 | preserve_temp_slots (to_rtx); |
4447 | free_temp_slots (); | |
0088fcb1 | 4448 | pop_temp_slots (); |
e836a5a2 | 4449 | return; |
86d38d25 RS |
4450 | } |
4451 | ||
bbf6f052 RK |
4452 | /* In case we are returning the contents of an object which overlaps |
4453 | the place the value is being stored, use a safe function when copying | |
4454 | a value through a pointer into a structure value return block. */ | |
434c8f4b RG |
4455 | if (TREE_CODE (to) == RESULT_DECL |
4456 | && TREE_CODE (from) == INDIRECT_REF | |
09e881c9 | 4457 | && ADDR_SPACE_GENERIC_P |
434c8f4b RG |
4458 | (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0))))) |
4459 | && refs_may_alias_p (to, from) | |
e3b5732b JH |
4460 | && cfun->returns_struct |
4461 | && !cfun->returns_pcc_struct) | |
bbf6f052 | 4462 | { |
0088fcb1 RK |
4463 | rtx from_rtx, size; |
4464 | ||
4465 | push_temp_slots (); | |
33a20d10 | 4466 | size = expr_size (from); |
84217346 | 4467 | from_rtx = expand_normal (from); |
bbf6f052 | 4468 | |
8f99553f JM |
4469 | emit_library_call (memmove_libfunc, LCT_NORMAL, |
4470 | VOIDmode, 3, XEXP (to_rtx, 0), Pmode, | |
4471 | XEXP (from_rtx, 0), Pmode, | |
4472 | convert_to_mode (TYPE_MODE (sizetype), | |
4473 | size, TYPE_UNSIGNED (sizetype)), | |
4474 | TYPE_MODE (sizetype)); | |
bbf6f052 RK |
4475 | |
4476 | preserve_temp_slots (to_rtx); | |
4477 | free_temp_slots (); | |
0088fcb1 | 4478 | pop_temp_slots (); |
e836a5a2 | 4479 | return; |
bbf6f052 RK |
4480 | } |
4481 | ||
4482 | /* Compute FROM and store the value in the rtx we got. */ | |
4483 | ||
0088fcb1 | 4484 | push_temp_slots (); |
79f5e442 | 4485 | result = store_expr (from, to_rtx, 0, nontemporal); |
bbf6f052 RK |
4486 | preserve_temp_slots (result); |
4487 | free_temp_slots (); | |
0088fcb1 | 4488 | pop_temp_slots (); |
e836a5a2 | 4489 | return; |
bbf6f052 RK |
4490 | } |
4491 | ||
79f5e442 ZD |
4492 | /* Emits nontemporal store insn that moves FROM to TO. Returns true if this |
4493 | succeeded, false otherwise. */ | |
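/* Added note (not part of the original file): storent_optab only has a
   handler on targets that define a nontemporal store pattern (e.g. the
   movnti-style cache-bypassing stores on x86); elsewhere the handler is
   CODE_FOR_nothing and the caller falls back to an ordinary store.  */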
4494 | ||
28ed065e | 4495 | bool |
79f5e442 ZD |
4496 | emit_storent_insn (rtx to, rtx from) |
4497 | { | |
4498 | enum machine_mode mode = GET_MODE (to), imode; | |
166cdb08 | 4499 | enum insn_code code = optab_handler (storent_optab, mode)->insn_code; |
79f5e442 ZD |
4500 | rtx pattern; |
4501 | ||
4502 | if (code == CODE_FOR_nothing) | |
4503 | return false; | |
4504 | ||
4505 | imode = insn_data[code].operand[0].mode; | |
4506 | if (!insn_data[code].operand[0].predicate (to, imode)) | |
4507 | return false; | |
4508 | ||
4509 | imode = insn_data[code].operand[1].mode; | |
4510 | if (!insn_data[code].operand[1].predicate (from, imode)) | |
4511 | { | |
4512 | from = copy_to_mode_reg (imode, from); | |
4513 | if (!insn_data[code].operand[1].predicate (from, imode)) | |
4514 | return false; | |
4515 | } | |
4516 | ||
4517 | pattern = GEN_FCN (code) (to, from); | |
4518 | if (pattern == NULL_RTX) | |
4519 | return false; | |
4520 | ||
4521 | emit_insn (pattern); | |
4522 | return true; | |
4523 | } | |
4524 | ||
bbf6f052 RK |
4525 | /* Generate code for computing expression EXP, |
4526 | and storing the value into TARGET. | |
bbf6f052 | 4527 | |
709f5be1 RS |
4528 | If the mode is BLKmode then we may return TARGET itself. |
4529 | It turns out that in BLKmode it doesn't cause a problem, |
4530 | because C has no operators that could combine two different | |
4531 | assignments into the same BLKmode object with different values | |
4532 | with no sequence point. Will other languages need this to | |
4533 | be more thorough? | |
4534 | ||
6f4fd16d | 4535 | If CALL_PARAM_P is nonzero, this is a store into a call param on the |
79f5e442 | 4536 | stack, and block moves may need to be treated specially. |
b8698a0f | 4537 | |
79f5e442 | 4538 | If NONTEMPORAL is true, try using a nontemporal store instruction. */ |
bbf6f052 RK |
4539 | |
4540 | rtx | |
79f5e442 | 4541 | store_expr (tree exp, rtx target, int call_param_p, bool nontemporal) |
bbf6f052 | 4542 | { |
b3694847 | 4543 | rtx temp; |
0fab64a3 | 4544 | rtx alt_rtl = NULL_RTX; |
db3927fb | 4545 | location_t loc = EXPR_LOCATION (exp); |
bbf6f052 | 4546 | |
847311f4 AL |
4547 | if (VOID_TYPE_P (TREE_TYPE (exp))) |
4548 | { | |
4549 | /* C++ can generate ?: expressions with a throw expression in one | |
4550 | branch and an rvalue in the other. Here, we resolve attempts to | |
4d6922ee | 4551 | store the throw expression's nonexistent result. */ |
6f4fd16d | 4552 | gcc_assert (!call_param_p); |
49452c07 | 4553 | expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL); |
847311f4 AL |
4554 | return NULL_RTX; |
4555 | } | |
bbf6f052 RK |
4556 | if (TREE_CODE (exp) == COMPOUND_EXPR) |
4557 | { | |
4558 | /* Perform first part of compound expression, then assign from second | |
4559 | part. */ | |
8403445a | 4560 | expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, |
6f4fd16d | 4561 | call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL); |
79f5e442 ZD |
4562 | return store_expr (TREE_OPERAND (exp, 1), target, call_param_p, |
4563 | nontemporal); | |
bbf6f052 RK |
4564 | } |
4565 | else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode) | |
4566 | { | |
4567 | /* For conditional expression, get safe form of the target. Then | |
4568 | test the condition, doing the appropriate assignment on either | |
4569 | side. This avoids the creation of unnecessary temporaries. | |
4570 | For non-BLKmode, it is more efficient not to do this. */ | |
4571 | ||
4572 | rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx (); | |
4573 | ||
dabf8373 | 4574 | do_pending_stack_adjust (); |
bbf6f052 | 4575 | NO_DEFER_POP; |
40e90eac | 4576 | jumpifnot (TREE_OPERAND (exp, 0), lab1, -1); |
79f5e442 ZD |
4577 | store_expr (TREE_OPERAND (exp, 1), target, call_param_p, |
4578 | nontemporal); | |
bbf6f052 RK |
4579 | emit_jump_insn (gen_jump (lab2)); |
4580 | emit_barrier (); | |
4581 | emit_label (lab1); | |
79f5e442 ZD |
4582 | store_expr (TREE_OPERAND (exp, 2), target, call_param_p, |
4583 | nontemporal); | |
bbf6f052 RK |
4584 | emit_label (lab2); |
4585 | OK_DEFER_POP; | |
a3a58acc | 4586 | |
436d948e | 4587 | return NULL_RTX; |
12f06d17 | 4588 | } |
1499e0a8 | 4589 | else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target)) |
09da1532 | 4590 | /* If this is a scalar in a register that is stored in a wider mode |
1499e0a8 RK |
4591 | than the declared mode, compute the result into its declared mode |
4592 | and then convert to the wider mode. Our value is the computed | |
4593 | expression. */ | |
4594 | { | |
b76b08ef RK |
4595 | rtx inner_target = 0; |
4596 | ||
436d948e KH |
4597 | /* We can do the conversion inside EXP, which will often result |
4598 | in some optimizations. Do the conversion in two steps: first | |
4599 | change the signedness, if needed, then the extend. But don't | |
4600 | do this if the type of EXP is a subtype of something else | |
4601 | since then the conversion might involve more than just | |
4602 | converting modes. */ | |
4603 | if (INTEGRAL_TYPE_P (TREE_TYPE (exp)) | |
7e7d1b4b | 4604 | && TREE_TYPE (TREE_TYPE (exp)) == 0 |
ac5dc795 PB |
4605 | && GET_MODE_PRECISION (GET_MODE (target)) |
4606 | == TYPE_PRECISION (TREE_TYPE (exp))) | |
f635a84d | 4607 | { |
8df83eae | 4608 | if (TYPE_UNSIGNED (TREE_TYPE (exp)) |
f635a84d | 4609 | != SUBREG_PROMOTED_UNSIGNED_P (target)) |
fdd84500 RH |
4610 | { |
4611 | /* Some types, e.g. Fortran's logical*4, won't have a signed | |
4612 | version, so use the mode instead. */ | |
4613 | tree ntype | |
12753674 | 4614 | = (signed_or_unsigned_type_for |
fdd84500 RH |
4615 | (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp))); |
4616 | if (ntype == NULL) | |
4617 | ntype = lang_hooks.types.type_for_mode | |
4618 | (TYPE_MODE (TREE_TYPE (exp)), | |
4619 | SUBREG_PROMOTED_UNSIGNED_P (target)); | |
4620 | ||
db3927fb | 4621 | exp = fold_convert_loc (loc, ntype, exp); |
fdd84500 | 4622 | } |
f635a84d | 4623 | |
db3927fb AH |
4624 | exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode |
4625 | (GET_MODE (SUBREG_REG (target)), | |
4626 | SUBREG_PROMOTED_UNSIGNED_P (target)), | |
4627 | exp); | |
b76b08ef RK |
4628 | |
4629 | inner_target = SUBREG_REG (target); | |
f635a84d | 4630 | } |
3a94c984 | 4631 | |
8403445a | 4632 | temp = expand_expr (exp, inner_target, VOIDmode, |
6f4fd16d | 4633 | call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL); |
b258707c RS |
4634 | |
4635 | /* If TEMP is a VOIDmode constant, use convert_modes to make | |
4636 | sure that we properly convert it. */ | |
4637 | if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode) | |
1f1b0541 RH |
4638 | { |
4639 | temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)), | |
4640 | temp, SUBREG_PROMOTED_UNSIGNED_P (target)); | |
4641 | temp = convert_modes (GET_MODE (SUBREG_REG (target)), | |
4642 | GET_MODE (target), temp, | |
4643 | SUBREG_PROMOTED_UNSIGNED_P (target)); | |
4644 | } | |
b258707c | 4645 | |
1499e0a8 RK |
4646 | convert_move (SUBREG_REG (target), temp, |
4647 | SUBREG_PROMOTED_UNSIGNED_P (target)); | |
3dbecef9 | 4648 | |
436d948e | 4649 | return NULL_RTX; |
1499e0a8 | 4650 | } |
14a43348 JJ |
4651 | else if (TREE_CODE (exp) == STRING_CST |
4652 | && !nontemporal && !call_param_p | |
4653 | && TREE_STRING_LENGTH (exp) > 0 | |
4654 | && TYPE_MODE (TREE_TYPE (exp)) == BLKmode) | |
4655 | { | |
4656 | /* Optimize initialization of an array with a STRING_CST. */ | |
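	  /* Added illustration (not part of the original file): for
	       char buf[16] = "abc";
	     the bytes of the STRING_CST are written with store_by_pieces
	     below, and clear_storage then zeroes the remaining bytes of
	     the array.  */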
4657 | HOST_WIDE_INT exp_len, str_copy_len; | |
4658 | rtx dest_mem; | |
4659 | ||
4660 | exp_len = int_expr_size (exp); | |
4661 | if (exp_len <= 0) | |
4662 | goto normal_expr; | |
4663 | ||
4664 | str_copy_len = strlen (TREE_STRING_POINTER (exp)); | |
4665 | if (str_copy_len < TREE_STRING_LENGTH (exp) - 1) | |
4666 | goto normal_expr; | |
4667 | ||
4668 | str_copy_len = TREE_STRING_LENGTH (exp); | |
4669 | if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0) | |
4670 | { | |
4671 | str_copy_len += STORE_MAX_PIECES - 1; | |
4672 | str_copy_len &= ~(STORE_MAX_PIECES - 1); | |
4673 | } | |
4674 | str_copy_len = MIN (str_copy_len, exp_len); | |
4675 | if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str, | |
1b4572a8 | 4676 | CONST_CAST(char *, TREE_STRING_POINTER (exp)), |
cfa31150 | 4677 | MEM_ALIGN (target), false)) |
14a43348 JJ |
4678 | goto normal_expr; |
4679 | ||
4680 | dest_mem = target; | |
4681 | ||
4682 | dest_mem = store_by_pieces (dest_mem, | |
4683 | str_copy_len, builtin_strncpy_read_str, | |
1b4572a8 | 4684 | CONST_CAST(char *, TREE_STRING_POINTER (exp)), |
65fedc2c JJ |
4685 | MEM_ALIGN (target), false, |
4686 | exp_len > str_copy_len ? 1 : 0); | |
14a43348 | 4687 | if (exp_len > str_copy_len) |
b62a2e15 JDA |
4688 | clear_storage (adjust_address (dest_mem, BLKmode, 0), |
4689 | GEN_INT (exp_len - str_copy_len), | |
14a43348 JJ |
4690 | BLOCK_OP_NORMAL); |
4691 | return NULL_RTX; | |
4692 | } | |
70f34814 RG |
4693 | else if (TREE_CODE (exp) == MEM_REF |
4694 | && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR | |
4695 | && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == STRING_CST | |
4696 | && integer_zerop (TREE_OPERAND (exp, 1)) | |
4697 | && !nontemporal && !call_param_p | |
4698 | && TYPE_MODE (TREE_TYPE (exp)) == BLKmode) | |
4699 | { | |
4700 | /* Optimize initialization of an array with a STRING_CST. */ | |
4701 | HOST_WIDE_INT exp_len, str_copy_len; | |
4702 | rtx dest_mem; | |
4703 | tree str = TREE_OPERAND (TREE_OPERAND (exp, 0), 0); | |
4704 | ||
4705 | exp_len = int_expr_size (exp); | |
4706 | if (exp_len <= 0) | |
4707 | goto normal_expr; | |
4708 | ||
4709 | str_copy_len = strlen (TREE_STRING_POINTER (str)); | |
4710 | if (str_copy_len < TREE_STRING_LENGTH (str) - 1) | |
4711 | goto normal_expr; | |
4712 | ||
4713 | str_copy_len = TREE_STRING_LENGTH (str); | |
4714 | if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0) | |
4715 | { | |
4716 | str_copy_len += STORE_MAX_PIECES - 1; | |
4717 | str_copy_len &= ~(STORE_MAX_PIECES - 1); | |
4718 | } | |
4719 | str_copy_len = MIN (str_copy_len, exp_len); | |
4720 | if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str, | |
4721 | CONST_CAST(char *, TREE_STRING_POINTER (str)), | |
4722 | MEM_ALIGN (target), false)) | |
4723 | goto normal_expr; | |
4724 | ||
4725 | dest_mem = target; | |
4726 | ||
4727 | dest_mem = store_by_pieces (dest_mem, | |
4728 | str_copy_len, builtin_strncpy_read_str, | |
4729 | CONST_CAST(char *, TREE_STRING_POINTER (str)), | |
4730 | MEM_ALIGN (target), false, | |
4731 | exp_len > str_copy_len ? 1 : 0); | |
4732 | if (exp_len > str_copy_len) | |
4733 | clear_storage (adjust_address (dest_mem, BLKmode, 0), | |
4734 | GEN_INT (exp_len - str_copy_len), | |
4735 | BLOCK_OP_NORMAL); | |
4736 | return NULL_RTX; | |
4737 | } | |
bbf6f052 RK |
4738 | else |
4739 | { | |
79f5e442 ZD |
4740 | rtx tmp_target; |
4741 | ||
14a43348 | 4742 | normal_expr: |
79f5e442 ZD |
4743 | /* If we want to use a nontemporal store, force the value to |
4744 | register first. */ | |
4745 | tmp_target = nontemporal ? NULL_RTX : target; | |
4746 | temp = expand_expr_real (exp, tmp_target, GET_MODE (target), | |
6f4fd16d | 4747 | (call_param_p |
0fab64a3 MM |
4748 | ? EXPAND_STACK_PARM : EXPAND_NORMAL), |
4749 | &alt_rtl); | |
bbf6f052 RK |
4750 | } |
4751 | ||
b258707c RS |
4752 | /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not |
4753 | the same as that of TARGET, adjust the constant. This is needed, for | |
4754 | example, in case it is a CONST_DOUBLE and we want only a word-sized | |
4755 | value. */ | |
4756 | if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode | |
c1da1f33 | 4757 | && TREE_CODE (exp) != ERROR_MARK |
b258707c RS |
4758 | && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))) |
4759 | temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)), | |
8df83eae | 4760 | temp, TYPE_UNSIGNED (TREE_TYPE (exp))); |
b258707c | 4761 | |
bbf6f052 | 4762 | /* If value was not generated in the target, store it there. |
1bbd65cd EB |
4763 | Convert the value to TARGET's type first if necessary and emit the |
4764 | pending incrementations that have been queued when expanding EXP. | |
4765 | Note that we cannot emit the whole queue blindly because this will | |
4766 | effectively disable the POST_INC optimization later. | |
4767 | ||
37a08a29 | 4768 | If TEMP and TARGET compare equal according to rtx_equal_p, but |
f3f2255a R |
4769 | one or both of them are volatile memory refs, we have to distinguish |
4770 | two cases: | |
4771 | - expand_expr has used TARGET. In this case, we must not generate | |
4772 | another copy. This can be detected by TARGET being equal according | |
4773 | to == . | |
4774 | - expand_expr has not used TARGET - that means that the source just | |
4775 | happens to have the same RTX form. Since temp will have been created | |
4776 | by expand_expr, it will compare unequal according to == . | |
4777 | We must generate a copy in this case, to reach the correct number | |
4778 | of volatile memory references. */ | |
bbf6f052 | 4779 | |
6036acbb | 4780 | if ((! rtx_equal_p (temp, target) |
f3f2255a R |
4781 | || (temp != target && (side_effects_p (temp) |
4782 | || side_effects_p (target)))) | |
e5408e52 | 4783 | && TREE_CODE (exp) != ERROR_MARK |
9c5c5f2c MM |
4784 | /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET, |
4785 | but TARGET is not valid memory reference, TEMP will differ | |
4786 | from TARGET although it is really the same location. */ | |
0fab64a3 | 4787 | && !(alt_rtl && rtx_equal_p (alt_rtl, target)) |
535a42b1 NS |
4788 | /* If there's nothing to copy, don't bother. Don't call |
4789 | expr_size unless necessary, because some front-ends (C++) | |
4790 | expr_size-hook must not be given objects that are not | |
4791 | supposed to be bit-copied or bit-initialized. */ | |
e56fc090 | 4792 | && expr_size (exp) != const0_rtx) |
bbf6f052 | 4793 | { |
bbf6f052 | 4794 | if (GET_MODE (temp) != GET_MODE (target) |
f0348c25 | 4795 | && GET_MODE (temp) != VOIDmode) |
bbf6f052 | 4796 | { |
8df83eae | 4797 | int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp)); |
28ed065e | 4798 | if (GET_MODE (target) == BLKmode |
ab669042 | 4799 | || GET_MODE (temp) == BLKmode) |
ed1223ba EC |
4800 | emit_block_move (target, temp, expr_size (exp), |
4801 | (call_param_p | |
4802 | ? BLOCK_OP_CALL_PARM | |
4803 | : BLOCK_OP_NORMAL)); | |
bbf6f052 RK |
4804 | else |
4805 | convert_move (target, temp, unsignedp); | |
4806 | } | |
4807 | ||
4808 | else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST) | |
4809 | { | |
c24ae149 RK |
4810 | /* Handle copying a string constant into an array. The string |
4811 | constant may be shorter than the array. So copy just the string's | |
4812 | actual length, and clear the rest. First get the size of the data | |
4813 | type of the string, which is actually the size of the target. */ | |
4814 | rtx size = expr_size (exp); | |
bbf6f052 | 4815 | |
481683e1 | 4816 | if (CONST_INT_P (size) |
e87b4f3f | 4817 | && INTVAL (size) < TREE_STRING_LENGTH (exp)) |
8403445a | 4818 | emit_block_move (target, temp, size, |
6f4fd16d | 4819 | (call_param_p |
8403445a | 4820 | ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL)); |
e87b4f3f | 4821 | else |
bbf6f052 | 4822 | { |
d4ebfa65 BE |
4823 | enum machine_mode pointer_mode |
4824 | = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target)); | |
4825 | enum machine_mode address_mode | |
4826 | = targetm.addr_space.address_mode (MEM_ADDR_SPACE (target)); | |
4827 | ||
e87b4f3f RS |
4828 | /* Compute the size of the data to copy from the string. */ |
4829 | tree copy_size | |
db3927fb AH |
4830 | = size_binop_loc (loc, MIN_EXPR, |
4831 | make_tree (sizetype, size), | |
4832 | size_int (TREE_STRING_LENGTH (exp))); | |
8403445a AM |
4833 | rtx copy_size_rtx |
4834 | = expand_expr (copy_size, NULL_RTX, VOIDmode, | |
6f4fd16d | 4835 | (call_param_p |
8403445a | 4836 | ? EXPAND_STACK_PARM : EXPAND_NORMAL)); |
e87b4f3f RS |
4837 | rtx label = 0; |
4838 | ||
4839 | /* Copy that much. */ | |
d4ebfa65 | 4840 | copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx, |
8df83eae | 4841 | TYPE_UNSIGNED (sizetype)); |
8403445a | 4842 | emit_block_move (target, temp, copy_size_rtx, |
6f4fd16d | 4843 | (call_param_p |
8403445a | 4844 | ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL)); |
e87b4f3f | 4845 | |
88f63c77 | 4846 | /* Figure out how much is left in TARGET that we have to clear. |
d4ebfa65 | 4847 | Do all calculations in pointer_mode. */ |
481683e1 | 4848 | if (CONST_INT_P (copy_size_rtx)) |
e87b4f3f | 4849 | { |
c24ae149 RK |
4850 | size = plus_constant (size, -INTVAL (copy_size_rtx)); |
4851 | target = adjust_address (target, BLKmode, | |
4852 | INTVAL (copy_size_rtx)); | |
e87b4f3f RS |
4853 | } |
4854 | else | |
4855 | { | |
fa06ab5c | 4856 | size = expand_binop (TYPE_MODE (sizetype), sub_optab, size, |
906c4e36 RK |
4857 | copy_size_rtx, NULL_RTX, 0, |
4858 | OPTAB_LIB_WIDEN); | |
e87b4f3f | 4859 | |
d4ebfa65 BE |
4860 | if (GET_MODE (copy_size_rtx) != address_mode) |
4861 | copy_size_rtx = convert_to_mode (address_mode, | |
4862 | copy_size_rtx, | |
8df83eae | 4863 | TYPE_UNSIGNED (sizetype)); |
c24ae149 RK |
4864 | |
4865 | target = offset_address (target, copy_size_rtx, | |
4866 | highest_pow2_factor (copy_size)); | |
e87b4f3f | 4867 | label = gen_label_rtx (); |
c5d5d461 | 4868 | emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX, |
a06ef755 | 4869 | GET_MODE (size), 0, label); |
e87b4f3f RS |
4870 | } |
4871 | ||
4872 | if (size != const0_rtx) | |
8148fe65 | 4873 | clear_storage (target, size, BLOCK_OP_NORMAL); |
22619c3f | 4874 | |
e87b4f3f RS |
4875 | if (label) |
4876 | emit_label (label); | |
bbf6f052 RK |
4877 | } |
4878 | } | |
fffa9c1d JW |
4879 | /* Handle calls that return values in multiple non-contiguous locations. |
4880 | The Irix 6 ABI has examples of this. */ | |
4881 | else if (GET_CODE (target) == PARALLEL) | |
6e985040 AM |
4882 | emit_group_load (target, temp, TREE_TYPE (exp), |
4883 | int_size_in_bytes (TREE_TYPE (exp))); | |
bbf6f052 | 4884 | else if (GET_MODE (temp) == BLKmode) |
8403445a | 4885 | emit_block_move (target, temp, expr_size (exp), |
6f4fd16d | 4886 | (call_param_p |
8403445a | 4887 | ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL)); |
79f5e442 ZD |
4888 | else if (nontemporal |
4889 | && emit_storent_insn (target, temp)) | |
4890 | /* If we managed to emit a nontemporal store, there is nothing else to | |
4891 | do. */ | |
4892 | ; | |
bbf6f052 | 4893 | else |
b0dccb00 RH |
4894 | { |
4895 | temp = force_operand (temp, target); | |
4896 | if (temp != target) | |
4897 | emit_move_insn (target, temp); | |
4898 | } | |
bbf6f052 | 4899 | } |
709f5be1 | 4900 | |
436d948e | 4901 | return NULL_RTX; |
bbf6f052 RK |
4902 | } |
4903 | \f | |
fe24d485 | 4904 | /* Helper for categorize_ctor_elements. Identical interface. */ |
9de08200 | 4905 | |
fe24d485 | 4906 | static bool |
fa233e34 | 4907 | categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts, |
6f642f98 RH |
4908 | HOST_WIDE_INT *p_elt_count, |
4909 | bool *p_must_clear) | |
9de08200 | 4910 | { |
4038c495 | 4911 | unsigned HOST_WIDE_INT idx; |
fe24d485 | 4912 | HOST_WIDE_INT nz_elts, elt_count; |
4038c495 | 4913 | tree value, purpose; |
9de08200 | 4914 | |
fe24d485 OH |
4915 | /* Whether CTOR is a valid constant initializer, in accordance with what |
4916 | initializer_constant_valid_p does. If inferred from the constructor | |
4917 | elements, true until proven otherwise. */ | |
4918 | bool const_from_elts_p = constructor_static_from_elts_p (ctor); | |
4919 | bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor); | |
4920 | ||
6de9cd9a | 4921 | nz_elts = 0; |
6fa91b48 | 4922 | elt_count = 0; |
caf93cb0 | 4923 | |
4038c495 | 4924 | FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value) |
9de08200 | 4925 | { |
675c873b | 4926 | HOST_WIDE_INT mult = 1; |
9de08200 | 4927 | |
6de9cd9a DN |
4928 | if (TREE_CODE (purpose) == RANGE_EXPR) |
4929 | { | |
4930 | tree lo_index = TREE_OPERAND (purpose, 0); | |
4931 | tree hi_index = TREE_OPERAND (purpose, 1); | |
9de08200 | 4932 | |
6de9cd9a DN |
4933 | if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1)) |
4934 | mult = (tree_low_cst (hi_index, 1) | |
4935 | - tree_low_cst (lo_index, 1) + 1); | |
4936 | } | |
9de08200 | 4937 | |
6de9cd9a DN |
4938 | switch (TREE_CODE (value)) |
4939 | { | |
4940 | case CONSTRUCTOR: | |
4941 | { | |
fe24d485 | 4942 | HOST_WIDE_INT nz = 0, ic = 0; |
ed1223ba | 4943 | |
fe24d485 OH |
4944 | bool const_elt_p |
4945 | = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear); | |
4946 | ||
6de9cd9a | 4947 | nz_elts += mult * nz; |
fe24d485 OH |
4948 | elt_count += mult * ic; |
4949 | ||
4950 | if (const_from_elts_p && const_p) | |
4951 | const_p = const_elt_p; | |
6de9cd9a DN |
4952 | } |
4953 | break; | |
9de08200 | 4954 | |
6de9cd9a DN |
4955 | case INTEGER_CST: |
4956 | case REAL_CST: | |
0f996086 | 4957 | case FIXED_CST: |
6de9cd9a DN |
4958 | if (!initializer_zerop (value)) |
4959 | nz_elts += mult; | |
6fa91b48 | 4960 | elt_count += mult; |
6de9cd9a | 4961 | break; |
97f8d136 RK |
4962 | |
4963 | case STRING_CST: | |
4964 | nz_elts += mult * TREE_STRING_LENGTH (value); | |
6fa91b48 | 4965 | elt_count += mult * TREE_STRING_LENGTH (value); |
97f8d136 RK |
4966 | break; |
4967 | ||
6de9cd9a DN |
4968 | case COMPLEX_CST: |
4969 | if (!initializer_zerop (TREE_REALPART (value))) | |
4970 | nz_elts += mult; | |
4971 | if (!initializer_zerop (TREE_IMAGPART (value))) | |
4972 | nz_elts += mult; | |
6fa91b48 | 4973 | elt_count += mult; |
6de9cd9a | 4974 | break; |
97f8d136 | 4975 | |
6de9cd9a DN |
4976 | case VECTOR_CST: |
4977 | { | |
4978 | tree v; | |
4979 | for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v)) | |
6fa91b48 SB |
4980 | { |
4981 | if (!initializer_zerop (TREE_VALUE (v))) | |
4982 | nz_elts += mult; | |
4983 | elt_count += mult; | |
4984 | } | |
6de9cd9a DN |
4985 | } |
4986 | break; | |
69ef87e2 | 4987 | |
6de9cd9a | 4988 | default: |
675c873b EB |
4989 | { |
4990 | HOST_WIDE_INT tc = count_type_elements (TREE_TYPE (value), true); | |
4991 | if (tc < 1) | |
4992 | tc = 1; | |
4993 | nz_elts += mult * tc; | |
4994 | elt_count += mult * tc; | |
fe24d485 | 4995 | |
675c873b EB |
4996 | if (const_from_elts_p && const_p) |
4997 | const_p = initializer_constant_valid_p (value, TREE_TYPE (value)) | |
4998 | != NULL_TREE; | |
4999 | } | |
6de9cd9a DN |
5000 | break; |
5001 | } | |
5002 | } | |
69ef87e2 | 5003 | |
6f642f98 RH |
5004 | if (!*p_must_clear |
5005 | && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE | |
5006 | || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE)) | |
5007 | { | |
5008 | tree init_sub_type; | |
486e4326 | 5009 | bool clear_this = true; |
6f642f98 | 5010 | |
4038c495 | 5011 | if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor))) |
6f642f98 | 5012 | { |
486e4326 RH |
5013 | /* We don't expect more than one element of the union to be |
5014 | initialized. Not sure what we should do otherwise... */ | |
4038c495 GB |
5015 | gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor)) |
5016 | == 1); | |
486e4326 | 5017 | |
4038c495 GB |
5018 | init_sub_type = TREE_TYPE (VEC_index (constructor_elt, |
5019 | CONSTRUCTOR_ELTS (ctor), | |
5020 | 0)->value); | |
486e4326 RH |
5021 | |
5022 | /* ??? We could look at each element of the union, and find the | |
5023 | largest element. Which would avoid comparing the size of the | |
5024 | initialized element against any tail padding in the union. | |
5025 | Doesn't seem worth the effort... */ | |
ed1223ba | 5026 | if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)), |
486e4326 RH |
5027 | TYPE_SIZE (init_sub_type)) == 1) |
5028 | { | |
5029 | /* And now we have to find out if the element itself is fully | |
5030 | constructed. E.g. for union { struct { int a, b; } s; } u | |
5031 | = { .s = { .a = 1 } }. */ | |
73ed17ff | 5032 | if (elt_count == count_type_elements (init_sub_type, false)) |
486e4326 RH |
5033 | clear_this = false; |
5034 | } | |
6f642f98 | 5035 | } |
486e4326 RH |
5036 | |
5037 | *p_must_clear = clear_this; | |
6f642f98 RH |
5038 | } |
5039 | ||
6de9cd9a | 5040 | *p_nz_elts += nz_elts; |
6fa91b48 | 5041 | *p_elt_count += elt_count; |
fe24d485 OH |
5042 | |
5043 | return const_p; | |
6de9cd9a DN |
5044 | } |
5045 | ||
fe24d485 OH |
5046 | /* Examine CTOR to discover: |
5047 | * how many scalar fields are set to nonzero values, | |
5048 | and place it in *P_NZ_ELTS; | |
5049 | * how many scalar fields in total are in CTOR, | |
5050 | and place it in *P_ELT_COUNT. | |
5051 | * if a type is a union, and the initializer from the constructor | |
5052 | is not the largest element in the union, then set *p_must_clear. | |
5053 | ||
5054 | Return whether or not CTOR is a valid static constant initializer, the same | |
5055 | as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */ | |
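   Worked example (added annotation, not part of the original file): for
     int a[8] = { 1, 0, 2 };
   the constructor holds three scalar elements, two of them nonzero, so
   *P_NZ_ELTS becomes 2 and *P_ELT_COUNT becomes 3, and the function
   returns true since every element is a constant.  */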
5056 | ||
5057 | bool | |
fa233e34 | 5058 | categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts, |
6f642f98 RH |
5059 | HOST_WIDE_INT *p_elt_count, |
5060 | bool *p_must_clear) | |
6de9cd9a DN |
5061 | { |
5062 | *p_nz_elts = 0; | |
6fa91b48 | 5063 | *p_elt_count = 0; |
6f642f98 | 5064 | *p_must_clear = false; |
fe24d485 OH |
5065 | |
5066 | return | |
5067 | categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear); | |
6de9cd9a DN |
5068 | } |
5069 | ||
5070 | /* Count the number of scalars in TYPE. Return -1 if the count overflows | |
73ed17ff JJ |
5071 | or TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count a flexible |
5072 | array member at the end of the structure. */ | |
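/* Illustrative counts (added annotation, not part of the original file):
   "int a[4]" contributes 4 scalars, "_Complex double" contributes 2, and an
   8-element vector contributes 8; unions and variable-sized arrays make the
   switch below return -1.  */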
6de9cd9a DN |
5073 | |
5074 | HOST_WIDE_INT | |
fa233e34 | 5075 | count_type_elements (const_tree type, bool allow_flexarr) |
6de9cd9a DN |
5076 | { |
5077 | const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1)); | |
5078 | switch (TREE_CODE (type)) | |
5079 | { | |
5080 | case ARRAY_TYPE: | |
5081 | { | |
5082 | tree telts = array_type_nelts (type); | |
5083 | if (telts && host_integerp (telts, 1)) | |
5084 | { | |
5377d5ba | 5085 | HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1; |
73ed17ff | 5086 | HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false); |
6de9cd9a DN |
5087 | if (n == 0) |
5088 | return 0; | |
5377d5ba | 5089 | else if (max / n > m) |
6de9cd9a DN |
5090 | return n * m; |
5091 | } | |
5092 | return -1; | |
5093 | } | |
5094 | ||
5095 | case RECORD_TYPE: | |
5096 | { | |
5097 | HOST_WIDE_INT n = 0, t; | |
5098 | tree f; | |
5099 | ||
5100 | for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f)) | |
5101 | if (TREE_CODE (f) == FIELD_DECL) | |
5102 | { | |
73ed17ff | 5103 | t = count_type_elements (TREE_TYPE (f), false); |
6de9cd9a | 5104 | if (t < 0) |
73ed17ff JJ |
5105 | { |
5106 | /* Check for structures with flexible array member. */ | |
5107 | tree tf = TREE_TYPE (f); | |
5108 | if (allow_flexarr | |
5109 | && TREE_CHAIN (f) == NULL | |
5110 | && TREE_CODE (tf) == ARRAY_TYPE | |
5111 | && TYPE_DOMAIN (tf) | |
5112 | && TYPE_MIN_VALUE (TYPE_DOMAIN (tf)) | |
5113 | && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf))) | |
5114 | && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf)) | |
5115 | && int_size_in_bytes (type) >= 0) | |
5116 | break; | |
5117 | ||
5118 | return -1; | |
5119 | } | |
6de9cd9a DN |
5120 | n += t; |
5121 | } | |
5122 | ||
5123 | return n; | |
5124 | } | |
9de08200 | 5125 | |
6de9cd9a DN |
5126 | case UNION_TYPE: |
5127 | case QUAL_UNION_TYPE: | |
22199fd2 | 5128 | return -1; |
6de9cd9a DN |
5129 | |
5130 | case COMPLEX_TYPE: | |
5131 | return 2; | |
5132 | ||
5133 | case VECTOR_TYPE: | |
3a021db2 | 5134 | return TYPE_VECTOR_SUBPARTS (type); |
6de9cd9a DN |
5135 | |
5136 | case INTEGER_TYPE: | |
5137 | case REAL_TYPE: | |
0f996086 | 5138 | case FIXED_POINT_TYPE: |
6de9cd9a DN |
5139 | case ENUMERAL_TYPE: |
5140 | case BOOLEAN_TYPE: | |
6de9cd9a DN |
5141 | case POINTER_TYPE: |
5142 | case OFFSET_TYPE: | |
5143 | case REFERENCE_TYPE: | |
9de08200 | 5144 | return 1; |
3a94c984 | 5145 | |
16d5ffec SM |
5146 | case ERROR_MARK: |
5147 | return 0; | |
5148 | ||
6de9cd9a DN |
5149 | case VOID_TYPE: |
5150 | case METHOD_TYPE: | |
6de9cd9a DN |
5151 | case FUNCTION_TYPE: |
5152 | case LANG_TYPE: | |
e9a25f70 | 5153 | default: |
5b0264cb | 5154 | gcc_unreachable (); |
9de08200 | 5155 | } |
9de08200 RK |
5156 | } |
5157 | ||
5158 | /* Return 1 if EXP contains mostly (3/4) zeros. */ | |
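/* Added example (not part of the original file): "int a[100] = { 1, 2 };"
   has only two nonzero elements out of one hundred, so this returns 1 and
   store_constructor can clear the whole array once and then store only the
   nonzero entries.  */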
5159 | ||
e0ce7708 | 5160 | static int |
22ea9ec0 | 5161 | mostly_zeros_p (const_tree exp) |
9de08200 | 5162 | { |
9de08200 | 5163 | if (TREE_CODE (exp) == CONSTRUCTOR) |
caf93cb0 | 5164 | |
9de08200 | 5165 | { |
fe24d485 | 5166 | HOST_WIDE_INT nz_elts, count, elts; |
6f642f98 RH |
5167 | bool must_clear; |
5168 | ||
fe24d485 | 5169 | categorize_ctor_elements (exp, &nz_elts, &count, &must_clear); |
6f642f98 RH |
5170 | if (must_clear) |
5171 | return 1; | |
6de9cd9a | 5172 | |
73ed17ff | 5173 | elts = count_type_elements (TREE_TYPE (exp), false); |
9de08200 | 5174 | |
6de9cd9a | 5175 | return nz_elts < elts / 4; |
9de08200 RK |
5176 | } |
5177 | ||
6de9cd9a | 5178 | return initializer_zerop (exp); |
9de08200 | 5179 | } |
c5250139 RG |
5180 | |
5181 | /* Return 1 if EXP contains all zeros. */ | |
5182 | ||
5183 | static int | |
22ea9ec0 | 5184 | all_zeros_p (const_tree exp) |
c5250139 RG |
5185 | { |
5186 | if (TREE_CODE (exp) == CONSTRUCTOR) | |
5187 | ||
5188 | { | |
fe24d485 | 5189 | HOST_WIDE_INT nz_elts, count; |
c5250139 RG |
5190 | bool must_clear; |
5191 | ||
fe24d485 | 5192 | categorize_ctor_elements (exp, &nz_elts, &count, &must_clear); |
c5250139 RG |
5193 | return nz_elts == 0; |
5194 | } | |
5195 | ||
5196 | return initializer_zerop (exp); | |
5197 | } | |
9de08200 | 5198 | \f |
e1a43f73 PB |
5199 | /* Helper function for store_constructor. |
5200 | TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field. | |
5201 | TYPE is the type of the CONSTRUCTOR, not the element type. | |
04050c69 | 5202 | CLEARED is as for store_constructor. |
23cb1766 | 5203 | ALIAS_SET is the alias set to use for any stores. |
23ccec44 JW |
5204 | |
5205 | This provides a recursive shortcut back to store_constructor when it isn't | |
5206 | necessary to go through store_field. This is so that we can pass through | |
5207 | the cleared field to let store_constructor know that we may not have to | |
5208 | clear a substructure if the outer structure has already been cleared. */ | |
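   Added illustration (not part of the original file): for
     struct inner { int x, y; };
     struct outer { struct inner i; int z; } o = { { 1, 2 }, 3 };
   the { 1, 2 } sub-constructor is byte-aligned, so the call below goes
   straight back into store_constructor rather than through store_field,
   and CLEARED lets it skip re-clearing the substructure when the whole of
   O has already been zeroed.  */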
e1a43f73 PB |
5209 | |
5210 | static void | |
502b8322 AJ |
5211 | store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize, |
5212 | HOST_WIDE_INT bitpos, enum machine_mode mode, | |
4862826d ILT |
5213 | tree exp, tree type, int cleared, |
5214 | alias_set_type alias_set) | |
e1a43f73 PB |
5215 | { |
5216 | if (TREE_CODE (exp) == CONSTRUCTOR | |
6c89c39a RK |
5217 | /* We can only call store_constructor recursively if the size and |
5218 | bit position are on a byte boundary. */ | |
23ccec44 | 5219 | && bitpos % BITS_PER_UNIT == 0 |
6c89c39a | 5220 | && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0) |
cc2902df | 5221 | /* If we have a nonzero bitpos for a register target, then we just |
23ccec44 JW |
5222 | let store_field do the bitfield handling. This is unlikely to |
5223 | generate unnecessary clear instructions anyways. */ | |
3c0cb5de | 5224 | && (bitpos == 0 || MEM_P (target))) |
e1a43f73 | 5225 | { |
3c0cb5de | 5226 | if (MEM_P (target)) |
61cb205c RK |
5227 | target |
5228 | = adjust_address (target, | |
5229 | GET_MODE (target) == BLKmode | |
5230 | || 0 != (bitpos | |
5231 | % GET_MODE_ALIGNMENT (GET_MODE (target))) | |
5232 | ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT); | |
23cb1766 | 5233 | |
e0339ef7 | 5234 | |
04050c69 | 5235 | /* Update the alias set, if required. */ |
3c0cb5de | 5236 | if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target) |
10b76d73 | 5237 | && MEM_ALIAS_SET (target) != 0) |
70072ed9 RK |
5238 | { |
5239 | target = copy_rtx (target); | |
5240 | set_mem_alias_set (target, alias_set); | |
5241 | } | |
e0339ef7 | 5242 | |
dbb5c281 | 5243 | store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT); |
e1a43f73 PB |
5244 | } |
5245 | else | |
79f5e442 | 5246 | store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false); |
e1a43f73 PB |
5247 | } |
5248 | ||
bbf6f052 | 5249 | /* Store the value of constructor EXP into the rtx TARGET. |
04050c69 RK |
5250 | TARGET is either a REG or a MEM; we know it cannot conflict, since |
5251 | safe_from_p has been called. | |
dbb5c281 RK |
5252 | CLEARED is true if TARGET is known to have been zero'd. |
5253 | SIZE is the number of bytes of TARGET we are allowed to modify: this | |
b7010412 RK |
5254 | may not be the same as the size of EXP if we are assigning to a field |
5255 | which has been packed to exclude padding bits. */ | |
bbf6f052 RK |
5256 | |
5257 | static void | |
502b8322 | 5258 | store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size) |
bbf6f052 | 5259 | { |
4af3895e | 5260 | tree type = TREE_TYPE (exp); |
a5efcd63 | 5261 | #ifdef WORD_REGISTER_OPERATIONS |
13eb1f7f | 5262 | HOST_WIDE_INT exp_size = int_size_in_bytes (type); |
a5efcd63 | 5263 | #endif |
4af3895e | 5264 | |
5b0264cb | 5265 | switch (TREE_CODE (type)) |
bbf6f052 | 5266 | { |
5b0264cb NS |
5267 | case RECORD_TYPE: |
5268 | case UNION_TYPE: | |
5269 | case QUAL_UNION_TYPE: | |
5270 | { | |
4038c495 GB |
5271 | unsigned HOST_WIDE_INT idx; |
5272 | tree field, value; | |
9de08200 | 5273 | |
5b0264cb NS |
5274 | /* If size is zero or the target is already cleared, do nothing. */ |
5275 | if (size == 0 || cleared) | |
9de08200 | 5276 | cleared = 1; |
5b0264cb NS |
5277 | /* We either clear the aggregate or indicate the value is dead. */ |
5278 | else if ((TREE_CODE (type) == UNION_TYPE | |
5279 | || TREE_CODE (type) == QUAL_UNION_TYPE) | |
5280 | && ! CONSTRUCTOR_ELTS (exp)) | |
5281 | /* If the constructor is empty, clear the union. */ | |
5282 | { | |
8148fe65 | 5283 | clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL); |
5b0264cb NS |
5284 | cleared = 1; |
5285 | } | |
bbf6f052 | 5286 | |
5b0264cb NS |
5287 | /* If we are building a static constructor into a register, |
5288 | set the initial value as zero so we can fold the value into | |
5289 | a constant. But if more than one register is involved, | |
5290 | this probably loses. */ | |
5291 | else if (REG_P (target) && TREE_STATIC (exp) | |
5292 | && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD) | |
5293 | { | |
5294 | emit_move_insn (target, CONST0_RTX (GET_MODE (target))); | |
5295 | cleared = 1; | |
5296 | } | |
3a94c984 | 5297 | |
5b0264cb NS |
5298 | /* If the constructor has fewer fields than the structure or |
5299 | if we are initializing the structure to mostly zeros, clear | |
5300 | the whole structure first. Don't do this if TARGET is a | |
5301 | register whose mode size isn't equal to SIZE since | |
5302 | clear_storage can't handle this case. */ | |
5303 | else if (size > 0 | |
4038c495 | 5304 | && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp)) |
5b0264cb NS |
5305 | != fields_length (type)) |
5306 | || mostly_zeros_p (exp)) | |
5307 | && (!REG_P (target) | |
5308 | || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) | |
5309 | == size))) | |
5310 | { | |
8148fe65 | 5311 | clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL); |
5b0264cb NS |
5312 | cleared = 1; |
5313 | } | |
b50d17a1 | 5314 | |
58f7fcc3 | 5315 | if (REG_P (target) && !cleared) |
c41c1387 | 5316 | emit_clobber (target); |
bbf6f052 | 5317 | |
5b0264cb NS |
5318 | /* Store each element of the constructor into the |
5319 | corresponding field of TARGET. */ | |
4038c495 | 5320 | FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value) |
5b0264cb | 5321 | { |
5b0264cb NS |
5322 | enum machine_mode mode; |
5323 | HOST_WIDE_INT bitsize; | |
5324 | HOST_WIDE_INT bitpos = 0; | |
5325 | tree offset; | |
5326 | rtx to_rtx = target; | |
ed1223ba | 5327 | |
5b0264cb NS |
5328 | /* Just ignore missing fields. We cleared the whole |
5329 | structure, above, if any fields are missing. */ | |
5330 | if (field == 0) | |
5331 | continue; | |
ed1223ba | 5332 | |
5b0264cb NS |
5333 | if (cleared && initializer_zerop (value)) |
5334 | continue; | |
ed1223ba | 5335 | |
5b0264cb NS |
5336 | if (host_integerp (DECL_SIZE (field), 1)) |
5337 | bitsize = tree_low_cst (DECL_SIZE (field), 1); | |
5338 | else | |
5339 | bitsize = -1; | |
ed1223ba | 5340 | |
5b0264cb NS |
5341 | mode = DECL_MODE (field); |
5342 | if (DECL_BIT_FIELD (field)) | |
5343 | mode = VOIDmode; | |
ed1223ba | 5344 | |
5b0264cb NS |
5345 | offset = DECL_FIELD_OFFSET (field); |
5346 | if (host_integerp (offset, 0) | |
5347 | && host_integerp (bit_position (field), 0)) | |
5348 | { | |
5349 | bitpos = int_bit_position (field); | |
5350 | offset = 0; | |
5351 | } | |
5352 | else | |
5353 | bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0); | |
ed1223ba | 5354 | |
5b0264cb NS |
5355 | if (offset) |
5356 | { | |
d4ebfa65 | 5357 | enum machine_mode address_mode; |
5b0264cb | 5358 | rtx offset_rtx; |
ed1223ba | 5359 | |
5b0264cb NS |
5360 | offset |
5361 | = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset, | |
5362 | make_tree (TREE_TYPE (exp), | |
5363 | target)); | |
5364 | ||
84217346 | 5365 | offset_rtx = expand_normal (offset); |
5b0264cb | 5366 | gcc_assert (MEM_P (to_rtx)); |
ed1223ba | 5367 | |
d4ebfa65 BE |
5368 | address_mode |
5369 | = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx)); | |
5370 | if (GET_MODE (offset_rtx) != address_mode) | |
5371 | offset_rtx = convert_to_mode (address_mode, offset_rtx, 0); | |
bd070e1a | 5372 | |
5b0264cb NS |
5373 | to_rtx = offset_address (to_rtx, offset_rtx, |
5374 | highest_pow2_factor (offset)); | |
5375 | } | |
c5c76735 | 5376 | |
34c73909 | 5377 | #ifdef WORD_REGISTER_OPERATIONS |
5b0264cb NS |
5378 | /* If this initializes a field that is smaller than a |
5379 | word, at the start of a word, try to widen it to a full | |
5380 | word. This special case allows us to output C++ member | |
5381 | function initializations in a form that the optimizers | |
5382 | can understand. */ | |
5383 | if (REG_P (target) | |
5384 | && bitsize < BITS_PER_WORD | |
5385 | && bitpos % BITS_PER_WORD == 0 | |
5386 | && GET_MODE_CLASS (mode) == MODE_INT | |
5387 | && TREE_CODE (value) == INTEGER_CST | |
5388 | && exp_size >= 0 | |
5389 | && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT) | |
5390 | { | |
5391 | tree type = TREE_TYPE (value); | |
ed1223ba | 5392 | |
5b0264cb NS |
5393 | if (TYPE_PRECISION (type) < BITS_PER_WORD) |
5394 | { | |
5395 | type = lang_hooks.types.type_for_size | |
5396 | (BITS_PER_WORD, TYPE_UNSIGNED (type)); | |
3967bc2d | 5397 | value = fold_convert (type, value); |
5b0264cb | 5398 | } |
ed1223ba | 5399 | |
5b0264cb NS |
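/* Illustrative note (derived from the shift just below): on a big-endian target the field occupies the most significant bits of the word, so the value is shifted up by BITS_PER_WORD - bitsize before the full-word store. */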
5400 | if (BYTES_BIG_ENDIAN) |
5401 | value | |
4845b383 | 5402 | = fold_build2 (LSHIFT_EXPR, type, value, |
3967bc2d | 5403 | build_int_cst (type, |
4845b383 | 5404 | BITS_PER_WORD - bitsize)); |
5b0264cb NS |
5405 | bitsize = BITS_PER_WORD; |
5406 | mode = word_mode; | |
5407 | } | |
34c73909 | 5408 | #endif |
10b76d73 | 5409 | |
5b0264cb NS |
5410 | if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx) |
5411 | && DECL_NONADDRESSABLE_P (field)) | |
5412 | { | |
5413 | to_rtx = copy_rtx (to_rtx); | |
5414 | MEM_KEEP_ALIAS_SET_P (to_rtx) = 1; | |
5415 | } | |
ed1223ba | 5416 | |
5b0264cb NS |
5417 | store_constructor_field (to_rtx, bitsize, bitpos, mode, |
5418 | value, type, cleared, | |
5419 | get_alias_set (TREE_TYPE (field))); | |
5420 | } | |
5421 | break; | |
5422 | } | |
5423 | case ARRAY_TYPE: | |
5424 | { | |
4038c495 GB |
5425 | tree value, index; |
5426 | unsigned HOST_WIDE_INT i; | |
5b0264cb NS |
5427 | int need_to_clear; |
5428 | tree domain; | |
5429 | tree elttype = TREE_TYPE (type); | |
5430 | int const_bounds_p; | |
5431 | HOST_WIDE_INT minelt = 0; | |
5432 | HOST_WIDE_INT maxelt = 0; | |
5433 | ||
5434 | domain = TYPE_DOMAIN (type); | |
5435 | const_bounds_p = (TYPE_MIN_VALUE (domain) | |
5436 | && TYPE_MAX_VALUE (domain) | |
5437 | && host_integerp (TYPE_MIN_VALUE (domain), 0) | |
5438 | && host_integerp (TYPE_MAX_VALUE (domain), 0)); | |
5439 | ||
5440 | /* If we have constant bounds for the range of the type, get them. */ | |
5441 | if (const_bounds_p) | |
5442 | { | |
5443 | minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0); | |
5444 | maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0); | |
5445 | } | |
3a021db2 | 5446 | |
5b0264cb NS |
5447 | /* If the constructor has fewer elements than the array, clear |
5448 | the whole array first. Similarly if this is a static |
5449 | constructor of a non-BLKmode object. */ | |
5450 | if (cleared) | |
5451 | need_to_clear = 0; | |
5452 | else if (REG_P (target) && TREE_STATIC (exp)) | |
5453 | need_to_clear = 1; | |
5454 | else | |
5455 | { | |
4038c495 GB |
5456 | unsigned HOST_WIDE_INT idx; |
5457 | tree index, value; | |
5b0264cb NS |
5458 | HOST_WIDE_INT count = 0, zero_count = 0; |
5459 | need_to_clear = ! const_bounds_p; | |
ed1223ba | 5460 | |
5b0264cb NS |
5461 | /* This loop is a more accurate version of the loop in |
5462 | mostly_zeros_p (it handles RANGE_EXPR in an index). It | |
5463 | is also needed to check for missing elements. */ | |
4038c495 | 5464 | FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value) |
5b0264cb | 5465 | { |
5b0264cb | 5466 | HOST_WIDE_INT this_node_count; |
4038c495 GB |
5467 | |
5468 | if (need_to_clear) | |
5469 | break; | |
ed1223ba | 5470 | |
5b0264cb NS |
5471 | if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR) |
5472 | { | |
5473 | tree lo_index = TREE_OPERAND (index, 0); | |
5474 | tree hi_index = TREE_OPERAND (index, 1); | |
ed1223ba | 5475 | |
5b0264cb NS |
5476 | if (! host_integerp (lo_index, 1) |
5477 | || ! host_integerp (hi_index, 1)) | |
5478 | { | |
5479 | need_to_clear = 1; | |
5480 | break; | |
5481 | } | |
ed1223ba | 5482 | |
5b0264cb NS |
5483 | this_node_count = (tree_low_cst (hi_index, 1) |
5484 | - tree_low_cst (lo_index, 1) + 1); | |
5485 | } | |
5486 | else | |
5487 | this_node_count = 1; | |
ed1223ba | 5488 | |
5b0264cb | 5489 | count += this_node_count; |
4038c495 | 5490 | if (mostly_zeros_p (value)) |
5b0264cb NS |
5491 | zero_count += this_node_count; |
5492 | } | |
ed1223ba | 5493 | |
5b0264cb NS |
5494 | /* Clear the entire array first if there are any missing |
5495 | elements, or if the incidence of zero elements is >= | |
5496 | 75%. */ | |
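/* A clarifying note on the test below: it avoids division, since 4 * zero_count >= 3 * count is equivalent to zero_count being at least 75% of count. */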
5497 | if (! need_to_clear | |
5498 | && (count < maxelt - minelt + 1 | |
5499 | || 4 * zero_count >= 3 * count)) | |
5500 | need_to_clear = 1; | |
5501 | } | |
ed1223ba | 5502 | |
5b0264cb NS |
5503 | if (need_to_clear && size > 0) |
5504 | { | |
5505 | if (REG_P (target)) | |
5506 | emit_move_insn (target, CONST0_RTX (GET_MODE (target))); | |
5507 | else | |
8148fe65 | 5508 | clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL); |
5b0264cb NS |
5509 | cleared = 1; |
5510 | } | |
3a021db2 | 5511 | |
5b0264cb NS |
5512 | if (!cleared && REG_P (target)) |
5513 | /* Inform later passes that the old value is dead. */ | |
c41c1387 | 5514 | emit_clobber (target); |
3a021db2 | 5515 | |
5b0264cb NS |
5516 | /* Store each element of the constructor into the |
5517 | corresponding element of TARGET, determined by counting the | |
5518 | elements. */ | |
4038c495 | 5519 | FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value) |
5b0264cb NS |
5520 | { |
5521 | enum machine_mode mode; | |
5522 | HOST_WIDE_INT bitsize; | |
5523 | HOST_WIDE_INT bitpos; | |
5b0264cb | 5524 | rtx xtarget = target; |
ed1223ba | 5525 | |
5b0264cb NS |
5526 | if (cleared && initializer_zerop (value)) |
5527 | continue; | |
ed1223ba | 5528 | |
5b0264cb NS |
5529 | mode = TYPE_MODE (elttype); |
5530 | if (mode == BLKmode) | |
5531 | bitsize = (host_integerp (TYPE_SIZE (elttype), 1) | |
5532 | ? tree_low_cst (TYPE_SIZE (elttype), 1) | |
5533 | : -1); | |
5534 | else | |
5535 | bitsize = GET_MODE_BITSIZE (mode); | |
ed1223ba | 5536 | |
5b0264cb NS |
5537 | if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR) |
5538 | { | |
5539 | tree lo_index = TREE_OPERAND (index, 0); | |
5540 | tree hi_index = TREE_OPERAND (index, 1); | |
5541 | rtx index_r, pos_rtx; | |
5542 | HOST_WIDE_INT lo, hi, count; | |
5543 | tree position; | |
ed1223ba | 5544 | |
5b0264cb NS |
5545 | /* If the range is constant and "small", unroll the loop. */ |
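/* Here "small" (see the condition below) means the target is not in memory, or there are at most two elements, or the total data stored is at most 40 bytes. */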
5546 | if (const_bounds_p | |
5547 | && host_integerp (lo_index, 0) | |
5548 | && host_integerp (hi_index, 0) | |
5549 | && (lo = tree_low_cst (lo_index, 0), | |
5550 | hi = tree_low_cst (hi_index, 0), | |
5551 | count = hi - lo + 1, | |
5552 | (!MEM_P (target) | |
5553 | || count <= 2 | |
5554 | || (host_integerp (TYPE_SIZE (elttype), 1) | |
5555 | && (tree_low_cst (TYPE_SIZE (elttype), 1) * count | |
5556 | <= 40 * 8))))) | |
5557 | { | |
5558 | lo -= minelt; hi -= minelt; | |
5559 | for (; lo <= hi; lo++) | |
5560 | { | |
5561 | bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0); | |
ed1223ba | 5562 | |
5b0264cb NS |
5563 | if (MEM_P (target) |
5564 | && !MEM_KEEP_ALIAS_SET_P (target) | |
5565 | && TREE_CODE (type) == ARRAY_TYPE | |
5566 | && TYPE_NONALIASED_COMPONENT (type)) | |
5567 | { | |
5568 | target = copy_rtx (target); | |
5569 | MEM_KEEP_ALIAS_SET_P (target) = 1; | |
5570 | } | |
ed1223ba | 5571 | |
5b0264cb NS |
5572 | store_constructor_field |
5573 | (target, bitsize, bitpos, mode, value, type, cleared, | |
5574 | get_alias_set (elttype)); | |
5575 | } | |
5576 | } | |
5577 | else | |
5578 | { | |
5579 | rtx loop_start = gen_label_rtx (); | |
5580 | rtx loop_end = gen_label_rtx (); | |
5581 | tree exit_cond; | |
ed1223ba | 5582 | |
84217346 | 5583 | expand_normal (hi_index); |
ed1223ba | 5584 | |
c2255bc4 AH |
5585 | index = build_decl (EXPR_LOCATION (exp), |
5586 | VAR_DECL, NULL_TREE, domain); | |
cde0f3fd | 5587 | index_r = gen_reg_rtx (promote_decl_mode (index, NULL)); |
5b0264cb | 5588 | SET_DECL_RTL (index, index_r); |
79f5e442 | 5589 | store_expr (lo_index, index_r, 0, false); |
ed1223ba | 5590 | |
5b0264cb NS |
5591 | /* Build the head of the loop. */ |
5592 | do_pending_stack_adjust (); | |
5593 | emit_label (loop_start); | |
5594 | ||
5595 | /* Assign value to element index. */ | |
3967bc2d RS |
5596 | position = |
5597 | fold_convert (ssizetype, | |
5598 | fold_build2 (MINUS_EXPR, | |
5599 | TREE_TYPE (index), | |
5600 | index, | |
5601 | TYPE_MIN_VALUE (domain))); | |
5602 | ||
5603 | position = | |
5604 | size_binop (MULT_EXPR, position, | |
5605 | fold_convert (ssizetype, | |
5606 | TYPE_SIZE_UNIT (elttype))); | |
ed1223ba | 5607 | |
84217346 | 5608 | pos_rtx = expand_normal (position); |
5b0264cb NS |
5609 | xtarget = offset_address (target, pos_rtx, |
5610 | highest_pow2_factor (position)); | |
5611 | xtarget = adjust_address (xtarget, mode, 0); | |
5612 | if (TREE_CODE (value) == CONSTRUCTOR) | |
5613 | store_constructor (value, xtarget, cleared, | |
5614 | bitsize / BITS_PER_UNIT); | |
5615 | else | |
79f5e442 | 5616 | store_expr (value, xtarget, 0, false); |
5b0264cb NS |
5617 | |
5618 | /* Generate a conditional jump to exit the loop. */ | |
5619 | exit_cond = build2 (LT_EXPR, integer_type_node, | |
5620 | index, hi_index); | |
40e90eac | 5621 | jumpif (exit_cond, loop_end, -1); |
ed1223ba | 5622 | |
5b0264cb NS |
5623 | /* Update the loop counter, and jump to the head of |
5624 | the loop. */ | |
5625 | expand_assignment (index, | |
5626 | build2 (PLUS_EXPR, TREE_TYPE (index), | |
79f5e442 ZD |
5627 | index, integer_one_node), |
5628 | false); | |
ed1223ba | 5629 | |
5b0264cb | 5630 | emit_jump (loop_start); |
ed1223ba | 5631 | |
5b0264cb NS |
5632 | /* Build the end of the loop. */ |
5633 | emit_label (loop_end); | |
5634 | } | |
5635 | } | |
5636 | else if ((index != 0 && ! host_integerp (index, 0)) | |
5637 | || ! host_integerp (TYPE_SIZE (elttype), 1)) | |
5638 | { | |
5639 | tree position; | |
ed1223ba | 5640 | |
5b0264cb NS |
5641 | if (index == 0) |
5642 | index = ssize_int (1); | |
ed1223ba | 5643 | |
5b0264cb NS |
5644 | if (minelt) |
5645 | index = fold_convert (ssizetype, | |
4845b383 KH |
5646 | fold_build2 (MINUS_EXPR, |
5647 | TREE_TYPE (index), | |
5648 | index, | |
5649 | TYPE_MIN_VALUE (domain))); | |
ed1223ba | 5650 | |
3967bc2d RS |
5651 | position = |
5652 | size_binop (MULT_EXPR, index, | |
5653 | fold_convert (ssizetype, | |
5654 | TYPE_SIZE_UNIT (elttype))); | |
5b0264cb | 5655 | xtarget = offset_address (target, |
84217346 | 5656 | expand_normal (position), |
5b0264cb NS |
5657 | highest_pow2_factor (position)); |
5658 | xtarget = adjust_address (xtarget, mode, 0); | |
79f5e442 | 5659 | store_expr (value, xtarget, 0, false); |
5b0264cb NS |
5660 | } |
5661 | else | |
5662 | { | |
5663 | if (index != 0) | |
5664 | bitpos = ((tree_low_cst (index, 0) - minelt) | |
5665 | * tree_low_cst (TYPE_SIZE (elttype), 1)); | |
5666 | else | |
5667 | bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1)); | |
ed1223ba | 5668 | |
5b0264cb NS |
5669 | if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target) |
5670 | && TREE_CODE (type) == ARRAY_TYPE | |
5671 | && TYPE_NONALIASED_COMPONENT (type)) | |
5672 | { | |
5673 | target = copy_rtx (target); | |
5674 | MEM_KEEP_ALIAS_SET_P (target) = 1; | |
5675 | } | |
5676 | store_constructor_field (target, bitsize, bitpos, mode, value, | |
5677 | type, cleared, get_alias_set (elttype)); | |
5678 | } | |
5679 | } | |
5680 | break; | |
5681 | } | |
3a021db2 | 5682 | |
5b0264cb NS |
5683 | case VECTOR_TYPE: |
5684 | { | |
4038c495 GB |
5685 | unsigned HOST_WIDE_INT idx; |
5686 | constructor_elt *ce; | |
5b0264cb NS |
5687 | int i; |
5688 | int need_to_clear; | |
5689 | int icode = 0; | |
5690 | tree elttype = TREE_TYPE (type); | |
5691 | int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1); | |
5692 | enum machine_mode eltmode = TYPE_MODE (elttype); | |
5693 | HOST_WIDE_INT bitsize; | |
5694 | HOST_WIDE_INT bitpos; | |
201dd46b | 5695 | rtvec vector = NULL; |
5b0264cb | 5696 | unsigned n_elts; |
723a7ced | 5697 | alias_set_type alias; |
ed1223ba | 5698 | |
5b0264cb | 5699 | gcc_assert (eltmode != BLKmode); |
ed1223ba | 5700 | |
5b0264cb NS |
5701 | n_elts = TYPE_VECTOR_SUBPARTS (type); |
5702 | if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target))) | |
5703 | { | |
5704 | enum machine_mode mode = GET_MODE (target); | |
ed1223ba | 5705 | |
166cdb08 | 5706 | icode = (int) optab_handler (vec_init_optab, mode)->insn_code; |
5b0264cb NS |
5707 | if (icode != CODE_FOR_nothing) |
5708 | { | |
5709 | unsigned int i; | |
ed1223ba | 5710 | |
201dd46b | 5711 | vector = rtvec_alloc (n_elts); |
5b0264cb | 5712 | for (i = 0; i < n_elts; i++) |
201dd46b | 5713 | RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode)); |
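/* Any element not overwritten by the constructor elements stored below stays zero. */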
5b0264cb NS |
5714 | } |
5715 | } | |
ed1223ba | 5716 | |
5b0264cb NS |
5717 | /* If the constructor has fewer elements than the vector, |
5718 | clear the whole array first. Similarly if this is a static |
5719 | constructor of a non-BLKmode object. */ | |
5720 | if (cleared) | |
5721 | need_to_clear = 0; | |
5722 | else if (REG_P (target) && TREE_STATIC (exp)) | |
5723 | need_to_clear = 1; | |
5724 | else | |
5725 | { | |
5726 | unsigned HOST_WIDE_INT count = 0, zero_count = 0; | |
4038c495 | 5727 | tree value; |
ed1223ba | 5728 | |
4038c495 | 5729 | FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value) |
5b0264cb NS |
5730 | { |
5731 | int n_elts_here = tree_low_cst | |
5732 | (int_const_binop (TRUNC_DIV_EXPR, | |
4038c495 | 5733 | TYPE_SIZE (TREE_TYPE (value)), |
5b0264cb | 5734 | TYPE_SIZE (elttype), 0), 1); |
ed1223ba | 5735 | |
5b0264cb | 5736 | count += n_elts_here; |
4038c495 | 5737 | if (mostly_zeros_p (value)) |
5b0264cb NS |
5738 | zero_count += n_elts_here; |
5739 | } | |
3a021db2 | 5740 | |
5b0264cb NS |
5741 | /* Clear the entire vector first if there are any missing elements, |
5742 | or if the incidence of zero elements is >= 75%. */ | |
5743 | need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count); | |
5744 | } | |
ed1223ba | 5745 | |
5b0264cb NS |
5746 | if (need_to_clear && size > 0 && !vector) |
5747 | { | |
5748 | if (REG_P (target)) | |
723a7ced | 5749 | emit_move_insn (target, CONST0_RTX (GET_MODE (target))); |
5b0264cb | 5750 | else |
8148fe65 | 5751 | clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL); |
5b0264cb NS |
5752 | cleared = 1; |
5753 | } | |
ed1223ba | 5754 | |
2ab1754e | 5755 | /* Inform later passes that the old value is dead. */ |
cf26aa89 | 5756 | if (!cleared && !vector && REG_P (target)) |
2ab1754e | 5757 | emit_move_insn (target, CONST0_RTX (GET_MODE (target))); |
5b0264cb | 5758 | |
723a7ced JJ |
5759 | if (MEM_P (target)) |
5760 | alias = MEM_ALIAS_SET (target); | |
5761 | else | |
5762 | alias = get_alias_set (elttype); | |
5763 | ||
5b0264cb NS |
5764 | /* Store each element of the constructor into the corresponding |
5765 | element of TARGET, determined by counting the elements. */ | |
4038c495 GB |
5766 | for (idx = 0, i = 0; |
5767 | VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce); | |
5768 | idx++, i += bitsize / elt_size) | |
5b0264cb | 5769 | { |
5b0264cb | 5770 | HOST_WIDE_INT eltpos; |
4038c495 | 5771 | tree value = ce->value; |
ed1223ba | 5772 | |
5b0264cb NS |
5773 | bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1); |
5774 | if (cleared && initializer_zerop (value)) | |
5775 | continue; | |
ed1223ba | 5776 | |
4038c495 GB |
5777 | if (ce->index) |
5778 | eltpos = tree_low_cst (ce->index, 1); | |
5b0264cb NS |
5779 | else |
5780 | eltpos = i; | |
ed1223ba | 5781 | |
5b0264cb NS |
5782 | if (vector) |
5783 | { | |
5784 | /* Vector CONSTRUCTORs should only be built from smaller | |
5785 | vectors in the case of BLKmode vectors. */ | |
5786 | gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE); | |
201dd46b | 5787 | RTVEC_ELT (vector, eltpos) |
84217346 | 5788 | = expand_normal (value); |
5b0264cb NS |
5789 | } |
5790 | else | |
5791 | { | |
5792 | enum machine_mode value_mode = | |
5793 | TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE | |
3a021db2 PB |
5794 | ? TYPE_MODE (TREE_TYPE (value)) |
5795 | : eltmode; | |
5b0264cb NS |
5796 | bitpos = eltpos * elt_size; |
5797 | store_constructor_field (target, bitsize, bitpos, | |
5798 | value_mode, value, type, | |
723a7ced | 5799 | cleared, alias); |
5b0264cb NS |
5800 | } |
5801 | } | |
ed1223ba | 5802 | |
5b0264cb NS |
5803 | if (vector) |
5804 | emit_insn (GEN_FCN (icode) | |
5805 | (target, | |
201dd46b | 5806 | gen_rtx_PARALLEL (GET_MODE (target), vector))); |
5b0264cb NS |
5807 | break; |
5808 | } | |
ed1223ba | 5809 | |
5b0264cb NS |
5810 | default: |
5811 | gcc_unreachable (); | |
071a6595 | 5812 | } |
bbf6f052 RK |
5813 | } |
5814 | ||
5815 | /* Store the value of EXP (an expression tree) | |
5816 | into a subfield of TARGET which has mode MODE and occupies | |
5817 | BITSIZE bits, starting BITPOS bits from the start of TARGET. | |
5818 | If MODE is VOIDmode, it means that we are storing into a bit-field. | |
5819 | ||
f45bdcd0 KH |
5820 | Always return const0_rtx unless we have something particular to |
5821 | return. | |
bbf6f052 | 5822 | |
a06ef755 | 5823 | TYPE is the type of the underlying object. |
ece32014 MM |
5824 | |
5825 | ALIAS_SET is the alias set for the destination. This value will | |
5826 | (in general) be different from that for TARGET, since TARGET is a | |
79f5e442 | 5827 | reference to the containing structure. |
b8698a0f | 5828 | |
79f5e442 | 5829 | If NONTEMPORAL is true, try generating a nontemporal store. */ |
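/* For instance, storing into a 3-bit bit-field that starts 5 bits into TARGET corresponds to BITSIZE == 3, BITPOS == 5 and MODE == VOIDmode; that case is handled below through store_bit_field. */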
bbf6f052 RK |
5830 | |
5831 | static rtx | |
502b8322 | 5832 | store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, |
4862826d ILT |
5833 | enum machine_mode mode, tree exp, tree type, |
5834 | alias_set_type alias_set, bool nontemporal) | |
bbf6f052 | 5835 | { |
e9a25f70 JL |
5836 | if (TREE_CODE (exp) == ERROR_MARK) |
5837 | return const0_rtx; | |
5838 | ||
2be6a7e9 RK |
5839 | /* If we have nothing to store, do nothing unless the expression has |
5840 | side-effects. */ | |
5841 | if (bitsize == 0) | |
49452c07 | 5842 | return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL); |
bbf6f052 RK |
5843 | |
5844 | /* If we are storing into an unaligned field of an aligned union that is | |
5845 | in a register, we may have the mode of TARGET being an integer mode but | |
5846 | MODE == BLKmode. In that case, get an aligned object whose size and | |
5847 | alignment are the same as TARGET and store TARGET into it (we can avoid | |
5848 | the store if the field being stored is the entire width of TARGET). Then | |
5849 | call ourselves recursively to store the field into a BLKmode version of | |
5850 | that object. Finally, load from the object into TARGET. This is not | |
5851 | very efficient in general, but should only be slightly more expensive | |
5852 | than the otherwise-required unaligned accesses. Perhaps this can be | |
85a43a2f RK |
5853 | cleaned up later. It's tempting to make OBJECT readonly, but it's set |
5854 | twice, once with emit_move_insn and once via store_field. */ | |
bbf6f052 RK |
5855 | |
5856 | if (mode == BLKmode | |
f8cfc6aa | 5857 | && (REG_P (target) || GET_CODE (target) == SUBREG)) |
bbf6f052 | 5858 | { |
85a43a2f | 5859 | rtx object = assign_temp (type, 0, 1, 1); |
c4e59f51 | 5860 | rtx blk_object = adjust_address (object, BLKmode, 0); |
bbf6f052 | 5861 | |
8752c357 | 5862 | if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target))) |
bbf6f052 RK |
5863 | emit_move_insn (object, target); |
5864 | ||
79f5e442 ZD |
5865 | store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set, |
5866 | nontemporal); | |
bbf6f052 RK |
5867 | |
5868 | emit_move_insn (target, object); | |
5869 | ||
a06ef755 | 5870 | /* We want to return the BLKmode version of the data. */ |
46093b97 | 5871 | return blk_object; |
bbf6f052 | 5872 | } |
c3b247b4 JM |
5873 | |
5874 | if (GET_CODE (target) == CONCAT) | |
5875 | { | |
5876 | /* We're storing into a struct containing a single __complex. */ | |
5877 | ||
5b0264cb | 5878 | gcc_assert (!bitpos); |
79f5e442 | 5879 | return store_expr (exp, target, 0, nontemporal); |
c3b247b4 | 5880 | } |
bbf6f052 RK |
5881 | |
5882 | /* If the structure is in a register or if the component | |
5883 | is a bit field, we cannot use addressing to access it. | |
5884 | Use bit-field techniques or SUBREG to store in it. */ | |
5885 | ||
4fa52007 | 5886 | if (mode == VOIDmode |
6ab06cbb JW |
5887 | || (mode != BLKmode && ! direct_store[(int) mode] |
5888 | && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT | |
5889 | && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT) | |
f8cfc6aa | 5890 | || REG_P (target) |
c980ac49 | 5891 | || GET_CODE (target) == SUBREG |
ccc98036 RS |
5892 | /* If the field isn't aligned enough to store as an ordinary memref, |
5893 | store it as a bit field. */ | |
15b19a7d | 5894 | || (mode != BLKmode |
9e5f281f OH |
5895 | && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)) |
5896 | || bitpos % GET_MODE_ALIGNMENT (mode)) | |
5897 | && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))) | |
502b8322 | 5898 | || (bitpos % BITS_PER_UNIT != 0))) |
14a774a9 RK |
5899 | /* If the RHS and field are a constant size and the size of the |
5900 | RHS isn't the same size as the bitfield, we must use bitfield | |
5901 | operations. */ | |
05bccae2 RK |
5902 | || (bitsize >= 0 |
5903 | && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST | |
70f34814 RG |
5904 | && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0) |
5905 | /* If we are expanding a MEM_REF of a non-BLKmode non-addressable | |
5906 | decl we must use bitfield operations. */ | |
5907 | || (bitsize >= 0 | |
5908 | && TREE_CODE (exp) == MEM_REF | |
5909 | && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR | |
5910 | && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) | |
5911 | && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) |
5912 | && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode)) | |
bbf6f052 | 5913 | { |
48cc8d3b | 5914 | rtx temp; |
641cac0b | 5915 | gimple nop_def; |
48cc8d3b RH |
5916 | |
5917 | /* If EXP is a NOP_EXPR of precision less than its mode, then that | |
5918 | implies a mask operation. If the precision is the same size as | |
5919 | the field we're storing into, that mask is redundant. This is | |
5920 | particularly common with bit field assignments generated by the | |
5921 | C front end. */ | |
641cac0b AN |
5922 | nop_def = get_def_for_expr (exp, NOP_EXPR); |
5923 | if (nop_def) | |
8d740330 RH |
5924 | { |
5925 | tree type = TREE_TYPE (exp); | |
5926 | if (INTEGRAL_TYPE_P (type) | |
5927 | && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type)) | |
5928 | && bitsize == TYPE_PRECISION (type)) | |
5929 | { | |
641cac0b AN |
5930 | tree op = gimple_assign_rhs1 (nop_def); |
5931 | type = TREE_TYPE (op); | |
8d740330 | 5932 | if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize) |
641cac0b | 5933 | exp = op; |
8d740330 RH |
5934 | } |
5935 | } | |
48cc8d3b | 5936 | |
84217346 | 5937 | temp = expand_normal (exp); |
bbd6cf73 | 5938 | |
ef19912d RK |
5939 | /* If BITSIZE is narrower than the size of the type of EXP |
5940 | we will be narrowing TEMP. Normally, what's wanted are the | |
5941 | low-order bits. However, if EXP's type is a record and this is | |
5942 | big-endian machine, we want the upper BITSIZE bits. */ | |
5943 | if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT | |
65a07688 | 5944 | && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp)) |
ef19912d RK |
5945 | && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE) |
5946 | temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp, | |
5947 | size_int (GET_MODE_BITSIZE (GET_MODE (temp)) | |
5948 | - bitsize), | |
c1853da7 | 5949 | NULL_RTX, 1); |
ef19912d | 5950 | |
bbd6cf73 RK |
5951 | /* Unless MODE is VOIDmode or BLKmode, convert TEMP to |
5952 | MODE. */ | |
5953 | if (mode != VOIDmode && mode != BLKmode | |
5954 | && mode != TYPE_MODE (TREE_TYPE (exp))) | |
5955 | temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1); | |
5956 | ||
bd323ce5 | 5957 | /* If the modes of TEMP and TARGET are both BLKmode, both |
a281e72d | 5958 | must be in memory and BITPOS must be aligned on a byte |
bd323ce5 EB |
5959 | boundary. If so, we simply do a block copy. Likewise |
5960 | for a BLKmode-like TARGET. */ | |
5961 | if (GET_MODE (temp) == BLKmode | |
5962 | && (GET_MODE (target) == BLKmode | |
5963 | || (MEM_P (target) | |
5964 | && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT | |
5965 | && (bitpos % BITS_PER_UNIT) == 0 | |
5966 | && (bitsize % BITS_PER_UNIT) == 0))) | |
a281e72d | 5967 | { |
5b0264cb | 5968 | gcc_assert (MEM_P (target) && MEM_P (temp) |
bd323ce5 | 5969 | && (bitpos % BITS_PER_UNIT) == 0); |
a281e72d | 5970 | |
f4ef873c | 5971 | target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT); |
a281e72d | 5972 | emit_block_move (target, temp, |
a06ef755 | 5973 | GEN_INT ((bitsize + BITS_PER_UNIT - 1) |
44bb111a RH |
5974 | / BITS_PER_UNIT), |
5975 | BLOCK_OP_NORMAL); | |
a281e72d | 5976 | |
f45bdcd0 | 5977 | return const0_rtx; |
a281e72d RK |
5978 | } |
5979 | ||
bbf6f052 | 5980 | /* Store the value in the bitfield. */ |
b3520980 | 5981 | store_bit_field (target, bitsize, bitpos, mode, temp); |
a06ef755 | 5982 | |
bbf6f052 RK |
5983 | return const0_rtx; |
5984 | } | |
5985 | else | |
5986 | { | |
bbf6f052 | 5987 | /* Now build a reference to just the desired component. */ |
f45bdcd0 | 5988 | rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT); |
a06ef755 RK |
5989 | |
5990 | if (to_rtx == target) | |
5991 | to_rtx = copy_rtx (to_rtx); | |
792760b9 | 5992 | |
c6df88cb | 5993 | MEM_SET_IN_STRUCT_P (to_rtx, 1); |
10b76d73 | 5994 | if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0) |
a06ef755 | 5995 | set_mem_alias_set (to_rtx, alias_set); |
bbf6f052 | 5996 | |
79f5e442 | 5997 | return store_expr (exp, to_rtx, 0, nontemporal); |
bbf6f052 RK |
5998 | } |
5999 | } | |
6000 | \f | |
6001 | /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF, | |
b4e3fabb RK |
6002 | an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these |
6003 | codes and find the ultimate containing object, which we return. | |
bbf6f052 RK |
6004 | |
6005 | We set *PBITSIZE to the size in bits that we want, *PBITPOS to the | |
6006 | bit position, and *PUNSIGNEDP to the signedness of the field. | |
7bb0943f RS |
6007 | If the position of the field is variable, we store a tree |
6008 | giving the variable offset (in units) in *POFFSET. | |
6009 | This offset is in addition to the bit position. | |
6010 | If the position is not variable, we store 0 in *POFFSET. | |
bbf6f052 RK |
6011 | |
6012 | If any of the extraction expressions is volatile, | |
6013 | we store 1 in *PVOLATILEP. Otherwise we don't change that. | |
6014 | ||
bd323ce5 EB |
6015 | If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode. |
6016 | Otherwise, it is a mode that can be used to access the field. | |
e7c33f54 RK |
6017 | |
6018 | If the field describes a variable-sized object, *PMODE is set to | |
bd323ce5 | 6019 | BLKmode and *PBITSIZE is set to -1. An access cannot be made in |
2614034e EB |
6020 | this case, but the address of the object can be found. |
6021 | ||
6022 | If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't | |
6023 | look through nodes that serve as markers of a greater alignment than | |
6024 | the one that can be deduced from the expression. These nodes make it | |
6025 | possible for front-ends to prevent temporaries from being created by | |
6026 | the middle-end on alignment considerations. For that purpose, the | |
6027 | normal operating mode at high-level is to always pass FALSE so that | |
6028 | the ultimate containing object is really returned; moreover, the | |
6029 | associated predicate handled_component_p will always return TRUE | |
6030 | on these nodes, thus indicating that they are essentially handled | |
6031 | by get_inner_reference. TRUE should only be passed when the caller | |
6032 | is scanning the expression in order to build another representation | |
6033 | and specifically knows how to handle these nodes; as such, this is | |
6034 | the normal operating mode in the RTL expanders. */ | |
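/* For example, for a reference such as a.b[i].c the walk below returns the innermost object (a here), accumulates the constant part of the position in *PBITPOS, and leaves any variable part (such as the contribution of the index i) in *POFFSET. */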
bbf6f052 RK |
6035 | |
6036 | tree | |
502b8322 AJ |
6037 | get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize, |
6038 | HOST_WIDE_INT *pbitpos, tree *poffset, | |
6039 | enum machine_mode *pmode, int *punsignedp, | |
2614034e | 6040 | int *pvolatilep, bool keep_aligning) |
bbf6f052 RK |
6041 | { |
6042 | tree size_tree = 0; | |
6043 | enum machine_mode mode = VOIDmode; | |
bd323ce5 | 6044 | bool blkmode_bitfield = false; |
fed3cef0 | 6045 | tree offset = size_zero_node; |
7fa5296e | 6046 | double_int bit_offset = double_int_zero; |
bbf6f052 | 6047 | |
770ae6cc RK |
6048 | /* First get the mode, signedness, and size. We do this from just the |
6049 | outermost expression. */ | |
997ac87b | 6050 | *pbitsize = -1; |
bbf6f052 RK |
6051 | if (TREE_CODE (exp) == COMPONENT_REF) |
6052 | { | |
bd323ce5 EB |
6053 | tree field = TREE_OPERAND (exp, 1); |
6054 | size_tree = DECL_SIZE (field); | |
6055 | if (!DECL_BIT_FIELD (field)) | |
6056 | mode = DECL_MODE (field); | |
6057 | else if (DECL_MODE (field) == BLKmode) | |
6058 | blkmode_bitfield = true; | |
6a78b724 DD |
6059 | else if (TREE_THIS_VOLATILE (exp) |
6060 | && flag_strict_volatile_bitfields > 0) | |
6061 | /* Volatile bitfields should be accessed in the mode of the | |
6062 | field's type, not the mode computed based on the bit | |
6063 | size. */ | |
6064 | mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field)); | |
bd323ce5 EB |
6065 | |
6066 | *punsignedp = DECL_UNSIGNED (field); | |
bbf6f052 RK |
6067 | } |
6068 | else if (TREE_CODE (exp) == BIT_FIELD_REF) | |
6069 | { | |
6070 | size_tree = TREE_OPERAND (exp, 1); | |
fc0f49f3 RG |
6071 | *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp)) |
6072 | || TYPE_UNSIGNED (TREE_TYPE (exp))); | |
ed1223ba | 6073 | |
0890b981 AP |
6074 | /* For vector types, with the correct size of access, use the mode of |
6075 | inner type. */ | |
6076 | if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE | |
6077 | && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))) | |
6078 | && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp)))) | |
6079 | mode = TYPE_MODE (TREE_TYPE (exp)); | |
bbf6f052 RK |
6080 | } |
6081 | else | |
6082 | { | |
6083 | mode = TYPE_MODE (TREE_TYPE (exp)); | |
8df83eae | 6084 | *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp)); |
770ae6cc | 6085 | |
ab87f8c8 JL |
6086 | if (mode == BLKmode) |
6087 | size_tree = TYPE_SIZE (TREE_TYPE (exp)); | |
770ae6cc RK |
6088 | else |
6089 | *pbitsize = GET_MODE_BITSIZE (mode); | |
bbf6f052 | 6090 | } |
3a94c984 | 6091 | |
770ae6cc | 6092 | if (size_tree != 0) |
bbf6f052 | 6093 | { |
770ae6cc | 6094 | if (! host_integerp (size_tree, 1)) |
e7c33f54 RK |
6095 | mode = BLKmode, *pbitsize = -1; |
6096 | else | |
770ae6cc | 6097 | *pbitsize = tree_low_cst (size_tree, 1); |
bbf6f052 RK |
6098 | } |
6099 | ||
6100 | /* Compute cumulative bit-offset for nested component-refs and array-refs, | |
6101 | and find the ultimate containing object. */ | |
bbf6f052 RK |
6102 | while (1) |
6103 | { | |
afe84921 | 6104 | switch (TREE_CODE (exp)) |
bbf6f052 | 6105 | { |
afe84921 | 6106 | case BIT_FIELD_REF: |
7fa5296e RG |
6107 | bit_offset |
6108 | = double_int_add (bit_offset, | |
6109 | tree_to_double_int (TREE_OPERAND (exp, 2))); | |
afe84921 | 6110 | break; |
bbf6f052 | 6111 | |
afe84921 RH |
6112 | case COMPONENT_REF: |
6113 | { | |
6114 | tree field = TREE_OPERAND (exp, 1); | |
6115 | tree this_offset = component_ref_field_offset (exp); | |
e7f3c83f | 6116 | |
afe84921 RH |
6117 | /* If this field hasn't been filled in yet, don't go past it. |
6118 | This should only happen when folding expressions made during | |
6119 | type construction. */ | |
6120 | if (this_offset == 0) | |
6121 | break; | |
e6d8c385 | 6122 | |
afe84921 | 6123 | offset = size_binop (PLUS_EXPR, offset, this_offset); |
7fa5296e RG |
6124 | bit_offset = double_int_add (bit_offset, |
6125 | tree_to_double_int | |
6126 | (DECL_FIELD_BIT_OFFSET (field))); | |
7156dead | 6127 | |
afe84921 RH |
6128 | /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */ |
6129 | } | |
6130 | break; | |
7156dead | 6131 | |
afe84921 RH |
6132 | case ARRAY_REF: |
6133 | case ARRAY_RANGE_REF: | |
6134 | { | |
6135 | tree index = TREE_OPERAND (exp, 1); | |
6136 | tree low_bound = array_ref_low_bound (exp); | |
6137 | tree unit_size = array_ref_element_size (exp); | |
6138 | ||
6139 | /* We assume all arrays have sizes that are a multiple of a byte. | |
6140 | First subtract the lower bound, if any, in the type of the | |
6141 | index, then convert to sizetype and multiply by the size of | |
6142 | the array element. */ | |
6143 | if (! integer_zerop (low_bound)) | |
4845b383 KH |
6144 | index = fold_build2 (MINUS_EXPR, TREE_TYPE (index), |
6145 | index, low_bound); | |
afe84921 RH |
6146 | |
6147 | offset = size_binop (PLUS_EXPR, offset, | |
6148 | size_binop (MULT_EXPR, | |
3967bc2d | 6149 | fold_convert (sizetype, index), |
afe84921 RH |
6150 | unit_size)); |
6151 | } | |
6152 | break; | |
6153 | ||
6154 | case REALPART_EXPR: | |
afe84921 RH |
6155 | break; |
6156 | ||
6157 | case IMAGPART_EXPR: | |
7fa5296e RG |
6158 | bit_offset = double_int_add (bit_offset, |
6159 | uhwi_to_double_int (*pbitsize)); | |
afe84921 RH |
6160 | break; |
6161 | ||
afe84921 | 6162 | case VIEW_CONVERT_EXPR: |
2614034e EB |
6163 | if (keep_aligning && STRICT_ALIGNMENT |
6164 | && (TYPE_ALIGN (TREE_TYPE (exp)) | |
afe84921 | 6165 | > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))) |
afe84921 RH |
6166 | && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))) |
6167 | < BIGGEST_ALIGNMENT) | |
6168 | && (TYPE_ALIGN_OK (TREE_TYPE (exp)) | |
6169 | || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0))))) | |
6170 | goto done; | |
6171 | break; | |
6172 | ||
70f34814 RG |
6173 | case MEM_REF: |
6174 | /* Hand back the decl for MEM[&decl, off]. */ | |
6175 | if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR) | |
6176 | { | |
6177 | tree off = TREE_OPERAND (exp, 1); | |
6178 | if (!integer_zerop (off)) | |
6179 | { | |
6180 | double_int boff, coff = mem_ref_offset (exp); | |
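/* Convert the constant byte offset to a bit offset before adding it to BIT_OFFSET. */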
6181 | boff = double_int_lshift (coff, | |
6182 | BITS_PER_UNIT == 8 | |
6183 | ? 3 : exact_log2 (BITS_PER_UNIT), | |
6184 | HOST_BITS_PER_DOUBLE_INT, true); | |
6185 | bit_offset = double_int_add (bit_offset, boff); | |
6186 | } | |
6187 | exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0); | |
6188 | } | |
6189 | goto done; | |
6190 | ||
afe84921 RH |
6191 | default: |
6192 | goto done; | |
6193 | } | |
7bb0943f RS |
6194 | |
6195 | /* If any reference in the chain is volatile, the effect is volatile. */ | |
6196 | if (TREE_THIS_VOLATILE (exp)) | |
6197 | *pvolatilep = 1; | |
839c4796 | 6198 | |
bbf6f052 RK |
6199 | exp = TREE_OPERAND (exp, 0); |
6200 | } | |
afe84921 | 6201 | done: |
bbf6f052 | 6202 | |
770ae6cc | 6203 | /* If OFFSET is constant, see if we can return the whole thing as a |
9c219b9b RG |
6204 | constant bit position. Make sure to handle overflow during |
6205 | this conversion. */ | |
6206 | if (host_integerp (offset, 0)) | |
6207 | { | |
7fa5296e RG |
6208 | double_int tem = double_int_lshift (tree_to_double_int (offset), |
6209 | BITS_PER_UNIT == 8 | |
6210 | ? 3 : exact_log2 (BITS_PER_UNIT), | |
6211 | HOST_BITS_PER_DOUBLE_INT, true); | |
6212 | tem = double_int_add (tem, bit_offset); | |
9c219b9b RG |
6213 | if (double_int_fits_in_shwi_p (tem)) |
6214 | { | |
6215 | *pbitpos = double_int_to_shwi (tem); | |
bd323ce5 | 6216 | *poffset = offset = NULL_TREE; |
9c219b9b RG |
6217 | } |
6218 | } | |
6219 | ||
6220 | /* Otherwise, split it up. */ | |
bd323ce5 EB |
6221 | if (offset) |
6222 | { | |
7fa5296e | 6223 | *pbitpos = double_int_to_shwi (bit_offset); |
bd323ce5 EB |
6224 | *poffset = offset; |
6225 | } | |
6226 | ||
6227 | /* We can use BLKmode for a byte-aligned BLKmode bitfield. */ | |
6228 | if (mode == VOIDmode | |
6229 | && blkmode_bitfield | |
6230 | && (*pbitpos % BITS_PER_UNIT) == 0 | |
6231 | && (*pbitsize % BITS_PER_UNIT) == 0) | |
6232 | *pmode = BLKmode; | |
6233 | else | |
6234 | *pmode = mode; | |
b50d17a1 | 6235 | |
bbf6f052 RK |
6236 | return exp; |
6237 | } | |
921b3427 | 6238 | |
9f7ccf69 EB |
6239 | /* Given an expression EXP that may be a COMPONENT_REF, an ARRAY_REF or an |
6240 | ARRAY_RANGE_REF, look for whether EXP or any nested component-refs within | |
6241 | EXP is marked as PACKED. */ | |
5b900a4c DN |
6242 | |
6243 | bool | |
fa233e34 | 6244 | contains_packed_reference (const_tree exp) |
5b900a4c DN |
6245 | { |
6246 | bool packed_p = false; | |
6247 | ||
6248 | while (1) | |
6249 | { | |
6250 | switch (TREE_CODE (exp)) | |
6251 | { | |
6252 | case COMPONENT_REF: | |
6253 | { | |
6254 | tree field = TREE_OPERAND (exp, 1); | |
b8698a0f | 6255 | packed_p = DECL_PACKED (field) |
5b900a4c DN |
6256 | || TYPE_PACKED (TREE_TYPE (field)) |
6257 | || TYPE_PACKED (TREE_TYPE (exp)); | |
6258 | if (packed_p) | |
6259 | goto done; | |
6260 | } | |
6261 | break; | |
6262 | ||
6263 | case BIT_FIELD_REF: | |
6264 | case ARRAY_REF: | |
6265 | case ARRAY_RANGE_REF: | |
6266 | case REALPART_EXPR: | |
6267 | case IMAGPART_EXPR: | |
6268 | case VIEW_CONVERT_EXPR: | |
6269 | break; | |
6270 | ||
6271 | default: | |
6272 | goto done; | |
6273 | } | |
6274 | exp = TREE_OPERAND (exp, 0); | |
6275 | } | |
6276 | done: | |
6277 | return packed_p; | |
6278 | } | |
6279 | ||
44de5aeb | 6280 | /* Return a tree of sizetype representing the size, in bytes, of the element |
9f7ccf69 | 6281 | of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */ |
44de5aeb RK |
6282 | |
6283 | tree | |
6284 | array_ref_element_size (tree exp) | |
6285 | { | |
6286 | tree aligned_size = TREE_OPERAND (exp, 3); | |
6287 | tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))); | |
db3927fb | 6288 | location_t loc = EXPR_LOCATION (exp); |
44de5aeb RK |
6289 | |
6290 | /* If a size was specified in the ARRAY_REF, it's the size measured | |
6291 | in alignment units of the element type. So multiply by that value. */ | |
6292 | if (aligned_size) | |
bc482be4 RH |
6293 | { |
6294 | /* ??? tree_ssa_useless_type_conversion will eliminate casts to | |
6295 | sizetype from another type of the same width and signedness. */ | |
6296 | if (TREE_TYPE (aligned_size) != sizetype) | |
db3927fb AH |
6297 | aligned_size = fold_convert_loc (loc, sizetype, aligned_size); |
6298 | return size_binop_loc (loc, MULT_EXPR, aligned_size, | |
6299 | size_int (TYPE_ALIGN_UNIT (elmt_type))); | |
bc482be4 | 6300 | } |
44de5aeb | 6301 | |
caf93cb0 | 6302 | /* Otherwise, take the size from that of the element type. Substitute |
44de5aeb RK |
6303 | any PLACEHOLDER_EXPR that we have. */ |
6304 | else | |
6305 | return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp); | |
6306 | } | |
6307 | ||
6308 | /* Return a tree representing the lower bound of the array mentioned in | |
9f7ccf69 | 6309 | EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */ |
44de5aeb RK |
6310 | |
6311 | tree | |
6312 | array_ref_low_bound (tree exp) | |
6313 | { | |
6314 | tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0))); | |
6315 | ||
6316 | /* If a lower bound is specified in EXP, use it. */ | |
6317 | if (TREE_OPERAND (exp, 2)) | |
6318 | return TREE_OPERAND (exp, 2); | |
6319 | ||
6320 | /* Otherwise, if there is a domain type and it has a lower bound, use it, | |
6321 | substituting for a PLACEHOLDER_EXPR as needed. */ | |
6322 | if (domain_type && TYPE_MIN_VALUE (domain_type)) | |
6323 | return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp); | |
6324 | ||
6325 | /* Otherwise, return a zero of the appropriate type. */ | |
5212068f | 6326 | return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0); |
44de5aeb RK |
6327 | } |
6328 | ||
a7e5372d | 6329 | /* Return a tree representing the upper bound of the array mentioned in |
9f7ccf69 | 6330 | EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */ |
a7e5372d ZD |
6331 | |
6332 | tree | |
6333 | array_ref_up_bound (tree exp) | |
6334 | { | |
6335 | tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0))); | |
6336 | ||
6337 | /* If there is a domain type and it has an upper bound, use it, substituting | |
6338 | for a PLACEHOLDER_EXPR as needed. */ | |
6339 | if (domain_type && TYPE_MAX_VALUE (domain_type)) | |
6340 | return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp); | |
6341 | ||
6342 | /* Otherwise fail. */ | |
6343 | return NULL_TREE; | |
6344 | } | |
6345 | ||
44de5aeb RK |
6346 | /* Return a tree representing the offset, in bytes, of the field referenced |
6347 | by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */ | |
6348 | ||
6349 | tree | |
6350 | component_ref_field_offset (tree exp) | |
6351 | { | |
6352 | tree aligned_offset = TREE_OPERAND (exp, 2); | |
6353 | tree field = TREE_OPERAND (exp, 1); | |
db3927fb | 6354 | location_t loc = EXPR_LOCATION (exp); |
44de5aeb RK |
6355 | |
6356 | /* If an offset was specified in the COMPONENT_REF, it's the offset measured | |
6357 | in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that | |
6358 | value. */ | |
6359 | if (aligned_offset) | |
bc482be4 RH |
6360 | { |
6361 | /* ??? tree_ssa_useless_type_conversion will eliminate casts to | |
6362 | sizetype from another type of the same width and signedness. */ | |
6363 | if (TREE_TYPE (aligned_offset) != sizetype) | |
db3927fb AH |
6364 | aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset); |
6365 | return size_binop_loc (loc, MULT_EXPR, aligned_offset, | |
6366 | size_int (DECL_OFFSET_ALIGN (field) | |
6367 | / BITS_PER_UNIT)); | |
bc482be4 | 6368 | } |
44de5aeb | 6369 | |
caf93cb0 | 6370 | /* Otherwise, take the offset from that of the field. Substitute |
44de5aeb RK |
6371 | any PLACEHOLDER_EXPR that we have. */ |
6372 | else | |
6373 | return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp); | |
6374 | } | |
ceadb728 OH |
6375 | |
6376 | /* Alignment in bits the TARGET of an assignment may be assumed to have. */ | |
6377 | ||
6378 | static unsigned HOST_WIDE_INT | |
6379 | target_align (const_tree target) | |
6380 | { | |
6381 | /* We might have a chain of nested references with intermediate misaligning | |
6382 | bit-field components, so we need to recurse to find out. */ |
6383 | ||
6384 | unsigned HOST_WIDE_INT this_align, outer_align; | |
6385 | ||
6386 | switch (TREE_CODE (target)) | |
6387 | { | |
6388 | case BIT_FIELD_REF: | |
6389 | return 1; | |
6390 | ||
6391 | case COMPONENT_REF: | |
6392 | this_align = DECL_ALIGN (TREE_OPERAND (target, 1)); | |
6393 | outer_align = target_align (TREE_OPERAND (target, 0)); | |
6394 | return MIN (this_align, outer_align); | |
6395 | ||
6396 | case ARRAY_REF: | |
6397 | case ARRAY_RANGE_REF: | |
6398 | this_align = TYPE_ALIGN (TREE_TYPE (target)); | |
6399 | outer_align = target_align (TREE_OPERAND (target, 0)); | |
6400 | return MIN (this_align, outer_align); | |
6401 | ||
6402 | CASE_CONVERT: | |
6403 | case NON_LVALUE_EXPR: | |
6404 | case VIEW_CONVERT_EXPR: | |
6405 | this_align = TYPE_ALIGN (TREE_TYPE (target)); | |
6406 | outer_align = target_align (TREE_OPERAND (target, 0)); | |
6407 | return MAX (this_align, outer_align); | |
6408 | ||
6409 | default: | |
6410 | return TYPE_ALIGN (TREE_TYPE (target)); | |
6411 | } | |
6412 | } | |
6413 | ||
bbf6f052 | 6414 | \f |
3fe44edd RK |
6415 | /* Given an rtx VALUE that may contain additions and multiplications, return |
6416 | an equivalent value that just refers to a register, memory, or constant. | |
6417 | This is done by generating instructions to perform the arithmetic and | |
6418 | returning a pseudo-register containing the value. | |
c45a13a6 RK |
6419 | |
6420 | The returned value may be a REG, SUBREG, MEM or constant. */ | |
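/* For instance, a VALUE of the form (plus (reg A) (mult (reg B) (const_int 4))) is emitted as explicit multiply and add insns, and the pseudo holding the result is returned. */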
bbf6f052 RK |
6421 | |
6422 | rtx | |
502b8322 | 6423 | force_operand (rtx value, rtx target) |
bbf6f052 | 6424 | { |
8a28dbcc | 6425 | rtx op1, op2; |
bbf6f052 | 6426 | /* Use subtarget as the target for operand 0 of a binary operation. */ |
b3694847 | 6427 | rtx subtarget = get_subtarget (target); |
8a28dbcc | 6428 | enum rtx_code code = GET_CODE (value); |
bbf6f052 | 6429 | |
50654f6c ZD |
6430 | /* Check for subreg applied to an expression produced by loop optimizer. */ |
6431 | if (code == SUBREG | |
f8cfc6aa | 6432 | && !REG_P (SUBREG_REG (value)) |
3c0cb5de | 6433 | && !MEM_P (SUBREG_REG (value))) |
50654f6c | 6434 | { |
b7e6d1da UB |
6435 | value |
6436 | = simplify_gen_subreg (GET_MODE (value), | |
6437 | force_reg (GET_MODE (SUBREG_REG (value)), | |
6438 | force_operand (SUBREG_REG (value), | |
6439 | NULL_RTX)), | |
6440 | GET_MODE (SUBREG_REG (value)), | |
6441 | SUBREG_BYTE (value)); | |
50654f6c ZD |
6442 | code = GET_CODE (value); |
6443 | } | |
6444 | ||
8b015896 | 6445 | /* Check for a PIC address load. */ |
8a28dbcc | 6446 | if ((code == PLUS || code == MINUS) |
8b015896 RH |
6447 | && XEXP (value, 0) == pic_offset_table_rtx |
6448 | && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF | |
6449 | || GET_CODE (XEXP (value, 1)) == LABEL_REF | |
6450 | || GET_CODE (XEXP (value, 1)) == CONST)) | |
6451 | { | |
6452 | if (!subtarget) | |
6453 | subtarget = gen_reg_rtx (GET_MODE (value)); | |
6454 | emit_move_insn (subtarget, value); | |
6455 | return subtarget; | |
6456 | } | |
6457 | ||
ec8e098d | 6458 | if (ARITHMETIC_P (value)) |
bbf6f052 RK |
6459 | { |
6460 | op2 = XEXP (value, 1); | |
f8cfc6aa | 6461 | if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget)) |
bbf6f052 | 6462 | subtarget = 0; |
481683e1 | 6463 | if (code == MINUS && CONST_INT_P (op2)) |
bbf6f052 | 6464 | { |
8a28dbcc | 6465 | code = PLUS; |
bbf6f052 RK |
6466 | op2 = negate_rtx (GET_MODE (value), op2); |
6467 | } | |
6468 | ||
6469 | /* Check for an addition with OP2 a constant integer and our first | |
8a28dbcc JH |
6470 | operand a PLUS of a virtual register and something else. In that |
6471 | case, we want to emit the sum of the virtual register and the | |
6472 | constant first and then add the other value. This allows virtual | |
6473 | register instantiation to simply modify the constant rather than | |
6474 | creating another one around this addition. */ | |
481683e1 | 6475 | if (code == PLUS && CONST_INT_P (op2) |
bbf6f052 | 6476 | && GET_CODE (XEXP (value, 0)) == PLUS |
f8cfc6aa | 6477 | && REG_P (XEXP (XEXP (value, 0), 0)) |
bbf6f052 RK |
6478 | && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER |
6479 | && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER) | |
6480 | { | |
8a28dbcc JH |
6481 | rtx temp = expand_simple_binop (GET_MODE (value), code, |
6482 | XEXP (XEXP (value, 0), 0), op2, | |
6483 | subtarget, 0, OPTAB_LIB_WIDEN); | |
6484 | return expand_simple_binop (GET_MODE (value), code, temp, | |
6485 | force_operand (XEXP (XEXP (value, | |
6486 | 0), 1), 0), | |
6487 | target, 0, OPTAB_LIB_WIDEN); | |
bbf6f052 | 6488 | } |
3a94c984 | 6489 | |
8a28dbcc JH |
6490 | op1 = force_operand (XEXP (value, 0), subtarget); |
6491 | op2 = force_operand (op2, NULL_RTX); | |
6492 | switch (code) | |
6493 | { | |
6494 | case MULT: | |
6495 | return expand_mult (GET_MODE (value), op1, op2, target, 1); | |
6496 | case DIV: | |
6497 | if (!INTEGRAL_MODE_P (GET_MODE (value))) | |
6498 | return expand_simple_binop (GET_MODE (value), code, op1, op2, | |
6499 | target, 1, OPTAB_LIB_WIDEN); | |
6500 | else | |
6501 | return expand_divmod (0, | |
6502 | FLOAT_MODE_P (GET_MODE (value)) | |
6503 | ? RDIV_EXPR : TRUNC_DIV_EXPR, | |
6504 | GET_MODE (value), op1, op2, target, 0); | |
8a28dbcc JH |
6505 | case MOD: |
6506 | return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2, | |
6507 | target, 0); | |
8a28dbcc JH |
6508 | case UDIV: |
6509 | return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2, | |
6510 | target, 1); | |
8a28dbcc JH |
6511 | case UMOD: |
6512 | return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2, | |
6513 | target, 1); | |
8a28dbcc JH |
6514 | case ASHIFTRT: |
6515 | return expand_simple_binop (GET_MODE (value), code, op1, op2, | |
6516 | target, 0, OPTAB_LIB_WIDEN); | |
8a28dbcc JH |
6517 | default: |
6518 | return expand_simple_binop (GET_MODE (value), code, op1, op2, | |
6519 | target, 1, OPTAB_LIB_WIDEN); | |
6520 | } | |
6521 | } | |
ec8e098d | 6522 | if (UNARY_P (value)) |
8a28dbcc | 6523 | { |
72a10eff RS |
6524 | if (!target) |
6525 | target = gen_reg_rtx (GET_MODE (value)); | |
8a28dbcc | 6526 | op1 = force_operand (XEXP (value, 0), NULL_RTX); |
1fd5360d R |
6527 | switch (code) |
6528 | { | |
72a10eff RS |
6529 | case ZERO_EXTEND: |
6530 | case SIGN_EXTEND: | |
1fd5360d | 6531 | case TRUNCATE: |
e69e3d0e ZD |
6532 | case FLOAT_EXTEND: |
6533 | case FLOAT_TRUNCATE: | |
72a10eff RS |
6534 | convert_move (target, op1, code == ZERO_EXTEND); |
6535 | return target; | |
6536 | ||
6537 | case FIX: | |
6538 | case UNSIGNED_FIX: | |
6539 | expand_fix (target, op1, code == UNSIGNED_FIX); | |
6540 | return target; | |
6541 | ||
6542 | case FLOAT: | |
6543 | case UNSIGNED_FLOAT: | |
6544 | expand_float (target, op1, code == UNSIGNED_FLOAT); | |
6545 | return target; | |
6546 | ||
1fd5360d R |
6547 | default: |
6548 | return expand_simple_unop (GET_MODE (value), code, op1, target, 0); | |
6549 | } | |
bbf6f052 | 6550 | } |
34e81b5a RK |
6551 | |
6552 | #ifdef INSN_SCHEDULING | |
6553 | /* On machines that have insn scheduling, we want all memory references to be |
6554 | explicit, so we need to deal with such paradoxical SUBREGs. */ | |
3c0cb5de | 6555 | if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value)) |
34e81b5a RK |
6556 | && (GET_MODE_SIZE (GET_MODE (value)) |
6557 | > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value))))) | |
6558 | value | |
6559 | = simplify_gen_subreg (GET_MODE (value), | |
6560 | force_reg (GET_MODE (SUBREG_REG (value)), | |
6561 | force_operand (SUBREG_REG (value), | |
6562 | NULL_RTX)), | |
6563 | GET_MODE (SUBREG_REG (value)), | |
6564 | SUBREG_BYTE (value)); | |
6565 | #endif | |
6566 | ||
bbf6f052 RK |
6567 | return value; |
6568 | } | |
6569 | \f | |
bbf6f052 | 6570 | /* Subroutine of expand_expr: return nonzero iff there is no way that |
e5e809f4 JL |
6571 | EXP can reference X, which is being modified. TOP_P is nonzero if this |
6572 | call is going to be used to determine whether we need a temporary | |
ff439b5f CB |
6573 | for EXP, as opposed to a recursive call to this function. |
6574 | ||
6575 | It is always safe for this routine to return zero since it merely | |
6576 | searches for optimization opportunities. */ | |
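/* For example, if X is a pseudo register that EXP does not mention anywhere, the result is 1 and the caller may reuse X as a temporary while expanding EXP. */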
bbf6f052 | 6577 | |
8f17b5c5 | 6578 | int |
22ea9ec0 | 6579 | safe_from_p (const_rtx x, tree exp, int top_p) |
bbf6f052 RK |
6580 | { |
6581 | rtx exp_rtl = 0; | |
6582 | int i, nops; | |
6583 | ||
6676e72f RK |
6584 | if (x == 0 |
6585 | /* If EXP has varying size, we MUST use a target since we currently | |
8f6562d0 PB |
6586 | have no way of allocating temporaries of variable size |
6587 | (except for arrays that have TYPE_ARRAY_MAX_SIZE set). | |
6588 | So we assume here that something at a higher level has prevented a | |
f4510f37 | 6589 | clash. This is somewhat bogus, but the best we can do. Only |
e5e809f4 | 6590 | do this when X is BLKmode and when we are at the top level. */ |
d0f062fb | 6591 | || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp)) |
f4510f37 | 6592 | && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST |
8f6562d0 PB |
6593 | && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE |
6594 | || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE | |
6595 | || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp))) | |
6596 | != INTEGER_CST) | |
1da68f56 RK |
6597 | && GET_MODE (x) == BLKmode) |
6598 | /* If X is in the outgoing argument area, it is always safe. */ | |
3c0cb5de | 6599 | || (MEM_P (x) |
1da68f56 RK |
6600 | && (XEXP (x, 0) == virtual_outgoing_args_rtx |
6601 | || (GET_CODE (XEXP (x, 0)) == PLUS | |
6602 | && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))) | |
bbf6f052 RK |
6603 | return 1; |
6604 | ||
6605 | /* If this is a subreg of a hard register, declare it unsafe, otherwise, | |
6606 | find the underlying pseudo. */ | |
6607 | if (GET_CODE (x) == SUBREG) | |
6608 | { | |
6609 | x = SUBREG_REG (x); | |
f8cfc6aa | 6610 | if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER) |
bbf6f052 RK |
6611 | return 0; |
6612 | } | |
6613 | ||
1da68f56 | 6614 | /* Now look at our tree code and possibly recurse. */ |
bbf6f052 RK |
6615 | switch (TREE_CODE_CLASS (TREE_CODE (exp))) |
6616 | { | |
6615c446 | 6617 | case tcc_declaration: |
a9772b60 | 6618 | exp_rtl = DECL_RTL_IF_SET (exp); |
bbf6f052 RK |
6619 | break; |
6620 | ||
6615c446 | 6621 | case tcc_constant: |
bbf6f052 RK |
6622 | return 1; |
6623 | ||
6615c446 | 6624 | case tcc_exceptional: |
bbf6f052 | 6625 | if (TREE_CODE (exp) == TREE_LIST) |
f8d4be57 CE |
6626 | { |
6627 | while (1) | |
6628 | { | |
6629 | if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0)) | |
6630 | return 0; | |
6631 | exp = TREE_CHAIN (exp); | |
6632 | if (!exp) | |
6633 | return 1; | |
6634 | if (TREE_CODE (exp) != TREE_LIST) | |
6635 | return safe_from_p (x, exp, 0); | |
6636 | } | |
6637 | } | |
33598a1b BS |
6638 | else if (TREE_CODE (exp) == CONSTRUCTOR) |
6639 | { | |
6640 | constructor_elt *ce; | |
6641 | unsigned HOST_WIDE_INT idx; | |
6642 | ||
6643 | for (idx = 0; | |
6644 | VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce); | |
6645 | idx++) | |
6646 | if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0)) | |
6647 | || !safe_from_p (x, ce->value, 0)) | |
6648 | return 0; | |
6649 | return 1; | |
6650 | } | |
ff439b5f CB |
6651 | else if (TREE_CODE (exp) == ERROR_MARK) |
6652 | return 1; /* An already-visited SAVE_EXPR? */ | |
bbf6f052 RK |
6653 | else |
6654 | return 0; | |
6655 | ||
6615c446 | 6656 | case tcc_statement: |
350fae66 RK |
6657 | /* The only case we look at here is the DECL_INITIAL inside a |
6658 | DECL_EXPR. */ | |
6659 | return (TREE_CODE (exp) != DECL_EXPR | |
6660 | || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL | |
6661 | || !DECL_INITIAL (DECL_EXPR_DECL (exp)) | |
6662 | || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0)); | |
6663 | ||
6615c446 JO |
6664 | case tcc_binary: |
6665 | case tcc_comparison: | |
f8d4be57 CE |
6666 | if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0)) |
6667 | return 0; | |
5d3cc252 | 6668 | /* Fall through. */ |
f8d4be57 | 6669 | |
6615c446 | 6670 | case tcc_unary: |
f8d4be57 | 6671 | return safe_from_p (x, TREE_OPERAND (exp, 0), 0); |
bbf6f052 | 6672 | |
6615c446 JO |
6673 | case tcc_expression: |
6674 | case tcc_reference: | |
5039610b | 6675 | case tcc_vl_exp: |
bbf6f052 RK |
6676 | /* Now do code-specific tests. EXP_RTL is set to any rtx we find in |
6677 | the expression. If it is set, we conflict iff we are that rtx or | |
6678 | both are in memory. Otherwise, we check all operands of the | |
6679 | expression recursively. */ | |
6680 | ||
6681 | switch (TREE_CODE (exp)) | |
6682 | { | |
6683 | case ADDR_EXPR: | |
70072ed9 RK |
6684 | /* If the operand is static or we are static, we can't conflict. |
6685 | Likewise if we don't conflict with the operand at all. */ | |
6686 | if (staticp (TREE_OPERAND (exp, 0)) | |
6687 | || TREE_STATIC (exp) | |
6688 | || safe_from_p (x, TREE_OPERAND (exp, 0), 0)) | |
6689 | return 1; | |
6690 | ||
6691 | /* Otherwise, the only way this can conflict is if we are taking | |
6692 | the address of a DECL whose address is part of X, which is |
6693 | very rare. */ | |
6694 | exp = TREE_OPERAND (exp, 0); | |
6695 | if (DECL_P (exp)) | |
6696 | { | |
6697 | if (!DECL_RTL_SET_P (exp) | |
3c0cb5de | 6698 | || !MEM_P (DECL_RTL (exp))) |
70072ed9 RK |
6699 | return 0; |
6700 | else | |
6701 | exp_rtl = XEXP (DECL_RTL (exp), 0); | |
6702 | } | |
6703 | break; | |
bbf6f052 | 6704 | |
7ccf35ed DN |
6705 | case MISALIGNED_INDIRECT_REF: |
6706 | case ALIGN_INDIRECT_REF: | |
bbf6f052 | 6707 | case INDIRECT_REF: |
3c0cb5de | 6708 | if (MEM_P (x) |
1da68f56 RK |
6709 | && alias_sets_conflict_p (MEM_ALIAS_SET (x), |
6710 | get_alias_set (exp))) | |
bbf6f052 RK |
6711 | return 0; |
6712 | break; | |
6713 | ||
6714 | case CALL_EXPR: | |
f9808f81 MM |
6715 | /* Assume that the call will clobber all hard registers and |
6716 | all of memory. */ | |
f8cfc6aa | 6717 | if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER) |
3c0cb5de | 6718 | || MEM_P (x)) |
f9808f81 | 6719 | return 0; |
bbf6f052 RK |
6720 | break; |
6721 | ||
bbf6f052 | 6722 | case WITH_CLEANUP_EXPR: |
5dab5552 | 6723 | case CLEANUP_POINT_EXPR: |
ac45df5d | 6724 | /* Lowered by gimplify.c. */ |
5b0264cb | 6725 | gcc_unreachable (); |
ac45df5d | 6726 | |
bbf6f052 | 6727 | case SAVE_EXPR: |
82c82743 | 6728 | return safe_from_p (x, TREE_OPERAND (exp, 0), 0); |
bbf6f052 | 6729 | |
e9a25f70 JL |
6730 | default: |
6731 | break; | |
bbf6f052 RK |
6732 | } |
6733 | ||
6734 | /* If we have an rtx, we do not need to scan our operands. */ | |
6735 | if (exp_rtl) | |
6736 | break; | |
6737 | ||
5039610b | 6738 | nops = TREE_OPERAND_LENGTH (exp); |
bbf6f052 RK |
6739 | for (i = 0; i < nops; i++) |
6740 | if (TREE_OPERAND (exp, i) != 0 | |
e5e809f4 | 6741 | && ! safe_from_p (x, TREE_OPERAND (exp, i), 0)) |
bbf6f052 | 6742 | return 0; |
8f17b5c5 | 6743 | |
6615c446 JO |
6744 | break; |
6745 | ||
6746 | case tcc_type: | |
6747 | /* Should never get a type here. */ | |
6748 | gcc_unreachable (); | |
bbf6f052 RK |
6749 | } |
6750 | ||
6751 | /* If we have an rtl, find any enclosed object. Then see if we conflict | |
6752 | with it. */ | |
6753 | if (exp_rtl) | |
6754 | { | |
6755 | if (GET_CODE (exp_rtl) == SUBREG) | |
6756 | { | |
6757 | exp_rtl = SUBREG_REG (exp_rtl); | |
f8cfc6aa | 6758 | if (REG_P (exp_rtl) |
bbf6f052 RK |
6759 | && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER) |
6760 | return 0; | |
6761 | } | |
6762 | ||
6763 | /* If the rtl is X, then it is not safe. Otherwise, it is unless both | |
1da68f56 | 6764 | are memory and they conflict. */ |
bbf6f052 | 6765 | return ! (rtx_equal_p (x, exp_rtl) |
3c0cb5de | 6766 | || (MEM_P (x) && MEM_P (exp_rtl) |
21117a17 | 6767 | && true_dependence (exp_rtl, VOIDmode, x, |
1da68f56 | 6768 | rtx_addr_varies_p))); |
bbf6f052 RK |
6769 | } |
6770 | ||
6771 | /* If we reach here, it is safe. */ | |
6772 | return 1; | |
6773 | } | |
6774 | ||
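For reference, the usual caller-side pattern (used by expand_operands further down in this file) is simply to drop a suggested target when it is not safe from the other operand:

  if (! safe_from_p (target, exp1, 1))
    target = 0;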
14a774a9 | 6775 | \f |
0d4903b8 RK |
6776 | /* Return the highest power of two that EXP is known to be a multiple of. |
6777 | This is used in updating alignment of MEMs in array references. */ | |
6778 | ||
86a07404 | 6779 | unsigned HOST_WIDE_INT |
fa233e34 | 6780 | highest_pow2_factor (const_tree exp) |
0d4903b8 | 6781 | { |
9ceca302 | 6782 | unsigned HOST_WIDE_INT c0, c1; |
0d4903b8 RK |
6783 | |
6784 | switch (TREE_CODE (exp)) | |
6785 | { | |
6786 | case INTEGER_CST: | |
e0f1be5c JJ |
6787 | /* We can find the lowest bit that's a one. If the low |
6788 | HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT. | |
6789 | We need to handle this case since we can find it in a COND_EXPR, | |
a98ebe2e | 6790 | a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an |
e0f1be5c | 6791 | erroneous program, so return BIGGEST_ALIGNMENT to avoid any |
3a531a8b | 6792 | later ICE. */ |
455f14dd | 6793 | if (TREE_OVERFLOW (exp)) |
1ed1b4fb | 6794 | return BIGGEST_ALIGNMENT; |
e0f1be5c | 6795 | else |
0d4903b8 | 6796 | { |
e0f1be5c JJ |
6797 | /* Note: tree_low_cst is intentionally not used here, |
6798 | we don't care about the upper bits. */ | |
6799 | c0 = TREE_INT_CST_LOW (exp); | |
6800 | c0 &= -c0; | |
6801 | return c0 ? c0 : BIGGEST_ALIGNMENT; | |
0d4903b8 RK |
6802 | } |
6803 | break; | |
6804 | ||
65a07688 | 6805 | case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR: |
0d4903b8 RK |
6806 | c0 = highest_pow2_factor (TREE_OPERAND (exp, 0)); |
6807 | c1 = highest_pow2_factor (TREE_OPERAND (exp, 1)); | |
6808 | return MIN (c0, c1); | |
6809 | ||
6810 | case MULT_EXPR: | |
6811 | c0 = highest_pow2_factor (TREE_OPERAND (exp, 0)); | |
6812 | c1 = highest_pow2_factor (TREE_OPERAND (exp, 1)); | |
6813 | return c0 * c1; | |
6814 | ||
6815 | case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR: | |
6816 | case CEIL_DIV_EXPR: | |
65a07688 RK |
6817 | if (integer_pow2p (TREE_OPERAND (exp, 1)) |
6818 | && host_integerp (TREE_OPERAND (exp, 1), 1)) | |
6819 | { | |
6820 | c0 = highest_pow2_factor (TREE_OPERAND (exp, 0)); | |
6821 | c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1); | |
6822 | return MAX (1, c0 / c1); | |
6823 | } | |
6824 | break; | |
0d4903b8 | 6825 | |
a2acd8bf EB |
6826 | case BIT_AND_EXPR: |
6827 | /* The highest power of two of a bit-and expression is the maximum of | |
6828 | that of its operands. We typically get here for a complex LHS and | |
6829 | a constant negative power of two on the RHS to force an explicit | |
6830 | alignment, so don't bother looking at the LHS. */ | |
6831 | return highest_pow2_factor (TREE_OPERAND (exp, 1)); | |
6832 | ||
1043771b | 6833 | CASE_CONVERT: |
6fce44af | 6834 | case SAVE_EXPR: |
0d4903b8 RK |
6835 | return highest_pow2_factor (TREE_OPERAND (exp, 0)); |
6836 | ||
65a07688 RK |
6837 | case COMPOUND_EXPR: |
6838 | return highest_pow2_factor (TREE_OPERAND (exp, 1)); | |
6839 | ||
0d4903b8 RK |
6840 | case COND_EXPR: |
6841 | c0 = highest_pow2_factor (TREE_OPERAND (exp, 1)); | |
6842 | c1 = highest_pow2_factor (TREE_OPERAND (exp, 2)); | |
6843 | return MIN (c0, c1); | |
6844 | ||
6845 | default: | |
6846 | break; | |
6847 | } | |
6848 | ||
6849 | return 1; | |
6850 | } | |
818c0c94 | 6851 | |
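The INTEGER_CST case above uses the classic lowest-set-bit identity: for a nonzero constant C, C & -C isolates the lowest set bit, which is exactly the largest power of two dividing C; the PLUS/MINUS/MIN/MAX cases take the MIN of their operands' factors and MULT_EXPR multiplies them. A standalone sketch of the identity on plain unsigned integers (illustration only, not GCC code):

  #include <stdio.h>

  /* Largest power of two dividing N, via the same N & -N trick.  */
  static unsigned long
  pow2_factor (unsigned long n)
  {
    return n ? (n & -n) : 0;
  }

  int
  main (void)
  {
    printf ("%lu\n", pow2_factor (24));   /* 8: 24 = 8 * 3 */
    printf ("%lu\n", pow2_factor (40));   /* 8: 40 = 8 * 5 */
    printf ("%lu\n", pow2_factor (7));    /* 1: 7 is odd   */
    return 0;
  }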
d50a16c4 EB |
6852 | /* Similar, except that the alignment requirements of TARGET are |
6853 | taken into account. Assume it is at least as aligned as its | |
6854 | type, unless it is a COMPONENT_REF in which case the layout of | |
6855 | the structure gives the alignment. */ | |
818c0c94 | 6856 | |
9ceca302 | 6857 | static unsigned HOST_WIDE_INT |
fa233e34 | 6858 | highest_pow2_factor_for_target (const_tree target, const_tree exp) |
818c0c94 | 6859 | { |
ceadb728 OH |
6860 | unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT; |
6861 | unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp); | |
b8698a0f | 6862 | |
ceadb728 | 6863 | return MAX (factor, talign); |
818c0c94 | 6864 | } |
0d4903b8 | 6865 | \f |
8893239d RH |
6866 | /* Return &VAR expression for emulated thread local VAR. */ |
6867 | ||
6868 | static tree | |
6869 | emutls_var_address (tree var) | |
6870 | { | |
6871 | tree emuvar = emutls_decl (var); | |
6872 | tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS]; | |
6873 | tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node); | |
6874 | tree arglist = build_tree_list (NULL_TREE, arg); | |
db3927fb | 6875 | tree call = build_function_call_expr (UNKNOWN_LOCATION, fn, arglist); |
8893239d RH |
6876 | return fold_convert (build_pointer_type (TREE_TYPE (var)), call); |
6877 | } | |
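At the source level this corresponds roughly to the following (a sketch assuming a target without native TLS; the exact name of the control object and the parameter type of __emutls_get_address are libgcc implementation details):

  /* User code: taking the address of an emulated thread-local variable.  */
  __thread int counter;

  int *
  counter_addr (void)
  {
    /* With !targetm.have_tls the ADDR_EXPR below is rewritten roughly as
         (int *) __emutls_get_address (&__emutls_v.counter)
       where __emutls_v.counter is the emutls control object for counter.  */
    return &counter;
  }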
6878 | \f | |
6de9cd9a | 6879 | |
eb698c58 RS |
6880 | /* Subroutine of expand_expr. Expand the two operands of a binary |
6881 | expression EXP0 and EXP1 placing the results in OP0 and OP1. | |
6882 | The value may be stored in TARGET if TARGET is nonzero. The | |
6883 | MODIFIER argument is as documented by expand_expr. */ | |
6884 | ||
6885 | static void | |
6886 | expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1, | |
6887 | enum expand_modifier modifier) | |
6888 | { | |
6889 | if (! safe_from_p (target, exp1, 1)) | |
6890 | target = 0; | |
6891 | if (operand_equal_p (exp0, exp1, 0)) | |
6892 | { | |
6893 | *op0 = expand_expr (exp0, target, VOIDmode, modifier); | |
6894 | *op1 = copy_rtx (*op0); | |
6895 | } | |
6896 | else | |
6897 | { | |
c67e6e14 RS |
6898 | /* If we need to preserve evaluation order, copy exp0 into its own |
6899 | temporary variable so that it can't be clobbered by exp1. */ | |
6900 | if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1)) | |
6901 | exp0 = save_expr (exp0); | |
eb698c58 RS |
6902 | *op0 = expand_expr (exp0, target, VOIDmode, modifier); |
6903 | *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier); | |
6904 | } | |
6905 | } | |
6906 | ||
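For reference, the binary-operator cases of expand_expr_real_2 below use this helper in the obvious way, e.g. (mirroring the PLUS_EXPR case later in this file):

  expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
  return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));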
f47e9b4e | 6907 | \f |
c0220ea4 | 6908 | /* Return a MEM that contains constant EXP. DEFER is as for |
aacd3885 RS |
6909 | output_constant_def and MODIFIER is as for expand_expr. */ |
6910 | ||
6911 | static rtx | |
6912 | expand_expr_constant (tree exp, int defer, enum expand_modifier modifier) | |
6913 | { | |
6914 | rtx mem; | |
6915 | ||
6916 | mem = output_constant_def (exp, defer); | |
6917 | if (modifier != EXPAND_INITIALIZER) | |
6918 | mem = use_anchored_address (mem); | |
6919 | return mem; | |
6920 | } | |
6921 | ||
70bb498a | 6922 | /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP. |
6377bb9a RH |
6923 | The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */ |
6924 | ||
6925 | static rtx | |
70bb498a | 6926 | expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode, |
d4ebfa65 | 6927 | enum expand_modifier modifier, addr_space_t as) |
6377bb9a RH |
6928 | { |
6929 | rtx result, subtarget; | |
6930 | tree inner, offset; | |
6931 | HOST_WIDE_INT bitsize, bitpos; | |
6932 | int volatilep, unsignedp; | |
6933 | enum machine_mode mode1; | |
6934 | ||
6935 | /* If we are taking the address of a constant and are at the top level, | |
6936 | we have to use output_constant_def since we can't call force_const_mem | |
6937 | at top level. */ | |
6938 | /* ??? This should be considered a front-end bug. We should not be | |
6939 | generating ADDR_EXPR of something that isn't an LVALUE. The only | |
6940 | exception here is STRING_CST. */ | |
16089886 | 6941 | if (CONSTANT_CLASS_P (exp)) |
aacd3885 | 6942 | return XEXP (expand_expr_constant (exp, 0, modifier), 0); |
6377bb9a RH |
6943 | |
6944 | /* Everything must be something allowed by is_gimple_addressable. */ | |
6945 | switch (TREE_CODE (exp)) | |
6946 | { | |
6947 | case INDIRECT_REF: | |
6948 | /* This case will happen via recursion for &a->b. */ | |
aacd3885 | 6949 | return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier); |
6377bb9a | 6950 | |
70f34814 RG |
6951 | case MEM_REF: |
6952 | { | |
6953 | tree tem = TREE_OPERAND (exp, 0); | |
6954 | if (!integer_zerop (TREE_OPERAND (exp, 1))) | |
6955 | tem = build2 (POINTER_PLUS_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)), | |
6956 | tem, | |
6957 | double_int_to_tree (sizetype, mem_ref_offset (exp))); | |
6958 | return expand_expr (tem, target, tmode, modifier); | |
6959 | } | |
6960 | ||
6377bb9a | 6961 | case CONST_DECL: |
ffab1d07 RG |
6962 | /* Expand the initializer like constants above. */ |
6963 | return XEXP (expand_expr_constant (DECL_INITIAL (exp), 0, modifier), 0); | |
6377bb9a RH |
6964 | |
6965 | case REALPART_EXPR: | |
6966 | /* The real part of the complex number is always first, therefore | |
6967 | the address is the same as the address of the parent object. */ | |
6968 | offset = 0; | |
6969 | bitpos = 0; | |
6970 | inner = TREE_OPERAND (exp, 0); | |
6971 | break; | |
6972 | ||
6973 | case IMAGPART_EXPR: | |
6974 | /* The imaginary part of the complex number is always second. | |
2a7e31df | 6975 | The expression is therefore always offset by the size of the |
6377bb9a RH |
6976 | scalar type. */ |
6977 | offset = 0; | |
6978 | bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp))); | |
6979 | inner = TREE_OPERAND (exp, 0); | |
6980 | break; | |
6981 | ||
8893239d RH |
6982 | case VAR_DECL: |
6983 | /* TLS emulation hook - replace __thread VAR's &VAR with | |
6984 | __emutls_get_address (&_emutls.VAR). */ | |
6985 | if (! targetm.have_tls | |
6986 | && TREE_CODE (exp) == VAR_DECL | |
6987 | && DECL_THREAD_LOCAL_P (exp)) | |
6988 | { | |
6989 | exp = emutls_var_address (exp); | |
6990 | return expand_expr (exp, target, tmode, modifier); | |
6991 | } | |
6992 | /* Fall through. */ | |
6993 | ||
6377bb9a RH |
6994 | default: |
6995 | /* If the object is a DECL, then expand it for its rtl. Don't bypass | |
6996 | expand_expr, as that can have various side effects; LABEL_DECLs for | |
16089886 RS |
6997 | example, may not have their DECL_RTL set yet. Expand the rtl of |
6998 | CONSTRUCTORs too, which should yield a memory reference for the | |
6999 | constructor's contents. Assume language specific tree nodes can | |
7000 | be expanded in some interesting way. */ | |
2ec5deb5 | 7001 | gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE); |
6377bb9a | 7002 | if (DECL_P (exp) |
16089886 | 7003 | || TREE_CODE (exp) == CONSTRUCTOR |
2ec5deb5 | 7004 | || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR) |
6377bb9a RH |
7005 | { |
7006 | result = expand_expr (exp, target, tmode, | |
7007 | modifier == EXPAND_INITIALIZER | |
7008 | ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS); | |
7009 | ||
7010 | /* If the DECL isn't in memory, then the DECL wasn't properly | |
7011 | marked TREE_ADDRESSABLE, which will be either a front-end | |
7012 | or a tree optimizer bug. */ | |
2ca202e7 | 7013 | gcc_assert (MEM_P (result)); |
6377bb9a RH |
7014 | result = XEXP (result, 0); |
7015 | ||
7016 | /* ??? Is this needed anymore? */ | |
b0b324b0 | 7017 | if (DECL_P (exp) && !TREE_USED (exp) == 0) |
6377bb9a RH |
7018 | { |
7019 | assemble_external (exp); | |
7020 | TREE_USED (exp) = 1; | |
7021 | } | |
7022 | ||
7023 | if (modifier != EXPAND_INITIALIZER | |
7024 | && modifier != EXPAND_CONST_ADDRESS) | |
7025 | result = force_operand (result, target); | |
7026 | return result; | |
7027 | } | |
7028 | ||
2614034e EB |
7029 | /* Pass FALSE as the last argument to get_inner_reference although |
7030 | we are expanding to RTL. The rationale is that we know how to | |
7031 | handle "aligning nodes" here: we can just bypass them because | |
7032 | they won't change the final object whose address will be returned | |
7033 | (they actually exist only for that purpose). */ | |
6377bb9a | 7034 | inner = get_inner_reference (exp, &bitsize, &bitpos, &offset, |
2614034e | 7035 | &mode1, &unsignedp, &volatilep, false); |
6377bb9a RH |
7036 | break; |
7037 | } | |
7038 | ||
7039 | /* We must have made progress. */ | |
5b0264cb | 7040 | gcc_assert (inner != exp); |
6377bb9a RH |
7041 | |
7042 | subtarget = offset || bitpos ? NULL_RTX : target; | |
8ebec1a5 JJ |
7043 | /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than |
7044 | inner alignment, force the inner to be sufficiently aligned. */ | |
7045 | if (CONSTANT_CLASS_P (inner) | |
7046 | && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp))) | |
7047 | { | |
7048 | inner = copy_node (inner); | |
7049 | TREE_TYPE (inner) = copy_node (TREE_TYPE (inner)); | |
7050 | TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp)); | |
7051 | TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1; | |
7052 | } | |
d4ebfa65 | 7053 | result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as); |
6377bb9a | 7054 | |
6377bb9a RH |
7055 | if (offset) |
7056 | { | |
7057 | rtx tmp; | |
7058 | ||
7059 | if (modifier != EXPAND_NORMAL) | |
7060 | result = force_operand (result, NULL); | |
b8698a0f | 7061 | tmp = expand_expr (offset, NULL_RTX, tmode, |
4543943a AP |
7062 | modifier == EXPAND_INITIALIZER |
7063 | ? EXPAND_INITIALIZER : EXPAND_NORMAL); | |
6377bb9a | 7064 | |
d4ebfa65 BE |
7065 | result = convert_memory_address_addr_space (tmode, result, as); |
7066 | tmp = convert_memory_address_addr_space (tmode, tmp, as); | |
b0b324b0 | 7067 | |
d047a201 | 7068 | if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER) |
6377bb9a RH |
7069 | result = gen_rtx_PLUS (tmode, result, tmp); |
7070 | else | |
7071 | { | |
7072 | subtarget = bitpos ? NULL_RTX : target; | |
7073 | result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget, | |
7074 | 1, OPTAB_LIB_WIDEN); | |
7075 | } | |
7076 | } | |
7077 | ||
7078 | if (bitpos) | |
7079 | { | |
7080 | /* Someone beforehand should have rejected taking the address | |
7081 | of such an object. */ | |
b0b324b0 | 7082 | gcc_assert ((bitpos % BITS_PER_UNIT) == 0); |
6377bb9a RH |
7083 | |
7084 | result = plus_constant (result, bitpos / BITS_PER_UNIT); | |
7085 | if (modifier < EXPAND_SUM) | |
7086 | result = force_operand (result, target); | |
7087 | } | |
7088 | ||
7089 | return result; | |
7090 | } | |
7091 | ||
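The offset/bitpos handling above is ordinary byte arithmetic: get_inner_reference reports where the component lives in bits, and the emitted address is the base address plus bitpos / BITS_PER_UNIT bytes (bit positions that are not byte-aligned are rejected by the assertion). A source-level analogue, illustration only:

  #include <assert.h>
  #include <stddef.h>

  struct pair { int first; double second; };

  int
  main (void)
  {
    struct pair p;
    /* offsetof plays the role of bitpos / BITS_PER_UNIT for &p.second.  */
    size_t byte_offset = offsetof (struct pair, second);
    double *addr = (double *) ((char *) &p + byte_offset);
    assert (addr == &p.second);
    return 0;
  }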
70bb498a RH |
7092 | /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR. |
7093 | The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */ | |
7094 | ||
7095 | static rtx | |
7096 | expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode, | |
7097 | enum expand_modifier modifier) | |
7098 | { | |
d4ebfa65 BE |
7099 | addr_space_t as = ADDR_SPACE_GENERIC; |
7100 | enum machine_mode address_mode = Pmode; | |
7101 | enum machine_mode pointer_mode = ptr_mode; | |
70bb498a RH |
7102 | enum machine_mode rmode; |
7103 | rtx result; | |
7104 | ||
b0b324b0 RH |
7105 | /* Target mode of VOIDmode says "whatever's natural". */ |
7106 | if (tmode == VOIDmode) | |
7107 | tmode = TYPE_MODE (TREE_TYPE (exp)); | |
7108 | ||
d4ebfa65 BE |
7109 | if (POINTER_TYPE_P (TREE_TYPE (exp))) |
7110 | { | |
7111 | as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp))); | |
7112 | address_mode = targetm.addr_space.address_mode (as); | |
7113 | pointer_mode = targetm.addr_space.pointer_mode (as); | |
7114 | } | |
7115 | ||
b0b324b0 RH |
7116 | /* We can get called with some Weird Things if the user does silliness |
7117 | like "(short) &a". In that case, convert_memory_address won't do | |
7118 | the right thing, so ignore the given target mode. */ | |
d4ebfa65 BE |
7119 | if (tmode != address_mode && tmode != pointer_mode) |
7120 | tmode = address_mode; | |
b0b324b0 | 7121 | |
70bb498a | 7122 | result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target, |
d4ebfa65 | 7123 | tmode, modifier, as); |
70bb498a RH |
7124 | |
7125 | /* Despite expand_expr's claims about ignoring TMODE when it is not | |
b0b324b0 RH |
7126 | strictly convenient, things break if we don't honor it. Note | |
7127 | that combined with the above, we only do this for pointer modes. */ | |
70bb498a RH |
7128 | rmode = GET_MODE (result); |
7129 | if (rmode == VOIDmode) | |
7130 | rmode = tmode; | |
7131 | if (rmode != tmode) | |
d4ebfa65 | 7132 | result = convert_memory_address_addr_space (tmode, result, as); |
b0b324b0 | 7133 | |
70bb498a RH |
7134 | return result; |
7135 | } | |
7136 | ||
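A concrete instance of the "(short) &a" silliness referred to above, in which the requested mode is neither the address mode nor ptr_mode and is therefore ignored (illustration only; compilers normally warn about the pointer-to-integer truncation):

  int a;

  short
  truncated_address (void)
  {
    return (short) &a;   /* typically warns: cast from pointer to integer of different size */
  }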
32eed045 JJ |
7137 | /* Generate code for computing CONSTRUCTOR EXP. |
7138 | An rtx for the computed value is returned. If AVOID_TEMP_MEM | |
7139 | is TRUE, instead of creating a temporary variable in memory, | |
7140 | NULL is returned and the caller needs to handle it differently. */ | |
7141 | ||
7142 | static rtx | |
7143 | expand_constructor (tree exp, rtx target, enum expand_modifier modifier, | |
7144 | bool avoid_temp_mem) | |
7145 | { | |
7146 | tree type = TREE_TYPE (exp); | |
7147 | enum machine_mode mode = TYPE_MODE (type); | |
7148 | ||
7149 | /* Try to avoid creating a temporary at all. This is possible | |
7150 | if all of the initializer is zero. | |
7151 | FIXME: try to handle all [0..255] initializers we can handle | |
7152 | with memset. */ | |
7153 | if (TREE_STATIC (exp) | |
7154 | && !TREE_ADDRESSABLE (exp) | |
7155 | && target != 0 && mode == BLKmode | |
7156 | && all_zeros_p (exp)) | |
7157 | { | |
7158 | clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL); | |
7159 | return target; | |
7160 | } | |
7161 | ||
7162 | /* All elts simple constants => refer to a constant in memory. But | |
7163 | if this is a non-BLKmode mode, let it store a field at a time | |
7164 | since that should make a CONST_INT or CONST_DOUBLE when we | |
7165 | fold. Likewise, if we have a target we can use, it is best to | |
7166 | store directly into the target unless the type is large enough | |
7167 | that memcpy will be used. If we are making an initializer and | |
7168 | all operands are constant, put it in memory as well. | |
7169 | ||
7170 | FIXME: Avoid trying to fill vector constructors piece-meal. | |
7171 | Output them with output_constant_def below unless we're sure | |
7172 | they're zeros. This should go away when vector initializers | |
7173 | are treated like VECTOR_CST instead of arrays. */ | |
7174 | if ((TREE_STATIC (exp) | |
7175 | && ((mode == BLKmode | |
7176 | && ! (target != 0 && safe_from_p (target, exp, 1))) | |
7177 | || TREE_ADDRESSABLE (exp) | |
7178 | || (host_integerp (TYPE_SIZE_UNIT (type), 1) | |
7179 | && (! MOVE_BY_PIECES_P | |
7180 | (tree_low_cst (TYPE_SIZE_UNIT (type), 1), | |
7181 | TYPE_ALIGN (type))) | |
7182 | && ! mostly_zeros_p (exp)))) | |
7183 | || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS) | |
7184 | && TREE_CONSTANT (exp))) | |
7185 | { | |
7186 | rtx constructor; | |
7187 | ||
7188 | if (avoid_temp_mem) | |
7189 | return NULL_RTX; | |
7190 | ||
7191 | constructor = expand_expr_constant (exp, 1, modifier); | |
7192 | ||
7193 | if (modifier != EXPAND_CONST_ADDRESS | |
7194 | && modifier != EXPAND_INITIALIZER | |
7195 | && modifier != EXPAND_SUM) | |
7196 | constructor = validize_mem (constructor); | |
7197 | ||
7198 | return constructor; | |
7199 | } | |
7200 | ||
7201 | /* Handle calls that pass values in multiple non-contiguous | |
7202 | locations. The Irix 6 ABI has examples of this. */ | |
7203 | if (target == 0 || ! safe_from_p (target, exp, 1) | |
7204 | || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM) | |
7205 | { | |
7206 | if (avoid_temp_mem) | |
7207 | return NULL_RTX; | |
7208 | ||
7209 | target | |
7210 | = assign_temp (build_qualified_type (type, (TYPE_QUALS (type) | |
7211 | | (TREE_READONLY (exp) | |
7212 | * TYPE_QUAL_CONST))), | |
7213 | 0, TREE_ADDRESSABLE (exp), 1); | |
7214 | } | |
7215 | ||
7216 | store_constructor (exp, target, 0, int_expr_size (exp)); | |
7217 | return target; | |
7218 | } | |
7219 | ||
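The all_zeros_p fast path above targets initializers like the one below; whether a particular case really gets a single clear_storage block clear rather than per-element stores also depends on the BLKmode and target checks in that path (sketch, user-level view only):

  extern void consume (int *);

  void
  fill (void)
  {
    int buf[256] = { 0 };   /* aggregate whose initializer is entirely zero */
    consume (buf);
  }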
70bb498a | 7220 | |
bbf6f052 RK |
7221 | /* expand_expr: generate code for computing expression EXP. |
7222 | An rtx for the computed value is returned. The value is never null. | |
7223 | In the case of a void EXP, const0_rtx is returned. | |
7224 | ||
7225 | The value may be stored in TARGET if TARGET is nonzero. | |
7226 | TARGET is just a suggestion; callers must assume that | |
7227 | the rtx returned may not be the same as TARGET. | |
7228 | ||
7229 | If TARGET is CONST0_RTX, it means that the value will be ignored. | |
7230 | ||
7231 | If TMODE is not VOIDmode, it suggests generating the | |
7232 | result in mode TMODE. But this is done only when convenient. | |
7233 | Otherwise, TMODE is ignored and the value generated in its natural mode. | |
7234 | TMODE is just a suggestion; callers must assume that | |
7235 | the rtx returned may not have mode TMODE. | |
7236 | ||
d6a5ac33 RK |
7237 | Note that TARGET may have neither TMODE nor MODE. In that case, it |
7238 | probably will not be used. | |
bbf6f052 RK |
7239 | |
7240 | If MODIFIER is EXPAND_SUM then when EXP is an addition | |
7241 | we can return an rtx of the form (MULT (REG ...) (CONST_INT ...)) | |
7242 | or a nest of (PLUS ...) and (MINUS ...) where the terms are | |
7243 | products as above, or REG or MEM, or constant. | |
7244 | Ordinarily in such cases we would output mul or add instructions | |
7245 | and then return a pseudo reg containing the sum. | |
7246 | ||
7247 | EXPAND_INITIALIZER is much like EXPAND_SUM except that | |
7248 | it also marks a label as absolutely required (it can't be dead). | |
26fcb35a | 7249 | It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns. |
d6a5ac33 RK |
7250 | This is used for outputting expressions used in initializers. |
7251 | ||
7252 | EXPAND_CONST_ADDRESS says that it is okay to return a MEM | |
7253 | with a constant address even if that address is not normally legitimate. | |
8403445a AM |
7254 | EXPAND_INITIALIZER and EXPAND_SUM also have this effect. |
7255 | ||
7256 | EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for | |
7257 | a call parameter. Such targets require special care as we haven't yet | |
7258 | marked TARGET so that it's safe from being trashed by libcalls. We | |
7259 | don't want to use TARGET for anything but the final result; | |
7260 | Intermediate values must go elsewhere. Additionally, calls to | |
caf93cb0 | 7261 | emit_block_move will be flagged with BLOCK_OP_CALL_PARM. |
0fab64a3 MM |
7262 | |
7263 | If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid | |
7264 | address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the | |
7265 | DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a | |
7266 | COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on | |
7267 | recursively. */ | |
bbf6f052 RK |
7268 | |
7269 | rtx | |
0fab64a3 MM |
7270 | expand_expr_real (tree exp, rtx target, enum machine_mode tmode, |
7271 | enum expand_modifier modifier, rtx *alt_rtl) | |
6de9cd9a | 7272 | { |
a5883ba0 | 7273 | rtx ret; |
6de9cd9a DN |
7274 | |
7275 | /* Handle ERROR_MARK before anybody tries to access its type. */ | |
7276 | if (TREE_CODE (exp) == ERROR_MARK | |
726a989a | 7277 | || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)) |
6de9cd9a DN |
7278 | { |
7279 | ret = CONST0_RTX (tmode); | |
7280 | return ret ? ret : const0_rtx; | |
7281 | } | |
7282 | ||
6de9cd9a | 7283 | /* If this is an expression of some kind and it has an associated line |
caf93cb0 | 7284 | number, then emit the line number before expanding the expression. |
6de9cd9a DN |
7285 | |
7286 | We need to save and restore the file and line information so that | |
7287 | errors discovered during expansion are emitted with the right | |
caf93cb0 | 7288 | information. It would be better if the diagnostic routines | |
6de9cd9a DN |
7289 | used the file/line information embedded in the tree nodes rather |
7290 | than globals. */ | |
55e092c4 | 7291 | if (cfun && EXPR_HAS_LOCATION (exp)) |
6de9cd9a DN |
7292 | { |
7293 | location_t saved_location = input_location; | |
d0ed412a JJ |
7294 | location_t saved_curr_loc = get_curr_insn_source_location (); |
7295 | tree saved_block = get_curr_insn_block (); | |
6de9cd9a | 7296 | input_location = EXPR_LOCATION (exp); |
55e092c4 | 7297 | set_curr_insn_source_location (input_location); |
caf93cb0 | 7298 | |
6de9cd9a | 7299 | /* Record where the insns produced belong. */ |
55e092c4 | 7300 | set_curr_insn_block (TREE_BLOCK (exp)); |
6de9cd9a DN |
7301 | |
7302 | ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl); | |
7303 | ||
7304 | input_location = saved_location; | |
d0ed412a JJ |
7305 | set_curr_insn_block (saved_block); |
7306 | set_curr_insn_source_location (saved_curr_loc); | |
6de9cd9a DN |
7307 | } |
7308 | else | |
7309 | { | |
7310 | ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl); | |
7311 | } | |
7312 | ||
6de9cd9a DN |
7313 | return ret; |
7314 | } | |
7315 | ||
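A sketch of the calling convention documented above: TARGET is only a suggestion, so a caller that really needs the value in a particular register must use the returned rtx and copy it over itself (assumed caller-side fragment; EXP stands for some tree expression, and only functions that appear elsewhere in this file are used):

  rtx reg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
  rtx val = expand_expr (exp, reg, VOIDmode, EXPAND_NORMAL);
  if (val != reg)
    emit_move_insn (reg, val);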
28ed065e | 7316 | rtx |
f994f296 MM |
7317 | expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode, |
7318 | enum expand_modifier modifier) | |
bbf6f052 | 7319 | { |
f994f296 | 7320 | rtx op0, op1, op2, temp; |
07beea0d | 7321 | tree type; |
8df83eae | 7322 | int unsignedp; |
b3694847 | 7323 | enum machine_mode mode; |
f994f296 | 7324 | enum tree_code code = ops->code; |
bbf6f052 | 7325 | optab this_optab; |
68557e14 ML |
7326 | rtx subtarget, original_target; |
7327 | int ignore; | |
ac5dc795 | 7328 | bool reduce_bit_field; |
f994f296 | 7329 | location_t loc = ops->location; |
038dc49a | 7330 | tree treeop0, treeop1; |
ac5dc795 | 7331 | #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \ |
bc15d0ef JM |
7332 | ? reduce_to_bit_field_precision ((expr), \ |
7333 | target, \ | |
7334 | type) \ | |
7335 | : (expr)) | |
bbf6f052 | 7336 | |
f994f296 | 7337 | type = ops->type; |
726a989a RB |
7338 | mode = TYPE_MODE (type); |
7339 | unsignedp = TYPE_UNSIGNED (type); | |
8df83eae | 7340 | |
f994f296 MM |
7341 | treeop0 = ops->op0; |
7342 | treeop1 = ops->op1; | |
f994f296 MM |
7343 | |
7344 | /* We should be called only on simple (binary or unary) expressions, | |
7345 | exactly those that are valid in gimple expressions that aren't | |
7346 | GIMPLE_SINGLE_RHS (or invalid). */ | |
7347 | gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS | |
0354c0c7 BS |
7348 | || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS |
7349 | || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS); | |
b32e7cdb | 7350 | |
68557e14 | 7351 | ignore = (target == const0_rtx |
1a87cf0c | 7352 | || ((CONVERT_EXPR_CODE_P (code) |
a134e5f3 | 7353 | || code == COND_EXPR || code == VIEW_CONVERT_EXPR) |
68557e14 ML |
7354 | && TREE_CODE (type) == VOID_TYPE)); |
7355 | ||
f994f296 MM |
7356 | /* We should be called only if we need the result. */ |
7357 | gcc_assert (!ignore); | |
7358 | ||
ac5dc795 PB |
7359 | /* An operation in what may be a bit-field type needs the |
7360 | result to be reduced to the precision of the bit-field type, | |
7361 | which is narrower than that of the type's mode. */ | |
f994f296 | 7362 | reduce_bit_field = (TREE_CODE (type) == INTEGER_TYPE |
ac5dc795 PB |
7363 | && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type)); |
7364 | ||
ac5dc795 PB |
7365 | if (reduce_bit_field && modifier == EXPAND_STACK_PARM) |
7366 | target = 0; | |
7367 | ||
7368 | /* Use subtarget as the target for operand 0 of a binary operation. */ | |
7369 | subtarget = get_subtarget (target); | |
7370 | original_target = target; | |
bbf6f052 | 7371 | |
bbf6f052 RK |
7372 | switch (code) |
7373 | { | |
1d65f45c | 7374 | case NON_LVALUE_EXPR: |
f994f296 MM |
7375 | case PAREN_EXPR: |
7376 | CASE_CONVERT: | |
7377 | if (treeop0 == error_mark_node) | |
7378 | return const0_rtx; | |
6de9cd9a | 7379 | |
f994f296 MM |
7380 | if (TREE_CODE (type) == UNION_TYPE) |
7381 | { | |
7382 | tree valtype = TREE_TYPE (treeop0); | |
6de9cd9a | 7383 | |
f994f296 MM |
7384 | /* If both input and output are BLKmode, this conversion isn't doing |
7385 | anything except possibly changing memory attribute. */ | |
7386 | if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode) | |
7387 | { | |
7388 | rtx result = expand_expr (treeop0, target, tmode, | |
7389 | modifier); | |
bbf6f052 | 7390 | |
f994f296 MM |
7391 | result = copy_rtx (result); |
7392 | set_mem_attributes (result, type, 0); | |
7393 | return result; | |
7394 | } | |
8b11a64c | 7395 | |
f994f296 MM |
7396 | if (target == 0) |
7397 | { | |
7398 | if (TYPE_MODE (type) != BLKmode) | |
7399 | target = gen_reg_rtx (TYPE_MODE (type)); | |
7400 | else | |
7401 | target = assign_temp (type, 0, 1, 1); | |
7402 | } | |
921b3427 | 7403 | |
f994f296 MM |
7404 | if (MEM_P (target)) |
7405 | /* Store data into beginning of memory target. */ | |
7406 | store_expr (treeop0, | |
7407 | adjust_address (target, TYPE_MODE (valtype), 0), | |
7408 | modifier == EXPAND_STACK_PARM, | |
7409 | false); | |
8893239d | 7410 | |
f994f296 MM |
7411 | else |
7412 | { | |
7413 | gcc_assert (REG_P (target)); | |
d6a5ac33 | 7414 | |
f994f296 MM |
7415 | /* Store this field into a union of the proper type. */ |
7416 | store_field (target, | |
7417 | MIN ((int_size_in_bytes (TREE_TYPE | |
7418 | (treeop0)) | |
7419 | * BITS_PER_UNIT), | |
7420 | (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)), | |
7421 | 0, TYPE_MODE (valtype), treeop0, | |
7422 | type, 0, false); | |
7423 | } | |
d6a5ac33 | 7424 | |
f994f296 MM |
7425 | /* Return the entire union. */ |
7426 | return target; | |
e44842fe RK |
7427 | } |
7428 | ||
f994f296 MM |
7429 | if (mode == TYPE_MODE (TREE_TYPE (treeop0))) |
7430 | { | |
7431 | op0 = expand_expr (treeop0, target, VOIDmode, | |
7432 | modifier); | |
d6a5ac33 | 7433 | |
f994f296 MM |
7434 | /* If the signedness of the conversion differs and OP0 is |
7435 | a promoted SUBREG, clear that indication since we now | |
7436 | have to do the proper extension. */ | |
7437 | if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp | |
7438 | && GET_CODE (op0) == SUBREG) | |
7439 | SUBREG_PROMOTED_VAR_P (op0) = 0; | |
d6a5ac33 | 7440 | |
f994f296 | 7441 | return REDUCE_BIT_FIELD (op0); |
0fab64a3 | 7442 | } |
1499e0a8 | 7443 | |
f994f296 MM |
7444 | op0 = expand_expr (treeop0, NULL_RTX, mode, |
7445 | modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier); | |
7446 | if (GET_MODE (op0) == mode) | |
7447 | ; | |
7448 | ||
7449 | /* If OP0 is a constant, just convert it into the proper mode. */ | |
7450 | else if (CONSTANT_P (op0)) | |
dc6d66b3 | 7451 | { |
f994f296 MM |
7452 | tree inner_type = TREE_TYPE (treeop0); |
7453 | enum machine_mode inner_mode = TYPE_MODE (inner_type); | |
dc6d66b3 | 7454 | |
f994f296 MM |
7455 | if (modifier == EXPAND_INITIALIZER) |
7456 | op0 = simplify_gen_subreg (mode, op0, inner_mode, | |
7457 | subreg_lowpart_offset (mode, | |
7458 | inner_mode)); | |
7459 | else | |
7460 | op0= convert_modes (mode, inner_mode, op0, | |
7461 | TYPE_UNSIGNED (inner_type)); | |
dc6d66b3 RK |
7462 | } |
7463 | ||
f994f296 MM |
7464 | else if (modifier == EXPAND_INITIALIZER) |
7465 | op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0); | |
1499e0a8 | 7466 | |
f994f296 MM |
7467 | else if (target == 0) |
7468 | op0 = convert_to_mode (mode, op0, | |
7469 | TYPE_UNSIGNED (TREE_TYPE | |
7470 | (treeop0))); | |
7471 | else | |
1499e0a8 | 7472 | { |
f994f296 MM |
7473 | convert_move (target, op0, |
7474 | TYPE_UNSIGNED (TREE_TYPE (treeop0))); | |
7475 | op0 = target; | |
7476 | } | |
ed1223ba | 7477 | |
f994f296 | 7478 | return REDUCE_BIT_FIELD (op0); |
bbf6f052 | 7479 | |
09e881c9 BE |
7480 | case ADDR_SPACE_CONVERT_EXPR: |
7481 | { | |
7482 | tree treeop0_type = TREE_TYPE (treeop0); | |
7483 | addr_space_t as_to; | |
7484 | addr_space_t as_from; | |
7485 | ||
7486 | gcc_assert (POINTER_TYPE_P (type)); | |
7487 | gcc_assert (POINTER_TYPE_P (treeop0_type)); | |
7488 | ||
7489 | as_to = TYPE_ADDR_SPACE (TREE_TYPE (type)); | |
7490 | as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type)); | |
7491 | ||
7492 | /* Conversions between pointers to the same address space should | |
7493 | have been implemented via CONVERT_EXPR / NOP_EXPR. */ | |
7494 | gcc_assert (as_to != as_from); | |
7495 | ||
7496 | /* Ask target code to handle conversion between pointers | |
7497 | to overlapping address spaces. */ | |
7498 | if (targetm.addr_space.subset_p (as_to, as_from) | |
7499 | || targetm.addr_space.subset_p (as_from, as_to)) | |
7500 | { | |
7501 | op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier); | |
7502 | op0 = targetm.addr_space.convert (op0, treeop0_type, type); | |
7503 | gcc_assert (op0); | |
7504 | return op0; | |
7505 | } | |
7506 | ||
7507 | /* For disjoint address spaces, converting anything but | |
7508 | a null pointer invokes undefined behaviour. We simply | |
7509 | always return a null pointer here. */ | |
7510 | return CONST0_RTX (mode); | |
7511 | } | |
7512 | ||
b8698a0f | 7513 | case POINTER_PLUS_EXPR: |
f994f296 | 7514 | /* Even though the sizetype mode and the pointer's mode can be different |
b8698a0f | 7515 | expand is able to handle this correctly and get the correct result out |
f994f296 MM |
7516 | of the PLUS_EXPR code. */ |
7517 | /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR | |
7518 | if sizetype precision is smaller than pointer precision. */ | |
7519 | if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type)) | |
7520 | treeop1 = fold_convert_loc (loc, type, | |
7521 | fold_convert_loc (loc, ssizetype, | |
7522 | treeop1)); | |
7523 | case PLUS_EXPR: | |
f994f296 MM |
7524 | /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and |
7525 | something else, make sure we add the register to the constant and | |
7526 | then to the other thing. This case can occur during strength | |
7527 | reduction and doing it this way will produce better code if the | |
7528 | frame pointer or argument pointer is eliminated. | |
bbf6f052 | 7529 | |
f994f296 MM |
7530 | fold-const.c will ensure that the constant is always in the inner |
7531 | PLUS_EXPR, so the only case we need to do anything about is if | |
7532 | sp, ap, or fp is our second argument, in which case we must swap | |
7533 | the innermost first argument and our second argument. */ | |
3a94c984 | 7534 | |
f994f296 MM |
7535 | if (TREE_CODE (treeop0) == PLUS_EXPR |
7536 | && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST | |
7537 | && TREE_CODE (treeop1) == VAR_DECL | |
7538 | && (DECL_RTL (treeop1) == frame_pointer_rtx | |
7539 | || DECL_RTL (treeop1) == stack_pointer_rtx | |
7540 | || DECL_RTL (treeop1) == arg_pointer_rtx)) | |
7541 | { | |
7542 | tree t = treeop1; | |
bbf6f052 | 7543 | |
f994f296 MM |
7544 | treeop1 = TREE_OPERAND (treeop0, 0); |
7545 | TREE_OPERAND (treeop0, 0) = t; | |
7546 | } | |
bbf6f052 | 7547 | |
f994f296 MM |
7548 | /* If the result is to be ptr_mode and we are adding an integer to |
7549 | something, we might be forming a constant. So try to use | |
7550 | plus_constant. If it produces a sum and we can't accept it, | |
7551 | use force_operand. This allows P = &ARR[const] to generate | |
7552 | efficient code on machines where a SYMBOL_REF is not a valid | |
7553 | address. | |
0f996086 | 7554 | |
f994f296 MM |
7555 | If this is an EXPAND_SUM call, always return the sum. */ |
7556 | if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER | |
7557 | || (mode == ptr_mode && (unsignedp || ! flag_trapv))) | |
9ad58e09 | 7558 | { |
f994f296 MM |
7559 | if (modifier == EXPAND_STACK_PARM) |
7560 | target = 0; | |
7561 | if (TREE_CODE (treeop0) == INTEGER_CST | |
7562 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | |
7563 | && TREE_CONSTANT (treeop1)) | |
7564 | { | |
7565 | rtx constant_part; | |
9ad58e09 | 7566 | |
f994f296 MM |
7567 | op1 = expand_expr (treeop1, subtarget, VOIDmode, |
7568 | EXPAND_SUM); | |
7569 | /* Use immed_double_const to ensure that the constant is | |
7570 | truncated according to the mode of OP1, then sign extended | |
7571 | to a HOST_WIDE_INT. Using the constant directly can result | |
7572 | in non-canonical RTL in a 64x32 cross compile. */ | |
7573 | constant_part | |
7574 | = immed_double_const (TREE_INT_CST_LOW (treeop0), | |
7575 | (HOST_WIDE_INT) 0, | |
7576 | TYPE_MODE (TREE_TYPE (treeop1))); | |
7577 | op1 = plus_constant (op1, INTVAL (constant_part)); | |
7578 | if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER) | |
7579 | op1 = force_operand (op1, target); | |
7580 | return REDUCE_BIT_FIELD (op1); | |
7581 | } | |
9ad58e09 | 7582 | |
f994f296 MM |
7583 | else if (TREE_CODE (treeop1) == INTEGER_CST |
7584 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | |
7585 | && TREE_CONSTANT (treeop0)) | |
7586 | { | |
7587 | rtx constant_part; | |
9ad58e09 | 7588 | |
f994f296 MM |
7589 | op0 = expand_expr (treeop0, subtarget, VOIDmode, |
7590 | (modifier == EXPAND_INITIALIZER | |
7591 | ? EXPAND_INITIALIZER : EXPAND_SUM)); | |
7592 | if (! CONSTANT_P (op0)) | |
7593 | { | |
7594 | op1 = expand_expr (treeop1, NULL_RTX, | |
7595 | VOIDmode, modifier); | |
7596 | /* Return a PLUS if modifier says it's OK. */ | |
7597 | if (modifier == EXPAND_SUM | |
7598 | || modifier == EXPAND_INITIALIZER) | |
7599 | return simplify_gen_binary (PLUS, mode, op0, op1); | |
7600 | goto binop2; | |
7601 | } | |
7602 | /* Use immed_double_const to ensure that the constant is | |
7603 | truncated according to the mode of OP1, then sign extended | |
7604 | to a HOST_WIDE_INT. Using the constant directly can result | |
7605 | in non-canonical RTL in a 64x32 cross compile. */ | |
7606 | constant_part | |
7607 | = immed_double_const (TREE_INT_CST_LOW (treeop1), | |
7608 | (HOST_WIDE_INT) 0, | |
7609 | TYPE_MODE (TREE_TYPE (treeop0))); | |
7610 | op0 = plus_constant (op0, INTVAL (constant_part)); | |
7611 | if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER) | |
7612 | op0 = force_operand (op0, target); | |
7613 | return REDUCE_BIT_FIELD (op0); | |
7614 | } | |
9ad58e09 RS |
7615 | } |
7616 | ||
f994f296 MM |
7617 | /* No sense saving up arithmetic to be done |
7618 | if it's all in the wrong mode to form part of an address. | |
7619 | And force_operand won't know whether to sign-extend or | |
7620 | zero-extend. */ | |
7621 | if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER) | |
7622 | || mode != ptr_mode) | |
7623 | { | |
7624 | expand_operands (treeop0, treeop1, | |
7625 | subtarget, &op0, &op1, EXPAND_NORMAL); | |
7626 | if (op0 == const0_rtx) | |
7627 | return op1; | |
7628 | if (op1 == const0_rtx) | |
7629 | return op0; | |
7630 | goto binop2; | |
7631 | } | |
9ad58e09 | 7632 | |
f994f296 MM |
7633 | expand_operands (treeop0, treeop1, |
7634 | subtarget, &op0, &op1, modifier); | |
7635 | return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1)); | |
bbf6f052 | 7636 | |
f994f296 | 7637 | case MINUS_EXPR: |
f994f296 MM |
7638 | /* For initializers, we are allowed to return a MINUS of two |
7639 | symbolic constants. Here we handle all cases when both operands | |
7640 | are constant. */ | |
7641 | /* Handle difference of two symbolic constants, | |
7642 | for the sake of an initializer. */ | |
7643 | if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER) | |
7644 | && really_constant_p (treeop0) | |
7645 | && really_constant_p (treeop1)) | |
7646 | { | |
7647 | expand_operands (treeop0, treeop1, | |
7648 | NULL_RTX, &op0, &op1, modifier); | |
1499e0a8 | 7649 | |
f994f296 MM |
7650 | /* If the last operand is a CONST_INT, use plus_constant of |
7651 | the negated constant. Else make the MINUS. */ | |
7652 | if (CONST_INT_P (op1)) | |
7653 | return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1))); | |
7654 | else | |
7655 | return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1)); | |
7656 | } | |
1499e0a8 | 7657 | |
f994f296 MM |
7658 | /* No sense saving up arithmetic to be done |
7659 | if it's all in the wrong mode to form part of an address. | |
7660 | And force_operand won't know whether to sign-extend or | |
7661 | zero-extend. */ | |
7662 | if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER) | |
7663 | || mode != ptr_mode) | |
7664 | goto binop; | |
bbf6f052 | 7665 | |
f994f296 MM |
7666 | expand_operands (treeop0, treeop1, |
7667 | subtarget, &op0, &op1, modifier); | |
70e6ca43 | 7668 | |
f994f296 MM |
7669 | /* Convert A - const to A + (-const). */ |
7670 | if (CONST_INT_P (op1)) | |
dd27116b | 7671 | { |
f994f296 MM |
7672 | op1 = negate_rtx (mode, op1); |
7673 | return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1)); | |
dd27116b | 7674 | } |
3207b172 | 7675 | |
f994f296 | 7676 | goto binop2; |
3a94c984 | 7677 | |
0354c0c7 BS |
7678 | case WIDEN_MULT_PLUS_EXPR: |
7679 | case WIDEN_MULT_MINUS_EXPR: | |
7680 | expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL); | |
7681 | op2 = expand_normal (ops->op2); | |
7682 | target = expand_widen_pattern_expr (ops, op0, op1, op2, | |
7683 | target, unsignedp); | |
7684 | return target; | |
7685 | ||
5b58b39b | 7686 | case WIDEN_MULT_EXPR: |
f994f296 MM |
7687 | /* If first operand is constant, swap them. |
7688 | Thus the following special case checks need only | |
7689 | check the second operand. */ | |
7690 | if (TREE_CODE (treeop0) == INTEGER_CST) | |
7691 | { | |
7692 | tree t1 = treeop0; | |
7693 | treeop0 = treeop1; | |
7694 | treeop1 = t1; | |
7695 | } | |
bbf6f052 | 7696 | |
f994f296 MM |
7697 | /* First, check if we have a multiplication of one signed and one |
7698 | unsigned operand. */ | |
5b58b39b BS |
7699 | if (TREE_CODE (treeop1) != INTEGER_CST |
7700 | && (TYPE_UNSIGNED (TREE_TYPE (treeop0)) | |
7701 | != TYPE_UNSIGNED (TREE_TYPE (treeop1)))) | |
f994f296 | 7702 | { |
5b58b39b | 7703 | enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0)); |
f994f296 | 7704 | this_optab = usmul_widen_optab; |
5b58b39b | 7705 | if (mode == GET_MODE_2XWIDER_MODE (innermode)) |
f994f296 MM |
7706 | { |
7707 | if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing) | |
7708 | { | |
5b58b39b BS |
7709 | if (TYPE_UNSIGNED (TREE_TYPE (treeop0))) |
7710 | expand_operands (treeop0, treeop1, subtarget, &op0, &op1, | |
f994f296 MM |
7711 | EXPAND_NORMAL); |
7712 | else | |
5b58b39b | 7713 | expand_operands (treeop0, treeop1, subtarget, &op1, &op0, |
f994f296 | 7714 | EXPAND_NORMAL); |
f994f296 MM |
7715 | goto binop3; |
7716 | } | |
7717 | } | |
7718 | } | |
5b58b39b BS |
7719 | /* Check for a multiplication with matching signedness. */ |
7720 | else if ((TREE_CODE (treeop1) == INTEGER_CST | |
7721 | && int_fits_type_p (treeop1, TREE_TYPE (treeop0))) | |
7722 | || (TYPE_UNSIGNED (TREE_TYPE (treeop1)) | |
7723 | == TYPE_UNSIGNED (TREE_TYPE (treeop0)))) | |
f994f296 | 7724 | { |
5b58b39b | 7725 | tree op0type = TREE_TYPE (treeop0); |
f994f296 MM |
7726 | enum machine_mode innermode = TYPE_MODE (op0type); |
7727 | bool zextend_p = TYPE_UNSIGNED (op0type); | |
7728 | optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab; | |
7729 | this_optab = zextend_p ? umul_widen_optab : smul_widen_optab; | |
1e0598e2 | 7730 | |
f994f296 MM |
7731 | if (mode == GET_MODE_2XWIDER_MODE (innermode)) |
7732 | { | |
7733 | if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing) | |
7734 | { | |
5b58b39b BS |
7735 | expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, |
7736 | EXPAND_NORMAL); | |
7737 | temp = expand_widening_mult (mode, op0, op1, target, | |
7738 | unsignedp, this_optab); | |
7739 | return REDUCE_BIT_FIELD (temp); | |
f994f296 | 7740 | } |
5b58b39b BS |
7741 | if (optab_handler (other_optab, mode)->insn_code != CODE_FOR_nothing |
7742 | && innermode == word_mode) | |
f994f296 MM |
7743 | { |
7744 | rtx htem, hipart; | |
5b58b39b BS |
7745 | op0 = expand_normal (treeop0); |
7746 | if (TREE_CODE (treeop1) == INTEGER_CST) | |
f994f296 | 7747 | op1 = convert_modes (innermode, mode, |
5b58b39b | 7748 | expand_normal (treeop1), unsignedp); |
f994f296 | 7749 | else |
5b58b39b | 7750 | op1 = expand_normal (treeop1); |
f994f296 MM |
7751 | temp = expand_binop (mode, other_optab, op0, op1, target, |
7752 | unsignedp, OPTAB_LIB_WIDEN); | |
7753 | hipart = gen_highpart (innermode, temp); | |
7754 | htem = expand_mult_highpart_adjust (innermode, hipart, | |
7755 | op0, op1, hipart, | |
7756 | zextend_p); | |
7757 | if (htem != hipart) | |
7758 | emit_move_insn (hipart, htem); | |
7759 | return REDUCE_BIT_FIELD (temp); | |
7760 | } | |
7761 | } | |
7762 | } | |
5b58b39b BS |
7763 | treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0); |
7764 | treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1); | |
7765 | expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL); | |
7766 | return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp)); | |
7767 | ||
7768 | case MULT_EXPR: | |
7769 | /* If this is a fixed-point operation, then we cannot use the code | |
7770 | below because "expand_mult" doesn't support sat/no-sat fixed-point | |
7771 | multiplications. */ | |
7772 | if (ALL_FIXED_POINT_MODE_P (mode)) | |
7773 | goto binop; | |
7774 | ||
7775 | /* If first operand is constant, swap them. | |
7776 | Thus the following special case checks need only | |
7777 | check the second operand. */ | |
7778 | if (TREE_CODE (treeop0) == INTEGER_CST) | |
7779 | { | |
7780 | tree t1 = treeop0; | |
7781 | treeop0 = treeop1; | |
7782 | treeop1 = t1; | |
7783 | } | |
7784 | ||
7785 | /* Attempt to return something suitable for generating an | |
7786 | indexed address, for machines that support that. */ | |
7787 | ||
7788 | if (modifier == EXPAND_SUM && mode == ptr_mode | |
7789 | && host_integerp (treeop1, 0)) | |
7790 | { | |
7791 | tree exp1 = treeop1; | |
7792 | ||
7793 | op0 = expand_expr (treeop0, subtarget, VOIDmode, | |
7794 | EXPAND_SUM); | |
7795 | ||
7796 | if (!REG_P (op0)) | |
7797 | op0 = force_operand (op0, NULL_RTX); | |
7798 | if (!REG_P (op0)) | |
7799 | op0 = copy_to_mode_reg (mode, op0); | |
7800 | ||
7801 | return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0, | |
7802 | gen_int_mode (tree_low_cst (exp1, 0), | |
7803 | TYPE_MODE (TREE_TYPE (exp1))))); | |
7804 | } | |
7805 | ||
7806 | if (modifier == EXPAND_STACK_PARM) | |
7807 | target = 0; | |
7808 | ||
7809 | expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL); | |
f994f296 | 7810 | return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp)); |
bbf6f052 | 7811 | |
f994f296 MM |
7812 | case TRUNC_DIV_EXPR: |
7813 | case FLOOR_DIV_EXPR: | |
7814 | case CEIL_DIV_EXPR: | |
7815 | case ROUND_DIV_EXPR: | |
7816 | case EXACT_DIV_EXPR: | |
7817 | /* If this is a fixed-point operation, then we cannot use the code | |
7818 | below because "expand_divmod" doesn't support sat/no-sat fixed-point | |
7819 | divisions. */ | |
7820 | if (ALL_FIXED_POINT_MODE_P (mode)) | |
7821 | goto binop; | |
ac182688 | 7822 | |
f994f296 MM |
7823 | if (modifier == EXPAND_STACK_PARM) |
7824 | target = 0; | |
7825 | /* Possible optimization: compute the dividend with EXPAND_SUM | |
7826 | then, if the divisor is constant, we can optimize the case | |
7827 | where some terms of the dividend have coeffs divisible by it. */ | |
7828 | expand_operands (treeop0, treeop1, | |
7829 | subtarget, &op0, &op1, EXPAND_NORMAL); | |
7830 | return expand_divmod (0, code, mode, op0, op1, target, unsignedp); | |
ac182688 | 7831 | |
f994f296 MM |
7832 | case RDIV_EXPR: |
7833 | goto binop; | |
6de9cd9a | 7834 | |
f994f296 MM |
7835 | case TRUNC_MOD_EXPR: |
7836 | case FLOOR_MOD_EXPR: | |
7837 | case CEIL_MOD_EXPR: | |
7838 | case ROUND_MOD_EXPR: | |
7839 | if (modifier == EXPAND_STACK_PARM) | |
7840 | target = 0; | |
7841 | expand_operands (treeop0, treeop1, | |
7842 | subtarget, &op0, &op1, EXPAND_NORMAL); | |
7843 | return expand_divmod (1, code, mode, op0, op1, target, unsignedp); | |
742920c7 | 7844 | |
f994f296 MM |
7845 | case FIXED_CONVERT_EXPR: |
7846 | op0 = expand_normal (treeop0); | |
7847 | if (target == 0 || modifier == EXPAND_STACK_PARM) | |
7848 | target = gen_reg_rtx (mode); | |
742920c7 | 7849 | |
f994f296 MM |
7850 | if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE |
7851 | && TYPE_UNSIGNED (TREE_TYPE (treeop0))) | |
7852 | || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type))) | |
7853 | expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type)); | |
7854 | else | |
7855 | expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type)); | |
7856 | return target; | |
6de9cd9a | 7857 | |
f994f296 MM |
7858 | case FIX_TRUNC_EXPR: |
7859 | op0 = expand_normal (treeop0); | |
7860 | if (target == 0 || modifier == EXPAND_STACK_PARM) | |
7861 | target = gen_reg_rtx (mode); | |
7862 | expand_fix (target, op0, unsignedp); | |
7863 | return target; | |
bbf6f052 | 7864 | |
f994f296 MM |
7865 | case FLOAT_EXPR: |
7866 | op0 = expand_normal (treeop0); | |
7867 | if (target == 0 || modifier == EXPAND_STACK_PARM) | |
7868 | target = gen_reg_rtx (mode); | |
7869 | /* expand_float can't figure out what to do if FROM has VOIDmode. | |
7870 | So give it the correct mode. With -O, cse will optimize this. */ | |
7871 | if (GET_MODE (op0) == VOIDmode) | |
7872 | op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)), | |
7873 | op0); | |
7874 | expand_float (target, op0, | |
7875 | TYPE_UNSIGNED (TREE_TYPE (treeop0))); | |
7876 | return target; | |
4af3895e | 7877 | |
f994f296 MM |
7878 | case NEGATE_EXPR: |
7879 | op0 = expand_expr (treeop0, subtarget, | |
7880 | VOIDmode, EXPAND_NORMAL); | |
7881 | if (modifier == EXPAND_STACK_PARM) | |
7882 | target = 0; | |
7883 | temp = expand_unop (mode, | |
7884 | optab_for_tree_code (NEGATE_EXPR, type, | |
7885 | optab_default), | |
7886 | op0, target, 0); | |
7887 | gcc_assert (temp); | |
7888 | return REDUCE_BIT_FIELD (temp); | |
05bccae2 | 7889 | |
f994f296 MM |
7890 | case ABS_EXPR: |
7891 | op0 = expand_expr (treeop0, subtarget, | |
7892 | VOIDmode, EXPAND_NORMAL); | |
7893 | if (modifier == EXPAND_STACK_PARM) | |
7894 | target = 0; | |
3a94c984 | 7895 | |
f994f296 MM |
7896 | /* ABS_EXPR is not valid for complex arguments. */ |
7897 | gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT | |
7898 | && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT); | |
742920c7 | 7899 | |
f994f296 MM |
7900 | /* Unsigned abs is simply the operand. Testing here means we don't |
7901 | risk generating incorrect code below. */ | |
7902 | if (TYPE_UNSIGNED (type)) | |
7903 | return op0; | |
4038c495 | 7904 | |
f994f296 MM |
7905 | return expand_abs (mode, op0, target, unsignedp, |
7906 | safe_from_p (target, treeop0, 1)); | |
32eed045 | 7907 | |
f994f296 MM |
7908 | case MAX_EXPR: |
7909 | case MIN_EXPR: | |
7910 | target = original_target; | |
7911 | if (target == 0 | |
7912 | || modifier == EXPAND_STACK_PARM | |
7913 | || (MEM_P (target) && MEM_VOLATILE_P (target)) | |
7914 | || GET_MODE (target) != mode | |
7915 | || (REG_P (target) | |
7916 | && REGNO (target) < FIRST_PSEUDO_REGISTER)) | |
7917 | target = gen_reg_rtx (mode); | |
7918 | expand_operands (treeop0, treeop1, | |
7919 | target, &op0, &op1, EXPAND_NORMAL); | |
32eed045 | 7920 | |
f994f296 MM |
7921 | /* First try to do it with a special MIN or MAX instruction. |
7922 | If that does not win, use a conditional jump to select the proper | |
7923 | value. */ | |
7924 | this_optab = optab_for_tree_code (code, type, optab_default); | |
7925 | temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp, | |
7926 | OPTAB_WIDEN); | |
7927 | if (temp != 0) | |
7928 | return temp; | |
ed1223ba | 7929 | |
f994f296 MM |
7930 | /* At this point, a MEM target is no longer useful; we will get better |
7931 | code without it. */ | |
ed1223ba | 7932 | |
f994f296 MM |
7933 | if (! REG_P (target)) |
7934 | target = gen_reg_rtx (mode); | |
ed1223ba | 7935 | |
f994f296 MM |
7936 | /* If op1 was placed in target, swap op0 and op1. */ |
7937 | if (target != op0 && target == op1) | |
7938 | { | |
7939 | temp = op0; | |
7940 | op0 = op1; | |
7941 | op1 = temp; | |
7942 | } | |
ed1223ba | 7943 | |
f994f296 MM |
7944 | /* We generate better code and avoid problems with op1 mentioning |
7945 | target by forcing op1 into a pseudo if it isn't a constant. */ | |
7946 | if (! CONSTANT_P (op1)) | |
7947 | op1 = force_reg (mode, op1); | |
4af3895e | 7948 | |
bbf6f052 | 7949 | { |
f994f296 MM |
7950 | enum rtx_code comparison_code; |
7951 | rtx cmpop1 = op1; | |
ae00112b | 7952 | |
f994f296 MM |
7953 | if (code == MAX_EXPR) |
7954 | comparison_code = unsignedp ? GEU : GE; | |
7955 | else | |
7956 | comparison_code = unsignedp ? LEU : LE; | |
ae00112b | 7957 | |
f994f296 MM |
7958 | /* Canonicalize to comparisons against 0. */ |
7959 | if (op1 == const1_rtx) | |
b8e444f4 | 7960 | { |
f994f296 MM |
7961 | /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1) |
7962 | or (a != 0 ? a : 1) for unsigned. | |
7963 | For MIN we are safe converting (a <= 1 ? a : 1) | |
7964 | into (a <= 0 ? a : 1) */ | |
7965 | cmpop1 = const0_rtx; | |
7966 | if (code == MAX_EXPR) | |
7967 | comparison_code = unsignedp ? NE : GT; | |
b8e444f4 | 7968 | } |
f994f296 | 7969 | if (op1 == constm1_rtx && !unsignedp) |
8d2e5f72 | 7970 | { |
f994f296 MM |
7971 | /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1) |
7972 | and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */ | |
7973 | cmpop1 = const0_rtx; | |
7974 | if (code == MIN_EXPR) | |
7975 | comparison_code = LT; | |
8d2e5f72 | 7976 | } |
f994f296 MM |
7977 | #ifdef HAVE_conditional_move |
7978 | /* Use a conditional move if possible. */ | |
7979 | if (can_conditionally_move_p (mode)) | |
7bb0943f | 7980 | { |
f994f296 | 7981 | rtx insn; |
7bb0943f | 7982 | |
f994f296 MM |
7983 | /* ??? Same problem as in expmed.c: emit_conditional_move |
7984 | forces a stack adjustment via compare_from_rtx, and we | |
7985 | lose the stack adjustment if the sequence we are about | |
7986 | to create is discarded. */ | |
7987 | do_pending_stack_adjust (); | |
2d48c13d | 7988 | |
f994f296 | 7989 | start_sequence (); |
2d48c13d | 7990 | |
f994f296 MM |
7991 | /* Try to emit the conditional move. */ |
7992 | insn = emit_conditional_move (target, comparison_code, | |
7993 | op0, cmpop1, mode, | |
7994 | op0, op1, mode, | |
7995 | unsignedp); | |
7996 | ||
7997 | /* If we could do the conditional move, emit the sequence, | |
7998 | and return. */ | |
7999 | if (insn) | |
89752202 | 8000 | { |
f994f296 MM |
8001 | rtx seq = get_insns (); |
8002 | end_sequence (); | |
8003 | emit_insn (seq); | |
8004 | return target; | |
89752202 HB |
8005 | } |
8006 | ||
f994f296 MM |
8007 | /* Otherwise discard the sequence and fall back to code with |
8008 | branches. */ | |
8009 | end_sequence (); | |
7bb0943f | 8010 | } |
f994f296 MM |
8011 | #endif |
8012 | if (target != op0) | |
8013 | emit_move_insn (target, op0); | |
7bb0943f | 8014 | |
f994f296 MM |
8015 | temp = gen_label_rtx (); |
8016 | do_compare_rtx_and_jump (target, cmpop1, comparison_code, | |
40e90eac JJ |
8017 | unsignedp, mode, NULL_RTX, NULL_RTX, temp, |
8018 | -1); | |
f994f296 MM |
8019 | } |
8020 | emit_move_insn (target, op1); | |
8021 | emit_label (temp); | |
8022 | return target; | |
1ce7f3c2 | 8023 | |
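Editorial aside (not part of expr.c): a minimal C-level sketch of the canonicalization described in the comments above. For unsigned operands, a MAX against the constant 1 is expanded as if the comparison were against 0, which is the cheapest form for most targets to test; the function name below is illustrative only.

    /* Sketch only: the expander treats the first form as the second.  */
    unsigned int
    umax_with_one (unsigned int a)
    {
      /* Written:   a >= 1 ? a : 1   (MAX_EXPR <a, 1>)
         Expanded:  a != 0 ? a : 1   (comparison canonicalized to 0)  */
      return a != 0 ? a : 1;
    }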
f994f296 MM |
8024 | case BIT_NOT_EXPR: |
8025 | op0 = expand_expr (treeop0, subtarget, | |
8026 | VOIDmode, EXPAND_NORMAL); | |
8027 | if (modifier == EXPAND_STACK_PARM) | |
8028 | target = 0; | |
8029 | temp = expand_unop (mode, one_cmpl_optab, op0, target, 1); | |
8030 | gcc_assert (temp); | |
8031 | return temp; | |
f47e9b4e | 8032 | |
f994f296 MM |
8033 | /* ??? Can optimize bitwise operations with one arg constant. |
8034 | Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b) | |
8035 | and (a bitwise1 b) bitwise2 b (etc) | |
8036 | but that is probably not worthwhile. */ | |
bbf6f052 | 8037 | |
f994f296 MM |
8038 | /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two |
8039 | boolean values when we want in all cases to compute both of them. In | |
8040 | general it is fastest to do TRUTH_AND_EXPR by computing both operands | |
8041 | as actual zero-or-1 values and then bitwise anding. In cases where | |
8042 | there cannot be any side effects, better code would be made by | |
8043 | treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is | |
8044 | how to recognize those cases. */ | |
bbf6f052 | 8045 | |
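Editorial aside (not part of expr.c): the distinction drawn in the comment above, written out as plain C. TRUTH_AND_EXPR evaluates both operands to 0/1 values and bitwise-ANDs them, while TRUTH_ANDIF_EXPR short-circuits like C's &&; the function names are illustrative.

    /* Sketch only.  */
    int
    truth_and (int a, int b)
    {
      return (a != 0) & (b != 0);   /* both sides always evaluated  */
    }

    int
    truth_andif (int a, int b)
    {
      return (a != 0) && (b != 0);  /* right side skipped when a == 0  */
    }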
f994f296 MM |
8046 | case TRUTH_AND_EXPR: |
8047 | code = BIT_AND_EXPR; | |
8048 | case BIT_AND_EXPR: | |
8049 | goto binop; | |
bbf6f052 | 8050 | |
f994f296 MM |
8051 | case TRUTH_OR_EXPR: |
8052 | code = BIT_IOR_EXPR; | |
8053 | case BIT_IOR_EXPR: | |
8054 | goto binop; | |
7a06d606 | 8055 | |
f994f296 MM |
8056 | case TRUTH_XOR_EXPR: |
8057 | code = BIT_XOR_EXPR; | |
8058 | case BIT_XOR_EXPR: | |
8059 | goto binop; | |
7a06d606 | 8060 | |
f994f296 MM |
8061 | case LROTATE_EXPR: |
8062 | case RROTATE_EXPR: | |
8063 | gcc_assert (VECTOR_MODE_P (TYPE_MODE (type)) | |
8064 | || (GET_MODE_PRECISION (TYPE_MODE (type)) | |
8065 | == TYPE_PRECISION (type))); | |
8066 | /* fall through */ | |
a281e72d | 8067 | |
f994f296 MM |
8068 | case LSHIFT_EXPR: |
8069 | case RSHIFT_EXPR: | |
8070 | /* If this is a fixed-point operation, then we cannot use the code | |
8071 | below because "expand_shift" doesn't support sat/no-sat fixed-point | |
8072 | shifts. */ | |
8073 | if (ALL_FIXED_POINT_MODE_P (mode)) | |
8074 | goto binop; | |
3a94c984 | 8075 | |
f994f296 MM |
8076 | if (! safe_from_p (subtarget, treeop1, 1)) |
8077 | subtarget = 0; | |
8078 | if (modifier == EXPAND_STACK_PARM) | |
8079 | target = 0; | |
8080 | op0 = expand_expr (treeop0, subtarget, | |
8081 | VOIDmode, EXPAND_NORMAL); | |
8082 | temp = expand_shift (code, mode, op0, treeop1, target, | |
8083 | unsignedp); | |
8084 | if (code == LSHIFT_EXPR) | |
8085 | temp = REDUCE_BIT_FIELD (temp); | |
8086 | return temp; | |
bbf6f052 | 8087 | |
f994f296 MM |
8088 | /* Could determine the answer when only additive constants differ. Also, |
8089 | the addition of one can be handled by changing the condition. */ | |
8090 | case LT_EXPR: | |
8091 | case LE_EXPR: | |
8092 | case GT_EXPR: | |
8093 | case GE_EXPR: | |
8094 | case EQ_EXPR: | |
8095 | case NE_EXPR: | |
8096 | case UNORDERED_EXPR: | |
8097 | case ORDERED_EXPR: | |
8098 | case UNLT_EXPR: | |
8099 | case UNLE_EXPR: | |
8100 | case UNGT_EXPR: | |
8101 | case UNGE_EXPR: | |
8102 | case UNEQ_EXPR: | |
8103 | case LTGT_EXPR: | |
8104 | temp = do_store_flag (ops, | |
8105 | modifier != EXPAND_STACK_PARM ? target : NULL_RTX, | |
8106 | tmode != VOIDmode ? tmode : mode); | |
8107 | if (temp) | |
8108 | return temp; | |
dc6d66b3 | 8109 | |
f994f296 MM |
8110 | /* Use a compare and a jump for BLKmode comparisons, or for function |
8111 | type comparisons if HAVE_canonicalize_funcptr_for_compare. */ | |
dc6d66b3 | 8112 | |
f994f296 MM |
8113 | if ((target == 0 |
8114 | || modifier == EXPAND_STACK_PARM | |
8115 | || ! safe_from_p (target, treeop0, 1) | |
8116 | || ! safe_from_p (target, treeop1, 1) | |
8117 | /* Make sure we don't have a hard reg (such as function's return | |
8118 | value) live across basic blocks, if not optimizing. */ | |
8119 | || (!optimize && REG_P (target) | |
8120 | && REGNO (target) < FIRST_PSEUDO_REGISTER))) | |
8121 | target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode); | |
ef19912d | 8122 | |
f994f296 | 8123 | emit_move_insn (target, const0_rtx); |
ef19912d | 8124 | |
f994f296 | 8125 | op1 = gen_label_rtx (); |
40e90eac | 8126 | jumpifnot_1 (code, treeop0, treeop1, op1, -1); |
befdad07 | 8127 | |
f994f296 | 8128 | emit_move_insn (target, const1_rtx); |
bbf6f052 | 8129 | |
f994f296 MM |
8130 | emit_label (op1); |
8131 | return target; | |
bbf6f052 | 8132 | |
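Editorial aside (not part of expr.c): when do_store_flag cannot produce the 0/1 result directly, the fallback above emits "store 0; jump past the next store if the comparison fails; store 1; label". A C-level sketch of that shape, with illustrative names:

    /* Sketch only: shape of the branching fallback.  */
    int
    store_flag_fallback (int a, int b)
    {
      int result = 0;     /* emit_move_insn (target, const0_rtx)  */
      if (a < b)          /* jumpifnot_1 branches past the store   */
        result = 1;       /* emit_move_insn (target, const1_rtx)  */
      return result;      /* emit_label (...); return target;      */
    }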
f994f296 MM |
8133 | case TRUTH_NOT_EXPR: |
8134 | if (modifier == EXPAND_STACK_PARM) | |
8135 | target = 0; | |
8136 | op0 = expand_expr (treeop0, target, | |
8137 | VOIDmode, EXPAND_NORMAL); | |
8138 | /* The parser is careful to generate TRUTH_NOT_EXPR | |
8139 | only with operands that are always zero or one. */ | |
8140 | temp = expand_binop (mode, xor_optab, op0, const1_rtx, | |
8141 | target, 1, OPTAB_LIB_WIDEN); | |
8142 | gcc_assert (temp); | |
8143 | return temp; | |
bbf6f052 | 8144 | |
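Editorial aside (not part of expr.c): because the parser guarantees a TRUTH_NOT_EXPR operand is already 0 or 1, the expansion above is simply an XOR with 1, as in this sketch:

    /* Sketch only: logical negation of a known 0/1 value.  */
    int
    truth_not (int flag)
    {
      return flag ^ 1;   /* 0 -> 1, 1 -> 0  */
    }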
f994f296 MM |
8145 | case COMPLEX_EXPR: |
8146 | /* Get the rtx code of the operands. */ | |
8147 | op0 = expand_normal (treeop0); | |
8148 | op1 = expand_normal (treeop1); | |
05019f83 | 8149 | |
f994f296 MM |
8150 | if (!target) |
8151 | target = gen_reg_rtx (TYPE_MODE (type)); | |
41472af8 | 8152 | |
f994f296 MM |
8153 | /* Move the real (op0) and imaginary (op1) parts to their location. */ |
8154 | write_complex_part (target, op0, false); | |
8155 | write_complex_part (target, op1, true); | |
f47e9b4e | 8156 | |
f994f296 | 8157 | return target; |
dc6d66b3 | 8158 | |
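Editorial aside (not part of expr.c): a GNU C sketch of what the COMPLEX_EXPR expansion above does, writing the real and imaginary parts into the target separately (the __real__/__imag__ lvalues are GCC extensions):

    /* Sketch only: assemble a complex value from its two parts.  */
    _Complex double
    make_complex (double re, double im)
    {
      _Complex double c;
      __real__ c = re;   /* write_complex_part (target, op0, false)  */
      __imag__ c = im;   /* write_complex_part (target, op1, true)   */
      return c;
    }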
f994f296 MM |
8159 | case WIDEN_SUM_EXPR: |
8160 | { | |
8161 | tree oprnd0 = treeop0; | |
8162 | tree oprnd1 = treeop1; | |
0d15e60c | 8163 | |
f994f296 MM |
8164 | expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL); |
8165 | target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1, | |
8166 | target, unsignedp); | |
8167 | return target; | |
bbf6f052 RK |
8168 | } |
8169 | ||
f994f296 MM |
8170 | case REDUC_MAX_EXPR: |
8171 | case REDUC_MIN_EXPR: | |
8172 | case REDUC_PLUS_EXPR: | |
d2af6a68 | 8173 | { |
f994f296 MM |
8174 | op0 = expand_normal (treeop0); |
8175 | this_optab = optab_for_tree_code (code, type, optab_default); | |
8176 | temp = expand_unop (mode, this_optab, op0, target, unsignedp); | |
8177 | gcc_assert (temp); | |
8178 | return temp; | |
8179 | } | |
d2af6a68 | 8180 | |
f994f296 MM |
8181 | case VEC_EXTRACT_EVEN_EXPR: |
8182 | case VEC_EXTRACT_ODD_EXPR: | |
8183 | { | |
8184 | expand_operands (treeop0, treeop1, | |
8185 | NULL_RTX, &op0, &op1, EXPAND_NORMAL); | |
8186 | this_optab = optab_for_tree_code (code, type, optab_default); | |
8187 | temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp, | |
8188 | OPTAB_WIDEN); | |
8189 | gcc_assert (temp); | |
8190 | return temp; | |
d2af6a68 | 8191 | } |
bbf6f052 | 8192 | |
f994f296 MM |
8193 | case VEC_INTERLEAVE_HIGH_EXPR: |
8194 | case VEC_INTERLEAVE_LOW_EXPR: | |
8195 | { | |
8196 | expand_operands (treeop0, treeop1, | |
8197 | NULL_RTX, &op0, &op1, EXPAND_NORMAL); | |
8198 | this_optab = optab_for_tree_code (code, type, optab_default); | |
8199 | temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp, | |
8200 | OPTAB_WIDEN); | |
8201 | gcc_assert (temp); | |
8202 | return temp; | |
8203 | } | |
4a53008b | 8204 | |
f994f296 MM |
8205 | case VEC_LSHIFT_EXPR: |
8206 | case VEC_RSHIFT_EXPR: | |
8207 | { | |
8208 | target = expand_vec_shift_expr (ops, target); | |
8209 | return target; | |
8210 | } | |
14a774a9 | 8211 | |
f994f296 MM |
8212 | case VEC_UNPACK_HI_EXPR: |
8213 | case VEC_UNPACK_LO_EXPR: | |
8214 | { | |
8215 | op0 = expand_normal (treeop0); | |
8216 | this_optab = optab_for_tree_code (code, type, optab_default); | |
8217 | temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX, | |
8218 | target, unsignedp); | |
8219 | gcc_assert (temp); | |
8220 | return temp; | |
8221 | } | |
c3d32120 | 8222 | |
f994f296 MM |
8223 | case VEC_UNPACK_FLOAT_HI_EXPR: |
8224 | case VEC_UNPACK_FLOAT_LO_EXPR: | |
8225 | { | |
8226 | op0 = expand_normal (treeop0); | |
8227 | /* The signedness is determined from the input operand. */ | |
8228 | this_optab = optab_for_tree_code (code, | |
8229 | TREE_TYPE (treeop0), | |
8230 | optab_default); | |
8231 | temp = expand_widen_pattern_expr | |
8232 | (ops, op0, NULL_RTX, NULL_RTX, | |
8233 | target, TYPE_UNSIGNED (TREE_TYPE (treeop0))); | |
14a774a9 | 8234 | |
f994f296 MM |
8235 | gcc_assert (temp); |
8236 | return temp; | |
8237 | } | |
d6a5ac33 | 8238 | |
f994f296 MM |
8239 | case VEC_WIDEN_MULT_HI_EXPR: |
8240 | case VEC_WIDEN_MULT_LO_EXPR: | |
8241 | { | |
8242 | tree oprnd0 = treeop0; | |
8243 | tree oprnd1 = treeop1; | |
1499e0a8 | 8244 | |
f994f296 MM |
8245 | expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL); |
8246 | target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX, | |
8247 | target, unsignedp); | |
8248 | gcc_assert (target); | |
8249 | return target; | |
8250 | } | |
ed1223ba | 8251 | |
f994f296 MM |
8252 | case VEC_PACK_TRUNC_EXPR: |
8253 | case VEC_PACK_SAT_EXPR: | |
8254 | case VEC_PACK_FIX_TRUNC_EXPR: | |
8255 | mode = TYPE_MODE (TREE_TYPE (treeop0)); | |
8256 | goto binop; | |
bbf6f052 | 8257 | |
f994f296 MM |
8258 | default: |
8259 | gcc_unreachable (); | |
8260 | } | |
d6a5ac33 | 8261 | |
f994f296 MM |
8262 | /* Here to do an ordinary binary operator. */ |
8263 | binop: | |
8264 | expand_operands (treeop0, treeop1, | |
8265 | subtarget, &op0, &op1, EXPAND_NORMAL); | |
8266 | binop2: | |
8267 | this_optab = optab_for_tree_code (code, type, optab_default); | |
8268 | binop3: | |
8269 | if (modifier == EXPAND_STACK_PARM) | |
8270 | target = 0; | |
8271 | temp = expand_binop (mode, this_optab, op0, op1, target, | |
8272 | unsignedp, OPTAB_LIB_WIDEN); | |
8273 | gcc_assert (temp); | |
8274 | return REDUCE_BIT_FIELD (temp); | |
8275 | } | |
8276 | #undef REDUCE_BIT_FIELD | |
7f62854a | 8277 | |
28ed065e | 8278 | rtx |
f994f296 MM |
8279 | expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, |
8280 | enum expand_modifier modifier, rtx *alt_rtl) | |
8281 | { | |
8282 | rtx op0, op1, temp, decl_rtl; | |
8283 | tree type; | |
8284 | int unsignedp; | |
8285 | enum machine_mode mode; | |
8286 | enum tree_code code = TREE_CODE (exp); | |
8287 | optab this_optab; | |
8288 | rtx subtarget, original_target; | |
8289 | int ignore; | |
8290 | tree context; | |
8291 | bool reduce_bit_field; | |
8292 | location_t loc = EXPR_LOCATION (exp); | |
8293 | struct separate_ops ops; | |
8294 | tree treeop0, treeop1, treeop2; | |
d36d83e9 EB |
8295 | tree ssa_name = NULL_TREE; |
8296 | gimple g; | |
7f62854a | 8297 | |
f994f296 MM |
8298 | type = TREE_TYPE (exp); |
8299 | mode = TYPE_MODE (type); | |
8300 | unsignedp = TYPE_UNSIGNED (type); | |
7f62854a | 8301 | |
f994f296 MM |
8302 | treeop0 = treeop1 = treeop2 = NULL_TREE; |
8303 | if (!VL_EXP_CLASS_P (exp)) | |
8304 | switch (TREE_CODE_LENGTH (code)) | |
8305 | { | |
8306 | default: | |
8307 | case 3: treeop2 = TREE_OPERAND (exp, 2); | |
8308 | case 2: treeop1 = TREE_OPERAND (exp, 1); | |
8309 | case 1: treeop0 = TREE_OPERAND (exp, 0); | |
8310 | case 0: break; | |
8311 | } | |
8312 | ops.code = code; | |
8313 | ops.type = type; | |
8314 | ops.op0 = treeop0; | |
8315 | ops.op1 = treeop1; | |
8316 | ops.op2 = treeop2; | |
8317 | ops.location = loc; | |
12342f90 | 8318 | |
f994f296 MM |
8319 | ignore = (target == const0_rtx |
8320 | || ((CONVERT_EXPR_CODE_P (code) | |
8321 | || code == COND_EXPR || code == VIEW_CONVERT_EXPR) | |
8322 | && TREE_CODE (type) == VOID_TYPE)); | |
fdf473ae | 8323 | |
f994f296 MM |
8324 | /* An operation in what may be a bit-field type needs the |
8325 | result to be reduced to the precision of the bit-field type, | |
8326 | which is narrower than that of the type's mode. */ | |
8327 | reduce_bit_field = (!ignore | |
8328 | && TREE_CODE (type) == INTEGER_TYPE | |
8329 | && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type)); | |
12342f90 | 8330 | |
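Editorial aside (not part of expr.c): an example of the reduction that reduce_bit_field requests. Assume a hypothetical 3-bit unsigned bit-field: the arithmetic is done in the containing integer mode, and the result must then be brought back to the field's 3-bit precision (i.e. taken mod 8) before it is stored or compared.

    /* Sketch only, illustrating the precision reduction.  */
    struct s { unsigned int f : 3; };

    unsigned int
    bump (struct s *p)
    {
      p->f = p->f + 1;   /* 7 + 1 wraps to 0: the full-width sum is
                            reduced to the field's 3-bit precision  */
      return p->f;
    }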
f994f296 MM |
8331 | /* If we are going to ignore this result, we need only do something |
8332 | if there is a side-effect somewhere in the expression. If there | |
8333 | is, short-circuit the most common cases here. Note that we must | |
8334 | not call expand_expr with anything but const0_rtx in case this | |
8335 | is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */ | |
d6a5ac33 | 8336 | |
f994f296 MM |
8337 | if (ignore) |
8338 | { | |
8339 | if (! TREE_SIDE_EFFECTS (exp)) | |
8340 | return const0_rtx; | |
8341 | ||
8342 | /* Ensure we reference a volatile object even if the value is ignored, but | |
8343 | don't do this if all we are doing is taking its address. */ | |
8344 | if (TREE_THIS_VOLATILE (exp) | |
8345 | && TREE_CODE (exp) != FUNCTION_DECL | |
8346 | && mode != VOIDmode && mode != BLKmode | |
8347 | && modifier != EXPAND_CONST_ADDRESS) | |
7acda552 | 8348 | { |
f994f296 MM |
8349 | temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier); |
8350 | if (MEM_P (temp)) | |
8351 | temp = copy_to_reg (temp); | |
8352 | return const0_rtx; | |
7acda552 RK |
8353 | } |
8354 | ||
f994f296 MM |
8355 | if (TREE_CODE_CLASS (code) == tcc_unary |
8356 | || code == COMPONENT_REF || code == INDIRECT_REF) | |
8357 | return expand_expr (treeop0, const0_rtx, VOIDmode, | |
8358 | modifier); | |
e675826d | 8359 | |
f994f296 MM |
8360 | else if (TREE_CODE_CLASS (code) == tcc_binary |
8361 | || TREE_CODE_CLASS (code) == tcc_comparison | |
8362 | || code == ARRAY_REF || code == ARRAY_RANGE_REF) | |
8363 | { | |
8364 | expand_expr (treeop0, const0_rtx, VOIDmode, modifier); | |
8365 | expand_expr (treeop1, const0_rtx, VOIDmode, modifier); | |
8366 | return const0_rtx; | |
8367 | } | |
8368 | else if (code == BIT_FIELD_REF) | |
8369 | { | |
8370 | expand_expr (treeop0, const0_rtx, VOIDmode, modifier); | |
8371 | expand_expr (treeop1, const0_rtx, VOIDmode, modifier); | |
8372 | expand_expr (treeop2, const0_rtx, VOIDmode, modifier); | |
8373 | return const0_rtx; | |
8374 | } | |
e675826d | 8375 | |
f994f296 MM |
8376 | target = 0; |
8377 | } | |
e675826d | 8378 | |
f994f296 MM |
8379 | if (reduce_bit_field && modifier == EXPAND_STACK_PARM) |
8380 | target = 0; | |
e675826d | 8381 | |
f994f296 MM |
8382 | /* Use subtarget as the target for operand 0 of a binary operation. */ |
8383 | subtarget = get_subtarget (target); | |
8384 | original_target = target; | |
e675826d | 8385 | |
f994f296 MM |
8386 | switch (code) |
8387 | { | |
8388 | case LABEL_DECL: | |
8389 | { | |
8390 | tree function = decl_function_context (exp); | |
e675826d | 8391 | |
f994f296 MM |
8392 | temp = label_rtx (exp); |
8393 | temp = gen_rtx_LABEL_REF (Pmode, temp); | |
e675826d | 8394 | |
f994f296 MM |
8395 | if (function != current_function_decl |
8396 | && function != 0) | |
8397 | LABEL_REF_NONLOCAL_P (temp) = 1; | |
8398 | ||
8399 | temp = gen_rtx_MEM (FUNCTION_MODE, temp); | |
8400 | return temp; | |
e675826d EB |
8401 | } |
8402 | ||
f994f296 MM |
8403 | case SSA_NAME: |
8404 | /* ??? ivopts calls the expander without any preparation from | |
8405 | out-of-ssa. So fake instructions as if this were an access to the | |
8406 | base variable. This unnecessarily allocates a pseudo; see how we can | |
8407 | reuse it if partition base vars have it set already. */ | |
8408 | if (!currently_expanding_to_rtl) | |
d36d83e9 EB |
8409 | return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier, |
8410 | NULL); | |
8411 | ||
8412 | g = get_gimple_for_ssa_name (exp); | |
8413 | if (g) | |
8414 | return expand_expr_real (gimple_assign_rhs_to_tree (g), target, tmode, | |
8415 | modifier, NULL); | |
8416 | ||
8417 | ssa_name = exp; | |
8418 | decl_rtl = get_rtx_for_ssa_name (ssa_name); | |
8419 | exp = SSA_NAME_VAR (ssa_name); | |
f994f296 | 8420 | goto expand_decl_rtl; |
ed239f5a | 8421 | |
f994f296 MM |
8422 | case PARM_DECL: |
8423 | case VAR_DECL: | |
8424 | /* If a static var's type was incomplete when the decl was written, | |
8425 | but the type is complete now, lay out the decl now. */ | |
8426 | if (DECL_SIZE (exp) == 0 | |
8427 | && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp)) | |
8428 | && (TREE_STATIC (exp) || DECL_EXTERNAL (exp))) | |
8429 | layout_decl (exp, 0); | |
8430 | ||
8431 | /* TLS emulation hook - replace __thread vars with | |
8432 | *__emutls_get_address (&_emutls.var). */ | |
8433 | if (! targetm.have_tls | |
8434 | && TREE_CODE (exp) == VAR_DECL | |
8435 | && DECL_THREAD_LOCAL_P (exp)) | |
0fd662ee | 8436 | { |
f994f296 MM |
8437 | exp = build_fold_indirect_ref_loc (loc, emutls_var_address (exp)); |
8438 | return expand_expr_real_1 (exp, target, tmode, modifier, NULL); | |
0fd662ee | 8439 | } |
ed239f5a | 8440 | |
f994f296 | 8441 | /* ... fall through ... */ |
ed239f5a | 8442 | |
f994f296 MM |
8443 | case FUNCTION_DECL: |
8444 | case RESULT_DECL: | |
8445 | decl_rtl = DECL_RTL (exp); | |
8446 | expand_decl_rtl: | |
8447 | gcc_assert (decl_rtl); | |
8448 | decl_rtl = copy_rtx (decl_rtl); | |
b748fbd6 PB |
8449 | /* Record writes to register variables. */ |
8450 | if (modifier == EXPAND_WRITE && REG_P (decl_rtl) | |
8451 | && REGNO (decl_rtl) < FIRST_PSEUDO_REGISTER) | |
8452 | { | |
8453 | int i = REGNO (decl_rtl); | |
8454 | int nregs = hard_regno_nregs[i][GET_MODE (decl_rtl)]; | |
8455 | while (nregs) | |
8456 | { | |
8457 | SET_HARD_REG_BIT (crtl->asm_clobbers, i); | |
8458 | i++; | |
8459 | nregs--; | |
8460 | } | |
8461 | } | |
ed239f5a | 8462 | |
f994f296 MM |
8463 | /* Ensure the variable is marked as used even if it doesn't go through | |
8464 | a parser. If it hasn't been used yet, write out an external | |
8465 | definition. */ | |
8466 | if (! TREE_USED (exp)) | |
8467 | { | |
8468 | assemble_external (exp); | |
8469 | TREE_USED (exp) = 1; | |
ed239f5a RK |
8470 | } |
8471 | ||
f994f296 MM |
8472 | /* Show we haven't gotten RTL for this yet. */ |
8473 | temp = 0; | |
ed239f5a | 8474 | |
f994f296 MM |
8475 | /* Variables inherited from containing functions should have |
8476 | been lowered by this point. */ | |
8477 | context = decl_function_context (exp); | |
8478 | gcc_assert (!context | |
8479 | || context == current_function_decl | |
8480 | || TREE_STATIC (exp) | |
8481 | /* ??? C++ creates functions that are not TREE_STATIC. */ | |
8482 | || TREE_CODE (exp) == FUNCTION_DECL); | |
ed239f5a | 8483 | |
f994f296 MM |
8484 | /* This is the case of an array whose size is to be determined |
8485 | from its initializer, while the initializer is still being parsed. | |
8486 | See expand_decl. */ | |
c11c10d8 | 8487 | |
f994f296 MM |
8488 | if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0))) |
8489 | temp = validize_mem (decl_rtl); | |
ed239f5a | 8490 | |
f994f296 MM |
8491 | /* If DECL_RTL is memory, we are in the normal case and the |
8492 | address is not valid, get the address into a register. */ | |
0fb7aeda | 8493 | |
f994f296 MM |
8494 | else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER) |
8495 | { | |
8496 | if (alt_rtl) | |
8497 | *alt_rtl = decl_rtl; | |
8498 | decl_rtl = use_anchored_address (decl_rtl); | |
8499 | if (modifier != EXPAND_CONST_ADDRESS | |
8500 | && modifier != EXPAND_SUM | |
09e881c9 BE |
8501 | && !memory_address_addr_space_p (DECL_MODE (exp), |
8502 | XEXP (decl_rtl, 0), | |
8503 | MEM_ADDR_SPACE (decl_rtl))) | |
f994f296 MM |
8504 | temp = replace_equiv_address (decl_rtl, |
8505 | copy_rtx (XEXP (decl_rtl, 0))); | |
ed239f5a RK |
8506 | } |
8507 | ||
f994f296 MM |
8508 | /* If we got something, return it. But first, set the alignment |
8509 | if the address is a register. */ | |
8510 | if (temp != 0) | |
8511 | { | |
8512 | if (MEM_P (temp) && REG_P (XEXP (temp, 0))) | |
8513 | mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp)); | |
ed239f5a | 8514 | |
f994f296 MM |
8515 | return temp; |
8516 | } | |
5be014d5 | 8517 | |
f994f296 MM |
8518 | /* If the mode of DECL_RTL does not match that of the decl, it |
8519 | must be a promoted value. We return a SUBREG of the wanted mode, | |
8520 | but mark it so that we know that it was already extended. */ | |
d36d83e9 | 8521 | if (REG_P (decl_rtl) && GET_MODE (decl_rtl) != DECL_MODE (exp)) |
7f9844ca | 8522 | { |
f994f296 | 8523 | enum machine_mode pmode; |
7f9844ca | 8524 | |
d36d83e9 EB |
8525 | /* Get the signedness to be used for this variable. Ensure we get |
8526 | the same mode we got when the variable was declared. */ | |
8527 | if (code == SSA_NAME | |
8528 | && (g = SSA_NAME_DEF_STMT (ssa_name)) | |
8529 | && gimple_code (g) == GIMPLE_CALL) | |
8530 | pmode = promote_function_mode (type, mode, &unsignedp, | |
8531 | TREE_TYPE | |
8532 | (TREE_TYPE (gimple_call_fn (g))), | |
8533 | 2); | |
8534 | else | |
8535 | pmode = promote_decl_mode (exp, &unsignedp); | |
f994f296 MM |
8536 | gcc_assert (GET_MODE (decl_rtl) == pmode); |
8537 | ||
8538 | temp = gen_lowpart_SUBREG (mode, decl_rtl); | |
8539 | SUBREG_PROMOTED_VAR_P (temp) = 1; | |
8540 | SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp); | |
8541 | return temp; | |
7f9844ca RS |
8542 | } |
8543 | ||
f994f296 | 8544 | return decl_rtl; |
bbf6f052 | 8545 | |
f994f296 MM |
8546 | case INTEGER_CST: |
8547 | temp = immed_double_const (TREE_INT_CST_LOW (exp), | |
8548 | TREE_INT_CST_HIGH (exp), mode); | |
bbf6f052 | 8549 | |
f994f296 | 8550 | return temp; |
bbf6f052 | 8551 | |
f994f296 MM |
8552 | case VECTOR_CST: |
8553 | { | |
8554 | tree tmp = NULL_TREE; | |
8555 | if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT | |
8556 | || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT | |
8557 | || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT | |
8558 | || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT | |
8559 | || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM | |
8560 | || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM) | |
8561 | return const_vector_from_tree (exp); | |
8562 | if (GET_MODE_CLASS (mode) == MODE_INT) | |
8563 | { | |
8564 | tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1); | |
8565 | if (type_for_mode) | |
8566 | tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp); | |
8567 | } | |
8568 | if (!tmp) | |
8569 | tmp = build_constructor_from_list (type, | |
8570 | TREE_VECTOR_CST_ELTS (exp)); | |
8571 | return expand_expr (tmp, ignore ? const0_rtx : target, | |
8572 | tmode, modifier); | |
8573 | } | |
bbf6f052 | 8574 | |
f994f296 MM |
8575 | case CONST_DECL: |
8576 | return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier); | |
bbf6f052 | 8577 | |
f994f296 MM |
8578 | case REAL_CST: |
8579 | /* If optimized, generate immediate CONST_DOUBLE | |
8580 | which will be turned into memory by reload if necessary. | |
cbbc503e | 8581 | |
f994f296 MM |
8582 | We used to force a register so that loop.c could see it. But |
8583 | this does not allow gen_* patterns to perform optimizations with | |
8584 | the constants. It also produces two insns in cases like "x = 1.0;". | |
8585 | On most machines, floating-point constants are not permitted in | |
8586 | many insns, so we'd end up copying it to a register in any case. | |
bbf6f052 | 8587 | |
f994f296 MM |
8588 | Now, we do the copying in expand_binop, if appropriate. */ |
8589 | return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp), | |
8590 | TYPE_MODE (TREE_TYPE (exp))); | |
cbbc503e | 8591 | |
f994f296 MM |
8592 | case FIXED_CST: |
8593 | return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp), | |
8594 | TYPE_MODE (TREE_TYPE (exp))); | |
bbf6f052 | 8595 | |
f994f296 MM |
8596 | case COMPLEX_CST: |
8597 | /* Handle evaluating a complex constant in a CONCAT target. */ | |
8598 | if (original_target && GET_CODE (original_target) == CONCAT) | |
4ef7870a | 8599 | { |
f994f296 MM |
8600 | enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp))); |
8601 | rtx rtarg, itarg; | |
8602 | ||
8603 | rtarg = XEXP (original_target, 0); | |
8604 | itarg = XEXP (original_target, 1); | |
8605 | ||
8606 | /* Move the real and imaginary parts separately. */ | |
8607 | op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL); | |
8608 | op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL); | |
8609 | ||
8610 | if (op0 != rtarg) | |
8611 | emit_move_insn (rtarg, op0); | |
8612 | if (op1 != itarg) | |
8613 | emit_move_insn (itarg, op1); | |
8614 | ||
8615 | return original_target; | |
4ef7870a | 8616 | } |
bbf6f052 | 8617 | |
f994f296 | 8618 | /* ... fall through ... */ |
bbf6f052 | 8619 | |
f994f296 MM |
8620 | case STRING_CST: |
8621 | temp = expand_expr_constant (exp, 1, modifier); | |
14661f36 | 8622 | |
f994f296 MM |
8623 | /* temp contains a constant address. |
8624 | On RISC machines where a constant address isn't valid, | |
8625 | make some insns to get that address into a register. */ | |
8626 | if (modifier != EXPAND_CONST_ADDRESS | |
8627 | && modifier != EXPAND_INITIALIZER | |
8628 | && modifier != EXPAND_SUM | |
09e881c9 BE |
8629 | && ! memory_address_addr_space_p (mode, XEXP (temp, 0), |
8630 | MEM_ADDR_SPACE (temp))) | |
f994f296 MM |
8631 | return replace_equiv_address (temp, |
8632 | copy_rtx (XEXP (temp, 0))); | |
8633 | return temp; | |
14661f36 | 8634 | |
f994f296 MM |
8635 | case SAVE_EXPR: |
8636 | { | |
8637 | tree val = treeop0; | |
8638 | rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl); | |
ea87523e | 8639 | |
f994f296 MM |
8640 | if (!SAVE_EXPR_RESOLVED_P (exp)) |
8641 | { | |
8642 | /* We can indeed still hit this case, typically via builtin | |
8643 | expanders calling save_expr immediately before expanding | |
8644 | something. Assume this means that we only have to deal | |
8645 | with non-BLKmode values. */ | |
8646 | gcc_assert (GET_MODE (ret) != BLKmode); | |
ae431183 | 8647 | |
f994f296 MM |
8648 | val = build_decl (EXPR_LOCATION (exp), |
8649 | VAR_DECL, NULL, TREE_TYPE (exp)); | |
8650 | DECL_ARTIFICIAL (val) = 1; | |
8651 | DECL_IGNORED_P (val) = 1; | |
8652 | treeop0 = val; | |
8653 | TREE_OPERAND (exp, 0) = treeop0; | |
8654 | SAVE_EXPR_RESOLVED_P (exp) = 1; | |
1717e19e | 8655 | |
f994f296 MM |
8656 | if (!CONSTANT_P (ret)) |
8657 | ret = copy_to_reg (ret); | |
8658 | SET_DECL_RTL (val, ret); | |
8659 | } | |
1717e19e | 8660 | |
f994f296 MM |
8661 | return ret; |
8662 | } | |
8663 | ||
f994f296 MM |
8664 | |
8665 | case CONSTRUCTOR: | |
8666 | /* If we don't need the result, just ensure we evaluate any | |
8667 | subexpressions. */ | |
8668 | if (ignore) | |
1717e19e | 8669 | { |
f994f296 MM |
8670 | unsigned HOST_WIDE_INT idx; |
8671 | tree value; | |
8672 | ||
8673 | FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value) | |
8674 | expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL); | |
8675 | ||
8676 | return const0_rtx; | |
1717e19e UW |
8677 | } |
8678 | ||
f994f296 | 8679 | return expand_constructor (exp, target, modifier, false); |
bbf6f052 | 8680 | |
f994f296 MM |
8681 | case MISALIGNED_INDIRECT_REF: |
8682 | case ALIGN_INDIRECT_REF: | |
8683 | case INDIRECT_REF: | |
8684 | { | |
8685 | tree exp1 = treeop0; | |
09e881c9 | 8686 | addr_space_t as = ADDR_SPACE_GENERIC; |
d4ebfa65 | 8687 | enum machine_mode address_mode = Pmode; |
0f996086 | 8688 | |
f994f296 MM |
8689 | if (modifier != EXPAND_WRITE) |
8690 | { | |
8691 | tree t; | |
bbf6f052 | 8692 | |
f994f296 MM |
8693 | t = fold_read_from_constant_string (exp); |
8694 | if (t) | |
8695 | return expand_expr (t, target, tmode, modifier); | |
8696 | } | |
bbf6f052 | 8697 | |
09e881c9 | 8698 | if (POINTER_TYPE_P (TREE_TYPE (exp1))) |
d4ebfa65 BE |
8699 | { |
8700 | as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp1))); | |
8701 | address_mode = targetm.addr_space.address_mode (as); | |
8702 | } | |
09e881c9 | 8703 | |
f994f296 | 8704 | op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM); |
09e881c9 | 8705 | op0 = memory_address_addr_space (mode, op0, as); |
48a5f2fa | 8706 | |
f994f296 MM |
8707 | if (code == ALIGN_INDIRECT_REF) |
8708 | { | |
8709 | int align = TYPE_ALIGN_UNIT (type); | |
d4ebfa65 | 8710 | op0 = gen_rtx_AND (address_mode, op0, GEN_INT (-align)); |
09e881c9 | 8711 | op0 = memory_address_addr_space (mode, op0, as); |
f994f296 | 8712 | } |
bbf6f052 | 8713 | |
f994f296 | 8714 | temp = gen_rtx_MEM (mode, op0); |
bbf6f052 | 8715 | |
f994f296 | 8716 | set_mem_attributes (temp, exp, 0); |
09e881c9 | 8717 | set_mem_addr_space (temp, as); |
bbf6f052 | 8718 | |
f994f296 MM |
8719 | /* Resolve the misalignment now, so that we don't have to remember |
8720 | to resolve it later. Of course, this only works for reads. */ | |
8721 | if (code == MISALIGNED_INDIRECT_REF) | |
8722 | { | |
8723 | int icode; | |
8724 | rtx reg, insn; | |
8403445a | 8725 | |
f994f296 MM |
8726 | gcc_assert (modifier == EXPAND_NORMAL |
8727 | || modifier == EXPAND_STACK_PARM); | |
8b44057d | 8728 | |
f994f296 MM |
8729 | /* The vectorizer should have already checked the mode. */ |
8730 | icode = optab_handler (movmisalign_optab, mode)->insn_code; | |
8731 | gcc_assert (icode != CODE_FOR_nothing); | |
8c7926c4 | 8732 | |
f994f296 MM |
8733 | /* We've already validated the memory, and we're creating a |
8734 | new pseudo destination. The predicates really can't fail. */ | |
8735 | reg = gen_reg_rtx (mode); | |
8b44057d | 8736 | |
f994f296 MM |
8737 | /* Nor can the insn generator. */ |
8738 | insn = GEN_FCN (icode) (reg, temp); | |
8739 | emit_insn (insn); | |
888d65b5 | 8740 | |
f994f296 MM |
8741 | return reg; |
8742 | } | |
8743 | ||
8744 | return temp; | |
8745 | } | |
8746 | ||
8747 | case TARGET_MEM_REF: | |
8748 | { | |
09e881c9 | 8749 | addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp)); |
f994f296 | 8750 | struct mem_address addr; |
83a416b0 | 8751 | tree base; |
f994f296 MM |
8752 | |
8753 | get_address_description (exp, &addr); | |
d4ebfa65 | 8754 | op0 = addr_for_mem_ref (&addr, as, true); |
09e881c9 | 8755 | op0 = memory_address_addr_space (mode, op0, as); |
f994f296 MM |
8756 | temp = gen_rtx_MEM (mode, op0); |
8757 | set_mem_attributes (temp, TMR_ORIGINAL (exp), 0); | |
09e881c9 | 8758 | set_mem_addr_space (temp, as); |
83a416b0 RG |
8759 | base = get_base_address (TMR_ORIGINAL (exp)); |
8760 | if (INDIRECT_REF_P (base) | |
8761 | && TMR_BASE (exp) | |
8762 | && TREE_CODE (TMR_BASE (exp)) == SSA_NAME | |
8763 | && POINTER_TYPE_P (TREE_TYPE (TMR_BASE (exp)))) | |
8764 | { | |
8765 | set_mem_expr (temp, build1 (INDIRECT_REF, | |
8766 | TREE_TYPE (exp), TMR_BASE (exp))); | |
8767 | set_mem_offset (temp, NULL_RTX); | |
8768 | } | |
f994f296 MM |
8769 | } |
8770 | return temp; | |
8771 | ||
70f34814 RG |
8772 | case MEM_REF: |
8773 | { | |
8774 | addr_space_t as | |
8775 | = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1)))); | |
8776 | enum machine_mode address_mode; | |
8777 | tree base = TREE_OPERAND (exp, 0); | |
8778 | /* Handle expansion of non-aliased memory with non-BLKmode. That | |
8779 | might end up in a register. */ | |
8780 | if (TREE_CODE (base) == ADDR_EXPR) | |
8781 | { | |
8782 | HOST_WIDE_INT offset = mem_ref_offset (exp).low; | |
8783 | tree bit_offset; | |
8784 | base = TREE_OPERAND (base, 0); | |
8785 | if (!DECL_P (base)) | |
8786 | { | |
8787 | HOST_WIDE_INT off; | |
8788 | base = get_addr_base_and_unit_offset (base, &off); | |
8789 | gcc_assert (base); | |
8790 | offset += off; | |
8791 | } | |
8792 | /* If we are expanding a MEM_REF of a non-BLKmode non-addressable | |
8793 | decl we must use bitfield operations. */ | |
8794 | if (DECL_P (base) | |
8795 | && !TREE_ADDRESSABLE (base) | |
8796 | && DECL_MODE (base) != BLKmode | |
8797 | && DECL_RTL_SET_P (base) | |
8798 | && !MEM_P (DECL_RTL (base))) | |
8799 | { | |
8800 | tree bftype; | |
8801 | if (offset == 0 | |
8802 | && host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1) | |
8803 | && (GET_MODE_BITSIZE (DECL_MODE (base)) | |
8804 | == TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))))) | |
8805 | return expand_expr (build1 (VIEW_CONVERT_EXPR, | |
8806 | TREE_TYPE (exp), base), | |
8807 | target, tmode, modifier); | |
8808 | bit_offset = bitsize_int (offset * BITS_PER_UNIT); | |
8809 | bftype = TREE_TYPE (base); | |
8810 | if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode) | |
8811 | bftype = TREE_TYPE (exp); | |
8812 | return expand_expr (build3 (BIT_FIELD_REF, bftype, | |
8813 | base, | |
8814 | TYPE_SIZE (TREE_TYPE (exp)), | |
8815 | bit_offset), | |
8816 | target, tmode, modifier); | |
8817 | } | |
8818 | } | |
8819 | address_mode = targetm.addr_space.address_mode (as); | |
8820 | op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, address_mode, | |
8821 | EXPAND_NORMAL); | |
8822 | if (!integer_zerop (TREE_OPERAND (exp, 1))) | |
8823 | { | |
8824 | rtx off; | |
8825 | off = immed_double_int_const (mem_ref_offset (exp), address_mode); | |
8826 | op0 = simplify_gen_binary (PLUS, address_mode, op0, off); | |
8827 | } | |
8828 | op0 = memory_address_addr_space (mode, op0, as); | |
8829 | temp = gen_rtx_MEM (mode, op0); | |
8830 | set_mem_attributes (temp, exp, 0); | |
8831 | set_mem_addr_space (temp, as); | |
8832 | if (TREE_THIS_VOLATILE (exp)) | |
8833 | MEM_VOLATILE_P (temp) = 1; | |
8834 | return temp; | |
8835 | } | |
8836 | ||
f994f296 MM |
8837 | case ARRAY_REF: |
8838 | ||
8839 | { | |
8840 | tree array = treeop0; | |
8841 | tree index = treeop1; | |
8842 | ||
8843 | /* Fold an expression like: "foo"[2]. | |
8844 | This is not done in fold so it won't happen inside &. | |
8845 | Don't fold if this is for wide characters since it's too | |
8846 | difficult to do correctly and this is a very rare case. */ | |
8847 | ||
8848 | if (modifier != EXPAND_CONST_ADDRESS | |
8849 | && modifier != EXPAND_INITIALIZER | |
8850 | && modifier != EXPAND_MEMORY) | |
8851 | { | |
8852 | tree t = fold_read_from_constant_string (exp); | |
8853 | ||
8854 | if (t) | |
8855 | return expand_expr (t, target, tmode, modifier); | |
8856 | } | |
8857 | ||
8858 | /* If this is a constant index into a constant array, | |
8859 | just get the value from the array. Handle both the cases when | |
8860 | we have an explicit constructor and when our operand is a variable | |
8861 | that was declared const. */ | |
8862 | ||
8863 | if (modifier != EXPAND_CONST_ADDRESS | |
8864 | && modifier != EXPAND_INITIALIZER | |
8865 | && modifier != EXPAND_MEMORY | |
8866 | && TREE_CODE (array) == CONSTRUCTOR | |
8867 | && ! TREE_SIDE_EFFECTS (array) | |
8868 | && TREE_CODE (index) == INTEGER_CST) | |
8869 | { | |
8870 | unsigned HOST_WIDE_INT ix; | |
8871 | tree field, value; | |
8872 | ||
8873 | FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix, | |
8874 | field, value) | |
8875 | if (tree_int_cst_equal (field, index)) | |
b10af0c8 | 8876 | { |
f994f296 MM |
8877 | if (!TREE_SIDE_EFFECTS (value)) |
8878 | return expand_expr (fold (value), target, tmode, modifier); | |
8879 | break; | |
b10af0c8 | 8880 | } |
f994f296 | 8881 | } |
bbf6f052 | 8882 | |
f994f296 MM |
8883 | else if (optimize >= 1 |
8884 | && modifier != EXPAND_CONST_ADDRESS | |
8885 | && modifier != EXPAND_INITIALIZER | |
8886 | && modifier != EXPAND_MEMORY | |
8887 | && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array) | |
8888 | && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array) | |
8889 | && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK | |
8890 | && targetm.binds_local_p (array)) | |
8891 | { | |
8892 | if (TREE_CODE (index) == INTEGER_CST) | |
8893 | { | |
8894 | tree init = DECL_INITIAL (array); | |
0f996086 | 8895 | |
f994f296 MM |
8896 | if (TREE_CODE (init) == CONSTRUCTOR) |
8897 | { | |
8898 | unsigned HOST_WIDE_INT ix; | |
8899 | tree field, value; | |
bbf6f052 | 8900 | |
f994f296 MM |
8901 | FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix, |
8902 | field, value) | |
8903 | if (tree_int_cst_equal (field, index)) | |
8904 | { | |
8905 | if (TREE_SIDE_EFFECTS (value)) | |
8906 | break; | |
bbf6f052 | 8907 | |
f994f296 MM |
8908 | if (TREE_CODE (value) == CONSTRUCTOR) |
8909 | { | |
8910 | /* If VALUE is a CONSTRUCTOR, this | |
8911 | optimization is only useful if | |
8912 | this doesn't store the CONSTRUCTOR | |
8913 | into memory. If it does, it is more | |
8914 | efficient to just load the data from | |
8915 | the array directly. */ | |
8916 | rtx ret = expand_constructor (value, target, | |
8917 | modifier, true); | |
8918 | if (ret == NULL_RTX) | |
8919 | break; | |
8920 | } | |
bbf6f052 | 8921 | |
f994f296 MM |
8922 | return expand_expr (fold (value), target, tmode, |
8923 | modifier); | |
8924 | } | |
8925 | } | |
8926 | else if(TREE_CODE (init) == STRING_CST) | |
8927 | { | |
8928 | tree index1 = index; | |
8929 | tree low_bound = array_ref_low_bound (exp); | |
8930 | index1 = fold_convert_loc (loc, sizetype, | |
8931 | treeop1); | |
0f996086 | 8932 | |
f994f296 | 8933 | /* Optimize the special-case of a zero lower bound. |
0f996086 | 8934 | |
f994f296 MM |
8935 | We convert the low_bound to sizetype to avoid some problems |
8936 | with constant folding. (E.g. suppose the lower bound is 1, | |
8937 | and its mode is QI. Without the conversion, (ARRAY | |
8938 | +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1)) | |
8939 | +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */ | |
bbf6f052 | 8940 | |
f994f296 MM |
8941 | if (! integer_zerop (low_bound)) |
8942 | index1 = size_diffop_loc (loc, index1, | |
8943 | fold_convert_loc (loc, sizetype, | |
8944 | low_bound)); | |
bbf6f052 | 8945 | |
f994f296 MM |
8946 | if (0 > compare_tree_int (index1, |
8947 | TREE_STRING_LENGTH (init))) | |
8948 | { | |
8949 | tree type = TREE_TYPE (TREE_TYPE (init)); | |
8950 | enum machine_mode mode = TYPE_MODE (type); | |
bbf6f052 | 8951 | |
f994f296 MM |
8952 | if (GET_MODE_CLASS (mode) == MODE_INT |
8953 | && GET_MODE_SIZE (mode) == 1) | |
8954 | return gen_int_mode (TREE_STRING_POINTER (init) | |
8955 | [TREE_INT_CST_LOW (index1)], | |
8956 | mode); | |
8957 | } | |
8958 | } | |
8959 | } | |
8960 | } | |
8961 | } | |
8962 | goto normal_inner_ref; | |
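Editorial aside (not part of expr.c): examples of the constant reads folded above. A string literal indexed by a constant is resolved by fold_read_from_constant_string; with optimization, a read-only local array whose DECL_INITIAL is a STRING_CST or CONSTRUCTOR is read straight from its initializer. Names below are illustrative.

    /* Sketch only.  */
    static const char msg[] = "expand";

    char
    read_const_elt (void)
    {
      char a = "foo"[2];   /* folded to 'o' even without optimization  */
      char b = msg[1];     /* folded to 'x' when optimize >= 1          */
      return a ^ b;
    }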
8963 | ||
8964 | case COMPONENT_REF: | |
8965 | /* If the operand is a CONSTRUCTOR, we can just extract the | |
8966 | appropriate field if it is present. */ | |
8967 | if (TREE_CODE (treeop0) == CONSTRUCTOR) | |
8968 | { | |
8969 | unsigned HOST_WIDE_INT idx; | |
8970 | tree field, value; | |
8971 | ||
8972 | FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0), | |
8973 | idx, field, value) | |
8974 | if (field == treeop1 | |
8975 | /* We can normally use the value of the field in the | |
8976 | CONSTRUCTOR. However, if this is a bitfield in | |
8977 | an integral mode that we can fit in a HOST_WIDE_INT, | |
8978 | we must mask only the number of bits in the bitfield, | |
8979 | since this is done implicitly by the constructor. If | |
8980 | the bitfield does not meet either of those conditions, | |
8981 | we can't do this optimization. */ | |
8982 | && (! DECL_BIT_FIELD (field) | |
8983 | || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT) | |
8984 | && (GET_MODE_BITSIZE (DECL_MODE (field)) | |
8985 | <= HOST_BITS_PER_WIDE_INT)))) | |
8986 | { | |
8987 | if (DECL_BIT_FIELD (field) | |
8988 | && modifier == EXPAND_STACK_PARM) | |
8989 | target = 0; | |
8990 | op0 = expand_expr (value, target, tmode, modifier); | |
8991 | if (DECL_BIT_FIELD (field)) | |
8992 | { | |
8993 | HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)); | |
8994 | enum machine_mode imode = TYPE_MODE (TREE_TYPE (field)); | |
8995 | ||
8996 | if (TYPE_UNSIGNED (TREE_TYPE (field))) | |
8997 | { | |
8998 | op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1); | |
8999 | op0 = expand_and (imode, op0, op1, target); | |
9000 | } | |
9001 | else | |
9002 | { | |
9003 | tree count | |
9004 | = build_int_cst (NULL_TREE, | |
9005 | GET_MODE_BITSIZE (imode) - bitsize); | |
9006 | ||
9007 | op0 = expand_shift (LSHIFT_EXPR, imode, op0, count, | |
9008 | target, 0); | |
9009 | op0 = expand_shift (RSHIFT_EXPR, imode, op0, count, | |
9010 | target, 0); | |
9011 | } | |
9012 | } | |
9013 | ||
9014 | return op0; | |
9015 | } | |
9016 | } | |
9017 | goto normal_inner_ref; | |
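Editorial aside (not part of expr.c): the pair of expand_shift calls above sign-extends a signed bit-field pulled out of a CONSTRUCTOR by shifting it to the top of its mode and back down. A C sketch of the same trick, assuming a 32-bit mode and an arithmetic right shift (which is what the generated shifts rely on):

    /* Sketch only: sign-extend the low BITS bits of X in a 32-bit mode.  */
    static int
    sign_extend_low_bits (int x, int bits)
    {
      int count = 32 - bits;     /* GET_MODE_BITSIZE (imode) - bitsize  */
      return ((int) ((unsigned int) x << count)) >> count;
                                 /* shift left, then arithmetic right   */
    }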
9018 | ||
9019 | case BIT_FIELD_REF: | |
9020 | case ARRAY_RANGE_REF: | |
9021 | normal_inner_ref: | |
9022 | { | |
9023 | enum machine_mode mode1, mode2; | |
9024 | HOST_WIDE_INT bitsize, bitpos; | |
9025 | tree offset; | |
9026 | int volatilep = 0, must_force_mem; | |
9027 | tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset, | |
9028 | &mode1, &unsignedp, &volatilep, true); | |
9029 | rtx orig_op0, memloc; | |
9030 | ||
9031 | /* If we got back the original object, something is wrong. Perhaps | |
9032 | we are evaluating an expression too early. In any event, don't | |
9033 | infinitely recurse. */ | |
9034 | gcc_assert (tem != exp); | |
9035 | ||
9036 | /* If TEM's type is a union of variable size, pass TARGET to the inner | |
9037 | computation, since it will need a temporary and TARGET is known | |
9038 | to suffice. This occurs in unchecked conversion in Ada. */ | |
9039 | orig_op0 = op0 | |
9040 | = expand_expr (tem, | |
9041 | (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE | |
9042 | && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) | |
9043 | != INTEGER_CST) | |
9044 | && modifier != EXPAND_STACK_PARM | |
9045 | ? target : NULL_RTX), | |
9046 | VOIDmode, | |
9047 | (modifier == EXPAND_INITIALIZER | |
9048 | || modifier == EXPAND_CONST_ADDRESS | |
9049 | || modifier == EXPAND_STACK_PARM) | |
9050 | ? modifier : EXPAND_NORMAL); | |
9051 | ||
6a78b724 DD |
9052 | |
9053 | /* If the bitfield is volatile, we want to access it in the | |
9054 | field's mode, not the computed mode. */ | |
9055 | if (volatilep | |
9056 | && GET_CODE (op0) == MEM | |
9057 | && flag_strict_volatile_bitfields > 0) | |
9058 | op0 = adjust_address (op0, mode1, 0); | |
9059 | ||
f994f296 MM |
9060 | mode2 |
9061 | = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0); | |
9062 | ||
9063 | /* If we have either an offset, a BLKmode result, or a reference | |
9064 | outside the underlying object, we must force it to memory. | |
9065 | Such a case can occur in Ada if we have unchecked conversion | |
9066 | of an expression from a scalar type to an aggregate type or | |
9067 | for an ARRAY_RANGE_REF whose type is BLKmode, or if we were | |
9068 | passed a partially uninitialized object or a view-conversion | |
9069 | to a larger size. */ | |
9070 | must_force_mem = (offset | |
9071 | || mode1 == BLKmode | |
9072 | || bitpos + bitsize > GET_MODE_BITSIZE (mode2)); | |
9073 | ||
9074 | /* Handle CONCAT first. */ | |
9075 | if (GET_CODE (op0) == CONCAT && !must_force_mem) | |
9076 | { | |
9077 | if (bitpos == 0 | |
9078 | && bitsize == GET_MODE_BITSIZE (GET_MODE (op0))) | |
9079 | return op0; | |
9080 | if (bitpos == 0 | |
9081 | && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) | |
9082 | && bitsize) | |
9083 | { | |
9084 | op0 = XEXP (op0, 0); | |
9085 | mode2 = GET_MODE (op0); | |
9086 | } | |
9087 | else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) | |
9088 | && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1))) | |
9089 | && bitpos | |
9090 | && bitsize) | |
9091 | { | |
9092 | op0 = XEXP (op0, 1); | |
9093 | bitpos = 0; | |
9094 | mode2 = GET_MODE (op0); | |
9095 | } | |
9096 | else | |
9097 | /* Otherwise force into memory. */ | |
9098 | must_force_mem = 1; | |
9099 | } | |
9100 | ||
9101 | /* If this is a constant, put it in a register if it is a legitimate | |
9102 | constant and we don't need a memory reference. */ | |
9103 | if (CONSTANT_P (op0) | |
9104 | && mode2 != BLKmode | |
9105 | && LEGITIMATE_CONSTANT_P (op0) | |
9106 | && !must_force_mem) | |
9107 | op0 = force_reg (mode2, op0); | |
9108 | ||
9109 | /* Otherwise, if this is a constant, try to force it to the constant | |
9110 | pool. Note that back-ends, e.g. MIPS, may refuse to do so if it | |
9111 | is a legitimate constant. */ | |
9112 | else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0))) | |
9113 | op0 = validize_mem (memloc); | |
9114 | ||
9115 | /* Otherwise, if this is a constant or the object is not in memory | |
9116 | and need be, put it there. */ | |
9117 | else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem)) | |
9118 | { | |
9119 | tree nt = build_qualified_type (TREE_TYPE (tem), | |
9120 | (TYPE_QUALS (TREE_TYPE (tem)) | |
9121 | | TYPE_QUAL_CONST)); | |
9122 | memloc = assign_temp (nt, 1, 1, 1); | |
9123 | emit_move_insn (memloc, op0); | |
9124 | op0 = memloc; | |
9125 | } | |
9126 | ||
9127 | if (offset) | |
9128 | { | |
d4ebfa65 | 9129 | enum machine_mode address_mode; |
f994f296 MM |
9130 | rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, |
9131 | EXPAND_SUM); | |
9132 | ||
9133 | gcc_assert (MEM_P (op0)); | |
9134 | ||
d4ebfa65 BE |
9135 | address_mode |
9136 | = targetm.addr_space.address_mode (MEM_ADDR_SPACE (op0)); | |
9137 | if (GET_MODE (offset_rtx) != address_mode) | |
9138 | offset_rtx = convert_to_mode (address_mode, offset_rtx, 0); | |
f994f296 MM |
9139 | |
9140 | if (GET_MODE (op0) == BLKmode | |
9141 | /* A constant address in OP0 can have VOIDmode, we must | |
9142 | not try to call force_reg in that case. */ | |
9143 | && GET_MODE (XEXP (op0, 0)) != VOIDmode | |
9144 | && bitsize != 0 | |
9145 | && (bitpos % bitsize) == 0 | |
9146 | && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0 | |
9147 | && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1)) | |
9148 | { | |
9149 | op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT); | |
9150 | bitpos = 0; | |
9151 | } | |
9152 | ||
9153 | op0 = offset_address (op0, offset_rtx, | |
9154 | highest_pow2_factor (offset)); | |
9155 | } | |
9156 | ||
9157 | /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT, | |
9158 | record its alignment as BIGGEST_ALIGNMENT. */ | |
9159 | if (MEM_P (op0) && bitpos == 0 && offset != 0 | |
9160 | && is_aligning_offset (offset, tem)) | |
9161 | set_mem_align (op0, BIGGEST_ALIGNMENT); | |
9162 | ||
9163 | /* Don't forget about volatility even if this is a bitfield. */ | |
9164 | if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0)) | |
9165 | { | |
9166 | if (op0 == orig_op0) | |
9167 | op0 = copy_rtx (op0); | |
9168 | ||
9169 | MEM_VOLATILE_P (op0) = 1; | |
9170 | } | |
9171 | ||
9172 | /* In cases where an aligned union has an unaligned object | |
9173 | as a field, we might be extracting a BLKmode value from | |
9174 | an integer-mode (e.g., SImode) object. Handle this case | |
9175 | by doing the extract into an object as wide as the field | |
9176 | (which we know to be the width of a basic mode), then | |
9177 | storing into memory, and changing the mode to BLKmode. */ | |
9178 | if (mode1 == VOIDmode | |
9179 | || REG_P (op0) || GET_CODE (op0) == SUBREG | |
9180 | || (mode1 != BLKmode && ! direct_load[(int) mode1] | |
9181 | && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT | |
9182 | && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT | |
9183 | && modifier != EXPAND_CONST_ADDRESS | |
9184 | && modifier != EXPAND_INITIALIZER) | |
6a78b724 DD |
9185 | /* If the field is volatile, we always want an aligned |
9186 | access. */ | |
9187 | || (volatilep && flag_strict_volatile_bitfields > 0) | |
f994f296 MM |
9188 | /* If the field isn't aligned enough to fetch as a memref, |
9189 | fetch it as a bit field. */ | |
9190 | || (mode1 != BLKmode | |
9191 | && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode) | |
9192 | || (bitpos % GET_MODE_ALIGNMENT (mode) != 0) | |
9193 | || (MEM_P (op0) | |
9194 | && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1) | |
9195 | || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0)))) | |
9196 | && ((modifier == EXPAND_CONST_ADDRESS | |
9197 | || modifier == EXPAND_INITIALIZER) | |
9198 | ? STRICT_ALIGNMENT | |
9199 | : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0)))) | |
9200 | || (bitpos % BITS_PER_UNIT != 0))) | |
9201 | /* If the type and the field are a constant size and the | |
9202 | size of the type isn't the same size as the bitfield, | |
9203 | we must use bitfield operations. */ | |
9204 | || (bitsize >= 0 | |
9205 | && TYPE_SIZE (TREE_TYPE (exp)) | |
9206 | && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST | |
9207 | && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), | |
9208 | bitsize))) | |
9209 | { | |
9210 | enum machine_mode ext_mode = mode; | |
9211 | ||
9212 | if (ext_mode == BLKmode | |
9213 | && ! (target != 0 && MEM_P (op0) | |
9214 | && MEM_P (target) | |
9215 | && bitpos % BITS_PER_UNIT == 0)) | |
9216 | ext_mode = mode_for_size (bitsize, MODE_INT, 1); | |
9217 | ||
9218 | if (ext_mode == BLKmode) | |
9219 | { | |
9220 | if (target == 0) | |
9221 | target = assign_temp (type, 0, 1, 1); | |
9222 | ||
9223 | if (bitsize == 0) | |
9224 | return target; | |
9225 | ||
9226 | /* In this case, BITPOS must start at a byte boundary and | |
9227 | TARGET, if specified, must be a MEM. */ | |
9228 | gcc_assert (MEM_P (op0) | |
9229 | && (!target || MEM_P (target)) | |
9230 | && !(bitpos % BITS_PER_UNIT)); | |
9231 | ||
9232 | emit_block_move (target, | |
9233 | adjust_address (op0, VOIDmode, | |
9234 | bitpos / BITS_PER_UNIT), | |
9235 | GEN_INT ((bitsize + BITS_PER_UNIT - 1) | |
9236 | / BITS_PER_UNIT), | |
9237 | (modifier == EXPAND_STACK_PARM | |
9238 | ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL)); | |
9239 | ||
9240 | return target; | |
9241 | } | |
9242 | ||
9243 | op0 = validize_mem (op0); | |
9244 | ||
9245 | if (MEM_P (op0) && REG_P (XEXP (op0, 0))) | |
9246 | mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0)); | |
9247 | ||
9248 | op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp, | |
9249 | (modifier == EXPAND_STACK_PARM | |
9250 | ? NULL_RTX : target), | |
9251 | ext_mode, ext_mode); | |
9252 | ||
9253 | /* If the result is a record type and BITSIZE is narrower than | |
9254 | the mode of OP0, an integral mode, and this is a big endian | |
9255 | machine, we must put the field into the high-order bits. */ | |
9256 | if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN | |
9257 | && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT | |
9258 | && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0))) | |
9259 | op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0, | |
9260 | size_int (GET_MODE_BITSIZE (GET_MODE (op0)) | |
9261 | - bitsize), | |
9262 | op0, 1); | |
9263 | ||
9264 | /* If the result type is BLKmode, store the data into a temporary | |
9265 | of the appropriate type, but with the mode corresponding to the | |
9266 | mode for the data we have (op0's mode). It's tempting to make | |
9267 | this a constant type, since we know it's only being stored once, | |
9268 | but that can cause problems if we are taking the address of this | |
9269 | COMPONENT_REF because the MEM of any reference via that address | |
9270 | will have flags corresponding to the type, which will not | |
9271 | necessarily be constant. */ | |
9272 | if (mode == BLKmode) | |
9273 | { | |
9274 | HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode); | |
9275 | rtx new_rtx; | |
9276 | ||
9277 | /* If the reference doesn't use the alias set of its type, | |
9278 | we cannot create the temporary using that type. */ | |
9279 | if (component_uses_parent_alias_set (exp)) | |
9280 | { | |
9281 | new_rtx = assign_stack_local (ext_mode, size, 0); | |
9282 | set_mem_alias_set (new_rtx, get_alias_set (exp)); | |
9283 | } | |
9284 | else | |
9285 | new_rtx = assign_stack_temp_for_type (ext_mode, size, 0, type); | |
bbf6f052 | 9286 | |
f994f296 MM |
9287 | emit_move_insn (new_rtx, op0); |
9288 | op0 = copy_rtx (new_rtx); | |
9289 | PUT_MODE (op0, BLKmode); | |
9290 | set_mem_attributes (op0, exp, 1); | |
9291 | } | |
2d7050fd | 9292 | |
f994f296 MM |
9293 | return op0; |
9294 | } | |
bbf6f052 | 9295 | |
f994f296 MM |
9296 | /* If the result is BLKmode, use that to access the object |
9297 | now as well. */ | |
9298 | if (mode == BLKmode) | |
9299 | mode1 = BLKmode; | |
bbf6f052 | 9300 | |
f994f296 MM |
9301 | /* Get a reference to just this component. */ |
9302 | if (modifier == EXPAND_CONST_ADDRESS | |
9303 | || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER) | |
9304 | op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT); | |
9305 | else | |
9306 | op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT); | |
bbf6f052 | 9307 | |
f994f296 MM |
9308 | if (op0 == orig_op0) |
9309 | op0 = copy_rtx (op0); | |
bbf6f052 | 9310 | |
f994f296 MM |
9311 | set_mem_attributes (op0, exp, 0); |
9312 | if (REG_P (XEXP (op0, 0))) | |
9313 | mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0)); | |
3a94c984 | 9314 | |
f994f296 MM |
9315 | MEM_VOLATILE_P (op0) |= volatilep; |
9316 | if (mode == mode1 || mode1 == BLKmode || mode1 == tmode | |
9317 | || modifier == EXPAND_CONST_ADDRESS | |
9318 | || modifier == EXPAND_INITIALIZER) | |
9319 | return op0; | |
9320 | else if (target == 0) | |
9321 | target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode); | |
fa2981d8 | 9322 | |
f994f296 MM |
9323 | convert_move (target, op0, unsignedp); |
9324 | return target; | |
9325 | } | |
e3be1116 | 9326 | |
f994f296 MM |
9327 | case OBJ_TYPE_REF: |
9328 | return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier); | |
dbedefae | 9329 | |
f994f296 MM |
9330 | case CALL_EXPR: |
9331 | /* All valid uses of __builtin_va_arg_pack () are removed during | |
9332 | inlining. */ | |
9333 | if (CALL_EXPR_VA_ARG_PACK (exp)) | |
9334 | error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp); | |
230dedb3 | 9335 | { |
f994f296 | 9336 | tree fndecl = get_callee_fndecl (exp), attr; |
927630a5 | 9337 | |
f994f296 MM |
9338 | if (fndecl |
9339 | && (attr = lookup_attribute ("error", | |
9340 | DECL_ATTRIBUTES (fndecl))) != NULL) | |
9341 | error ("%Kcall to %qs declared with attribute error: %s", | |
9342 | exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)), | |
9343 | TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)))); | |
9344 | if (fndecl | |
9345 | && (attr = lookup_attribute ("warning", | |
9346 | DECL_ATTRIBUTES (fndecl))) != NULL) | |
9347 | warning_at (tree_nonartificial_location (exp), | |
9348 | 0, "%Kcall to %qs declared with attribute warning: %s", | |
9349 | exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)), | |
9350 | TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)))); | |
927630a5 | 9351 | |
f994f296 MM |
9352 | /* Check for a built-in function. */ |
9353 | if (fndecl && DECL_BUILT_IN (fndecl)) | |
230dedb3 | 9354 | { |
f994f296 MM |
9355 | gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND); |
9356 | return expand_builtin (exp, target, subtarget, tmode, ignore); | |
230dedb3 | 9357 | } |
f994f296 MM |
9358 | } |
9359 | return expand_call (exp, target, ignore); | |
927630a5 | 9360 | |
f994f296 MM |
9361 | case VIEW_CONVERT_EXPR: |
9362 | op0 = NULL_RTX; | |
927630a5 | 9363 | |
f994f296 MM |
9364 | /* If we are converting to BLKmode, try to avoid an intermediate |
9365 | temporary by fetching an inner memory reference. */ | |
9366 | if (mode == BLKmode | |
9367 | && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST | |
9368 | && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode | |
9369 | && handled_component_p (treeop0)) | |
9370 | { | |
9371 | enum machine_mode mode1; | |
9372 | HOST_WIDE_INT bitsize, bitpos; | |
9373 | tree offset; | |
9374 | int unsignedp; | |
9375 | int volatilep = 0; | |
9376 | tree tem | |
9377 | = get_inner_reference (treeop0, &bitsize, &bitpos, | |
9378 | &offset, &mode1, &unsignedp, &volatilep, | |
9379 | true); | |
9380 | rtx orig_op0; | |
927630a5 | 9381 | |
f994f296 MM |
9382 | /* ??? We should work harder and deal with non-zero offsets. */ |
9383 | if (!offset | |
9384 | && (bitpos % BITS_PER_UNIT) == 0 | |
9385 | && bitsize >= 0 | |
9386 | && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0) | |
9387 | { | |
9388 | /* See the normal_inner_ref case for the rationale. */ | |
9389 | orig_op0 | |
9390 | = expand_expr (tem, | |
9391 | (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE | |
9392 | && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) | |
9393 | != INTEGER_CST) | |
9394 | && modifier != EXPAND_STACK_PARM | |
9395 | ? target : NULL_RTX), | |
9396 | VOIDmode, | |
9397 | (modifier == EXPAND_INITIALIZER | |
9398 | || modifier == EXPAND_CONST_ADDRESS | |
9399 | || modifier == EXPAND_STACK_PARM) | |
9400 | ? modifier : EXPAND_NORMAL); | |
927630a5 | 9401 | |
f994f296 | 9402 | if (MEM_P (orig_op0)) |
230dedb3 | 9403 | { |
f994f296 | 9404 | op0 = orig_op0; |
230dedb3 | 9405 | |
f994f296 MM |
9406 | /* Get a reference to just this component. */ |
9407 | if (modifier == EXPAND_CONST_ADDRESS | |
9408 | || modifier == EXPAND_SUM | |
9409 | || modifier == EXPAND_INITIALIZER) | |
9410 | op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT); | |
9411 | else | |
9412 | op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT); | |
d6a5ac33 | 9413 | |
f994f296 MM |
9414 | if (op0 == orig_op0) |
9415 | op0 = copy_rtx (op0); | |
9416 | ||
9417 | set_mem_attributes (op0, treeop0, 0); | |
9418 | if (REG_P (XEXP (op0, 0))) | |
9419 | mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0)); | |
9420 | ||
9421 | MEM_VOLATILE_P (op0) |= volatilep; | |
9422 | } | |
9423 | } | |
230dedb3 | 9424 | } |
bbf6f052 | 9425 | |
f994f296 MM |
9426 | if (!op0) |
9427 | op0 = expand_expr (treeop0, | |
9428 | NULL_RTX, VOIDmode, modifier); | |
9429 | ||
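/* Overview of the fallback chain below, with a hypothetical example (not
   from this file): for VIEW_CONVERT_EXPR<float>(i) on a 32-bit int I,
   SImode and SFmode have the same size, so the bits are reinterpreted with
   gen_lowpart; if instead both types are integral, convert_modes is used;
   failing that, the value is spilled to a stack temporary and re-read in
   the new mode.  */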
9430 | /* If the input and output modes are both the same, we are done. */ | |
9431 | if (mode == GET_MODE (op0)) | |
9432 | ; | |
9433 | /* If neither mode is BLKmode, and both modes are the same size | |
9434 | then we can use gen_lowpart. */ | |
9435 | else if (mode != BLKmode && GET_MODE (op0) != BLKmode | |
9436 | && GET_MODE_SIZE (mode) == GET_MODE_SIZE (GET_MODE (op0)) | |
9437 | && !COMPLEX_MODE_P (GET_MODE (op0))) | |
9438 | { | |
9439 | if (GET_CODE (op0) == SUBREG) | |
9440 | op0 = force_reg (GET_MODE (op0), op0); | |
9441 | op0 = gen_lowpart (mode, op0); | |
9442 | } | |
915f5921 EB |
9443 | /* If both types are integral, convert from one mode to the other. */ |
9444 | else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0))) | |
b8698a0f | 9445 | op0 = convert_modes (mode, GET_MODE (op0), op0, |
f994f296 MM |
9446 | TYPE_UNSIGNED (TREE_TYPE (treeop0))); |
9447 | /* As a last resort, spill op0 to memory, and reload it in a | |
9448 | different mode. */ | |
9449 | else if (!MEM_P (op0)) | |
9450 | { | |
9451 | /* If the operand is not a MEM, force it into memory. Since we | |
9452 | are going to be changing the mode of the MEM, don't call | |
9453 | force_const_mem for constants because we don't allow pool | |
9454 | constants to change mode. */ | |
9455 | tree inner_type = TREE_TYPE (treeop0); | |
9456 | ||
9457 | gcc_assert (!TREE_ADDRESSABLE (exp)); | |
bbf6f052 | 9458 | |
f994f296 MM |
9459 | if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type)) |
9460 | target | |
9461 | = assign_stack_temp_for_type | |
9462 | (TYPE_MODE (inner_type), | |
9463 | GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type); | |
d6a5ac33 | 9464 | |
f994f296 MM |
9465 | emit_move_insn (target, op0); |
9466 | op0 = target; | |
9467 | } | |
d6a5ac33 | 9468 | |
f994f296 MM |
9469 | /* At this point, OP0 is in the correct mode. If the output type is |
9470 | such that the operand is known to be aligned, indicate that it is. | |
9471 | Otherwise, we need only be concerned about alignment for non-BLKmode | |
9472 | results. */ | |
9473 | if (MEM_P (op0)) | |
9474 | { | |
9475 | op0 = copy_rtx (op0); | |
bbf6f052 | 9476 | |
f994f296 MM |
9477 | if (TYPE_ALIGN_OK (type)) |
9478 | set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type))); | |
9479 | else if (STRICT_ALIGNMENT | |
9480 | && mode != BLKmode | |
9481 | && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode)) | |
9482 | { | |
9483 | tree inner_type = TREE_TYPE (treeop0); | |
9484 | HOST_WIDE_INT temp_size | |
9485 | = MAX (int_size_in_bytes (inner_type), | |
9486 | (HOST_WIDE_INT) GET_MODE_SIZE (mode)); | |
9487 | rtx new_rtx | |
9488 | = assign_stack_temp_for_type (mode, temp_size, 0, type); | |
9489 | rtx new_with_op0_mode | |
9490 | = adjust_address (new_rtx, GET_MODE (op0), 0); | |
bbf6f052 | 9491 | |
f994f296 | 9492 | gcc_assert (!TREE_ADDRESSABLE (exp)); |
bbf6f052 | 9493 | |
f994f296 MM |
9494 | if (GET_MODE (op0) == BLKmode) |
9495 | emit_block_move (new_with_op0_mode, op0, | |
9496 | GEN_INT (GET_MODE_SIZE (mode)), | |
9497 | (modifier == EXPAND_STACK_PARM | |
9498 | ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL)); | |
9499 | else | |
9500 | emit_move_insn (new_with_op0_mode, op0); | |
70582b3a | 9501 | |
f994f296 MM |
9502 | op0 = new_rtx; |
9503 | } | |
0f996086 | 9504 | |
f994f296 MM |
9505 | op0 = adjust_address (op0, mode, 0); |
9506 | } | |
bbf6f052 | 9507 | |
f994f296 | 9508 | return op0; |
ce3aea35 PB |
9509 | |
9510 | /* Use a compare and a jump for BLKmode comparisons, or for function | |
9511 | type comparisons if HAVE_canonicalize_funcptr_for_compare. */ | |
d6a5ac33 | 9512 | |
c0285905 JJ |
9513 | /* Although TRUTH_{AND,OR}IF_EXPR aren't present in GIMPLE, they |
9514 | are occasionally created by folding during expansion. */ | |
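/* Sketch of the expansion below for a hypothetical "a && b" whose value is
   needed (illustrative only): store 0 into TARGET, branch around the next
   store with jumpifnot_1 when the condition is false, store 1 into TARGET
   on the fall-through path, then emit the label, leaving the truth value
   in TARGET.  */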
9515 | case TRUTH_ANDIF_EXPR: | |
9516 | case TRUTH_ORIF_EXPR: | |
25f3e06c PB |
9517 | if (! ignore |
9518 | && (target == 0 | |
9519 | || modifier == EXPAND_STACK_PARM | |
b32e7cdb MM |
9520 | || ! safe_from_p (target, treeop0, 1) |
9521 | || ! safe_from_p (target, treeop1, 1) | |
25f3e06c PB |
9522 | /* Make sure we don't have a hard reg (such as function's return |
9523 | value) live across basic blocks, if not optimizing. */ | |
9524 | || (!optimize && REG_P (target) | |
9525 | && REGNO (target) < FIRST_PSEUDO_REGISTER))) | |
9526 | target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode); | |
9527 | ||
9528 | if (target) | |
9529 | emit_move_insn (target, const0_rtx); | |
9530 | ||
9531 | op1 = gen_label_rtx (); | |
40e90eac | 9532 | jumpifnot_1 (code, treeop0, treeop1, op1, -1); |
25f3e06c PB |
9533 | |
9534 | if (target) | |
9535 | emit_move_insn (target, const1_rtx); | |
9536 | ||
9537 | emit_label (op1); | |
9538 | return ignore ? const0_rtx : target; | |
9539 | ||
6de9cd9a DN |
9540 | case STATEMENT_LIST: |
9541 | { | |
9542 | tree_stmt_iterator iter; | |
9543 | ||
5b0264cb | 9544 | gcc_assert (ignore); |
6de9cd9a DN |
9545 | |
9546 | for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter)) | |
9547 | expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier); | |
9548 | } | |
9549 | return const0_rtx; | |
9550 | ||
bbf6f052 | 9551 | case COND_EXPR: |
ba8081eb KH |
9552 | /* A COND_EXPR with its type being VOID_TYPE represents a |
9553 | conditional jump and is handled in | |
9554 | expand_gimple_cond_expr. */ | |
b32e7cdb | 9555 | gcc_assert (!VOID_TYPE_P (type)); |
f676971a | 9556 | |
e5bacf32 PB |
9557 | /* Note that COND_EXPRs whose type is a structure or union |
9558 | are required to be constructed to contain assignments of | |
9559 | a temporary variable, so that we can evaluate them here | |
9560 | for side effect only. If type is void, we must do likewise. */ | |
9561 | ||
5b0264cb NS |
9562 | gcc_assert (!TREE_ADDRESSABLE (type) |
9563 | && !ignore | |
b32e7cdb MM |
9564 | && TREE_TYPE (treeop1) != void_type_node |
9565 | && TREE_TYPE (treeop2) != void_type_node); | |
f676971a | 9566 | |
e5bacf32 PB |
9567 | /* If we are not to produce a result, we have no target. Otherwise, |
9568 | if a target was specified use it; it will not be used as an | |
9569 | intermediate target unless it is safe. If no target, use a | |
9570 | temporary. */ | |
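/* Shape of the code emitted below (illustrative only):

       if (!cond) goto L0;
       TEMP = treeop1; goto L1;
     L0: TEMP = treeop2;
     L1: the result is TEMP

   where TEMP is the caller's target when it is safe to reuse, and a fresh
   temporary otherwise.  */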
f676971a | 9571 | |
e5bacf32 PB |
9572 | if (modifier != EXPAND_STACK_PARM |
9573 | && original_target | |
b32e7cdb | 9574 | && safe_from_p (original_target, treeop0, 1) |
e5bacf32 | 9575 | && GET_MODE (original_target) == mode |
7c00d1fe | 9576 | #ifdef HAVE_conditional_move |
e5bacf32 PB |
9577 | && (! can_conditionally_move_p (mode) |
9578 | || REG_P (original_target)) | |
7c00d1fe | 9579 | #endif |
e5bacf32 PB |
9580 | && !MEM_P (original_target)) |
9581 | temp = original_target; | |
9582 | else | |
9583 | temp = assign_temp (type, 0, 0, 1); | |
f676971a | 9584 | |
e5bacf32 PB |
9585 | do_pending_stack_adjust (); |
9586 | NO_DEFER_POP; | |
9587 | op0 = gen_label_rtx (); | |
9588 | op1 = gen_label_rtx (); | |
40e90eac | 9589 | jumpifnot (treeop0, op0, -1); |
b32e7cdb | 9590 | store_expr (treeop1, temp, |
79f5e442 ZD |
9591 | modifier == EXPAND_STACK_PARM, |
9592 | false); | |
f676971a | 9593 | |
e5bacf32 PB |
9594 | emit_jump_insn (gen_jump (op1)); |
9595 | emit_barrier (); | |
9596 | emit_label (op0); | |
b32e7cdb | 9597 | store_expr (treeop2, temp, |
79f5e442 ZD |
9598 | modifier == EXPAND_STACK_PARM, |
9599 | false); | |
f676971a | 9600 | |
e5bacf32 PB |
9601 | emit_label (op1); |
9602 | OK_DEFER_POP; | |
9603 | return temp; | |
f676971a | 9604 | |
7ce67fbe | 9605 | case VEC_COND_EXPR: |
8e7aa1f9 MM |
9606 | target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target); |
9607 | return target; | |
7ce67fbe | 9608 | |
939409af RS |
9609 | case MODIFY_EXPR: |
9610 | { | |
b32e7cdb MM |
9611 | tree lhs = treeop0; |
9612 | tree rhs = treeop1; | |
df9af2bb KH |
9613 | gcc_assert (ignore); |
9614 | ||
bbf6f052 RK |
9615 | /* Check for |= or &= of a bitfield of size one into another bitfield |
9616 | of size 1. In this case, (unless we need the result of the | |
9617 | assignment) we can do this more efficiently with a | |
9618 | test followed by an assignment, if necessary. | |
9619 | ||
9620 | ??? At this point, we can't get a BIT_FIELD_REF here. But if | |
9621 | things change so we do, this code should be enhanced to | |
9622 | support it. */ | |
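/* Illustrative sketch (hypothetical user code, not from this file):

     struct s { unsigned a : 1, b : 1; } x;
     x.a |= x.b;

   Rather than a read-modify-write of x.a, the code below jumps over the
   store when x.b is already 0 (or, for &=, when x.b is 1) and otherwise
   assigns the constant 1 (or 0) directly to x.a.  */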
df9af2bb | 9623 | if (TREE_CODE (lhs) == COMPONENT_REF |
bbf6f052 RK |
9624 | && (TREE_CODE (rhs) == BIT_IOR_EXPR |
9625 | || TREE_CODE (rhs) == BIT_AND_EXPR) | |
9626 | && TREE_OPERAND (rhs, 0) == lhs | |
9627 | && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF | |
05bccae2 RK |
9628 | && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1))) |
9629 | && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1)))) | |
bbf6f052 RK |
9630 | { |
9631 | rtx label = gen_label_rtx (); | |
3967bc2d | 9632 | int value = TREE_CODE (rhs) == BIT_IOR_EXPR; |
bbf6f052 | 9633 | do_jump (TREE_OPERAND (rhs, 1), |
3967bc2d | 9634 | value ? label : 0, |
40e90eac | 9635 | value ? 0 : label, -1); |
79f5e442 ZD |
9636 | expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value), |
9637 | MOVE_NONTEMPORAL (exp)); | |
e7c33f54 | 9638 | do_pending_stack_adjust (); |
bbf6f052 RK |
9639 | emit_label (label); |
9640 | return const0_rtx; | |
9641 | } | |
9642 | ||
79f5e442 | 9643 | expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp)); |
7f8adc4e | 9644 | return const0_rtx; |
bbf6f052 RK |
9645 | } |
9646 | ||
bbf6f052 | 9647 | case ADDR_EXPR: |
70bb498a | 9648 | return expand_expr_addr_expr (exp, target, tmode, modifier); |
bbf6f052 | 9649 | |
7308a047 | 9650 | case REALPART_EXPR: |
b32e7cdb | 9651 | op0 = expand_normal (treeop0); |
1466e387 | 9652 | return read_complex_part (op0, false); |
3a94c984 | 9653 | |
7308a047 | 9654 | case IMAGPART_EXPR: |
b32e7cdb | 9655 | op0 = expand_normal (treeop0); |
1466e387 | 9656 | return read_complex_part (op0, true); |
7308a047 | 9657 | |
28ed065e MM |
9658 | case RETURN_EXPR: |
9659 | case LABEL_EXPR: | |
9660 | case GOTO_EXPR: | |
9661 | case SWITCH_EXPR: | |
9662 | case ASM_EXPR: | |
28ed065e MM |
9663 | /* Expanded in cfgexpand.c. */ |
9664 | gcc_unreachable (); | |
6de9cd9a | 9665 | |
e976b8b2 | 9666 | case TRY_CATCH_EXPR: |
6de9cd9a | 9667 | case CATCH_EXPR: |
6de9cd9a | 9668 | case EH_FILTER_EXPR: |
b335b813 | 9669 | case TRY_FINALLY_EXPR: |
ac45df5d | 9670 | /* Lowered by tree-eh.c. */ |
5b0264cb | 9671 | gcc_unreachable (); |
b335b813 | 9672 | |
ac45df5d RH |
9673 | case WITH_CLEANUP_EXPR: |
9674 | case CLEANUP_POINT_EXPR: | |
9675 | case TARGET_EXPR: | |
165b54c3 | 9676 | case CASE_LABEL_EXPR: |
77c9db77 | 9677 | case VA_ARG_EXPR: |
caf93cb0 | 9678 | case BIND_EXPR: |
e5bacf32 PB |
9679 | case INIT_EXPR: |
9680 | case CONJ_EXPR: | |
9681 | case COMPOUND_EXPR: | |
9682 | case PREINCREMENT_EXPR: | |
9683 | case PREDECREMENT_EXPR: | |
9684 | case POSTINCREMENT_EXPR: | |
9685 | case POSTDECREMENT_EXPR: | |
9686 | case LOOP_EXPR: | |
9687 | case EXIT_EXPR: | |
ac45df5d | 9688 | /* Lowered by gimplify.c. */ |
5b0264cb | 9689 | gcc_unreachable (); |
b335b813 | 9690 | |
67231816 RH |
9691 | case FDESC_EXPR: |
9692 | /* Function descriptors are not valid except as | |
9693 | initialization constants, and should not be expanded. */ | |
5b0264cb | 9694 | gcc_unreachable (); |
67231816 | 9695 | |
d25cee4d RH |
9696 | case WITH_SIZE_EXPR: |
9697 | /* WITH_SIZE_EXPR expands to its first argument. The caller should | |
9698 | have pulled out the size to use in whatever context it needed. */ | |
b32e7cdb | 9699 | return expand_expr_real (treeop0, original_target, tmode, |
d25cee4d RH |
9700 | modifier, alt_rtl); |
9701 | ||
7ccf35ed DN |
9702 | case REALIGN_LOAD_EXPR: |
9703 | { | |
b32e7cdb MM |
9704 | tree oprnd0 = treeop0; |
9705 | tree oprnd1 = treeop1; | |
9706 | tree oprnd2 = treeop2; | |
7ccf35ed DN |
9707 | rtx op2; |
9708 | ||
71d46ca5 | 9709 | this_optab = optab_for_tree_code (code, type, optab_default); |
84217346 MD |
9710 | expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL); |
9711 | op2 = expand_normal (oprnd2); | |
ed1223ba | 9712 | temp = expand_ternary_op (mode, this_optab, op0, op1, op2, |
7ccf35ed | 9713 | target, unsignedp); |
535a42b1 | 9714 | gcc_assert (temp); |
7ccf35ed DN |
9715 | return temp; |
9716 | } | |
9717 | ||
20f06221 DN |
9718 | case DOT_PROD_EXPR: |
9719 | { | |
b32e7cdb MM |
9720 | tree oprnd0 = treeop0; |
9721 | tree oprnd1 = treeop1; | |
9722 | tree oprnd2 = treeop2; | |
20f06221 DN |
9723 | rtx op2; |
9724 | ||
84217346 MD |
9725 | expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL); |
9726 | op2 = expand_normal (oprnd2); | |
8e7aa1f9 | 9727 | target = expand_widen_pattern_expr (&ops, op0, op1, op2, |
20f06221 DN |
9728 | target, unsignedp); |
9729 | return target; | |
9730 | } | |
9731 | ||
2ec5deb5 PB |
9732 | case COMPOUND_LITERAL_EXPR: |
9733 | { | |
9734 | /* Initialize the anonymous variable declared in the compound | |
9735 | literal, then return the variable. */ | |
9736 | tree decl = COMPOUND_LITERAL_EXPR_DECL (exp); | |
9737 | ||
9738 | /* Create RTL for this variable. */ | |
9739 | if (!DECL_RTL_SET_P (decl)) | |
9740 | { | |
9741 | if (DECL_HARD_REGISTER (decl)) | |
9742 | /* The user specified an assembler name for this variable. | |
9743 | Set that up now. */ | |
9744 | rest_of_decl_compilation (decl, 0, 0); | |
9745 | else | |
9746 | expand_decl (decl); | |
9747 | } | |
9748 | ||
9749 | return expand_expr_real (decl, original_target, tmode, | |
9750 | modifier, alt_rtl); | |
9751 | } | |
9752 | ||
bbf6f052 | 9753 | default: |
f994f296 | 9754 | return expand_expr_real_2 (&ops, target, tmode, modifier); |
bbf6f052 | 9755 | } |
bc15d0ef | 9756 | } |
bc15d0ef JM |
9757 | \f |
9758 | /* Subroutine of above: reduce EXP to the precision of TYPE (in the | |
9759 | signedness of TYPE), possibly returning the result in TARGET. */ | |
9760 | static rtx | |
9761 | reduce_to_bit_field_precision (rtx exp, rtx target, tree type) | |
9762 | { | |
9763 | HOST_WIDE_INT prec = TYPE_PRECISION (type); | |
9764 | if (target && GET_MODE (target) != GET_MODE (exp)) | |
9765 | target = 0; | |
1f2ad84c | 9766 | /* For constant values, reduce using build_int_cst_type. */ |
481683e1 | 9767 | if (CONST_INT_P (exp)) |
1f2ad84c AP |
9768 | { |
9769 | HOST_WIDE_INT value = INTVAL (exp); | |
9770 | tree t = build_int_cst_type (type, value); | |
9771 | return expand_expr (t, target, VOIDmode, EXPAND_NORMAL); | |
9772 | } | |
9773 | else if (TYPE_UNSIGNED (type)) | |
bc15d0ef | 9774 | { |
54fb1ae0 AS |
9775 | rtx mask = immed_double_int_const (double_int_mask (prec), |
9776 | GET_MODE (exp)); | |
bc15d0ef JM |
9777 | return expand_and (GET_MODE (exp), exp, mask, target); |
9778 | } | |
9779 | else | |
9780 | { | |
4a90aeeb | 9781 | tree count = build_int_cst (NULL_TREE, |
7d60be94 | 9782 | GET_MODE_BITSIZE (GET_MODE (exp)) - prec); |
bc15d0ef JM |
9783 | exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0); |
9784 | return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0); | |
9785 | } | |
bbf6f052 | 9786 | } |
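/* Worked example (values chosen for illustration): reducing the SImode
   value 5 (binary 101) to a 3-bit field leaves 5 for an unsigned type
   (5 & 7), but yields -3 for a signed type: the value is shifted left by
   32 - 3 = 29 bits and then arithmetically shifted back, so the top bit of
   the field becomes the sign bit.  */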
b93a436e | 9787 | \f |
1ce7f3c2 RK |
9788 | /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that |
9789 | when applied to the address of EXP produces an address known to be | |
9790 | aligned more than BIGGEST_ALIGNMENT. */ | |
9791 | ||
9792 | static int | |
22ea9ec0 | 9793 | is_aligning_offset (const_tree offset, const_tree exp) |
1ce7f3c2 | 9794 | { |
6fce44af | 9795 | /* Strip off any conversions. */ |
1043771b | 9796 | while (CONVERT_EXPR_P (offset)) |
1ce7f3c2 RK |
9797 | offset = TREE_OPERAND (offset, 0); |
9798 | ||
9799 | /* We must now have a BIT_AND_EXPR with a constant that is one less than | |
9800 | a power of 2 and which is larger than BIGGEST_ALIGNMENT. */ | |
9801 | if (TREE_CODE (offset) != BIT_AND_EXPR | |
9802 | || !host_integerp (TREE_OPERAND (offset, 1), 1) | |
caf93cb0 | 9803 | || compare_tree_int (TREE_OPERAND (offset, 1), |
c0cfc691 | 9804 | BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0 |
1ce7f3c2 RK |
9805 | || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0 | |
9806 | return 0; | |
9807 | ||
9808 | /* Look at the first operand of BIT_AND_EXPR and strip any conversion. | |
9809 | It must be NEGATE_EXPR. Then strip any more conversions. */ | |
9810 | offset = TREE_OPERAND (offset, 0); | |
1043771b | 9811 | while (CONVERT_EXPR_P (offset)) |
1ce7f3c2 RK |
9812 | offset = TREE_OPERAND (offset, 0); |
9813 | ||
9814 | if (TREE_CODE (offset) != NEGATE_EXPR) | |
9815 | return 0; | |
9816 | ||
9817 | offset = TREE_OPERAND (offset, 0); | |
1043771b | 9818 | while (CONVERT_EXPR_P (offset)) |
1ce7f3c2 RK |
9819 | offset = TREE_OPERAND (offset, 0); |
9820 | ||
6fce44af RK |
9821 | /* This must now be the address of EXP. */ |
9822 | return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp; | |
1ce7f3c2 RK |
9823 | } |
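/* The shape recognized above, written as source-level pseudo-code
   (illustrative only):

     offset = (-(intptr_t) &EXP) & C

   where C is a constant of the form 2**N - 1 that is larger than
   BIGGEST_ALIGNMENT / BITS_PER_UNIT; adding such an offset to the address
   of EXP rounds it up to the next (C + 1)-byte boundary.  */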
9824 | \f | |
e0a2f705 | 9825 | /* Return the tree node if ARG corresponds to a string constant, or zero | |
cc2902df | 9826 | if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset |
fed3cef0 RK |
9827 | in bytes within the string that ARG is accessing. The type of the |
9828 | offset will be `sizetype'. */ | |
b93a436e | 9829 | |
28f4ec01 | 9830 | tree |
502b8322 | 9831 | string_constant (tree arg, tree *ptr_offset) |
b93a436e | 9832 | { |
a3de5951 | 9833 | tree array, offset, lower_bound; |
b93a436e JL |
9834 | STRIP_NOPS (arg); |
9835 | ||
a45f71f5 | 9836 | if (TREE_CODE (arg) == ADDR_EXPR) |
b93a436e | 9837 | { |
a45f71f5 JJ |
9838 | if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST) |
9839 | { | |
9840 | *ptr_offset = size_zero_node; | |
9841 | return TREE_OPERAND (arg, 0); | |
9842 | } | |
9843 | else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL) | |
9844 | { | |
9845 | array = TREE_OPERAND (arg, 0); | |
9846 | offset = size_zero_node; | |
9847 | } | |
9848 | else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF) | |
9849 | { | |
9850 | array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0); | |
9851 | offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1); | |
9852 | if (TREE_CODE (array) != STRING_CST | |
9853 | && TREE_CODE (array) != VAR_DECL) | |
9854 | return 0; | |
a3de5951 | 9855 | |
9f5ed61a | 9856 | /* Check if the array has a nonzero lower bound. */ |
a3de5951 AM |
9857 | lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0)); |
9858 | if (!integer_zerop (lower_bound)) | |
9859 | { | |
9860 | /* If the offset and base aren't both constants, return 0. */ | |
9861 | if (TREE_CODE (lower_bound) != INTEGER_CST) | |
9862 | return 0; | |
9863 | if (TREE_CODE (offset) != INTEGER_CST) | |
9864 | return 0; | |
9865 | /* Adjust offset by the lower bound. */ | |
ed1223ba | 9866 | offset = size_diffop (fold_convert (sizetype, offset), |
a3de5951 AM |
9867 | fold_convert (sizetype, lower_bound)); |
9868 | } | |
a45f71f5 JJ |
9869 | } |
9870 | else | |
9871 | return 0; | |
6de9cd9a | 9872 | } |
5be014d5 | 9873 | else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR) |
b93a436e JL |
9874 | { |
9875 | tree arg0 = TREE_OPERAND (arg, 0); | |
9876 | tree arg1 = TREE_OPERAND (arg, 1); | |
9877 | ||
9878 | STRIP_NOPS (arg0); | |
9879 | STRIP_NOPS (arg1); | |
9880 | ||
9881 | if (TREE_CODE (arg0) == ADDR_EXPR | |
a45f71f5 JJ |
9882 | && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST |
9883 | || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL)) | |
bbf6f052 | 9884 | { |
a45f71f5 JJ |
9885 | array = TREE_OPERAND (arg0, 0); |
9886 | offset = arg1; | |
bbf6f052 | 9887 | } |
b93a436e | 9888 | else if (TREE_CODE (arg1) == ADDR_EXPR |
a45f71f5 JJ |
9889 | && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST |
9890 | || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL)) | |
bbf6f052 | 9891 | { |
a45f71f5 JJ |
9892 | array = TREE_OPERAND (arg1, 0); |
9893 | offset = arg0; | |
bbf6f052 | 9894 | } |
a45f71f5 JJ |
9895 | else |
9896 | return 0; | |
9897 | } | |
9898 | else | |
9899 | return 0; | |
9900 | ||
9901 | if (TREE_CODE (array) == STRING_CST) | |
9902 | { | |
3967bc2d | 9903 | *ptr_offset = fold_convert (sizetype, offset); |
a45f71f5 JJ |
9904 | return array; |
9905 | } | |
9906 | else if (TREE_CODE (array) == VAR_DECL) | |
9907 | { | |
9908 | int length; | |
9909 | ||
9910 | /* Variables initialized to string literals can be handled too. */ | |
9911 | if (DECL_INITIAL (array) == NULL_TREE | |
9912 | || TREE_CODE (DECL_INITIAL (array)) != STRING_CST) | |
9913 | return 0; | |
9914 | ||
9915 | /* The array must be read-only, non-volatile and bind locally. */ | |
9916 | if (! TREE_READONLY (array) | |
9917 | || TREE_SIDE_EFFECTS (array) | |
9918 | || ! targetm.binds_local_p (array)) | |
9919 | return 0; | |
9920 | ||
9921 | /* Avoid const char foo[4] = "abcde"; */ | |
9922 | if (DECL_SIZE_UNIT (array) == NULL_TREE | |
9923 | || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST | |
9924 | || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0 | |
9925 | || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0) | |
9926 | return 0; | |
9927 | ||
9928 | /* If the variable is bigger than the string literal, OFFSET must be constant | |
9929 | and within the bounds of the string literal. */ | |
3967bc2d | 9930 | offset = fold_convert (sizetype, offset); |
a45f71f5 JJ |
9931 | if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0 |
9932 | && (! host_integerp (offset, 1) | |
9933 | || compare_tree_int (offset, length) >= 0)) | |
9934 | return 0; | |
9935 | ||
9936 | *ptr_offset = offset; | |
9937 | return DECL_INITIAL (array); | |
b93a436e | 9938 | } |
ca695ac9 | 9939 | |
b93a436e JL |
9940 | return 0; |
9941 | } | |
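/* Illustrative example (hypothetical argument, not from this file): for an
   ARG representing "hello" + 2, i.e. a POINTER_PLUS_EXPR of an ADDR_EXPR
   of the STRING_CST "hello" and the constant 2, the code above returns the
   STRING_CST and sets *PTR_OFFSET to the sizetype constant 2.  */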
ca695ac9 | 9942 | \f |
8e7aa1f9 MM |
9943 | /* Generate code to calculate OPS, an exploded expression | |
9944 | using a store-flag instruction and return an rtx for the result. | |
9945 | OPS reflects a comparison. | |
ca695ac9 | 9946 | |
b93a436e | 9947 | If TARGET is nonzero, store the result there if convenient. |
ca695ac9 | 9948 | |
b93a436e JL |
9949 | Return zero if there is no suitable set-flag instruction |
9950 | available on this machine. | |
ca695ac9 | 9951 | |
b93a436e JL |
9952 | Once expand_expr has been called on the arguments of the comparison, |
9953 | we are committed to doing the store flag, since it is not safe to | |
9954 | re-evaluate the expression. We emit the store-flag insn by calling | |
9955 | emit_store_flag, but only expand the arguments if we have a reason | |
9956 | to believe that emit_store_flag will be successful. If we think that | |
9957 | it will, but it isn't, we have to simulate the store-flag with a | |
9958 | set/jump/set sequence. */ | |
ca695ac9 | 9959 | |
b93a436e | 9960 | static rtx |
8e7aa1f9 | 9961 | do_store_flag (sepops ops, rtx target, enum machine_mode mode) |
b93a436e JL |
9962 | { |
9963 | enum rtx_code code; | |
9964 | tree arg0, arg1, type; | |
9965 | tree tem; | |
9966 | enum machine_mode operand_mode; | |
b93a436e JL |
9967 | int unsignedp; |
9968 | rtx op0, op1; | |
b93a436e | 9969 | rtx subtarget = target; |
8e7aa1f9 | 9970 | location_t loc = ops->location; |
ca695ac9 | 9971 | |
8e7aa1f9 MM |
9972 | arg0 = ops->op0; |
9973 | arg1 = ops->op1; | |
5129d2ce AH |
9974 | |
9975 | /* Don't crash if the comparison was erroneous. */ | |
9976 | if (arg0 == error_mark_node || arg1 == error_mark_node) | |
9977 | return const0_rtx; | |
9978 | ||
b93a436e JL |
9979 | type = TREE_TYPE (arg0); |
9980 | operand_mode = TYPE_MODE (type); | |
8df83eae | 9981 | unsignedp = TYPE_UNSIGNED (type); |
ca695ac9 | 9982 | |
b93a436e JL |
9983 | /* We won't bother with BLKmode store-flag operations because it would mean |
9984 | passing a lot of information to emit_store_flag. */ | |
9985 | if (operand_mode == BLKmode) | |
9986 | return 0; | |
ca695ac9 | 9987 | |
b93a436e JL |
9988 | /* We won't bother with store-flag operations involving function pointers |
9989 | when function pointers must be canonicalized before comparisons. */ | |
9990 | #ifdef HAVE_canonicalize_funcptr_for_compare | |
9991 | if (HAVE_canonicalize_funcptr_for_compare | |
8e7aa1f9 MM |
9992 | && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE |
9993 | && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) | |
b93a436e | 9994 | == FUNCTION_TYPE)) |
8e7aa1f9 MM |
9995 | || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE |
9996 | && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) | |
b93a436e JL |
9997 | == FUNCTION_TYPE)))) |
9998 | return 0; | |
ca695ac9 JB |
9999 | #endif |
10000 | ||
b93a436e JL |
10001 | STRIP_NOPS (arg0); |
10002 | STRIP_NOPS (arg1); | |
ca695ac9 | 10003 | |
b93a436e JL |
10004 | /* Get the rtx comparison code to use. We know that OPS is a comparison | |
10005 | operation of some type. Some comparisons against 1 and -1 can be | |
10006 | converted to comparisons with zero. Do so here so that the tests | |
10007 | below will be aware that we have a comparison with zero. These | |
10008 | tests will not catch constants in the first operand, but constants | |
10009 | are rarely passed as the first operand. */ | |
ca695ac9 | 10010 | |
8e7aa1f9 | 10011 | switch (ops->code) |
b93a436e JL |
10012 | { |
10013 | case EQ_EXPR: | |
10014 | code = EQ; | |
bbf6f052 | 10015 | break; |
b93a436e JL |
10016 | case NE_EXPR: |
10017 | code = NE; | |
bbf6f052 | 10018 | break; |
b93a436e JL |
10019 | case LT_EXPR: |
10020 | if (integer_onep (arg1)) | |
10021 | arg1 = integer_zero_node, code = unsignedp ? LEU : LE; | |
10022 | else | |
10023 | code = unsignedp ? LTU : LT; | |
ca695ac9 | 10024 | break; |
b93a436e JL |
10025 | case LE_EXPR: |
10026 | if (! unsignedp && integer_all_onesp (arg1)) | |
10027 | arg1 = integer_zero_node, code = LT; | |
10028 | else | |
10029 | code = unsignedp ? LEU : LE; | |
ca695ac9 | 10030 | break; |
b93a436e JL |
10031 | case GT_EXPR: |
10032 | if (! unsignedp && integer_all_onesp (arg1)) | |
10033 | arg1 = integer_zero_node, code = GE; | |
10034 | else | |
10035 | code = unsignedp ? GTU : GT; | |
10036 | break; | |
10037 | case GE_EXPR: | |
10038 | if (integer_onep (arg1)) | |
10039 | arg1 = integer_zero_node, code = unsignedp ? GTU : GT; | |
10040 | else | |
10041 | code = unsignedp ? GEU : GE; | |
ca695ac9 | 10042 | break; |
1eb8759b RH |
10043 | |
10044 | case UNORDERED_EXPR: | |
10045 | code = UNORDERED; | |
10046 | break; | |
10047 | case ORDERED_EXPR: | |
10048 | code = ORDERED; | |
10049 | break; | |
10050 | case UNLT_EXPR: | |
10051 | code = UNLT; | |
10052 | break; | |
10053 | case UNLE_EXPR: | |
10054 | code = UNLE; | |
10055 | break; | |
10056 | case UNGT_EXPR: | |
10057 | code = UNGT; | |
10058 | break; | |
10059 | case UNGE_EXPR: | |
10060 | code = UNGE; | |
10061 | break; | |
10062 | case UNEQ_EXPR: | |
10063 | code = UNEQ; | |
10064 | break; | |
d1a7edaf PB |
10065 | case LTGT_EXPR: |
10066 | code = LTGT; | |
10067 | break; | |
1eb8759b | 10068 | |
ca695ac9 | 10069 | default: |
5b0264cb | 10070 | gcc_unreachable (); |
bbf6f052 | 10071 | } |
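/* Worked example of the conversions above (illustrative only): a signed
   "x < 1" becomes "x <= 0" (LE), an unsigned "x < 1" becomes LEU against
   zero, and a signed "x <= -1" becomes "x < 0", so the tests that follow
   see comparisons against zero wherever possible.  */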
bbf6f052 | 10072 | |
b93a436e | 10073 | /* Put a constant second. */ |
0f996086 CF |
10074 | if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST |
10075 | || TREE_CODE (arg0) == FIXED_CST) | |
b93a436e JL |
10076 | { |
10077 | tem = arg0; arg0 = arg1; arg1 = tem; | |
10078 | code = swap_condition (code); | |
ca695ac9 | 10079 | } |
bbf6f052 | 10080 | |
b93a436e JL |
10081 | /* If this is an equality or inequality test of a single bit, we can |
10082 | do this by shifting the bit being tested to the low-order bit and | |
10083 | masking the result with the constant 1. If the condition was EQ, | |
10084 | we xor it with 1. This does not require an scc insn and is faster | |
7960bf22 JL |
10085 | than an scc insn even if we have it. |
10086 | ||
10087 | The code to make this transformation was moved into fold_single_bit_test, | |
10088 | so we just call into the folder and expand its result. */ | |
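/* Illustrative example (hypothetical operands, not from this file): for
   "(x & 8) != 0", fold_single_bit_test produces the equivalent of
   "(x >> 3) & 1"; for "(x & 8) == 0" the result is additionally XORed
   with 1.  */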
d39985fa | 10089 | |
b93a436e JL |
10090 | if ((code == NE || code == EQ) |
10091 | && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1) | |
10092 | && integer_pow2p (TREE_OPERAND (arg0, 1))) | |
60cd4dae | 10093 | { |
ae2bcd98 | 10094 | tree type = lang_hooks.types.type_for_mode (mode, unsignedp); |
db3927fb AH |
10095 | return expand_expr (fold_single_bit_test (loc, |
10096 | code == NE ? NE_EXPR : EQ_EXPR, | |
450b1728 | 10097 | arg0, arg1, type), |
60cd4dae JL |
10098 | target, VOIDmode, EXPAND_NORMAL); |
10099 | } | |
bbf6f052 | 10100 | |
296b4ed9 | 10101 | if (! get_subtarget (target) |
e3be1116 | 10102 | || GET_MODE (subtarget) != operand_mode) |
b93a436e JL |
10103 | subtarget = 0; |
10104 | ||
bbbbb16a | 10105 | expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL); |
b93a436e JL |
10106 | |
10107 | if (target == 0) | |
10108 | target = gen_reg_rtx (mode); | |
10109 | ||
495499da PB |
10110 | /* Try a cstore if possible. */ |
10111 | return emit_store_flag_force (target, code, op0, op1, | |
10112 | operand_mode, unsignedp, 1); | |
ca695ac9 | 10113 | } |
b93a436e | 10114 | \f |
b93a436e | 10115 | |
ad82abb8 ZW |
10116 | /* Stubs in case we haven't got a casesi insn. */ |
10117 | #ifndef HAVE_casesi | |
10118 | # define HAVE_casesi 0 | |
10119 | # define gen_casesi(a, b, c, d, e) (0) | |
10120 | # define CODE_FOR_casesi CODE_FOR_nothing | |
10121 | #endif | |
10122 | ||
ad82abb8 ZW |
10123 | /* Attempt to generate a casesi instruction. Returns 1 if successful, |
10124 | 0 otherwise (i.e. if there is no casesi instruction). */ | |
10125 | int | |
502b8322 | 10126 | try_casesi (tree index_type, tree index_expr, tree minval, tree range, |
55187c8a RG |
10127 | rtx table_label ATTRIBUTE_UNUSED, rtx default_label, |
10128 | rtx fallback_label ATTRIBUTE_UNUSED) | |
ad82abb8 ZW |
10129 | { |
10130 | enum machine_mode index_mode = SImode; | |
10131 | int index_bits = GET_MODE_BITSIZE (index_mode); | |
10132 | rtx op1, op2, index; | |
10133 | enum machine_mode op_mode; | |
10134 | ||
10135 | if (! HAVE_casesi) | |
10136 | return 0; | |
10137 | ||
10138 | /* Convert the index to SImode. */ | |
10139 | if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode)) | |
10140 | { | |
10141 | enum machine_mode omode = TYPE_MODE (index_type); | |
84217346 | 10142 | rtx rangertx = expand_normal (range); |
ad82abb8 ZW |
10143 | |
10144 | /* We must handle the endpoints in the original mode. */ | |
3244e67d RS |
10145 | index_expr = build2 (MINUS_EXPR, index_type, |
10146 | index_expr, minval); | |
ad82abb8 | 10147 | minval = integer_zero_node; |
84217346 | 10148 | index = expand_normal (index_expr); |
b7814a18 RG |
10149 | if (default_label) |
10150 | emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX, | |
10151 | omode, 1, default_label); | |
ad82abb8 ZW |
10152 | /* Now we can safely truncate. */ |
10153 | index = convert_to_mode (index_mode, index, 0); | |
10154 | } | |
10155 | else | |
10156 | { | |
10157 | if (TYPE_MODE (index_type) != index_mode) | |
10158 | { | |
3967bc2d RS |
10159 | index_type = lang_hooks.types.type_for_size (index_bits, 0); |
10160 | index_expr = fold_convert (index_type, index_expr); | |
ad82abb8 ZW |
10161 | } |
10162 | ||
84217346 | 10163 | index = expand_normal (index_expr); |
ad82abb8 | 10164 | } |
ad76cef8 | 10165 | |
ad82abb8 ZW |
10166 | do_pending_stack_adjust (); |
10167 | ||
10168 | op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode; | |
10169 | if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate) | |
10170 | (index, op_mode)) | |
10171 | index = copy_to_mode_reg (op_mode, index); | |
e87b4f3f | 10172 | |
84217346 | 10173 | op1 = expand_normal (minval); |
ad82abb8 ZW |
10174 | |
10175 | op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode; | |
10176 | op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)), | |
8df83eae | 10177 | op1, TYPE_UNSIGNED (TREE_TYPE (minval))); |
ad82abb8 ZW |
10178 | if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate) |
10179 | (op1, op_mode)) | |
10180 | op1 = copy_to_mode_reg (op_mode, op1); | |
10181 | ||
84217346 | 10182 | op2 = expand_normal (range); |
ad82abb8 ZW |
10183 | |
10184 | op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode; | |
10185 | op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)), | |
8df83eae | 10186 | op2, TYPE_UNSIGNED (TREE_TYPE (range))); |
ad82abb8 ZW |
10187 | if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate) |
10188 | (op2, op_mode)) | |
10189 | op2 = copy_to_mode_reg (op_mode, op2); | |
10190 | ||
10191 | emit_jump_insn (gen_casesi (index, op1, op2, | |
55187c8a RG |
10192 | table_label, !default_label |
10193 | ? fallback_label : default_label)); | |
ad82abb8 ZW |
10194 | return 1; |
10195 | } | |
10196 | ||
10197 | /* Attempt to generate a tablejump instruction; same concept. */ | |
10198 | #ifndef HAVE_tablejump | |
10199 | #define HAVE_tablejump 0 | |
10200 | #define gen_tablejump(x, y) (0) | |
10201 | #endif | |
10202 | ||
10203 | /* Subroutine of the next function. | |
10204 | ||
10205 | INDEX is the value being switched on, with the lowest value | |
b93a436e JL |
10206 | in the table already subtracted. |
10207 | MODE is its expected mode (needed if INDEX is constant). | |
10208 | RANGE is the length of the jump table. | |
10209 | TABLE_LABEL is a CODE_LABEL rtx for the table itself. | |
88d3b7f0 | 10210 | |
b93a436e JL |
10211 | DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the |
10212 | index value is out of range. */ | |
0f41302f | 10213 | |
ad82abb8 | 10214 | static void |
502b8322 AJ |
10215 | do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label, |
10216 | rtx default_label) | |
ca695ac9 | 10217 | { |
b3694847 | 10218 | rtx temp, vector; |
88d3b7f0 | 10219 | |
cb91fab0 JH |
10220 | if (INTVAL (range) > cfun->cfg->max_jumptable_ents) |
10221 | cfun->cfg->max_jumptable_ents = INTVAL (range); | |
1877be45 | 10222 | |
b93a436e JL |
10223 | /* Do an unsigned comparison (in the proper mode) between the index |
10224 | expression and the value which represents the length of the range. | |
10225 | Since we just finished subtracting the lower bound of the range | |
10226 | from the index expression, this comparison allows us to simultaneously | |
10227 | check that the original index expression value is both greater than | |
10228 | or equal to the minimum value of the range and less than or equal to | |
10229 | the maximum value of the range. */ | |
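/* Worked example (values chosen for illustration): for case values 3..10,
   MINVAL 3 has already been subtracted and RANGE is 7.  An original index
   of 2 becomes -1, which compares GTU to 7 when viewed as unsigned, so we
   jump to the default; 12 becomes 9, also GTU 7; 5 becomes 2 and falls
   through into the table.  */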
709f5be1 | 10230 | |
b7814a18 RG |
10231 | if (default_label) |
10232 | emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1, | |
10233 | default_label); | |
bbf6f052 | 10234 | |
b93a436e JL |
10235 | /* If index is in range, it must fit in Pmode. |
10236 | Convert to Pmode so we can index with it. */ | |
10237 | if (mode != Pmode) | |
10238 | index = convert_to_mode (Pmode, index, 1); | |
bbf6f052 | 10239 | |
ba228239 | 10240 | /* Don't let a MEM slip through, because then INDEX that comes |
b93a436e JL |
10241 | out of PIC_CASE_VECTOR_ADDRESS won't be a valid address, |
10242 | and break_out_memory_refs will go to work on it and mess it up. */ | |
10243 | #ifdef PIC_CASE_VECTOR_ADDRESS | |
f8cfc6aa | 10244 | if (flag_pic && !REG_P (index)) |
b93a436e JL |
10245 | index = copy_to_mode_reg (Pmode, index); |
10246 | #endif | |
ca695ac9 | 10247 | |
b93a436e JL |
10248 | /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the |
10249 | GET_MODE_SIZE, because this indicates how large insns are. The other | |
10250 | uses should all be Pmode, because they are addresses. This code | |
10251 | could fail if addresses and insns are not the same size. */ | |
10252 | index = gen_rtx_PLUS (Pmode, | |
10253 | gen_rtx_MULT (Pmode, index, | |
10254 | GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))), | |
10255 | gen_rtx_LABEL_REF (Pmode, table_label)); | |
10256 | #ifdef PIC_CASE_VECTOR_ADDRESS | |
10257 | if (flag_pic) | |
10258 | index = PIC_CASE_VECTOR_ADDRESS (index); | |
10259 | else | |
bbf6f052 | 10260 | #endif |
3de5e93a | 10261 | index = memory_address (CASE_VECTOR_MODE, index); |
b93a436e | 10262 | temp = gen_reg_rtx (CASE_VECTOR_MODE); |
542a8afa | 10263 | vector = gen_const_mem (CASE_VECTOR_MODE, index); |
b93a436e JL |
10264 | convert_move (temp, vector, 0); |
10265 | ||
10266 | emit_jump_insn (gen_tablejump (temp, table_label)); | |
10267 | ||
10268 | /* If we are generating PIC code or if the table is PC-relative, the | |
10269 | table and JUMP_INSN must be adjacent, so don't output a BARRIER. */ | |
10270 | if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic) | |
10271 | emit_barrier (); | |
bbf6f052 | 10272 | } |
b93a436e | 10273 | |
ad82abb8 | 10274 | int |
502b8322 AJ |
10275 | try_tablejump (tree index_type, tree index_expr, tree minval, tree range, |
10276 | rtx table_label, rtx default_label) | |
ad82abb8 ZW |
10277 | { |
10278 | rtx index; | |
10279 | ||
10280 | if (! HAVE_tablejump) | |
10281 | return 0; | |
10282 | ||
4845b383 | 10283 | index_expr = fold_build2 (MINUS_EXPR, index_type, |
3967bc2d RS |
10284 | fold_convert (index_type, index_expr), |
10285 | fold_convert (index_type, minval)); | |
84217346 | 10286 | index = expand_normal (index_expr); |
ad82abb8 ZW |
10287 | do_pending_stack_adjust (); |
10288 | ||
10289 | do_tablejump (index, TYPE_MODE (index_type), | |
10290 | convert_modes (TYPE_MODE (index_type), | |
10291 | TYPE_MODE (TREE_TYPE (range)), | |
84217346 | 10292 | expand_normal (range), |
8df83eae | 10293 | TYPE_UNSIGNED (TREE_TYPE (range))), |
ad82abb8 ZW |
10294 | table_label, default_label); |
10295 | return 1; | |
10296 | } | |
e2500fed | 10297 | |
cb2a532e AH |
10298 | /* Nonzero if the mode is a valid vector mode for this architecture. |
10299 | This returns nonzero even if there is no hardware support for the | |
10300 | vector mode, but we can emulate with narrower modes. */ | |
10301 | ||
10302 | int | |
502b8322 | 10303 | vector_mode_valid_p (enum machine_mode mode) |
cb2a532e | 10304 | { |
82d6e6fc | 10305 | enum mode_class mclass = GET_MODE_CLASS (mode); |
cb2a532e AH |
10306 | enum machine_mode innermode; |
10307 | ||
10308 | /* Doh! What's going on? */ | |
82d6e6fc KG |
10309 | if (mclass != MODE_VECTOR_INT |
10310 | && mclass != MODE_VECTOR_FLOAT | |
10311 | && mclass != MODE_VECTOR_FRACT | |
10312 | && mclass != MODE_VECTOR_UFRACT | |
10313 | && mclass != MODE_VECTOR_ACCUM | |
10314 | && mclass != MODE_VECTOR_UACCUM) | |
cb2a532e AH |
10315 | return 0; |
10316 | ||
10317 | /* Hardware support. Woo hoo! */ | |
f676971a | 10318 | if (targetm.vector_mode_supported_p (mode)) |
cb2a532e AH |
10319 | return 1; |
10320 | ||
10321 | innermode = GET_MODE_INNER (mode); | |
10322 | ||
10323 | /* We should probably return 1 if requesting V4DI and we have no DI, | |
10324 | but do have V2DI; however, this case is probably very unlikely. */ | |
10325 | ||
10326 | /* If we have support for the inner mode, we can safely emulate it. | |
10327 | We may not have V2DI, but we can emulate with a pair of DIs. */ | |
6dd53648 | 10328 | return targetm.scalar_mode_supported_p (innermode); |
cb2a532e AH |
10329 | } |
10330 | ||
d744e06e AH |
10331 | /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */ |
10332 | static rtx | |
502b8322 | 10333 | const_vector_from_tree (tree exp) |
d744e06e AH |
10334 | { |
10335 | rtvec v; | |
10336 | int units, i; | |
10337 | tree link, elt; | |
10338 | enum machine_mode inner, mode; | |
10339 | ||
10340 | mode = TYPE_MODE (TREE_TYPE (exp)); | |
10341 | ||
6de9cd9a | 10342 | if (initializer_zerop (exp)) |
d744e06e AH |
10343 | return CONST0_RTX (mode); |
10344 | ||
10345 | units = GET_MODE_NUNITS (mode); | |
10346 | inner = GET_MODE_INNER (mode); | |
10347 | ||
10348 | v = rtvec_alloc (units); | |
10349 | ||
10350 | link = TREE_VECTOR_CST_ELTS (exp); | |
10351 | for (i = 0; link; link = TREE_CHAIN (link), ++i) | |
10352 | { | |
10353 | elt = TREE_VALUE (link); | |
10354 | ||
10355 | if (TREE_CODE (elt) == REAL_CST) | |
10356 | RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt), | |
10357 | inner); | |
0f996086 CF |
10358 | else if (TREE_CODE (elt) == FIXED_CST) |
10359 | RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt), | |
10360 | inner); | |
d744e06e | 10361 | else |
54fb1ae0 AS |
10362 | RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt), |
10363 | inner); | |
d744e06e AH |
10364 | } |
10365 | ||
5f6c070d AH |
10366 | /* Initialize remaining elements to 0. */ |
10367 | for (; i < units; ++i) | |
10368 | RTVEC_ELT (v, i) = CONST0_RTX (inner); | |
10369 | ||
a73b091d | 10370 | return gen_rtx_CONST_VECTOR (mode, v); |
d744e06e | 10371 | } |
f9417da1 RG |
10372 | |
10373 | ||
10374 | /* Build a decl for an EH personality function named NAME. */ | |
10375 | ||
10376 | tree | |
10377 | build_personality_function (const char *name) | |
10378 | { | |
10379 | tree decl, type; | |
10380 | ||
10381 | type = build_function_type_list (integer_type_node, integer_type_node, | |
10382 | long_long_unsigned_type_node, | |
10383 | ptr_type_node, ptr_type_node, NULL_TREE); | |
10384 | decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, | |
10385 | get_identifier (name), type); | |
10386 | DECL_ARTIFICIAL (decl) = 1; | |
10387 | DECL_EXTERNAL (decl) = 1; | |
10388 | TREE_PUBLIC (decl) = 1; | |
10389 | ||
10390 | /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with | |
10391 | are the flags assigned by targetm.encode_section_info. */ | |
10392 | SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL); | |
10393 | ||
10394 | return decl; | |
10395 | } | |
10396 | ||
10397 | /* Extracts the personality function of DECL and returns the corresponding | |
10398 | libfunc. */ | |
10399 | ||
10400 | rtx | |
10401 | get_personality_function (tree decl) | |
10402 | { | |
10403 | tree personality = DECL_FUNCTION_PERSONALITY (decl); | |
10404 | enum eh_personality_kind pk; | |
10405 | ||
10406 | pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl)); | |
10407 | if (pk == eh_personality_none) | |
10408 | return NULL; | |
10409 | ||
10410 | if (!personality | |
10411 | && pk == eh_personality_any) | |
10412 | personality = lang_hooks.eh_personality (); | |
10413 | ||
10414 | if (pk == eh_personality_lang) | |
10415 | gcc_assert (personality != NULL_TREE); | |
10416 | ||
10417 | return XEXP (DECL_RTL (personality), 0); | |
10418 | } | |
10419 | ||
e2500fed | 10420 | #include "gt-expr.h" |