]>
Commit | Line | Data |
---|---|---|
53800dbe | 1 | /* Expand builtin functions. |
0b387d23 | 2 | Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, |
3 | 1999, 2000 Free Software Foundation, Inc. | |
53800dbe | 4 | |
5 | This file is part of GNU CC. | |
6 | ||
7 | GNU CC is free software; you can redistribute it and/or modify | |
8 | it under the terms of the GNU General Public License as published by | |
9 | the Free Software Foundation; either version 2, or (at your option) | |
10 | any later version. | |
11 | ||
12 | GNU CC is distributed in the hope that it will be useful, | |
13 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
14 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
15 | GNU General Public License for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
18 | along with GNU CC; see the file COPYING. If not, write to | |
19 | the Free Software Foundation, 59 Temple Place - Suite 330, | |
20 | Boston, MA 02111-1307, USA. */ | |
21 | ||
22 | #include "config.h" | |
23 | #include "system.h" | |
24 | #include "machmode.h" | |
25 | #include "rtl.h" | |
26 | #include "tree.h" | |
27 | #include "obstack.h" | |
28 | #include "flags.h" | |
29 | #include "regs.h" | |
30 | #include "hard-reg-set.h" | |
31 | #include "except.h" | |
32 | #include "function.h" | |
33 | #include "insn-flags.h" | |
34 | #include "insn-codes.h" | |
35 | #include "insn-config.h" | |
36 | #include "expr.h" | |
37 | #include "recog.h" | |
38 | #include "output.h" | |
39 | #include "typeclass.h" | |
40 | #include "defaults.h" | |
41 | #include "toplev.h" | |
1dd6c958 | 42 | #include "tm_p.h" |
53800dbe | 43 | |
44 | #define CALLED_AS_BUILT_IN(NODE) \ | |
45 | (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10)) | |
46 | ||
53800dbe | 47 | /* Register mappings for target machines without register windows. */ |
48 | #ifndef INCOMING_REGNO | |
49 | #define INCOMING_REGNO(OUT) (OUT) | |
50 | #endif | |
51 | #ifndef OUTGOING_REGNO | |
52 | #define OUTGOING_REGNO(IN) (IN) | |
53 | #endif | |
54 | ||
726e2588 | 55 | #ifndef PAD_VARARGS_DOWN |
56 | #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN | |
57 | #endif | |
58 | ||
/* Define the names of the builtin function types and codes.  */

/* Printable names for the four `enum built_in_class' values, indexed
   by the enumeration value itself.  */
const char *const built_in_class_names[4]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

/* Printable names for every builtin function code, produced by
   stringifying each DEF_BUILTIN entry in builtins.def.  */
#define DEF_BUILTIN(x) STRINGIFY(x),
const char *const built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN
ab7943b9 | 69 | |
/* Hook supplied by the language front end: given a type, return the
   type that values of that type promote to when passed through an
   ellipsis (`...').  Used by the varargs/stdarg expanders below.  */
tree (*lang_type_promotes_to) PARAMS ((tree));

/* Forward declarations for the static helpers defined in this file.  */
static int get_pointer_alignment PARAMS ((tree, unsigned));
static tree c_strlen PARAMS ((tree));
static rtx get_memory_rtx PARAMS ((tree));
static int apply_args_size PARAMS ((void));
static int apply_result_size PARAMS ((void));
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector PARAMS ((int, rtx));
#endif
static rtx expand_builtin_apply_args PARAMS ((void));
static rtx expand_builtin_apply_args_1 PARAMS ((void));
static rtx expand_builtin_apply PARAMS ((rtx, rtx, rtx));
static void expand_builtin_return PARAMS ((rtx));
static rtx expand_builtin_classify_type PARAMS ((tree));
static rtx expand_builtin_mathfn PARAMS ((tree, rtx, rtx));
static rtx expand_builtin_constant_p PARAMS ((tree));
static rtx expand_builtin_args_info PARAMS ((tree));
static rtx expand_builtin_next_arg PARAMS ((tree));
static rtx expand_builtin_va_start PARAMS ((int, tree));
static rtx expand_builtin_va_end PARAMS ((tree));
static rtx expand_builtin_va_copy PARAMS ((tree));
#ifdef HAVE_cmpstrsi
static rtx expand_builtin_memcmp PARAMS ((tree, tree, rtx));
static rtx expand_builtin_strcmp PARAMS ((tree, rtx));
#endif
static rtx expand_builtin_memcpy PARAMS ((tree));
static rtx expand_builtin_strcpy PARAMS ((tree));
static rtx expand_builtin_memset PARAMS ((tree));
static rtx expand_builtin_bzero PARAMS ((tree));
static rtx expand_builtin_strlen PARAMS ((tree, rtx,
					  enum machine_mode));
static rtx expand_builtin_alloca PARAMS ((tree, rtx));
static rtx expand_builtin_ffs PARAMS ((tree, rtx, rtx));
static rtx expand_builtin_frame_address PARAMS ((tree));
static tree stabilize_va_list PARAMS ((tree, int));
static rtx expand_builtin_expect PARAMS ((tree, rtx));
53800dbe | 107 | |
108 | /* Return the alignment in bits of EXP, a pointer valued expression. | |
109 | But don't return more than MAX_ALIGN no matter what. | |
110 | The alignment returned is, by default, the alignment of the thing that | |
111 | EXP points to (if it is not a POINTER_TYPE, 0 is returned). | |
112 | ||
113 | Otherwise, look at the expression to see if we can do better, i.e., if the | |
114 | expression is actually pointing at an object whose alignment is tighter. */ | |
115 | ||
116 | static int | |
117 | get_pointer_alignment (exp, max_align) | |
118 | tree exp; | |
119 | unsigned max_align; | |
120 | { | |
121 | unsigned align, inner; | |
122 | ||
123 | if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE) | |
124 | return 0; | |
125 | ||
126 | align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))); | |
127 | align = MIN (align, max_align); | |
128 | ||
129 | while (1) | |
130 | { | |
131 | switch (TREE_CODE (exp)) | |
132 | { | |
133 | case NOP_EXPR: | |
134 | case CONVERT_EXPR: | |
135 | case NON_LVALUE_EXPR: | |
136 | exp = TREE_OPERAND (exp, 0); | |
137 | if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE) | |
138 | return align; | |
325d1c45 | 139 | |
53800dbe | 140 | inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))); |
141 | align = MIN (inner, max_align); | |
142 | break; | |
143 | ||
144 | case PLUS_EXPR: | |
145 | /* If sum of pointer + int, restrict our maximum alignment to that | |
146 | imposed by the integer. If not, we can't do any better than | |
147 | ALIGN. */ | |
325d1c45 | 148 | if (! host_integerp (TREE_OPERAND (exp, 1), 1)) |
53800dbe | 149 | return align; |
150 | ||
325d1c45 | 151 | while (((tree_low_cst (TREE_OPERAND (exp, 1), 1) * BITS_PER_UNIT) |
53800dbe | 152 | & (max_align - 1)) |
153 | != 0) | |
154 | max_align >>= 1; | |
155 | ||
156 | exp = TREE_OPERAND (exp, 0); | |
157 | break; | |
158 | ||
159 | case ADDR_EXPR: | |
160 | /* See what we are pointing at and look at its alignment. */ | |
161 | exp = TREE_OPERAND (exp, 0); | |
162 | if (TREE_CODE (exp) == FUNCTION_DECL) | |
163 | align = FUNCTION_BOUNDARY; | |
9308e976 | 164 | else if (DECL_P (exp)) |
53800dbe | 165 | align = DECL_ALIGN (exp); |
166 | #ifdef CONSTANT_ALIGNMENT | |
167 | else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c') | |
168 | align = CONSTANT_ALIGNMENT (exp, align); | |
169 | #endif | |
170 | return MIN (align, max_align); | |
171 | ||
172 | default: | |
173 | return align; | |
174 | } | |
175 | } | |
176 | } | |
177 | ||
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   The value returned is of type `ssizetype'.  Returns 0 (a null tree,
   meaning "unknown") whenever the length cannot be determined at
   compile time.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

static tree
c_strlen (src)
     tree src;
{
  tree offset_node;
  int offset, max;
  char *ptr;

  /* Reduce SRC to a STRING_CST plus a byte offset into it; punt if it
     is not a string constant at all.  */
  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  max = TREE_STRING_LENGTH (src);
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return 0;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop (size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character.  */
  if (offset_node == 0)
    offset = 0;
  else
    {
      /* Did we get a long long offset?  If so, punt.  */
      if (TREE_INT_CST_HIGH (offset_node) != 0)
	return 0;
      offset = TREE_INT_CST_LOW (offset_node);
    }

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      warning ("offset outside bounds of constant string");
      return 0;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
251 | ||
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE, which is either
   BUILT_IN_FRAME_ADDRESS or BUILT_IN_RETURN_ADDRESS).  Returns an rtx
   for the requested value.  */

rtx
expand_builtin_return_addr (fndecl_code, count, tem)
     enum built_in_function fndecl_code;
     int count;
     rtx tem;
{
  int i;

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the sparc, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the sparc, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
    }

  /* For __builtin_frame_address, return what we've got.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return tem;

  /* For __builtin_return_address, Get the return address from that
     frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  /* By default the return address is found one word past the frame
     address.  */
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_rtx_MEM (Pmode, tem);
#endif
  return tem;
}
308 | ||
/* __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below (and for longjmp) is copied from the handling of
   non-local gotos.

   BUF_ADDR is the address of the buffer; TARGET is a suggested place
   for the result (the setjmp return value); FIRST_LABEL is branched to
   on the initial fall-through return (value 0), NEXT_LABEL after a
   longjmp lands here (value 1).  Returns the rtx holding the result.

   NOTE: This is intended for use by GNAT and the exception handling
   scheme in the compiler and will only work in the method used by
   them.  */

rtx
expand_builtin_setjmp (buf_addr, target, first_label, next_label)
     rtx buf_addr;
     rtx target;
     rtx first_label, next_label;
{
  rtx lab1 = gen_label_rtx ();
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  enum machine_mode value_mode;
  rtx stack_save;

  value_mode = TYPE_MODE (integer_type_node);

#ifdef POINTERS_EXTEND_UNSIGNED
  buf_addr = convert_memory_address (Pmode, buf_addr);
#endif

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* Require a fresh pseudo for the result unless the caller supplied
     one.  */
  if (target == 0 || GET_CODE (target) != REG
      || REGNO (target) < FIRST_PSEUDO_REGISTER)
    target = gen_reg_rtx (value_mode);

  emit_queue ();

  /* We store the frame pointer and the address of lab1 in the buffer
     and use the rest of it for the stack save area, which is
     machine-dependent.  */

#ifndef BUILTIN_SETJMP_FRAME_VALUE
#define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
#endif

  /* Word 0 of the buffer: the frame value.  */
  emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
		  BUILTIN_SETJMP_FRAME_VALUE);
  /* Word 1: the address of the receiver label LAB1.  */
  emit_move_insn (validize_mem
		  (gen_rtx_MEM (Pmode,
				plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)))),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, lab1)));

  /* Words 2 and up: the machine-dependent stack save area.  */
  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* Set TARGET to zero and branch to the first-time-through label.  */
  emit_move_insn (target, const0_rtx);
  emit_jump_insn (gen_jump (first_label));
  emit_barrier ();
  emit_label (lab1);

  /* Tell flow about the strange goings on.  Putting `lab1' on
     `nonlocal_goto_handler_labels' to indicates that function
     calls may traverse the arc back to this label.  */

  current_function_has_nonlocal_label = 1;
  nonlocal_goto_handler_labels
    = gen_rtx_EXPR_LIST (VOIDmode, lab1, nonlocal_goto_handler_labels);

  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  The code below is from expand_end_bindings
     in stmt.c; see detailed documentation there.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* Skip the arg-pointer restore when ELIMINABLE_REGS says the
	 arg pointer can be eliminated to the hard frame pointer.  */
      size_t i;
      static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == sizeof elim_regs / sizeof elim_regs [0])
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.
	     If there hasn't be space allocated for it yet, make
	     some now.  */
	  if (arg_pointer_save_area == 0)
	    arg_pointer_save_area
	      = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
	  emit_move_insn (virtual_incoming_args_rtx,
			  copy_to_reg (arg_pointer_save_area));
	}
    }
#endif

  /* Let the target emit any receiver-side fixup code.  */
#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (lab1));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
  else
#endif
    {
      ; /* Nothing */
    }

  /* Set TARGET, and branch to the next-time-through label.  */
  emit_move_insn (target, const1_rtx);
  emit_jump_insn (gen_jump (next_label));
  emit_barrier ();

  return target;
}
450 | ||
451 | /* __builtin_longjmp is passed a pointer to an array of five words (not | |
452 | all will be used on all machines). It operates similarly to the C | |
453 | library function of the same name, but is more efficient. Much of | |
454 | the code below is copied from the handling of non-local gotos. | |
455 | ||
456 | NOTE: This is intended for use by GNAT and the exception handling | |
457 | scheme in the compiler and will only work in the method used by | |
458 | them. */ | |
459 | ||
460 | void | |
461 | expand_builtin_longjmp (buf_addr, value) | |
462 | rtx buf_addr, value; | |
463 | { | |
464 | rtx fp, lab, stack; | |
465 | enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL); | |
466 | ||
467 | #ifdef POINTERS_EXTEND_UNSIGNED | |
468 | buf_addr = convert_memory_address (Pmode, buf_addr); | |
469 | #endif | |
470 | buf_addr = force_reg (Pmode, buf_addr); | |
471 | ||
472 | /* We used to store value in static_chain_rtx, but that fails if pointers | |
473 | are smaller than integers. We instead require that the user must pass | |
474 | a second argument of 1, because that is what builtin_setjmp will | |
475 | return. This also makes EH slightly more efficient, since we are no | |
476 | longer copying around a value that we don't care about. */ | |
477 | if (value != const1_rtx) | |
478 | abort (); | |
479 | ||
480 | #ifdef HAVE_builtin_longjmp | |
481 | if (HAVE_builtin_longjmp) | |
482 | emit_insn (gen_builtin_longjmp (buf_addr)); | |
483 | else | |
484 | #endif | |
485 | { | |
486 | fp = gen_rtx_MEM (Pmode, buf_addr); | |
487 | lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr, | |
488 | GET_MODE_SIZE (Pmode))); | |
489 | ||
490 | stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr, | |
491 | 2 * GET_MODE_SIZE (Pmode))); | |
492 | ||
493 | /* Pick up FP, label, and SP from the block and jump. This code is | |
494 | from expand_goto in stmt.c; see there for detailed comments. */ | |
495 | #if HAVE_nonlocal_goto | |
496 | if (HAVE_nonlocal_goto) | |
497 | /* We have to pass a value to the nonlocal_goto pattern that will | |
498 | get copied into the static_chain pointer, but it does not matter | |
499 | what that value is, because builtin_setjmp does not use it. */ | |
500 | emit_insn (gen_nonlocal_goto (value, fp, stack, lab)); | |
501 | else | |
502 | #endif | |
503 | { | |
504 | lab = copy_to_reg (lab); | |
505 | ||
506 | emit_move_insn (hard_frame_pointer_rtx, fp); | |
507 | emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX); | |
508 | ||
509 | emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx)); | |
510 | emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx)); | |
511 | emit_indirect_jump (lab); | |
512 | } | |
513 | } | |
514 | } | |
515 | ||
516 | /* Get a MEM rtx for expression EXP which can be used in a string instruction | |
517 | (cmpstrsi, movstrsi, ..). */ | |
518 | static rtx | |
519 | get_memory_rtx (exp) | |
520 | tree exp; | |
521 | { | |
522 | rtx mem; | |
523 | int is_aggregate; | |
524 | ||
525 | mem = gen_rtx_MEM (BLKmode, | |
526 | memory_address (BLKmode, | |
527 | expand_expr (exp, NULL_RTX, | |
528 | ptr_mode, EXPAND_SUM))); | |
529 | ||
530 | RTX_UNCHANGING_P (mem) = TREE_READONLY (exp); | |
531 | ||
532 | /* Figure out the type of the object pointed to. Set MEM_IN_STRUCT_P | |
533 | if the value is the address of a structure or if the expression is | |
534 | cast to a pointer to structure type. */ | |
535 | is_aggregate = 0; | |
536 | ||
537 | while (TREE_CODE (exp) == NOP_EXPR) | |
538 | { | |
539 | tree cast_type = TREE_TYPE (exp); | |
540 | if (TREE_CODE (cast_type) == POINTER_TYPE | |
541 | && AGGREGATE_TYPE_P (TREE_TYPE (cast_type))) | |
542 | { | |
543 | is_aggregate = 1; | |
544 | break; | |
545 | } | |
546 | exp = TREE_OPERAND (exp, 0); | |
547 | } | |
548 | ||
549 | if (is_aggregate == 0) | |
550 | { | |
551 | tree type; | |
552 | ||
553 | if (TREE_CODE (exp) == ADDR_EXPR) | |
554 | /* If this is the address of an object, check whether the | |
555 | object is an array. */ | |
556 | type = TREE_TYPE (TREE_OPERAND (exp, 0)); | |
557 | else | |
558 | type = TREE_TYPE (TREE_TYPE (exp)); | |
559 | is_aggregate = AGGREGATE_TYPE_P (type); | |
560 | } | |
561 | ||
562 | MEM_SET_IN_STRUCT_P (mem, is_aggregate); | |
563 | return mem; | |
564 | } | |
565 | \f | |
566 | /* Built-in functions to perform an untyped call and return. */ | |
567 | ||
/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.
   Filled in lazily by apply_args_size below.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.
   Filled in lazily by apply_result_size below.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for calling a function, this
   gives the offset of that register into the block returned by
   __builtin_apply_args.  0 indicates that the register is not
   used for calling a function.  Filled in by apply_args_size.  */
static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
587 | ||
588 | /* Return the offset of register REGNO into the block returned by | |
589 | __builtin_apply_args. This is not declared static, since it is | |
590 | needed in objc-act.c. */ | |
591 | ||
592 | int | |
593 | apply_args_register_offset (regno) | |
594 | int regno; | |
595 | { | |
596 | apply_args_size (); | |
597 | ||
598 | /* Arguments are always put in outgoing registers (in the argument | |
599 | block) if such make sense. */ | |
600 | #ifdef OUTGOING_REGNO | |
601 | regno = OUTGOING_REGNO(regno); | |
602 | #endif | |
603 | return apply_args_reg_offset[regno]; | |
604 | } | |
605 | ||
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode (and apply_args_reg_offset).  The
   result is computed once and cached in a local static.  */

static int
apply_args_size ()
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (struct_value_rtx)
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    /* Search for the proper mode for copying this register's
	       value.  I'm not sure this is right, but it works so far.  */
	    enum machine_mode best_mode = VOIDmode;

	    /* Prefer the widest single-register integer mode the
	       register supports.  */
	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
		 mode != VOIDmode;
		 mode = GET_MODE_WIDER_MODE (mode))
	      if (HARD_REGNO_MODE_OK (regno, mode)
		  && HARD_REGNO_NREGS (regno, mode) == 1)
		best_mode = mode;

	    /* Otherwise fall back to the widest float mode that has a
	       move pattern.  */
	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && (mov_optab->handlers[(int) mode].insn_code
			!= CODE_FOR_nothing))
		  best_mode = mode;

	    mode = best_mode;
	    if (mode == VOIDmode)
	      abort ();

	    /* Keep each slot aligned for its mode.  */
	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    apply_args_reg_offset[regno] = size;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	    apply_args_reg_offset[regno] = 0;
	  }
    }
  return size;
}
669 | ||
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  The result is computed once and
   cached in a local static.  */

static int
apply_result_size ()
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_VALUE_REGNO_P (regno))
	  {
	    /* Search for the proper mode for copying this register's
	       value.  I'm not sure this is right, but it works so far.  */
	    enum machine_mode best_mode = VOIDmode;

	    /* Consider integer modes only up to (not including)
	       TImode.  */
	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
		 mode != TImode;
		 mode = GET_MODE_WIDER_MODE (mode))
	      if (HARD_REGNO_MODE_OK (regno, mode))
		best_mode = mode;

	    /* Otherwise fall back to the widest float mode that has a
	       move pattern.  */
	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && (mov_optab->handlers[(int) mode].insn_code
			!= CODE_FOR_nothing))
		  best_mode = mode;

	    mode = best_mode;
	    if (mode == VOIDmode)
	      abort ();

	    /* Keep each slot aligned for its mode.  */
	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
728 | ||
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  Returns a PARALLEL of SETs, one per register
   with a non-VOIDmode entry in apply_result_mode.  */

static rtx
result_vector (savep, result)
     int savep;
     rtx result;
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	/* Keep the slot aligned the same way apply_result_size did.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	/* When saving, the hard register is the source; when restoring,
	   the INCOMING_REGNO counterpart is the destination.  */
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = change_address (result, mode,
			      plus_constant (XEXP (result, 0), size));
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
762 | ||
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  Returns a pseudo
   holding the address of the save block on the stack.  */

static rtx
expand_builtin_apply_args_1 ()
{
  rtx registers;
  int size, align, regno;
  enum machine_mode mode;

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	rtx tem;

	/* Keep each slot aligned as in apply_args_size, so the offsets
	   match apply_args_reg_offset.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (change_address (registers, mode,
					plus_constant (XEXP (registers, 0),
						       size)),
			tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
		  copy_to_reg (virtual_incoming_args_rtx));
  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_value_incoming_rtx)
    {
      emit_move_insn (change_address (registers, Pmode,
				      plus_constant (XEXP (registers, 0),
						     size)),
		      copy_to_reg (struct_value_incoming_rtx));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
820 | ||
821 | /* __builtin_apply_args returns block of memory allocated on | |
822 | the stack into which is stored the arg pointer, structure | |
823 | value address, static chain, and all the registers that might | |
824 | possibly be used in performing a function call. The code is | |
825 | moved to the start of the function so the incoming values are | |
826 | saved. */ | |
827 | static rtx | |
828 | expand_builtin_apply_args () | |
829 | { | |
830 | /* Don't do __builtin_apply_args more than once in a function. | |
831 | Save the result of the first call and reuse it. */ | |
832 | if (apply_args_value != 0) | |
833 | return apply_args_value; | |
834 | { | |
835 | /* When this function is called, it means that registers must be | |
836 | saved on entry to this function. So we migrate the | |
837 | call to the first insn of this function. */ | |
838 | rtx temp; | |
839 | rtx seq; | |
840 | ||
841 | start_sequence (); | |
842 | temp = expand_builtin_apply_args_1 (); | |
843 | seq = get_insns (); | |
844 | end_sequence (); | |
845 | ||
846 | apply_args_value = temp; | |
847 | ||
848 | /* Put the sequence after the NOTE that starts the function. | |
849 | If this is inside a SEQUENCE, make the outer-level insn | |
850 | chain current, so the code is placed at the start of the | |
851 | function. */ | |
852 | push_topmost_sequence (); | |
853 | emit_insns_before (seq, NEXT_INSN (get_insns ())); | |
854 | pop_topmost_sequence (); | |
855 | return temp; | |
856 | } | |
857 | } | |
858 | ||
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.

   FUNCTION is the rtx address of the function to call; ARGUMENTS is
   the address of a block built by __builtin_apply_args describing the
   outgoing arguments; ARGSIZE is the number of bytes of stack
   arguments to copy.  Returns the address of a stack block holding
   the called function's return-register values, suitable for
   expand_builtin_return.  */

static rtx
expand_builtin_apply (function, arguments, argsize)
     rtx function, arguments, argsize;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args,
		  gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  /* On upward-growing stacks the saved arg pointer is past the
     arguments, so step back over them.  */
  incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
				incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Push a new argument block and copy the arguments.  */
  do_pending_stack_adjust ();

  /* Save the stack with nonlocal if available */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Push a block of memory onto the stack to store the memory arguments.
     Save the address in a register, and copy the memory arguments.  ??? I
     haven't figured out how the calling convention macros effect this,
     but it's likely that the source and/or destination addresses in
     the block copy will need updating in machine specific ways.  */
  dest = allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
  emit_block_move (gen_rtx_MEM (BLKmode, dest),
		   gen_rtx_MEM (BLKmode, incoming_args),
		   argsize, PARM_BOUNDARY);

  /* Refer to the argument block.  */
  /* NOTE(review): apply_args_size ()'s value is discarded here --
     presumably it is called for the side effect of initializing
     apply_args_mode[]; confirm against its definition.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);

  /* Walk past the arg-pointer and structure value address.
     This offset arithmetic must mirror expand_builtin_apply_args_1.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg,
			change_address (arguments, mode,
					plus_constant (XEXP (arguments, 0),
						       size)));

	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value,
		      change_address (arguments, Pmode,
				      plus_constant (XEXP (arguments, 0),
						     size)));
      emit_move_insn (struct_value_rtx, value);
      if (GET_CODE (struct_value_rtx) == REG)
	use_reg (&call_fusage, struct_value_rtx);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    if (valreg)
	      abort (); /* HAVE_untyped_call required.  */
	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (GEN_CALL_VALUE (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (change_address (result, GET_MODE (valreg),
				      XEXP (result, 0)),
		      valreg);
    }
  else
#endif
    abort ();

  /* Find the CALL insn we just emitted.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  if (! call_insn)
    abort ();

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      /* Walk to the last link of the existing usage list.  */
      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
	   link = XEXP (link, 1))
	;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;

  /* Restore the stack.  Must match the save variant chosen above.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  /* Return the address of the result block.  */
  return copy_addr_to_reg (XEXP (result, 0));
}
1031 | ||
/* Perform an untyped return.

   RESULT is the address of a block of saved return-register values as
   produced by expand_builtin_apply.  The saved values are reloaded
   into the hard return registers and control jumps to the function
   epilogue, so the caller appears to return those values directly.  */

static void
expand_builtin_return (result)
     rtx result;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  /* NOTE(review): apply_result_size ()'s value is discarded --
     presumably it initializes apply_result_mode[], which the loop
     below reads; confirm against its definition.  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  /* If the target has a dedicated untyped-return pattern, use it.  */
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.
     Offsets and alignment must mirror the writer of the block.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg,
			change_address (result, mode,
					plus_constant (XEXP (result, 0),
						       size)));

	/* Accumulate a USE for each restored register so the loads
	   are not deleted as dead before the return.  */
	push_to_sequence (call_fusage);
	emit_insn (gen_rtx_USE (VOIDmode, reg));
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insns (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_null_return ();
}
1083 | ||
1084 | /* Expand a call to __builtin_classify_type with arguments found in | |
1085 | ARGLIST. */ | |
1086 | static rtx | |
1087 | expand_builtin_classify_type (arglist) | |
1088 | tree arglist; | |
1089 | { | |
1090 | if (arglist != 0) | |
1091 | { | |
1092 | tree type = TREE_TYPE (TREE_VALUE (arglist)); | |
1093 | enum tree_code code = TREE_CODE (type); | |
1094 | if (code == VOID_TYPE) | |
1095 | return GEN_INT (void_type_class); | |
1096 | if (code == INTEGER_TYPE) | |
1097 | return GEN_INT (integer_type_class); | |
1098 | if (code == CHAR_TYPE) | |
1099 | return GEN_INT (char_type_class); | |
1100 | if (code == ENUMERAL_TYPE) | |
1101 | return GEN_INT (enumeral_type_class); | |
1102 | if (code == BOOLEAN_TYPE) | |
1103 | return GEN_INT (boolean_type_class); | |
1104 | if (code == POINTER_TYPE) | |
1105 | return GEN_INT (pointer_type_class); | |
1106 | if (code == REFERENCE_TYPE) | |
1107 | return GEN_INT (reference_type_class); | |
1108 | if (code == OFFSET_TYPE) | |
1109 | return GEN_INT (offset_type_class); | |
1110 | if (code == REAL_TYPE) | |
1111 | return GEN_INT (real_type_class); | |
1112 | if (code == COMPLEX_TYPE) | |
1113 | return GEN_INT (complex_type_class); | |
1114 | if (code == FUNCTION_TYPE) | |
1115 | return GEN_INT (function_type_class); | |
1116 | if (code == METHOD_TYPE) | |
1117 | return GEN_INT (method_type_class); | |
1118 | if (code == RECORD_TYPE) | |
1119 | return GEN_INT (record_type_class); | |
1120 | if (code == UNION_TYPE || code == QUAL_UNION_TYPE) | |
1121 | return GEN_INT (union_type_class); | |
1122 | if (code == ARRAY_TYPE) | |
1123 | { | |
1124 | if (TYPE_STRING_FLAG (type)) | |
1125 | return GEN_INT (string_type_class); | |
1126 | else | |
1127 | return GEN_INT (array_type_class); | |
1128 | } | |
1129 | if (code == SET_TYPE) | |
1130 | return GEN_INT (set_type_class); | |
1131 | if (code == FILE_TYPE) | |
1132 | return GEN_INT (file_type_class); | |
1133 | if (code == LANG_TYPE) | |
1134 | return GEN_INT (lang_type_class); | |
1135 | } | |
1136 | return GEN_INT (no_type_class); | |
1137 | } | |
1138 | ||
1139 | /* Expand expression EXP, which is a call to __builtin_constant_p. */ | |
1140 | static rtx | |
1141 | expand_builtin_constant_p (exp) | |
1142 | tree exp; | |
1143 | { | |
1144 | tree arglist = TREE_OPERAND (exp, 1); | |
1145 | enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp)); | |
1146 | ||
1147 | if (arglist == 0) | |
1148 | return const0_rtx; | |
1149 | else | |
1150 | { | |
1151 | tree arg = TREE_VALUE (arglist); | |
1152 | rtx tmp; | |
1153 | ||
1154 | /* We return 1 for a numeric type that's known to be a constant | |
1155 | value at compile-time or for an aggregate type that's a | |
1156 | literal constant. */ | |
1157 | STRIP_NOPS (arg); | |
1158 | ||
1159 | /* If we know this is a constant, emit the constant of one. */ | |
1160 | if (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c' | |
1161 | || (TREE_CODE (arg) == CONSTRUCTOR | |
1162 | && TREE_CONSTANT (arg)) | |
1163 | || (TREE_CODE (arg) == ADDR_EXPR | |
1164 | && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)) | |
1165 | return const1_rtx; | |
1166 | ||
1167 | /* If we aren't going to be running CSE or this expression | |
1168 | has side effects, show we don't know it to be a constant. | |
1169 | Likewise if it's a pointer or aggregate type since in those | |
1170 | case we only want literals, since those are only optimized | |
1171 | when generating RTL, not later. */ | |
1172 | if (TREE_SIDE_EFFECTS (arg) || cse_not_expected | |
1173 | || AGGREGATE_TYPE_P (TREE_TYPE (arg)) | |
1174 | || POINTER_TYPE_P (TREE_TYPE (arg))) | |
1175 | return const0_rtx; | |
1176 | ||
1177 | /* Otherwise, emit (constant_p_rtx (ARG)) and let CSE get a | |
1178 | chance to see if it can deduce whether ARG is constant. */ | |
1179 | ||
1180 | tmp = expand_expr (arg, NULL_RTX, VOIDmode, 0); | |
1181 | tmp = gen_rtx_CONSTANT_P_RTX (value_mode, tmp); | |
1182 | return tmp; | |
1183 | } | |
1184 | } | |
1185 | ||
/* Expand a call to one of the builtin math functions (sin, cos, or sqrt).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */
static rtx
expand_builtin_mathfn (exp, target, subtarget)
     tree exp;
     rtx target, subtarget;
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);

  if (arglist == 0
      /* Arg could be wrong type if user redeclared this fcn wrong.  */
      || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
    return 0;

  /* Stabilize and compute the argument.  */
  if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
      && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
    {
      exp = copy_node (exp);
      TREE_OPERAND (exp, 1) = arglist;
      /* Wrap the computation of the argument in a SAVE_EXPR.  That
	 way, if we need to expand the argument again (as in the
	 flag_errno_math case below where we cannot directly set
	 errno), we will not perform side-effects more than once.
	 Note that here we're mutating the original EXP as well as the
	 copy; that's the right thing to do in case the original EXP
	 is expanded later.  */
      TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
      arglist = copy_node (arglist);
    }
  op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);

  /* Make a suitable register to place result in.  */
  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

  /* Everything from here on is emitted into a detached sequence so
     it can be discarded if the optab expansion fails.  */
  emit_queue ();
  start_sequence ();

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_SIN:
      builtin_optab = sin_optab; break;
    case BUILT_IN_COS:
      builtin_optab = cos_optab; break;
    case BUILT_IN_FSQRT:
      builtin_optab = sqrt_optab; break;
    default:
      abort ();
    }

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
			builtin_optab, op0, target, 0);

  /* If we were unable to expand via the builtin, stop the
     sequence (without outputting the insns) and return 0, causing
     a call to the library function.  */
  if (target == 0)
    {
      end_sequence ();
      return 0;
    }

  /* Check the results by default.  But if flag_fast_math is turned on,
     then assume sqrt will always be called with valid arguments.  */

  if (flag_errno_math && ! flag_fast_math)
    {
      rtx lab1;

      /* Don't define the builtin FP instructions
	 if your machine is not IEEE.  */
      if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
	abort ();

      lab1 = gen_label_rtx ();

      /* Test the result; if it is NaN, set errno=EDOM because
	 the argument was not in the domain.
	 (x == x jumps to LAB1 for any non-NaN result, skipping
	 the errno code below.)  */
      emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
			       0, 0, lab1);

#ifdef TARGET_EDOM
      {
#ifdef GEN_ERRNO_RTX
	rtx errno_rtx = GEN_ERRNO_RTX;
#else
	rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif

	emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
      }
#else
      /* We can't set errno=EDOM directly; let the library call do it.
	 Pop the arguments right away in case the call gets deleted.  */
      NO_DEFER_POP;
      expand_call (exp, target, 0);
      OK_DEFER_POP;
#endif

      emit_label (lab1);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insns (insns);

  return target;
}
1304 | ||
/* Expand expression EXP which is a call to the strlen builtin.  Return 0
   if we failed the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).

   Uses the target's strlen_optab pattern when available; a
   compile-time-constant string length is folded directly.  */
static rtx
expand_builtin_strlen (exp, target, mode)
     tree exp;
     rtx target;
     enum machine_mode mode;
{
  tree arglist = TREE_OPERAND (exp, 1);
  enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));

  if (arglist == 0
      /* Arg could be non-pointer if user redeclared this fcn wrong.  */
      || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
    return 0;
  else
    {
      rtx pat;
      tree src = TREE_VALUE (arglist);
      tree len = c_strlen (src);

      int align
	= get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      rtx result, src_reg, char_rtx, before_strlen;
      enum machine_mode insn_mode = value_mode, char_mode;
      enum insn_code icode = CODE_FOR_nothing;

      /* If the length is known, just return it.  */
      if (len != 0)
	return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return 0;

      /* Bail out if we can't compute strlen in the right mode.
	 Try successively wider modes until one has a pattern.  */
      while (insn_mode != VOIDmode)
	{
	  icode = strlen_optab->handlers[(int) insn_mode].insn_code;
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return 0;

      /* Make a place to write the result of the instruction.
	 Reuse TARGET only if it is already a pseudo of the right mode.  */
      result = target;
      if (! (result != 0
	     && GET_CODE (result) == REG
	     && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Make a place to hold the source address.  We will not expand
	 the actual source until we are sure that the expansion will
	 not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
	 source operand later.  */
      before_strlen = get_last_insn();

      /* Check the string is readable and has an end.  */
      if (current_function_check_memory_usage)
	emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
			   src_reg, Pmode,
			   GEN_INT (MEMORY_USE_RO),
			   TYPE_MODE (integer_type_node));

      /* Operand 2 of the pattern is the terminator character (NUL);
	 force it into a register if the predicate demands one.  */
      char_rtx = const0_rtx;
      char_mode = insn_data[(int)icode].operand[2].mode;
      if (! (*insn_data[(int)icode].operand[2].predicate) (char_rtx, char_mode))
	char_rtx = copy_to_mode_reg (char_mode, char_rtx);

      pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
			     char_rtx, GEN_INT (align));
      if (! pat)
	return 0;
      emit_insn (pat);

      /* Now that we are assured of success, expand the source.
	 The loading of SRC_REG is placed back at BEFORE_STRLEN so it
	 precedes everything emitted above that reads SRC_REG.  */
      start_sequence ();
      pat = memory_address (BLKmode,
			    expand_expr (src, src_reg, ptr_mode, EXPAND_SUM));
      if (pat != src_reg)
	emit_move_insn (src_reg, pat);
      pat = gen_sequence ();
      end_sequence ();

      if (before_strlen)
	emit_insn_after (pat, before_strlen);
      else
	emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == value_mode)
	target = result;
      else if (target != 0)
	convert_move (target, result, 0);
      else
	target = convert_to_mode (value_mode, result, 0);

      return target;
    }
}
1415 | ||
1416 | /* Expand a call to the memcpy builtin, with arguments in ARGLIST. */ | |
1417 | static rtx | |
1418 | expand_builtin_memcpy (arglist) | |
1419 | tree arglist; | |
1420 | { | |
1421 | if (arglist == 0 | |
1422 | /* Arg could be non-pointer if user redeclared this fcn wrong. */ | |
1423 | || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE | |
1424 | || TREE_CHAIN (arglist) == 0 | |
1425 | || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) | |
1426 | != POINTER_TYPE) | |
1427 | || TREE_CHAIN (TREE_CHAIN (arglist)) == 0 | |
1428 | || (TREE_CODE (TREE_TYPE (TREE_VALUE | |
1429 | (TREE_CHAIN (TREE_CHAIN (arglist))))) | |
1430 | != INTEGER_TYPE)) | |
1431 | return 0; | |
1432 | else | |
1433 | { | |
1434 | tree dest = TREE_VALUE (arglist); | |
1435 | tree src = TREE_VALUE (TREE_CHAIN (arglist)); | |
1436 | tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))); | |
1437 | ||
325d1c45 | 1438 | int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT); |
1439 | int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT); | |
53800dbe | 1440 | rtx dest_mem, src_mem, dest_addr, len_rtx; |
1441 | ||
1442 | /* If either SRC or DEST is not a pointer type, don't do | |
1443 | this operation in-line. */ | |
1444 | if (src_align == 0 || dest_align == 0) | |
1445 | return 0; | |
1446 | ||
1447 | dest_mem = get_memory_rtx (dest); | |
1448 | src_mem = get_memory_rtx (src); | |
1449 | len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0); | |
1450 | ||
1451 | /* Just copy the rights of SRC to the rights of DEST. */ | |
1452 | if (current_function_check_memory_usage) | |
1453 | emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3, | |
1454 | XEXP (dest_mem, 0), Pmode, | |
1455 | XEXP (src_mem, 0), Pmode, | |
1456 | len_rtx, TYPE_MODE (sizetype)); | |
1457 | ||
1458 | /* Copy word part most expediently. */ | |
1459 | dest_addr | |
1460 | = emit_block_move (dest_mem, src_mem, len_rtx, | |
1461 | MIN (src_align, dest_align)); | |
1462 | ||
1463 | if (dest_addr == 0) | |
1464 | dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX); | |
1465 | ||
1466 | return dest_addr; | |
1467 | } | |
1468 | } | |
1469 | ||
1470 | /* Expand expression EXP, which is a call to the strcpy builtin. Return 0 | |
1471 | if we failed the caller should emit a normal call. */ | |
902de8ed | 1472 | |
53800dbe | 1473 | static rtx |
1474 | expand_builtin_strcpy (exp) | |
1475 | tree exp; | |
1476 | { | |
1477 | tree arglist = TREE_OPERAND (exp, 1); | |
1478 | rtx result; | |
1479 | ||
1480 | if (arglist == 0 | |
1481 | /* Arg could be non-pointer if user redeclared this fcn wrong. */ | |
1482 | || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE | |
1483 | || TREE_CHAIN (arglist) == 0 | |
902de8ed | 1484 | || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) |
1485 | != POINTER_TYPE)) | |
53800dbe | 1486 | return 0; |
1487 | else | |
1488 | { | |
1489 | tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist))); | |
1490 | ||
1491 | if (len == 0) | |
1492 | return 0; | |
1493 | ||
902de8ed | 1494 | len = size_binop (PLUS_EXPR, len, ssize_int (1)); |
53800dbe | 1495 | chainon (arglist, build_tree_list (NULL_TREE, len)); |
1496 | } | |
902de8ed | 1497 | |
53800dbe | 1498 | result = expand_builtin_memcpy (arglist); |
902de8ed | 1499 | |
53800dbe | 1500 | if (! result) |
1501 | TREE_CHAIN (TREE_CHAIN (arglist)) = 0; | |
1502 | return result; | |
1503 | } | |
1504 | ||
1505 | /* Expand expression EXP, which is a call to the memset builtin. Return 0 | |
1506 | if we failed the caller should emit a normal call. */ | |
902de8ed | 1507 | |
53800dbe | 1508 | static rtx |
1509 | expand_builtin_memset (exp) | |
1510 | tree exp; | |
1511 | { | |
1512 | tree arglist = TREE_OPERAND (exp, 1); | |
1513 | ||
1514 | if (arglist == 0 | |
1515 | /* Arg could be non-pointer if user redeclared this fcn wrong. */ | |
1516 | || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE | |
1517 | || TREE_CHAIN (arglist) == 0 | |
1518 | || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) | |
1519 | != INTEGER_TYPE) | |
1520 | || TREE_CHAIN (TREE_CHAIN (arglist)) == 0 | |
1521 | || (INTEGER_TYPE | |
1522 | != (TREE_CODE (TREE_TYPE | |
1523 | (TREE_VALUE | |
1524 | (TREE_CHAIN (TREE_CHAIN (arglist)))))))) | |
1525 | return 0; | |
1526 | else | |
1527 | { | |
1528 | tree dest = TREE_VALUE (arglist); | |
1529 | tree val = TREE_VALUE (TREE_CHAIN (arglist)); | |
1530 | tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))); | |
1531 | ||
325d1c45 | 1532 | int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT); |
53800dbe | 1533 | rtx dest_mem, dest_addr, len_rtx; |
1534 | ||
1535 | /* If DEST is not a pointer type, don't do this | |
1536 | operation in-line. */ | |
1537 | if (dest_align == 0) | |
1538 | return 0; | |
1539 | ||
1540 | /* If the arguments have side-effects, then we can only evaluate | |
1541 | them at most once. The following code evaluates them twice if | |
1542 | they are not constants because we break out to expand_call | |
1543 | in that case. They can't be constants if they have side-effects | |
1544 | so we can check for that first. Alternatively, we could call | |
1545 | save_expr to make multiple evaluation safe. */ | |
1546 | if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len)) | |
1547 | return 0; | |
1548 | ||
1549 | /* If VAL is not 0, don't do this operation in-line. */ | |
1550 | if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx) | |
1551 | return 0; | |
1552 | ||
53800dbe | 1553 | len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0); |
53800dbe | 1554 | |
1555 | dest_mem = get_memory_rtx (dest); | |
1556 | ||
1557 | /* Just check DST is writable and mark it as readable. */ | |
1558 | if (current_function_check_memory_usage) | |
1559 | emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3, | |
1560 | XEXP (dest_mem, 0), Pmode, | |
1561 | len_rtx, TYPE_MODE (sizetype), | |
1562 | GEN_INT (MEMORY_USE_WO), | |
1563 | TYPE_MODE (integer_type_node)); | |
1564 | ||
1565 | ||
1566 | dest_addr = clear_storage (dest_mem, len_rtx, dest_align); | |
1567 | ||
1568 | if (dest_addr == 0) | |
1569 | dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX); | |
1570 | ||
1571 | return dest_addr; | |
1572 | } | |
1573 | } | |
1574 | ||
ffc83088 | 1575 | /* Expand expression EXP, which is a call to the bzero builtin. Return 0 |
1576 | if we failed the caller should emit a normal call. */ | |
1577 | static rtx | |
1578 | expand_builtin_bzero (exp) | |
1579 | tree exp; | |
1580 | { | |
1581 | tree arglist = TREE_OPERAND (exp, 1); | |
7369e7ba | 1582 | tree dest, size, newarglist; |
1583 | rtx result; | |
ffc83088 | 1584 | |
1585 | if (arglist == 0 | |
1586 | /* Arg could be non-pointer if user redeclared this fcn wrong. */ | |
7369e7ba | 1587 | || TREE_CODE (TREE_TYPE (dest = TREE_VALUE (arglist))) != POINTER_TYPE |
ffc83088 | 1588 | || TREE_CHAIN (arglist) == 0 |
7369e7ba | 1589 | || (TREE_CODE (TREE_TYPE (size = TREE_VALUE (TREE_CHAIN (arglist)))) |
ffc83088 | 1590 | != INTEGER_TYPE)) |
7369e7ba | 1591 | return NULL_RTX; |
ffc83088 | 1592 | |
7369e7ba | 1593 | /* New argument list transforming bzero(ptr x, int y) to |
1594 | memset(ptr x, int 0, size_t y). */ | |
1595 | ||
1596 | newarglist = build_tree_list (NULL_TREE, convert (sizetype, size)); | |
1597 | newarglist = tree_cons (NULL_TREE, integer_zero_node, newarglist); | |
1598 | newarglist = tree_cons (NULL_TREE, dest, newarglist); | |
ffc83088 | 1599 | |
7369e7ba | 1600 | TREE_OPERAND (exp, 1) = newarglist; |
1601 | result = expand_builtin_memset(exp); | |
ffc83088 | 1602 | |
7369e7ba | 1603 | /* Always restore the original arguments. */ |
1604 | TREE_OPERAND (exp, 1) = arglist; | |
ffc83088 | 1605 | |
7369e7ba | 1606 | return result; |
ffc83088 | 1607 | } |
1608 | ||
53800dbe | 1609 | #ifdef HAVE_cmpstrsi |
/* Expand expression EXP, which is a call to the memcmp or the strcmp builtin.
   ARGLIST is the argument list for this call.  Return 0 if we failed and the
   caller should emit a normal call, otherwise try to get the result in
   TARGET, if convenient.  */
static rtx
expand_builtin_memcmp (exp, arglist, target)
     tree exp;
     tree arglist;
     rtx target;
{
  /* If we need to check memory accesses, call the library function.  */
  if (current_function_check_memory_usage)
    return 0;

  /* Require exactly (pointer, pointer, integer) arguments.  */
  if (arglist == 0
      /* Arg could be non-pointer if user redeclared this fcn wrong.  */
      || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
      || TREE_CHAIN (arglist) == 0
      || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
      || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
      || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
    return 0;

  {
    enum machine_mode mode;
    tree arg1 = TREE_VALUE (arglist);
    tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
    tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result;
    rtx insn;

    int arg1_align
      = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    int arg2_align
      = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    enum machine_mode insn_mode
      = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return 0;

    /* Make a place to write the result of the instruction.  Reuse
       TARGET only if it is already a pseudo of the right mode.  */
    result = target;
    if (! (result != 0
	   && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1);
    arg2_rtx = get_memory_rtx (arg2);
    arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
    /* If the target has no cmpstrsi pattern, leave INSN null so we
       fall through to the library call below.  */
    if (!HAVE_cmpstrsi)
      insn = NULL_RTX;
    else
      insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			   GEN_INT (MIN (arg1_align, arg2_align)));

    if (insn)
      emit_insn (insn);
    else
      /* Fall back to calling memcmp in the runtime library.  */
      emit_library_call_value (memcmp_libfunc, result, 2,
			       TYPE_MODE (integer_type_node), 3,
			       XEXP (arg1_rtx, 0), Pmode,
			       XEXP (arg2_rtx, 0), Pmode,
			       convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
						TREE_UNSIGNED (sizetype)),
			       TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
}
1693 | ||
/* Expand expression EXP, which is a call to the strcmp builtin.  Return 0
   if we failed the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (exp, target)
     tree exp;
     rtx target;
{
  tree arglist = TREE_OPERAND (exp, 1);

  /* If we need to check memory accesses, call the library function.  */
  if (current_function_check_memory_usage)
    return 0;

  /* Require exactly (pointer, pointer) arguments.  */
  if (arglist == 0
      /* Arg could be non-pointer if user redeclared this fcn wrong.  */
      || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
      || TREE_CHAIN (arglist) == 0
      || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
	  != POINTER_TYPE))
    return 0;

  else if (! HAVE_cmpstrsi)
    return 0;
  {
    tree arg1 = TREE_VALUE (arglist);
    tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
    /* c_strlen gives a compile-time length for string literals,
       or 0 when the length is not known at compile time.  */
    tree len = c_strlen (arg1);
    tree len2 = c_strlen (arg2);
    rtx result;

    /* Include the terminating NUL: strcmp compares through it.  */
    if (len)
      len = size_binop (PLUS_EXPR, ssize_int (1), len);

    if (len2)
      len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

    /* If we don't have a constant length for the first, use the length
       of the second, if we know it.  We don't require a constant for
       this case; some cost analysis could be done if both are available
       but neither is constant.  For now, assume they're equally cheap.

       If both strings have constant lengths, use the smaller.  This
       could arise if optimization results in strcpy being called with
       two fixed strings, or if the code was machine-generated.  We should
       add some code to the `memcmp' handler below to deal with such
       situations, someday.  */

    if (!len || TREE_CODE (len) != INTEGER_CST)
      {
	if (len2)
	  len = len2;
	else if (len == 0)
	  return 0;
      }
    else if (len2 && TREE_CODE (len2) == INTEGER_CST
	     && tree_int_cst_lt (len2, len))
      len = len2;

    /* Tack the length onto the argument list in place and hand off to
       the memcmp expander; undo the splice on failure so a normal
       strcmp call sees the original two arguments.  */
    chainon (arglist, build_tree_list (NULL_TREE, len));
    result = expand_builtin_memcmp (exp, arglist, target);
    if (! result)
      TREE_CHAIN (TREE_CHAIN (arglist)) = 0;

    return result;
  }
}
1762 | #endif | |
1763 | ||
/* Expand a call to __builtin_saveregs, generating the result in a new
   pseudo (there is no TARGET parameter; the result is cached per
   function in saveregs_value).  */

rtx
expand_builtin_saveregs ()
{
  rtx val, seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

#ifdef EXPAND_BUILTIN_SAVEREGS
  /* Do whatever the machine needs done in this case.  */
  val = EXPAND_BUILTIN_SAVEREGS ();
#else
  /* ??? We used to try and build up a call to the out of line function,
     guessing about what registers needed saving etc.  This became much
     harder with __builtin_va_start, since we don't have a tree for a
     call to __builtin_saveregs to fall back on.  There was exactly one
     port (i860) that used this code, and I'm unconvinced it could actually
     handle the general case.  So we no longer try to handle anything
     weird and make the backend absorb the evil.  */

  error ("__builtin_saveregs not supported by this target");
  val = const0_rtx;
#endif

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the sequence after the NOTE that starts the function.  If this
     is inside a SEQUENCE, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insns_after (seq, get_insns ());
  pop_topmost_sequence ();

  return val;
}
1813 | ||
1814 | /* __builtin_args_info (N) returns word N of the arg space info | |
1815 | for the current function. The number and meanings of words | |
1816 | is controlled by the definition of CUMULATIVE_ARGS. */ | |
1817 | static rtx | |
1818 | expand_builtin_args_info (exp) | |
1819 | tree exp; | |
1820 | { | |
1821 | tree arglist = TREE_OPERAND (exp, 1); | |
1822 | int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int); | |
1823 | int *word_ptr = (int *) ¤t_function_args_info; | |
1824 | #if 0 | |
1825 | /* These are used by the code below that is if 0'ed away */ | |
1826 | int i; | |
1827 | tree type, elts, result; | |
1828 | #endif | |
1829 | ||
1830 | if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0) | |
dda90815 | 1831 | abort (); |
53800dbe | 1832 | |
1833 | if (arglist != 0) | |
1834 | { | |
1835 | tree arg = TREE_VALUE (arglist); | |
1836 | if (TREE_CODE (arg) != INTEGER_CST) | |
1837 | error ("argument of `__builtin_args_info' must be constant"); | |
1838 | else | |
1839 | { | |
1840 | int wordnum = TREE_INT_CST_LOW (arg); | |
1841 | ||
1842 | if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg)) | |
1843 | error ("argument of `__builtin_args_info' out of range"); | |
1844 | else | |
1845 | return GEN_INT (word_ptr[wordnum]); | |
1846 | } | |
1847 | } | |
1848 | else | |
1849 | error ("missing argument in `__builtin_args_info'"); | |
1850 | ||
1851 | return const0_rtx; | |
1852 | ||
1853 | #if 0 | |
1854 | for (i = 0; i < nwords; i++) | |
1855 | elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0)); | |
1856 | ||
1857 | type = build_array_type (integer_type_node, | |
1858 | build_index_type (build_int_2 (nwords, 0))); | |
1859 | result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts)); | |
1860 | TREE_CONSTANT (result) = 1; | |
1861 | TREE_STATIC (result) = 1; | |
a66c9326 | 1862 | result = build1 (INDIRECT_REF, build_pointer_type (type), result); |
53800dbe | 1863 | TREE_CONSTANT (result) = 1; |
1864 | return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD); | |
1865 | #endif | |
1866 | } | |
1867 | ||
a66c9326 | 1868 | /* Expand ARGLIST, from a call to __builtin_next_arg. */ |
53800dbe | 1869 | static rtx |
a66c9326 | 1870 | expand_builtin_next_arg (arglist) |
1871 | tree arglist; | |
53800dbe | 1872 | { |
53800dbe | 1873 | tree fntype = TREE_TYPE (current_function_decl); |
1874 | ||
1875 | if ((TYPE_ARG_TYPES (fntype) == 0 | |
1876 | || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype))) | |
1877 | == void_type_node)) | |
1878 | && ! current_function_varargs) | |
1879 | { | |
1880 | error ("`va_start' used in function with fixed args"); | |
1881 | return const0_rtx; | |
1882 | } | |
1883 | ||
1884 | if (arglist) | |
1885 | { | |
1886 | tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl)); | |
1887 | tree arg = TREE_VALUE (arglist); | |
1888 | ||
1889 | /* Strip off all nops for the sake of the comparison. This | |
1890 | is not quite the same as STRIP_NOPS. It does more. | |
1891 | We must also strip off INDIRECT_EXPR for C++ reference | |
1892 | parameters. */ | |
1893 | while (TREE_CODE (arg) == NOP_EXPR | |
1894 | || TREE_CODE (arg) == CONVERT_EXPR | |
1895 | || TREE_CODE (arg) == NON_LVALUE_EXPR | |
1896 | || TREE_CODE (arg) == INDIRECT_REF) | |
1897 | arg = TREE_OPERAND (arg, 0); | |
1898 | if (arg != last_parm) | |
1899 | warning ("second parameter of `va_start' not last named argument"); | |
1900 | } | |
1901 | else if (! current_function_varargs) | |
1902 | /* Evidently an out of date version of <stdarg.h>; can't validate | |
1903 | va_start's second argument, but can still work as intended. */ | |
1904 | warning ("`__builtin_next_arg' called without an argument"); | |
1905 | ||
1906 | return expand_binop (Pmode, add_optab, | |
1907 | current_function_internal_arg_pointer, | |
1908 | current_function_arg_offset_rtx, | |
1909 | NULL_RTX, 0, OPTAB_LIB_WIDEN); | |
1910 | } | |
1911 | ||
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  NEEDS_LVALUE is nonzero when the caller
   will assign through the result (va_start/va_copy destinations).  */

static tree
stabilize_va_list (valist, needs_lvalue)
     tree valist;
     int needs_lvalue;
{
  if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 TREE_TYPE (va_list_type_node), but it's possible we've
	 actually been given an array (an actual va_list_type_node).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
	  tree p2 = build_pointer_type (va_list_type_node);

	  /* Take the array's address, then view it as a pointer to
	     the element type, which is what backends expect.  */
	  valist = build1 (ADDR_EXPR, p2, valist);
	  valist = fold (build1 (NOP_EXPR, p1, valist));
	}
    }
  else
    {
      tree pt;

      if (! needs_lvalue)
	{
	  /* A side-effect-free rvalue can be used as-is.  */
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  pt = build_pointer_type (va_list_type_node);
	  valist = fold (build1 (ADDR_EXPR, pt, valist));
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      /* Evaluate the (possibly side-effecting) address exactly once,
	 then rebuild the lvalue by dereferencing it.  */
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      valist = fold (build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)),
			     valist));
    }

  return valist;
}
1960 | ||
1961 | /* The "standard" implementation of va_start: just assign `nextarg' to | |
1962 | the variable. */ | |
1963 | void | |
1964 | std_expand_builtin_va_start (stdarg_p, valist, nextarg) | |
1965 | int stdarg_p ATTRIBUTE_UNUSED; | |
1966 | tree valist; | |
1967 | rtx nextarg; | |
1968 | { | |
1969 | tree t; | |
1970 | ||
8a15c04a | 1971 | if (!stdarg_p) |
1972 | nextarg = plus_constant (nextarg, -UNITS_PER_WORD); | |
1973 | ||
a66c9326 | 1974 | t = build (MODIFY_EXPR, TREE_TYPE (valist), valist, |
1975 | make_tree (ptr_type_node, nextarg)); | |
1976 | TREE_SIDE_EFFECTS (t) = 1; | |
1977 | ||
1978 | expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL); | |
1979 | } | |
1980 | ||
1981 | /* Expand ARGLIST, which from a call to __builtin_stdarg_va_start or | |
1982 | __builtin_varargs_va_start, depending on STDARG_P. */ | |
1983 | static rtx | |
1984 | expand_builtin_va_start (stdarg_p, arglist) | |
1985 | int stdarg_p; | |
1986 | tree arglist; | |
1987 | { | |
1988 | rtx nextarg; | |
1989 | tree chain = arglist, valist; | |
1990 | ||
1991 | if (stdarg_p) | |
1992 | nextarg = expand_builtin_next_arg (chain = TREE_CHAIN (arglist)); | |
1993 | else | |
1994 | nextarg = expand_builtin_next_arg (NULL_TREE); | |
1995 | ||
1996 | if (TREE_CHAIN (chain)) | |
1997 | error ("too many arguments to function `va_start'"); | |
1998 | ||
1999 | valist = stabilize_va_list (TREE_VALUE (arglist), 1); | |
2000 | ||
2001 | #ifdef EXPAND_BUILTIN_VA_START | |
2002 | EXPAND_BUILTIN_VA_START (stdarg_p, valist, nextarg); | |
2003 | #else | |
2004 | std_expand_builtin_va_start (stdarg_p, valist, nextarg); | |
2005 | #endif | |
2006 | ||
2007 | return const0_rtx; | |
2008 | } | |
2009 | ||
/* Allocate an alias set for use in storing and reading from the varargs
   spill area.  */
int
get_varargs_alias_set ()
{
  /* Lazily allocated on first use; -1 means "not yet".  */
  static int set = -1;

  if (set == -1)
    set = new_alias_set ();

  return set;
}
2020 | ||
/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.
   Returns an rtx holding the address of the argument slot.  */
rtx
std_expand_builtin_va_arg (valist, type)
     tree valist, type;
{
  tree addr_tree, t;
  HOST_WIDE_INT align;
  HOST_WIDE_INT rounded_size;
  rtx addr;

  /* Compute the size of TYPE rounded up to the parameter-passing
     boundary.  */
  align = PARM_BOUNDARY / BITS_PER_UNIT;
  rounded_size = (((int_size_in_bytes (type) + align - 1) / align) * align);

  /* Get AP.  */
  addr_tree = valist;
  if (PAD_VARARGS_DOWN)
    {
      /* Small args are padded downward.  Skip the leading padding so
	 the address points at the data, not at the padding: for an
	 arg smaller than one slot the adjustment is rounded_size
	 minus the actual size; otherwise there is no padding.  */

      HOST_WIDE_INT adj;
      adj = TREE_INT_CST_LOW (TYPE_SIZE (type)) / BITS_PER_UNIT;
      if (rounded_size > align)
	adj = rounded_size;

      addr_tree = build (PLUS_EXPR, TREE_TYPE (addr_tree), addr_tree,
			 build_int_2 (rounded_size - adj, 0));
    }

  /* Materialize the address; copy_to_reg keeps it valid after AP is
     advanced below.  */
  addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
  addr = copy_to_reg (addr);

  /* Compute new value for AP.  */
  t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
	     build (PLUS_EXPR, TREE_TYPE (valist), valist,
		    build_int_2 (rounded_size, 0)));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return addr;
}
2063 | ||
2064 | /* Expand __builtin_va_arg, which is not really a builtin function, but | |
2065 | a very special sort of operator. */ | |
2066 | rtx | |
2067 | expand_builtin_va_arg (valist, type) | |
2068 | tree valist, type; | |
2069 | { | |
2070 | rtx addr, result; | |
6cd005c9 | 2071 | tree promoted_type, want_va_type, have_va_type; |
a66c9326 | 2072 | |
6cd005c9 | 2073 | /* Verify that valist is of the proper type. */ |
2074 | ||
2075 | want_va_type = va_list_type_node; | |
2076 | have_va_type = TREE_TYPE (valist); | |
2077 | if (TREE_CODE (want_va_type) == ARRAY_TYPE) | |
2078 | { | |
2079 | /* If va_list is an array type, the argument may have decayed | |
2080 | to a pointer type, e.g. by being passed to another function. | |
2081 | In that case, unwrap both types so that we can compare the | |
2082 | underlying records. */ | |
2083 | if (TREE_CODE (have_va_type) == ARRAY_TYPE | |
2084 | || TREE_CODE (have_va_type) == POINTER_TYPE) | |
2085 | { | |
2086 | want_va_type = TREE_TYPE (want_va_type); | |
2087 | have_va_type = TREE_TYPE (have_va_type); | |
2088 | } | |
2089 | } | |
2090 | if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type)) | |
a66c9326 | 2091 | { |
e94026da | 2092 | error ("first argument to `va_arg' not of type `va_list'"); |
2093 | addr = const0_rtx; | |
2094 | } | |
6cd005c9 | 2095 | |
2096 | /* Generate a diagnostic for requesting data of a type that cannot | |
2097 | be passed through `...' due to type promotion at the call site. */ | |
e94026da | 2098 | else if ((promoted_type = (*lang_type_promotes_to) (type)) != NULL_TREE) |
2099 | { | |
01ce7a1b | 2100 | const char *name = "<anonymous type>", *pname = 0; |
e94026da | 2101 | static int gave_help; |
2102 | ||
2103 | if (TYPE_NAME (type)) | |
2104 | { | |
2105 | if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE) | |
2106 | name = IDENTIFIER_POINTER (TYPE_NAME (type)); | |
2107 | else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL | |
2108 | && DECL_NAME (TYPE_NAME (type))) | |
2109 | name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))); | |
2110 | } | |
2111 | if (TYPE_NAME (promoted_type)) | |
2112 | { | |
2113 | if (TREE_CODE (TYPE_NAME (promoted_type)) == IDENTIFIER_NODE) | |
2114 | pname = IDENTIFIER_POINTER (TYPE_NAME (promoted_type)); | |
2115 | else if (TREE_CODE (TYPE_NAME (promoted_type)) == TYPE_DECL | |
2116 | && DECL_NAME (TYPE_NAME (promoted_type))) | |
2117 | pname = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (promoted_type))); | |
2118 | } | |
2119 | ||
2120 | error ("`%s' is promoted to `%s' when passed through `...'", name, pname); | |
2121 | if (! gave_help) | |
2122 | { | |
2123 | gave_help = 1; | |
2124 | error ("(so you should pass `%s' not `%s' to `va_arg')", pname, name); | |
2125 | } | |
2126 | ||
a66c9326 | 2127 | addr = const0_rtx; |
2128 | } | |
2129 | else | |
2130 | { | |
2131 | /* Make it easier for the backends by protecting the valist argument | |
2132 | from multiple evaluations. */ | |
2133 | valist = stabilize_va_list (valist, 0); | |
2134 | ||
2135 | #ifdef EXPAND_BUILTIN_VA_ARG | |
2136 | addr = EXPAND_BUILTIN_VA_ARG (valist, type); | |
2137 | #else | |
2138 | addr = std_expand_builtin_va_arg (valist, type); | |
2139 | #endif | |
2140 | } | |
2141 | ||
2142 | result = gen_rtx_MEM (TYPE_MODE (type), addr); | |
2143 | MEM_ALIAS_SET (result) = get_varargs_alias_set (); | |
2144 | ||
2145 | return result; | |
2146 | } | |
2147 | ||
/* Expand ARGLIST, from a call to __builtin_va_end.  Always returns
   const0_rtx; va_end yields no value.  */
static rtx
expand_builtin_va_end (arglist)
     tree arglist;
{
  tree valist = TREE_VALUE (arglist);

#ifdef EXPAND_BUILTIN_VA_END
  valist = stabilize_va_list (valist, 0);
  /* NOTE(review): the macro is handed the raw ARGLIST, not the
     stabilized VALIST computed just above -- confirm this is what
     target EXPAND_BUILTIN_VA_END definitions expect.  */
  EXPAND_BUILTIN_VA_END(arglist);
#else
  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
#endif

  return const0_rtx;
}
2167 | ||
/* Expand ARGLIST, from a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */
static rtx
expand_builtin_va_copy (arglist)
     tree arglist;
{
  tree dst, src, t;

  dst = TREE_VALUE (arglist);
  src = TREE_VALUE (TREE_CHAIN (arglist));

  /* Only the destination needs to be an lvalue.  */
  dst = stabilize_va_list (dst, 1);
  src = stabilize_va_list (src, 0);

  if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
    {
      /* Scalar va_list: an ordinary assignment copies it.  */
      t = build (MODIFY_EXPR, va_list_type_node, dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      /* Array va_list: do a block copy of the underlying storage.  */
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
			  VOIDmode, EXPAND_NORMAL);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      MEM_ALIAS_SET (dstb) = get_alias_set (TREE_TYPE (TREE_TYPE (dst)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      MEM_ALIAS_SET (srcb) = get_alias_set (TREE_TYPE (TREE_TYPE (src)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, TYPE_ALIGN (va_list_type_node));
    }

  return const0_rtx;
}
2211 | ||
53800dbe | 2212 | /* Expand a call to one of the builtin functions __builtin_frame_address or |
2213 | __builtin_return_address. */ | |
2214 | static rtx | |
2215 | expand_builtin_frame_address (exp) | |
2216 | tree exp; | |
2217 | { | |
2218 | tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0); | |
2219 | tree arglist = TREE_OPERAND (exp, 1); | |
2220 | ||
2221 | /* The argument must be a nonnegative integer constant. | |
2222 | It counts the number of frames to scan up the stack. | |
2223 | The value is the return address saved in that frame. */ | |
2224 | if (arglist == 0) | |
2225 | /* Warning about missing arg was already issued. */ | |
2226 | return const0_rtx; | |
2227 | else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST | |
2228 | || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0) | |
2229 | { | |
2230 | if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS) | |
2231 | error ("invalid arg to `__builtin_frame_address'"); | |
2232 | else | |
2233 | error ("invalid arg to `__builtin_return_address'"); | |
2234 | return const0_rtx; | |
2235 | } | |
2236 | else | |
2237 | { | |
2238 | rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), | |
2239 | TREE_INT_CST_LOW (TREE_VALUE (arglist)), | |
2240 | hard_frame_pointer_rtx); | |
2241 | ||
2242 | /* Some ports cannot access arbitrary stack frames. */ | |
2243 | if (tem == NULL) | |
2244 | { | |
2245 | if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS) | |
2246 | warning ("unsupported arg to `__builtin_frame_address'"); | |
2247 | else | |
2248 | warning ("unsupported arg to `__builtin_return_address'"); | |
2249 | return const0_rtx; | |
2250 | } | |
2251 | ||
2252 | /* For __builtin_frame_address, return what we've got. */ | |
2253 | if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS) | |
2254 | return tem; | |
2255 | ||
2256 | if (GET_CODE (tem) != REG | |
2257 | && ! CONSTANT_P (tem)) | |
2258 | tem = copy_to_mode_reg (Pmode, tem); | |
2259 | return tem; | |
2260 | } | |
2261 | } | |
2262 | ||
2263 | /* Expand a call to the alloca builtin, with arguments ARGLIST. Return 0 if | |
2264 | we failed and the caller should emit a normal call, otherwise try to get | |
2265 | the result in TARGET, if convenient. */ | |
2266 | static rtx | |
2267 | expand_builtin_alloca (arglist, target) | |
2268 | tree arglist; | |
2269 | rtx target; | |
2270 | { | |
2271 | rtx op0; | |
2272 | ||
2273 | if (arglist == 0 | |
2274 | /* Arg could be non-integer if user redeclared this fcn wrong. */ | |
2275 | || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE) | |
2276 | return 0; | |
2277 | ||
2278 | /* Compute the argument. */ | |
2279 | op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0); | |
2280 | ||
2281 | /* Allocate the desired space. */ | |
2282 | return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT); | |
2283 | } | |
2284 | ||
2285 | /* Expand a call to the ffs builtin. The arguments are in ARGLIST. | |
2286 | Return 0 if a normal call should be emitted rather than expanding the | |
2287 | function in-line. If convenient, the result should be placed in TARGET. | |
2288 | SUBTARGET may be used as the target for computing one of EXP's operands. */ | |
2289 | static rtx | |
2290 | expand_builtin_ffs (arglist, target, subtarget) | |
2291 | tree arglist; | |
2292 | rtx target, subtarget; | |
2293 | { | |
2294 | rtx op0; | |
2295 | if (arglist == 0 | |
2296 | /* Arg could be non-integer if user redeclared this fcn wrong. */ | |
2297 | || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE) | |
2298 | return 0; | |
2299 | ||
2300 | /* Compute the argument. */ | |
2301 | op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0); | |
2302 | /* Compute ffs, into TARGET if possible. | |
2303 | Set TARGET to wherever the result comes back. */ | |
2304 | target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))), | |
2305 | ffs_optab, op0, target, 1); | |
2306 | if (target == 0) | |
2307 | abort (); | |
2308 | return target; | |
2309 | } | |
89cfe6e5 | 2310 | |
2311 | /* Expand a call to __builtin_expect. We return our argument and | |
2312 | emit a NOTE_INSN_EXPECTED_VALUE note. */ | |
2313 | ||
2314 | static rtx | |
2315 | expand_builtin_expect (arglist, target) | |
2316 | tree arglist; | |
2317 | rtx target; | |
2318 | { | |
2319 | tree exp, c; | |
2320 | rtx note, rtx_c; | |
2321 | ||
2322 | if (arglist == NULL_TREE | |
2323 | || TREE_CHAIN (arglist) == NULL_TREE) | |
2324 | return const0_rtx; | |
2325 | exp = TREE_VALUE (arglist); | |
2326 | c = TREE_VALUE (TREE_CHAIN (arglist)); | |
2327 | ||
2328 | if (TREE_CODE (c) != INTEGER_CST) | |
2329 | { | |
2330 | error ("second arg to `__builtin_expect' must be a constant"); | |
2331 | c = integer_zero_node; | |
2332 | } | |
2333 | ||
2334 | target = expand_expr (exp, target, VOIDmode, EXPAND_NORMAL); | |
2335 | ||
2336 | /* Don't bother with expected value notes for integral constants. */ | |
2337 | if (GET_CODE (target) != CONST_INT) | |
2338 | { | |
2339 | /* We do need to force this into a register so that we can be | |
2340 | moderately sure to be able to correctly interpret the branch | |
2341 | condition later. */ | |
2342 | target = force_reg (GET_MODE (target), target); | |
2343 | ||
2344 | rtx_c = expand_expr (c, NULL_RTX, GET_MODE (target), EXPAND_NORMAL); | |
2345 | ||
2346 | note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE); | |
2347 | NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, target, rtx_c); | |
2348 | } | |
2349 | ||
2350 | return target; | |
2351 | } | |
53800dbe | 2352 | \f |
2353 | /* Expand an expression EXP that calls a built-in function, | |
2354 | with result going to TARGET if that's convenient | |
2355 | (and in mode MODE if that's convenient). | |
2356 | SUBTARGET may be used as the target for computing one of EXP's operands. | |
2357 | IGNORE is nonzero if the value is to be ignored. */ | |
2358 | ||
2359 | rtx | |
2360 | expand_builtin (exp, target, subtarget, mode, ignore) | |
2361 | tree exp; | |
2362 | rtx target; | |
2363 | rtx subtarget; | |
2364 | enum machine_mode mode; | |
2365 | int ignore; | |
2366 | { | |
2367 | tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0); | |
2368 | tree arglist = TREE_OPERAND (exp, 1); | |
2369 | enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl); | |
2370 | ||
8305149e | 2371 | #ifdef MD_EXPAND_BUILTIN |
2372 | if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD) | |
2373 | return MD_EXPAND_BUILTIN (exp, target, subtarget, mode, ignore); | |
2374 | #endif | |
2375 | ||
53800dbe | 2376 | /* When not optimizing, generate calls to library functions for a certain |
2377 | set of builtins. */ | |
2378 | if (! optimize && ! CALLED_AS_BUILT_IN (fndecl) | |
2379 | && (fcode == BUILT_IN_SIN || fcode == BUILT_IN_COS | |
2380 | || fcode == BUILT_IN_FSQRT || fcode == BUILT_IN_MEMSET | |
2381 | || fcode == BUILT_IN_MEMCPY || fcode == BUILT_IN_MEMCMP | |
ffc83088 | 2382 | || fcode == BUILT_IN_BCMP || fcode == BUILT_IN_BZERO |
53800dbe | 2383 | || fcode == BUILT_IN_STRLEN || fcode == BUILT_IN_STRCPY |
2384 | || fcode == BUILT_IN_STRCMP || fcode == BUILT_IN_FFS)) | |
2385 | return expand_call (exp, target, ignore); | |
2386 | ||
2387 | switch (fcode) | |
2388 | { | |
2389 | case BUILT_IN_ABS: | |
2390 | case BUILT_IN_LABS: | |
2391 | case BUILT_IN_FABS: | |
2392 | /* build_function_call changes these into ABS_EXPR. */ | |
2393 | abort (); | |
2394 | ||
2395 | case BUILT_IN_SIN: | |
2396 | case BUILT_IN_COS: | |
2397 | /* Treat these like sqrt, but only if the user asks for them. */ | |
2398 | if (! flag_fast_math) | |
2399 | break; | |
2400 | case BUILT_IN_FSQRT: | |
2401 | target = expand_builtin_mathfn (exp, target, subtarget); | |
2402 | if (target) | |
2403 | return target; | |
2404 | break; | |
2405 | ||
2406 | case BUILT_IN_FMOD: | |
2407 | break; | |
2408 | ||
2409 | case BUILT_IN_APPLY_ARGS: | |
2410 | return expand_builtin_apply_args (); | |
2411 | ||
2412 | /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes | |
2413 | FUNCTION with a copy of the parameters described by | |
2414 | ARGUMENTS, and ARGSIZE. It returns a block of memory | |
2415 | allocated on the stack into which is stored all the registers | |
2416 | that might possibly be used for returning the result of a | |
2417 | function. ARGUMENTS is the value returned by | |
2418 | __builtin_apply_args. ARGSIZE is the number of bytes of | |
2419 | arguments that must be copied. ??? How should this value be | |
2420 | computed? We'll also need a safe worst case value for varargs | |
2421 | functions. */ | |
2422 | case BUILT_IN_APPLY: | |
2423 | if (arglist == 0 | |
2424 | /* Arg could be non-pointer if user redeclared this fcn wrong. */ | |
2425 | || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist))) | |
2426 | || TREE_CHAIN (arglist) == 0 | |
2427 | || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE | |
2428 | || TREE_CHAIN (TREE_CHAIN (arglist)) == 0 | |
2429 | || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE) | |
2430 | return const0_rtx; | |
2431 | else | |
2432 | { | |
2433 | int i; | |
2434 | tree t; | |
2435 | rtx ops[3]; | |
2436 | ||
2437 | for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++) | |
2438 | ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0); | |
2439 | ||
2440 | return expand_builtin_apply (ops[0], ops[1], ops[2]); | |
2441 | } | |
2442 | ||
2443 | /* __builtin_return (RESULT) causes the function to return the | |
2444 | value described by RESULT. RESULT is address of the block of | |
2445 | memory returned by __builtin_apply. */ | |
2446 | case BUILT_IN_RETURN: | |
2447 | if (arglist | |
2448 | /* Arg could be non-pointer if user redeclared this fcn wrong. */ | |
2449 | && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE) | |
2450 | expand_builtin_return (expand_expr (TREE_VALUE (arglist), | |
2451 | NULL_RTX, VOIDmode, 0)); | |
2452 | return const0_rtx; | |
2453 | ||
2454 | case BUILT_IN_SAVEREGS: | |
a66c9326 | 2455 | return expand_builtin_saveregs (); |
53800dbe | 2456 | |
2457 | case BUILT_IN_ARGS_INFO: | |
2458 | return expand_builtin_args_info (exp); | |
2459 | ||
2460 | /* Return the address of the first anonymous stack arg. */ | |
2461 | case BUILT_IN_NEXT_ARG: | |
a66c9326 | 2462 | return expand_builtin_next_arg (arglist); |
53800dbe | 2463 | |
2464 | case BUILT_IN_CLASSIFY_TYPE: | |
2465 | return expand_builtin_classify_type (arglist); | |
2466 | ||
2467 | case BUILT_IN_CONSTANT_P: | |
2468 | return expand_builtin_constant_p (exp); | |
2469 | ||
2470 | case BUILT_IN_FRAME_ADDRESS: | |
2471 | case BUILT_IN_RETURN_ADDRESS: | |
2472 | return expand_builtin_frame_address (exp); | |
2473 | ||
2474 | /* Returns the address of the area where the structure is returned. | |
2475 | 0 otherwise. */ | |
2476 | case BUILT_IN_AGGREGATE_INCOMING_ADDRESS: | |
2477 | if (arglist != 0 | |
2478 | || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))) | |
2479 | || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM) | |
2480 | return const0_rtx; | |
2481 | else | |
2482 | return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0); | |
2483 | ||
2484 | case BUILT_IN_ALLOCA: | |
2485 | target = expand_builtin_alloca (arglist, target); | |
2486 | if (target) | |
2487 | return target; | |
2488 | break; | |
2489 | ||
2490 | case BUILT_IN_FFS: | |
bdc5170d | 2491 | target = expand_builtin_ffs (arglist, target, subtarget); |
53800dbe | 2492 | if (target) |
2493 | return target; | |
2494 | break; | |
2495 | ||
2496 | case BUILT_IN_STRLEN: | |
2497 | target = expand_builtin_strlen (exp, target, mode); | |
2498 | if (target) | |
2499 | return target; | |
2500 | break; | |
2501 | ||
2502 | case BUILT_IN_STRCPY: | |
2503 | target = expand_builtin_strcpy (exp); | |
2504 | if (target) | |
2505 | return target; | |
2506 | break; | |
2507 | ||
2508 | case BUILT_IN_MEMCPY: | |
2509 | target = expand_builtin_memcpy (arglist); | |
2510 | if (target) | |
2511 | return target; | |
2512 | break; | |
2513 | ||
2514 | case BUILT_IN_MEMSET: | |
2515 | target = expand_builtin_memset (exp); | |
2516 | if (target) | |
2517 | return target; | |
2518 | break; | |
2519 | ||
ffc83088 | 2520 | case BUILT_IN_BZERO: |
2521 | target = expand_builtin_bzero (exp); | |
2522 | if (target) | |
2523 | return target; | |
2524 | break; | |
2525 | ||
53800dbe | 2526 | /* These comparison functions need an instruction that returns an actual |
2527 | index. An ordinary compare that just sets the condition codes | |
2528 | is not enough. */ | |
2529 | #ifdef HAVE_cmpstrsi | |
2530 | case BUILT_IN_STRCMP: | |
2531 | target = expand_builtin_strcmp (exp, target); | |
2532 | if (target) | |
2533 | return target; | |
2534 | break; | |
2535 | ||
071f1696 | 2536 | case BUILT_IN_BCMP: |
53800dbe | 2537 | case BUILT_IN_MEMCMP: |
2538 | target = expand_builtin_memcmp (exp, arglist, target); | |
2539 | if (target) | |
2540 | return target; | |
2541 | break; | |
2542 | #else | |
2543 | case BUILT_IN_STRCMP: | |
071f1696 | 2544 | case BUILT_IN_BCMP: |
53800dbe | 2545 | case BUILT_IN_MEMCMP: |
2546 | break; | |
2547 | #endif | |
2548 | ||
2549 | case BUILT_IN_SETJMP: | |
2550 | if (arglist == 0 | |
2551 | || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE) | |
2552 | break; | |
2553 | else | |
2554 | { | |
2555 | rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget, | |
2556 | VOIDmode, 0); | |
2557 | rtx lab = gen_label_rtx (); | |
2558 | rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab); | |
2559 | emit_label (lab); | |
2560 | return ret; | |
2561 | } | |
2562 | ||
2563 | /* __builtin_longjmp is passed a pointer to an array of five words. | |
2564 | It's similar to the C library longjmp function but works with | |
2565 | __builtin_setjmp above. */ | |
2566 | case BUILT_IN_LONGJMP: | |
2567 | if (arglist == 0 || TREE_CHAIN (arglist) == 0 | |
2568 | || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE) | |
2569 | break; | |
2570 | else | |
2571 | { | |
2572 | rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget, | |
2573 | VOIDmode, 0); | |
2574 | rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), | |
2575 | NULL_RTX, VOIDmode, 0); | |
2576 | ||
2577 | if (value != const1_rtx) | |
2578 | { | |
2579 | error ("__builtin_longjmp second argument must be 1"); | |
2580 | return const0_rtx; | |
2581 | } | |
2582 | ||
2583 | expand_builtin_longjmp (buf_addr, value); | |
2584 | return const0_rtx; | |
2585 | } | |
2586 | ||
2587 | case BUILT_IN_TRAP: | |
2588 | #ifdef HAVE_trap | |
2589 | if (HAVE_trap) | |
2590 | emit_insn (gen_trap ()); | |
2591 | else | |
2592 | #endif | |
2593 | error ("__builtin_trap not supported by this target"); | |
2594 | emit_barrier (); | |
2595 | return const0_rtx; | |
2596 | ||
2597 | /* Various hooks for the DWARF 2 __throw routine. */ | |
2598 | case BUILT_IN_UNWIND_INIT: | |
2599 | expand_builtin_unwind_init (); | |
2600 | return const0_rtx; | |
2601 | case BUILT_IN_DWARF_CFA: | |
2602 | return virtual_cfa_rtx; | |
2603 | #ifdef DWARF2_UNWIND_INFO | |
2604 | case BUILT_IN_DWARF_FP_REGNUM: | |
2605 | return expand_builtin_dwarf_fp_regnum (); | |
695e919b | 2606 | case BUILT_IN_INIT_DWARF_REG_SIZES: |
2607 | expand_builtin_init_dwarf_reg_sizes (TREE_VALUE (arglist)); | |
2608 | return const0_rtx; | |
53800dbe | 2609 | #endif |
2610 | case BUILT_IN_FROB_RETURN_ADDR: | |
2611 | return expand_builtin_frob_return_addr (TREE_VALUE (arglist)); | |
2612 | case BUILT_IN_EXTRACT_RETURN_ADDR: | |
2613 | return expand_builtin_extract_return_addr (TREE_VALUE (arglist)); | |
2614 | case BUILT_IN_EH_RETURN: | |
2615 | expand_builtin_eh_return (TREE_VALUE (arglist), | |
2616 | TREE_VALUE (TREE_CHAIN (arglist)), | |
2617 | TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)))); | |
2618 | return const0_rtx; | |
a66c9326 | 2619 | case BUILT_IN_VARARGS_START: |
2620 | return expand_builtin_va_start (0, arglist); | |
2621 | case BUILT_IN_STDARG_START: | |
2622 | return expand_builtin_va_start (1, arglist); | |
2623 | case BUILT_IN_VA_END: | |
2624 | return expand_builtin_va_end (arglist); | |
2625 | case BUILT_IN_VA_COPY: | |
2626 | return expand_builtin_va_copy (arglist); | |
89cfe6e5 | 2627 | case BUILT_IN_EXPECT: |
2628 | return expand_builtin_expect (arglist, target); | |
53800dbe | 2629 | |
2630 | default: /* just do library call, if unknown builtin */ | |
2631 | error ("built-in function `%s' not currently supported", | |
2632 | IDENTIFIER_POINTER (DECL_NAME (fndecl))); | |
2633 | } | |
2634 | ||
2635 | /* The switch statement above can drop through to cause the function | |
2636 | to be called normally. */ | |
2637 | return expand_call (exp, target, ignore); | |
2638 | } |