]>
Commit | Line | Data |
---|---|---|
53800dbe | 1 | /* Expand builtin functions. |
0b387d23 | 2 | Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, |
3 | 1999, 2000 Free Software Foundation, Inc. | |
53800dbe | 4 | |
5 | This file is part of GNU CC. | |
6 | ||
7 | GNU CC is free software; you can redistribute it and/or modify | |
8 | it under the terms of the GNU General Public License as published by | |
9 | the Free Software Foundation; either version 2, or (at your option) | |
10 | any later version. | |
11 | ||
12 | GNU CC is distributed in the hope that it will be useful, | |
13 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
14 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
15 | GNU General Public License for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
18 | along with GNU CC; see the file COPYING. If not, write to | |
19 | the Free Software Foundation, 59 Temple Place - Suite 330, | |
20 | Boston, MA 02111-1307, USA. */ | |
21 | ||
22 | #include "config.h" | |
23 | #include "system.h" | |
24 | #include "machmode.h" | |
25 | #include "rtl.h" | |
26 | #include "tree.h" | |
27 | #include "obstack.h" | |
28 | #include "flags.h" | |
29 | #include "regs.h" | |
30 | #include "hard-reg-set.h" | |
31 | #include "except.h" | |
32 | #include "function.h" | |
33 | #include "insn-flags.h" | |
34 | #include "insn-codes.h" | |
35 | #include "insn-config.h" | |
36 | #include "expr.h" | |
37 | #include "recog.h" | |
38 | #include "output.h" | |
39 | #include "typeclass.h" | |
40 | #include "defaults.h" | |
41 | #include "toplev.h" | |
1dd6c958 | 42 | #include "tm_p.h" |
53800dbe | 43 | |
44 | #define CALLED_AS_BUILT_IN(NODE) \ | |
45 | (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10)) | |
46 | ||
53800dbe | 47 | /* Register mappings for target machines without register windows. */ |
48 | #ifndef INCOMING_REGNO | |
49 | #define INCOMING_REGNO(OUT) (OUT) | |
50 | #endif | |
51 | #ifndef OUTGOING_REGNO | |
52 | #define OUTGOING_REGNO(IN) (IN) | |
53 | #endif | |
54 | ||
726e2588 | 55 | #ifndef PAD_VARARGS_DOWN |
56 | #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN | |
57 | #endif | |
58 | ||
ab7943b9 | 59 | /* Define the names of the builtin function types and codes. */ |
8934cb0c | 60 | const char *const built_in_class_names[4] |
ab7943b9 | 61 | = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"}; |
62 | ||
4e9d90c7 | 63 | #define DEF_BUILTIN(x) STRINGIFY(x), |
8934cb0c | 64 | const char *const built_in_names[(int) END_BUILTINS] = |
4e9d90c7 | 65 | { |
66 | #include "builtins.def" | |
67 | }; | |
68 | #undef DEF_BUILTIN | |
ab7943b9 | 69 | |
6bcfea9e | 70 | tree (*lang_type_promotes_to) PARAMS ((tree)); |
e94026da | 71 | |
6bcfea9e | 72 | static int get_pointer_alignment PARAMS ((tree, unsigned)); |
73 | static tree c_strlen PARAMS ((tree)); | |
74 | static rtx get_memory_rtx PARAMS ((tree)); | |
75 | static int apply_args_size PARAMS ((void)); | |
76 | static int apply_result_size PARAMS ((void)); | |
d8c9779c | 77 | #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return) |
6bcfea9e | 78 | static rtx result_vector PARAMS ((int, rtx)); |
d8c9779c | 79 | #endif |
6bcfea9e | 80 | static rtx expand_builtin_apply_args PARAMS ((void)); |
81 | static rtx expand_builtin_apply_args_1 PARAMS ((void)); | |
82 | static rtx expand_builtin_apply PARAMS ((rtx, rtx, rtx)); | |
83 | static void expand_builtin_return PARAMS ((rtx)); | |
84 | static rtx expand_builtin_classify_type PARAMS ((tree)); | |
85 | static rtx expand_builtin_mathfn PARAMS ((tree, rtx, rtx)); | |
86 | static rtx expand_builtin_constant_p PARAMS ((tree)); | |
87 | static rtx expand_builtin_args_info PARAMS ((tree)); | |
88 | static rtx expand_builtin_next_arg PARAMS ((tree)); | |
89 | static rtx expand_builtin_va_start PARAMS ((int, tree)); | |
90 | static rtx expand_builtin_va_end PARAMS ((tree)); | |
91 | static rtx expand_builtin_va_copy PARAMS ((tree)); | |
95d038e4 | 92 | #ifdef HAVE_cmpstrsi |
6bcfea9e | 93 | static rtx expand_builtin_memcmp PARAMS ((tree, tree, rtx)); |
94 | static rtx expand_builtin_strcmp PARAMS ((tree, rtx)); | |
95d038e4 | 95 | #endif |
6bcfea9e | 96 | static rtx expand_builtin_memcpy PARAMS ((tree)); |
97 | static rtx expand_builtin_strcpy PARAMS ((tree)); | |
98 | static rtx expand_builtin_memset PARAMS ((tree)); | |
ab7943b9 | 99 | static rtx expand_builtin_strlen PARAMS ((tree, rtx, |
100 | enum machine_mode)); | |
6bcfea9e | 101 | static rtx expand_builtin_alloca PARAMS ((tree, rtx)); |
102 | static rtx expand_builtin_ffs PARAMS ((tree, rtx, rtx)); | |
103 | static rtx expand_builtin_frame_address PARAMS ((tree)); | |
104 | static tree stabilize_va_list PARAMS ((tree, int)); | |
53800dbe | 105 | |
106 | /* Return the alignment in bits of EXP, a pointer valued expression. | |
107 | But don't return more than MAX_ALIGN no matter what. | |
108 | The alignment returned is, by default, the alignment of the thing that | |
109 | EXP points to (if it is not a POINTER_TYPE, 0 is returned). | |
110 | ||
111 | Otherwise, look at the expression to see if we can do better, i.e., if the | |
112 | expression is actually pointing at an object whose alignment is tighter. */ | |
113 | ||
114 | static int | |
115 | get_pointer_alignment (exp, max_align) | |
116 | tree exp; | |
117 | unsigned max_align; | |
118 | { | |
119 | unsigned align, inner; | |
120 | ||
121 | if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE) | |
122 | return 0; | |
123 | ||
124 | align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))); | |
125 | align = MIN (align, max_align); | |
126 | ||
127 | while (1) | |
128 | { | |
129 | switch (TREE_CODE (exp)) | |
130 | { | |
131 | case NOP_EXPR: | |
132 | case CONVERT_EXPR: | |
133 | case NON_LVALUE_EXPR: | |
134 | exp = TREE_OPERAND (exp, 0); | |
135 | if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE) | |
136 | return align; | |
137 | inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))); | |
138 | align = MIN (inner, max_align); | |
139 | break; | |
140 | ||
141 | case PLUS_EXPR: | |
142 | /* If sum of pointer + int, restrict our maximum alignment to that | |
143 | imposed by the integer. If not, we can't do any better than | |
144 | ALIGN. */ | |
145 | if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST) | |
146 | return align; | |
147 | ||
148 | while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT) | |
149 | & (max_align - 1)) | |
150 | != 0) | |
151 | max_align >>= 1; | |
152 | ||
153 | exp = TREE_OPERAND (exp, 0); | |
154 | break; | |
155 | ||
156 | case ADDR_EXPR: | |
157 | /* See what we are pointing at and look at its alignment. */ | |
158 | exp = TREE_OPERAND (exp, 0); | |
159 | if (TREE_CODE (exp) == FUNCTION_DECL) | |
160 | align = FUNCTION_BOUNDARY; | |
9308e976 | 161 | else if (DECL_P (exp)) |
53800dbe | 162 | align = DECL_ALIGN (exp); |
163 | #ifdef CONSTANT_ALIGNMENT | |
164 | else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c') | |
165 | align = CONSTANT_ALIGNMENT (exp, align); | |
166 | #endif | |
167 | return MIN (align, max_align); | |
168 | ||
169 | default: | |
170 | return align; | |
171 | } | |
172 | } | |
173 | } | |
174 | ||
175 | /* Compute the length of a C string. TREE_STRING_LENGTH is not the right | |
176 | way, because it could contain a zero byte in the middle. | |
177 | TREE_STRING_LENGTH is the size of the character array, not the string. | |
178 | ||
902de8ed | 179 | The value returned is of type `ssizetype'. |
180 | ||
53800dbe | 181 | Unfortunately, string_constant can't access the values of const char |
182 | arrays with initializers, so neither can we do so here. */ | |
183 | ||
184 | static tree | |
185 | c_strlen (src) | |
186 | tree src; | |
187 | { | |
188 | tree offset_node; | |
189 | int offset, max; | |
190 | char *ptr; | |
191 | ||
192 | src = string_constant (src, &offset_node); | |
193 | if (src == 0) | |
194 | return 0; | |
902de8ed | 195 | |
53800dbe | 196 | max = TREE_STRING_LENGTH (src); |
197 | ptr = TREE_STRING_POINTER (src); | |
902de8ed | 198 | |
53800dbe | 199 | if (offset_node && TREE_CODE (offset_node) != INTEGER_CST) |
200 | { | |
201 | /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't | |
202 | compute the offset to the following null if we don't know where to | |
203 | start searching for it. */ | |
204 | int i; | |
902de8ed | 205 | |
53800dbe | 206 | for (i = 0; i < max; i++) |
207 | if (ptr[i] == 0) | |
208 | return 0; | |
902de8ed | 209 | |
53800dbe | 210 | /* We don't know the starting offset, but we do know that the string |
211 | has no internal zero bytes. We can assume that the offset falls | |
212 | within the bounds of the string; otherwise, the programmer deserves | |
213 | what he gets. Subtract the offset from the length of the string, | |
902de8ed | 214 | and return that. This would perhaps not be valid if we were dealing |
215 | with named arrays in addition to literal string constants. */ | |
216 | ||
217 | return size_diffop (size_int (max), offset_node); | |
53800dbe | 218 | } |
219 | ||
220 | /* We have a known offset into the string. Start searching there for | |
221 | a null character. */ | |
222 | if (offset_node == 0) | |
223 | offset = 0; | |
224 | else | |
225 | { | |
226 | /* Did we get a long long offset? If so, punt. */ | |
227 | if (TREE_INT_CST_HIGH (offset_node) != 0) | |
228 | return 0; | |
229 | offset = TREE_INT_CST_LOW (offset_node); | |
230 | } | |
902de8ed | 231 | |
53800dbe | 232 | /* If the offset is known to be out of bounds, warn, and call strlen at |
233 | runtime. */ | |
234 | if (offset < 0 || offset > max) | |
235 | { | |
236 | warning ("offset outside bounds of constant string"); | |
237 | return 0; | |
238 | } | |
902de8ed | 239 | |
53800dbe | 240 | /* Use strlen to search for the first zero byte. Since any strings |
241 | constructed with build_string will have nulls appended, we win even | |
242 | if we get handed something like (char[4])"abcd". | |
243 | ||
244 | Since OFFSET is our starting index into the string, no further | |
245 | calculation is needed. */ | |
902de8ed | 246 | return ssize_int (strlen (ptr + offset)); |
53800dbe | 247 | } |
248 | ||
249 | /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT | |
250 | times to get the address of either a higher stack frame, or a return | |
251 | address located within it (depending on FNDECL_CODE). */ | |
902de8ed | 252 | |
53800dbe | 253 | rtx |
254 | expand_builtin_return_addr (fndecl_code, count, tem) | |
255 | enum built_in_function fndecl_code; | |
256 | int count; | |
257 | rtx tem; | |
258 | { | |
259 | int i; | |
260 | ||
261 | /* Some machines need special handling before we can access | |
262 | arbitrary frames. For example, on the sparc, we must first flush | |
263 | all register windows to the stack. */ | |
264 | #ifdef SETUP_FRAME_ADDRESSES | |
265 | if (count > 0) | |
266 | SETUP_FRAME_ADDRESSES (); | |
267 | #endif | |
268 | ||
269 | /* On the sparc, the return address is not in the frame, it is in a | |
270 | register. There is no way to access it off of the current frame | |
271 | pointer, but it can be accessed off the previous frame pointer by | |
272 | reading the value from the register window save area. */ | |
273 | #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME | |
274 | if (fndecl_code == BUILT_IN_RETURN_ADDRESS) | |
275 | count--; | |
276 | #endif | |
277 | ||
278 | /* Scan back COUNT frames to the specified frame. */ | |
279 | for (i = 0; i < count; i++) | |
280 | { | |
281 | /* Assume the dynamic chain pointer is in the word that the | |
282 | frame address points to, unless otherwise specified. */ | |
283 | #ifdef DYNAMIC_CHAIN_ADDRESS | |
284 | tem = DYNAMIC_CHAIN_ADDRESS (tem); | |
285 | #endif | |
286 | tem = memory_address (Pmode, tem); | |
287 | tem = copy_to_reg (gen_rtx_MEM (Pmode, tem)); | |
288 | } | |
289 | ||
290 | /* For __builtin_frame_address, return what we've got. */ | |
291 | if (fndecl_code == BUILT_IN_FRAME_ADDRESS) | |
292 | return tem; | |
293 | ||
294 | /* For __builtin_return_address, Get the return address from that | |
295 | frame. */ | |
296 | #ifdef RETURN_ADDR_RTX | |
297 | tem = RETURN_ADDR_RTX (count, tem); | |
298 | #else | |
299 | tem = memory_address (Pmode, | |
300 | plus_constant (tem, GET_MODE_SIZE (Pmode))); | |
301 | tem = gen_rtx_MEM (Pmode, tem); | |
302 | #endif | |
303 | return tem; | |
304 | } | |
305 | ||
306 | /* __builtin_setjmp is passed a pointer to an array of five words (not | |
307 | all will be used on all machines). It operates similarly to the C | |
308 | library function of the same name, but is more efficient. Much of | |
309 | the code below (and for longjmp) is copied from the handling of | |
310 | non-local gotos. | |
311 | ||
312 | NOTE: This is intended for use by GNAT and the exception handling | |
313 | scheme in the compiler and will only work in the method used by | |
314 | them. */ | |
315 | ||
316 | rtx | |
317 | expand_builtin_setjmp (buf_addr, target, first_label, next_label) | |
318 | rtx buf_addr; | |
319 | rtx target; | |
320 | rtx first_label, next_label; | |
321 | { | |
322 | rtx lab1 = gen_label_rtx (); | |
323 | enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL); | |
324 | enum machine_mode value_mode; | |
325 | rtx stack_save; | |
326 | ||
327 | value_mode = TYPE_MODE (integer_type_node); | |
328 | ||
329 | #ifdef POINTERS_EXTEND_UNSIGNED | |
330 | buf_addr = convert_memory_address (Pmode, buf_addr); | |
331 | #endif | |
332 | ||
37ae8504 | 333 | buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX)); |
53800dbe | 334 | |
335 | if (target == 0 || GET_CODE (target) != REG | |
336 | || REGNO (target) < FIRST_PSEUDO_REGISTER) | |
337 | target = gen_reg_rtx (value_mode); | |
338 | ||
339 | emit_queue (); | |
340 | ||
341 | /* We store the frame pointer and the address of lab1 in the buffer | |
342 | and use the rest of it for the stack save area, which is | |
343 | machine-dependent. */ | |
344 | ||
345 | #ifndef BUILTIN_SETJMP_FRAME_VALUE | |
346 | #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx | |
347 | #endif | |
348 | ||
349 | emit_move_insn (gen_rtx_MEM (Pmode, buf_addr), | |
350 | BUILTIN_SETJMP_FRAME_VALUE); | |
351 | emit_move_insn (validize_mem | |
352 | (gen_rtx_MEM (Pmode, | |
353 | plus_constant (buf_addr, | |
354 | GET_MODE_SIZE (Pmode)))), | |
355 | force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, lab1))); | |
356 | ||
357 | stack_save = gen_rtx_MEM (sa_mode, | |
358 | plus_constant (buf_addr, | |
359 | 2 * GET_MODE_SIZE (Pmode))); | |
360 | emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX); | |
361 | ||
362 | /* If there is further processing to do, do it. */ | |
363 | #ifdef HAVE_builtin_setjmp_setup | |
364 | if (HAVE_builtin_setjmp_setup) | |
365 | emit_insn (gen_builtin_setjmp_setup (buf_addr)); | |
366 | #endif | |
367 | ||
368 | /* Set TARGET to zero and branch to the first-time-through label. */ | |
369 | emit_move_insn (target, const0_rtx); | |
370 | emit_jump_insn (gen_jump (first_label)); | |
371 | emit_barrier (); | |
372 | emit_label (lab1); | |
373 | ||
374 | /* Tell flow about the strange goings on. Putting `lab1' on | |
375 | `nonlocal_goto_handler_labels' to indicates that function | |
376 | calls may traverse the arc back to this label. */ | |
377 | ||
378 | current_function_has_nonlocal_label = 1; | |
379 | nonlocal_goto_handler_labels = | |
380 | gen_rtx_EXPR_LIST (VOIDmode, lab1, nonlocal_goto_handler_labels); | |
381 | ||
382 | /* Clobber the FP when we get here, so we have to make sure it's | |
383 | marked as used by this function. */ | |
384 | emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx)); | |
385 | ||
386 | /* Mark the static chain as clobbered here so life information | |
387 | doesn't get messed up for it. */ | |
388 | emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx)); | |
389 | ||
390 | /* Now put in the code to restore the frame pointer, and argument | |
391 | pointer, if needed. The code below is from expand_end_bindings | |
392 | in stmt.c; see detailed documentation there. */ | |
393 | #ifdef HAVE_nonlocal_goto | |
394 | if (! HAVE_nonlocal_goto) | |
395 | #endif | |
396 | emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx); | |
397 | ||
398 | #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM | |
399 | if (fixed_regs[ARG_POINTER_REGNUM]) | |
400 | { | |
401 | #ifdef ELIMINABLE_REGS | |
402 | size_t i; | |
403 | static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS; | |
404 | ||
405 | for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++) | |
406 | if (elim_regs[i].from == ARG_POINTER_REGNUM | |
407 | && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM) | |
408 | break; | |
409 | ||
410 | if (i == sizeof elim_regs / sizeof elim_regs [0]) | |
411 | #endif | |
412 | { | |
413 | /* Now restore our arg pointer from the address at which it | |
414 | was saved in our stack frame. | |
415 | If there hasn't be space allocated for it yet, make | |
416 | some now. */ | |
417 | if (arg_pointer_save_area == 0) | |
418 | arg_pointer_save_area | |
419 | = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0); | |
420 | emit_move_insn (virtual_incoming_args_rtx, | |
421 | copy_to_reg (arg_pointer_save_area)); | |
422 | } | |
423 | } | |
424 | #endif | |
425 | ||
426 | #ifdef HAVE_builtin_setjmp_receiver | |
427 | if (HAVE_builtin_setjmp_receiver) | |
428 | emit_insn (gen_builtin_setjmp_receiver (lab1)); | |
429 | else | |
430 | #endif | |
431 | #ifdef HAVE_nonlocal_goto_receiver | |
432 | if (HAVE_nonlocal_goto_receiver) | |
433 | emit_insn (gen_nonlocal_goto_receiver ()); | |
434 | else | |
435 | #endif | |
436 | { | |
437 | ; /* Nothing */ | |
438 | } | |
439 | ||
440 | /* Set TARGET, and branch to the next-time-through label. */ | |
441 | emit_move_insn (target, const1_rtx); | |
442 | emit_jump_insn (gen_jump (next_label)); | |
443 | emit_barrier (); | |
444 | ||
445 | return target; | |
446 | } | |
447 | ||
448 | /* __builtin_longjmp is passed a pointer to an array of five words (not | |
449 | all will be used on all machines). It operates similarly to the C | |
450 | library function of the same name, but is more efficient. Much of | |
451 | the code below is copied from the handling of non-local gotos. | |
452 | ||
453 | NOTE: This is intended for use by GNAT and the exception handling | |
454 | scheme in the compiler and will only work in the method used by | |
455 | them. */ | |
456 | ||
457 | void | |
458 | expand_builtin_longjmp (buf_addr, value) | |
459 | rtx buf_addr, value; | |
460 | { | |
461 | rtx fp, lab, stack; | |
462 | enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL); | |
463 | ||
464 | #ifdef POINTERS_EXTEND_UNSIGNED | |
465 | buf_addr = convert_memory_address (Pmode, buf_addr); | |
466 | #endif | |
467 | buf_addr = force_reg (Pmode, buf_addr); | |
468 | ||
469 | /* We used to store value in static_chain_rtx, but that fails if pointers | |
470 | are smaller than integers. We instead require that the user must pass | |
471 | a second argument of 1, because that is what builtin_setjmp will | |
472 | return. This also makes EH slightly more efficient, since we are no | |
473 | longer copying around a value that we don't care about. */ | |
474 | if (value != const1_rtx) | |
475 | abort (); | |
476 | ||
477 | #ifdef HAVE_builtin_longjmp | |
478 | if (HAVE_builtin_longjmp) | |
479 | emit_insn (gen_builtin_longjmp (buf_addr)); | |
480 | else | |
481 | #endif | |
482 | { | |
483 | fp = gen_rtx_MEM (Pmode, buf_addr); | |
484 | lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr, | |
485 | GET_MODE_SIZE (Pmode))); | |
486 | ||
487 | stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr, | |
488 | 2 * GET_MODE_SIZE (Pmode))); | |
489 | ||
490 | /* Pick up FP, label, and SP from the block and jump. This code is | |
491 | from expand_goto in stmt.c; see there for detailed comments. */ | |
492 | #if HAVE_nonlocal_goto | |
493 | if (HAVE_nonlocal_goto) | |
494 | /* We have to pass a value to the nonlocal_goto pattern that will | |
495 | get copied into the static_chain pointer, but it does not matter | |
496 | what that value is, because builtin_setjmp does not use it. */ | |
497 | emit_insn (gen_nonlocal_goto (value, fp, stack, lab)); | |
498 | else | |
499 | #endif | |
500 | { | |
501 | lab = copy_to_reg (lab); | |
502 | ||
503 | emit_move_insn (hard_frame_pointer_rtx, fp); | |
504 | emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX); | |
505 | ||
506 | emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx)); | |
507 | emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx)); | |
508 | emit_indirect_jump (lab); | |
509 | } | |
510 | } | |
511 | } | |
512 | ||
513 | /* Get a MEM rtx for expression EXP which can be used in a string instruction | |
514 | (cmpstrsi, movstrsi, ..). */ | |
515 | static rtx | |
516 | get_memory_rtx (exp) | |
517 | tree exp; | |
518 | { | |
519 | rtx mem; | |
520 | int is_aggregate; | |
521 | ||
522 | mem = gen_rtx_MEM (BLKmode, | |
523 | memory_address (BLKmode, | |
524 | expand_expr (exp, NULL_RTX, | |
525 | ptr_mode, EXPAND_SUM))); | |
526 | ||
527 | RTX_UNCHANGING_P (mem) = TREE_READONLY (exp); | |
528 | ||
529 | /* Figure out the type of the object pointed to. Set MEM_IN_STRUCT_P | |
530 | if the value is the address of a structure or if the expression is | |
531 | cast to a pointer to structure type. */ | |
532 | is_aggregate = 0; | |
533 | ||
534 | while (TREE_CODE (exp) == NOP_EXPR) | |
535 | { | |
536 | tree cast_type = TREE_TYPE (exp); | |
537 | if (TREE_CODE (cast_type) == POINTER_TYPE | |
538 | && AGGREGATE_TYPE_P (TREE_TYPE (cast_type))) | |
539 | { | |
540 | is_aggregate = 1; | |
541 | break; | |
542 | } | |
543 | exp = TREE_OPERAND (exp, 0); | |
544 | } | |
545 | ||
546 | if (is_aggregate == 0) | |
547 | { | |
548 | tree type; | |
549 | ||
550 | if (TREE_CODE (exp) == ADDR_EXPR) | |
551 | /* If this is the address of an object, check whether the | |
552 | object is an array. */ | |
553 | type = TREE_TYPE (TREE_OPERAND (exp, 0)); | |
554 | else | |
555 | type = TREE_TYPE (TREE_TYPE (exp)); | |
556 | is_aggregate = AGGREGATE_TYPE_P (type); | |
557 | } | |
558 | ||
559 | MEM_SET_IN_STRUCT_P (mem, is_aggregate); | |
560 | return mem; | |
561 | } | |
562 | \f | |
563 | /* Built-in functions to perform an untyped call and return. */ | |
564 | ||
565 | /* For each register that may be used for calling a function, this | |
566 | gives a mode used to copy the register's value. VOIDmode indicates | |
567 | the register is not used for calling a function. If the machine | |
568 | has register windows, this gives only the outbound registers. | |
569 | INCOMING_REGNO gives the corresponding inbound register. */ | |
570 | static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER]; | |
571 | ||
572 | /* For each register that may be used for returning values, this gives | |
573 | a mode used to copy the register's value. VOIDmode indicates the | |
574 | register is not used for returning values. If the machine has | |
575 | register windows, this gives only the outbound registers. | |
576 | INCOMING_REGNO gives the corresponding inbound register. */ | |
577 | static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER]; | |
578 | ||
579 | /* For each register that may be used for calling a function, this | |
580 | gives the offset of that register into the block returned by | |
581 | __builtin_apply_args. 0 indicates that the register is not | |
582 | used for calling a function. */ | |
583 | static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER]; | |
584 | ||
585 | /* Return the offset of register REGNO into the block returned by | |
586 | __builtin_apply_args. This is not declared static, since it is | |
587 | needed in objc-act.c. */ | |
588 | ||
589 | int | |
590 | apply_args_register_offset (regno) | |
591 | int regno; | |
592 | { | |
593 | apply_args_size (); | |
594 | ||
595 | /* Arguments are always put in outgoing registers (in the argument | |
596 | block) if such make sense. */ | |
597 | #ifdef OUTGOING_REGNO | |
598 | regno = OUTGOING_REGNO(regno); | |
599 | #endif | |
600 | return apply_args_reg_offset[regno]; | |
601 | } | |
602 | ||
603 | /* Return the size required for the block returned by __builtin_apply_args, | |
604 | and initialize apply_args_mode. */ | |
605 | ||
606 | static int | |
607 | apply_args_size () | |
608 | { | |
609 | static int size = -1; | |
610 | int align, regno; | |
611 | enum machine_mode mode; | |
612 | ||
613 | /* The values computed by this function never change. */ | |
614 | if (size < 0) | |
615 | { | |
616 | /* The first value is the incoming arg-pointer. */ | |
617 | size = GET_MODE_SIZE (Pmode); | |
618 | ||
619 | /* The second value is the structure value address unless this is | |
620 | passed as an "invisible" first argument. */ | |
621 | if (struct_value_rtx) | |
622 | size += GET_MODE_SIZE (Pmode); | |
623 | ||
624 | for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) | |
625 | if (FUNCTION_ARG_REGNO_P (regno)) | |
626 | { | |
627 | /* Search for the proper mode for copying this register's | |
628 | value. I'm not sure this is right, but it works so far. */ | |
629 | enum machine_mode best_mode = VOIDmode; | |
630 | ||
631 | for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); | |
632 | mode != VOIDmode; | |
633 | mode = GET_MODE_WIDER_MODE (mode)) | |
634 | if (HARD_REGNO_MODE_OK (regno, mode) | |
635 | && HARD_REGNO_NREGS (regno, mode) == 1) | |
636 | best_mode = mode; | |
637 | ||
638 | if (best_mode == VOIDmode) | |
639 | for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); | |
640 | mode != VOIDmode; | |
641 | mode = GET_MODE_WIDER_MODE (mode)) | |
642 | if (HARD_REGNO_MODE_OK (regno, mode) | |
643 | && (mov_optab->handlers[(int) mode].insn_code | |
644 | != CODE_FOR_nothing)) | |
645 | best_mode = mode; | |
646 | ||
647 | mode = best_mode; | |
648 | if (mode == VOIDmode) | |
649 | abort (); | |
650 | ||
651 | align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; | |
652 | if (size % align != 0) | |
653 | size = CEIL (size, align) * align; | |
654 | apply_args_reg_offset[regno] = size; | |
655 | size += GET_MODE_SIZE (mode); | |
656 | apply_args_mode[regno] = mode; | |
657 | } | |
658 | else | |
659 | { | |
660 | apply_args_mode[regno] = VOIDmode; | |
661 | apply_args_reg_offset[regno] = 0; | |
662 | } | |
663 | } | |
664 | return size; | |
665 | } | |
666 | ||
667 | /* Return the size required for the block returned by __builtin_apply, | |
668 | and initialize apply_result_mode. */ | |
669 | ||
670 | static int | |
671 | apply_result_size () | |
672 | { | |
673 | static int size = -1; | |
674 | int align, regno; | |
675 | enum machine_mode mode; | |
676 | ||
677 | /* The values computed by this function never change. */ | |
678 | if (size < 0) | |
679 | { | |
680 | size = 0; | |
681 | ||
682 | for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) | |
683 | if (FUNCTION_VALUE_REGNO_P (regno)) | |
684 | { | |
685 | /* Search for the proper mode for copying this register's | |
686 | value. I'm not sure this is right, but it works so far. */ | |
687 | enum machine_mode best_mode = VOIDmode; | |
688 | ||
689 | for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); | |
690 | mode != TImode; | |
691 | mode = GET_MODE_WIDER_MODE (mode)) | |
692 | if (HARD_REGNO_MODE_OK (regno, mode)) | |
693 | best_mode = mode; | |
694 | ||
695 | if (best_mode == VOIDmode) | |
696 | for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); | |
697 | mode != VOIDmode; | |
698 | mode = GET_MODE_WIDER_MODE (mode)) | |
699 | if (HARD_REGNO_MODE_OK (regno, mode) | |
700 | && (mov_optab->handlers[(int) mode].insn_code | |
701 | != CODE_FOR_nothing)) | |
702 | best_mode = mode; | |
703 | ||
704 | mode = best_mode; | |
705 | if (mode == VOIDmode) | |
706 | abort (); | |
707 | ||
708 | align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; | |
709 | if (size % align != 0) | |
710 | size = CEIL (size, align) * align; | |
711 | size += GET_MODE_SIZE (mode); | |
712 | apply_result_mode[regno] = mode; | |
713 | } | |
714 | else | |
715 | apply_result_mode[regno] = VOIDmode; | |
716 | ||
717 | /* Allow targets that use untyped_call and untyped_return to override | |
718 | the size so that machine-specific information can be stored here. */ | |
719 | #ifdef APPLY_RESULT_SIZE | |
720 | size = APPLY_RESULT_SIZE; | |
721 | #endif | |
722 | } | |
723 | return size; | |
724 | } | |
725 | ||
726 | #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return) | |
727 | /* Create a vector describing the result block RESULT. If SAVEP is true, | |
728 | the result block is used to save the values; otherwise it is used to | |
729 | restore the values. */ | |
730 | ||
731 | static rtx | |
732 | result_vector (savep, result) | |
733 | int savep; | |
734 | rtx result; | |
735 | { | |
736 | int regno, size, align, nelts; | |
737 | enum machine_mode mode; | |
738 | rtx reg, mem; | |
739 | rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx)); | |
740 | ||
741 | size = nelts = 0; | |
742 | for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) | |
743 | if ((mode = apply_result_mode[regno]) != VOIDmode) | |
744 | { | |
745 | align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; | |
746 | if (size % align != 0) | |
747 | size = CEIL (size, align) * align; | |
748 | reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno)); | |
749 | mem = change_address (result, mode, | |
750 | plus_constant (XEXP (result, 0), size)); | |
751 | savevec[nelts++] = (savep | |
752 | ? gen_rtx_SET (VOIDmode, mem, reg) | |
753 | : gen_rtx_SET (VOIDmode, reg, mem)); | |
754 | size += GET_MODE_SIZE (mode); | |
755 | } | |
756 | return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec)); | |
757 | } | |
758 | #endif /* HAVE_untyped_call or HAVE_untyped_return */ | |
759 | ||
760 | /* Save the state required to perform an untyped call with the same | |
761 | arguments as were passed to the current function. */ | |
762 | ||
763 | static rtx | |
764 | expand_builtin_apply_args_1 () | |
765 | { | |
766 | rtx registers; | |
767 | int size, align, regno; | |
768 | enum machine_mode mode; | |
769 | ||
770 | /* Create a block where the arg-pointer, structure value address, | |
771 | and argument registers can be saved. */ | |
772 | registers = assign_stack_local (BLKmode, apply_args_size (), -1); | |
773 | ||
774 | /* Walk past the arg-pointer and structure value address. */ | |
775 | size = GET_MODE_SIZE (Pmode); | |
776 | if (struct_value_rtx) | |
777 | size += GET_MODE_SIZE (Pmode); | |
778 | ||
779 | /* Save each register used in calling a function to the block. */ | |
780 | for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) | |
781 | if ((mode = apply_args_mode[regno]) != VOIDmode) | |
782 | { | |
783 | rtx tem; | |
784 | ||
785 | align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; | |
786 | if (size % align != 0) | |
787 | size = CEIL (size, align) * align; | |
788 | ||
789 | tem = gen_rtx_REG (mode, INCOMING_REGNO (regno)); | |
790 | ||
53800dbe | 791 | emit_move_insn (change_address (registers, mode, |
792 | plus_constant (XEXP (registers, 0), | |
793 | size)), | |
794 | tem); | |
795 | size += GET_MODE_SIZE (mode); | |
796 | } | |
797 | ||
798 | /* Save the arg pointer to the block. */ | |
799 | emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)), | |
800 | copy_to_reg (virtual_incoming_args_rtx)); | |
801 | size = GET_MODE_SIZE (Pmode); | |
802 | ||
803 | /* Save the structure value address unless this is passed as an | |
804 | "invisible" first argument. */ | |
805 | if (struct_value_incoming_rtx) | |
806 | { | |
807 | emit_move_insn (change_address (registers, Pmode, | |
808 | plus_constant (XEXP (registers, 0), | |
809 | size)), | |
810 | copy_to_reg (struct_value_incoming_rtx)); | |
811 | size += GET_MODE_SIZE (Pmode); | |
812 | } | |
813 | ||
814 | /* Return the address of the block. */ | |
815 | return copy_addr_to_reg (XEXP (registers, 0)); | |
816 | } | |
817 | ||
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args ()
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.
     NOTE(review): apply_args_value is file/function-scope state declared
     outside this view — presumably reset per function; confirm.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    /* Capture the register-saving code in a detached insn sequence
       so it can be emitted at the top of the function instead of
       at the point of the builtin call.  */
    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the sequence after the NOTE that starts the function.
       If this is inside a SEQUENCE, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  */
    push_topmost_sequence ();
    emit_insns_before (seq, NEXT_INSN (get_insns ()));
    pop_topmost_sequence ();
    return temp;
  }
}
855 | ||
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.
   FUNCTION is the address to call; ARGUMENTS is the address of a block
   laid out by __builtin_apply_args (arg pointer, optional structure
   value address, then the call-clobbered argument registers); ARGSIZE
   is the number of bytes of stack arguments to copy.  Returns the
   address of a block holding the callee's return registers.  */

static rtx
expand_builtin_apply (function, arguments, argsize)
     rtx function, arguments, argsize;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* ??? The argsize value should be adjusted here.  */

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args,
		  gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  /* On upward-growing stacks, the saved arg pointer is past the
     arguments; step back over them.  */
  incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
				incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Push a new argument block and copy the arguments.  */
  do_pending_stack_adjust ();

  /* Save the stack with nonlocal if available */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Push a block of memory onto the stack to store the memory arguments.
     Save the address in a register, and copy the memory arguments.  ??? I
     haven't figured out how the calling convention macros effect this,
     but it's likely that the source and/or destination addresses in
     the block copy will need updating in machine specific ways.  */
  dest = allocate_dynamic_stack_space (argsize, 0, 0);
  emit_block_move (gen_rtx_MEM (BLKmode, dest),
		   gen_rtx_MEM (BLKmode, incoming_args),
		   argsize,
		   PARM_BOUNDARY / BITS_PER_UNIT);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);

  /* Walk past the arg-pointer and structure value address.
     This offset arithmetic must mirror the layout written by
     expand_builtin_apply_args_1.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	/* Round SIZE up to each register's natural alignment.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg,
			change_address (arguments, mode,
					plus_constant (XEXP (arguments, 0),
						       size)));

	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value,
		      change_address (arguments, Pmode,
				      plus_constant (XEXP (arguments, 0),
						     size)));
      emit_move_insn (struct_value_rtx, value);
      if (GET_CODE (struct_value_rtx) == REG)
	use_reg (&call_fusage, struct_value_rtx);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    if (valreg)
	      abort (); /* HAVE_untyped_call required.  */
	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (gen_call_value (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      /* Copy the single return register into the result block.  */
      emit_move_insn (change_address (result, GET_MODE (valreg),
				      XEXP (result, 0)),
		      valreg);
    }
  else
#endif
    abort ();

  /* Find the CALL insn we just emitted.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  if (! call_insn)
    abort ();

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      /* Walk to the last link of the existing usage chain.  */
      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
	   link = XEXP (link, 1))
	;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  /* Return the address of the result block.  */
  return copy_addr_to_reg (XEXP (result, 0));
}
1031 | ||
/* Perform an untyped return.  RESULT is the address of a block, laid
   out by expand_builtin_apply, holding the saved return-register
   values; reload those registers and return from the current
   function.  */

static void
expand_builtin_return (result)
     rtx result;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      /* The target can reload all return registers in one insn.  */
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.
     The offset arithmetic must mirror the block layout written by
     expand_builtin_apply.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	/* Round SIZE up to this register's natural alignment.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg,
			change_address (result, mode,
					plus_constant (XEXP (result, 0),
						       size)));

	/* Accumulate a USE of each restored register so the return
	   value is not considered dead.  */
	push_to_sequence (call_fusage);
	emit_insn (gen_rtx_USE (VOIDmode, reg));
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insns (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_null_return ();
}
1083 | ||
1084 | /* Expand a call to __builtin_classify_type with arguments found in | |
1085 | ARGLIST. */ | |
1086 | static rtx | |
1087 | expand_builtin_classify_type (arglist) | |
1088 | tree arglist; | |
1089 | { | |
1090 | if (arglist != 0) | |
1091 | { | |
1092 | tree type = TREE_TYPE (TREE_VALUE (arglist)); | |
1093 | enum tree_code code = TREE_CODE (type); | |
1094 | if (code == VOID_TYPE) | |
1095 | return GEN_INT (void_type_class); | |
1096 | if (code == INTEGER_TYPE) | |
1097 | return GEN_INT (integer_type_class); | |
1098 | if (code == CHAR_TYPE) | |
1099 | return GEN_INT (char_type_class); | |
1100 | if (code == ENUMERAL_TYPE) | |
1101 | return GEN_INT (enumeral_type_class); | |
1102 | if (code == BOOLEAN_TYPE) | |
1103 | return GEN_INT (boolean_type_class); | |
1104 | if (code == POINTER_TYPE) | |
1105 | return GEN_INT (pointer_type_class); | |
1106 | if (code == REFERENCE_TYPE) | |
1107 | return GEN_INT (reference_type_class); | |
1108 | if (code == OFFSET_TYPE) | |
1109 | return GEN_INT (offset_type_class); | |
1110 | if (code == REAL_TYPE) | |
1111 | return GEN_INT (real_type_class); | |
1112 | if (code == COMPLEX_TYPE) | |
1113 | return GEN_INT (complex_type_class); | |
1114 | if (code == FUNCTION_TYPE) | |
1115 | return GEN_INT (function_type_class); | |
1116 | if (code == METHOD_TYPE) | |
1117 | return GEN_INT (method_type_class); | |
1118 | if (code == RECORD_TYPE) | |
1119 | return GEN_INT (record_type_class); | |
1120 | if (code == UNION_TYPE || code == QUAL_UNION_TYPE) | |
1121 | return GEN_INT (union_type_class); | |
1122 | if (code == ARRAY_TYPE) | |
1123 | { | |
1124 | if (TYPE_STRING_FLAG (type)) | |
1125 | return GEN_INT (string_type_class); | |
1126 | else | |
1127 | return GEN_INT (array_type_class); | |
1128 | } | |
1129 | if (code == SET_TYPE) | |
1130 | return GEN_INT (set_type_class); | |
1131 | if (code == FILE_TYPE) | |
1132 | return GEN_INT (file_type_class); | |
1133 | if (code == LANG_TYPE) | |
1134 | return GEN_INT (lang_type_class); | |
1135 | } | |
1136 | return GEN_INT (no_type_class); | |
1137 | } | |
1138 | ||
1139 | /* Expand expression EXP, which is a call to __builtin_constant_p. */ | |
1140 | static rtx | |
1141 | expand_builtin_constant_p (exp) | |
1142 | tree exp; | |
1143 | { | |
1144 | tree arglist = TREE_OPERAND (exp, 1); | |
1145 | enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp)); | |
1146 | ||
1147 | if (arglist == 0) | |
1148 | return const0_rtx; | |
1149 | else | |
1150 | { | |
1151 | tree arg = TREE_VALUE (arglist); | |
1152 | rtx tmp; | |
1153 | ||
1154 | /* We return 1 for a numeric type that's known to be a constant | |
1155 | value at compile-time or for an aggregate type that's a | |
1156 | literal constant. */ | |
1157 | STRIP_NOPS (arg); | |
1158 | ||
1159 | /* If we know this is a constant, emit the constant of one. */ | |
1160 | if (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c' | |
1161 | || (TREE_CODE (arg) == CONSTRUCTOR | |
1162 | && TREE_CONSTANT (arg)) | |
1163 | || (TREE_CODE (arg) == ADDR_EXPR | |
1164 | && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)) | |
1165 | return const1_rtx; | |
1166 | ||
1167 | /* If we aren't going to be running CSE or this expression | |
1168 | has side effects, show we don't know it to be a constant. | |
1169 | Likewise if it's a pointer or aggregate type since in those | |
1170 | case we only want literals, since those are only optimized | |
1171 | when generating RTL, not later. */ | |
1172 | if (TREE_SIDE_EFFECTS (arg) || cse_not_expected | |
1173 | || AGGREGATE_TYPE_P (TREE_TYPE (arg)) | |
1174 | || POINTER_TYPE_P (TREE_TYPE (arg))) | |
1175 | return const0_rtx; | |
1176 | ||
1177 | /* Otherwise, emit (constant_p_rtx (ARG)) and let CSE get a | |
1178 | chance to see if it can deduce whether ARG is constant. */ | |
1179 | ||
1180 | tmp = expand_expr (arg, NULL_RTX, VOIDmode, 0); | |
1181 | tmp = gen_rtx_CONSTANT_P_RTX (value_mode, tmp); | |
1182 | return tmp; | |
1183 | } | |
1184 | } | |
1185 | ||
/* Expand a call to one of the builtin math functions (sin, cos, or sqrt).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */
static rtx
expand_builtin_mathfn (exp, target, subtarget)
     tree exp;
     rtx target, subtarget;
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);

  if (arglist == 0
      /* Arg could be wrong type if user redeclared this fcn wrong.  */
      || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
    return 0;

  /* Stabilize and compute the argument.  VAR_DECLs and PARM_DECLs are
     already stable; anything else gets wrapped.  */
  if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
      && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
    {
      exp = copy_node (exp);
      TREE_OPERAND (exp, 1) = arglist;
      /* Wrap the computation of the argument in a SAVE_EXPR.  That
	 way, if we need to expand the argument again (as in the
	 flag_errno_math case below where we cannot directly set
	 errno), we will not perform side-effects more than once.
	 Note that here we're mutating the original EXP as well as the
	 copy; that's the right thing to do in case the original EXP
	 is expanded later.  */
      TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
      arglist = copy_node (arglist);
    }
  op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);

  /* Make a suitable register to place result in.  */
  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

  /* Collect all insns for the expansion in a sequence so that, on
     failure, nothing is emitted at all.  */
  emit_queue ();
  start_sequence ();

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_SIN:
      builtin_optab = sin_optab; break;
    case BUILT_IN_COS:
      builtin_optab = cos_optab; break;
    case BUILT_IN_FSQRT:
      builtin_optab = sqrt_optab; break;
    default:
      abort ();
    }

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
			builtin_optab, op0, target, 0);

  /* If we were unable to expand via the builtin, stop the
     sequence (without outputting the insns) and return 0, causing
     a call to the library function.  */
  if (target == 0)
    {
      end_sequence ();
      return 0;
    }

  /* Check the results by default.  But if flag_fast_math is turned on,
     then assume sqrt will always be called with valid arguments.  */

  if (flag_errno_math && ! flag_fast_math)
    {
      rtx lab1;

      /* Don't define the builtin FP instructions
	 if your machine is not IEEE.  */
      if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
	abort ();

      lab1 = gen_label_rtx ();

      /* Test the result; if it is NaN, set errno=EDOM because
	 the argument was not in the domain.  (NaN != NaN, so the EQ
	 self-compare succeeds only for non-NaN results.)  */
      emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
			       0, 0, lab1);

#ifdef TARGET_EDOM
      {
#ifdef GEN_ERRNO_RTX
	rtx errno_rtx = GEN_ERRNO_RTX;
#else
	rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif

	emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
      }
#else
      /* We can't set errno=EDOM directly; let the library call do it.
	 Pop the arguments right away in case the call gets deleted.  */
      NO_DEFER_POP;
      expand_call (exp, target, 0);
      OK_DEFER_POP;
#endif

      emit_label (lab1);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insns (insns);

  return target;
}
1304 | ||
/* Expand expression EXP which is a call to the strlen builtin.  Return 0
   if we failed the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */
static rtx
expand_builtin_strlen (exp, target, mode)
     tree exp;
     rtx target;
     enum machine_mode mode;
{
  tree arglist = TREE_OPERAND (exp, 1);
  enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));

  if (arglist == 0
      /* Arg could be non-pointer if user redeclared this fcn wrong.  */
      || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
    return 0;
  else
    {
      rtx pat;
      tree src = TREE_VALUE (arglist);
      /* c_strlen computes the length at compile time when possible.  */
      tree len = c_strlen (src);

      int align
	= get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      rtx result, src_reg, char_rtx, before_strlen;
      enum machine_mode insn_mode = value_mode, char_mode;
      enum insn_code icode = CODE_FOR_nothing;

      /* If the length is known, just return it.  */
      if (len != 0)
	return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return 0;

      /* Bail out if we can't compute strlen in the right mode.
	 Search widening modes until one has a strlen pattern.  */
      while (insn_mode != VOIDmode)
	{
	  icode = strlen_optab->handlers[(int) insn_mode].insn_code;
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return 0;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && GET_CODE (result) == REG
	     && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Make a place to hold the source address.  We will not expand
	 the actual source until we are sure that the expansion will
	 not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
	 source operand later.  */
      before_strlen = get_last_insn();

      /* Check the string is readable and has an end.  */
      if (current_function_check_memory_usage)
	emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
			   src_reg, Pmode,
			   GEN_INT (MEMORY_USE_RO),
			   TYPE_MODE (integer_type_node));

      /* Operand 2 of the strlen pattern is the terminator character;
	 coerce the zero into whatever form the pattern requires.  */
      char_rtx = const0_rtx;
      char_mode = insn_data[(int)icode].operand[2].mode;
      if (! (*insn_data[(int)icode].operand[2].predicate) (char_rtx, char_mode))
	char_rtx = copy_to_mode_reg (char_mode, char_rtx);

      pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
			     char_rtx, GEN_INT (align));
      if (! pat)
	return 0;
      emit_insn (pat);

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, ptr_mode, EXPAND_SUM);
      if (pat != src_reg)
	emit_move_insn (src_reg, pat);
      pat = gen_sequence ();
      end_sequence ();

      /* Splice the source-address computation in just before the
	 strlen insns that consume SRC_REG.  */
      if (before_strlen)
	emit_insn_after (pat, before_strlen);
      else
	emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == value_mode)
	target = result;
      else if (target != 0)
	convert_move (target, result, 0);
      else
	target = convert_to_mode (value_mode, result, 0);

      return target;
    }
}
1414 | ||
1415 | /* Expand a call to the memcpy builtin, with arguments in ARGLIST. */ | |
1416 | static rtx | |
1417 | expand_builtin_memcpy (arglist) | |
1418 | tree arglist; | |
1419 | { | |
1420 | if (arglist == 0 | |
1421 | /* Arg could be non-pointer if user redeclared this fcn wrong. */ | |
1422 | || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE | |
1423 | || TREE_CHAIN (arglist) == 0 | |
1424 | || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) | |
1425 | != POINTER_TYPE) | |
1426 | || TREE_CHAIN (TREE_CHAIN (arglist)) == 0 | |
1427 | || (TREE_CODE (TREE_TYPE (TREE_VALUE | |
1428 | (TREE_CHAIN (TREE_CHAIN (arglist))))) | |
1429 | != INTEGER_TYPE)) | |
1430 | return 0; | |
1431 | else | |
1432 | { | |
1433 | tree dest = TREE_VALUE (arglist); | |
1434 | tree src = TREE_VALUE (TREE_CHAIN (arglist)); | |
1435 | tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))); | |
1436 | ||
1437 | int src_align | |
1438 | = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT; | |
1439 | int dest_align | |
1440 | = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT; | |
1441 | rtx dest_mem, src_mem, dest_addr, len_rtx; | |
1442 | ||
1443 | /* If either SRC or DEST is not a pointer type, don't do | |
1444 | this operation in-line. */ | |
1445 | if (src_align == 0 || dest_align == 0) | |
1446 | return 0; | |
1447 | ||
1448 | dest_mem = get_memory_rtx (dest); | |
1449 | src_mem = get_memory_rtx (src); | |
1450 | len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0); | |
1451 | ||
1452 | /* Just copy the rights of SRC to the rights of DEST. */ | |
1453 | if (current_function_check_memory_usage) | |
1454 | emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3, | |
1455 | XEXP (dest_mem, 0), Pmode, | |
1456 | XEXP (src_mem, 0), Pmode, | |
1457 | len_rtx, TYPE_MODE (sizetype)); | |
1458 | ||
1459 | /* Copy word part most expediently. */ | |
1460 | dest_addr | |
1461 | = emit_block_move (dest_mem, src_mem, len_rtx, | |
1462 | MIN (src_align, dest_align)); | |
1463 | ||
1464 | if (dest_addr == 0) | |
1465 | dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX); | |
1466 | ||
1467 | return dest_addr; | |
1468 | } | |
1469 | } | |
1470 | ||
/* Expand expression EXP, which is a call to the strcpy builtin.  Return 0
   if we failed the caller should emit a normal call.  */

static rtx
expand_builtin_strcpy (exp)
     tree exp;
{
  tree arglist = TREE_OPERAND (exp, 1);
  rtx result;

  if (arglist == 0
      /* Arg could be non-pointer if user redeclared this fcn wrong.  */
      || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
      || TREE_CHAIN (arglist) == 0
      || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
	  != POINTER_TYPE))
    return 0;
  else
    {
      /* strcpy can only be expanded inline when the source length is
	 known at compile time.  */
      tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));

      if (len == 0)
	return 0;

      /* Copy the terminating NUL as well.  */
      len = size_binop (PLUS_EXPR, len, ssize_int (1));
      /* NOTE: this mutates the shared argument list of EXP in place,
	 appending LEN so the call looks like memcpy (dest, src, len).  */
      chainon (arglist, build_tree_list (NULL_TREE, len));
    }

  result = expand_builtin_memcpy (arglist);

  /* If the memcpy expansion failed, unhook the extra length argument
     so a normal two-argument strcpy call can still be emitted.  */
  if (! result)
    TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
  return result;
}
1505 | ||
1506 | /* Expand expression EXP, which is a call to the memset builtin. Return 0 | |
1507 | if we failed the caller should emit a normal call. */ | |
902de8ed | 1508 | |
53800dbe | 1509 | static rtx |
1510 | expand_builtin_memset (exp) | |
1511 | tree exp; | |
1512 | { | |
1513 | tree arglist = TREE_OPERAND (exp, 1); | |
1514 | ||
1515 | if (arglist == 0 | |
1516 | /* Arg could be non-pointer if user redeclared this fcn wrong. */ | |
1517 | || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE | |
1518 | || TREE_CHAIN (arglist) == 0 | |
1519 | || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) | |
1520 | != INTEGER_TYPE) | |
1521 | || TREE_CHAIN (TREE_CHAIN (arglist)) == 0 | |
1522 | || (INTEGER_TYPE | |
1523 | != (TREE_CODE (TREE_TYPE | |
1524 | (TREE_VALUE | |
1525 | (TREE_CHAIN (TREE_CHAIN (arglist)))))))) | |
1526 | return 0; | |
1527 | else | |
1528 | { | |
1529 | tree dest = TREE_VALUE (arglist); | |
1530 | tree val = TREE_VALUE (TREE_CHAIN (arglist)); | |
1531 | tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))); | |
1532 | ||
1533 | int dest_align | |
1534 | = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT; | |
1535 | rtx dest_mem, dest_addr, len_rtx; | |
1536 | ||
1537 | /* If DEST is not a pointer type, don't do this | |
1538 | operation in-line. */ | |
1539 | if (dest_align == 0) | |
1540 | return 0; | |
1541 | ||
1542 | /* If the arguments have side-effects, then we can only evaluate | |
1543 | them at most once. The following code evaluates them twice if | |
1544 | they are not constants because we break out to expand_call | |
1545 | in that case. They can't be constants if they have side-effects | |
1546 | so we can check for that first. Alternatively, we could call | |
1547 | save_expr to make multiple evaluation safe. */ | |
1548 | if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len)) | |
1549 | return 0; | |
1550 | ||
1551 | /* If VAL is not 0, don't do this operation in-line. */ | |
1552 | if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx) | |
1553 | return 0; | |
1554 | ||
53800dbe | 1555 | len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0); |
53800dbe | 1556 | |
1557 | dest_mem = get_memory_rtx (dest); | |
1558 | ||
1559 | /* Just check DST is writable and mark it as readable. */ | |
1560 | if (current_function_check_memory_usage) | |
1561 | emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3, | |
1562 | XEXP (dest_mem, 0), Pmode, | |
1563 | len_rtx, TYPE_MODE (sizetype), | |
1564 | GEN_INT (MEMORY_USE_WO), | |
1565 | TYPE_MODE (integer_type_node)); | |
1566 | ||
1567 | ||
1568 | dest_addr = clear_storage (dest_mem, len_rtx, dest_align); | |
1569 | ||
1570 | if (dest_addr == 0) | |
1571 | dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX); | |
1572 | ||
1573 | return dest_addr; | |
1574 | } | |
1575 | } | |
1576 | ||
1577 | #ifdef HAVE_cmpstrsi | |
1578 | /* Expand expression EXP, which is a call to the memcmp or the strcmp builtin. | |
1579 | ARGLIST is the argument list for this call. Return 0 if we failed and the | |
1580 | caller should emit a normal call, otherwise try to get the result in | |
1581 | TARGET, if convenient. */ | |
1582 | static rtx | |
1583 | expand_builtin_memcmp (exp, arglist, target) | |
1584 | tree exp; | |
1585 | tree arglist; | |
1586 | rtx target; | |
1587 | { | |
1588 | /* If we need to check memory accesses, call the library function. */ | |
1589 | if (current_function_check_memory_usage) | |
1590 | return 0; | |
1591 | ||
1592 | if (arglist == 0 | |
1593 | /* Arg could be non-pointer if user redeclared this fcn wrong. */ | |
1594 | || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE | |
1595 | || TREE_CHAIN (arglist) == 0 | |
1596 | || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE | |
1597 | || TREE_CHAIN (TREE_CHAIN (arglist)) == 0 | |
1598 | || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE) | |
1599 | return 0; | |
1600 | else if (!HAVE_cmpstrsi) | |
1601 | return 0; | |
1602 | ||
1603 | { | |
1604 | enum machine_mode mode; | |
1605 | tree arg1 = TREE_VALUE (arglist); | |
1606 | tree arg2 = TREE_VALUE (TREE_CHAIN (arglist)); | |
1607 | tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))); | |
1608 | rtx result; | |
1609 | ||
1610 | int arg1_align | |
1611 | = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT; | |
1612 | int arg2_align | |
1613 | = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT; | |
1614 | enum machine_mode insn_mode | |
6357eaae | 1615 | = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode; |
53800dbe | 1616 | |
1617 | /* If we don't have POINTER_TYPE, call the function. */ | |
1618 | if (arg1_align == 0 || arg2_align == 0) | |
1619 | return 0; | |
1620 | ||
1621 | /* Make a place to write the result of the instruction. */ | |
1622 | result = target; | |
1623 | if (! (result != 0 | |
1624 | && GET_CODE (result) == REG && GET_MODE (result) == insn_mode | |
1625 | && REGNO (result) >= FIRST_PSEUDO_REGISTER)) | |
1626 | result = gen_reg_rtx (insn_mode); | |
1627 | ||
1628 | emit_insn (gen_cmpstrsi (result, get_memory_rtx (arg1), | |
1629 | get_memory_rtx (arg2), | |
1630 | expand_expr (len, NULL_RTX, VOIDmode, 0), | |
1631 | GEN_INT (MIN (arg1_align, arg2_align)))); | |
1632 | ||
1633 | /* Return the value in the proper mode for this function. */ | |
1634 | mode = TYPE_MODE (TREE_TYPE (exp)); | |
1635 | if (GET_MODE (result) == mode) | |
1636 | return result; | |
1637 | else if (target != 0) | |
1638 | { | |
1639 | convert_move (target, result, 0); | |
1640 | return target; | |
1641 | } | |
1642 | else | |
1643 | return convert_to_mode (mode, result, 0); | |
1644 | } | |
1645 | } | |
1646 | ||
/* Expand expression EXP, which is a call to the strcmp builtin.  Return 0
   if we failed the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (exp, target)
     tree exp;
     rtx target;
{
  tree arglist = TREE_OPERAND (exp, 1);

  /* If we need to check memory accesses, call the library function.  */
  if (current_function_check_memory_usage)
    return 0;

  /* Verify the expected signature: (pointer, pointer).  */
  if (arglist == 0
      /* Arg could be non-pointer if user redeclared this fcn wrong.  */
      || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
      || TREE_CHAIN (arglist) == 0
      || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
	  != POINTER_TYPE))
    return 0;

  else if (! HAVE_cmpstrsi)
    return 0;
  {
    tree arg1 = TREE_VALUE (arglist);
    tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
    /* c_strlen returns the compile-time-constant length of a string
       literal argument, or 0 when the length is not known.  */
    tree len = c_strlen (arg1);
    tree len2 = c_strlen (arg2);
    rtx result;

    /* Count the terminating NUL, which must also be compared.  */
    if (len)
      len = size_binop (PLUS_EXPR, ssize_int (1), len);

    if (len2)
      len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

    /* If we don't have a constant length for the first, use the length
       of the second, if we know it.  We don't require a constant for
       this case; some cost analysis could be done if both are available
       but neither is constant.  For now, assume they're equally cheap.

       If both strings have constant lengths, use the smaller.  This
       could arise if optimization results in strcmp being called with
       two fixed strings, or if the code was machine-generated.  We should
       add some code to the `memcmp' handler below to deal with such
       situations, someday.  */

    if (!len || TREE_CODE (len) != INTEGER_CST)
      {
	if (len2)
	  len = len2;
	else if (len == 0)
	  /* Neither length is known at all: punt to a library call.  */
	  return 0;
      }
    else if (len2 && TREE_CODE (len2) == INTEGER_CST
	     && tree_int_cst_lt (len2, len))
      len = len2;

    /* Append LEN as a third argument and expand as memcmp.  On failure,
       chop the extra argument back off so the normal call sees the
       original two-argument list.  */
    chainon (arglist, build_tree_list (NULL_TREE, len));
    result = expand_builtin_memcmp (exp, arglist, target);
    if (! result)
      TREE_CHAIN (TREE_CHAIN (arglist)) = 0;

    return result;
  }
}
1715 | #endif | |
1716 | ||
/* Expand a call to __builtin_saveregs.  The result of the first call in
   a function is cached in saveregs_value and reused; the register-save
   code itself is emitted at the very start of the function.  */

rtx
expand_builtin_saveregs ()
{
  rtx val, seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

#ifdef EXPAND_BUILTIN_SAVEREGS
  /* Do whatever the machine needs done in this case.  */
  val = EXPAND_BUILTIN_SAVEREGS ();
#else
  /* ??? We used to try and build up a call to the out of line function,
     guessing about what registers needed saving etc.  This became much
     harder with __builtin_va_start, since we don't have a tree for a
     call to __builtin_saveregs to fall back on.  There was exactly one
     port (i860) that used this code, and I'm unconvinced it could actually
     handle the general case.  So we no longer try to handle anything
     weird and make the backend absorb the evil.  */

  error ("__builtin_saveregs not supported by this target");
  val = const0_rtx;
#endif

  /* Capture the insns emitted above as a detached sequence.  */
  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the sequence after the NOTE that starts the function.  If this
     is inside a SEQUENCE, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insns_after (seq, get_insns ());
  pop_topmost_sequence ();

  return val;
}
1766 | ||
1767 | /* __builtin_args_info (N) returns word N of the arg space info | |
1768 | for the current function. The number and meanings of words | |
1769 | is controlled by the definition of CUMULATIVE_ARGS. */ | |
1770 | static rtx | |
1771 | expand_builtin_args_info (exp) | |
1772 | tree exp; | |
1773 | { | |
1774 | tree arglist = TREE_OPERAND (exp, 1); | |
1775 | int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int); | |
1776 | int *word_ptr = (int *) ¤t_function_args_info; | |
1777 | #if 0 | |
1778 | /* These are used by the code below that is if 0'ed away */ | |
1779 | int i; | |
1780 | tree type, elts, result; | |
1781 | #endif | |
1782 | ||
1783 | if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0) | |
dda90815 | 1784 | abort (); |
53800dbe | 1785 | |
1786 | if (arglist != 0) | |
1787 | { | |
1788 | tree arg = TREE_VALUE (arglist); | |
1789 | if (TREE_CODE (arg) != INTEGER_CST) | |
1790 | error ("argument of `__builtin_args_info' must be constant"); | |
1791 | else | |
1792 | { | |
1793 | int wordnum = TREE_INT_CST_LOW (arg); | |
1794 | ||
1795 | if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg)) | |
1796 | error ("argument of `__builtin_args_info' out of range"); | |
1797 | else | |
1798 | return GEN_INT (word_ptr[wordnum]); | |
1799 | } | |
1800 | } | |
1801 | else | |
1802 | error ("missing argument in `__builtin_args_info'"); | |
1803 | ||
1804 | return const0_rtx; | |
1805 | ||
1806 | #if 0 | |
1807 | for (i = 0; i < nwords; i++) | |
1808 | elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0)); | |
1809 | ||
1810 | type = build_array_type (integer_type_node, | |
1811 | build_index_type (build_int_2 (nwords, 0))); | |
1812 | result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts)); | |
1813 | TREE_CONSTANT (result) = 1; | |
1814 | TREE_STATIC (result) = 1; | |
a66c9326 | 1815 | result = build1 (INDIRECT_REF, build_pointer_type (type), result); |
53800dbe | 1816 | TREE_CONSTANT (result) = 1; |
1817 | return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD); | |
1818 | #endif | |
1819 | } | |
1820 | ||
/* Expand ARGLIST, from a call to __builtin_next_arg.  Returns the
   address of the first anonymous stack argument, diagnosing misuse of
   va_start along the way.  */

static rtx
expand_builtin_next_arg (arglist)
     tree arglist;
{
  tree fntype = TREE_TYPE (current_function_decl);

  /* va_start is only meaningful in a varargs/stdarg function: either
     an unprototyped function, or one whose parameter list does not end
     in `...' (i.e. does not end with void_type_node).  */
  if ((TYPE_ARG_TYPES (fntype) == 0
       || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
	   == void_type_node))
      && ! current_function_varargs)
    {
      error ("`va_start' used in function with fixed args");
      return const0_rtx;
    }

  if (arglist)
    {
      tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
      tree arg = TREE_VALUE (arglist);

      /* Strip off all nops for the sake of the comparison.  This
	 is not quite the same as STRIP_NOPS.  It does more.
	 We must also strip off INDIRECT_EXPR for C++ reference
	 parameters.  */
      while (TREE_CODE (arg) == NOP_EXPR
	     || TREE_CODE (arg) == CONVERT_EXPR
	     || TREE_CODE (arg) == NON_LVALUE_EXPR
	     || TREE_CODE (arg) == INDIRECT_REF)
	arg = TREE_OPERAND (arg, 0);
      if (arg != last_parm)
	warning ("second parameter of `va_start' not last named argument");
    }
  else if (! current_function_varargs)
    /* Evidently an out of date version of <stdarg.h>; can't validate
       va_start's second argument, but can still work as intended.  */
    warning ("`__builtin_next_arg' called without an argument");

  /* The first anonymous argument lives at the incoming argument
     pointer plus the offset of the named arguments.  */
  return expand_binop (Pmode, add_optab,
		       current_function_internal_arg_pointer,
		       current_function_arg_offset_rtx,
		       NULL_RTX, 0, OPTAB_LIB_WIDEN);
}
1864 | ||
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.

   VALIST is the tree for the va_list operand; WAS_PTR is nonzero when
   the caller passed &valist (e.g. va_start/va_copy destinations) rather
   than the valist object itself.  Returns a possibly save_expr-wrapped
   tree the backends can evaluate repeatedly.  */

static tree
stabilize_va_list (valist, was_ptr)
     tree valist;
     int was_ptr;
{
  if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
    {
      /* If stdarg.h took the address of an array-type valist that was passed
	 as a parameter, we'll have taken the address of the parameter itself
	 rather than the array as we'd intended.  Undo this mistake.  */

      if (was_ptr)
	{
	  STRIP_NOPS (valist);

	  /* Two cases: either &array, which decomposed to
	       <ptr <array <record> valist>>
	     or &ptr, which turned into
	       <ptr <ptr <record>>>
	     In the first case we'll need to put the ADDR_EXPR back
	     after frobbing the types as if &array[0].  */

	  if (TREE_CODE (valist) != ADDR_EXPR)
	    abort ();
	  valist = TREE_OPERAND (valist, 0);
	}

      if (TYPE_MAIN_VARIANT (TREE_TYPE (valist))
	  == TYPE_MAIN_VARIANT (va_list_type_node))
	{
	  /* The &array case: rebuild the ADDR_EXPR with pointer-to-element
	     type, propagating side effects from the operand.  */
	  tree pt = build_pointer_type (TREE_TYPE (va_list_type_node));
	  valist = build1 (ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist)
	    = TREE_SIDE_EFFECTS (TREE_OPERAND (valist, 0));
	}
      else
	{
	  /* The &ptr case: must already be a pointer to the va_list
	     element type; anything else is a front-end bug.  */
	  if (! POINTER_TYPE_P (TREE_TYPE (valist))
	      || (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (valist)))
		  != TYPE_MAIN_VARIANT (TREE_TYPE (va_list_type_node))))
	    abort ();
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
    }
  else
    {
      if (! was_ptr)
	{
	  tree pt;

	  /* A side-effect-free lvalue is already safe to re-evaluate.  */
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  /* Otherwise take its address so it can be saved and then
	     re-dereferenced below.  */
	  pt = build_pointer_type (va_list_type_node);
	  valist = fold (build1 (ADDR_EXPR, pt, valist));
	  TREE_SIDE_EFFECTS (valist) = 1;
	}
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      valist = fold (build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)),
			     valist));
    }

  return valist;
}
1935 | ||
1936 | /* The "standard" implementation of va_start: just assign `nextarg' to | |
1937 | the variable. */ | |
1938 | void | |
1939 | std_expand_builtin_va_start (stdarg_p, valist, nextarg) | |
1940 | int stdarg_p ATTRIBUTE_UNUSED; | |
1941 | tree valist; | |
1942 | rtx nextarg; | |
1943 | { | |
1944 | tree t; | |
1945 | ||
8a15c04a | 1946 | if (!stdarg_p) |
1947 | nextarg = plus_constant (nextarg, -UNITS_PER_WORD); | |
1948 | ||
a66c9326 | 1949 | t = build (MODIFY_EXPR, TREE_TYPE (valist), valist, |
1950 | make_tree (ptr_type_node, nextarg)); | |
1951 | TREE_SIDE_EFFECTS (t) = 1; | |
1952 | ||
1953 | expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL); | |
1954 | } | |
1955 | ||
1956 | /* Expand ARGLIST, which from a call to __builtin_stdarg_va_start or | |
1957 | __builtin_varargs_va_start, depending on STDARG_P. */ | |
1958 | static rtx | |
1959 | expand_builtin_va_start (stdarg_p, arglist) | |
1960 | int stdarg_p; | |
1961 | tree arglist; | |
1962 | { | |
1963 | rtx nextarg; | |
1964 | tree chain = arglist, valist; | |
1965 | ||
1966 | if (stdarg_p) | |
1967 | nextarg = expand_builtin_next_arg (chain = TREE_CHAIN (arglist)); | |
1968 | else | |
1969 | nextarg = expand_builtin_next_arg (NULL_TREE); | |
1970 | ||
1971 | if (TREE_CHAIN (chain)) | |
1972 | error ("too many arguments to function `va_start'"); | |
1973 | ||
1974 | valist = stabilize_va_list (TREE_VALUE (arglist), 1); | |
1975 | ||
1976 | #ifdef EXPAND_BUILTIN_VA_START | |
1977 | EXPAND_BUILTIN_VA_START (stdarg_p, valist, nextarg); | |
1978 | #else | |
1979 | std_expand_builtin_va_start (stdarg_p, valist, nextarg); | |
1980 | #endif | |
1981 | ||
1982 | return const0_rtx; | |
1983 | } | |
1984 | ||
/* Allocate an alias set for use in storing and reading from the varargs
   spill area.  The set is created lazily on first use and cached for
   the rest of the compilation.  */

int
get_varargs_alias_set ()
{
  static int varargs_set = -1;

  if (varargs_set == -1)
    varargs_set = new_alias_set ();

  return varargs_set;
}
1995 | ||
/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.

   VALIST is the va_list lvalue tree; TYPE is the type being fetched.
   Returns an rtx register holding the address of the argument; also
   emits the update of VALIST as a side effect.  */

rtx
std_expand_builtin_va_arg (valist, type)
     tree valist, type;
{
  tree addr_tree, t;
  HOST_WIDE_INT align;
  HOST_WIDE_INT rounded_size;
  rtx addr;

  /* Compute the rounded size of the type: size in bytes rounded up to
     a multiple of the parameter alignment.  */
  align = PARM_BOUNDARY / BITS_PER_UNIT;
  rounded_size = (((int_size_in_bytes (type) + align - 1) / align) * align);

  /* Get AP.  */
  addr_tree = valist;
  if (PAD_VARARGS_DOWN)
    {
      /* Small args are padded downward.  */

      /* ADJ is the distance from AP to the start of the object: the
	 full slot for multi-slot args, the actual size otherwise, so
	 that small args are found at the top (high end) of their slot.  */
      HOST_WIDE_INT adj;
      adj = TREE_INT_CST_LOW (TYPE_SIZE (type)) / BITS_PER_UNIT;
      if (rounded_size > align)
	adj = rounded_size;

      addr_tree = build (PLUS_EXPR, TREE_TYPE (addr_tree), addr_tree,
			 build_int_2 (rounded_size - adj, 0));
    }

  addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
  /* Copy to a register so the later AP update cannot clobber it.  */
  addr = copy_to_reg (addr);

  /* Compute new value for AP.  */
  t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
	     build (PLUS_EXPR, TREE_TYPE (valist), valist,
		    build_int_2 (rounded_size, 0)));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return addr;
}
2038 | ||
2039 | /* Expand __builtin_va_arg, which is not really a builtin function, but | |
2040 | a very special sort of operator. */ | |
2041 | rtx | |
2042 | expand_builtin_va_arg (valist, type) | |
2043 | tree valist, type; | |
2044 | { | |
2045 | rtx addr, result; | |
6cd005c9 | 2046 | tree promoted_type, want_va_type, have_va_type; |
a66c9326 | 2047 | |
6cd005c9 | 2048 | /* Verify that valist is of the proper type. */ |
2049 | ||
2050 | want_va_type = va_list_type_node; | |
2051 | have_va_type = TREE_TYPE (valist); | |
2052 | if (TREE_CODE (want_va_type) == ARRAY_TYPE) | |
2053 | { | |
2054 | /* If va_list is an array type, the argument may have decayed | |
2055 | to a pointer type, e.g. by being passed to another function. | |
2056 | In that case, unwrap both types so that we can compare the | |
2057 | underlying records. */ | |
2058 | if (TREE_CODE (have_va_type) == ARRAY_TYPE | |
2059 | || TREE_CODE (have_va_type) == POINTER_TYPE) | |
2060 | { | |
2061 | want_va_type = TREE_TYPE (want_va_type); | |
2062 | have_va_type = TREE_TYPE (have_va_type); | |
2063 | } | |
2064 | } | |
2065 | if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type)) | |
a66c9326 | 2066 | { |
e94026da | 2067 | error ("first argument to `va_arg' not of type `va_list'"); |
2068 | addr = const0_rtx; | |
2069 | } | |
6cd005c9 | 2070 | |
2071 | /* Generate a diagnostic for requesting data of a type that cannot | |
2072 | be passed through `...' due to type promotion at the call site. */ | |
e94026da | 2073 | else if ((promoted_type = (*lang_type_promotes_to) (type)) != NULL_TREE) |
2074 | { | |
01ce7a1b | 2075 | const char *name = "<anonymous type>", *pname = 0; |
e94026da | 2076 | static int gave_help; |
2077 | ||
2078 | if (TYPE_NAME (type)) | |
2079 | { | |
2080 | if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE) | |
2081 | name = IDENTIFIER_POINTER (TYPE_NAME (type)); | |
2082 | else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL | |
2083 | && DECL_NAME (TYPE_NAME (type))) | |
2084 | name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))); | |
2085 | } | |
2086 | if (TYPE_NAME (promoted_type)) | |
2087 | { | |
2088 | if (TREE_CODE (TYPE_NAME (promoted_type)) == IDENTIFIER_NODE) | |
2089 | pname = IDENTIFIER_POINTER (TYPE_NAME (promoted_type)); | |
2090 | else if (TREE_CODE (TYPE_NAME (promoted_type)) == TYPE_DECL | |
2091 | && DECL_NAME (TYPE_NAME (promoted_type))) | |
2092 | pname = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (promoted_type))); | |
2093 | } | |
2094 | ||
2095 | error ("`%s' is promoted to `%s' when passed through `...'", name, pname); | |
2096 | if (! gave_help) | |
2097 | { | |
2098 | gave_help = 1; | |
2099 | error ("(so you should pass `%s' not `%s' to `va_arg')", pname, name); | |
2100 | } | |
2101 | ||
a66c9326 | 2102 | addr = const0_rtx; |
2103 | } | |
2104 | else | |
2105 | { | |
2106 | /* Make it easier for the backends by protecting the valist argument | |
2107 | from multiple evaluations. */ | |
2108 | valist = stabilize_va_list (valist, 0); | |
2109 | ||
2110 | #ifdef EXPAND_BUILTIN_VA_ARG | |
2111 | addr = EXPAND_BUILTIN_VA_ARG (valist, type); | |
2112 | #else | |
2113 | addr = std_expand_builtin_va_arg (valist, type); | |
2114 | #endif | |
2115 | } | |
2116 | ||
2117 | result = gen_rtx_MEM (TYPE_MODE (type), addr); | |
2118 | MEM_ALIAS_SET (result) = get_varargs_alias_set (); | |
2119 | ||
2120 | return result; | |
2121 | } | |
2122 | ||
2123 | /* Expand ARGLIST, from a call to __builtin_va_end. */ | |
2124 | static rtx | |
2125 | expand_builtin_va_end (arglist) | |
8a15c04a | 2126 | tree arglist; |
a66c9326 | 2127 | { |
8a15c04a | 2128 | tree valist = TREE_VALUE (arglist); |
2129 | ||
a66c9326 | 2130 | #ifdef EXPAND_BUILTIN_VA_END |
a66c9326 | 2131 | valist = stabilize_va_list (valist, 0); |
2132 | EXPAND_BUILTIN_VA_END(arglist); | |
8a15c04a | 2133 | #else |
2134 | /* Evaluate for side effects, if needed. I hate macros that don't | |
2135 | do that. */ | |
2136 | if (TREE_SIDE_EFFECTS (valist)) | |
2137 | expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL); | |
a66c9326 | 2138 | #endif |
2139 | ||
2140 | return const0_rtx; | |
2141 | } | |
2142 | ||
/* Expand ARGLIST, from a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  ARGLIST is (dst, src); always
   yields const0_rtx.  */

static rtx
expand_builtin_va_copy (arglist)
     tree arglist;
{
  tree dst, src, t;

  dst = TREE_VALUE (arglist);
  src = TREE_VALUE (TREE_CHAIN (arglist));

  /* The destination arrives as an address (was_ptr), the source as the
     object itself.  */
  dst = stabilize_va_list (dst, 1);
  src = stabilize_va_list (src, 0);

  if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
    {
      /* Scalar or record va_list: a plain assignment suffices.  */
      t = build (MODIFY_EXPR, va_list_type_node, dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      /* Array-type va_list: copy the whole array with a block move.  */
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
			  VOIDmode, EXPAND_NORMAL);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      MEM_ALIAS_SET (dstb) = get_alias_set (TREE_TYPE (TREE_TYPE (dst)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      MEM_ALIAS_SET (srcb) = get_alias_set (TREE_TYPE (TREE_TYPE (src)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size,
		       TYPE_ALIGN (va_list_type_node) / BITS_PER_UNIT);
    }

  return const0_rtx;
}
2187 | ||
53800dbe | 2188 | /* Expand a call to one of the builtin functions __builtin_frame_address or |
2189 | __builtin_return_address. */ | |
2190 | static rtx | |
2191 | expand_builtin_frame_address (exp) | |
2192 | tree exp; | |
2193 | { | |
2194 | tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0); | |
2195 | tree arglist = TREE_OPERAND (exp, 1); | |
2196 | ||
2197 | /* The argument must be a nonnegative integer constant. | |
2198 | It counts the number of frames to scan up the stack. | |
2199 | The value is the return address saved in that frame. */ | |
2200 | if (arglist == 0) | |
2201 | /* Warning about missing arg was already issued. */ | |
2202 | return const0_rtx; | |
2203 | else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST | |
2204 | || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0) | |
2205 | { | |
2206 | if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS) | |
2207 | error ("invalid arg to `__builtin_frame_address'"); | |
2208 | else | |
2209 | error ("invalid arg to `__builtin_return_address'"); | |
2210 | return const0_rtx; | |
2211 | } | |
2212 | else | |
2213 | { | |
2214 | rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), | |
2215 | TREE_INT_CST_LOW (TREE_VALUE (arglist)), | |
2216 | hard_frame_pointer_rtx); | |
2217 | ||
2218 | /* Some ports cannot access arbitrary stack frames. */ | |
2219 | if (tem == NULL) | |
2220 | { | |
2221 | if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS) | |
2222 | warning ("unsupported arg to `__builtin_frame_address'"); | |
2223 | else | |
2224 | warning ("unsupported arg to `__builtin_return_address'"); | |
2225 | return const0_rtx; | |
2226 | } | |
2227 | ||
2228 | /* For __builtin_frame_address, return what we've got. */ | |
2229 | if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS) | |
2230 | return tem; | |
2231 | ||
2232 | if (GET_CODE (tem) != REG | |
2233 | && ! CONSTANT_P (tem)) | |
2234 | tem = copy_to_mode_reg (Pmode, tem); | |
2235 | return tem; | |
2236 | } | |
2237 | } | |
2238 | ||
2239 | /* Expand a call to the alloca builtin, with arguments ARGLIST. Return 0 if | |
2240 | we failed and the caller should emit a normal call, otherwise try to get | |
2241 | the result in TARGET, if convenient. */ | |
2242 | static rtx | |
2243 | expand_builtin_alloca (arglist, target) | |
2244 | tree arglist; | |
2245 | rtx target; | |
2246 | { | |
2247 | rtx op0; | |
2248 | ||
2249 | if (arglist == 0 | |
2250 | /* Arg could be non-integer if user redeclared this fcn wrong. */ | |
2251 | || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE) | |
2252 | return 0; | |
2253 | ||
2254 | /* Compute the argument. */ | |
2255 | op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0); | |
2256 | ||
2257 | /* Allocate the desired space. */ | |
2258 | return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT); | |
2259 | } | |
2260 | ||
2261 | /* Expand a call to the ffs builtin. The arguments are in ARGLIST. | |
2262 | Return 0 if a normal call should be emitted rather than expanding the | |
2263 | function in-line. If convenient, the result should be placed in TARGET. | |
2264 | SUBTARGET may be used as the target for computing one of EXP's operands. */ | |
2265 | static rtx | |
2266 | expand_builtin_ffs (arglist, target, subtarget) | |
2267 | tree arglist; | |
2268 | rtx target, subtarget; | |
2269 | { | |
2270 | rtx op0; | |
2271 | if (arglist == 0 | |
2272 | /* Arg could be non-integer if user redeclared this fcn wrong. */ | |
2273 | || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE) | |
2274 | return 0; | |
2275 | ||
2276 | /* Compute the argument. */ | |
2277 | op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0); | |
2278 | /* Compute ffs, into TARGET if possible. | |
2279 | Set TARGET to wherever the result comes back. */ | |
2280 | target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))), | |
2281 | ffs_optab, op0, target, 1); | |
2282 | if (target == 0) | |
2283 | abort (); | |
2284 | return target; | |
2285 | } | |
2286 | \f | |
2287 | /* Expand an expression EXP that calls a built-in function, | |
2288 | with result going to TARGET if that's convenient | |
2289 | (and in mode MODE if that's convenient). | |
2290 | SUBTARGET may be used as the target for computing one of EXP's operands. | |
2291 | IGNORE is nonzero if the value is to be ignored. */ | |
2292 | ||
2293 | rtx | |
2294 | expand_builtin (exp, target, subtarget, mode, ignore) | |
2295 | tree exp; | |
2296 | rtx target; | |
2297 | rtx subtarget; | |
2298 | enum machine_mode mode; | |
2299 | int ignore; | |
2300 | { | |
2301 | tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0); | |
2302 | tree arglist = TREE_OPERAND (exp, 1); | |
2303 | enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl); | |
2304 | ||
8305149e | 2305 | #ifdef MD_EXPAND_BUILTIN |
2306 | if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD) | |
2307 | return MD_EXPAND_BUILTIN (exp, target, subtarget, mode, ignore); | |
2308 | #endif | |
2309 | ||
53800dbe | 2310 | /* When not optimizing, generate calls to library functions for a certain |
2311 | set of builtins. */ | |
2312 | if (! optimize && ! CALLED_AS_BUILT_IN (fndecl) | |
2313 | && (fcode == BUILT_IN_SIN || fcode == BUILT_IN_COS | |
2314 | || fcode == BUILT_IN_FSQRT || fcode == BUILT_IN_MEMSET | |
2315 | || fcode == BUILT_IN_MEMCPY || fcode == BUILT_IN_MEMCMP | |
071f1696 | 2316 | || fcode == BUILT_IN_BCMP |
53800dbe | 2317 | || fcode == BUILT_IN_STRLEN || fcode == BUILT_IN_STRCPY |
2318 | || fcode == BUILT_IN_STRCMP || fcode == BUILT_IN_FFS)) | |
2319 | return expand_call (exp, target, ignore); | |
2320 | ||
2321 | switch (fcode) | |
2322 | { | |
2323 | case BUILT_IN_ABS: | |
2324 | case BUILT_IN_LABS: | |
2325 | case BUILT_IN_FABS: | |
2326 | /* build_function_call changes these into ABS_EXPR. */ | |
2327 | abort (); | |
2328 | ||
2329 | case BUILT_IN_SIN: | |
2330 | case BUILT_IN_COS: | |
2331 | /* Treat these like sqrt, but only if the user asks for them. */ | |
2332 | if (! flag_fast_math) | |
2333 | break; | |
2334 | case BUILT_IN_FSQRT: | |
2335 | target = expand_builtin_mathfn (exp, target, subtarget); | |
2336 | if (target) | |
2337 | return target; | |
2338 | break; | |
2339 | ||
2340 | case BUILT_IN_FMOD: | |
2341 | break; | |
2342 | ||
2343 | case BUILT_IN_APPLY_ARGS: | |
2344 | return expand_builtin_apply_args (); | |
2345 | ||
2346 | /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes | |
2347 | FUNCTION with a copy of the parameters described by | |
2348 | ARGUMENTS, and ARGSIZE. It returns a block of memory | |
2349 | allocated on the stack into which is stored all the registers | |
2350 | that might possibly be used for returning the result of a | |
2351 | function. ARGUMENTS is the value returned by | |
2352 | __builtin_apply_args. ARGSIZE is the number of bytes of | |
2353 | arguments that must be copied. ??? How should this value be | |
2354 | computed? We'll also need a safe worst case value for varargs | |
2355 | functions. */ | |
2356 | case BUILT_IN_APPLY: | |
2357 | if (arglist == 0 | |
2358 | /* Arg could be non-pointer if user redeclared this fcn wrong. */ | |
2359 | || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist))) | |
2360 | || TREE_CHAIN (arglist) == 0 | |
2361 | || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE | |
2362 | || TREE_CHAIN (TREE_CHAIN (arglist)) == 0 | |
2363 | || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE) | |
2364 | return const0_rtx; | |
2365 | else | |
2366 | { | |
2367 | int i; | |
2368 | tree t; | |
2369 | rtx ops[3]; | |
2370 | ||
2371 | for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++) | |
2372 | ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0); | |
2373 | ||
2374 | return expand_builtin_apply (ops[0], ops[1], ops[2]); | |
2375 | } | |
2376 | ||
2377 | /* __builtin_return (RESULT) causes the function to return the | |
2378 | value described by RESULT. RESULT is address of the block of | |
2379 | memory returned by __builtin_apply. */ | |
2380 | case BUILT_IN_RETURN: | |
2381 | if (arglist | |
2382 | /* Arg could be non-pointer if user redeclared this fcn wrong. */ | |
2383 | && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE) | |
2384 | expand_builtin_return (expand_expr (TREE_VALUE (arglist), | |
2385 | NULL_RTX, VOIDmode, 0)); | |
2386 | return const0_rtx; | |
2387 | ||
2388 | case BUILT_IN_SAVEREGS: | |
a66c9326 | 2389 | return expand_builtin_saveregs (); |
53800dbe | 2390 | |
2391 | case BUILT_IN_ARGS_INFO: | |
2392 | return expand_builtin_args_info (exp); | |
2393 | ||
2394 | /* Return the address of the first anonymous stack arg. */ | |
2395 | case BUILT_IN_NEXT_ARG: | |
a66c9326 | 2396 | return expand_builtin_next_arg (arglist); |
53800dbe | 2397 | |
2398 | case BUILT_IN_CLASSIFY_TYPE: | |
2399 | return expand_builtin_classify_type (arglist); | |
2400 | ||
2401 | case BUILT_IN_CONSTANT_P: | |
2402 | return expand_builtin_constant_p (exp); | |
2403 | ||
2404 | case BUILT_IN_FRAME_ADDRESS: | |
2405 | case BUILT_IN_RETURN_ADDRESS: | |
2406 | return expand_builtin_frame_address (exp); | |
2407 | ||
2408 | /* Returns the address of the area where the structure is returned. | |
2409 | 0 otherwise. */ | |
2410 | case BUILT_IN_AGGREGATE_INCOMING_ADDRESS: | |
2411 | if (arglist != 0 | |
2412 | || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))) | |
2413 | || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM) | |
2414 | return const0_rtx; | |
2415 | else | |
2416 | return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0); | |
2417 | ||
2418 | case BUILT_IN_ALLOCA: | |
2419 | target = expand_builtin_alloca (arglist, target); | |
2420 | if (target) | |
2421 | return target; | |
2422 | break; | |
2423 | ||
2424 | case BUILT_IN_FFS: | |
bdc5170d | 2425 | target = expand_builtin_ffs (arglist, target, subtarget); |
53800dbe | 2426 | if (target) |
2427 | return target; | |
2428 | break; | |
2429 | ||
2430 | case BUILT_IN_STRLEN: | |
2431 | target = expand_builtin_strlen (exp, target, mode); | |
2432 | if (target) | |
2433 | return target; | |
2434 | break; | |
2435 | ||
2436 | case BUILT_IN_STRCPY: | |
2437 | target = expand_builtin_strcpy (exp); | |
2438 | if (target) | |
2439 | return target; | |
2440 | break; | |
2441 | ||
2442 | case BUILT_IN_MEMCPY: | |
2443 | target = expand_builtin_memcpy (arglist); | |
2444 | if (target) | |
2445 | return target; | |
2446 | break; | |
2447 | ||
2448 | case BUILT_IN_MEMSET: | |
2449 | target = expand_builtin_memset (exp); | |
2450 | if (target) | |
2451 | return target; | |
2452 | break; | |
2453 | ||
2454 | /* These comparison functions need an instruction that returns an actual | |
2455 | index. An ordinary compare that just sets the condition codes | |
2456 | is not enough. */ | |
2457 | #ifdef HAVE_cmpstrsi | |
2458 | case BUILT_IN_STRCMP: | |
2459 | target = expand_builtin_strcmp (exp, target); | |
2460 | if (target) | |
2461 | return target; | |
2462 | break; | |
2463 | ||
071f1696 | 2464 | case BUILT_IN_BCMP: |
53800dbe | 2465 | case BUILT_IN_MEMCMP: |
2466 | target = expand_builtin_memcmp (exp, arglist, target); | |
2467 | if (target) | |
2468 | return target; | |
2469 | break; | |
2470 | #else | |
2471 | case BUILT_IN_STRCMP: | |
071f1696 | 2472 | case BUILT_IN_BCMP: |
53800dbe | 2473 | case BUILT_IN_MEMCMP: |
2474 | break; | |
2475 | #endif | |
2476 | ||
2477 | case BUILT_IN_SETJMP: | |
2478 | if (arglist == 0 | |
2479 | || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE) | |
2480 | break; | |
2481 | else | |
2482 | { | |
2483 | rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget, | |
2484 | VOIDmode, 0); | |
2485 | rtx lab = gen_label_rtx (); | |
2486 | rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab); | |
2487 | emit_label (lab); | |
2488 | return ret; | |
2489 | } | |
2490 | ||
2491 | /* __builtin_longjmp is passed a pointer to an array of five words. | |
2492 | It's similar to the C library longjmp function but works with | |
2493 | __builtin_setjmp above. */ | |
2494 | case BUILT_IN_LONGJMP: | |
2495 | if (arglist == 0 || TREE_CHAIN (arglist) == 0 | |
2496 | || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE) | |
2497 | break; | |
2498 | else | |
2499 | { | |
2500 | rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget, | |
2501 | VOIDmode, 0); | |
2502 | rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), | |
2503 | NULL_RTX, VOIDmode, 0); | |
2504 | ||
2505 | if (value != const1_rtx) | |
2506 | { | |
2507 | error ("__builtin_longjmp second argument must be 1"); | |
2508 | return const0_rtx; | |
2509 | } | |
2510 | ||
2511 | expand_builtin_longjmp (buf_addr, value); | |
2512 | return const0_rtx; | |
2513 | } | |
2514 | ||
2515 | case BUILT_IN_TRAP: | |
2516 | #ifdef HAVE_trap | |
2517 | if (HAVE_trap) | |
2518 | emit_insn (gen_trap ()); | |
2519 | else | |
2520 | #endif | |
2521 | error ("__builtin_trap not supported by this target"); | |
2522 | emit_barrier (); | |
2523 | return const0_rtx; | |
2524 | ||
2525 | /* Various hooks for the DWARF 2 __throw routine. */ | |
2526 | case BUILT_IN_UNWIND_INIT: | |
2527 | expand_builtin_unwind_init (); | |
2528 | return const0_rtx; | |
2529 | case BUILT_IN_DWARF_CFA: | |
2530 | return virtual_cfa_rtx; | |
2531 | #ifdef DWARF2_UNWIND_INFO | |
2532 | case BUILT_IN_DWARF_FP_REGNUM: | |
2533 | return expand_builtin_dwarf_fp_regnum (); | |
695e919b | 2534 | case BUILT_IN_INIT_DWARF_REG_SIZES: |
2535 | expand_builtin_init_dwarf_reg_sizes (TREE_VALUE (arglist)); | |
2536 | return const0_rtx; | |
53800dbe | 2537 | #endif |
2538 | case BUILT_IN_FROB_RETURN_ADDR: | |
2539 | return expand_builtin_frob_return_addr (TREE_VALUE (arglist)); | |
2540 | case BUILT_IN_EXTRACT_RETURN_ADDR: | |
2541 | return expand_builtin_extract_return_addr (TREE_VALUE (arglist)); | |
2542 | case BUILT_IN_EH_RETURN: | |
2543 | expand_builtin_eh_return (TREE_VALUE (arglist), | |
2544 | TREE_VALUE (TREE_CHAIN (arglist)), | |
2545 | TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)))); | |
2546 | return const0_rtx; | |
a66c9326 | 2547 | case BUILT_IN_VARARGS_START: |
2548 | return expand_builtin_va_start (0, arglist); | |
2549 | case BUILT_IN_STDARG_START: | |
2550 | return expand_builtin_va_start (1, arglist); | |
2551 | case BUILT_IN_VA_END: | |
2552 | return expand_builtin_va_end (arglist); | |
2553 | case BUILT_IN_VA_COPY: | |
2554 | return expand_builtin_va_copy (arglist); | |
53800dbe | 2555 | |
2556 | default: /* just do library call, if unknown builtin */ | |
2557 | error ("built-in function `%s' not currently supported", | |
2558 | IDENTIFIER_POINTER (DECL_NAME (fndecl))); | |
2559 | } | |
2560 | ||
2561 | /* The switch statement above can drop through to cause the function | |
2562 | to be called normally. */ | |
2563 | return expand_call (exp, target, ignore); | |
2564 | } |