/* (Reconstructed from a blame-table export; non-code table residue removed.) */
53800dbe | 1 | /* Expand builtin functions. |
0b387d23 | 2 | Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, |
3 | 1999, 2000 Free Software Foundation, Inc. | |
53800dbe | 4 | |
5 | This file is part of GNU CC. | |
6 | ||
7 | GNU CC is free software; you can redistribute it and/or modify | |
8 | it under the terms of the GNU General Public License as published by | |
9 | the Free Software Foundation; either version 2, or (at your option) | |
10 | any later version. | |
11 | ||
12 | GNU CC is distributed in the hope that it will be useful, | |
13 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
14 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
15 | GNU General Public License for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
18 | along with GNU CC; see the file COPYING. If not, write to | |
19 | the Free Software Foundation, 59 Temple Place - Suite 330, | |
20 | Boston, MA 02111-1307, USA. */ | |
21 | ||
22 | #include "config.h" | |
23 | #include "system.h" | |
24 | #include "machmode.h" | |
25 | #include "rtl.h" | |
26 | #include "tree.h" | |
27 | #include "obstack.h" | |
28 | #include "flags.h" | |
29 | #include "regs.h" | |
30 | #include "hard-reg-set.h" | |
31 | #include "except.h" | |
32 | #include "function.h" | |
33 | #include "insn-flags.h" | |
34 | #include "insn-codes.h" | |
35 | #include "insn-config.h" | |
36 | #include "expr.h" | |
37 | #include "recog.h" | |
38 | #include "output.h" | |
39 | #include "typeclass.h" | |
40 | #include "defaults.h" | |
41 | #include "toplev.h" | |
1dd6c958 | 42 | #include "tm_p.h" |
53800dbe | 43 | |
44 | #define CALLED_AS_BUILT_IN(NODE) \ | |
45 | (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10)) | |
46 | ||
53800dbe | 47 | /* Register mappings for target machines without register windows. */ |
48 | #ifndef INCOMING_REGNO | |
49 | #define INCOMING_REGNO(OUT) (OUT) | |
50 | #endif | |
51 | #ifndef OUTGOING_REGNO | |
52 | #define OUTGOING_REGNO(IN) (IN) | |
53 | #endif | |
54 | ||
726e2588 | 55 | #ifndef PAD_VARARGS_DOWN |
56 | #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN | |
57 | #endif | |
58 | ||
6bcfea9e | 59 | tree (*lang_type_promotes_to) PARAMS ((tree)); |
e94026da | 60 | |
6bcfea9e | 61 | static int get_pointer_alignment PARAMS ((tree, unsigned)); |
62 | static tree c_strlen PARAMS ((tree)); | |
63 | static rtx get_memory_rtx PARAMS ((tree)); | |
64 | static int apply_args_size PARAMS ((void)); | |
65 | static int apply_result_size PARAMS ((void)); | |
d8c9779c | 66 | #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return) |
6bcfea9e | 67 | static rtx result_vector PARAMS ((int, rtx)); |
d8c9779c | 68 | #endif |
6bcfea9e | 69 | static rtx expand_builtin_apply_args PARAMS ((void)); |
70 | static rtx expand_builtin_apply_args_1 PARAMS ((void)); | |
71 | static rtx expand_builtin_apply PARAMS ((rtx, rtx, rtx)); | |
72 | static void expand_builtin_return PARAMS ((rtx)); | |
73 | static rtx expand_builtin_classify_type PARAMS ((tree)); | |
74 | static rtx expand_builtin_mathfn PARAMS ((tree, rtx, rtx)); | |
75 | static rtx expand_builtin_constant_p PARAMS ((tree)); | |
76 | static rtx expand_builtin_args_info PARAMS ((tree)); | |
77 | static rtx expand_builtin_next_arg PARAMS ((tree)); | |
78 | static rtx expand_builtin_va_start PARAMS ((int, tree)); | |
79 | static rtx expand_builtin_va_end PARAMS ((tree)); | |
80 | static rtx expand_builtin_va_copy PARAMS ((tree)); | |
95d038e4 | 81 | #ifdef HAVE_cmpstrsi |
6bcfea9e | 82 | static rtx expand_builtin_memcmp PARAMS ((tree, tree, rtx)); |
83 | static rtx expand_builtin_strcmp PARAMS ((tree, rtx)); | |
95d038e4 | 84 | #endif |
6bcfea9e | 85 | static rtx expand_builtin_memcpy PARAMS ((tree)); |
86 | static rtx expand_builtin_strcpy PARAMS ((tree)); | |
87 | static rtx expand_builtin_memset PARAMS ((tree)); | |
88 | static rtx expand_builtin_strlen PARAMS ((tree, rtx, enum machine_mode)); | |
89 | static rtx expand_builtin_alloca PARAMS ((tree, rtx)); | |
90 | static rtx expand_builtin_ffs PARAMS ((tree, rtx, rtx)); | |
91 | static rtx expand_builtin_frame_address PARAMS ((tree)); | |
92 | static tree stabilize_va_list PARAMS ((tree, int)); | |
53800dbe | 93 | |
/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to (if it is not a POINTER_TYPE, 0 is returned).

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

static int
get_pointer_alignment (exp, max_align)
     tree exp;
     unsigned max_align;
{
  unsigned align, inner;

  /* Non-pointer expressions carry no alignment information here.  */
  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
    return 0;

  /* Start with the alignment of the pointed-to type, capped at MAX_ALIGN.  */
  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  /* Walk down the expression, refining ALIGN whenever a subexpression
     gives more precise information.  Each case either descends one level
     or returns the best alignment found so far.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case NOP_EXPR:
	case CONVERT_EXPR:
	case NON_LVALUE_EXPR:
	  /* Conversions don't change the address, so look through them;
	     stop if the inner expression is no longer a pointer.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
	    return align;
	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
	  align = MIN (inner, max_align);
	  break;

	case PLUS_EXPR:
	  /* If sum of pointer + int, restrict our maximum alignment to that
	     imposed by the integer.  If not, we can't do any better than
	     ALIGN.  */
	  if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
	    return align;

	  /* Shrink MAX_ALIGN (a power of two) until it divides the
	     constant byte offset, expressed in bits.  */
	  while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
		  & (max_align - 1))
		 != 0)
	    max_align >>= 1;

	  exp = TREE_OPERAND (exp, 0);
	  break;

	case ADDR_EXPR:
	  /* See what we are pointing at and look at its alignment.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (TREE_CODE (exp) == FUNCTION_DECL)
	    align = FUNCTION_BOUNDARY;
	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
	    align = DECL_ALIGN (exp);
#ifdef CONSTANT_ALIGNMENT
	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
	    align = CONSTANT_ALIGNMENT (exp, align);
#endif
	  return MIN (align, max_align);

	default:
	  return align;
	}
    }
}
162 | ||
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.

   Returns 0 (a null tree) whenever the length cannot be determined at
   compile time, in which case the caller must fall back to a runtime
   strlen call.  */

static tree
c_strlen (src)
     tree src;
{
  tree offset_node;
  int offset, max;
  char *ptr;

  /* Decompose SRC into the underlying string constant plus an offset.  */
  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  max = TREE_STRING_LENGTH (src);
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return 0;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop (size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character.  */
  if (offset_node == 0)
    offset = 0;
  else
    {
      /* Did we get a long long offset?  If so, punt.  */
      if (TREE_INT_CST_HIGH (offset_node) != 0)
	return 0;
      offset = TREE_INT_CST_LOW (offset_node);
    }

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      warning ("offset outside bounds of constant string");
      return 0;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
236 | ||
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).

   FNDECL_CODE is either BUILT_IN_FRAME_ADDRESS (return the frame address
   itself) or BUILT_IN_RETURN_ADDRESS (return the return address stored
   in, or associated with, that frame).  */

rtx
expand_builtin_return_addr (fndecl_code, count, tem)
     enum built_in_function fndecl_code;
     int count;
     rtx tem;
{
  int i;

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the sparc, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the sparc, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  Each iteration
     loads the saved chain pointer out of the current frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
    }

  /* For __builtin_frame_address, return what we've got.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return tem;

  /* For __builtin_return_address, Get the return address from that
     frame.  By default it is one word past the frame address;
     targets override this with RETURN_ADDR_RTX.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_rtx_MEM (Pmode, tem);
#endif
  return tem;
}
293 | ||
/* __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below (and for longjmp) is copied from the handling of
   non-local gotos.

   NOTE: This is intended for use by GNAT and the exception handling
   scheme in the compiler and will only work in the method used by
   them.

   BUF_ADDR is the address of the jmp_buf array.  TARGET, if a suitable
   pseudo register, receives the result: 0 on the direct fall-through
   path (which jumps to FIRST_LABEL) and 1 when re-entered via longjmp
   (which jumps to NEXT_LABEL).  Returns the rtx actually used for the
   result.  */

rtx
expand_builtin_setjmp (buf_addr, target, first_label, next_label)
     rtx buf_addr;
     rtx target;
     rtx first_label, next_label;
{
  rtx lab1 = gen_label_rtx ();
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  enum machine_mode value_mode;
  rtx stack_save;

  value_mode = TYPE_MODE (integer_type_node);

#ifdef POINTERS_EXTEND_UNSIGNED
  buf_addr = convert_memory_address (Pmode, buf_addr);
#endif

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* TARGET must be a fresh pseudo; hard registers or memory won't
     survive across the nonlocal control transfer.  */
  if (target == 0 || GET_CODE (target) != REG
      || REGNO (target) < FIRST_PSEUDO_REGISTER)
    target = gen_reg_rtx (value_mode);

  emit_queue ();

  /* We store the frame pointer and the address of lab1 in the buffer
     and use the rest of it for the stack save area, which is
     machine-dependent.  */

#ifndef BUILTIN_SETJMP_FRAME_VALUE
#define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
#endif

  /* Word 0: frame value; word 1: the receiver label; words 2+: the
     target-specific stack save area.  */
  emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
		  BUILTIN_SETJMP_FRAME_VALUE);
  emit_move_insn (validize_mem
		  (gen_rtx_MEM (Pmode,
				plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)))),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, lab1)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* Set TARGET to zero and branch to the first-time-through label.  */
  emit_move_insn (target, const0_rtx);
  emit_jump_insn (gen_jump (first_label));
  emit_barrier ();
  emit_label (lab1);

  /* Tell flow about the strange goings on.  Putting `lab1' on
     `nonlocal_goto_handler_labels' to indicates that function
     calls may traverse the arc back to this label.  */

  current_function_has_nonlocal_label = 1;
  nonlocal_goto_handler_labels =
    gen_rtx_EXPR_LIST (VOIDmode, lab1, nonlocal_goto_handler_labels);

  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  The code below is from expand_end_bindings
     in stmt.c; see detailed documentation there.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.  */
      size_t i;
      static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == sizeof elim_regs / sizeof elim_regs [0])
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.
	     If there hasn't be space allocated for it yet, make
	     some now.  */
	  if (arg_pointer_save_area == 0)
	    arg_pointer_save_area
	      = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
	  emit_move_insn (virtual_incoming_args_rtx,
			  copy_to_reg (arg_pointer_save_area));
	}
    }
#endif

  /* Let the target emit any receiver fixup code; prefer the
     setjmp-specific pattern over the generic nonlocal-goto one.  */
#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (lab1));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      {
	; /* Nothing */
      }

  /* Set TARGET, and branch to the next-time-through label.  */
  emit_move_insn (target, const1_rtx);
  emit_jump_insn (gen_jump (next_label));
  emit_barrier ();

  return target;
}
435 | ||
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.

   NOTE: This is intended for use by GNAT and the exception handling
   scheme in the compiler and will only work in the method used by
   them.

   BUF_ADDR is the jmp_buf address; VALUE must be const1_rtx, matching
   what expand_builtin_setjmp arranges to return on the longjmp path.  */

void
expand_builtin_longjmp (buf_addr, value)
     rtx buf_addr, value;
{
  rtx fp, lab, stack;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

#ifdef POINTERS_EXTEND_UNSIGNED
  buf_addr = convert_memory_address (Pmode, buf_addr);
#endif
  buf_addr = force_reg (Pmode, buf_addr);

  /* We used to store value in static_chain_rtx, but that fails if pointers
     are smaller than integers.  We instead require that the user must pass
     a second argument of 1, because that is what builtin_setjmp will
     return.  This also makes EH slightly more efficient, since we are no
     longer copying around a value that we don't care about.  */
  if (value != const1_rtx)
    abort ();

#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      /* The buffer layout mirrors expand_builtin_setjmp: frame pointer,
	 receiver label, then the stack save area.  */
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      /* NOTE(review): `#if' here vs `#ifdef HAVE_nonlocal_goto' in
	 expand_builtin_setjmp above -- confirm the asymmetry is
	 intentional.  */
#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

	  /* USE of hard frame and stack pointers keeps these restores
	     from being deleted as dead stores.  */
	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (lab);
	}
    }
}
500 | ||
/* Get a MEM rtx for expression EXP which can be used in a string instruction
   (cmpstrsi, movstrsi, ..).

   EXP is a pointer-valued tree expression; the resulting BLKmode MEM
   addresses the object it points to, with RTX_UNCHANGING_P and
   MEM_IN_STRUCT_P set from what we can deduce about EXP.  */
static rtx
get_memory_rtx (exp)
     tree exp;
{
  rtx mem;
  int is_aggregate;

  mem = gen_rtx_MEM (BLKmode,
		     memory_address (BLKmode,
				     expand_expr (exp, NULL_RTX,
						  ptr_mode, EXPAND_SUM)));

  RTX_UNCHANGING_P (mem) = TREE_READONLY (exp);

  /* Figure out the type of the object pointed to.  Set MEM_IN_STRUCT_P
     if the value is the address of a structure or if the expression is
     cast to a pointer to structure type.  */
  is_aggregate = 0;

  /* Look through casts for one that targets a pointer-to-aggregate.  */
  while (TREE_CODE (exp) == NOP_EXPR)
    {
      tree cast_type = TREE_TYPE (exp);
      if (TREE_CODE (cast_type) == POINTER_TYPE
	  && AGGREGATE_TYPE_P (TREE_TYPE (cast_type)))
	{
	  is_aggregate = 1;
	  break;
	}
      exp = TREE_OPERAND (exp, 0);
    }

  if (is_aggregate == 0)
    {
      tree type;

      if (TREE_CODE (exp) == ADDR_EXPR)
	/* If this is the address of an object, check whether the
	   object is an array.  */
	type = TREE_TYPE (TREE_OPERAND (exp, 0));
      else
	type = TREE_TYPE (TREE_TYPE (exp));
      is_aggregate = AGGREGATE_TYPE_P (type);
    }

  MEM_SET_IN_STRUCT_P (mem, is_aggregate);
  return mem;
}
550 | \f | |
/* Built-in functions to perform an untyped call and return.  */

/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.
   Initialized lazily by apply_args_size.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.
   Initialized lazily by apply_result_size.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for calling a function, this
   gives the offset of that register into the block returned by
   __builtin_apply_args.  0 indicates that the register is not
   used for calling a function.
   Initialized lazily by apply_args_size.  */
static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
572 | ||
573 | /* Return the offset of register REGNO into the block returned by | |
574 | __builtin_apply_args. This is not declared static, since it is | |
575 | needed in objc-act.c. */ | |
576 | ||
577 | int | |
578 | apply_args_register_offset (regno) | |
579 | int regno; | |
580 | { | |
581 | apply_args_size (); | |
582 | ||
583 | /* Arguments are always put in outgoing registers (in the argument | |
584 | block) if such make sense. */ | |
585 | #ifdef OUTGOING_REGNO | |
586 | regno = OUTGOING_REGNO(regno); | |
587 | #endif | |
588 | return apply_args_reg_offset[regno]; | |
589 | } | |
590 | ||
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode and apply_args_reg_offset.  The result
   is computed once and cached in a function-local static.  */

static int
apply_args_size ()
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (struct_value_rtx)
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    /* Search for the proper mode for copying this register's
	       value.  I'm not sure this is right, but it works so far.  */
	    enum machine_mode best_mode = VOIDmode;

	    /* Prefer the widest single-register integer mode valid
	       for this register.  */
	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
		 mode != VOIDmode;
		 mode = GET_MODE_WIDER_MODE (mode))
	      if (HARD_REGNO_MODE_OK (regno, mode)
		  && HARD_REGNO_NREGS (regno, mode) == 1)
		best_mode = mode;

	    /* Failing that, try float modes that have a move pattern.  */
	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && (mov_optab->handlers[(int) mode].insn_code
			!= CODE_FOR_nothing))
		  best_mode = mode;

	    mode = best_mode;
	    if (mode == VOIDmode)
	      abort ();

	    /* Round SIZE up to this mode's alignment, record the offset,
	       and advance past the saved register.  */
	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    apply_args_reg_offset[regno] = size;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	    apply_args_reg_offset[regno] = 0;
	  }
    }
  return size;
}
654 | ||
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  Like apply_args_size, the result
   is computed once and cached in a function-local static.  */

static int
apply_result_size ()
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_VALUE_REGNO_P (regno))
	  {
	    /* Search for the proper mode for copying this register's
	       value.  I'm not sure this is right, but it works so far.  */
	    enum machine_mode best_mode = VOIDmode;

	    /* Widest integer mode narrower than TImode that the
	       register supports.  */
	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
		 mode != TImode;
		 mode = GET_MODE_WIDER_MODE (mode))
	      if (HARD_REGNO_MODE_OK (regno, mode))
		best_mode = mode;

	    /* Failing that, try float modes that have a move pattern.  */
	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && (mov_optab->handlers[(int) mode].insn_code
			!= CODE_FOR_nothing))
		  best_mode = mode;

	    mode = best_mode;
	    if (mode == VOIDmode)
	      abort ();

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
713 | ||
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.

   The returned PARALLEL contains one SET per live result register:
   register-to-memory when saving, memory-to-register when restoring.
   Offsets follow the same alignment layout as apply_result_size.  */

static rtx
result_vector (savep, result)
     int savep;
     rtx result;
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	/* When restoring, values come back in the incoming registers.  */
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = change_address (result, mode,
			      plus_constant (XEXP (result, 0), size));
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
747 | ||
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.

   Emits the register/arg-pointer saves into the current insn stream
   and returns a pseudo holding the address of the save block.  The
   block layout matches apply_args_size: arg pointer first, then the
   structure value address (if any), then the argument registers.  */

static rtx
expand_builtin_apply_args_1 ()
{
  rtx registers;
  int size, align, regno;
  enum machine_mode mode;

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	rtx tem;

	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	/* Read the value from the inbound register on machines with
	   register windows.  */
	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (change_address (registers, mode,
					plus_constant (XEXP (registers, 0),
						       size)),
			tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
		  copy_to_reg (virtual_incoming_args_rtx));
  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_value_incoming_rtx)
    {
      emit_move_insn (change_address (registers, Pmode,
				      plus_constant (XEXP (registers, 0),
						     size)),
		      copy_to_reg (struct_value_incoming_rtx));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
805 | ||
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */
static rtx
expand_builtin_apply_args ()
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    /* Build the save code in a detached sequence so it can be moved.  */
    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the sequence after the NOTE that starts the function.
       If this is inside a SEQUENCE, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  */
    push_topmost_sequence ();
    emit_insns_before (seq, NEXT_INSN (get_insns ()));
    pop_topmost_sequence ();
    return temp;
  }
}
843 | ||
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.
   FUNCTION is the rtx address of the function to call, ARGUMENTS is
   the rtx address of a block laid out by __builtin_apply_args (arg
   pointer first, then optional structure value address, then the saved
   argument registers), and ARGSIZE is the rtx size in bytes of the
   stack argument area to copy.  Returns a pseudo register holding the
   address of the block in which the return registers were saved.  */

static rtx
expand_builtin_apply (function, arguments, argsize)
     rtx function, arguments, argsize;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* ??? The argsize value should be adjusted here.  */

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args,
		  gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  /* With an upward-growing stack the saved arg pointer is past the
     arguments, so step back over them.  */
  incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
				incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Push a new argument block and copy the arguments.  */
  do_pending_stack_adjust ();

  /* Save the stack with nonlocal if available */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Push a block of memory onto the stack to store the memory arguments.
     Save the address in a register, and copy the memory arguments.  ??? I
     haven't figured out how the calling convention macros effect this,
     but it's likely that the source and/or destination addresses in
     the block copy will need updating in machine specific ways.  */
  dest = allocate_dynamic_stack_space (argsize, 0, 0);
  emit_block_move (gen_rtx_MEM (BLKmode, dest),
		   gen_rtx_MEM (BLKmode, incoming_args),
		   argsize,
		   PARM_BOUNDARY / BITS_PER_UNIT);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  The
     offsets must mirror the layout written by the apply_args code:
     round SIZE up to each register mode's alignment before reading.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg,
			change_address (arguments, mode,
					plus_constant (XEXP (arguments, 0),
						       size)));

	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value,
		      change_address (arguments, Pmode,
				      plus_constant (XEXP (arguments, 0),
						     size)));
      emit_move_insn (struct_value_rtx, value);
      if (GET_CODE (struct_value_rtx) == REG)
	use_reg (&call_fusage, struct_value_rtx);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    if (valreg)
	      abort (); /* HAVE_untyped_call required.  */
	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (gen_call_value (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (change_address (result, GET_MODE (valreg),
				      XEXP (result, 0)),
		      valreg);
    }
  else
#endif
    abort ();

  /* Find the CALL insn we just emitted.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  if (! call_insn)
    abort ();

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      /* Walk to the last link of the existing USE list.  */
      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
	   link = XEXP (link, 1))
	;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  /* Return the address of the result block.  */
  return copy_addr_to_reg (XEXP (result, 0));
}
1019 | ||
/* Perform an untyped return.  RESULT is the rtx address of a block
   in which the return registers were saved (as produced by
   expand_builtin_apply above); reload those registers and return.  */

static void
expand_builtin_return (result)
     rtx result;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  /* Presumably called for its side effect of computing the result-block
     layout; the return value is not used here.  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  /* Let the target expand the whole untyped return if it can.  */
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	/* Round SIZE up to this mode's alignment, matching the layout
	   used when the block was written.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg,
			change_address (result, mode,
					plus_constant (XEXP (result, 0),
						       size)));

	/* Accumulate a USE of each restored register so the loads are
	   not deleted as dead before the return.  */
	push_to_sequence (call_fusage);
	emit_insn (gen_rtx_USE (VOIDmode, reg));
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insns (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_null_return ();
}
1071 | ||
1072 | /* Expand a call to __builtin_classify_type with arguments found in | |
1073 | ARGLIST. */ | |
1074 | static rtx | |
1075 | expand_builtin_classify_type (arglist) | |
1076 | tree arglist; | |
1077 | { | |
1078 | if (arglist != 0) | |
1079 | { | |
1080 | tree type = TREE_TYPE (TREE_VALUE (arglist)); | |
1081 | enum tree_code code = TREE_CODE (type); | |
1082 | if (code == VOID_TYPE) | |
1083 | return GEN_INT (void_type_class); | |
1084 | if (code == INTEGER_TYPE) | |
1085 | return GEN_INT (integer_type_class); | |
1086 | if (code == CHAR_TYPE) | |
1087 | return GEN_INT (char_type_class); | |
1088 | if (code == ENUMERAL_TYPE) | |
1089 | return GEN_INT (enumeral_type_class); | |
1090 | if (code == BOOLEAN_TYPE) | |
1091 | return GEN_INT (boolean_type_class); | |
1092 | if (code == POINTER_TYPE) | |
1093 | return GEN_INT (pointer_type_class); | |
1094 | if (code == REFERENCE_TYPE) | |
1095 | return GEN_INT (reference_type_class); | |
1096 | if (code == OFFSET_TYPE) | |
1097 | return GEN_INT (offset_type_class); | |
1098 | if (code == REAL_TYPE) | |
1099 | return GEN_INT (real_type_class); | |
1100 | if (code == COMPLEX_TYPE) | |
1101 | return GEN_INT (complex_type_class); | |
1102 | if (code == FUNCTION_TYPE) | |
1103 | return GEN_INT (function_type_class); | |
1104 | if (code == METHOD_TYPE) | |
1105 | return GEN_INT (method_type_class); | |
1106 | if (code == RECORD_TYPE) | |
1107 | return GEN_INT (record_type_class); | |
1108 | if (code == UNION_TYPE || code == QUAL_UNION_TYPE) | |
1109 | return GEN_INT (union_type_class); | |
1110 | if (code == ARRAY_TYPE) | |
1111 | { | |
1112 | if (TYPE_STRING_FLAG (type)) | |
1113 | return GEN_INT (string_type_class); | |
1114 | else | |
1115 | return GEN_INT (array_type_class); | |
1116 | } | |
1117 | if (code == SET_TYPE) | |
1118 | return GEN_INT (set_type_class); | |
1119 | if (code == FILE_TYPE) | |
1120 | return GEN_INT (file_type_class); | |
1121 | if (code == LANG_TYPE) | |
1122 | return GEN_INT (lang_type_class); | |
1123 | } | |
1124 | return GEN_INT (no_type_class); | |
1125 | } | |
1126 | ||
1127 | /* Expand expression EXP, which is a call to __builtin_constant_p. */ | |
1128 | static rtx | |
1129 | expand_builtin_constant_p (exp) | |
1130 | tree exp; | |
1131 | { | |
1132 | tree arglist = TREE_OPERAND (exp, 1); | |
1133 | enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp)); | |
1134 | ||
1135 | if (arglist == 0) | |
1136 | return const0_rtx; | |
1137 | else | |
1138 | { | |
1139 | tree arg = TREE_VALUE (arglist); | |
1140 | rtx tmp; | |
1141 | ||
1142 | /* We return 1 for a numeric type that's known to be a constant | |
1143 | value at compile-time or for an aggregate type that's a | |
1144 | literal constant. */ | |
1145 | STRIP_NOPS (arg); | |
1146 | ||
1147 | /* If we know this is a constant, emit the constant of one. */ | |
1148 | if (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c' | |
1149 | || (TREE_CODE (arg) == CONSTRUCTOR | |
1150 | && TREE_CONSTANT (arg)) | |
1151 | || (TREE_CODE (arg) == ADDR_EXPR | |
1152 | && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)) | |
1153 | return const1_rtx; | |
1154 | ||
1155 | /* If we aren't going to be running CSE or this expression | |
1156 | has side effects, show we don't know it to be a constant. | |
1157 | Likewise if it's a pointer or aggregate type since in those | |
1158 | case we only want literals, since those are only optimized | |
1159 | when generating RTL, not later. */ | |
1160 | if (TREE_SIDE_EFFECTS (arg) || cse_not_expected | |
1161 | || AGGREGATE_TYPE_P (TREE_TYPE (arg)) | |
1162 | || POINTER_TYPE_P (TREE_TYPE (arg))) | |
1163 | return const0_rtx; | |
1164 | ||
1165 | /* Otherwise, emit (constant_p_rtx (ARG)) and let CSE get a | |
1166 | chance to see if it can deduce whether ARG is constant. */ | |
1167 | ||
1168 | tmp = expand_expr (arg, NULL_RTX, VOIDmode, 0); | |
1169 | tmp = gen_rtx_CONSTANT_P_RTX (value_mode, tmp); | |
1170 | return tmp; | |
1171 | } | |
1172 | } | |
1173 | ||
/* Expand a call to one of the builtin math functions (sin, cos, or sqrt).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (exp, target, subtarget)
     tree exp;
     rtx target, subtarget;
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);

  if (arglist == 0
      /* Arg could be wrong type if user redeclared this fcn wrong.  */
      || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
    return 0;

  /* Stabilize and compute the argument.  VAR_DECL and PARM_DECL
     arguments are already stable and need no wrapping.  */
  if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
      && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
    {
      exp = copy_node (exp);
      TREE_OPERAND (exp, 1) = arglist;
      /* Wrap the computation of the argument in a SAVE_EXPR.  That
	 way, if we need to expand the argument again (as in the
	 flag_errno_math case below where we cannot directly set
	 errno), we will not perform side-effects more than once.
	 Note that here we're mutating the original EXP as well as the
	 copy; that's the right thing to do in case the original EXP
	 is expanded later.  */
      TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
      arglist = copy_node (arglist);
    }
  op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);

  /* Make a suitable register to place result in.  */
  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

  emit_queue ();
  start_sequence ();

  /* Select the optab matching this builtin.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_SIN:
      builtin_optab = sin_optab; break;
    case BUILT_IN_COS:
      builtin_optab = cos_optab; break;
    case BUILT_IN_FSQRT:
      builtin_optab = sqrt_optab; break;
    default:
      abort ();
    }

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
			builtin_optab, op0, target, 0);

  /* If we were unable to expand via the builtin, stop the
     sequence (without outputting the insns) and return 0, causing
     a call to the library function.  */
  if (target == 0)
    {
      end_sequence ();
      return 0;
    }

  /* Check the results by default.  But if flag_fast_math is turned on,
     then assume sqrt will always be called with valid arguments.  */

  if (flag_errno_math && ! flag_fast_math)
    {
      rtx lab1;

      /* Don't define the builtin FP instructions
	 if your machine is not IEEE.  */
      if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
	abort ();

      lab1 = gen_label_rtx ();

      /* Test the result; if it is NaN, set errno=EDOM because
	 the argument was not in the domain.  (A NaN compares
	 unequal to itself, so TARGET == TARGET fails only for NaN.)  */
      emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
			       0, 0, lab1);

#ifdef TARGET_EDOM
      {
#ifdef GEN_ERRNO_RTX
	rtx errno_rtx = GEN_ERRNO_RTX;
#else
	rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif

	emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
      }
#else
      /* We can't set errno=EDOM directly; let the library call do it.
	 Pop the arguments right away in case the call gets deleted.  */
      NO_DEFER_POP;
      expand_call (exp, target, 0);
      OK_DEFER_POP;
#endif

      emit_label (lab1);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insns (insns);

  return target;
}
1292 | ||
/* Expand expression EXP which is a call to the strlen builtin.  Return 0
   if we failed the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_strlen (exp, target, mode)
     tree exp;
     rtx target;
     enum machine_mode mode;
{
  tree arglist = TREE_OPERAND (exp, 1);
  enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));

  if (arglist == 0
      /* Arg could be non-pointer if user redeclared this fcn wrong.  */
      || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
    return 0;
  else
    {
      tree src = TREE_VALUE (arglist);
      /* c_strlen yields the length as a tree if SRC is a literal whose
	 length is known at compile time, else 0.  */
      tree len = c_strlen (src);

      int align
	= get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      rtx result, src_rtx, char_rtx;
      enum machine_mode insn_mode = value_mode, char_mode;
      enum insn_code icode = CODE_FOR_nothing;

      /* If the length is known, just return it.  */
      if (len != 0)
	return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return 0;

      /* Call a function if we can't compute strlen in the right mode.
	 Search from VALUE_MODE through successively wider modes for
	 one the target implements.  */

      while (insn_mode != VOIDmode)
	{
	  icode = strlen_optab->handlers[(int) insn_mode].insn_code;
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return 0;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && GET_CODE (result) == REG
	     && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Make sure the operands are acceptable to the predicates.  */

      if (! (*insn_data[(int)icode].operand[0].predicate) (result, insn_mode))
	result = gen_reg_rtx (insn_mode);
      src_rtx = memory_address (BLKmode,
				expand_expr (src, NULL_RTX, ptr_mode,
					     EXPAND_NORMAL));

      if (! (*insn_data[(int)icode].operand[1].predicate) (src_rtx, Pmode))
	src_rtx = copy_to_mode_reg (Pmode, src_rtx);

      /* Check the string is readable and has an end.  */
      if (current_function_check_memory_usage)
	emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
			   src_rtx, Pmode,
			   GEN_INT (MEMORY_USE_RO),
			   TYPE_MODE (integer_type_node));

      /* Operand 2 is the terminator character to scan for — NUL here.  */
      char_rtx = const0_rtx;
      char_mode = insn_data[(int)icode].operand[2].mode;
      if (! (*insn_data[(int)icode].operand[2].predicate) (char_rtx, char_mode))
	char_rtx = copy_to_mode_reg (char_mode, char_rtx);

      emit_insn (GEN_FCN (icode) (result,
				  gen_rtx_MEM (BLKmode, src_rtx),
				  char_rtx, GEN_INT (align)));

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == value_mode)
	return result;
      else if (target != 0)
	{
	  convert_move (target, result, 0);
	  return target;
	}
      else
	return convert_to_mode (value_mode, result, 0);
    }
}
1390 | ||
/* Expand a call to the memcpy builtin, with arguments in ARGLIST.
   Return 0 if a normal call should be emitted; otherwise return an
   rtx for the destination address.  */

static rtx
expand_builtin_memcpy (arglist)
     tree arglist;
{
  /* Verify the shape (dest ptr, src ptr, integer length); any mismatch
     means the user redeclared the function, so punt to a real call.  */
  if (arglist == 0
      /* Arg could be non-pointer if user redeclared this fcn wrong.  */
      || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
      || TREE_CHAIN (arglist) == 0
      || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
	  != POINTER_TYPE)
      || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
      || (TREE_CODE (TREE_TYPE (TREE_VALUE
				(TREE_CHAIN (TREE_CHAIN (arglist)))))
	  != INTEGER_TYPE))
    return 0;
  else
    {
      tree dest = TREE_VALUE (arglist);
      tree src = TREE_VALUE (TREE_CHAIN (arglist));
      tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));

      int src_align
	= get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      rtx dest_mem, src_mem, dest_addr, len_rtx;

      /* If either SRC or DEST is not a pointer type, don't do
	 this operation in-line.  */
      if (src_align == 0 || dest_align == 0)
	return 0;

      dest_mem = get_memory_rtx (dest);
      src_mem = get_memory_rtx (src);
      len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);

      /* Just copy the rights of SRC to the rights of DEST.  */
      if (current_function_check_memory_usage)
	emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
			   XEXP (dest_mem, 0), Pmode,
			   XEXP (src_mem, 0), Pmode,
			   len_rtx, TYPE_MODE (sizetype));

      /* Copy word part most expediently.  */
      dest_addr
	= emit_block_move (dest_mem, src_mem, len_rtx,
			   MIN (src_align, dest_align));

      /* emit_block_move may not hand back an address; recompute it.  */
      if (dest_addr == 0)
	dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);

      return dest_addr;
    }
}
1446 | ||
/* Expand expression EXP, which is a call to the strcpy builtin.  Return 0
   if we failed the caller should emit a normal call.  Only handles the
   case where the source length is known at compile time: it is reduced
   to a memcpy of length strlen (src) + 1.  */

static rtx
expand_builtin_strcpy (exp)
     tree exp;
{
  tree arglist = TREE_OPERAND (exp, 1);
  rtx result;

  if (arglist == 0
      /* Arg could be non-pointer if user redeclared this fcn wrong.  */
      || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
      || TREE_CHAIN (arglist) == 0
      || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
	  != POINTER_TYPE))
    return 0;
  else
    {
      tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));

      /* Unknown source length: emit a real strcpy call.  */
      if (len == 0)
	return 0;

      /* +1 to copy the terminating NUL as well; append the length as a
	 third argument, mutating ARGLIST in place.  */
      len = size_binop (PLUS_EXPR, len, ssize_int (1));
      chainon (arglist, build_tree_list (NULL_TREE, len));
    }

  result = expand_builtin_memcpy (arglist);

  /* If memcpy refused, undo the mutation above so the normal strcpy
     call sees the original two-argument list.  */
  if (! result)
    TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
  return result;
}
1481 | ||
/* Expand expression EXP, which is a call to the memset builtin.  Return 0
   if we failed the caller should emit a normal call.  Only the
   zero-fill case (VAL == 0) is handled in-line, via clear_storage.  */

static rtx
expand_builtin_memset (exp)
     tree exp;
{
  tree arglist = TREE_OPERAND (exp, 1);

  /* Verify the shape (dest ptr, integer value, integer length).  */
  if (arglist == 0
      /* Arg could be non-pointer if user redeclared this fcn wrong.  */
      || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
      || TREE_CHAIN (arglist) == 0
      || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
	  != INTEGER_TYPE)
      || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
      || (INTEGER_TYPE
	  != (TREE_CODE (TREE_TYPE
			 (TREE_VALUE
			  (TREE_CHAIN (TREE_CHAIN (arglist))))))))
    return 0;
  else
    {
      tree dest = TREE_VALUE (arglist);
      tree val = TREE_VALUE (TREE_CHAIN (arglist));
      tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));

      int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      rtx dest_mem, dest_addr, len_rtx;

      /* If DEST is not a pointer type, don't do this
	 operation in-line.  */
      if (dest_align == 0)
	return 0;

      /* If the arguments have side-effects, then we can only evaluate
	 them at most once.  The following code evaluates them twice if
	 they are not constants because we break out to expand_call
	 in that case.  They can't be constants if they have side-effects
	 so we can check for that first.  Alternatively, we could call
	 save_expr to make multiple evaluation safe.  */
      if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len))
	return 0;

      /* If VAL is not 0, don't do this operation in-line.  */
      if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
	return 0;

      len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);

      dest_mem = get_memory_rtx (dest);

      /* Just check DST is writable and mark it as readable.  */
      if (current_function_check_memory_usage)
	emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
			   XEXP (dest_mem, 0), Pmode,
			   len_rtx, TYPE_MODE (sizetype),
			   GEN_INT (MEMORY_USE_WO),
			   TYPE_MODE (integer_type_node));


      dest_addr = clear_storage (dest_mem, len_rtx, dest_align);

      /* clear_storage may not hand back an address; recompute it.  */
      if (dest_addr == 0)
	dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);

      return dest_addr;
    }
}
1552 | ||
1553 | #ifdef HAVE_cmpstrsi | |
/* Expand expression EXP, which is a call to the memcmp or the strcmp builtin.
   ARGLIST is the argument list for this call.  Return 0 if we failed and the
   caller should emit a normal call, otherwise try to get the result in
   TARGET, if convenient.  Requires the target's cmpstrsi pattern.  */

static rtx
expand_builtin_memcmp (exp, arglist, target)
     tree exp;
     tree arglist;
     rtx target;
{
  /* If we need to check memory accesses, call the library function.  */
  if (current_function_check_memory_usage)
    return 0;

  /* Verify the shape (ptr, ptr, integer length).  */
  if (arglist == 0
      /* Arg could be non-pointer if user redeclared this fcn wrong.  */
      || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
      || TREE_CHAIN (arglist) == 0
      || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
      || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
      || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
    return 0;
  else if (!HAVE_cmpstrsi)
    return 0;

  {
    enum machine_mode mode;
    tree arg1 = TREE_VALUE (arglist);
    tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
    tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
    rtx result;

    int arg1_align
      = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    int arg2_align
      = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    enum machine_mode insn_mode
      = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return 0;

    /* Make a place to write the result of the instruction.  */
    result = target;
    if (! (result != 0
	   && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    emit_insn (gen_cmpstrsi (result, get_memory_rtx (arg1),
			     get_memory_rtx (arg2),
			     expand_expr (len, NULL_RTX, VOIDmode, 0),
			     GEN_INT (MIN (arg1_align, arg2_align))));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
}
1622 | ||
/* Expand expression EXP, which is a call to the strcmp builtin.  Return 0
   if we failed the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (exp, target)
     tree exp;
     rtx target;
{
  tree arglist = TREE_OPERAND (exp, 1);

  /* If we need to check memory accesses, call the library function.  */
  if (current_function_check_memory_usage)
    return 0;

  /* Verify that we have exactly two arguments and both are pointers.  */
  if (arglist == 0
      /* Arg could be non-pointer if user redeclared this fcn wrong.  */
      || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
      || TREE_CHAIN (arglist) == 0
      || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
	  != POINTER_TYPE))
    return 0;

  /* Without a cmpstrsi insn pattern we cannot expand this inline.  */
  else if (! HAVE_cmpstrsi)
    return 0;
  {
    tree arg1 = TREE_VALUE (arglist);
    tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
    /* Compile-time lengths, non-NULL only when an argument is a
       string literal whose length is known.  */
    tree len = c_strlen (arg1);
    tree len2 = c_strlen (arg2);
    rtx result;

    /* Include the terminating NUL byte in each known length.  */
    if (len)
      len = size_binop (PLUS_EXPR, ssize_int (1), len);

    if (len2)
      len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

    /* If we don't have a constant length for the first, use the length
       of the second, if we know it.  We don't require a constant for
       this case; some cost analysis could be done if both are available
       but neither is constant.  For now, assume they're equally cheap.

       If both strings have constant lengths, use the smaller.  This
       could arise if optimization results in strcpy being called with
       two fixed strings, or if the code was machine-generated.  We should
       add some code to the `memcmp' handler below to deal with such
       situations, someday.  */

    if (!len || TREE_CODE (len) != INTEGER_CST)
      {
	if (len2)
	  len = len2;
	else if (len == 0)
	  return 0;
      }
    else if (len2 && TREE_CODE (len2) == INTEGER_CST
	     && tree_int_cst_lt (len2, len))
      len = len2;

    /* Temporarily append LEN as a third argument and delegate to the
       memcmp expander.  ARGLIST is shared with the call expression, so
       if expansion fails, chop the extra argument back off so the
       fallback library call sees the original two arguments.  */
    chainon (arglist, build_tree_list (NULL_TREE, len));
    result = expand_builtin_memcmp (exp, arglist, target);
    if (! result)
      TREE_CHAIN (TREE_CHAIN (arglist)) = 0;

    return result;
  }
}
1691 | #endif | |
1692 | ||
/* Expand a call to __builtin_saveregs.  The result is cached in
   saveregs_value; only the first call in a function emits code, and the
   emitted sequence is migrated to the very start of the function.  */

rtx
expand_builtin_saveregs ()
{
  rtx val, seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

#ifdef EXPAND_BUILTIN_SAVEREGS
  /* Do whatever the machine needs done in this case.  */
  val = EXPAND_BUILTIN_SAVEREGS ();
#else
  /* ??? We used to try and build up a call to the out of line function,
     guessing about what registers needed saving etc.  This became much
     harder with __builtin_va_start, since we don't have a tree for a
     call to __builtin_saveregs to fall back on.  There was exactly one
     port (i860) that used this code, and I'm unconvinced it could actually
     handle the general case.  So we no longer try to handle anything
     weird and make the backend absorb the evil.  */

  error ("__builtin_saveregs not supported by this target");
  val = const0_rtx;
#endif

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the sequence after the NOTE that starts the function.  If this
     is inside a SEQUENCE, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insns_after (seq, get_insns ());
  pop_topmost_sequence ();

  return val;
}
1742 | ||
/* __builtin_args_info (N) returns word N of the arg space info
   for the current function.  The number and meanings of words
   is controlled by the definition of CUMULATIVE_ARGS.  */
static rtx
expand_builtin_args_info (exp)
     tree exp;
{
  tree arglist = TREE_OPERAND (exp, 1);
  /* Number of int-sized words in the target's CUMULATIVE_ARGS record.  */
  int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
  /* Reinterpret the cumulative-args record as a flat array of ints so
     individual words can be indexed.  */
  int *word_ptr = (int *) &current_function_args_info;
#if 0
  /* These are used by the code below that is if 0'ed away */
  int i;
  tree type, elts, result;
#endif

  /* The word-indexing scheme only works when the record is an exact
     multiple of int in size.  */
  if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
    abort ();

  if (arglist != 0)
    {
      tree arg = TREE_VALUE (arglist);
      if (TREE_CODE (arg) != INTEGER_CST)
	error ("argument of `__builtin_args_info' must be constant");
      else
	{
	  int wordnum = TREE_INT_CST_LOW (arg);

	  /* A nonzero high part means the constant doesn't fit in one
	     host word, so it is certainly out of range too.  */
	  if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
	    error ("argument of `__builtin_args_info' out of range");
	  else
	    return GEN_INT (word_ptr[wordnum]);
	}
    }
  else
    error ("missing argument in `__builtin_args_info'");

  return const0_rtx;

#if 0
  for (i = 0; i < nwords; i++)
    elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));

  type = build_array_type (integer_type_node,
			   build_index_type (build_int_2 (nwords, 0)));
  result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
  TREE_CONSTANT (result) = 1;
  TREE_STATIC (result) = 1;
  result = build1 (INDIRECT_REF, build_pointer_type (type), result);
  TREE_CONSTANT (result) = 1;
  return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
#endif
}
1796 | ||
a66c9326 | 1797 | /* Expand ARGLIST, from a call to __builtin_next_arg. */ |
53800dbe | 1798 | static rtx |
a66c9326 | 1799 | expand_builtin_next_arg (arglist) |
1800 | tree arglist; | |
53800dbe | 1801 | { |
53800dbe | 1802 | tree fntype = TREE_TYPE (current_function_decl); |
1803 | ||
1804 | if ((TYPE_ARG_TYPES (fntype) == 0 | |
1805 | || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype))) | |
1806 | == void_type_node)) | |
1807 | && ! current_function_varargs) | |
1808 | { | |
1809 | error ("`va_start' used in function with fixed args"); | |
1810 | return const0_rtx; | |
1811 | } | |
1812 | ||
1813 | if (arglist) | |
1814 | { | |
1815 | tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl)); | |
1816 | tree arg = TREE_VALUE (arglist); | |
1817 | ||
1818 | /* Strip off all nops for the sake of the comparison. This | |
1819 | is not quite the same as STRIP_NOPS. It does more. | |
1820 | We must also strip off INDIRECT_EXPR for C++ reference | |
1821 | parameters. */ | |
1822 | while (TREE_CODE (arg) == NOP_EXPR | |
1823 | || TREE_CODE (arg) == CONVERT_EXPR | |
1824 | || TREE_CODE (arg) == NON_LVALUE_EXPR | |
1825 | || TREE_CODE (arg) == INDIRECT_REF) | |
1826 | arg = TREE_OPERAND (arg, 0); | |
1827 | if (arg != last_parm) | |
1828 | warning ("second parameter of `va_start' not last named argument"); | |
1829 | } | |
1830 | else if (! current_function_varargs) | |
1831 | /* Evidently an out of date version of <stdarg.h>; can't validate | |
1832 | va_start's second argument, but can still work as intended. */ | |
1833 | warning ("`__builtin_next_arg' called without an argument"); | |
1834 | ||
1835 | return expand_binop (Pmode, add_optab, | |
1836 | current_function_internal_arg_pointer, | |
1837 | current_function_arg_offset_rtx, | |
1838 | NULL_RTX, 0, OPTAB_LIB_WIDEN); | |
1839 | } | |
1840 | ||
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  WAS_PTR is nonzero when VALIST is the
   address of the user's va_list object rather than the object itself
   (as for the first argument of va_start and va_copy).  */

static tree
stabilize_va_list (valist, was_ptr)
     tree valist;
     int was_ptr;
{
  if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
    {
      /* If stdarg.h took the address of an array-type valist that was passed
	 as a parameter, we'll have taken the address of the parameter itself
	 rather than the array as we'd intended.  Undo this mistake.  */

      if (was_ptr)
	{
	  STRIP_NOPS (valist);

	  /* Two cases: either &array, which decomposed to
	       <ptr <array <record> valist>>
	     or &ptr, which turned into
	       <ptr <ptr <record>>>
	     In the first case we'll need to put the ADDR_EXPR back
	     after frobbing the types as if &array[0].  */

	  if (TREE_CODE (valist) != ADDR_EXPR)
	    abort ();
	  valist = TREE_OPERAND (valist, 0);
	}

      if (TYPE_MAIN_VARIANT (TREE_TYPE (valist))
	  == TYPE_MAIN_VARIANT (va_list_type_node))
	{
	  /* The &array case: rebuild an ADDR_EXPR with element-pointer
	     type, carrying over any side effects from the operand.  */
	  tree pt = build_pointer_type (TREE_TYPE (va_list_type_node));
	  valist = build1 (ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist)
	    = TREE_SIDE_EFFECTS (TREE_OPERAND (valist, 0));
	}
      else
	{
	  /* The &ptr case: the value is already the pointer we want;
	     just sanity-check that it points at the va_list record.  */
	  if (! POINTER_TYPE_P (TREE_TYPE (valist))
	      || (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (valist)))
		  != TYPE_MAIN_VARIANT (TREE_TYPE (va_list_type_node))))
	    abort ();
	}

      /* Guard against evaluating a side-effecting expression twice.  */
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
    }
  else
    {
      if (! was_ptr)
	{
	  tree pt;

	  /* A side-effect-free expression is already safe to evaluate
	     repeatedly; hand it back untouched.  */
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  pt = build_pointer_type (va_list_type_node);
	  valist = fold (build1 (ADDR_EXPR, pt, valist));
	  TREE_SIDE_EFFECTS (valist) = 1;
	}
      /* Wrap in SAVE_EXPR so the pointer is computed only once, then
	 dereference back down to a va_list lvalue.  */
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      valist = fold (build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)),
			     valist));
    }

  return valist;
}
1911 | ||
1912 | /* The "standard" implementation of va_start: just assign `nextarg' to | |
1913 | the variable. */ | |
1914 | void | |
1915 | std_expand_builtin_va_start (stdarg_p, valist, nextarg) | |
1916 | int stdarg_p ATTRIBUTE_UNUSED; | |
1917 | tree valist; | |
1918 | rtx nextarg; | |
1919 | { | |
1920 | tree t; | |
1921 | ||
8a15c04a | 1922 | if (!stdarg_p) |
1923 | nextarg = plus_constant (nextarg, -UNITS_PER_WORD); | |
1924 | ||
a66c9326 | 1925 | t = build (MODIFY_EXPR, TREE_TYPE (valist), valist, |
1926 | make_tree (ptr_type_node, nextarg)); | |
1927 | TREE_SIDE_EFFECTS (t) = 1; | |
1928 | ||
1929 | expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL); | |
1930 | } | |
1931 | ||
1932 | /* Expand ARGLIST, which from a call to __builtin_stdarg_va_start or | |
1933 | __builtin_varargs_va_start, depending on STDARG_P. */ | |
1934 | static rtx | |
1935 | expand_builtin_va_start (stdarg_p, arglist) | |
1936 | int stdarg_p; | |
1937 | tree arglist; | |
1938 | { | |
1939 | rtx nextarg; | |
1940 | tree chain = arglist, valist; | |
1941 | ||
1942 | if (stdarg_p) | |
1943 | nextarg = expand_builtin_next_arg (chain = TREE_CHAIN (arglist)); | |
1944 | else | |
1945 | nextarg = expand_builtin_next_arg (NULL_TREE); | |
1946 | ||
1947 | if (TREE_CHAIN (chain)) | |
1948 | error ("too many arguments to function `va_start'"); | |
1949 | ||
1950 | valist = stabilize_va_list (TREE_VALUE (arglist), 1); | |
1951 | ||
1952 | #ifdef EXPAND_BUILTIN_VA_START | |
1953 | EXPAND_BUILTIN_VA_START (stdarg_p, valist, nextarg); | |
1954 | #else | |
1955 | std_expand_builtin_va_start (stdarg_p, valist, nextarg); | |
1956 | #endif | |
1957 | ||
1958 | return const0_rtx; | |
1959 | } | |
1960 | ||
/* Allocate an alias set for use in storing and reading from the varargs
   spill area.  The set is created lazily on first use and cached for
   the rest of the compilation.  */
int
get_varargs_alias_set ()
{
  static int varargs_set = -1;

  if (varargs_set == -1)
    varargs_set = new_alias_set ();

  return varargs_set;
}
1971 | ||
/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.  VALIST
   is the va_list lvalue, TYPE the type being fetched.  Returns an rtx
   holding the address from which the value should be loaded.  */
rtx
std_expand_builtin_va_arg (valist, type)
     tree valist, type;
{
  tree addr_tree, t;
  HOST_WIDE_INT align;
  HOST_WIDE_INT rounded_size;
  rtx addr;

  /* Compute the rounded size of the type.  */
  align = PARM_BOUNDARY / BITS_PER_UNIT;
  rounded_size = (((int_size_in_bytes (type) + align - 1) / align) * align);

  /* Get AP.  */
  addr_tree = valist;
  if (PAD_VARARGS_DOWN)
    {
      /* Small args are padded downward.  */

      /* Advance past the padding so the value is read from the top of
	 its slot: offset is rounded_size minus the actual data size,
	 or zero when the argument fills one slot or more.  */
      HOST_WIDE_INT adj;
      adj = TREE_INT_CST_LOW (TYPE_SIZE (type)) / BITS_PER_UNIT;
      if (rounded_size > align)
	adj = rounded_size;

      addr_tree = build (PLUS_EXPR, TREE_TYPE (addr_tree), addr_tree,
			 build_int_2 (rounded_size - adj, 0));
    }

  /* Evaluate the address now and copy it to a register, because the
     update of AP below must not disturb the value we return.  */
  addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
  addr = copy_to_reg (addr);

  /* Compute new value for AP.  */
  t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
	     build (PLUS_EXPR, TREE_TYPE (valist), valist,
		    build_int_2 (rounded_size, 0)));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return addr;
}
2014 | ||
/* Expand __builtin_va_arg, which is not really a builtin function, but
   a very special sort of operator.  VALIST is the user's va_list
   expression, TYPE the requested value type.  Returns a MEM rtx through
   which the value may be read.  */
rtx
expand_builtin_va_arg (valist, type)
     tree valist, type;
{
  rtx addr, result;
  tree promoted_type, want_va_type, have_va_type;

  /* Verify that valist is of the proper type.  */

  want_va_type = va_list_type_node;
  have_va_type = TREE_TYPE (valist);
  if (TREE_CODE (want_va_type) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (have_va_type) == ARRAY_TYPE
	  || TREE_CODE (have_va_type) == POINTER_TYPE)
	{
	  want_va_type = TREE_TYPE (want_va_type);
	  have_va_type = TREE_TYPE (have_va_type);
	}
    }
  if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
    {
      error ("first argument to `va_arg' not of type `va_list'");
      addr = const0_rtx;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  else if ((promoted_type = (*lang_type_promotes_to) (type)) != NULL_TREE)
    {
      const char *name = "<anonymous type>", *pname = 0;
      static int gave_help;

      /* Dig out printable names for the requested and promoted types;
	 a type may be named directly by an IDENTIFIER or via a
	 TYPE_DECL.  */
      if (TYPE_NAME (type))
	{
	  if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE)
	    name = IDENTIFIER_POINTER (TYPE_NAME (type));
	  else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
		   && DECL_NAME (TYPE_NAME (type)))
	    name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
	}
      if (TYPE_NAME (promoted_type))
	{
	  if (TREE_CODE (TYPE_NAME (promoted_type)) == IDENTIFIER_NODE)
	    pname = IDENTIFIER_POINTER (TYPE_NAME (promoted_type));
	  else if (TREE_CODE (TYPE_NAME (promoted_type)) == TYPE_DECL
		   && DECL_NAME (TYPE_NAME (promoted_type)))
	    pname = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (promoted_type)));
	}

      error ("`%s' is promoted to `%s' when passed through `...'", name, pname);
      /* Emit the remedial hint only once per compilation.  */
      if (! gave_help)
	{
	  gave_help = 1;
	  error ("(so you should pass `%s' not `%s' to `va_arg')", pname, name);
	}

      addr = const0_rtx;
    }
  else
    {
      /* Make it easier for the backends by protecting the valist argument
	 from multiple evaluations.  */
      valist = stabilize_va_list (valist, 0);

#ifdef EXPAND_BUILTIN_VA_ARG
      addr = EXPAND_BUILTIN_VA_ARG (valist, type);
#else
      addr = std_expand_builtin_va_arg (valist, type);
#endif
    }

  /* Read through the dedicated varargs alias set rather than the
     type's usual one.  */
  result = gen_rtx_MEM (TYPE_MODE (type), addr);
  MEM_ALIAS_SET (result) = get_varargs_alias_set ();

  return result;
}
2098 | ||
2099 | /* Expand ARGLIST, from a call to __builtin_va_end. */ | |
2100 | static rtx | |
2101 | expand_builtin_va_end (arglist) | |
8a15c04a | 2102 | tree arglist; |
a66c9326 | 2103 | { |
8a15c04a | 2104 | tree valist = TREE_VALUE (arglist); |
2105 | ||
a66c9326 | 2106 | #ifdef EXPAND_BUILTIN_VA_END |
a66c9326 | 2107 | valist = stabilize_va_list (valist, 0); |
2108 | EXPAND_BUILTIN_VA_END(arglist); | |
8a15c04a | 2109 | #else |
2110 | /* Evaluate for side effects, if needed. I hate macros that don't | |
2111 | do that. */ | |
2112 | if (TREE_SIDE_EFFECTS (valist)) | |
2113 | expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL); | |
a66c9326 | 2114 | #endif |
2115 | ||
2116 | return const0_rtx; | |
2117 | } | |
2118 | ||
/* Expand ARGLIST, from a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */
static rtx
expand_builtin_va_copy (arglist)
     tree arglist;
{
  tree dst, src, t;

  dst = TREE_VALUE (arglist);
  src = TREE_VALUE (TREE_CHAIN (arglist));

  /* The destination comes in as an address (was_ptr nonzero); the
     source as the va_list object itself.  */
  dst = stabilize_va_list (dst, 1);
  src = stabilize_va_list (src, 0);

  if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
    {
      /* Non-array va_list: a plain assignment does the copy.  */
      t = build (MODIFY_EXPR, va_list_type_node, dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      /* Array-type va_list: copy the underlying storage as a block.  */
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
			  VOIDmode, EXPAND_NORMAL);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      MEM_ALIAS_SET (dstb) = get_alias_set (TREE_TYPE (TREE_TYPE (dst)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      MEM_ALIAS_SET (srcb) = get_alias_set (TREE_TYPE (TREE_TYPE (src)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size,
		       TYPE_ALIGN (va_list_type_node) / BITS_PER_UNIT);
    }

  return const0_rtx;
}
2163 | ||
53800dbe | 2164 | /* Expand a call to one of the builtin functions __builtin_frame_address or |
2165 | __builtin_return_address. */ | |
2166 | static rtx | |
2167 | expand_builtin_frame_address (exp) | |
2168 | tree exp; | |
2169 | { | |
2170 | tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0); | |
2171 | tree arglist = TREE_OPERAND (exp, 1); | |
2172 | ||
2173 | /* The argument must be a nonnegative integer constant. | |
2174 | It counts the number of frames to scan up the stack. | |
2175 | The value is the return address saved in that frame. */ | |
2176 | if (arglist == 0) | |
2177 | /* Warning about missing arg was already issued. */ | |
2178 | return const0_rtx; | |
2179 | else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST | |
2180 | || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0) | |
2181 | { | |
2182 | if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS) | |
2183 | error ("invalid arg to `__builtin_frame_address'"); | |
2184 | else | |
2185 | error ("invalid arg to `__builtin_return_address'"); | |
2186 | return const0_rtx; | |
2187 | } | |
2188 | else | |
2189 | { | |
2190 | rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), | |
2191 | TREE_INT_CST_LOW (TREE_VALUE (arglist)), | |
2192 | hard_frame_pointer_rtx); | |
2193 | ||
2194 | /* Some ports cannot access arbitrary stack frames. */ | |
2195 | if (tem == NULL) | |
2196 | { | |
2197 | if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS) | |
2198 | warning ("unsupported arg to `__builtin_frame_address'"); | |
2199 | else | |
2200 | warning ("unsupported arg to `__builtin_return_address'"); | |
2201 | return const0_rtx; | |
2202 | } | |
2203 | ||
2204 | /* For __builtin_frame_address, return what we've got. */ | |
2205 | if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS) | |
2206 | return tem; | |
2207 | ||
2208 | if (GET_CODE (tem) != REG | |
2209 | && ! CONSTANT_P (tem)) | |
2210 | tem = copy_to_mode_reg (Pmode, tem); | |
2211 | return tem; | |
2212 | } | |
2213 | } | |
2214 | ||
2215 | /* Expand a call to the alloca builtin, with arguments ARGLIST. Return 0 if | |
2216 | we failed and the caller should emit a normal call, otherwise try to get | |
2217 | the result in TARGET, if convenient. */ | |
2218 | static rtx | |
2219 | expand_builtin_alloca (arglist, target) | |
2220 | tree arglist; | |
2221 | rtx target; | |
2222 | { | |
2223 | rtx op0; | |
2224 | ||
2225 | if (arglist == 0 | |
2226 | /* Arg could be non-integer if user redeclared this fcn wrong. */ | |
2227 | || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE) | |
2228 | return 0; | |
2229 | ||
2230 | /* Compute the argument. */ | |
2231 | op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0); | |
2232 | ||
2233 | /* Allocate the desired space. */ | |
2234 | return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT); | |
2235 | } | |
2236 | ||
2237 | /* Expand a call to the ffs builtin. The arguments are in ARGLIST. | |
2238 | Return 0 if a normal call should be emitted rather than expanding the | |
2239 | function in-line. If convenient, the result should be placed in TARGET. | |
2240 | SUBTARGET may be used as the target for computing one of EXP's operands. */ | |
2241 | static rtx | |
2242 | expand_builtin_ffs (arglist, target, subtarget) | |
2243 | tree arglist; | |
2244 | rtx target, subtarget; | |
2245 | { | |
2246 | rtx op0; | |
2247 | if (arglist == 0 | |
2248 | /* Arg could be non-integer if user redeclared this fcn wrong. */ | |
2249 | || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE) | |
2250 | return 0; | |
2251 | ||
2252 | /* Compute the argument. */ | |
2253 | op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0); | |
2254 | /* Compute ffs, into TARGET if possible. | |
2255 | Set TARGET to wherever the result comes back. */ | |
2256 | target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))), | |
2257 | ffs_optab, op0, target, 1); | |
2258 | if (target == 0) | |
2259 | abort (); | |
2260 | return target; | |
2261 | } | |
2262 | \f | |
2263 | /* Expand an expression EXP that calls a built-in function, | |
2264 | with result going to TARGET if that's convenient | |
2265 | (and in mode MODE if that's convenient). | |
2266 | SUBTARGET may be used as the target for computing one of EXP's operands. | |
2267 | IGNORE is nonzero if the value is to be ignored. */ | |
2268 | ||
2269 | rtx | |
2270 | expand_builtin (exp, target, subtarget, mode, ignore) | |
2271 | tree exp; | |
2272 | rtx target; | |
2273 | rtx subtarget; | |
2274 | enum machine_mode mode; | |
2275 | int ignore; | |
2276 | { | |
2277 | tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0); | |
2278 | tree arglist = TREE_OPERAND (exp, 1); | |
2279 | enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl); | |
2280 | ||
8305149e | 2281 | #ifdef MD_EXPAND_BUILTIN |
2282 | if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD) | |
2283 | return MD_EXPAND_BUILTIN (exp, target, subtarget, mode, ignore); | |
2284 | #endif | |
2285 | ||
53800dbe | 2286 | /* When not optimizing, generate calls to library functions for a certain |
2287 | set of builtins. */ | |
2288 | if (! optimize && ! CALLED_AS_BUILT_IN (fndecl) | |
2289 | && (fcode == BUILT_IN_SIN || fcode == BUILT_IN_COS | |
2290 | || fcode == BUILT_IN_FSQRT || fcode == BUILT_IN_MEMSET | |
2291 | || fcode == BUILT_IN_MEMCPY || fcode == BUILT_IN_MEMCMP | |
2292 | || fcode == BUILT_IN_STRLEN || fcode == BUILT_IN_STRCPY | |
2293 | || fcode == BUILT_IN_STRCMP || fcode == BUILT_IN_FFS)) | |
2294 | return expand_call (exp, target, ignore); | |
2295 | ||
2296 | switch (fcode) | |
2297 | { | |
2298 | case BUILT_IN_ABS: | |
2299 | case BUILT_IN_LABS: | |
2300 | case BUILT_IN_FABS: | |
2301 | /* build_function_call changes these into ABS_EXPR. */ | |
2302 | abort (); | |
2303 | ||
2304 | case BUILT_IN_SIN: | |
2305 | case BUILT_IN_COS: | |
2306 | /* Treat these like sqrt, but only if the user asks for them. */ | |
2307 | if (! flag_fast_math) | |
2308 | break; | |
2309 | case BUILT_IN_FSQRT: | |
2310 | target = expand_builtin_mathfn (exp, target, subtarget); | |
2311 | if (target) | |
2312 | return target; | |
2313 | break; | |
2314 | ||
2315 | case BUILT_IN_FMOD: | |
2316 | break; | |
2317 | ||
2318 | case BUILT_IN_APPLY_ARGS: | |
2319 | return expand_builtin_apply_args (); | |
2320 | ||
2321 | /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes | |
2322 | FUNCTION with a copy of the parameters described by | |
2323 | ARGUMENTS, and ARGSIZE. It returns a block of memory | |
2324 | allocated on the stack into which is stored all the registers | |
2325 | that might possibly be used for returning the result of a | |
2326 | function. ARGUMENTS is the value returned by | |
2327 | __builtin_apply_args. ARGSIZE is the number of bytes of | |
2328 | arguments that must be copied. ??? How should this value be | |
2329 | computed? We'll also need a safe worst case value for varargs | |
2330 | functions. */ | |
2331 | case BUILT_IN_APPLY: | |
2332 | if (arglist == 0 | |
2333 | /* Arg could be non-pointer if user redeclared this fcn wrong. */ | |
2334 | || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist))) | |
2335 | || TREE_CHAIN (arglist) == 0 | |
2336 | || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE | |
2337 | || TREE_CHAIN (TREE_CHAIN (arglist)) == 0 | |
2338 | || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE) | |
2339 | return const0_rtx; | |
2340 | else | |
2341 | { | |
2342 | int i; | |
2343 | tree t; | |
2344 | rtx ops[3]; | |
2345 | ||
2346 | for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++) | |
2347 | ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0); | |
2348 | ||
2349 | return expand_builtin_apply (ops[0], ops[1], ops[2]); | |
2350 | } | |
2351 | ||
2352 | /* __builtin_return (RESULT) causes the function to return the | |
2353 | value described by RESULT. RESULT is address of the block of | |
2354 | memory returned by __builtin_apply. */ | |
2355 | case BUILT_IN_RETURN: | |
2356 | if (arglist | |
2357 | /* Arg could be non-pointer if user redeclared this fcn wrong. */ | |
2358 | && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE) | |
2359 | expand_builtin_return (expand_expr (TREE_VALUE (arglist), | |
2360 | NULL_RTX, VOIDmode, 0)); | |
2361 | return const0_rtx; | |
2362 | ||
2363 | case BUILT_IN_SAVEREGS: | |
a66c9326 | 2364 | return expand_builtin_saveregs (); |
53800dbe | 2365 | |
2366 | case BUILT_IN_ARGS_INFO: | |
2367 | return expand_builtin_args_info (exp); | |
2368 | ||
2369 | /* Return the address of the first anonymous stack arg. */ | |
2370 | case BUILT_IN_NEXT_ARG: | |
a66c9326 | 2371 | return expand_builtin_next_arg (arglist); |
53800dbe | 2372 | |
2373 | case BUILT_IN_CLASSIFY_TYPE: | |
2374 | return expand_builtin_classify_type (arglist); | |
2375 | ||
2376 | case BUILT_IN_CONSTANT_P: | |
2377 | return expand_builtin_constant_p (exp); | |
2378 | ||
2379 | case BUILT_IN_FRAME_ADDRESS: | |
2380 | case BUILT_IN_RETURN_ADDRESS: | |
2381 | return expand_builtin_frame_address (exp); | |
2382 | ||
2383 | /* Returns the address of the area where the structure is returned. | |
2384 | 0 otherwise. */ | |
2385 | case BUILT_IN_AGGREGATE_INCOMING_ADDRESS: | |
2386 | if (arglist != 0 | |
2387 | || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))) | |
2388 | || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM) | |
2389 | return const0_rtx; | |
2390 | else | |
2391 | return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0); | |
2392 | ||
2393 | case BUILT_IN_ALLOCA: | |
2394 | target = expand_builtin_alloca (arglist, target); | |
2395 | if (target) | |
2396 | return target; | |
2397 | break; | |
2398 | ||
2399 | case BUILT_IN_FFS: | |
bdc5170d | 2400 | target = expand_builtin_ffs (arglist, target, subtarget); |
53800dbe | 2401 | if (target) |
2402 | return target; | |
2403 | break; | |
2404 | ||
2405 | case BUILT_IN_STRLEN: | |
2406 | target = expand_builtin_strlen (exp, target, mode); | |
2407 | if (target) | |
2408 | return target; | |
2409 | break; | |
2410 | ||
2411 | case BUILT_IN_STRCPY: | |
2412 | target = expand_builtin_strcpy (exp); | |
2413 | if (target) | |
2414 | return target; | |
2415 | break; | |
2416 | ||
2417 | case BUILT_IN_MEMCPY: | |
2418 | target = expand_builtin_memcpy (arglist); | |
2419 | if (target) | |
2420 | return target; | |
2421 | break; | |
2422 | ||
2423 | case BUILT_IN_MEMSET: | |
2424 | target = expand_builtin_memset (exp); | |
2425 | if (target) | |
2426 | return target; | |
2427 | break; | |
2428 | ||
2429 | /* These comparison functions need an instruction that returns an actual | |
2430 | index. An ordinary compare that just sets the condition codes | |
2431 | is not enough. */ | |
2432 | #ifdef HAVE_cmpstrsi | |
2433 | case BUILT_IN_STRCMP: | |
2434 | target = expand_builtin_strcmp (exp, target); | |
2435 | if (target) | |
2436 | return target; | |
2437 | break; | |
2438 | ||
2439 | case BUILT_IN_MEMCMP: | |
2440 | target = expand_builtin_memcmp (exp, arglist, target); | |
2441 | if (target) | |
2442 | return target; | |
2443 | break; | |
2444 | #else | |
2445 | case BUILT_IN_STRCMP: | |
2446 | case BUILT_IN_MEMCMP: | |
2447 | break; | |
2448 | #endif | |
2449 | ||
2450 | case BUILT_IN_SETJMP: | |
2451 | if (arglist == 0 | |
2452 | || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE) | |
2453 | break; | |
2454 | else | |
2455 | { | |
2456 | rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget, | |
2457 | VOIDmode, 0); | |
2458 | rtx lab = gen_label_rtx (); | |
2459 | rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab); | |
2460 | emit_label (lab); | |
2461 | return ret; | |
2462 | } | |
2463 | ||
2464 | /* __builtin_longjmp is passed a pointer to an array of five words. | |
2465 | It's similar to the C library longjmp function but works with | |
2466 | __builtin_setjmp above. */ | |
2467 | case BUILT_IN_LONGJMP: | |
2468 | if (arglist == 0 || TREE_CHAIN (arglist) == 0 | |
2469 | || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE) | |
2470 | break; | |
2471 | else | |
2472 | { | |
2473 | rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget, | |
2474 | VOIDmode, 0); | |
2475 | rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), | |
2476 | NULL_RTX, VOIDmode, 0); | |
2477 | ||
2478 | if (value != const1_rtx) | |
2479 | { | |
2480 | error ("__builtin_longjmp second argument must be 1"); | |
2481 | return const0_rtx; | |
2482 | } | |
2483 | ||
2484 | expand_builtin_longjmp (buf_addr, value); | |
2485 | return const0_rtx; | |
2486 | } | |
2487 | ||
2488 | case BUILT_IN_TRAP: | |
2489 | #ifdef HAVE_trap | |
2490 | if (HAVE_trap) | |
2491 | emit_insn (gen_trap ()); | |
2492 | else | |
2493 | #endif | |
2494 | error ("__builtin_trap not supported by this target"); | |
2495 | emit_barrier (); | |
2496 | return const0_rtx; | |
2497 | ||
2498 | /* Various hooks for the DWARF 2 __throw routine. */ | |
2499 | case BUILT_IN_UNWIND_INIT: | |
2500 | expand_builtin_unwind_init (); | |
2501 | return const0_rtx; | |
2502 | case BUILT_IN_DWARF_CFA: | |
2503 | return virtual_cfa_rtx; | |
2504 | #ifdef DWARF2_UNWIND_INFO | |
2505 | case BUILT_IN_DWARF_FP_REGNUM: | |
2506 | return expand_builtin_dwarf_fp_regnum (); | |
695e919b | 2507 | case BUILT_IN_INIT_DWARF_REG_SIZES: |
2508 | expand_builtin_init_dwarf_reg_sizes (TREE_VALUE (arglist)); | |
2509 | return const0_rtx; | |
53800dbe | 2510 | #endif |
2511 | case BUILT_IN_FROB_RETURN_ADDR: | |
2512 | return expand_builtin_frob_return_addr (TREE_VALUE (arglist)); | |
2513 | case BUILT_IN_EXTRACT_RETURN_ADDR: | |
2514 | return expand_builtin_extract_return_addr (TREE_VALUE (arglist)); | |
2515 | case BUILT_IN_EH_RETURN: | |
2516 | expand_builtin_eh_return (TREE_VALUE (arglist), | |
2517 | TREE_VALUE (TREE_CHAIN (arglist)), | |
2518 | TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)))); | |
2519 | return const0_rtx; | |
a66c9326 | 2520 | case BUILT_IN_VARARGS_START: |
2521 | return expand_builtin_va_start (0, arglist); | |
2522 | case BUILT_IN_STDARG_START: | |
2523 | return expand_builtin_va_start (1, arglist); | |
2524 | case BUILT_IN_VA_END: | |
2525 | return expand_builtin_va_end (arglist); | |
2526 | case BUILT_IN_VA_COPY: | |
2527 | return expand_builtin_va_copy (arglist); | |
53800dbe | 2528 | |
2529 | default: /* just do library call, if unknown builtin */ | |
2530 | error ("built-in function `%s' not currently supported", | |
2531 | IDENTIFIER_POINTER (DECL_NAME (fndecl))); | |
2532 | } | |
2533 | ||
2534 | /* The switch statement above can drop through to cause the function | |
2535 | to be called normally. */ | |
2536 | return expand_call (exp, target, ignore); | |
2537 | } |