]>
Commit | Line | Data |
---|---|---|
53800dbe | 1 | /* Expand builtin functions. |
2 | Copyright (C) 1988, 92-98, 1999 Free Software Foundation, Inc. | |
3 | ||
4 | This file is part of GNU CC. | |
5 | ||
6 | GNU CC is free software; you can redistribute it and/or modify | |
7 | it under the terms of the GNU General Public License as published by | |
8 | the Free Software Foundation; either version 2, or (at your option) | |
9 | any later version. | |
10 | ||
11 | GNU CC is distributed in the hope that it will be useful, | |
12 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
13 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
14 | GNU General Public License for more details. | |
15 | ||
16 | You should have received a copy of the GNU General Public License | |
17 | along with GNU CC; see the file COPYING. If not, write to | |
18 | the Free Software Foundation, 59 Temple Place - Suite 330, | |
19 | Boston, MA 02111-1307, USA. */ | |
20 | ||
21 | #include "config.h" | |
22 | #include "system.h" | |
23 | #include "machmode.h" | |
24 | #include "rtl.h" | |
25 | #include "tree.h" | |
26 | #include "obstack.h" | |
27 | #include "flags.h" | |
28 | #include "regs.h" | |
29 | #include "hard-reg-set.h" | |
30 | #include "except.h" | |
31 | #include "function.h" | |
32 | #include "insn-flags.h" | |
33 | #include "insn-codes.h" | |
34 | #include "insn-config.h" | |
35 | #include "expr.h" | |
36 | #include "recog.h" | |
37 | #include "output.h" | |
38 | #include "typeclass.h" | |
39 | #include "defaults.h" | |
40 | #include "toplev.h" | |
41 | ||
42 | #define CALLED_AS_BUILT_IN(NODE) \ | |
43 | (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10)) | |
44 | ||
45 | #define CEIL(x,y) (((x) + (y) - 1) / (y)) | |
46 | ||
47 | /* Register mappings for target machines without register windows. */ | |
48 | #ifndef INCOMING_REGNO | |
49 | #define INCOMING_REGNO(OUT) (OUT) | |
50 | #endif | |
51 | #ifndef OUTGOING_REGNO | |
52 | #define OUTGOING_REGNO(IN) (IN) | |
53 | #endif | |
54 | ||
55 | /* Nonzero means __builtin_saveregs has already been done in this function. | |
56 | The value is the pseudoreg containing the value __builtin_saveregs | |
57 | returned. */ | |
58 | rtx saveregs_value; | |
59 | ||
60 | /* Similarly for __builtin_apply_args. */ | |
61 | rtx apply_args_value; | |
62 | ||
63 | static int get_pointer_alignment PROTO((tree, unsigned)); | |
64 | static tree c_strlen PROTO((tree)); | |
65 | static rtx get_memory_rtx PROTO((tree)); | |
66 | static int apply_args_size PROTO((void)); | |
67 | static int apply_result_size PROTO((void)); | |
68 | static rtx result_vector PROTO((int, rtx)); | |
69 | static rtx expand_builtin_apply_args PROTO((void)); | |
70 | static rtx expand_builtin_apply_args_1 PROTO((void)); | |
71 | static rtx expand_builtin_apply PROTO((rtx, rtx, rtx)); | |
72 | static void expand_builtin_return PROTO((rtx)); | |
73 | static rtx expand_builtin_classify_type PROTO((tree)); | |
74 | static rtx expand_builtin_mathfn PROTO((tree, rtx, rtx)); | |
75 | static rtx expand_builtin_constant_p PROTO((tree)); | |
53800dbe | 76 | static rtx expand_builtin_args_info PROTO((tree)); |
77 | static rtx expand_builtin_next_arg PROTO((tree)); | |
a66c9326 | 78 | static rtx expand_builtin_va_start PROTO((int, tree)); |
79 | static rtx expand_builtin_va_end PROTO((tree)); | |
80 | static rtx expand_builtin_va_copy PROTO((tree)); | |
53800dbe | 81 | static rtx expand_builtin_memcmp PROTO((tree, tree, rtx)); |
82 | static rtx expand_builtin_strcmp PROTO((tree, rtx)); | |
83 | static rtx expand_builtin_memcpy PROTO((tree)); | |
84 | static rtx expand_builtin_strcpy PROTO((tree)); | |
85 | static rtx expand_builtin_memset PROTO((tree)); | |
86 | static rtx expand_builtin_strlen PROTO((tree, rtx, enum machine_mode)); | |
87 | static rtx expand_builtin_alloca PROTO((tree, rtx)); | |
88 | static rtx expand_builtin_ffs PROTO((tree, rtx, rtx)); | |
89 | static rtx expand_builtin_frame_address PROTO((tree)); | |
90 | ||
/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to (if it is not a POINTER_TYPE, 0 is returned).

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

static int
get_pointer_alignment (exp, max_align)
     tree exp;
     unsigned max_align;
{
  unsigned align, inner;

  /* A non-pointer expression tells us nothing about alignment.  */
  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
    return 0;

  /* Start from the declared alignment of the pointed-to type, capped
     at MAX_ALIGN.  */
  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  /* Walk down through conversions, sums and ADDR_EXPRs, refining ALIGN
     as we learn more about what EXP actually points at.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case NOP_EXPR:
	case CONVERT_EXPR:
	case NON_LVALUE_EXPR:
	  /* Conversions preserve the pointer value; take the tighter of
	     the alignment seen so far and that of the inner type.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
	    return align;
	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
	  align = MIN (inner, max_align);
	  break;

	case PLUS_EXPR:
	  /* If sum of pointer + int, restrict our maximum alignment to that
	     imposed by the integer.  If not, we can't do any better than
	     ALIGN.  */
	  if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
	    return align;

	  /* Halve MAX_ALIGN until the constant byte offset, expressed in
	     bits, is a multiple of it.  */
	  while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
		  & (max_align - 1))
		 != 0)
	    max_align >>= 1;

	  exp = TREE_OPERAND (exp, 0);
	  break;

	case ADDR_EXPR:
	  /* See what we are pointing at and look at its alignment.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (TREE_CODE (exp) == FUNCTION_DECL)
	    align = FUNCTION_BOUNDARY;
	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
	    align = DECL_ALIGN (exp);
#ifdef CONSTANT_ALIGNMENT
	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
	    align = CONSTANT_ALIGNMENT (exp, align);
#endif
	  return MIN (align, max_align);

	default:
	  return align;
	}
    }
}
159 | ||
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.

   Returns a size_int node holding the length, or 0 when the length
   cannot be determined at compile time.  */

static tree
c_strlen (src)
     tree src;
{
  tree offset_node;
  int offset, max;
  char *ptr;

  /* Reduce SRC to a string constant plus an offset expression.  */
  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;
  max = TREE_STRING_LENGTH (src);
  ptr = TREE_STRING_POINTER (src);

  /* Handle a non-constant (run-time) offset into the string.  */
  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;
      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return 0;
      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  */
      /* This would perhaps not be valid if we were dealing with named
	 arrays in addition to literal string constants.  */
      return size_binop (MINUS_EXPR, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character.  */
  if (offset_node == 0)
    offset = 0;
  else
    {
      /* Did we get a long long offset?  If so, punt.  */
      if (TREE_INT_CST_HIGH (offset_node) != 0)
	return 0;
      offset = TREE_INT_CST_LOW (offset_node);
    }
  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      warning ("offset outside bounds of constant string");
      return 0;
    }
  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return size_int (strlen (ptr + offset));
}
225 | ||
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).

   FNDECL_CODE is BUILT_IN_RETURN_ADDRESS or BUILT_IN_FRAME_ADDRESS;
   COUNT is the number of frames to walk back; TEM is the rtx for the
   starting frame address.  Returns an rtx for the requested address.  */
rtx
expand_builtin_return_addr (fndecl_code, count, tem)
     enum built_in_function fndecl_code;
     int count;
     rtx tem;
{
  int i;

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the sparc, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the sparc, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
    }

  /* For __builtin_frame_address, return what we've got.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return tem;

  /* For __builtin_return_address, Get the return address from that
     frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  /* Default: assume the return address is stored one pointer-sized
     word past the frame address.  NOTE(review): target-specific;
     machines with a different layout must define RETURN_ADDR_RTX.  */
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_rtx_MEM (Pmode, tem);
#endif
  return tem;
}
281 | ||
/* __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below (and for longjmp) is copied from the handling of
   non-local gotos.

   BUF_ADDR is the address of the user's buffer; TARGET is a suggested
   place for the 0/1 result; FIRST_LABEL is branched to on the initial
   (direct) return, NEXT_LABEL on a longjmp return.  Returns the rtx
   holding the result value.

   NOTE: This is intended for use by GNAT and the exception handling
   scheme in the compiler and will only work in the method used by
   them.  */

rtx
expand_builtin_setjmp (buf_addr, target, first_label, next_label)
     rtx buf_addr;
     rtx target;
     rtx first_label, next_label;
{
  rtx lab1 = gen_label_rtx ();
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  enum machine_mode value_mode;
  rtx stack_save;

  value_mode = TYPE_MODE (integer_type_node);

#ifdef POINTERS_EXTEND_UNSIGNED
  buf_addr = convert_memory_address (Pmode, buf_addr);
#endif

  buf_addr = force_reg (Pmode, buf_addr);

  /* The result must live in a pseudo, since we store to it on two
     separate code paths below.  */
  if (target == 0 || GET_CODE (target) != REG
      || REGNO (target) < FIRST_PSEUDO_REGISTER)
    target = gen_reg_rtx (value_mode);

  emit_queue ();

  /* We store the frame pointer and the address of lab1 in the buffer
     and use the rest of it for the stack save area, which is
     machine-dependent.  */

#ifndef BUILTIN_SETJMP_FRAME_VALUE
#define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
#endif

  /* Buffer word 0: the frame value.  */
  emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
		  BUILTIN_SETJMP_FRAME_VALUE);
  /* Buffer word 1: the receiver label that longjmp jumps back to.  */
  emit_move_insn (validize_mem
		  (gen_rtx_MEM (Pmode,
				plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)))),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, lab1)));

  /* Remaining words: the machine-dependent stack save area.  */
  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* Set TARGET to zero and branch to the first-time-through label.  */
  emit_move_insn (target, const0_rtx);
  emit_jump_insn (gen_jump (first_label));
  emit_barrier ();
  emit_label (lab1);

  /* Tell flow about the strange goings on.  Putting `lab1' on
     `nonlocal_goto_handler_labels' to indicates that function
     calls may traverse the arc back to this label.  */

  current_function_has_nonlocal_label = 1;
  nonlocal_goto_handler_labels =
    gen_rtx_EXPR_LIST (VOIDmode, lab1, nonlocal_goto_handler_labels);

  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  The code below is from expand_end_bindings
     in stmt.c; see detailed documentation there.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the target can eliminate the arg pointer in favor of the
	 hard frame pointer, no explicit restore is needed; search the
	 elimination table for that pair.  */
      size_t i;
      static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == sizeof elim_regs / sizeof elim_regs [0])
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.
	     If there hasn't be space allocated for it yet, make
	     some now.  */
	  if (arg_pointer_save_area == 0)
	    arg_pointer_save_area
	      = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
	  emit_move_insn (virtual_incoming_args_rtx,
			  copy_to_reg (arg_pointer_save_area));
	}
    }
#endif

  /* Let the target emit receiver-side setup, preferring the
     builtin_setjmp-specific pattern, then the nonlocal-goto one,
     then nothing.  */
#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (lab1));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
  else
#endif
    {
      ; /* Nothing */
    }

  /* Set TARGET, and branch to the next-time-through label.  */
  emit_move_insn (target, const1_rtx);
  emit_jump_insn (gen_jump (next_label));
  emit_barrier ();

  return target;
}
423 | ||
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.

   BUF_ADDR is the buffer previously filled in by expand_builtin_setjmp;
   VALUE must be const1_rtx, the value builtin_setjmp returns on the
   longjmp path.

   NOTE: This is intended for use by GNAT and the exception handling
   scheme in the compiler and will only work in the method used by
   them.  */

void
expand_builtin_longjmp (buf_addr, value)
     rtx buf_addr, value;
{
  rtx fp, lab, stack;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

#ifdef POINTERS_EXTEND_UNSIGNED
  buf_addr = convert_memory_address (Pmode, buf_addr);
#endif
  buf_addr = force_reg (Pmode, buf_addr);

  /* We used to store value in static_chain_rtx, but that fails if pointers
     are smaller than integers.  We instead require that the user must pass
     a second argument of 1, because that is what builtin_setjmp will
     return.  This also makes EH slightly more efficient, since we are no
     longer copying around a value that we don't care about.  */
  if (value != const1_rtx)
    abort ();

#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      /* The buffer layout must match expand_builtin_setjmp: word 0 is
	 the saved frame pointer, word 1 the receiver label, and the
	 remainder the stack save area.  */
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

	  /* USEs keep the restored frame and stack pointers live up to
	     the indirect jump.  */
	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (lab);
	}
    }
}
488 | ||
489 | /* Get a MEM rtx for expression EXP which can be used in a string instruction | |
490 | (cmpstrsi, movstrsi, ..). */ | |
491 | static rtx | |
492 | get_memory_rtx (exp) | |
493 | tree exp; | |
494 | { | |
495 | rtx mem; | |
496 | int is_aggregate; | |
497 | ||
498 | mem = gen_rtx_MEM (BLKmode, | |
499 | memory_address (BLKmode, | |
500 | expand_expr (exp, NULL_RTX, | |
501 | ptr_mode, EXPAND_SUM))); | |
502 | ||
503 | RTX_UNCHANGING_P (mem) = TREE_READONLY (exp); | |
504 | ||
505 | /* Figure out the type of the object pointed to. Set MEM_IN_STRUCT_P | |
506 | if the value is the address of a structure or if the expression is | |
507 | cast to a pointer to structure type. */ | |
508 | is_aggregate = 0; | |
509 | ||
510 | while (TREE_CODE (exp) == NOP_EXPR) | |
511 | { | |
512 | tree cast_type = TREE_TYPE (exp); | |
513 | if (TREE_CODE (cast_type) == POINTER_TYPE | |
514 | && AGGREGATE_TYPE_P (TREE_TYPE (cast_type))) | |
515 | { | |
516 | is_aggregate = 1; | |
517 | break; | |
518 | } | |
519 | exp = TREE_OPERAND (exp, 0); | |
520 | } | |
521 | ||
522 | if (is_aggregate == 0) | |
523 | { | |
524 | tree type; | |
525 | ||
526 | if (TREE_CODE (exp) == ADDR_EXPR) | |
527 | /* If this is the address of an object, check whether the | |
528 | object is an array. */ | |
529 | type = TREE_TYPE (TREE_OPERAND (exp, 0)); | |
530 | else | |
531 | type = TREE_TYPE (TREE_TYPE (exp)); | |
532 | is_aggregate = AGGREGATE_TYPE_P (type); | |
533 | } | |
534 | ||
535 | MEM_SET_IN_STRUCT_P (mem, is_aggregate); | |
536 | return mem; | |
537 | } | |
538 | \f | |
539 | /* Built-in functions to perform an untyped call and return. */ | |
540 | ||
541 | /* For each register that may be used for calling a function, this | |
542 | gives a mode used to copy the register's value. VOIDmode indicates | |
543 | the register is not used for calling a function. If the machine | |
544 | has register windows, this gives only the outbound registers. | |
545 | INCOMING_REGNO gives the corresponding inbound register. */ | |
546 | static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER]; | |
547 | ||
548 | /* For each register that may be used for returning values, this gives | |
549 | a mode used to copy the register's value. VOIDmode indicates the | |
550 | register is not used for returning values. If the machine has | |
551 | register windows, this gives only the outbound registers. | |
552 | INCOMING_REGNO gives the corresponding inbound register. */ | |
553 | static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER]; | |
554 | ||
555 | /* For each register that may be used for calling a function, this | |
556 | gives the offset of that register into the block returned by | |
557 | __builtin_apply_args. 0 indicates that the register is not | |
558 | used for calling a function. */ | |
559 | static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER]; | |
560 | ||
561 | /* Return the offset of register REGNO into the block returned by | |
562 | __builtin_apply_args. This is not declared static, since it is | |
563 | needed in objc-act.c. */ | |
564 | ||
565 | int | |
566 | apply_args_register_offset (regno) | |
567 | int regno; | |
568 | { | |
569 | apply_args_size (); | |
570 | ||
571 | /* Arguments are always put in outgoing registers (in the argument | |
572 | block) if such make sense. */ | |
573 | #ifdef OUTGOING_REGNO | |
574 | regno = OUTGOING_REGNO(regno); | |
575 | #endif | |
576 | return apply_args_reg_offset[regno]; | |
577 | } | |
578 | ||
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode and apply_args_reg_offset.  The block
   layout is: incoming arg pointer, then (if struct_value_rtx) the
   structure value address, then each argument register aligned to its
   chosen mode.  */

static int
apply_args_size ()
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (struct_value_rtx)
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    /* Search for the proper mode for copying this register's
	       value.  I'm not sure this is right, but it works so far.  */
	    enum machine_mode best_mode = VOIDmode;

	    /* Prefer the widest integer mode that fits in one register.  */
	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
		 mode != VOIDmode;
		 mode = GET_MODE_WIDER_MODE (mode))
	      if (HARD_REGNO_MODE_OK (regno, mode)
		  && HARD_REGNO_NREGS (regno, mode) == 1)
		best_mode = mode;

	    /* Otherwise fall back to the widest float mode with a
	       working move pattern.  */
	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && (mov_optab->handlers[(int) mode].insn_code
			!= CODE_FOR_nothing))
		  best_mode = mode;

	    mode = best_mode;
	    if (mode == VOIDmode)
	      abort ();

	    /* Round SIZE up to MODE's alignment before recording this
	       register's offset into the block.  */
	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    apply_args_reg_offset[regno] = size;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    /* Register not used for argument passing.  */
	    apply_args_mode[regno] = VOIDmode;
	    apply_args_reg_offset[regno] = 0;
	  }
    }
  return size;
}
642 | ||
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  The block holds one slot per
   register that can carry a function return value, each aligned to its
   chosen mode.  */

static int
apply_result_size ()
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_VALUE_REGNO_P (regno))
	  {
	    /* Search for the proper mode for copying this register's
	       value.  I'm not sure this is right, but it works so far.  */
	    enum machine_mode best_mode = VOIDmode;

	    /* NOTE(review): unlike apply_args_size, this integer search
	       stops before TImode — presumably wider modes are never
	       needed for return values; confirm before changing.  */
	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
		 mode != TImode;
		 mode = GET_MODE_WIDER_MODE (mode))
	      if (HARD_REGNO_MODE_OK (regno, mode))
		best_mode = mode;

	    /* Fall back to the widest float mode with a working move
	       pattern.  */
	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && (mov_optab->handlers[(int) mode].insn_code
			!= CODE_FOR_nothing))
		  best_mode = mode;

	    mode = best_mode;
	    if (mode == VOIDmode)
	      abort ();

	    /* Round SIZE up to MODE's alignment, then reserve room.  */
	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
701 | ||
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  The returned PARALLEL contains one SET per
   possible result register, copying between the register and its slot
   in RESULT.  */

static rtx
result_vector (savep, result)
     int savep;
     rtx result;
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	/* Keep SIZE aligned for MODE; the layout must match the one
	   computed by apply_result_size.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	/* Saving uses the (outbound) REGNO itself; restoring uses the
	   corresponding inbound register.  */
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = change_address (result, mode,
			      plus_constant (XEXP (result, 0), size));
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
735 | ||
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  Returns a pseudo
   holding the address of a stack block laid out as described in
   apply_args_size: arg pointer, optional structure value address, then
   the argument registers.  */

static rtx
expand_builtin_apply_args_1 ()
{
  rtx registers;
  int size, align, regno;
  enum machine_mode mode;

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	rtx tem;

	/* Keep SIZE aligned for MODE, matching apply_args_size.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

#ifdef STACK_REGS
	/* For reg-stack.c's stack register household.
	   Compare with a similar piece of code in function.c.  */

	emit_insn (gen_rtx_USE (mode, tem));
#endif

	emit_move_insn (change_address (registers, mode,
					plus_constant (XEXP (registers, 0),
						       size)),
			tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
		  copy_to_reg (virtual_incoming_args_rtx));
  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_value_incoming_rtx)
    {
      emit_move_insn (change_address (registers, Pmode,
				      plus_constant (XEXP (registers, 0),
						     size)),
		      copy_to_reg (struct_value_incoming_rtx));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
800 | ||
801 | /* __builtin_apply_args returns block of memory allocated on | |
802 | the stack into which is stored the arg pointer, structure | |
803 | value address, static chain, and all the registers that might | |
804 | possibly be used in performing a function call. The code is | |
805 | moved to the start of the function so the incoming values are | |
806 | saved. */ | |
807 | static rtx | |
808 | expand_builtin_apply_args () | |
809 | { | |
810 | /* Don't do __builtin_apply_args more than once in a function. | |
811 | Save the result of the first call and reuse it. */ | |
812 | if (apply_args_value != 0) | |
813 | return apply_args_value; | |
814 | { | |
815 | /* When this function is called, it means that registers must be | |
816 | saved on entry to this function. So we migrate the | |
817 | call to the first insn of this function. */ | |
818 | rtx temp; | |
819 | rtx seq; | |
820 | ||
821 | start_sequence (); | |
822 | temp = expand_builtin_apply_args_1 (); | |
823 | seq = get_insns (); | |
824 | end_sequence (); | |
825 | ||
826 | apply_args_value = temp; | |
827 | ||
828 | /* Put the sequence after the NOTE that starts the function. | |
829 | If this is inside a SEQUENCE, make the outer-level insn | |
830 | chain current, so the code is placed at the start of the | |
831 | function. */ | |
832 | push_topmost_sequence (); | |
833 | emit_insns_before (seq, NEXT_INSN (get_insns ())); | |
834 | pop_topmost_sequence (); | |
835 | return temp; | |
836 | } | |
837 | } | |
838 | ||
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.

   FUNCTION is the function address to call, ARGUMENTS is the address of
   a block built by __builtin_apply_args (arg pointer, struct-value
   address, then saved registers), and ARGSIZE is the size in bytes of
   the outgoing stack argument area to copy.  Returns the address of a
   stack block holding the saved return-register values.  */

static rtx
expand_builtin_apply (function, arguments, argsize)
     rtx function, arguments, argsize;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* ??? The argsize value should be adjusted here.  */

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args,
		  gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  /* On upward-growing stacks the saved arg pointer is the low end of
     the area plus ARGSIZE; back it off to get the block start.  */
  incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
				incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Push a new argument block and copy the arguments.  */
  do_pending_stack_adjust ();

  /* Save the stack with nonlocal if available */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Push a block of memory onto the stack to store the memory arguments.
     Save the address in a register, and copy the memory arguments.  ??? I
     haven't figured out how the calling convention macros effect this,
     but it's likely that the source and/or destination addresses in
     the block copy will need updating in machine specific ways.  */
  dest = allocate_dynamic_stack_space (argsize, 0, 0);
  emit_block_move (gen_rtx_MEM (BLKmode, dest),
		   gen_rtx_MEM (BLKmode, incoming_args),
		   argsize,
		   PARM_BOUNDARY / BITS_PER_UNIT);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  The
     offsets here must mirror the layout written by
     expand_builtin_apply_args_1 (mode-aligned, in regno order).  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg,
			change_address (arguments, mode,
					plus_constant (XEXP (arguments, 0),
						       size)));

	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  It lives right after the arg pointer
     in the block, hence SIZE restarts at one pointer's width.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value,
		      change_address (arguments, Pmode,
				      plus_constant (XEXP (arguments, 0),
						     size)));
      emit_move_insn (struct_value_rtx, value);
      if (GET_CODE (struct_value_rtx) == REG)
	use_reg (&call_fusage, struct_value_rtx);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    if (valreg)
	      abort (); /* HAVE_untyped_call required.  */
	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (gen_call_value (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (change_address (result, GET_MODE (valreg),
				      XEXP (result, 0)),
		      valreg);
    }
  else
#endif
    abort ();

  /* Find the CALL insn we just emitted by scanning backwards.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  if (! call_insn)
    abort ();

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      /* Walk to the last link of the existing usage chain.  */
      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
	   link = XEXP (link, 1))
	;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  /* Return the address of the result block.  */
  return copy_addr_to_reg (XEXP (result, 0));
}
1014 | ||
/* Perform an untyped return.  RESULT is the address of the block (built
   by __builtin_apply) that holds the saved return-register values; the
   values are reloaded into the return registers and control transfers
   directly to the function's return point.  */

static void
expand_builtin_return (result)
     rtx result;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  /* Presumably computes the saved-register layout consumed below via
     apply_result_mode[] — confirm in apply_result_size.  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  /* If the target provides an untyped_return pattern, let it do the
     whole job in one insn.  */
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	/* Keep each slot aligned to its mode's natural alignment,
	   mirroring the layout used when the block was written.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg,
			change_address (result, mode,
					plus_constant (XEXP (result, 0),
						       size)));

	/* Accumulate USE insns for the restored registers in a side
	   sequence so they can all be emitted just before the return.  */
	push_to_sequence (call_fusage);
	emit_insn (gen_rtx_USE (VOIDmode, reg));
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insns (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_null_return ();
}
1066 | ||
1067 | /* Expand a call to __builtin_classify_type with arguments found in | |
1068 | ARGLIST. */ | |
1069 | static rtx | |
1070 | expand_builtin_classify_type (arglist) | |
1071 | tree arglist; | |
1072 | { | |
1073 | if (arglist != 0) | |
1074 | { | |
1075 | tree type = TREE_TYPE (TREE_VALUE (arglist)); | |
1076 | enum tree_code code = TREE_CODE (type); | |
1077 | if (code == VOID_TYPE) | |
1078 | return GEN_INT (void_type_class); | |
1079 | if (code == INTEGER_TYPE) | |
1080 | return GEN_INT (integer_type_class); | |
1081 | if (code == CHAR_TYPE) | |
1082 | return GEN_INT (char_type_class); | |
1083 | if (code == ENUMERAL_TYPE) | |
1084 | return GEN_INT (enumeral_type_class); | |
1085 | if (code == BOOLEAN_TYPE) | |
1086 | return GEN_INT (boolean_type_class); | |
1087 | if (code == POINTER_TYPE) | |
1088 | return GEN_INT (pointer_type_class); | |
1089 | if (code == REFERENCE_TYPE) | |
1090 | return GEN_INT (reference_type_class); | |
1091 | if (code == OFFSET_TYPE) | |
1092 | return GEN_INT (offset_type_class); | |
1093 | if (code == REAL_TYPE) | |
1094 | return GEN_INT (real_type_class); | |
1095 | if (code == COMPLEX_TYPE) | |
1096 | return GEN_INT (complex_type_class); | |
1097 | if (code == FUNCTION_TYPE) | |
1098 | return GEN_INT (function_type_class); | |
1099 | if (code == METHOD_TYPE) | |
1100 | return GEN_INT (method_type_class); | |
1101 | if (code == RECORD_TYPE) | |
1102 | return GEN_INT (record_type_class); | |
1103 | if (code == UNION_TYPE || code == QUAL_UNION_TYPE) | |
1104 | return GEN_INT (union_type_class); | |
1105 | if (code == ARRAY_TYPE) | |
1106 | { | |
1107 | if (TYPE_STRING_FLAG (type)) | |
1108 | return GEN_INT (string_type_class); | |
1109 | else | |
1110 | return GEN_INT (array_type_class); | |
1111 | } | |
1112 | if (code == SET_TYPE) | |
1113 | return GEN_INT (set_type_class); | |
1114 | if (code == FILE_TYPE) | |
1115 | return GEN_INT (file_type_class); | |
1116 | if (code == LANG_TYPE) | |
1117 | return GEN_INT (lang_type_class); | |
1118 | } | |
1119 | return GEN_INT (no_type_class); | |
1120 | } | |
1121 | ||
1122 | /* Expand expression EXP, which is a call to __builtin_constant_p. */ | |
1123 | static rtx | |
1124 | expand_builtin_constant_p (exp) | |
1125 | tree exp; | |
1126 | { | |
1127 | tree arglist = TREE_OPERAND (exp, 1); | |
1128 | enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp)); | |
1129 | ||
1130 | if (arglist == 0) | |
1131 | return const0_rtx; | |
1132 | else | |
1133 | { | |
1134 | tree arg = TREE_VALUE (arglist); | |
1135 | rtx tmp; | |
1136 | ||
1137 | /* We return 1 for a numeric type that's known to be a constant | |
1138 | value at compile-time or for an aggregate type that's a | |
1139 | literal constant. */ | |
1140 | STRIP_NOPS (arg); | |
1141 | ||
1142 | /* If we know this is a constant, emit the constant of one. */ | |
1143 | if (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c' | |
1144 | || (TREE_CODE (arg) == CONSTRUCTOR | |
1145 | && TREE_CONSTANT (arg)) | |
1146 | || (TREE_CODE (arg) == ADDR_EXPR | |
1147 | && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)) | |
1148 | return const1_rtx; | |
1149 | ||
1150 | /* If we aren't going to be running CSE or this expression | |
1151 | has side effects, show we don't know it to be a constant. | |
1152 | Likewise if it's a pointer or aggregate type since in those | |
1153 | case we only want literals, since those are only optimized | |
1154 | when generating RTL, not later. */ | |
1155 | if (TREE_SIDE_EFFECTS (arg) || cse_not_expected | |
1156 | || AGGREGATE_TYPE_P (TREE_TYPE (arg)) | |
1157 | || POINTER_TYPE_P (TREE_TYPE (arg))) | |
1158 | return const0_rtx; | |
1159 | ||
1160 | /* Otherwise, emit (constant_p_rtx (ARG)) and let CSE get a | |
1161 | chance to see if it can deduce whether ARG is constant. */ | |
1162 | ||
1163 | tmp = expand_expr (arg, NULL_RTX, VOIDmode, 0); | |
1164 | tmp = gen_rtx_CONSTANT_P_RTX (value_mode, tmp); | |
1165 | return tmp; | |
1166 | } | |
1167 | } | |
1168 | ||
/* Expand a call to one of the builtin math functions (sin, cos, or sqrt).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */
static rtx
expand_builtin_mathfn (exp, target, subtarget)
     tree exp;
     rtx target, subtarget;
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);

  if (arglist == 0
      /* Arg could be wrong type if user redeclared this fcn wrong.  */
      || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
    return 0;

  /* Stabilize and compute the argument.  Wrapping it in a SAVE_EXPR
     keeps it single-evaluation, since EXP may be re-expanded below via
     expand_call for the errno-setting path.  The nodes are copied
     first so the caller's trees are left unmodified.  */
  if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
      && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
    {
      exp = copy_node (exp);
      arglist = copy_node (arglist);
      TREE_OPERAND (exp, 1) = arglist;
      TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
    }
  op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);

  /* Make a suitable register to place result in.  */
  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

  emit_queue ();
  start_sequence ();

  /* Select the optab that matches this builtin.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_SIN:
      builtin_optab = sin_optab; break;
    case BUILT_IN_COS:
      builtin_optab = cos_optab; break;
    case BUILT_IN_FSQRT:
      builtin_optab = sqrt_optab; break;
    default:
      abort ();
    }

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
			builtin_optab, op0, target, 0);

  /* If we were unable to expand via the builtin, stop the
     sequence (without outputting the insns) and return 0, causing
     a call to the library function.  */
  if (target == 0)
    {
      end_sequence ();
      return 0;
    }

  /* Check the results by default.  But if flag_fast_math is turned on,
     then assume sqrt will always be called with valid arguments.  */

  if (flag_errno_math && ! flag_fast_math)
    {
      rtx lab1;

      /* Don't define the builtin FP instructions
	 if your machine is not IEEE.  */
      if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
	abort ();

      lab1 = gen_label_rtx ();

      /* Test the result; if it is NaN, set errno=EDOM because
	 the argument was not in the domain.  The branch to LAB1 is
	 taken for ordinary (non-NaN) results, skipping the errno
	 code below.  */
      emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
			       0, 0, lab1);

#ifdef TARGET_EDOM
      {
	/* Store EDOM directly into errno's memory location.  */
#ifdef GEN_ERRNO_RTX
	rtx errno_rtx = GEN_ERRNO_RTX;
#else
	rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif

	emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
      }
#else
      /* We can't set errno=EDOM directly; let the library call do it.
	 Pop the arguments right away in case the call gets deleted.  */
      NO_DEFER_POP;
      expand_call (exp, target, 0);
      OK_DEFER_POP;
#endif

      emit_label (lab1);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insns (insns);

  return target;
}
1280 | ||
1281 | /* Expand expression EXP which is a call to the strlen builtin. Return 0 | |
1282 | if we failed the caller should emit a normal call, otherwise | |
1283 | try to get the result in TARGET, if convenient (and in mode MODE if that's | |
1284 | convenient). */ | |
1285 | static rtx | |
1286 | expand_builtin_strlen (exp, target, mode) | |
1287 | tree exp; | |
1288 | rtx target; | |
1289 | enum machine_mode mode; | |
1290 | { | |
1291 | tree arglist = TREE_OPERAND (exp, 1); | |
1292 | enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp)); | |
1293 | ||
1294 | if (arglist == 0 | |
1295 | /* Arg could be non-pointer if user redeclared this fcn wrong. */ | |
1296 | || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE) | |
1297 | return 0; | |
1298 | else | |
1299 | { | |
1300 | tree src = TREE_VALUE (arglist); | |
1301 | tree len = c_strlen (src); | |
1302 | ||
1303 | int align | |
1304 | = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT; | |
1305 | ||
1306 | rtx result, src_rtx, char_rtx; | |
1307 | enum machine_mode insn_mode = value_mode, char_mode; | |
1308 | enum insn_code icode; | |
1309 | ||
1310 | /* If the length is known, just return it. */ | |
1311 | if (len != 0) | |
1312 | return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD); | |
1313 | ||
1314 | /* If SRC is not a pointer type, don't do this operation inline. */ | |
1315 | if (align == 0) | |
1316 | return 0; | |
1317 | ||
1318 | /* Call a function if we can't compute strlen in the right mode. */ | |
1319 | ||
1320 | while (insn_mode != VOIDmode) | |
1321 | { | |
1322 | icode = strlen_optab->handlers[(int) insn_mode].insn_code; | |
1323 | if (icode != CODE_FOR_nothing) | |
1324 | return 0; | |
1325 | ||
1326 | insn_mode = GET_MODE_WIDER_MODE (insn_mode); | |
1327 | } | |
1328 | if (insn_mode == VOIDmode) | |
1329 | return 0; | |
1330 | ||
1331 | /* Make a place to write the result of the instruction. */ | |
1332 | result = target; | |
1333 | if (! (result != 0 | |
1334 | && GET_CODE (result) == REG | |
1335 | && GET_MODE (result) == insn_mode | |
1336 | && REGNO (result) >= FIRST_PSEUDO_REGISTER)) | |
1337 | result = gen_reg_rtx (insn_mode); | |
1338 | ||
1339 | /* Make sure the operands are acceptable to the predicates. */ | |
1340 | ||
1341 | if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode)) | |
1342 | result = gen_reg_rtx (insn_mode); | |
1343 | src_rtx = memory_address (BLKmode, | |
1344 | expand_expr (src, NULL_RTX, ptr_mode, | |
1345 | EXPAND_NORMAL)); | |
1346 | ||
1347 | if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode)) | |
1348 | src_rtx = copy_to_mode_reg (Pmode, src_rtx); | |
1349 | ||
1350 | /* Check the string is readable and has an end. */ | |
1351 | if (current_function_check_memory_usage) | |
1352 | emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2, | |
1353 | src_rtx, Pmode, | |
1354 | GEN_INT (MEMORY_USE_RO), | |
1355 | TYPE_MODE (integer_type_node)); | |
1356 | ||
1357 | char_rtx = const0_rtx; | |
1358 | char_mode = insn_operand_mode[(int)icode][2]; | |
1359 | if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode)) | |
1360 | char_rtx = copy_to_mode_reg (char_mode, char_rtx); | |
1361 | ||
1362 | emit_insn (GEN_FCN (icode) (result, | |
1363 | gen_rtx_MEM (BLKmode, src_rtx), | |
1364 | char_rtx, GEN_INT (align))); | |
1365 | ||
1366 | /* Return the value in the proper mode for this function. */ | |
1367 | if (GET_MODE (result) == value_mode) | |
1368 | return result; | |
1369 | else if (target != 0) | |
1370 | { | |
1371 | convert_move (target, result, 0); | |
1372 | return target; | |
1373 | } | |
1374 | else | |
1375 | return convert_to_mode (value_mode, result, 0); | |
1376 | } | |
1377 | } | |
1378 | ||
1379 | /* Expand a call to the memcpy builtin, with arguments in ARGLIST. */ | |
1380 | static rtx | |
1381 | expand_builtin_memcpy (arglist) | |
1382 | tree arglist; | |
1383 | { | |
1384 | if (arglist == 0 | |
1385 | /* Arg could be non-pointer if user redeclared this fcn wrong. */ | |
1386 | || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE | |
1387 | || TREE_CHAIN (arglist) == 0 | |
1388 | || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) | |
1389 | != POINTER_TYPE) | |
1390 | || TREE_CHAIN (TREE_CHAIN (arglist)) == 0 | |
1391 | || (TREE_CODE (TREE_TYPE (TREE_VALUE | |
1392 | (TREE_CHAIN (TREE_CHAIN (arglist))))) | |
1393 | != INTEGER_TYPE)) | |
1394 | return 0; | |
1395 | else | |
1396 | { | |
1397 | tree dest = TREE_VALUE (arglist); | |
1398 | tree src = TREE_VALUE (TREE_CHAIN (arglist)); | |
1399 | tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))); | |
1400 | ||
1401 | int src_align | |
1402 | = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT; | |
1403 | int dest_align | |
1404 | = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT; | |
1405 | rtx dest_mem, src_mem, dest_addr, len_rtx; | |
1406 | ||
1407 | /* If either SRC or DEST is not a pointer type, don't do | |
1408 | this operation in-line. */ | |
1409 | if (src_align == 0 || dest_align == 0) | |
1410 | return 0; | |
1411 | ||
1412 | dest_mem = get_memory_rtx (dest); | |
1413 | src_mem = get_memory_rtx (src); | |
1414 | len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0); | |
1415 | ||
1416 | /* Just copy the rights of SRC to the rights of DEST. */ | |
1417 | if (current_function_check_memory_usage) | |
1418 | emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3, | |
1419 | XEXP (dest_mem, 0), Pmode, | |
1420 | XEXP (src_mem, 0), Pmode, | |
1421 | len_rtx, TYPE_MODE (sizetype)); | |
1422 | ||
1423 | /* Copy word part most expediently. */ | |
1424 | dest_addr | |
1425 | = emit_block_move (dest_mem, src_mem, len_rtx, | |
1426 | MIN (src_align, dest_align)); | |
1427 | ||
1428 | if (dest_addr == 0) | |
1429 | dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX); | |
1430 | ||
1431 | return dest_addr; | |
1432 | } | |
1433 | } | |
1434 | ||
1435 | /* Expand expression EXP, which is a call to the strcpy builtin. Return 0 | |
1436 | if we failed the caller should emit a normal call. */ | |
1437 | static rtx | |
1438 | expand_builtin_strcpy (exp) | |
1439 | tree exp; | |
1440 | { | |
1441 | tree arglist = TREE_OPERAND (exp, 1); | |
1442 | rtx result; | |
1443 | ||
1444 | if (arglist == 0 | |
1445 | /* Arg could be non-pointer if user redeclared this fcn wrong. */ | |
1446 | || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE | |
1447 | || TREE_CHAIN (arglist) == 0 | |
1448 | || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE) | |
1449 | return 0; | |
1450 | else | |
1451 | { | |
1452 | tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist))); | |
1453 | ||
1454 | if (len == 0) | |
1455 | return 0; | |
1456 | ||
1457 | len = size_binop (PLUS_EXPR, len, integer_one_node); | |
1458 | ||
1459 | chainon (arglist, build_tree_list (NULL_TREE, len)); | |
1460 | } | |
1461 | result = expand_builtin_memcpy (arglist); | |
1462 | if (! result) | |
1463 | TREE_CHAIN (TREE_CHAIN (arglist)) = 0; | |
1464 | return result; | |
1465 | } | |
1466 | ||
/* Expand expression EXP, which is a call to the memset builtin.  Return 0
   if we failed; the caller should then emit a normal call.  Only the
   case of a zero fill value with a constant length is expanded inline,
   via clear_storage.  */
static rtx
expand_builtin_memset (exp)
     tree exp;
{
  tree arglist = TREE_OPERAND (exp, 1);

  if (arglist == 0
      /* Arg could be non-pointer if user redeclared this fcn wrong.  */
      || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
      || TREE_CHAIN (arglist) == 0
      || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
	  != INTEGER_TYPE)
      || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
      /* Constant-on-the-left comparison; equivalent to checking that
	 the third argument has INTEGER_TYPE.  */
      || (INTEGER_TYPE
	  != (TREE_CODE (TREE_TYPE
			 (TREE_VALUE
			  (TREE_CHAIN (TREE_CHAIN (arglist))))))))
    return 0;
  else
    {
      tree dest = TREE_VALUE (arglist);
      tree val = TREE_VALUE (TREE_CHAIN (arglist));
      tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));

      int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      rtx dest_mem, dest_addr, len_rtx;

      /* If DEST is not a pointer type, don't do this
	 operation in-line.  */
      if (dest_align == 0)
	return 0;

      /* If the arguments have side-effects, then we can only evaluate
	 them at most once.  The following code evaluates them twice if
	 they are not constants because we break out to expand_call
	 in that case.  They can't be constants if they have side-effects
	 so we can check for that first.  Alternatively, we could call
	 save_expr to make multiple evaluation safe.  */
      if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len))
	return 0;

      /* If VAL is not 0, don't do this operation in-line.  */
      if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
	return 0;

      /* If LEN does not expand to a constant, don't do this
	 operation in-line.  */
      len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
      if (GET_CODE (len_rtx) != CONST_INT)
	return 0;

      dest_mem = get_memory_rtx (dest);

      /* Just check DST is writable and mark it as readable.  */
      if (current_function_check_memory_usage)
	emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
			   XEXP (dest_mem, 0), Pmode,
			   len_rtx, TYPE_MODE (sizetype),
			   GEN_INT (MEMORY_USE_WO),
			   TYPE_MODE (integer_type_node));

      /* VAL is known to be zero here, so a clear suffices.  */
      dest_addr = clear_storage (dest_mem, len_rtx, dest_align);

      if (dest_addr == 0)
	dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);

      return dest_addr;
    }
}
1540 | ||
#ifdef HAVE_cmpstrsi
/* Expand expression EXP, which is a call to the memcmp or the strcmp builtin.
   ARGLIST is the argument list for this call.  Return 0 if we failed and the
   caller should emit a normal call, otherwise try to get the result in
   TARGET, if convenient.  Expansion uses the target's cmpstrsi pattern.  */
static rtx
expand_builtin_memcmp (exp, arglist, target)
     tree exp;
     tree arglist;
     rtx target;
{
  /* If we need to check memory accesses, call the library function.  */
  if (current_function_check_memory_usage)
    return 0;

  if (arglist == 0
      /* Arg could be non-pointer if user redeclared this fcn wrong.  */
      || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
      || TREE_CHAIN (arglist) == 0
      || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
      || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
      || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
    return 0;
  else if (!HAVE_cmpstrsi)
    return 0;

  {
    enum machine_mode mode;
    tree arg1 = TREE_VALUE (arglist);
    tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
    tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
    rtx result;

    int arg1_align
      = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    int arg2_align
      = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    /* The mode the cmpstrsi pattern produces its result in.  */
    enum machine_mode insn_mode
      = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return 0;

    /* Make a place to write the result of the instruction.  Reuse
       TARGET only when it is already a suitable pseudo in INSN_MODE.  */
    result = target;
    if (! (result != 0
	   && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    emit_insn (gen_cmpstrsi (result, get_memory_rtx (arg1),
			     get_memory_rtx (arg2),
			     expand_expr (len, NULL_RTX, VOIDmode, 0),
			     GEN_INT (MIN (arg1_align, arg2_align))));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
}
1610 | ||
/* Expand expression EXP, which is a call to the strcmp builtin.  Return 0
   if we failed the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient.  The call is rewritten as a memcmp
   by appending a length computed from the constant string lengths.  */
static rtx
expand_builtin_strcmp (exp, target)
     tree exp;
     rtx target;
{
  tree arglist = TREE_OPERAND (exp, 1);

  /* If we need to check memory accesses, call the library function.  */
  if (current_function_check_memory_usage)
    return 0;

  if (arglist == 0
      /* Arg could be non-pointer if user redeclared this fcn wrong.  */
      || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
      || TREE_CHAIN (arglist) == 0
      || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
    return 0;
  else if (!HAVE_cmpstrsi)
    return 0;
  {
    tree arg1 = TREE_VALUE (arglist);
    tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
    tree len, len2;
    rtx result;

    /* Compute each string's length plus one for the terminating nul;
       c_strlen yields 0 when no compile-time length is known.  */
    len = c_strlen (arg1);
    if (len)
      len = size_binop (PLUS_EXPR, integer_one_node, len);
    len2 = c_strlen (arg2);
    if (len2)
      len2 = size_binop (PLUS_EXPR, integer_one_node, len2);

    /* If we don't have a constant length for the first, use the length
       of the second, if we know it.  We don't require a constant for
       this case; some cost analysis could be done if both are available
       but neither is constant.  For now, assume they're equally cheap.

       If both strings have constant lengths, use the smaller.  This
       could arise if optimization results in strcpy being called with
       two fixed strings, or if the code was machine-generated.  We should
       add some code to the `memcmp' handler below to deal with such
       situations, someday.  */
    if (!len || TREE_CODE (len) != INTEGER_CST)
      {
	if (len2)
	  len = len2;
	else if (len == 0)
	  return 0;
      }
    else if (len2 && TREE_CODE (len2) == INTEGER_CST)
      {
	if (tree_int_cst_lt (len2, len))
	  len = len2;
      }

    /* Append the chosen length and let the memcmp expander do the work;
       on failure, detach it again so the normal call sees the original
       two-argument list.  */
    chainon (arglist, build_tree_list (NULL_TREE, len));
    result = expand_builtin_memcmp (exp, arglist, target);
    if (! result)
      TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
    return result;
  }
}
#endif
1676 | ||
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  The real work is delegated to the target's
   EXPAND_BUILTIN_SAVEREGS macro; the generated insns are hoisted to the
   very start of the function so registers are saved before any other
   code runs.  */
rtx
expand_builtin_saveregs ()
{
  rtx val, seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  /* Collect the save insns in a private sequence so they can be moved.  */
  start_sequence ();

#ifdef EXPAND_BUILTIN_SAVEREGS
  /* Do whatever the machine needs done in this case.  */
  val = EXPAND_BUILTIN_SAVEREGS ();
#else
  /* ??? We used to try and build up a call to the out of line function,
     guessing about what registers needed saving etc.  This became much
     harder with __builtin_va_start, since we don't have a tree for a
     call to __builtin_saveregs to fall back on.  There was exactly one
     port (i860) that used this code, and I'm unconvinced it could actually
     handle the general case.  So we no longer try to handle anything
     weird and make the backend absorb the evil.  */

  error ("__builtin_saveregs not supported by this target");
  val = const0_rtx;
#endif

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the sequence after the NOTE that starts the function.  If this
     is inside a SEQUENCE, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insns_after (seq, get_insns ());
  pop_topmost_sequence ();

  return val;
}
1725 | ||
1726 | /* __builtin_args_info (N) returns word N of the arg space info | |
1727 | for the current function. The number and meanings of words | |
1728 | is controlled by the definition of CUMULATIVE_ARGS. */ | |
1729 | static rtx | |
1730 | expand_builtin_args_info (exp) | |
1731 | tree exp; | |
1732 | { | |
1733 | tree arglist = TREE_OPERAND (exp, 1); | |
1734 | int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int); | |
1735 | int *word_ptr = (int *) ¤t_function_args_info; | |
1736 | #if 0 | |
1737 | /* These are used by the code below that is if 0'ed away */ | |
1738 | int i; | |
1739 | tree type, elts, result; | |
1740 | #endif | |
1741 | ||
1742 | if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0) | |
1743 | fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d", | |
1744 | __FILE__, __LINE__); | |
1745 | ||
1746 | if (arglist != 0) | |
1747 | { | |
1748 | tree arg = TREE_VALUE (arglist); | |
1749 | if (TREE_CODE (arg) != INTEGER_CST) | |
1750 | error ("argument of `__builtin_args_info' must be constant"); | |
1751 | else | |
1752 | { | |
1753 | int wordnum = TREE_INT_CST_LOW (arg); | |
1754 | ||
1755 | if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg)) | |
1756 | error ("argument of `__builtin_args_info' out of range"); | |
1757 | else | |
1758 | return GEN_INT (word_ptr[wordnum]); | |
1759 | } | |
1760 | } | |
1761 | else | |
1762 | error ("missing argument in `__builtin_args_info'"); | |
1763 | ||
1764 | return const0_rtx; | |
1765 | ||
1766 | #if 0 | |
1767 | for (i = 0; i < nwords; i++) | |
1768 | elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0)); | |
1769 | ||
1770 | type = build_array_type (integer_type_node, | |
1771 | build_index_type (build_int_2 (nwords, 0))); | |
1772 | result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts)); | |
1773 | TREE_CONSTANT (result) = 1; | |
1774 | TREE_STATIC (result) = 1; | |
a66c9326 | 1775 | result = build1 (INDIRECT_REF, build_pointer_type (type), result); |
53800dbe | 1776 | TREE_CONSTANT (result) = 1; |
1777 | return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD); | |
1778 | #endif | |
1779 | } | |
1780 | ||
a66c9326 | 1781 | /* Expand ARGLIST, from a call to __builtin_next_arg. */ |
53800dbe | 1782 | static rtx |
a66c9326 | 1783 | expand_builtin_next_arg (arglist) |
1784 | tree arglist; | |
53800dbe | 1785 | { |
53800dbe | 1786 | tree fntype = TREE_TYPE (current_function_decl); |
1787 | ||
1788 | if ((TYPE_ARG_TYPES (fntype) == 0 | |
1789 | || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype))) | |
1790 | == void_type_node)) | |
1791 | && ! current_function_varargs) | |
1792 | { | |
1793 | error ("`va_start' used in function with fixed args"); | |
1794 | return const0_rtx; | |
1795 | } | |
1796 | ||
1797 | if (arglist) | |
1798 | { | |
1799 | tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl)); | |
1800 | tree arg = TREE_VALUE (arglist); | |
1801 | ||
1802 | /* Strip off all nops for the sake of the comparison. This | |
1803 | is not quite the same as STRIP_NOPS. It does more. | |
1804 | We must also strip off INDIRECT_EXPR for C++ reference | |
1805 | parameters. */ | |
1806 | while (TREE_CODE (arg) == NOP_EXPR | |
1807 | || TREE_CODE (arg) == CONVERT_EXPR | |
1808 | || TREE_CODE (arg) == NON_LVALUE_EXPR | |
1809 | || TREE_CODE (arg) == INDIRECT_REF) | |
1810 | arg = TREE_OPERAND (arg, 0); | |
1811 | if (arg != last_parm) | |
1812 | warning ("second parameter of `va_start' not last named argument"); | |
1813 | } | |
1814 | else if (! current_function_varargs) | |
1815 | /* Evidently an out of date version of <stdarg.h>; can't validate | |
1816 | va_start's second argument, but can still work as intended. */ | |
1817 | warning ("`__builtin_next_arg' called without an argument"); | |
1818 | ||
1819 | return expand_binop (Pmode, add_optab, | |
1820 | current_function_internal_arg_pointer, | |
1821 | current_function_arg_offset_rtx, | |
1822 | NULL_RTX, 0, OPTAB_LIB_WIDEN); | |
1823 | } | |
1824 | ||
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.

   VALIST is the va_list expression (or, when WAS_PTR is nonzero, its
   address as taken by the stdarg.h macros).  Returns an equivalent
   expression of type va_list_type_node that is safe to evaluate more
   than once.  The two cases are driven by whether the target's va_list
   is an array type, since array-typed valists decay to pointers when
   passed around.  */

static tree
stabilize_va_list (valist, was_ptr)
     tree valist;
     int was_ptr;
{
  int is_array = TREE_CODE (va_list_type_node) == ARRAY_TYPE;

  if (was_ptr)
    {
      /* If stdarg.h took the address of an array-type valist that was passed
         as a parameter, we'll have taken the address of the parameter itself
         rather than the array as we'd intended.  Undo this mistake.  */
      if (is_array
	  && TREE_CODE (valist) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (valist, 0))) == POINTER_TYPE)
	{
	  /* Drop the spurious ADDR_EXPR; the operand already acts as the
	     array/pointer we want.  */
	  valist = TREE_OPERAND (valist, 0);
	  if (TREE_SIDE_EFFECTS (valist))
	    valist = save_expr (valist);
	}
      else
	{
	  /* Stabilize the pointer itself, then dereference it to get
	     back an lvalue of va_list type.  */
	  if (TREE_SIDE_EFFECTS (valist))
	    valist = save_expr (valist);
	  valist = fold (build1 (INDIRECT_REF, va_list_type_node, valist));
	}
    }
  else if (TREE_SIDE_EFFECTS (valist))
    {
      if (is_array)
	valist = save_expr (valist);
      else
	{
	  /* For a non-array valist with side effects, take its address,
	     stabilize that, and re-dereference, so the underlying lvalue
	     is computed only once.  */
	  valist = build1 (ADDR_EXPR, build_pointer_type (va_list_type_node),
			   valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	  valist = save_expr (valist);
	  valist = fold (build1 (INDIRECT_REF, va_list_type_node, valist));
	}
    }

  return valist;
}
1871 | ||
1872 | /* The "standard" implementation of va_start: just assign `nextarg' to | |
1873 | the variable. */ | |
1874 | void | |
1875 | std_expand_builtin_va_start (stdarg_p, valist, nextarg) | |
1876 | int stdarg_p ATTRIBUTE_UNUSED; | |
1877 | tree valist; | |
1878 | rtx nextarg; | |
1879 | { | |
1880 | tree t; | |
1881 | ||
8a15c04a | 1882 | if (!stdarg_p) |
1883 | nextarg = plus_constant (nextarg, -UNITS_PER_WORD); | |
1884 | ||
a66c9326 | 1885 | t = build (MODIFY_EXPR, TREE_TYPE (valist), valist, |
1886 | make_tree (ptr_type_node, nextarg)); | |
1887 | TREE_SIDE_EFFECTS (t) = 1; | |
1888 | ||
1889 | expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL); | |
1890 | } | |
1891 | ||
1892 | /* Expand ARGLIST, which from a call to __builtin_stdarg_va_start or | |
1893 | __builtin_varargs_va_start, depending on STDARG_P. */ | |
1894 | static rtx | |
1895 | expand_builtin_va_start (stdarg_p, arglist) | |
1896 | int stdarg_p; | |
1897 | tree arglist; | |
1898 | { | |
1899 | rtx nextarg; | |
1900 | tree chain = arglist, valist; | |
1901 | ||
1902 | if (stdarg_p) | |
1903 | nextarg = expand_builtin_next_arg (chain = TREE_CHAIN (arglist)); | |
1904 | else | |
1905 | nextarg = expand_builtin_next_arg (NULL_TREE); | |
1906 | ||
1907 | if (TREE_CHAIN (chain)) | |
1908 | error ("too many arguments to function `va_start'"); | |
1909 | ||
1910 | valist = stabilize_va_list (TREE_VALUE (arglist), 1); | |
1911 | ||
1912 | #ifdef EXPAND_BUILTIN_VA_START | |
1913 | EXPAND_BUILTIN_VA_START (stdarg_p, valist, nextarg); | |
1914 | #else | |
1915 | std_expand_builtin_va_start (stdarg_p, valist, nextarg); | |
1916 | #endif | |
1917 | ||
1918 | return const0_rtx; | |
1919 | } | |
1920 | ||
/* Allocate an alias set for use in storing and reading from the varargs
   spill area.  The set is created lazily on first call and the same
   value is returned thereafter.  */
int
get_varargs_alias_set ()
{
  /* -1 marks "not yet allocated".  */
  static int varargs_set = -1;

  if (varargs_set == -1)
    varargs_set = new_alias_set ();

  return varargs_set;
}
1931 | ||
/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.

   VALIST is the stabilized va_list lvalue; TYPE is the type being
   fetched.  Returns an rtx holding the address to load the value from;
   the caller wraps it in a MEM.  */
rtx
std_expand_builtin_va_arg (valist, type)
     tree valist, type;
{
  tree addr_tree, t;
  HOST_WIDE_INT align;
  HOST_WIDE_INT rounded_size;
  rtx addr;

  /* Compute the rounded size of the type.  */
  align = PARM_BOUNDARY / BITS_PER_UNIT;
  rounded_size = (((int_size_in_bytes (type) + align - 1) / align) * align);

  /* Get AP.  */
  addr_tree = valist;
  if (BYTES_BIG_ENDIAN)
    {
      /* Small args are padded downward.  */

      /* ADJ is how far past AP the object's first byte sits: the full
	 slot for multi-slot args, otherwise just the object's size, so
	 that rounded_size - adj is the downward-padding offset.  */
      HOST_WIDE_INT adj;
      adj = TREE_INT_CST_LOW (TYPE_SIZE (type)) / BITS_PER_UNIT;
      if (rounded_size > align)
	adj = rounded_size;

      addr_tree = build (PLUS_EXPR, TREE_TYPE (addr_tree), addr_tree,
			 build_int_2 (rounded_size - adj, 0));
    }

  /* Pin the address in a register; AP is modified just below and the
     caller must see the pre-increment address.  */
  addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
  addr = copy_to_reg (addr);

  /* Compute new value for AP.  */
  t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
	     build (PLUS_EXPR, TREE_TYPE (valist), valist,
		    build_int_2 (rounded_size, 0)));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return addr;
}
1974 | ||
1975 | /* Expand __builtin_va_arg, which is not really a builtin function, but | |
1976 | a very special sort of operator. */ | |
1977 | rtx | |
1978 | expand_builtin_va_arg (valist, type) | |
1979 | tree valist, type; | |
1980 | { | |
1981 | rtx addr, result; | |
1982 | ||
1983 | if (TYPE_MAIN_VARIANT (TREE_TYPE (valist)) | |
1984 | != TYPE_MAIN_VARIANT (va_list_type_node)) | |
1985 | { | |
1986 | error ("first argument to `__builtin_va_arg' not of type `va_list'"); | |
1987 | addr = const0_rtx; | |
1988 | } | |
1989 | else | |
1990 | { | |
1991 | /* Make it easier for the backends by protecting the valist argument | |
1992 | from multiple evaluations. */ | |
1993 | valist = stabilize_va_list (valist, 0); | |
1994 | ||
1995 | #ifdef EXPAND_BUILTIN_VA_ARG | |
1996 | addr = EXPAND_BUILTIN_VA_ARG (valist, type); | |
1997 | #else | |
1998 | addr = std_expand_builtin_va_arg (valist, type); | |
1999 | #endif | |
2000 | } | |
2001 | ||
2002 | result = gen_rtx_MEM (TYPE_MODE (type), addr); | |
2003 | MEM_ALIAS_SET (result) = get_varargs_alias_set (); | |
2004 | ||
2005 | return result; | |
2006 | } | |
2007 | ||
/* Expand ARGLIST, from a call to __builtin_va_end.  On targets without
   an EXPAND_BUILTIN_VA_END hook this only evaluates the valist for its
   side effects; the result is always const0_rtx.  */
static rtx
expand_builtin_va_end (arglist)
     tree arglist;
{
  tree valist = TREE_VALUE (arglist);

#ifdef EXPAND_BUILTIN_VA_END
  /* NOTE(review): the stabilized VALIST computed here is never used —
     the macro below is handed the raw ARGLIST (a TREE_LIST) instead.
     This looks like it should be VALIST; confirm against the contract
     of targets that define EXPAND_BUILTIN_VA_END.  */
  valist = stabilize_va_list (valist, 0);
  EXPAND_BUILTIN_VA_END(arglist);
#else
  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
#endif

  return const0_rtx;
}
2027 | ||
2028 | /* Expand ARGLIST, from a call to __builtin_va_copy. We do this as a | |
2029 | builtin rather than just as an assignment in stdarg.h because of the | |
2030 | nastiness of array-type va_list types. */ | |
2031 | static rtx | |
2032 | expand_builtin_va_copy (arglist) | |
2033 | tree arglist; | |
2034 | { | |
2035 | tree dst, src, t; | |
2036 | ||
2037 | dst = TREE_VALUE (arglist); | |
2038 | src = TREE_VALUE (TREE_CHAIN (arglist)); | |
2039 | ||
2040 | dst = stabilize_va_list (dst, 1); | |
2041 | src = stabilize_va_list (src, 0); | |
2042 | ||
2043 | if (TREE_CODE (va_list_type_node) != ARRAY_TYPE) | |
2044 | { | |
2045 | t = build (MODIFY_EXPR, va_list_type_node, dst, src); | |
2046 | TREE_SIDE_EFFECTS (t) = 1; | |
2047 | expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL); | |
2048 | } | |
2049 | else | |
2050 | { | |
2051 | emit_block_move (expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL), | |
2052 | expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL), | |
2053 | expand_expr (TYPE_SIZE (va_list_type_node), NULL_RTX, | |
2054 | VOIDmode, EXPAND_NORMAL), | |
2055 | TYPE_ALIGN (va_list_type_node) / BITS_PER_UNIT); | |
2056 | } | |
2057 | ||
2058 | return const0_rtx; | |
2059 | } | |
2060 | ||
53800dbe | 2061 | /* Expand a call to one of the builtin functions __builtin_frame_address or |
2062 | __builtin_return_address. */ | |
2063 | static rtx | |
2064 | expand_builtin_frame_address (exp) | |
2065 | tree exp; | |
2066 | { | |
2067 | tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0); | |
2068 | tree arglist = TREE_OPERAND (exp, 1); | |
2069 | ||
2070 | /* The argument must be a nonnegative integer constant. | |
2071 | It counts the number of frames to scan up the stack. | |
2072 | The value is the return address saved in that frame. */ | |
2073 | if (arglist == 0) | |
2074 | /* Warning about missing arg was already issued. */ | |
2075 | return const0_rtx; | |
2076 | else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST | |
2077 | || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0) | |
2078 | { | |
2079 | if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS) | |
2080 | error ("invalid arg to `__builtin_frame_address'"); | |
2081 | else | |
2082 | error ("invalid arg to `__builtin_return_address'"); | |
2083 | return const0_rtx; | |
2084 | } | |
2085 | else | |
2086 | { | |
2087 | rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), | |
2088 | TREE_INT_CST_LOW (TREE_VALUE (arglist)), | |
2089 | hard_frame_pointer_rtx); | |
2090 | ||
2091 | /* Some ports cannot access arbitrary stack frames. */ | |
2092 | if (tem == NULL) | |
2093 | { | |
2094 | if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS) | |
2095 | warning ("unsupported arg to `__builtin_frame_address'"); | |
2096 | else | |
2097 | warning ("unsupported arg to `__builtin_return_address'"); | |
2098 | return const0_rtx; | |
2099 | } | |
2100 | ||
2101 | /* For __builtin_frame_address, return what we've got. */ | |
2102 | if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS) | |
2103 | return tem; | |
2104 | ||
2105 | if (GET_CODE (tem) != REG | |
2106 | && ! CONSTANT_P (tem)) | |
2107 | tem = copy_to_mode_reg (Pmode, tem); | |
2108 | return tem; | |
2109 | } | |
2110 | } | |
2111 | ||
2112 | /* Expand a call to the alloca builtin, with arguments ARGLIST. Return 0 if | |
2113 | we failed and the caller should emit a normal call, otherwise try to get | |
2114 | the result in TARGET, if convenient. */ | |
2115 | static rtx | |
2116 | expand_builtin_alloca (arglist, target) | |
2117 | tree arglist; | |
2118 | rtx target; | |
2119 | { | |
2120 | rtx op0; | |
2121 | ||
2122 | if (arglist == 0 | |
2123 | /* Arg could be non-integer if user redeclared this fcn wrong. */ | |
2124 | || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE) | |
2125 | return 0; | |
2126 | ||
2127 | /* Compute the argument. */ | |
2128 | op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0); | |
2129 | ||
2130 | /* Allocate the desired space. */ | |
2131 | return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT); | |
2132 | } | |
2133 | ||
2134 | /* Expand a call to the ffs builtin. The arguments are in ARGLIST. | |
2135 | Return 0 if a normal call should be emitted rather than expanding the | |
2136 | function in-line. If convenient, the result should be placed in TARGET. | |
2137 | SUBTARGET may be used as the target for computing one of EXP's operands. */ | |
2138 | static rtx | |
2139 | expand_builtin_ffs (arglist, target, subtarget) | |
2140 | tree arglist; | |
2141 | rtx target, subtarget; | |
2142 | { | |
2143 | rtx op0; | |
2144 | if (arglist == 0 | |
2145 | /* Arg could be non-integer if user redeclared this fcn wrong. */ | |
2146 | || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE) | |
2147 | return 0; | |
2148 | ||
2149 | /* Compute the argument. */ | |
2150 | op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0); | |
2151 | /* Compute ffs, into TARGET if possible. | |
2152 | Set TARGET to wherever the result comes back. */ | |
2153 | target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))), | |
2154 | ffs_optab, op0, target, 1); | |
2155 | if (target == 0) | |
2156 | abort (); | |
2157 | return target; | |
2158 | } | |
2159 | \f | |
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.

   This is the central dispatcher: most cases delegate to a helper that
   either returns the expanded rtx or 0, in which case control breaks out
   of the switch and a normal library call is emitted at the bottom.  */

rtx
expand_builtin (exp, target, subtarget, mode, ignore)
     tree exp;
     rtx target;
     rtx subtarget;
     enum machine_mode mode;
     int ignore;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  /* When not optimizing, generate calls to library functions for a certain
     set of builtins.  (Calls written literally as "__builtin_..." are
     still expanded inline even at -O0.)  */
  if (! optimize && ! CALLED_AS_BUILT_IN (fndecl)
      && (fcode == BUILT_IN_SIN || fcode == BUILT_IN_COS
	  || fcode == BUILT_IN_FSQRT || fcode == BUILT_IN_MEMSET
	  || fcode == BUILT_IN_MEMCPY || fcode == BUILT_IN_MEMCMP
	  || fcode == BUILT_IN_STRLEN || fcode == BUILT_IN_STRCPY
	  || fcode == BUILT_IN_STRCMP || fcode == BUILT_IN_FFS))
    return expand_call (exp, target, ignore);

  switch (fcode)
    {
    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_FABS:
      /* build_function_call changes these into ABS_EXPR.  */
      abort ();

    case BUILT_IN_SIN:
    case BUILT_IN_COS:
      /* Treat these like sqrt, but only if the user asks for them.  */
      if (! flag_fast_math)
	break;
      /* FALLTHRU: with -ffast-math, sin/cos expand via the same math
	 function expander as sqrt.  */
    case BUILT_IN_FSQRT:
      target = expand_builtin_mathfn (exp, target, subtarget);
      if (target)
	return target;
      break;

    case BUILT_IN_FMOD:
      break;

    case BUILT_IN_APPLY_ARGS:
      return expand_builtin_apply_args ();

      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
	 FUNCTION with a copy of the parameters described by
	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
	 allocated on the stack into which is stored all the registers
	 that might possibly be used for returning the result of a
	 function.  ARGUMENTS is the value returned by
	 __builtin_apply_args.  ARGSIZE is the number of bytes of
	 arguments that must be copied.  ??? How should this value be
	 computed?  We'll also need a safe worst case value for varargs
	 functions.  */
    case BUILT_IN_APPLY:
      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
	  || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
	return const0_rtx;
      else
	{
	  int i;
	  tree t;
	  rtx ops[3];

	  /* Evaluate the three arguments in order.  */
	  for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
	    ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);

	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
	}

      /* __builtin_return (RESULT) causes the function to return the
	 value described by RESULT.  RESULT is address of the block of
	 memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (arglist
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
	expand_builtin_return (expand_expr (TREE_VALUE (arglist),
					    NULL_RTX, VOIDmode, 0));
      return const0_rtx;

    case BUILT_IN_SAVEREGS:
      return expand_builtin_saveregs ();

    case BUILT_IN_ARGS_INFO:
      return expand_builtin_args_info (exp);

      /* Return the address of the first anonymous stack arg.  */
    case BUILT_IN_NEXT_ARG:
      return expand_builtin_next_arg (arglist);

    case BUILT_IN_CLASSIFY_TYPE:
      return expand_builtin_classify_type (arglist);

    case BUILT_IN_CONSTANT_P:
      return expand_builtin_constant_p (exp);

    case BUILT_IN_FRAME_ADDRESS:
    case BUILT_IN_RETURN_ADDRESS:
      return expand_builtin_frame_address (exp);

      /* Returns the address of the area where the structure is returned.
	 0 otherwise.  */
    case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      if (arglist != 0
	  || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
	  || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
	return const0_rtx;
      else
	return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);

    case BUILT_IN_ALLOCA:
      target = expand_builtin_alloca (arglist, target);
      if (target)
	return target;
      break;

    case BUILT_IN_FFS:
      target = expand_builtin_ffs (arglist, target, subtarget);
      if (target)
	return target;
      break;

    case BUILT_IN_STRLEN:
      target = expand_builtin_strlen (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCPY:
      target = expand_builtin_strcpy (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMCPY:
      target = expand_builtin_memcpy (arglist);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMSET:
      target = expand_builtin_memset (exp);
      if (target)
	return target;
      break;

      /* These comparison functions need an instruction that returns an actual
	 index.  An ordinary compare that just sets the condition codes
	 is not enough.  */
#ifdef HAVE_cmpstrsi
    case BUILT_IN_STRCMP:
      target = expand_builtin_strcmp (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMCMP:
      target = expand_builtin_memcmp (exp, arglist, target);
      if (target)
	return target;
      break;
#else
    case BUILT_IN_STRCMP:
    case BUILT_IN_MEMCMP:
      break;
#endif

    case BUILT_IN_SETJMP:
      if (arglist == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
	break;
      else
	{
	  rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
				      VOIDmode, 0);
	  rtx lab = gen_label_rtx ();
	  rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab);
	  emit_label (lab);
	  return ret;
	}

      /* __builtin_longjmp is passed a pointer to an array of five words.
	 It's similar to the C library longjmp function but works with
	 __builtin_setjmp above.  */
    case BUILT_IN_LONGJMP:
      if (arglist == 0 || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
	break;
      else
	{
	  rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
				      VOIDmode, 0);
	  rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
				   NULL_RTX, VOIDmode, 0);

	  /* Only the literal value 1 is supported as the second arg.  */
	  if (value != const1_rtx)
	    {
	      error ("__builtin_longjmp second argument must be 1");
	      return const0_rtx;
	    }

	  expand_builtin_longjmp (buf_addr, value);
	  return const0_rtx;
	}

    case BUILT_IN_TRAP:
#ifdef HAVE_trap
      if (HAVE_trap)
	emit_insn (gen_trap ());
      else
#endif
	error ("__builtin_trap not supported by this target");
      emit_barrier ();
      return const0_rtx;

      /* Various hooks for the DWARF 2 __throw routine.  */
    case BUILT_IN_UNWIND_INIT:
      expand_builtin_unwind_init ();
      return const0_rtx;
    case BUILT_IN_DWARF_CFA:
      return virtual_cfa_rtx;
#ifdef DWARF2_UNWIND_INFO
    case BUILT_IN_DWARF_FP_REGNUM:
      return expand_builtin_dwarf_fp_regnum ();
    case BUILT_IN_DWARF_REG_SIZE:
      return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
#endif
    case BUILT_IN_FROB_RETURN_ADDR:
      return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
    case BUILT_IN_EXTRACT_RETURN_ADDR:
      return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
    case BUILT_IN_EH_RETURN:
      expand_builtin_eh_return (TREE_VALUE (arglist),
				TREE_VALUE (TREE_CHAIN (arglist)),
				TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))));
      return const0_rtx;
    case BUILT_IN_VARARGS_START:
      return expand_builtin_va_start (0, arglist);
    case BUILT_IN_STDARG_START:
      return expand_builtin_va_start (1, arglist);
    case BUILT_IN_VA_END:
      return expand_builtin_va_end (arglist);
    case BUILT_IN_VA_COPY:
      return expand_builtin_va_copy (arglist);

    default:			/* just do library call, if unknown builtin */
      error ("built-in function `%s' not currently supported",
	     IDENTIFIER_POINTER (DECL_NAME (fndecl)));
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}