/* Expand builtin functions.
   Copyright (C) 1988, 92-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
21 | #include "config.h" | |
22 | #include "system.h" | |
23 | #include "machmode.h" | |
24 | #include "rtl.h" | |
25 | #include "tree.h" | |
26 | #include "obstack.h" | |
27 | #include "flags.h" | |
28 | #include "regs.h" | |
29 | #include "hard-reg-set.h" | |
30 | #include "except.h" | |
31 | #include "function.h" | |
32 | #include "insn-flags.h" | |
33 | #include "insn-codes.h" | |
34 | #include "insn-config.h" | |
35 | #include "expr.h" | |
36 | #include "recog.h" | |
37 | #include "output.h" | |
38 | #include "typeclass.h" | |
39 | #include "defaults.h" | |
40 | #include "toplev.h" | |
1dd6c958 | 41 | #include "tm_p.h" |
53800dbe | 42 | |
/* Nonzero iff NODE (a FUNCTION_DECL) was called by its "__builtin_"
   name, as opposed to the plain library name.  */
#define CALLED_AS_BUILT_IN(NODE) \
   (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))

/* Integer ceiling of X divided by Y.  Assumes X + Y - 1 does not
   overflow; both arguments are expected to be positive.  */
#define CEIL(x,y) (((x) + (y) - 1) / (y))

/* Register mappings for target machines without register windows.
   On such machines the incoming and outgoing register numbers are
   identical, so default both mappings to the identity.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
55 | ||
/* Hook filled in by the language front end: given a type, return the
   type that values of it promote to when passed through `...' — used
   when expanding the varargs/stdarg builtins.  */
tree (*lang_type_promotes_to) PROTO((tree));

/* Forward declarations for the static helpers defined in this file.  */
static int get_pointer_alignment PROTO((tree, unsigned));
static tree c_strlen PROTO((tree));
static rtx get_memory_rtx PROTO((tree));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply_args_1 PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_builtin_classify_type PROTO((tree));
static rtx expand_builtin_mathfn PROTO((tree, rtx, rtx));
static rtx expand_builtin_constant_p PROTO((tree));
static rtx expand_builtin_args_info PROTO((tree));
static rtx expand_builtin_next_arg PROTO((tree));
static rtx expand_builtin_va_start PROTO((int, tree));
static rtx expand_builtin_va_end PROTO((tree));
static rtx expand_builtin_va_copy PROTO((tree));
/* The string-compare expanders are only used when the target provides
   a cmpstrsi pattern.  */
#ifdef HAVE_cmpstrsi
static rtx expand_builtin_memcmp PROTO((tree, tree, rtx));
static rtx expand_builtin_strcmp PROTO((tree, rtx));
#endif
static rtx expand_builtin_memcpy PROTO((tree));
static rtx expand_builtin_strcpy PROTO((tree));
static rtx expand_builtin_memset PROTO((tree));
static rtx expand_builtin_strlen PROTO((tree, rtx, enum machine_mode));
static rtx expand_builtin_alloca PROTO((tree, rtx));
static rtx expand_builtin_ffs PROTO((tree, rtx, rtx));
static rtx expand_builtin_frame_address PROTO((tree));
87 | ||
/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to (if it is not a POINTER_TYPE, 0 is returned).

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

static int
get_pointer_alignment (exp, max_align)
     tree exp;
     unsigned max_align;
{
  unsigned align, inner;

  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
    return 0;

  /* Start from the declared alignment of the pointed-to type, capped
     at MAX_ALIGN.  */
  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case NOP_EXPR:
	case CONVERT_EXPR:
	case NON_LVALUE_EXPR:
	  /* Conversions don't change the object pointed to, so look
	     through them — the inner pointer type may promise a
	     tighter alignment.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
	    return align;
	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
	  align = MIN (inner, max_align);
	  break;

	case PLUS_EXPR:
	  /* If sum of pointer + int, restrict our maximum alignment to that
	     imposed by the integer.  If not, we can't do any better than
	     ALIGN.  */
	  if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
	    return align;

	  /* Shrink MAX_ALIGN (a power of two) until the constant byte
	     offset, in bits, is a multiple of it; adding the offset can
	     guarantee no more alignment than that.  */
	  while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
		  & (max_align - 1))
		 != 0)
	    max_align >>= 1;

	  exp = TREE_OPERAND (exp, 0);
	  break;

	case ADDR_EXPR:
	  /* See what we are pointing at and look at its alignment.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (TREE_CODE (exp) == FUNCTION_DECL)
	    align = FUNCTION_BOUNDARY;
	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
	    align = DECL_ALIGN (exp);
#ifdef CONSTANT_ALIGNMENT
	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
	    align = CONSTANT_ALIGNMENT (exp, align);
#endif
	  return MIN (align, max_align);

	default:
	  return align;
	}
    }
}
156 | ||
157 | /* Compute the length of a C string. TREE_STRING_LENGTH is not the right | |
158 | way, because it could contain a zero byte in the middle. | |
159 | TREE_STRING_LENGTH is the size of the character array, not the string. | |
160 | ||
161 | Unfortunately, string_constant can't access the values of const char | |
162 | arrays with initializers, so neither can we do so here. */ | |
163 | ||
164 | static tree | |
165 | c_strlen (src) | |
166 | tree src; | |
167 | { | |
168 | tree offset_node; | |
169 | int offset, max; | |
170 | char *ptr; | |
171 | ||
172 | src = string_constant (src, &offset_node); | |
173 | if (src == 0) | |
174 | return 0; | |
175 | max = TREE_STRING_LENGTH (src); | |
176 | ptr = TREE_STRING_POINTER (src); | |
177 | if (offset_node && TREE_CODE (offset_node) != INTEGER_CST) | |
178 | { | |
179 | /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't | |
180 | compute the offset to the following null if we don't know where to | |
181 | start searching for it. */ | |
182 | int i; | |
183 | for (i = 0; i < max; i++) | |
184 | if (ptr[i] == 0) | |
185 | return 0; | |
186 | /* We don't know the starting offset, but we do know that the string | |
187 | has no internal zero bytes. We can assume that the offset falls | |
188 | within the bounds of the string; otherwise, the programmer deserves | |
189 | what he gets. Subtract the offset from the length of the string, | |
190 | and return that. */ | |
191 | /* This would perhaps not be valid if we were dealing with named | |
192 | arrays in addition to literal string constants. */ | |
193 | return size_binop (MINUS_EXPR, size_int (max), offset_node); | |
194 | } | |
195 | ||
196 | /* We have a known offset into the string. Start searching there for | |
197 | a null character. */ | |
198 | if (offset_node == 0) | |
199 | offset = 0; | |
200 | else | |
201 | { | |
202 | /* Did we get a long long offset? If so, punt. */ | |
203 | if (TREE_INT_CST_HIGH (offset_node) != 0) | |
204 | return 0; | |
205 | offset = TREE_INT_CST_LOW (offset_node); | |
206 | } | |
207 | /* If the offset is known to be out of bounds, warn, and call strlen at | |
208 | runtime. */ | |
209 | if (offset < 0 || offset > max) | |
210 | { | |
211 | warning ("offset outside bounds of constant string"); | |
212 | return 0; | |
213 | } | |
214 | /* Use strlen to search for the first zero byte. Since any strings | |
215 | constructed with build_string will have nulls appended, we win even | |
216 | if we get handed something like (char[4])"abcd". | |
217 | ||
218 | Since OFFSET is our starting index into the string, no further | |
219 | calculation is needed. */ | |
220 | return size_int (strlen (ptr + offset)); | |
221 | } | |
222 | ||
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */
rtx
expand_builtin_return_addr (fndecl_code, count, tem)
     enum built_in_function fndecl_code;
     int count;
     rtx tem;
{
  int i;

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the sparc, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the sparc, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
    }

  /* For __builtin_frame_address, return what we've got.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return tem;

  /* For __builtin_return_address, Get the return address from that
     frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  /* By default, assume the return address is stored one word past the
     frame pointer.  */
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_rtx_MEM (Pmode, tem);
#endif
  return tem;
}
278 | ||
/* __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below (and for longjmp) is copied from the handling of
   non-local gotos.

   NOTE: This is intended for use by GNAT and the exception handling
   scheme in the compiler and will only work in the method used by
   them.  */

rtx
expand_builtin_setjmp (buf_addr, target, first_label, next_label)
     rtx buf_addr;
     rtx target;
     rtx first_label, next_label;
{
  rtx lab1 = gen_label_rtx ();
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  enum machine_mode value_mode;
  rtx stack_save;

  value_mode = TYPE_MODE (integer_type_node);

#ifdef POINTERS_EXTEND_UNSIGNED
  buf_addr = convert_memory_address (Pmode, buf_addr);
#endif

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* TARGET must be a fresh pseudo; fall back to one if the caller's
     choice is absent or a hard register.  */
  if (target == 0 || GET_CODE (target) != REG
      || REGNO (target) < FIRST_PSEUDO_REGISTER)
    target = gen_reg_rtx (value_mode);

  emit_queue ();

  /* We store the frame pointer and the address of lab1 in the buffer
     and use the rest of it for the stack save area, which is
     machine-dependent.  */

#ifndef BUILTIN_SETJMP_FRAME_VALUE
#define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
#endif

  /* Buffer word 0: the frame value.  */
  emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
		  BUILTIN_SETJMP_FRAME_VALUE);
  /* Buffer word 1: the label a longjmp resumes at.  */
  emit_move_insn (validize_mem
		  (gen_rtx_MEM (Pmode,
				plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)))),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, lab1)));

  /* Buffer words 2 and up: the stack save area.  */
  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* Set TARGET to zero and branch to the first-time-through label.  */
  emit_move_insn (target, const0_rtx);
  emit_jump_insn (gen_jump (first_label));
  emit_barrier ();
  emit_label (lab1);

  /* Tell flow about the strange goings on.  Putting `lab1' on
     `nonlocal_goto_handler_labels' to indicates that function
     calls may traverse the arc back to this label.  */

  current_function_has_nonlocal_label = 1;
  nonlocal_goto_handler_labels =
    gen_rtx_EXPR_LIST (VOIDmode, lab1, nonlocal_goto_handler_labels);

  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  The code below is from expand_end_bindings
     in stmt.c; see detailed documentation there.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the target can eliminate the arg pointer in favor of the
	 (hard) frame pointer, no explicit restore is needed; search
	 the elimination table for that pair.  */
      size_t i;
      static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == sizeof elim_regs / sizeof elim_regs [0])
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.
	     If there hasn't be space allocated for it yet, make
	     some now.  */
	  if (arg_pointer_save_area == 0)
	    arg_pointer_save_area
	      = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
	  emit_move_insn (virtual_incoming_args_rtx,
			  copy_to_reg (arg_pointer_save_area));
	}
    }
#endif

  /* Prefer the target's setjmp receiver, then its nonlocal-goto
     receiver; otherwise nothing extra is needed.  */
#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (lab1));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      {
	; /* Nothing */
      }

  /* Set TARGET, and branch to the next-time-through label.  */
  emit_move_insn (target, const1_rtx);
  emit_jump_insn (gen_jump (next_label));
  emit_barrier ();

  return target;
}
420 | ||
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.

   NOTE: This is intended for use by GNAT and the exception handling
   scheme in the compiler and will only work in the method used by
   them.  */

void
expand_builtin_longjmp (buf_addr, value)
     rtx buf_addr, value;
{
  rtx fp, lab, stack;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

#ifdef POINTERS_EXTEND_UNSIGNED
  buf_addr = convert_memory_address (Pmode, buf_addr);
#endif
  buf_addr = force_reg (Pmode, buf_addr);

  /* We used to store value in static_chain_rtx, but that fails if pointers
     are smaller than integers.  We instead require that the user must pass
     a second argument of 1, because that is what builtin_setjmp will
     return.  This also makes EH slightly more efficient, since we are no
     longer copying around a value that we don't care about.  */
  if (value != const1_rtx)
    abort ();

#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      /* Buffer layout as written by expand_builtin_setjmp: word 0 is
	 the frame pointer, word 1 the resume label, words 2+ the
	 saved stack area.  */
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (lab);
	}
    }
}
485 | ||
486 | /* Get a MEM rtx for expression EXP which can be used in a string instruction | |
487 | (cmpstrsi, movstrsi, ..). */ | |
488 | static rtx | |
489 | get_memory_rtx (exp) | |
490 | tree exp; | |
491 | { | |
492 | rtx mem; | |
493 | int is_aggregate; | |
494 | ||
495 | mem = gen_rtx_MEM (BLKmode, | |
496 | memory_address (BLKmode, | |
497 | expand_expr (exp, NULL_RTX, | |
498 | ptr_mode, EXPAND_SUM))); | |
499 | ||
500 | RTX_UNCHANGING_P (mem) = TREE_READONLY (exp); | |
501 | ||
502 | /* Figure out the type of the object pointed to. Set MEM_IN_STRUCT_P | |
503 | if the value is the address of a structure or if the expression is | |
504 | cast to a pointer to structure type. */ | |
505 | is_aggregate = 0; | |
506 | ||
507 | while (TREE_CODE (exp) == NOP_EXPR) | |
508 | { | |
509 | tree cast_type = TREE_TYPE (exp); | |
510 | if (TREE_CODE (cast_type) == POINTER_TYPE | |
511 | && AGGREGATE_TYPE_P (TREE_TYPE (cast_type))) | |
512 | { | |
513 | is_aggregate = 1; | |
514 | break; | |
515 | } | |
516 | exp = TREE_OPERAND (exp, 0); | |
517 | } | |
518 | ||
519 | if (is_aggregate == 0) | |
520 | { | |
521 | tree type; | |
522 | ||
523 | if (TREE_CODE (exp) == ADDR_EXPR) | |
524 | /* If this is the address of an object, check whether the | |
525 | object is an array. */ | |
526 | type = TREE_TYPE (TREE_OPERAND (exp, 0)); | |
527 | else | |
528 | type = TREE_TYPE (TREE_TYPE (exp)); | |
529 | is_aggregate = AGGREGATE_TYPE_P (type); | |
530 | } | |
531 | ||
532 | MEM_SET_IN_STRUCT_P (mem, is_aggregate); | |
533 | return mem; | |
534 | } | |
535 | \f | |
536 | /* Built-in functions to perform an untyped call and return. */ | |
537 | ||
/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.
   Initialized lazily by apply_args_size.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.
   Initialized lazily by apply_result_size.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for calling a function, this
   gives the offset of that register into the block returned by
   __builtin_apply_args.  0 indicates that the register is not
   used for calling a function.  Initialized by apply_args_size.  */
static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
557 | ||
558 | /* Return the offset of register REGNO into the block returned by | |
559 | __builtin_apply_args. This is not declared static, since it is | |
560 | needed in objc-act.c. */ | |
561 | ||
562 | int | |
563 | apply_args_register_offset (regno) | |
564 | int regno; | |
565 | { | |
566 | apply_args_size (); | |
567 | ||
568 | /* Arguments are always put in outgoing registers (in the argument | |
569 | block) if such make sense. */ | |
570 | #ifdef OUTGOING_REGNO | |
571 | regno = OUTGOING_REGNO(regno); | |
572 | #endif | |
573 | return apply_args_reg_offset[regno]; | |
574 | } | |
575 | ||
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size ()
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (struct_value_rtx)
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    /* Search for the proper mode for copying this register's
	       value.  I'm not sure this is right, but it works so far.  */
	    enum machine_mode best_mode = VOIDmode;

	    /* Prefer the widest integer mode the register can hold in
	       a single hard register.  */
	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
		 mode != VOIDmode;
		 mode = GET_MODE_WIDER_MODE (mode))
	      if (HARD_REGNO_MODE_OK (regno, mode)
		  && HARD_REGNO_NREGS (regno, mode) == 1)
		best_mode = mode;

	    /* Failing that, try float modes that have a move pattern.  */
	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && (mov_optab->handlers[(int) mode].insn_code
			!= CODE_FOR_nothing))
		  best_mode = mode;

	    mode = best_mode;
	    if (mode == VOIDmode)
	      abort ();

	    /* Round the running size up to this mode's alignment, then
	       record the register's offset and advance past it.  */
	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    apply_args_reg_offset[regno] = size;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	    apply_args_reg_offset[regno] = 0;
	  }
    }
  return size;
}
639 | ||
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size ()
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_VALUE_REGNO_P (regno))
	  {
	    /* Search for the proper mode for copying this register's
	       value.  I'm not sure this is right, but it works so far.  */
	    enum machine_mode best_mode = VOIDmode;

	    /* Widest integer mode narrower than TImode that the
	       register can hold.  */
	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
		 mode != TImode;
		 mode = GET_MODE_WIDER_MODE (mode))
	      if (HARD_REGNO_MODE_OK (regno, mode))
		best_mode = mode;

	    /* Failing that, try float modes that have a move pattern.  */
	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && (mov_optab->handlers[(int) mode].insn_code
			!= CODE_FOR_nothing))
		  best_mode = mode;

	    mode = best_mode;
	    if (mode == VOIDmode)
	      abort ();

	    /* Round up to this mode's alignment and advance past it.  */
	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
698 | ||
699 | #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return) | |
700 | /* Create a vector describing the result block RESULT. If SAVEP is true, | |
701 | the result block is used to save the values; otherwise it is used to | |
702 | restore the values. */ | |
703 | ||
704 | static rtx | |
705 | result_vector (savep, result) | |
706 | int savep; | |
707 | rtx result; | |
708 | { | |
709 | int regno, size, align, nelts; | |
710 | enum machine_mode mode; | |
711 | rtx reg, mem; | |
712 | rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx)); | |
713 | ||
714 | size = nelts = 0; | |
715 | for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) | |
716 | if ((mode = apply_result_mode[regno]) != VOIDmode) | |
717 | { | |
718 | align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; | |
719 | if (size % align != 0) | |
720 | size = CEIL (size, align) * align; | |
721 | reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno)); | |
722 | mem = change_address (result, mode, | |
723 | plus_constant (XEXP (result, 0), size)); | |
724 | savevec[nelts++] = (savep | |
725 | ? gen_rtx_SET (VOIDmode, mem, reg) | |
726 | : gen_rtx_SET (VOIDmode, reg, mem)); | |
727 | size += GET_MODE_SIZE (mode); | |
728 | } | |
729 | return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec)); | |
730 | } | |
731 | #endif /* HAVE_untyped_call or HAVE_untyped_return */ | |
732 | ||
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  Emits the insns
   into the current sequence and returns a pseudo holding the address
   of the saved-register block.  */

static rtx
expand_builtin_apply_args_1 ()
{
  rtx registers;
  int size, align, regno;
  enum machine_mode mode;

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	rtx tem;

	/* Keep the slot aligned for its mode, matching the layout
	   computed by apply_args_size.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

#ifdef STACK_REGS
	/* For reg-stack.c's stack register household.
	   Compare with a similar piece of code in function.c.  */

	emit_insn (gen_rtx_USE (mode, tem));
#endif

	emit_move_insn (change_address (registers, mode,
					plus_constant (XEXP (registers, 0),
						       size)),
			tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
		  copy_to_reg (virtual_incoming_args_rtx));
  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_value_incoming_rtx)
    {
      emit_move_insn (change_address (registers, Pmode,
				      plus_constant (XEXP (registers, 0),
						     size)),
		      copy_to_reg (struct_value_incoming_rtx));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
797 | ||
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */
static rtx
expand_builtin_apply_args ()
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    /* Build the save code in a detached sequence so it can be moved.  */
    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    /* Cache the result for subsequent calls in this function.  */
    apply_args_value = temp;

    /* Put the sequence after the NOTE that starts the function.
       If this is inside a SEQUENCE, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  */
    push_topmost_sequence ();
    emit_insns_before (seq, NEXT_INSN (get_insns ()));
    pop_topmost_sequence ();
    return temp;
  }
}
835 | ||
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.
   FUNCTION is the rtx for the called function's address, ARGUMENTS is
   the address of a block built by __builtin_apply_args, and ARGSIZE is
   the number of bytes of stack arguments to copy.  Returns the address
   of a block holding the return registers (see expand_builtin_return).  */

static rtx
expand_builtin_apply (function, arguments, argsize)
     rtx function, arguments, argsize;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* ??? The argsize value should be adjusted here.  */

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args,
		  gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  /* On upward-growing stacks the saved arg pointer is past the
     arguments, so back up by ARGSIZE to find their start.  */
  incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
				incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Push a new argument block and copy the arguments.  */
  do_pending_stack_adjust ();

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Push a block of memory onto the stack to store the memory arguments.
     Save the address in a register, and copy the memory arguments.  ??? I
     haven't figured out how the calling convention macros effect this,
     but it's likely that the source and/or destination addresses in
     the block copy will need updating in machine specific ways.  */
  dest = allocate_dynamic_stack_space (argsize, 0, 0);
  emit_block_move (gen_rtx_MEM (BLKmode, dest),
		   gen_rtx_MEM (BLKmode, incoming_args),
		   argsize,
		   PARM_BOUNDARY / BITS_PER_UNIT);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.
     The layout (offsets, alignment padding) must mirror the one
     produced when the block was saved.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg,
			change_address (arguments, mode,
					plus_constant (XEXP (arguments, 0),
						       size)));

	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  It sits right after the arg pointer,
     hence the offset of one pointer.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value,
		      change_address (arguments, Pmode,
				      plus_constant (XEXP (arguments, 0),
						     size)));
      emit_move_insn (struct_value_rtx, value);
      if (GET_CODE (struct_value_rtx) == REG)
	use_reg (&call_fusage, struct_value_rtx);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    if (valreg)
	      abort (); /* HAVE_untyped_call required.  */
	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (gen_call_value (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (change_address (result, GET_MODE (valreg),
				      XEXP (result, 0)),
		      valreg);
    }
  else
#endif
    abort ();

  /* Find the CALL insn we just emitted.  Scan backwards because
     gen_call_value patterns may emit insns after the call itself.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  if (! call_insn)
    abort ();

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      /* Walk to the last link of the existing usage chain.  */
      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
	   link = XEXP (link, 1))
	;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  /* Return the address of the result block.  */
  return copy_addr_to_reg (XEXP (result, 0));
}
1011 | ||
/* Perform an untyped return.  RESULT is the address of a block, built
   by expand_builtin_apply, holding the saved return-register values;
   reload those registers and return from the current function.  */

static void
expand_builtin_return (result)
     rtx result;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      /* The target handles the whole restore-and-return itself.  */
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.
     Layout must mirror the save done by the untyped call.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg,
			change_address (result, mode,
					plus_constant (XEXP (result, 0),
						       size)));

	/* Accumulate a USE for this register so it is not considered
	   dead before the return.  */
	push_to_sequence (call_fusage);
	emit_insn (gen_rtx_USE (VOIDmode, reg));
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insns (call_fusage);

  /* Return whatever value was restored by jumping directly to the end
     of the function.  */
  expand_null_return ();
}
1063 | ||
1064 | /* Expand a call to __builtin_classify_type with arguments found in | |
1065 | ARGLIST. */ | |
1066 | static rtx | |
1067 | expand_builtin_classify_type (arglist) | |
1068 | tree arglist; | |
1069 | { | |
1070 | if (arglist != 0) | |
1071 | { | |
1072 | tree type = TREE_TYPE (TREE_VALUE (arglist)); | |
1073 | enum tree_code code = TREE_CODE (type); | |
1074 | if (code == VOID_TYPE) | |
1075 | return GEN_INT (void_type_class); | |
1076 | if (code == INTEGER_TYPE) | |
1077 | return GEN_INT (integer_type_class); | |
1078 | if (code == CHAR_TYPE) | |
1079 | return GEN_INT (char_type_class); | |
1080 | if (code == ENUMERAL_TYPE) | |
1081 | return GEN_INT (enumeral_type_class); | |
1082 | if (code == BOOLEAN_TYPE) | |
1083 | return GEN_INT (boolean_type_class); | |
1084 | if (code == POINTER_TYPE) | |
1085 | return GEN_INT (pointer_type_class); | |
1086 | if (code == REFERENCE_TYPE) | |
1087 | return GEN_INT (reference_type_class); | |
1088 | if (code == OFFSET_TYPE) | |
1089 | return GEN_INT (offset_type_class); | |
1090 | if (code == REAL_TYPE) | |
1091 | return GEN_INT (real_type_class); | |
1092 | if (code == COMPLEX_TYPE) | |
1093 | return GEN_INT (complex_type_class); | |
1094 | if (code == FUNCTION_TYPE) | |
1095 | return GEN_INT (function_type_class); | |
1096 | if (code == METHOD_TYPE) | |
1097 | return GEN_INT (method_type_class); | |
1098 | if (code == RECORD_TYPE) | |
1099 | return GEN_INT (record_type_class); | |
1100 | if (code == UNION_TYPE || code == QUAL_UNION_TYPE) | |
1101 | return GEN_INT (union_type_class); | |
1102 | if (code == ARRAY_TYPE) | |
1103 | { | |
1104 | if (TYPE_STRING_FLAG (type)) | |
1105 | return GEN_INT (string_type_class); | |
1106 | else | |
1107 | return GEN_INT (array_type_class); | |
1108 | } | |
1109 | if (code == SET_TYPE) | |
1110 | return GEN_INT (set_type_class); | |
1111 | if (code == FILE_TYPE) | |
1112 | return GEN_INT (file_type_class); | |
1113 | if (code == LANG_TYPE) | |
1114 | return GEN_INT (lang_type_class); | |
1115 | } | |
1116 | return GEN_INT (no_type_class); | |
1117 | } | |
1118 | ||
1119 | /* Expand expression EXP, which is a call to __builtin_constant_p. */ | |
1120 | static rtx | |
1121 | expand_builtin_constant_p (exp) | |
1122 | tree exp; | |
1123 | { | |
1124 | tree arglist = TREE_OPERAND (exp, 1); | |
1125 | enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp)); | |
1126 | ||
1127 | if (arglist == 0) | |
1128 | return const0_rtx; | |
1129 | else | |
1130 | { | |
1131 | tree arg = TREE_VALUE (arglist); | |
1132 | rtx tmp; | |
1133 | ||
1134 | /* We return 1 for a numeric type that's known to be a constant | |
1135 | value at compile-time or for an aggregate type that's a | |
1136 | literal constant. */ | |
1137 | STRIP_NOPS (arg); | |
1138 | ||
1139 | /* If we know this is a constant, emit the constant of one. */ | |
1140 | if (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c' | |
1141 | || (TREE_CODE (arg) == CONSTRUCTOR | |
1142 | && TREE_CONSTANT (arg)) | |
1143 | || (TREE_CODE (arg) == ADDR_EXPR | |
1144 | && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)) | |
1145 | return const1_rtx; | |
1146 | ||
1147 | /* If we aren't going to be running CSE or this expression | |
1148 | has side effects, show we don't know it to be a constant. | |
1149 | Likewise if it's a pointer or aggregate type since in those | |
1150 | case we only want literals, since those are only optimized | |
1151 | when generating RTL, not later. */ | |
1152 | if (TREE_SIDE_EFFECTS (arg) || cse_not_expected | |
1153 | || AGGREGATE_TYPE_P (TREE_TYPE (arg)) | |
1154 | || POINTER_TYPE_P (TREE_TYPE (arg))) | |
1155 | return const0_rtx; | |
1156 | ||
1157 | /* Otherwise, emit (constant_p_rtx (ARG)) and let CSE get a | |
1158 | chance to see if it can deduce whether ARG is constant. */ | |
1159 | ||
1160 | tmp = expand_expr (arg, NULL_RTX, VOIDmode, 0); | |
1161 | tmp = gen_rtx_CONSTANT_P_RTX (value_mode, tmp); | |
1162 | return tmp; | |
1163 | } | |
1164 | } | |
1165 | ||
/* Expand a call to one of the builtin math functions (sin, cos, or sqrt).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */
static rtx
expand_builtin_mathfn (exp, target, subtarget)
     tree exp;
     rtx target, subtarget;
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);

  if (arglist == 0
      /* Arg could be wrong type if user redeclared this fcn wrong.  */
      || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
    return 0;

  /* Stabilize and compute the argument.  Wrap it in a SAVE_EXPR unless
     it is already a simple decl, since the errno path below may need to
     re-evaluate EXP via expand_call.  */
  if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
      && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
    {
      exp = copy_node (exp);
      arglist = copy_node (arglist);
      TREE_OPERAND (exp, 1) = arglist;
      TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
    }
  op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);

  /* Make a suitable register to place result in.  */
  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

  emit_queue ();
  start_sequence ();

  /* Select the optab matching the builtin being expanded.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_SIN:
      builtin_optab = sin_optab; break;
    case BUILT_IN_COS:
      builtin_optab = cos_optab; break;
    case BUILT_IN_FSQRT:
      builtin_optab = sqrt_optab; break;
    default:
      abort ();
    }

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
			builtin_optab, op0, target, 0);

  /* If we were unable to expand via the builtin, stop the
     sequence (without outputting the insns) and return 0, causing
     a call to the library function.  */
  if (target == 0)
    {
      end_sequence ();
      return 0;
    }

  /* Check the results by default.  But if flag_fast_math is turned on,
     then assume sqrt will always be called with valid arguments.  */

  if (flag_errno_math && ! flag_fast_math)
    {
      rtx lab1;

      /* Don't define the builtin FP instructions
	 if your machine is not IEEE.  */
      if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
	abort ();

      lab1 = gen_label_rtx ();

      /* Test the result; if it is NaN, set errno=EDOM because
	 the argument was not in the domain.  (A NaN compares unequal
	 to itself, so EQ jumps past the errno code for valid results.)  */
      emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
			       0, 0, lab1);

#ifdef TARGET_EDOM
      {
#ifdef GEN_ERRNO_RTX
	rtx errno_rtx = GEN_ERRNO_RTX;
#else
	rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif

	emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
      }
#else
      /* We can't set errno=EDOM directly; let the library call do it.
	 Pop the arguments right away in case the call gets deleted.  */
      NO_DEFER_POP;
      expand_call (exp, target, 0);
      OK_DEFER_POP;
#endif

      emit_label (lab1);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insns (insns);

  return target;
}
1277 | ||
1278 | /* Expand expression EXP which is a call to the strlen builtin. Return 0 | |
1279 | if we failed the caller should emit a normal call, otherwise | |
1280 | try to get the result in TARGET, if convenient (and in mode MODE if that's | |
1281 | convenient). */ | |
1282 | static rtx | |
1283 | expand_builtin_strlen (exp, target, mode) | |
1284 | tree exp; | |
1285 | rtx target; | |
1286 | enum machine_mode mode; | |
1287 | { | |
1288 | tree arglist = TREE_OPERAND (exp, 1); | |
1289 | enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp)); | |
1290 | ||
1291 | if (arglist == 0 | |
1292 | /* Arg could be non-pointer if user redeclared this fcn wrong. */ | |
1293 | || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE) | |
1294 | return 0; | |
1295 | else | |
1296 | { | |
1297 | tree src = TREE_VALUE (arglist); | |
1298 | tree len = c_strlen (src); | |
1299 | ||
1300 | int align | |
1301 | = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT; | |
1302 | ||
1303 | rtx result, src_rtx, char_rtx; | |
1304 | enum machine_mode insn_mode = value_mode, char_mode; | |
1305 | enum insn_code icode; | |
1306 | ||
1307 | /* If the length is known, just return it. */ | |
1308 | if (len != 0) | |
1309 | return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD); | |
1310 | ||
1311 | /* If SRC is not a pointer type, don't do this operation inline. */ | |
1312 | if (align == 0) | |
1313 | return 0; | |
1314 | ||
1315 | /* Call a function if we can't compute strlen in the right mode. */ | |
1316 | ||
1317 | while (insn_mode != VOIDmode) | |
1318 | { | |
1319 | icode = strlen_optab->handlers[(int) insn_mode].insn_code; | |
1320 | if (icode != CODE_FOR_nothing) | |
1321 | return 0; | |
1322 | ||
1323 | insn_mode = GET_MODE_WIDER_MODE (insn_mode); | |
1324 | } | |
1325 | if (insn_mode == VOIDmode) | |
1326 | return 0; | |
1327 | ||
1328 | /* Make a place to write the result of the instruction. */ | |
1329 | result = target; | |
1330 | if (! (result != 0 | |
1331 | && GET_CODE (result) == REG | |
1332 | && GET_MODE (result) == insn_mode | |
1333 | && REGNO (result) >= FIRST_PSEUDO_REGISTER)) | |
1334 | result = gen_reg_rtx (insn_mode); | |
1335 | ||
1336 | /* Make sure the operands are acceptable to the predicates. */ | |
1337 | ||
6357eaae | 1338 | if (! (*insn_data[(int)icode].operand[0].predicate) (result, insn_mode)) |
53800dbe | 1339 | result = gen_reg_rtx (insn_mode); |
1340 | src_rtx = memory_address (BLKmode, | |
1341 | expand_expr (src, NULL_RTX, ptr_mode, | |
1342 | EXPAND_NORMAL)); | |
1343 | ||
6357eaae | 1344 | if (! (*insn_data[(int)icode].operand[1].predicate) (src_rtx, Pmode)) |
53800dbe | 1345 | src_rtx = copy_to_mode_reg (Pmode, src_rtx); |
1346 | ||
1347 | /* Check the string is readable and has an end. */ | |
1348 | if (current_function_check_memory_usage) | |
1349 | emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2, | |
1350 | src_rtx, Pmode, | |
1351 | GEN_INT (MEMORY_USE_RO), | |
1352 | TYPE_MODE (integer_type_node)); | |
1353 | ||
1354 | char_rtx = const0_rtx; | |
6357eaae | 1355 | char_mode = insn_data[(int)icode].operand[2].mode; |
1356 | if (! (*insn_data[(int)icode].operand[2].predicate) (char_rtx, char_mode)) | |
53800dbe | 1357 | char_rtx = copy_to_mode_reg (char_mode, char_rtx); |
1358 | ||
1359 | emit_insn (GEN_FCN (icode) (result, | |
1360 | gen_rtx_MEM (BLKmode, src_rtx), | |
1361 | char_rtx, GEN_INT (align))); | |
1362 | ||
1363 | /* Return the value in the proper mode for this function. */ | |
1364 | if (GET_MODE (result) == value_mode) | |
1365 | return result; | |
1366 | else if (target != 0) | |
1367 | { | |
1368 | convert_move (target, result, 0); | |
1369 | return target; | |
1370 | } | |
1371 | else | |
1372 | return convert_to_mode (value_mode, result, 0); | |
1373 | } | |
1374 | } | |
1375 | ||
1376 | /* Expand a call to the memcpy builtin, with arguments in ARGLIST. */ | |
1377 | static rtx | |
1378 | expand_builtin_memcpy (arglist) | |
1379 | tree arglist; | |
1380 | { | |
1381 | if (arglist == 0 | |
1382 | /* Arg could be non-pointer if user redeclared this fcn wrong. */ | |
1383 | || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE | |
1384 | || TREE_CHAIN (arglist) == 0 | |
1385 | || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) | |
1386 | != POINTER_TYPE) | |
1387 | || TREE_CHAIN (TREE_CHAIN (arglist)) == 0 | |
1388 | || (TREE_CODE (TREE_TYPE (TREE_VALUE | |
1389 | (TREE_CHAIN (TREE_CHAIN (arglist))))) | |
1390 | != INTEGER_TYPE)) | |
1391 | return 0; | |
1392 | else | |
1393 | { | |
1394 | tree dest = TREE_VALUE (arglist); | |
1395 | tree src = TREE_VALUE (TREE_CHAIN (arglist)); | |
1396 | tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))); | |
1397 | ||
1398 | int src_align | |
1399 | = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT; | |
1400 | int dest_align | |
1401 | = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT; | |
1402 | rtx dest_mem, src_mem, dest_addr, len_rtx; | |
1403 | ||
1404 | /* If either SRC or DEST is not a pointer type, don't do | |
1405 | this operation in-line. */ | |
1406 | if (src_align == 0 || dest_align == 0) | |
1407 | return 0; | |
1408 | ||
1409 | dest_mem = get_memory_rtx (dest); | |
1410 | src_mem = get_memory_rtx (src); | |
1411 | len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0); | |
1412 | ||
1413 | /* Just copy the rights of SRC to the rights of DEST. */ | |
1414 | if (current_function_check_memory_usage) | |
1415 | emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3, | |
1416 | XEXP (dest_mem, 0), Pmode, | |
1417 | XEXP (src_mem, 0), Pmode, | |
1418 | len_rtx, TYPE_MODE (sizetype)); | |
1419 | ||
1420 | /* Copy word part most expediently. */ | |
1421 | dest_addr | |
1422 | = emit_block_move (dest_mem, src_mem, len_rtx, | |
1423 | MIN (src_align, dest_align)); | |
1424 | ||
1425 | if (dest_addr == 0) | |
1426 | dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX); | |
1427 | ||
1428 | return dest_addr; | |
1429 | } | |
1430 | } | |
1431 | ||
1432 | /* Expand expression EXP, which is a call to the strcpy builtin. Return 0 | |
1433 | if we failed the caller should emit a normal call. */ | |
1434 | static rtx | |
1435 | expand_builtin_strcpy (exp) | |
1436 | tree exp; | |
1437 | { | |
1438 | tree arglist = TREE_OPERAND (exp, 1); | |
1439 | rtx result; | |
1440 | ||
1441 | if (arglist == 0 | |
1442 | /* Arg could be non-pointer if user redeclared this fcn wrong. */ | |
1443 | || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE | |
1444 | || TREE_CHAIN (arglist) == 0 | |
1445 | || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE) | |
1446 | return 0; | |
1447 | else | |
1448 | { | |
1449 | tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist))); | |
1450 | ||
1451 | if (len == 0) | |
1452 | return 0; | |
1453 | ||
1454 | len = size_binop (PLUS_EXPR, len, integer_one_node); | |
1455 | ||
1456 | chainon (arglist, build_tree_list (NULL_TREE, len)); | |
1457 | } | |
1458 | result = expand_builtin_memcpy (arglist); | |
1459 | if (! result) | |
1460 | TREE_CHAIN (TREE_CHAIN (arglist)) = 0; | |
1461 | return result; | |
1462 | } | |
1463 | ||
1464 | /* Expand expression EXP, which is a call to the memset builtin. Return 0 | |
1465 | if we failed the caller should emit a normal call. */ | |
1466 | static rtx | |
1467 | expand_builtin_memset (exp) | |
1468 | tree exp; | |
1469 | { | |
1470 | tree arglist = TREE_OPERAND (exp, 1); | |
1471 | ||
1472 | if (arglist == 0 | |
1473 | /* Arg could be non-pointer if user redeclared this fcn wrong. */ | |
1474 | || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE | |
1475 | || TREE_CHAIN (arglist) == 0 | |
1476 | || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) | |
1477 | != INTEGER_TYPE) | |
1478 | || TREE_CHAIN (TREE_CHAIN (arglist)) == 0 | |
1479 | || (INTEGER_TYPE | |
1480 | != (TREE_CODE (TREE_TYPE | |
1481 | (TREE_VALUE | |
1482 | (TREE_CHAIN (TREE_CHAIN (arglist)))))))) | |
1483 | return 0; | |
1484 | else | |
1485 | { | |
1486 | tree dest = TREE_VALUE (arglist); | |
1487 | tree val = TREE_VALUE (TREE_CHAIN (arglist)); | |
1488 | tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))); | |
1489 | ||
1490 | int dest_align | |
1491 | = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT; | |
1492 | rtx dest_mem, dest_addr, len_rtx; | |
1493 | ||
1494 | /* If DEST is not a pointer type, don't do this | |
1495 | operation in-line. */ | |
1496 | if (dest_align == 0) | |
1497 | return 0; | |
1498 | ||
1499 | /* If the arguments have side-effects, then we can only evaluate | |
1500 | them at most once. The following code evaluates them twice if | |
1501 | they are not constants because we break out to expand_call | |
1502 | in that case. They can't be constants if they have side-effects | |
1503 | so we can check for that first. Alternatively, we could call | |
1504 | save_expr to make multiple evaluation safe. */ | |
1505 | if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len)) | |
1506 | return 0; | |
1507 | ||
1508 | /* If VAL is not 0, don't do this operation in-line. */ | |
1509 | if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx) | |
1510 | return 0; | |
1511 | ||
1512 | /* If LEN does not expand to a constant, don't do this | |
1513 | operation in-line. */ | |
1514 | len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0); | |
1515 | if (GET_CODE (len_rtx) != CONST_INT) | |
1516 | return 0; | |
1517 | ||
1518 | dest_mem = get_memory_rtx (dest); | |
1519 | ||
1520 | /* Just check DST is writable and mark it as readable. */ | |
1521 | if (current_function_check_memory_usage) | |
1522 | emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3, | |
1523 | XEXP (dest_mem, 0), Pmode, | |
1524 | len_rtx, TYPE_MODE (sizetype), | |
1525 | GEN_INT (MEMORY_USE_WO), | |
1526 | TYPE_MODE (integer_type_node)); | |
1527 | ||
1528 | ||
1529 | dest_addr = clear_storage (dest_mem, len_rtx, dest_align); | |
1530 | ||
1531 | if (dest_addr == 0) | |
1532 | dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX); | |
1533 | ||
1534 | return dest_addr; | |
1535 | } | |
1536 | } | |
1537 | ||
1538 | #ifdef HAVE_cmpstrsi | |
/* Expand expression EXP, which is a call to the memcmp or the strcmp builtin.
   ARGLIST is the argument list for this call.  Return 0 if we failed and the
   caller should emit a normal call, otherwise try to get the result in
   TARGET, if convenient.  Requires the cmpstrsi pattern; the whole
   function is compiled under #ifdef HAVE_cmpstrsi.  */
static rtx
expand_builtin_memcmp (exp, arglist, target)
     tree exp;
     tree arglist;
     rtx target;
{
  /* If we need to check memory accesses, call the library function.  */
  if (current_function_check_memory_usage)
    return 0;

  if (arglist == 0
      /* Arg could be non-pointer if user redeclared this fcn wrong.  */
      || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
      || TREE_CHAIN (arglist) == 0
      || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
      || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
      || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
    return 0;
  else if (!HAVE_cmpstrsi)
    return 0;

  {
    enum machine_mode mode;
    tree arg1 = TREE_VALUE (arglist);
    tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
    tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
    rtx result;

    int arg1_align
      = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    int arg2_align
      = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    enum machine_mode insn_mode
      = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return 0;

    /* Make a place to write the result of the instruction.  Reuse
       TARGET only when it is a pseudo register of the right mode.  */
    result = target;
    if (! (result != 0
	   && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    emit_insn (gen_cmpstrsi (result, get_memory_rtx (arg1),
			     get_memory_rtx (arg2),
			     expand_expr (len, NULL_RTX, VOIDmode, 0),
			     GEN_INT (MIN (arg1_align, arg2_align))));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
}
1607 | ||
/* Expand expression EXP, which is a call to the strcmp builtin.  Return 0
   if we failed the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient.  Works by computing a length bound
   from whichever string is a compile-time constant and delegating to the
   memcmp expander.  */
static rtx
expand_builtin_strcmp (exp, target)
     tree exp;
     rtx target;
{
  tree arglist = TREE_OPERAND (exp, 1);

  /* If we need to check memory accesses, call the library function.  */
  if (current_function_check_memory_usage)
    return 0;

  if (arglist == 0
      /* Arg could be non-pointer if user redeclared this fcn wrong.  */
      || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
      || TREE_CHAIN (arglist) == 0
      || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
    return 0;
  else if (!HAVE_cmpstrsi)
    return 0;
  {
    tree arg1 = TREE_VALUE (arglist);
    tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
    tree len, len2;
    rtx result;
    /* Compile-time lengths, if known; +1 covers the terminating NUL.  */
    len = c_strlen (arg1);
    if (len)
      len = size_binop (PLUS_EXPR, integer_one_node, len);
    len2 = c_strlen (arg2);
    if (len2)
      len2 = size_binop (PLUS_EXPR, integer_one_node, len2);

    /* If we don't have a constant length for the first, use the length
       of the second, if we know it.  We don't require a constant for
       this case; some cost analysis could be done if both are available
       but neither is constant.  For now, assume they're equally cheap.

       If both strings have constant lengths, use the smaller.  This
       could arise if optimization results in strcpy being called with
       two fixed strings, or if the code was machine-generated.  We should
       add some code to the `memcmp' handler below to deal with such
       situations, someday.  */
    if (!len || TREE_CODE (len) != INTEGER_CST)
      {
	if (len2)
	  len = len2;
	else if (len == 0)
	  return 0;
      }
    else if (len2 && TREE_CODE (len2) == INTEGER_CST)
      {
	if (tree_int_cst_lt (len2, len))
	  len = len2;
      }

    /* Tack the length onto the argument list and let memcmp do the work.  */
    chainon (arglist, build_tree_list (NULL_TREE, len));
    result = expand_builtin_memcmp (exp, arglist, target);
    /* On failure, undo the arglist mutation so a normal call sees the
       original two arguments.  */
    if (! result)
      TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
    return result;
  }
}
1672 | #endif | |
1673 | ||
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */
rtx
expand_builtin_saveregs ()
{
  rtx val, seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

#ifdef EXPAND_BUILTIN_SAVEREGS
  /* Do whatever the machine needs done in this case.  */
  val = EXPAND_BUILTIN_SAVEREGS ();
#else
  /* ??? We used to try and build up a call to the out of line function,
     guessing about what registers needed saving etc.  This became much
     harder with __builtin_va_start, since we don't have a tree for a
     call to __builtin_saveregs to fall back on.  There was exactly one
     port (i860) that used this code, and I'm unconvinced it could actually
     handle the general case.  So we no longer try to handle anything
     weird and make the backend absorb the evil.  */

  error ("__builtin_saveregs not supported by this target");
  val = const0_rtx;
#endif

  seq = get_insns ();
  end_sequence ();

  /* Cache the result so later calls in this function reuse it.  */
  saveregs_value = val;

  /* Put the sequence after the NOTE that starts the function.  If this
     is inside a SEQUENCE, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insns_after (seq, get_insns ());
  pop_topmost_sequence ();

  return val;
}
1722 | ||
1723 | /* __builtin_args_info (N) returns word N of the arg space info | |
1724 | for the current function. The number and meanings of words | |
1725 | is controlled by the definition of CUMULATIVE_ARGS. */ | |
1726 | static rtx | |
1727 | expand_builtin_args_info (exp) | |
1728 | tree exp; | |
1729 | { | |
1730 | tree arglist = TREE_OPERAND (exp, 1); | |
1731 | int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int); | |
1732 | int *word_ptr = (int *) ¤t_function_args_info; | |
1733 | #if 0 | |
1734 | /* These are used by the code below that is if 0'ed away */ | |
1735 | int i; | |
1736 | tree type, elts, result; | |
1737 | #endif | |
1738 | ||
1739 | if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0) | |
dda90815 | 1740 | abort (); |
53800dbe | 1741 | |
1742 | if (arglist != 0) | |
1743 | { | |
1744 | tree arg = TREE_VALUE (arglist); | |
1745 | if (TREE_CODE (arg) != INTEGER_CST) | |
1746 | error ("argument of `__builtin_args_info' must be constant"); | |
1747 | else | |
1748 | { | |
1749 | int wordnum = TREE_INT_CST_LOW (arg); | |
1750 | ||
1751 | if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg)) | |
1752 | error ("argument of `__builtin_args_info' out of range"); | |
1753 | else | |
1754 | return GEN_INT (word_ptr[wordnum]); | |
1755 | } | |
1756 | } | |
1757 | else | |
1758 | error ("missing argument in `__builtin_args_info'"); | |
1759 | ||
1760 | return const0_rtx; | |
1761 | ||
1762 | #if 0 | |
1763 | for (i = 0; i < nwords; i++) | |
1764 | elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0)); | |
1765 | ||
1766 | type = build_array_type (integer_type_node, | |
1767 | build_index_type (build_int_2 (nwords, 0))); | |
1768 | result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts)); | |
1769 | TREE_CONSTANT (result) = 1; | |
1770 | TREE_STATIC (result) = 1; | |
a66c9326 | 1771 | result = build1 (INDIRECT_REF, build_pointer_type (type), result); |
53800dbe | 1772 | TREE_CONSTANT (result) = 1; |
1773 | return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD); | |
1774 | #endif | |
1775 | } | |
1776 | ||
a66c9326 | 1777 | /* Expand ARGLIST, from a call to __builtin_next_arg. */ |
53800dbe | 1778 | static rtx |
a66c9326 | 1779 | expand_builtin_next_arg (arglist) |
1780 | tree arglist; | |
53800dbe | 1781 | { |
53800dbe | 1782 | tree fntype = TREE_TYPE (current_function_decl); |
1783 | ||
1784 | if ((TYPE_ARG_TYPES (fntype) == 0 | |
1785 | || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype))) | |
1786 | == void_type_node)) | |
1787 | && ! current_function_varargs) | |
1788 | { | |
1789 | error ("`va_start' used in function with fixed args"); | |
1790 | return const0_rtx; | |
1791 | } | |
1792 | ||
1793 | if (arglist) | |
1794 | { | |
1795 | tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl)); | |
1796 | tree arg = TREE_VALUE (arglist); | |
1797 | ||
1798 | /* Strip off all nops for the sake of the comparison. This | |
1799 | is not quite the same as STRIP_NOPS. It does more. | |
1800 | We must also strip off INDIRECT_EXPR for C++ reference | |
1801 | parameters. */ | |
1802 | while (TREE_CODE (arg) == NOP_EXPR | |
1803 | || TREE_CODE (arg) == CONVERT_EXPR | |
1804 | || TREE_CODE (arg) == NON_LVALUE_EXPR | |
1805 | || TREE_CODE (arg) == INDIRECT_REF) | |
1806 | arg = TREE_OPERAND (arg, 0); | |
1807 | if (arg != last_parm) | |
1808 | warning ("second parameter of `va_start' not last named argument"); | |
1809 | } | |
1810 | else if (! current_function_varargs) | |
1811 | /* Evidently an out of date version of <stdarg.h>; can't validate | |
1812 | va_start's second argument, but can still work as intended. */ | |
1813 | warning ("`__builtin_next_arg' called without an argument"); | |
1814 | ||
1815 | return expand_binop (Pmode, add_optab, | |
1816 | current_function_internal_arg_pointer, | |
1817 | current_function_arg_offset_rtx, | |
1818 | NULL_RTX, 0, OPTAB_LIB_WIDEN); | |
1819 | } | |
1820 | ||
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  WAS_PTR is nonzero when the caller has
   already taken the address of the valist expression (as stdarg.h does
   for va_start/va_copy destinations).  Returns a tree that may safely
   be evaluated more than once.  */

static tree
stabilize_va_list (valist, was_ptr)
     tree valist;
     int was_ptr;
{
  if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
    {
      /* If stdarg.h took the address of an array-type valist that was passed
         as a parameter, we'll have taken the address of the parameter itself
         rather than the array as we'd intended.  Undo this mistake.  */

      if (was_ptr)
	{
	  STRIP_NOPS (valist);

	  /* Two cases: either &array, which decomposed to
	       <ptr <array <record> valist>>
	     or &ptr, which turned into
	       <ptr <ptr <record>>>
	     In the first case we'll need to put the ADDR_EXPR back
	     after frobbing the types as if &array[0].  */

	  if (TREE_CODE (valist) != ADDR_EXPR)
	    abort ();
	  valist = TREE_OPERAND (valist, 0);
	}

      if (TYPE_MAIN_VARIANT (TREE_TYPE (valist))
	  == TYPE_MAIN_VARIANT (va_list_type_node))
	{
	  /* &array case: rebuild the ADDR_EXPR with an element-pointer
	     type, propagating any side effects of the operand.  */
	  tree pt = build_pointer_type (TREE_TYPE (va_list_type_node));
	  valist = build1 (ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist)
	    = TREE_SIDE_EFFECTS (TREE_OPERAND (valist, 0));
	}
      else
	{
	  /* &ptr case: must already be a pointer to the va_list record.  */
	  if (! POINTER_TYPE_P (TREE_TYPE (valist))
	      || (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (valist)))
		  != TYPE_MAIN_VARIANT (TREE_TYPE (va_list_type_node))))
	    abort ();
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
    }
  else
    {
      if (! was_ptr)
	{
	  tree pt;

	  /* A side-effect-free expression is already safe to reuse.  */
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  /* Otherwise take its address so the side effects happen once.  */
	  pt = build_pointer_type (va_list_type_node);
	  valist = fold (build1 (ADDR_EXPR, pt, valist));
	  TREE_SIDE_EFFECTS (valist) = 1;
	}
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      /* Hand back an lvalue by dereferencing the (saved) pointer.  */
      valist = fold (build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)),
			     valist));
    }

  return valist;
}
1891 | ||
1892 | /* The "standard" implementation of va_start: just assign `nextarg' to | |
1893 | the variable. */ | |
1894 | void | |
1895 | std_expand_builtin_va_start (stdarg_p, valist, nextarg) | |
1896 | int stdarg_p ATTRIBUTE_UNUSED; | |
1897 | tree valist; | |
1898 | rtx nextarg; | |
1899 | { | |
1900 | tree t; | |
1901 | ||
8a15c04a | 1902 | if (!stdarg_p) |
1903 | nextarg = plus_constant (nextarg, -UNITS_PER_WORD); | |
1904 | ||
a66c9326 | 1905 | t = build (MODIFY_EXPR, TREE_TYPE (valist), valist, |
1906 | make_tree (ptr_type_node, nextarg)); | |
1907 | TREE_SIDE_EFFECTS (t) = 1; | |
1908 | ||
1909 | expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL); | |
1910 | } | |
1911 | ||
1912 | /* Expand ARGLIST, which from a call to __builtin_stdarg_va_start or | |
1913 | __builtin_varargs_va_start, depending on STDARG_P. */ | |
1914 | static rtx | |
1915 | expand_builtin_va_start (stdarg_p, arglist) | |
1916 | int stdarg_p; | |
1917 | tree arglist; | |
1918 | { | |
1919 | rtx nextarg; | |
1920 | tree chain = arglist, valist; | |
1921 | ||
1922 | if (stdarg_p) | |
1923 | nextarg = expand_builtin_next_arg (chain = TREE_CHAIN (arglist)); | |
1924 | else | |
1925 | nextarg = expand_builtin_next_arg (NULL_TREE); | |
1926 | ||
1927 | if (TREE_CHAIN (chain)) | |
1928 | error ("too many arguments to function `va_start'"); | |
1929 | ||
1930 | valist = stabilize_va_list (TREE_VALUE (arglist), 1); | |
1931 | ||
1932 | #ifdef EXPAND_BUILTIN_VA_START | |
1933 | EXPAND_BUILTIN_VA_START (stdarg_p, valist, nextarg); | |
1934 | #else | |
1935 | std_expand_builtin_va_start (stdarg_p, valist, nextarg); | |
1936 | #endif | |
1937 | ||
1938 | return const0_rtx; | |
1939 | } | |
1940 | ||
/* Allocate an alias set for use in storing and reading from the varargs
   spill area.  The set is created lazily on first use and shared by all
   subsequent callers.  */
int
get_varargs_alias_set ()
{
  /* -1 marks "not yet allocated".  */
  static int varargs_set = -1;

  if (varargs_set < 0)
    varargs_set = new_alias_set ();

  return varargs_set;
}
1951 | ||
/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.
   VALIST is the va_list lvalue, TYPE the requested argument type.
   Returns the address (in a register) from which the value is read.  */
rtx
std_expand_builtin_va_arg (valist, type)
     tree valist, type;
{
  tree addr_tree, t;
  HOST_WIDE_INT align;
  HOST_WIDE_INT rounded_size;
  rtx addr;

  /* Compute the rounded size of the type.  */
  align = PARM_BOUNDARY / BITS_PER_UNIT;
  rounded_size = (((int_size_in_bytes (type) + align - 1) / align) * align);

  /* Get AP.  */
  addr_tree = valist;
  if (BYTES_BIG_ENDIAN)
    {
      /* Small args are padded downward.  */

      /* ADJ is how much of the slot the object actually occupies; when
	 the object overflows a single slot, no skip is applied.  */
      HOST_WIDE_INT adj;
      adj = TREE_INT_CST_LOW (TYPE_SIZE (type)) / BITS_PER_UNIT;
      if (rounded_size > align)
	adj = rounded_size;

      /* Step past the pad bytes so we read the value itself.  */
      addr_tree = build (PLUS_EXPR, TREE_TYPE (addr_tree), addr_tree,
			 build_int_2 (rounded_size - adj, 0));
    }

  addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
  /* Copy to a register before AP is bumped below, so the caller reads
     from the pre-increment address.  */
  addr = copy_to_reg (addr);

  /* Compute new value for AP.  */
  t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
	     build (PLUS_EXPR, TREE_TYPE (valist), valist,
		    build_int_2 (rounded_size, 0)));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return addr;
}
1994 | ||
1995 | /* Expand __builtin_va_arg, which is not really a builtin function, but | |
1996 | a very special sort of operator. */ | |
1997 | rtx | |
1998 | expand_builtin_va_arg (valist, type) | |
1999 | tree valist, type; | |
2000 | { | |
2001 | rtx addr, result; | |
6cd005c9 | 2002 | tree promoted_type, want_va_type, have_va_type; |
a66c9326 | 2003 | |
6cd005c9 | 2004 | /* Verify that valist is of the proper type. */ |
2005 | ||
2006 | want_va_type = va_list_type_node; | |
2007 | have_va_type = TREE_TYPE (valist); | |
2008 | if (TREE_CODE (want_va_type) == ARRAY_TYPE) | |
2009 | { | |
2010 | /* If va_list is an array type, the argument may have decayed | |
2011 | to a pointer type, e.g. by being passed to another function. | |
2012 | In that case, unwrap both types so that we can compare the | |
2013 | underlying records. */ | |
2014 | if (TREE_CODE (have_va_type) == ARRAY_TYPE | |
2015 | || TREE_CODE (have_va_type) == POINTER_TYPE) | |
2016 | { | |
2017 | want_va_type = TREE_TYPE (want_va_type); | |
2018 | have_va_type = TREE_TYPE (have_va_type); | |
2019 | } | |
2020 | } | |
2021 | if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type)) | |
a66c9326 | 2022 | { |
e94026da | 2023 | error ("first argument to `va_arg' not of type `va_list'"); |
2024 | addr = const0_rtx; | |
2025 | } | |
6cd005c9 | 2026 | |
2027 | /* Generate a diagnostic for requesting data of a type that cannot | |
2028 | be passed through `...' due to type promotion at the call site. */ | |
e94026da | 2029 | else if ((promoted_type = (*lang_type_promotes_to) (type)) != NULL_TREE) |
2030 | { | |
2031 | const char *name = "<anonymous type>", *pname; | |
2032 | static int gave_help; | |
2033 | ||
2034 | if (TYPE_NAME (type)) | |
2035 | { | |
2036 | if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE) | |
2037 | name = IDENTIFIER_POINTER (TYPE_NAME (type)); | |
2038 | else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL | |
2039 | && DECL_NAME (TYPE_NAME (type))) | |
2040 | name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))); | |
2041 | } | |
2042 | if (TYPE_NAME (promoted_type)) | |
2043 | { | |
2044 | if (TREE_CODE (TYPE_NAME (promoted_type)) == IDENTIFIER_NODE) | |
2045 | pname = IDENTIFIER_POINTER (TYPE_NAME (promoted_type)); | |
2046 | else if (TREE_CODE (TYPE_NAME (promoted_type)) == TYPE_DECL | |
2047 | && DECL_NAME (TYPE_NAME (promoted_type))) | |
2048 | pname = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (promoted_type))); | |
2049 | } | |
2050 | ||
2051 | error ("`%s' is promoted to `%s' when passed through `...'", name, pname); | |
2052 | if (! gave_help) | |
2053 | { | |
2054 | gave_help = 1; | |
2055 | error ("(so you should pass `%s' not `%s' to `va_arg')", pname, name); | |
2056 | } | |
2057 | ||
a66c9326 | 2058 | addr = const0_rtx; |
2059 | } | |
2060 | else | |
2061 | { | |
2062 | /* Make it easier for the backends by protecting the valist argument | |
2063 | from multiple evaluations. */ | |
2064 | valist = stabilize_va_list (valist, 0); | |
2065 | ||
2066 | #ifdef EXPAND_BUILTIN_VA_ARG | |
2067 | addr = EXPAND_BUILTIN_VA_ARG (valist, type); | |
2068 | #else | |
2069 | addr = std_expand_builtin_va_arg (valist, type); | |
2070 | #endif | |
2071 | } | |
2072 | ||
2073 | result = gen_rtx_MEM (TYPE_MODE (type), addr); | |
2074 | MEM_ALIAS_SET (result) = get_varargs_alias_set (); | |
2075 | ||
2076 | return result; | |
2077 | } | |
2078 | ||
/* Expand ARGLIST, from a call to __builtin_va_end.  Returns const0_rtx;
   any work is done for side effects only.  */
static rtx
expand_builtin_va_end (arglist)
     tree arglist;
{
  tree valist = TREE_VALUE (arglist);

#ifdef EXPAND_BUILTIN_VA_END
  valist = stabilize_va_list (valist, 0);
  /* NOTE(review): the target macro is handed ARGLIST while the freshly
     stabilized VALIST above goes unused -- this looks like it was meant
     to pass VALIST; confirm against the targets that define
     EXPAND_BUILTIN_VA_END before changing.  */
  EXPAND_BUILTIN_VA_END(arglist);
#else
  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
#endif

  return const0_rtx;
}
2098 | ||
/* Expand ARGLIST, from a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  The first list element is the
   destination (address taken), the second the source.  Returns const0_rtx.  */
static rtx
expand_builtin_va_copy (arglist)
     tree arglist;
{
  tree dst, src, t;

  dst = TREE_VALUE (arglist);
  src = TREE_VALUE (TREE_CHAIN (arglist));

  /* DST arrives with its address taken (was_ptr), SRC as a value.  */
  dst = stabilize_va_list (dst, 1);
  src = stabilize_va_list (src, 0);

  if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
    {
      /* Non-array va_list: a plain assignment suffices.  */
      t = build (MODIFY_EXPR, va_list_type_node, dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      /* Array-type va_list: copy the underlying storage as a block.  */
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
			  VOIDmode, EXPAND_NORMAL);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      MEM_ALIAS_SET (dstb) = get_alias_set (TREE_TYPE (TREE_TYPE (dst)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      MEM_ALIAS_SET (srcb) = get_alias_set (TREE_TYPE (TREE_TYPE (src)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size,
		       TYPE_ALIGN (va_list_type_node) / BITS_PER_UNIT);
    }

  return const0_rtx;
}
2143 | ||
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  EXP is the CALL_EXPR; which of the two
   builtins is decoded from its fndecl.  Returns const0_rtx after a
   diagnostic, otherwise the requested address.  */
static rtx
expand_builtin_frame_address (exp)
     tree exp;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);

  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is the return address saved in that frame.  */
  if (arglist == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
	   || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
    {
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	error ("invalid arg to `__builtin_frame_address'");
      else
	error ("invalid arg to `__builtin_return_address'");
      return const0_rtx;
    }
  else
    {
      rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
					    TREE_INT_CST_LOW (TREE_VALUE (arglist)),
					    hard_frame_pointer_rtx);

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
	{
	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	    warning ("unsupported arg to `__builtin_frame_address'");
	  else
	    warning ("unsupported arg to `__builtin_return_address'");
	  return const0_rtx;
	}

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	return tem;

      /* For __builtin_return_address, ensure the value is usable as an
	 rvalue: force non-register, non-constant addresses into a reg.  */
      if (GET_CODE (tem) != REG
	  && ! CONSTANT_P (tem))
	tem = copy_to_mode_reg (Pmode, tem);
      return tem;
    }
}
2194 | ||
2195 | /* Expand a call to the alloca builtin, with arguments ARGLIST. Return 0 if | |
2196 | we failed and the caller should emit a normal call, otherwise try to get | |
2197 | the result in TARGET, if convenient. */ | |
2198 | static rtx | |
2199 | expand_builtin_alloca (arglist, target) | |
2200 | tree arglist; | |
2201 | rtx target; | |
2202 | { | |
2203 | rtx op0; | |
2204 | ||
2205 | if (arglist == 0 | |
2206 | /* Arg could be non-integer if user redeclared this fcn wrong. */ | |
2207 | || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE) | |
2208 | return 0; | |
2209 | ||
2210 | /* Compute the argument. */ | |
2211 | op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0); | |
2212 | ||
2213 | /* Allocate the desired space. */ | |
2214 | return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT); | |
2215 | } | |
2216 | ||
2217 | /* Expand a call to the ffs builtin. The arguments are in ARGLIST. | |
2218 | Return 0 if a normal call should be emitted rather than expanding the | |
2219 | function in-line. If convenient, the result should be placed in TARGET. | |
2220 | SUBTARGET may be used as the target for computing one of EXP's operands. */ | |
2221 | static rtx | |
2222 | expand_builtin_ffs (arglist, target, subtarget) | |
2223 | tree arglist; | |
2224 | rtx target, subtarget; | |
2225 | { | |
2226 | rtx op0; | |
2227 | if (arglist == 0 | |
2228 | /* Arg could be non-integer if user redeclared this fcn wrong. */ | |
2229 | || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE) | |
2230 | return 0; | |
2231 | ||
2232 | /* Compute the argument. */ | |
2233 | op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0); | |
2234 | /* Compute ffs, into TARGET if possible. | |
2235 | Set TARGET to wherever the result comes back. */ | |
2236 | target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))), | |
2237 | ffs_optab, op0, target, 1); | |
2238 | if (target == 0) | |
2239 | abort (); | |
2240 | return target; | |
2241 | } | |
2242 | \f | |
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.

   Dispatches on the builtin's function code; any case that cannot be
   expanded inline falls out of the switch and is emitted as a normal
   library call.  */

rtx
expand_builtin (exp, target, subtarget, mode, ignore)
     tree exp;
     rtx target;
     rtx subtarget;
     enum machine_mode mode;
     int ignore;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  /* Give the machine description first crack at machine-specific builtins.  */
#ifdef MD_EXPAND_BUILTIN
  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return MD_EXPAND_BUILTIN (exp, target, subtarget, mode, ignore);
#endif

  /* When not optimizing, generate calls to library functions for a certain
     set of builtins.  */
  if (! optimize && ! CALLED_AS_BUILT_IN (fndecl)
      && (fcode == BUILT_IN_SIN || fcode == BUILT_IN_COS
	  || fcode == BUILT_IN_FSQRT || fcode == BUILT_IN_MEMSET
	  || fcode == BUILT_IN_MEMCPY || fcode == BUILT_IN_MEMCMP
	  || fcode == BUILT_IN_STRLEN || fcode == BUILT_IN_STRCPY
	  || fcode == BUILT_IN_STRCMP || fcode == BUILT_IN_FFS))
    return expand_call (exp, target, ignore);

  switch (fcode)
    {
    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_FABS:
      /* build_function_call changes these into ABS_EXPR.  */
      abort ();

    case BUILT_IN_SIN:
    case BUILT_IN_COS:
      /* Treat these like sqrt, but only if the user asks for them.  */
      if (! flag_fast_math)
	break;
      /* FALLTHRU */
    case BUILT_IN_FSQRT:
      target = expand_builtin_mathfn (exp, target, subtarget);
      if (target)
	return target;
      break;

    case BUILT_IN_FMOD:
      break;

    case BUILT_IN_APPLY_ARGS:
      return expand_builtin_apply_args ();

      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
	 FUNCTION with a copy of the parameters described by
	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
	 allocated on the stack into which is stored all the registers
	 that might possibly be used for returning the result of a
	 function.  ARGUMENTS is the value returned by
	 __builtin_apply_args.  ARGSIZE is the number of bytes of
	 arguments that must be copied.  ??? How should this value be
	 computed?  We'll also need a safe worst case value for varargs
	 functions.  */
    case BUILT_IN_APPLY:
      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
	  || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
	return const0_rtx;
      else
	{
	  int i;
	  tree t;
	  rtx ops[3];

	  /* Evaluate the three arguments in order.  */
	  for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
	    ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);

	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
	}

      /* __builtin_return (RESULT) causes the function to return the
	 value described by RESULT.  RESULT is address of the block of
	 memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (arglist
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
	expand_builtin_return (expand_expr (TREE_VALUE (arglist),
					    NULL_RTX, VOIDmode, 0));
      return const0_rtx;

    case BUILT_IN_SAVEREGS:
      return expand_builtin_saveregs ();

    case BUILT_IN_ARGS_INFO:
      return expand_builtin_args_info (exp);

      /* Return the address of the first anonymous stack arg.  */
    case BUILT_IN_NEXT_ARG:
      return expand_builtin_next_arg (arglist);

    case BUILT_IN_CLASSIFY_TYPE:
      return expand_builtin_classify_type (arglist);

    case BUILT_IN_CONSTANT_P:
      return expand_builtin_constant_p (exp);

    case BUILT_IN_FRAME_ADDRESS:
    case BUILT_IN_RETURN_ADDRESS:
      return expand_builtin_frame_address (exp);

      /* Returns the address of the area where the structure is returned.
	 0 otherwise.  */
    case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      if (arglist != 0
	  || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
	  || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
	return const0_rtx;
      else
	return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);

    case BUILT_IN_ALLOCA:
      target = expand_builtin_alloca (arglist, target);
      if (target)
	return target;
      break;

    case BUILT_IN_FFS:
      target = expand_builtin_ffs (arglist, target, subtarget);
      if (target)
	return target;
      break;

    case BUILT_IN_STRLEN:
      target = expand_builtin_strlen (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCPY:
      target = expand_builtin_strcpy (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMCPY:
      target = expand_builtin_memcpy (arglist);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMSET:
      target = expand_builtin_memset (exp);
      if (target)
	return target;
      break;

      /* These comparison functions need an instruction that returns an actual
	 index.  An ordinary compare that just sets the condition codes
	 is not enough.  */
#ifdef HAVE_cmpstrsi
    case BUILT_IN_STRCMP:
      target = expand_builtin_strcmp (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMCMP:
      target = expand_builtin_memcmp (exp, arglist, target);
      if (target)
	return target;
      break;
#else
    case BUILT_IN_STRCMP:
    case BUILT_IN_MEMCMP:
      break;
#endif

    case BUILT_IN_SETJMP:
      if (arglist == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
	break;
      else
	{
	  rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
				      VOIDmode, 0);
	  /* Both the normal and the receiver path resume at LAB.  */
	  rtx lab = gen_label_rtx ();
	  rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab);
	  emit_label (lab);
	  return ret;
	}

      /* __builtin_longjmp is passed a pointer to an array of five words.
	 It's similar to the C library longjmp function but works with
	 __builtin_setjmp above.  */
    case BUILT_IN_LONGJMP:
      if (arglist == 0 || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
	break;
      else
	{
	  rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
				      VOIDmode, 0);
	  rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
				   NULL_RTX, VOIDmode, 0);

	  /* Only a second argument of literal 1 is supported.  */
	  if (value != const1_rtx)
	    {
	      error ("__builtin_longjmp second argument must be 1");
	      return const0_rtx;
	    }

	  expand_builtin_longjmp (buf_addr, value);
	  return const0_rtx;
	}

    case BUILT_IN_TRAP:
#ifdef HAVE_trap
      if (HAVE_trap)
	emit_insn (gen_trap ());
      else
#endif
	error ("__builtin_trap not supported by this target");
      emit_barrier ();
      return const0_rtx;

      /* Various hooks for the DWARF 2 __throw routine.  */
    case BUILT_IN_UNWIND_INIT:
      expand_builtin_unwind_init ();
      return const0_rtx;
    case BUILT_IN_DWARF_CFA:
      return virtual_cfa_rtx;
#ifdef DWARF2_UNWIND_INFO
    case BUILT_IN_DWARF_FP_REGNUM:
      return expand_builtin_dwarf_fp_regnum ();
    case BUILT_IN_DWARF_REG_SIZE:
      return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
#endif
    case BUILT_IN_FROB_RETURN_ADDR:
      return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
    case BUILT_IN_EXTRACT_RETURN_ADDR:
      return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
    case BUILT_IN_EH_RETURN:
      expand_builtin_eh_return (TREE_VALUE (arglist),
				TREE_VALUE (TREE_CHAIN (arglist)),
				TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))));
      return const0_rtx;
    case BUILT_IN_VARARGS_START:
      return expand_builtin_va_start (0, arglist);
    case BUILT_IN_STDARG_START:
      return expand_builtin_va_start (1, arglist);
    case BUILT_IN_VA_END:
      return expand_builtin_va_end (arglist);
    case BUILT_IN_VA_COPY:
      return expand_builtin_va_copy (arglist);

    default:			/* just do library call, if unknown builtin */
      error ("built-in function `%s' not currently supported",
	     IDENTIFIER_POINTER (DECL_NAME (fndecl)));
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}