gcc/builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "tree-gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "predict.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50
51 #define CALLED_AS_BUILT_IN(NODE) \
52 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
53
54 #ifndef PAD_VARARGS_DOWN
55 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
56 #endif
57
58 /* Define the names of the builtin function types and codes. */
59 const char *const built_in_class_names[4]
60 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
61
62 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM) #X,
63 const char *const built_in_names[(int) END_BUILTINS] =
64 {
65 #include "builtins.def"
66 };
67 #undef DEF_BUILTIN
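
/* Illustrative note (editor's sketch, not part of the original source).
   The DEF_BUILTIN definition above stringizes its first argument, so each
   entry of builtins.def contributes one string to built_in_names.  A
   hypothetical entry such as

     DEF_BUILTIN (BUILT_IN_MEMCPY, "memcpy", ...)

   expands to the initializer element "BUILT_IN_MEMCPY", and because the
   enum built_in_function values are generated from builtins.def in the
   same order, built_in_names[BUILT_IN_MEMCPY] is that string.  */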
68
69 /* Set up an array of _DECL trees; make sure each element is
70 initialized to NULL_TREE. */
71 tree built_in_decls[(int) END_BUILTINS];
72 /* Declarations used when constructing the builtin implicitly in the compiler.
73 An entry may be NULL_TREE when the implicit call cannot be used (for instance
74 when the runtime is not required to implement the function call in all cases). */
75 tree implicit_built_in_decls[(int) END_BUILTINS];
76
77 static int get_pointer_alignment (tree, unsigned int);
78 static const char *c_getstr (tree);
79 static rtx c_readstr (const char *, enum machine_mode);
80 static int target_char_cast (tree, char *);
81 static rtx get_memory_rtx (tree);
82 static tree build_string_literal (int, const char *);
83 static int apply_args_size (void);
84 static int apply_result_size (void);
85 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
86 static rtx result_vector (int, rtx);
87 #endif
88 static rtx expand_builtin_setjmp (tree, rtx);
89 static void expand_builtin_update_setjmp_buf (rtx);
90 static void expand_builtin_prefetch (tree);
91 static rtx expand_builtin_apply_args (void);
92 static rtx expand_builtin_apply_args_1 (void);
93 static rtx expand_builtin_apply (rtx, rtx, rtx);
94 static void expand_builtin_return (rtx);
95 static enum type_class type_to_class (tree);
96 static rtx expand_builtin_classify_type (tree);
97 static void expand_errno_check (tree, rtx);
98 static rtx expand_builtin_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
100 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
101 static rtx expand_builtin_args_info (tree);
102 static rtx expand_builtin_next_arg (void);
103 static rtx expand_builtin_va_start (tree);
104 static rtx expand_builtin_va_end (tree);
105 static rtx expand_builtin_va_copy (tree);
106 static rtx expand_builtin_memcmp (tree, tree, rtx, enum machine_mode);
107 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
108 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
109 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
110 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
112 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
113 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
114 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_mempcpy (tree, tree, rtx, enum machine_mode, int);
116 static rtx expand_builtin_memmove (tree, tree, rtx, enum machine_mode);
117 static rtx expand_builtin_bcopy (tree, tree);
118 static rtx expand_builtin_strcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
120 static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
121 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
122 static rtx builtin_memset_read_str (void *, HOST_WIDE_INT, enum machine_mode);
123 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
124 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
125 static rtx expand_builtin_bzero (tree);
126 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
127 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
129 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
130 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
131 static rtx expand_builtin_alloca (tree, rtx);
132 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
133 static rtx expand_builtin_frame_address (tree, tree);
134 static rtx expand_builtin_fputs (tree, rtx, bool);
135 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
136 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
137 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
138 static tree stabilize_va_list (tree, int);
139 static rtx expand_builtin_expect (tree, rtx);
140 static tree fold_builtin_constant_p (tree);
141 static tree fold_builtin_classify_type (tree);
142 static tree fold_builtin_strlen (tree);
143 static tree fold_builtin_inf (tree, int);
144 static tree fold_builtin_nan (tree, tree, int);
145 static int validate_arglist (tree, ...);
146 static bool integer_valued_real_p (tree);
147 static tree fold_trunc_transparent_mathfn (tree);
148 static bool readonly_data_expr (tree);
149 static rtx expand_builtin_fabs (tree, rtx, rtx);
150 static rtx expand_builtin_signbit (tree, rtx);
151 static tree fold_builtin_cabs (tree, tree);
152 static tree fold_builtin_sqrt (tree, tree);
153 static tree fold_builtin_cbrt (tree, tree);
154 static tree fold_builtin_pow (tree, tree, tree);
155 static tree fold_builtin_sin (tree);
156 static tree fold_builtin_cos (tree, tree, tree);
157 static tree fold_builtin_tan (tree);
158 static tree fold_builtin_atan (tree, tree);
159 static tree fold_builtin_trunc (tree);
160 static tree fold_builtin_floor (tree);
161 static tree fold_builtin_ceil (tree);
162 static tree fold_builtin_round (tree);
163 static tree fold_builtin_bitop (tree);
164 static tree fold_builtin_memcpy (tree);
165 static tree fold_builtin_mempcpy (tree, tree, int);
166 static tree fold_builtin_memmove (tree, tree);
167 static tree fold_builtin_strchr (tree);
168 static tree fold_builtin_memcmp (tree);
169 static tree fold_builtin_strcmp (tree);
170 static tree fold_builtin_strncmp (tree);
171 static tree fold_builtin_signbit (tree);
172 static tree fold_builtin_copysign (tree, tree);
173 static tree fold_builtin_isascii (tree);
174 static tree fold_builtin_toascii (tree);
175 static tree fold_builtin_isdigit (tree);
176 static tree fold_builtin_fabs (tree, tree);
177 static tree fold_builtin_abs (tree, tree);
178 static tree fold_builtin_unordered_cmp (tree, enum tree_code, enum tree_code);
179 static tree fold_builtin_1 (tree, bool);
180
181 static tree fold_builtin_strpbrk (tree);
182 static tree fold_builtin_strstr (tree);
183 static tree fold_builtin_strrchr (tree);
184 static tree fold_builtin_strcat (tree);
185 static tree fold_builtin_strncat (tree);
186 static tree fold_builtin_strspn (tree);
187 static tree fold_builtin_strcspn (tree);
188 static tree fold_builtin_sprintf (tree, int);
189
190
191 /* Return the alignment in bits of EXP, a pointer-valued expression.
192 But don't return more than MAX_ALIGN no matter what.
193 The alignment returned is, by default, the alignment of the thing that
194 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
195
196 Otherwise, look at the expression to see if we can do better, i.e., if the
197 expression is actually pointing at an object whose alignment is tighter. */
198
199 static int
200 get_pointer_alignment (tree exp, unsigned int max_align)
201 {
202 unsigned int align, inner;
203
204 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
205 return 0;
206
207 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
208 align = MIN (align, max_align);
209
210 while (1)
211 {
212 switch (TREE_CODE (exp))
213 {
214 case NOP_EXPR:
215 case CONVERT_EXPR:
216 case NON_LVALUE_EXPR:
217 exp = TREE_OPERAND (exp, 0);
218 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
219 return align;
220
221 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
222 align = MIN (inner, max_align);
223 break;
224
225 case PLUS_EXPR:
226 /* If sum of pointer + int, restrict our maximum alignment to that
227 imposed by the integer. If not, we can't do any better than
228 ALIGN. */
229 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
230 return align;
231
232 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
233 & (max_align / BITS_PER_UNIT - 1))
234 != 0)
235 max_align >>= 1;
236
237 exp = TREE_OPERAND (exp, 0);
238 break;
239
240 case ADDR_EXPR:
241 /* See what we are pointing at and look at its alignment. */
242 exp = TREE_OPERAND (exp, 0);
243 if (TREE_CODE (exp) == FUNCTION_DECL)
244 align = FUNCTION_BOUNDARY;
245 else if (DECL_P (exp))
246 align = DECL_ALIGN (exp);
247 #ifdef CONSTANT_ALIGNMENT
248 else if (CONSTANT_CLASS_P (exp))
249 align = CONSTANT_ALIGNMENT (exp, align);
250 #endif
251 return MIN (align, max_align);
252
253 default:
254 return align;
255 }
256 }
257 }
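
/* Illustrative example (editor's sketch, not part of the original source).
   For an object declared with 16-byte alignment (DECL_ALIGN == 128), the
   ADDR_EXPR case above reports MIN (128, MAX_ALIGN) for its address.  For
   the same address plus the constant 4, the PLUS_EXPR case first shrinks
   MAX_ALIGN until 4 is a multiple of MAX_ALIGN / BITS_PER_UNIT, i.e. down
   to 32 bits, so no more than 32-bit alignment is reported for the sum.  */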
258
259 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
260 way, because it could contain a zero byte in the middle.
261 TREE_STRING_LENGTH is the size of the character array, not the string.
262
263 ONLY_VALUE should be nonzero if the result is not going to be emitted
264 into the instruction stream and zero if it is going to be expanded.
265 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
266 is returned, otherwise NULL, since
267 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
268 evaluate the side-effects.
269
270 The value returned is of type `ssizetype'.
271
272 Unfortunately, string_constant can't access the values of const char
273 arrays with initializers, so neither can we do so here. */
274
275 tree
276 c_strlen (tree src, int only_value)
277 {
278 tree offset_node;
279 HOST_WIDE_INT offset;
280 int max;
281 const char *ptr;
282
283 STRIP_NOPS (src);
284 if (TREE_CODE (src) == COND_EXPR
285 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
286 {
287 tree len1, len2;
288
289 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
290 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
291 if (tree_int_cst_equal (len1, len2))
292 return len1;
293 }
294
295 if (TREE_CODE (src) == COMPOUND_EXPR
296 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
297 return c_strlen (TREE_OPERAND (src, 1), only_value);
298
299 src = string_constant (src, &offset_node);
300 if (src == 0)
301 return 0;
302
303 max = TREE_STRING_LENGTH (src) - 1;
304 ptr = TREE_STRING_POINTER (src);
305
306 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
307 {
308 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
309 compute the offset to the following null if we don't know where to
310 start searching for it. */
311 int i;
312
313 for (i = 0; i < max; i++)
314 if (ptr[i] == 0)
315 return 0;
316
317 /* We don't know the starting offset, but we do know that the string
318 has no internal zero bytes. We can assume that the offset falls
319 within the bounds of the string; otherwise, the programmer deserves
320 what he gets. Subtract the offset from the length of the string,
321 and return that. This would perhaps not be valid if we were dealing
322 with named arrays in addition to literal string constants. */
323
324 return size_diffop (size_int (max), offset_node);
325 }
326
327 /* We have a known offset into the string. Start searching there for
328 a null character if we can represent it as a single HOST_WIDE_INT. */
329 if (offset_node == 0)
330 offset = 0;
331 else if (! host_integerp (offset_node, 0))
332 offset = -1;
333 else
334 offset = tree_low_cst (offset_node, 0);
335
336 /* If the offset is known to be out of bounds, warn, and call strlen at
337 runtime. */
338 if (offset < 0 || offset > max)
339 {
340 warning ("offset outside bounds of constant string");
341 return 0;
342 }
343
344 /* Use strlen to search for the first zero byte. Since any strings
345 constructed with build_string will have nulls appended, we win even
346 if we get handed something like (char[4])"abcd".
347
348 Since OFFSET is our starting index into the string, no further
349 calculation is needed. */
350 return ssize_int (strlen (ptr + offset));
351 }
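
/* Illustrative examples (editor's sketch, not part of the original source).
   Given the constant "hello" (TREE_STRING_LENGTH == 6, so MAX == 5),
   c_strlen returns ssize_int (5) with no offset and ssize_int (3) with a
   constant offset of 2, while a constant offset greater than 5 triggers
   the warning above and returns 0.  Given "foo\0bar" with a non-constant
   offset, the scan finds the embedded zero byte and returns 0, leaving the
   length to be computed by strlen at runtime.  */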
352
353 /* Return a char pointer for a C string if it is a string constant
354 or sum of string constant and integer constant. */
355
356 static const char *
357 c_getstr (tree src)
358 {
359 tree offset_node;
360
361 src = string_constant (src, &offset_node);
362 if (src == 0)
363 return 0;
364
365 if (offset_node == 0)
366 return TREE_STRING_POINTER (src);
367 else if (!host_integerp (offset_node, 1)
368 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
369 return 0;
370
371 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
372 }
373
374 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
375 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
376
377 static rtx
378 c_readstr (const char *str, enum machine_mode mode)
379 {
380 HOST_WIDE_INT c[2];
381 HOST_WIDE_INT ch;
382 unsigned int i, j;
383
384 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
385
386 c[0] = 0;
387 c[1] = 0;
388 ch = 1;
389 for (i = 0; i < GET_MODE_SIZE (mode); i++)
390 {
391 j = i;
392 if (WORDS_BIG_ENDIAN)
393 j = GET_MODE_SIZE (mode) - i - 1;
394 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
395 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
396 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
397 j *= BITS_PER_UNIT;
398 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
399
400 if (ch)
401 ch = (unsigned char) str[i];
402 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
403 }
404 return immed_double_const (c[0], c[1], mode);
405 }
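
/* Illustrative example (editor's sketch, not part of the original source).
   Reading the string "ab" in SImode places str[0] in the least significant
   byte on a little-endian target, giving the CONST_INT 0x6261, and in the
   most significant byte on a big-endian target, giving 0x61620000; once the
   terminating zero byte is reached, CH stays zero and the remaining bytes
   of the mode are zero-filled.  */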
406
407 /* Cast a target constant CST to target CHAR and if that value fits into
408 the host char type, return zero and put that value into the variable
409 pointed to by P. */
410
411 static int
412 target_char_cast (tree cst, char *p)
413 {
414 unsigned HOST_WIDE_INT val, hostval;
415
416 if (!host_integerp (cst, 1)
417 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
418 return 1;
419
420 val = tree_low_cst (cst, 1);
421 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
422 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
423
424 hostval = val;
425 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
426 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
427
428 if (val != hostval)
429 return 1;
430
431 *p = hostval;
432 return 0;
433 }
434
435 /* Similar to save_expr, but assumes that arbitrary code is not executed
436 in between the multiple evaluations. In particular, we assume that a
437 non-addressable local variable will not be modified. */
438
439 static tree
440 builtin_save_expr (tree exp)
441 {
442 if (TREE_ADDRESSABLE (exp) == 0
443 && (TREE_CODE (exp) == PARM_DECL
444 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
445 return exp;
446
447 return save_expr (exp);
448 }
449
450 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
451 times to get the address of either a higher stack frame, or a return
452 address located within it (depending on FNDECL_CODE). */
453
454 static rtx
455 expand_builtin_return_addr (enum built_in_function fndecl_code, int count,
456 rtx tem)
457 {
458 int i;
459
460 /* Some machines need special handling before we can access
461 arbitrary frames. For example, on the sparc, we must first flush
462 all register windows to the stack. */
463 #ifdef SETUP_FRAME_ADDRESSES
464 if (count > 0)
465 SETUP_FRAME_ADDRESSES ();
466 #endif
467
468 /* On the sparc, the return address is not in the frame, it is in a
469 register. There is no way to access it off of the current frame
470 pointer, but it can be accessed off the previous frame pointer by
471 reading the value from the register window save area. */
472 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
473 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
474 count--;
475 #endif
476
477 /* Scan back COUNT frames to the specified frame. */
478 for (i = 0; i < count; i++)
479 {
480 /* Assume the dynamic chain pointer is in the word that the
481 frame address points to, unless otherwise specified. */
482 #ifdef DYNAMIC_CHAIN_ADDRESS
483 tem = DYNAMIC_CHAIN_ADDRESS (tem);
484 #endif
485 tem = memory_address (Pmode, tem);
486 tem = gen_rtx_MEM (Pmode, tem);
487 set_mem_alias_set (tem, get_frame_alias_set ());
488 tem = copy_to_reg (tem);
489 }
490
491 /* For __builtin_frame_address, return what we've got. */
492 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
493 return tem;
494
495 /* For __builtin_return_address, get the return address from that
496 frame. */
497 #ifdef RETURN_ADDR_RTX
498 tem = RETURN_ADDR_RTX (count, tem);
499 #else
500 tem = memory_address (Pmode,
501 plus_constant (tem, GET_MODE_SIZE (Pmode)));
502 tem = gen_rtx_MEM (Pmode, tem);
503 set_mem_alias_set (tem, get_frame_alias_set ());
504 #endif
505 return tem;
506 }
507
508 /* Alias set used for setjmp buffer. */
509 static HOST_WIDE_INT setjmp_alias_set = -1;
510
511 /* Construct the leading half of a __builtin_setjmp call. Control will
512 return to RECEIVER_LABEL. This is used directly by sjlj exception
513 handling code. */
514
515 void
516 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
517 {
518 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
519 rtx stack_save;
520 rtx mem;
521
522 if (setjmp_alias_set == -1)
523 setjmp_alias_set = new_alias_set ();
524
525 buf_addr = convert_memory_address (Pmode, buf_addr);
526
527 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
528
529 /* We store the frame pointer and the address of receiver_label in
530 the buffer and use the rest of it for the stack save area, which
531 is machine-dependent. */
532
533 mem = gen_rtx_MEM (Pmode, buf_addr);
534 set_mem_alias_set (mem, setjmp_alias_set);
535 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
536
537 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
538 set_mem_alias_set (mem, setjmp_alias_set);
539
540 emit_move_insn (validize_mem (mem),
541 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
542
543 stack_save = gen_rtx_MEM (sa_mode,
544 plus_constant (buf_addr,
545 2 * GET_MODE_SIZE (Pmode)));
546 set_mem_alias_set (stack_save, setjmp_alias_set);
547 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
548
549 /* If there is further processing to do, do it. */
550 #ifdef HAVE_builtin_setjmp_setup
551 if (HAVE_builtin_setjmp_setup)
552 emit_insn (gen_builtin_setjmp_setup (buf_addr));
553 #endif
554
555 /* Tell optimize_save_area_alloca that extra work will need
556 to be done during alloca. */
557 current_function_calls_setjmp = 1;
558
559 /* Set this so all the registers get saved in our frame; we need to be
560 able to copy the saved values for any registers from frames we unwind. */
561 current_function_has_nonlocal_label = 1;
562 }
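
/* Illustrative layout sketch (editor's note, not part of the original
   source).  The code above uses the buffer at BUF_ADDR as follows, with
   each of the first two slots GET_MODE_SIZE (Pmode) bytes wide:

     word 0   frame value from targetm.builtin_setjmp_frame_value ()
     word 1   address of RECEIVER_LABEL
     word 2+  machine-dependent stack save area, saved in
              STACK_SAVEAREA_MODE (SAVE_NONLOCAL)

   expand_builtin_longjmp and expand_builtin_update_setjmp_buf below read
   the slots back at the same offsets.  */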
563
564 /* Construct the trailing part of a __builtin_setjmp call.
565 This is used directly by sjlj exception handling code. */
566
567 void
568 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
569 {
570 /* Clobber the FP when we get here, so we have to make sure it's
571 marked as used by this function. */
572 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
573
574 /* Mark the static chain as clobbered here so life information
575 doesn't get messed up for it. */
576 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
577
578 /* Now put in the code to restore the frame pointer, and argument
579 pointer, if needed. */
580 #ifdef HAVE_nonlocal_goto
581 if (! HAVE_nonlocal_goto)
582 #endif
583 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
584
585 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
586 if (fixed_regs[ARG_POINTER_REGNUM])
587 {
588 #ifdef ELIMINABLE_REGS
589 size_t i;
590 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
591
592 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
593 if (elim_regs[i].from == ARG_POINTER_REGNUM
594 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
595 break;
596
597 if (i == ARRAY_SIZE (elim_regs))
598 #endif
599 {
600 /* Now restore our arg pointer from the address at which it
601 was saved in our stack frame. */
602 emit_move_insn (virtual_incoming_args_rtx,
603 copy_to_reg (get_arg_pointer_save_area (cfun)));
604 }
605 }
606 #endif
607
608 #ifdef HAVE_builtin_setjmp_receiver
609 if (HAVE_builtin_setjmp_receiver)
610 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
611 else
612 #endif
613 #ifdef HAVE_nonlocal_goto_receiver
614 if (HAVE_nonlocal_goto_receiver)
615 emit_insn (gen_nonlocal_goto_receiver ());
616 else
617 #endif
618 { /* Nothing */ }
619
620 /* @@@ This is a kludge. Not all machine descriptions define a blockage
621 insn, but we must not allow the code we just generated to be reordered
622 by scheduling. Specifically, the update of the frame pointer must
623 happen immediately, not later. So emit an ASM_INPUT to act as blockage
624 insn. */
625 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
626 }
627
628 /* __builtin_setjmp is passed a pointer to an array of five words (not
629 all will be used on all machines). It operates similarly to the C
630 library function of the same name, but is more efficient. Much of
631 the code below (and for longjmp) is copied from the handling of
632 non-local gotos.
633
634 NOTE: This is intended for use by GNAT and the exception handling
635 scheme in the compiler and will only work in the method used by
636 them. */
637
638 static rtx
639 expand_builtin_setjmp (tree arglist, rtx target)
640 {
641 rtx buf_addr, next_lab, cont_lab;
642
643 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
644 return NULL_RTX;
645
646 if (target == 0 || !REG_P (target)
647 || REGNO (target) < FIRST_PSEUDO_REGISTER)
648 target = gen_reg_rtx (TYPE_MODE (integer_type_node));
649
650 buf_addr = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
651
652 next_lab = gen_label_rtx ();
653 cont_lab = gen_label_rtx ();
654
655 expand_builtin_setjmp_setup (buf_addr, next_lab);
656
657 /* Set TARGET to zero and branch to the continue label. Use emit_jump to
658 ensure that pending stack adjustments are flushed. */
659 emit_move_insn (target, const0_rtx);
660 emit_jump (cont_lab);
661
662 emit_label (next_lab);
663
664 expand_builtin_setjmp_receiver (next_lab);
665
666 /* Set TARGET to one. */
667 emit_move_insn (target, const1_rtx);
668 emit_label (cont_lab);
669
670 /* Tell flow about the strange goings on. Putting `next_lab' on
671 `nonlocal_goto_handler_labels' indicates that function
672 calls may traverse the arc back to this label. */
673
674 current_function_has_nonlocal_label = 1;
675 nonlocal_goto_handler_labels
676 = gen_rtx_EXPR_LIST (VOIDmode, next_lab, nonlocal_goto_handler_labels);
677
678 return target;
679 }
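
/* Illustrative usage sketch (editor's note, not part of the original
   source; the buffer declaration and function names are hypothetical):

     void *buf[5];

     if (__builtin_setjmp (buf) == 0)
       do_normal_work ();    <- direct path, setjmp returned 0
     else
       handle_unwind ();     <- reached after __builtin_longjmp (buf, 1)

   The second argument to __builtin_longjmp must be 1, and as noted in
   expand_builtin_longjmp below, the jump is expected to come from a
   different function than the one containing the __builtin_setjmp.  */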
680
681 /* __builtin_longjmp is passed a pointer to an array of five words (not
682 all will be used on all machines). It operates similarly to the C
683 library function of the same name, but is more efficient. Much of
684 the code below is copied from the handling of non-local gotos.
685
686 NOTE: This is intended for use by GNAT and the exception handling
687 scheme in the compiler and will only work in the method used by
688 them. */
689
690 static void
691 expand_builtin_longjmp (rtx buf_addr, rtx value)
692 {
693 rtx fp, lab, stack, insn, last;
694 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
695
696 if (setjmp_alias_set == -1)
697 setjmp_alias_set = new_alias_set ();
698
699 buf_addr = convert_memory_address (Pmode, buf_addr);
700
701 buf_addr = force_reg (Pmode, buf_addr);
702
703 /* We used to store value in static_chain_rtx, but that fails if pointers
704 are smaller than integers. We instead require that the user pass
705 a second argument of 1, because that is what builtin_setjmp will
706 return. This also makes EH slightly more efficient, since we are no
707 longer copying around a value that we don't care about. */
708 gcc_assert (value == const1_rtx);
709
710 last = get_last_insn ();
711 #ifdef HAVE_builtin_longjmp
712 if (HAVE_builtin_longjmp)
713 emit_insn (gen_builtin_longjmp (buf_addr));
714 else
715 #endif
716 {
717 fp = gen_rtx_MEM (Pmode, buf_addr);
718 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
719 GET_MODE_SIZE (Pmode)));
720
721 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
722 2 * GET_MODE_SIZE (Pmode)));
723 set_mem_alias_set (fp, setjmp_alias_set);
724 set_mem_alias_set (lab, setjmp_alias_set);
725 set_mem_alias_set (stack, setjmp_alias_set);
726
727 /* Pick up FP, label, and SP from the block and jump. This code is
728 from expand_goto in stmt.c; see there for detailed comments. */
729 #if HAVE_nonlocal_goto
730 if (HAVE_nonlocal_goto)
731 /* We have to pass a value to the nonlocal_goto pattern that will
732 get copied into the static_chain pointer, but it does not matter
733 what that value is, because builtin_setjmp does not use it. */
734 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
735 else
736 #endif
737 {
738 lab = copy_to_reg (lab);
739
740 emit_insn (gen_rtx_CLOBBER (VOIDmode,
741 gen_rtx_MEM (BLKmode,
742 gen_rtx_SCRATCH (VOIDmode))));
743 emit_insn (gen_rtx_CLOBBER (VOIDmode,
744 gen_rtx_MEM (BLKmode,
745 hard_frame_pointer_rtx)));
746
747 emit_move_insn (hard_frame_pointer_rtx, fp);
748 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
749
750 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
751 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
752 emit_indirect_jump (lab);
753 }
754 }
755
756 /* Search backwards and mark the jump insn as a non-local goto.
757 Note that this precludes the use of __builtin_longjmp to a
758 __builtin_setjmp target in the same function. However, we've
759 already cautioned the user that these functions are for
760 internal exception handling use only. */
761 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
762 {
763 gcc_assert (insn != last);
764
765 if (JUMP_P (insn))
766 {
767 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
768 REG_NOTES (insn));
769 break;
770 }
771 else if (CALL_P (insn))
772 break;
773 }
774 }
775
776 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
777 and the address of the save area. */
778
779 static rtx
780 expand_builtin_nonlocal_goto (tree arglist)
781 {
782 tree t_label, t_save_area;
783 rtx r_label, r_save_area, r_fp, r_sp, insn;
784
785 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
786 return NULL_RTX;
787
788 t_label = TREE_VALUE (arglist);
789 arglist = TREE_CHAIN (arglist);
790 t_save_area = TREE_VALUE (arglist);
791
792 r_label = expand_expr (t_label, NULL_RTX, VOIDmode, 0);
793 r_label = convert_memory_address (Pmode, r_label);
794 r_save_area = expand_expr (t_save_area, NULL_RTX, VOIDmode, 0);
795 r_save_area = convert_memory_address (Pmode, r_save_area);
796 r_fp = gen_rtx_MEM (Pmode, r_save_area);
797 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
798 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
799
800 current_function_has_nonlocal_goto = 1;
801
802 #if HAVE_nonlocal_goto
803 /* ??? We no longer need to pass the static chain value, afaik. */
804 if (HAVE_nonlocal_goto)
805 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
806 else
807 #endif
808 {
809 r_label = copy_to_reg (r_label);
810
811 emit_insn (gen_rtx_CLOBBER (VOIDmode,
812 gen_rtx_MEM (BLKmode,
813 gen_rtx_SCRATCH (VOIDmode))));
814
815 emit_insn (gen_rtx_CLOBBER (VOIDmode,
816 gen_rtx_MEM (BLKmode,
817 hard_frame_pointer_rtx)));
818
819 /* Restore frame pointer for containing function.
820 This sets the actual hard register used for the frame pointer
821 to the location of the function's incoming static chain info.
822 The non-local goto handler will then adjust it to contain the
823 proper value and reload the argument pointer, if needed. */
824 emit_move_insn (hard_frame_pointer_rtx, r_fp);
825 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
826
827 /* USE of hard_frame_pointer_rtx added for consistency;
828 not clear if really needed. */
829 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
830 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
831 emit_indirect_jump (r_label);
832 }
833
834 /* Search backwards to the jump insn and mark it as a
835 non-local goto. */
836 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
837 {
838 if (JUMP_P (insn))
839 {
840 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
841 const0_rtx, REG_NOTES (insn));
842 break;
843 }
844 else if (CALL_P (insn))
845 break;
846 }
847
848 return const0_rtx;
849 }
850
851 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
852 (not all will be used on all machines) that was passed to __builtin_setjmp.
853 It updates the stack pointer in that block to correspond to the current
854 stack pointer. */
855
856 static void
857 expand_builtin_update_setjmp_buf (rtx buf_addr)
858 {
859 enum machine_mode sa_mode = Pmode;
860 rtx stack_save;
861
862
863 #ifdef HAVE_save_stack_nonlocal
864 if (HAVE_save_stack_nonlocal)
865 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
866 #endif
867 #ifdef STACK_SAVEAREA_MODE
868 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
869 #endif
870
871 stack_save
872 = gen_rtx_MEM (sa_mode,
873 memory_address
874 (sa_mode,
875 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
876
877 #ifdef HAVE_setjmp
878 if (HAVE_setjmp)
879 emit_insn (gen_setjmp ());
880 #endif
881
882 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
883 }
884
885 /* Expand a call to __builtin_prefetch. For a target that does not support
886 data prefetch, evaluate the memory address argument in case it has side
887 effects. */
888
889 static void
890 expand_builtin_prefetch (tree arglist)
891 {
892 tree arg0, arg1, arg2;
893 rtx op0, op1, op2;
894
895 if (!validate_arglist (arglist, POINTER_TYPE, 0))
896 return;
897
898 arg0 = TREE_VALUE (arglist);
899 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
900 zero (read) and argument 2 (locality) defaults to 3 (high degree of
901 locality). */
902 if (TREE_CHAIN (arglist))
903 {
904 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
905 if (TREE_CHAIN (TREE_CHAIN (arglist)))
906 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
907 else
908 arg2 = build_int_cst (NULL_TREE, 3);
909 }
910 else
911 {
912 arg1 = integer_zero_node;
913 arg2 = build_int_cst (NULL_TREE, 3);
914 }
915
916 /* Argument 0 is an address. */
917 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
918
919 /* Argument 1 (read/write flag) must be a compile-time constant int. */
920 if (TREE_CODE (arg1) != INTEGER_CST)
921 {
922 error ("second argument to %<__builtin_prefetch%> must be a constant");
923 arg1 = integer_zero_node;
924 }
925 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
926 /* Argument 1 must be either zero or one. */
927 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
928 {
929 warning ("invalid second argument to %<__builtin_prefetch%>;"
930 " using zero");
931 op1 = const0_rtx;
932 }
933
934 /* Argument 2 (locality) must be a compile-time constant int. */
935 if (TREE_CODE (arg2) != INTEGER_CST)
936 {
937 error ("third argument to %<__builtin_prefetch%> must be a constant");
938 arg2 = integer_zero_node;
939 }
940 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
941 /* Argument 2 must be 0, 1, 2, or 3. */
942 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
943 {
944 warning ("invalid third argument to %<__builtin_prefetch%>; using zero");
945 op2 = const0_rtx;
946 }
947
948 #ifdef HAVE_prefetch
949 if (HAVE_prefetch)
950 {
951 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
952 (op0,
953 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
954 || (GET_MODE (op0) != Pmode))
955 {
956 op0 = convert_memory_address (Pmode, op0);
957 op0 = force_reg (Pmode, op0);
958 }
959 emit_insn (gen_prefetch (op0, op1, op2));
960 }
961 #endif
962
963 /* Don't do anything with direct references to volatile memory, but
964 generate code to handle other side effects. */
965 if (!MEM_P (op0) && side_effects_p (op0))
966 emit_insn (op0);
967 }
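
/* Illustrative usage sketch (editor's note, not part of the original
   source; P is a hypothetical pointer):

     __builtin_prefetch (p);          equivalent to __builtin_prefetch (p, 0, 3)
     __builtin_prefetch (p, 1, 0);    prefetch for write, no temporal locality

   Both optional arguments must be compile-time integer constants, as
   enforced above; on targets without HAVE_prefetch only the side effects
   of the address expression are expanded.  */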
968
969 /* Get a MEM rtx for expression EXP which is the address of an operand
970 to be used in a string instruction (cmpstrsi, movmemsi, ...). */
971
972 static rtx
973 get_memory_rtx (tree exp)
974 {
975 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_SUM);
976 rtx mem;
977
978 addr = convert_memory_address (Pmode, addr);
979
980 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
981
982 /* Get an expression we can use to find the attributes to assign to MEM.
983 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
984 we can. First remove any nops. */
985 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
986 || TREE_CODE (exp) == NON_LVALUE_EXPR)
987 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
988 exp = TREE_OPERAND (exp, 0);
989
990 if (TREE_CODE (exp) == ADDR_EXPR)
991 exp = TREE_OPERAND (exp, 0);
992 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
993 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
994 else
995 exp = NULL;
996
997 /* Honor attributes derived from exp, except for the alias set
998 (as builtin stringops may alias with anything) and the size
999 (as stringops may access multiple array elements). */
1000 if (exp)
1001 {
1002 set_mem_attributes (mem, exp, 0);
1003 set_mem_alias_set (mem, 0);
1004 set_mem_size (mem, NULL_RTX);
1005 }
1006
1007 return mem;
1008 }
1009 \f
1010 /* Built-in functions to perform an untyped call and return. */
1011
1012 /* For each register that may be used for calling a function, this
1013 gives a mode used to copy the register's value. VOIDmode indicates
1014 the register is not used for calling a function. If the machine
1015 has register windows, this gives only the outbound registers.
1016 INCOMING_REGNO gives the corresponding inbound register. */
1017 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1018
1019 /* For each register that may be used for returning values, this gives
1020 a mode used to copy the register's value. VOIDmode indicates the
1021 register is not used for returning values. If the machine has
1022 register windows, this gives only the outbound registers.
1023 INCOMING_REGNO gives the corresponding inbound register. */
1024 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1025
1026 /* For each register that may be used for calling a function, this
1027 gives the offset of that register into the block returned by
1028 __builtin_apply_args. 0 indicates that the register is not
1029 used for calling a function. */
1030 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1031
1032 /* Return the size required for the block returned by __builtin_apply_args,
1033 and initialize apply_args_mode. */
1034
1035 static int
1036 apply_args_size (void)
1037 {
1038 static int size = -1;
1039 int align;
1040 unsigned int regno;
1041 enum machine_mode mode;
1042
1043 /* The values computed by this function never change. */
1044 if (size < 0)
1045 {
1046 /* The first value is the incoming arg-pointer. */
1047 size = GET_MODE_SIZE (Pmode);
1048
1049 /* The second value is the structure value address unless this is
1050 passed as an "invisible" first argument. */
1051 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1052 size += GET_MODE_SIZE (Pmode);
1053
1054 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1055 if (FUNCTION_ARG_REGNO_P (regno))
1056 {
1057 mode = reg_raw_mode[regno];
1058
1059 gcc_assert (mode != VOIDmode);
1060
1061 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1062 if (size % align != 0)
1063 size = CEIL (size, align) * align;
1064 apply_args_reg_offset[regno] = size;
1065 size += GET_MODE_SIZE (mode);
1066 apply_args_mode[regno] = mode;
1067 }
1068 else
1069 {
1070 apply_args_mode[regno] = VOIDmode;
1071 apply_args_reg_offset[regno] = 0;
1072 }
1073 }
1074 return size;
1075 }
1076
1077 /* Return the size required for the block returned by __builtin_apply,
1078 and initialize apply_result_mode. */
1079
1080 static int
1081 apply_result_size (void)
1082 {
1083 static int size = -1;
1084 int align, regno;
1085 enum machine_mode mode;
1086
1087 /* The values computed by this function never change. */
1088 if (size < 0)
1089 {
1090 size = 0;
1091
1092 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1093 if (FUNCTION_VALUE_REGNO_P (regno))
1094 {
1095 mode = reg_raw_mode[regno];
1096
1097 gcc_assert (mode != VOIDmode);
1098
1099 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1100 if (size % align != 0)
1101 size = CEIL (size, align) * align;
1102 size += GET_MODE_SIZE (mode);
1103 apply_result_mode[regno] = mode;
1104 }
1105 else
1106 apply_result_mode[regno] = VOIDmode;
1107
1108 /* Allow targets that use untyped_call and untyped_return to override
1109 the size so that machine-specific information can be stored here. */
1110 #ifdef APPLY_RESULT_SIZE
1111 size = APPLY_RESULT_SIZE;
1112 #endif
1113 }
1114 return size;
1115 }
1116
1117 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1118 /* Create a vector describing the result block RESULT. If SAVEP is true,
1119 the result block is used to save the values; otherwise it is used to
1120 restore the values. */
1121
1122 static rtx
1123 result_vector (int savep, rtx result)
1124 {
1125 int regno, size, align, nelts;
1126 enum machine_mode mode;
1127 rtx reg, mem;
1128 rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1129
1130 size = nelts = 0;
1131 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1132 if ((mode = apply_result_mode[regno]) != VOIDmode)
1133 {
1134 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1135 if (size % align != 0)
1136 size = CEIL (size, align) * align;
1137 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1138 mem = adjust_address (result, mode, size);
1139 savevec[nelts++] = (savep
1140 ? gen_rtx_SET (VOIDmode, mem, reg)
1141 : gen_rtx_SET (VOIDmode, reg, mem));
1142 size += GET_MODE_SIZE (mode);
1143 }
1144 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1145 }
1146 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1147
1148 /* Save the state required to perform an untyped call with the same
1149 arguments as were passed to the current function. */
1150
1151 static rtx
1152 expand_builtin_apply_args_1 (void)
1153 {
1154 rtx registers, tem;
1155 int size, align, regno;
1156 enum machine_mode mode;
1157 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1158
1159 /* Create a block where the arg-pointer, structure value address,
1160 and argument registers can be saved. */
1161 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1162
1163 /* Walk past the arg-pointer and structure value address. */
1164 size = GET_MODE_SIZE (Pmode);
1165 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1166 size += GET_MODE_SIZE (Pmode);
1167
1168 /* Save each register used in calling a function to the block. */
1169 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1170 if ((mode = apply_args_mode[regno]) != VOIDmode)
1171 {
1172 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1173 if (size % align != 0)
1174 size = CEIL (size, align) * align;
1175
1176 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1177
1178 emit_move_insn (adjust_address (registers, mode, size), tem);
1179 size += GET_MODE_SIZE (mode);
1180 }
1181
1182 /* Save the arg pointer to the block. */
1183 tem = copy_to_reg (virtual_incoming_args_rtx);
1184 #ifdef STACK_GROWS_DOWNWARD
1185 /* We need the pointer as the caller actually passed it to us, not
1186 as we might have pretended it was passed. Make sure it's a valid
1187 operand, as emit_move_insn isn't expected to handle a PLUS. */
1188 tem
1189 = force_operand (plus_constant (tem, current_function_pretend_args_size),
1190 NULL_RTX);
1191 #endif
1192 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1193
1194 size = GET_MODE_SIZE (Pmode);
1195
1196 /* Save the structure value address unless this is passed as an
1197 "invisible" first argument. */
1198 if (struct_incoming_value)
1199 {
1200 emit_move_insn (adjust_address (registers, Pmode, size),
1201 copy_to_reg (struct_incoming_value));
1202 size += GET_MODE_SIZE (Pmode);
1203 }
1204
1205 /* Return the address of the block. */
1206 return copy_addr_to_reg (XEXP (registers, 0));
1207 }
1208
1209 /* __builtin_apply_args returns a block of memory allocated on
1210 the stack into which is stored the arg pointer, structure
1211 value address, static chain, and all the registers that might
1212 possibly be used in performing a function call. The code is
1213 moved to the start of the function so the incoming values are
1214 saved. */
1215
1216 static rtx
1217 expand_builtin_apply_args (void)
1218 {
1219 /* Don't do __builtin_apply_args more than once in a function.
1220 Save the result of the first call and reuse it. */
1221 if (apply_args_value != 0)
1222 return apply_args_value;
1223 {
1224 /* When this function is called, it means that registers must be
1225 saved on entry to this function. So we migrate the
1226 call to the first insn of this function. */
1227 rtx temp;
1228 rtx seq;
1229
1230 start_sequence ();
1231 temp = expand_builtin_apply_args_1 ();
1232 seq = get_insns ();
1233 end_sequence ();
1234
1235 apply_args_value = temp;
1236
1237 /* Put the insns after the NOTE that starts the function.
1238 If this is inside a start_sequence, make the outer-level insn
1239 chain current, so the code is placed at the start of the
1240 function. */
1241 push_topmost_sequence ();
1242 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1243 pop_topmost_sequence ();
1244 return temp;
1245 }
1246 }
1247
1248 /* Perform an untyped call and save the state required to perform an
1249 untyped return of whatever value was returned by the given function. */
1250
1251 static rtx
1252 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1253 {
1254 int size, align, regno;
1255 enum machine_mode mode;
1256 rtx incoming_args, result, reg, dest, src, call_insn;
1257 rtx old_stack_level = 0;
1258 rtx call_fusage = 0;
1259 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1260
1261 arguments = convert_memory_address (Pmode, arguments);
1262
1263 /* Create a block where the return registers can be saved. */
1264 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1265
1266 /* Fetch the arg pointer from the ARGUMENTS block. */
1267 incoming_args = gen_reg_rtx (Pmode);
1268 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1269 #ifndef STACK_GROWS_DOWNWARD
1270 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1271 incoming_args, 0, OPTAB_LIB_WIDEN);
1272 #endif
1273
1274 /* Push a new argument block and copy the arguments. Do not allow
1275 the (potential) memcpy call below to interfere with our stack
1276 manipulations. */
1277 do_pending_stack_adjust ();
1278 NO_DEFER_POP;
1279
1280 /* Save the stack with nonlocal if available. */
1281 #ifdef HAVE_save_stack_nonlocal
1282 if (HAVE_save_stack_nonlocal)
1283 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1284 else
1285 #endif
1286 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1287
1288 /* Allocate a block of memory onto the stack and copy the memory
1289 arguments to the outgoing arguments address. */
1290 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1291 dest = virtual_outgoing_args_rtx;
1292 #ifndef STACK_GROWS_DOWNWARD
1293 if (GET_CODE (argsize) == CONST_INT)
1294 dest = plus_constant (dest, -INTVAL (argsize));
1295 else
1296 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1297 #endif
1298 dest = gen_rtx_MEM (BLKmode, dest);
1299 set_mem_align (dest, PARM_BOUNDARY);
1300 src = gen_rtx_MEM (BLKmode, incoming_args);
1301 set_mem_align (src, PARM_BOUNDARY);
1302 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1303
1304 /* Refer to the argument block. */
1305 apply_args_size ();
1306 arguments = gen_rtx_MEM (BLKmode, arguments);
1307 set_mem_align (arguments, PARM_BOUNDARY);
1308
1309 /* Walk past the arg-pointer and structure value address. */
1310 size = GET_MODE_SIZE (Pmode);
1311 if (struct_value)
1312 size += GET_MODE_SIZE (Pmode);
1313
1314 /* Restore each of the registers previously saved. Make USE insns
1315 for each of these registers for use in making the call. */
1316 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1317 if ((mode = apply_args_mode[regno]) != VOIDmode)
1318 {
1319 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1320 if (size % align != 0)
1321 size = CEIL (size, align) * align;
1322 reg = gen_rtx_REG (mode, regno);
1323 emit_move_insn (reg, adjust_address (arguments, mode, size));
1324 use_reg (&call_fusage, reg);
1325 size += GET_MODE_SIZE (mode);
1326 }
1327
1328 /* Restore the structure value address unless this is passed as an
1329 "invisible" first argument. */
1330 size = GET_MODE_SIZE (Pmode);
1331 if (struct_value)
1332 {
1333 rtx value = gen_reg_rtx (Pmode);
1334 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1335 emit_move_insn (struct_value, value);
1336 if (REG_P (struct_value))
1337 use_reg (&call_fusage, struct_value);
1338 size += GET_MODE_SIZE (Pmode);
1339 }
1340
1341 /* All arguments and registers used for the call are set up by now! */
1342 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1343
1344 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1345 and we don't want to load it into a register as an optimization,
1346 because prepare_call_address already did it if it should be done. */
1347 if (GET_CODE (function) != SYMBOL_REF)
1348 function = memory_address (FUNCTION_MODE, function);
1349
1350 /* Generate the actual call instruction and save the return value. */
1351 #ifdef HAVE_untyped_call
1352 if (HAVE_untyped_call)
1353 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1354 result, result_vector (1, result)));
1355 else
1356 #endif
1357 #ifdef HAVE_call_value
1358 if (HAVE_call_value)
1359 {
1360 rtx valreg = 0;
1361
1362 /* Locate the unique return register. It is not possible to
1363 express a call that sets more than one return register using
1364 call_value; use untyped_call for that. In fact, untyped_call
1365 only needs to save the return registers in the given block. */
1366 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1367 if ((mode = apply_result_mode[regno]) != VOIDmode)
1368 {
1369 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1370
1371 valreg = gen_rtx_REG (mode, regno);
1372 }
1373
1374 emit_call_insn (GEN_CALL_VALUE (valreg,
1375 gen_rtx_MEM (FUNCTION_MODE, function),
1376 const0_rtx, NULL_RTX, const0_rtx));
1377
1378 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1379 }
1380 else
1381 #endif
1382 gcc_unreachable ();
1383
1384 /* Find the CALL insn we just emitted, and attach the register usage
1385 information. */
1386 call_insn = last_call_insn ();
1387 add_function_usage_to (call_insn, call_fusage);
1388
1389 /* Restore the stack. */
1390 #ifdef HAVE_save_stack_nonlocal
1391 if (HAVE_save_stack_nonlocal)
1392 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1393 else
1394 #endif
1395 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1396
1397 OK_DEFER_POP;
1398
1399 /* Return the address of the result block. */
1400 result = copy_addr_to_reg (XEXP (result, 0));
1401 return convert_memory_address (ptr_mode, result);
1402 }
1403
1404 /* Perform an untyped return. */
1405
1406 static void
1407 expand_builtin_return (rtx result)
1408 {
1409 int size, align, regno;
1410 enum machine_mode mode;
1411 rtx reg;
1412 rtx call_fusage = 0;
1413
1414 result = convert_memory_address (Pmode, result);
1415
1416 apply_result_size ();
1417 result = gen_rtx_MEM (BLKmode, result);
1418
1419 #ifdef HAVE_untyped_return
1420 if (HAVE_untyped_return)
1421 {
1422 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1423 emit_barrier ();
1424 return;
1425 }
1426 #endif
1427
1428 /* Restore the return value and note that each value is used. */
1429 size = 0;
1430 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1431 if ((mode = apply_result_mode[regno]) != VOIDmode)
1432 {
1433 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1434 if (size % align != 0)
1435 size = CEIL (size, align) * align;
1436 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1437 emit_move_insn (reg, adjust_address (result, mode, size));
1438
1439 push_to_sequence (call_fusage);
1440 emit_insn (gen_rtx_USE (VOIDmode, reg));
1441 call_fusage = get_insns ();
1442 end_sequence ();
1443 size += GET_MODE_SIZE (mode);
1444 }
1445
1446 /* Put the USE insns before the return. */
1447 emit_insn (call_fusage);
1448
1449 /* Return whatever values were restored by jumping directly to the end
1450 of the function. */
1451 expand_naked_return ();
1452 }
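
/* Illustrative usage sketch (editor's note, not part of the original
   source; target_fn and the size 64 are hypothetical).  The three builtins
   above are normally used together to forward a call whose arguments are
   not known statically:

     void *args = __builtin_apply_args ();
     void *result = __builtin_apply ((void (*) ()) target_fn, args, 64);
     __builtin_return (result);

   ARGS points at the block laid out by expand_builtin_apply_args_1, the
   last argument bounds the size of the pushed argument block (ARGSIZE
   above), and RESULT points at the block saved by expand_builtin_apply.  */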
1453
1454 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1455
1456 static enum type_class
1457 type_to_class (tree type)
1458 {
1459 switch (TREE_CODE (type))
1460 {
1461 case VOID_TYPE: return void_type_class;
1462 case INTEGER_TYPE: return integer_type_class;
1463 case CHAR_TYPE: return char_type_class;
1464 case ENUMERAL_TYPE: return enumeral_type_class;
1465 case BOOLEAN_TYPE: return boolean_type_class;
1466 case POINTER_TYPE: return pointer_type_class;
1467 case REFERENCE_TYPE: return reference_type_class;
1468 case OFFSET_TYPE: return offset_type_class;
1469 case REAL_TYPE: return real_type_class;
1470 case COMPLEX_TYPE: return complex_type_class;
1471 case FUNCTION_TYPE: return function_type_class;
1472 case METHOD_TYPE: return method_type_class;
1473 case RECORD_TYPE: return record_type_class;
1474 case UNION_TYPE:
1475 case QUAL_UNION_TYPE: return union_type_class;
1476 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1477 ? string_type_class : array_type_class);
1478 case FILE_TYPE: return file_type_class;
1479 case LANG_TYPE: return lang_type_class;
1480 default: return no_type_class;
1481 }
1482 }
1483
1484 /* Expand a call to __builtin_classify_type with arguments found in
1485 ARGLIST. */
1486
1487 static rtx
1488 expand_builtin_classify_type (tree arglist)
1489 {
1490 if (arglist != 0)
1491 return GEN_INT (type_to_class (TREE_TYPE (TREE_VALUE (arglist))));
1492 return GEN_INT (no_type_class);
1493 }
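
/* Illustrative note (editor's sketch, not part of the original source).
   With the mapping above, __builtin_classify_type applied to an int
   argument expands to integer_type_class, to a double argument expands to
   real_type_class, and to a pointer argument expands to
   pointer_type_class; with no argument it expands to no_type_class.  The
   enumeration values themselves come from typeclass.h.  */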
1494
1495 /* This helper macro, meant to be used in mathfn_built_in below,
1496 determines which among a set of three builtin math functions is
1497 appropriate for a given type mode. The `F' and `L' cases are
1498 automatically generated from the `double' case. */
1499 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1500 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1501 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1502 fcodel = BUILT_IN_MATHFN##L ; break;
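
/* Illustrative expansion (editor's note, not part of the original source).
   CASE_MATHFN (BUILT_IN_SIN) expands to

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   so mathfn_built_in below maps any of the three variants of a math
   builtin to the variant matching TYPE, e.g. mathfn_built_in
   (float_type_node, BUILT_IN_SIN) yields implicit_built_in_decls
   [BUILT_IN_SINF].  */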
1503
1504 /* Return the mathematical function equivalent to FN but operating directly
1505 on TYPE, if available. If we can't do the conversion, return zero. */
1506 tree
1507 mathfn_built_in (tree type, enum built_in_function fn)
1508 {
1509 enum built_in_function fcode, fcodef, fcodel;
1510
1511 switch (fn)
1512 {
1513 CASE_MATHFN (BUILT_IN_ACOS)
1514 CASE_MATHFN (BUILT_IN_ACOSH)
1515 CASE_MATHFN (BUILT_IN_ASIN)
1516 CASE_MATHFN (BUILT_IN_ASINH)
1517 CASE_MATHFN (BUILT_IN_ATAN)
1518 CASE_MATHFN (BUILT_IN_ATAN2)
1519 CASE_MATHFN (BUILT_IN_ATANH)
1520 CASE_MATHFN (BUILT_IN_CBRT)
1521 CASE_MATHFN (BUILT_IN_CEIL)
1522 CASE_MATHFN (BUILT_IN_COPYSIGN)
1523 CASE_MATHFN (BUILT_IN_COS)
1524 CASE_MATHFN (BUILT_IN_COSH)
1525 CASE_MATHFN (BUILT_IN_DREM)
1526 CASE_MATHFN (BUILT_IN_ERF)
1527 CASE_MATHFN (BUILT_IN_ERFC)
1528 CASE_MATHFN (BUILT_IN_EXP)
1529 CASE_MATHFN (BUILT_IN_EXP10)
1530 CASE_MATHFN (BUILT_IN_EXP2)
1531 CASE_MATHFN (BUILT_IN_EXPM1)
1532 CASE_MATHFN (BUILT_IN_FABS)
1533 CASE_MATHFN (BUILT_IN_FDIM)
1534 CASE_MATHFN (BUILT_IN_FLOOR)
1535 CASE_MATHFN (BUILT_IN_FMA)
1536 CASE_MATHFN (BUILT_IN_FMAX)
1537 CASE_MATHFN (BUILT_IN_FMIN)
1538 CASE_MATHFN (BUILT_IN_FMOD)
1539 CASE_MATHFN (BUILT_IN_FREXP)
1540 CASE_MATHFN (BUILT_IN_GAMMA)
1541 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1542 CASE_MATHFN (BUILT_IN_HYPOT)
1543 CASE_MATHFN (BUILT_IN_ILOGB)
1544 CASE_MATHFN (BUILT_IN_INF)
1545 CASE_MATHFN (BUILT_IN_J0)
1546 CASE_MATHFN (BUILT_IN_J1)
1547 CASE_MATHFN (BUILT_IN_JN)
1548 CASE_MATHFN (BUILT_IN_LDEXP)
1549 CASE_MATHFN (BUILT_IN_LGAMMA)
1550 CASE_MATHFN (BUILT_IN_LLRINT)
1551 CASE_MATHFN (BUILT_IN_LLROUND)
1552 CASE_MATHFN (BUILT_IN_LOG)
1553 CASE_MATHFN (BUILT_IN_LOG10)
1554 CASE_MATHFN (BUILT_IN_LOG1P)
1555 CASE_MATHFN (BUILT_IN_LOG2)
1556 CASE_MATHFN (BUILT_IN_LOGB)
1557 CASE_MATHFN (BUILT_IN_LRINT)
1558 CASE_MATHFN (BUILT_IN_LROUND)
1559 CASE_MATHFN (BUILT_IN_MODF)
1560 CASE_MATHFN (BUILT_IN_NAN)
1561 CASE_MATHFN (BUILT_IN_NANS)
1562 CASE_MATHFN (BUILT_IN_NEARBYINT)
1563 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1564 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1565 CASE_MATHFN (BUILT_IN_POW)
1566 CASE_MATHFN (BUILT_IN_POWI)
1567 CASE_MATHFN (BUILT_IN_POW10)
1568 CASE_MATHFN (BUILT_IN_REMAINDER)
1569 CASE_MATHFN (BUILT_IN_REMQUO)
1570 CASE_MATHFN (BUILT_IN_RINT)
1571 CASE_MATHFN (BUILT_IN_ROUND)
1572 CASE_MATHFN (BUILT_IN_SCALB)
1573 CASE_MATHFN (BUILT_IN_SCALBLN)
1574 CASE_MATHFN (BUILT_IN_SCALBN)
1575 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1576 CASE_MATHFN (BUILT_IN_SIN)
1577 CASE_MATHFN (BUILT_IN_SINCOS)
1578 CASE_MATHFN (BUILT_IN_SINH)
1579 CASE_MATHFN (BUILT_IN_SQRT)
1580 CASE_MATHFN (BUILT_IN_TAN)
1581 CASE_MATHFN (BUILT_IN_TANH)
1582 CASE_MATHFN (BUILT_IN_TGAMMA)
1583 CASE_MATHFN (BUILT_IN_TRUNC)
1584 CASE_MATHFN (BUILT_IN_Y0)
1585 CASE_MATHFN (BUILT_IN_Y1)
1586 CASE_MATHFN (BUILT_IN_YN)
1587
1588 default:
1589 return 0;
1590 }
1591
1592 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1593 return implicit_built_in_decls[fcode];
1594 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1595 return implicit_built_in_decls[fcodef];
1596 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1597 return implicit_built_in_decls[fcodel];
1598 else
1599 return 0;
1600 }
1601
1602 /* If errno must be maintained, expand the RTL to check if the result,
1603 TARGET, of a built-in function call, EXP, is NaN, and if so set
1604 errno to EDOM. */
1605
1606 static void
1607 expand_errno_check (tree exp, rtx target)
1608 {
1609 rtx lab = gen_label_rtx ();
1610
1611 /* Test the result; if it is NaN, set errno=EDOM because
1612 the argument was not in the domain. */
1613 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1614 0, lab);
1615
1616 #ifdef TARGET_EDOM
1617 /* If this built-in doesn't throw an exception, set errno directly. */
1618 if (TREE_NOTHROW (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
1619 {
1620 #ifdef GEN_ERRNO_RTX
1621 rtx errno_rtx = GEN_ERRNO_RTX;
1622 #else
1623 rtx errno_rtx
1624 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1625 #endif
1626 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1627 emit_label (lab);
1628 return;
1629 }
1630 #endif
1631
1632 /* We can't set errno=EDOM directly; let the library call do it.
1633 Pop the arguments right away in case the call gets deleted. */
1634 NO_DEFER_POP;
1635 expand_call (exp, target, 0);
1636 OK_DEFER_POP;
1637 emit_label (lab);
1638 }
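
/* In C terms, the sequence emitted above behaves roughly like the
   sketch below (illustrative only; RESULT stands for the value produced
   by the expanded built-in call). Only a NaN compares unequal to
   itself, which is why comparing TARGET against itself is enough to
   detect a domain error:

	if (result != result)
	  errno = EDOM;
*/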
1639
1640
1641 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1642 Return 0 if a normal call should be emitted rather than expanding the
1643 function in-line. EXP is the expression that is a call to the builtin
1644 function; if convenient, the result should be placed in TARGET.
1645 SUBTARGET may be used as the target for computing one of EXP's operands. */
1646
1647 static rtx
1648 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1649 {
1650 optab builtin_optab;
1651 rtx op0, insns, before_call;
1652 tree fndecl = get_callee_fndecl (exp);
1653 tree arglist = TREE_OPERAND (exp, 1);
1654 enum machine_mode mode;
1655 bool errno_set = false;
1656 tree arg, narg;
1657
1658 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
1659 return 0;
1660
1661 arg = TREE_VALUE (arglist);
1662
1663 switch (DECL_FUNCTION_CODE (fndecl))
1664 {
1665 case BUILT_IN_SQRT:
1666 case BUILT_IN_SQRTF:
1667 case BUILT_IN_SQRTL:
1668 errno_set = ! tree_expr_nonnegative_p (arg);
1669 builtin_optab = sqrt_optab;
1670 break;
1671 case BUILT_IN_EXP:
1672 case BUILT_IN_EXPF:
1673 case BUILT_IN_EXPL:
1674 errno_set = true; builtin_optab = exp_optab; break;
1675 case BUILT_IN_EXP10:
1676 case BUILT_IN_EXP10F:
1677 case BUILT_IN_EXP10L:
1678 case BUILT_IN_POW10:
1679 case BUILT_IN_POW10F:
1680 case BUILT_IN_POW10L:
1681 errno_set = true; builtin_optab = exp10_optab; break;
1682 case BUILT_IN_EXP2:
1683 case BUILT_IN_EXP2F:
1684 case BUILT_IN_EXP2L:
1685 errno_set = true; builtin_optab = exp2_optab; break;
1686 case BUILT_IN_EXPM1:
1687 case BUILT_IN_EXPM1F:
1688 case BUILT_IN_EXPM1L:
1689 errno_set = true; builtin_optab = expm1_optab; break;
1690 case BUILT_IN_LOGB:
1691 case BUILT_IN_LOGBF:
1692 case BUILT_IN_LOGBL:
1693 errno_set = true; builtin_optab = logb_optab; break;
1694 case BUILT_IN_ILOGB:
1695 case BUILT_IN_ILOGBF:
1696 case BUILT_IN_ILOGBL:
1697 errno_set = true; builtin_optab = ilogb_optab; break;
1698 case BUILT_IN_LOG:
1699 case BUILT_IN_LOGF:
1700 case BUILT_IN_LOGL:
1701 errno_set = true; builtin_optab = log_optab; break;
1702 case BUILT_IN_LOG10:
1703 case BUILT_IN_LOG10F:
1704 case BUILT_IN_LOG10L:
1705 errno_set = true; builtin_optab = log10_optab; break;
1706 case BUILT_IN_LOG2:
1707 case BUILT_IN_LOG2F:
1708 case BUILT_IN_LOG2L:
1709 errno_set = true; builtin_optab = log2_optab; break;
1710 case BUILT_IN_LOG1P:
1711 case BUILT_IN_LOG1PF:
1712 case BUILT_IN_LOG1PL:
1713 errno_set = true; builtin_optab = log1p_optab; break;
1714 case BUILT_IN_ASIN:
1715 case BUILT_IN_ASINF:
1716 case BUILT_IN_ASINL:
1717 builtin_optab = asin_optab; break;
1718 case BUILT_IN_ACOS:
1719 case BUILT_IN_ACOSF:
1720 case BUILT_IN_ACOSL:
1721 builtin_optab = acos_optab; break;
1722 case BUILT_IN_TAN:
1723 case BUILT_IN_TANF:
1724 case BUILT_IN_TANL:
1725 builtin_optab = tan_optab; break;
1726 case BUILT_IN_ATAN:
1727 case BUILT_IN_ATANF:
1728 case BUILT_IN_ATANL:
1729 builtin_optab = atan_optab; break;
1730 case BUILT_IN_FLOOR:
1731 case BUILT_IN_FLOORF:
1732 case BUILT_IN_FLOORL:
1733 builtin_optab = floor_optab; break;
1734 case BUILT_IN_CEIL:
1735 case BUILT_IN_CEILF:
1736 case BUILT_IN_CEILL:
1737 builtin_optab = ceil_optab; break;
1738 case BUILT_IN_TRUNC:
1739 case BUILT_IN_TRUNCF:
1740 case BUILT_IN_TRUNCL:
1741 builtin_optab = btrunc_optab; break;
1742 case BUILT_IN_ROUND:
1743 case BUILT_IN_ROUNDF:
1744 case BUILT_IN_ROUNDL:
1745 builtin_optab = round_optab; break;
1746 case BUILT_IN_NEARBYINT:
1747 case BUILT_IN_NEARBYINTF:
1748 case BUILT_IN_NEARBYINTL:
1749 builtin_optab = nearbyint_optab; break;
1750 case BUILT_IN_RINT:
1751 case BUILT_IN_RINTF:
1752 case BUILT_IN_RINTL:
1753 builtin_optab = rint_optab; break;
1754 default:
1755 gcc_unreachable ();
1756 }
1757
1758 /* Make a suitable register to place result in. */
1759 mode = TYPE_MODE (TREE_TYPE (exp));
1760
1761 if (! flag_errno_math || ! HONOR_NANS (mode))
1762 errno_set = false;
1763
1764 /* Before working hard, check whether the instruction is available. */
1765 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1766 {
1767 target = gen_reg_rtx (mode);
1768
1769 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1770 need to expand the argument again. This way, we will not perform
1771 side-effects more than once. */
1772 narg = builtin_save_expr (arg);
1773 if (narg != arg)
1774 {
1775 arg = narg;
1776 arglist = build_tree_list (NULL_TREE, arg);
1777 exp = build_function_call_expr (fndecl, arglist);
1778 }
1779
1780 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
1781
1782 start_sequence ();
1783
1784 /* Compute into TARGET.
1785 Set TARGET to wherever the result comes back. */
1786 target = expand_unop (mode, builtin_optab, op0, target, 0);
1787
1788 if (target != 0)
1789 {
1790 if (errno_set)
1791 expand_errno_check (exp, target);
1792
1793 /* Output the entire sequence. */
1794 insns = get_insns ();
1795 end_sequence ();
1796 emit_insn (insns);
1797 return target;
1798 }
1799
1800 /* If we were unable to expand via the builtin, stop the sequence
1801 (without outputting the insns) and call the library function
1802 with the stabilized argument list. */
1803 end_sequence ();
1804 }
1805
1806 before_call = get_last_insn ();
1807
1808 target = expand_call (exp, target, target == const0_rtx);
1809
1810 /* If this is a sqrt operation and we don't care about errno, try to
1811 attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
1812 This allows the semantics of the libcall to be visible to the RTL
1813 optimizers. */
1814 if (builtin_optab == sqrt_optab && !errno_set)
1815 {
1816 /* Search backwards through the insns emitted by expand_call looking
1817 for the instruction with the REG_RETVAL note. */
1818 rtx last = get_last_insn ();
1819 while (last != before_call)
1820 {
1821 if (find_reg_note (last, REG_RETVAL, NULL))
1822 {
1823 rtx note = find_reg_note (last, REG_EQUAL, NULL);
1824 /* Check that the REG_EQUAL note is an EXPR_LIST with
1825 two elements, i.e. symbol_ref(sqrt) and the operand. */
1826 if (note
1827 && GET_CODE (note) == EXPR_LIST
1828 && GET_CODE (XEXP (note, 0)) == EXPR_LIST
1829 && XEXP (XEXP (note, 0), 1) != NULL_RTX
1830 && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
1831 {
1832 rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
1833 /* Check operand is a register with expected mode. */
1834 if (operand
1835 && REG_P (operand)
1836 && GET_MODE (operand) == mode)
1837 {
1838 /* Replace the REG_EQUAL note with a SQRT rtx. */
1839 rtx equiv = gen_rtx_SQRT (mode, operand);
1840 set_unique_reg_note (last, REG_EQUAL, equiv);
1841 }
1842 }
1843 break;
1844 }
1845 last = PREV_INSN (last);
1846 }
1847 }
1848
1849 return target;
1850 }
1851
1852 /* Expand a call to the builtin binary math functions (pow and atan2).
1853 Return 0 if a normal call should be emitted rather than expanding the
1854 function in-line. EXP is the expression that is a call to the builtin
1855 function; if convenient, the result should be placed in TARGET.
1856 SUBTARGET may be used as the target for computing one of EXP's
1857 operands. */
1858
1859 static rtx
1860 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1861 {
1862 optab builtin_optab;
1863 rtx op0, op1, insns;
1864 tree fndecl = get_callee_fndecl (exp);
1865 tree arglist = TREE_OPERAND (exp, 1);
1866 tree arg0, arg1, temp, narg;
1867 enum machine_mode mode;
1868 bool errno_set = true;
1869 bool stable = true;
1870
1871 if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
1872 return 0;
1873
1874 arg0 = TREE_VALUE (arglist);
1875 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1876
1877 switch (DECL_FUNCTION_CODE (fndecl))
1878 {
1879 case BUILT_IN_POW:
1880 case BUILT_IN_POWF:
1881 case BUILT_IN_POWL:
1882 builtin_optab = pow_optab; break;
1883 case BUILT_IN_ATAN2:
1884 case BUILT_IN_ATAN2F:
1885 case BUILT_IN_ATAN2L:
1886 builtin_optab = atan2_optab; break;
1887 case BUILT_IN_FMOD:
1888 case BUILT_IN_FMODF:
1889 case BUILT_IN_FMODL:
1890 builtin_optab = fmod_optab; break;
1891 case BUILT_IN_DREM:
1892 case BUILT_IN_DREMF:
1893 case BUILT_IN_DREML:
1894 builtin_optab = drem_optab; break;
1895 default:
1896 gcc_unreachable ();
1897 }
1898
1899 /* Make a suitable register to place result in. */
1900 mode = TYPE_MODE (TREE_TYPE (exp));
1901
1902 /* Before working hard, check whether the instruction is available. */
1903 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
1904 return 0;
1905
1906 target = gen_reg_rtx (mode);
1907
1908 if (! flag_errno_math || ! HONOR_NANS (mode))
1909 errno_set = false;
1910
1911 /* Always stabilize the argument list. */
1912 narg = builtin_save_expr (arg1);
1913 if (narg != arg1)
1914 {
1915 arg1 = narg;
1916 temp = build_tree_list (NULL_TREE, narg);
1917 stable = false;
1918 }
1919 else
1920 temp = TREE_CHAIN (arglist);
1921
1922 narg = builtin_save_expr (arg0);
1923 if (narg != arg0)
1924 {
1925 arg0 = narg;
1926 arglist = tree_cons (NULL_TREE, narg, temp);
1927 stable = false;
1928 }
1929 else if (! stable)
1930 arglist = tree_cons (NULL_TREE, arg0, temp);
1931
1932 if (! stable)
1933 exp = build_function_call_expr (fndecl, arglist);
1934
1935 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
1936 op1 = expand_expr (arg1, 0, VOIDmode, 0);
1937
1938 start_sequence ();
1939
1940 /* Compute into TARGET.
1941 Set TARGET to wherever the result comes back. */
1942 target = expand_binop (mode, builtin_optab, op0, op1,
1943 target, 0, OPTAB_DIRECT);
1944
1945 /* If we were unable to expand via the builtin, stop the sequence
1946 (without outputting the insns) and call the library function
1947 with the stabilized argument list. */
1948 if (target == 0)
1949 {
1950 end_sequence ();
1951 return expand_call (exp, target, target == const0_rtx);
1952 }
1953
1954 if (errno_set)
1955 expand_errno_check (exp, target);
1956
1957 /* Output the entire sequence. */
1958 insns = get_insns ();
1959 end_sequence ();
1960 emit_insn (insns);
1961
1962 return target;
1963 }
1964
1965 /* Expand a call to the builtin sin and cos math functions.
1966 Return 0 if a normal call should be emitted rather than expanding the
1967 function in-line. EXP is the expression that is a call to the builtin
1968 function; if convenient, the result should be placed in TARGET.
1969 SUBTARGET may be used as the target for computing one of EXP's
1970 operands. */
1971
1972 static rtx
1973 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
1974 {
1975 optab builtin_optab;
1976 rtx op0, insns, before_call;
1977 tree fndecl = get_callee_fndecl (exp);
1978 tree arglist = TREE_OPERAND (exp, 1);
1979 enum machine_mode mode;
1980 bool errno_set = false;
1981 tree arg, narg;
1982
1983 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
1984 return 0;
1985
1986 arg = TREE_VALUE (arglist);
1987
1988 switch (DECL_FUNCTION_CODE (fndecl))
1989 {
1990 case BUILT_IN_SIN:
1991 case BUILT_IN_SINF:
1992 case BUILT_IN_SINL:
1993 case BUILT_IN_COS:
1994 case BUILT_IN_COSF:
1995 case BUILT_IN_COSL:
1996 builtin_optab = sincos_optab; break;
1997 default:
1998 gcc_unreachable ();
1999 }
2000
2001 /* Make a suitable register to place result in. */
2002 mode = TYPE_MODE (TREE_TYPE (exp));
2003
2004 if (! flag_errno_math || ! HONOR_NANS (mode))
2005 errno_set = false;
2006
2007 /* Check if the sincos insn is available; otherwise fall back
2008 to the sin or cos insn. */
2009 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing) {
2010 switch (DECL_FUNCTION_CODE (fndecl))
2011 {
2012 case BUILT_IN_SIN:
2013 case BUILT_IN_SINF:
2014 case BUILT_IN_SINL:
2015 builtin_optab = sin_optab; break;
2016 case BUILT_IN_COS:
2017 case BUILT_IN_COSF:
2018 case BUILT_IN_COSL:
2019 builtin_optab = cos_optab; break;
2020 default:
2021 gcc_unreachable ();
2022 }
2023 }
2024
2025 /* Before working hard, check whether the instruction is available. */
2026 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2027 {
2028 target = gen_reg_rtx (mode);
2029
2030 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2031 need to expand the argument again. This way, we will not perform
2032 side-effects more than once. */
2033 narg = save_expr (arg);
2034 if (narg != arg)
2035 {
2036 arg = narg;
2037 arglist = build_tree_list (NULL_TREE, arg);
2038 exp = build_function_call_expr (fndecl, arglist);
2039 }
2040
2041 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
2042
2043 start_sequence ();
2044
2045 /* Compute into TARGET.
2046 Set TARGET to wherever the result comes back. */
2047 if (builtin_optab == sincos_optab)
2048 {
2049 int result;
2050
2051 switch (DECL_FUNCTION_CODE (fndecl))
2052 {
2053 case BUILT_IN_SIN:
2054 case BUILT_IN_SINF:
2055 case BUILT_IN_SINL:
2056 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2057 break;
2058 case BUILT_IN_COS:
2059 case BUILT_IN_COSF:
2060 case BUILT_IN_COSL:
2061 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2062 break;
2063 default:
2064 gcc_unreachable ();
2065 }
2066 gcc_assert (result);
2067 }
2068 else
2069 {
2070 target = expand_unop (mode, builtin_optab, op0, target, 0);
2071 }
2072
2073 if (target != 0)
2074 {
2075 if (errno_set)
2076 expand_errno_check (exp, target);
2077
2078 /* Output the entire sequence. */
2079 insns = get_insns ();
2080 end_sequence ();
2081 emit_insn (insns);
2082 return target;
2083 }
2084
2085 /* If we were unable to expand via the builtin, stop the sequence
2086 (without outputting the insns) and call the library function
2087 with the stabilized argument list. */
2088 end_sequence ();
2089 }
2090
2091 before_call = get_last_insn ();
2092
2093 target = expand_call (exp, target, target == const0_rtx);
2094
2095 return target;
2096 }
2097
2098 /* To evaluate powi(x,n), the floating point value x raised to the
2099 constant integer exponent n, we use a hybrid algorithm that
2100 combines the "window method" with look-up tables. For an
2101 introduction to exponentiation algorithms and "addition chains",
2102 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2103 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2104 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2105 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
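
/* For example, for the exponent n = 15 the powi_table below gives the
   factorization

	x**15 = x**9 * x**6	(powi_table[15] == 9)
	x**9  = x**6 * x**3	(powi_table[9]  == 6)
	x**6  = x**3 * x**3	(powi_table[6]  == 3)
	x**3  = x**2 * x	(powi_table[3]  == 2)
	x**2  = x    * x	(powi_table[2]  == 1)

   which, reusing the cached intermediate powers, costs 5 multiplications
   where plain square-and-multiply needs 6. Exponents of POWI_TABLE_SIZE
   or more are first reduced by the window method; see powi_cost and
   expand_powi_1 below. */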
2106
2107 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2108 multiplications to inline before calling the system library's pow
2109 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2110 so this default never requires calling pow, powf or powl. */
2111
2112 #ifndef POWI_MAX_MULTS
2113 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2114 #endif
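
/* For example, on a host where HOST_BITS_PER_WIDE_INT is 64 this default
   is 2*64 - 2 = 126: at most 63 squarings plus 63 further multiplies
   covers any 64-bit exponent, so the cut-off by itself never forces a
   fall back to the pow, powf or powl library call. */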
2115
2116 /* The size of the "optimal power tree" lookup table. All
2117 exponents less than this value are simply looked up in the
2118 powi_table below. This threshold is also used to size the
2119 cache of pseudo registers that hold intermediate results. */
2120 #define POWI_TABLE_SIZE 256
2121
2122 /* The size, in bits, of the window used in the "window method"
2123 exponentiation algorithm. This is equivalent to a radix of
2124 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2125 #define POWI_WINDOW_SIZE 3
2126
2127 /* The following table is an efficient representation of an
2128 "optimal power tree". For each value, i, the corresponding
2129 value, j, in the table states that an optimal evaluation
2130 sequence for calculating pow(x,i) can be found by evaluating
2131 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2132 100 integers is given in Knuth's "Seminumerical algorithms". */
2133
2134 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2135 {
2136 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2137 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2138 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2139 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2140 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2141 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2142 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2143 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2144 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2145 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2146 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2147 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2148 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2149 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2150 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2151 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2152 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2153 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2154 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2155 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2156 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2157 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2158 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2159 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2160 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2161 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2162 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2163 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2164 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2165 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2166 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2167 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2168 };
2169
2170
2171 /* Return the number of multiplications required to calculate
2172 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2173 subroutine of powi_cost. CACHE is an array indicating
2174 which exponents have already been calculated. */
2175
2176 static int
2177 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2178 {
2179 /* If we've already calculated this exponent, then this evaluation
2180 doesn't require any additional multiplications. */
2181 if (cache[n])
2182 return 0;
2183
2184 cache[n] = true;
2185 return powi_lookup_cost (n - powi_table[n], cache)
2186 + powi_lookup_cost (powi_table[n], cache) + 1;
2187 }
2188
2189 /* Return the number of multiplications required to calculate
2190 powi(x,n) for an arbitrary x, given the exponent N. This
2191 function needs to be kept in sync with expand_powi below. */
2192
2193 static int
2194 powi_cost (HOST_WIDE_INT n)
2195 {
2196 bool cache[POWI_TABLE_SIZE];
2197 unsigned HOST_WIDE_INT digit;
2198 unsigned HOST_WIDE_INT val;
2199 int result;
2200
2201 if (n == 0)
2202 return 0;
2203
2204 /* Ignore the reciprocal when calculating the cost. */
2205 val = (n < 0) ? -n : n;
2206
2207 /* Initialize the exponent cache. */
2208 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2209 cache[1] = true;
2210
2211 result = 0;
2212
2213 while (val >= POWI_TABLE_SIZE)
2214 {
2215 if (val & 1)
2216 {
2217 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2218 result += powi_lookup_cost (digit, cache)
2219 + POWI_WINDOW_SIZE + 1;
2220 val >>= POWI_WINDOW_SIZE;
2221 }
2222 else
2223 {
2224 val >>= 1;
2225 result++;
2226 }
2227 }
2228
2229 return result + powi_lookup_cost (val, cache);
2230 }
2231
2232 /* Recursive subroutine of expand_powi. This function takes the array,
2233 CACHE, of already calculated exponents and an exponent N and returns
2234 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2235
2236 static rtx
2237 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2238 {
2239 unsigned HOST_WIDE_INT digit;
2240 rtx target, result;
2241 rtx op0, op1;
2242
2243 if (n < POWI_TABLE_SIZE)
2244 {
2245 if (cache[n])
2246 return cache[n];
2247
2248 target = gen_reg_rtx (mode);
2249 cache[n] = target;
2250
2251 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2252 op1 = expand_powi_1 (mode, powi_table[n], cache);
2253 }
2254 else if (n & 1)
2255 {
2256 target = gen_reg_rtx (mode);
2257 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2258 op0 = expand_powi_1 (mode, n - digit, cache);
2259 op1 = expand_powi_1 (mode, digit, cache);
2260 }
2261 else
2262 {
2263 target = gen_reg_rtx (mode);
2264 op0 = expand_powi_1 (mode, n >> 1, cache);
2265 op1 = op0;
2266 }
2267
2268 result = expand_mult (mode, op0, op1, target, 0);
2269 if (result != target)
2270 emit_move_insn (target, result);
2271 return target;
2272 }
2273
2274 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2275 floating point operand in mode MODE, and N is the exponent. This
2276 function needs to be kept in sync with powi_cost above. */
2277
2278 static rtx
2279 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2280 {
2281 unsigned HOST_WIDE_INT val;
2282 rtx cache[POWI_TABLE_SIZE];
2283 rtx result;
2284
2285 if (n == 0)
2286 return CONST1_RTX (mode);
2287
2288 val = (n < 0) ? -n : n;
2289
2290 memset (cache, 0, sizeof (cache));
2291 cache[1] = x;
2292
2293 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2294
2295 /* If the original exponent was negative, reciprocate the result. */
2296 if (n < 0)
2297 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2298 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2299
2300 return result;
2301 }
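
/* A minimal sketch of the same scheme, computing the value directly in
   double rather than emitting RTL. It is illustrative only (the names
   powi_sketch and powi_sketch_1 are invented for this example) and
   mirrors expand_powi/expand_powi_1 above, relying on the powi_table
   array and the POWI_TABLE_SIZE and POWI_WINDOW_SIZE macros defined
   earlier:

	static double
	powi_sketch_1 (unsigned HOST_WIDE_INT n, double *cache, bool *cached)
	{
	  if (n < POWI_TABLE_SIZE)
	    {
	      if (cached[n])
		return cache[n];
	      cache[n] = powi_sketch_1 (n - powi_table[n], cache, cached)
			 * powi_sketch_1 (powi_table[n], cache, cached);
	      cached[n] = true;
	      return cache[n];
	    }
	  else if (n & 1)
	    {
	      unsigned HOST_WIDE_INT digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
	      return powi_sketch_1 (n - digit, cache, cached)
		     * powi_sketch_1 (digit, cache, cached);
	    }
	  else
	    {
	      double half = powi_sketch_1 (n >> 1, cache, cached);
	      return half * half;
	    }
	}

	static double
	powi_sketch (double x, HOST_WIDE_INT n)
	{
	  double cache[POWI_TABLE_SIZE];
	  bool cached[POWI_TABLE_SIZE] = { false };
	  double result;

	  if (n == 0)
	    return 1.0;
	  cache[1] = x;
	  cached[1] = true;
	  result = powi_sketch_1 ((n < 0) ? -n : n, cache, cached);
	  return (n < 0) ? 1.0 / result : result;
	}
*/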
2302
2303 /* Expand a call to the pow built-in mathematical function. Return 0 if
2304 a normal call should be emitted rather than expanding the function
2305 in-line. EXP is the expression that is a call to the builtin
2306 function; if convenient, the result should be placed in TARGET. */
2307
2308 static rtx
2309 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2310 {
2311 tree arglist = TREE_OPERAND (exp, 1);
2312 tree arg0, arg1;
2313
2314 if (! validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2315 return 0;
2316
2317 arg0 = TREE_VALUE (arglist);
2318 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
2319
2320 if (TREE_CODE (arg1) == REAL_CST
2321 && ! TREE_CONSTANT_OVERFLOW (arg1))
2322 {
2323 REAL_VALUE_TYPE cint;
2324 REAL_VALUE_TYPE c;
2325 HOST_WIDE_INT n;
2326
2327 c = TREE_REAL_CST (arg1);
2328 n = real_to_integer (&c);
2329 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2330 if (real_identical (&c, &cint))
2331 {
2332 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
2333 Otherwise, check the number of multiplications required.
2334 Note that pow never sets errno for an integer exponent. */
2335 if ((n >= -1 && n <= 2)
2336 || (flag_unsafe_math_optimizations
2337 && ! optimize_size
2338 && powi_cost (n) <= POWI_MAX_MULTS))
2339 {
2340 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2341 rtx op = expand_expr (arg0, subtarget, VOIDmode, 0);
2342 op = force_reg (mode, op);
2343 return expand_powi (op, mode, n);
2344 }
2345 }
2346 }
2347
2348 if (! flag_unsafe_math_optimizations)
2349 return NULL_RTX;
2350 return expand_builtin_mathfn_2 (exp, target, subtarget);
2351 }
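
/* For example, pow (x, 2.0) reaches the constant-exponent path above and
   is expanded to x * x (and pow (x, -1.0) to 1.0 / x), since for the
   exponents -1, 0, 1 and 2 expand_powi is exact and pow never sets errno
   for an integer exponent. A larger integral exponent such as
   pow (x, 12.0) is expanded into multiplications only with
   -funsafe-math-optimizations and not when optimizing for size, because
   repeated multiplication may round differently from the library pow. */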
2352
2353 /* Expand a call to the powi built-in mathematical function. Return 0 if
2354 a normal call should be emitted rather than expanding the function
2355 in-line. EXP is the expression that is a call to the builtin
2356 function; if convenient, the result should be placed in TARGET. */
2357
2358 static rtx
2359 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
2360 {
2361 tree arglist = TREE_OPERAND (exp, 1);
2362 tree arg0, arg1;
2363 rtx op0, op1;
2364 enum machine_mode mode;
2365
2366 if (! validate_arglist (arglist, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2367 return 0;
2368
2369 arg0 = TREE_VALUE (arglist);
2370 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
2371 mode = TYPE_MODE (TREE_TYPE (exp));
2372
2373 /* Handle constant power. */
2374
2375 if (TREE_CODE (arg1) == INTEGER_CST
2376 && ! TREE_CONSTANT_OVERFLOW (arg1))
2377 {
2378 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
2379
2380 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
2381 Otherwise, check the number of multiplications required. */
2382 if ((TREE_INT_CST_HIGH (arg1) == 0
2383 || TREE_INT_CST_HIGH (arg1) == -1)
2384 && ((n >= -1 && n <= 2)
2385 || (! optimize_size
2386 && powi_cost (n) <= POWI_MAX_MULTS)))
2387 {
2388 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
2389 op0 = force_reg (mode, op0);
2390 return expand_powi (op0, mode, n);
2391 }
2392 }
2393
2394 /* Emit a libcall to libgcc. */
2395
2396 if (target == NULL_RTX)
2397 target = gen_reg_rtx (mode);
2398
2399 op0 = expand_expr (arg0, subtarget, mode, 0);
2400 if (GET_MODE (op0) != mode)
2401 op0 = convert_to_mode (mode, op0, 0);
2402 op1 = expand_expr (arg1, 0, word_mode, 0);
2403 if (GET_MODE (op1) != word_mode)
2404 op1 = convert_to_mode (word_mode, op1, 0);
2405
2406 target = emit_library_call_value (powi_optab->handlers[(int) mode].libfunc,
2407 target, LCT_CONST_MAKE_BLOCK, mode, 2,
2408 op0, mode, op1, word_mode);
2409
2410 return target;
2411 }
2412
2413 /* Expand expression EXP, which is a call to the strlen builtin. Return 0
2414 if we failed; the caller should emit a normal call. Otherwise
2415 try to get the result in TARGET, if convenient. */
2416
2417 static rtx
2418 expand_builtin_strlen (tree arglist, rtx target,
2419 enum machine_mode target_mode)
2420 {
2421 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
2422 return 0;
2423 else
2424 {
2425 rtx pat;
2426 tree len, src = TREE_VALUE (arglist);
2427 rtx result, src_reg, char_rtx, before_strlen;
2428 enum machine_mode insn_mode = target_mode, char_mode;
2429 enum insn_code icode = CODE_FOR_nothing;
2430 int align;
2431
2432 /* If the length can be computed at compile-time, return it. */
2433 len = c_strlen (src, 0);
2434 if (len)
2435 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2436
2437 /* If the length can be computed at compile-time and is a constant
2438 integer, but there are side-effects in src, evaluate
2439 src for side-effects, then return len.
2440 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2441 can be optimized into: i++; x = 3; */
2442 len = c_strlen (src, 1);
2443 if (len && TREE_CODE (len) == INTEGER_CST)
2444 {
2445 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2446 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2447 }
2448
2449 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2450
2451 /* If SRC is not a pointer type, don't do this operation inline. */
2452 if (align == 0)
2453 return 0;
2454
2455 /* Bail out if we can't compute strlen in the right mode. */
2456 while (insn_mode != VOIDmode)
2457 {
2458 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
2459 if (icode != CODE_FOR_nothing)
2460 break;
2461
2462 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2463 }
2464 if (insn_mode == VOIDmode)
2465 return 0;
2466
2467 /* Make a place to write the result of the instruction. */
2468 result = target;
2469 if (! (result != 0
2470 && REG_P (result)
2471 && GET_MODE (result) == insn_mode
2472 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
2473 result = gen_reg_rtx (insn_mode);
2474
2475 /* Make a place to hold the source address. We will not expand
2476 the actual source until we are sure that the expansion will
2477 not fail -- there are trees that cannot be expanded twice. */
2478 src_reg = gen_reg_rtx (Pmode);
2479
2480 /* Mark the beginning of the strlen sequence so we can emit the
2481 source operand later. */
2482 before_strlen = get_last_insn ();
2483
2484 char_rtx = const0_rtx;
2485 char_mode = insn_data[(int) icode].operand[2].mode;
2486 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
2487 char_mode))
2488 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
2489
2490 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
2491 char_rtx, GEN_INT (align));
2492 if (! pat)
2493 return 0;
2494 emit_insn (pat);
2495
2496 /* Now that we are assured of success, expand the source. */
2497 start_sequence ();
2498 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
2499 if (pat != src_reg)
2500 emit_move_insn (src_reg, pat);
2501 pat = get_insns ();
2502 end_sequence ();
2503
2504 if (before_strlen)
2505 emit_insn_after (pat, before_strlen);
2506 else
2507 emit_insn_before (pat, get_insns ());
2508
2509 /* Return the value in the proper mode for this function. */
2510 if (GET_MODE (result) == target_mode)
2511 target = result;
2512 else if (target != 0)
2513 convert_move (target, result, 0);
2514 else
2515 target = convert_to_mode (target_mode, result, 0);
2516
2517 return target;
2518 }
2519 }
2520
2521 /* Expand a call to the strstr builtin. Return 0 if we failed; the
2522 caller should emit a normal call. Otherwise try to get the result
2523 in TARGET, if convenient (and in mode MODE if that's convenient). */
2524
2525 static rtx
2526 expand_builtin_strstr (tree arglist, rtx target, enum machine_mode mode)
2527 {
2528 if (validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2529 {
2530 tree result = fold_builtin_strstr (arglist);
2531 if (result)
2532 return expand_expr (result, target, mode, EXPAND_NORMAL);
2533 }
2534 return 0;
2535 }
2536
2537 /* Expand a call to the strchr builtin. Return 0 if we failed; the
2538 caller should emit a normal call. Otherwise try to get the result
2539 in TARGET, if convenient (and in mode MODE if that's convenient). */
2540
2541 static rtx
2542 expand_builtin_strchr (tree arglist, rtx target, enum machine_mode mode)
2543 {
2544 if (validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2545 {
2546 tree result = fold_builtin_strchr (arglist);
2547 if (result)
2548 return expand_expr (result, target, mode, EXPAND_NORMAL);
2549
2550 /* FIXME: Should use strchrM optab so that ports can optimize this. */
2551 }
2552 return 0;
2553 }
2554
2555 /* Expand a call to the strrchr builtin. Return 0 if we failed; the
2556 caller should emit a normal call. Otherwise try to get the result
2557 in TARGET, if convenient (and in mode MODE if that's convenient). */
2558
2559 static rtx
2560 expand_builtin_strrchr (tree arglist, rtx target, enum machine_mode mode)
2561 {
2562 if (validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2563 {
2564 tree result = fold_builtin_strrchr (arglist);
2565 if (result)
2566 return expand_expr (result, target, mode, EXPAND_NORMAL);
2567 }
2568 return 0;
2569 }
2570
2571 /* Expand a call to the strpbrk builtin. Return 0 if we failed; the
2572 caller should emit a normal call. Otherwise try to get the result
2573 in TARGET, if convenient (and in mode MODE if that's convenient). */
2574
2575 static rtx
2576 expand_builtin_strpbrk (tree arglist, rtx target, enum machine_mode mode)
2577 {
2578 if (validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2579 {
2580 tree result = fold_builtin_strpbrk (arglist);
2581 if (result)
2582 return expand_expr (result, target, mode, EXPAND_NORMAL);
2583 }
2584 return 0;
2585 }
2586
2587 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2588 bytes from constant string DATA + OFFSET and return it as target
2589 constant. */
2590
2591 static rtx
2592 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2593 enum machine_mode mode)
2594 {
2595 const char *str = (const char *) data;
2596
2597 gcc_assert (offset >= 0
2598 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2599 <= strlen (str) + 1));
2600
2601 return c_readstr (str + offset, mode);
2602 }
2603
2604 /* Expand a call to the memcpy builtin, with arguments in ARGLIST.
2605 Return 0 if we failed; the caller should emit a normal call.
2606 Otherwise try to get the result in TARGET, if convenient (and in
2607 mode MODE if that's convenient). */
2608 static rtx
2609 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
2610 {
2611 tree arglist = TREE_OPERAND (exp, 1);
2612 if (!validate_arglist (arglist,
2613 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2614 return 0;
2615 else
2616 {
2617 tree dest = TREE_VALUE (arglist);
2618 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2619 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2620 const char *src_str;
2621 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2622 unsigned int dest_align
2623 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2624 rtx dest_mem, src_mem, dest_addr, len_rtx;
2625 tree result = fold_builtin_memcpy (exp);
2626
2627 if (result)
2628 return expand_expr (result, target, mode, EXPAND_NORMAL);
2629
2630 /* If DEST is not a pointer type, call the normal function. */
2631 if (dest_align == 0)
2632 return 0;
2633
2634 /* If SRC is not a pointer type, don't do this
2635 operation in-line. */
2636 if (src_align == 0)
2637 return 0;
2638
2639 dest_mem = get_memory_rtx (dest);
2640 set_mem_align (dest_mem, dest_align);
2641 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2642 src_str = c_getstr (src);
2643
2644 /* If SRC is a string constant and block move would be done
2645 by pieces, we can avoid loading the string from memory
2646 and only store the computed constants. */
2647 if (src_str
2648 && GET_CODE (len_rtx) == CONST_INT
2649 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2650 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2651 (void *) src_str, dest_align))
2652 {
2653 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2654 builtin_memcpy_read_str,
2655 (void *) src_str, dest_align, 0);
2656 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2657 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2658 return dest_mem;
2659 }
2660
2661 src_mem = get_memory_rtx (src);
2662 set_mem_align (src_mem, src_align);
2663
2664 /* Copy word part most expediently. */
2665 dest_addr = emit_block_move (dest_mem, src_mem, len_rtx,
2666 BLOCK_OP_NORMAL);
2667
2668 if (dest_addr == 0)
2669 {
2670 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2671 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2672 }
2673 return dest_addr;
2674 }
2675 }
2676
2677 /* Expand a call to the mempcpy builtin, with arguments in ARGLIST.
2678 Return 0 if we failed; the caller should emit a normal call.
2679 Otherwise try to get the result in TARGET, if convenient (and in
2680 mode MODE if that's convenient). If ENDP is 0 return the
2681 destination pointer, if ENDP is 1 return the end pointer ala
2682 mempcpy, and if ENDP is 2 return the end pointer minus one ala
2683 stpcpy. */
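
/* For instance, when 4 bytes (the string "abc" plus its NUL) are copied
   to DEST: with ENDP == 0 the expansion returns DEST (memcpy style),
   with ENDP == 1 it returns DEST + 4 (mempcpy style), and with
   ENDP == 2 it returns DEST + 3, the address of the copied NUL
   (stpcpy style). */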
2684
2685 static rtx
2686 expand_builtin_mempcpy (tree arglist, tree type, rtx target, enum machine_mode mode,
2687 int endp)
2688 {
2689 if (!validate_arglist (arglist,
2690 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2691 return 0;
2692 /* If return value is ignored, transform mempcpy into memcpy. */
2693 else if (target == const0_rtx)
2694 {
2695 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2696
2697 if (!fn)
2698 return 0;
2699
2700 return expand_expr (build_function_call_expr (fn, arglist),
2701 target, mode, EXPAND_NORMAL);
2702 }
2703 else
2704 {
2705 tree dest = TREE_VALUE (arglist);
2706 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2707 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2708 const char *src_str;
2709 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2710 unsigned int dest_align
2711 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2712 rtx dest_mem, src_mem, len_rtx;
2713 tree result = fold_builtin_mempcpy (arglist, type, endp);
2714
2715 if (result)
2716 return expand_expr (result, target, mode, EXPAND_NORMAL);
2717
2718 /* If either SRC or DEST is not a pointer type, don't do this
2719 operation in-line. */
2720 if (dest_align == 0 || src_align == 0)
2721 return 0;
2722
2723 /* If LEN is not constant, call the normal function. */
2724 if (! host_integerp (len, 1))
2725 return 0;
2726
2727 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2728 src_str = c_getstr (src);
2729
2730 /* If SRC is a string constant and block move would be done
2731 by pieces, we can avoid loading the string from memory
2732 and only store the computed constants. */
2733 if (src_str
2734 && GET_CODE (len_rtx) == CONST_INT
2735 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2736 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2737 (void *) src_str, dest_align))
2738 {
2739 dest_mem = get_memory_rtx (dest);
2740 set_mem_align (dest_mem, dest_align);
2741 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2742 builtin_memcpy_read_str,
2743 (void *) src_str, dest_align, endp);
2744 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2745 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2746 return dest_mem;
2747 }
2748
2749 if (GET_CODE (len_rtx) == CONST_INT
2750 && can_move_by_pieces (INTVAL (len_rtx),
2751 MIN (dest_align, src_align)))
2752 {
2753 dest_mem = get_memory_rtx (dest);
2754 set_mem_align (dest_mem, dest_align);
2755 src_mem = get_memory_rtx (src);
2756 set_mem_align (src_mem, src_align);
2757 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
2758 MIN (dest_align, src_align), endp);
2759 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2760 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2761 return dest_mem;
2762 }
2763
2764 return 0;
2765 }
2766 }
2767
2768 /* Expand expression EXP, which is a call to the memmove builtin. Return 0
2769 if we failed; the caller should emit a normal call. */
2770
2771 static rtx
2772 expand_builtin_memmove (tree arglist, tree type, rtx target,
2773 enum machine_mode mode)
2774 {
2775 if (!validate_arglist (arglist,
2776 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2777 return 0;
2778 else
2779 {
2780 tree dest = TREE_VALUE (arglist);
2781 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2782 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2783
2784 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2785 unsigned int dest_align
2786 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2787 tree result = fold_builtin_memmove (arglist, type);
2788
2789 if (result)
2790 return expand_expr (result, target, mode, EXPAND_NORMAL);
2791
2792 /* If DEST is not a pointer type, call the normal function. */
2793 if (dest_align == 0)
2794 return 0;
2795
2796 /* If SRC is not a pointer type, don't do this
2797 operation in-line. */
2798 if (src_align == 0)
2799 return 0;
2800
2801 /* If src is categorized for a readonly section we can use
2802 normal memcpy. */
2803 if (readonly_data_expr (src))
2804 {
2805 tree const fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2806 if (!fn)
2807 return 0;
2808 return expand_expr (build_function_call_expr (fn, arglist),
2809 target, mode, EXPAND_NORMAL);
2810 }
2811
2812 /* If length is 1 and we can expand memcpy call inline,
2813 it is ok to use memcpy as well. */
2814 if (integer_onep (len))
2815 {
2816 rtx ret = expand_builtin_mempcpy (arglist, type, target, mode,
2817 /*endp=*/0);
2818 if (ret)
2819 return ret;
2820 }
2821
2822 /* Otherwise, call the normal function. */
2823 return 0;
2824 }
2825 }
2826
2827 /* Expand expression EXP, which is a call to the bcopy builtin. Return 0
2828 if we failed; the caller should emit a normal call. */
2829
2830 static rtx
2831 expand_builtin_bcopy (tree arglist, tree type)
2832 {
2833 tree src, dest, size, newarglist;
2834
2835 if (!validate_arglist (arglist,
2836 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2837 return NULL_RTX;
2838
2839 src = TREE_VALUE (arglist);
2840 dest = TREE_VALUE (TREE_CHAIN (arglist));
2841 size = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2842
2843 /* New argument list transforming bcopy(ptr x, ptr y, int z) to
2844 memmove(ptr y, ptr x, size_t z). This is done this way
2845 so that if it isn't expanded inline, we fall back to
2846 calling bcopy instead of memmove. */
2847
2848 newarglist = build_tree_list (NULL_TREE, fold_convert (sizetype, size));
2849 newarglist = tree_cons (NULL_TREE, src, newarglist);
2850 newarglist = tree_cons (NULL_TREE, dest, newarglist);
2851
2852 return expand_builtin_memmove (newarglist, type, const0_rtx, VOIDmode);
2853 }
2854
2855 #ifndef HAVE_movstr
2856 # define HAVE_movstr 0
2857 # define CODE_FOR_movstr CODE_FOR_nothing
2858 #endif
2859
2860 /* Expand into a movstr instruction, if one is available. Return 0 if
2861 we failed; the caller should emit a normal call. Otherwise try to
2862 get the result in TARGET, if convenient. If ENDP is 0 return the
2863 destination pointer, if ENDP is 1 return the end pointer ala
2864 mempcpy, and if ENDP is 2 return the end pointer minus one ala
2865 stpcpy. */
2866
2867 static rtx
2868 expand_movstr (tree dest, tree src, rtx target, int endp)
2869 {
2870 rtx end;
2871 rtx dest_mem;
2872 rtx src_mem;
2873 rtx insn;
2874 const struct insn_data * data;
2875
2876 if (!HAVE_movstr)
2877 return 0;
2878
2879 dest_mem = get_memory_rtx (dest);
2880 src_mem = get_memory_rtx (src);
2881 if (!endp)
2882 {
2883 target = force_reg (Pmode, XEXP (dest_mem, 0));
2884 dest_mem = replace_equiv_address (dest_mem, target);
2885 end = gen_reg_rtx (Pmode);
2886 }
2887 else
2888 {
2889 if (target == 0 || target == const0_rtx)
2890 {
2891 end = gen_reg_rtx (Pmode);
2892 if (target == 0)
2893 target = end;
2894 }
2895 else
2896 end = target;
2897 }
2898
2899 data = insn_data + CODE_FOR_movstr;
2900
2901 if (data->operand[0].mode != VOIDmode)
2902 end = gen_lowpart (data->operand[0].mode, end);
2903
2904 insn = data->genfun (end, dest_mem, src_mem);
2905
2906 gcc_assert (insn);
2907
2908 emit_insn (insn);
2909
2910 /* movstr is supposed to set end to the address of the NUL
2911 terminator. If the caller requested a mempcpy-like return value,
2912 adjust it. */
2913 if (endp == 1 && target != const0_rtx)
2914 {
2915 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
2916 emit_move_insn (target, force_operand (tem, NULL_RTX));
2917 }
2918
2919 return target;
2920 }
2921
2922 /* Expand expression EXP, which is a call to the strcpy builtin. Return 0
2923 if we failed; the caller should emit a normal call. Otherwise try to get
2924 the result in TARGET, if convenient (and in mode MODE if that's
2925 convenient). */
2926
2927 static rtx
2928 expand_builtin_strcpy (tree exp, rtx target, enum machine_mode mode)
2929 {
2930 tree arglist = TREE_OPERAND (exp, 1);
2931 if (validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2932 {
2933 tree result = fold_builtin_strcpy (exp, 0);
2934 if (result)
2935 return expand_expr (result, target, mode, EXPAND_NORMAL);
2936
2937 return expand_movstr (TREE_VALUE (arglist),
2938 TREE_VALUE (TREE_CHAIN (arglist)),
2939 target, /*endp=*/0);
2940 }
2941 return 0;
2942 }
2943
2944 /* Expand a call to the stpcpy builtin, with arguments in ARGLIST.
2945 Return 0 if we failed; the caller should emit a normal call.
2946 Otherwise try to get the result in TARGET, if convenient (and in
2947 mode MODE if that's convenient). */
2948
2949 static rtx
2950 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
2951 {
2952 tree arglist = TREE_OPERAND (exp, 1);
2953 /* If return value is ignored, transform stpcpy into strcpy. */
2954 if (target == const0_rtx)
2955 {
2956 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
2957 if (!fn)
2958 return 0;
2959
2960 return expand_expr (build_function_call_expr (fn, arglist),
2961 target, mode, EXPAND_NORMAL);
2962 }
2963
2964 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2965 return 0;
2966 else
2967 {
2968 tree dst, src, len, lenp1;
2969 tree narglist;
2970 rtx ret;
2971
2972 /* Ensure we get an actual string whose length can be evaluated at
2973 compile-time, not an expression containing a string. This is
2974 because the latter will potentially produce pessimized code
2975 when used to produce the return value. */
2976 src = TREE_VALUE (TREE_CHAIN (arglist));
2977 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
2978 return expand_movstr (TREE_VALUE (arglist),
2979 TREE_VALUE (TREE_CHAIN (arglist)),
2980 target, /*endp=*/2);
2981
2982 dst = TREE_VALUE (arglist);
2983 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
2984 narglist = build_tree_list (NULL_TREE, lenp1);
2985 narglist = tree_cons (NULL_TREE, src, narglist);
2986 narglist = tree_cons (NULL_TREE, dst, narglist);
2987 ret = expand_builtin_mempcpy (narglist, TREE_TYPE (exp),
2988 target, mode, /*endp=*/2);
2989
2990 if (ret)
2991 return ret;
2992
2993 if (TREE_CODE (len) == INTEGER_CST)
2994 {
2995 rtx len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2996
2997 if (GET_CODE (len_rtx) == CONST_INT)
2998 {
2999 ret = expand_builtin_strcpy (exp, target, mode);
3000
3001 if (ret)
3002 {
3003 if (! target)
3004 {
3005 if (mode != VOIDmode)
3006 target = gen_reg_rtx (mode);
3007 else
3008 target = gen_reg_rtx (GET_MODE (ret));
3009 }
3010 if (GET_MODE (target) != GET_MODE (ret))
3011 ret = gen_lowpart (GET_MODE (target), ret);
3012
3013 ret = plus_constant (ret, INTVAL (len_rtx));
3014 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3015 gcc_assert (ret);
3016
3017 return target;
3018 }
3019 }
3020 }
3021
3022 return expand_movstr (TREE_VALUE (arglist),
3023 TREE_VALUE (TREE_CHAIN (arglist)),
3024 target, /*endp=*/2);
3025 }
3026 }
3027
3028 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3029 bytes from constant string DATA + OFFSET and return it as target
3030 constant. */
3031
3032 static rtx
3033 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3034 enum machine_mode mode)
3035 {
3036 const char *str = (const char *) data;
3037
3038 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3039 return const0_rtx;
3040
3041 return c_readstr (str + offset, mode);
3042 }
3043
3044 /* Expand expression EXP, which is a call to the strncpy builtin. Return 0
3045 if we failed; the caller should emit a normal call. */
3046
3047 static rtx
3048 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3049 {
3050 tree arglist = TREE_OPERAND (exp, 1);
3051 if (validate_arglist (arglist,
3052 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3053 {
3054 tree slen = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)), 1);
3055 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3056 tree result = fold_builtin_strncpy (exp, slen);
3057
3058 if (result)
3059 return expand_expr (result, target, mode, EXPAND_NORMAL);
3060
3061 /* We must be passed a constant len and src parameter. */
3062 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3063 return 0;
3064
3065 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3066
3067 /* We're required to pad with trailing zeros if the requested
3068 len is greater than strlen(s2)+1. In that case, try to
3069 use store_by_pieces; if that fails, punt. */
3070 if (tree_int_cst_lt (slen, len))
3071 {
3072 tree dest = TREE_VALUE (arglist);
3073 unsigned int dest_align
3074 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3075 const char *p = c_getstr (TREE_VALUE (TREE_CHAIN (arglist)));
3076 rtx dest_mem;
3077
3078 if (!p || dest_align == 0 || !host_integerp (len, 1)
3079 || !can_store_by_pieces (tree_low_cst (len, 1),
3080 builtin_strncpy_read_str,
3081 (void *) p, dest_align))
3082 return 0;
3083
3084 dest_mem = get_memory_rtx (dest);
3085 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3086 builtin_strncpy_read_str,
3087 (void *) p, dest_align, 0);
3088 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3089 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3090 return dest_mem;
3091 }
3092 }
3093 return 0;
3094 }
3095
3096 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3097 bytes from constant string DATA + OFFSET and return it as target
3098 constant. */
3099
3100 static rtx
3101 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3102 enum machine_mode mode)
3103 {
3104 const char *c = (const char *) data;
3105 char *p = alloca (GET_MODE_SIZE (mode));
3106
3107 memset (p, *c, GET_MODE_SIZE (mode));
3108
3109 return c_readstr (p, mode);
3110 }
3111
3112 /* Callback routine for store_by_pieces. Return the RTL of a register
3113 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3114 char value given in the RTL register data. For example, if mode is
3115 4 bytes wide, return the RTL for 0x01010101*data. */
3116
3117 static rtx
3118 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3119 enum machine_mode mode)
3120 {
3121 rtx target, coeff;
3122 size_t size;
3123 char *p;
3124
3125 size = GET_MODE_SIZE (mode);
3126 if (size == 1)
3127 return (rtx) data;
3128
3129 p = alloca (size);
3130 memset (p, 1, size);
3131 coeff = c_readstr (p, mode);
3132
3133 target = convert_to_mode (mode, (rtx) data, 1);
3134 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3135 return force_reg (mode, target);
3136 }
3137
3138 /* Expand expression EXP, which is a call to the memset builtin. Return 0
3139 if we failed; the caller should emit a normal call. Otherwise try to get
3140 the result in TARGET, if convenient (and in mode MODE if that's
3141 convenient). */
3142
3143 static rtx
3144 expand_builtin_memset (tree arglist, rtx target, enum machine_mode mode)
3145 {
3146 if (!validate_arglist (arglist,
3147 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3148 return 0;
3149 else
3150 {
3151 tree dest = TREE_VALUE (arglist);
3152 tree val = TREE_VALUE (TREE_CHAIN (arglist));
3153 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3154 char c;
3155
3156 unsigned int dest_align
3157 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3158 rtx dest_mem, dest_addr, len_rtx;
3159
3160 /* If DEST is not a pointer type, don't do this
3161 operation in-line. */
3162 if (dest_align == 0)
3163 return 0;
3164
3165 /* If the LEN parameter is zero, return DEST. */
3166 if (integer_zerop (len))
3167 {
3168 /* Evaluate and ignore VAL in case it has side-effects. */
3169 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3170 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3171 }
3172
3173 if (TREE_CODE (val) != INTEGER_CST)
3174 {
3175 rtx val_rtx;
3176
3177 if (!host_integerp (len, 1))
3178 return 0;
3179
3180 if (optimize_size && tree_low_cst (len, 1) > 1)
3181 return 0;
3182
3183 /* Assume that we can memset by pieces if we can store
3184 the coefficients by pieces (in the required modes).
3185 We can't pass builtin_memset_gen_str as that emits RTL. */
3186 c = 1;
3187 if (!can_store_by_pieces (tree_low_cst (len, 1),
3188 builtin_memset_read_str,
3189 &c, dest_align))
3190 return 0;
3191
3192 val = fold (build1 (CONVERT_EXPR, unsigned_char_type_node, val));
3193 val_rtx = expand_expr (val, NULL_RTX, VOIDmode, 0);
3194 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3195 val_rtx);
3196 dest_mem = get_memory_rtx (dest);
3197 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3198 builtin_memset_gen_str,
3199 val_rtx, dest_align, 0);
3200 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3201 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3202 return dest_mem;
3203 }
3204
3205 if (target_char_cast (val, &c))
3206 return 0;
3207
3208 if (c)
3209 {
3210 if (!host_integerp (len, 1))
3211 return 0;
3212 if (!can_store_by_pieces (tree_low_cst (len, 1),
3213 builtin_memset_read_str, &c,
3214 dest_align))
3215 return 0;
3216
3217 dest_mem = get_memory_rtx (dest);
3218 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3219 builtin_memset_read_str,
3220 &c, dest_align, 0);
3221 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3222 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3223 return dest_mem;
3224 }
3225
3226 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3227
3228 dest_mem = get_memory_rtx (dest);
3229 set_mem_align (dest_mem, dest_align);
3230 dest_addr = clear_storage (dest_mem, len_rtx);
3231
3232 if (dest_addr == 0)
3233 {
3234 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3235 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3236 }
3237
3238 return dest_addr;
3239 }
3240 }
3241
3242 /* Expand expression EXP, which is a call to the bzero builtin. Return 0
3243 if we failed; the caller should emit a normal call. */
3244
3245 static rtx
3246 expand_builtin_bzero (tree arglist)
3247 {
3248 tree dest, size, newarglist;
3249
3250 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3251 return NULL_RTX;
3252
3253 dest = TREE_VALUE (arglist);
3254 size = TREE_VALUE (TREE_CHAIN (arglist));
3255
3256 /* New argument list transforming bzero(ptr x, int y) to
3257 memset(ptr x, int 0, size_t y). This is done this way
3258 so that if it isn't expanded inline, we fall back to
3259 calling bzero instead of memset. */
3260
3261 newarglist = build_tree_list (NULL_TREE, fold_convert (sizetype, size));
3262 newarglist = tree_cons (NULL_TREE, integer_zero_node, newarglist);
3263 newarglist = tree_cons (NULL_TREE, dest, newarglist);
3264
3265 return expand_builtin_memset (newarglist, const0_rtx, VOIDmode);
3266 }
3267
3268 /* Expand expression EXP, which is a call to the memcmp built-in function.
3269 ARGLIST is the argument list for this call. Return 0 if we failed and the
3270 caller should emit a normal call; otherwise try to get the result in
3271 TARGET, if convenient (and in mode MODE, if that's convenient). */
3272
3273 static rtx
3274 expand_builtin_memcmp (tree exp ATTRIBUTE_UNUSED, tree arglist, rtx target,
3275 enum machine_mode mode)
3276 {
3277 if (!validate_arglist (arglist,
3278 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3279 return 0;
3280 else
3281 {
3282 tree result = fold_builtin_memcmp (arglist);
3283 if (result)
3284 return expand_expr (result, target, mode, EXPAND_NORMAL);
3285 }
3286
3287 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrsi
3288 {
3289 tree arg1 = TREE_VALUE (arglist);
3290 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3291 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3292 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3293 rtx result;
3294 rtx insn;
3295
3296 int arg1_align
3297 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3298 int arg2_align
3299 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3300 enum machine_mode insn_mode;
3301
3302 #ifdef HAVE_cmpmemsi
3303 if (HAVE_cmpmemsi)
3304 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3305 else
3306 #endif
3307 #ifdef HAVE_cmpstrsi
3308 if (HAVE_cmpstrsi)
3309 insn_mode = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3310 else
3311 #endif
3312 return 0;
3313
3314 /* If we don't have POINTER_TYPE, call the function. */
3315 if (arg1_align == 0 || arg2_align == 0)
3316 return 0;
3317
3318 /* Make a place to write the result of the instruction. */
3319 result = target;
3320 if (! (result != 0
3321 && REG_P (result) && GET_MODE (result) == insn_mode
3322 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3323 result = gen_reg_rtx (insn_mode);
3324
3325 arg1_rtx = get_memory_rtx (arg1);
3326 arg2_rtx = get_memory_rtx (arg2);
3327 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3328
3329 /* Set MEM_SIZE as appropriate. */
3330 if (GET_CODE (arg3_rtx) == CONST_INT)
3331 {
3332 set_mem_size (arg1_rtx, arg3_rtx);
3333 set_mem_size (arg2_rtx, arg3_rtx);
3334 }
3335
3336 #ifdef HAVE_cmpmemsi
3337 if (HAVE_cmpmemsi)
3338 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3339 GEN_INT (MIN (arg1_align, arg2_align)));
3340 else
3341 #endif
3342 #ifdef HAVE_cmpstrsi
3343 if (HAVE_cmpstrsi)
3344 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3345 GEN_INT (MIN (arg1_align, arg2_align)));
3346 else
3347 #endif
3348 gcc_unreachable ();
3349
3350 if (insn)
3351 emit_insn (insn);
3352 else
3353 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
3354 TYPE_MODE (integer_type_node), 3,
3355 XEXP (arg1_rtx, 0), Pmode,
3356 XEXP (arg2_rtx, 0), Pmode,
3357 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3358 TYPE_UNSIGNED (sizetype)),
3359 TYPE_MODE (sizetype));
3360
3361 /* Return the value in the proper mode for this function. */
3362 mode = TYPE_MODE (TREE_TYPE (exp));
3363 if (GET_MODE (result) == mode)
3364 return result;
3365 else if (target != 0)
3366 {
3367 convert_move (target, result, 0);
3368 return target;
3369 }
3370 else
3371 return convert_to_mode (mode, result, 0);
3372 }
3373 #endif
3374
3375 return 0;
3376 }
3377
3378 /* Expand expression EXP, which is a call to the strcmp builtin. Return 0
3379 if we failed; the caller should emit a normal call. Otherwise try to get
3380 the result in TARGET, if convenient. */
3381
3382 static rtx
3383 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
3384 {
3385 tree arglist = TREE_OPERAND (exp, 1);
3386
3387 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3388 return 0;
3389 else
3390 {
3391 tree result = fold_builtin_strcmp (arglist);
3392 if (result)
3393 return expand_expr (result, target, mode, EXPAND_NORMAL);
3394 }
3395
3396 #ifdef HAVE_cmpstrsi
3397 if (HAVE_cmpstrsi)
3398 {
3399 tree arg1 = TREE_VALUE (arglist);
3400 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3401 tree len, len1, len2;
3402 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3403 rtx result, insn;
3404 tree fndecl;
3405
3406 int arg1_align
3407 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3408 int arg2_align
3409 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3410 enum machine_mode insn_mode
3411 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3412
3413 len1 = c_strlen (arg1, 1);
3414 len2 = c_strlen (arg2, 1);
3415
3416 if (len1)
3417 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3418 if (len2)
3419 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3420
3421 /* If we don't have a constant length for the first, use the length
3422 of the second, if we know it. We don't require a constant for
3423 this case; some cost analysis could be done if both are available
3424 but neither is constant. For now, assume they're equally cheap,
3425 unless one has side effects. If both strings have constant lengths,
3426 use the smaller. */
3427
3428 if (!len1)
3429 len = len2;
3430 else if (!len2)
3431 len = len1;
3432 else if (TREE_SIDE_EFFECTS (len1))
3433 len = len2;
3434 else if (TREE_SIDE_EFFECTS (len2))
3435 len = len1;
3436 else if (TREE_CODE (len1) != INTEGER_CST)
3437 len = len2;
3438 else if (TREE_CODE (len2) != INTEGER_CST)
3439 len = len1;
3440 else if (tree_int_cst_lt (len1, len2))
3441 len = len1;
3442 else
3443 len = len2;
3444
3445 /* If both arguments have side effects, we cannot optimize. */
3446 if (!len || TREE_SIDE_EFFECTS (len))
3447 return 0;
3448
3449 /* If we don't know the alignment of either argument, call the function. */
3450 if (arg1_align == 0 || arg2_align == 0)
3451 return 0;
3452
3453 /* Make a place to write the result of the instruction. */
3454 result = target;
3455 if (! (result != 0
3456 && REG_P (result) && GET_MODE (result) == insn_mode
3457 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3458 result = gen_reg_rtx (insn_mode);
3459
3460 /* Stabilize the arguments in case gen_cmpstrsi fails. */
3461 arg1 = builtin_save_expr (arg1);
3462 arg2 = builtin_save_expr (arg2);
3463
3464 arg1_rtx = get_memory_rtx (arg1);
3465 arg2_rtx = get_memory_rtx (arg2);
3466 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3467 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3468 GEN_INT (MIN (arg1_align, arg2_align)));
3469 if (insn)
3470 {
3471 emit_insn (insn);
3472
3473 /* Return the value in the proper mode for this function. */
3474 mode = TYPE_MODE (TREE_TYPE (exp));
3475 if (GET_MODE (result) == mode)
3476 return result;
3477 if (target == 0)
3478 return convert_to_mode (mode, result, 0);
3479 convert_move (target, result, 0);
3480 return target;
3481 }
3482
3483 /* Expand the library call ourselves using a stabilized argument
3484 list to avoid evaluating the function's arguments twice. */
3485 arglist = build_tree_list (NULL_TREE, arg2);
3486 arglist = tree_cons (NULL_TREE, arg1, arglist);
3487 fndecl = get_callee_fndecl (exp);
3488 exp = build_function_call_expr (fndecl, arglist);
3489 return expand_call (exp, target, target == const0_rtx);
3490 }
3491 #endif
3492 return 0;
3493 }
3494
3495 /* Expand expression EXP, which is a call to the strncmp builtin. Return 0
3496 if we failed, in which case the caller should emit a normal call; otherwise
3497 try to get the result in TARGET, if convenient. */
3498
3499 static rtx
3500 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
3501 {
3502 tree arglist = TREE_OPERAND (exp, 1);
3503
3504 if (!validate_arglist (arglist,
3505 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3506 return 0;
3507 else
3508 {
3509 tree result = fold_builtin_strncmp (arglist);
3510 if (result)
3511 return expand_expr (result, target, mode, EXPAND_NORMAL);
3512 }
3513
3514 /* If c_strlen can determine an expression for one of the string
3515 lengths, and it doesn't have side effects, then emit cmpstrsi
3516 using length MIN(strlen(string)+1, arg3). */
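/* Illustrative example (a sketch; S, "abcd" and N are hypothetical): for

       int r = __builtin_strncmp (s, "abcd", n);

   the constant length of "abcd" gives strlen+1 == 5, so cmpstrsi is emitted
   with length MIN (5, n) and the minimum known alignment of the arguments.  */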
3517 #ifdef HAVE_cmpstrsi
3518 if (HAVE_cmpstrsi)
3519 {
3520 tree arg1 = TREE_VALUE (arglist);
3521 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3522 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3523 tree len, len1, len2;
3524 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3525 rtx result, insn;
3526 tree fndecl;
3527
3528 int arg1_align
3529 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3530 int arg2_align
3531 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3532 enum machine_mode insn_mode
3533 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3534
3535 len1 = c_strlen (arg1, 1);
3536 len2 = c_strlen (arg2, 1);
3537
3538 if (len1)
3539 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3540 if (len2)
3541 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3542
3543 /* If we don't have a constant length for the first, use the length
3544 of the second, if we know it. We don't require a constant for
3545 this case; some cost analysis could be done if both are available
3546 but neither is constant. For now, assume they're equally cheap,
3547 unless one has side effects. If both strings have constant lengths,
3548 use the smaller. */
3549
3550 if (!len1)
3551 len = len2;
3552 else if (!len2)
3553 len = len1;
3554 else if (TREE_SIDE_EFFECTS (len1))
3555 len = len2;
3556 else if (TREE_SIDE_EFFECTS (len2))
3557 len = len1;
3558 else if (TREE_CODE (len1) != INTEGER_CST)
3559 len = len2;
3560 else if (TREE_CODE (len2) != INTEGER_CST)
3561 len = len1;
3562 else if (tree_int_cst_lt (len1, len2))
3563 len = len1;
3564 else
3565 len = len2;
3566
3567 /* If both arguments have side effects, we cannot optimize. */
3568 if (!len || TREE_SIDE_EFFECTS (len))
3569 return 0;
3570
3571 /* The actual new length parameter is MIN(len,arg3). */
3572 len = fold (build2 (MIN_EXPR, TREE_TYPE (len), len,
3573 fold_convert (TREE_TYPE (len), arg3)));
3574
3575 /* If we don't know the alignment of either argument, call the function. */
3576 if (arg1_align == 0 || arg2_align == 0)
3577 return 0;
3578
3579 /* Make a place to write the result of the instruction. */
3580 result = target;
3581 if (! (result != 0
3582 && REG_P (result) && GET_MODE (result) == insn_mode
3583 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3584 result = gen_reg_rtx (insn_mode);
3585
3586 /* Stabilize the arguments in case gen_cmpstrsi fails. */
3587 arg1 = builtin_save_expr (arg1);
3588 arg2 = builtin_save_expr (arg2);
3589 len = builtin_save_expr (len);
3590
3591 arg1_rtx = get_memory_rtx (arg1);
3592 arg2_rtx = get_memory_rtx (arg2);
3593 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3594 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3595 GEN_INT (MIN (arg1_align, arg2_align)));
3596 if (insn)
3597 {
3598 emit_insn (insn);
3599
3600 /* Return the value in the proper mode for this function. */
3601 mode = TYPE_MODE (TREE_TYPE (exp));
3602 if (GET_MODE (result) == mode)
3603 return result;
3604 if (target == 0)
3605 return convert_to_mode (mode, result, 0);
3606 convert_move (target, result, 0);
3607 return target;
3608 }
3609
3610 /* Expand the library call ourselves using a stabilized argument
3611 list to avoid evaluating the function's arguments twice. */
3612 arglist = build_tree_list (NULL_TREE, len);
3613 arglist = tree_cons (NULL_TREE, arg2, arglist);
3614 arglist = tree_cons (NULL_TREE, arg1, arglist);
3615 fndecl = get_callee_fndecl (exp);
3616 exp = build_function_call_expr (fndecl, arglist);
3617 return expand_call (exp, target, target == const0_rtx);
3618 }
3619 #endif
3620 return 0;
3621 }
3622
3623 /* Expand a call to the strcat builtin, with arguments in ARGLIST.
3624 Return 0 if we failed, in which case the caller should emit a normal
3625 call; otherwise try to get the result in TARGET, if convenient. */
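/* Illustrative example (a sketch; D and "hi" are hypothetical): when the
   source string is a known constant and we are not optimizing for size, the
   code below rewrites

       strcat (d, "hi");

   as mempcpy (d + strlen (d), "hi", 3) and returns the original D, since
   strcat's result is its first argument.  */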
3626
3627 static rtx
3628 expand_builtin_strcat (tree arglist, tree type, rtx target, enum machine_mode mode)
3629 {
3630 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3631 return 0;
3632 else
3633 {
3634 tree dst = TREE_VALUE (arglist),
3635 src = TREE_VALUE (TREE_CHAIN (arglist));
3636 const char *p = c_getstr (src);
3637
3638 if (p)
3639 {
3640 /* If the string length is zero, return the dst parameter. */
3641 if (*p == '\0')
3642 return expand_expr (dst, target, mode, EXPAND_NORMAL);
3643 else if (!optimize_size)
3644 {
3645 /* Otherwise if !optimize_size, see if we can store by
3646 pieces into (dst + strlen(dst)). */
3647 tree newdst, arglist,
3648 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
3649
3650 /* This is the length argument. */
3651 arglist = build_tree_list (NULL_TREE,
3652 fold (size_binop (PLUS_EXPR,
3653 c_strlen (src, 0),
3654 ssize_int (1))));
3655 /* Prepend src argument. */
3656 arglist = tree_cons (NULL_TREE, src, arglist);
3657
3658 /* We're going to use dst more than once. */
3659 dst = builtin_save_expr (dst);
3660
3661 /* Create strlen (dst). */
3662 newdst =
3663 fold (build_function_call_expr (strlen_fn,
3664 build_tree_list (NULL_TREE,
3665 dst)));
3666 /* Create (dst + strlen (dst)). */
3667 newdst = fold (build2 (PLUS_EXPR, TREE_TYPE (dst), dst, newdst));
3668
3669 /* Prepend the new dst argument. */
3670 arglist = tree_cons (NULL_TREE, newdst, arglist);
3671
3672 /* We don't want to get turned into a memcpy if the
3673 target is const0_rtx, i.e. when the return value
3674 isn't used. That would produce pessimized code, so
3675 pass in a target of zero; it should never actually be
3676 used. If this was successful, return the original
3677 dst, not the result of mempcpy. */
3678 if (expand_builtin_mempcpy (arglist, type, /*target=*/0, mode, /*endp=*/0))
3679 return expand_expr (dst, target, mode, EXPAND_NORMAL);
3680 else
3681 return 0;
3682 }
3683 }
3684
3685 return 0;
3686 }
3687 }
3688
3689 /* Expand a call to the strncat builtin, with arguments in ARGLIST.
3690 Return 0 if we failed, in which case the caller should emit a normal
3691 call; otherwise try to get the result in TARGET, if convenient. */
3692
3693 static rtx
3694 expand_builtin_strncat (tree arglist, rtx target, enum machine_mode mode)
3695 {
3696 if (validate_arglist (arglist,
3697 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3698 {
3699 tree result = fold_builtin_strncat (arglist);
3700 if (result)
3701 return expand_expr (result, target, mode, EXPAND_NORMAL);
3702 }
3703 return 0;
3704 }
3705
3706 /* Expand a call to the strspn builtin, with arguments in ARGLIST.
3707 Return 0 if we failed, in which case the caller should emit a normal
3708 call; otherwise try to get the result in TARGET, if convenient. */
3709
3710 static rtx
3711 expand_builtin_strspn (tree arglist, rtx target, enum machine_mode mode)
3712 {
3713 if (validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3714 {
3715 tree result = fold_builtin_strspn (arglist);
3716 if (result)
3717 return expand_expr (result, target, mode, EXPAND_NORMAL);
3718 }
3719 return 0;
3720 }
3721
3722 /* Expand a call to the strcspn builtin, with arguments in ARGLIST.
3723 Return 0 if we failed, in which case the caller should emit a normal
3724 call; otherwise try to get the result in TARGET, if convenient. */
3725
3726 static rtx
3727 expand_builtin_strcspn (tree arglist, rtx target, enum machine_mode mode)
3728 {
3729 if (validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3730 {
3731 tree result = fold_builtin_strcspn (arglist);
3732 if (result)
3733 return expand_expr (result, target, mode, EXPAND_NORMAL);
3734 }
3735 return 0;
3736 }
3737
3738 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
3739 if that's convenient. */
3740
3741 rtx
3742 expand_builtin_saveregs (void)
3743 {
3744 rtx val, seq;
3745
3746 /* Don't do __builtin_saveregs more than once in a function.
3747 Save the result of the first call and reuse it. */
3748 if (saveregs_value != 0)
3749 return saveregs_value;
3750
3751 /* When this function is called, it means that registers must be
3752 saved on entry to this function. So we migrate the call to the
3753 first insn of this function. */
3754
3755 start_sequence ();
3756
3757 /* Do whatever the machine needs done in this case. */
3758 val = targetm.calls.expand_builtin_saveregs ();
3759
3760 seq = get_insns ();
3761 end_sequence ();
3762
3763 saveregs_value = val;
3764
3765 /* Put the insns after the NOTE that starts the function. If this
3766 is inside a start_sequence, make the outer-level insn chain current, so
3767 the code is placed at the start of the function. */
3768 push_topmost_sequence ();
3769 emit_insn_after (seq, entry_of_function ());
3770 pop_topmost_sequence ();
3771
3772 return val;
3773 }
3774
3775 /* __builtin_args_info (N) returns word N of the arg space info
3776 for the current function. The number and meanings of words
3777 are controlled by the definition of CUMULATIVE_ARGS. */
3778
3779 static rtx
3780 expand_builtin_args_info (tree arglist)
3781 {
3782 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
3783 int *word_ptr = (int *) &current_function_args_info;
3784
3785 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
3786
3787 if (arglist != 0)
3788 {
3789 if (!host_integerp (TREE_VALUE (arglist), 0))
3790 error ("argument of %<__builtin_args_info%> must be constant");
3791 else
3792 {
3793 HOST_WIDE_INT wordnum = tree_low_cst (TREE_VALUE (arglist), 0);
3794
3795 if (wordnum < 0 || wordnum >= nwords)
3796 error ("argument of %<__builtin_args_info%> out of range");
3797 else
3798 return GEN_INT (word_ptr[wordnum]);
3799 }
3800 }
3801 else
3802 error ("missing argument in %<__builtin_args_info%>");
3803
3804 return const0_rtx;
3805 }
3806
3807 /* Expand a call to __builtin_next_arg. */
3808
3809 static rtx
3810 expand_builtin_next_arg (void)
3811 {
3812 /* Checking the arguments is already done in fold_builtin_next_arg,
3813 which must be called before this function. */
3814 return expand_binop (Pmode, add_optab,
3815 current_function_internal_arg_pointer,
3816 current_function_arg_offset_rtx,
3817 NULL_RTX, 0, OPTAB_LIB_WIDEN);
3818 }
3819
3820 /* Make it easier for the backends by protecting the valist argument
3821 from multiple evaluations. */
3822
3823 static tree
3824 stabilize_va_list (tree valist, int needs_lvalue)
3825 {
3826 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
3827 {
3828 if (TREE_SIDE_EFFECTS (valist))
3829 valist = save_expr (valist);
3830
3831 /* For this case, the backends will be expecting a pointer to
3832 TREE_TYPE (va_list_type_node), but it's possible we've
3833 actually been given an array (an actual va_list_type_node).
3834 So fix it. */
3835 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
3836 {
3837 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
3838 valist = build_fold_addr_expr_with_type (valist, p1);
3839 }
3840 }
3841 else
3842 {
3843 tree pt;
3844
3845 if (! needs_lvalue)
3846 {
3847 if (! TREE_SIDE_EFFECTS (valist))
3848 return valist;
3849
3850 pt = build_pointer_type (va_list_type_node);
3851 valist = fold (build1 (ADDR_EXPR, pt, valist));
3852 TREE_SIDE_EFFECTS (valist) = 1;
3853 }
3854
3855 if (TREE_SIDE_EFFECTS (valist))
3856 valist = save_expr (valist);
3857 valist = build_fold_indirect_ref (valist);
3858 }
3859
3860 return valist;
3861 }
3862
3863 /* The "standard" definition of va_list is void*. */
3864
3865 tree
3866 std_build_builtin_va_list (void)
3867 {
3868 return ptr_type_node;
3869 }
3870
3871 /* The "standard" implementation of va_start: just assign `nextarg' to
3872 the variable. */
3873
3874 void
3875 std_expand_builtin_va_start (tree valist, rtx nextarg)
3876 {
3877 tree t;
3878
3879 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist,
3880 make_tree (ptr_type_node, nextarg));
3881 TREE_SIDE_EFFECTS (t) = 1;
3882
3883 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3884 }
3885
3886 /* Expand ARGLIST, from a call to __builtin_va_start. */
3887
3888 static rtx
3889 expand_builtin_va_start (tree arglist)
3890 {
3891 rtx nextarg;
3892 tree chain, valist;
3893
3894 chain = TREE_CHAIN (arglist);
3895
3896 if (!chain)
3897 {
3898 error ("too few arguments to function %<va_start%>");
3899 return const0_rtx;
3900 }
3901
3902 if (fold_builtin_next_arg (chain))
3903 return const0_rtx;
3904
3905 nextarg = expand_builtin_next_arg ();
3906 valist = stabilize_va_list (TREE_VALUE (arglist), 1);
3907
3908 #ifdef EXPAND_BUILTIN_VA_START
3909 EXPAND_BUILTIN_VA_START (valist, nextarg);
3910 #else
3911 std_expand_builtin_va_start (valist, nextarg);
3912 #endif
3913
3914 return const0_rtx;
3915 }
3916
3917 /* The "standard" implementation of va_arg: read the value from the
3918 current (padded) address and increment by the (padded) size. */
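/* Illustrative sketch of the sequence built below (TYPE and AP stand for the
   requested type and the va_list object):

       tmp  = ap;                    -- aligned up to the argument boundary
                                        when that exceeds PARM_BOUNDARY
       addr = tmp;                   -- plus a padding offset for small args
                                        when PAD_VARARGS_DOWN
       ap   = tmp + rounded_size;
       ...  = *(TYPE *) addr;        -- with an extra dereference when the
                                        argument is passed by reference  */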
3919
3920 tree
3921 std_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
3922 {
3923 tree addr, t, type_size, rounded_size, valist_tmp;
3924 unsigned HOST_WIDE_INT align, boundary;
3925 bool indirect;
3926
3927 #ifdef ARGS_GROW_DOWNWARD
3928 /* All of the alignment and movement below is for args-grow-up machines.
3929 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
3930 implement their own specialized gimplify_va_arg_expr routines. */
3931 gcc_unreachable ();
3932 #endif
3933
3934 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
3935 if (indirect)
3936 type = build_pointer_type (type);
3937
3938 align = PARM_BOUNDARY / BITS_PER_UNIT;
3939 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type) / BITS_PER_UNIT;
3940
3941 /* Hoist the valist value into a temporary for the moment. */
3942 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
3943
3944 /* The va_list pointer is aligned to PARM_BOUNDARY. If the argument
3945 actually requires greater alignment, we must perform dynamic alignment. */
3946 if (boundary > align)
3947 {
3948 t = fold_convert (TREE_TYPE (valist), size_int (boundary - 1));
3949 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
3950 build2 (PLUS_EXPR, TREE_TYPE (valist), valist_tmp, t));
3951 gimplify_and_add (t, pre_p);
3952
3953 t = fold_convert (TREE_TYPE (valist), size_int (-boundary));
3954 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
3955 build2 (BIT_AND_EXPR, TREE_TYPE (valist), valist_tmp, t));
3956 gimplify_and_add (t, pre_p);
3957 }
3958 else
3959 boundary = align;
3960
3961 /* If the actual alignment is less than the alignment of the type,
3962 adjust the type accordingly so that we don't assume strict alignment
3963 when dereferencing the pointer. */
3964 boundary *= BITS_PER_UNIT;
3965 if (boundary < TYPE_ALIGN (type))
3966 {
3967 type = build_variant_type_copy (type);
3968 TYPE_ALIGN (type) = boundary;
3969 }
3970
3971 /* Compute the rounded size of the type. */
3972 type_size = size_in_bytes (type);
3973 rounded_size = round_up (type_size, align);
3974
3975 /* Reduce rounded_size so it's sharable with the postqueue. */
3976 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
3977
3978 /* Get AP. */
3979 addr = valist_tmp;
3980 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
3981 {
3982 /* Small args are padded downward. */
3983 t = fold (build2 (GT_EXPR, sizetype, rounded_size, size_int (align)));
3984 t = fold (build3 (COND_EXPR, sizetype, t, size_zero_node,
3985 size_binop (MINUS_EXPR, rounded_size, type_size)));
3986 t = fold_convert (TREE_TYPE (addr), t);
3987 addr = fold (build2 (PLUS_EXPR, TREE_TYPE (addr), addr, t));
3988 }
3989
3990 /* Compute new value for AP. */
3991 t = fold_convert (TREE_TYPE (valist), rounded_size);
3992 t = build2 (PLUS_EXPR, TREE_TYPE (valist), valist_tmp, t);
3993 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
3994 gimplify_and_add (t, pre_p);
3995
3996 addr = fold_convert (build_pointer_type (type), addr);
3997
3998 if (indirect)
3999 addr = build_va_arg_indirect_ref (addr);
4000
4001 return build_va_arg_indirect_ref (addr);
4002 }
4003
4004 /* Build an indirect-ref expression around the given tree ADDR, which represents a
4005 piece of a va_arg() expansion. */
4006 tree
4007 build_va_arg_indirect_ref (tree addr)
4008 {
4009 addr = build_fold_indirect_ref (addr);
4010
4011 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4012 mf_mark (addr);
4013
4014 return addr;
4015 }
4016
4017 /* Return a dummy expression of type TYPE in order to keep going after an
4018 error. */
4019
4020 static tree
4021 dummy_object (tree type)
4022 {
4023 tree t = convert (build_pointer_type (type), null_pointer_node);
4024 return build1 (INDIRECT_REF, type, t);
4025 }
4026
4027 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4028 builtin function, but a very special sort of operator. */
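/* Illustrative example (assumes a target where short is narrower than int):
   va_arg (ap, short) requests a type that is promoted to int when passed
   through "...", so the code below warns, suggests passing int to va_arg
   instead, and substitutes a call to __builtin_trap for the access, since
   the behavior is undefined.  */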
4029
4030 enum gimplify_status
4031 gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
4032 {
4033 tree promoted_type, want_va_type, have_va_type;
4034 tree valist = TREE_OPERAND (*expr_p, 0);
4035 tree type = TREE_TYPE (*expr_p);
4036 tree t;
4037
4038 /* Verify that valist is of the proper type. */
4039 want_va_type = va_list_type_node;
4040 have_va_type = TREE_TYPE (valist);
4041
4042 if (have_va_type == error_mark_node)
4043 return GS_ERROR;
4044
4045 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
4046 {
4047 /* If va_list is an array type, the argument may have decayed
4048 to a pointer type, e.g. by being passed to another function.
4049 In that case, unwrap both types so that we can compare the
4050 underlying records. */
4051 if (TREE_CODE (have_va_type) == ARRAY_TYPE
4052 || POINTER_TYPE_P (have_va_type))
4053 {
4054 want_va_type = TREE_TYPE (want_va_type);
4055 have_va_type = TREE_TYPE (have_va_type);
4056 }
4057 }
4058
4059 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
4060 {
4061 error ("first argument to %<va_arg%> not of type %<va_list%>");
4062 return GS_ERROR;
4063 }
4064
4065 /* Generate a diagnostic for requesting data of a type that cannot
4066 be passed through `...' due to type promotion at the call site. */
4067 else if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4068 != type)
4069 {
4070 static bool gave_help;
4071
4072 /* Unfortunately, this is merely undefined, rather than a constraint
4073 violation, so we cannot make this an error. If this call is never
4074 executed, the program is still strictly conforming. */
4075 warning ("%qT is promoted to %qT when passed through %<...%>",
4076 type, promoted_type);
4077 if (! gave_help)
4078 {
4079 gave_help = true;
4080 warning ("(so you should pass %qT not %qT to %<va_arg%>)",
4081 promoted_type, type);
4082 }
4083
4084 /* We can, however, treat "undefined" any way we please.
4085 Call abort to encourage the user to fix the program. */
4086 inform ("if this code is reached, the program will abort");
4087 t = build_function_call_expr (implicit_built_in_decls[BUILT_IN_TRAP],
4088 NULL);
4089 append_to_statement_list (t, pre_p);
4090
4091 /* This is dead code, but go ahead and finish so that the
4092 mode of the result comes out right. */
4093 *expr_p = dummy_object (type);
4094 return GS_ALL_DONE;
4095 }
4096 else
4097 {
4098 /* Make it easier for the backends by protecting the valist argument
4099 from multiple evaluations. */
4100 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4101 {
4102 /* For this case, the backends will be expecting a pointer to
4103 TREE_TYPE (va_list_type_node), but it's possible we've
4104 actually been given an array (an actual va_list_type_node).
4105 So fix it. */
4106 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4107 {
4108 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4109 valist = build_fold_addr_expr_with_type (valist, p1);
4110 }
4111 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4112 }
4113 else
4114 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4115
4116 if (!targetm.gimplify_va_arg_expr)
4117 /* Once most targets are converted this should abort. */
4118 return GS_ALL_DONE;
4119
4120 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4121 return GS_OK;
4122 }
4123 }
4124
4125 /* Expand ARGLIST, from a call to __builtin_va_end. */
4126
4127 static rtx
4128 expand_builtin_va_end (tree arglist)
4129 {
4130 tree valist = TREE_VALUE (arglist);
4131
4132 /* Evaluate for side effects, if needed. I hate macros that don't
4133 do that. */
4134 if (TREE_SIDE_EFFECTS (valist))
4135 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4136
4137 return const0_rtx;
4138 }
4139
4140 /* Expand ARGLIST, from a call to __builtin_va_copy. We do this as a
4141 builtin rather than just as an assignment in stdarg.h because of the
4142 nastiness of array-type va_list types. */
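/* Illustrative sketch: when va_list is a plain pointer type, va_copy (d, s)
   is expanded as the simple assignment d = s; when va_list is an array type,
   it becomes a block move of TYPE_SIZE_UNIT (va_list_type_node) bytes from
   the source object to the destination object.  */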
4143
4144 static rtx
4145 expand_builtin_va_copy (tree arglist)
4146 {
4147 tree dst, src, t;
4148
4149 dst = TREE_VALUE (arglist);
4150 src = TREE_VALUE (TREE_CHAIN (arglist));
4151
4152 dst = stabilize_va_list (dst, 1);
4153 src = stabilize_va_list (src, 0);
4154
4155 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
4156 {
4157 t = build2 (MODIFY_EXPR, va_list_type_node, dst, src);
4158 TREE_SIDE_EFFECTS (t) = 1;
4159 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4160 }
4161 else
4162 {
4163 rtx dstb, srcb, size;
4164
4165 /* Evaluate to pointers. */
4166 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4167 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4168 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
4169 VOIDmode, EXPAND_NORMAL);
4170
4171 dstb = convert_memory_address (Pmode, dstb);
4172 srcb = convert_memory_address (Pmode, srcb);
4173
4174 /* "Dereference" to BLKmode memories. */
4175 dstb = gen_rtx_MEM (BLKmode, dstb);
4176 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4177 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
4178 srcb = gen_rtx_MEM (BLKmode, srcb);
4179 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4180 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
4181
4182 /* Copy. */
4183 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4184 }
4185
4186 return const0_rtx;
4187 }
4188
4189 /* Expand a call to one of the builtin functions __builtin_frame_address or
4190 __builtin_return_address. */
4191
4192 static rtx
4193 expand_builtin_frame_address (tree fndecl, tree arglist)
4194 {
4195 /* The argument must be a nonnegative integer constant.
4196 It counts the number of frames to scan up the stack.
4197 The value is the return address saved in that frame. */
4198 if (arglist == 0)
4199 /* Warning about missing arg was already issued. */
4200 return const0_rtx;
4201 else if (! host_integerp (TREE_VALUE (arglist), 1))
4202 {
4203 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4204 error ("invalid argument to %<__builtin_frame_address%>");
4205 else
4206 error ("invalid argument to %<__builtin_return_address%>");
4207 return const0_rtx;
4208 }
4209 else
4210 {
4211 rtx tem
4212 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4213 tree_low_cst (TREE_VALUE (arglist), 1),
4214 hard_frame_pointer_rtx);
4215
4216 /* Some ports cannot access arbitrary stack frames. */
4217 if (tem == NULL)
4218 {
4219 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4220 warning ("unsupported argument to %<__builtin_frame_address%>");
4221 else
4222 warning ("unsupported argument to %<__builtin_return_address%>");
4223 return const0_rtx;
4224 }
4225
4226 /* For __builtin_frame_address, return what we've got. */
4227 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4228 return tem;
4229
4230 if (!REG_P (tem)
4231 && ! CONSTANT_P (tem))
4232 tem = copy_to_mode_reg (Pmode, tem);
4233 return tem;
4234 }
4235 }
4236
4237 /* Expand a call to the alloca builtin, with arguments ARGLIST. Return 0 if
4238 we failed and the caller should emit a normal call, otherwise try to get
4239 the result in TARGET, if convenient. */
4240
4241 static rtx
4242 expand_builtin_alloca (tree arglist, rtx target)
4243 {
4244 rtx op0;
4245 rtx result;
4246
4247 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
4248 should always expand to function calls. These can be intercepted
4249 in libmudflap. */
4250 if (flag_mudflap)
4251 return 0;
4252
4253 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
4254 return 0;
4255
4256 /* Compute the argument. */
4257 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
4258
4259 /* Allocate the desired space. */
4260 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
4261 result = convert_memory_address (ptr_mode, result);
4262
4263 return result;
4264 }
4265
4266 /* Expand a call to a unary builtin. The arguments are in ARGLIST.
4267 Return 0 if a normal call should be emitted rather than expanding the
4268 function in-line. If convenient, the result should be placed in TARGET.
4269 SUBTARGET may be used as the target for computing the operand. */
4270
4271 static rtx
4272 expand_builtin_unop (enum machine_mode target_mode, tree arglist, rtx target,
4273 rtx subtarget, optab op_optab)
4274 {
4275 rtx op0;
4276 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
4277 return 0;
4278
4279 /* Compute the argument. */
4280 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
4281 /* Compute op, into TARGET if possible.
4282 Set TARGET to wherever the result comes back. */
4283 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
4284 op_optab, op0, target, 1);
4285 gcc_assert (target);
4286
4287 return convert_to_mode (target_mode, target, 0);
4288 }
4289
4290 /* If the string passed to fputs is a constant and is one character
4291 long, we attempt to transform this call into __builtin_fputc(). */
4292
4293 static rtx
4294 expand_builtin_fputs (tree arglist, rtx target, bool unlocked)
4295 {
4296 /* Verify the arguments in the original call. */
4297 if (validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4298 {
4299 tree result = fold_builtin_fputs (arglist, (target == const0_rtx),
4300 unlocked, NULL_TREE);
4301 if (result)
4302 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
4303 }
4304 return 0;
4305 }
4306
4307 /* Expand a call to __builtin_expect. We return our argument and emit a
4308 NOTE_INSN_EXPECTED_VALUE note. This is the expansion of __builtin_expect in
4309 a non-jump context. */
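/* Illustrative example (N and 42 are hypothetical): for

       x = __builtin_expect (n, 42);

   the expansion returns the value of N, forcing it into a register when it
   is not a constant, and attaches a NOTE_INSN_EXPECTED_VALUE note recording
   the expectation (eq n 42) for the branch predictor.  */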
4310
4311 static rtx
4312 expand_builtin_expect (tree arglist, rtx target)
4313 {
4314 tree exp, c;
4315 rtx note, rtx_c;
4316
4317 if (arglist == NULL_TREE
4318 || TREE_CHAIN (arglist) == NULL_TREE)
4319 return const0_rtx;
4320 exp = TREE_VALUE (arglist);
4321 c = TREE_VALUE (TREE_CHAIN (arglist));
4322
4323 if (TREE_CODE (c) != INTEGER_CST)
4324 {
4325 error ("second argument to %<__builtin_expect%> must be a constant");
4326 c = integer_zero_node;
4327 }
4328
4329 target = expand_expr (exp, target, VOIDmode, EXPAND_NORMAL);
4330
4331 /* Don't bother with expected value notes for integral constants. */
4332 if (flag_guess_branch_prob && GET_CODE (target) != CONST_INT)
4333 {
4334 /* We do need to force this into a register so that we can be
4335 moderately sure to be able to correctly interpret the branch
4336 condition later. */
4337 target = force_reg (GET_MODE (target), target);
4338
4339 rtx_c = expand_expr (c, NULL_RTX, GET_MODE (target), EXPAND_NORMAL);
4340
4341 note = emit_note (NOTE_INSN_EXPECTED_VALUE);
4342 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, target, rtx_c);
4343 }
4344
4345 return target;
4346 }
4347
4348 /* Like expand_builtin_expect, except do this in a jump context. This is
4349 called from do_jump if the conditional is a __builtin_expect. Return either
4350 a list of insns to emit the jump or NULL if we cannot optimize
4351 __builtin_expect. We need to optimize this at jump time so that machines
4352 like the PowerPC don't turn the test into a SCC operation, and then jump
4353 based on the test being 0/1. */
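/* Illustrative example (P is hypothetical): for

       if (__builtin_expect (p == 0, 0)) ...

   the conditional jumps produced by do_jump are annotated with
   PRED_BUILTIN_EXPECT; because the second argument is 0 the test is expected
   to fail, so the recorded taken/not-taken outcome is reversed below.  */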
4354
4355 rtx
4356 expand_builtin_expect_jump (tree exp, rtx if_false_label, rtx if_true_label)
4357 {
4358 tree arglist = TREE_OPERAND (exp, 1);
4359 tree arg0 = TREE_VALUE (arglist);
4360 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4361 rtx ret = NULL_RTX;
4362
4363 /* Only handle __builtin_expect (test, 0) and
4364 __builtin_expect (test, 1). */
4365 if (TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
4366 && (integer_zerop (arg1) || integer_onep (arg1)))
4367 {
4368 rtx insn, drop_through_label, temp;
4369
4370 /* Expand the jump insns. */
4371 start_sequence ();
4372 do_jump (arg0, if_false_label, if_true_label);
4373 ret = get_insns ();
4374
4375 drop_through_label = get_last_insn ();
4376 if (drop_through_label && NOTE_P (drop_through_label))
4377 drop_through_label = prev_nonnote_insn (drop_through_label);
4378 if (drop_through_label && !LABEL_P (drop_through_label))
4379 drop_through_label = NULL_RTX;
4380 end_sequence ();
4381
4382 if (! if_true_label)
4383 if_true_label = drop_through_label;
4384 if (! if_false_label)
4385 if_false_label = drop_through_label;
4386
4387 /* Go through and attach the branch predictions to each of the conditional jumps. */
4388 insn = ret;
4389 while (insn != NULL_RTX)
4390 {
4391 rtx next = NEXT_INSN (insn);
4392
4393 if (JUMP_P (insn) && any_condjump_p (insn))
4394 {
4395 rtx ifelse = SET_SRC (pc_set (insn));
4396 rtx then_dest = XEXP (ifelse, 1);
4397 rtx else_dest = XEXP (ifelse, 2);
4398 int taken = -1;
4399
4400 /* First check if we recognize any of the labels. */
4401 if (GET_CODE (then_dest) == LABEL_REF
4402 && XEXP (then_dest, 0) == if_true_label)
4403 taken = 1;
4404 else if (GET_CODE (then_dest) == LABEL_REF
4405 && XEXP (then_dest, 0) == if_false_label)
4406 taken = 0;
4407 else if (GET_CODE (else_dest) == LABEL_REF
4408 && XEXP (else_dest, 0) == if_false_label)
4409 taken = 1;
4410 else if (GET_CODE (else_dest) == LABEL_REF
4411 && XEXP (else_dest, 0) == if_true_label)
4412 taken = 0;
4413 /* Otherwise check where we drop through. */
4414 else if (else_dest == pc_rtx)
4415 {
4416 if (next && NOTE_P (next))
4417 next = next_nonnote_insn (next);
4418
4419 if (next && JUMP_P (next)
4420 && any_uncondjump_p (next))
4421 temp = XEXP (SET_SRC (pc_set (next)), 0);
4422 else
4423 temp = next;
4424
4425 /* TEMP is either a CODE_LABEL, NULL_RTX or something
4426 else that can't possibly match either target label. */
4427 if (temp == if_false_label)
4428 taken = 1;
4429 else if (temp == if_true_label)
4430 taken = 0;
4431 }
4432 else if (then_dest == pc_rtx)
4433 {
4434 if (next && NOTE_P (next))
4435 next = next_nonnote_insn (next);
4436
4437 if (next && JUMP_P (next)
4438 && any_uncondjump_p (next))
4439 temp = XEXP (SET_SRC (pc_set (next)), 0);
4440 else
4441 temp = next;
4442
4443 if (temp == if_false_label)
4444 taken = 0;
4445 else if (temp == if_true_label)
4446 taken = 1;
4447 }
4448
4449 if (taken != -1)
4450 {
4451 /* If the test is expected to fail, reverse the
4452 probabilities. */
4453 if (integer_zerop (arg1))
4454 taken = 1 - taken;
4455 predict_insn_def (insn, PRED_BUILTIN_EXPECT, taken);
4456 }
4457 }
4458
4459 insn = next;
4460 }
4461 }
4462
4463 return ret;
4464 }
4465
4466 static void
4467 expand_builtin_trap (void)
4468 {
4469 #ifdef HAVE_trap
4470 if (HAVE_trap)
4471 emit_insn (gen_trap ());
4472 else
4473 #endif
4474 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4475 emit_barrier ();
4476 }
4477
4478 /* Expand a call to fabs, fabsf or fabsl with arguments ARGLIST.
4479 Return 0 if a normal call should be emitted rather than expanding
4480 the function inline. If convenient, the result should be placed
4481 in TARGET. SUBTARGET may be used as the target for computing
4482 the operand. */
4483
4484 static rtx
4485 expand_builtin_fabs (tree arglist, rtx target, rtx subtarget)
4486 {
4487 enum machine_mode mode;
4488 tree arg;
4489 rtx op0;
4490
4491 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
4492 return 0;
4493
4494 arg = TREE_VALUE (arglist);
4495 mode = TYPE_MODE (TREE_TYPE (arg));
4496 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
4497 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4498 }
4499
4500 /* Expand a call to copysign, copysignf, or copysignl with arguments ARGLIST.
4501 Return NULL if a normal call should be emitted rather than expanding the
4502 function inline. If convenient, the result should be placed in TARGET.
4503 SUBTARGET may be used as the target for computing the operand. */
4504
4505 static rtx
4506 expand_builtin_copysign (tree arglist, rtx target, rtx subtarget)
4507 {
4508 rtx op0, op1;
4509 tree arg;
4510
4511 if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4512 return 0;
4513
4514 arg = TREE_VALUE (arglist);
4515 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
4516
4517 arg = TREE_VALUE (TREE_CHAIN (arglist));
4518 op1 = expand_expr (arg, NULL, VOIDmode, 0);
4519
4520 return expand_copysign (op0, op1, target);
4521 }
4522
4523 /* Create a new constant string literal and return a char* pointer to it.
4524 The STRING_CST value is the LEN characters at STR. */
4525 static tree
4526 build_string_literal (int len, const char *str)
4527 {
4528 tree t, elem, index, type;
4529
4530 t = build_string (len, str);
4531 elem = build_type_variant (char_type_node, 1, 0);
4532 index = build_index_type (build_int_cst (NULL_TREE, len - 1));
4533 type = build_array_type (elem, index);
4534 TREE_TYPE (t) = type;
4535 TREE_CONSTANT (t) = 1;
4536 TREE_INVARIANT (t) = 1;
4537 TREE_READONLY (t) = 1;
4538 TREE_STATIC (t) = 1;
4539
4540 type = build_pointer_type (type);
4541 t = build1 (ADDR_EXPR, type, t);
4542
4543 type = build_pointer_type (elem);
4544 t = build1 (NOP_EXPR, type, t);
4545 return t;
4546 }
4547
4548 /* Expand a call to printf or printf_unlocked with argument list ARGLIST.
4549 Return 0 if a normal call should be emitted rather than transforming
4550 the function inline. If convenient, the result should be placed in
4551 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
4552 call. */
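/* Illustrative examples of the transformations performed below, all of which
   apply only when printf's return value is ignored (S and C are
   hypothetical):

       printf ("%s\n", s);   becomes   puts (s);
       printf ("%c", c);     becomes   putchar (c);
       printf ("x");         becomes   putchar ('x');
       printf ("hi\n");      becomes   puts ("hi");
       printf ("");          is deleted entirely.

   Formats containing any other % directive, or lacking a trailing newline in
   the plain-string case, are left for a normal call.  */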
4553 static rtx
4554 expand_builtin_printf (tree arglist, rtx target, enum machine_mode mode,
4555 bool unlocked)
4556 {
4557 tree fn_putchar = unlocked
4558 ? implicit_built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
4559 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
4560 tree fn_puts = unlocked ? implicit_built_in_decls[BUILT_IN_PUTS_UNLOCKED]
4561 : implicit_built_in_decls[BUILT_IN_PUTS];
4562 const char *fmt_str;
4563 tree fn, fmt, arg;
4564
4565 /* If the return value is used, don't do the transformation. */
4566 if (target != const0_rtx)
4567 return 0;
4568
4569 /* Verify the required arguments in the original call. */
4570 if (! arglist)
4571 return 0;
4572 fmt = TREE_VALUE (arglist);
4573 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
4574 return 0;
4575 arglist = TREE_CHAIN (arglist);
4576
4577 /* Check whether the format is a literal string constant. */
4578 fmt_str = c_getstr (fmt);
4579 if (fmt_str == NULL)
4580 return 0;
4581
4582 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
4583 if (strcmp (fmt_str, "%s\n") == 0)
4584 {
4585 if (! arglist
4586 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
4587 || TREE_CHAIN (arglist))
4588 return 0;
4589 fn = fn_puts;
4590 }
4591 /* If the format specifier was "%c", call __builtin_putchar(arg). */
4592 else if (strcmp (fmt_str, "%c") == 0)
4593 {
4594 if (! arglist
4595 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE
4596 || TREE_CHAIN (arglist))
4597 return 0;
4598 fn = fn_putchar;
4599 }
4600 else
4601 {
4602 /* We can't handle anything else with % args or %% ... yet. */
4603 if (strchr (fmt_str, '%'))
4604 return 0;
4605
4606 if (arglist)
4607 return 0;
4608
4609 /* If the format specifier was "", printf does nothing. */
4610 if (fmt_str[0] == '\0')
4611 return const0_rtx;
4612 /* If the format specifier has length of 1, call putchar. */
4613 if (fmt_str[1] == '\0')
4614 {
4615 /* Given printf("c"), (where c is any one character,)
4616 convert "c"[0] to an int and pass that to the replacement
4617 function. */
4618 arg = build_int_cst (NULL_TREE, fmt_str[0]);
4619 arglist = build_tree_list (NULL_TREE, arg);
4620 fn = fn_putchar;
4621 }
4622 else
4623 {
4624 /* If the format specifier was "string\n", call puts("string"). */
4625 size_t len = strlen (fmt_str);
4626 if (fmt_str[len - 1] == '\n')
4627 {
4628 /* Create a NUL-terminated string that's one char shorter
4629 than the original, stripping off the trailing '\n'. */
4630 char *newstr = alloca (len);
4631 memcpy (newstr, fmt_str, len - 1);
4632 newstr[len - 1] = 0;
4633
4634 arg = build_string_literal (len, newstr);
4635 arglist = build_tree_list (NULL_TREE, arg);
4636 fn = fn_puts;
4637 }
4638 else
4639 /* We'd like to arrange to call fputs(string,stdout) here,
4640 but we need stdout and don't have a way to get it yet. */
4641 return 0;
4642 }
4643 }
4644
4645 if (!fn)
4646 return 0;
4647 return expand_expr (build_function_call_expr (fn, arglist),
4648 target, mode, EXPAND_NORMAL);
4649 }
4650
4651 /* Expand a call to fprintf or fprintf_unlocked with argument list ARGLIST.
4652 Return 0 if a normal call should be emitted rather than transforming
4653 the function inline. If convenient, the result should be placed in
4654 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
4655 call. */
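/* Illustrative examples of the transformations performed below, all of which
   apply only when fprintf's return value is ignored (F, S and C are
   hypothetical):

       fprintf (f, "%s", s);    becomes   fputs (s, f);
       fprintf (f, "%c", c);    becomes   fputc (c, f);
       fprintf (f, "plain");    becomes   fputs ("plain", f);
       fprintf (f, "");         evaluates F for side effects only.

   Formats containing any other % directive are left for a normal call.  */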
4656 static rtx
4657 expand_builtin_fprintf (tree arglist, rtx target, enum machine_mode mode,
4658 bool unlocked)
4659 {
4660 tree fn_fputc = unlocked ? implicit_built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
4661 : implicit_built_in_decls[BUILT_IN_FPUTC];
4662 tree fn_fputs = unlocked ? implicit_built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
4663 : implicit_built_in_decls[BUILT_IN_FPUTS];
4664 const char *fmt_str;
4665 tree fn, fmt, fp, arg;
4666
4667 /* If the return value is used, don't do the transformation. */
4668 if (target != const0_rtx)
4669 return 0;
4670
4671 /* Verify the required arguments in the original call. */
4672 if (! arglist)
4673 return 0;
4674 fp = TREE_VALUE (arglist);
4675 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
4676 return 0;
4677 arglist = TREE_CHAIN (arglist);
4678 if (! arglist)
4679 return 0;
4680 fmt = TREE_VALUE (arglist);
4681 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
4682 return 0;
4683 arglist = TREE_CHAIN (arglist);
4684
4685 /* Check whether the format is a literal string constant. */
4686 fmt_str = c_getstr (fmt);
4687 if (fmt_str == NULL)
4688 return 0;
4689
4690 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
4691 if (strcmp (fmt_str, "%s") == 0)
4692 {
4693 if (! arglist
4694 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
4695 || TREE_CHAIN (arglist))
4696 return 0;
4697 arg = TREE_VALUE (arglist);
4698 arglist = build_tree_list (NULL_TREE, fp);
4699 arglist = tree_cons (NULL_TREE, arg, arglist);
4700 fn = fn_fputs;
4701 }
4702 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
4703 else if (strcmp (fmt_str, "%c") == 0)
4704 {
4705 if (! arglist
4706 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE
4707 || TREE_CHAIN (arglist))
4708 return 0;
4709 arg = TREE_VALUE (arglist);
4710 arglist = build_tree_list (NULL_TREE, fp);
4711 arglist = tree_cons (NULL_TREE, arg, arglist);
4712 fn = fn_fputc;
4713 }
4714 else
4715 {
4716 /* We can't handle anything else with % args or %% ... yet. */
4717 if (strchr (fmt_str, '%'))
4718 return 0;
4719
4720 if (arglist)
4721 return 0;
4722
4723 /* If the format specifier was "", fprintf does nothing. */
4724 if (fmt_str[0] == '\0')
4725 {
4726 /* Evaluate and ignore FILE* argument for side-effects. */
4727 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4728 return const0_rtx;
4729 }
4730
4731 /* When "string" doesn't contain %, replace all cases of
4732 fprintf(stream,string) with fputs(string,stream). The fputs
4733 builtin will take care of special cases like length == 1. */
4734 arglist = build_tree_list (NULL_TREE, fp);
4735 arglist = tree_cons (NULL_TREE, fmt, arglist);
4736 fn = fn_fputs;
4737 }
4738
4739 if (!fn)
4740 return 0;
4741 return expand_expr (build_function_call_expr (fn, arglist),
4742 target, mode, EXPAND_NORMAL);
4743 }
4744
4745 /* Expand a call to sprintf with argument list ARGLIST. Return 0 if
4746 a normal call should be emitted rather than expanding the function
4747 inline. If convenient, the result should be placed in TARGET with
4748 mode MODE. */
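/* Illustrative examples of the transformations performed below (D and S are
   hypothetical):

       sprintf (d, "hi");      becomes   strcpy (d, "hi"), with the constant 2
                                         substituted when the result is used;
       sprintf (d, "%s", s);   becomes   strcpy (d, s), but only when the
                                         result is unused or strlen (s) is a
                                         known constant.  */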
4749
4750 static rtx
4751 expand_builtin_sprintf (tree arglist, rtx target, enum machine_mode mode)
4752 {
4753 tree orig_arglist, dest, fmt;
4754 const char *fmt_str;
4755
4756 orig_arglist = arglist;
4757
4758 /* Verify the required arguments in the original call. */
4759 if (! arglist)
4760 return 0;
4761 dest = TREE_VALUE (arglist);
4762 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
4763 return 0;
4764 arglist = TREE_CHAIN (arglist);
4765 if (! arglist)
4766 return 0;
4767 fmt = TREE_VALUE (arglist);
4768 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
4769 return 0;
4770 arglist = TREE_CHAIN (arglist);
4771
4772 /* Check whether the format is a literal string constant. */
4773 fmt_str = c_getstr (fmt);
4774 if (fmt_str == NULL)
4775 return 0;
4776
4777 /* If the format doesn't contain % args or %%, use strcpy. */
4778 if (strchr (fmt_str, '%') == 0)
4779 {
4780 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
4781 tree exp;
4782
4783 if (arglist || ! fn)
4784 return 0;
4785 expand_expr (build_function_call_expr (fn, orig_arglist),
4786 const0_rtx, VOIDmode, EXPAND_NORMAL);
4787 if (target == const0_rtx)
4788 return const0_rtx;
4789 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
4790 return expand_expr (exp, target, mode, EXPAND_NORMAL);
4791 }
4792 /* If the format is "%s", use strcpy if the result isn't used. */
4793 else if (strcmp (fmt_str, "%s") == 0)
4794 {
4795 tree fn, arg, len;
4796 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
4797
4798 if (! fn)
4799 return 0;
4800
4801 if (! arglist || TREE_CHAIN (arglist))
4802 return 0;
4803 arg = TREE_VALUE (arglist);
4804 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
4805 return 0;
4806
4807 if (target != const0_rtx)
4808 {
4809 len = c_strlen (arg, 1);
4810 if (! len || TREE_CODE (len) != INTEGER_CST)
4811 return 0;
4812 }
4813 else
4814 len = NULL_TREE;
4815
4816 arglist = build_tree_list (NULL_TREE, arg);
4817 arglist = tree_cons (NULL_TREE, dest, arglist);
4818 expand_expr (build_function_call_expr (fn, arglist),
4819 const0_rtx, VOIDmode, EXPAND_NORMAL);
4820
4821 if (target == const0_rtx)
4822 return const0_rtx;
4823 return expand_expr (len, target, mode, EXPAND_NORMAL);
4824 }
4825
4826 return 0;
4827 }
4828
4829 /* Expand a call to either the entry or exit function profiler. */
4830
4831 static rtx
4832 expand_builtin_profile_func (bool exitp)
4833 {
4834 rtx this, which;
4835
4836 this = DECL_RTL (current_function_decl);
4837 gcc_assert (MEM_P (this));
4838 this = XEXP (this, 0);
4839
4840 if (exitp)
4841 which = profile_function_exit_libfunc;
4842 else
4843 which = profile_function_entry_libfunc;
4844
4845 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this, Pmode,
4846 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
4847 0, hard_frame_pointer_rtx),
4848 Pmode);
4849
4850 return const0_rtx;
4851 }
4852
4853 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
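/* Illustrative sketch: when TRAMPOLINE_ALIGNMENT exceeds STACK_BOUNDARY, the
   address is rounded up as

       tramp = (tramp + align - 1) & -align,  align = TRAMPOLINE_ALIGNMENT
                                                      / BITS_PER_UNIT;

   e.g. with a 16-byte requirement a hypothetical address 0x1003 becomes
   0x1010.  */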
4854
4855 static rtx
4856 round_trampoline_addr (rtx tramp)
4857 {
4858 rtx temp, addend, mask;
4859
4860 /* If we don't need too much alignment, we'll have been guaranteed
4861 proper alignment by get_trampoline_type. */
4862 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4863 return tramp;
4864
4865 /* Round address up to desired boundary. */
4866 temp = gen_reg_rtx (Pmode);
4867 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
4868 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
4869
4870 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4871 temp, 0, OPTAB_LIB_WIDEN);
4872 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4873 temp, 0, OPTAB_LIB_WIDEN);
4874
4875 return tramp;
4876 }
4877
4878 static rtx
4879 expand_builtin_init_trampoline (tree arglist)
4880 {
4881 tree t_tramp, t_func, t_chain;
4882 rtx r_tramp, r_func, r_chain;
4883 #ifdef TRAMPOLINE_TEMPLATE
4884 rtx blktramp;
4885 #endif
4886
4887 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE,
4888 POINTER_TYPE, VOID_TYPE))
4889 return NULL_RTX;
4890
4891 t_tramp = TREE_VALUE (arglist);
4892 arglist = TREE_CHAIN (arglist);
4893 t_func = TREE_VALUE (arglist);
4894 arglist = TREE_CHAIN (arglist);
4895 t_chain = TREE_VALUE (arglist);
4896
4897 r_tramp = expand_expr (t_tramp, NULL_RTX, VOIDmode, 0);
4898 r_func = expand_expr (t_func, NULL_RTX, VOIDmode, 0);
4899 r_chain = expand_expr (t_chain, NULL_RTX, VOIDmode, 0);
4900
4901 /* Generate insns to initialize the trampoline. */
4902 r_tramp = round_trampoline_addr (r_tramp);
4903 #ifdef TRAMPOLINE_TEMPLATE
4904 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
4905 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
4906 emit_block_move (blktramp, assemble_trampoline_template (),
4907 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
4908 #endif
4909 trampolines_created = 1;
4910 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
4911
4912 return const0_rtx;
4913 }
4914
4915 static rtx
4916 expand_builtin_adjust_trampoline (tree arglist)
4917 {
4918 rtx tramp;
4919
4920 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
4921 return NULL_RTX;
4922
4923 tramp = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
4924 tramp = round_trampoline_addr (tramp);
4925 #ifdef TRAMPOLINE_ADJUST_ADDRESS
4926 TRAMPOLINE_ADJUST_ADDRESS (tramp);
4927 #endif
4928
4929 return tramp;
4930 }
4931
4932 /* Expand a call to the built-in signbit, signbitf or signbitl function.
4933 Return NULL_RTX if a normal call should be emitted rather than expanding
4934 the function in-line. EXP is the expression that is a call to the builtin
4935 function; if convenient, the result should be placed in TARGET. */
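/* Illustrative sketch: when the format reports a sign-bit position, the
   argument is reinterpreted in the corresponding integer mode and the bit at
   fmt->signbit (adjusted for word order) is tested, either with
   extract_bit_field or by ANDing with a one-bit mask; for IEEE double this
   is typically bit 63.  When the format reports no position, signbit falls
   back to expanding ARG < 0.0, unless signed zeros must be honored.  */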
4936
4937 static rtx
4938 expand_builtin_signbit (tree exp, rtx target)
4939 {
4940 const struct real_format *fmt;
4941 enum machine_mode fmode, imode, rmode;
4942 HOST_WIDE_INT hi, lo;
4943 tree arg, arglist;
4944 int bitpos;
4945 rtx temp;
4946
4947 arglist = TREE_OPERAND (exp, 1);
4948 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
4949 return 0;
4950
4951 arg = TREE_VALUE (arglist);
4952 fmode = TYPE_MODE (TREE_TYPE (arg));
4953 rmode = TYPE_MODE (TREE_TYPE (exp));
4954 fmt = REAL_MODE_FORMAT (fmode);
4955
4956 /* For floating point formats without a sign bit, implement signbit
4957 as "ARG < 0.0". */
4958 if (fmt->signbit < 0)
4959 {
4960 /* But we can't do this if the format supports signed zero. */
4961 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
4962 return 0;
4963
4964 arg = fold (build2 (LT_EXPR, TREE_TYPE (exp), arg,
4965 build_real (TREE_TYPE (arg), dconst0)));
4966 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4967 }
4968
4969 imode = int_mode_for_mode (fmode);
4970 if (imode == BLKmode)
4971 return 0;
4972
4973 bitpos = fmt->signbit;
4974 /* Handle targets with different FP word orders. */
4975 if (FLOAT_WORDS_BIG_ENDIAN != WORDS_BIG_ENDIAN)
4976 {
4977 int nwords = GET_MODE_BITSIZE (fmode) / BITS_PER_WORD;
4978 int word = nwords - (bitpos / BITS_PER_WORD) - 1;
4979 bitpos = word * BITS_PER_WORD + bitpos % BITS_PER_WORD;
4980 }
4981
4982 /* If the sign bit is not in the lowpart and the floating point format
4983 is wider than an integer, check that it is twice the size of an integer
4984 so that we can use gen_highpart below. */
4985 if (bitpos >= GET_MODE_BITSIZE (rmode)
4986 && GET_MODE_BITSIZE (imode) != 2 * GET_MODE_BITSIZE (rmode))
4987 return 0;
4988
4989 temp = expand_expr (arg, NULL_RTX, VOIDmode, 0);
4990 temp = gen_lowpart (imode, temp);
4991
4992 if (GET_MODE_BITSIZE (imode) > GET_MODE_BITSIZE (rmode))
4993 {
4994 if (BYTES_BIG_ENDIAN)
4995 bitpos = GET_MODE_BITSIZE (imode) - 1 - bitpos;
4996 temp = copy_to_mode_reg (imode, temp);
4997 temp = extract_bit_field (temp, 1, bitpos, 1,
4998 NULL_RTX, rmode, rmode);
4999 }
5000 else
5001 {
5002 if (GET_MODE_BITSIZE (imode) < GET_MODE_BITSIZE (rmode))
5003 temp = gen_lowpart (rmode, temp);
5004 if (bitpos < HOST_BITS_PER_WIDE_INT)
5005 {
5006 hi = 0;
5007 lo = (HOST_WIDE_INT) 1 << bitpos;
5008 }
5009 else
5010 {
5011 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5012 lo = 0;
5013 }
5014
5015 temp = force_reg (rmode, temp);
5016 temp = expand_binop (rmode, and_optab, temp,
5017 immed_double_const (lo, hi, rmode),
5018 target, 1, OPTAB_LIB_WIDEN);
5019 }
5020 return temp;
5021 }
5022
5023 /* Expand fork or exec calls. TARGET is the desired target of the
5024 call. ARGLIST is the list of arguments of the call. FN is the
5025 FUNCTION_DECL of the actual function. IGNORE is nonzero if the
5026 value is to be ignored. */
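/* Illustrative sketch: when arc profiling is enabled (profile_arc_flag,
   i.e. -fprofile-arcs), a call such as fork () is redirected to the wrapper
   __gcov_fork (), execl to __gcov_execl, and so on, so the profiling runtime
   can keep its counters consistent across the fork/exec; without profiling,
   NULL_RTX is returned and a normal call is emitted.  */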
5027
5028 static rtx
5029 expand_builtin_fork_or_exec (tree fn, tree arglist, rtx target, int ignore)
5030 {
5031 tree id, decl;
5032 tree call;
5033
5034 /* If we are not profiling, just call the function. */
5035 if (!profile_arc_flag)
5036 return NULL_RTX;
5037
5038 /* Otherwise call the wrapper. As far as the rest of the compiler is
5039 concerned this should be equivalent, so the code does not diverge, and
5040 the wrapper may run the code necessary for keeping the profiling sane. */
5041
5042 switch (DECL_FUNCTION_CODE (fn))
5043 {
5044 case BUILT_IN_FORK:
5045 id = get_identifier ("__gcov_fork");
5046 break;
5047
5048 case BUILT_IN_EXECL:
5049 id = get_identifier ("__gcov_execl");
5050 break;
5051
5052 case BUILT_IN_EXECV:
5053 id = get_identifier ("__gcov_execv");
5054 break;
5055
5056 case BUILT_IN_EXECLP:
5057 id = get_identifier ("__gcov_execlp");
5058 break;
5059
5060 case BUILT_IN_EXECLE:
5061 id = get_identifier ("__gcov_execle");
5062 break;
5063
5064 case BUILT_IN_EXECVP:
5065 id = get_identifier ("__gcov_execvp");
5066 break;
5067
5068 case BUILT_IN_EXECVE:
5069 id = get_identifier ("__gcov_execve");
5070 break;
5071
5072 default:
5073 gcc_unreachable ();
5074 }
5075
5076 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5077 DECL_EXTERNAL (decl) = 1;
5078 TREE_PUBLIC (decl) = 1;
5079 DECL_ARTIFICIAL (decl) = 1;
5080 TREE_NOTHROW (decl) = 1;
5081 call = build_function_call_expr (decl, arglist);
5082
5083 return expand_call (call, target, ignore);
5084 }
5085 \f
5086 /* Expand an expression EXP that calls a built-in function,
5087 with result going to TARGET if that's convenient
5088 (and in mode MODE if that's convenient).
5089 SUBTARGET may be used as the target for computing one of EXP's operands.
5090 IGNORE is nonzero if the value is to be ignored. */
5091
5092 rtx
5093 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5094 int ignore)
5095 {
5096 tree fndecl = get_callee_fndecl (exp);
5097 tree arglist = TREE_OPERAND (exp, 1);
5098 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5099 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5100
5101 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5102 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5103
5104 /* When not optimizing, generate calls to library functions for a certain
5105 set of builtins. */
5106 if (!optimize
5107 && !CALLED_AS_BUILT_IN (fndecl)
5108 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5109 && fcode != BUILT_IN_ALLOCA)
5110 return expand_call (exp, target, ignore);
5111
5112 /* The built-in function expanders test for target == const0_rtx
5113 to determine whether the function's result will be ignored. */
5114 if (ignore)
5115 target = const0_rtx;
5116
5117 /* If the result of a pure or const built-in function is ignored, and
5118 none of its arguments are volatile, we can avoid expanding the
5119 built-in call and just evaluate the arguments for side-effects. */
5120 if (target == const0_rtx
5121 && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
5122 {
5123 bool volatilep = false;
5124 tree arg;
5125
5126 for (arg = arglist; arg; arg = TREE_CHAIN (arg))
5127 if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
5128 {
5129 volatilep = true;
5130 break;
5131 }
5132
5133 if (! volatilep)
5134 {
5135 for (arg = arglist; arg; arg = TREE_CHAIN (arg))
5136 expand_expr (TREE_VALUE (arg), const0_rtx,
5137 VOIDmode, EXPAND_NORMAL);
5138 return const0_rtx;
5139 }
5140 }
5141
5142 switch (fcode)
5143 {
5144 case BUILT_IN_FABS:
5145 case BUILT_IN_FABSF:
5146 case BUILT_IN_FABSL:
5147 target = expand_builtin_fabs (arglist, target, subtarget);
5148 if (target)
5149 return target;
5150 break;
5151
5152 case BUILT_IN_COPYSIGN:
5153 case BUILT_IN_COPYSIGNF:
5154 case BUILT_IN_COPYSIGNL:
5155 target = expand_builtin_copysign (arglist, target, subtarget);
5156 if (target)
5157 return target;
5158 break;
5159
5160 /* Just do a normal library call if we were unable to fold
5161 the values. */
5162 case BUILT_IN_CABS:
5163 case BUILT_IN_CABSF:
5164 case BUILT_IN_CABSL:
5165 break;
5166
5167 case BUILT_IN_EXP:
5168 case BUILT_IN_EXPF:
5169 case BUILT_IN_EXPL:
5170 case BUILT_IN_EXP10:
5171 case BUILT_IN_EXP10F:
5172 case BUILT_IN_EXP10L:
5173 case BUILT_IN_POW10:
5174 case BUILT_IN_POW10F:
5175 case BUILT_IN_POW10L:
5176 case BUILT_IN_EXP2:
5177 case BUILT_IN_EXP2F:
5178 case BUILT_IN_EXP2L:
5179 case BUILT_IN_EXPM1:
5180 case BUILT_IN_EXPM1F:
5181 case BUILT_IN_EXPM1L:
5182 case BUILT_IN_LOGB:
5183 case BUILT_IN_LOGBF:
5184 case BUILT_IN_LOGBL:
5185 case BUILT_IN_ILOGB:
5186 case BUILT_IN_ILOGBF:
5187 case BUILT_IN_ILOGBL:
5188 case BUILT_IN_LOG:
5189 case BUILT_IN_LOGF:
5190 case BUILT_IN_LOGL:
5191 case BUILT_IN_LOG10:
5192 case BUILT_IN_LOG10F:
5193 case BUILT_IN_LOG10L:
5194 case BUILT_IN_LOG2:
5195 case BUILT_IN_LOG2F:
5196 case BUILT_IN_LOG2L:
5197 case BUILT_IN_LOG1P:
5198 case BUILT_IN_LOG1PF:
5199 case BUILT_IN_LOG1PL:
5200 case BUILT_IN_TAN:
5201 case BUILT_IN_TANF:
5202 case BUILT_IN_TANL:
5203 case BUILT_IN_ASIN:
5204 case BUILT_IN_ASINF:
5205 case BUILT_IN_ASINL:
5206 case BUILT_IN_ACOS:
5207 case BUILT_IN_ACOSF:
5208 case BUILT_IN_ACOSL:
5209 case BUILT_IN_ATAN:
5210 case BUILT_IN_ATANF:
5211 case BUILT_IN_ATANL:
5212 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5213 because of possible accuracy problems. */
5214 if (! flag_unsafe_math_optimizations)
5215 break;
5216 case BUILT_IN_SQRT:
5217 case BUILT_IN_SQRTF:
5218 case BUILT_IN_SQRTL:
5219 case BUILT_IN_FLOOR:
5220 case BUILT_IN_FLOORF:
5221 case BUILT_IN_FLOORL:
5222 case BUILT_IN_CEIL:
5223 case BUILT_IN_CEILF:
5224 case BUILT_IN_CEILL:
5225 case BUILT_IN_TRUNC:
5226 case BUILT_IN_TRUNCF:
5227 case BUILT_IN_TRUNCL:
5228 case BUILT_IN_ROUND:
5229 case BUILT_IN_ROUNDF:
5230 case BUILT_IN_ROUNDL:
5231 case BUILT_IN_NEARBYINT:
5232 case BUILT_IN_NEARBYINTF:
5233 case BUILT_IN_NEARBYINTL:
5234 case BUILT_IN_RINT:
5235 case BUILT_IN_RINTF:
5236 case BUILT_IN_RINTL:
5237 target = expand_builtin_mathfn (exp, target, subtarget);
5238 if (target)
5239 return target;
5240 break;
5241
5242 case BUILT_IN_POW:
5243 case BUILT_IN_POWF:
5244 case BUILT_IN_POWL:
5245 target = expand_builtin_pow (exp, target, subtarget);
5246 if (target)
5247 return target;
5248 break;
5249
5250 case BUILT_IN_POWI:
5251 case BUILT_IN_POWIF:
5252 case BUILT_IN_POWIL:
5253 target = expand_builtin_powi (exp, target, subtarget);
5254 if (target)
5255 return target;
5256 break;
5257
5258 case BUILT_IN_ATAN2:
5259 case BUILT_IN_ATAN2F:
5260 case BUILT_IN_ATAN2L:
5261 case BUILT_IN_FMOD:
5262 case BUILT_IN_FMODF:
5263 case BUILT_IN_FMODL:
5264 case BUILT_IN_DREM:
5265 case BUILT_IN_DREMF:
5266 case BUILT_IN_DREML:
5267 if (! flag_unsafe_math_optimizations)
5268 break;
5269 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5270 if (target)
5271 return target;
5272 break;
5273
5274 case BUILT_IN_SIN:
5275 case BUILT_IN_SINF:
5276 case BUILT_IN_SINL:
5277 case BUILT_IN_COS:
5278 case BUILT_IN_COSF:
5279 case BUILT_IN_COSL:
5280 if (! flag_unsafe_math_optimizations)
5281 break;
5282 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5283 if (target)
5284 return target;
5285 break;
5286
5287 case BUILT_IN_APPLY_ARGS:
5288 return expand_builtin_apply_args ();
5289
5290 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5291 FUNCTION with a copy of the parameters described by
5292 ARGUMENTS, and ARGSIZE. It returns a block of memory
5293 allocated on the stack into which is stored all the registers
5294 that might possibly be used for returning the result of a
5295 function. ARGUMENTS is the value returned by
5296 __builtin_apply_args. ARGSIZE is the number of bytes of
5297 arguments that must be copied. ??? How should this value be
5298 computed? We'll also need a safe worst case value for varargs
5299 functions. */
5300 case BUILT_IN_APPLY:
5301 if (!validate_arglist (arglist, POINTER_TYPE,
5302 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5303 && !validate_arglist (arglist, REFERENCE_TYPE,
5304 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5305 return const0_rtx;
5306 else
5307 {
5308 int i;
5309 tree t;
5310 rtx ops[3];
5311
5312 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
5313 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
5314
5315 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5316 }
5317
5318 /* __builtin_return (RESULT) causes the function to return the
5319 value described by RESULT. RESULT is address of the block of
5320 memory returned by __builtin_apply. */
5321 case BUILT_IN_RETURN:
5322 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
5323 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
5324 NULL_RTX, VOIDmode, 0));
5325 return const0_rtx;
5326
5327 case BUILT_IN_SAVEREGS:
5328 return expand_builtin_saveregs ();
5329
5330 case BUILT_IN_ARGS_INFO:
5331 return expand_builtin_args_info (arglist);
5332
5333 /* Return the address of the first anonymous stack arg. */
5334 case BUILT_IN_NEXT_ARG:
5335 if (fold_builtin_next_arg (arglist))
5336 return const0_rtx;
5337 return expand_builtin_next_arg ();
5338
5339 case BUILT_IN_CLASSIFY_TYPE:
5340 return expand_builtin_classify_type (arglist);
5341
5342 case BUILT_IN_CONSTANT_P:
5343 return const0_rtx;
5344
5345 case BUILT_IN_FRAME_ADDRESS:
5346 case BUILT_IN_RETURN_ADDRESS:
5347 return expand_builtin_frame_address (fndecl, arglist);
5348
5349 /* Return the address of the area where the structure value is
5350 returned, or 0 otherwise. */
5351 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5352 if (arglist != 0
5353 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5354 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5355 return const0_rtx;
5356 else
5357 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5358
5359 case BUILT_IN_ALLOCA:
5360 target = expand_builtin_alloca (arglist, target);
5361 if (target)
5362 return target;
5363 break;
5364
5365 case BUILT_IN_STACK_SAVE:
5366 return expand_stack_save ();
5367
5368 case BUILT_IN_STACK_RESTORE:
5369 expand_stack_restore (TREE_VALUE (arglist));
5370 return const0_rtx;
5371
5372 case BUILT_IN_FFS:
5373 case BUILT_IN_FFSL:
5374 case BUILT_IN_FFSLL:
5375 case BUILT_IN_FFSIMAX:
5376 target = expand_builtin_unop (target_mode, arglist, target,
5377 subtarget, ffs_optab);
5378 if (target)
5379 return target;
5380 break;
5381
5382 case BUILT_IN_CLZ:
5383 case BUILT_IN_CLZL:
5384 case BUILT_IN_CLZLL:
5385 case BUILT_IN_CLZIMAX:
5386 target = expand_builtin_unop (target_mode, arglist, target,
5387 subtarget, clz_optab);
5388 if (target)
5389 return target;
5390 break;
5391
5392 case BUILT_IN_CTZ:
5393 case BUILT_IN_CTZL:
5394 case BUILT_IN_CTZLL:
5395 case BUILT_IN_CTZIMAX:
5396 target = expand_builtin_unop (target_mode, arglist, target,
5397 subtarget, ctz_optab);
5398 if (target)
5399 return target;
5400 break;
5401
5402 case BUILT_IN_POPCOUNT:
5403 case BUILT_IN_POPCOUNTL:
5404 case BUILT_IN_POPCOUNTLL:
5405 case BUILT_IN_POPCOUNTIMAX:
5406 target = expand_builtin_unop (target_mode, arglist, target,
5407 subtarget, popcount_optab);
5408 if (target)
5409 return target;
5410 break;
5411
5412 case BUILT_IN_PARITY:
5413 case BUILT_IN_PARITYL:
5414 case BUILT_IN_PARITYLL:
5415 case BUILT_IN_PARITYIMAX:
5416 target = expand_builtin_unop (target_mode, arglist, target,
5417 subtarget, parity_optab);
5418 if (target)
5419 return target;
5420 break;
5421
5422 case BUILT_IN_STRLEN:
5423 target = expand_builtin_strlen (arglist, target, target_mode);
5424 if (target)
5425 return target;
5426 break;
5427
5428 case BUILT_IN_STRCPY:
5429 target = expand_builtin_strcpy (exp, target, mode);
5430 if (target)
5431 return target;
5432 break;
5433
5434 case BUILT_IN_STRNCPY:
5435 target = expand_builtin_strncpy (exp, target, mode);
5436 if (target)
5437 return target;
5438 break;
5439
5440 case BUILT_IN_STPCPY:
5441 target = expand_builtin_stpcpy (exp, target, mode);
5442 if (target)
5443 return target;
5444 break;
5445
5446 case BUILT_IN_STRCAT:
5447 target = expand_builtin_strcat (arglist, TREE_TYPE (exp), target, mode);
5448 if (target)
5449 return target;
5450 break;
5451
5452 case BUILT_IN_STRNCAT:
5453 target = expand_builtin_strncat (arglist, target, mode);
5454 if (target)
5455 return target;
5456 break;
5457
5458 case BUILT_IN_STRSPN:
5459 target = expand_builtin_strspn (arglist, target, mode);
5460 if (target)
5461 return target;
5462 break;
5463
5464 case BUILT_IN_STRCSPN:
5465 target = expand_builtin_strcspn (arglist, target, mode);
5466 if (target)
5467 return target;
5468 break;
5469
5470 case BUILT_IN_STRSTR:
5471 target = expand_builtin_strstr (arglist, target, mode);
5472 if (target)
5473 return target;
5474 break;
5475
5476 case BUILT_IN_STRPBRK:
5477 target = expand_builtin_strpbrk (arglist, target, mode);
5478 if (target)
5479 return target;
5480 break;
5481
5482 case BUILT_IN_INDEX:
5483 case BUILT_IN_STRCHR:
5484 target = expand_builtin_strchr (arglist, target, mode);
5485 if (target)
5486 return target;
5487 break;
5488
5489 case BUILT_IN_RINDEX:
5490 case BUILT_IN_STRRCHR:
5491 target = expand_builtin_strrchr (arglist, target, mode);
5492 if (target)
5493 return target;
5494 break;
5495
5496 case BUILT_IN_MEMCPY:
5497 target = expand_builtin_memcpy (exp, target, mode);
5498 if (target)
5499 return target;
5500 break;
5501
5502 case BUILT_IN_MEMPCPY:
5503 target = expand_builtin_mempcpy (arglist, TREE_TYPE (exp), target, mode, /*endp=*/ 1);
5504 if (target)
5505 return target;
5506 break;
5507
5508 case BUILT_IN_MEMMOVE:
5509 target = expand_builtin_memmove (arglist, TREE_TYPE (exp), target, mode);
5510 if (target)
5511 return target;
5512 break;
5513
5514 case BUILT_IN_BCOPY:
5515 target = expand_builtin_bcopy (arglist, TREE_TYPE (exp));
5516 if (target)
5517 return target;
5518 break;
5519
5520 case BUILT_IN_MEMSET:
5521 target = expand_builtin_memset (arglist, target, mode);
5522 if (target)
5523 return target;
5524 break;
5525
5526 case BUILT_IN_BZERO:
5527 target = expand_builtin_bzero (arglist);
5528 if (target)
5529 return target;
5530 break;
5531
5532 case BUILT_IN_STRCMP:
5533 target = expand_builtin_strcmp (exp, target, mode);
5534 if (target)
5535 return target;
5536 break;
5537
5538 case BUILT_IN_STRNCMP:
5539 target = expand_builtin_strncmp (exp, target, mode);
5540 if (target)
5541 return target;
5542 break;
5543
5544 case BUILT_IN_BCMP:
5545 case BUILT_IN_MEMCMP:
5546 target = expand_builtin_memcmp (exp, arglist, target, mode);
5547 if (target)
5548 return target;
5549 break;
5550
5551 case BUILT_IN_SETJMP:
5552 target = expand_builtin_setjmp (arglist, target);
5553 if (target)
5554 return target;
5555 break;
5556
5557 /* __builtin_longjmp is passed a pointer to an array of five words.
5558 It's similar to the C library longjmp function but works with
5559 __builtin_setjmp above. */
5560 case BUILT_IN_LONGJMP:
5561 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5562 break;
5563 else
5564 {
5565 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
5566 VOIDmode, 0);
5567 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
5568 NULL_RTX, VOIDmode, 0);
5569
5570 if (value != const1_rtx)
5571 {
5572 error ("%<__builtin_longjmp%> second argument must be 1");
5573 return const0_rtx;
5574 }
5575
5576 expand_builtin_longjmp (buf_addr, value);
5577 return const0_rtx;
5578 }
5579
5580 case BUILT_IN_NONLOCAL_GOTO:
5581 target = expand_builtin_nonlocal_goto (arglist);
5582 if (target)
5583 return target;
5584 break;
5585
5586 /* This updates the setjmp buffer that is its argument with the value
5587 of the current stack pointer. */
5588 case BUILT_IN_UPDATE_SETJMP_BUF:
5589 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
5590 {
5591 rtx buf_addr
5592 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
5593
5594 expand_builtin_update_setjmp_buf (buf_addr);
5595 return const0_rtx;
5596 }
5597 break;
5598
5599 case BUILT_IN_TRAP:
5600 expand_builtin_trap ();
5601 return const0_rtx;
5602
5603 case BUILT_IN_PRINTF:
5604 target = expand_builtin_printf (arglist, target, mode, false);
5605 if (target)
5606 return target;
5607 break;
5608
5609 case BUILT_IN_PRINTF_UNLOCKED:
5610 target = expand_builtin_printf (arglist, target, mode, true);
5611 if (target)
5612 return target;
5613 break;
5614
5615 case BUILT_IN_FPUTS:
5616 target = expand_builtin_fputs (arglist, target, false);
5617 if (target)
5618 return target;
5619 break;
5620 case BUILT_IN_FPUTS_UNLOCKED:
5621 target = expand_builtin_fputs (arglist, target, true);
5622 if (target)
5623 return target;
5624 break;
5625
5626 case BUILT_IN_FPRINTF:
5627 target = expand_builtin_fprintf (arglist, target, mode, false);
5628 if (target)
5629 return target;
5630 break;
5631
5632 case BUILT_IN_FPRINTF_UNLOCKED:
5633 target = expand_builtin_fprintf (arglist, target, mode, true);
5634 if (target)
5635 return target;
5636 break;
5637
5638 case BUILT_IN_SPRINTF:
5639 target = expand_builtin_sprintf (arglist, target, mode);
5640 if (target)
5641 return target;
5642 break;
5643
5644 case BUILT_IN_SIGNBIT:
5645 case BUILT_IN_SIGNBITF:
5646 case BUILT_IN_SIGNBITL:
5647 target = expand_builtin_signbit (exp, target);
5648 if (target)
5649 return target;
5650 break;
5651
5652 /* Various hooks for the DWARF 2 __throw routine. */
5653 case BUILT_IN_UNWIND_INIT:
5654 expand_builtin_unwind_init ();
5655 return const0_rtx;
5656 case BUILT_IN_DWARF_CFA:
5657 return virtual_cfa_rtx;
5658 #ifdef DWARF2_UNWIND_INFO
5659 case BUILT_IN_DWARF_SP_COLUMN:
5660 return expand_builtin_dwarf_sp_column ();
5661 case BUILT_IN_INIT_DWARF_REG_SIZES:
5662 expand_builtin_init_dwarf_reg_sizes (TREE_VALUE (arglist));
5663 return const0_rtx;
5664 #endif
5665 case BUILT_IN_FROB_RETURN_ADDR:
5666 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
5667 case BUILT_IN_EXTRACT_RETURN_ADDR:
5668 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
5669 case BUILT_IN_EH_RETURN:
5670 expand_builtin_eh_return (TREE_VALUE (arglist),
5671 TREE_VALUE (TREE_CHAIN (arglist)));
5672 return const0_rtx;
5673 #ifdef EH_RETURN_DATA_REGNO
5674 case BUILT_IN_EH_RETURN_DATA_REGNO:
5675 return expand_builtin_eh_return_data_regno (arglist);
5676 #endif
5677 case BUILT_IN_EXTEND_POINTER:
5678 return expand_builtin_extend_pointer (TREE_VALUE (arglist));
5679
5680 case BUILT_IN_VA_START:
5681 case BUILT_IN_STDARG_START:
5682 return expand_builtin_va_start (arglist);
5683 case BUILT_IN_VA_END:
5684 return expand_builtin_va_end (arglist);
5685 case BUILT_IN_VA_COPY:
5686 return expand_builtin_va_copy (arglist);
5687 case BUILT_IN_EXPECT:
5688 return expand_builtin_expect (arglist, target);
5689 case BUILT_IN_PREFETCH:
5690 expand_builtin_prefetch (arglist);
5691 return const0_rtx;
5692
5693 case BUILT_IN_PROFILE_FUNC_ENTER:
5694 return expand_builtin_profile_func (false);
5695 case BUILT_IN_PROFILE_FUNC_EXIT:
5696 return expand_builtin_profile_func (true);
5697
5698 case BUILT_IN_INIT_TRAMPOLINE:
5699 return expand_builtin_init_trampoline (arglist);
5700 case BUILT_IN_ADJUST_TRAMPOLINE:
5701 return expand_builtin_adjust_trampoline (arglist);
5702
5703 case BUILT_IN_FORK:
5704 case BUILT_IN_EXECL:
5705 case BUILT_IN_EXECV:
5706 case BUILT_IN_EXECLP:
5707 case BUILT_IN_EXECLE:
5708 case BUILT_IN_EXECVP:
5709 case BUILT_IN_EXECVE:
5710 target = expand_builtin_fork_or_exec (fndecl, arglist, target, ignore);
5711 if (target)
5712 return target;
5713 break;
5714
5715 default: /* Just do a library call if this is an unknown builtin. */
5716 break;
5717 }
5718
5719 /* The switch statement above can drop through to cause the function
5720 to be called normally. */
5721 return expand_call (exp, target, ignore);
5722 }
5723
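An illustrative, stand-alone sketch (not part of builtins.c) of the distinction the !optimize test above draws between prefixed and unprefixed calls; the -O0 behaviour shown in the comment is the typical outcome of that test, not a guarantee:

#include <stdio.h>
#include <string.h>

int
main (void)
{
  const char *s = "hello";

  /* Both calls have identical semantics.  When not optimizing, the plain
     strlen call is normally emitted as an ordinary library call, while the
     __builtin_ form is still routed through the expander switch above.  */
  printf ("%zu %zu\n", strlen (s), __builtin_strlen (s));
  return 0;
}
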
5724 /* Determine whether a tree node represents a call to a built-in
5725 function. If the tree T is a call to a built-in function with
5726 the right number of arguments of the appropriate types, return
5727 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
5728 Otherwise the return value is END_BUILTINS. */
5729
5730 enum built_in_function
5731 builtin_mathfn_code (tree t)
5732 {
5733 tree fndecl, arglist, parmlist;
5734 tree argtype, parmtype;
5735
5736 if (TREE_CODE (t) != CALL_EXPR
5737 || TREE_CODE (TREE_OPERAND (t, 0)) != ADDR_EXPR)
5738 return END_BUILTINS;
5739
5740 fndecl = get_callee_fndecl (t);
5741 if (fndecl == NULL_TREE
5742 || TREE_CODE (fndecl) != FUNCTION_DECL
5743 || ! DECL_BUILT_IN (fndecl)
5744 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5745 return END_BUILTINS;
5746
5747 arglist = TREE_OPERAND (t, 1);
5748 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
5749 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
5750 {
5751 /* If a function doesn't take a variable number of arguments,
5752 the last element in the list will have type `void'. */
5753 parmtype = TREE_VALUE (parmlist);
5754 if (VOID_TYPE_P (parmtype))
5755 {
5756 if (arglist)
5757 return END_BUILTINS;
5758 return DECL_FUNCTION_CODE (fndecl);
5759 }
5760
5761 if (! arglist)
5762 return END_BUILTINS;
5763
5764 argtype = TREE_TYPE (TREE_VALUE (arglist));
5765
5766 if (SCALAR_FLOAT_TYPE_P (parmtype))
5767 {
5768 if (! SCALAR_FLOAT_TYPE_P (argtype))
5769 return END_BUILTINS;
5770 }
5771 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
5772 {
5773 if (! COMPLEX_FLOAT_TYPE_P (argtype))
5774 return END_BUILTINS;
5775 }
5776 else if (POINTER_TYPE_P (parmtype))
5777 {
5778 if (! POINTER_TYPE_P (argtype))
5779 return END_BUILTINS;
5780 }
5781 else if (INTEGRAL_TYPE_P (parmtype))
5782 {
5783 if (! INTEGRAL_TYPE_P (argtype))
5784 return END_BUILTINS;
5785 }
5786 else
5787 return END_BUILTINS;
5788
5789 arglist = TREE_CHAIN (arglist);
5790 }
5791
5792 /* Variable-length argument list. */
5793 return DECL_FUNCTION_CODE (fndecl);
5794 }
5795
5796 /* Fold a call to __builtin_constant_p, if we know it will evaluate to a
5797 constant. ARGLIST is the argument list of the call. */
5798
5799 static tree
5800 fold_builtin_constant_p (tree arglist)
5801 {
5802 if (arglist == 0)
5803 return 0;
5804
5805 arglist = TREE_VALUE (arglist);
5806
5807 /* We return 1 for a numeric type that's known to be a constant
5808 value at compile-time or for an aggregate type that's a
5809 literal constant. */
5810 STRIP_NOPS (arglist);
5811
5812 /* If we know this is a constant, return the integer constant one. */
5813 if (CONSTANT_CLASS_P (arglist)
5814 || (TREE_CODE (arglist) == CONSTRUCTOR
5815 && TREE_CONSTANT (arglist))
5816 || (TREE_CODE (arglist) == ADDR_EXPR
5817 && TREE_CODE (TREE_OPERAND (arglist, 0)) == STRING_CST))
5818 return integer_one_node;
5819
5820 /* If this expression has side effects, show we don't know it to be a
5821 constant. Likewise if it's a pointer or aggregate type, since in
5822 those cases we only want literals, which are only optimized
5823 when generating RTL, not later.
5824 And finally, if we are compiling an initializer, not code, we
5825 need to return a definite result now; there's not going to be any
5826 more optimization done. */
5827 if (TREE_SIDE_EFFECTS (arglist)
5828 || AGGREGATE_TYPE_P (TREE_TYPE (arglist))
5829 || POINTER_TYPE_P (TREE_TYPE (arglist))
5830 || cfun == 0)
5831 return integer_zero_node;
5832
5833 return 0;
5834 }
5835
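A stand-alone usage sketch of the cases this folder recognizes (the output noted in the comment is the typical unoptimized result; with optimization the local-variable case may also report 1):

#include <stdio.h>

int
main (void)
{
  int x = 42;

  /* A literal and a string constant are known constants; an ordinary
     variable generally is not at -O0.  Typically prints: 1 1 0  */
  printf ("%d %d %d\n",
          __builtin_constant_p (123),
          __builtin_constant_p ("abc"),
          __builtin_constant_p (x));
  return 0;
}
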
5836 /* Fold a call to __builtin_expect, if we expect that a comparison against
5837 the argument will fold to a constant. In practice, this means a true
5838 constant or the address of a non-weak symbol. ARGLIST is the argument
5839 list of the call. */
5840
5841 static tree
5842 fold_builtin_expect (tree arglist)
5843 {
5844 tree arg, inner;
5845
5846 if (arglist == 0)
5847 return 0;
5848
5849 arg = TREE_VALUE (arglist);
5850
5851 /* If the argument isn't invariant, then there's nothing we can do. */
5852 if (!TREE_INVARIANT (arg))
5853 return 0;
5854
5855 /* If we're looking at an address of a weak decl, then do not fold. */
5856 inner = arg;
5857 STRIP_NOPS (inner);
5858 if (TREE_CODE (inner) == ADDR_EXPR)
5859 {
5860 do
5861 {
5862 inner = TREE_OPERAND (inner, 0);
5863 }
5864 while (TREE_CODE (inner) == COMPONENT_REF
5865 || TREE_CODE (inner) == ARRAY_REF);
5866 if (DECL_P (inner) && DECL_WEAK (inner))
5867 return 0;
5868 }
5869
5870 /* Otherwise, ARG already has the proper type for the return value. */
5871 return arg;
5872 }
5873
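A typical use of the builtin handled here, as a stand-alone sketch; the likely/unlikely macro names are a common convention assumed for illustration, not something GCC defines:

#include <stdio.h>

#define likely(x)   __builtin_expect (!!(x), 1)
#define unlikely(x) __builtin_expect (!!(x), 0)

static int
process (int n)
{
  if (unlikely (n < 0))
    return -1;          /* expected-cold path */
  return n * 2;         /* expected-hot path */
}

int
main (void)
{
  printf ("%d\n", process (21));   /* prints: 42 */
  return 0;
}
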
5874 /* Fold a call to __builtin_classify_type. */
5875
5876 static tree
5877 fold_builtin_classify_type (tree arglist)
5878 {
5879 if (arglist == 0)
5880 return build_int_cst (NULL_TREE, no_type_class);
5881
5882 return build_int_cst (NULL_TREE,
5883 type_to_class (TREE_TYPE (TREE_VALUE (arglist))));
5884 }
5885
5886 /* Fold a call to __builtin_strlen. */
5887
5888 static tree
5889 fold_builtin_strlen (tree arglist)
5890 {
5891 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
5892 return NULL_TREE;
5893 else
5894 {
5895 tree len = c_strlen (TREE_VALUE (arglist), 0);
5896
5897 if (len)
5898 {
5899 /* Convert from the internal "sizetype" type to "size_t". */
5900 if (size_type_node)
5901 len = fold_convert (size_type_node, len);
5902 return len;
5903 }
5904
5905 return NULL_TREE;
5906 }
5907 }
5908
5909 /* Fold a call to __builtin_inf or __builtin_huge_val. */
5910
5911 static tree
5912 fold_builtin_inf (tree type, int warn)
5913 {
5914 REAL_VALUE_TYPE real;
5915
5916 /* __builtin_inff is intended to be usable to define INFINITY on all
5917 targets. If an infinity is not available, INFINITY expands "to a
5918 positive constant of type float that overflows at translation
5919 time", footnote "In this case, using INFINITY will violate the
5920 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
5921 Thus we pedwarn to ensure this constraint violation is
5922 diagnosed. */
5923 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
5924 pedwarn ("target format does not support infinity");
5925
5926 real_inf (&real);
5927 return build_real (type, real);
5928 }
5929
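A minimal usage sketch, assuming an IEEE target with infinities (so the pedwarn above is not triggered); glibc's <math.h> commonly defines HUGE_VAL and INFINITY in terms of these builtins:

#include <stdio.h>
#include <math.h>

int
main (void)
{
  double d = __builtin_huge_val ();
  float f = __builtin_inff ();

  printf ("%d %d\n", d == HUGE_VAL, isinf (f) != 0);   /* prints: 1 1 */
  return 0;
}
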
5930 /* Fold a call to __builtin_nan or __builtin_nans. */
5931
5932 static tree
5933 fold_builtin_nan (tree arglist, tree type, int quiet)
5934 {
5935 REAL_VALUE_TYPE real;
5936 const char *str;
5937
5938 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
5939 return 0;
5940 str = c_getstr (TREE_VALUE (arglist));
5941 if (!str)
5942 return 0;
5943
5944 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
5945 return 0;
5946
5947 return build_real (type, real);
5948 }
5949
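A stand-alone usage sketch; the string argument selects the NaN payload and an empty string requests the default one:

#include <stdio.h>
#include <math.h>

int
main (void)
{
  double q = __builtin_nan ("");    /* quiet NaN, default payload */
  double s = __builtin_nans ("");   /* signaling NaN */

  printf ("%d %d\n", isnan (q) != 0, isnan (s) != 0);   /* prints: 1 1 */
  return 0;
}
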
5950 /* Return true if the floating point expression T has an integer value.
5951 We also allow +Inf, -Inf and NaN to be considered integer values. */
5952
5953 static bool
5954 integer_valued_real_p (tree t)
5955 {
5956 switch (TREE_CODE (t))
5957 {
5958 case FLOAT_EXPR:
5959 return true;
5960
5961 case ABS_EXPR:
5962 case SAVE_EXPR:
5963 case NON_LVALUE_EXPR:
5964 return integer_valued_real_p (TREE_OPERAND (t, 0));
5965
5966 case COMPOUND_EXPR:
5967 case MODIFY_EXPR:
5968 case BIND_EXPR:
5969 return integer_valued_real_p (TREE_OPERAND (t, 1));
5970
5971 case PLUS_EXPR:
5972 case MINUS_EXPR:
5973 case MULT_EXPR:
5974 case MIN_EXPR:
5975 case MAX_EXPR:
5976 return integer_valued_real_p (TREE_OPERAND (t, 0))
5977 && integer_valued_real_p (TREE_OPERAND (t, 1));
5978
5979 case COND_EXPR:
5980 return integer_valued_real_p (TREE_OPERAND (t, 1))
5981 && integer_valued_real_p (TREE_OPERAND (t, 2));
5982
5983 case REAL_CST:
5984 if (! TREE_CONSTANT_OVERFLOW (t))
5985 {
5986 REAL_VALUE_TYPE c, cint;
5987
5988 c = TREE_REAL_CST (t);
5989 real_trunc (&cint, TYPE_MODE (TREE_TYPE (t)), &c);
5990 return real_identical (&c, &cint);
5991 }
5992
5993 case NOP_EXPR:
5994 {
5995 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
5996 if (TREE_CODE (type) == INTEGER_TYPE)
5997 return true;
5998 if (TREE_CODE (type) == REAL_TYPE)
5999 return integer_valued_real_p (TREE_OPERAND (t, 0));
6000 break;
6001 }
6002
6003 case CALL_EXPR:
6004 switch (builtin_mathfn_code (t))
6005 {
6006 case BUILT_IN_CEIL:
6007 case BUILT_IN_CEILF:
6008 case BUILT_IN_CEILL:
6009 case BUILT_IN_FLOOR:
6010 case BUILT_IN_FLOORF:
6011 case BUILT_IN_FLOORL:
6012 case BUILT_IN_NEARBYINT:
6013 case BUILT_IN_NEARBYINTF:
6014 case BUILT_IN_NEARBYINTL:
6015 case BUILT_IN_RINT:
6016 case BUILT_IN_RINTF:
6017 case BUILT_IN_RINTL:
6018 case BUILT_IN_ROUND:
6019 case BUILT_IN_ROUNDF:
6020 case BUILT_IN_ROUNDL:
6021 case BUILT_IN_TRUNC:
6022 case BUILT_IN_TRUNCF:
6023 case BUILT_IN_TRUNCL:
6024 return true;
6025
6026 default:
6027 break;
6028 }
6029 break;
6030
6031 default:
6032 break;
6033 }
6034 return false;
6035 }
6036
6037 /* EXP is assumed to be a builtin call where truncation can be propagated
6038 across (for instance floor((double)f) == (double)floorf (f)).
6039 Do the transformation. */
6040
6041 static tree
6042 fold_trunc_transparent_mathfn (tree exp)
6043 {
6044 tree fndecl = get_callee_fndecl (exp);
6045 tree arglist = TREE_OPERAND (exp, 1);
6046 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6047 tree arg;
6048
6049 if (! validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
6050 return 0;
6051
6052 arg = TREE_VALUE (arglist);
6053 /* Integer rounding functions are idempotent. */
6054 if (fcode == builtin_mathfn_code (arg))
6055 return arg;
6056
6057 /* If argument is already integer valued, and we don't need to worry
6058 about setting errno, there's no need to perform rounding. */
6059 if (! flag_errno_math && integer_valued_real_p (arg))
6060 return arg;
6061
6062 if (optimize)
6063 {
6064 tree arg0 = strip_float_extensions (arg);
6065 tree ftype = TREE_TYPE (exp);
6066 tree newtype = TREE_TYPE (arg0);
6067 tree decl;
6068
6069 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6070 && (decl = mathfn_built_in (newtype, fcode)))
6071 {
6072 arglist =
6073 build_tree_list (NULL_TREE, fold_convert (newtype, arg0));
6074 return fold_convert (ftype,
6075 build_function_call_expr (decl, arglist));
6076 }
6077 }
6078 return 0;
6079 }
6080
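A stand-alone numeric check of the identity the comment above relies on; it holds because every float value is exactly representable as a double:

#include <stdio.h>
#include <math.h>

int
main (void)
{
  float f = 2.75f;
  double wide = floor ((double) f);      /* round in double */
  double narrow = (double) floorf (f);   /* round in float, then widen */

  printf ("%g %g %d\n", wide, narrow, wide == narrow);   /* prints: 2 2 1 */
  return 0;
}
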
6081 /* EXP is assumed to be a builtin call which can narrow the FP type of
6082 the argument, for instance lround((double)f) -> lroundf (f). */
6083
6084 static tree
6085 fold_fixed_mathfn (tree exp)
6086 {
6087 tree fndecl = get_callee_fndecl (exp);
6088 tree arglist = TREE_OPERAND (exp, 1);
6089 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6090 tree arg;
6091
6092 if (! validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
6093 return 0;
6094
6095 arg = TREE_VALUE (arglist);
6096
6097 /* If argument is already integer valued, and we don't need to worry
6098 about setting errno, there's no need to perform rounding. */
6099 if (! flag_errno_math && integer_valued_real_p (arg))
6100 return fold (build1 (FIX_TRUNC_EXPR, TREE_TYPE (exp), arg));
6101
6102 if (optimize)
6103 {
6104 tree ftype = TREE_TYPE (arg);
6105 tree arg0 = strip_float_extensions (arg);
6106 tree newtype = TREE_TYPE (arg0);
6107 tree decl;
6108
6109 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6110 && (decl = mathfn_built_in (newtype, fcode)))
6111 {
6112 arglist =
6113 build_tree_list (NULL_TREE, fold_convert (newtype, arg0));
6114 return build_function_call_expr (decl, arglist);
6115 }
6116 }
6117 return 0;
6118 }
6119
6120 /* Fold function call to builtin cabs, cabsf or cabsl. ARGLIST
6121 is the argument list and TYPE is the return type. Return
6122 NULL_TREE if no simplification can be made. */
6123
6124 static tree
6125 fold_builtin_cabs (tree arglist, tree type)
6126 {
6127 tree arg;
6128
6129 if (!arglist || TREE_CHAIN (arglist))
6130 return NULL_TREE;
6131
6132 arg = TREE_VALUE (arglist);
6133 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
6134 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
6135 return NULL_TREE;
6136
6137 /* Evaluate cabs of a constant at compile-time. */
6138 if (flag_unsafe_math_optimizations
6139 && TREE_CODE (arg) == COMPLEX_CST
6140 && TREE_CODE (TREE_REALPART (arg)) == REAL_CST
6141 && TREE_CODE (TREE_IMAGPART (arg)) == REAL_CST
6142 && ! TREE_CONSTANT_OVERFLOW (TREE_REALPART (arg))
6143 && ! TREE_CONSTANT_OVERFLOW (TREE_IMAGPART (arg)))
6144 {
6145 REAL_VALUE_TYPE r, i;
6146
6147 r = TREE_REAL_CST (TREE_REALPART (arg));
6148 i = TREE_REAL_CST (TREE_IMAGPART (arg));
6149
6150 real_arithmetic (&r, MULT_EXPR, &r, &r);
6151 real_arithmetic (&i, MULT_EXPR, &i, &i);
6152 real_arithmetic (&r, PLUS_EXPR, &r, &i);
6153 if (real_sqrt (&r, TYPE_MODE (type), &r)
6154 || ! flag_trapping_math)
6155 return build_real (type, r);
6156 }
6157
6158 /* If either part is zero, cabs is fabs of the other. */
6159 if (TREE_CODE (arg) == COMPLEX_EXPR
6160 && real_zerop (TREE_OPERAND (arg, 0)))
6161 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg, 1)));
6162 if (TREE_CODE (arg) == COMPLEX_EXPR
6163 && real_zerop (TREE_OPERAND (arg, 1)))
6164 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg, 0)));
6165
6166 /* Don't do this when optimizing for size. */
6167 if (flag_unsafe_math_optimizations
6168 && optimize && !optimize_size)
6169 {
6170 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
6171
6172 if (sqrtfn != NULL_TREE)
6173 {
6174 tree rpart, ipart, result, arglist;
6175
6176 arg = builtin_save_expr (arg);
6177
6178 rpart = fold (build1 (REALPART_EXPR, type, arg));
6179 ipart = fold (build1 (IMAGPART_EXPR, type, arg));
6180
6181 rpart = builtin_save_expr (rpart);
6182 ipart = builtin_save_expr (ipart);
6183
6184 result = fold (build2 (PLUS_EXPR, type,
6185 fold (build2 (MULT_EXPR, type,
6186 rpart, rpart)),
6187 fold (build2 (MULT_EXPR, type,
6188 ipart, ipart))));
6189
6190 arglist = build_tree_list (NULL_TREE, result);
6191 return build_function_call_expr (sqrtfn, arglist);
6192 }
6193 }
6194
6195 return NULL_TREE;
6196 }
6197
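An illustrative check of the expansion used above, written against C99 <complex.h> (link with -lm); for 3+4i the modulus is exactly 5:

#include <stdio.h>
#include <math.h>
#include <complex.h>

int
main (void)
{
  double complex z = 3.0 + 4.0 * I;
  double r = creal (z), i = cimag (z);

  /* The fold above rewrites cabs (z) as sqrt (r*r + i*i) when unsafe
     math optimizations permit.  */
  printf ("%g %g\n", cabs (z), sqrt (r * r + i * i));   /* prints: 5 5 */
  return 0;
}
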
6198 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl. Return
6199 NULL_TREE if no simplification can be made. */
6200
6201 static tree
6202 fold_builtin_sqrt (tree arglist, tree type)
6203 {
6204
6205 enum built_in_function fcode;
6206 tree arg = TREE_VALUE (arglist);
6207
6208 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
6209 return NULL_TREE;
6210
6211 /* Optimize sqrt of constant value. */
6212 if (TREE_CODE (arg) == REAL_CST
6213 && ! TREE_CONSTANT_OVERFLOW (arg))
6214 {
6215 REAL_VALUE_TYPE r, x;
6216
6217 x = TREE_REAL_CST (arg);
6218 if (real_sqrt (&r, TYPE_MODE (type), &x)
6219 || (!flag_trapping_math && !flag_errno_math))
6220 return build_real (type, r);
6221 }
6222
6223 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
6224 fcode = builtin_mathfn_code (arg);
6225 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
6226 {
6227 tree expfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
6228 arg = fold (build2 (MULT_EXPR, type,
6229 TREE_VALUE (TREE_OPERAND (arg, 1)),
6230 build_real (type, dconsthalf)));
6231 arglist = build_tree_list (NULL_TREE, arg);
6232 return build_function_call_expr (expfn, arglist);
6233 }
6234
6235 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
6236 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
6237 {
6238 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6239
6240 if (powfn)
6241 {
6242 tree arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
6243 tree tree_root;
6244 /* The inner root was either sqrt or cbrt. */
6245 REAL_VALUE_TYPE dconstroot =
6246 BUILTIN_SQRT_P (fcode) ? dconsthalf : dconstthird;
6247
6248 /* Adjust for the outer root. */
6249 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
6250 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
6251 tree_root = build_real (type, dconstroot);
6252 arglist = tree_cons (NULL_TREE, arg0,
6253 build_tree_list (NULL_TREE, tree_root));
6254 return build_function_call_expr (powfn, arglist);
6255 }
6256 }
6257
6258 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
6259 if (flag_unsafe_math_optimizations
6260 && (fcode == BUILT_IN_POW
6261 || fcode == BUILT_IN_POWF
6262 || fcode == BUILT_IN_POWL))
6263 {
6264 tree powfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
6265 tree arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
6266 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg, 1)));
6267 tree narg1;
6268 if (!tree_expr_nonnegative_p (arg0))
6269 arg0 = build1 (ABS_EXPR, type, arg0);
6270 narg1 = fold (build2 (MULT_EXPR, type, arg1,
6271 build_real (type, dconsthalf)));
6272 arglist = tree_cons (NULL_TREE, arg0,
6273 build_tree_list (NULL_TREE, narg1));
6274 return build_function_call_expr (powfn, arglist);
6275 }
6276
6277 return NULL_TREE;
6278 }
6279
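The identities exploited above, checked numerically in a stand-alone sketch; they are exact mathematically but are only applied under -funsafe-math-optimizations because the two sides can differ in the last bits:

#include <stdio.h>
#include <math.h>

int
main (void)
{
  double x = 1.7, y = 3.0;

  printf ("%.17g %.17g\n", sqrt (exp (x)), exp (x * 0.5));        /* sqrt(expN(x))  */
  printf ("%.17g %.17g\n", sqrt (sqrt (x)), pow (x, 0.25));       /* sqrt(Nroot(x)) */
  printf ("%.17g %.17g\n", sqrt (pow (x, y)), pow (x, y * 0.5));  /* sqrt(pow(x,y)) */
  return 0;
}
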
6280 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl. Return
6281 NULL_TREE if no simplification can be made. */
6282 static tree
6283 fold_builtin_cbrt (tree arglist, tree type)
6284 {
6285 tree arg = TREE_VALUE (arglist);
6286 const enum built_in_function fcode = builtin_mathfn_code (arg);
6287
6288 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
6289 return NULL_TREE;
6290
6291 /* Optimize cbrt of constant value. */
6292 if (real_zerop (arg) || real_onep (arg) || real_minus_onep (arg))
6293 return arg;
6294
6295 /* Optimize cbrt(expN(x)) -> expN(x/3). */
6296 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
6297 {
6298 tree expfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
6299 const REAL_VALUE_TYPE third_trunc =
6300 real_value_truncate (TYPE_MODE (type), dconstthird);
6301 arg = fold (build2 (MULT_EXPR, type,
6302 TREE_VALUE (TREE_OPERAND (arg, 1)),
6303 build_real (type, third_trunc)));
6304 arglist = build_tree_list (NULL_TREE, arg);
6305 return build_function_call_expr (expfn, arglist);
6306 }
6307
6308 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
6309 /* We don't optimize cbrt(cbrt(x)) -> pow(x,1/9) because if x is
6310 negative, pow will raise a domain error but cbrt won't. */
6311 if (flag_unsafe_math_optimizations && BUILTIN_SQRT_P (fcode))
6312 {
6313 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6314
6315 if (powfn)
6316 {
6317 tree arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
6318 tree tree_root;
6319 REAL_VALUE_TYPE dconstroot = dconstthird;
6320
6321 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
6322 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
6323 tree_root = build_real (type, dconstroot);
6324 arglist = tree_cons (NULL_TREE, arg0,
6325 build_tree_list (NULL_TREE, tree_root));
6326 return build_function_call_expr (powfn, arglist);
6327 }
6328
6329 }
6330 return NULL_TREE;
6331 }
6332
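A stand-alone numeric illustration of the rewrite above (link with -lm); with x = 64 both sides are exactly 2:

#include <stdio.h>
#include <math.h>

int
main (void)
{
  double x = 64.0;

  /* cbrt(sqrt(x)) -> pow(x, 1/6).  The reverse nesting cbrt(cbrt(x)) is
     deliberately not rewritten, for the reason given above.  */
  printf ("%g %g\n", cbrt (sqrt (x)), pow (x, 1.0 / 6.0));   /* prints: 2 2 */
  return 0;
}
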
6333 /* Fold function call to builtin sin, sinf, or sinl. Return
6334 NULL_TREE if no simplification can be made. */
6335 static tree
6336 fold_builtin_sin (tree arglist)
6337 {
6338 tree arg = TREE_VALUE (arglist);
6339
6340 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
6341 return NULL_TREE;
6342
6343 /* Optimize sin (0.0) = 0.0. */
6344 if (real_zerop (arg))
6345 return arg;
6346
6347 return NULL_TREE;
6348 }
6349
6350 /* Fold function call to builtin cos, cosf, or cosl. Return
6351 NULL_TREE if no simplification can be made. */
6352 static tree
6353 fold_builtin_cos (tree arglist, tree type, tree fndecl)
6354 {
6355 tree arg = TREE_VALUE (arglist);
6356
6357 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
6358 return NULL_TREE;
6359
6360 /* Optimize cos (0.0) = 1.0. */
6361 if (real_zerop (arg))
6362 return build_real (type, dconst1);
6363
6364 /* Optimize cos(-x) into cos (x). */
6365 if (TREE_CODE (arg) == NEGATE_EXPR)
6366 {
6367 tree args = build_tree_list (NULL_TREE,
6368 TREE_OPERAND (arg, 0));
6369 return build_function_call_expr (fndecl, args);
6370 }
6371
6372 return NULL_TREE;
6373 }
6374
6375 /* Fold function call to builtin tan, tanf, or tanl. Return
6376 NULL_TREE if no simplification can be made. */
6377 static tree
6378 fold_builtin_tan (tree arglist)
6379 {
6380 enum built_in_function fcode;
6381 tree arg = TREE_VALUE (arglist);
6382
6383 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
6384 return NULL_TREE;
6385
6386 /* Optimize tan(0.0) = 0.0. */
6387 if (real_zerop (arg))
6388 return arg;
6389
6390 /* Optimize tan(atan(x)) = x. */
6391 fcode = builtin_mathfn_code (arg);
6392 if (flag_unsafe_math_optimizations
6393 && (fcode == BUILT_IN_ATAN
6394 || fcode == BUILT_IN_ATANF
6395 || fcode == BUILT_IN_ATANL))
6396 return TREE_VALUE (TREE_OPERAND (arg, 1));
6397
6398 return NULL_TREE;
6399 }
6400
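A stand-alone check of the sin, cos and tan folds above; sin(0) = tan(0) = 0 and cos(0) = 1 are folded unconditionally, while tan(atan(x)) = x is an unsafe-math rewrite:

#include <stdio.h>
#include <math.h>

int
main (void)
{
  double x = 0.73;

  printf ("%g %g %g\n", sin (0.0), cos (0.0), tan (0.0));   /* prints: 0 1 0 */
  printf ("%.17g %.17g\n", tan (atan (x)), x);
  return 0;
}
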
6401 /* Fold function call to builtin atan, atanf, or atanl. Return
6402 NULL_TREE if no simplification can be made. */
6403
6404 static tree
6405 fold_builtin_atan (tree arglist, tree type)
6406 {
6407
6408 tree arg = TREE_VALUE (arglist);
6409
6410 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
6411 return NULL_TREE;
6412
6413 /* Optimize atan(0.0) = 0.0. */
6414 if (real_zerop (arg))
6415 return arg;
6416
6417 /* Optimize atan(1.0) = pi/4. */
6418 if (real_onep (arg))
6419 {
6420 REAL_VALUE_TYPE cst;
6421
6422 real_convert (&cst, TYPE_MODE (type), &dconstpi);
6423 SET_REAL_EXP (&cst, REAL_EXP (&cst) - 2);
6424 return build_real (type, cst);
6425 }
6426
6427 return NULL_TREE;
6428 }
6429
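A quick stand-alone check of the constant folds above; acos(-1) is used for pi to stay within standard C:

#include <stdio.h>
#include <math.h>

int
main (void)
{
  printf ("%.17g\n", atan (0.0));                            /* 0    */
  printf ("%.17g %.17g\n", atan (1.0), acos (-1.0) / 4.0);   /* pi/4 */
  return 0;
}
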
6430 /* Fold function call to builtin trunc, truncf or truncl. Return
6431 NULL_TREE if no simplification can be made. */
6432
6433 static tree
6434 fold_builtin_trunc (tree exp)
6435 {
6436 tree arglist = TREE_OPERAND (exp, 1);
6437 tree arg;
6438
6439 if (! validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
6440 return 0;
6441
6442 /* Optimize trunc of constant value. */
6443 arg = TREE_VALUE (arglist);
6444 if (TREE_CODE (arg) == REAL_CST && ! TREE_CONSTANT_OVERFLOW (arg))
6445 {
6446 REAL_VALUE_TYPE r, x;
6447 tree type = TREE_TYPE (exp);
6448
6449 x = TREE_REAL_CST (arg);
6450 real_trunc (&r, TYPE_MODE (type), &x);
6451 return build_real (type, r);
6452 }
6453
6454 return fold_trunc_transparent_mathfn (exp);
6455 }
6456
6457 /* Fold function call to builtin floor, floorf or floorl. Return
6458 NULL_TREE if no simplification can be made. */
6459
6460 static tree
6461 fold_builtin_floor (tree exp)
6462 {
6463 tree arglist = TREE_OPERAND (exp, 1);
6464 tree arg;
6465
6466 if (! validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
6467 return 0;
6468
6469 /* Optimize floor of constant value. */
6470 arg = TREE_VALUE (arglist);
6471 if (TREE_CODE (arg) == REAL_CST && ! TREE_CONSTANT_OVERFLOW (arg))
6472 {
6473 REAL_VALUE_TYPE x;
6474
6475 x = TREE_REAL_CST (arg);
6476 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
6477 {
6478 tree type = TREE_TYPE (exp);
6479 REAL_VALUE_TYPE r;
6480
6481 real_floor (&r, TYPE_MODE (type), &x);
6482 return build_real (type, r);
6483 }
6484 }
6485
6486 return fold_trunc_transparent_mathfn (exp);
6487 }
6488
6489 /* Fold function call to builtin ceil, ceilf or ceill. Return
6490 NULL_TREE if no simplification can be made. */
6491
6492 static tree
6493 fold_builtin_ceil (tree exp)
6494 {
6495 tree arglist = TREE_OPERAND (exp, 1);
6496 tree arg;
6497
6498 if (! validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
6499 return 0;
6500
6501 /* Optimize ceil of constant value. */
6502 arg = TREE_VALUE (arglist);
6503 if (TREE_CODE (arg) == REAL_CST && ! TREE_CONSTANT_OVERFLOW (arg))
6504 {
6505 REAL_VALUE_TYPE x;
6506
6507 x = TREE_REAL_CST (arg);
6508 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
6509 {
6510 tree type = TREE_TYPE (exp);
6511 REAL_VALUE_TYPE r;
6512
6513 real_ceil (&r, TYPE_MODE (type), &x);
6514 return build_real (type, r);
6515 }
6516 }
6517
6518 return fold_trunc_transparent_mathfn (exp);
6519 }
6520
6521 /* Fold function call to builtin round, roundf or roundl. Return
6522 NULL_TREE if no simplification can be made. */
6523
6524 static tree
6525 fold_builtin_round (tree exp)
6526 {
6527 tree arglist = TREE_OPERAND (exp, 1);
6528 tree arg;
6529
6530 if (! validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
6531 return 0;
6532
6533 /* Optimize round of constant value. */
6534 arg = TREE_VALUE (arglist);
6535 if (TREE_CODE (arg) == REAL_CST && ! TREE_CONSTANT_OVERFLOW (arg))
6536 {
6537 REAL_VALUE_TYPE x;
6538
6539 x = TREE_REAL_CST (arg);
6540 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
6541 {
6542 tree type = TREE_TYPE (exp);
6543 REAL_VALUE_TYPE r;
6544
6545 real_round (&r, TYPE_MODE (type), &x);
6546 return build_real (type, r);
6547 }
6548 }
6549
6550 return fold_trunc_transparent_mathfn (exp);
6551 }
6552
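The four constant folds above mirror the library semantics; a stand-alone comparison on a negative halfway case:

#include <stdio.h>
#include <math.h>

int
main (void)
{
  double x = -2.5;

  /* trunc toward zero, floor down, ceil up, round halfway away from zero.
     Prints: -2 -3 -2 -3  */
  printf ("%g %g %g %g\n", trunc (x), floor (x), ceil (x), round (x));
  return 0;
}
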
6553 /* Fold function call to builtin lround, lroundf or lroundl (or the
6554 corresponding long long versions). Return NULL_TREE if no
6555 simplification can be made. */
6556
6557 static tree
6558 fold_builtin_lround (tree exp)
6559 {
6560 tree arglist = TREE_OPERAND (exp, 1);
6561 tree arg;
6562
6563 if (! validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
6564 return 0;
6565
6566 /* Optimize lround of constant value. */
6567 arg = TREE_VALUE (arglist);
6568 if (TREE_CODE (arg) == REAL_CST && ! TREE_CONSTANT_OVERFLOW (arg))
6569 {
6570 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
6571
6572 if (! REAL_VALUE_ISNAN (x) && ! REAL_VALUE_ISINF (x))
6573 {
6574 tree itype = TREE_TYPE (exp), ftype = TREE_TYPE (arg), result;
6575 HOST_WIDE_INT hi, lo;
6576 REAL_VALUE_TYPE r;
6577
6578 real_round (&r, TYPE_MODE (ftype), &x);
6579 REAL_VALUE_TO_INT (&lo, &hi, r);
6580 result = build_int_cst_wide (NULL_TREE, lo, hi);
6581 if (int_fits_type_p (result, itype))
6582 return fold_convert (itype, result);
6583 }
6584 }
6585
6586 return fold_fixed_mathfn (exp);
6587 }
6588
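The rounding performed by real_round above is intended to match C99 lround: halfway cases go away from zero regardless of the current rounding direction. A stand-alone check:

#include <stdio.h>
#include <math.h>

int
main (void)
{
  printf ("%ld %ld %ld\n",
          lround (2.5),     /*  3 */
          lround (-2.5),    /* -3 */
          lround (3.49));   /*  3 */
  return 0;
}
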
6589 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
6590 and their long and long long variants (e.g. ffsl and ffsll).
6591 Return NULL_TREE if no simplification can be made. */
6592
6593 static tree
6594 fold_builtin_bitop (tree exp)
6595 {
6596 tree fndecl = get_callee_fndecl (exp);
6597 tree arglist = TREE_OPERAND (exp, 1);
6598 tree arg;
6599
6600 if (! validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
6601 return NULL_TREE;
6602
6603 /* Optimize for constant argument. */
6604 arg = TREE_VALUE (arglist);
6605 if (TREE_CODE (arg) == INTEGER_CST && ! TREE_CONSTANT_OVERFLOW (arg))
6606 {
6607 HOST_WIDE_INT hi, width, result;
6608 unsigned HOST_WIDE_INT lo;
6609 tree type;
6610
6611 type = TREE_TYPE (arg);
6612 width = TYPE_PRECISION (type);
6613 lo = TREE_INT_CST_LOW (arg);
6614
6615 /* Clear all the bits that are beyond the type's precision. */
6616 if (width > HOST_BITS_PER_WIDE_INT)
6617 {
6618 hi = TREE_INT_CST_HIGH (arg);
6619 if (width < 2 * HOST_BITS_PER_WIDE_INT)
6620 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
6621 }
6622 else
6623 {
6624 hi = 0;
6625 if (width < HOST_BITS_PER_WIDE_INT)
6626 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
6627 }
6628
6629 switch (DECL_FUNCTION_CODE (fndecl))
6630 {
6631 case BUILT_IN_FFS:
6632 case BUILT_IN_FFSL:
6633 case BUILT_IN_FFSLL:
6634 if (lo != 0)
6635 result = exact_log2 (lo & -lo) + 1;
6636 else if (hi != 0)
6637 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
6638 else
6639 result = 0;
6640 break;
6641
6642 case BUILT_IN_CLZ:
6643 case BUILT_IN_CLZL:
6644 case BUILT_IN_CLZLL:
6645 if (hi != 0)
6646 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
6647 else if (lo != 0)
6648 result = width - floor_log2 (lo) - 1;
6649 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
6650 result = width;
6651 break;
6652
6653 case BUILT_IN_CTZ:
6654 case BUILT_IN_CTZL:
6655 case BUILT_IN_CTZLL:
6656 if (lo != 0)
6657 result = exact_log2 (lo & -lo);
6658 else if (hi != 0)
6659 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
6660 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
6661 result = width;
6662 break;
6663
6664 case BUILT_IN_POPCOUNT:
6665 case BUILT_IN_POPCOUNTL:
6666 case BUILT_IN_POPCOUNTLL:
6667 result = 0;
6668 while (lo)
6669 result++, lo &= lo - 1;
6670 while (hi)
6671 result++, hi &= hi - 1;
6672 break;
6673
6674 case BUILT_IN_PARITY:
6675 case BUILT_IN_PARITYL:
6676 case BUILT_IN_PARITYLL:
6677 result = 0;
6678 while (lo)
6679 result++, lo &= lo - 1;
6680 while (hi)
6681 result++, hi &= hi - 1;
6682 result &= 1;
6683 break;
6684
6685 default:
6686 gcc_unreachable ();
6687 }
6688
6689 return build_int_cst (TREE_TYPE (exp), result);
6690 }
6691
6692 return NULL_TREE;
6693 }
6694
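The constant cases handled above, checked through the user-visible builtins in a stand-alone sketch (values assume a 32-bit unsigned int):

#include <stdio.h>

int
main (void)
{
  unsigned int v = 0x00F0u;   /* bits 4..7 set */

  /* ffs is 1-based, so the lowest set bit (bit 4) gives 5; with 32-bit int
     there are 24 leading and 4 trailing zeros, 4 bits set, and the parity
     of the population count is even.  Prints: 5 24 4 4 0  */
  printf ("%d %d %d %d %d\n",
          __builtin_ffs (v),
          __builtin_clz (v),
          __builtin_ctz (v),
          __builtin_popcount (v),
          __builtin_parity (v));
  return 0;
}
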
6695 /* Return true if EXPR is the real constant contained in VALUE. */
6696
6697 static bool
6698 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
6699 {
6700 STRIP_NOPS (expr);
6701
6702 return ((TREE_CODE (expr) == REAL_CST
6703 && ! TREE_CONSTANT_OVERFLOW (expr)
6704 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
6705 || (TREE_CODE (expr) == COMPLEX_CST
6706 && real_dconstp (TREE_REALPART (expr), value)
6707 && real_zerop (TREE_IMAGPART (expr))));
6708 }
6709
6710 /* A subroutine of fold_builtin to fold the various logarithmic
6711 functions. EXP is the CALL_EXPR of a call to a builtin logN
6712 function. VALUE is the base of the logN function. */
6713
6714 static tree
6715 fold_builtin_logarithm (tree exp, const REAL_VALUE_TYPE *value)
6716 {
6717 tree arglist = TREE_OPERAND (exp, 1);
6718
6719 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
6720 {
6721 tree fndecl = get_callee_fndecl (exp);
6722 tree type = TREE_TYPE (TREE_TYPE (fndecl));
6723 tree arg = TREE_VALUE (arglist);
6724 const enum built_in_function fcode = builtin_mathfn_code (arg);
6725
6726 /* Optimize logN(1.0) = 0.0. */
6727 if (real_onep (arg))
6728 return build_real (type, dconst0);
6729
6730 /* Optimize logN(N) = 1.0. If N can't be truncated to MODE
6731 exactly, then only do this if flag_unsafe_math_optimizations. */
6732 if (exact_real_truncate (TYPE_MODE (type), value)
6733 || flag_unsafe_math_optimizations)
6734 {
6735 const REAL_VALUE_TYPE value_truncate =
6736 real_value_truncate (TYPE_MODE (type), *value);
6737 if (real_dconstp (arg, &value_truncate))
6738 return build_real (type, dconst1);
6739 }
6740
6741 /* Special case, optimize logN(expN(x)) = x. */
6742 if (flag_unsafe_math_optimizations
6743 && ((value == &dconste
6744 && (fcode == BUILT_IN_EXP
6745 || fcode == BUILT_IN_EXPF
6746 || fcode == BUILT_IN_EXPL))
6747 || (value == &dconst2
6748 && (fcode == BUILT_IN_EXP2
6749 || fcode == BUILT_IN_EXP2F
6750 || fcode == BUILT_IN_EXP2L))
6751 || (value == &dconst10 && (BUILTIN_EXP10_P (fcode)))))
6752 return fold_convert (type, TREE_VALUE (TREE_OPERAND (arg, 1)));
6753
6754 /* Optimize logN(func()) for various exponential functions. We
6755 want to determine the value "x" and the power "exponent" in
6756 order to transform logN(x**exponent) into exponent*logN(x). */
6757 if (flag_unsafe_math_optimizations)
6758 {
6759 tree exponent = 0, x = 0;
6760
6761 switch (fcode)
6762 {
6763 case BUILT_IN_EXP:
6764 case BUILT_IN_EXPF:
6765 case BUILT_IN_EXPL:
5766 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
6767 x = build_real (type,
6768 real_value_truncate (TYPE_MODE (type), dconste));
6769 exponent = TREE_VALUE (TREE_OPERAND (arg, 1));
6770 break;
6771 case BUILT_IN_EXP2:
6772 case BUILT_IN_EXP2F:
6773 case BUILT_IN_EXP2L:
5774 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
6775 x = build_real (type, dconst2);
6776 exponent = TREE_VALUE (TREE_OPERAND (arg, 1));
6777 break;
6778 case BUILT_IN_EXP10:
6779 case BUILT_IN_EXP10F:
6780 case BUILT_IN_EXP10L:
6781 case BUILT_IN_POW10:
6782 case BUILT_IN_POW10F:
6783 case BUILT_IN_POW10L:
5784 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
6785 x = build_real (type, dconst10);
6786 exponent = TREE_VALUE (TREE_OPERAND (arg, 1));
6787 break;
6788 case BUILT_IN_SQRT:
6789 case BUILT_IN_SQRTF:
6790 case BUILT_IN_SQRTL:
5791 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
6792 x = TREE_VALUE (TREE_OPERAND (arg, 1));
6793 exponent = build_real (type, dconsthalf);
6794 break;
6795 case BUILT_IN_CBRT:
6796 case BUILT_IN_CBRTF:
6797 case BUILT_IN_CBRTL:
5798 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
6799 x = TREE_VALUE (TREE_OPERAND (arg, 1));
6800 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
6801 dconstthird));
6802 break;
6803 case BUILT_IN_POW:
6804 case BUILT_IN_POWF:
6805 case BUILT_IN_POWL:
5806 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
6807 x = TREE_VALUE (TREE_OPERAND (arg, 1));
6808 exponent = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg, 1)));
6809 break;
6810 default:
6811 break;
6812 }
6813
6814 /* Now perform the optimization. */
6815 if (x && exponent)
6816 {
6817 tree logfn;
6818 arglist = build_tree_list (NULL_TREE, x);
6819 logfn = build_function_call_expr (fndecl, arglist);
6820 return fold (build2 (MULT_EXPR, type, exponent, logfn));
6821 }
6822 }
6823 }
6824
6825 return 0;
6826 }
6827
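Numeric illustrations of the log rewrites above, as a stand-alone sketch (link with -lm); like the other unsafe-math folds these are mathematical identities whose two sides may differ in the last bits:

#include <stdio.h>
#include <math.h>

int
main (void)
{
  double x = 2.5, y = 4.0;

  printf ("%.17g %.17g\n", log (exp (x)), x);               /* logN(expN(x)) = x */
  printf ("%.17g %.17g\n", log2 (8.0), 3.0);                /* logN(N**k) = k    */
  printf ("%.17g %.17g\n", log (sqrt (x)), 0.5 * log (x));  /* logN(sqrt(x))     */
  printf ("%.17g %.17g\n", log (pow (x, y)), y * log (x));  /* logN(pow(x,y))    */
  return 0;
}
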
6828 /* Fold a builtin function call to pow, powf, or powl. Return
6829 NULL_TREE if no simplification can be made. */
6830 static tree
6831 fold_builtin_pow (tree fndecl, tree arglist, tree type)
6832 {
6833 enum built_in_function fcode;
6834 tree arg0 = TREE_VALUE (arglist);
6835 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6836
6837 if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
6838 return NULL_TREE;
6839
6840 /* Optimize pow(1.0,y) = 1.0. */
6841 if (real_onep (arg0))
6842 return omit_one_operand (type, build_real (type, dconst1), arg1);
6843
6844 if (TREE_CODE (arg1) == REAL_CST
6845 && ! TREE_CONSTANT_OVERFLOW (arg1))
6846 {
6847 REAL_VALUE_TYPE c;
6848 c = TREE_REAL_CST (arg1);
6849
6850 /* Optimize pow(x,0.0) = 1.0. */
6851 if (REAL_VALUES_EQUAL (c, dconst0))
6852 return omit_one_operand (type, build_real (type, dconst1),
6853 arg0);
6854
6855 /* Optimize pow(x,1.0) = x. */
6856 if (REAL_VALUES_EQUAL (c, dconst1))
6857 return arg0;
6858
6859 /* Optimize pow(x,-1.0) = 1.0/x. */
6860 if (REAL_VALUES_EQUAL (c, dconstm1))
6861 return fold (build2 (RDIV_EXPR, type,
6862 build_real (type, dconst1), arg0));
6863
6864 /* Optimize pow(x,0.5) = sqrt(x). */
6865 if (flag_unsafe_math_optimizations
6866 && REAL_VALUES_EQUAL (c, dconsthalf))
6867 {
6868 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
6869
6870 if (sqrtfn != NULL_TREE)
6871 {
6872 tree arglist = build_tree_list (NULL_TREE, arg0);
6873 return build_function_call_expr (sqrtfn, arglist);
6874 }
6875 }
6876
6877 /* Attempt to evaluate pow at compile-time. */
6878 if (TREE_CODE (arg0) == REAL_CST
6879 && ! TREE_CONSTANT_OVERFLOW (arg0))
6880 {
6881 REAL_VALUE_TYPE cint;
6882 HOST_WIDE_INT n;
6883
6884 n = real_to_integer (&c);
6885 real_from_integer (&cint, VOIDmode, n,
6886 n < 0 ? -1 : 0, 0);
6887 if (real_identical (&c, &cint))
6888 {
6889 REAL_VALUE_TYPE x;
6890 bool inexact;
6891
6892 x = TREE_REAL_CST (arg0);
6893 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
6894 if (flag_unsafe_math_optimizations || !inexact)
6895 return build_real (type, x);
6896 }
6897 }
6898 }
6899
6900 /* Optimize pow(expN(x),y) = expN(x*y). */
6901 fcode = builtin_mathfn_code (arg0);
6902 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
6903 {
6904 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6905 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
6906 arg = fold (build2 (MULT_EXPR, type, arg, arg1));
6907 arglist = build_tree_list (NULL_TREE, arg);
6908 return build_function_call_expr (expfn, arglist);
6909 }
6910
6911 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
6912 if (flag_unsafe_math_optimizations && BUILTIN_SQRT_P (fcode))
6913 {
6914 tree narg0 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6915 tree narg1 = fold (build2 (MULT_EXPR, type, arg1,
6916 build_real (type, dconsthalf)));
6917
6918 arglist = tree_cons (NULL_TREE, narg0,
6919 build_tree_list (NULL_TREE, narg1));
6920 return build_function_call_expr (fndecl, arglist);
6921 }
6922
6923 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
6924 if (flag_unsafe_math_optimizations
6925 && (fcode == BUILT_IN_POW
6926 || fcode == BUILT_IN_POWF
6927 || fcode == BUILT_IN_POWL))
6928 {
6929 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6930 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
6931 tree narg1 = fold (build2 (MULT_EXPR, type, arg01, arg1));
6932 arglist = tree_cons (NULL_TREE, arg00,
6933 build_tree_list (NULL_TREE, narg1));
6934 return build_function_call_expr (fndecl, arglist);
6935 }
6936 return NULL_TREE;
6937 }
6938
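The simpler pow folds above, demonstrated stand-alone; the first four are applied unconditionally here, while pow(x,0.5) = sqrt(x) requires unsafe math optimizations:

#include <stdio.h>
#include <math.h>

int
main (void)
{
  double x = 6.25;

  printf ("%g %g\n", pow (1.0, x), 1.0);        /* pow(1.0,y)  = 1.0     */
  printf ("%g %g\n", pow (x, 0.0), 1.0);        /* pow(x,0.0)  = 1.0     */
  printf ("%g %g\n", pow (x, 1.0), x);          /* pow(x,1.0)  = x       */
  printf ("%g %g\n", pow (x, -1.0), 1.0 / x);   /* pow(x,-1.0) = 1/x     */
  printf ("%g %g\n", pow (x, 0.5), sqrt (x));   /* pow(x,0.5)  = sqrt(x) */
  return 0;
}
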
6939 /* A subroutine of fold_builtin to fold the various exponent
6940 functions. EXP is the CALL_EXPR of a call to a builtin function.
6941 VALUE is the value which will be raised to a power. */
6942
6943 static tree
6944 fold_builtin_exponent (tree exp, const REAL_VALUE_TYPE *value)
6945 {
6946 tree arglist = TREE_OPERAND (exp, 1);
6947
6948 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
6949 {
6950 tree fndecl = get_callee_fndecl (exp);
6951 tree type = TREE_TYPE (TREE_TYPE (fndecl));
6952 tree arg = TREE_VALUE (arglist);
6953
6954 /* Optimize expN(0.0) = 1.0. */
6955 if (real_zerop (arg))
6956 return build_real (type, dconst1);
6957
6958 /* Optimize expN(1.0) = N. */
6959 if (real_onep (arg))
6960 {
6961 REAL_VALUE_TYPE cst;
6962
6963 real_convert (&cst, TYPE_MODE (type), value);
6964 return build_real (type, cst);
6965 }
6966
6967 /* Attempt to evaluate expN(integer) at compile-time. */
6968 if (flag_unsafe_math_optimizations
6969 && TREE_CODE (arg) == REAL_CST
6970 && ! TREE_CONSTANT_OVERFLOW (arg))
6971 {
6972 REAL_VALUE_TYPE cint;
6973 REAL_VALUE_TYPE c;
6974 HOST_WIDE_INT n;
6975
6976 c = TREE_REAL_CST (arg);
6977 n = real_to_integer (&c);
6978 real_from_integer (&cint, VOIDmode, n,
6979 n < 0 ? -1 : 0, 0);
6980 if (real_identical (&c, &cint))
6981 {
6982 REAL_VALUE_TYPE x;
6983
6984 real_powi (&x, TYPE_MODE (type), value, n);
6985 return build_real (type, x);
6986 }
6987 }
6988
6989 /* Optimize expN(logN(x)) = x. */
6990 if (flag_unsafe_math_optimizations)
6991 {
6992 const enum built_in_function fcode = builtin_mathfn_code (arg);
6993
6994 if ((value == &dconste
6995 && (fcode == BUILT_IN_LOG
6996 || fcode == BUILT_IN_LOGF
6997 || fcode == BUILT_IN_LOGL))
6998 || (value == &dconst2
6999 && (fcode == BUILT_IN_LOG2
7000 || fcode == BUILT_IN_LOG2F
7001 || fcode == BUILT_IN_LOG2L))
7002 || (value == &dconst10
7003 && (fcode == BUILT_IN_LOG10
7004 || fcode == BUILT_IN_LOG10F
7005 || fcode == BUILT_IN_LOG10L)))
7006 return fold_convert (type, TREE_VALUE (TREE_OPERAND (arg, 1)));
7007 }
7008 }
7009
7010 return 0;
7011 }
7012
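Stand-alone checks of the exponent folds above; exp2 is the C99 base-2 variant:

#include <stdio.h>
#include <math.h>

int
main (void)
{
  double x = 2.5;

  printf ("%g %g\n", exp (0.0), exp2 (0.0));    /* expN(0.0) = 1.0 */
  printf ("%g %g\n", exp (1.0), exp2 (1.0));    /* expN(1.0) = N   */
  printf ("%.17g %.17g\n", exp (log (x)), x);   /* expN(logN(x)) = x, unsafe-math */
  return 0;
}
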
7013 /* Fold function call to builtin memcpy. Return
7014 NULL_TREE if no simplification can be made. */
7015
7016 static tree
7017 fold_builtin_memcpy (tree exp)
7018 {
7019 tree arglist = TREE_OPERAND (exp, 1);
7020 tree dest, src, len;
7021
7022 if (!validate_arglist (arglist,
7023 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7024 return 0;
7025
7026 dest = TREE_VALUE (arglist);
7027 src = TREE_VALUE (TREE_CHAIN (arglist));
7028 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7029
7030 /* If the LEN parameter is zero, return DEST. */
7031 if (integer_zerop (len))
7032 return omit_one_operand (TREE_TYPE (exp), dest, src);
7033
7034 /* If SRC and DEST are the same (and not volatile), return DEST. */
7035 if (operand_equal_p (src, dest, 0))
7036 return omit_one_operand (TREE_TYPE (exp), dest, len);
7037
7038 return 0;
7039 }
7040
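A stand-alone illustration of the zero-length case folded above; the identical-operands case is omitted because calling memcpy on overlapping objects is not valid user code:

#include <stdio.h>
#include <string.h>

int
main (void)
{
  char dst[8] = "xxxxxxx";
  const char src[8] = "abcdefg";

  char *r = memcpy (dst, src, 0);       /* copies nothing, returns DST */
  printf ("%d %s\n", r == dst, dst);    /* prints: 1 xxxxxxx */
  return 0;
}
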
7041 /* Fold function call to builtin mempcpy. Return
7042 NULL_TREE if no simplification can be made. */
7043
7044 static tree
7045 fold_builtin_mempcpy (tree arglist, tree type, int endp)
7046 {
7047 if (validate_arglist (arglist,
7048 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7049 {
7050 tree dest = TREE_VALUE (arglist);
7051 tree src = TREE_VALUE (TREE_CHAIN (arglist));
7052 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7053
7054 /* If the LEN parameter is zero, return DEST. */
7055 if (integer_zerop (len))
7056 return omit_one_operand (type, dest, src);
7057
7058 /* If SRC and DEST are the same (and not volatile), return DEST+LEN. */
7059 if (operand_equal_p (src, dest, 0))
7060 {
7061 if (endp == 0)
7062 return omit_one_operand (type, dest, len);
7063
7064 if (endp == 2)
7065 len = fold (build2 (MINUS_EXPR, TREE_TYPE (len), len,
7066 ssize_int (1)));
7067
7068 len = fold_convert (TREE_TYPE (dest), len);
7069 len = fold (build2 (PLUS_EXPR, TREE_TYPE (dest), dest, len));
7070 return fold_convert (type, len);
7071 }
7072 }
7073 return 0;
7074 }
7075
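mempcpy is a GNU extension (declared with _GNU_SOURCE on glibc); it behaves like memcpy but returns DEST + LEN, which is why the identical-operands fold above yields dest + len rather than dest. A stand-alone sketch:

#define _GNU_SOURCE
#include <stdio.h>
#include <string.h>

int
main (void)
{
  char dst[8];

  char *end = mempcpy (dst, "abc", 4);          /* copies the NUL too */
  printf ("%d %s\n", (int) (end - dst), dst);   /* prints: 4 abc */
  return 0;
}
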
7076 /* Fold function call to builtin memmove. Return
7077 NULL_TREE if no simplification can be made. */
7078
7079 static tree
7080 fold_builtin_memmove (tree arglist, tree type)
7081 {
7082 tree dest, src, len;
7083
7084 if (!validate_arglist (arglist,
7085 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7086 return 0;
7087
7088 dest = TREE_VALUE (arglist);
7089 src = TREE_VALUE (TREE_CHAIN (arglist));
7090 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7091
7092 /* If the LEN parameter is zero, return DEST. */
7093 if (integer_zerop (len))
7094 return omit_one_operand (type, dest, src);
7095
7096 /* If SRC and DEST are the same (and not volatile), return DEST. */
7097 if (operand_equal_p (src, dest, 0))
7098 return omit_one_operand (type, dest, len);
7099
7100 return 0;
7101 }
7102
7103 /* Fold function call to builtin strcpy. If LEN is not NULL, it represents
7104 the length of the string to be copied. Return NULL_TREE if no
7105 simplification can be made. */
7106
7107 tree
7108 fold_builtin_strcpy (tree exp, tree len)
7109 {
7110 tree arglist = TREE_OPERAND (exp, 1);
7111 tree dest, src, fn;
7112
7113 if (!validate_arglist (arglist,
7114 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7115 return 0;
7116
7117 dest = TREE_VALUE (arglist);
7118 src = TREE_VALUE (TREE_CHAIN (arglist));
7119
7120 /* If SRC and DEST are the same (and not volatile), return DEST. */
7121 if (operand_equal_p (src, dest, 0))
7122 return fold_convert (TREE_TYPE (exp), dest);
7123
7124 if (optimize_size)
7125 return 0;
7126
7127 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
7128 if (!fn)
7129 return 0;
7130
7131 if (!len)
7132 {
7133 len = c_strlen (src, 1);
7134 if (! len || TREE_SIDE_EFFECTS (len))
7135 return 0;
7136 }
7137
7138 len = size_binop (PLUS_EXPR, len, ssize_int (1));
7139 arglist = build_tree_list (NULL_TREE, len);
7140 arglist = tree_cons (NULL_TREE, src, arglist);
7141 arglist = tree_cons (NULL_TREE, dest, arglist);
7142 return fold_convert (TREE_TYPE (exp),
7143 build_function_call_expr (fn, arglist));
7144 }
7145
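/* For illustration, strcpy (p, p) folds to p above, and when the length
   of the source is known strcpy (buf, "abc") can be rewritten as
   memcpy (buf, "abc", 4) -- the string length plus one for the
   terminating nul -- unless we are optimizing for size.  */
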
7146 /* Fold function call to builtin strncpy. If SLEN is not NULL, it represents
7147 the length of the source string. Return NULL_TREE if no simplification
7148 can be made. */
7149
7150 tree
7151 fold_builtin_strncpy (tree exp, tree slen)
7152 {
7153 tree arglist = TREE_OPERAND (exp, 1);
7154 tree dest, src, len, fn;
7155
7156 if (!validate_arglist (arglist,
7157 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7158 return 0;
7159
7160 dest = TREE_VALUE (arglist);
7161 src = TREE_VALUE (TREE_CHAIN (arglist));
7162 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7163
7164 /* If the LEN parameter is zero, return DEST. */
7165 if (integer_zerop (len))
7166 return omit_one_operand (TREE_TYPE (exp), dest, src);
7167
7168 /* We can't compare slen with len as constants below if len is not a
7169 constant. */
7170 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
7171 return 0;
7172
7173 if (!slen)
7174 slen = c_strlen (src, 1);
7175
7176 /* Now, we must be passed a constant src ptr parameter. */
7177 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
7178 return 0;
7179
7180 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
7181
7182 /* We do not support simplification of this case, though we do
7183 support it when expanding trees into RTL. */
7184 /* FIXME: generate a call to __builtin_memset. */
7185 if (tree_int_cst_lt (slen, len))
7186 return 0;
7187
7188   /* OK, transform into builtin memcpy.  */
7189 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
7190 if (!fn)
7191 return 0;
7192 return fold_convert (TREE_TYPE (exp),
7193 build_function_call_expr (fn, arglist));
7194 }
7195
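/* For illustration, strncpy (buf, "abc", 3) can become
   memcpy (buf, "abc", 3) above, because a constant bound no larger than
   the source length plus one never requires nul padding; a larger bound,
   which would require padding, is left alone here.  */
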
7196 /* Fold function call to builtin memcmp. Return
7197 NULL_TREE if no simplification can be made. */
7198
7199 static tree
7200 fold_builtin_memcmp (tree arglist)
7201 {
7202 tree arg1, arg2, len;
7203 const char *p1, *p2;
7204
7205 if (!validate_arglist (arglist,
7206 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7207 return 0;
7208
7209 arg1 = TREE_VALUE (arglist);
7210 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7211 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7212
7213 /* If the LEN parameter is zero, return zero. */
7214 if (integer_zerop (len))
7215 return omit_two_operands (integer_type_node, integer_zero_node,
7216 arg1, arg2);
7217
7218 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7219 if (operand_equal_p (arg1, arg2, 0))
7220 return omit_one_operand (integer_type_node, integer_zero_node, len);
7221
7222 p1 = c_getstr (arg1);
7223 p2 = c_getstr (arg2);
7224
7225 /* If all arguments are constant, and the value of len is not greater
7226 than the lengths of arg1 and arg2, evaluate at compile-time. */
7227 if (host_integerp (len, 1) && p1 && p2
7228 && compare_tree_int (len, strlen (p1) + 1) <= 0
7229 && compare_tree_int (len, strlen (p2) + 1) <= 0)
7230 {
7231 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
7232
7233 if (r > 0)
7234 return integer_one_node;
7235 else if (r < 0)
7236 return integer_minus_one_node;
7237 else
7238 return integer_zero_node;
7239 }
7240
7241 /* If len parameter is one, return an expression corresponding to
7242      (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
7243 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
7244 {
7245 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7246 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
7247 tree ind1 = fold_convert (integer_type_node,
7248 build1 (INDIRECT_REF, cst_uchar_node,
7249 fold_convert (cst_uchar_ptr_node,
7250 arg1)));
7251 tree ind2 = fold_convert (integer_type_node,
7252 build1 (INDIRECT_REF, cst_uchar_node,
7253 fold_convert (cst_uchar_ptr_node,
7254 arg2)));
7255 return fold (build2 (MINUS_EXPR, integer_type_node, ind1, ind2));
7256 }
7257
7258 return 0;
7259 }
7260
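/* For illustration: __builtin_memcmp ("abc", "abd", 3) folds above to
   the constant -1 (the result is canonicalized to -1, 0 or 1, not the
   raw byte difference), and __builtin_memcmp (p, q, 1) becomes
   *(const unsigned char *) p - *(const unsigned char *) q.  */
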
7261 /* Fold function call to builtin strcmp. Return
7262 NULL_TREE if no simplification can be made. */
7263
7264 static tree
7265 fold_builtin_strcmp (tree arglist)
7266 {
7267 tree arg1, arg2;
7268 const char *p1, *p2;
7269
7270 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7271 return 0;
7272
7273 arg1 = TREE_VALUE (arglist);
7274 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7275
7276 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7277 if (operand_equal_p (arg1, arg2, 0))
7278 return integer_zero_node;
7279
7280 p1 = c_getstr (arg1);
7281 p2 = c_getstr (arg2);
7282
7283 if (p1 && p2)
7284 {
7285 const int i = strcmp (p1, p2);
7286 if (i < 0)
7287 return integer_minus_one_node;
7288 else if (i > 0)
7289 return integer_one_node;
7290 else
7291 return integer_zero_node;
7292 }
7293
7294 /* If the second arg is "", return *(const unsigned char*)arg1. */
7295 if (p2 && *p2 == '\0')
7296 {
7297 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7298 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
7299 return fold_convert (integer_type_node,
7300 build1 (INDIRECT_REF, cst_uchar_node,
7301 fold_convert (cst_uchar_ptr_node,
7302 arg1)));
7303 }
7304
7305 /* If the first arg is "", return -*(const unsigned char*)arg2. */
7306 if (p1 && *p1 == '\0')
7307 {
7308 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7309 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
7310 tree temp = fold_convert (integer_type_node,
7311 build1 (INDIRECT_REF, cst_uchar_node,
7312 fold_convert (cst_uchar_ptr_node,
7313 arg2)));
7314 return fold (build1 (NEGATE_EXPR, integer_type_node, temp));
7315 }
7316
7317 return 0;
7318 }
7319
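/* For illustration: strcmp (s, s) folds to 0 above, strcmp ("a", "b")
   folds to -1, and strcmp (s, "") reduces to *(const unsigned char *) s,
   which is nonzero exactly when S is non-empty.  */
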
7320 /* Fold function call to builtin strncmp. Return
7321 NULL_TREE if no simplification can be made. */
7322
7323 static tree
7324 fold_builtin_strncmp (tree arglist)
7325 {
7326 tree arg1, arg2, len;
7327 const char *p1, *p2;
7328
7329 if (!validate_arglist (arglist,
7330 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7331 return 0;
7332
7333 arg1 = TREE_VALUE (arglist);
7334 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7335 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7336
7337 /* If the LEN parameter is zero, return zero. */
7338 if (integer_zerop (len))
7339 return omit_two_operands (integer_type_node, integer_zero_node,
7340 arg1, arg2);
7341
7342 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7343 if (operand_equal_p (arg1, arg2, 0))
7344 return omit_one_operand (integer_type_node, integer_zero_node, len);
7345
7346 p1 = c_getstr (arg1);
7347 p2 = c_getstr (arg2);
7348
7349 if (host_integerp (len, 1) && p1 && p2)
7350 {
7351 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
7352 if (i > 0)
7353 return integer_one_node;
7354 else if (i < 0)
7355 return integer_minus_one_node;
7356 else
7357 return integer_zero_node;
7358 }
7359
7360 /* If the second arg is "", and the length is greater than zero,
7361 return *(const unsigned char*)arg1. */
7362 if (p2 && *p2 == '\0'
7363 && TREE_CODE (len) == INTEGER_CST
7364 && tree_int_cst_sgn (len) == 1)
7365 {
7366 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7367 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
7368 return fold_convert (integer_type_node,
7369 build1 (INDIRECT_REF, cst_uchar_node,
7370 fold_convert (cst_uchar_ptr_node,
7371 arg1)));
7372 }
7373
7374 /* If the first arg is "", and the length is greater than zero,
7375 return -*(const unsigned char*)arg2. */
7376 if (p1 && *p1 == '\0'
7377 && TREE_CODE (len) == INTEGER_CST
7378 && tree_int_cst_sgn (len) == 1)
7379 {
7380 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7381 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
7382 tree temp = fold_convert (integer_type_node,
7383 build1 (INDIRECT_REF, cst_uchar_node,
7384 fold_convert (cst_uchar_ptr_node,
7385 arg2)));
7386 return fold (build1 (NEGATE_EXPR, integer_type_node, temp));
7387 }
7388
7389 /* If len parameter is one, return an expression corresponding to
7390      (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
7391 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
7392 {
7393 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7394 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
7395 tree ind1 = fold_convert (integer_type_node,
7396 build1 (INDIRECT_REF, cst_uchar_node,
7397 fold_convert (cst_uchar_ptr_node,
7398 arg1)));
7399 tree ind2 = fold_convert (integer_type_node,
7400 build1 (INDIRECT_REF, cst_uchar_node,
7401 fold_convert (cst_uchar_ptr_node,
7402 arg2)));
7403 return fold (build2 (MINUS_EXPR, integer_type_node, ind1, ind2));
7404 }
7405
7406 return 0;
7407 }
7408
7409 /* Fold function call to builtin signbit, signbitf or signbitl. Return
7410 NULL_TREE if no simplification can be made. */
7411
7412 static tree
7413 fold_builtin_signbit (tree exp)
7414 {
7415 tree arglist = TREE_OPERAND (exp, 1);
7416 tree arg, temp;
7417
7418 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
7419 return NULL_TREE;
7420
7421 arg = TREE_VALUE (arglist);
7422
7423 /* If ARG is a compile-time constant, determine the result. */
7424 if (TREE_CODE (arg) == REAL_CST
7425 && !TREE_CONSTANT_OVERFLOW (arg))
7426 {
7427 REAL_VALUE_TYPE c;
7428
7429 c = TREE_REAL_CST (arg);
7430 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
7431 return fold_convert (TREE_TYPE (exp), temp);
7432 }
7433
7434 /* If ARG is non-negative, the result is always zero. */
7435 if (tree_expr_nonnegative_p (arg))
7436 return omit_one_operand (TREE_TYPE (exp), integer_zero_node, arg);
7437
7438 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
7439 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
7440 return fold (build2 (LT_EXPR, TREE_TYPE (exp), arg,
7441 build_real (TREE_TYPE (arg), dconst0)));
7442
7443 return NULL_TREE;
7444 }
7445
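/* For illustration: __builtin_signbit (-2.5) folds to 1 above, an
   argument known to be non-negative folds to 0 with the argument still
   evaluated, and when signed zeros are not honored for the argument's
   mode the call signbit (x) becomes the expression x < 0.0.  */
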
7446 /* Fold function call to builtin copysign, copysignf or copysignl.
7447 Return NULL_TREE if no simplification can be made. */
7448
7449 static tree
7450 fold_builtin_copysign (tree arglist, tree type)
7451 {
7452 tree arg1, arg2;
7453
7454 if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
7455 return NULL_TREE;
7456
7457 arg1 = TREE_VALUE (arglist);
7458 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7459
7460 /* copysign(X,X) is X. */
7461 if (operand_equal_p (arg1, arg2, 0))
7462 return fold_convert (type, arg1);
7463
7464 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
7465 if (TREE_CODE (arg1) == REAL_CST
7466 && TREE_CODE (arg2) == REAL_CST
7467 && !TREE_CONSTANT_OVERFLOW (arg1)
7468 && !TREE_CONSTANT_OVERFLOW (arg2))
7469 {
7470 REAL_VALUE_TYPE c1, c2;
7471
7472 c1 = TREE_REAL_CST (arg1);
7473 c2 = TREE_REAL_CST (arg2);
7474 real_copysign (&c1, &c2);
7475 return build_real (type, c1);
7477 }
7478
7479 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
7480 Remember to evaluate Y for side-effects. */
7481 if (tree_expr_nonnegative_p (arg2))
7482 return omit_one_operand (type,
7483 fold (build1 (ABS_EXPR, type, arg1)),
7484 arg2);
7485
7486 return NULL_TREE;
7487 }
7488
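/* For illustration: copysign (x, x) folds to x above,
   copysign (-3.0, 2.0) folds to the constant 3.0, and when the second
   argument is known to be non-negative the call becomes ABS_EXPR of the
   first argument, with the second still evaluated for side effects.  */
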
7489 /* Fold a call to builtin isascii. */
7490
7491 static tree
7492 fold_builtin_isascii (tree arglist)
7493 {
7494 if (! validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
7495 return 0;
7496 else
7497 {
7498 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
7499 tree arg = TREE_VALUE (arglist);
7500
7501 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
7502 build_int_cst (NULL_TREE,
7503 ~ (unsigned HOST_WIDE_INT) 0x7f));
7504 arg = fold (build2 (EQ_EXPR, integer_type_node,
7505 arg, integer_zero_node));
7506
7507 if (in_gimple_form && !TREE_CONSTANT (arg))
7508 return NULL_TREE;
7509 else
7510 return arg;
7511 }
7512 }
7513
7514 /* Fold a call to builtin toascii. */
7515
7516 static tree
7517 fold_builtin_toascii (tree arglist)
7518 {
7519 if (! validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
7520 return 0;
7521 else
7522 {
7523 /* Transform toascii(c) -> (c & 0x7f). */
7524 tree arg = TREE_VALUE (arglist);
7525
7526 return fold (build2 (BIT_AND_EXPR, integer_type_node, arg,
7527 build_int_cst (NULL_TREE, 0x7f)));
7528 }
7529 }
7530
7531 /* Fold a call to builtin isdigit. */
7532
7533 static tree
7534 fold_builtin_isdigit (tree arglist)
7535 {
7536 if (! validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
7537 return 0;
7538 else
7539 {
7540 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
7541 /* According to the C standard, isdigit is unaffected by locale. */
7542 tree arg = TREE_VALUE (arglist);
7543 arg = fold_convert (unsigned_type_node, arg);
7544 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
7545 build_int_cst (unsigned_type_node, TARGET_DIGIT0));
7546 arg = build2 (LE_EXPR, integer_type_node, arg,
7547 build_int_cst (unsigned_type_node, 9));
7548 arg = fold (arg);
7549 if (in_gimple_form && !TREE_CONSTANT (arg))
7550 return NULL_TREE;
7551 else
7552 return arg;
7553 }
7554 }
7555
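/* For illustration, the rewrite above turns isdigit (c) into the
   branch-free range check (unsigned) c - '0' <= 9 (using the target's
   value of '0'), so a constant such as isdigit ('7') can then be
   reduced to 1 by ordinary constant folding.  */
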
7556 /* Fold a call to fabs, fabsf or fabsl. */
7557
7558 static tree
7559 fold_builtin_fabs (tree arglist, tree type)
7560 {
7561 tree arg;
7562
7563 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
7564 return 0;
7565
7566 arg = TREE_VALUE (arglist);
7567 arg = fold_convert (type, arg);
7568 if (TREE_CODE (arg) == REAL_CST)
7569 return fold_abs_const (arg, type);
7570 return fold (build1 (ABS_EXPR, type, arg));
7571 }
7572
7573 /* Fold a call to abs, labs, llabs or imaxabs. */
7574
7575 static tree
7576 fold_builtin_abs (tree arglist, tree type)
7577 {
7578 tree arg;
7579
7580 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
7581 return 0;
7582
7583 arg = TREE_VALUE (arglist);
7584 arg = fold_convert (type, arg);
7585 if (TREE_CODE (arg) == INTEGER_CST)
7586 return fold_abs_const (arg, type);
7587 return fold (build1 (ABS_EXPR, type, arg));
7588 }
7589
7590 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
7591 EXP is the CALL_EXPR for the call. */
7592
7593 static tree
7594 fold_builtin_classify (tree exp, int builtin_index)
7595 {
7596 tree fndecl = get_callee_fndecl (exp);
7597 tree arglist = TREE_OPERAND (exp, 1);
7598 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7599 tree arg;
7600 REAL_VALUE_TYPE r;
7601
7602 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
7603 {
7604 /* Check that we have exactly one argument. */
7605 if (arglist == 0)
7606 {
7607 error ("too few arguments to function %qs",
7608 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
7609 return error_mark_node;
7610 }
7611 else if (TREE_CHAIN (arglist) != 0)
7612 {
7613 error ("too many arguments to function %qs",
7614 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
7615 return error_mark_node;
7616 }
7617 else
7618 {
7619 error ("non-floating-point argument to function %qs",
7620 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
7621 return error_mark_node;
7622 }
7623 }
7624
7625 arg = TREE_VALUE (arglist);
7626 switch (builtin_index)
7627 {
7628 case BUILT_IN_ISINF:
7629 if (!MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
7630 return omit_one_operand (type, integer_zero_node, arg);
7631
7632 if (TREE_CODE (arg) == REAL_CST)
7633 {
7634 r = TREE_REAL_CST (arg);
7635 if (real_isinf (&r))
7636 return real_compare (GT_EXPR, &r, &dconst0)
7637 ? integer_one_node : integer_minus_one_node;
7638 else
7639 return integer_zero_node;
7640 }
7641
7642 return NULL_TREE;
7643
7644 case BUILT_IN_FINITE:
7645 if (!MODE_HAS_NANS (TYPE_MODE (TREE_TYPE (arg)))
7646 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
7647 	return omit_one_operand (type, integer_one_node, arg);
7648
7649 if (TREE_CODE (arg) == REAL_CST)
7650 {
7651 r = TREE_REAL_CST (arg);
7652 return real_isinf (&r) || real_isnan (&r)
7653 ? integer_zero_node : integer_one_node;
7654 }
7655
7656 return NULL_TREE;
7657
7658 case BUILT_IN_ISNAN:
7659 if (!MODE_HAS_NANS (TYPE_MODE (TREE_TYPE (arg))))
7660 return omit_one_operand (type, integer_zero_node, arg);
7661
7662 if (TREE_CODE (arg) == REAL_CST)
7663 {
7664 r = TREE_REAL_CST (arg);
7665 return real_isnan (&r) ? integer_one_node : integer_zero_node;
7666 }
7667
7668 arg = builtin_save_expr (arg);
7669 return fold (build2 (UNORDERED_EXPR, type, arg, arg));
7670
7671 default:
7672 gcc_unreachable ();
7673 }
7674 }
7675
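/* For illustration: on a target whose floating-point modes have neither
   NaNs nor infinities, __builtin_isnan (x) and __builtin_isinf (x) fold
   to 0 above and __builtin_finite (x) folds to 1, with X still evaluated;
   a constant argument is classified directly, and a remaining
   isnan (x) becomes the self-comparison UNORDERED_EXPR (x, x).  */
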
7676 /* Fold a call to an unordered comparison function such as
7677 __builtin_isgreater(). EXP is the CALL_EXPR for the call.
7678 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
7679 the opposite of the desired result. UNORDERED_CODE is used
7680 for modes that can hold NaNs and ORDERED_CODE is used for
7681 the rest. */
7682
7683 static tree
7684 fold_builtin_unordered_cmp (tree exp,
7685 enum tree_code unordered_code,
7686 enum tree_code ordered_code)
7687 {
7688 tree fndecl = get_callee_fndecl (exp);
7689 tree arglist = TREE_OPERAND (exp, 1);
7690 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7691 enum tree_code code;
7692 tree arg0, arg1;
7693 tree type0, type1;
7694 enum tree_code code0, code1;
7695 tree cmp_type = NULL_TREE;
7696
7697 if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
7698 {
7699 /* Check that we have exactly two arguments. */
7700 if (arglist == 0 || TREE_CHAIN (arglist) == 0)
7701 {
7702 error ("too few arguments to function %qs",
7703 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
7704 return error_mark_node;
7705 }
7706 else if (TREE_CHAIN (TREE_CHAIN (arglist)) != 0)
7707 {
7708 error ("too many arguments to function %qs",
7709 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
7710 return error_mark_node;
7711 }
7712 }
7713
7714 arg0 = TREE_VALUE (arglist);
7715 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7716
7717 type0 = TREE_TYPE (arg0);
7718 type1 = TREE_TYPE (arg1);
7719
7720 code0 = TREE_CODE (type0);
7721 code1 = TREE_CODE (type1);
7722
7723 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
7724 /* Choose the wider of two real types. */
7725 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
7726 ? type0 : type1;
7727 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
7728 cmp_type = type0;
7729 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
7730 cmp_type = type1;
7731 else
7732 {
7733 error ("non-floating-point argument to function %qs",
7734 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
7735 return error_mark_node;
7736 }
7737
7738 arg0 = fold_convert (cmp_type, arg0);
7739 arg1 = fold_convert (cmp_type, arg1);
7740
7741 if (unordered_code == UNORDERED_EXPR)
7742 {
7743 if (!MODE_HAS_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7744 return omit_two_operands (type, integer_zero_node, arg0, arg1);
7745 return fold (build2 (UNORDERED_EXPR, type, arg0, arg1));
7746 }
7747
7748 code = MODE_HAS_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
7749 : ordered_code;
7750 return fold (build1 (TRUTH_NOT_EXPR, type,
7751 fold (build2 (code, type, arg0, arg1))));
7752 }
7753
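/* For illustration: when the chosen comparison type cannot hold NaNs,
   __builtin_isunordered (x, y) folds to 0 above with both operands still
   evaluated; otherwise __builtin_isgreater (x, y) is lowered to
   !(x UNLE y), which is true exactly when both operands are ordered
   and x > y.  */
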
7754 /* Used by constant folding to simplify calls to builtin functions. EXP is
7755 the CALL_EXPR of a call to a builtin function. IGNORE is true if the
7756 result of the function call is ignored. This function returns NULL_TREE
7757 if no simplification was possible. */
7758
7759 static tree
7760 fold_builtin_1 (tree exp, bool ignore)
7761 {
7762 tree fndecl = get_callee_fndecl (exp);
7763 tree arglist = TREE_OPERAND (exp, 1);
7764 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7765
7766 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7767 return targetm.fold_builtin (exp, ignore);
7768
7769 switch (DECL_FUNCTION_CODE (fndecl))
7770 {
7771 case BUILT_IN_FPUTS:
7772 return fold_builtin_fputs (arglist, ignore, false, NULL_TREE);
7773
7774 case BUILT_IN_FPUTS_UNLOCKED:
7775 return fold_builtin_fputs (arglist, ignore, true, NULL_TREE);
7776
7777 case BUILT_IN_STRSTR:
7778 return fold_builtin_strstr (arglist);
7779
7780 case BUILT_IN_STRCAT:
7781 return fold_builtin_strcat (arglist);
7782
7783 case BUILT_IN_STRNCAT:
7784 return fold_builtin_strncat (arglist);
7785
7786 case BUILT_IN_STRSPN:
7787 return fold_builtin_strspn (arglist);
7788
7789 case BUILT_IN_STRCSPN:
7790 return fold_builtin_strcspn (arglist);
7791
7792 case BUILT_IN_STRCHR:
7793 case BUILT_IN_INDEX:
7794 return fold_builtin_strchr (arglist);
7795
7796 case BUILT_IN_STRRCHR:
7797 case BUILT_IN_RINDEX:
7798 return fold_builtin_strrchr (arglist);
7799
7800 case BUILT_IN_STRCPY:
7801 return fold_builtin_strcpy (exp, NULL_TREE);
7802
7803 case BUILT_IN_STRNCPY:
7804 return fold_builtin_strncpy (exp, NULL_TREE);
7805
7806 case BUILT_IN_STRCMP:
7807 return fold_builtin_strcmp (arglist);
7808
7809 case BUILT_IN_STRNCMP:
7810 return fold_builtin_strncmp (arglist);
7811
7812 case BUILT_IN_STRPBRK:
7813 return fold_builtin_strpbrk (arglist);
7814
7815 case BUILT_IN_BCMP:
7816 case BUILT_IN_MEMCMP:
7817 return fold_builtin_memcmp (arglist);
7818
7819 case BUILT_IN_SPRINTF:
7820 return fold_builtin_sprintf (arglist, ignore);
7821
7822 case BUILT_IN_CONSTANT_P:
7823 {
7824 tree val;
7825
7826 val = fold_builtin_constant_p (arglist);
7827 /* Gimplification will pull the CALL_EXPR for the builtin out of
7828 an if condition. When not optimizing, we'll not CSE it back.
7829 	 To avoid regressions in the form of link errors, return false now.  */
7830 if (!val && !optimize)
7831 val = integer_zero_node;
7832
7833 return val;
7834 }
7835
7836 case BUILT_IN_EXPECT:
7837 return fold_builtin_expect (arglist);
7838
7839 case BUILT_IN_CLASSIFY_TYPE:
7840 return fold_builtin_classify_type (arglist);
7841
7842 case BUILT_IN_STRLEN:
7843 return fold_builtin_strlen (arglist);
7844
7845 case BUILT_IN_FABS:
7846 case BUILT_IN_FABSF:
7847 case BUILT_IN_FABSL:
7848 return fold_builtin_fabs (arglist, type);
7849
7850 case BUILT_IN_ABS:
7851 case BUILT_IN_LABS:
7852 case BUILT_IN_LLABS:
7853 case BUILT_IN_IMAXABS:
7854 return fold_builtin_abs (arglist, type);
7855
7856 case BUILT_IN_CONJ:
7857 case BUILT_IN_CONJF:
7858 case BUILT_IN_CONJL:
7859 if (validate_arglist (arglist, COMPLEX_TYPE, VOID_TYPE))
7860 return fold (build1 (CONJ_EXPR, type, TREE_VALUE (arglist)));
7861 break;
7862
7863 case BUILT_IN_CREAL:
7864 case BUILT_IN_CREALF:
7865 case BUILT_IN_CREALL:
7866 if (validate_arglist (arglist, COMPLEX_TYPE, VOID_TYPE))
7867 return non_lvalue (fold (build1 (REALPART_EXPR, type,
7868 TREE_VALUE (arglist))));
7869 break;
7870
7871 case BUILT_IN_CIMAG:
7872 case BUILT_IN_CIMAGF:
7873 case BUILT_IN_CIMAGL:
7874 if (validate_arglist (arglist, COMPLEX_TYPE, VOID_TYPE))
7875 return non_lvalue (fold (build1 (IMAGPART_EXPR, type,
7876 TREE_VALUE (arglist))));
7877 break;
7878
7879 case BUILT_IN_CABS:
7880 case BUILT_IN_CABSF:
7881 case BUILT_IN_CABSL:
7882 return fold_builtin_cabs (arglist, type);
7883
7884 case BUILT_IN_SQRT:
7885 case BUILT_IN_SQRTF:
7886 case BUILT_IN_SQRTL:
7887 return fold_builtin_sqrt (arglist, type);
7888
7889 case BUILT_IN_CBRT:
7890 case BUILT_IN_CBRTF:
7891 case BUILT_IN_CBRTL:
7892 return fold_builtin_cbrt (arglist, type);
7893
7894 case BUILT_IN_SIN:
7895 case BUILT_IN_SINF:
7896 case BUILT_IN_SINL:
7897 return fold_builtin_sin (arglist);
7898
7899 case BUILT_IN_COS:
7900 case BUILT_IN_COSF:
7901 case BUILT_IN_COSL:
7902 return fold_builtin_cos (arglist, type, fndecl);
7903
7904 case BUILT_IN_EXP:
7905 case BUILT_IN_EXPF:
7906 case BUILT_IN_EXPL:
7907 return fold_builtin_exponent (exp, &dconste);
7908
7909 case BUILT_IN_EXP2:
7910 case BUILT_IN_EXP2F:
7911 case BUILT_IN_EXP2L:
7912 return fold_builtin_exponent (exp, &dconst2);
7913
7914 case BUILT_IN_EXP10:
7915 case BUILT_IN_EXP10F:
7916 case BUILT_IN_EXP10L:
7917 case BUILT_IN_POW10:
7918 case BUILT_IN_POW10F:
7919 case BUILT_IN_POW10L:
7920 return fold_builtin_exponent (exp, &dconst10);
7921
7922 case BUILT_IN_LOG:
7923 case BUILT_IN_LOGF:
7924 case BUILT_IN_LOGL:
7925 return fold_builtin_logarithm (exp, &dconste);
7926
7927 case BUILT_IN_LOG2:
7928 case BUILT_IN_LOG2F:
7929 case BUILT_IN_LOG2L:
7930 return fold_builtin_logarithm (exp, &dconst2);
7931
7932 case BUILT_IN_LOG10:
7933 case BUILT_IN_LOG10F:
7934 case BUILT_IN_LOG10L:
7935 return fold_builtin_logarithm (exp, &dconst10);
7936
7937 case BUILT_IN_TAN:
7938 case BUILT_IN_TANF:
7939 case BUILT_IN_TANL:
7940 return fold_builtin_tan (arglist);
7941
7942 case BUILT_IN_ATAN:
7943 case BUILT_IN_ATANF:
7944 case BUILT_IN_ATANL:
7945 return fold_builtin_atan (arglist, type);
7946
7947 case BUILT_IN_POW:
7948 case BUILT_IN_POWF:
7949 case BUILT_IN_POWL:
7950 return fold_builtin_pow (fndecl, arglist, type);
7951
7952 case BUILT_IN_INF:
7953 case BUILT_IN_INFF:
7954 case BUILT_IN_INFL:
7955 return fold_builtin_inf (type, true);
7956
7957 case BUILT_IN_HUGE_VAL:
7958 case BUILT_IN_HUGE_VALF:
7959 case BUILT_IN_HUGE_VALL:
7960 return fold_builtin_inf (type, false);
7961
7962 case BUILT_IN_NAN:
7963 case BUILT_IN_NANF:
7964 case BUILT_IN_NANL:
7965 return fold_builtin_nan (arglist, type, true);
7966
7967 case BUILT_IN_NANS:
7968 case BUILT_IN_NANSF:
7969 case BUILT_IN_NANSL:
7970 return fold_builtin_nan (arglist, type, false);
7971
7972 case BUILT_IN_FLOOR:
7973 case BUILT_IN_FLOORF:
7974 case BUILT_IN_FLOORL:
7975 return fold_builtin_floor (exp);
7976
7977 case BUILT_IN_CEIL:
7978 case BUILT_IN_CEILF:
7979 case BUILT_IN_CEILL:
7980 return fold_builtin_ceil (exp);
7981
7982 case BUILT_IN_TRUNC:
7983 case BUILT_IN_TRUNCF:
7984 case BUILT_IN_TRUNCL:
7985 return fold_builtin_trunc (exp);
7986
7987 case BUILT_IN_ROUND:
7988 case BUILT_IN_ROUNDF:
7989 case BUILT_IN_ROUNDL:
7990 return fold_builtin_round (exp);
7991
7992 case BUILT_IN_NEARBYINT:
7993 case BUILT_IN_NEARBYINTF:
7994 case BUILT_IN_NEARBYINTL:
7995 case BUILT_IN_RINT:
7996 case BUILT_IN_RINTF:
7997 case BUILT_IN_RINTL:
7998 return fold_trunc_transparent_mathfn (exp);
7999
8000 case BUILT_IN_LROUND:
8001 case BUILT_IN_LROUNDF:
8002 case BUILT_IN_LROUNDL:
8003 case BUILT_IN_LLROUND:
8004 case BUILT_IN_LLROUNDF:
8005 case BUILT_IN_LLROUNDL:
8006 return fold_builtin_lround (exp);
8007
8008 case BUILT_IN_LRINT:
8009 case BUILT_IN_LRINTF:
8010 case BUILT_IN_LRINTL:
8011 case BUILT_IN_LLRINT:
8012 case BUILT_IN_LLRINTF:
8013 case BUILT_IN_LLRINTL:
8014 return fold_fixed_mathfn (exp);
8015
8016 case BUILT_IN_FFS:
8017 case BUILT_IN_FFSL:
8018 case BUILT_IN_FFSLL:
8019 case BUILT_IN_CLZ:
8020 case BUILT_IN_CLZL:
8021 case BUILT_IN_CLZLL:
8022 case BUILT_IN_CTZ:
8023 case BUILT_IN_CTZL:
8024 case BUILT_IN_CTZLL:
8025 case BUILT_IN_POPCOUNT:
8026 case BUILT_IN_POPCOUNTL:
8027 case BUILT_IN_POPCOUNTLL:
8028 case BUILT_IN_PARITY:
8029 case BUILT_IN_PARITYL:
8030 case BUILT_IN_PARITYLL:
8031 return fold_builtin_bitop (exp);
8032
8033 case BUILT_IN_MEMCPY:
8034 return fold_builtin_memcpy (exp);
8035
8036 case BUILT_IN_MEMPCPY:
8037 return fold_builtin_mempcpy (arglist, type, /*endp=*/1);
8038
8039 case BUILT_IN_MEMMOVE:
8040 return fold_builtin_memmove (arglist, type);
8041
8042 case BUILT_IN_SIGNBIT:
8043 case BUILT_IN_SIGNBITF:
8044 case BUILT_IN_SIGNBITL:
8045 return fold_builtin_signbit (exp);
8046
8047 case BUILT_IN_ISASCII:
8048 return fold_builtin_isascii (arglist);
8049
8050 case BUILT_IN_TOASCII:
8051 return fold_builtin_toascii (arglist);
8052
8053 case BUILT_IN_ISDIGIT:
8054 return fold_builtin_isdigit (arglist);
8055
8056 case BUILT_IN_COPYSIGN:
8057 case BUILT_IN_COPYSIGNF:
8058 case BUILT_IN_COPYSIGNL:
8059 return fold_builtin_copysign (arglist, type);
8060
8061 case BUILT_IN_FINITE:
8062 case BUILT_IN_FINITEF:
8063 case BUILT_IN_FINITEL:
8064 return fold_builtin_classify (exp, BUILT_IN_FINITE);
8065
8066 case BUILT_IN_ISINF:
8067 case BUILT_IN_ISINFF:
8068 case BUILT_IN_ISINFL:
8069 return fold_builtin_classify (exp, BUILT_IN_ISINF);
8070
8071 case BUILT_IN_ISNAN:
8072 case BUILT_IN_ISNANF:
8073 case BUILT_IN_ISNANL:
8074 return fold_builtin_classify (exp, BUILT_IN_ISNAN);
8075
8076 case BUILT_IN_ISGREATER:
8077 return fold_builtin_unordered_cmp (exp, UNLE_EXPR, LE_EXPR);
8078 case BUILT_IN_ISGREATEREQUAL:
8079 return fold_builtin_unordered_cmp (exp, UNLT_EXPR, LT_EXPR);
8080 case BUILT_IN_ISLESS:
8081 return fold_builtin_unordered_cmp (exp, UNGE_EXPR, GE_EXPR);
8082 case BUILT_IN_ISLESSEQUAL:
8083 return fold_builtin_unordered_cmp (exp, UNGT_EXPR, GT_EXPR);
8084 case BUILT_IN_ISLESSGREATER:
8085 return fold_builtin_unordered_cmp (exp, UNEQ_EXPR, EQ_EXPR);
8086 case BUILT_IN_ISUNORDERED:
8087 return fold_builtin_unordered_cmp (exp, UNORDERED_EXPR, NOP_EXPR);
8088
8089 /* We do the folding for va_start in the expander. */
8090 case BUILT_IN_VA_START:
8091 break;
8092
8093 default:
8094 break;
8095 }
8096
8097 return 0;
8098 }
8099
8100 /* A wrapper function for builtin folding that prevents warnings for
8101 "statement without effect" and the like, caused by removing the
8102 call node earlier than the warning is generated. */
8103
8104 tree
8105 fold_builtin (tree exp, bool ignore)
8106 {
8107 exp = fold_builtin_1 (exp, ignore);
8108 if (exp)
8109 {
8110 /* ??? Don't clobber shared nodes such as integer_zero_node. */
8111 if (CONSTANT_CLASS_P (exp))
8112 exp = build1 (NOP_EXPR, TREE_TYPE (exp), exp);
8113 TREE_NO_WARNING (exp) = 1;
8114 }
8115
8116 return exp;
8117 }
8118
8119 /* Conveniently construct a function call expression. */
8120
8121 tree
8122 build_function_call_expr (tree fn, tree arglist)
8123 {
8124 tree call_expr;
8125
8126 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
8127 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
8128 call_expr, arglist, NULL_TREE);
8129 return fold (call_expr);
8130 }
8131
8132 /* This function validates the types of a function call argument list
8133 represented as a tree chain of parameters against a specified list
8134 of tree_codes. If the last specifier is a 0, that represents an
8135 ellipses, otherwise the last specifier must be a VOID_TYPE. */
8136    ellipsis; otherwise the last specifier must be a VOID_TYPE.  */
8137 static int
8138 validate_arglist (tree arglist, ...)
8139 {
8140 enum tree_code code;
8141 int res = 0;
8142 va_list ap;
8143
8144 va_start (ap, arglist);
8145
8146 do
8147 {
8148 code = va_arg (ap, enum tree_code);
8149 switch (code)
8150 {
8151 case 0:
8152 	  /* This signifies an ellipsis; any further arguments are all ok.  */
8153 res = 1;
8154 goto end;
8155 case VOID_TYPE:
8156 	  /* This signifies an endlink; if no arguments remain, return
8157 true, otherwise return false. */
8158 res = arglist == 0;
8159 goto end;
8160 default:
8161 /* If no parameters remain or the parameter's code does not
8162 match the specified code, return false. Otherwise continue
8163 checking any remaining arguments. */
8164 if (arglist == 0)
8165 goto end;
8166 if (code == POINTER_TYPE)
8167 {
8168 if (! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist))))
8169 goto end;
8170 }
8171 else if (code != TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))))
8172 goto end;
8173 break;
8174 }
8175 arglist = TREE_CHAIN (arglist);
8176 }
8177 while (1);
8178
8179 /* We need gotos here since we can only have one VA_CLOSE in a
8180 function. */
8181 end: ;
8182 va_end (ap);
8183
8184 return res;
8185 }
8186
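/* A usage sketch, mirroring the calls made throughout this file:

     validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)

   accepts exactly a pointer argument followed by an integer argument,
   while ending the specifier list with 0 instead of VOID_TYPE would
   additionally allow any number of trailing arguments.  */
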
8187 /* Default target-specific builtin expander that does nothing. */
8188
8189 rtx
8190 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
8191 rtx target ATTRIBUTE_UNUSED,
8192 rtx subtarget ATTRIBUTE_UNUSED,
8193 enum machine_mode mode ATTRIBUTE_UNUSED,
8194 int ignore ATTRIBUTE_UNUSED)
8195 {
8196 return NULL_RTX;
8197 }
8198
8199 /* Returns true if EXP represents data that would potentially reside
8200 in a readonly section. */
8201
8202 static bool
8203 readonly_data_expr (tree exp)
8204 {
8205 STRIP_NOPS (exp);
8206
8207 if (TREE_CODE (exp) != ADDR_EXPR)
8208 return false;
8209
8210 exp = get_base_address (TREE_OPERAND (exp, 0));
8211 if (!exp)
8212 return false;
8213
8214 /* Make sure we call decl_readonly_section only for trees it
8215 can handle (since it returns true for everything it doesn't
8216 understand). */
8217 if (TREE_CODE (exp) == STRING_CST
8218 || TREE_CODE (exp) == CONSTRUCTOR
8219 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
8220 return decl_readonly_section (exp, 0);
8221 else
8222 return false;
8223 }
8224
8225 /* Simplify a call to the strstr builtin.
8226
8227 Return 0 if no simplification was possible, otherwise return the
8228 simplified form of the call as a tree.
8229
8230 The simplified form may be a constant or other expression which
8231 computes the same value, but in a more efficient manner (including
8232 calls to other builtin functions).
8233
8234 The call may contain arguments which need to be evaluated, but
8235 which are not useful to determine the result of the call. In
8236 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8237 COMPOUND_EXPR will be an argument which must be evaluated.
8238 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8239 COMPOUND_EXPR in the chain will contain the tree for the simplified
8240 form of the builtin function call. */
8241
8242 static tree
8243 fold_builtin_strstr (tree arglist)
8244 {
8245 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8246 return 0;
8247 else
8248 {
8249 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
8250 tree fn;
8251 const char *p1, *p2;
8252
8253 p2 = c_getstr (s2);
8254 if (p2 == NULL)
8255 return 0;
8256
8257 p1 = c_getstr (s1);
8258 if (p1 != NULL)
8259 {
8260 const char *r = strstr (p1, p2);
8261
8262 if (r == NULL)
8263 return build_int_cst (TREE_TYPE (s1), 0);
8264
8265 /* Return an offset into the constant string argument. */
8266 return fold (build2 (PLUS_EXPR, TREE_TYPE (s1),
8267 s1, build_int_cst (TREE_TYPE (s1), r - p1)));
8268 }
8269
8270 if (p2[0] == '\0')
8271 return s1;
8272
8273 if (p2[1] != '\0')
8274 return 0;
8275
8276 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
8277 if (!fn)
8278 return 0;
8279
8280 /* New argument list transforming strstr(s1, s2) to
8281 strchr(s1, s2[0]). */
8282 arglist = build_tree_list (NULL_TREE,
8283 build_int_cst (NULL_TREE, p2[0]));
8284 arglist = tree_cons (NULL_TREE, s1, arglist);
8285 return build_function_call_expr (fn, arglist);
8286 }
8287 }
8288
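/* For illustration: strstr ("abcdef", "cd") folds above to the constant
   offset "abcdef" + 2, strstr (s, "") folds to s, and a single-character
   needle such as strstr (s, "c") is rewritten as strchr (s, 'c') when an
   implicit strchr decl is available.  */
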
8289 /* Simplify a call to the strchr builtin.
8290
8291 Return 0 if no simplification was possible, otherwise return the
8292 simplified form of the call as a tree.
8293
8294 The simplified form may be a constant or other expression which
8295 computes the same value, but in a more efficient manner (including
8296 calls to other builtin functions).
8297
8298 The call may contain arguments which need to be evaluated, but
8299 which are not useful to determine the result of the call. In
8300 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8301 COMPOUND_EXPR will be an argument which must be evaluated.
8302 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8303 COMPOUND_EXPR in the chain will contain the tree for the simplified
8304 form of the builtin function call. */
8305
8306 static tree
8307 fold_builtin_strchr (tree arglist)
8308 {
8309 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8310 return 0;
8311 else
8312 {
8313 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
8314 const char *p1;
8315
8316 if (TREE_CODE (s2) != INTEGER_CST)
8317 return 0;
8318
8319 p1 = c_getstr (s1);
8320 if (p1 != NULL)
8321 {
8322 char c;
8323 const char *r;
8324
8325 if (target_char_cast (s2, &c))
8326 return 0;
8327
8328 r = strchr (p1, c);
8329
8330 if (r == NULL)
8331 return build_int_cst (TREE_TYPE (s1), 0);
8332
8333 /* Return an offset into the constant string argument. */
8334 return fold (build2 (PLUS_EXPR, TREE_TYPE (s1),
8335 s1, build_int_cst (TREE_TYPE (s1), r - p1)));
8336 }
8337 return 0;
8338 }
8339 }
8340
8341 /* Simplify a call to the strrchr builtin.
8342
8343 Return 0 if no simplification was possible, otherwise return the
8344 simplified form of the call as a tree.
8345
8346 The simplified form may be a constant or other expression which
8347 computes the same value, but in a more efficient manner (including
8348 calls to other builtin functions).
8349
8350 The call may contain arguments which need to be evaluated, but
8351 which are not useful to determine the result of the call. In
8352 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8353 COMPOUND_EXPR will be an argument which must be evaluated.
8354 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8355 COMPOUND_EXPR in the chain will contain the tree for the simplified
8356 form of the builtin function call. */
8357
8358 static tree
8359 fold_builtin_strrchr (tree arglist)
8360 {
8361 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8362 return 0;
8363 else
8364 {
8365 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
8366 tree fn;
8367 const char *p1;
8368
8369 if (TREE_CODE (s2) != INTEGER_CST)
8370 return 0;
8371
8372 p1 = c_getstr (s1);
8373 if (p1 != NULL)
8374 {
8375 char c;
8376 const char *r;
8377
8378 if (target_char_cast (s2, &c))
8379 return 0;
8380
8381 r = strrchr (p1, c);
8382
8383 if (r == NULL)
8384 return build_int_cst (TREE_TYPE (s1), 0);
8385
8386 /* Return an offset into the constant string argument. */
8387 return fold (build2 (PLUS_EXPR, TREE_TYPE (s1),
8388 s1, build_int_cst (TREE_TYPE (s1), r - p1)));
8389 }
8390
8391 if (! integer_zerop (s2))
8392 return 0;
8393
8394 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
8395 if (!fn)
8396 return 0;
8397
8398 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
8399 return build_function_call_expr (fn, arglist);
8400 }
8401 }
8402
8403 /* Simplify a call to the strpbrk builtin.
8404
8405 Return 0 if no simplification was possible, otherwise return the
8406 simplified form of the call as a tree.
8407
8408 The simplified form may be a constant or other expression which
8409 computes the same value, but in a more efficient manner (including
8410 calls to other builtin functions).
8411
8412 The call may contain arguments which need to be evaluated, but
8413 which are not useful to determine the result of the call. In
8414 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8415 COMPOUND_EXPR will be an argument which must be evaluated.
8416 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8417 COMPOUND_EXPR in the chain will contain the tree for the simplified
8418 form of the builtin function call. */
8419
8420 static tree
8421 fold_builtin_strpbrk (tree arglist)
8422 {
8423 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8424 return 0;
8425 else
8426 {
8427 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
8428 tree fn;
8429 const char *p1, *p2;
8430
8431 p2 = c_getstr (s2);
8432 if (p2 == NULL)
8433 return 0;
8434
8435 p1 = c_getstr (s1);
8436 if (p1 != NULL)
8437 {
8438 const char *r = strpbrk (p1, p2);
8439
8440 if (r == NULL)
8441 return build_int_cst (TREE_TYPE (s1), 0);
8442
8443 /* Return an offset into the constant string argument. */
8444 return fold (build2 (PLUS_EXPR, TREE_TYPE (s1),
8445 s1, build_int_cst (TREE_TYPE (s1), r - p1)));
8446 }
8447
8448 if (p2[0] == '\0')
8449 /* strpbrk(x, "") == NULL.
8450 Evaluate and ignore s1 in case it had side-effects. */
8451 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
8452
8453 if (p2[1] != '\0')
8454 return 0; /* Really call strpbrk. */
8455
8456 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
8457 if (!fn)
8458 return 0;
8459
8460 /* New argument list transforming strpbrk(s1, s2) to
8461 strchr(s1, s2[0]). */
8462 arglist = build_tree_list (NULL_TREE,
8463 build_int_cst (NULL_TREE, p2[0]));
8464 arglist = tree_cons (NULL_TREE, s1, arglist);
8465 return build_function_call_expr (fn, arglist);
8466 }
8467 }
8468
8469 /* Simplify a call to the strcat builtin.
8470
8471 Return 0 if no simplification was possible, otherwise return the
8472 simplified form of the call as a tree.
8473
8474 The simplified form may be a constant or other expression which
8475 computes the same value, but in a more efficient manner (including
8476 calls to other builtin functions).
8477
8478 The call may contain arguments which need to be evaluated, but
8479 which are not useful to determine the result of the call. In
8480 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8481 COMPOUND_EXPR will be an argument which must be evaluated.
8482 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8483 COMPOUND_EXPR in the chain will contain the tree for the simplified
8484 form of the builtin function call. */
8485
8486 static tree
8487 fold_builtin_strcat (tree arglist)
8488 {
8489 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8490 return 0;
8491 else
8492 {
8493 tree dst = TREE_VALUE (arglist),
8494 src = TREE_VALUE (TREE_CHAIN (arglist));
8495 const char *p = c_getstr (src);
8496
8497 /* If the string length is zero, return the dst parameter. */
8498 if (p && *p == '\0')
8499 return dst;
8500
8501 return 0;
8502 }
8503 }
8504
8505 /* Simplify a call to the strncat builtin.
8506
8507 Return 0 if no simplification was possible, otherwise return the
8508 simplified form of the call as a tree.
8509
8510 The simplified form may be a constant or other expression which
8511 computes the same value, but in a more efficient manner (including
8512 calls to other builtin functions).
8513
8514 The call may contain arguments which need to be evaluated, but
8515 which are not useful to determine the result of the call. In
8516 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8517 COMPOUND_EXPR will be an argument which must be evaluated.
8518 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8519 COMPOUND_EXPR in the chain will contain the tree for the simplified
8520 form of the builtin function call. */
8521
8522 static tree
8523 fold_builtin_strncat (tree arglist)
8524 {
8525 if (!validate_arglist (arglist,
8526 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8527 return 0;
8528 else
8529 {
8530 tree dst = TREE_VALUE (arglist);
8531 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8532 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8533 const char *p = c_getstr (src);
8534
8535 /* If the requested length is zero, or the src parameter string
8536 length is zero, return the dst parameter. */
8537 if (integer_zerop (len) || (p && *p == '\0'))
8538 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
8539
8540 /* If the requested len is greater than or equal to the string
8541 length, call strcat. */
8542 if (TREE_CODE (len) == INTEGER_CST && p
8543 && compare_tree_int (len, strlen (p)) >= 0)
8544 {
8545 tree newarglist
8546 = tree_cons (NULL_TREE, dst, build_tree_list (NULL_TREE, src));
8547 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
8548
8549 /* If the replacement _DECL isn't initialized, don't do the
8550 transformation. */
8551 if (!fn)
8552 return 0;
8553
8554 return build_function_call_expr (fn, newarglist);
8555 }
8556 return 0;
8557 }
8558 }
8559
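/* For illustration: strncat (d, s, 0) and strncat (d, "", n) both fold
   to d above with the remaining operands still evaluated, and
   strncat (d, "ab", 5) becomes strcat (d, "ab") because the bound is at
   least the length of the source string.  */
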
8560 /* Simplify a call to the strspn builtin.
8561
8562 Return 0 if no simplification was possible, otherwise return the
8563 simplified form of the call as a tree.
8564
8565 The simplified form may be a constant or other expression which
8566 computes the same value, but in a more efficient manner (including
8567 calls to other builtin functions).
8568
8569 The call may contain arguments which need to be evaluated, but
8570 which are not useful to determine the result of the call. In
8571 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8572 COMPOUND_EXPR will be an argument which must be evaluated.
8573 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8574 COMPOUND_EXPR in the chain will contain the tree for the simplified
8575 form of the builtin function call. */
8576
8577 static tree
8578 fold_builtin_strspn (tree arglist)
8579 {
8580 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8581 return 0;
8582 else
8583 {
8584 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
8585 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
8586
8587 /* If both arguments are constants, evaluate at compile-time. */
8588 if (p1 && p2)
8589 {
8590 const size_t r = strspn (p1, p2);
8591 return size_int (r);
8592 }
8593
8594 /* If either argument is "", return 0. */
8595 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
8596 /* Evaluate and ignore both arguments in case either one has
8597 side-effects. */
8598 return omit_two_operands (integer_type_node, integer_zero_node,
8599 s1, s2);
8600 return 0;
8601 }
8602 }
8603
8604 /* Simplify a call to the strcspn builtin.
8605
8606 Return 0 if no simplification was possible, otherwise return the
8607 simplified form of the call as a tree.
8608
8609 The simplified form may be a constant or other expression which
8610 computes the same value, but in a more efficient manner (including
8611 calls to other builtin functions).
8612
8613 The call may contain arguments which need to be evaluated, but
8614 which are not useful to determine the result of the call. In
8615 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8616 COMPOUND_EXPR will be an argument which must be evaluated.
8617 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8618 COMPOUND_EXPR in the chain will contain the tree for the simplified
8619 form of the builtin function call. */
8620
8621 static tree
8622 fold_builtin_strcspn (tree arglist)
8623 {
8624 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8625 return 0;
8626 else
8627 {
8628 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
8629 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
8630
8631 /* If both arguments are constants, evaluate at compile-time. */
8632 if (p1 && p2)
8633 {
8634 const size_t r = strcspn (p1, p2);
8635 return size_int (r);
8636 }
8637
8638 /* If the first argument is "", return 0. */
8639 if (p1 && *p1 == '\0')
8640 {
8641 /* Evaluate and ignore argument s2 in case it has
8642 side-effects. */
8643 return omit_one_operand (integer_type_node,
8644 integer_zero_node, s2);
8645 }
8646
8647 /* If the second argument is "", return __builtin_strlen(s1). */
8648 if (p2 && *p2 == '\0')
8649 {
8650 tree newarglist = build_tree_list (NULL_TREE, s1),
8651 fn = implicit_built_in_decls[BUILT_IN_STRLEN];
8652
8653 /* If the replacement _DECL isn't initialized, don't do the
8654 transformation. */
8655 if (!fn)
8656 return 0;
8657
8658 return build_function_call_expr (fn, newarglist);
8659 }
8660 return 0;
8661 }
8662 }
8663
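/* For illustration: strcspn ("hello", "lo") folds above to the constant
   2 and strcspn (s, "") becomes __builtin_strlen (s); likewise for
   strspn, strspn ("hello", "eh") folds to 2 and strspn (s, "") folds
   to 0 with both arguments still evaluated.  */
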
8664 /* Fold a call to the fputs builtin. IGNORE is true if the value returned
8665    by the builtin will be ignored.  UNLOCKED is true if this is
8666    actually a call to fputs_unlocked.  If LEN is non-NULL, it represents
8667 the known length of the string. Return NULL_TREE if no simplification
8668 was possible. */
8669
8670 tree
8671 fold_builtin_fputs (tree arglist, bool ignore, bool unlocked, tree len)
8672 {
8673 tree fn;
8674 tree fn_fputc = unlocked ? implicit_built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
8675 : implicit_built_in_decls[BUILT_IN_FPUTC];
8676 tree fn_fwrite = unlocked ? implicit_built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
8677 : implicit_built_in_decls[BUILT_IN_FWRITE];
8678
8679 /* If the return value is used, or the replacement _DECL isn't
8680 initialized, don't do the transformation. */
8681 if (!ignore || !fn_fputc || !fn_fwrite)
8682 return 0;
8683
8684 /* Verify the arguments in the original call. */
8685 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8686 return 0;
8687
8688 if (! len)
8689 len = c_strlen (TREE_VALUE (arglist), 0);
8690
8691 /* Get the length of the string passed to fputs. If the length
8692 can't be determined, punt. */
8693 if (!len
8694 || TREE_CODE (len) != INTEGER_CST)
8695 return 0;
8696
8697 switch (compare_tree_int (len, 1))
8698 {
8699     case -1: /* length is 0, delete the call entirely.  */
8700 return omit_one_operand (integer_type_node, integer_zero_node,
8701 TREE_VALUE (TREE_CHAIN (arglist)));
8702
8703 case 0: /* length is 1, call fputc. */
8704 {
8705 const char *p = c_getstr (TREE_VALUE (arglist));
8706
8707 if (p != NULL)
8708 {
8709 /* New argument list transforming fputs(string, stream) to
8710 fputc(string[0], stream). */
8711 arglist = build_tree_list (NULL_TREE,
8712 TREE_VALUE (TREE_CHAIN (arglist)));
8713 arglist = tree_cons (NULL_TREE,
8714 build_int_cst (NULL_TREE, p[0]),
8715 arglist);
8716 fn = fn_fputc;
8717 break;
8718 }
8719 }
8720 /* FALLTHROUGH */
8721 case 1: /* length is greater than 1, call fwrite. */
8722 {
8723 tree string_arg;
8724
8725 /* If optimizing for size keep fputs. */
8726 if (optimize_size)
8727 return 0;
8728 string_arg = TREE_VALUE (arglist);
8729 /* New argument list transforming fputs(string, stream) to
8730 fwrite(string, 1, len, stream). */
8731 arglist = build_tree_list (NULL_TREE,
8732 TREE_VALUE (TREE_CHAIN (arglist)));
8733 arglist = tree_cons (NULL_TREE, len, arglist);
8734 arglist = tree_cons (NULL_TREE, size_one_node, arglist);
8735 arglist = tree_cons (NULL_TREE, string_arg, arglist);
8736 fn = fn_fwrite;
8737 break;
8738 }
8739 default:
8740 gcc_unreachable ();
8741 }
8742
8743 /* These optimizations are only performed when the result is ignored,
8744 hence there's no need to cast the result to integer_type_node. */
8745 return build_function_call_expr (fn, arglist);
8746 }
8747
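/* For illustration, when the fputs result is ignored the folds above
   give roughly:

     fputs ("", f)    ->  the call disappears, F still evaluated
     fputs ("x", f)   ->  fputc ('x', f)
     fputs ("hi", f)  ->  fwrite ("hi", 1, 2, f)

   except that the fwrite form is skipped when optimizing for size.  */
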
8748 /* Fold the arguments of a va_start / __builtin_next_arg call (ARGLIST).
8749    Returns true if an error was produced, false otherwise.  This is done
8750    so that we don't output the error or warning twice or three times.  */
8751 bool
8752 fold_builtin_next_arg (tree arglist)
8753 {
8754 tree fntype = TREE_TYPE (current_function_decl);
8755
8756 if (TYPE_ARG_TYPES (fntype) == 0
8757 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8758 == void_type_node))
8759 {
8760 error ("%<va_start%> used in function with fixed args");
8761 return true;
8762 }
8763 else if (!arglist)
8764 {
8765 /* Evidently an out of date version of <stdarg.h>; can't validate
8766 va_start's second argument, but can still work as intended. */
8767 warning ("%<__builtin_next_arg%> called without an argument");
8768 return true;
8769 }
8770 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
8771    once we have checked the arguments and, if needed, issued a warning.  */
8772 else if (!TREE_CHAIN (arglist)
8773 || !integer_zerop (TREE_VALUE (arglist))
8774 || !integer_zerop (TREE_VALUE (TREE_CHAIN (arglist)))
8775 || TREE_CHAIN (TREE_CHAIN (arglist)))
8776 {
8777 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8778 tree arg = TREE_VALUE (arglist);
8779
8780 if (TREE_CHAIN (arglist))
8781 {
8782 error ("%<va_start%> used with too many arguments");
8783 return true;
8784 }
8785
8786 /* Strip off all nops for the sake of the comparison. This
8787 is not quite the same as STRIP_NOPS. It does more.
8788 	 We must also strip off INDIRECT_REF for C++ reference
8789 parameters. */
8790 while (TREE_CODE (arg) == NOP_EXPR
8791 || TREE_CODE (arg) == CONVERT_EXPR
8792 || TREE_CODE (arg) == NON_LVALUE_EXPR
8793 || TREE_CODE (arg) == INDIRECT_REF)
8794 arg = TREE_OPERAND (arg, 0);
8795 if (arg != last_parm)
8796 {
8797 	  /* FIXME: Sometimes with the tree optimizers we can end up with
8798 	     something that is not the last named argument even though the
8799 	     user did pass the last argument.  We just warn and treat the
8800 	     arg as if it were the last one, so we may get wrong code
8801 	     because of it.  */
8802 warning ("second parameter of %<va_start%> not last named argument");
8803 }
8804 /* We want to verify the second parameter just once before the tree
8805 optimizers are run and then avoid keeping it in the tree,
8806 as otherwise we could warn even for correct code like:
8807 void foo (int i, ...)
8808 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
8809 TREE_VALUE (arglist) = integer_zero_node;
8810 TREE_CHAIN (arglist) = build_tree_list (NULL, integer_zero_node);
8811 }
8812 return false;
8813 }
8814
8815
8816 /* Simplify a call to the sprintf builtin.
8817
8818 Return 0 if no simplification was possible, otherwise return the
8819 simplified form of the call as a tree. If IGNORED is true, it means that
8820 the caller does not use the returned value of the function. */
8821
8822 static tree
8823 fold_builtin_sprintf (tree arglist, int ignored)
8824 {
8825 tree call, retval, dest, fmt;
8826 const char *fmt_str = NULL;
8827
8828 /* Verify the required arguments in the original call. We deal with two
8829 types of sprintf() calls: 'sprintf (str, fmt)' and
8830 'sprintf (dest, "%s", orig)'. */
8831 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
8832 && !validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
8833 VOID_TYPE))
8834 return NULL_TREE;
8835
8836 /* Get the destination string and the format specifier. */
8837 dest = TREE_VALUE (arglist);
8838 fmt = TREE_VALUE (TREE_CHAIN (arglist));
8839
8840 /* Check whether the format is a literal string constant. */
8841 fmt_str = c_getstr (fmt);
8842 if (fmt_str == NULL)
8843 return NULL_TREE;
8844
8845 call = NULL_TREE;
8846 retval = NULL_TREE;
8847
8848 /* If the format doesn't contain % args or %%, use strcpy. */
8849 if (strchr (fmt_str, '%') == NULL)
8850 {
8851 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
8852
8853 if (!fn)
8854 return NULL_TREE;
8855
8856 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
8857 'format' is known to contain no % formats. */
8858 arglist = build_tree_list (NULL_TREE, fmt);
8859 arglist = tree_cons (NULL_TREE, dest, arglist);
8860 call = build_function_call_expr (fn, arglist);
8861 if (!ignored)
8862 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
8863 }
8864
8865 /* If the format is "%s", use strcpy if the result isn't used. */
8866 else if (fmt_str && strcmp (fmt_str, "%s") == 0)
8867 {
8868 tree fn, orig;
8869 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
8870
8871 if (!fn)
8872 return NULL_TREE;
8873
8874 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
8875 orig = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8876 arglist = build_tree_list (NULL_TREE, orig);
8877 arglist = tree_cons (NULL_TREE, dest, arglist);
8878 if (!ignored)
8879 {
8880 retval = c_strlen (orig, 1);
8881 if (!retval || TREE_CODE (retval) != INTEGER_CST)
8882 return NULL_TREE;
8883 }
8884 call = build_function_call_expr (fn, arglist);
8885 }
8886
8887 if (call && retval)
8888 {
8889 retval = convert
8890 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
8891 retval);
8892 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
8893 }
8894 else
8895 return call;
8896 }