1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "expr.h"
37 #include "optabs.h"
38 #include "libfuncs.h"
39 #include "recog.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "toplev.h"
43 #include "predict.h"
44 #include "tm_p.h"
45 #include "target.h"
46 #include "langhooks.h"
47
48 #define CALLED_AS_BUILT_IN(NODE) \
49 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
50
51 /* Register mappings for target machines without register windows. */
52 #ifndef INCOMING_REGNO
53 #define INCOMING_REGNO(OUT) (OUT)
54 #endif
55 #ifndef OUTGOING_REGNO
56 #define OUTGOING_REGNO(IN) (IN)
57 #endif
58
59 #ifndef PAD_VARARGS_DOWN
60 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
61 #endif
62
63 /* Define the names of the builtin function types and codes. */
64 const char *const built_in_class_names[4]
65 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
66
67 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM) STRINGX(X),
68 const char *const built_in_names[(int) END_BUILTINS] =
69 {
70 #include "builtins.def"
71 };
72 #undef DEF_BUILTIN
73
74 /* Set up an array of _DECL trees; make sure each element is
75    initialized to NULL_TREE.  */
76 tree built_in_decls[(int) END_BUILTINS];
77 /* Declarations used when constructing the builtin implicitly in the compiler.
78    It may be NULL_TREE when this is invalid (for instance, the runtime is not
79    required to implement the function call in all cases).  */
80 tree implicit_built_in_decls[(int) END_BUILTINS];
81
82 static int get_pointer_alignment PARAMS ((tree, unsigned int));
83 static tree c_strlen PARAMS ((tree));
84 static const char *c_getstr PARAMS ((tree));
85 static rtx c_readstr PARAMS ((const char *,
86 enum machine_mode));
87 static int target_char_cast PARAMS ((tree, char *));
88 static rtx get_memory_rtx PARAMS ((tree));
89 static int apply_args_size PARAMS ((void));
90 static int apply_result_size PARAMS ((void));
91 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
92 static rtx result_vector PARAMS ((int, rtx));
93 #endif
94 static rtx expand_builtin_setjmp PARAMS ((tree, rtx));
95 static void expand_builtin_prefetch PARAMS ((tree));
96 static rtx expand_builtin_apply_args PARAMS ((void));
97 static rtx expand_builtin_apply_args_1 PARAMS ((void));
98 static rtx expand_builtin_apply PARAMS ((rtx, rtx, rtx));
99 static void expand_builtin_return PARAMS ((rtx));
100 static enum type_class type_to_class PARAMS ((tree));
101 static rtx expand_builtin_classify_type PARAMS ((tree));
102 static void expand_errno_check PARAMS ((tree, rtx));
103 static rtx expand_builtin_mathfn PARAMS ((tree, rtx, rtx));
104 static rtx expand_builtin_mathfn_2 PARAMS ((tree, rtx, rtx));
105 static rtx expand_builtin_constant_p PARAMS ((tree));
106 static rtx expand_builtin_args_info PARAMS ((tree));
107 static rtx expand_builtin_next_arg PARAMS ((tree));
108 static rtx expand_builtin_va_start PARAMS ((tree));
109 static rtx expand_builtin_va_end PARAMS ((tree));
110 static rtx expand_builtin_va_copy PARAMS ((tree));
111 static rtx expand_builtin_memcmp PARAMS ((tree, tree, rtx,
112 enum machine_mode));
113 static rtx expand_builtin_strcmp PARAMS ((tree, rtx,
114 enum machine_mode));
115 static rtx expand_builtin_strncmp PARAMS ((tree, rtx,
116 enum machine_mode));
117 static rtx builtin_memcpy_read_str PARAMS ((PTR, HOST_WIDE_INT,
118 enum machine_mode));
119 static rtx expand_builtin_strcat PARAMS ((tree, rtx,
120 enum machine_mode));
121 static rtx expand_builtin_strncat PARAMS ((tree, rtx,
122 enum machine_mode));
123 static rtx expand_builtin_strspn PARAMS ((tree, rtx,
124 enum machine_mode));
125 static rtx expand_builtin_strcspn PARAMS ((tree, rtx,
126 enum machine_mode));
127 static rtx expand_builtin_memcpy PARAMS ((tree, rtx,
128 enum machine_mode, int));
129 static rtx expand_builtin_memmove PARAMS ((tree, rtx,
130 enum machine_mode));
131 static rtx expand_builtin_bcopy PARAMS ((tree));
132 static rtx expand_builtin_strcpy PARAMS ((tree, rtx,
133 enum machine_mode));
134 static rtx expand_builtin_stpcpy PARAMS ((tree, rtx,
135 enum machine_mode));
136 static rtx builtin_strncpy_read_str PARAMS ((PTR, HOST_WIDE_INT,
137 enum machine_mode));
138 static rtx expand_builtin_strncpy PARAMS ((tree, rtx,
139 enum machine_mode));
140 static rtx builtin_memset_read_str PARAMS ((PTR, HOST_WIDE_INT,
141 enum machine_mode));
142 static rtx builtin_memset_gen_str PARAMS ((PTR, HOST_WIDE_INT,
143 enum machine_mode));
144 static rtx expand_builtin_memset PARAMS ((tree, rtx,
145 enum machine_mode));
146 static rtx expand_builtin_bzero PARAMS ((tree));
147 static rtx expand_builtin_strlen PARAMS ((tree, rtx));
148 static rtx expand_builtin_strstr PARAMS ((tree, rtx,
149 enum machine_mode));
150 static rtx expand_builtin_strpbrk PARAMS ((tree, rtx,
151 enum machine_mode));
152 static rtx expand_builtin_strchr PARAMS ((tree, rtx,
153 enum machine_mode));
154 static rtx expand_builtin_strrchr PARAMS ((tree, rtx,
155 enum machine_mode));
156 static rtx expand_builtin_alloca PARAMS ((tree, rtx));
157 static rtx expand_builtin_unop PARAMS ((enum machine_mode,
158 tree, rtx, rtx, optab));
159 static rtx expand_builtin_frame_address PARAMS ((tree));
160 static rtx expand_builtin_fputs PARAMS ((tree, int, int));
161 static tree stabilize_va_list PARAMS ((tree, int));
162 static rtx expand_builtin_expect PARAMS ((tree, rtx));
163 static tree fold_builtin_constant_p PARAMS ((tree));
164 static tree fold_builtin_classify_type PARAMS ((tree));
165 static tree fold_builtin_inf PARAMS ((tree, int));
166 static tree fold_builtin_nan PARAMS ((tree, tree, int));
167 static int validate_arglist PARAMS ((tree, ...));
168 static tree fold_trunc_transparent_mathfn PARAMS ((tree));
169
170 /* Return the alignment in bits of EXP, a pointer valued expression.
171 But don't return more than MAX_ALIGN no matter what.
172 The alignment returned is, by default, the alignment of the thing that
173 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
174
175 Otherwise, look at the expression to see if we can do better, i.e., if the
176 expression is actually pointing at an object whose alignment is tighter. */
177
178 static int
179 get_pointer_alignment (exp, max_align)
180 tree exp;
181 unsigned int max_align;
182 {
183 unsigned int align, inner;
184
185 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
186 return 0;
187
188 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
189 align = MIN (align, max_align);
190
191 while (1)
192 {
193 switch (TREE_CODE (exp))
194 {
195 case NOP_EXPR:
196 case CONVERT_EXPR:
197 case NON_LVALUE_EXPR:
198 exp = TREE_OPERAND (exp, 0);
199 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
200 return align;
201
202 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
203 align = MIN (inner, max_align);
204 break;
205
206 case PLUS_EXPR:
207 /* If sum of pointer + int, restrict our maximum alignment to that
208 imposed by the integer. If not, we can't do any better than
209 ALIGN. */
210 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
211 return align;
212
213 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
214 & (max_align / BITS_PER_UNIT - 1))
215 != 0)
216 max_align >>= 1;
217
218 exp = TREE_OPERAND (exp, 0);
219 break;
220
221 case ADDR_EXPR:
222 /* See what we are pointing at and look at its alignment. */
223 exp = TREE_OPERAND (exp, 0);
224 if (TREE_CODE (exp) == FUNCTION_DECL)
225 align = FUNCTION_BOUNDARY;
226 else if (DECL_P (exp))
227 align = DECL_ALIGN (exp);
228 #ifdef CONSTANT_ALIGNMENT
229 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
230 align = CONSTANT_ALIGNMENT (exp, align);
231 #endif
232 return MIN (align, max_align);
233
234 default:
235 return align;
236 }
237 }
238 }
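/* Editorial illustration (not part of the original source): a sketch of what
   the walk above can deduce, assuming a target where int has 32-bit alignment.

       int x;                        -- DECL_ALIGN (x) is 32 bits
       ... (char *) &x ...           -- deduced alignment: 32 bits
       ... (char *) &x + 2 ...       -- PLUS_EXPR case: the constant offset 2
                                        caps the deducible alignment at 16 bits

   The walk only looks at the expression tree itself; once it reaches a plain
   variable or anything else it cannot analyze, it falls back to the alignment
   of the pointed-to type.  */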
239
240 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
241 way, because it could contain a zero byte in the middle.
242 TREE_STRING_LENGTH is the size of the character array, not the string.
243
244 The value returned is of type `ssizetype'.
245
246 Unfortunately, string_constant can't access the values of const char
247    arrays with initializers, so neither can we here.  */
248
249 static tree
250 c_strlen (src)
251 tree src;
252 {
253 tree offset_node;
254 HOST_WIDE_INT offset;
255 int max;
256 const char *ptr;
257
258 src = string_constant (src, &offset_node);
259 if (src == 0)
260 return 0;
261
262 max = TREE_STRING_LENGTH (src) - 1;
263 ptr = TREE_STRING_POINTER (src);
264
265 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
266 {
267 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
268 compute the offset to the following null if we don't know where to
269 start searching for it. */
270 int i;
271
272 for (i = 0; i < max; i++)
273 if (ptr[i] == 0)
274 return 0;
275
276 /* We don't know the starting offset, but we do know that the string
277 has no internal zero bytes. We can assume that the offset falls
278 within the bounds of the string; otherwise, the programmer deserves
279 what he gets. Subtract the offset from the length of the string,
280 and return that. This would perhaps not be valid if we were dealing
281 with named arrays in addition to literal string constants. */
282
283 return size_diffop (size_int (max), offset_node);
284 }
285
286 /* We have a known offset into the string. Start searching there for
287 a null character if we can represent it as a single HOST_WIDE_INT. */
288 if (offset_node == 0)
289 offset = 0;
290 else if (! host_integerp (offset_node, 0))
291 offset = -1;
292 else
293 offset = tree_low_cst (offset_node, 0);
294
295 /* If the offset is known to be out of bounds, warn, and call strlen at
296 runtime. */
297 if (offset < 0 || offset > max)
298 {
299 warning ("offset outside bounds of constant string");
300 return 0;
301 }
302
303 /* Use strlen to search for the first zero byte. Since any strings
304 constructed with build_string will have nulls appended, we win even
305 if we get handed something like (char[4])"abcd".
306
307 Since OFFSET is our starting index into the string, no further
308 calculation is needed. */
309 return ssize_int (strlen (ptr + offset));
310 }
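/* Editorial illustration (not part of the original source): the three outcomes
   of c_strlen on plausible inputs.

       c_strlen ("hello")           =>  ssize_int (5)
       c_strlen ("hello" + 2)       =>  ssize_int (3)    known constant offset
       c_strlen ("foo\0bar" + i)    =>  0                unknown offset and an
                                                         internal NUL, so punt
                                                         to a runtime strlen

   "hello" + i (no internal NUL) still folds, to 5 - i, via the size_diffop
   path above.  */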
311
312 /* Return a char pointer for a C string if it is a string constant
313 or sum of string constant and integer constant. */
314
315 static const char *
316 c_getstr (src)
317 tree src;
318 {
319 tree offset_node;
320
321 src = string_constant (src, &offset_node);
322 if (src == 0)
323 return 0;
324
325 if (offset_node == 0)
326 return TREE_STRING_POINTER (src);
327 else if (!host_integerp (offset_node, 1)
328 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
329 return 0;
330
331 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
332 }
333
334 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
335 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
336
337 static rtx
338 c_readstr (str, mode)
339 const char *str;
340 enum machine_mode mode;
341 {
342 HOST_WIDE_INT c[2];
343 HOST_WIDE_INT ch;
344 unsigned int i, j;
345
346 if (GET_MODE_CLASS (mode) != MODE_INT)
347 abort ();
348 c[0] = 0;
349 c[1] = 0;
350 ch = 1;
351 for (i = 0; i < GET_MODE_SIZE (mode); i++)
352 {
353 j = i;
354 if (WORDS_BIG_ENDIAN)
355 j = GET_MODE_SIZE (mode) - i - 1;
356 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
357 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
358 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
359 j *= BITS_PER_UNIT;
360 if (j > 2 * HOST_BITS_PER_WIDE_INT)
361 abort ();
362 if (ch)
363 ch = (unsigned char) str[i];
364 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
365 }
366 return immed_double_const (c[0], c[1], mode);
367 }
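/* Editorial illustration (not part of the original source): a worked example
   of the byte placement above, assuming 32-bit SImode and a target that is
   little-endian in both bytes and words.

       c_readstr ("abcd", SImode)  =>  CONST_INT 0x64636261
                                       ('a' in the low byte, 'd' in the high)

   On a big-endian target the same call yields 0x61626364; either way the
   constant's in-memory image on the target matches the string bytes, which is
   what callers such as builtin_memcpy_read_str rely on.  */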
368
369 /* Cast a target constant CST to target CHAR, and if that value fits into
370    the host char type, return zero and store the value in the variable
371    pointed to by P.  */
372
373 static int
374 target_char_cast (cst, p)
375 tree cst;
376 char *p;
377 {
378 unsigned HOST_WIDE_INT val, hostval;
379
380 if (!host_integerp (cst, 1)
381 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
382 return 1;
383
384 val = tree_low_cst (cst, 1);
385 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
386 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
387
388 hostval = val;
389 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
390 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
391
392 if (val != hostval)
393 return 1;
394
395 *p = hostval;
396 return 0;
397 }
398
399 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
400 times to get the address of either a higher stack frame, or a return
401 address located within it (depending on FNDECL_CODE). */
402
403 rtx
404 expand_builtin_return_addr (fndecl_code, count, tem)
405 enum built_in_function fndecl_code;
406 int count;
407 rtx tem;
408 {
409 int i;
410
411 /* Some machines need special handling before we can access
412 arbitrary frames. For example, on the sparc, we must first flush
413 all register windows to the stack. */
414 #ifdef SETUP_FRAME_ADDRESSES
415 if (count > 0)
416 SETUP_FRAME_ADDRESSES ();
417 #endif
418
419 /* On the sparc, the return address is not in the frame, it is in a
420 register. There is no way to access it off of the current frame
421 pointer, but it can be accessed off the previous frame pointer by
422 reading the value from the register window save area. */
423 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
424 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
425 count--;
426 #endif
427
428 /* Scan back COUNT frames to the specified frame. */
429 for (i = 0; i < count; i++)
430 {
431 /* Assume the dynamic chain pointer is in the word that the
432 frame address points to, unless otherwise specified. */
433 #ifdef DYNAMIC_CHAIN_ADDRESS
434 tem = DYNAMIC_CHAIN_ADDRESS (tem);
435 #endif
436 tem = memory_address (Pmode, tem);
437 tem = gen_rtx_MEM (Pmode, tem);
438 set_mem_alias_set (tem, get_frame_alias_set ());
439 tem = copy_to_reg (tem);
440 }
441
442 /* For __builtin_frame_address, return what we've got. */
443 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
444 return tem;
445
446   /* For __builtin_return_address, get the return address from that
447 frame. */
448 #ifdef RETURN_ADDR_RTX
449 tem = RETURN_ADDR_RTX (count, tem);
450 #else
451 tem = memory_address (Pmode,
452 plus_constant (tem, GET_MODE_SIZE (Pmode)));
453 tem = gen_rtx_MEM (Pmode, tem);
454 set_mem_alias_set (tem, get_frame_alias_set ());
455 #endif
456 return tem;
457 }
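/* Editorial illustration (not part of the original source): the user-level
   builtins this helper serves.

       void *ra = __builtin_return_address (0);   -- return address of the
                                                      current function
       void *fp = __builtin_frame_address (1);    -- frame address of the caller

   The argument must be a constant; nonzero values walk COUNT frames up the
   dynamic chain exactly as the loop above does, with the target macros
   (SETUP_FRAME_ADDRESSES, DYNAMIC_CHAIN_ADDRESS, RETURN_ADDR_RTX) supplying
   the machine-specific pieces.  */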
458
459 /* Alias set used for setjmp buffer. */
460 static HOST_WIDE_INT setjmp_alias_set = -1;
461
462 /* Construct the leading half of a __builtin_setjmp call. Control will
463 return to RECEIVER_LABEL. This is used directly by sjlj exception
464 handling code. */
465
466 void
467 expand_builtin_setjmp_setup (buf_addr, receiver_label)
468 rtx buf_addr;
469 rtx receiver_label;
470 {
471 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
472 rtx stack_save;
473 rtx mem;
474
475 if (setjmp_alias_set == -1)
476 setjmp_alias_set = new_alias_set ();
477
478 #ifdef POINTERS_EXTEND_UNSIGNED
479 if (GET_MODE (buf_addr) != Pmode)
480 buf_addr = convert_memory_address (Pmode, buf_addr);
481 #endif
482
483 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
484
485 emit_queue ();
486
487 /* We store the frame pointer and the address of receiver_label in
488 the buffer and use the rest of it for the stack save area, which
489 is machine-dependent. */
490
491 #ifndef BUILTIN_SETJMP_FRAME_VALUE
492 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
493 #endif
494
495 mem = gen_rtx_MEM (Pmode, buf_addr);
496 set_mem_alias_set (mem, setjmp_alias_set);
497 emit_move_insn (mem, BUILTIN_SETJMP_FRAME_VALUE);
498
499 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
500 set_mem_alias_set (mem, setjmp_alias_set);
501
502 emit_move_insn (validize_mem (mem),
503 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
504
505 stack_save = gen_rtx_MEM (sa_mode,
506 plus_constant (buf_addr,
507 2 * GET_MODE_SIZE (Pmode)));
508 set_mem_alias_set (stack_save, setjmp_alias_set);
509 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
510
511 /* If there is further processing to do, do it. */
512 #ifdef HAVE_builtin_setjmp_setup
513 if (HAVE_builtin_setjmp_setup)
514 emit_insn (gen_builtin_setjmp_setup (buf_addr));
515 #endif
516
517   /* Tell optimize_save_area_alloca that extra work will be needed
518      during alloca.  */
519 current_function_calls_setjmp = 1;
520
521 /* Set this so all the registers get saved in our frame; we need to be
522 able to copy the saved values for any registers from frames we unwind. */
523 current_function_has_nonlocal_label = 1;
524 }
525
526 /* Construct the trailing part of a __builtin_setjmp call.
527 This is used directly by sjlj exception handling code. */
528
529 void
530 expand_builtin_setjmp_receiver (receiver_label)
531 rtx receiver_label ATTRIBUTE_UNUSED;
532 {
533 /* Clobber the FP when we get here, so we have to make sure it's
534 marked as used by this function. */
535 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
536
537 /* Mark the static chain as clobbered here so life information
538 doesn't get messed up for it. */
539 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
540
541 /* Now put in the code to restore the frame pointer, and argument
542 pointer, if needed. The code below is from expand_end_bindings
543 in stmt.c; see detailed documentation there. */
544 #ifdef HAVE_nonlocal_goto
545 if (! HAVE_nonlocal_goto)
546 #endif
547 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
548
549 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
550 if (fixed_regs[ARG_POINTER_REGNUM])
551 {
552 #ifdef ELIMINABLE_REGS
553 size_t i;
554 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
555
556 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
557 if (elim_regs[i].from == ARG_POINTER_REGNUM
558 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
559 break;
560
561 if (i == ARRAY_SIZE (elim_regs))
562 #endif
563 {
564 /* Now restore our arg pointer from the address at which it
565 was saved in our stack frame. */
566 emit_move_insn (virtual_incoming_args_rtx,
567 copy_to_reg (get_arg_pointer_save_area (cfun)));
568 }
569 }
570 #endif
571
572 #ifdef HAVE_builtin_setjmp_receiver
573 if (HAVE_builtin_setjmp_receiver)
574 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
575 else
576 #endif
577 #ifdef HAVE_nonlocal_goto_receiver
578 if (HAVE_nonlocal_goto_receiver)
579 emit_insn (gen_nonlocal_goto_receiver ());
580 else
581 #endif
582 { /* Nothing */ }
583
584 /* @@@ This is a kludge. Not all machine descriptions define a blockage
585 insn, but we must not allow the code we just generated to be reordered
586 by scheduling. Specifically, the update of the frame pointer must
587 happen immediately, not later. So emit an ASM_INPUT to act as blockage
588 insn. */
589 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
590 }
591
592 /* __builtin_setjmp is passed a pointer to an array of five words (not
593 all will be used on all machines). It operates similarly to the C
594 library function of the same name, but is more efficient. Much of
595 the code below (and for longjmp) is copied from the handling of
596 non-local gotos.
597
598 NOTE: This is intended for use by GNAT and the exception handling
599 scheme in the compiler and will only work in the method used by
600 them. */
601
602 static rtx
603 expand_builtin_setjmp (arglist, target)
604 tree arglist;
605 rtx target;
606 {
607 rtx buf_addr, next_lab, cont_lab;
608
609 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
610 return NULL_RTX;
611
612 if (target == 0 || GET_CODE (target) != REG
613 || REGNO (target) < FIRST_PSEUDO_REGISTER)
614 target = gen_reg_rtx (TYPE_MODE (integer_type_node));
615
616 buf_addr = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
617
618 next_lab = gen_label_rtx ();
619 cont_lab = gen_label_rtx ();
620
621 expand_builtin_setjmp_setup (buf_addr, next_lab);
622
623 /* Set TARGET to zero and branch to the continue label. */
624 emit_move_insn (target, const0_rtx);
625 emit_jump_insn (gen_jump (cont_lab));
626 emit_barrier ();
627 emit_label (next_lab);
628
629 expand_builtin_setjmp_receiver (next_lab);
630
631 /* Set TARGET to one. */
632 emit_move_insn (target, const1_rtx);
633 emit_label (cont_lab);
634
635 /* Tell flow about the strange goings on. Putting `next_lab' on
636      `nonlocal_goto_handler_labels' indicates that function
637 calls may traverse the arc back to this label. */
638
639 current_function_has_nonlocal_label = 1;
640 nonlocal_goto_handler_labels
641 = gen_rtx_EXPR_LIST (VOIDmode, next_lab, nonlocal_goto_handler_labels);
642
643 return target;
644 }
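/* Editorial illustration (not part of the original source): the source-level
   shape of the construct expanded above.  As the comments before
   expand_builtin_setjmp and expand_builtin_longjmp warn, these builtins exist
   for the compiler's own sjlj exception-handling scheme, not for general use.

       void *buf[5];                      -- five words, per the comment above

       if (__builtin_setjmp (buf) == 0)
         {
           ...                            -- direct path: the builtin yields 0
           __builtin_longjmp (buf, 1);    -- second argument must be 1
         }
       else
         {
           ...                            -- reached via longjmp: yields 1
         }
*/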
645
646 /* __builtin_longjmp is passed a pointer to an array of five words (not
647 all will be used on all machines). It operates similarly to the C
648 library function of the same name, but is more efficient. Much of
649 the code below is copied from the handling of non-local gotos.
650
651 NOTE: This is intended for use by GNAT and the exception handling
652 scheme in the compiler and will only work in the method used by
653 them. */
654
655 void
656 expand_builtin_longjmp (buf_addr, value)
657 rtx buf_addr, value;
658 {
659 rtx fp, lab, stack, insn, last;
660 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
661
662 if (setjmp_alias_set == -1)
663 setjmp_alias_set = new_alias_set ();
664
665 #ifdef POINTERS_EXTEND_UNSIGNED
666 if (GET_MODE (buf_addr) != Pmode)
667 buf_addr = convert_memory_address (Pmode, buf_addr);
668 #endif
669
670 buf_addr = force_reg (Pmode, buf_addr);
671
672 /* We used to store value in static_chain_rtx, but that fails if pointers
673 are smaller than integers. We instead require that the user must pass
674 a second argument of 1, because that is what builtin_setjmp will
675 return. This also makes EH slightly more efficient, since we are no
676 longer copying around a value that we don't care about. */
677 if (value != const1_rtx)
678 abort ();
679
680 current_function_calls_longjmp = 1;
681
682 last = get_last_insn ();
683 #ifdef HAVE_builtin_longjmp
684 if (HAVE_builtin_longjmp)
685 emit_insn (gen_builtin_longjmp (buf_addr));
686 else
687 #endif
688 {
689 fp = gen_rtx_MEM (Pmode, buf_addr);
690 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
691 GET_MODE_SIZE (Pmode)));
692
693 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
694 2 * GET_MODE_SIZE (Pmode)));
695 set_mem_alias_set (fp, setjmp_alias_set);
696 set_mem_alias_set (lab, setjmp_alias_set);
697 set_mem_alias_set (stack, setjmp_alias_set);
698
699 /* Pick up FP, label, and SP from the block and jump. This code is
700 from expand_goto in stmt.c; see there for detailed comments. */
701 #if HAVE_nonlocal_goto
702 if (HAVE_nonlocal_goto)
703 /* We have to pass a value to the nonlocal_goto pattern that will
704 get copied into the static_chain pointer, but it does not matter
705 what that value is, because builtin_setjmp does not use it. */
706 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
707 else
708 #endif
709 {
710 lab = copy_to_reg (lab);
711
712 emit_move_insn (hard_frame_pointer_rtx, fp);
713 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
714
715 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
716 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
717 emit_indirect_jump (lab);
718 }
719 }
720
721 /* Search backwards and mark the jump insn as a non-local goto.
722 Note that this precludes the use of __builtin_longjmp to a
723 __builtin_setjmp target in the same function. However, we've
724 already cautioned the user that these functions are for
725 internal exception handling use only. */
726 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
727 {
728 if (insn == last)
729 abort ();
730 if (GET_CODE (insn) == JUMP_INSN)
731 {
732 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
733 REG_NOTES (insn));
734 break;
735 }
736 else if (GET_CODE (insn) == CALL_INSN)
737 break;
738 }
739 }
740
741 /* Expand a call to __builtin_prefetch. For a target that does not support
742 data prefetch, evaluate the memory address argument in case it has side
743 effects. */
744
745 static void
746 expand_builtin_prefetch (arglist)
747 tree arglist;
748 {
749 tree arg0, arg1, arg2;
750 rtx op0, op1, op2;
751
752 if (!validate_arglist (arglist, POINTER_TYPE, 0))
753 return;
754
755 arg0 = TREE_VALUE (arglist);
756 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
757 zero (read) and argument 2 (locality) defaults to 3 (high degree of
758 locality). */
759 if (TREE_CHAIN (arglist))
760 {
761 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
762 if (TREE_CHAIN (TREE_CHAIN (arglist)))
763 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
764 else
765 arg2 = build_int_2 (3, 0);
766 }
767 else
768 {
769 arg1 = integer_zero_node;
770 arg2 = build_int_2 (3, 0);
771 }
772
773 /* Argument 0 is an address. */
774 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
775
776 /* Argument 1 (read/write flag) must be a compile-time constant int. */
777 if (TREE_CODE (arg1) != INTEGER_CST)
778 {
779 error ("second arg to `__builtin_prefetch' must be a constant");
780 arg1 = integer_zero_node;
781 }
782 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
783 /* Argument 1 must be either zero or one. */
784 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
785 {
786 warning ("invalid second arg to __builtin_prefetch; using zero");
787 op1 = const0_rtx;
788 }
789
790 /* Argument 2 (locality) must be a compile-time constant int. */
791 if (TREE_CODE (arg2) != INTEGER_CST)
792 {
793 error ("third arg to `__builtin_prefetch' must be a constant");
794 arg2 = integer_zero_node;
795 }
796 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
797 /* Argument 2 must be 0, 1, 2, or 3. */
798 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
799 {
800 warning ("invalid third arg to __builtin_prefetch; using zero");
801 op2 = const0_rtx;
802 }
803
804 #ifdef HAVE_prefetch
805 if (HAVE_prefetch)
806 {
807 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
808 (op0,
809 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
810 || (GET_MODE(op0) != Pmode))
811 {
812 #ifdef POINTERS_EXTEND_UNSIGNED
813 if (GET_MODE(op0) != Pmode)
814 op0 = convert_memory_address (Pmode, op0);
815 #endif
816 op0 = force_reg (Pmode, op0);
817 }
818 emit_insn (gen_prefetch (op0, op1, op2));
819 }
820 else
821 #endif
822 op0 = protect_from_queue (op0, 0);
823 /* Don't do anything with direct references to volatile memory, but
824 generate code to handle other side effects. */
825 if (GET_CODE (op0) != MEM && side_effects_p (op0))
826 emit_insn (op0);
827 }
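/* Editorial illustration (not part of the original source): a typical call as
   accepted by the argument checks above.

       for (i = 0; i < n; i++)
         {
           __builtin_prefetch (&a[i + 8], 0, 3);   -- read, high locality
           sum += a[i];
         }

   Arguments 1 and 2 must be integer constants (0/1 and 0..3 respectively); on
   targets without a prefetch pattern only the address's side effects are
   evaluated, as the fall-through code above shows.  */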
828
829 /* Get a MEM rtx for expression EXP which is the address of an operand
830    to be used in a string instruction (cmpstrsi, movstrsi, ...).  */
831
832 static rtx
833 get_memory_rtx (exp)
834 tree exp;
835 {
836 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_SUM);
837 rtx mem;
838
839 #ifdef POINTERS_EXTEND_UNSIGNED
840 if (GET_MODE (addr) != Pmode)
841 addr = convert_memory_address (Pmode, addr);
842 #endif
843
844 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
845
846 /* Get an expression we can use to find the attributes to assign to MEM.
847 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
848 we can. First remove any nops. */
849 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
850 || TREE_CODE (exp) == NON_LVALUE_EXPR)
851 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
852 exp = TREE_OPERAND (exp, 0);
853
854 if (TREE_CODE (exp) == ADDR_EXPR)
855 {
856 exp = TREE_OPERAND (exp, 0);
857 set_mem_attributes (mem, exp, 0);
858 }
859 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
860 {
861 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
862 /* memcpy, memset and other builtin stringops can alias with anything. */
863 set_mem_alias_set (mem, 0);
864 }
865
866 return mem;
867 }
868 \f
869 /* Built-in functions to perform an untyped call and return. */
870
871 /* For each register that may be used for calling a function, this
872 gives a mode used to copy the register's value. VOIDmode indicates
873 the register is not used for calling a function. If the machine
874 has register windows, this gives only the outbound registers.
875 INCOMING_REGNO gives the corresponding inbound register. */
876 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
877
878 /* For each register that may be used for returning values, this gives
879 a mode used to copy the register's value. VOIDmode indicates the
880 register is not used for returning values. If the machine has
881 register windows, this gives only the outbound registers.
882 INCOMING_REGNO gives the corresponding inbound register. */
883 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
884
885 /* For each register that may be used for calling a function, this
886 gives the offset of that register into the block returned by
887 __builtin_apply_args. 0 indicates that the register is not
888 used for calling a function. */
889 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
890
891 /* Return the offset of register REGNO into the block returned by
892 __builtin_apply_args. This is not declared static, since it is
893 needed in objc-act.c. */
894
895 int
896 apply_args_register_offset (regno)
897 int regno;
898 {
899 apply_args_size ();
900
901 /* Arguments are always put in outgoing registers (in the argument
902      block) when that makes sense.  */
903 #ifdef OUTGOING_REGNO
904 regno = OUTGOING_REGNO (regno);
905 #endif
906 return apply_args_reg_offset[regno];
907 }
908
909 /* Return the size required for the block returned by __builtin_apply_args,
910 and initialize apply_args_mode. */
911
912 static int
913 apply_args_size ()
914 {
915 static int size = -1;
916 int align;
917 unsigned int regno;
918 enum machine_mode mode;
919
920 /* The values computed by this function never change. */
921 if (size < 0)
922 {
923 /* The first value is the incoming arg-pointer. */
924 size = GET_MODE_SIZE (Pmode);
925
926 /* The second value is the structure value address unless this is
927 passed as an "invisible" first argument. */
928 if (struct_value_rtx)
929 size += GET_MODE_SIZE (Pmode);
930
931 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
932 if (FUNCTION_ARG_REGNO_P (regno))
933 {
934 /* Search for the proper mode for copying this register's
935 value. I'm not sure this is right, but it works so far. */
936 enum machine_mode best_mode = VOIDmode;
937
938 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
939 mode != VOIDmode;
940 mode = GET_MODE_WIDER_MODE (mode))
941 if (HARD_REGNO_MODE_OK (regno, mode)
942 && HARD_REGNO_NREGS (regno, mode) == 1)
943 best_mode = mode;
944
945 if (best_mode == VOIDmode)
946 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
947 mode != VOIDmode;
948 mode = GET_MODE_WIDER_MODE (mode))
949 if (HARD_REGNO_MODE_OK (regno, mode)
950 && have_insn_for (SET, mode))
951 best_mode = mode;
952
953 if (best_mode == VOIDmode)
954 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
955 mode != VOIDmode;
956 mode = GET_MODE_WIDER_MODE (mode))
957 if (HARD_REGNO_MODE_OK (regno, mode)
958 && have_insn_for (SET, mode))
959 best_mode = mode;
960
961 if (best_mode == VOIDmode)
962 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
963 mode != VOIDmode;
964 mode = GET_MODE_WIDER_MODE (mode))
965 if (HARD_REGNO_MODE_OK (regno, mode)
966 && have_insn_for (SET, mode))
967 best_mode = mode;
968
969 mode = best_mode;
970 if (mode == VOIDmode)
971 abort ();
972
973 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
974 if (size % align != 0)
975 size = CEIL (size, align) * align;
976 apply_args_reg_offset[regno] = size;
977 size += GET_MODE_SIZE (mode);
978 apply_args_mode[regno] = mode;
979 }
980 else
981 {
982 apply_args_mode[regno] = VOIDmode;
983 apply_args_reg_offset[regno] = 0;
984 }
985 }
986 return size;
987 }
988
989 /* Return the size required for the block returned by __builtin_apply,
990 and initialize apply_result_mode. */
991
992 static int
993 apply_result_size ()
994 {
995 static int size = -1;
996 int align, regno;
997 enum machine_mode mode;
998
999 /* The values computed by this function never change. */
1000 if (size < 0)
1001 {
1002 size = 0;
1003
1004 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1005 if (FUNCTION_VALUE_REGNO_P (regno))
1006 {
1007 /* Search for the proper mode for copying this register's
1008 value. I'm not sure this is right, but it works so far. */
1009 enum machine_mode best_mode = VOIDmode;
1010
1011 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1012 mode != TImode;
1013 mode = GET_MODE_WIDER_MODE (mode))
1014 if (HARD_REGNO_MODE_OK (regno, mode))
1015 best_mode = mode;
1016
1017 if (best_mode == VOIDmode)
1018 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
1019 mode != VOIDmode;
1020 mode = GET_MODE_WIDER_MODE (mode))
1021 if (HARD_REGNO_MODE_OK (regno, mode)
1022 && have_insn_for (SET, mode))
1023 best_mode = mode;
1024
1025 if (best_mode == VOIDmode)
1026 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
1027 mode != VOIDmode;
1028 mode = GET_MODE_WIDER_MODE (mode))
1029 if (HARD_REGNO_MODE_OK (regno, mode)
1030 && have_insn_for (SET, mode))
1031 best_mode = mode;
1032
1033 if (best_mode == VOIDmode)
1034 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
1035 mode != VOIDmode;
1036 mode = GET_MODE_WIDER_MODE (mode))
1037 if (HARD_REGNO_MODE_OK (regno, mode)
1038 && have_insn_for (SET, mode))
1039 best_mode = mode;
1040
1041 mode = best_mode;
1042 if (mode == VOIDmode)
1043 abort ();
1044
1045 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1046 if (size % align != 0)
1047 size = CEIL (size, align) * align;
1048 size += GET_MODE_SIZE (mode);
1049 apply_result_mode[regno] = mode;
1050 }
1051 else
1052 apply_result_mode[regno] = VOIDmode;
1053
1054 /* Allow targets that use untyped_call and untyped_return to override
1055 the size so that machine-specific information can be stored here. */
1056 #ifdef APPLY_RESULT_SIZE
1057 size = APPLY_RESULT_SIZE;
1058 #endif
1059 }
1060 return size;
1061 }
1062
1063 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1064 /* Create a vector describing the result block RESULT. If SAVEP is true,
1065 the result block is used to save the values; otherwise it is used to
1066 restore the values. */
1067
1068 static rtx
1069 result_vector (savep, result)
1070 int savep;
1071 rtx result;
1072 {
1073 int regno, size, align, nelts;
1074 enum machine_mode mode;
1075 rtx reg, mem;
1076 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1077
1078 size = nelts = 0;
1079 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1080 if ((mode = apply_result_mode[regno]) != VOIDmode)
1081 {
1082 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1083 if (size % align != 0)
1084 size = CEIL (size, align) * align;
1085 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1086 mem = adjust_address (result, mode, size);
1087 savevec[nelts++] = (savep
1088 ? gen_rtx_SET (VOIDmode, mem, reg)
1089 : gen_rtx_SET (VOIDmode, reg, mem));
1090 size += GET_MODE_SIZE (mode);
1091 }
1092 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1093 }
1094 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1095
1096 /* Save the state required to perform an untyped call with the same
1097 arguments as were passed to the current function. */
1098
1099 static rtx
1100 expand_builtin_apply_args_1 ()
1101 {
1102 rtx registers;
1103 int size, align, regno;
1104 enum machine_mode mode;
1105
1106 /* Create a block where the arg-pointer, structure value address,
1107 and argument registers can be saved. */
1108 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1109
1110 /* Walk past the arg-pointer and structure value address. */
1111 size = GET_MODE_SIZE (Pmode);
1112 if (struct_value_rtx)
1113 size += GET_MODE_SIZE (Pmode);
1114
1115 /* Save each register used in calling a function to the block. */
1116 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1117 if ((mode = apply_args_mode[regno]) != VOIDmode)
1118 {
1119 rtx tem;
1120
1121 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1122 if (size % align != 0)
1123 size = CEIL (size, align) * align;
1124
1125 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1126
1127 emit_move_insn (adjust_address (registers, mode, size), tem);
1128 size += GET_MODE_SIZE (mode);
1129 }
1130
1131 /* Save the arg pointer to the block. */
1132 emit_move_insn (adjust_address (registers, Pmode, 0),
1133 copy_to_reg (virtual_incoming_args_rtx));
1134 size = GET_MODE_SIZE (Pmode);
1135
1136 /* Save the structure value address unless this is passed as an
1137 "invisible" first argument. */
1138 if (struct_value_incoming_rtx)
1139 {
1140 emit_move_insn (adjust_address (registers, Pmode, size),
1141 copy_to_reg (struct_value_incoming_rtx));
1142 size += GET_MODE_SIZE (Pmode);
1143 }
1144
1145 /* Return the address of the block. */
1146 return copy_addr_to_reg (XEXP (registers, 0));
1147 }
1148
1149 /* __builtin_apply_args returns block of memory allocated on
1150 the stack into which is stored the arg pointer, structure
1151 value address, static chain, and all the registers that might
1152 possibly be used in performing a function call. The code is
1153 moved to the start of the function so the incoming values are
1154 saved. */
1155
1156 static rtx
1157 expand_builtin_apply_args ()
1158 {
1159 /* Don't do __builtin_apply_args more than once in a function.
1160 Save the result of the first call and reuse it. */
1161 if (apply_args_value != 0)
1162 return apply_args_value;
1163 {
1164 /* When this function is called, it means that registers must be
1165 saved on entry to this function. So we migrate the
1166 call to the first insn of this function. */
1167 rtx temp;
1168 rtx seq;
1169
1170 start_sequence ();
1171 temp = expand_builtin_apply_args_1 ();
1172 seq = get_insns ();
1173 end_sequence ();
1174
1175 apply_args_value = temp;
1176
1177 /* Put the insns after the NOTE that starts the function.
1178 If this is inside a start_sequence, make the outer-level insn
1179 chain current, so the code is placed at the start of the
1180 function. */
1181 push_topmost_sequence ();
1182 emit_insn_before (seq, NEXT_INSN (get_insns ()));
1183 pop_topmost_sequence ();
1184 return temp;
1185 }
1186 }
1187
1188 /* Perform an untyped call and save the state required to perform an
1189 untyped return of whatever value was returned by the given function. */
1190
1191 static rtx
1192 expand_builtin_apply (function, arguments, argsize)
1193 rtx function, arguments, argsize;
1194 {
1195 int size, align, regno;
1196 enum machine_mode mode;
1197 rtx incoming_args, result, reg, dest, src, call_insn;
1198 rtx old_stack_level = 0;
1199 rtx call_fusage = 0;
1200
1201 #ifdef POINTERS_EXTEND_UNSIGNED
1202 if (GET_MODE (arguments) != Pmode)
1203 arguments = convert_memory_address (Pmode, arguments);
1204 #endif
1205
1206 /* Create a block where the return registers can be saved. */
1207 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1208
1209 /* Fetch the arg pointer from the ARGUMENTS block. */
1210 incoming_args = gen_reg_rtx (Pmode);
1211 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1212 #ifndef STACK_GROWS_DOWNWARD
1213 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1214 incoming_args, 0, OPTAB_LIB_WIDEN);
1215 #endif
1216
1217 /* Perform postincrements before actually calling the function. */
1218 emit_queue ();
1219
1220 /* Push a new argument block and copy the arguments. Do not allow
1221 the (potential) memcpy call below to interfere with our stack
1222 manipulations. */
1223 do_pending_stack_adjust ();
1224 NO_DEFER_POP;
1225
1226   /* Save the stack with the nonlocal mechanism if available.  */
1227 #ifdef HAVE_save_stack_nonlocal
1228 if (HAVE_save_stack_nonlocal)
1229 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1230 else
1231 #endif
1232 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1233
1234 /* Push a block of memory onto the stack to store the memory arguments.
1235 Save the address in a register, and copy the memory arguments. ??? I
1236      haven't figured out how the calling convention macros affect this,
1237 but it's likely that the source and/or destination addresses in
1238 the block copy will need updating in machine specific ways. */
1239 dest = allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1240 dest = gen_rtx_MEM (BLKmode, dest);
1241 set_mem_align (dest, PARM_BOUNDARY);
1242 src = gen_rtx_MEM (BLKmode, incoming_args);
1243 set_mem_align (src, PARM_BOUNDARY);
1244 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1245
1246 /* Refer to the argument block. */
1247 apply_args_size ();
1248 arguments = gen_rtx_MEM (BLKmode, arguments);
1249 set_mem_align (arguments, PARM_BOUNDARY);
1250
1251 /* Walk past the arg-pointer and structure value address. */
1252 size = GET_MODE_SIZE (Pmode);
1253 if (struct_value_rtx)
1254 size += GET_MODE_SIZE (Pmode);
1255
1256 /* Restore each of the registers previously saved. Make USE insns
1257 for each of these registers for use in making the call. */
1258 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1259 if ((mode = apply_args_mode[regno]) != VOIDmode)
1260 {
1261 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1262 if (size % align != 0)
1263 size = CEIL (size, align) * align;
1264 reg = gen_rtx_REG (mode, regno);
1265 emit_move_insn (reg, adjust_address (arguments, mode, size));
1266 use_reg (&call_fusage, reg);
1267 size += GET_MODE_SIZE (mode);
1268 }
1269
1270 /* Restore the structure value address unless this is passed as an
1271 "invisible" first argument. */
1272 size = GET_MODE_SIZE (Pmode);
1273 if (struct_value_rtx)
1274 {
1275 rtx value = gen_reg_rtx (Pmode);
1276 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1277 emit_move_insn (struct_value_rtx, value);
1278 if (GET_CODE (struct_value_rtx) == REG)
1279 use_reg (&call_fusage, struct_value_rtx);
1280 size += GET_MODE_SIZE (Pmode);
1281 }
1282
1283 /* All arguments and registers used for the call are set up by now! */
1284 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0, 0);
1285
1286 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1287 and we don't want to load it into a register as an optimization,
1288 because prepare_call_address already did it if it should be done. */
1289 if (GET_CODE (function) != SYMBOL_REF)
1290 function = memory_address (FUNCTION_MODE, function);
1291
1292 /* Generate the actual call instruction and save the return value. */
1293 #ifdef HAVE_untyped_call
1294 if (HAVE_untyped_call)
1295 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1296 result, result_vector (1, result)));
1297 else
1298 #endif
1299 #ifdef HAVE_call_value
1300 if (HAVE_call_value)
1301 {
1302 rtx valreg = 0;
1303
1304 /* Locate the unique return register. It is not possible to
1305 express a call that sets more than one return register using
1306 call_value; use untyped_call for that. In fact, untyped_call
1307 only needs to save the return registers in the given block. */
1308 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1309 if ((mode = apply_result_mode[regno]) != VOIDmode)
1310 {
1311 if (valreg)
1312 abort (); /* HAVE_untyped_call required. */
1313 valreg = gen_rtx_REG (mode, regno);
1314 }
1315
1316 emit_call_insn (GEN_CALL_VALUE (valreg,
1317 gen_rtx_MEM (FUNCTION_MODE, function),
1318 const0_rtx, NULL_RTX, const0_rtx));
1319
1320 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1321 }
1322 else
1323 #endif
1324 abort ();
1325
1326 /* Find the CALL insn we just emitted. */
1327 for (call_insn = get_last_insn ();
1328 call_insn && GET_CODE (call_insn) != CALL_INSN;
1329 call_insn = PREV_INSN (call_insn))
1330 ;
1331
1332 if (! call_insn)
1333 abort ();
1334
1335 /* Put the register usage information on the CALL. If there is already
1336 some usage information, put ours at the end. */
1337 if (CALL_INSN_FUNCTION_USAGE (call_insn))
1338 {
1339 rtx link;
1340
1341 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
1342 link = XEXP (link, 1))
1343 ;
1344
1345 XEXP (link, 1) = call_fusage;
1346 }
1347 else
1348 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
1349
1350 /* Restore the stack. */
1351 #ifdef HAVE_save_stack_nonlocal
1352 if (HAVE_save_stack_nonlocal)
1353 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1354 else
1355 #endif
1356 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1357
1358 OK_DEFER_POP;
1359
1360 /* Return the address of the result block. */
1361 return copy_addr_to_reg (XEXP (result, 0));
1362 }
1363
1364 /* Perform an untyped return. */
1365
1366 static void
1367 expand_builtin_return (result)
1368 rtx result;
1369 {
1370 int size, align, regno;
1371 enum machine_mode mode;
1372 rtx reg;
1373 rtx call_fusage = 0;
1374
1375 #ifdef POINTERS_EXTEND_UNSIGNED
1376 if (GET_MODE (result) != Pmode)
1377 result = convert_memory_address (Pmode, result);
1378 #endif
1379
1380 apply_result_size ();
1381 result = gen_rtx_MEM (BLKmode, result);
1382
1383 #ifdef HAVE_untyped_return
1384 if (HAVE_untyped_return)
1385 {
1386 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1387 emit_barrier ();
1388 return;
1389 }
1390 #endif
1391
1392 /* Restore the return value and note that each value is used. */
1393 size = 0;
1394 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1395 if ((mode = apply_result_mode[regno]) != VOIDmode)
1396 {
1397 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1398 if (size % align != 0)
1399 size = CEIL (size, align) * align;
1400 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1401 emit_move_insn (reg, adjust_address (result, mode, size));
1402
1403 push_to_sequence (call_fusage);
1404 emit_insn (gen_rtx_USE (VOIDmode, reg));
1405 call_fusage = get_insns ();
1406 end_sequence ();
1407 size += GET_MODE_SIZE (mode);
1408 }
1409
1410 /* Put the USE insns before the return. */
1411 emit_insn (call_fusage);
1412
1413   /* Return whatever values were restored by jumping directly to the end
1414 of the function. */
1415 expand_null_return ();
1416 }
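/* Editorial illustration (not part of the original source): the forwarding
   idiom that __builtin_apply_args, __builtin_apply and __builtin_return
   implement together.  The wrapped function and the 64-byte bound on the
   pushed-argument block are hypothetical.

       double wrapped_fn ();

       double wrapper ()
       {
         void *args = __builtin_apply_args ();
         void *result = __builtin_apply ((void (*)()) wrapped_fn, args, 64);
         __builtin_return (result);
       }

   apply_args_size and apply_result_size above compute the layout of the two
   memory blocks these pointers refer to.  */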
1417
1418 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1419
1420 static enum type_class
1421 type_to_class (type)
1422 tree type;
1423 {
1424 switch (TREE_CODE (type))
1425 {
1426 case VOID_TYPE: return void_type_class;
1427 case INTEGER_TYPE: return integer_type_class;
1428 case CHAR_TYPE: return char_type_class;
1429 case ENUMERAL_TYPE: return enumeral_type_class;
1430 case BOOLEAN_TYPE: return boolean_type_class;
1431 case POINTER_TYPE: return pointer_type_class;
1432 case REFERENCE_TYPE: return reference_type_class;
1433 case OFFSET_TYPE: return offset_type_class;
1434 case REAL_TYPE: return real_type_class;
1435 case COMPLEX_TYPE: return complex_type_class;
1436 case FUNCTION_TYPE: return function_type_class;
1437 case METHOD_TYPE: return method_type_class;
1438 case RECORD_TYPE: return record_type_class;
1439 case UNION_TYPE:
1440 case QUAL_UNION_TYPE: return union_type_class;
1441 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1442 ? string_type_class : array_type_class);
1443 case SET_TYPE: return set_type_class;
1444 case FILE_TYPE: return file_type_class;
1445 case LANG_TYPE: return lang_type_class;
1446 default: return no_type_class;
1447 }
1448 }
1449
1450 /* Expand a call to __builtin_classify_type with arguments found in
1451 ARGLIST. */
1452
1453 static rtx
1454 expand_builtin_classify_type (arglist)
1455 tree arglist;
1456 {
1457 if (arglist != 0)
1458 return GEN_INT (type_to_class (TREE_TYPE (TREE_VALUE (arglist))));
1459 return GEN_INT (no_type_class);
1460 }
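/* Editorial illustration (not part of the original source): the builtin simply
   reports the type_class enum (from typeclass.h) of its argument's type.

       __builtin_classify_type (42)     -- integer_type_class
       __builtin_classify_type (4.2)    -- real_type_class
       __builtin_classify_type ("s")    -- pointer or array class, depending on
                                           how the front end types the argument
*/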
1461
1462 /* Expand expression EXP, which is a call to __builtin_constant_p. */
1463
1464 static rtx
1465 expand_builtin_constant_p (exp)
1466 tree exp;
1467 {
1468 tree arglist = TREE_OPERAND (exp, 1);
1469 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
1470 rtx tmp;
1471
1472 if (arglist == 0)
1473 return const0_rtx;
1474 arglist = TREE_VALUE (arglist);
1475
1476 /* We have taken care of the easy cases during constant folding. This
1477 case is not obvious, so emit (constant_p_rtx (ARGLIST)) and let CSE
1478 get a chance to see if it can deduce whether ARGLIST is constant. */
1479
1480 current_function_calls_constant_p = 1;
1481
1482 tmp = expand_expr (arglist, NULL_RTX, VOIDmode, 0);
1483 tmp = gen_rtx_CONSTANT_P_RTX (value_mode, tmp);
1484 return tmp;
1485 }
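/* Editorial illustration (not part of the original source): a common use of
   the builtin whose hard cases are deferred to CSE via CONSTANT_P_RTX above.
   do_op_const and do_op_var are hypothetical.

       #define do_op(x) \
         (__builtin_constant_p (x) ? do_op_const (x) : do_op_var (x))

   The easy cases are answered during constant folding; anything still
   unresolved here gets one more chance after CSE before collapsing to 0.  */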
1486
1487 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1488 if available. */
1489 tree
1490 mathfn_built_in (type, fn)
1491 tree type;
1492 enum built_in_function fn;
1493 {
1494 enum built_in_function fcode = NOT_BUILT_IN;
1495 if (TYPE_MODE (type) == TYPE_MODE (double_type_node))
1496 switch (fn)
1497 {
1498 case BUILT_IN_SQRT:
1499 case BUILT_IN_SQRTF:
1500 case BUILT_IN_SQRTL:
1501 fcode = BUILT_IN_SQRT;
1502 break;
1503 case BUILT_IN_SIN:
1504 case BUILT_IN_SINF:
1505 case BUILT_IN_SINL:
1506 fcode = BUILT_IN_SIN;
1507 break;
1508 case BUILT_IN_COS:
1509 case BUILT_IN_COSF:
1510 case BUILT_IN_COSL:
1511 fcode = BUILT_IN_COS;
1512 break;
1513 case BUILT_IN_EXP:
1514 case BUILT_IN_EXPF:
1515 case BUILT_IN_EXPL:
1516 fcode = BUILT_IN_EXP;
1517 break;
1518 case BUILT_IN_LOG:
1519 case BUILT_IN_LOGF:
1520 case BUILT_IN_LOGL:
1521 fcode = BUILT_IN_LOG;
1522 break;
1523 case BUILT_IN_FLOOR:
1524 case BUILT_IN_FLOORF:
1525 case BUILT_IN_FLOORL:
1526 fcode = BUILT_IN_FLOOR;
1527 break;
1528 case BUILT_IN_CEIL:
1529 case BUILT_IN_CEILF:
1530 case BUILT_IN_CEILL:
1531 fcode = BUILT_IN_CEIL;
1532 break;
1533 case BUILT_IN_TRUNC:
1534 case BUILT_IN_TRUNCF:
1535 case BUILT_IN_TRUNCL:
1536 fcode = BUILT_IN_TRUNC;
1537 break;
1538 case BUILT_IN_ROUND:
1539 case BUILT_IN_ROUNDF:
1540 case BUILT_IN_ROUNDL:
1541 fcode = BUILT_IN_ROUND;
1542 break;
1543 case BUILT_IN_NEARBYINT:
1544 case BUILT_IN_NEARBYINTF:
1545 case BUILT_IN_NEARBYINTL:
1546 fcode = BUILT_IN_NEARBYINT;
1547 break;
1548 default:
1549 abort ();
1550 }
1551 else if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
1552 switch (fn)
1553 {
1554 case BUILT_IN_SQRT:
1555 case BUILT_IN_SQRTF:
1556 case BUILT_IN_SQRTL:
1557 fcode = BUILT_IN_SQRTF;
1558 break;
1559 case BUILT_IN_SIN:
1560 case BUILT_IN_SINF:
1561 case BUILT_IN_SINL:
1562 fcode = BUILT_IN_SINF;
1563 break;
1564 case BUILT_IN_COS:
1565 case BUILT_IN_COSF:
1566 case BUILT_IN_COSL:
1567 fcode = BUILT_IN_COSF;
1568 break;
1569 case BUILT_IN_EXP:
1570 case BUILT_IN_EXPF:
1571 case BUILT_IN_EXPL:
1572 fcode = BUILT_IN_EXPF;
1573 break;
1574 case BUILT_IN_LOG:
1575 case BUILT_IN_LOGF:
1576 case BUILT_IN_LOGL:
1577 fcode = BUILT_IN_LOGF;
1578 break;
1579 case BUILT_IN_FLOOR:
1580 case BUILT_IN_FLOORF:
1581 case BUILT_IN_FLOORL:
1582 fcode = BUILT_IN_FLOORF;
1583 break;
1584 case BUILT_IN_CEIL:
1585 case BUILT_IN_CEILF:
1586 case BUILT_IN_CEILL:
1587 fcode = BUILT_IN_CEILF;
1588 break;
1589 case BUILT_IN_TRUNC:
1590 case BUILT_IN_TRUNCF:
1591 case BUILT_IN_TRUNCL:
1592 fcode = BUILT_IN_TRUNCF;
1593 break;
1594 case BUILT_IN_ROUND:
1595 case BUILT_IN_ROUNDF:
1596 case BUILT_IN_ROUNDL:
1597 fcode = BUILT_IN_ROUNDF;
1598 break;
1599 case BUILT_IN_NEARBYINT:
1600 case BUILT_IN_NEARBYINTF:
1601 case BUILT_IN_NEARBYINTL:
1602 fcode = BUILT_IN_NEARBYINTF;
1603 break;
1604 default:
1605 abort ();
1606 }
1607 else if (TYPE_MODE (type) == TYPE_MODE (long_double_type_node))
1608 switch (fn)
1609 {
1610 case BUILT_IN_SQRT:
1611 case BUILT_IN_SQRTF:
1612 case BUILT_IN_SQRTL:
1613 fcode = BUILT_IN_SQRTL;
1614 break;
1615 case BUILT_IN_SIN:
1616 case BUILT_IN_SINF:
1617 case BUILT_IN_SINL:
1618 fcode = BUILT_IN_SINL;
1619 break;
1620 case BUILT_IN_COS:
1621 case BUILT_IN_COSF:
1622 case BUILT_IN_COSL:
1623 fcode = BUILT_IN_COSL;
1624 break;
1625 case BUILT_IN_EXP:
1626 case BUILT_IN_EXPF:
1627 case BUILT_IN_EXPL:
1628 fcode = BUILT_IN_EXPL;
1629 break;
1630 case BUILT_IN_LOG:
1631 case BUILT_IN_LOGF:
1632 case BUILT_IN_LOGL:
1633 fcode = BUILT_IN_LOGL;
1634 break;
1635 case BUILT_IN_FLOOR:
1636 case BUILT_IN_FLOORF:
1637 case BUILT_IN_FLOORL:
1638 fcode = BUILT_IN_FLOORL;
1639 break;
1640 case BUILT_IN_CEIL:
1641 case BUILT_IN_CEILF:
1642 case BUILT_IN_CEILL:
1643 fcode = BUILT_IN_CEILL;
1644 break;
1645 case BUILT_IN_TRUNC:
1646 case BUILT_IN_TRUNCF:
1647 case BUILT_IN_TRUNCL:
1648 fcode = BUILT_IN_TRUNCL;
1649 break;
1650 case BUILT_IN_ROUND:
1651 case BUILT_IN_ROUNDF:
1652 case BUILT_IN_ROUNDL:
1653 fcode = BUILT_IN_ROUNDL;
1654 break;
1655 case BUILT_IN_NEARBYINT:
1656 case BUILT_IN_NEARBYINTF:
1657 case BUILT_IN_NEARBYINTL:
1658 fcode = BUILT_IN_NEARBYINTL;
1659 break;
1660 default:
1661 abort ();
1662 }
1663 return implicit_built_in_decls[fcode];
1664 }
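/* Editorial illustration (not part of the original source): the mapping this
   helper performs, assuming the usual float/double/long double modes.

       mathfn_built_in (float_type_node, BUILT_IN_SQRT)
         => implicit_built_in_decls[BUILT_IN_SQRTF]     (i.e. sqrtf)
       mathfn_built_in (long_double_type_node, BUILT_IN_SIN)
         => implicit_built_in_decls[BUILT_IN_SINL]      (i.e. sinl)

   The result may be NULL_TREE when the runtime is not required to provide the
   corresponding function.  */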
1665
1666 /* If errno must be maintained, expand the RTL to check if the result,
1667 TARGET, of a built-in function call, EXP, is NaN, and if so set
1668 errno to EDOM. */
1669
1670 static void
1671 expand_errno_check (exp, target)
1672 tree exp;
1673 rtx target;
1674 {
1675 rtx lab;
1676
1677 if (flag_errno_math && HONOR_NANS (GET_MODE (target)))
1678 {
1679 lab = gen_label_rtx ();
1680
1681 /* Test the result; if it is NaN, set errno=EDOM because
1682 the argument was not in the domain. */
1683 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1684 0, lab);
1685
1686 #ifdef TARGET_EDOM
1687 {
1688 #ifdef GEN_ERRNO_RTX
1689 rtx errno_rtx = GEN_ERRNO_RTX;
1690 #else
1691 rtx errno_rtx
1692 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1693 #endif
1694
1695 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1696 }
1697 #else
1698 /* We can't set errno=EDOM directly; let the library call do it.
1699 Pop the arguments right away in case the call gets deleted. */
1700 NO_DEFER_POP;
1701 expand_call (exp, target, 0);
1702 OK_DEFER_POP;
1703 #endif
1704
1705 emit_label (lab);
1706 }
1707 }
1708
1709
1710 /* Expand a call to one of the builtin math functions (sin, cos, or sqrt).
1711 Return 0 if a normal call should be emitted rather than expanding the
1712 function in-line. EXP is the expression that is a call to the builtin
1713 function; if convenient, the result should be placed in TARGET.
1714 SUBTARGET may be used as the target for computing one of EXP's operands. */
1715
1716 static rtx
1717 expand_builtin_mathfn (exp, target, subtarget)
1718 tree exp;
1719 rtx target, subtarget;
1720 {
1721 optab builtin_optab;
1722 rtx op0, insns;
1723 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1724 tree arglist = TREE_OPERAND (exp, 1);
1725 enum machine_mode argmode;
1726 bool errno_set = true;
1727
1728 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
1729 return 0;
1730
1731 /* Stabilize and compute the argument. */
1732 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
1733 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
1734 {
1735 exp = copy_node (exp);
1736 TREE_OPERAND (exp, 1) = arglist;
1737 /* Wrap the computation of the argument in a SAVE_EXPR. That
1738 way, if we need to expand the argument again (as in the
1739 flag_errno_math case below where we cannot directly set
1740 errno), we will not perform side-effects more than once.
1741 Note that here we're mutating the original EXP as well as the
1742 copy; that's the right thing to do in case the original EXP
1743 is expanded later. */
1744 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
1745 arglist = copy_node (arglist);
1746 }
1747 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
1748
1749 /* Make a suitable register to place result in. */
1750 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
1751
1752 emit_queue ();
1753 start_sequence ();
1754
1755 switch (DECL_FUNCTION_CODE (fndecl))
1756 {
1757 case BUILT_IN_SIN:
1758 case BUILT_IN_SINF:
1759 case BUILT_IN_SINL:
1760 builtin_optab = sin_optab; break;
1761 case BUILT_IN_COS:
1762 case BUILT_IN_COSF:
1763 case BUILT_IN_COSL:
1764 builtin_optab = cos_optab; break;
1765 case BUILT_IN_SQRT:
1766 case BUILT_IN_SQRTF:
1767 case BUILT_IN_SQRTL:
1768 builtin_optab = sqrt_optab; break;
1769 case BUILT_IN_EXP:
1770 case BUILT_IN_EXPF:
1771 case BUILT_IN_EXPL:
1772 builtin_optab = exp_optab; break;
1773 case BUILT_IN_LOG:
1774 case BUILT_IN_LOGF:
1775 case BUILT_IN_LOGL:
1776 builtin_optab = log_optab; break;
1777 case BUILT_IN_FLOOR:
1778 case BUILT_IN_FLOORF:
1779 case BUILT_IN_FLOORL:
1780 errno_set = false; builtin_optab = floor_optab; break;
1781 case BUILT_IN_CEIL:
1782 case BUILT_IN_CEILF:
1783 case BUILT_IN_CEILL:
1784 errno_set = false; builtin_optab = ceil_optab; break;
1785 case BUILT_IN_TRUNC:
1786 case BUILT_IN_TRUNCF:
1787 case BUILT_IN_TRUNCL:
1788 errno_set = false; builtin_optab = trunc_optab; break;
1789 case BUILT_IN_ROUND:
1790 case BUILT_IN_ROUNDF:
1791 case BUILT_IN_ROUNDL:
1792 errno_set = false; builtin_optab = round_optab; break;
1793 case BUILT_IN_NEARBYINT:
1794 case BUILT_IN_NEARBYINTF:
1795 case BUILT_IN_NEARBYINTL:
1796 errno_set = false; builtin_optab = nearbyint_optab; break;
1797 default:
1798 abort ();
1799 }
1800
1801 /* Compute into TARGET.
1802 Set TARGET to wherever the result comes back. */
1803 argmode = TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist)));
1804 target = expand_unop (argmode, builtin_optab, op0, target, 0);
1805
1806 /* If we were unable to expand via the builtin, stop the
1807 sequence (without outputting the insns) and return 0, causing
1808 a call to the library function. */
1809 if (target == 0)
1810 {
1811 end_sequence ();
1812 return 0;
1813 }
1814
1815 if (errno_set)
1816 expand_errno_check (exp, target);
1817
1818 /* Output the entire sequence. */
1819 insns = get_insns ();
1820 end_sequence ();
1821 emit_insn (insns);
1822
1823 return target;
1824 }
1825
1826 /* Expand a call to one of the builtin binary math functions (pow or atan2).
1827 Return 0 if a normal call should be emitted rather than expanding the
1828 function in-line. EXP is the expression that is a call to the builtin
1829 function; if convenient, the result should be placed in TARGET.
1830 SUBTARGET may be used as the target for computing one of EXP's
1831 operands. */
1832
1833 static rtx
1834 expand_builtin_mathfn_2 (exp, target, subtarget)
1835 tree exp;
1836 rtx target, subtarget;
1837 {
1838 optab builtin_optab;
1839 rtx op0, op1, insns;
1840 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1841 tree arglist = TREE_OPERAND (exp, 1);
1842 tree arg0, arg1;
1843 enum machine_mode argmode;
1844 bool errno_set = true;
1845 bool stable = true;
1846
1847 if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
1848 return 0;
1849
1850 arg0 = TREE_VALUE (arglist);
1851 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1852
1853 /* Stabilize the arguments. */
1854 if (TREE_CODE (arg0) != VAR_DECL && TREE_CODE (arg0) != PARM_DECL)
1855 {
1856 arg0 = save_expr (arg0);
1857 TREE_VALUE (arglist) = arg0;
1858 stable = false;
1859 }
1860 if (TREE_CODE (arg1) != VAR_DECL && TREE_CODE (arg1) != PARM_DECL)
1861 {
1862 arg1 = save_expr (arg1);
1863 TREE_VALUE (TREE_CHAIN (arglist)) = arg1;
1864 stable = false;
1865 }
1866
1867 if (! stable)
1868 {
1869 exp = copy_node (exp);
1870 arglist = tree_cons (NULL_TREE, arg0,
1871 build_tree_list (NULL_TREE, arg1));
1872 TREE_OPERAND (exp, 1) = arglist;
1873 }
1874
1875 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
1876 op1 = expand_expr (arg1, 0, VOIDmode, 0);
1877
1878 /* Make a suitable register to place result in. */
1879 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
1880
1881 emit_queue ();
1882 start_sequence ();
1883
1884 switch (DECL_FUNCTION_CODE (fndecl))
1885 {
1886 case BUILT_IN_POW:
1887 case BUILT_IN_POWF:
1888 case BUILT_IN_POWL:
1889 builtin_optab = pow_optab; break;
1890 case BUILT_IN_ATAN2:
1891 case BUILT_IN_ATAN2F:
1892 case BUILT_IN_ATAN2L:
1893 builtin_optab = atan2_optab; break;
1894 default:
1895 abort ();
1896 }
1897
1898 /* Compute into TARGET.
1899 Set TARGET to wherever the result comes back. */
1900 argmode = TYPE_MODE (TREE_TYPE (arg0));
1901 target = expand_binop (argmode, builtin_optab, op0, op1,
1902 target, 0, OPTAB_DIRECT);
1903
1904 /* If we were unable to expand via the builtin, stop the
1905 sequence (without outputting the insns) and return 0, causing
1906 a call to the library function. */
1907 if (target == 0)
1908 {
1909 end_sequence ();
1910 return 0;
1911 }
1912
1913 if (errno_set)
1914 expand_errno_check (exp, target);
1915
1916 /* Output the entire sequence. */
1917 insns = get_insns ();
1918 end_sequence ();
1919 emit_insn (insns);
1920
1921 return target;
1922 }
1923
1924 /* Expand expression EXP, which is a call to the strlen builtin. Return 0
1925 if we failed; the caller should then emit a normal call. Otherwise
1926 try to get the result in TARGET, if convenient. */
1927
1928 static rtx
1929 expand_builtin_strlen (exp, target)
1930 tree exp;
1931 rtx target;
1932 {
1933 tree arglist = TREE_OPERAND (exp, 1);
1934 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
1935
1936 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
1937 return 0;
1938 else
1939 {
1940 rtx pat;
1941 tree len, src = TREE_VALUE (arglist);
1942 rtx result, src_reg, char_rtx, before_strlen;
1943 enum machine_mode insn_mode = value_mode, char_mode;
1944 enum insn_code icode = CODE_FOR_nothing;
1945 int align;
1946
1947 /* If the length can be computed at compile-time, return it. */
1948 len = c_strlen (src);
1949 if (len)
1950 return expand_expr (len, target, value_mode, EXPAND_NORMAL);
1951
1952 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
1953
1954 /* If SRC is not a pointer type, don't do this operation inline. */
1955 if (align == 0)
1956 return 0;
1957
1958 /* Bail out if we can't compute strlen in the right mode. */
1959 while (insn_mode != VOIDmode)
1960 {
1961 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
1962 if (icode != CODE_FOR_nothing)
1963 break;
1964
1965 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
1966 }
1967 if (insn_mode == VOIDmode)
1968 return 0;
1969
1970 /* Make a place to write the result of the instruction. */
1971 result = target;
1972 if (! (result != 0
1973 && GET_CODE (result) == REG
1974 && GET_MODE (result) == insn_mode
1975 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
1976 result = gen_reg_rtx (insn_mode);
1977
1978 /* Make a place to hold the source address. We will not expand
1979 the actual source until we are sure that the expansion will
1980 not fail -- there are trees that cannot be expanded twice. */
1981 src_reg = gen_reg_rtx (Pmode);
1982
1983 /* Mark the beginning of the strlen sequence so we can emit the
1984 source operand later. */
1985 before_strlen = get_last_insn ();
1986
1987 char_rtx = const0_rtx;
1988 char_mode = insn_data[(int) icode].operand[2].mode;
1989 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
1990 char_mode))
1991 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
1992
1993 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
1994 char_rtx, GEN_INT (align));
1995 if (! pat)
1996 return 0;
1997 emit_insn (pat);
1998
1999 /* Now that we are assured of success, expand the source. */
2000 start_sequence ();
2001 pat = memory_address (BLKmode,
2002 expand_expr (src, src_reg, ptr_mode, EXPAND_SUM));
2003 if (pat != src_reg)
2004 emit_move_insn (src_reg, pat);
2005 pat = get_insns ();
2006 end_sequence ();
2007
2008 if (before_strlen)
2009 emit_insn_after (pat, before_strlen);
2010 else
2011 emit_insn_before (pat, get_insns ());
2012
2013 /* Return the value in the proper mode for this function. */
2014 if (GET_MODE (result) == value_mode)
2015 target = result;
2016 else if (target != 0)
2017 convert_move (target, result, 0);
2018 else
2019 target = convert_to_mode (value_mode, result, 0);
2020
2021 return target;
2022 }
2023 }
2024
2025 /* Expand a call to the strstr builtin. Return 0 if we failed; the
2026 caller should then emit a normal call. Otherwise try to get the result
2027 in TARGET, if convenient (and in mode MODE if that's convenient). */
2028
2029 static rtx
2030 expand_builtin_strstr (arglist, target, mode)
2031 tree arglist;
2032 rtx target;
2033 enum machine_mode mode;
2034 {
2035 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2036 return 0;
2037 else
2038 {
2039 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2040 tree fn;
2041 const char *p1, *p2;
2042
2043 p2 = c_getstr (s2);
2044 if (p2 == NULL)
2045 return 0;
2046
2047 p1 = c_getstr (s1);
2048 if (p1 != NULL)
2049 {
2050 const char *r = strstr (p1, p2);
2051
2052 if (r == NULL)
2053 return const0_rtx;
2054
2055 /* Return an offset into the constant string argument. */
2056 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2057 s1, ssize_int (r - p1))),
2058 target, mode, EXPAND_NORMAL);
2059 }
2060
2061 if (p2[0] == '\0')
2062 return expand_expr (s1, target, mode, EXPAND_NORMAL);
2063
2064 if (p2[1] != '\0')
2065 return 0;
2066
2067 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2068 if (!fn)
2069 return 0;
2070
2071 /* New argument list transforming strstr(s1, s2) to
2072 strchr(s1, s2[0]). */
2073 arglist =
2074 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
2075 arglist = tree_cons (NULL_TREE, s1, arglist);
2076 return expand_expr (build_function_call_expr (fn, arglist),
2077 target, mode, EXPAND_NORMAL);
2078 }
2079 }
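/* Both special cases above follow directly from the library definition
   of strstr.  Sketch (standard libc semantics, purely for illustration):

       strstr (s, "")      => returns s itself; the empty needle matches
                              at the very first position
       strstr (s, "x")     => finds the first 'x', exactly like
                              strchr (s, 'x')

   which is why an empty second argument folds to S1 above, and a
   one-character second argument is rewritten as a strchr call.  */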
2080
2081 /* Expand a call to the strchr builtin. Return 0 if we failed; the
2082 caller should then emit a normal call. Otherwise try to get the result
2083 in TARGET, if convenient (and in mode MODE if that's convenient). */
2084
2085 static rtx
2086 expand_builtin_strchr (arglist, target, mode)
2087 tree arglist;
2088 rtx target;
2089 enum machine_mode mode;
2090 {
2091 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2092 return 0;
2093 else
2094 {
2095 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2096 const char *p1;
2097
2098 if (TREE_CODE (s2) != INTEGER_CST)
2099 return 0;
2100
2101 p1 = c_getstr (s1);
2102 if (p1 != NULL)
2103 {
2104 char c;
2105 const char *r;
2106
2107 if (target_char_cast (s2, &c))
2108 return 0;
2109
2110 r = strchr (p1, c);
2111
2112 if (r == NULL)
2113 return const0_rtx;
2114
2115 /* Return an offset into the constant string argument. */
2116 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2117 s1, ssize_int (r - p1))),
2118 target, mode, EXPAND_NORMAL);
2119 }
2120
2121 /* FIXME: Should use a strchrM optab here so that ports can optimize
2122 this. */
2123 return 0;
2124 }
2125 }
2126
2127 /* Expand a call to the strrchr builtin. Return 0 if we failed; the
2128 caller should then emit a normal call. Otherwise try to get the result
2129 in TARGET, if convenient (and in mode MODE if that's convenient). */
2130
2131 static rtx
2132 expand_builtin_strrchr (arglist, target, mode)
2133 tree arglist;
2134 rtx target;
2135 enum machine_mode mode;
2136 {
2137 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2138 return 0;
2139 else
2140 {
2141 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2142 tree fn;
2143 const char *p1;
2144
2145 if (TREE_CODE (s2) != INTEGER_CST)
2146 return 0;
2147
2148 p1 = c_getstr (s1);
2149 if (p1 != NULL)
2150 {
2151 char c;
2152 const char *r;
2153
2154 if (target_char_cast (s2, &c))
2155 return 0;
2156
2157 r = strrchr (p1, c);
2158
2159 if (r == NULL)
2160 return const0_rtx;
2161
2162 /* Return an offset into the constant string argument. */
2163 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2164 s1, ssize_int (r - p1))),
2165 target, mode, EXPAND_NORMAL);
2166 }
2167
2168 if (! integer_zerop (s2))
2169 return 0;
2170
2171 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2172 if (!fn)
2173 return 0;
2174
2175 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
2176 return expand_expr (build_function_call_expr (fn, arglist),
2177 target, mode, EXPAND_NORMAL);
2178 }
2179 }
2180
2181 /* Expand a call to the strpbrk builtin. Return 0 if we failed; the
2182 caller should then emit a normal call. Otherwise try to get the result
2183 in TARGET, if convenient (and in mode MODE if that's convenient). */
2184
2185 static rtx
2186 expand_builtin_strpbrk (arglist, target, mode)
2187 tree arglist;
2188 rtx target;
2189 enum machine_mode mode;
2190 {
2191 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2192 return 0;
2193 else
2194 {
2195 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2196 tree fn;
2197 const char *p1, *p2;
2198
2199 p2 = c_getstr (s2);
2200 if (p2 == NULL)
2201 return 0;
2202
2203 p1 = c_getstr (s1);
2204 if (p1 != NULL)
2205 {
2206 const char *r = strpbrk (p1, p2);
2207
2208 if (r == NULL)
2209 return const0_rtx;
2210
2211 /* Return an offset into the constant string argument. */
2212 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2213 s1, ssize_int (r - p1))),
2214 target, mode, EXPAND_NORMAL);
2215 }
2216
2217 if (p2[0] == '\0')
2218 {
2219 /* strpbrk(x, "") == NULL.
2220 Evaluate and ignore the arguments in case they had
2221 side-effects. */
2222 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2223 return const0_rtx;
2224 }
2225
2226 if (p2[1] != '\0')
2227 return 0; /* Really call strpbrk. */
2228
2229 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2230 if (!fn)
2231 return 0;
2232
2233 /* New argument list transforming strpbrk(s1, s2) to
2234 strchr(s1, s2[0]). */
2235 arglist =
2236 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
2237 arglist = tree_cons (NULL_TREE, s1, arglist);
2238 return expand_expr (build_function_call_expr (fn, arglist),
2239 target, mode, EXPAND_NORMAL);
2240 }
2241 }
2242
2243 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2244 bytes from constant string DATA + OFFSET and return it as target
2245 constant. */
2246
2247 static rtx
2248 builtin_memcpy_read_str (data, offset, mode)
2249 PTR data;
2250 HOST_WIDE_INT offset;
2251 enum machine_mode mode;
2252 {
2253 const char *str = (const char *) data;
2254
2255 if (offset < 0
2256 || ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2257 > strlen (str) + 1))
2258 abort (); /* Attempt to read past the end of constant string. */
2259
2260 return c_readstr (str + offset, mode);
2261 }
2262
2263 /* Expand a call to the memcpy builtin, with arguments in ARGLIST.
2264 Return 0 if we failed; the caller should then emit a normal call.
2265 Otherwise try to get the result in TARGET, if convenient (and in
2266 mode MODE if that's convenient). If ENDP is 0, return the
2267 destination pointer; if ENDP is 1, return the end pointer a la
2268 mempcpy; and if ENDP is 2, return the end pointer minus one a la
2269 stpcpy. */
2270 static rtx
2271 expand_builtin_memcpy (arglist, target, mode, endp)
2272 tree arglist;
2273 rtx target;
2274 enum machine_mode mode;
2275 int endp;
2276 {
2277 if (!validate_arglist (arglist,
2278 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2279 return 0;
2280 else
2281 {
2282 tree dest = TREE_VALUE (arglist);
2283 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2284 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2285 const char *src_str;
2286
2287 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2288 unsigned int dest_align
2289 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2290 rtx dest_mem, src_mem, dest_addr, len_rtx;
2291
2292 /* If DEST is not a pointer type, call the normal function. */
2293 if (dest_align == 0)
2294 return 0;
2295
2296 /* If the LEN parameter is zero, return DEST. */
2297 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2298 {
2299 /* Evaluate and ignore SRC in case it has side-effects. */
2300 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2301 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2302 }
2303
2304 /* If SRC is not a pointer type, don't do this
2305 operation in-line. */
2306 if (src_align == 0)
2307 return 0;
2308
2309 dest_mem = get_memory_rtx (dest);
2310 set_mem_align (dest_mem, dest_align);
2311 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2312 src_str = c_getstr (src);
2313
2314 /* If SRC is a string constant and block move would be done
2315 by pieces, we can avoid loading the string from memory
2316 and only store the computed constants. */
2317 if (src_str
2318 && GET_CODE (len_rtx) == CONST_INT
2319 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2320 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2321 (PTR) src_str, dest_align))
2322 {
2323 store_by_pieces (dest_mem, INTVAL (len_rtx),
2324 builtin_memcpy_read_str,
2325 (PTR) src_str, dest_align);
2326 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2327 #ifdef POINTERS_EXTEND_UNSIGNED
2328 if (GET_MODE (dest_mem) != ptr_mode)
2329 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2330 #endif
2331 if (endp)
2332 {
2333 rtx result = gen_rtx_PLUS (GET_MODE (dest_mem), dest_mem, len_rtx);
2334 if (endp == 2)
2335 result = simplify_gen_binary (MINUS, GET_MODE (result), result, const1_rtx);
2336 return result;
2337 }
2338 else
2339 return dest_mem;
2340 }
2341
2342 src_mem = get_memory_rtx (src);
2343 set_mem_align (src_mem, src_align);
2344
2345 /* Copy the block of memory in the most expedient way available. */
2346 dest_addr = emit_block_move (dest_mem, src_mem, len_rtx,
2347 BLOCK_OP_NORMAL);
2348
2349 if (dest_addr == 0)
2350 {
2351 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2352 #ifdef POINTERS_EXTEND_UNSIGNED
2353 if (GET_MODE (dest_addr) != ptr_mode)
2354 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2355 #endif
2356 }
2357
2358 if (endp)
2359 {
2360 rtx result = gen_rtx_PLUS (GET_MODE (dest_addr), dest_addr, len_rtx);
2361 if (endp == 2)
2362 result = simplify_gen_binary (MINUS, GET_MODE (result), result, const1_rtx);
2363 return result;
2364 }
2365 else
2366 return dest_addr;
2367 }
2368 }
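/* The ENDP convention handled above mirrors the return values of the
   corresponding library routines.  A small sketch of that user-level
   contract (standard libc; mempcpy and stpcpy are GNU/POSIX extensions,
   shown purely for illustration):

       char dst[16];
       void *p = memcpy  (dst, "abc", 4);    => ENDP 0: p == dst
       void *q = mempcpy (dst, "abc", 4);    => ENDP 1: q == dst + 4
       char *r = stpcpy  (dst, "abc");       => ENDP 2: r == dst + 3, the
                                                end pointer minus one, i.e.
                                                the terminating NUL.  */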
2369
2370 /* Expand expression EXP, which is a call to the memmove builtin. Return 0
2371 if we failed; the caller should then emit a normal call. */
2372
2373 static rtx
2374 expand_builtin_memmove (arglist, target, mode)
2375 tree arglist;
2376 rtx target;
2377 enum machine_mode mode;
2378 {
2379 if (!validate_arglist (arglist,
2380 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2381 return 0;
2382 else
2383 {
2384 tree dest = TREE_VALUE (arglist);
2385 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2386 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2387
2388 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2389 unsigned int dest_align
2390 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2391
2392 /* If DEST is not a pointer type, call the normal function. */
2393 if (dest_align == 0)
2394 return 0;
2395
2396 /* If the LEN parameter is zero, return DEST. */
2397 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2398 {
2399 /* Evaluate and ignore SRC in case it has side-effects. */
2400 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2401 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2402 }
2403
2404 /* If SRC is not a pointer type, don't do this
2405 operation in-line. */
2406 if (src_align == 0)
2407 return 0;
2408
2409 /* If src is a string constant and strings are not writable,
2410 we can use normal memcpy. */
2411 if (!flag_writable_strings && c_getstr (src))
2412 return expand_builtin_memcpy (arglist, target, mode, 0);
2413
2414 /* Otherwise, call the normal function. */
2415 return 0;
2416 }
2417 }
2418
2419 /* Expand expression EXP, which is a call to the bcopy builtin. Return 0
2420 if we failed; the caller should then emit a normal call. */
2421
2422 static rtx
2423 expand_builtin_bcopy (arglist)
2424 tree arglist;
2425 {
2426 tree src, dest, size, newarglist;
2427
2428 if (!validate_arglist (arglist,
2429 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2430 return NULL_RTX;
2431
2432 src = TREE_VALUE (arglist);
2433 dest = TREE_VALUE (TREE_CHAIN (arglist));
2434 size = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2435
2436 /* New argument list transforming bcopy(ptr x, ptr y, int z) to
2437 memmove(ptr y, ptr x, size_t z). This is done this way
2438 so that if it isn't expanded inline, we fall back to
2439 calling bcopy instead of memmove. */
2440
2441 newarglist = build_tree_list (NULL_TREE, convert (sizetype, size));
2442 newarglist = tree_cons (NULL_TREE, src, newarglist);
2443 newarglist = tree_cons (NULL_TREE, dest, newarglist);
2444
2445 return expand_builtin_memmove (newarglist, const0_rtx, VOIDmode);
2446 }
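/* The argument swap above rests on the library-level equivalence of the
   two calls.  Sketch (standard libc semantics, illustration only):

       char buf[8] = "abcdef";
       bcopy   (buf, buf + 2, 4);     => BSD style: (src, dest, n)
       memmove (buf + 2, buf, 4);     => ISO C:     (dest, src, n)

   Either call leaves buf holding "ababcd", so rewriting bcopy (x, y, z)
   as memmove (y, x, z), with z converted to size_t, preserves behavior,
   including for overlapping regions.  */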
2447
2448 /* Expand expression EXP, which is a call to the strcpy builtin. Return 0
2449 if we failed; the caller should then emit a normal call. Otherwise try to get
2450 the result in TARGET, if convenient (and in mode MODE if that's
2451 convenient). */
2452
2453 static rtx
2454 expand_builtin_strcpy (exp, target, mode)
2455 tree exp;
2456 rtx target;
2457 enum machine_mode mode;
2458 {
2459 tree arglist = TREE_OPERAND (exp, 1);
2460 tree fn, len;
2461
2462 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2463 return 0;
2464
2465 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2466 if (!fn)
2467 return 0;
2468
2469 len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
2470 if (len == 0)
2471 return 0;
2472
2473 len = size_binop (PLUS_EXPR, len, ssize_int (1));
2474 chainon (arglist, build_tree_list (NULL_TREE, len));
2475 return expand_expr (build_function_call_expr (fn, arglist),
2476 target, mode, EXPAND_NORMAL);
2477 }
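/* The rewrite above is only attempted when the source length is known at
   compile time, so copying exactly strlen (src) + 1 bytes also moves the
   terminating NUL.  Sketch of the equivalence being used (illustration
   only):

       char dst[8];
       strcpy (dst, "abc");             => what the user wrote
       memcpy (dst, "abc", 3 + 1);      => what is emitted: strlen + 1 bytes

   If c_strlen cannot compute the length, we return 0 above and a normal
   strcpy call is emitted instead.  */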
2478
2479 /* Expand a call to the stpcpy builtin, with arguments in ARGLIST.
2480 Return 0 if we failed; the caller should then emit a normal call.
2481 Otherwise try to get the result in TARGET, if convenient (and in
2482 mode MODE if that's convenient). */
2483
2484 static rtx
2485 expand_builtin_stpcpy (arglist, target, mode)
2486 tree arglist;
2487 rtx target;
2488 enum machine_mode mode;
2489 {
2490 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2491 return 0;
2492 else
2493 {
2494 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
2495 if (len == 0)
2496 return 0;
2497
2498 len = fold (size_binop (PLUS_EXPR, len, ssize_int (1)));
2499 chainon (arglist, build_tree_list (NULL_TREE, len));
2500 return expand_builtin_memcpy (arglist, target, mode, /*endp=*/2);
2501 }
2502 }
2503
2504 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2505 bytes from constant string DATA + OFFSET and return it as target
2506 constant. */
2507
2508 static rtx
2509 builtin_strncpy_read_str (data, offset, mode)
2510 PTR data;
2511 HOST_WIDE_INT offset;
2512 enum machine_mode mode;
2513 {
2514 const char *str = (const char *) data;
2515
2516 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
2517 return const0_rtx;
2518
2519 return c_readstr (str + offset, mode);
2520 }
2521
2522 /* Expand expression EXP, which is a call to the strncpy builtin. Return 0
2523 if we failed; the caller should then emit a normal call. */
2524
2525 static rtx
2526 expand_builtin_strncpy (arglist, target, mode)
2527 tree arglist;
2528 rtx target;
2529 enum machine_mode mode;
2530 {
2531 if (!validate_arglist (arglist,
2532 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2533 return 0;
2534 else
2535 {
2536 tree slen = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
2537 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2538 tree fn;
2539
2540 /* We must be passed a constant len parameter. */
2541 if (TREE_CODE (len) != INTEGER_CST)
2542 return 0;
2543
2544 /* If the len parameter is zero, return the dst parameter. */
2545 if (integer_zerop (len))
2546 {
2547 /* Evaluate and ignore the src argument in case it has
2548 side-effects. */
2549 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
2550 VOIDmode, EXPAND_NORMAL);
2551 /* Return the dst parameter. */
2552 return expand_expr (TREE_VALUE (arglist), target, mode,
2553 EXPAND_NORMAL);
2554 }
2555
2556 /* Now the source must be a string whose length is a compile-time constant. */
2557 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
2558 return 0;
2559
2560 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
2561
2562 /* We're required to pad with trailing zeros if the requested
2563 len is greater than strlen(s2)+1. In that case try to
2564 use store_by_pieces; if that fails, punt. */
2565 if (tree_int_cst_lt (slen, len))
2566 {
2567 tree dest = TREE_VALUE (arglist);
2568 unsigned int dest_align
2569 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2570 const char *p = c_getstr (TREE_VALUE (TREE_CHAIN (arglist)));
2571 rtx dest_mem;
2572
2573 if (!p || dest_align == 0 || !host_integerp (len, 1)
2574 || !can_store_by_pieces (tree_low_cst (len, 1),
2575 builtin_strncpy_read_str,
2576 (PTR) p, dest_align))
2577 return 0;
2578
2579 dest_mem = get_memory_rtx (dest);
2580 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2581 builtin_strncpy_read_str,
2582 (PTR) p, dest_align);
2583 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2584 #ifdef POINTERS_EXTEND_UNSIGNED
2585 if (GET_MODE (dest_mem) != ptr_mode)
2586 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2587 #endif
2588 return dest_mem;
2589 }
2590
2591 /* OK, transform into a builtin memcpy. */
2592 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2593 if (!fn)
2594 return 0;
2595 return expand_expr (build_function_call_expr (fn, arglist),
2596 target, mode, EXPAND_NORMAL);
2597 }
2598 }
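/* The store_by_pieces path above depends on strncpy's padding rule: when
   the requested length exceeds strlen (src), the rest of the destination
   must be filled with '\0' bytes, which is exactly what
   builtin_strncpy_read_str supplies once OFFSET passes the end of the
   string.  Sketch of that rule (standard libc semantics, illustration
   only):

       char dst[8];
       strncpy (dst, "ab", 6);
       => dst now holds 'a', 'b', '\0', '\0', '\0', '\0'; the four
          trailing bytes are explicit zero padding, not leftovers.  */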
2599
2600 /* Callback routine for store_by_pieces. Return a target constant made
2601 of GET_MODE_SIZE (MODE) consecutive copies of the single character
2602 pointed to by DATA; OFFSET is ignored. */
2603
2604 static rtx
2605 builtin_memset_read_str (data, offset, mode)
2606 PTR data;
2607 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2608 enum machine_mode mode;
2609 {
2610 const char *c = (const char *) data;
2611 char *p = alloca (GET_MODE_SIZE (mode));
2612
2613 memset (p, *c, GET_MODE_SIZE (mode));
2614
2615 return c_readstr (p, mode);
2616 }
2617
2618 /* Callback routine for store_by_pieces. Return the RTL of a register
2619 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
2620 char value given in the RTL register data. For example, if mode is
2621 4 bytes wide, return the RTL for 0x01010101*data. */
2622
2623 static rtx
2624 builtin_memset_gen_str (data, offset, mode)
2625 PTR data;
2626 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2627 enum machine_mode mode;
2628 {
2629 rtx target, coeff;
2630 size_t size;
2631 char *p;
2632
2633 size = GET_MODE_SIZE (mode);
2634 if (size == 1)
2635 return (rtx) data;
2636
2637 p = alloca (size);
2638 memset (p, 1, size);
2639 coeff = c_readstr (p, mode);
2640
2641 target = convert_to_mode (mode, (rtx) data, 1);
2642 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
2643 return force_reg (mode, target);
2644 }
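/* The multiplication by the 0x01..01 coefficient above is the usual
   trick for replicating one byte across a word.  A plain C sketch
   (assuming a 4-byte word purely for illustration):

       unsigned char c    = 0x5a;
       unsigned int  coef = 0x01010101u;      => one 0x01 per byte
       unsigned int  word = coef * c;         => 0x5a5a5a5a

   builtin_memset_gen_str performs the same computation at the RTL
   level, with COEFF built by c_readstr from a buffer of 0x01 bytes so
   that it has the width of MODE.  */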
2645
2646 /* Expand expression EXP, which is a call to the memset builtin. Return 0
2647 if we failed; the caller should then emit a normal call. Otherwise try to get
2648 the result in TARGET, if convenient (and in mode MODE if that's
2649 convenient). */
2650
2651 static rtx
2652 expand_builtin_memset (exp, target, mode)
2653 tree exp;
2654 rtx target;
2655 enum machine_mode mode;
2656 {
2657 tree arglist = TREE_OPERAND (exp, 1);
2658
2659 if (!validate_arglist (arglist,
2660 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
2661 return 0;
2662 else
2663 {
2664 tree dest = TREE_VALUE (arglist);
2665 tree val = TREE_VALUE (TREE_CHAIN (arglist));
2666 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2667 char c;
2668
2669 unsigned int dest_align
2670 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2671 rtx dest_mem, dest_addr, len_rtx;
2672
2673 /* If DEST is not a pointer type, don't do this
2674 operation in-line. */
2675 if (dest_align == 0)
2676 return 0;
2677
2678 /* If the LEN parameter is zero, return DEST. */
2679 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2680 {
2681 /* Evaluate and ignore VAL in case it has side-effects. */
2682 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
2683 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2684 }
2685
2686 if (TREE_CODE (val) != INTEGER_CST)
2687 {
2688 rtx val_rtx;
2689
2690 if (!host_integerp (len, 1))
2691 return 0;
2692
2693 if (optimize_size && tree_low_cst (len, 1) > 1)
2694 return 0;
2695
2696 /* Assume that we can memset by pieces if we can store
2697 the coefficients by pieces (in the required modes).
2698 We can't pass builtin_memset_gen_str as that emits RTL. */
2699 c = 1;
2700 if (!can_store_by_pieces (tree_low_cst (len, 1),
2701 builtin_memset_read_str,
2702 (PTR) &c, dest_align))
2703 return 0;
2704
2705 val = fold (build1 (CONVERT_EXPR, unsigned_char_type_node, val));
2706 val_rtx = expand_expr (val, NULL_RTX, VOIDmode, 0);
2707 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
2708 val_rtx);
2709 dest_mem = get_memory_rtx (dest);
2710 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2711 builtin_memset_gen_str,
2712 (PTR) val_rtx, dest_align);
2713 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2714 #ifdef POINTERS_EXTEND_UNSIGNED
2715 if (GET_MODE (dest_mem) != ptr_mode)
2716 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2717 #endif
2718 return dest_mem;
2719 }
2720
2721 if (target_char_cast (val, &c))
2722 return 0;
2723
2724 if (c)
2725 {
2726 if (!host_integerp (len, 1))
2727 return 0;
2728 if (!can_store_by_pieces (tree_low_cst (len, 1),
2729 builtin_memset_read_str, (PTR) &c,
2730 dest_align))
2731 return 0;
2732
2733 dest_mem = get_memory_rtx (dest);
2734 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2735 builtin_memset_read_str,
2736 (PTR) &c, dest_align);
2737 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2738 #ifdef POINTERS_EXTEND_UNSIGNED
2739 if (GET_MODE (dest_mem) != ptr_mode)
2740 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2741 #endif
2742 return dest_mem;
2743 }
2744
2745 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2746
2747 dest_mem = get_memory_rtx (dest);
2748 set_mem_align (dest_mem, dest_align);
2749 dest_addr = clear_storage (dest_mem, len_rtx);
2750
2751 if (dest_addr == 0)
2752 {
2753 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2754 #ifdef POINTERS_EXTEND_UNSIGNED
2755 if (GET_MODE (dest_addr) != ptr_mode)
2756 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2757 #endif
2758 }
2759
2760 return dest_addr;
2761 }
2762 }
2763
2764 /* Expand expression EXP, which is a call to the bzero builtin. Return 0
2765 if we failed; the caller should then emit a normal call. */
2766
2767 static rtx
2768 expand_builtin_bzero (exp)
2769 tree exp;
2770 {
2771 tree arglist = TREE_OPERAND (exp, 1);
2772 tree dest, size, newarglist;
2773 rtx result;
2774
2775 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2776 return NULL_RTX;
2777
2778 dest = TREE_VALUE (arglist);
2779 size = TREE_VALUE (TREE_CHAIN (arglist));
2780
2781 /* New argument list transforming bzero(ptr x, int y) to
2782 memset(ptr x, int 0, size_t y). This is done this way
2783 so that if it isn't expanded inline, we fall back to
2784 calling bzero instead of memset. */
2785
2786 newarglist = build_tree_list (NULL_TREE, convert (sizetype, size));
2787 newarglist = tree_cons (NULL_TREE, integer_zero_node, newarglist);
2788 newarglist = tree_cons (NULL_TREE, dest, newarglist);
2789
2790 TREE_OPERAND (exp, 1) = newarglist;
2791 result = expand_builtin_memset (exp, const0_rtx, VOIDmode);
2792
2793 /* Always restore the original arguments. */
2794 TREE_OPERAND (exp, 1) = arglist;
2795
2796 return result;
2797 }
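/* As with bcopy, the rewrite above leans on a direct library-level
   equivalence.  Sketch (standard libc semantics, illustration only):

       char buf[16];
       bzero  (buf, sizeof buf);         => BSD style
       memset (buf, 0, sizeof buf);      => ISO C equivalent

   The original argument list is restored afterwards so that, if the
   memset expansion punts, the call is still emitted as bzero.  */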
2798
2799 /* Expand expression EXP, which is a call to the memcmp built-in function.
2800 ARGLIST is the argument list for this call. Return 0 if we failed and the
2801 caller should emit a normal call; otherwise try to get the result in
2802 TARGET, if convenient (and in mode MODE, if that's convenient). */
2803
2804 static rtx
2805 expand_builtin_memcmp (exp, arglist, target, mode)
2806 tree exp ATTRIBUTE_UNUSED;
2807 tree arglist;
2808 rtx target;
2809 enum machine_mode mode;
2810 {
2811 tree arg1, arg2, len;
2812 const char *p1, *p2;
2813
2814 if (!validate_arglist (arglist,
2815 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2816 return 0;
2817
2818 arg1 = TREE_VALUE (arglist);
2819 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
2820 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2821
2822 /* If the len parameter is zero, return zero. */
2823 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2824 {
2825 /* Evaluate and ignore arg1 and arg2 in case they have
2826 side-effects. */
2827 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2828 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
2829 return const0_rtx;
2830 }
2831
2832 p1 = c_getstr (arg1);
2833 p2 = c_getstr (arg2);
2834
2835 /* If all arguments are constant, and the value of len is not greater
2836 than the lengths of arg1 and arg2, evaluate at compile-time. */
2837 if (host_integerp (len, 1) && p1 && p2
2838 && compare_tree_int (len, strlen (p1) + 1) <= 0
2839 && compare_tree_int (len, strlen (p2) + 1) <= 0)
2840 {
2841 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
2842
2843 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
2844 }
2845
2846 /* If len parameter is one, return an expression corresponding to
2847 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
2848 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
2849 {
2850 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2851 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
2852 tree ind1 =
2853 fold (build1 (CONVERT_EXPR, integer_type_node,
2854 build1 (INDIRECT_REF, cst_uchar_node,
2855 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
2856 tree ind2 =
2857 fold (build1 (CONVERT_EXPR, integer_type_node,
2858 build1 (INDIRECT_REF, cst_uchar_node,
2859 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
2860 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
2861 return expand_expr (result, target, mode, EXPAND_NORMAL);
2862 }
2863
2864 #ifdef HAVE_cmpstrsi
2865 {
2866 rtx arg1_rtx, arg2_rtx, arg3_rtx;
2867 rtx result;
2868 rtx insn;
2869
2870 int arg1_align
2871 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2872 int arg2_align
2873 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2874 enum machine_mode insn_mode
2875 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
2876
2877 /* If either argument is not a pointer type, call the function. */
2878 if (arg1_align == 0 || arg2_align == 0)
2879 return 0;
2880
2881 /* Make a place to write the result of the instruction. */
2882 result = target;
2883 if (! (result != 0
2884 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
2885 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
2886 result = gen_reg_rtx (insn_mode);
2887
2888 arg1_rtx = get_memory_rtx (arg1);
2889 arg2_rtx = get_memory_rtx (arg2);
2890 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2891 if (!HAVE_cmpstrsi)
2892 insn = NULL_RTX;
2893 else
2894 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
2895 GEN_INT (MIN (arg1_align, arg2_align)));
2896
2897 if (insn)
2898 emit_insn (insn);
2899 else
2900 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
2901 TYPE_MODE (integer_type_node), 3,
2902 XEXP (arg1_rtx, 0), Pmode,
2903 XEXP (arg2_rtx, 0), Pmode,
2904 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
2905 TREE_UNSIGNED (sizetype)),
2906 TYPE_MODE (sizetype));
2907
2908 /* Return the value in the proper mode for this function. */
2909 mode = TYPE_MODE (TREE_TYPE (exp));
2910 if (GET_MODE (result) == mode)
2911 return result;
2912 else if (target != 0)
2913 {
2914 convert_move (target, result, 0);
2915 return target;
2916 }
2917 else
2918 return convert_to_mode (mode, result, 0);
2919 }
2920 #endif
2921
2922 return 0;
2923 }
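/* The len == 1 case above reduces the call to a single byte compare.
   Sketch of the source-level transformation (illustration only):

       int r1 = memcmp (a, b, 1);
       int r2 = *(const unsigned char *) a - *(const unsigned char *) b;

   r1 and r2 agree in sign, and memcmp's result is only specified up to
   its sign, because memcmp compares bytes as unsigned char; the casts
   in the tree built above are what make the subtraction match that
   rule.  */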
2924
2925 /* Expand expression EXP, which is a call to the strcmp builtin. Return 0
2926 if we failed; the caller should then emit a normal call. Otherwise try to get
2927 the result in TARGET, if convenient. */
2928
2929 static rtx
2930 expand_builtin_strcmp (exp, target, mode)
2931 tree exp;
2932 rtx target;
2933 enum machine_mode mode;
2934 {
2935 tree arglist = TREE_OPERAND (exp, 1);
2936 tree arg1, arg2;
2937 const char *p1, *p2;
2938
2939 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2940 return 0;
2941
2942 arg1 = TREE_VALUE (arglist);
2943 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
2944
2945 p1 = c_getstr (arg1);
2946 p2 = c_getstr (arg2);
2947
2948 if (p1 && p2)
2949 {
2950 const int i = strcmp (p1, p2);
2951 return (i < 0 ? constm1_rtx : (i > 0 ? const1_rtx : const0_rtx));
2952 }
2953
2954 /* If either arg is "", return an expression corresponding to
2955 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
2956 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
2957 {
2958 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2959 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
2960 tree ind1 =
2961 fold (build1 (CONVERT_EXPR, integer_type_node,
2962 build1 (INDIRECT_REF, cst_uchar_node,
2963 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
2964 tree ind2 =
2965 fold (build1 (CONVERT_EXPR, integer_type_node,
2966 build1 (INDIRECT_REF, cst_uchar_node,
2967 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
2968 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
2969 return expand_expr (result, target, mode, EXPAND_NORMAL);
2970 }
2971
2972 #ifdef HAVE_cmpstrsi
2973 if (HAVE_cmpstrsi)
2974 {
2975 tree len, len1, len2;
2976 rtx arg1_rtx, arg2_rtx, arg3_rtx;
2977 rtx result, insn;
2978
2979 int arg1_align
2980 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2981 int arg2_align
2982 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2983 enum machine_mode insn_mode
2984 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
2985
2986 len1 = c_strlen (arg1);
2987 len2 = c_strlen (arg2);
2988
2989 if (len1)
2990 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
2991 if (len2)
2992 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
2993
2994 /* If we don't have a constant length for the first, use the length
2995 of the second, if we know it. We don't require a constant for
2996 this case; some cost analysis could be done if both are available
2997 but neither is constant. For now, assume they're equally cheap,
2998 unless one has side effects. If both strings have constant lengths,
2999 use the smaller. */
3000
3001 if (!len1)
3002 len = len2;
3003 else if (!len2)
3004 len = len1;
3005 else if (TREE_SIDE_EFFECTS (len1))
3006 len = len2;
3007 else if (TREE_SIDE_EFFECTS (len2))
3008 len = len1;
3009 else if (TREE_CODE (len1) != INTEGER_CST)
3010 len = len2;
3011 else if (TREE_CODE (len2) != INTEGER_CST)
3012 len = len1;
3013 else if (tree_int_cst_lt (len1, len2))
3014 len = len1;
3015 else
3016 len = len2;
3017
3018 /* If both arguments have side effects, we cannot optimize. */
3019 if (!len || TREE_SIDE_EFFECTS (len))
3020 return 0;
3021
3022 /* If either argument is not a pointer type, call the function. */
3023 if (arg1_align == 0 || arg2_align == 0)
3024 return 0;
3025
3026 /* Make a place to write the result of the instruction. */
3027 result = target;
3028 if (! (result != 0
3029 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
3030 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3031 result = gen_reg_rtx (insn_mode);
3032
3033 arg1_rtx = get_memory_rtx (arg1);
3034 arg2_rtx = get_memory_rtx (arg2);
3035 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3036 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3037 GEN_INT (MIN (arg1_align, arg2_align)));
3038 if (!insn)
3039 return 0;
3040
3041 emit_insn (insn);
3042
3043 /* Return the value in the proper mode for this function. */
3044 mode = TYPE_MODE (TREE_TYPE (exp));
3045 if (GET_MODE (result) == mode)
3046 return result;
3047 if (target == 0)
3048 return convert_to_mode (mode, result, 0);
3049 convert_move (target, result, 0);
3050 return target;
3051 }
3052 #endif
3053 return 0;
3054 }
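/* Picking the smaller of the two known lengths above is safe because
   strcmp cannot look past the terminating NUL of the shorter string.
   A numeric sketch (illustration only):

       strcmp ("abc", "abcdef")
       => decided at index 3, where '\0' compares below 'd', so looking
          at strlen ("abc") + 1 == 4 bytes is already enough.

   That is why len1 and len2 are each strlen + 1 and the cmpstrsi
   pattern is handed the smaller of the two.  */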
3055
3056 /* Expand expression EXP, which is a call to the strncmp builtin. Return 0
3057 if we failed; the caller should then emit a normal call. Otherwise try to get
3058 the result in TARGET, if convenient. */
3059
3060 static rtx
3061 expand_builtin_strncmp (exp, target, mode)
3062 tree exp;
3063 rtx target;
3064 enum machine_mode mode;
3065 {
3066 tree arglist = TREE_OPERAND (exp, 1);
3067 tree arg1, arg2, arg3;
3068 const char *p1, *p2;
3069
3070 if (!validate_arglist (arglist,
3071 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3072 return 0;
3073
3074 arg1 = TREE_VALUE (arglist);
3075 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3076 arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3077
3078 /* If the len parameter is zero, return zero. */
3079 if (host_integerp (arg3, 1) && tree_low_cst (arg3, 1) == 0)
3080 {
3081 /* Evaluate and ignore arg1 and arg2 in case they have
3082 side-effects. */
3083 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
3084 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3085 return const0_rtx;
3086 }
3087
3088 p1 = c_getstr (arg1);
3089 p2 = c_getstr (arg2);
3090
3091 /* If all arguments are constant, evaluate at compile-time. */
3092 if (host_integerp (arg3, 1) && p1 && p2)
3093 {
3094 const int r = strncmp (p1, p2, tree_low_cst (arg3, 1));
3095 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
3096 }
3097
3098 /* If len == 1 or (either string parameter is "" and (len >= 1)),
3099 return (*(const u_char*)arg1 - *(const u_char*)arg2). */
3100 if (host_integerp (arg3, 1)
3101 && (tree_low_cst (arg3, 1) == 1
3102 || (tree_low_cst (arg3, 1) > 1
3103 && ((p1 && *p1 == '\0') || (p2 && *p2 == '\0')))))
3104 {
3105 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
3106 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
3107 tree ind1 =
3108 fold (build1 (CONVERT_EXPR, integer_type_node,
3109 build1 (INDIRECT_REF, cst_uchar_node,
3110 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
3111 tree ind2 =
3112 fold (build1 (CONVERT_EXPR, integer_type_node,
3113 build1 (INDIRECT_REF, cst_uchar_node,
3114 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
3115 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
3116 return expand_expr (result, target, mode, EXPAND_NORMAL);
3117 }
3118
3119 /* If c_strlen can determine an expression for one of the string
3120 lengths, and it doesn't have side effects, then emit cmpstrsi
3121 using length MIN(strlen(string)+1, arg3). */
3122 #ifdef HAVE_cmpstrsi
3123 if (HAVE_cmpstrsi)
3124 {
3125 tree len, len1, len2;
3126 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3127 rtx result, insn;
3128
3129 int arg1_align
3130 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3131 int arg2_align
3132 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3133 enum machine_mode insn_mode
3134 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3135
3136 len1 = c_strlen (arg1);
3137 len2 = c_strlen (arg2);
3138
3139 if (len1)
3140 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3141 if (len2)
3142 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3143
3144 /* If we don't have a constant length for the first, use the length
3145 of the second, if we know it. We don't require a constant for
3146 this case; some cost analysis could be done if both are available
3147 but neither is constant. For now, assume they're equally cheap,
3148 unless one has side effects. If both strings have constant lengths,
3149 use the smaller. */
3150
3151 if (!len1)
3152 len = len2;
3153 else if (!len2)
3154 len = len1;
3155 else if (TREE_SIDE_EFFECTS (len1))
3156 len = len2;
3157 else if (TREE_SIDE_EFFECTS (len2))
3158 len = len1;
3159 else if (TREE_CODE (len1) != INTEGER_CST)
3160 len = len2;
3161 else if (TREE_CODE (len2) != INTEGER_CST)
3162 len = len1;
3163 else if (tree_int_cst_lt (len1, len2))
3164 len = len1;
3165 else
3166 len = len2;
3167
3168 /* If both arguments have side effects, we cannot optimize. */
3169 if (!len || TREE_SIDE_EFFECTS (len))
3170 return 0;
3171
3172 /* The actual new length parameter is MIN(len,arg3). */
3173 len = fold (build (MIN_EXPR, TREE_TYPE (len), len, arg3));
3174
3175 /* If either argument is not a pointer type, call the function. */
3176 if (arg1_align == 0 || arg2_align == 0)
3177 return 0;
3178
3179 /* Make a place to write the result of the instruction. */
3180 result = target;
3181 if (! (result != 0
3182 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
3183 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3184 result = gen_reg_rtx (insn_mode);
3185
3186 arg1_rtx = get_memory_rtx (arg1);
3187 arg2_rtx = get_memory_rtx (arg2);
3188 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3189 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3190 GEN_INT (MIN (arg1_align, arg2_align)));
3191 if (!insn)
3192 return 0;
3193
3194 emit_insn (insn);
3195
3196 /* Return the value in the proper mode for this function. */
3197 mode = TYPE_MODE (TREE_TYPE (exp));
3198 if (GET_MODE (result) == mode)
3199 return result;
3200 if (target == 0)
3201 return convert_to_mode (mode, result, 0);
3202 convert_move (target, result, 0);
3203 return target;
3204 }
3205 #endif
3206 return 0;
3207 }
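/* Clamping the compare length to MIN (strlen (s) + 1, arg3) above is
   safe because strncmp never looks past a terminating NUL: once the NUL
   has been compared, later bytes cannot change the result.  Sketch of
   the property being used (illustration only):

       strncmp ("ab", "abcd", 100)     => decided at the NUL of "ab"
       strncmp ("ab", "abcd", 3)       => same comparisons, same
                                          (negative) result.  */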
3208
3209 /* Expand expression EXP, which is a call to the strcat builtin.
3210 Return 0 if we failed; the caller should then emit a normal call.
3211 Otherwise try to get the result in TARGET, if convenient. */
3212
3213 static rtx
3214 expand_builtin_strcat (arglist, target, mode)
3215 tree arglist;
3216 rtx target;
3217 enum machine_mode mode;
3218 {
3219 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3220 return 0;
3221 else
3222 {
3223 tree dst = TREE_VALUE (arglist),
3224 src = TREE_VALUE (TREE_CHAIN (arglist));
3225 const char *p = c_getstr (src);
3226
3227 /* If the source string length is zero, return the dst parameter. */
3228 if (p && *p == '\0')
3229 return expand_expr (dst, target, mode, EXPAND_NORMAL);
3230
3231 return 0;
3232 }
3233 }
3234
3235 /* Expand expression EXP, which is a call to the strncat builtin.
3236 Return 0 if we failed; the caller should then emit a normal call.
3237 Otherwise try to get the result in TARGET, if convenient. */
3238
3239 static rtx
3240 expand_builtin_strncat (arglist, target, mode)
3241 tree arglist;
3242 rtx target;
3243 enum machine_mode mode;
3244 {
3245 if (!validate_arglist (arglist,
3246 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3247 return 0;
3248 else
3249 {
3250 tree dst = TREE_VALUE (arglist),
3251 src = TREE_VALUE (TREE_CHAIN (arglist)),
3252 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3253 const char *p = c_getstr (src);
3254
3255 /* If the requested length is zero, or the src parameter string
3256 length is zero, return the dst parameter. */
3257 if (integer_zerop (len) || (p && *p == '\0'))
3258 {
3259 /* Evaluate and ignore the src and len parameters in case
3260 they have side-effects. */
3261 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3262 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
3263 return expand_expr (dst, target, mode, EXPAND_NORMAL);
3264 }
3265
3266 /* If the requested len is greater than or equal to the string
3267 length, call strcat. */
3268 if (TREE_CODE (len) == INTEGER_CST && p
3269 && compare_tree_int (len, strlen (p)) >= 0)
3270 {
3271 tree newarglist
3272 = tree_cons (NULL_TREE, dst, build_tree_list (NULL_TREE, src));
3273 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
3274
3275 /* If the replacement _DECL isn't initialized, don't do the
3276 transformation. */
3277 if (!fn)
3278 return 0;
3279
3280 return expand_expr (build_function_call_expr (fn, newarglist),
3281 target, mode, EXPAND_NORMAL);
3282 }
3283 return 0;
3284 }
3285 }
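/* The strcat rewrite above is valid because, once the limit is at least
   strlen (src), strncat copies exactly the same characters strcat would;
   both always write the terminating NUL.  Sketch (standard libc
   semantics, illustration only):

       char dst[16] = "ab";
       strncat (dst, "cd", 8);      => limit 8 >= strlen ("cd") == 2, so
                                       this behaves exactly like
                                       strcat (dst, "cd"); dst == "abcd"

   Hence the check uses >= strlen (p) rather than strlen (p) + 1.  */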
3286
3287 /* Expand expression EXP, which is a call to the strspn builtin.
3288 Return 0 if we failed; the caller should then emit a normal call.
3289 Otherwise try to get the result in TARGET, if convenient. */
3290
3291 static rtx
3292 expand_builtin_strspn (arglist, target, mode)
3293 tree arglist;
3294 rtx target;
3295 enum machine_mode mode;
3296 {
3297 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3298 return 0;
3299 else
3300 {
3301 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
3302 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
3303
3304 /* If both arguments are constants, evaluate at compile-time. */
3305 if (p1 && p2)
3306 {
3307 const size_t r = strspn (p1, p2);
3308 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
3309 }
3310
3311 /* If either argument is "", return 0. */
3312 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
3313 {
3314 /* Evaluate and ignore both arguments in case either one has
3315 side-effects. */
3316 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
3317 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3318 return const0_rtx;
3319 }
3320 return 0;
3321 }
3322 }
3323
3324 /* Expand expression EXP, which is a call to the strcspn builtin.
3325 Return 0 if we failed; the caller should then emit a normal call.
3326 Otherwise try to get the result in TARGET, if convenient. */
3327
3328 static rtx
3329 expand_builtin_strcspn (arglist, target, mode)
3330 tree arglist;
3331 rtx target;
3332 enum machine_mode mode;
3333 {
3334 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3335 return 0;
3336 else
3337 {
3338 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
3339 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
3340
3341 /* If both arguments are constants, evaluate at compile-time. */
3342 if (p1 && p2)
3343 {
3344 const size_t r = strcspn (p1, p2);
3345 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
3346 }
3347
3348 /* If the first argument is "", return 0. */
3349 if (p1 && *p1 == '\0')
3350 {
3351 /* Evaluate and ignore argument s2 in case it has
3352 side-effects. */
3353 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3354 return const0_rtx;
3355 }
3356
3357 /* If the second argument is "", return __builtin_strlen(s1). */
3358 if (p2 && *p2 == '\0')
3359 {
3360 tree newarglist = build_tree_list (NULL_TREE, s1),
3361 fn = implicit_built_in_decls[BUILT_IN_STRLEN];
3362
3363 /* If the replacement _DECL isn't initialized, don't do the
3364 transformation. */
3365 if (!fn)
3366 return 0;
3367
3368 return expand_expr (build_function_call_expr (fn, newarglist),
3369 target, mode, EXPAND_NORMAL);
3370 }
3371 return 0;
3372 }
3373 }
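/* The final rewrite above uses the identity strcspn (s1, "") ==
   strlen (s1): with an empty reject set, the initial segment free of
   reject characters is the whole string.  Sketch (illustration only):

       strcspn ("hello", "")      => 5
       strlen  ("hello")          => 5

   Conversely, strspn with either argument empty is 0, which is what
   expand_builtin_strspn folds to above.  */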
3374
3375 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
3376 if that's convenient. */
3377
3378 rtx
3379 expand_builtin_saveregs ()
3380 {
3381 rtx val, seq;
3382
3383 /* Don't do __builtin_saveregs more than once in a function.
3384 Save the result of the first call and reuse it. */
3385 if (saveregs_value != 0)
3386 return saveregs_value;
3387
3388 /* When this function is called, it means that registers must be
3389 saved on entry to this function. So we migrate the call to the
3390 first insn of this function. */
3391
3392 start_sequence ();
3393
3394 #ifdef EXPAND_BUILTIN_SAVEREGS
3395 /* Do whatever the machine needs done in this case. */
3396 val = EXPAND_BUILTIN_SAVEREGS ();
3397 #else
3398 /* ??? We used to try and build up a call to the out of line function,
3399 guessing about what registers needed saving etc. This became much
3400 harder with __builtin_va_start, since we don't have a tree for a
3401 call to __builtin_saveregs to fall back on. There was exactly one
3402 port (i860) that used this code, and I'm unconvinced it could actually
3403 handle the general case. So we no longer try to handle anything
3404 weird and make the backend absorb the evil. */
3405
3406 error ("__builtin_saveregs not supported by this target");
3407 val = const0_rtx;
3408 #endif
3409
3410 seq = get_insns ();
3411 end_sequence ();
3412
3413 saveregs_value = val;
3414
3415 /* Put the insns after the NOTE that starts the function. If this
3416 is inside a start_sequence, make the outer-level insn chain current, so
3417 the code is placed at the start of the function. */
3418 push_topmost_sequence ();
3419 emit_insn_after (seq, get_insns ());
3420 pop_topmost_sequence ();
3421
3422 return val;
3423 }
3424
3425 /* __builtin_args_info (N) returns word N of the arg space info
3426 for the current function. The number and meanings of words
3427 are controlled by the definition of CUMULATIVE_ARGS. */
3428
3429 static rtx
3430 expand_builtin_args_info (exp)
3431 tree exp;
3432 {
3433 tree arglist = TREE_OPERAND (exp, 1);
3434 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
3435 int *word_ptr = (int *) &current_function_args_info;
3436
3437 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
3438 abort ();
3439
3440 if (arglist != 0)
3441 {
3442 if (!host_integerp (TREE_VALUE (arglist), 0))
3443 error ("argument of `__builtin_args_info' must be constant");
3444 else
3445 {
3446 HOST_WIDE_INT wordnum = tree_low_cst (TREE_VALUE (arglist), 0);
3447
3448 if (wordnum < 0 || wordnum >= nwords)
3449 error ("argument of `__builtin_args_info' out of range");
3450 else
3451 return GEN_INT (word_ptr[wordnum]);
3452 }
3453 }
3454 else
3455 error ("missing argument in `__builtin_args_info'");
3456
3457 return const0_rtx;
3458 }
3459
3460 /* Expand ARGLIST, from a call to __builtin_next_arg. */
3461
3462 static rtx
3463 expand_builtin_next_arg (arglist)
3464 tree arglist;
3465 {
3466 tree fntype = TREE_TYPE (current_function_decl);
3467
3468 if (TYPE_ARG_TYPES (fntype) == 0
3469 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3470 == void_type_node))
3471 {
3472 error ("`va_start' used in function with fixed args");
3473 return const0_rtx;
3474 }
3475
3476 if (arglist)
3477 {
3478 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
3479 tree arg = TREE_VALUE (arglist);
3480
3481 /* Strip off all nops for the sake of the comparison. This
3482 is not quite the same as STRIP_NOPS. It does more.
3483 We must also strip off INDIRECT_REF for C++ reference
3484 parameters. */
3485 while (TREE_CODE (arg) == NOP_EXPR
3486 || TREE_CODE (arg) == CONVERT_EXPR
3487 || TREE_CODE (arg) == NON_LVALUE_EXPR
3488 || TREE_CODE (arg) == INDIRECT_REF)
3489 arg = TREE_OPERAND (arg, 0);
3490 if (arg != last_parm)
3491 warning ("second parameter of `va_start' not last named argument");
3492 }
3493 else
3494 /* Evidently an out of date version of <stdarg.h>; can't validate
3495 va_start's second argument, but can still work as intended. */
3496 warning ("`__builtin_next_arg' called without an argument");
3497
3498 return expand_binop (Pmode, add_optab,
3499 current_function_internal_arg_pointer,
3500 current_function_arg_offset_rtx,
3501 NULL_RTX, 0, OPTAB_LIB_WIDEN);
3502 }
3503
3504 /* Make it easier for the backends by protecting the valist argument
3505 from multiple evaluations. */
3506
3507 static tree
3508 stabilize_va_list (valist, needs_lvalue)
3509 tree valist;
3510 int needs_lvalue;
3511 {
3512 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
3513 {
3514 if (TREE_SIDE_EFFECTS (valist))
3515 valist = save_expr (valist);
3516
3517 /* For this case, the backends will be expecting a pointer to
3518 TREE_TYPE (va_list_type_node), but it's possible we've
3519 actually been given an array (an actual va_list_type_node).
3520 So fix it. */
3521 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
3522 {
3523 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
3524 tree p2 = build_pointer_type (va_list_type_node);
3525
3526 valist = build1 (ADDR_EXPR, p2, valist);
3527 valist = fold (build1 (NOP_EXPR, p1, valist));
3528 }
3529 }
3530 else
3531 {
3532 tree pt;
3533
3534 if (! needs_lvalue)
3535 {
3536 if (! TREE_SIDE_EFFECTS (valist))
3537 return valist;
3538
3539 pt = build_pointer_type (va_list_type_node);
3540 valist = fold (build1 (ADDR_EXPR, pt, valist));
3541 TREE_SIDE_EFFECTS (valist) = 1;
3542 }
3543
3544 if (TREE_SIDE_EFFECTS (valist))
3545 valist = save_expr (valist);
3546 valist = fold (build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)),
3547 valist));
3548 }
3549
3550 return valist;
3551 }
3552
3553 /* The "standard" implementation of va_start: just assign `nextarg' to
3554 the variable. */
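/* That is, for `int f (int count, ...)', `va_start (ap, count)' simply
   becomes `ap = NEXTARG', where NEXTARG is the address of the first
   anonymous argument computed by expand_builtin_next_arg.  */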
3555
3556 void
3557 std_expand_builtin_va_start (valist, nextarg)
3558 tree valist;
3559 rtx nextarg;
3560 {
3561 tree t;
3562
3563 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3564 make_tree (ptr_type_node, nextarg));
3565 TREE_SIDE_EFFECTS (t) = 1;
3566
3567 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3568 }
3569
3570 /* Expand ARGLIST, from a call to __builtin_va_start. */
3571
3572 static rtx
3573 expand_builtin_va_start (arglist)
3574 tree arglist;
3575 {
3576 rtx nextarg;
3577 tree chain, valist;
3578
3579 chain = TREE_CHAIN (arglist);
3580
3581 if (TREE_CHAIN (chain))
3582 error ("too many arguments to function `va_start'");
3583
3584 nextarg = expand_builtin_next_arg (chain);
3585 valist = stabilize_va_list (TREE_VALUE (arglist), 1);
3586
3587 #ifdef EXPAND_BUILTIN_VA_START
3588 EXPAND_BUILTIN_VA_START (valist, nextarg);
3589 #else
3590 std_expand_builtin_va_start (valist, nextarg);
3591 #endif
3592
3593 return const0_rtx;
3594 }
3595
3596 /* The "standard" implementation of va_arg: read the value from the
3597 current (padded) address and increment by the (padded) size. */
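/* The padded size is the type size rounded up to PARM_BOUNDARY bytes:

     rounded_size = ((type_size + align - 1) / align) * align

   where align = PARM_BOUNDARY / BITS_PER_UNIT, so e.g. a 1-byte
   argument on a target with a 32-bit PARM_BOUNDARY consumes a full
   4-byte slot and AP advances by 4.  */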
3598
3599 rtx
3600 std_expand_builtin_va_arg (valist, type)
3601 tree valist, type;
3602 {
3603 tree addr_tree, t, type_size = NULL;
3604 tree align, alignm1;
3605 tree rounded_size;
3606 rtx addr;
3607
3608 /* Compute the rounded size of the type. */
3609 align = size_int (PARM_BOUNDARY / BITS_PER_UNIT);
3610 alignm1 = size_int (PARM_BOUNDARY / BITS_PER_UNIT - 1);
3611 if (type == error_mark_node
3612 || (type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type))) == NULL
3613 || TREE_OVERFLOW (type_size))
3614 rounded_size = size_zero_node;
3615 else
3616 rounded_size = fold (build (MULT_EXPR, sizetype,
3617 fold (build (TRUNC_DIV_EXPR, sizetype,
3618 fold (build (PLUS_EXPR, sizetype,
3619 type_size, alignm1)),
3620 align)),
3621 align));
3622
3623 /* Get AP. */
3624 addr_tree = valist;
3625 if (PAD_VARARGS_DOWN && ! integer_zerop (rounded_size))
3626 {
3627 /* Small args are padded downward. */
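/* E.g. a 1-byte argument in a 4-byte slot is read from offset
   rounded_size - type_size = 3, while arguments larger than one slot
   (rounded_size > align) start at offset 0.  */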
3628 addr_tree = fold (build (PLUS_EXPR, TREE_TYPE (addr_tree), addr_tree,
3629 fold (build (COND_EXPR, sizetype,
3630 fold (build (GT_EXPR, sizetype,
3631 rounded_size,
3632 align)),
3633 size_zero_node,
3634 fold (build (MINUS_EXPR, sizetype,
3635 rounded_size,
3636 type_size))))));
3637 }
3638
3639 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
3640 addr = copy_to_reg (addr);
3641
3642 /* Compute new value for AP. */
3643 if (! integer_zerop (rounded_size))
3644 {
3645 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3646 build (PLUS_EXPR, TREE_TYPE (valist), valist,
3647 rounded_size));
3648 TREE_SIDE_EFFECTS (t) = 1;
3649 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3650 }
3651
3652 return addr;
3653 }
3654
3655 /* Expand __builtin_va_arg, which is not really a builtin function, but
3656 a very special sort of operator. */
3657
3658 rtx
3659 expand_builtin_va_arg (valist, type)
3660 tree valist, type;
3661 {
3662 rtx addr, result;
3663 tree promoted_type, want_va_type, have_va_type;
3664
3665 /* Verify that valist is of the proper type. */
3666
3667 want_va_type = va_list_type_node;
3668 have_va_type = TREE_TYPE (valist);
3669 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
3670 {
3671 /* If va_list is an array type, the argument may have decayed
3672 to a pointer type, e.g. by being passed to another function.
3673 In that case, unwrap both types so that we can compare the
3674 underlying records. */
3675 if (TREE_CODE (have_va_type) == ARRAY_TYPE
3676 || TREE_CODE (have_va_type) == POINTER_TYPE)
3677 {
3678 want_va_type = TREE_TYPE (want_va_type);
3679 have_va_type = TREE_TYPE (have_va_type);
3680 }
3681 }
3682 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
3683 {
3684 error ("first argument to `va_arg' not of type `va_list'");
3685 addr = const0_rtx;
3686 }
3687
3688 /* Generate a diagnostic for requesting data of a type that cannot
3689 be passed through `...' due to type promotion at the call site. */
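/* E.g. `va_arg (ap, char)' is diagnosed: a char argument is promoted to
   int when passed through `...', so it must be retrieved with
   `va_arg (ap, int)' instead.  */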
3690 else if ((promoted_type = (*lang_hooks.types.type_promotes_to) (type))
3691 != type)
3692 {
3693 const char *name = "<anonymous type>", *pname = 0;
3694 static bool gave_help;
3695
3696 if (TYPE_NAME (type))
3697 {
3698 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE)
3699 name = IDENTIFIER_POINTER (TYPE_NAME (type));
3700 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
3701 && DECL_NAME (TYPE_NAME (type)))
3702 name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
3703 }
3704 if (TYPE_NAME (promoted_type))
3705 {
3706 if (TREE_CODE (TYPE_NAME (promoted_type)) == IDENTIFIER_NODE)
3707 pname = IDENTIFIER_POINTER (TYPE_NAME (promoted_type));
3708 else if (TREE_CODE (TYPE_NAME (promoted_type)) == TYPE_DECL
3709 && DECL_NAME (TYPE_NAME (promoted_type)))
3710 pname = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (promoted_type)));
3711 }
3712
3713 /* Unfortunately, this is merely undefined, rather than a constraint
3714 violation, so we cannot make this an error. If this call is never
3715 executed, the program is still strictly conforming. */
3716 warning ("`%s' is promoted to `%s' when passed through `...'",
3717 name, pname);
3718 if (! gave_help)
3719 {
3720 gave_help = true;
3721 warning ("(so you should pass `%s' not `%s' to `va_arg')",
3722 pname, name);
3723 }
3724
3725 /* We can, however, treat "undefined" any way we please.
3726 Call abort to encourage the user to fix the program. */
3727 expand_builtin_trap ();
3728
3729 /* This is dead code, but go ahead and finish so that the
3730 mode of the result comes out right. */
3731 addr = const0_rtx;
3732 }
3733 else
3734 {
3735 /* Make it easier for the backends by protecting the valist argument
3736 from multiple evaluations. */
3737 valist = stabilize_va_list (valist, 0);
3738
3739 #ifdef EXPAND_BUILTIN_VA_ARG
3740 addr = EXPAND_BUILTIN_VA_ARG (valist, type);
3741 #else
3742 addr = std_expand_builtin_va_arg (valist, type);
3743 #endif
3744 }
3745
3746 #ifdef POINTERS_EXTEND_UNSIGNED
3747 if (GET_MODE (addr) != Pmode)
3748 addr = convert_memory_address (Pmode, addr);
3749 #endif
3750
3751 result = gen_rtx_MEM (TYPE_MODE (type), addr);
3752 set_mem_alias_set (result, get_varargs_alias_set ());
3753
3754 return result;
3755 }
3756
3757 /* Expand ARGLIST, from a call to __builtin_va_end. */
3758
3759 static rtx
3760 expand_builtin_va_end (arglist)
3761 tree arglist;
3762 {
3763 tree valist = TREE_VALUE (arglist);
3764
3765 #ifdef EXPAND_BUILTIN_VA_END
3766 valist = stabilize_va_list (valist, 0);
3767 EXPAND_BUILTIN_VA_END (arglist);
3768 #else
3769 /* Evaluate for side effects, if needed. I hate macros that don't
3770 do that. */
3771 if (TREE_SIDE_EFFECTS (valist))
3772 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
3773 #endif
3774
3775 return const0_rtx;
3776 }
3777
3778 /* Expand ARGLIST, from a call to __builtin_va_copy. We do this as a
3779 builtin rather than just as an assignment in stdarg.h because of the
3780 nastiness of array-type va_list types. */
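/* When va_list is an array type, the source operand of `va_copy (dst, src)'
   decays to a pointer, so a plain assignment in <stdarg.h> would copy a
   pointer rather than the va_list contents; for that case we emit a block
   copy of the underlying object instead.  */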
3781
3782 static rtx
3783 expand_builtin_va_copy (arglist)
3784 tree arglist;
3785 {
3786 tree dst, src, t;
3787
3788 dst = TREE_VALUE (arglist);
3789 src = TREE_VALUE (TREE_CHAIN (arglist));
3790
3791 dst = stabilize_va_list (dst, 1);
3792 src = stabilize_va_list (src, 0);
3793
3794 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
3795 {
3796 t = build (MODIFY_EXPR, va_list_type_node, dst, src);
3797 TREE_SIDE_EFFECTS (t) = 1;
3798 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3799 }
3800 else
3801 {
3802 rtx dstb, srcb, size;
3803
3804 /* Evaluate to pointers. */
3805 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
3806 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
3807 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
3808 VOIDmode, EXPAND_NORMAL);
3809
3810 #ifdef POINTERS_EXTEND_UNSIGNED
3811 if (GET_MODE (dstb) != Pmode)
3812 dstb = convert_memory_address (Pmode, dstb);
3813
3814 if (GET_MODE (srcb) != Pmode)
3815 srcb = convert_memory_address (Pmode, srcb);
3816 #endif
3817
3818 /* "Dereference" to BLKmode memories. */
3819 dstb = gen_rtx_MEM (BLKmode, dstb);
3820 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
3821 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
3822 srcb = gen_rtx_MEM (BLKmode, srcb);
3823 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
3824 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
3825
3826 /* Copy. */
3827 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
3828 }
3829
3830 return const0_rtx;
3831 }
3832
3833 /* Expand a call to one of the builtin functions __builtin_frame_address or
3834 __builtin_return_address. */
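/* For example, `__builtin_return_address (0)' yields the return address
   of the current function and `__builtin_frame_address (1)' the frame
   address of its caller, when the target supports walking that many
   frames; otherwise a warning is given and 0 is returned.  */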
3835
3836 static rtx
3837 expand_builtin_frame_address (exp)
3838 tree exp;
3839 {
3840 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3841 tree arglist = TREE_OPERAND (exp, 1);
3842
3843 /* The argument must be a nonnegative integer constant.
3844 It counts the number of frames to scan up the stack.
3845 The value is the return address (or frame address) saved in that frame. */
3846 if (arglist == 0)
3847 /* Warning about missing arg was already issued. */
3848 return const0_rtx;
3849 else if (! host_integerp (TREE_VALUE (arglist), 1))
3850 {
3851 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3852 error ("invalid arg to `__builtin_frame_address'");
3853 else
3854 error ("invalid arg to `__builtin_return_address'");
3855 return const0_rtx;
3856 }
3857 else
3858 {
3859 rtx tem
3860 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
3861 tree_low_cst (TREE_VALUE (arglist), 1),
3862 hard_frame_pointer_rtx);
3863
3864 /* Some ports cannot access arbitrary stack frames. */
3865 if (tem == NULL)
3866 {
3867 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3868 warning ("unsupported arg to `__builtin_frame_address'");
3869 else
3870 warning ("unsupported arg to `__builtin_return_address'");
3871 return const0_rtx;
3872 }
3873
3874 /* For __builtin_frame_address, return what we've got. */
3875 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3876 return tem;
3877
3878 if (GET_CODE (tem) != REG
3879 && ! CONSTANT_P (tem))
3880 tem = copy_to_mode_reg (Pmode, tem);
3881 return tem;
3882 }
3883 }
3884
3885 /* Expand a call to the alloca builtin, with arguments ARGLIST. Return 0 if
3886 we failed and the caller should emit a normal call, otherwise try to get
3887 the result in TARGET, if convenient. */
3888
3889 static rtx
3890 expand_builtin_alloca (arglist, target)
3891 tree arglist;
3892 rtx target;
3893 {
3894 rtx op0;
3895 rtx result;
3896
3897 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
3898 return 0;
3899
3900 /* Compute the argument. */
3901 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
3902
3903 /* Allocate the desired space. */
3904 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
3905
3906 #ifdef POINTERS_EXTEND_UNSIGNED
3907 if (GET_MODE (result) != ptr_mode)
3908 result = convert_memory_address (ptr_mode, result);
3909 #endif
3910
3911 return result;
3912 }
3913
3914 /* Expand a call to a unary builtin. The arguments are in ARGLIST.
3915 Return 0 if a normal call should be emitted rather than expanding the
3916 function in-line. If convenient, the result should be placed in TARGET.
3917 SUBTARGET may be used as the target for computing one of EXP's operands. */
3918
3919 static rtx
3920 expand_builtin_unop (target_mode, arglist, target, subtarget, op_optab)
3921 enum machine_mode target_mode;
3922 tree arglist;
3923 rtx target, subtarget;
3924 optab op_optab;
3925 {
3926 rtx op0;
3927 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
3928 return 0;
3929
3930 /* Compute the argument. */
3931 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
3932 /* Compute op, into TARGET if possible.
3933 Set TARGET to wherever the result comes back. */
3934 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
3935 op_optab, op0, target, 1);
3936 if (target == 0)
3937 abort ();
3938
3939 return convert_to_mode (target_mode, target, 0);
3940 }
3941
3942 /* If the string passed to fputs is a constant, we transform the call into
3943 fputc (one character), fwrite (longer string), or delete it (empty string). */
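/* For instance, `fputs ("h", f)' becomes `fputc ('h', f)' and
   `fputs ("hello", f)' becomes `fwrite ("hello", 1, 5, f)', provided the
   return value of the fputs call is not used.  */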
3944
3945 static rtx
3946 expand_builtin_fputs (arglist, ignore, unlocked)
3947 tree arglist;
3948 int ignore;
3949 int unlocked;
3950 {
3951 tree len, fn;
3952 tree fn_fputc = unlocked ? implicit_built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
3953 : implicit_built_in_decls[BUILT_IN_FPUTC];
3954 tree fn_fwrite = unlocked ? implicit_built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
3955 : implicit_built_in_decls[BUILT_IN_FWRITE];
3956
3957 /* If the return value is used, or the replacement _DECL isn't
3958 initialized, don't do the transformation. */
3959 if (!ignore || !fn_fputc || !fn_fwrite)
3960 return 0;
3961
3962 /* Verify the arguments in the original call. */
3963 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3964 return 0;
3965
3966 /* Get the length of the string passed to fputs. If the length
3967 can't be determined, punt. */
3968 if (!(len = c_strlen (TREE_VALUE (arglist)))
3969 || TREE_CODE (len) != INTEGER_CST)
3970 return 0;
3971
3972 switch (compare_tree_int (len, 1))
3973 {
3974 case -1: /* length is 0, delete the call entirely. */
3975 {
3976 /* Evaluate and ignore the argument in case it has
3977 side-effects. */
3978 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
3979 VOIDmode, EXPAND_NORMAL);
3980 return const0_rtx;
3981 }
3982 case 0: /* length is 1, call fputc. */
3983 {
3984 const char *p = c_getstr (TREE_VALUE (arglist));
3985
3986 if (p != NULL)
3987 {
3988 /* New argument list transforming fputs(string, stream) to
3989 fputc(string[0], stream). */
3990 arglist =
3991 build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
3992 arglist =
3993 tree_cons (NULL_TREE, build_int_2 (p[0], 0), arglist);
3994 fn = fn_fputc;
3995 break;
3996 }
3997 }
3998 /* FALLTHROUGH */
3999 case 1: /* length is greater than 1, call fwrite. */
4000 {
4001 tree string_arg;
4002
4003 /* If optimizing for size, keep fputs. */
4004 if (optimize_size)
4005 return 0;
4006 string_arg = TREE_VALUE (arglist);
4007 /* New argument list transforming fputs(string, stream) to
4008 fwrite(string, 1, len, stream). */
4009 arglist = build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
4010 arglist = tree_cons (NULL_TREE, len, arglist);
4011 arglist = tree_cons (NULL_TREE, size_one_node, arglist);
4012 arglist = tree_cons (NULL_TREE, string_arg, arglist);
4013 fn = fn_fwrite;
4014 break;
4015 }
4016 default:
4017 abort ();
4018 }
4019
4020 return expand_expr (build_function_call_expr (fn, arglist),
4021 (ignore ? const0_rtx : NULL_RTX),
4022 VOIDmode, EXPAND_NORMAL);
4023 }
4024
4025 /* Expand a call to __builtin_expect. We return our argument and emit a
4026 NOTE_INSN_EXPECTED_VALUE note. This is the expansion of __builtin_expect in
4027 a non-jump context. */
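/* For example, in `x = __builtin_expect (n, 0);' the result is simply n,
   and the note records that n is expected to equal 0 so that later passes
   can bias branch predictions accordingly.  */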
4028
4029 static rtx
4030 expand_builtin_expect (arglist, target)
4031 tree arglist;
4032 rtx target;
4033 {
4034 tree exp, c;
4035 rtx note, rtx_c;
4036
4037 if (arglist == NULL_TREE
4038 || TREE_CHAIN (arglist) == NULL_TREE)
4039 return const0_rtx;
4040 exp = TREE_VALUE (arglist);
4041 c = TREE_VALUE (TREE_CHAIN (arglist));
4042
4043 if (TREE_CODE (c) != INTEGER_CST)
4044 {
4045 error ("second arg to `__builtin_expect' must be a constant");
4046 c = integer_zero_node;
4047 }
4048
4049 target = expand_expr (exp, target, VOIDmode, EXPAND_NORMAL);
4050
4051 /* Don't bother with expected value notes for integral constants. */
4052 if (flag_guess_branch_prob && GET_CODE (target) != CONST_INT)
4053 {
4054 /* We do need to force this into a register so that we can be
4055 moderately sure to be able to correctly interpret the branch
4056 condition later. */
4057 target = force_reg (GET_MODE (target), target);
4058
4059 rtx_c = expand_expr (c, NULL_RTX, GET_MODE (target), EXPAND_NORMAL);
4060
4061 note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
4062 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, target, rtx_c);
4063 }
4064
4065 return target;
4066 }
4067
4068 /* Like expand_builtin_expect, except do this in a jump context. This is
4069 called from do_jump if the conditional is a __builtin_expect. Return either
4070 a list of insns to emit the jump or NULL if we cannot optimize
4071 __builtin_expect. We need to optimize this at jump time so that machines
4072 like the PowerPC don't turn the test into a SCC operation, and then jump
4073 based on the test being 0/1. */
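/* E.g. for `if (__builtin_expect (a > b, 1))' we emit the conditional
   jump for `a > b' directly and mark it with PRED_BUILTIN_EXPECT as
   likely taken, rather than first materializing the comparison result
   in a register and branching on that.  */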
4074
4075 rtx
4076 expand_builtin_expect_jump (exp, if_false_label, if_true_label)
4077 tree exp;
4078 rtx if_false_label;
4079 rtx if_true_label;
4080 {
4081 tree arglist = TREE_OPERAND (exp, 1);
4082 tree arg0 = TREE_VALUE (arglist);
4083 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4084 rtx ret = NULL_RTX;
4085
4086 /* Only handle __builtin_expect (test, 0) and
4087 __builtin_expect (test, 1). */
4088 if (TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
4089 && (integer_zerop (arg1) || integer_onep (arg1)))
4090 {
4091 int num_jumps = 0;
4092 rtx insn;
4093
4094 /* If we fail to locate an appropriate conditional jump, we'll
4095 fall back to normal evaluation. Ensure that the expression
4096 can be re-evaluated. */
4097 switch (unsafe_for_reeval (arg0))
4098 {
4099 case 0: /* Safe. */
4100 break;
4101
4102 case 1: /* Mildly unsafe. */
4103 arg0 = unsave_expr (arg0);
4104 break;
4105
4106 case 2: /* Wildly unsafe. */
4107 return NULL_RTX;
4108 }
4109
4110 /* Expand the jump insns. */
4111 start_sequence ();
4112 do_jump (arg0, if_false_label, if_true_label);
4113 ret = get_insns ();
4114 end_sequence ();
4115
4116 /* Now that the __builtin_expect has been validated, go through and add
4117 the expected-value predictions to each of the conditional jumps. If we
4118 run into an insn we cannot handle, just give up and generate the 'safe'
4119 code of doing an SCC operation and then branching on that. */
4120 insn = ret;
4121 while (insn != NULL_RTX)
4122 {
4123 rtx next = NEXT_INSN (insn);
4124 rtx pattern;
4125
4126 if (GET_CODE (insn) == JUMP_INSN && any_condjump_p (insn)
4127 && (pattern = pc_set (insn)) != NULL_RTX)
4128 {
4129 rtx ifelse = SET_SRC (pattern);
4130 rtx label;
4131 int taken;
4132
4133 if (GET_CODE (ifelse) != IF_THEN_ELSE)
4134 goto do_next_insn;
4135
4136 if (GET_CODE (XEXP (ifelse, 1)) == LABEL_REF)
4137 {
4138 taken = 1;
4139 label = XEXP (XEXP (ifelse, 1), 0);
4140 }
4141 /* An inverted jump reverses the probabilities. */
4142 else if (GET_CODE (XEXP (ifelse, 2)) == LABEL_REF)
4143 {
4144 taken = 0;
4145 label = XEXP (XEXP (ifelse, 2), 0);
4146 }
4147 /* We shouldn't have to worry about conditional returns during
4148 the expansion stage, but handle it gracefully anyway. */
4149 else if (GET_CODE (XEXP (ifelse, 1)) == RETURN)
4150 {
4151 taken = 1;
4152 label = NULL_RTX;
4153 }
4154 /* An inverted return reverses the probabilities. */
4155 else if (GET_CODE (XEXP (ifelse, 2)) == RETURN)
4156 {
4157 taken = 0;
4158 label = NULL_RTX;
4159 }
4160 else
4161 goto do_next_insn;
4162
4163 /* If the test is expected to fail, reverse the
4164 probabilities. */
4165 if (integer_zerop (arg1))
4166 taken = 1 - taken;
4167
4168 /* If we are jumping to the false label, reverse the
4169 probabilities. */
4170 if (label == NULL_RTX)
4171 ; /* conditional return */
4172 else if (label == if_false_label)
4173 taken = 1 - taken;
4174 else if (label != if_true_label)
4175 goto do_next_insn;
4176
4177 num_jumps++;
4178 predict_insn_def (insn, PRED_BUILTIN_EXPECT, taken);
4179 }
4180
4181 do_next_insn:
4182 insn = next;
4183 }
4184
4185 /* If no jumps were modified, fail and do __builtin_expect the normal
4186 way. */
4187 if (num_jumps == 0)
4188 ret = NULL_RTX;
4189 }
4190
4191 return ret;
4192 }
4193
4194 void
4195 expand_builtin_trap ()
4196 {
4197 #ifdef HAVE_trap
4198 if (HAVE_trap)
4199 emit_insn (gen_trap ());
4200 else
4201 #endif
4202 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4203 emit_barrier ();
4204 }
4205 \f
4206 /* Expand an expression EXP that calls a built-in function,
4207 with result going to TARGET if that's convenient
4208 (and in mode MODE if that's convenient).
4209 SUBTARGET may be used as the target for computing one of EXP's operands.
4210 IGNORE is nonzero if the value is to be ignored. */
4211
4212 rtx
4213 expand_builtin (exp, target, subtarget, mode, ignore)
4214 tree exp;
4215 rtx target;
4216 rtx subtarget;
4217 enum machine_mode mode;
4218 int ignore;
4219 {
4220 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4221 tree arglist = TREE_OPERAND (exp, 1);
4222 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4223 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
4224
4225 /* Perform postincrements before expanding builtin functions. */
4226 emit_queue ();
4227
4228 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
4229 return (*targetm.expand_builtin) (exp, target, subtarget, mode, ignore);
4230
4231 /* When not optimizing, generate calls to library functions for a certain
4232 set of builtins. */
4233 if (!optimize && !CALLED_AS_BUILT_IN (fndecl))
4234 switch (fcode)
4235 {
4236 case BUILT_IN_SQRT:
4237 case BUILT_IN_SQRTF:
4238 case BUILT_IN_SQRTL:
4239 case BUILT_IN_SIN:
4240 case BUILT_IN_SINF:
4241 case BUILT_IN_SINL:
4242 case BUILT_IN_COS:
4243 case BUILT_IN_COSF:
4244 case BUILT_IN_COSL:
4245 case BUILT_IN_EXP:
4246 case BUILT_IN_EXPF:
4247 case BUILT_IN_EXPL:
4248 case BUILT_IN_LOG:
4249 case BUILT_IN_LOGF:
4250 case BUILT_IN_LOGL:
4251 case BUILT_IN_POW:
4252 case BUILT_IN_POWF:
4253 case BUILT_IN_POWL:
4254 case BUILT_IN_ATAN2:
4255 case BUILT_IN_ATAN2F:
4256 case BUILT_IN_ATAN2L:
4257 case BUILT_IN_MEMSET:
4258 case BUILT_IN_MEMCPY:
4259 case BUILT_IN_MEMCMP:
4260 case BUILT_IN_MEMPCPY:
4261 case BUILT_IN_MEMMOVE:
4262 case BUILT_IN_BCMP:
4263 case BUILT_IN_BZERO:
4264 case BUILT_IN_BCOPY:
4265 case BUILT_IN_INDEX:
4266 case BUILT_IN_RINDEX:
4267 case BUILT_IN_STPCPY:
4268 case BUILT_IN_STRCHR:
4269 case BUILT_IN_STRRCHR:
4270 case BUILT_IN_STRLEN:
4271 case BUILT_IN_STRCPY:
4272 case BUILT_IN_STRNCPY:
4273 case BUILT_IN_STRNCMP:
4274 case BUILT_IN_STRSTR:
4275 case BUILT_IN_STRPBRK:
4276 case BUILT_IN_STRCAT:
4277 case BUILT_IN_STRNCAT:
4278 case BUILT_IN_STRSPN:
4279 case BUILT_IN_STRCSPN:
4280 case BUILT_IN_STRCMP:
4281 case BUILT_IN_FFS:
4282 case BUILT_IN_PUTCHAR:
4283 case BUILT_IN_PUTS:
4284 case BUILT_IN_PRINTF:
4285 case BUILT_IN_FPUTC:
4286 case BUILT_IN_FPUTS:
4287 case BUILT_IN_FWRITE:
4288 case BUILT_IN_PUTCHAR_UNLOCKED:
4289 case BUILT_IN_PUTS_UNLOCKED:
4290 case BUILT_IN_PRINTF_UNLOCKED:
4291 case BUILT_IN_FPUTC_UNLOCKED:
4292 case BUILT_IN_FPUTS_UNLOCKED:
4293 case BUILT_IN_FWRITE_UNLOCKED:
4294 case BUILT_IN_FLOOR:
4295 case BUILT_IN_FLOORF:
4296 case BUILT_IN_FLOORL:
4297 case BUILT_IN_CEIL:
4298 case BUILT_IN_CEILF:
4299 case BUILT_IN_CEILL:
4300 case BUILT_IN_TRUNC:
4301 case BUILT_IN_TRUNCF:
4302 case BUILT_IN_TRUNCL:
4303 case BUILT_IN_ROUND:
4304 case BUILT_IN_ROUNDF:
4305 case BUILT_IN_ROUNDL:
4306 case BUILT_IN_NEARBYINT:
4307 case BUILT_IN_NEARBYINTF:
4308 case BUILT_IN_NEARBYINTL:
4309 return expand_call (exp, target, ignore);
4310
4311 default:
4312 break;
4313 }
4314
4315 /* The built-in function expanders test for target == const0_rtx
4316 to determine whether the function's result will be ignored. */
4317 if (ignore)
4318 target = const0_rtx;
4319
4320 /* If the result of a pure or const built-in function is ignored, and
4321 none of its arguments are volatile, we can avoid expanding the
4322 built-in call and just evaluate the arguments for side-effects. */
4323 if (target == const0_rtx
4324 && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
4325 {
4326 bool volatilep = false;
4327 tree arg;
4328
4329 for (arg = arglist; arg; arg = TREE_CHAIN (arg))
4330 if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
4331 {
4332 volatilep = true;
4333 break;
4334 }
4335
4336 if (! volatilep)
4337 {
4338 for (arg = arglist; arg; arg = TREE_CHAIN (arg))
4339 expand_expr (TREE_VALUE (arg), const0_rtx,
4340 VOIDmode, EXPAND_NORMAL);
4341 return const0_rtx;
4342 }
4343 }
4344
4345 switch (fcode)
4346 {
4347 case BUILT_IN_ABS:
4348 case BUILT_IN_LABS:
4349 case BUILT_IN_LLABS:
4350 case BUILT_IN_IMAXABS:
4351 case BUILT_IN_FABS:
4352 case BUILT_IN_FABSF:
4353 case BUILT_IN_FABSL:
4354 /* build_function_call changes these into ABS_EXPR. */
4355 abort ();
4356
4357 case BUILT_IN_CONJ:
4358 case BUILT_IN_CONJF:
4359 case BUILT_IN_CONJL:
4360 case BUILT_IN_CREAL:
4361 case BUILT_IN_CREALF:
4362 case BUILT_IN_CREALL:
4363 case BUILT_IN_CIMAG:
4364 case BUILT_IN_CIMAGF:
4365 case BUILT_IN_CIMAGL:
4366 /* expand_tree_builtin changes these into CONJ_EXPR, REALPART_EXPR
4367 and IMAGPART_EXPR. */
4368 abort ();
4369
4370 case BUILT_IN_SIN:
4371 case BUILT_IN_SINF:
4372 case BUILT_IN_SINL:
4373 case BUILT_IN_COS:
4374 case BUILT_IN_COSF:
4375 case BUILT_IN_COSL:
4376 case BUILT_IN_EXP:
4377 case BUILT_IN_EXPF:
4378 case BUILT_IN_EXPL:
4379 case BUILT_IN_LOG:
4380 case BUILT_IN_LOGF:
4381 case BUILT_IN_LOGL:
4382 /* Treat these like sqrt only if unsafe math optimizations are allowed,
4383 because of possible accuracy problems. */
4384 if (! flag_unsafe_math_optimizations)
4385 break;
4386 case BUILT_IN_SQRT:
4387 case BUILT_IN_SQRTF:
4388 case BUILT_IN_SQRTL:
4389 case BUILT_IN_FLOOR:
4390 case BUILT_IN_FLOORF:
4391 case BUILT_IN_FLOORL:
4392 case BUILT_IN_CEIL:
4393 case BUILT_IN_CEILF:
4394 case BUILT_IN_CEILL:
4395 case BUILT_IN_TRUNC:
4396 case BUILT_IN_TRUNCF:
4397 case BUILT_IN_TRUNCL:
4398 case BUILT_IN_ROUND:
4399 case BUILT_IN_ROUNDF:
4400 case BUILT_IN_ROUNDL:
4401 case BUILT_IN_NEARBYINT:
4402 case BUILT_IN_NEARBYINTF:
4403 case BUILT_IN_NEARBYINTL:
4404 target = expand_builtin_mathfn (exp, target, subtarget);
4405 if (target)
4406 return target;
4407 break;
4408
4409 case BUILT_IN_POW:
4410 case BUILT_IN_POWF:
4411 case BUILT_IN_POWL:
4412 case BUILT_IN_ATAN2:
4413 case BUILT_IN_ATAN2F:
4414 case BUILT_IN_ATAN2L:
4415 if (! flag_unsafe_math_optimizations)
4416 break;
4417 target = expand_builtin_mathfn_2 (exp, target, subtarget);
4418 if (target)
4419 return target;
4420 break;
4421
4422 case BUILT_IN_APPLY_ARGS:
4423 return expand_builtin_apply_args ();
4424
4425 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
4426 FUNCTION with a copy of the parameters described by
4427 ARGUMENTS, and ARGSIZE. It returns a block of memory
4428 allocated on the stack into which is stored all the registers
4429 that might possibly be used for returning the result of a
4430 function. ARGUMENTS is the value returned by
4431 __builtin_apply_args. ARGSIZE is the number of bytes of
4432 arguments that must be copied. ??? How should this value be
4433 computed? We'll also need a safe worst case value for varargs
4434 functions. */
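/* For example, a forwarding wrapper might use
   `__builtin_return (__builtin_apply (f, __builtin_apply_args (), 64))'
   to call F with this function's own arguments; the 64 here is simply a
   caller-chosen upper bound on the argument-block size.  */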
4435 case BUILT_IN_APPLY:
4436 if (!validate_arglist (arglist, POINTER_TYPE,
4437 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4438 && !validate_arglist (arglist, REFERENCE_TYPE,
4439 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4440 return const0_rtx;
4441 else
4442 {
4443 int i;
4444 tree t;
4445 rtx ops[3];
4446
4447 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
4448 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
4449
4450 return expand_builtin_apply (ops[0], ops[1], ops[2]);
4451 }
4452
4453 /* __builtin_return (RESULT) causes the function to return the
4454 value described by RESULT. RESULT is address of the block of
4455 memory returned by __builtin_apply. */
4456 case BUILT_IN_RETURN:
4457 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
4458 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
4459 NULL_RTX, VOIDmode, 0));
4460 return const0_rtx;
4461
4462 case BUILT_IN_SAVEREGS:
4463 return expand_builtin_saveregs ();
4464
4465 case BUILT_IN_ARGS_INFO:
4466 return expand_builtin_args_info (exp);
4467
4468 /* Return the address of the first anonymous stack arg. */
4469 case BUILT_IN_NEXT_ARG:
4470 return expand_builtin_next_arg (arglist);
4471
4472 case BUILT_IN_CLASSIFY_TYPE:
4473 return expand_builtin_classify_type (arglist);
4474
4475 case BUILT_IN_CONSTANT_P:
4476 return expand_builtin_constant_p (exp);
4477
4478 case BUILT_IN_FRAME_ADDRESS:
4479 case BUILT_IN_RETURN_ADDRESS:
4480 return expand_builtin_frame_address (exp);
4481
4482 /* Returns the address of the area where the structure is returned.
4483 0 otherwise. */
4484 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
4485 if (arglist != 0
4486 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
4487 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
4488 return const0_rtx;
4489 else
4490 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
4491
4492 case BUILT_IN_ALLOCA:
4493 target = expand_builtin_alloca (arglist, target);
4494 if (target)
4495 return target;
4496 break;
4497
4498 case BUILT_IN_FFS:
4499 case BUILT_IN_FFSL:
4500 case BUILT_IN_FFSLL:
4501 target = expand_builtin_unop (target_mode, arglist, target,
4502 subtarget, ffs_optab);
4503 if (target)
4504 return target;
4505 break;
4506
4507 case BUILT_IN_CLZ:
4508 case BUILT_IN_CLZL:
4509 case BUILT_IN_CLZLL:
4510 target = expand_builtin_unop (target_mode, arglist, target,
4511 subtarget, clz_optab);
4512 if (target)
4513 return target;
4514 break;
4515
4516 case BUILT_IN_CTZ:
4517 case BUILT_IN_CTZL:
4518 case BUILT_IN_CTZLL:
4519 target = expand_builtin_unop (target_mode, arglist, target,
4520 subtarget, ctz_optab);
4521 if (target)
4522 return target;
4523 break;
4524
4525 case BUILT_IN_POPCOUNT:
4526 case BUILT_IN_POPCOUNTL:
4527 case BUILT_IN_POPCOUNTLL:
4528 target = expand_builtin_unop (target_mode, arglist, target,
4529 subtarget, popcount_optab);
4530 if (target)
4531 return target;
4532 break;
4533
4534 case BUILT_IN_PARITY:
4535 case BUILT_IN_PARITYL:
4536 case BUILT_IN_PARITYLL:
4537 target = expand_builtin_unop (target_mode, arglist, target,
4538 subtarget, parity_optab);
4539 if (target)
4540 return target;
4541 break;
4542
4543 case BUILT_IN_STRLEN:
4544 target = expand_builtin_strlen (exp, target);
4545 if (target)
4546 return target;
4547 break;
4548
4549 case BUILT_IN_STRCPY:
4550 target = expand_builtin_strcpy (exp, target, mode);
4551 if (target)
4552 return target;
4553 break;
4554
4555 case BUILT_IN_STRNCPY:
4556 target = expand_builtin_strncpy (arglist, target, mode);
4557 if (target)
4558 return target;
4559 break;
4560
4561 case BUILT_IN_STPCPY:
4562 target = expand_builtin_stpcpy (arglist, target, mode);
4563 if (target)
4564 return target;
4565 break;
4566
4567 case BUILT_IN_STRCAT:
4568 target = expand_builtin_strcat (arglist, target, mode);
4569 if (target)
4570 return target;
4571 break;
4572
4573 case BUILT_IN_STRNCAT:
4574 target = expand_builtin_strncat (arglist, target, mode);
4575 if (target)
4576 return target;
4577 break;
4578
4579 case BUILT_IN_STRSPN:
4580 target = expand_builtin_strspn (arglist, target, mode);
4581 if (target)
4582 return target;
4583 break;
4584
4585 case BUILT_IN_STRCSPN:
4586 target = expand_builtin_strcspn (arglist, target, mode);
4587 if (target)
4588 return target;
4589 break;
4590
4591 case BUILT_IN_STRSTR:
4592 target = expand_builtin_strstr (arglist, target, mode);
4593 if (target)
4594 return target;
4595 break;
4596
4597 case BUILT_IN_STRPBRK:
4598 target = expand_builtin_strpbrk (arglist, target, mode);
4599 if (target)
4600 return target;
4601 break;
4602
4603 case BUILT_IN_INDEX:
4604 case BUILT_IN_STRCHR:
4605 target = expand_builtin_strchr (arglist, target, mode);
4606 if (target)
4607 return target;
4608 break;
4609
4610 case BUILT_IN_RINDEX:
4611 case BUILT_IN_STRRCHR:
4612 target = expand_builtin_strrchr (arglist, target, mode);
4613 if (target)
4614 return target;
4615 break;
4616
4617 case BUILT_IN_MEMCPY:
4618 target = expand_builtin_memcpy (arglist, target, mode, /*endp=*/0);
4619 if (target)
4620 return target;
4621 break;
4622
4623 case BUILT_IN_MEMPCPY:
4624 target = expand_builtin_memcpy (arglist, target, mode, /*endp=*/1);
4625 if (target)
4626 return target;
4627 break;
4628
4629 case BUILT_IN_MEMMOVE:
4630 target = expand_builtin_memmove (arglist, target, mode);
4631 if (target)
4632 return target;
4633 break;
4634
4635 case BUILT_IN_BCOPY:
4636 target = expand_builtin_bcopy (arglist);
4637 if (target)
4638 return target;
4639 break;
4640
4641 case BUILT_IN_MEMSET:
4642 target = expand_builtin_memset (exp, target, mode);
4643 if (target)
4644 return target;
4645 break;
4646
4647 case BUILT_IN_BZERO:
4648 target = expand_builtin_bzero (exp);
4649 if (target)
4650 return target;
4651 break;
4652
4653 case BUILT_IN_STRCMP:
4654 target = expand_builtin_strcmp (exp, target, mode);
4655 if (target)
4656 return target;
4657 break;
4658
4659 case BUILT_IN_STRNCMP:
4660 target = expand_builtin_strncmp (exp, target, mode);
4661 if (target)
4662 return target;
4663 break;
4664
4665 case BUILT_IN_BCMP:
4666 case BUILT_IN_MEMCMP:
4667 target = expand_builtin_memcmp (exp, arglist, target, mode);
4668 if (target)
4669 return target;
4670 break;
4671
4672 case BUILT_IN_SETJMP:
4673 target = expand_builtin_setjmp (arglist, target);
4674 if (target)
4675 return target;
4676 break;
4677
4678 /* __builtin_longjmp is passed a pointer to an array of five words.
4679 It's similar to the C library longjmp function but works with
4680 __builtin_setjmp above. */
4681 case BUILT_IN_LONGJMP:
4682 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4683 break;
4684 else
4685 {
4686 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
4687 VOIDmode, 0);
4688 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
4689 NULL_RTX, VOIDmode, 0);
4690
4691 if (value != const1_rtx)
4692 {
4693 error ("__builtin_longjmp second argument must be 1");
4694 return const0_rtx;
4695 }
4696
4697 expand_builtin_longjmp (buf_addr, value);
4698 return const0_rtx;
4699 }
4700
4701 case BUILT_IN_TRAP:
4702 expand_builtin_trap ();
4703 return const0_rtx;
4704
4705 case BUILT_IN_FPUTS:
4706 target = expand_builtin_fputs (arglist, ignore,/*unlocked=*/ 0);
4707 if (target)
4708 return target;
4709 break;
4710 case BUILT_IN_FPUTS_UNLOCKED:
4711 target = expand_builtin_fputs (arglist, ignore,/*unlocked=*/ 1);
4712 if (target)
4713 return target;
4714 break;
4715
4716 /* Various hooks for the DWARF 2 __throw routine. */
4717 case BUILT_IN_UNWIND_INIT:
4718 expand_builtin_unwind_init ();
4719 return const0_rtx;
4720 case BUILT_IN_DWARF_CFA:
4721 return virtual_cfa_rtx;
4722 #ifdef DWARF2_UNWIND_INFO
4723 case BUILT_IN_DWARF_FP_REGNUM:
4724 return expand_builtin_dwarf_fp_regnum ();
4725 case BUILT_IN_INIT_DWARF_REG_SIZES:
4726 expand_builtin_init_dwarf_reg_sizes (TREE_VALUE (arglist));
4727 return const0_rtx;
4728 #endif
4729 case BUILT_IN_FROB_RETURN_ADDR:
4730 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
4731 case BUILT_IN_EXTRACT_RETURN_ADDR:
4732 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
4733 case BUILT_IN_EH_RETURN:
4734 expand_builtin_eh_return (TREE_VALUE (arglist),
4735 TREE_VALUE (TREE_CHAIN (arglist)));
4736 return const0_rtx;
4737 #ifdef EH_RETURN_DATA_REGNO
4738 case BUILT_IN_EH_RETURN_DATA_REGNO:
4739 return expand_builtin_eh_return_data_regno (arglist);
4740 #endif
4741 case BUILT_IN_VA_START:
4742 case BUILT_IN_STDARG_START:
4743 return expand_builtin_va_start (arglist);
4744 case BUILT_IN_VA_END:
4745 return expand_builtin_va_end (arglist);
4746 case BUILT_IN_VA_COPY:
4747 return expand_builtin_va_copy (arglist);
4748 case BUILT_IN_EXPECT:
4749 return expand_builtin_expect (arglist, target);
4750 case BUILT_IN_PREFETCH:
4751 expand_builtin_prefetch (arglist);
4752 return const0_rtx;
4753
4754
4755 default: /* Just emit a library call for any unknown builtin. */
4756 if (!DECL_ASSEMBLER_NAME_SET_P (fndecl))
4757 error ("built-in function `%s' not currently supported",
4758 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
4759 }
4760
4761 /* The switch statement above can drop through to cause the function
4762 to be called normally. */
4763 return expand_call (exp, target, ignore);
4764 }
4765
4766 /* Determine whether a tree node represents a call to a built-in
4767 math function. If the tree T is a call to a built-in function
4768 taking a single real argument, then the return value is the
4769 DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT. Otherwise
4770 the return value is END_BUILTINS. */
4771
4772 enum built_in_function
4773 builtin_mathfn_code (t)
4774 tree t;
4775 {
4776 tree fndecl, arglist;
4777
4778 if (TREE_CODE (t) != CALL_EXPR
4779 || TREE_CODE (TREE_OPERAND (t, 0)) != ADDR_EXPR)
4780 return END_BUILTINS;
4781
4782 fndecl = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
4783 if (TREE_CODE (fndecl) != FUNCTION_DECL
4784 || ! DECL_BUILT_IN (fndecl)
4785 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
4786 return END_BUILTINS;
4787
4788 arglist = TREE_OPERAND (t, 1);
4789 if (! arglist
4790 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
4791 return END_BUILTINS;
4792
4793 arglist = TREE_CHAIN (arglist);
4794 switch (DECL_FUNCTION_CODE (fndecl))
4795 {
4796 case BUILT_IN_POW:
4797 case BUILT_IN_POWF:
4798 case BUILT_IN_POWL:
4799 case BUILT_IN_ATAN2:
4800 case BUILT_IN_ATAN2F:
4801 case BUILT_IN_ATAN2L:
4802 if (! arglist
4803 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE
4804 || TREE_CHAIN (arglist))
4805 return END_BUILTINS;
4806 break;
4807
4808 default:
4809 if (arglist)
4810 return END_BUILTINS;
4811 break;
4812 }
4813
4814 return DECL_FUNCTION_CODE (fndecl);
4815 }
4816
4817 /* Fold a call to __builtin_constant_p, if we know it will evaluate to a
4818 constant. ARGLIST is the argument list of the call. */
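/* For example, `__builtin_constant_p (3)' and `__builtin_constant_p ("abc")'
   fold to 1 here, while an argument with side effects folds to 0.  */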
4819
4820 static tree
4821 fold_builtin_constant_p (arglist)
4822 tree arglist;
4823 {
4824 if (arglist == 0)
4825 return 0;
4826
4827 arglist = TREE_VALUE (arglist);
4828
4829 /* We return 1 for a numeric type that's known to be a constant
4830 value at compile-time or for an aggregate type that's a
4831 literal constant. */
4832 STRIP_NOPS (arglist);
4833
4834 /* If we know this is a constant, return the constant one. */
4835 if (TREE_CODE_CLASS (TREE_CODE (arglist)) == 'c'
4836 || (TREE_CODE (arglist) == CONSTRUCTOR
4837 && TREE_CONSTANT (arglist))
4838 || (TREE_CODE (arglist) == ADDR_EXPR
4839 && TREE_CODE (TREE_OPERAND (arglist, 0)) == STRING_CST))
4840 return integer_one_node;
4841
4842 /* If we aren't going to be running CSE or this expression
4843 has side effects, show we don't know it to be a constant.
4844 Likewise if it's a pointer or aggregate type since in those
4845 cases we only want literals, since those are only optimized
4846 when generating RTL, not later.
4847 And finally, if we are compiling an initializer, not code, we
4848 need to return a definite result now; there's not going to be any
4849 more optimization done. */
4850 if (TREE_SIDE_EFFECTS (arglist) || cse_not_expected
4851 || AGGREGATE_TYPE_P (TREE_TYPE (arglist))
4852 || POINTER_TYPE_P (TREE_TYPE (arglist))
4853 || cfun == 0)
4854 return integer_zero_node;
4855
4856 return 0;
4857 }
4858
4859 /* Fold a call to __builtin_classify_type. */
4860
4861 static tree
4862 fold_builtin_classify_type (arglist)
4863 tree arglist;
4864 {
4865 if (arglist == 0)
4866 return build_int_2 (no_type_class, 0);
4867
4868 return build_int_2 (type_to_class (TREE_TYPE (TREE_VALUE (arglist))), 0);
4869 }
4870
4871 /* Fold a call to __builtin_inf or __builtin_huge_val. */
4872
4873 static tree
4874 fold_builtin_inf (type, warn)
4875 tree type;
4876 int warn;
4877 {
4878 REAL_VALUE_TYPE real;
4879
4880 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
4881 warning ("target format does not support infinity");
4882
4883 real_inf (&real);
4884 return build_real (type, real);
4885 }
4886
4887 /* Fold a call to __builtin_nan or __builtin_nans. */
4888
4889 static tree
4890 fold_builtin_nan (arglist, type, quiet)
4891 tree arglist, type;
4892 int quiet;
4893 {
4894 REAL_VALUE_TYPE real;
4895 const char *str;
4896
4897 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
4898 return 0;
4899 str = c_getstr (TREE_VALUE (arglist));
4900 if (!str)
4901 return 0;
4902
4903 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
4904 return 0;
4905
4906 return build_real (type, real);
4907 }
4908
4909 /* EXP is assumed to be a builtin call where truncation can be propagated
4910 across (for instance floor((double)f) == (double)floorf (f)).
4911 Do the transformation. */
4912 static tree
4913 fold_trunc_transparent_mathfn (exp)
4914 tree exp;
4915 {
4916 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4917 tree arglist = TREE_OPERAND (exp, 1);
4918 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4919
4920 if (optimize && validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
4921 {
4922 tree arg0 = strip_float_extensions (TREE_VALUE (arglist));
4923 tree ftype = TREE_TYPE (exp);
4924 tree newtype = TREE_TYPE (arg0);
4925 tree decl;
4926
4927 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
4928 && (decl = mathfn_built_in (newtype, fcode)))
4929 {
4930 arglist =
4931 build_tree_list (NULL_TREE, fold (convert (newtype, arg0)));
4932 return convert (ftype,
4933 build_function_call_expr (decl, arglist));
4934 }
4935 }
4936 return 0;
4937 }
4938
4939 /* Used by constant folding to eliminate some builtin calls early. EXP is
4940 the CALL_EXPR of a call to a builtin function. */
4941
4942 tree
4943 fold_builtin (exp)
4944 tree exp;
4945 {
4946 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4947 tree arglist = TREE_OPERAND (exp, 1);
4948 tree type = TREE_TYPE (TREE_TYPE (fndecl));
4949
4950 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
4951 return 0;
4952
4953 switch (DECL_FUNCTION_CODE (fndecl))
4954 {
4955 case BUILT_IN_CONSTANT_P:
4956 return fold_builtin_constant_p (arglist);
4957
4958 case BUILT_IN_CLASSIFY_TYPE:
4959 return fold_builtin_classify_type (arglist);
4960
4961 case BUILT_IN_STRLEN:
4962 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
4963 {
4964 tree len = c_strlen (TREE_VALUE (arglist));
4965 if (len)
4966 {
4967 /* Convert from the internal "sizetype" type to "size_t". */
4968 if (size_type_node)
4969 len = convert (size_type_node, len);
4970 return len;
4971 }
4972 }
4973 break;
4974
4975 case BUILT_IN_SQRT:
4976 case BUILT_IN_SQRTF:
4977 case BUILT_IN_SQRTL:
4978 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
4979 {
4980 enum built_in_function fcode;
4981 tree arg = TREE_VALUE (arglist);
4982
4983 /* Optimize sqrt of constant value. */
4984 if (TREE_CODE (arg) == REAL_CST
4985 && ! TREE_CONSTANT_OVERFLOW (arg))
4986 {
4987 enum machine_mode mode;
4988 REAL_VALUE_TYPE r, x;
4989
4990 x = TREE_REAL_CST (arg);
4991 mode = TYPE_MODE (type);
4992 if (real_sqrt (&r, mode, &x)
4993 || (!flag_trapping_math && !flag_errno_math))
4994 return build_real (type, r);
4995 }
4996
4997 /* Optimize sqrt(exp(x)) = exp(x*0.5). */
4998 fcode = builtin_mathfn_code (arg);
4999 if (flag_unsafe_math_optimizations
5000 && (fcode == BUILT_IN_EXP
5001 || fcode == BUILT_IN_EXPF
5002 || fcode == BUILT_IN_EXPL))
5003 {
5004 tree expfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
5005 arg = fold (build (MULT_EXPR, type,
5006 TREE_VALUE (TREE_OPERAND (arg, 1)),
5007 build_real (type, dconsthalf)));
5008 arglist = build_tree_list (NULL_TREE, arg);
5009 return build_function_call_expr (expfn, arglist);
5010 }
5011
5012 /* Optimize sqrt(pow(x,y)) = pow(x,y*0.5). */
5013 if (flag_unsafe_math_optimizations
5014 && (fcode == BUILT_IN_POW
5015 || fcode == BUILT_IN_POWF
5016 || fcode == BUILT_IN_POWL))
5017 {
5018 tree powfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
5019 tree arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
5020 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg, 1)));
5021 tree narg1 = fold (build (MULT_EXPR, type, arg1,
5022 build_real (type, dconsthalf)));
5023 arglist = tree_cons (NULL_TREE, arg0,
5024 build_tree_list (NULL_TREE, narg1));
5025 return build_function_call_expr (powfn, arglist);
5026 }
5027 }
5028 break;
5029
5030 case BUILT_IN_SIN:
5031 case BUILT_IN_SINF:
5032 case BUILT_IN_SINL:
5033 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5034 {
5035 tree arg = TREE_VALUE (arglist);
5036
5037 /* Optimize sin(0.0) = 0.0. */
5038 if (real_zerop (arg))
5039 return build_real (type, dconst0);
5040 }
5041 break;
5042
5043 case BUILT_IN_COS:
5044 case BUILT_IN_COSF:
5045 case BUILT_IN_COSL:
5046 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5047 {
5048 tree arg = TREE_VALUE (arglist);
5049
5050 /* Optimize cos(0.0) = 1.0. */
5051 if (real_zerop (arg))
5052 return build_real (type, dconst1);
5053 }
5054 break;
5055
5056 case BUILT_IN_EXP:
5057 case BUILT_IN_EXPF:
5058 case BUILT_IN_EXPL:
5059 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5060 {
5061 enum built_in_function fcode;
5062 tree arg = TREE_VALUE (arglist);
5063
5064 /* Optimize exp(0.0) = 1.0. */
5065 if (real_zerop (arg))
5066 return build_real (type, dconst1);
5067
5068 /* Optimize exp(log(x)) = x. */
5069 fcode = builtin_mathfn_code (arg);
5070 if (flag_unsafe_math_optimizations
5071 && (fcode == BUILT_IN_LOG
5072 || fcode == BUILT_IN_LOGF
5073 || fcode == BUILT_IN_LOGL))
5074 return TREE_VALUE (TREE_OPERAND (arg, 1));
5075 }
5076 break;
5077
5078 case BUILT_IN_LOG:
5079 case BUILT_IN_LOGF:
5080 case BUILT_IN_LOGL:
5081 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5082 {
5083 enum built_in_function fcode;
5084 tree arg = TREE_VALUE (arglist);
5085
5086 /* Optimize log(1.0) = 0.0. */
5087 if (real_onep (arg))
5088 return build_real (type, dconst0);
5089
5090 /* Optimize log(exp(x)) = x. */
5091 fcode = builtin_mathfn_code (arg);
5092 if (flag_unsafe_math_optimizations
5093 && (fcode == BUILT_IN_EXP
5094 || fcode == BUILT_IN_EXPF
5095 || fcode == BUILT_IN_EXPL))
5096 return TREE_VALUE (TREE_OPERAND (arg, 1));
5097
5098 /* Optimize log(sqrt(x)) = log(x)*0.5. */
5099 if (flag_unsafe_math_optimizations
5100 && (fcode == BUILT_IN_SQRT
5101 || fcode == BUILT_IN_SQRTF
5102 || fcode == BUILT_IN_SQRTL))
5103 {
5104 tree logfn = build_function_call_expr (fndecl,
5105 TREE_OPERAND (arg, 1));
5106 return fold (build (MULT_EXPR, type, logfn,
5107 build_real (type, dconsthalf)));
5108 }
5109
5110 /* Optimize log(pow(x,y)) = y*log(x). */
5111 if (flag_unsafe_math_optimizations
5112 && (fcode == BUILT_IN_POW
5113 || fcode == BUILT_IN_POWF
5114 || fcode == BUILT_IN_POWL))
5115 {
5116 tree arg0, arg1, logfn;
5117
5118 arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
5119 arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg, 1)));
5120 arglist = build_tree_list (NULL_TREE, arg0);
5121 logfn = build_function_call_expr (fndecl, arglist);
5122 return fold (build (MULT_EXPR, type, arg1, logfn));
5123 }
5124 }
5125 break;
5126
5127 case BUILT_IN_POW:
5128 case BUILT_IN_POWF:
5129 case BUILT_IN_POWL:
5130 if (validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5131 {
5132 enum built_in_function fcode;
5133 tree arg0 = TREE_VALUE (arglist);
5134 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5135
5136 /* Optimize pow(1.0,y) = 1.0. */
5137 if (real_onep (arg0))
5138 return omit_one_operand (type, build_real (type, dconst1), arg1);
5139
5140 if (TREE_CODE (arg1) == REAL_CST
5141 && ! TREE_CONSTANT_OVERFLOW (arg1))
5142 {
5143 REAL_VALUE_TYPE c;
5144 c = TREE_REAL_CST (arg1);
5145
5146 /* Optimize pow(x,0.0) = 1.0. */
5147 if (REAL_VALUES_EQUAL (c, dconst0))
5148 return omit_one_operand (type, build_real (type, dconst1),
5149 arg0);
5150
5151 /* Optimize pow(x,1.0) = x. */
5152 if (REAL_VALUES_EQUAL (c, dconst1))
5153 return arg0;
5154
5155 /* Optimize pow(x,-1.0) = 1.0/x. */
5156 if (REAL_VALUES_EQUAL (c, dconstm1))
5157 return fold (build (RDIV_EXPR, type,
5158 build_real (type, dconst1),
5159 arg0));
5160
5161 /* Optimize pow(x,2.0) = x*x. */
5162 if (REAL_VALUES_EQUAL (c, dconst2)
5163 && (*lang_hooks.decls.global_bindings_p) () == 0
5164 && ! contains_placeholder_p (arg0))
5165 {
5166 arg0 = save_expr (arg0);
5167 return fold (build (MULT_EXPR, type, arg0, arg0));
5168 }
5169
5170 /* Optimize pow(x,-2.0) = 1.0/(x*x). */
5171 if (flag_unsafe_math_optimizations
5172 && REAL_VALUES_EQUAL (c, dconstm2)
5173 && (*lang_hooks.decls.global_bindings_p) () == 0
5174 && ! contains_placeholder_p (arg0))
5175 {
5176 arg0 = save_expr (arg0);
5177 return fold (build (RDIV_EXPR, type,
5178 build_real (type, dconst1),
5179 fold (build (MULT_EXPR, type,
5180 arg0, arg0))));
5181 }
5182
5183 /* Optimize pow(x,0.5) = sqrt(x). */
5184 if (flag_unsafe_math_optimizations
5185 && REAL_VALUES_EQUAL (c, dconsthalf))
5186 {
5187 tree sqrtfn;
5188
5189 fcode = DECL_FUNCTION_CODE (fndecl);
5190 if (fcode == BUILT_IN_POW)
5191 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRT];
5192 else if (fcode == BUILT_IN_POWF)
5193 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTF];
5194 else if (fcode == BUILT_IN_POWL)
5195 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTL];
5196 else
5197 sqrtfn = NULL_TREE;
5198
5199 if (sqrtfn != NULL_TREE)
5200 {
5201 tree arglist = build_tree_list (NULL_TREE, arg0);
5202 return build_function_call_expr (sqrtfn, arglist);
5203 }
5204 }
5205 }
5206
5207 /* Optimize pow(exp(x),y) = exp(x*y). */
5208 fcode = builtin_mathfn_code (arg0);
5209 if (flag_unsafe_math_optimizations
5210 && (fcode == BUILT_IN_EXP
5211 || fcode == BUILT_IN_EXPF
5212 || fcode == BUILT_IN_EXPL))
5213 {
5214 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
5215 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5216 arg = fold (build (MULT_EXPR, type, arg, arg1));
5217 arglist = build_tree_list (NULL_TREE, arg);
5218 return build_function_call_expr (expfn, arglist);
5219 }
5220
5221 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
5222 if (flag_unsafe_math_optimizations
5223 && (fcode == BUILT_IN_SQRT
5224 || fcode == BUILT_IN_SQRTF
5225 || fcode == BUILT_IN_SQRTL))
5226 {
5227 tree narg0 = TREE_VALUE (TREE_OPERAND (arg0, 1));
5228 tree narg1 = fold (build (MULT_EXPR, type, arg1,
5229 build_real (type, dconsthalf)));
5230
5231 arglist = tree_cons (NULL_TREE, narg0,
5232 build_tree_list (NULL_TREE, narg1));
5233 return build_function_call_expr (fndecl, arglist);
5234 }
5235
5236 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
5237 if (flag_unsafe_math_optimizations
5238 && (fcode == BUILT_IN_POW
5239 || fcode == BUILT_IN_POWF
5240 || fcode == BUILT_IN_POWL))
5241 {
5242 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
5243 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
5244 tree narg1 = fold (build (MULT_EXPR, type, arg01, arg1));
5245 arglist = tree_cons (NULL_TREE, arg00,
5246 build_tree_list (NULL_TREE, narg1));
5247 return build_function_call_expr (fndecl, arglist);
5248 }
5249 }
5250 break;
5251
5252 case BUILT_IN_INF:
5253 case BUILT_IN_INFF:
5254 case BUILT_IN_INFL:
5255 return fold_builtin_inf (type, true);
5256
5257 case BUILT_IN_HUGE_VAL:
5258 case BUILT_IN_HUGE_VALF:
5259 case BUILT_IN_HUGE_VALL:
5260 return fold_builtin_inf (type, false);
5261
5262 case BUILT_IN_NAN:
5263 case BUILT_IN_NANF:
5264 case BUILT_IN_NANL:
5265 return fold_builtin_nan (arglist, type, true);
5266
5267 case BUILT_IN_NANS:
5268 case BUILT_IN_NANSF:
5269 case BUILT_IN_NANSL:
5270 return fold_builtin_nan (arglist, type, false);
5271
5272 case BUILT_IN_FLOOR:
5273 case BUILT_IN_FLOORF:
5274 case BUILT_IN_FLOORL:
5275 case BUILT_IN_CEIL:
5276 case BUILT_IN_CEILF:
5277 case BUILT_IN_CEILL:
5278 case BUILT_IN_TRUNC:
5279 case BUILT_IN_TRUNCF:
5280 case BUILT_IN_TRUNCL:
5281 case BUILT_IN_ROUND:
5282 case BUILT_IN_ROUNDF:
5283 case BUILT_IN_ROUNDL:
5284 case BUILT_IN_NEARBYINT:
5285 case BUILT_IN_NEARBYINTF:
5286 case BUILT_IN_NEARBYINTL:
5287 return fold_trunc_transparent_mathfn (exp);
5288
5289 default:
5290 break;
5291 }
5292
5293 return 0;
5294 }
5295
5296 /* Conveniently construct a function call expression. */
5297
5298 tree
5299 build_function_call_expr (fn, arglist)
5300 tree fn, arglist;
5301 {
5302 tree call_expr;
5303
5304 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
5305 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
5306 call_expr, arglist);
5307 TREE_SIDE_EFFECTS (call_expr) = 1;
5308 return fold (call_expr);
5309 }
5310
5311 /* This function validates the types of a function call argument list
5312 represented as a tree chain of parameters against a specified list
5313 of tree_codes. If the last specifier is a 0, that represents an
5314 ellipsis, otherwise the last specifier must be a VOID_TYPE. */
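/* For example, `validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE,
   VOID_TYPE)' accepts exactly one pointer argument followed by one
   integer argument; ending the list with 0 instead of VOID_TYPE would
   also allow any further arguments.  */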
5315
5316 static int
5317 validate_arglist VPARAMS ((tree arglist, ...))
5318 {
5319 enum tree_code code;
5320 int res = 0;
5321
5322 VA_OPEN (ap, arglist);
5323 VA_FIXEDARG (ap, tree, arglist);
5324
5325 do
5326 {
5327 code = va_arg (ap, enum tree_code);
5328 switch (code)
5329 {
5330 case 0:
5331 /* This signifies an ellipsis; any further arguments are all ok. */
5332 res = 1;
5333 goto end;
5334 case VOID_TYPE:
5335 /* This signifies an endlink; if no arguments remain, return
5336 true, otherwise return false. */
5337 res = arglist == 0;
5338 goto end;
5339 default:
5340 /* If no parameters remain or the parameter's code does not
5341 match the specified code, return false. Otherwise continue
5342 checking any remaining arguments. */
5343 if (arglist == 0
5344 || code != TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))))
5345 goto end;
5346 break;
5347 }
5348 arglist = TREE_CHAIN (arglist);
5349 }
5350 while (1);
5351
5352 /* We need gotos here since we can only have one VA_CLOSE in a
5353 function. */
5354 end: ;
5355 VA_CLOSE (ap);
5356
5357 return res;
5358 }
5359
5360 /* Default version of target-specific builtin setup that does nothing. */
5361
5362 void
5363 default_init_builtins ()
5364 {
5365 }
5366
5367 /* Default target-specific builtin expander that does nothing. */
5368
5369 rtx
5370 default_expand_builtin (exp, target, subtarget, mode, ignore)
5371 tree exp ATTRIBUTE_UNUSED;
5372 rtx target ATTRIBUTE_UNUSED;
5373 rtx subtarget ATTRIBUTE_UNUSED;
5374 enum machine_mode mode ATTRIBUTE_UNUSED;
5375 int ignore ATTRIBUTE_UNUSED;
5376 {
5377 return NULL_RTX;
5378 }
5379
5380 /* Instantiate all remaining CONSTANT_P_RTX nodes. */
5381
5382 void
5383 purge_builtin_constant_p ()
5384 {
5385 rtx insn, set, arg, new, note;
5386
5387 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5388 if (INSN_P (insn)
5389 && (set = single_set (insn)) != NULL_RTX
5390 && (GET_CODE (arg = SET_SRC (set)) == CONSTANT_P_RTX
5391 || (GET_CODE (arg) == SUBREG
5392 && (GET_CODE (arg = SUBREG_REG (arg))
5393 == CONSTANT_P_RTX))))
5394 {
5395 arg = XEXP (arg, 0);
5396 new = CONSTANT_P (arg) ? const1_rtx : const0_rtx;
5397 validate_change (insn, &SET_SRC (set), new, 0);
5398
5399 /* Remove the REG_EQUAL note from the insn. */
5400 if ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0)
5401 remove_note (insn, note);
5402 }
5403 }
5404