gcc/builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "real.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "obstack.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-config.h"
35 #include "expr.h"
36 #include "optabs.h"
37 #include "libfuncs.h"
38 #include "recog.h"
39 #include "output.h"
40 #include "typeclass.h"
41 #include "toplev.h"
42 #include "predict.h"
43 #include "tm_p.h"
44 #include "target.h"
45 #include "langhooks.h"
46
47 #define CALLED_AS_BUILT_IN(NODE) \
48 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
49
50 /* Register mappings for target machines without register windows. */
51 #ifndef INCOMING_REGNO
52 #define INCOMING_REGNO(OUT) (OUT)
53 #endif
54 #ifndef OUTGOING_REGNO
55 #define OUTGOING_REGNO(IN) (IN)
56 #endif
57
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
60 #endif
61
62 /* Define the names of the builtin function types and codes. */
63 const char *const built_in_class_names[4]
64 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
65
66 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT) STRINGX(X),
67 const char *const built_in_names[(int) END_BUILTINS] =
68 {
69 #include "builtins.def"
70 };
71 #undef DEF_BUILTIN
72
73 /* Set up an array of _DECL trees; make sure each element is
74 initialized to NULL_TREE. */
75 tree built_in_decls[(int) END_BUILTINS];
76
77 static int get_pointer_alignment PARAMS ((tree, unsigned int));
78 static tree c_strlen PARAMS ((tree));
79 static const char *c_getstr PARAMS ((tree));
80 static rtx c_readstr PARAMS ((const char *,
81 enum machine_mode));
82 static int target_char_cast PARAMS ((tree, char *));
83 static rtx get_memory_rtx PARAMS ((tree));
84 static int apply_args_size PARAMS ((void));
85 static int apply_result_size PARAMS ((void));
86 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
87 static rtx result_vector PARAMS ((int, rtx));
88 #endif
89 static rtx expand_builtin_setjmp PARAMS ((tree, rtx));
90 static void expand_builtin_prefetch PARAMS ((tree));
91 static rtx expand_builtin_apply_args PARAMS ((void));
92 static rtx expand_builtin_apply_args_1 PARAMS ((void));
93 static rtx expand_builtin_apply PARAMS ((rtx, rtx, rtx));
94 static void expand_builtin_return PARAMS ((rtx));
95 static enum type_class type_to_class PARAMS ((tree));
96 static rtx expand_builtin_classify_type PARAMS ((tree));
97 static rtx expand_builtin_mathfn PARAMS ((tree, rtx, rtx));
98 static rtx expand_builtin_constant_p PARAMS ((tree));
99 static rtx expand_builtin_args_info PARAMS ((tree));
100 static rtx expand_builtin_next_arg PARAMS ((tree));
101 static rtx expand_builtin_va_start PARAMS ((tree));
102 static rtx expand_builtin_va_end PARAMS ((tree));
103 static rtx expand_builtin_va_copy PARAMS ((tree));
104 static rtx expand_builtin_memcmp PARAMS ((tree, tree, rtx,
105 enum machine_mode));
106 static rtx expand_builtin_strcmp PARAMS ((tree, rtx,
107 enum machine_mode));
108 static rtx expand_builtin_strncmp PARAMS ((tree, rtx,
109 enum machine_mode));
110 static rtx builtin_memcpy_read_str PARAMS ((PTR, HOST_WIDE_INT,
111 enum machine_mode));
112 static rtx expand_builtin_strcat PARAMS ((tree, rtx,
113 enum machine_mode));
114 static rtx expand_builtin_strncat PARAMS ((tree, rtx,
115 enum machine_mode));
116 static rtx expand_builtin_strspn PARAMS ((tree, rtx,
117 enum machine_mode));
118 static rtx expand_builtin_strcspn PARAMS ((tree, rtx,
119 enum machine_mode));
120 static rtx expand_builtin_memcpy PARAMS ((tree, rtx,
121 enum machine_mode));
122 static rtx expand_builtin_strcpy PARAMS ((tree, rtx,
123 enum machine_mode));
124 static rtx builtin_strncpy_read_str PARAMS ((PTR, HOST_WIDE_INT,
125 enum machine_mode));
126 static rtx expand_builtin_strncpy PARAMS ((tree, rtx,
127 enum machine_mode));
128 static rtx builtin_memset_read_str PARAMS ((PTR, HOST_WIDE_INT,
129 enum machine_mode));
130 static rtx builtin_memset_gen_str PARAMS ((PTR, HOST_WIDE_INT,
131 enum machine_mode));
132 static rtx expand_builtin_memset PARAMS ((tree, rtx,
133 enum machine_mode));
134 static rtx expand_builtin_bzero PARAMS ((tree));
135 static rtx expand_builtin_strlen PARAMS ((tree, rtx));
136 static rtx expand_builtin_strstr PARAMS ((tree, rtx,
137 enum machine_mode));
138 static rtx expand_builtin_strpbrk PARAMS ((tree, rtx,
139 enum machine_mode));
140 static rtx expand_builtin_strchr PARAMS ((tree, rtx,
141 enum machine_mode));
142 static rtx expand_builtin_strrchr PARAMS ((tree, rtx,
143 enum machine_mode));
144 static rtx expand_builtin_alloca PARAMS ((tree, rtx));
145 static rtx expand_builtin_ffs PARAMS ((tree, rtx, rtx));
146 static rtx expand_builtin_frame_address PARAMS ((tree));
147 static rtx expand_builtin_fputs PARAMS ((tree, int, int));
148 static tree stabilize_va_list PARAMS ((tree, int));
149 static rtx expand_builtin_expect PARAMS ((tree, rtx));
150 static tree fold_builtin_constant_p PARAMS ((tree));
151 static tree fold_builtin_classify_type PARAMS ((tree));
152 static tree build_function_call_expr PARAMS ((tree, tree));
153 static int validate_arglist PARAMS ((tree, ...));
154
155 /* Return the alignment in bits of EXP, a pointer valued expression.
156 But don't return more than MAX_ALIGN no matter what.
157 The alignment returned is, by default, the alignment of the thing that
158 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
159
160 Otherwise, look at the expression to see if we can do better, i.e., if the
161 expression is actually pointing at an object whose alignment is tighter. */
162
163 static int
164 get_pointer_alignment (exp, max_align)
165 tree exp;
166 unsigned int max_align;
167 {
168 unsigned int align, inner;
169
170 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
171 return 0;
172
173 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
174 align = MIN (align, max_align);
175
176 while (1)
177 {
178 switch (TREE_CODE (exp))
179 {
180 case NOP_EXPR:
181 case CONVERT_EXPR:
182 case NON_LVALUE_EXPR:
183 exp = TREE_OPERAND (exp, 0);
184 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
185 return align;
186
187 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
188 align = MIN (inner, max_align);
189 break;
190
191 case PLUS_EXPR:
192 /* If sum of pointer + int, restrict our maximum alignment to that
193 imposed by the integer. If not, we can't do any better than
194 ALIGN. */
195 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
196 return align;
197
198 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
199 & (max_align / BITS_PER_UNIT - 1))
200 != 0)
201 max_align >>= 1;
202
203 exp = TREE_OPERAND (exp, 0);
204 break;
205
206 case ADDR_EXPR:
207 /* See what we are pointing at and look at its alignment. */
208 exp = TREE_OPERAND (exp, 0);
209 if (TREE_CODE (exp) == FUNCTION_DECL)
210 align = FUNCTION_BOUNDARY;
211 else if (DECL_P (exp))
212 align = DECL_ALIGN (exp);
213 #ifdef CONSTANT_ALIGNMENT
214 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
215 align = CONSTANT_ALIGNMENT (exp, align);
216 #endif
217 return MIN (align, max_align);
218
219 default:
220 return align;
221 }
222 }
223 }
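/* An illustrative, disabled sketch (hypothetical translation unit, not
   part of this file) of source-level pointer expressions whose alignment
   the walker above can deduce.  */
#if 0
static double d;                       /* DECL_ALIGN is 64 bits on most targets.  */
static char buf[32] __attribute__ ((aligned (16)));

void
pointer_alignment_examples (void)
{
  const void *p1 = &d;                 /* ADDR_EXPR of a DECL: its DECL_ALIGN.  */
  const void *p2 = buf + 4;            /* PLUS_EXPR: the 16-byte base is capped to 4 bytes by the offset.  */
  const void *p3 = (const void *) &d;  /* NOP_EXPR: the cast is looked through.  */
  (void) p1; (void) p2; (void) p3;
}
#endif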
224
225 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
226 way, because it could contain a zero byte in the middle.
227 TREE_STRING_LENGTH is the size of the character array, not the string.
228
229 The value returned is of type `ssizetype'.
230
231 Unfortunately, string_constant can't access the values of const char
232 arrays with initializers, so neither can we do so here. */
233
234 static tree
235 c_strlen (src)
236 tree src;
237 {
238 tree offset_node;
239 HOST_WIDE_INT offset;
240 int max;
241 const char *ptr;
242
243 src = string_constant (src, &offset_node);
244 if (src == 0)
245 return 0;
246
247 max = TREE_STRING_LENGTH (src) - 1;
248 ptr = TREE_STRING_POINTER (src);
249
250 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
251 {
252 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
253 compute the offset to the following null if we don't know where to
254 start searching for it. */
255 int i;
256
257 for (i = 0; i < max; i++)
258 if (ptr[i] == 0)
259 return 0;
260
261 /* We don't know the starting offset, but we do know that the string
262 has no internal zero bytes. We can assume that the offset falls
263 within the bounds of the string; otherwise, the programmer deserves
264 what he gets. Subtract the offset from the length of the string,
265 and return that. This would perhaps not be valid if we were dealing
266 with named arrays in addition to literal string constants. */
267
268 return size_diffop (size_int (max), offset_node);
269 }
270
271 /* We have a known offset into the string. Start searching there for
272 a null character if we can represent it as a single HOST_WIDE_INT. */
273 if (offset_node == 0)
274 offset = 0;
275 else if (! host_integerp (offset_node, 0))
276 offset = -1;
277 else
278 offset = tree_low_cst (offset_node, 0);
279
280 /* If the offset is known to be out of bounds, warn, and call strlen at
281 runtime. */
282 if (offset < 0 || offset > max)
283 {
284 warning ("offset outside bounds of constant string");
285 return 0;
286 }
287
288 /* Use strlen to search for the first zero byte. Since any strings
289 constructed with build_string will have nulls appended, we win even
290 if we get handed something like (char[4])"abcd".
291
292 Since OFFSET is our starting index into the string, no further
293 calculation is needed. */
294 return ssize_int (strlen (ptr + offset));
295 }
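/* A disabled sketch (hypothetical caller) of calls the folder above can
   and cannot handle.  */
#if 0
#include <string.h>

void
c_strlen_examples (int i)
{
  size_t a = strlen ("hello");          /* Offset 0: folds to 5.  */
  size_t b = strlen ("hello" + 2);      /* Constant offset: folds to 3.  */
  size_t c = strlen ("hello" + i);      /* Unknown offset, no inner NUL: folds to 5 - i.  */
  size_t d = strlen ("foo\0bar" + i);   /* Unknown offset, inner NUL: not folded.  */
  (void) a; (void) b; (void) c; (void) d;
}
#endif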
296
297 /* Return a char pointer for a C string if it is a string constant
298 or sum of string constant and integer constant. */
299
300 static const char *
301 c_getstr (src)
302 tree src;
303 {
304 tree offset_node;
305
306 src = string_constant (src, &offset_node);
307 if (src == 0)
308 return 0;
309
310 if (offset_node == 0)
311 return TREE_STRING_POINTER (src);
312 else if (!host_integerp (offset_node, 1)
313 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
314 return 0;
315
316 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
317 }
318
319 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
320 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
321
322 static rtx
323 c_readstr (str, mode)
324 const char *str;
325 enum machine_mode mode;
326 {
327 HOST_WIDE_INT c[2];
328 HOST_WIDE_INT ch;
329 unsigned int i, j;
330
331 if (GET_MODE_CLASS (mode) != MODE_INT)
332 abort ();
333 c[0] = 0;
334 c[1] = 0;
335 ch = 1;
336 for (i = 0; i < GET_MODE_SIZE (mode); i++)
337 {
338 j = i;
339 if (WORDS_BIG_ENDIAN)
340 j = GET_MODE_SIZE (mode) - i - 1;
341 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
342 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
343 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
344 j *= BITS_PER_UNIT;
345 if (j >= 2 * HOST_BITS_PER_WIDE_INT)
346 abort ();
347 if (ch)
348 ch = (unsigned char) str[i];
349 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
350 }
351 return immed_double_const (c[0], c[1], mode);
352 }
353
354 /* Cast a target constant CST to target CHAR; if that value fits into the
355 host char type, return zero and put the value into the variable pointed
356 to by P. */
357
358 static int
359 target_char_cast (cst, p)
360 tree cst;
361 char *p;
362 {
363 unsigned HOST_WIDE_INT val, hostval;
364
365 if (!host_integerp (cst, 1)
366 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
367 return 1;
368
369 val = tree_low_cst (cst, 1);
370 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
371 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
372
373 hostval = val;
374 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
375 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
376
377 if (val != hostval)
378 return 1;
379
380 *p = hostval;
381 return 0;
382 }
383
384 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
385 times to get the address of either a higher stack frame, or a return
386 address located within it (depending on FNDECL_CODE). */
387
388 rtx
389 expand_builtin_return_addr (fndecl_code, count, tem)
390 enum built_in_function fndecl_code;
391 int count;
392 rtx tem;
393 {
394 int i;
395
396 /* Some machines need special handling before we can access
397 arbitrary frames. For example, on the sparc, we must first flush
398 all register windows to the stack. */
399 #ifdef SETUP_FRAME_ADDRESSES
400 if (count > 0)
401 SETUP_FRAME_ADDRESSES ();
402 #endif
403
404 /* On the sparc, the return address is not in the frame, it is in a
405 register. There is no way to access it off of the current frame
406 pointer, but it can be accessed off the previous frame pointer by
407 reading the value from the register window save area. */
408 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
409 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
410 count--;
411 #endif
412
413 /* Scan back COUNT frames to the specified frame. */
414 for (i = 0; i < count; i++)
415 {
416 /* Assume the dynamic chain pointer is in the word that the
417 frame address points to, unless otherwise specified. */
418 #ifdef DYNAMIC_CHAIN_ADDRESS
419 tem = DYNAMIC_CHAIN_ADDRESS (tem);
420 #endif
421 tem = memory_address (Pmode, tem);
422 tem = gen_rtx_MEM (Pmode, tem);
423 set_mem_alias_set (tem, get_frame_alias_set ());
424 tem = copy_to_reg (tem);
425 }
426
427 /* For __builtin_frame_address, return what we've got. */
428 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
429 return tem;
430
431 /* For __builtin_return_address, get the return address from that
432 frame. */
433 #ifdef RETURN_ADDR_RTX
434 tem = RETURN_ADDR_RTX (count, tem);
435 #else
436 tem = memory_address (Pmode,
437 plus_constant (tem, GET_MODE_SIZE (Pmode)));
438 tem = gen_rtx_MEM (Pmode, tem);
439 set_mem_alias_set (tem, get_frame_alias_set ());
440 #endif
441 return tem;
442 }
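/* A minimal, disabled usage sketch (hypothetical caller) of the two
   builtins this helper serves; nonzero counts walk the dynamic chain as
   coded above.  */
#if 0
void *
who_called_me (void)
{
  void *caller_pc = __builtin_return_address (0);  /* Return address of this frame.  */
  void *this_frame = __builtin_frame_address (0);  /* Frame address of this frame.  */
  (void) this_frame;
  return caller_pc;
}
#endif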
443
444 /* Alias set used for setjmp buffer. */
445 static HOST_WIDE_INT setjmp_alias_set = -1;
446
447 /* Construct the leading half of a __builtin_setjmp call. Control will
448 return to RECEIVER_LABEL. This is used directly by sjlj exception
449 handling code. */
450
451 void
452 expand_builtin_setjmp_setup (buf_addr, receiver_label)
453 rtx buf_addr;
454 rtx receiver_label;
455 {
456 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
457 rtx stack_save;
458 rtx mem;
459
460 if (setjmp_alias_set == -1)
461 setjmp_alias_set = new_alias_set ();
462
463 #ifdef POINTERS_EXTEND_UNSIGNED
464 if (GET_MODE (buf_addr) != Pmode)
465 buf_addr = convert_memory_address (Pmode, buf_addr);
466 #endif
467
468 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
469
470 emit_queue ();
471
472 /* We store the frame pointer and the address of receiver_label in
473 the buffer and use the rest of it for the stack save area, which
474 is machine-dependent. */
475
476 #ifndef BUILTIN_SETJMP_FRAME_VALUE
477 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
478 #endif
479
480 mem = gen_rtx_MEM (Pmode, buf_addr);
481 set_mem_alias_set (mem, setjmp_alias_set);
482 emit_move_insn (mem, BUILTIN_SETJMP_FRAME_VALUE);
483
484 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
485 set_mem_alias_set (mem, setjmp_alias_set);
486
487 emit_move_insn (validize_mem (mem),
488 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
489
490 stack_save = gen_rtx_MEM (sa_mode,
491 plus_constant (buf_addr,
492 2 * GET_MODE_SIZE (Pmode)));
493 set_mem_alias_set (stack_save, setjmp_alias_set);
494 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
495
496 /* If there is further processing to do, do it. */
497 #ifdef HAVE_builtin_setjmp_setup
498 if (HAVE_builtin_setjmp_setup)
499 emit_insn (gen_builtin_setjmp_setup (buf_addr));
500 #endif
501
502 /* Tell optimize_save_area_alloca that extra work will need to
503 be done during alloca. */
504 current_function_calls_setjmp = 1;
505
506 /* Set this so all the registers get saved in our frame; we need to be
507 able to copy the saved values for any registers from frames we unwind. */
508 current_function_has_nonlocal_label = 1;
509 }
510
511 /* Construct the trailing part of a __builtin_setjmp call.
512 This is used directly by sjlj exception handling code. */
513
514 void
515 expand_builtin_setjmp_receiver (receiver_label)
516 rtx receiver_label ATTRIBUTE_UNUSED;
517 {
518 /* Clobber the FP when we get here, so we have to make sure it's
519 marked as used by this function. */
520 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
521
522 /* Mark the static chain as clobbered here so life information
523 doesn't get messed up for it. */
524 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
525
526 /* Now put in the code to restore the frame pointer, and argument
527 pointer, if needed. The code below is from expand_end_bindings
528 in stmt.c; see detailed documentation there. */
529 #ifdef HAVE_nonlocal_goto
530 if (! HAVE_nonlocal_goto)
531 #endif
532 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
533
534 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
535 if (fixed_regs[ARG_POINTER_REGNUM])
536 {
537 #ifdef ELIMINABLE_REGS
538 size_t i;
539 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
540
541 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
542 if (elim_regs[i].from == ARG_POINTER_REGNUM
543 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
544 break;
545
546 if (i == ARRAY_SIZE (elim_regs))
547 #endif
548 {
549 /* Now restore our arg pointer from the address at which it
550 was saved in our stack frame. */
551 emit_move_insn (virtual_incoming_args_rtx,
552 copy_to_reg (get_arg_pointer_save_area (cfun)));
553 }
554 }
555 #endif
556
557 #ifdef HAVE_builtin_setjmp_receiver
558 if (HAVE_builtin_setjmp_receiver)
559 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
560 else
561 #endif
562 #ifdef HAVE_nonlocal_goto_receiver
563 if (HAVE_nonlocal_goto_receiver)
564 emit_insn (gen_nonlocal_goto_receiver ());
565 else
566 #endif
567 { /* Nothing */ }
568
569 /* @@@ This is a kludge. Not all machine descriptions define a blockage
570 insn, but we must not allow the code we just generated to be reordered
571 by scheduling. Specifically, the update of the frame pointer must
572 happen immediately, not later. So emit an ASM_INPUT to act as blockage
573 insn. */
574 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
575 }
576
577 /* __builtin_setjmp is passed a pointer to an array of five words (not
578 all will be used on all machines). It operates similarly to the C
579 library function of the same name, but is more efficient. Much of
580 the code below (and for longjmp) is copied from the handling of
581 non-local gotos.
582
583 NOTE: This is intended for use by GNAT and the exception handling
584 scheme in the compiler and will only work in the method used by
585 them. */
586
587 static rtx
588 expand_builtin_setjmp (arglist, target)
589 tree arglist;
590 rtx target;
591 {
592 rtx buf_addr, next_lab, cont_lab;
593
594 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
595 return NULL_RTX;
596
597 if (target == 0 || GET_CODE (target) != REG
598 || REGNO (target) < FIRST_PSEUDO_REGISTER)
599 target = gen_reg_rtx (TYPE_MODE (integer_type_node));
600
601 buf_addr = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
602
603 next_lab = gen_label_rtx ();
604 cont_lab = gen_label_rtx ();
605
606 expand_builtin_setjmp_setup (buf_addr, next_lab);
607
608 /* Set TARGET to zero and branch to the continue label. */
609 emit_move_insn (target, const0_rtx);
610 emit_jump_insn (gen_jump (cont_lab));
611 emit_barrier ();
612 emit_label (next_lab);
613
614 expand_builtin_setjmp_receiver (next_lab);
615
616 /* Set TARGET to one. */
617 emit_move_insn (target, const1_rtx);
618 emit_label (cont_lab);
619
620 /* Tell flow about the strange goings on. Putting `next_lab' on
621 `nonlocal_goto_handler_labels' indicates that function
622 calls may traverse the arc back to this label. */
623
624 current_function_has_nonlocal_label = 1;
625 nonlocal_goto_handler_labels
626 = gen_rtx_EXPR_LIST (VOIDmode, next_lab, nonlocal_goto_handler_labels);
627
628 return target;
629 }
630
631 /* __builtin_longjmp is passed a pointer to an array of five words (not
632 all will be used on all machines). It operates similarly to the C
633 library function of the same name, but is more efficient. Much of
634 the code below is copied from the handling of non-local gotos.
635
636 NOTE: This is intended for use by GNAT and the exception handling
637 scheme in the compiler and will only work in the method used by
638 them. */
639
640 void
641 expand_builtin_longjmp (buf_addr, value)
642 rtx buf_addr, value;
643 {
644 rtx fp, lab, stack, insn, last;
645 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
646
647 if (setjmp_alias_set == -1)
648 setjmp_alias_set = new_alias_set ();
649
650 #ifdef POINTERS_EXTEND_UNSIGNED
651 if (GET_MODE (buf_addr) != Pmode)
652 buf_addr = convert_memory_address (Pmode, buf_addr);
653 #endif
654
655 buf_addr = force_reg (Pmode, buf_addr);
656
657 /* We used to store value in static_chain_rtx, but that fails if pointers
658 are smaller than integers. We instead require that the user must pass
659 a second argument of 1, because that is what builtin_setjmp will
660 return. This also makes EH slightly more efficient, since we are no
661 longer copying around a value that we don't care about. */
662 if (value != const1_rtx)
663 abort ();
664
665 current_function_calls_longjmp = 1;
666
667 last = get_last_insn ();
668 #ifdef HAVE_builtin_longjmp
669 if (HAVE_builtin_longjmp)
670 emit_insn (gen_builtin_longjmp (buf_addr));
671 else
672 #endif
673 {
674 fp = gen_rtx_MEM (Pmode, buf_addr);
675 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
676 GET_MODE_SIZE (Pmode)));
677
678 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
679 2 * GET_MODE_SIZE (Pmode)));
680 set_mem_alias_set (fp, setjmp_alias_set);
681 set_mem_alias_set (lab, setjmp_alias_set);
682 set_mem_alias_set (stack, setjmp_alias_set);
683
684 /* Pick up FP, label, and SP from the block and jump. This code is
685 from expand_goto in stmt.c; see there for detailed comments. */
686 #if HAVE_nonlocal_goto
687 if (HAVE_nonlocal_goto)
688 /* We have to pass a value to the nonlocal_goto pattern that will
689 get copied into the static_chain pointer, but it does not matter
690 what that value is, because builtin_setjmp does not use it. */
691 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
692 else
693 #endif
694 {
695 lab = copy_to_reg (lab);
696
697 emit_move_insn (hard_frame_pointer_rtx, fp);
698 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
699
700 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
701 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
702 emit_indirect_jump (lab);
703 }
704 }
705
706 /* Search backwards and mark the jump insn as a non-local goto.
707 Note that this precludes the use of __builtin_longjmp to a
708 __builtin_setjmp target in the same function. However, we've
709 already cautioned the user that these functions are for
710 internal exception handling use only. */
711 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
712 {
713 if (insn == last)
714 abort ();
715 if (GET_CODE (insn) == JUMP_INSN)
716 {
717 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
718 REG_NOTES (insn));
719 break;
720 }
721 else if (GET_CODE (insn) == CALL_INSN)
722 break;
723 }
724 }
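/* A disabled sketch of the setjmp/longjmp builtin pair expanded above,
   using the five-word buffer the comments describe.  The names are
   hypothetical, and real use is reserved for the internal exception
   handling machinery, as noted above.  */
#if 0
static void *jmp_buffer[5];

static void
raise_example (void)
{
  __builtin_longjmp (jmp_buffer, 1);  /* The second argument must be 1.  */
}

static int
catch_example (void)
{
  if (__builtin_setjmp (jmp_buffer))
    return 1;                         /* Control arrives here via the receiver label.  */
  raise_example ();
  return 0;
}
#endif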
725
726 /* Expand a call to __builtin_prefetch. For a target that does not support
727 data prefetch, evaluate the memory address argument in case it has side
728 effects. */
729
730 static void
731 expand_builtin_prefetch (arglist)
732 tree arglist;
733 {
734 tree arg0, arg1, arg2;
735 rtx op0, op1, op2;
736
737 if (!validate_arglist (arglist, POINTER_TYPE, 0))
738 return;
739
740 arg0 = TREE_VALUE (arglist);
741 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
742 zero (read) and argument 2 (locality) defaults to 3 (high degree of
743 locality). */
744 if (TREE_CHAIN (arglist))
745 {
746 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
747 if (TREE_CHAIN (TREE_CHAIN (arglist)))
748 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
749 else
750 arg2 = build_int_2 (3, 0);
751 }
752 else
753 {
754 arg1 = integer_zero_node;
755 arg2 = build_int_2 (3, 0);
756 }
757
758 /* Argument 0 is an address. */
759 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
760
761 /* Argument 1 (read/write flag) must be a compile-time constant int. */
762 if (TREE_CODE (arg1) != INTEGER_CST)
763 {
764 error ("second arg to `__builtin_prefetch' must be a constant");
765 arg1 = integer_zero_node;
766 }
767 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
768 /* Argument 1 must be either zero or one. */
769 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
770 {
771 warning ("invalid second arg to __builtin_prefetch; using zero");
772 op1 = const0_rtx;
773 }
774
775 /* Argument 2 (locality) must be a compile-time constant int. */
776 if (TREE_CODE (arg2) != INTEGER_CST)
777 {
778 error ("third arg to `__builtin_prefetch' must be a constant");
779 arg2 = integer_zero_node;
780 }
781 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
782 /* Argument 2 must be 0, 1, 2, or 3. */
783 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
784 {
785 warning ("invalid third arg to __builtin_prefetch; using zero");
786 op2 = const0_rtx;
787 }
788
789 #ifdef HAVE_prefetch
790 if (HAVE_prefetch)
791 {
792 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
793 (op0,
794 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
795 || GET_MODE (op0) != Pmode)
796 {
797 #ifdef POINTERS_EXTEND_UNSIGNED
798 if (GET_MODE (op0) != Pmode)
799 op0 = convert_memory_address (Pmode, op0);
800 #endif
801 op0 = force_reg (Pmode, op0);
802 }
803 emit_insn (gen_prefetch (op0, op1, op2));
804 }
805 else
806 #endif
807 op0 = protect_from_queue (op0, 0);
808 /* Don't do anything with direct references to volatile memory, but
809 generate code to handle other side effects. */
810 if (GET_CODE (op0) != MEM && side_effects_p (op0))
811 emit_insn (op0);
812 }
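/* A minimal, disabled usage sketch (hypothetical loop) of the prefetch
   builtin expanded above: argument 1 is the read/write flag (default 0)
   and argument 2 the locality hint (default 3).  */
#if 0
int
prefetch_sum_example (const int *a, int n)
{
  int i, sum = 0;
  for (i = 0; i < n; i++)
    {
      __builtin_prefetch (&a[i + 16], 0, 3);  /* Read prefetch, high locality.  */
      sum += a[i];
    }
  return sum;
}
#endif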
813
814 /* Get a MEM rtx for expression EXP which is the address of an operand
815 to be used in a string instruction (cmpstrsi, movstrsi, ...). */
816
817 static rtx
818 get_memory_rtx (exp)
819 tree exp;
820 {
821 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_SUM);
822 rtx mem;
823
824 #ifdef POINTERS_EXTEND_UNSIGNED
825 if (GET_MODE (addr) != Pmode)
826 addr = convert_memory_address (Pmode, addr);
827 #endif
828
829 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
830
831 /* Get an expression we can use to find the attributes to assign to MEM.
832 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
833 we can. First remove any nops. */
834 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
835 || TREE_CODE (exp) == NON_LVALUE_EXPR)
836 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
837 exp = TREE_OPERAND (exp, 0);
838
839 if (TREE_CODE (exp) == ADDR_EXPR)
840 {
841 exp = TREE_OPERAND (exp, 0);
842 set_mem_attributes (mem, exp, 0);
843 }
844 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
845 {
846 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
847 /* memcpy, memset and other builtin stringops can alias with anything. */
848 set_mem_alias_set (mem, 0);
849 }
850
851 return mem;
852 }
853 \f
854 /* Built-in functions to perform an untyped call and return. */
855
856 /* For each register that may be used for calling a function, this
857 gives a mode used to copy the register's value. VOIDmode indicates
858 the register is not used for calling a function. If the machine
859 has register windows, this gives only the outbound registers.
860 INCOMING_REGNO gives the corresponding inbound register. */
861 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
862
863 /* For each register that may be used for returning values, this gives
864 a mode used to copy the register's value. VOIDmode indicates the
865 register is not used for returning values. If the machine has
866 register windows, this gives only the outbound registers.
867 INCOMING_REGNO gives the corresponding inbound register. */
868 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
869
870 /* For each register that may be used for calling a function, this
871 gives the offset of that register into the block returned by
872 __builtin_apply_args. 0 indicates that the register is not
873 used for calling a function. */
874 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
875
876 /* Return the offset of register REGNO into the block returned by
877 __builtin_apply_args. This is not declared static, since it is
878 needed in objc-act.c. */
879
880 int
881 apply_args_register_offset (regno)
882 int regno;
883 {
884 apply_args_size ();
885
886 /* Arguments are always put in outgoing registers (in the argument
887 block) when that makes sense. */
888 #ifdef OUTGOING_REGNO
889 regno = OUTGOING_REGNO (regno);
890 #endif
891 return apply_args_reg_offset[regno];
892 }
893
894 /* Return the size required for the block returned by __builtin_apply_args,
895 and initialize apply_args_mode. */
896
897 static int
898 apply_args_size ()
899 {
900 static int size = -1;
901 int align;
902 unsigned int regno;
903 enum machine_mode mode;
904
905 /* The values computed by this function never change. */
906 if (size < 0)
907 {
908 /* The first value is the incoming arg-pointer. */
909 size = GET_MODE_SIZE (Pmode);
910
911 /* The second value is the structure value address unless this is
912 passed as an "invisible" first argument. */
913 if (struct_value_rtx)
914 size += GET_MODE_SIZE (Pmode);
915
916 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
917 if (FUNCTION_ARG_REGNO_P (regno))
918 {
919 /* Search for the proper mode for copying this register's
920 value. I'm not sure this is right, but it works so far. */
921 enum machine_mode best_mode = VOIDmode;
922
923 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
924 mode != VOIDmode;
925 mode = GET_MODE_WIDER_MODE (mode))
926 if (HARD_REGNO_MODE_OK (regno, mode)
927 && HARD_REGNO_NREGS (regno, mode) == 1)
928 best_mode = mode;
929
930 if (best_mode == VOIDmode)
931 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
932 mode != VOIDmode;
933 mode = GET_MODE_WIDER_MODE (mode))
934 if (HARD_REGNO_MODE_OK (regno, mode)
935 && have_insn_for (SET, mode))
936 best_mode = mode;
937
938 if (best_mode == VOIDmode)
939 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
940 mode != VOIDmode;
941 mode = GET_MODE_WIDER_MODE (mode))
942 if (HARD_REGNO_MODE_OK (regno, mode)
943 && have_insn_for (SET, mode))
944 best_mode = mode;
945
946 if (best_mode == VOIDmode)
947 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
948 mode != VOIDmode;
949 mode = GET_MODE_WIDER_MODE (mode))
950 if (HARD_REGNO_MODE_OK (regno, mode)
951 && have_insn_for (SET, mode))
952 best_mode = mode;
953
954 mode = best_mode;
955 if (mode == VOIDmode)
956 abort ();
957
958 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
959 if (size % align != 0)
960 size = CEIL (size, align) * align;
961 apply_args_reg_offset[regno] = size;
962 size += GET_MODE_SIZE (mode);
963 apply_args_mode[regno] = mode;
964 }
965 else
966 {
967 apply_args_mode[regno] = VOIDmode;
968 apply_args_reg_offset[regno] = 0;
969 }
970 }
971 return size;
972 }
973
974 /* Return the size required for the block returned by __builtin_apply,
975 and initialize apply_result_mode. */
976
977 static int
978 apply_result_size ()
979 {
980 static int size = -1;
981 int align, regno;
982 enum machine_mode mode;
983
984 /* The values computed by this function never change. */
985 if (size < 0)
986 {
987 size = 0;
988
989 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
990 if (FUNCTION_VALUE_REGNO_P (regno))
991 {
992 /* Search for the proper mode for copying this register's
993 value. I'm not sure this is right, but it works so far. */
994 enum machine_mode best_mode = VOIDmode;
995
996 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
997 mode != TImode;
998 mode = GET_MODE_WIDER_MODE (mode))
999 if (HARD_REGNO_MODE_OK (regno, mode))
1000 best_mode = mode;
1001
1002 if (best_mode == VOIDmode)
1003 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
1004 mode != VOIDmode;
1005 mode = GET_MODE_WIDER_MODE (mode))
1006 if (HARD_REGNO_MODE_OK (regno, mode)
1007 && have_insn_for (SET, mode))
1008 best_mode = mode;
1009
1010 if (best_mode == VOIDmode)
1011 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
1012 mode != VOIDmode;
1013 mode = GET_MODE_WIDER_MODE (mode))
1014 if (HARD_REGNO_MODE_OK (regno, mode)
1015 && have_insn_for (SET, mode))
1016 best_mode = mode;
1017
1018 if (best_mode == VOIDmode)
1019 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
1020 mode != VOIDmode;
1021 mode = GET_MODE_WIDER_MODE (mode))
1022 if (HARD_REGNO_MODE_OK (regno, mode)
1023 && have_insn_for (SET, mode))
1024 best_mode = mode;
1025
1026 mode = best_mode;
1027 if (mode == VOIDmode)
1028 abort ();
1029
1030 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1031 if (size % align != 0)
1032 size = CEIL (size, align) * align;
1033 size += GET_MODE_SIZE (mode);
1034 apply_result_mode[regno] = mode;
1035 }
1036 else
1037 apply_result_mode[regno] = VOIDmode;
1038
1039 /* Allow targets that use untyped_call and untyped_return to override
1040 the size so that machine-specific information can be stored here. */
1041 #ifdef APPLY_RESULT_SIZE
1042 size = APPLY_RESULT_SIZE;
1043 #endif
1044 }
1045 return size;
1046 }
1047
1048 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1049 /* Create a vector describing the result block RESULT. If SAVEP is true,
1050 the result block is used to save the values; otherwise it is used to
1051 restore the values. */
1052
1053 static rtx
1054 result_vector (savep, result)
1055 int savep;
1056 rtx result;
1057 {
1058 int regno, size, align, nelts;
1059 enum machine_mode mode;
1060 rtx reg, mem;
1061 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1062
1063 size = nelts = 0;
1064 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1065 if ((mode = apply_result_mode[regno]) != VOIDmode)
1066 {
1067 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1068 if (size % align != 0)
1069 size = CEIL (size, align) * align;
1070 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1071 mem = adjust_address (result, mode, size);
1072 savevec[nelts++] = (savep
1073 ? gen_rtx_SET (VOIDmode, mem, reg)
1074 : gen_rtx_SET (VOIDmode, reg, mem));
1075 size += GET_MODE_SIZE (mode);
1076 }
1077 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1078 }
1079 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1080
1081 /* Save the state required to perform an untyped call with the same
1082 arguments as were passed to the current function. */
1083
1084 static rtx
1085 expand_builtin_apply_args_1 ()
1086 {
1087 rtx registers;
1088 int size, align, regno;
1089 enum machine_mode mode;
1090
1091 /* Create a block where the arg-pointer, structure value address,
1092 and argument registers can be saved. */
1093 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1094
1095 /* Walk past the arg-pointer and structure value address. */
1096 size = GET_MODE_SIZE (Pmode);
1097 if (struct_value_rtx)
1098 size += GET_MODE_SIZE (Pmode);
1099
1100 /* Save each register used in calling a function to the block. */
1101 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1102 if ((mode = apply_args_mode[regno]) != VOIDmode)
1103 {
1104 rtx tem;
1105
1106 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1107 if (size % align != 0)
1108 size = CEIL (size, align) * align;
1109
1110 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1111
1112 emit_move_insn (adjust_address (registers, mode, size), tem);
1113 size += GET_MODE_SIZE (mode);
1114 }
1115
1116 /* Save the arg pointer to the block. */
1117 emit_move_insn (adjust_address (registers, Pmode, 0),
1118 copy_to_reg (virtual_incoming_args_rtx));
1119 size = GET_MODE_SIZE (Pmode);
1120
1121 /* Save the structure value address unless this is passed as an
1122 "invisible" first argument. */
1123 if (struct_value_incoming_rtx)
1124 {
1125 emit_move_insn (adjust_address (registers, Pmode, size),
1126 copy_to_reg (struct_value_incoming_rtx));
1127 size += GET_MODE_SIZE (Pmode);
1128 }
1129
1130 /* Return the address of the block. */
1131 return copy_addr_to_reg (XEXP (registers, 0));
1132 }
1133
1134 /* __builtin_apply_args returns a block of memory allocated on
1135 the stack into which is stored the arg pointer, structure
1136 value address, static chain, and all the registers that might
1137 possibly be used in performing a function call. The code is
1138 moved to the start of the function so the incoming values are
1139 saved. */
1140
1141 static rtx
1142 expand_builtin_apply_args ()
1143 {
1144 /* Don't do __builtin_apply_args more than once in a function.
1145 Save the result of the first call and reuse it. */
1146 if (apply_args_value != 0)
1147 return apply_args_value;
1148 {
1149 /* When this function is called, it means that registers must be
1150 saved on entry to this function. So we migrate the
1151 call to the first insn of this function. */
1152 rtx temp;
1153 rtx seq;
1154
1155 start_sequence ();
1156 temp = expand_builtin_apply_args_1 ();
1157 seq = get_insns ();
1158 end_sequence ();
1159
1160 apply_args_value = temp;
1161
1162 /* Put the insns after the NOTE that starts the function.
1163 If this is inside a start_sequence, make the outer-level insn
1164 chain current, so the code is placed at the start of the
1165 function. */
1166 push_topmost_sequence ();
1167 emit_insn_before (seq, NEXT_INSN (get_insns ()));
1168 pop_topmost_sequence ();
1169 return temp;
1170 }
1171 }
1172
1173 /* Perform an untyped call and save the state required to perform an
1174 untyped return of whatever value was returned by the given function. */
1175
1176 static rtx
1177 expand_builtin_apply (function, arguments, argsize)
1178 rtx function, arguments, argsize;
1179 {
1180 int size, align, regno;
1181 enum machine_mode mode;
1182 rtx incoming_args, result, reg, dest, src, call_insn;
1183 rtx old_stack_level = 0;
1184 rtx call_fusage = 0;
1185
1186 #ifdef POINTERS_EXTEND_UNSIGNED
1187 if (GET_MODE (arguments) != Pmode)
1188 arguments = convert_memory_address (Pmode, arguments);
1189 #endif
1190
1191 /* Create a block where the return registers can be saved. */
1192 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1193
1194 /* Fetch the arg pointer from the ARGUMENTS block. */
1195 incoming_args = gen_reg_rtx (Pmode);
1196 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1197 #ifndef STACK_GROWS_DOWNWARD
1198 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1199 incoming_args, 0, OPTAB_LIB_WIDEN);
1200 #endif
1201
1202 /* Perform postincrements before actually calling the function. */
1203 emit_queue ();
1204
1205 /* Push a new argument block and copy the arguments. Do not allow
1206 the (potential) memcpy call below to interfere with our stack
1207 manipulations. */
1208 do_pending_stack_adjust ();
1209 NO_DEFER_POP;
1210
1211 /* Save the stack using the nonlocal save mechanism, if available. */
1212 #ifdef HAVE_save_stack_nonlocal
1213 if (HAVE_save_stack_nonlocal)
1214 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1215 else
1216 #endif
1217 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1218
1219 /* Push a block of memory onto the stack to store the memory arguments.
1220 Save the address in a register, and copy the memory arguments. ??? I
1221 haven't figured out how the calling convention macros affect this,
1222 but it's likely that the source and/or destination addresses in
1223 the block copy will need updating in machine specific ways. */
1224 dest = allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1225 dest = gen_rtx_MEM (BLKmode, dest);
1226 set_mem_align (dest, PARM_BOUNDARY);
1227 src = gen_rtx_MEM (BLKmode, incoming_args);
1228 set_mem_align (src, PARM_BOUNDARY);
1229 emit_block_move (dest, src, argsize);
1230
1231 /* Refer to the argument block. */
1232 apply_args_size ();
1233 arguments = gen_rtx_MEM (BLKmode, arguments);
1234 set_mem_align (arguments, PARM_BOUNDARY);
1235
1236 /* Walk past the arg-pointer and structure value address. */
1237 size = GET_MODE_SIZE (Pmode);
1238 if (struct_value_rtx)
1239 size += GET_MODE_SIZE (Pmode);
1240
1241 /* Restore each of the registers previously saved. Make USE insns
1242 for each of these registers for use in making the call. */
1243 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1244 if ((mode = apply_args_mode[regno]) != VOIDmode)
1245 {
1246 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1247 if (size % align != 0)
1248 size = CEIL (size, align) * align;
1249 reg = gen_rtx_REG (mode, regno);
1250 emit_move_insn (reg, adjust_address (arguments, mode, size));
1251 use_reg (&call_fusage, reg);
1252 size += GET_MODE_SIZE (mode);
1253 }
1254
1255 /* Restore the structure value address unless this is passed as an
1256 "invisible" first argument. */
1257 size = GET_MODE_SIZE (Pmode);
1258 if (struct_value_rtx)
1259 {
1260 rtx value = gen_reg_rtx (Pmode);
1261 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1262 emit_move_insn (struct_value_rtx, value);
1263 if (GET_CODE (struct_value_rtx) == REG)
1264 use_reg (&call_fusage, struct_value_rtx);
1265 size += GET_MODE_SIZE (Pmode);
1266 }
1267
1268 /* All arguments and registers used for the call are set up by now! */
1269 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0, 0);
1270
1271 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1272 and we don't want to load it into a register as an optimization,
1273 because prepare_call_address already did it if it should be done. */
1274 if (GET_CODE (function) != SYMBOL_REF)
1275 function = memory_address (FUNCTION_MODE, function);
1276
1277 /* Generate the actual call instruction and save the return value. */
1278 #ifdef HAVE_untyped_call
1279 if (HAVE_untyped_call)
1280 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1281 result, result_vector (1, result)));
1282 else
1283 #endif
1284 #ifdef HAVE_call_value
1285 if (HAVE_call_value)
1286 {
1287 rtx valreg = 0;
1288
1289 /* Locate the unique return register. It is not possible to
1290 express a call that sets more than one return register using
1291 call_value; use untyped_call for that. In fact, untyped_call
1292 only needs to save the return registers in the given block. */
1293 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1294 if ((mode = apply_result_mode[regno]) != VOIDmode)
1295 {
1296 if (valreg)
1297 abort (); /* HAVE_untyped_call required. */
1298 valreg = gen_rtx_REG (mode, regno);
1299 }
1300
1301 emit_call_insn (GEN_CALL_VALUE (valreg,
1302 gen_rtx_MEM (FUNCTION_MODE, function),
1303 const0_rtx, NULL_RTX, const0_rtx));
1304
1305 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1306 }
1307 else
1308 #endif
1309 abort ();
1310
1311 /* Find the CALL insn we just emitted. */
1312 for (call_insn = get_last_insn ();
1313 call_insn && GET_CODE (call_insn) != CALL_INSN;
1314 call_insn = PREV_INSN (call_insn))
1315 ;
1316
1317 if (! call_insn)
1318 abort ();
1319
1320 /* Put the register usage information on the CALL. If there is already
1321 some usage information, put ours at the end. */
1322 if (CALL_INSN_FUNCTION_USAGE (call_insn))
1323 {
1324 rtx link;
1325
1326 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
1327 link = XEXP (link, 1))
1328 ;
1329
1330 XEXP (link, 1) = call_fusage;
1331 }
1332 else
1333 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
1334
1335 /* Restore the stack. */
1336 #ifdef HAVE_save_stack_nonlocal
1337 if (HAVE_save_stack_nonlocal)
1338 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1339 else
1340 #endif
1341 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1342
1343 OK_DEFER_POP;
1344
1345 /* Return the address of the result block. */
1346 return copy_addr_to_reg (XEXP (result, 0));
1347 }
1348
1349 /* Perform an untyped return. */
1350
1351 static void
1352 expand_builtin_return (result)
1353 rtx result;
1354 {
1355 int size, align, regno;
1356 enum machine_mode mode;
1357 rtx reg;
1358 rtx call_fusage = 0;
1359
1360 #ifdef POINTERS_EXTEND_UNSIGNED
1361 if (GET_MODE (result) != Pmode)
1362 result = convert_memory_address (Pmode, result);
1363 #endif
1364
1365 apply_result_size ();
1366 result = gen_rtx_MEM (BLKmode, result);
1367
1368 #ifdef HAVE_untyped_return
1369 if (HAVE_untyped_return)
1370 {
1371 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1372 emit_barrier ();
1373 return;
1374 }
1375 #endif
1376
1377 /* Restore the return value and note that each value is used. */
1378 size = 0;
1379 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1380 if ((mode = apply_result_mode[regno]) != VOIDmode)
1381 {
1382 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1383 if (size % align != 0)
1384 size = CEIL (size, align) * align;
1385 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1386 emit_move_insn (reg, adjust_address (result, mode, size));
1387
1388 push_to_sequence (call_fusage);
1389 emit_insn (gen_rtx_USE (VOIDmode, reg));
1390 call_fusage = get_insns ();
1391 end_sequence ();
1392 size += GET_MODE_SIZE (mode);
1393 }
1394
1395 /* Put the USE insns before the return. */
1396 emit_insn (call_fusage);
1397
1398 /* Return whatever values were restored by jumping directly to the end
1399 of the function. */
1400 expand_null_return ();
1401 }
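/* A disabled sketch (hypothetical wrapper) of the untyped call/return
   builtins expanded above: forward the current arguments to another
   function and return whatever it returned.  The 64-byte argument-size
   guess is an assumption, not something this file prescribes.  */
#if 0
extern double target_function ();

double
forwarding_wrapper ()
{
  void *args = __builtin_apply_args ();
  void *result = __builtin_apply ((void (*) ()) target_function, args, 64);
  __builtin_return (result);
}
#endif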
1402
1403 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1404
1405 static enum type_class
1406 type_to_class (type)
1407 tree type;
1408 {
1409 switch (TREE_CODE (type))
1410 {
1411 case VOID_TYPE: return void_type_class;
1412 case INTEGER_TYPE: return integer_type_class;
1413 case CHAR_TYPE: return char_type_class;
1414 case ENUMERAL_TYPE: return enumeral_type_class;
1415 case BOOLEAN_TYPE: return boolean_type_class;
1416 case POINTER_TYPE: return pointer_type_class;
1417 case REFERENCE_TYPE: return reference_type_class;
1418 case OFFSET_TYPE: return offset_type_class;
1419 case REAL_TYPE: return real_type_class;
1420 case COMPLEX_TYPE: return complex_type_class;
1421 case FUNCTION_TYPE: return function_type_class;
1422 case METHOD_TYPE: return method_type_class;
1423 case RECORD_TYPE: return record_type_class;
1424 case UNION_TYPE:
1425 case QUAL_UNION_TYPE: return union_type_class;
1426 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1427 ? string_type_class : array_type_class);
1428 case SET_TYPE: return set_type_class;
1429 case FILE_TYPE: return file_type_class;
1430 case LANG_TYPE: return lang_type_class;
1431 default: return no_type_class;
1432 }
1433 }
1434
1435 /* Expand a call to __builtin_classify_type with arguments found in
1436 ARGLIST. */
1437
1438 static rtx
1439 expand_builtin_classify_type (arglist)
1440 tree arglist;
1441 {
1442 if (arglist != 0)
1443 return GEN_INT (type_to_class (TREE_TYPE (TREE_VALUE (arglist))));
1444 return GEN_INT (no_type_class);
1445 }
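/* A small, disabled sketch (hypothetical expressions) of what
   __builtin_classify_type evaluates to, matching the mapping in
   type_to_class above.  */
#if 0
void
classify_type_examples (void)
{
  int p = __builtin_classify_type ((char *) 0);  /* pointer_type_class */
  int r = __builtin_classify_type (1.0);         /* real_type_class */
  int i = __builtin_classify_type (42);          /* integer_type_class */
  (void) p; (void) r; (void) i;
}
#endif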
1446
1447 /* Expand expression EXP, which is a call to __builtin_constant_p. */
1448
1449 static rtx
1450 expand_builtin_constant_p (exp)
1451 tree exp;
1452 {
1453 tree arglist = TREE_OPERAND (exp, 1);
1454 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
1455 rtx tmp;
1456
1457 if (arglist == 0)
1458 return const0_rtx;
1459 arglist = TREE_VALUE (arglist);
1460
1461 /* We have taken care of the easy cases during constant folding. This
1462 case is not obvious, so emit (constant_p_rtx (ARGLIST)) and let CSE get a
1463 chance to see if it can deduce whether ARGLIST is constant. */
1464
1465 tmp = expand_expr (arglist, NULL_RTX, VOIDmode, 0);
1466 tmp = gen_rtx_CONSTANT_P_RTX (value_mode, tmp);
1467 return tmp;
1468 }
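/* A minimal, disabled usage sketch (hypothetical macro and helper) of
   __builtin_constant_p, whose hard cases are deferred to CSE by the
   CONSTANT_P_RTX emitted above.  */
#if 0
extern int runtime_square (int);

#define SQUARE(x) \
  (__builtin_constant_p (x) ? ((x) * (x)) : runtime_square (x))

int
constant_p_example (int n)
{
  int a = SQUARE (3);  /* The builtin evaluates to 1; the product folds.  */
  int b = SQUARE (n);  /* May evaluate to 0 and call runtime_square.  */
  return a + b;
}
#endif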
1469
1470 /* Expand a call to one of the builtin math functions (sin, cos, or sqrt).
1471 Return 0 if a normal call should be emitted rather than expanding the
1472 function in-line. EXP is the expression that is a call to the builtin
1473 function; if convenient, the result should be placed in TARGET.
1474 SUBTARGET may be used as the target for computing one of EXP's operands. */
1475
1476 static rtx
1477 expand_builtin_mathfn (exp, target, subtarget)
1478 tree exp;
1479 rtx target, subtarget;
1480 {
1481 optab builtin_optab;
1482 rtx op0, insns;
1483 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1484 tree arglist = TREE_OPERAND (exp, 1);
1485 enum machine_mode argmode;
1486
1487 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
1488 return 0;
1489
1490 /* Stabilize and compute the argument. */
1491 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
1492 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
1493 {
1494 exp = copy_node (exp);
1495 TREE_OPERAND (exp, 1) = arglist;
1496 /* Wrap the computation of the argument in a SAVE_EXPR. That
1497 way, if we need to expand the argument again (as in the
1498 flag_errno_math case below where we cannot directly set
1499 errno), we will not perform side-effects more than once.
1500 Note that here we're mutating the original EXP as well as the
1501 copy; that's the right thing to do in case the original EXP
1502 is expanded later. */
1503 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
1504 arglist = copy_node (arglist);
1505 }
1506 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
1507
1508 /* Make a suitable register to place result in. */
1509 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
1510
1511 emit_queue ();
1512 start_sequence ();
1513
1514 switch (DECL_FUNCTION_CODE (fndecl))
1515 {
1516 case BUILT_IN_SIN:
1517 case BUILT_IN_SINF:
1518 case BUILT_IN_SINL:
1519 builtin_optab = sin_optab; break;
1520 case BUILT_IN_COS:
1521 case BUILT_IN_COSF:
1522 case BUILT_IN_COSL:
1523 builtin_optab = cos_optab; break;
1524 case BUILT_IN_SQRT:
1525 case BUILT_IN_SQRTF:
1526 case BUILT_IN_SQRTL:
1527 builtin_optab = sqrt_optab; break;
1528 default:
1529 abort ();
1530 }
1531
1532 /* Compute into TARGET.
1533 Set TARGET to wherever the result comes back. */
1534 argmode = TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist)));
1535 target = expand_unop (argmode, builtin_optab, op0, target, 0);
1536
1537 /* If we were unable to expand via the builtin, stop the
1538 sequence (without outputting the insns) and return 0, causing
1539 a call to the library function. */
1540 if (target == 0)
1541 {
1542 end_sequence ();
1543 return 0;
1544 }
1545
1546 /* If errno must be maintained, we must set it to EDOM for NaN results. */
1547
1548 if (flag_errno_math && HONOR_NANS (argmode))
1549 {
1550 rtx lab1;
1551
1552 lab1 = gen_label_rtx ();
1553
1554 /* Test the result; if it is NaN, set errno=EDOM because
1555 the argument was not in the domain. */
1556 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1557 0, lab1);
1558
1559 #ifdef TARGET_EDOM
1560 {
1561 #ifdef GEN_ERRNO_RTX
1562 rtx errno_rtx = GEN_ERRNO_RTX;
1563 #else
1564 rtx errno_rtx
1565 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1566 #endif
1567
1568 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1569 }
1570 #else
1571 /* We can't set errno=EDOM directly; let the library call do it.
1572 Pop the arguments right away in case the call gets deleted. */
1573 NO_DEFER_POP;
1574 expand_call (exp, target, 0);
1575 OK_DEFER_POP;
1576 #endif
1577
1578 emit_label (lab1);
1579 }
1580
1581 /* Output the entire sequence. */
1582 insns = get_insns ();
1583 end_sequence ();
1584 emit_insn (insns);
1585
1586 return target;
1587 }
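/* A disabled sketch (hypothetical caller) of the errno behavior the
   expander above preserves when flag_errno_math is set: a NaN result
   makes the domain error visible through errno.  */
#if 0
#include <errno.h>
#include <math.h>

int
sqrt_domain_example (double x)
{
  double r;

  errno = 0;
  r = sqrt (x);                 /* Expanded inline via sqrt_optab when possible.  */
  if (r != r && errno == EDOM)  /* NaN result: the argument was out of the domain.  */
    return -1;
  return 0;
}
#endif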
1588
1589 /* Expand expression EXP which is a call to the strlen builtin. Return 0
1590 if we failed; the caller should then emit a normal call. Otherwise
1591 try to get the result in TARGET, if convenient. */
1592
1593 static rtx
1594 expand_builtin_strlen (exp, target)
1595 tree exp;
1596 rtx target;
1597 {
1598 tree arglist = TREE_OPERAND (exp, 1);
1599 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
1600
1601 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
1602 return 0;
1603 else
1604 {
1605 rtx pat;
1606 tree src = TREE_VALUE (arglist);
1607
1608 int align
1609 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
1610
1611 rtx result, src_reg, char_rtx, before_strlen;
1612 enum machine_mode insn_mode = value_mode, char_mode;
1613 enum insn_code icode = CODE_FOR_nothing;
1614
1615 /* If SRC is not a pointer type, don't do this operation inline. */
1616 if (align == 0)
1617 return 0;
1618
1619 /* Bail out if we can't compute strlen in the right mode. */
1620 while (insn_mode != VOIDmode)
1621 {
1622 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
1623 if (icode != CODE_FOR_nothing)
1624 break;
1625
1626 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
1627 }
1628 if (insn_mode == VOIDmode)
1629 return 0;
1630
1631 /* Make a place to write the result of the instruction. */
1632 result = target;
1633 if (! (result != 0
1634 && GET_CODE (result) == REG
1635 && GET_MODE (result) == insn_mode
1636 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
1637 result = gen_reg_rtx (insn_mode);
1638
1639 /* Make a place to hold the source address. We will not expand
1640 the actual source until we are sure that the expansion will
1641 not fail -- there are trees that cannot be expanded twice. */
1642 src_reg = gen_reg_rtx (Pmode);
1643
1644 /* Mark the beginning of the strlen sequence so we can emit the
1645 source operand later. */
1646 before_strlen = get_last_insn ();
1647
1648 char_rtx = const0_rtx;
1649 char_mode = insn_data[(int) icode].operand[2].mode;
1650 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
1651 char_mode))
1652 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
1653
1654 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
1655 char_rtx, GEN_INT (align));
1656 if (! pat)
1657 return 0;
1658 emit_insn (pat);
1659
1660 /* Now that we are assured of success, expand the source. */
1661 start_sequence ();
1662 pat = memory_address (BLKmode,
1663 expand_expr (src, src_reg, ptr_mode, EXPAND_SUM));
1664 if (pat != src_reg)
1665 emit_move_insn (src_reg, pat);
1666 pat = get_insns ();
1667 end_sequence ();
1668
1669 if (before_strlen)
1670 emit_insn_after (pat, before_strlen);
1671 else
1672 emit_insn_before (pat, get_insns ());
1673
1674 /* Return the value in the proper mode for this function. */
1675 if (GET_MODE (result) == value_mode)
1676 target = result;
1677 else if (target != 0)
1678 convert_move (target, result, 0);
1679 else
1680 target = convert_to_mode (value_mode, result, 0);
1681
1682 return target;
1683 }
1684 }
1685
1686 /* Expand a call to the strstr builtin. Return 0 if we failed; the
1687 caller should then emit a normal call. Otherwise try to get the result
1688 in TARGET, if convenient (and in mode MODE if that's convenient). */
1689
1690 static rtx
1691 expand_builtin_strstr (arglist, target, mode)
1692 tree arglist;
1693 rtx target;
1694 enum machine_mode mode;
1695 {
1696 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1697 return 0;
1698 else
1699 {
1700 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
1701 tree fn;
1702 const char *p1, *p2;
1703
1704 p2 = c_getstr (s2);
1705 if (p2 == NULL)
1706 return 0;
1707
1708 p1 = c_getstr (s1);
1709 if (p1 != NULL)
1710 {
1711 const char *r = strstr (p1, p2);
1712
1713 if (r == NULL)
1714 return const0_rtx;
1715
1716 /* Return an offset into the constant string argument. */
1717 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
1718 s1, ssize_int (r - p1))),
1719 target, mode, EXPAND_NORMAL);
1720 }
1721
1722 if (p2[0] == '\0')
1723 return expand_expr (s1, target, mode, EXPAND_NORMAL);
1724
1725 if (p2[1] != '\0')
1726 return 0;
1727
1728 fn = built_in_decls[BUILT_IN_STRCHR];
1729 if (!fn)
1730 return 0;
1731
1732 /* New argument list transforming strstr(s1, s2) to
1733 strchr(s1, s2[0]). */
1734 arglist =
1735 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
1736 arglist = tree_cons (NULL_TREE, s1, arglist);
1737 return expand_expr (build_function_call_expr (fn, arglist),
1738 target, mode, EXPAND_NORMAL);
1739 }
1740 }
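
/* Illustrative sketch of the strstr transformations above (BUF is a
   hypothetical char * argument):

     strstr ("hello", "lo")   folds to   "hello" + 3
     strstr (BUF, "")         folds to   BUF
     strstr (BUF, "x")        becomes    strchr (BUF, 'x')
     strstr (BUF, "xy")       is left as a normal library call.  */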
1741
1742 /* Expand a call to the strchr builtin. Return 0 if we failed; the
1743 caller should emit a normal call. Otherwise try to get the result
1744 in TARGET, if convenient (and in mode MODE if that's convenient). */
1745
1746 static rtx
1747 expand_builtin_strchr (arglist, target, mode)
1748 tree arglist;
1749 rtx target;
1750 enum machine_mode mode;
1751 {
1752 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
1753 return 0;
1754 else
1755 {
1756 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
1757 const char *p1;
1758
1759 if (TREE_CODE (s2) != INTEGER_CST)
1760 return 0;
1761
1762 p1 = c_getstr (s1);
1763 if (p1 != NULL)
1764 {
1765 char c;
1766 const char *r;
1767
1768 if (target_char_cast (s2, &c))
1769 return 0;
1770
1771 r = strchr (p1, c);
1772
1773 if (r == NULL)
1774 return const0_rtx;
1775
1776 /* Return an offset into the constant string argument. */
1777 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
1778 s1, ssize_int (r - p1))),
1779 target, mode, EXPAND_NORMAL);
1780 }
1781
1782 /* FIXME: Should use here strchrM optab so that ports can optimize
1783 this. */
1784 return 0;
1785 }
1786 }
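
/* Illustrative sketch of the strchr handling above (BUF is a
   hypothetical char * argument):

     strchr ("hello", 'l')    folds to   "hello" + 2
     strchr ("hello", 'z')    folds to   a null pointer constant
     strchr (BUF, 'x')        is left as a normal library call, since
                              no strchrM optab exists yet (see the
                              FIXME above).  */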
1787
1788 /* Expand a call to the strrchr builtin. Return 0 if we failed; the
1789 caller should emit a normal call. Otherwise try to get the result
1790 in TARGET, if convenient (and in mode MODE if that's convenient). */
1791
1792 static rtx
1793 expand_builtin_strrchr (arglist, target, mode)
1794 tree arglist;
1795 rtx target;
1796 enum machine_mode mode;
1797 {
1798 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
1799 return 0;
1800 else
1801 {
1802 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
1803 tree fn;
1804 const char *p1;
1805
1806 if (TREE_CODE (s2) != INTEGER_CST)
1807 return 0;
1808
1809 p1 = c_getstr (s1);
1810 if (p1 != NULL)
1811 {
1812 char c;
1813 const char *r;
1814
1815 if (target_char_cast (s2, &c))
1816 return 0;
1817
1818 r = strrchr (p1, c);
1819
1820 if (r == NULL)
1821 return const0_rtx;
1822
1823 /* Return an offset into the constant string argument. */
1824 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
1825 s1, ssize_int (r - p1))),
1826 target, mode, EXPAND_NORMAL);
1827 }
1828
1829 if (! integer_zerop (s2))
1830 return 0;
1831
1832 fn = built_in_decls[BUILT_IN_STRCHR];
1833 if (!fn)
1834 return 0;
1835
1836 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
1837 return expand_expr (build_function_call_expr (fn, arglist),
1838 target, mode, EXPAND_NORMAL);
1839 }
1840 }
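
/* Illustrative sketch of the strrchr transformations above (BUF is a
   hypothetical char * argument):

     strrchr ("hello", 'l')   folds to   "hello" + 3
     strrchr (BUF, '\0')      becomes    strchr (BUF, '\0')
     strrchr (BUF, 'x')       is left as a normal library call.  */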
1841
1842 /* Expand a call to the strpbrk builtin. Return 0 if we failed; the
1843 caller should emit a normal call. Otherwise try to get the result
1844 in TARGET, if convenient (and in mode MODE if that's convenient). */
1845
1846 static rtx
1847 expand_builtin_strpbrk (arglist, target, mode)
1848 tree arglist;
1849 rtx target;
1850 enum machine_mode mode;
1851 {
1852 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1853 return 0;
1854 else
1855 {
1856 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
1857 tree fn;
1858 const char *p1, *p2;
1859
1860 p2 = c_getstr (s2);
1861 if (p2 == NULL)
1862 return 0;
1863
1864 p1 = c_getstr (s1);
1865 if (p1 != NULL)
1866 {
1867 const char *r = strpbrk (p1, p2);
1868
1869 if (r == NULL)
1870 return const0_rtx;
1871
1872 /* Return an offset into the constant string argument. */
1873 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
1874 s1, ssize_int (r - p1))),
1875 target, mode, EXPAND_NORMAL);
1876 }
1877
1878 if (p2[0] == '\0')
1879 {
1880 /* strpbrk(x, "") == NULL.
1881 Evaluate and ignore the arguments in case they have
1882 side-effects. */
1883 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
1884 return const0_rtx;
1885 }
1886
1887 if (p2[1] != '\0')
1888 return 0; /* Really call strpbrk. */
1889
1890 fn = built_in_decls[BUILT_IN_STRCHR];
1891 if (!fn)
1892 return 0;
1893
1894 /* New argument list transforming strpbrk(s1, s2) to
1895 strchr(s1, s2[0]). */
1896 arglist =
1897 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
1898 arglist = tree_cons (NULL_TREE, s1, arglist);
1899 return expand_expr (build_function_call_expr (fn, arglist),
1900 target, mode, EXPAND_NORMAL);
1901 }
1902 }
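
/* Illustrative sketch of the strpbrk transformations above (BUF is a
   hypothetical char * argument):

     strpbrk ("hello", "el")  folds to   "hello" + 1
     strpbrk (BUF, "")        folds to   a null pointer, after BUF has
                              been evaluated for side effects
     strpbrk (BUF, "x")       becomes    strchr (BUF, 'x')
     strpbrk (BUF, "xy")      is left as a normal library call.  */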
1903
1904 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
1905 bytes from constant string DATA + OFFSET and return it as target
1906 constant. */
1907
1908 static rtx
1909 builtin_memcpy_read_str (data, offset, mode)
1910 PTR data;
1911 HOST_WIDE_INT offset;
1912 enum machine_mode mode;
1913 {
1914 const char *str = (const char *) data;
1915
1916 if (offset < 0
1917 || ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
1918 > strlen (str) + 1))
1919 abort (); /* Attempt to read past the end of constant string. */
1920
1921 return c_readstr (str + offset, mode);
1922 }
1923
1924 /* Expand a call to the memcpy builtin, with arguments in ARGLIST.
1925 Return 0 if we failed; the caller should emit a normal call. Otherwise
1926 try to get the result in TARGET, if convenient (and in mode MODE if
1927 that's convenient). */
1928
1929 static rtx
1930 expand_builtin_memcpy (arglist, target, mode)
1931 tree arglist;
1932 rtx target;
1933 enum machine_mode mode;
1934 {
1935 if (!validate_arglist (arglist,
1936 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
1937 return 0;
1938 else
1939 {
1940 tree dest = TREE_VALUE (arglist);
1941 tree src = TREE_VALUE (TREE_CHAIN (arglist));
1942 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
1943 const char *src_str;
1944
1945 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
1946 unsigned int dest_align
1947 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
1948 rtx dest_mem, src_mem, dest_addr, len_rtx;
1949
1950 /* If DEST is not a pointer type, call the normal function. */
1951 if (dest_align == 0)
1952 return 0;
1953
1954 /* If the LEN parameter is zero, return DEST. */
1955 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
1956 {
1957 /* Evaluate and ignore SRC in case it has side-effects. */
1958 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
1959 return expand_expr (dest, target, mode, EXPAND_NORMAL);
1960 }
1961
1962 /* If SRC is not a pointer type, don't do this
1963 operation in-line. */
1964 if (src_align == 0)
1965 return 0;
1966
1967 dest_mem = get_memory_rtx (dest);
1968 set_mem_align (dest_mem, dest_align);
1969 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
1970 src_str = c_getstr (src);
1971
1972 /* If SRC is a string constant and block move would be done
1973 by pieces, we can avoid loading the string from memory
1974 and only store the computed constants. */
1975 if (src_str
1976 && GET_CODE (len_rtx) == CONST_INT
1977 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
1978 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
1979 (PTR) src_str, dest_align))
1980 {
1981 store_by_pieces (dest_mem, INTVAL (len_rtx),
1982 builtin_memcpy_read_str,
1983 (PTR) src_str, dest_align);
1984 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
1985 #ifdef POINTERS_EXTEND_UNSIGNED
1986 if (GET_MODE (dest_mem) != ptr_mode)
1987 dest_mem = convert_memory_address (ptr_mode, dest_mem);
1988 #endif
1989 return dest_mem;
1990 }
1991
1992 src_mem = get_memory_rtx (src);
1993 set_mem_align (src_mem, src_align);
1994
1995 /* Copy word part most expediently. */
1996 dest_addr = emit_block_move (dest_mem, src_mem, len_rtx);
1997
1998 if (dest_addr == 0)
1999 {
2000 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2001 #ifdef POINTERS_EXTEND_UNSIGNED
2002 if (GET_MODE (dest_addr) != ptr_mode)
2003 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2004 #endif
2005 }
2006
2007 return dest_addr;
2008 }
2009 }
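
/* Illustrative sketch of the memcpy expansion above (BUF, SRC and N
   are hypothetical arguments):

     memcpy (BUF, SRC, 0)      evaluates SRC and returns BUF
     memcpy (BUF, "abcd", 5)   can be stored by pieces directly from
                               the constant string, without loading
                               "abcd" from memory
     memcpy (BUF, SRC, N)      otherwise becomes a block move via
                               emit_block_move.  */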
2010
2011 /* Expand expression EXP, which is a call to the strcpy builtin. Return 0
2012 if we failed; the caller should emit a normal call. Otherwise try to get
2013 the result in TARGET, if convenient (and in mode MODE if that's
2014 convenient). */
2015
2016 static rtx
2017 expand_builtin_strcpy (exp, target, mode)
2018 tree exp;
2019 rtx target;
2020 enum machine_mode mode;
2021 {
2022 tree arglist = TREE_OPERAND (exp, 1);
2023 tree fn, len;
2024
2025 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2026 return 0;
2027
2028 fn = built_in_decls[BUILT_IN_MEMCPY];
2029 if (!fn)
2030 return 0;
2031
2032 len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
2033 if (len == 0)
2034 return 0;
2035
2036 len = size_binop (PLUS_EXPR, len, ssize_int (1));
2037 chainon (arglist, build_tree_list (NULL_TREE, len));
2038 return expand_expr (build_function_call_expr (fn, arglist),
2039 target, mode, EXPAND_NORMAL);
2040 }
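
/* Illustrative sketch of the strcpy transformation above: when the
   length of the source string is known at compile time, a call such
   as

     strcpy (BUF, "abc")

   becomes

     memcpy (BUF, "abc", 4)

   where 4 is strlen ("abc") + 1, so the terminating NUL is copied as
   well (BUF is a hypothetical char * argument).  */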
2041
2042 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2043 bytes from constant string DATA + OFFSET and return it as target
2044 constant. */
2045
2046 static rtx
2047 builtin_strncpy_read_str (data, offset, mode)
2048 PTR data;
2049 HOST_WIDE_INT offset;
2050 enum machine_mode mode;
2051 {
2052 const char *str = (const char *) data;
2053
2054 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
2055 return const0_rtx;
2056
2057 return c_readstr (str + offset, mode);
2058 }
2059
2060 /* Expand expression EXP, which is a call to the strncpy builtin. Return 0
2061 if we failed; the caller should emit a normal call. */
2062
2063 static rtx
2064 expand_builtin_strncpy (arglist, target, mode)
2065 tree arglist;
2066 rtx target;
2067 enum machine_mode mode;
2068 {
2069 if (!validate_arglist (arglist,
2070 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2071 return 0;
2072 else
2073 {
2074 tree slen = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
2075 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2076 tree fn;
2077
2078 /* We must be passed a constant len parameter. */
2079 if (TREE_CODE (len) != INTEGER_CST)
2080 return 0;
2081
2082 /* If the len parameter is zero, return the dst parameter. */
2083 if (integer_zerop (len))
2084 {
2085 /* Evaluate and ignore the src argument in case it has
2086 side-effects. */
2087 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
2088 VOIDmode, EXPAND_NORMAL);
2089 /* Return the dst parameter. */
2090 return expand_expr (TREE_VALUE (arglist), target, mode,
2091 EXPAND_NORMAL);
2092 }
2093
2094 /* Now, we must be passed a constant src ptr parameter. */
2095 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
2096 return 0;
2097
2098 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
2099
2100 /* We're required to pad with trailing zeros if the requested
2101 len is greater than strlen(s2)+1. In that case, try to
2102 use store_by_pieces; if that fails, punt. */
2103 if (tree_int_cst_lt (slen, len))
2104 {
2105 tree dest = TREE_VALUE (arglist);
2106 unsigned int dest_align
2107 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2108 const char *p = c_getstr (TREE_VALUE (TREE_CHAIN (arglist)));
2109 rtx dest_mem;
2110
2111 if (!p || dest_align == 0 || !host_integerp (len, 1)
2112 || !can_store_by_pieces (tree_low_cst (len, 1),
2113 builtin_strncpy_read_str,
2114 (PTR) p, dest_align))
2115 return 0;
2116
2117 dest_mem = get_memory_rtx (dest);
2118 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2119 builtin_strncpy_read_str,
2120 (PTR) p, dest_align);
2121 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2122 #ifdef POINTERS_EXTEND_UNSIGNED
2123 if (GET_MODE (dest_mem) != ptr_mode)
2124 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2125 #endif
2126 return dest_mem;
2127 }
2128
2129 /* OK, transform into builtin memcpy. */
2130 fn = built_in_decls[BUILT_IN_MEMCPY];
2131 if (!fn)
2132 return 0;
2133 return expand_expr (build_function_call_expr (fn, arglist),
2134 target, mode, EXPAND_NORMAL);
2135 }
2136 }
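
/* Illustrative sketch of the strncpy handling above (BUF and SRC are
   hypothetical arguments):

     strncpy (BUF, SRC, 0)        evaluates SRC and returns BUF
     strncpy (BUF, "ab", 5)       can be stored by pieces as the five
                                  bytes 'a', 'b', '\0', '\0', '\0'
     strncpy (BUF, "abcdef", 3)   becomes memcpy (BUF, "abcdef", 3),
                                  since no zero padding is needed when
                                  the requested length does not exceed
                                  the source length.  */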
2137
2138 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2139 bytes from constant string DATA + OFFSET and return it as target
2140 constant. */
2141
2142 static rtx
2143 builtin_memset_read_str (data, offset, mode)
2144 PTR data;
2145 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2146 enum machine_mode mode;
2147 {
2148 const char *c = (const char *) data;
2149 char *p = alloca (GET_MODE_SIZE (mode));
2150
2151 memset (p, *c, GET_MODE_SIZE (mode));
2152
2153 return c_readstr (p, mode);
2154 }
2155
2156 /* Callback routine for store_by_pieces. Return the RTL of a register
2157 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
2158 char value given in the RTL register data. For example, if mode is
2159 4 bytes wide, return the RTL for 0x01010101*data. */
2160
2161 static rtx
2162 builtin_memset_gen_str (data, offset, mode)
2163 PTR data;
2164 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2165 enum machine_mode mode;
2166 {
2167 rtx target, coeff;
2168 size_t size;
2169 char *p;
2170
2171 size = GET_MODE_SIZE (mode);
2172 if (size == 1)
2173 return (rtx) data;
2174
2175 p = alloca (size);
2176 memset (p, 1, size);
2177 coeff = c_readstr (p, mode);
2178
2179 target = convert_to_mode (mode, (rtx)data, 1);
2180 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
2181 return force_reg (mode, target);
2182 }
2183
2184 /* Expand expression EXP, which is a call to the memset builtin. Return 0
2185 if we failed; the caller should emit a normal call. Otherwise try to get
2186 the result in TARGET, if convenient (and in mode MODE if that's
2187 convenient). */
2188
2189 static rtx
2190 expand_builtin_memset (exp, target, mode)
2191 tree exp;
2192 rtx target;
2193 enum machine_mode mode;
2194 {
2195 tree arglist = TREE_OPERAND (exp, 1);
2196
2197 if (!validate_arglist (arglist,
2198 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
2199 return 0;
2200 else
2201 {
2202 tree dest = TREE_VALUE (arglist);
2203 tree val = TREE_VALUE (TREE_CHAIN (arglist));
2204 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2205 char c;
2206
2207 unsigned int dest_align
2208 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2209 rtx dest_mem, dest_addr, len_rtx;
2210
2211 /* If DEST is not a pointer type, don't do this
2212 operation in-line. */
2213 if (dest_align == 0)
2214 return 0;
2215
2216 /* If the LEN parameter is zero, return DEST. */
2217 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2218 {
2219 /* Evaluate and ignore VAL in case it has side-effects. */
2220 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
2221 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2222 }
2223
2224 if (TREE_CODE (val) != INTEGER_CST)
2225 {
2226 rtx val_rtx;
2227
2228 if (!host_integerp (len, 1))
2229 return 0;
2230
2231 if (optimize_size && tree_low_cst (len, 1) > 1)
2232 return 0;
2233
2234 /* Assume that we can memset by pieces if we can store the
2235 coefficients by pieces (in the required modes). We can't
2236 pass builtin_memset_gen_str as that emits RTL. */
2237 c = 1;
2238 if (!can_store_by_pieces (tree_low_cst (len, 1),
2239 builtin_memset_read_str,
2240 (PTR) &c, dest_align))
2241 return 0;
2242
2243 val = fold (build1 (CONVERT_EXPR, unsigned_char_type_node, val));
2244 val_rtx = expand_expr (val, NULL_RTX, VOIDmode, 0);
2245 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
2246 val_rtx);
2247 dest_mem = get_memory_rtx (dest);
2248 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2249 builtin_memset_gen_str,
2250 (PTR)val_rtx, dest_align);
2251 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2252 #ifdef POINTERS_EXTEND_UNSIGNED
2253 if (GET_MODE (dest_mem) != ptr_mode)
2254 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2255 #endif
2256 return dest_mem;
2257 }
2258
2259 if (target_char_cast (val, &c))
2260 return 0;
2261
2262 if (c)
2263 {
2264 if (!host_integerp (len, 1))
2265 return 0;
2266 if (!can_store_by_pieces (tree_low_cst (len, 1),
2267 builtin_memset_read_str, (PTR) &c,
2268 dest_align))
2269 return 0;
2270
2271 dest_mem = get_memory_rtx (dest);
2272 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2273 builtin_memset_read_str,
2274 (PTR) &c, dest_align);
2275 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2276 #ifdef POINTERS_EXTEND_UNSIGNED
2277 if (GET_MODE (dest_mem) != ptr_mode)
2278 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2279 #endif
2280 return dest_mem;
2281 }
2282
2283 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2284
2285 dest_mem = get_memory_rtx (dest);
2286 set_mem_align (dest_mem, dest_align);
2287 dest_addr = clear_storage (dest_mem, len_rtx);
2288
2289 if (dest_addr == 0)
2290 {
2291 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2292 #ifdef POINTERS_EXTEND_UNSIGNED
2293 if (GET_MODE (dest_addr) != ptr_mode)
2294 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2295 #endif
2296 }
2297
2298 return dest_addr;
2299 }
2300 }
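
/* Illustrative sketch of the memset expansion above (BUF is a
   hypothetical char * argument, C a non-constant int, N a length):

     memset (BUF, C, 0)      evaluates C and returns BUF
     memset (BUF, 0, N)      becomes clear_storage
     memset (BUF, 'x', 16)   can be stored by pieces with constant
                             words of repeated 'x' bytes
     memset (BUF, C, 16)     can be stored by pieces with registers
                             holding C replicated into every byte, as
                             built by builtin_memset_gen_str above.  */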
2301
2302 /* Expand expression EXP, which is a call to the bzero builtin. Return 0
2303 if we failed; the caller should emit a normal call. */
2304
2305 static rtx
2306 expand_builtin_bzero (exp)
2307 tree exp;
2308 {
2309 tree arglist = TREE_OPERAND (exp, 1);
2310 tree dest, size, newarglist;
2311 rtx result;
2312
2313 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2314 return NULL_RTX;
2315
2316 dest = TREE_VALUE (arglist);
2317 size = TREE_VALUE (TREE_CHAIN (arglist));
2318
2319 /* New argument list transforming bzero(ptr x, int y) to
2320 memset(ptr x, int 0, size_t y). This is done this way
2321 so that if it isn't expanded inline, we fall back to
2322 calling bzero instead of memset. */
2323
2324 newarglist = build_tree_list (NULL_TREE, convert (sizetype, size));
2325 newarglist = tree_cons (NULL_TREE, integer_zero_node, newarglist);
2326 newarglist = tree_cons (NULL_TREE, dest, newarglist);
2327
2328 TREE_OPERAND (exp, 1) = newarglist;
2329 result = expand_builtin_memset (exp, const0_rtx, VOIDmode);
2330
2331 /* Always restore the original arguments. */
2332 TREE_OPERAND (exp, 1) = arglist;
2333
2334 return result;
2335 }
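
/* Illustrative sketch of the bzero transformation above (BUF and N
   are hypothetical arguments): the call

     bzero (BUF, N)

   is expanded as if it were

     memset (BUF, 0, (size_t) N)

   and the original argument list is restored afterwards, so a call
   that is not expanded inline still goes to bzero.  */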
2336
2337 /* Expand expression EXP, which is a call to the memcmp or the strcmp builtin.
2338 ARGLIST is the argument list for this call. Return 0 if we failed and the
2339 caller should emit a normal call, otherwise try to get the result in
2340 TARGET, if convenient (and in mode MODE, if that's convenient). */
2341
2342 static rtx
2343 expand_builtin_memcmp (exp, arglist, target, mode)
2344 tree exp ATTRIBUTE_UNUSED;
2345 tree arglist;
2346 rtx target;
2347 enum machine_mode mode;
2348 {
2349 tree arg1, arg2, len;
2350 const char *p1, *p2;
2351
2352 if (!validate_arglist (arglist,
2353 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2354 return 0;
2355
2356 arg1 = TREE_VALUE (arglist);
2357 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
2358 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2359
2360 /* If the len parameter is zero, return zero. */
2361 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2362 {
2363 /* Evaluate and ignore arg1 and arg2 in case they have
2364 side-effects. */
2365 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2366 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
2367 return const0_rtx;
2368 }
2369
2370 p1 = c_getstr (arg1);
2371 p2 = c_getstr (arg2);
2372
2373 /* If all arguments are constant, and the value of len is not greater
2374 than the lengths of arg1 and arg2, evaluate at compile-time. */
2375 if (host_integerp (len, 1) && p1 && p2
2376 && compare_tree_int (len, strlen (p1) + 1) <= 0
2377 && compare_tree_int (len, strlen (p2) + 1) <= 0)
2378 {
2379 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
2380
2381 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
2382 }
2383
2384 /* If the len parameter is one, return an expression corresponding to
2385 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
2386 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
2387 {
2388 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2389 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
2390 tree ind1 =
2391 fold (build1 (CONVERT_EXPR, integer_type_node,
2392 build1 (INDIRECT_REF, cst_uchar_node,
2393 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
2394 tree ind2 =
2395 fold (build1 (CONVERT_EXPR, integer_type_node,
2396 build1 (INDIRECT_REF, cst_uchar_node,
2397 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
2398 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
2399 return expand_expr (result, target, mode, EXPAND_NORMAL);
2400 }
2401
2402 #ifdef HAVE_cmpstrsi
2403 {
2404 rtx arg1_rtx, arg2_rtx, arg3_rtx;
2405 rtx result;
2406 rtx insn;
2407
2408 int arg1_align
2409 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2410 int arg2_align
2411 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2412 enum machine_mode insn_mode
2413 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
2414
2415 /* If either argument is not a pointer type, just call the function. */
2416 if (arg1_align == 0 || arg2_align == 0)
2417 return 0;
2418
2419 /* Make a place to write the result of the instruction. */
2420 result = target;
2421 if (! (result != 0
2422 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
2423 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
2424 result = gen_reg_rtx (insn_mode);
2425
2426 arg1_rtx = get_memory_rtx (arg1);
2427 arg2_rtx = get_memory_rtx (arg2);
2428 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2429 if (!HAVE_cmpstrsi)
2430 insn = NULL_RTX;
2431 else
2432 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
2433 GEN_INT (MIN (arg1_align, arg2_align)));
2434
2435 if (insn)
2436 emit_insn (insn);
2437 else
2438 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
2439 TYPE_MODE (integer_type_node), 3,
2440 XEXP (arg1_rtx, 0), Pmode,
2441 XEXP (arg2_rtx, 0), Pmode,
2442 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
2443 TREE_UNSIGNED (sizetype)),
2444 TYPE_MODE (sizetype));
2445
2446 /* Return the value in the proper mode for this function. */
2447 mode = TYPE_MODE (TREE_TYPE (exp));
2448 if (GET_MODE (result) == mode)
2449 return result;
2450 else if (target != 0)
2451 {
2452 convert_move (target, result, 0);
2453 return target;
2454 }
2455 else
2456 return convert_to_mode (mode, result, 0);
2457 }
2458 #endif
2459
2460 return 0;
2461 }
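
/* Illustrative sketch of the memcmp expansion above (P and Q are
   hypothetical pointer arguments):

     memcmp (P, Q, 0)           evaluates P and Q and folds to 0
     memcmp ("abc", "abd", 3)   folds to -1 at compile time
     memcmp (P, Q, 1)           becomes *(const unsigned char *) P
                                - *(const unsigned char *) Q

   otherwise the cmpstrsi pattern is used when the target provides
   one, or a library call to memcmp is emitted.  */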
2462
2463 /* Expand expression EXP, which is a call to the strcmp builtin. Return 0
2464 if we failed; the caller should emit a normal call. Otherwise try to get
2465 the result in TARGET, if convenient. */
2466
2467 static rtx
2468 expand_builtin_strcmp (exp, target, mode)
2469 tree exp;
2470 rtx target;
2471 enum machine_mode mode;
2472 {
2473 tree arglist = TREE_OPERAND (exp, 1);
2474 tree arg1, arg2, len, len2, fn;
2475 const char *p1, *p2;
2476
2477 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2478 return 0;
2479
2480 arg1 = TREE_VALUE (arglist);
2481 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
2482
2483 p1 = c_getstr (arg1);
2484 p2 = c_getstr (arg2);
2485
2486 if (p1 && p2)
2487 {
2488 const int i = strcmp (p1, p2);
2489 return (i < 0 ? constm1_rtx : (i > 0 ? const1_rtx : const0_rtx));
2490 }
2491
2492 /* If either arg is "", return an expression corresponding to
2493 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
2494 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
2495 {
2496 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2497 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
2498 tree ind1 =
2499 fold (build1 (CONVERT_EXPR, integer_type_node,
2500 build1 (INDIRECT_REF, cst_uchar_node,
2501 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
2502 tree ind2 =
2503 fold (build1 (CONVERT_EXPR, integer_type_node,
2504 build1 (INDIRECT_REF, cst_uchar_node,
2505 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
2506 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
2507 return expand_expr (result, target, mode, EXPAND_NORMAL);
2508 }
2509
2510 len = c_strlen (arg1);
2511 len2 = c_strlen (arg2);
2512
2513 if (len)
2514 len = size_binop (PLUS_EXPR, ssize_int (1), len);
2515
2516 if (len2)
2517 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
2518
2519 /* If we don't have a constant length for the first, use the length
2520 of the second, if we know it. We don't require a constant for
2521 this case; some cost analysis could be done if both are available
2522 but neither is constant. For now, assume they're equally cheap
2523 unless one has side effects.
2524
2525 If both strings have constant lengths, use the smaller. This
2526 could arise if optimization results in strcmp being called with
2527 two fixed strings, or if the code was machine-generated. We should
2528 add some code to the `memcmp' handler below to deal with such
2529 situations, someday. */
2530
2531 if (!len || TREE_CODE (len) != INTEGER_CST)
2532 {
2533 if (len2 && !TREE_SIDE_EFFECTS (len2))
2534 len = len2;
2535 else if (len == 0)
2536 return 0;
2537 }
2538 else if (len2 && TREE_CODE (len2) == INTEGER_CST
2539 && tree_int_cst_lt (len2, len))
2540 len = len2;
2541
2542 /* If both arguments have side effects, we cannot optimize. */
2543 if (TREE_SIDE_EFFECTS (len))
2544 return 0;
2545
2546 fn = built_in_decls[BUILT_IN_MEMCMP];
2547 if (!fn)
2548 return 0;
2549
2550 chainon (arglist, build_tree_list (NULL_TREE, len));
2551 return expand_expr (build_function_call_expr (fn, arglist),
2552 target, mode, EXPAND_NORMAL);
2553 }
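
/* Illustrative sketch of the strcmp handling above (S is a
   hypothetical char * argument):

     strcmp ("ab", "ac")   folds to -1 at compile time
     strcmp (S, "")        becomes *(const unsigned char *) S
                           - *(const unsigned char *) "", i.e. the
                           first byte of S
     strcmp (S, "abc")     becomes memcmp (S, "abc", 4), using the
                           length of the constant string plus one.  */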
2554
2555 /* Expand expression EXP, which is a call to the strncmp builtin. Return 0
2556 if we failed; the caller should emit a normal call. Otherwise try to get
2557 the result in TARGET, if convenient. */
2558
2559 static rtx
2560 expand_builtin_strncmp (exp, target, mode)
2561 tree exp;
2562 rtx target;
2563 enum machine_mode mode;
2564 {
2565 tree arglist = TREE_OPERAND (exp, 1);
2566 tree fn, newarglist, len = 0;
2567 tree arg1, arg2, arg3;
2568 const char *p1, *p2;
2569
2570 if (!validate_arglist (arglist,
2571 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2572 return 0;
2573
2574 arg1 = TREE_VALUE (arglist);
2575 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
2576 arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2577
2578 /* If the len parameter is zero, return zero. */
2579 if (host_integerp (arg3, 1) && tree_low_cst (arg3, 1) == 0)
2580 {
2581 /* Evaluate and ignore arg1 and arg2 in case they have
2582 side-effects. */
2583 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2584 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
2585 return const0_rtx;
2586 }
2587
2588 p1 = c_getstr (arg1);
2589 p2 = c_getstr (arg2);
2590
2591 /* If all arguments are constant, evaluate at compile-time. */
2592 if (host_integerp (arg3, 1) && p1 && p2)
2593 {
2594 const int r = strncmp (p1, p2, tree_low_cst (arg3, 1));
2595 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
2596 }
2597
2598 /* If len == 1 or (either string parameter is "" and (len >= 1)),
2599 return (*(const u_char*)arg1 - *(const u_char*)arg2). */
2600 if (host_integerp (arg3, 1)
2601 && (tree_low_cst (arg3, 1) == 1
2602 || (tree_low_cst (arg3, 1) > 1
2603 && ((p1 && *p1 == '\0') || (p2 && *p2 == '\0')))))
2604 {
2605 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2606 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
2607 tree ind1 =
2608 fold (build1 (CONVERT_EXPR, integer_type_node,
2609 build1 (INDIRECT_REF, cst_uchar_node,
2610 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
2611 tree ind2 =
2612 fold (build1 (CONVERT_EXPR, integer_type_node,
2613 build1 (INDIRECT_REF, cst_uchar_node,
2614 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
2615 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
2616 return expand_expr (result, target, mode, EXPAND_NORMAL);
2617 }
2618
2619 /* If c_strlen can determine an expression for one of the string
2620 lengths, and it doesn't have side effects, then call
2621 expand_builtin_memcmp() using length MIN(strlen(string)+1, arg3). */
2622
2623 /* Perhaps one of the strings is really constant, if so prefer
2624 that constant length over the other string's length. */
2625 if (p1)
2626 len = c_strlen (arg1);
2627 else if (p2)
2628 len = c_strlen (arg2);
2629
2630 /* If we still don't have a len, try either string arg as long
2631 as they don't have side effects. */
2632 if (!len && !TREE_SIDE_EFFECTS (arg1))
2633 len = c_strlen (arg1);
2634 if (!len && !TREE_SIDE_EFFECTS (arg2))
2635 len = c_strlen (arg2);
2636 /* If we still don't have a length, punt. */
2637 if (!len)
2638 return 0;
2639
2640 fn = built_in_decls[BUILT_IN_MEMCMP];
2641 if (!fn)
2642 return 0;
2643
2644 /* Add one to the string length. */
2645 len = fold (size_binop (PLUS_EXPR, len, ssize_int (1)));
2646
2647 /* The actual new length parameter is MIN(len,arg3). */
2648 len = fold (build (MIN_EXPR, TREE_TYPE (len), len, arg3));
2649
2650 newarglist = build_tree_list (NULL_TREE, len);
2651 newarglist = tree_cons (NULL_TREE, arg2, newarglist);
2652 newarglist = tree_cons (NULL_TREE, arg1, newarglist);
2653 return expand_expr (build_function_call_expr (fn, newarglist),
2654 target, mode, EXPAND_NORMAL);
2655 }
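
/* Illustrative sketch of the strncmp handling above (S1 and S2 are
   hypothetical char * arguments):

     strncmp (S1, S2, 0)       folds to 0
     strncmp ("ab", "ac", 1)   folds to 0 at compile time
     strncmp (S1, "abc", 5)    becomes memcmp (S1, "abc", 4), where 4
                               is MIN (strlen ("abc") + 1, 5).  */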
2656
2657 /* Expand expression EXP, which is a call to the strcat builtin.
2658 Return 0 if we failed; the caller should emit a normal call.
2659 Otherwise try to get the result in TARGET, if convenient. */
2660
2661 static rtx
2662 expand_builtin_strcat (arglist, target, mode)
2663 tree arglist;
2664 rtx target;
2665 enum machine_mode mode;
2666 {
2667 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2668 return 0;
2669 else
2670 {
2671 tree dst = TREE_VALUE (arglist),
2672 src = TREE_VALUE (TREE_CHAIN (arglist));
2673 const char *p = c_getstr (src);
2674
2675 /* If the string length is zero, return the dst parameter. */
2676 if (p && *p == '\0')
2677 return expand_expr (dst, target, mode, EXPAND_NORMAL);
2678
2679 return 0;
2680 }
2681 }
2682
2683 /* Expand expression EXP, which is a call to the strncat builtin.
2684 Return 0 if we failed; the caller should emit a normal call.
2685 Otherwise try to get the result in TARGET, if convenient. */
2686
2687 static rtx
2688 expand_builtin_strncat (arglist, target, mode)
2689 tree arglist;
2690 rtx target;
2691 enum machine_mode mode;
2692 {
2693 if (!validate_arglist (arglist,
2694 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2695 return 0;
2696 else
2697 {
2698 tree dst = TREE_VALUE (arglist),
2699 src = TREE_VALUE (TREE_CHAIN (arglist)),
2700 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2701 const char *p = c_getstr (src);
2702
2703 /* If the requested length is zero, or the src parameter string
2704 length is zero, return the dst parameter. */
2705 if (integer_zerop (len) || (p && *p == '\0'))
2706 {
2707 /* Evaluate and ignore the src and len parameters in case
2708 they have side-effects. */
2709 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2710 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
2711 return expand_expr (dst, target, mode, EXPAND_NORMAL);
2712 }
2713
2714 /* If the requested len is greater than or equal to the string
2715 length, call strcat. */
2716 if (TREE_CODE (len) == INTEGER_CST && p
2717 && compare_tree_int (len, strlen (p)) >= 0)
2718 {
2719 tree newarglist
2720 = tree_cons (NULL_TREE, dst, build_tree_list (NULL_TREE, src));
2721 tree fn = built_in_decls[BUILT_IN_STRCAT];
2722
2723 /* If the replacement _DECL isn't initialized, don't do the
2724 transformation. */
2725 if (!fn)
2726 return 0;
2727
2728 return expand_expr (build_function_call_expr (fn, newarglist),
2729 target, mode, EXPAND_NORMAL);
2730 }
2731 return 0;
2732 }
2733 }
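
/* Illustrative sketch of the strcat/strncat handling above (DST and
   SRC are hypothetical arguments):

     strcat (DST, "")          folds to DST
     strncat (DST, SRC, 0)     evaluates SRC and folds to DST
     strncat (DST, "abc", 5)   becomes strcat (DST, "abc"), since the
                               limit is at least strlen ("abc").  */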
2734
2735 /* Expand expression EXP, which is a call to the strspn builtin.
2736 Return 0 if we failed; the caller should emit a normal call.
2737 Otherwise try to get the result in TARGET, if convenient. */
2738
2739 static rtx
2740 expand_builtin_strspn (arglist, target, mode)
2741 tree arglist;
2742 rtx target;
2743 enum machine_mode mode;
2744 {
2745 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2746 return 0;
2747 else
2748 {
2749 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2750 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
2751
2752 /* If both arguments are constants, evaluate at compile-time. */
2753 if (p1 && p2)
2754 {
2755 const size_t r = strspn (p1, p2);
2756 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
2757 }
2758
2759 /* If either argument is "", return 0. */
2760 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
2761 {
2762 /* Evaluate and ignore both arguments in case either one has
2763 side-effects. */
2764 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2765 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
2766 return const0_rtx;
2767 }
2768 return 0;
2769 }
2770 }
2771
2772 /* Expand expression EXP, which is a call to the strcspn builtin.
2773 Return 0 if we failed; the caller should emit a normal call.
2774 Otherwise try to get the result in TARGET, if convenient. */
2775
2776 static rtx
2777 expand_builtin_strcspn (arglist, target, mode)
2778 tree arglist;
2779 rtx target;
2780 enum machine_mode mode;
2781 {
2782 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2783 return 0;
2784 else
2785 {
2786 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2787 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
2788
2789 /* If both arguments are constants, evaluate at compile-time. */
2790 if (p1 && p2)
2791 {
2792 const size_t r = strcspn (p1, p2);
2793 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
2794 }
2795
2796 /* If the first argument is "", return 0. */
2797 if (p1 && *p1 == '\0')
2798 {
2799 /* Evaluate and ignore argument s2 in case it has
2800 side-effects. */
2801 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
2802 return const0_rtx;
2803 }
2804
2805 /* If the second argument is "", return __builtin_strlen(s1). */
2806 if (p2 && *p2 == '\0')
2807 {
2808 tree newarglist = build_tree_list (NULL_TREE, s1),
2809 fn = built_in_decls[BUILT_IN_STRLEN];
2810
2811 /* If the replacement _DECL isn't initialized, don't do the
2812 transformation. */
2813 if (!fn)
2814 return 0;
2815
2816 return expand_expr (build_function_call_expr (fn, newarglist),
2817 target, mode, EXPAND_NORMAL);
2818 }
2819 return 0;
2820 }
2821 }
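
/* Illustrative sketch of the strspn/strcspn handling above (S is a
   hypothetical char * argument):

     strspn ("abcde", "abc")   folds to 3 at compile time
     strcspn ("abcde", "dx")   folds to 3 at compile time
     strspn (S, "")            folds to 0, after evaluating S
     strcspn (S, "")           becomes strlen (S).  */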
2822
2823 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
2824 if that's convenient. */
2825
2826 rtx
2827 expand_builtin_saveregs ()
2828 {
2829 rtx val, seq;
2830
2831 /* Don't do __builtin_saveregs more than once in a function.
2832 Save the result of the first call and reuse it. */
2833 if (saveregs_value != 0)
2834 return saveregs_value;
2835
2836 /* When this function is called, it means that registers must be
2837 saved on entry to this function. So we migrate the call to the
2838 first insn of this function. */
2839
2840 start_sequence ();
2841
2842 #ifdef EXPAND_BUILTIN_SAVEREGS
2843 /* Do whatever the machine needs done in this case. */
2844 val = EXPAND_BUILTIN_SAVEREGS ();
2845 #else
2846 /* ??? We used to try and build up a call to the out of line function,
2847 guessing about what registers needed saving etc. This became much
2848 harder with __builtin_va_start, since we don't have a tree for a
2849 call to __builtin_saveregs to fall back on. There was exactly one
2850 port (i860) that used this code, and I'm unconvinced it could actually
2851 handle the general case. So we no longer try to handle anything
2852 weird and make the backend absorb the evil. */
2853
2854 error ("__builtin_saveregs not supported by this target");
2855 val = const0_rtx;
2856 #endif
2857
2858 seq = get_insns ();
2859 end_sequence ();
2860
2861 saveregs_value = val;
2862
2863 /* Put the insns after the NOTE that starts the function. If this
2864 is inside a start_sequence, make the outer-level insn chain current, so
2865 the code is placed at the start of the function. */
2866 push_topmost_sequence ();
2867 emit_insn_after (seq, get_insns ());
2868 pop_topmost_sequence ();
2869
2870 return val;
2871 }
2872
2873 /* __builtin_args_info (N) returns word N of the arg space info
2874 for the current function. The number and meanings of the words
2875 are controlled by the definition of CUMULATIVE_ARGS. */
2876
2877 static rtx
2878 expand_builtin_args_info (exp)
2879 tree exp;
2880 {
2881 tree arglist = TREE_OPERAND (exp, 1);
2882 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
2883 int *word_ptr = (int *) &current_function_args_info;
2884 #if 0
2885 /* These are used by the code below that is if 0'ed away. */
2886 int i;
2887 tree type, elts, result;
2888 #endif
2889
2890 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
2891 abort ();
2892
2893 if (arglist != 0)
2894 {
2895 if (!host_integerp (TREE_VALUE (arglist), 0))
2896 error ("argument of `__builtin_args_info' must be constant");
2897 else
2898 {
2899 HOST_WIDE_INT wordnum = tree_low_cst (TREE_VALUE (arglist), 0);
2900
2901 if (wordnum < 0 || wordnum >= nwords)
2902 error ("argument of `__builtin_args_info' out of range");
2903 else
2904 return GEN_INT (word_ptr[wordnum]);
2905 }
2906 }
2907 else
2908 error ("missing argument in `__builtin_args_info'");
2909
2910 return const0_rtx;
2911
2912 #if 0
2913 for (i = 0; i < nwords; i++)
2914 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
2915
2916 type = build_array_type (integer_type_node,
2917 build_index_type (build_int_2 (nwords, 0)));
2918 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
2919 TREE_CONSTANT (result) = 1;
2920 TREE_STATIC (result) = 1;
2921 result = build1 (INDIRECT_REF, build_pointer_type (type), result);
2922 TREE_CONSTANT (result) = 1;
2923 return expand_expr (result, NULL_RTX, VOIDmode, 0);
2924 #endif
2925 }
2926
2927 /* Expand ARGLIST, from a call to __builtin_next_arg. */
2928
2929 static rtx
2930 expand_builtin_next_arg (arglist)
2931 tree arglist;
2932 {
2933 tree fntype = TREE_TYPE (current_function_decl);
2934
2935 if (TYPE_ARG_TYPES (fntype) == 0
2936 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2937 == void_type_node))
2938 {
2939 error ("`va_start' used in function with fixed args");
2940 return const0_rtx;
2941 }
2942
2943 if (arglist)
2944 {
2945 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
2946 tree arg = TREE_VALUE (arglist);
2947
2948 /* Strip off all nops for the sake of the comparison. This
2949 is not quite the same as STRIP_NOPS. It does more.
2950 We must also strip off INDIRECT_EXPR for C++ reference
2951 parameters. */
2952 while (TREE_CODE (arg) == NOP_EXPR
2953 || TREE_CODE (arg) == CONVERT_EXPR
2954 || TREE_CODE (arg) == NON_LVALUE_EXPR
2955 || TREE_CODE (arg) == INDIRECT_REF)
2956 arg = TREE_OPERAND (arg, 0);
2957 if (arg != last_parm)
2958 warning ("second parameter of `va_start' not last named argument");
2959 }
2960 else
2961 /* Evidently an out of date version of <stdarg.h>; can't validate
2962 va_start's second argument, but can still work as intended. */
2963 warning ("`__builtin_next_arg' called without an argument");
2964
2965 return expand_binop (Pmode, add_optab,
2966 current_function_internal_arg_pointer,
2967 current_function_arg_offset_rtx,
2968 NULL_RTX, 0, OPTAB_LIB_WIDEN);
2969 }
2970
2971 /* Make it easier for the backends by protecting the valist argument
2972 from multiple evaluations. */
2973
2974 static tree
2975 stabilize_va_list (valist, needs_lvalue)
2976 tree valist;
2977 int needs_lvalue;
2978 {
2979 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
2980 {
2981 if (TREE_SIDE_EFFECTS (valist))
2982 valist = save_expr (valist);
2983
2984 /* For this case, the backends will be expecting a pointer to
2985 TREE_TYPE (va_list_type_node), but it's possible we've
2986 actually been given an array (an actual va_list_type_node).
2987 So fix it. */
2988 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
2989 {
2990 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
2991 tree p2 = build_pointer_type (va_list_type_node);
2992
2993 valist = build1 (ADDR_EXPR, p2, valist);
2994 valist = fold (build1 (NOP_EXPR, p1, valist));
2995 }
2996 }
2997 else
2998 {
2999 tree pt;
3000
3001 if (! needs_lvalue)
3002 {
3003 if (! TREE_SIDE_EFFECTS (valist))
3004 return valist;
3005
3006 pt = build_pointer_type (va_list_type_node);
3007 valist = fold (build1 (ADDR_EXPR, pt, valist));
3008 TREE_SIDE_EFFECTS (valist) = 1;
3009 }
3010
3011 if (TREE_SIDE_EFFECTS (valist))
3012 valist = save_expr (valist);
3013 valist = fold (build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)),
3014 valist));
3015 }
3016
3017 return valist;
3018 }
3019
3020 /* The "standard" implementation of va_start: just assign `nextarg' to
3021 the variable. */
3022
3023 void
3024 std_expand_builtin_va_start (valist, nextarg)
3025 tree valist;
3026 rtx nextarg;
3027 {
3028 tree t;
3029
3030 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3031 make_tree (ptr_type_node, nextarg));
3032 TREE_SIDE_EFFECTS (t) = 1;
3033
3034 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3035 }
3036
3037 /* Expand ARGLIST, from a call to __builtin_va_start. */
3038
3039 static rtx
3040 expand_builtin_va_start (arglist)
3041 tree arglist;
3042 {
3043 rtx nextarg;
3044 tree chain, valist;
3045
3046 chain = TREE_CHAIN (arglist);
3047
3048 if (TREE_CHAIN (chain))
3049 error ("too many arguments to function `va_start'");
3050
3051 nextarg = expand_builtin_next_arg (chain);
3052 valist = stabilize_va_list (TREE_VALUE (arglist), 1);
3053
3054 #ifdef EXPAND_BUILTIN_VA_START
3055 EXPAND_BUILTIN_VA_START (valist, nextarg);
3056 #else
3057 std_expand_builtin_va_start (valist, nextarg);
3058 #endif
3059
3060 return const0_rtx;
3061 }
3062
3063 /* The "standard" implementation of va_arg: read the value from the
3064 current (padded) address and increment by the (padded) size. */
3065
3066 rtx
3067 std_expand_builtin_va_arg (valist, type)
3068 tree valist, type;
3069 {
3070 tree addr_tree, t, type_size = NULL;
3071 tree align, alignm1;
3072 tree rounded_size;
3073 rtx addr;
3074
3075 /* Compute the rounded size of the type. */
3076 align = size_int (PARM_BOUNDARY / BITS_PER_UNIT);
3077 alignm1 = size_int (PARM_BOUNDARY / BITS_PER_UNIT - 1);
3078 if (type == error_mark_node
3079 || (type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type))) == NULL
3080 || TREE_OVERFLOW (type_size))
3081 rounded_size = size_zero_node;
3082 else
3083 rounded_size = fold (build (MULT_EXPR, sizetype,
3084 fold (build (TRUNC_DIV_EXPR, sizetype,
3085 fold (build (PLUS_EXPR, sizetype,
3086 type_size, alignm1)),
3087 align)),
3088 align));
3089
3090 /* Get AP. */
3091 addr_tree = valist;
3092 if (PAD_VARARGS_DOWN && ! integer_zerop (rounded_size))
3093 {
3094 /* Small args are padded downward. */
3095 addr_tree = fold (build (PLUS_EXPR, TREE_TYPE (addr_tree), addr_tree,
3096 fold (build (COND_EXPR, sizetype,
3097 fold (build (GT_EXPR, sizetype,
3098 rounded_size,
3099 align)),
3100 size_zero_node,
3101 fold (build (MINUS_EXPR, sizetype,
3102 rounded_size,
3103 type_size))))));
3104 }
3105
3106 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
3107 addr = copy_to_reg (addr);
3108
3109 /* Compute new value for AP. */
3110 if (! integer_zerop (rounded_size))
3111 {
3112 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3113 build (PLUS_EXPR, TREE_TYPE (valist), valist,
3114 rounded_size));
3115 TREE_SIDE_EFFECTS (t) = 1;
3116 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3117 }
3118
3119 return addr;
3120 }
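
/* Illustrative sketch of the computation above: with ALIGN equal to
   PARM_BOUNDARY / BITS_PER_UNIT, the rounded size is

     rounded_size = ((type_size + ALIGN - 1) / ALIGN) * ALIGN

   so, for example, a 6-byte type with a 32-bit PARM_BOUNDARY rounds
   up to 8 bytes.  When PAD_VARARGS_DOWN and the rounded size does not
   exceed ALIGN, the value is read from AP + (rounded_size - type_size)
   rather than from AP itself, because small arguments are padded
   downward.  */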
3121
3122 /* Expand __builtin_va_arg, which is not really a builtin function, but
3123 a very special sort of operator. */
3124
3125 rtx
3126 expand_builtin_va_arg (valist, type)
3127 tree valist, type;
3128 {
3129 rtx addr, result;
3130 tree promoted_type, want_va_type, have_va_type;
3131
3132 /* Verify that valist is of the proper type. */
3133
3134 want_va_type = va_list_type_node;
3135 have_va_type = TREE_TYPE (valist);
3136 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
3137 {
3138 /* If va_list is an array type, the argument may have decayed
3139 to a pointer type, e.g. by being passed to another function.
3140 In that case, unwrap both types so that we can compare the
3141 underlying records. */
3142 if (TREE_CODE (have_va_type) == ARRAY_TYPE
3143 || TREE_CODE (have_va_type) == POINTER_TYPE)
3144 {
3145 want_va_type = TREE_TYPE (want_va_type);
3146 have_va_type = TREE_TYPE (have_va_type);
3147 }
3148 }
3149 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
3150 {
3151 error ("first argument to `va_arg' not of type `va_list'");
3152 addr = const0_rtx;
3153 }
3154
3155 /* Generate a diagnostic for requesting data of a type that cannot
3156 be passed through `...' due to type promotion at the call site. */
3157 else if ((promoted_type = (*lang_hooks.types.type_promotes_to) (type))
3158 != type)
3159 {
3160 const char *name = "<anonymous type>", *pname = 0;
3161 static bool gave_help;
3162
3163 if (TYPE_NAME (type))
3164 {
3165 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE)
3166 name = IDENTIFIER_POINTER (TYPE_NAME (type));
3167 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
3168 && DECL_NAME (TYPE_NAME (type)))
3169 name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
3170 }
3171 if (TYPE_NAME (promoted_type))
3172 {
3173 if (TREE_CODE (TYPE_NAME (promoted_type)) == IDENTIFIER_NODE)
3174 pname = IDENTIFIER_POINTER (TYPE_NAME (promoted_type));
3175 else if (TREE_CODE (TYPE_NAME (promoted_type)) == TYPE_DECL
3176 && DECL_NAME (TYPE_NAME (promoted_type)))
3177 pname = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (promoted_type)));
3178 }
3179
3180 /* Unfortunately, this is merely undefined, rather than a constraint
3181 violation, so we cannot make this an error. If this call is never
3182 executed, the program is still strictly conforming. */
3183 warning ("`%s' is promoted to `%s' when passed through `...'",
3184 name, pname);
3185 if (! gave_help)
3186 {
3187 gave_help = true;
3188 warning ("(so you should pass `%s' not `%s' to `va_arg')",
3189 pname, name);
3190 }
3191
3192 /* We can, however, treat "undefined" any way we please.
3193 Call abort to encourage the user to fix the program. */
3194 expand_builtin_trap ();
3195
3196 /* This is dead code, but go ahead and finish so that the
3197 mode of the result comes out right. */
3198 addr = const0_rtx;
3199 }
3200 else
3201 {
3202 /* Make it easier for the backends by protecting the valist argument
3203 from multiple evaluations. */
3204 valist = stabilize_va_list (valist, 0);
3205
3206 #ifdef EXPAND_BUILTIN_VA_ARG
3207 addr = EXPAND_BUILTIN_VA_ARG (valist, type);
3208 #else
3209 addr = std_expand_builtin_va_arg (valist, type);
3210 #endif
3211 }
3212
3213 #ifdef POINTERS_EXTEND_UNSIGNED
3214 if (GET_MODE (addr) != Pmode)
3215 addr = convert_memory_address (Pmode, addr);
3216 #endif
3217
3218 result = gen_rtx_MEM (TYPE_MODE (type), addr);
3219 set_mem_alias_set (result, get_varargs_alias_set ());
3220
3221 return result;
3222 }
3223
3224 /* Expand ARGLIST, from a call to __builtin_va_end. */
3225
3226 static rtx
3227 expand_builtin_va_end (arglist)
3228 tree arglist;
3229 {
3230 tree valist = TREE_VALUE (arglist);
3231
3232 #ifdef EXPAND_BUILTIN_VA_END
3233 valist = stabilize_va_list (valist, 0);
3234 EXPAND_BUILTIN_VA_END (arglist);
3235 #else
3236 /* Evaluate for side effects, if needed. I hate macros that don't
3237 do that. */
3238 if (TREE_SIDE_EFFECTS (valist))
3239 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
3240 #endif
3241
3242 return const0_rtx;
3243 }
3244
3245 /* Expand ARGLIST, from a call to __builtin_va_copy. We do this as a
3246 builtin rather than just as an assignment in stdarg.h because of the
3247 nastiness of array-type va_list types. */
3248
3249 static rtx
3250 expand_builtin_va_copy (arglist)
3251 tree arglist;
3252 {
3253 tree dst, src, t;
3254
3255 dst = TREE_VALUE (arglist);
3256 src = TREE_VALUE (TREE_CHAIN (arglist));
3257
3258 dst = stabilize_va_list (dst, 1);
3259 src = stabilize_va_list (src, 0);
3260
3261 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
3262 {
3263 t = build (MODIFY_EXPR, va_list_type_node, dst, src);
3264 TREE_SIDE_EFFECTS (t) = 1;
3265 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3266 }
3267 else
3268 {
3269 rtx dstb, srcb, size;
3270
3271 /* Evaluate to pointers. */
3272 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
3273 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
3274 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
3275 VOIDmode, EXPAND_NORMAL);
3276
3277 #ifdef POINTERS_EXTEND_UNSIGNED
3278 if (GET_MODE (dstb) != Pmode)
3279 dstb = convert_memory_address (Pmode, dstb);
3280
3281 if (GET_MODE (srcb) != Pmode)
3282 srcb = convert_memory_address (Pmode, srcb);
3283 #endif
3284
3285 /* "Dereference" to BLKmode memories. */
3286 dstb = gen_rtx_MEM (BLKmode, dstb);
3287 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
3288 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
3289 srcb = gen_rtx_MEM (BLKmode, srcb);
3290 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
3291 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
3292
3293 /* Copy. */
3294 emit_block_move (dstb, srcb, size);
3295 }
3296
3297 return const0_rtx;
3298 }
3299
3300 /* Expand a call to one of the builtin functions __builtin_frame_address or
3301 __builtin_return_address. */
3302
3303 static rtx
3304 expand_builtin_frame_address (exp)
3305 tree exp;
3306 {
3307 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3308 tree arglist = TREE_OPERAND (exp, 1);
3309
3310 /* The argument must be a nonnegative integer constant.
3311 It counts the number of frames to scan up the stack.
3312 The value is the return address saved in that frame. */
3313 if (arglist == 0)
3314 /* Warning about missing arg was already issued. */
3315 return const0_rtx;
3316 else if (! host_integerp (TREE_VALUE (arglist), 1))
3317 {
3318 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3319 error ("invalid arg to `__builtin_frame_address'");
3320 else
3321 error ("invalid arg to `__builtin_return_address'");
3322 return const0_rtx;
3323 }
3324 else
3325 {
3326 rtx tem
3327 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
3328 tree_low_cst (TREE_VALUE (arglist), 1),
3329 hard_frame_pointer_rtx);
3330
3331 /* Some ports cannot access arbitrary stack frames. */
3332 if (tem == NULL)
3333 {
3334 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3335 warning ("unsupported arg to `__builtin_frame_address'");
3336 else
3337 warning ("unsupported arg to `__builtin_return_address'");
3338 return const0_rtx;
3339 }
3340
3341 /* For __builtin_frame_address, return what we've got. */
3342 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3343 return tem;
3344
3345 if (GET_CODE (tem) != REG
3346 && ! CONSTANT_P (tem))
3347 tem = copy_to_mode_reg (Pmode, tem);
3348 return tem;
3349 }
3350 }
3351
3352 /* Expand a call to the alloca builtin, with arguments ARGLIST. Return 0 if
3353 we failed and the caller should emit a normal call, otherwise try to get
3354 the result in TARGET, if convenient. */
3355
3356 static rtx
3357 expand_builtin_alloca (arglist, target)
3358 tree arglist;
3359 rtx target;
3360 {
3361 rtx op0;
3362 rtx result;
3363
3364 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
3365 return 0;
3366
3367 /* Compute the argument. */
3368 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
3369
3370 /* Allocate the desired space. */
3371 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
3372
3373 #ifdef POINTERS_EXTEND_UNSIGNED
3374 if (GET_MODE (result) != ptr_mode)
3375 result = convert_memory_address (ptr_mode, result);
3376 #endif
3377
3378 return result;
3379 }
3380
3381 /* Expand a call to the ffs builtin. The arguments are in ARGLIST.
3382 Return 0 if a normal call should be emitted rather than expanding the
3383 function in-line. If convenient, the result should be placed in TARGET.
3384 SUBTARGET may be used as the target for computing one of EXP's operands. */
3385
3386 static rtx
3387 expand_builtin_ffs (arglist, target, subtarget)
3388 tree arglist;
3389 rtx target, subtarget;
3390 {
3391 rtx op0;
3392 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
3393 return 0;
3394
3395 /* Compute the argument. */
3396 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
3397 /* Compute ffs, into TARGET if possible.
3398 Set TARGET to wherever the result comes back. */
3399 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
3400 ffs_optab, op0, target, 1);
3401 if (target == 0)
3402 abort ();
3403 return target;
3404 }
3405
3406 /* If the string passed to fputs is a constant, we attempt to transform
3407 the call into __builtin_fputc() or __builtin_fwrite() as appropriate. */
3408
3409 static rtx
3410 expand_builtin_fputs (arglist, ignore, unlocked)
3411 tree arglist;
3412 int ignore;
3413 int unlocked;
3414 {
3415 tree len, fn;
3416 tree fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
3417 : built_in_decls[BUILT_IN_FPUTC];
3418 tree fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
3419 : built_in_decls[BUILT_IN_FWRITE];
3420
3421 /* If the return value is used, or the replacement _DECL isn't
3422 initialized, don't do the transformation. */
3423 if (!ignore || !fn_fputc || !fn_fwrite)
3424 return 0;
3425
3426 /* Verify the arguments in the original call. */
3427 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3428 return 0;
3429
3430 /* Get the length of the string passed to fputs. If the length
3431 can't be determined, punt. */
3432 if (!(len = c_strlen (TREE_VALUE (arglist)))
3433 || TREE_CODE (len) != INTEGER_CST)
3434 return 0;
3435
3436 switch (compare_tree_int (len, 1))
3437 {
3438 case -1: /* length is 0, delete the call entirely. */
3439 {
3440 /* Evaluate and ignore the argument in case it has
3441 side-effects. */
3442 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
3443 VOIDmode, EXPAND_NORMAL);
3444 return const0_rtx;
3445 }
3446 case 0: /* length is 1, call fputc. */
3447 {
3448 const char *p = c_getstr (TREE_VALUE (arglist));
3449
3450 if (p != NULL)
3451 {
3452 /* New argument list transforming fputs(string, stream) to
3453 fputc(string[0], stream). */
3454 arglist =
3455 build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
3456 arglist =
3457 tree_cons (NULL_TREE, build_int_2 (p[0], 0), arglist);
3458 fn = fn_fputc;
3459 break;
3460 }
3461 }
3462 /* FALLTHROUGH */
3463 case 1: /* length is greater than 1, call fwrite. */
3464 {
3465 tree string_arg = TREE_VALUE (arglist);
3466
3467 /* New argument list transforming fputs(string, stream) to
3468 fwrite(string, 1, len, stream). */
3469 arglist = build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
3470 arglist = tree_cons (NULL_TREE, len, arglist);
3471 arglist = tree_cons (NULL_TREE, size_one_node, arglist);
3472 arglist = tree_cons (NULL_TREE, string_arg, arglist);
3473 fn = fn_fwrite;
3474 break;
3475 }
3476 default:
3477 abort ();
3478 }
3479
3480 return expand_expr (build_function_call_expr (fn, arglist),
3481 (ignore ? const0_rtx : NULL_RTX),
3482 VOIDmode, EXPAND_NORMAL);
3483 }
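
/* Illustrative sketch of the fputs transformations above, which apply
   only when the return value is ignored (F is a hypothetical FILE *
   argument):

     fputs ("", F)      evaluates F and disappears
     fputs ("x", F)     becomes fputc ('x', F)
     fputs ("abc", F)   becomes fwrite ("abc", 1, 3, F)

   with the _unlocked variants substituted when UNLOCKED is set.  */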
3484
3485 /* Expand a call to __builtin_expect. We return our argument and emit a
3486 NOTE_INSN_EXPECTED_VALUE note. This is the expansion of __builtin_expect in
3487 a non-jump context. */
3488
3489 static rtx
3490 expand_builtin_expect (arglist, target)
3491 tree arglist;
3492 rtx target;
3493 {
3494 tree exp, c;
3495 rtx note, rtx_c;
3496
3497 if (arglist == NULL_TREE
3498 || TREE_CHAIN (arglist) == NULL_TREE)
3499 return const0_rtx;
3500 exp = TREE_VALUE (arglist);
3501 c = TREE_VALUE (TREE_CHAIN (arglist));
3502
3503 if (TREE_CODE (c) != INTEGER_CST)
3504 {
3505 error ("second arg to `__builtin_expect' must be a constant");
3506 c = integer_zero_node;
3507 }
3508
3509 target = expand_expr (exp, target, VOIDmode, EXPAND_NORMAL);
3510
3511 /* Don't bother with expected value notes for integral constants. */
3512 if (GET_CODE (target) != CONST_INT)
3513 {
3514 /* We do need to force this into a register so that we can be
3515 moderately sure to be able to correctly interpret the branch
3516 condition later. */
3517 target = force_reg (GET_MODE (target), target);
3518
3519 rtx_c = expand_expr (c, NULL_RTX, GET_MODE (target), EXPAND_NORMAL);
3520
3521 note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
3522 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, target, rtx_c);
3523 }
3524
3525 return target;
3526 }
3527
3528 /* Like expand_builtin_expect, except do this in a jump context. This is
3529 called from do_jump if the conditional is a __builtin_expect. Return either
3530 a list of insns to emit the jump or NULL if we cannot optimize
3531 __builtin_expect. We need to optimize this at jump time so that machines
3532 like the PowerPC don't turn the test into an SCC operation, and then jump
3533 based on the test being 0/1. */
3534
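/* Illustrative sketch only: for a jump context such as

     if (__builtin_expect (x == 0, 0))
       rare_case ();                      (rare_case is hypothetical)

   the conditional jumps emitted for the test are annotated with
   PRED_BUILTIN_EXPECT predictions, instead of first computing the 0/1
   value of the comparison with an SCC operation and branching on it.  */
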
3535 rtx
3536 expand_builtin_expect_jump (exp, if_false_label, if_true_label)
3537 tree exp;
3538 rtx if_false_label;
3539 rtx if_true_label;
3540 {
3541 tree arglist = TREE_OPERAND (exp, 1);
3542 tree arg0 = TREE_VALUE (arglist);
3543 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3544 rtx ret = NULL_RTX;
3545
3546 /* Only handle __builtin_expect (test, 0) and
3547 __builtin_expect (test, 1). */
3548 if (TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
3549 && (integer_zerop (arg1) || integer_onep (arg1)))
3550 {
3551 int num_jumps = 0;
3552 rtx insn;
3553
3554 /* If we fail to locate an appropriate conditional jump, we'll
3555 fall back to normal evaluation. Ensure that the expression
3556 can be re-evaluated. */
3557 switch (unsafe_for_reeval (arg0))
3558 {
3559 case 0: /* Safe. */
3560 break;
3561
3562 case 1: /* Mildly unsafe. */
3563 arg0 = unsave_expr (arg0);
3564 break;
3565
3566 case 2: /* Wildly unsafe. */
3567 return NULL_RTX;
3568 }
3569
3570 /* Expand the jump insns. */
3571 start_sequence ();
3572 do_jump (arg0, if_false_label, if_true_label);
3573 ret = get_insns ();
3574 end_sequence ();
3575
3576 /* Now that the __builtin_expect has been validated, go through and add
3577 the expected-value predictions to each of the conditional jumps. If we
3578 run into anything unexpected, just give up and generate the 'safe' code
3579 of doing an SCC operation and then branching on that. */
3580 insn = ret;
3581 while (insn != NULL_RTX)
3582 {
3583 rtx next = NEXT_INSN (insn);
3584 rtx pattern;
3585
3586 if (GET_CODE (insn) == JUMP_INSN && any_condjump_p (insn)
3587 && (pattern = pc_set (insn)) != NULL_RTX)
3588 {
3589 rtx ifelse = SET_SRC (pattern);
3590 rtx label;
3591 int taken;
3592
3593 if (GET_CODE (ifelse) != IF_THEN_ELSE)
3594 goto do_next_insn;
3595
3596 if (GET_CODE (XEXP (ifelse, 1)) == LABEL_REF)
3597 {
3598 taken = 1;
3599 label = XEXP (XEXP (ifelse, 1), 0);
3600 }
3601 /* An inverted jump reverses the probabilities. */
3602 else if (GET_CODE (XEXP (ifelse, 2)) == LABEL_REF)
3603 {
3604 taken = 0;
3605 label = XEXP (XEXP (ifelse, 2), 0);
3606 }
3607 /* We shouldn't have to worry about conditional returns during
3608 the expansion stage, but handle it gracefully anyway. */
3609 else if (GET_CODE (XEXP (ifelse, 1)) == RETURN)
3610 {
3611 taken = 1;
3612 label = NULL_RTX;
3613 }
3614 /* An inverted return reverses the probabilities. */
3615 else if (GET_CODE (XEXP (ifelse, 2)) == RETURN)
3616 {
3617 taken = 0;
3618 label = NULL_RTX;
3619 }
3620 else
3621 goto do_next_insn;
3622
3623 /* If the test is expected to fail, reverse the
3624 probabilities. */
3625 if (integer_zerop (arg1))
3626 taken = 1 - taken;
3627
3628 /* If we are jumping to the false label, reverse the
3629 probabilities. */
3630 if (label == NULL_RTX)
3631 ; /* conditional return */
3632 else if (label == if_false_label)
3633 taken = 1 - taken;
3634 else if (label != if_true_label)
3635 goto do_next_insn;
3636
3637 num_jumps++;
3638 predict_insn_def (insn, PRED_BUILTIN_EXPECT, taken);
3639 }
3640
3641 do_next_insn:
3642 insn = next;
3643 }
3644
3645 /* If no jumps were modified, fail and do __builtin_expect the normal
3646 way. */
3647 if (num_jumps == 0)
3648 ret = NULL_RTX;
3649 }
3650
3651 return ret;
3652 }
3653
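/* Expand a call to __builtin_trap: emit the target's trap instruction if
   it has one, otherwise fall back to a call to abort.  */
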
3654 void
3655 expand_builtin_trap ()
3656 {
3657 #ifdef HAVE_trap
3658 if (HAVE_trap)
3659 emit_insn (gen_trap ());
3660 else
3661 #endif
3662 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
3663 emit_barrier ();
3664 }
3665 \f
3666 /* Expand an expression EXP that calls a built-in function,
3667 with result going to TARGET if that's convenient
3668 (and in mode MODE if that's convenient).
3669 SUBTARGET may be used as the target for computing one of EXP's operands.
3670 IGNORE is nonzero if the value is to be ignored. */
3671
3672 rtx
3673 expand_builtin (exp, target, subtarget, mode, ignore)
3674 tree exp;
3675 rtx target;
3676 rtx subtarget;
3677 enum machine_mode mode;
3678 int ignore;
3679 {
3680 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3681 tree arglist = TREE_OPERAND (exp, 1);
3682 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
3683
3684 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
3685 return (*targetm.expand_builtin) (exp, target, subtarget, mode, ignore);
3686
3687 /* When not optimizing, generate calls to library functions for a certain
3688 set of builtins. */
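/* For example (illustrative): at -O0 a plain call to memcpy takes this
   path and becomes an ordinary library call, while an explicit
   __builtin_memcpy is still handled by the switch further below, because
   CALLED_AS_BUILT_IN is true for it.  */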
3689 if (!optimize && !CALLED_AS_BUILT_IN (fndecl))
3690 switch (fcode)
3691 {
3692 case BUILT_IN_SIN:
3693 case BUILT_IN_COS:
3694 case BUILT_IN_SQRT:
3695 case BUILT_IN_SQRTF:
3696 case BUILT_IN_SQRTL:
3697 case BUILT_IN_MEMSET:
3698 case BUILT_IN_MEMCPY:
3699 case BUILT_IN_MEMCMP:
3700 case BUILT_IN_BCMP:
3701 case BUILT_IN_BZERO:
3702 case BUILT_IN_INDEX:
3703 case BUILT_IN_RINDEX:
3704 case BUILT_IN_STRCHR:
3705 case BUILT_IN_STRRCHR:
3706 case BUILT_IN_STRLEN:
3707 case BUILT_IN_STRCPY:
3708 case BUILT_IN_STRNCPY:
3709 case BUILT_IN_STRNCMP:
3710 case BUILT_IN_STRSTR:
3711 case BUILT_IN_STRPBRK:
3712 case BUILT_IN_STRCAT:
3713 case BUILT_IN_STRNCAT:
3714 case BUILT_IN_STRSPN:
3715 case BUILT_IN_STRCSPN:
3716 case BUILT_IN_STRCMP:
3717 case BUILT_IN_FFS:
3718 case BUILT_IN_PUTCHAR:
3719 case BUILT_IN_PUTS:
3720 case BUILT_IN_PRINTF:
3721 case BUILT_IN_FPUTC:
3722 case BUILT_IN_FPUTS:
3723 case BUILT_IN_FWRITE:
3724 case BUILT_IN_PUTCHAR_UNLOCKED:
3725 case BUILT_IN_PUTS_UNLOCKED:
3726 case BUILT_IN_PRINTF_UNLOCKED:
3727 case BUILT_IN_FPUTC_UNLOCKED:
3728 case BUILT_IN_FPUTS_UNLOCKED:
3729 case BUILT_IN_FWRITE_UNLOCKED:
3730 return expand_call (exp, target, ignore);
3731
3732 default:
3733 break;
3734 }
3735
3736 switch (fcode)
3737 {
3738 case BUILT_IN_ABS:
3739 case BUILT_IN_LABS:
3740 case BUILT_IN_LLABS:
3741 case BUILT_IN_IMAXABS:
3742 case BUILT_IN_FABS:
3743 case BUILT_IN_FABSF:
3744 case BUILT_IN_FABSL:
3745 /* build_function_call changes these into ABS_EXPR. */
3746 abort ();
3747
3748 case BUILT_IN_CONJ:
3749 case BUILT_IN_CONJF:
3750 case BUILT_IN_CONJL:
3751 case BUILT_IN_CREAL:
3752 case BUILT_IN_CREALF:
3753 case BUILT_IN_CREALL:
3754 case BUILT_IN_CIMAG:
3755 case BUILT_IN_CIMAGF:
3756 case BUILT_IN_CIMAGL:
3757 /* expand_tree_builtin changes these into CONJ_EXPR, REALPART_EXPR
3758 and IMAGPART_EXPR. */
3759 abort ();
3760
3761 case BUILT_IN_SIN:
3762 case BUILT_IN_SINF:
3763 case BUILT_IN_SINL:
3764 case BUILT_IN_COS:
3765 case BUILT_IN_COSF:
3766 case BUILT_IN_COSL:
3767 /* Treat these like sqrt only if unsafe math optimizations are allowed,
3768 because of possible accuracy problems. */
3769 if (! flag_unsafe_math_optimizations)
3770 break;
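/* FALLTHROUGH */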
3771 case BUILT_IN_SQRT:
3772 case BUILT_IN_SQRTF:
3773 case BUILT_IN_SQRTL:
3774 target = expand_builtin_mathfn (exp, target, subtarget);
3775 if (target)
3776 return target;
3777 break;
3778
3779 case BUILT_IN_FMOD:
3780 break;
3781
3782 case BUILT_IN_APPLY_ARGS:
3783 return expand_builtin_apply_args ();
3784
3785 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
3786 FUNCTION with a copy of the parameters described by
3787 ARGUMENTS, and ARGSIZE. It returns a block of memory
3788 allocated on the stack into which are stored all the registers
3789 that might possibly be used for returning the result of a
3790 function. ARGUMENTS is the value returned by
3791 __builtin_apply_args. ARGSIZE is the number of bytes of
3792 arguments that must be copied. ??? How should this value be
3793 computed? We'll also need a safe worst case value for varargs
3794 functions. */
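/* Illustrative sketch only: a typical argument-forwarding use is

     void *args = __builtin_apply_args ();
     void *result = __builtin_apply ((void (*) ()) target_fn, args, 64);
     __builtin_return (result);

   where target_fn and the 64-byte argument-size guess are hypothetical.  */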
3795 case BUILT_IN_APPLY:
3796 if (!validate_arglist (arglist, POINTER_TYPE,
3797 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3798 && !validate_arglist (arglist, REFERENCE_TYPE,
3799 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3800 return const0_rtx;
3801 else
3802 {
3803 int i;
3804 tree t;
3805 rtx ops[3];
3806
3807 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
3808 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
3809
3810 return expand_builtin_apply (ops[0], ops[1], ops[2]);
3811 }
3812
3813 /* __builtin_return (RESULT) causes the function to return the
3814 value described by RESULT. RESULT is address of the block of
3815 memory returned by __builtin_apply. */
3816 case BUILT_IN_RETURN:
3817 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
3818 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
3819 NULL_RTX, VOIDmode, 0));
3820 return const0_rtx;
3821
3822 case BUILT_IN_SAVEREGS:
3823 return expand_builtin_saveregs ();
3824
3825 case BUILT_IN_ARGS_INFO:
3826 return expand_builtin_args_info (exp);
3827
3828 /* Return the address of the first anonymous stack arg. */
3829 case BUILT_IN_NEXT_ARG:
3830 return expand_builtin_next_arg (arglist);
3831
3832 case BUILT_IN_CLASSIFY_TYPE:
3833 return expand_builtin_classify_type (arglist);
3834
3835 case BUILT_IN_CONSTANT_P:
3836 return expand_builtin_constant_p (exp);
3837
3838 case BUILT_IN_FRAME_ADDRESS:
3839 case BUILT_IN_RETURN_ADDRESS:
3840 return expand_builtin_frame_address (exp);
3841
3842 /* Returns the address of the area where the structure is returned,
3843 or 0 otherwise. */
3844 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
3845 if (arglist != 0
3846 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
3847 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
3848 return const0_rtx;
3849 else
3850 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
3851
3852 case BUILT_IN_ALLOCA:
3853 target = expand_builtin_alloca (arglist, target);
3854 if (target)
3855 return target;
3856 break;
3857
3858 case BUILT_IN_FFS:
3859 target = expand_builtin_ffs (arglist, target, subtarget);
3860 if (target)
3861 return target;
3862 break;
3863
3864 case BUILT_IN_STRLEN:
3865 target = expand_builtin_strlen (exp, target);
3866 if (target)
3867 return target;
3868 break;
3869
3870 case BUILT_IN_STRCPY:
3871 target = expand_builtin_strcpy (exp, target, mode);
3872 if (target)
3873 return target;
3874 break;
3875
3876 case BUILT_IN_STRNCPY:
3877 target = expand_builtin_strncpy (arglist, target, mode);
3878 if (target)
3879 return target;
3880 break;
3881
3882 case BUILT_IN_STRCAT:
3883 target = expand_builtin_strcat (arglist, target, mode);
3884 if (target)
3885 return target;
3886 break;
3887
3888 case BUILT_IN_STRNCAT:
3889 target = expand_builtin_strncat (arglist, target, mode);
3890 if (target)
3891 return target;
3892 break;
3893
3894 case BUILT_IN_STRSPN:
3895 target = expand_builtin_strspn (arglist, target, mode);
3896 if (target)
3897 return target;
3898 break;
3899
3900 case BUILT_IN_STRCSPN:
3901 target = expand_builtin_strcspn (arglist, target, mode);
3902 if (target)
3903 return target;
3904 break;
3905
3906 case BUILT_IN_STRSTR:
3907 target = expand_builtin_strstr (arglist, target, mode);
3908 if (target)
3909 return target;
3910 break;
3911
3912 case BUILT_IN_STRPBRK:
3913 target = expand_builtin_strpbrk (arglist, target, mode);
3914 if (target)
3915 return target;
3916 break;
3917
3918 case BUILT_IN_INDEX:
3919 case BUILT_IN_STRCHR:
3920 target = expand_builtin_strchr (arglist, target, mode);
3921 if (target)
3922 return target;
3923 break;
3924
3925 case BUILT_IN_RINDEX:
3926 case BUILT_IN_STRRCHR:
3927 target = expand_builtin_strrchr (arglist, target, mode);
3928 if (target)
3929 return target;
3930 break;
3931
3932 case BUILT_IN_MEMCPY:
3933 target = expand_builtin_memcpy (arglist, target, mode);
3934 if (target)
3935 return target;
3936 break;
3937
3938 case BUILT_IN_MEMSET:
3939 target = expand_builtin_memset (exp, target, mode);
3940 if (target)
3941 return target;
3942 break;
3943
3944 case BUILT_IN_BZERO:
3945 target = expand_builtin_bzero (exp);
3946 if (target)
3947 return target;
3948 break;
3949
3950 case BUILT_IN_STRCMP:
3951 target = expand_builtin_strcmp (exp, target, mode);
3952 if (target)
3953 return target;
3954 break;
3955
3956 case BUILT_IN_STRNCMP:
3957 target = expand_builtin_strncmp (exp, target, mode);
3958 if (target)
3959 return target;
3960 break;
3961
3962 case BUILT_IN_BCMP:
3963 case BUILT_IN_MEMCMP:
3964 target = expand_builtin_memcmp (exp, arglist, target, mode);
3965 if (target)
3966 return target;
3967 break;
3968
3969 case BUILT_IN_SETJMP:
3970 target = expand_builtin_setjmp (arglist, target);
3971 if (target)
3972 return target;
3973 break;
3974
3975 /* __builtin_longjmp is passed a pointer to an array of five words.
3976 It's similar to the C library longjmp function but works with
3977 __builtin_setjmp above. */
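/* Illustrative pairing only:

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       ...                                normal path
     else
       ...                                reached via __builtin_longjmp (buf, 1)

   The second argument of __builtin_longjmp must be the constant 1, as
   checked below.  */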
3978 case BUILT_IN_LONGJMP:
3979 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3980 break;
3981 else
3982 {
3983 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
3984 VOIDmode, 0);
3985 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
3986 NULL_RTX, VOIDmode, 0);
3987
3988 if (value != const1_rtx)
3989 {
3990 error ("__builtin_longjmp second argument must be 1");
3991 return const0_rtx;
3992 }
3993
3994 expand_builtin_longjmp (buf_addr, value);
3995 return const0_rtx;
3996 }
3997
3998 case BUILT_IN_TRAP:
3999 expand_builtin_trap ();
4000 return const0_rtx;
4001
4002 case BUILT_IN_PUTCHAR:
4003 case BUILT_IN_PUTS:
4004 case BUILT_IN_FPUTC:
4005 case BUILT_IN_FWRITE:
4006 case BUILT_IN_PUTCHAR_UNLOCKED:
4007 case BUILT_IN_PUTS_UNLOCKED:
4008 case BUILT_IN_FPUTC_UNLOCKED:
4009 case BUILT_IN_FWRITE_UNLOCKED:
4010 break;
4011 case BUILT_IN_FPUTS:
4012 target = expand_builtin_fputs (arglist, ignore, /*unlocked=*/ 0);
4013 if (target)
4014 return target;
4015 break;
4016 case BUILT_IN_FPUTS_UNLOCKED:
4017 target = expand_builtin_fputs (arglist, ignore, /*unlocked=*/ 1);
4018 if (target)
4019 return target;
4020 break;
4021
4022 /* Various hooks for the DWARF 2 __throw routine. */
4023 case BUILT_IN_UNWIND_INIT:
4024 expand_builtin_unwind_init ();
4025 return const0_rtx;
4026 case BUILT_IN_DWARF_CFA:
4027 return virtual_cfa_rtx;
4028 #ifdef DWARF2_UNWIND_INFO
4029 case BUILT_IN_DWARF_FP_REGNUM:
4030 return expand_builtin_dwarf_fp_regnum ();
4031 case BUILT_IN_INIT_DWARF_REG_SIZES:
4032 expand_builtin_init_dwarf_reg_sizes (TREE_VALUE (arglist));
4033 return const0_rtx;
4034 #endif
4035 case BUILT_IN_FROB_RETURN_ADDR:
4036 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
4037 case BUILT_IN_EXTRACT_RETURN_ADDR:
4038 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
4039 case BUILT_IN_EH_RETURN:
4040 expand_builtin_eh_return (TREE_VALUE (arglist),
4041 TREE_VALUE (TREE_CHAIN (arglist)));
4042 return const0_rtx;
4043 #ifdef EH_RETURN_DATA_REGNO
4044 case BUILT_IN_EH_RETURN_DATA_REGNO:
4045 return expand_builtin_eh_return_data_regno (arglist);
4046 #endif
4047 case BUILT_IN_VA_START:
4048 case BUILT_IN_STDARG_START:
4049 return expand_builtin_va_start (arglist);
4050 case BUILT_IN_VA_END:
4051 return expand_builtin_va_end (arglist);
4052 case BUILT_IN_VA_COPY:
4053 return expand_builtin_va_copy (arglist);
4054 case BUILT_IN_EXPECT:
4055 return expand_builtin_expect (arglist, target);
4056 case BUILT_IN_PREFETCH:
4057 expand_builtin_prefetch (arglist);
4058 return const0_rtx;
4059
4060
4061 default: /* just do a library call if this is an unknown builtin */
4062 error ("built-in function `%s' not currently supported",
4063 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
4064 }
4065
4066 /* The switch statement above can drop through to cause the function
4067 to be called normally. */
4068 return expand_call (exp, target, ignore);
4069 }
4070
4071 /* Fold a call to __builtin_constant_p, if we know it will evaluate to a
4072 constant. ARGLIST is the argument list of the call. */
4073
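/* For example (illustrative): __builtin_constant_p (42) and
   __builtin_constant_p ("abc") fold to 1 here; an argument with side
   effects, or one of pointer or aggregate type that is not such a
   literal, folds to 0; anything else is left alone (a null tree is
   returned) and the question is decided later, at RTL expansion time.  */
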
4074 static tree
4075 fold_builtin_constant_p (arglist)
4076 tree arglist;
4077 {
4078 if (arglist == 0)
4079 return 0;
4080
4081 arglist = TREE_VALUE (arglist);
4082
4083 /* We return 1 for a numeric type that's known to be a constant
4084 value at compile-time or for an aggregate type that's a
4085 literal constant. */
4086 STRIP_NOPS (arglist);
4087
4088 /* If we know this is a constant, return the integer constant one. */
4089 if (TREE_CODE_CLASS (TREE_CODE (arglist)) == 'c'
4090 || (TREE_CODE (arglist) == CONSTRUCTOR
4091 && TREE_CONSTANT (arglist))
4092 || (TREE_CODE (arglist) == ADDR_EXPR
4093 && TREE_CODE (TREE_OPERAND (arglist, 0)) == STRING_CST))
4094 return integer_one_node;
4095
4096 /* If we aren't going to be running CSE or this expression
4097 has side effects, show we don't know it to be a constant.
4098 Likewise if it's a pointer or aggregate type, since in those
4099 cases we only want literals, as those are only optimized
4100 when generating RTL, not later.
4101 And finally, if we are compiling an initializer, not code, we
4102 need to return a definite result now; there's not going to be any
4103 more optimization done. */
4104 if (TREE_SIDE_EFFECTS (arglist) || cse_not_expected
4105 || AGGREGATE_TYPE_P (TREE_TYPE (arglist))
4106 || POINTER_TYPE_P (TREE_TYPE (arglist))
4107 || cfun == 0)
4108 return integer_zero_node;
4109
4110 return 0;
4111 }
4112
4113 /* Fold a call to __builtin_classify_type. */
4114
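/* For example (illustrative): __builtin_classify_type (0) folds to the
   integer_type_class code, and a call with no argument folds to
   no_type_class.  */
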
4115 static tree
4116 fold_builtin_classify_type (arglist)
4117 tree arglist;
4118 {
4119 if (arglist == 0)
4120 return build_int_2 (no_type_class, 0);
4121
4122 return build_int_2 (type_to_class (TREE_TYPE (TREE_VALUE (arglist))), 0);
4123 }
4124
4125 /* Used by constant folding to eliminate some builtin calls early. EXP is
4126 the CALL_EXPR of a call to a builtin function. */
4127
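/* For example (illustrative): strlen ("abc") folds to 3 via the
   BUILT_IN_STRLEN case below, without ever being expanded to RTL.  */
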
4128 tree
4129 fold_builtin (exp)
4130 tree exp;
4131 {
4132 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4133 tree arglist = TREE_OPERAND (exp, 1);
4134 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4135
4136 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
4137 return 0;
4138
4139 switch (fcode)
4140 {
4141 case BUILT_IN_CONSTANT_P:
4142 return fold_builtin_constant_p (arglist);
4143
4144 case BUILT_IN_CLASSIFY_TYPE:
4145 return fold_builtin_classify_type (arglist);
4146
4147 case BUILT_IN_STRLEN:
4148 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
4149 {
4150 tree len = c_strlen (TREE_VALUE (arglist));
4151 if (len != 0)
4152 return len;
4153 }
4154 break;
4155
4156 default:
4157 break;
4158 }
4159
4160 return 0;
4161 }
4162
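/* Build a CALL_EXPR applying the function decl FN to the argument list
   ARGLIST, mark it as having side effects, and fold the result.  */
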
4163 static tree
4164 build_function_call_expr (fn, arglist)
4165 tree fn, arglist;
4166 {
4167 tree call_expr;
4168
4169 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
4170 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
4171 call_expr, arglist);
4172 TREE_SIDE_EFFECTS (call_expr) = 1;
4173 return fold (call_expr);
4174 }
4175
4176 /* This function validates the types of a function call argument list
4177 represented as a tree chain of parameters against a specified list
4178 of tree_codes. If the last specifier is a 0, that represents an
4179 ellipsis; otherwise the last specifier must be a VOID_TYPE. */
4180
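/* Usage sketch (illustrative): the check made by expand_builtin_fputs,

     validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   accepts exactly two pointer-typed arguments; writing 0 in place of the
   trailing VOID_TYPE would instead allow any further arguments after the
   ones listed.  */
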
4181 static int
4182 validate_arglist VPARAMS ((tree arglist, ...))
4183 {
4184 enum tree_code code;
4185 int res = 0;
4186
4187 VA_OPEN (ap, arglist);
4188 VA_FIXEDARG (ap, tree, arglist);
4189
4190 do {
4191 code = va_arg (ap, enum tree_code);
4192 switch (code)
4193 {
4194 case 0:
4195 /* This signifies an ellipsis; any further arguments are all OK. */
4196 res = 1;
4197 goto end;
4198 case VOID_TYPE:
4199 /* This signifies an endlink: if no arguments remain, return
4200 true; otherwise return false. */
4201 res = arglist == 0;
4202 goto end;
4203 default:
4204 /* If no parameters remain or the parameter's code does not
4205 match the specified code, return false. Otherwise continue
4206 checking any remaining arguments. */
4207 if (arglist == 0 || code != TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))))
4208 goto end;
4209 break;
4210 }
4211 arglist = TREE_CHAIN (arglist);
4212 } while (1);
4213
4214 /* We need gotos here since we can only have one VA_CLOSE in a
4215 function. */
4216 end: ;
4217 VA_CLOSE (ap);
4218
4219 return res;
4220 }
4221
4222 /* Default version of target-specific builtin setup that does nothing. */
4223
4224 void
4225 default_init_builtins ()
4226 {
4227 }
4228
4229 /* Default target-specific builtin expander that does nothing. */
4230
4231 rtx
4232 default_expand_builtin (exp, target, subtarget, mode, ignore)
4233 tree exp ATTRIBUTE_UNUSED;
4234 rtx target ATTRIBUTE_UNUSED;
4235 rtx subtarget ATTRIBUTE_UNUSED;
4236 enum machine_mode mode ATTRIBUTE_UNUSED;
4237 int ignore ATTRIBUTE_UNUSED;
4238 {
4239 return NULL_RTX;
4240 }