]>
Commit | Line | Data |
---|---|---|
03385ed3 | 1 | /* intrinsics.cc -- D language compiler intrinsics. |
fbd26352 | 2 | Copyright (C) 2006-2019 Free Software Foundation, Inc. |
03385ed3 | 3 | |
4 | GCC is free software; you can redistribute it and/or modify | |
5 | it under the terms of the GNU General Public License as published by | |
6 | the Free Software Foundation; either version 3, or (at your option) | |
7 | any later version. | |
8 | ||
9 | GCC is distributed in the hope that it will be useful, | |
10 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | GNU General Public License for more details. | |
13 | ||
14 | You should have received a copy of the GNU General Public License | |
15 | along with GCC; see the file COPYING3. If not see | |
16 | <http://www.gnu.org/licenses/>. */ | |
17 | ||
18 | #include "config.h" | |
19 | #include "system.h" | |
20 | #include "coretypes.h" | |
21 | ||
22 | #include "dmd/declaration.h" | |
23 | #include "dmd/identifier.h" | |
24 | #include "dmd/mangle.h" | |
25 | #include "dmd/mangle.h" | |
26 | #include "dmd/module.h" | |
27 | #include "dmd/template.h" | |
28 | ||
29 | #include "tm.h" | |
30 | #include "function.h" | |
31 | #include "tree.h" | |
32 | #include "fold-const.h" | |
33 | #include "stringpool.h" | |
34 | #include "builtins.h" | |
35 | ||
36 | #include "d-tree.h" | |
37 | ||
38 | ||
/* An internal struct used to hold information on D intrinsics.  One entry
   exists per intrinsic recognised by the compiler; the table itself is
   generated from intrinsics.def.  */

struct intrinsic_decl
{
  /* The DECL_FUNCTION_CODE of this decl.  */
  intrinsic_code code;

  /* The name of the intrinsic.  */
  const char *name;

  /* The module where the intrinsic is located.  */
  const char *module;

  /* The mangled signature decoration of the intrinsic.  Used to match a
     specific overload, as the name and module alone are ambiguous.  */
  const char *deco;

  /* True if the intrinsic is only handled in CTFE (compile-time function
     evaluation); such intrinsics are never expanded in generated code.  */
  bool ctfeonly;
};
58 | ||
/* Table of all D intrinsics known to the compiler, indexed by
   intrinsic_code.  Each DEF_D_INTRINSIC entry in intrinsics.def expands
   to one initializer here.  */

static const intrinsic_decl intrinsic_decls[] =
{
#define DEF_D_INTRINSIC(CODE, ALIAS, NAME, MODULE, DECO, CTFE) \
    { INTRINSIC_ ## ALIAS, NAME, MODULE, DECO, CTFE },

#include "intrinsics.def"

#undef DEF_D_INTRINSIC
};
68 | ||
69 | /* Checks if DECL is an intrinsic or run time library function that requires | |
70 | special processing. Sets DECL_INTRINSIC_CODE so it can be identified | |
71 | later in maybe_expand_intrinsic. */ | |
72 | ||
73 | void | |
74 | maybe_set_intrinsic (FuncDeclaration *decl) | |
75 | { | |
76 | if (!decl->ident || decl->builtin != BUILTINunknown) | |
77 | return; | |
78 | ||
79 | /* The builtin flag is updated only if we can evaluate the intrinsic | |
80 | at compile-time. Such as the math or bitop intrinsics. */ | |
81 | decl->builtin = BUILTINno; | |
82 | ||
83 | /* Check if it's a compiler intrinsic. We only require that any | |
84 | internally recognised intrinsics are declared in a module with | |
85 | an explicit module declaration. */ | |
86 | Module *m = decl->getModule (); | |
87 | ||
88 | if (!m || !m->md) | |
89 | return; | |
90 | ||
91 | TemplateInstance *ti = decl->isInstantiated (); | |
92 | TemplateDeclaration *td = ti ? ti->tempdecl->isTemplateDeclaration () : NULL; | |
93 | ||
94 | const char *tname = decl->ident->toChars (); | |
95 | const char *tmodule = m->md->toChars (); | |
96 | const char *tdeco = (td == NULL) ? decl->type->deco : NULL; | |
97 | ||
98 | /* Look through all D intrinsics. */ | |
99 | for (size_t i = 0; i < (int) INTRINSIC_LAST; i++) | |
100 | { | |
101 | if (!intrinsic_decls[i].name) | |
102 | continue; | |
103 | ||
104 | if (strcmp (intrinsic_decls[i].name, tname) != 0 | |
105 | || strcmp (intrinsic_decls[i].module, tmodule) != 0) | |
106 | continue; | |
107 | ||
108 | /* Instantiated functions would have the wrong type deco, get it from the | |
109 | template member instead. */ | |
110 | if (tdeco == NULL) | |
111 | { | |
112 | if (!td || !td->onemember) | |
113 | return; | |
114 | ||
115 | FuncDeclaration *fd = td->onemember->isFuncDeclaration (); | |
116 | if (fd == NULL) | |
117 | return; | |
118 | ||
119 | OutBuffer buf; | |
120 | mangleToBuffer (fd->type, &buf); | |
121 | tdeco = buf.extractString (); | |
122 | } | |
123 | ||
124 | /* Matching the type deco may be a bit too strict, as it means that all | |
125 | function attributes that end up in the signature must be kept aligned | |
126 | between the compiler and library declaration. */ | |
127 | if (strcmp (intrinsic_decls[i].deco, tdeco) == 0) | |
128 | { | |
129 | intrinsic_code code = intrinsic_decls[i].code; | |
130 | ||
131 | if (decl->csym == NULL) | |
132 | get_symbol_decl (decl); | |
133 | ||
134 | /* If there is no function body, then the implementation is always | |
135 | provided by the compiler. */ | |
136 | if (!decl->fbody) | |
137 | { | |
138 | DECL_BUILT_IN_CLASS (decl->csym) = BUILT_IN_FRONTEND; | |
139 | DECL_FUNCTION_CODE (decl->csym) = (built_in_function) code; | |
140 | } | |
141 | ||
142 | /* Infer whether the intrinsic can be used for CTFE, let the | |
143 | front-end know that it can be evaluated at compile-time. */ | |
144 | switch (code) | |
145 | { | |
146 | case INTRINSIC_VA_ARG: | |
147 | case INTRINSIC_C_VA_ARG: | |
148 | case INTRINSIC_VASTART: | |
149 | case INTRINSIC_ADDS: | |
150 | case INTRINSIC_SUBS: | |
151 | case INTRINSIC_MULS: | |
152 | case INTRINSIC_NEGS: | |
153 | case INTRINSIC_VLOAD: | |
154 | case INTRINSIC_VSTORE: | |
155 | break; | |
156 | ||
157 | case INTRINSIC_POW: | |
158 | { | |
159 | /* Check that this overload of pow() is has an equivalent | |
160 | built-in function. It could be `int pow(int, int)'. */ | |
161 | tree rettype = TREE_TYPE (TREE_TYPE (decl->csym)); | |
162 | if (mathfn_built_in (rettype, BUILT_IN_POW) != NULL_TREE) | |
163 | decl->builtin = BUILTINyes; | |
164 | break; | |
165 | } | |
166 | ||
167 | default: | |
168 | decl->builtin = BUILTINyes; | |
169 | break; | |
170 | } | |
171 | ||
172 | /* The intrinsic was marked as CTFE-only. */ | |
173 | if (intrinsic_decls[i].ctfeonly) | |
174 | DECL_BUILT_IN_CTFE (decl->csym) = 1; | |
175 | ||
176 | DECL_INTRINSIC_CODE (decl->csym) = code; | |
177 | break; | |
178 | } | |
179 | } | |
180 | } | |
181 | ||
182 | /* Construct a function call to the built-in function CODE, N is the number of | |
183 | arguments, and the `...' parameters are the argument expressions. | |
184 | The original call expression is held in CALLEXP. */ | |
185 | ||
186 | static tree | |
187 | call_builtin_fn (tree callexp, built_in_function code, int n, ...) | |
188 | { | |
189 | tree *argarray = XALLOCAVEC (tree, n); | |
190 | va_list ap; | |
191 | ||
192 | va_start (ap, n); | |
193 | for (int i = 0; i < n; i++) | |
194 | argarray[i] = va_arg (ap, tree); | |
195 | va_end (ap); | |
196 | ||
197 | tree exp = build_call_expr_loc_array (EXPR_LOCATION (callexp), | |
198 | builtin_decl_explicit (code), | |
199 | n, argarray); | |
200 | return convert (TREE_TYPE (callexp), fold (exp)); | |
201 | } | |
202 | ||
/* Expand a front-end intrinsic call to bsf().  This takes one argument,
   the signature to which can be either:

	int bsf (uint arg);
	int bsf (ulong arg);

   This scans all bits in the given argument starting with the first,
   returning the bit number of the first bit set.  The original call
   expression is held in CALLEXP.  */

static tree
expand_intrinsic_bsf (tree callexp)
{
  /* The bsf() intrinsic gets turned into __builtin_ctz(arg).
     The return value is supposed to be undefined if arg is zero.  */
  tree arg = CALL_EXPR_ARG (callexp, 0);
  int argsize = TYPE_PRECISION (TREE_TYPE (arg));

  /* Which variant of __builtin_ctz* should we call?  */
  built_in_function code = (argsize <= INT_TYPE_SIZE) ? BUILT_IN_CTZ
    : (argsize <= LONG_TYPE_SIZE) ? BUILT_IN_CTZL
    : (argsize <= LONG_LONG_TYPE_SIZE) ? BUILT_IN_CTZLL
    : END_BUILTINS;

  /* All supported argument widths fit one of the variants above.  */
  gcc_assert (code != END_BUILTINS);

  return call_builtin_fn (callexp, code, 1, arg);
}
231 | ||
232 | /* Expand a front-end instrinsic call to bsr(). This takes one argument, | |
233 | the signature to which can be either: | |
234 | ||
235 | int bsr (uint arg); | |
236 | int bsr (ulong arg); | |
237 | ||
238 | This scans all bits in the given argument from the most significant bit | |
239 | to the least significant, returning the bit number of the first bit set. | |
240 | The original call expression is held in CALLEXP. */ | |
241 | ||
242 | static tree | |
243 | expand_intrinsic_bsr (tree callexp) | |
244 | { | |
245 | /* The bsr() intrinsic gets turned into (size - 1) - __builtin_clz(arg). | |
246 | The return value is supposed to be undefined if arg is zero. */ | |
247 | tree arg = CALL_EXPR_ARG (callexp, 0); | |
248 | tree type = TREE_TYPE (arg); | |
249 | int argsize = TYPE_PRECISION (type); | |
250 | ||
251 | /* Which variant of __builtin_clz* should we call? */ | |
252 | built_in_function code = (argsize <= INT_TYPE_SIZE) ? BUILT_IN_CLZ | |
253 | : (argsize <= LONG_TYPE_SIZE) ? BUILT_IN_CLZL | |
254 | : (argsize <= LONG_LONG_TYPE_SIZE) ? BUILT_IN_CLZLL | |
255 | : END_BUILTINS; | |
256 | ||
257 | gcc_assert (code != END_BUILTINS); | |
258 | ||
259 | tree result = call_builtin_fn (callexp, code, 1, arg); | |
260 | ||
261 | /* Handle int -> long conversions. */ | |
262 | if (TREE_TYPE (result) != type) | |
263 | result = fold_convert (type, result); | |
264 | ||
265 | result = fold_build2 (MINUS_EXPR, type, | |
266 | build_integer_cst (argsize - 1, type), result); | |
267 | return fold_convert (TREE_TYPE (callexp), result); | |
268 | } | |
269 | ||
/* Expand a front-end intrinsic call to INTRINSIC, which is either a call to
   bt(), btc(), btr(), or bts().  These intrinsics expect to take two arguments,
   the signature to which is:

	int bt (size_t* ptr, size_t bitnum);

   All intrinsics test if a bit is set and return the result of that condition.
   Variants of `bt' will then update that bit.  `btc' complements the bit,
   `bts' sets the bit, and `btr' resets the bit.  The original call expression
   is held in CALLEXP.  */

static tree
expand_intrinsic_bt (intrinsic_code intrinsic, tree callexp)
{
  tree ptr = CALL_EXPR_ARG (callexp, 0);
  tree bitnum = CALL_EXPR_ARG (callexp, 1);
  /* TYPE is the pointed-to word type (size_t in the D signature).  */
  tree type = TREE_TYPE (TREE_TYPE (ptr));

  /* size_t bitsize = sizeof(*ptr) * BITS_PER_UNIT;  */
  tree bitsize = fold_convert (type, TYPE_SIZE (type));

  /* ptr[bitnum / bitsize]  */
  ptr = build_array_index (ptr, fold_build2 (TRUNC_DIV_EXPR, type,
					     bitnum, bitsize));
  ptr = indirect_ref (type, ptr);

  /* mask = 1 << (bitnum % bitsize);  */
  bitnum = fold_build2 (TRUNC_MOD_EXPR, type, bitnum, bitsize);
  bitnum = fold_build2 (LSHIFT_EXPR, type, size_one_node, bitnum);

  /* cond = ptr[bitnum / size] & mask;  */
  tree cond = fold_build2 (BIT_AND_EXPR, type, ptr, bitnum);

  /* cond ? -1 : 0;  */
  cond = build_condition (TREE_TYPE (callexp), d_truthvalue_conversion (cond),
			  integer_minus_one_node, integer_zero_node);

  /* Update the bit as needed, only testing the bit for bt().  */
  if (intrinsic == INTRINSIC_BT)
    return cond;

  /* Map each updating variant onto its bitwise operation.  */
  tree_code code = (intrinsic == INTRINSIC_BTC) ? BIT_XOR_EXPR
    : (intrinsic == INTRINSIC_BTR) ? BIT_AND_EXPR
    : (intrinsic == INTRINSIC_BTS) ? BIT_IOR_EXPR
    : ERROR_MARK;
  gcc_assert (code != ERROR_MARK);

  /* ptr[bitnum / size] op= mask;
     btr clears the bit, so AND with the inverted mask.  */
  if (intrinsic == INTRINSIC_BTR)
    bitnum = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (bitnum), bitnum);

  ptr = modify_expr (ptr, fold_build2 (code, TREE_TYPE (ptr), ptr, bitnum));

  /* Store the condition result in a temporary, and return expressions in
     correct order of evaluation: test the bit before updating it.  */
  tree tmp = build_local_temp (TREE_TYPE (callexp));
  cond = modify_expr (tmp, cond);

  return compound_expr (cond, compound_expr (ptr, tmp));
}
330 | ||
331 | /* Expand a front-end intrinsic call to bswap(). This takes one argument, the | |
332 | signature to which can be either: | |
333 | ||
334 | int bswap (uint arg); | |
335 | int bswap (ulong arg); | |
336 | ||
337 | This swaps all bytes in an N byte type end-to-end. The original call | |
338 | expression is held in CALLEXP. */ | |
339 | ||
340 | static tree | |
341 | expand_intrinsic_bswap (tree callexp) | |
342 | { | |
343 | tree arg = CALL_EXPR_ARG (callexp, 0); | |
344 | int argsize = TYPE_PRECISION (TREE_TYPE (arg)); | |
345 | ||
346 | /* Which variant of __builtin_bswap* should we call? */ | |
347 | built_in_function code = (argsize == 32) ? BUILT_IN_BSWAP32 | |
348 | : (argsize == 64) ? BUILT_IN_BSWAP64 | |
349 | : END_BUILTINS; | |
350 | ||
351 | gcc_assert (code != END_BUILTINS); | |
352 | ||
353 | return call_builtin_fn (callexp, code, 1, arg); | |
354 | } | |
355 | ||
356 | /* Expand a front-end intrinsic call to popcnt(). This takes one argument, the | |
357 | signature to which can be either: | |
358 | ||
359 | int popcnt (uint arg); | |
360 | int popcnt (ulong arg); | |
361 | ||
362 | Calculates the number of set bits in an integer. The original call | |
363 | expression is held in CALLEXP. */ | |
364 | ||
365 | static tree | |
366 | expand_intrinsic_popcnt (tree callexp) | |
367 | { | |
368 | tree arg = CALL_EXPR_ARG (callexp, 0); | |
369 | int argsize = TYPE_PRECISION (TREE_TYPE (arg)); | |
370 | ||
371 | /* Which variant of __builtin_popcount* should we call? */ | |
372 | built_in_function code = (argsize <= INT_TYPE_SIZE) ? BUILT_IN_POPCOUNT | |
373 | : (argsize <= LONG_TYPE_SIZE) ? BUILT_IN_POPCOUNTL | |
374 | : (argsize <= LONG_LONG_TYPE_SIZE) ? BUILT_IN_POPCOUNTLL | |
375 | : END_BUILTINS; | |
376 | ||
377 | gcc_assert (code != END_BUILTINS); | |
378 | ||
379 | return call_builtin_fn (callexp, code, 1, arg); | |
380 | } | |
381 | ||
382 | /* Expand a front-end intrinsic call to INTRINSIC, which is either a call to | |
383 | sqrt(), sqrtf(), sqrtl(). These intrinsics expect to take one argument, | |
384 | the signature to which can be either: | |
385 | ||
386 | float sqrt (float arg); | |
387 | double sqrt (double arg); | |
388 | real sqrt (real arg); | |
389 | ||
390 | This computes the square root of the given argument. The original call | |
391 | expression is held in CALLEXP. */ | |
392 | ||
393 | static tree | |
394 | expand_intrinsic_sqrt (intrinsic_code intrinsic, tree callexp) | |
395 | { | |
396 | tree arg = CALL_EXPR_ARG (callexp, 0); | |
397 | ||
398 | /* Which variant of __builtin_sqrt* should we call? */ | |
399 | built_in_function code = (intrinsic == INTRINSIC_SQRT) ? BUILT_IN_SQRT | |
400 | : (intrinsic == INTRINSIC_SQRTF) ? BUILT_IN_SQRTF | |
401 | : (intrinsic == INTRINSIC_SQRTL) ? BUILT_IN_SQRTL | |
402 | : END_BUILTINS; | |
403 | ||
404 | gcc_assert (code != END_BUILTINS); | |
405 | return call_builtin_fn (callexp, code, 1, arg); | |
406 | } | |
407 | ||
408 | /* Expand a front-end intrinsic call to copysign(). This takes two arguments, | |
409 | the signature to which can be either: | |
410 | ||
411 | float copysign (T to, float from); | |
412 | double copysign (T to, double from); | |
413 | real copysign (T to, real from); | |
414 | ||
415 | This computes a value composed of TO with the sign bit of FROM. The original | |
416 | call expression is held in CALLEXP. */ | |
417 | ||
418 | static tree | |
419 | expand_intrinsic_copysign (tree callexp) | |
420 | { | |
421 | tree to = CALL_EXPR_ARG (callexp, 0); | |
422 | tree from = CALL_EXPR_ARG (callexp, 1); | |
423 | tree type = TREE_TYPE (to); | |
424 | ||
425 | /* Convert parameters to the same type. Prefer the first parameter unless it | |
426 | is an integral type. */ | |
427 | if (INTEGRAL_TYPE_P (type)) | |
428 | { | |
429 | to = fold_convert (TREE_TYPE (from), to); | |
430 | type = TREE_TYPE (to); | |
431 | } | |
432 | else | |
433 | from = fold_convert (type, from); | |
434 | ||
435 | /* Which variant of __builtin_copysign* should we call? */ | |
436 | tree builtin = mathfn_built_in (type, BUILT_IN_COPYSIGN); | |
437 | gcc_assert (builtin != NULL_TREE); | |
438 | ||
439 | return call_builtin_fn (callexp, DECL_FUNCTION_CODE (builtin), 2, | |
440 | to, from); | |
441 | } | |
442 | ||
443 | /* Expand a front-end intrinsic call to pow(). This takes two arguments, the | |
444 | signature to which can be either: | |
445 | ||
446 | float pow (float base, T exponent); | |
447 | double pow (double base, T exponent); | |
448 | real pow (real base, T exponent); | |
449 | ||
450 | This computes the value of BASE raised to the power of EXPONENT. | |
451 | The original call expression is held in CALLEXP. */ | |
452 | ||
453 | static tree | |
454 | expand_intrinsic_pow (tree callexp) | |
455 | { | |
456 | tree base = CALL_EXPR_ARG (callexp, 0); | |
457 | tree exponent = CALL_EXPR_ARG (callexp, 1); | |
458 | tree exptype = TREE_TYPE (exponent); | |
459 | ||
460 | /* Which variant of __builtin_pow* should we call? */ | |
461 | built_in_function code = SCALAR_FLOAT_TYPE_P (exptype) ? BUILT_IN_POW | |
462 | : INTEGRAL_TYPE_P (exptype) ? BUILT_IN_POWI | |
463 | : END_BUILTINS; | |
464 | gcc_assert (code != END_BUILTINS); | |
465 | ||
466 | tree builtin = mathfn_built_in (TREE_TYPE (base), code); | |
467 | gcc_assert (builtin != NULL_TREE); | |
468 | ||
469 | return call_builtin_fn (callexp, DECL_FUNCTION_CODE (builtin), 2, | |
470 | base, exponent); | |
471 | } | |
472 | ||
/* Expand a front-end intrinsic call to va_arg().  This takes either one or two
   arguments, the signature to which can be either:

	T va_arg(T) (ref va_list ap);
	void va_arg(T) (va_list ap, ref T parmn);

   This retrieves the next variadic parameter that is type T from the given
   va_list.  If also given, store the value into parmn, otherwise return it.
   The original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_vaarg (tree callexp)
{
  tree ap = CALL_EXPR_ARG (callexp, 0);
  tree parmn = NULL_TREE;
  tree type;

  STRIP_NOPS (ap);

  /* One-argument form: the target type is the call's return type.  */
  if (call_expr_nargs (callexp) == 1)
    type = TREE_TYPE (callexp);
  else
    {
      /* Two-argument form: the ref parameter arrives as an ADDR_EXPR;
	 peel it off to get both the lvalue and the target type.  */
      parmn = CALL_EXPR_ARG (callexp, 1);
      STRIP_NOPS (parmn);
      gcc_assert (TREE_CODE (parmn) == ADDR_EXPR);
      parmn = TREE_OPERAND (parmn, 0);
      type = TREE_TYPE (parmn);
    }

  /* (T) VA_ARG_EXP<ap>;  */
  tree exp = build1 (VA_ARG_EXPR, type, ap);

  /* parmn = (T) VA_ARG_EXP<ap>;  */
  if (parmn != NULL_TREE)
    exp = modify_expr (parmn, exp);

  return exp;
}
512 | ||
/* Expand a front-end intrinsic call to va_start(), which takes two arguments,
   the signature to which is:

	void va_start(T) (out va_list ap, ref T parmn);

   This initializes the va_list type, where parmn should be the last named
   parameter.  The original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_vastart (tree callexp)
{
  tree ap = CALL_EXPR_ARG (callexp, 0);
  tree parmn = CALL_EXPR_ARG (callexp, 1);

  STRIP_NOPS (ap);
  STRIP_NOPS (parmn);

  /* The va_list argument should already have its address taken.  The second
     argument, however, is inout and that needs to be fixed to prevent a
     warning.  Could be casting, so need to check type too?  */
  gcc_assert (TREE_CODE (ap) == ADDR_EXPR && TREE_CODE (parmn) == ADDR_EXPR);

  /* Assuming nobody tries to change the return type.  */
  parmn = TREE_OPERAND (parmn, 0);

  /* __builtin_va_start (ap, parmn);  */
  return call_builtin_fn (callexp, BUILT_IN_VA_START, 2, ap, parmn);
}
540 | ||
/* Expand a front-end intrinsic call to INTRINSIC, which is either a call to
   adds(), addu(), subs(), subu(), negs(), muls(), or mulu().  These intrinsics
   expect to take two or three arguments, the signature to which can be either:

	int adds (int x, int y, ref bool overflow);
	long adds (long x, long y, ref bool overflow);
	int negs (int x, ref bool overflow);
	long negs (long x, ref bool overflow);

   This performs an operation on two signed or unsigned integers, checking for
   overflow.  The overflow is sticky, meaning that a sequence of operations
   can be done and overflow need only be checked at the end.  The original call
   expression is held in CALLEXP.  */

static tree
expand_intrinsic_checkedint (intrinsic_code intrinsic, tree callexp)
{
  tree type = TREE_TYPE (callexp);
  tree x;
  tree y;
  tree overflow;

  /* The negs() intrinsic gets turned into SUB_OVERFLOW (0, y).  */
  if (intrinsic == INTRINSIC_NEGS)
    {
      x = fold_convert (type, integer_zero_node);
      y = CALL_EXPR_ARG (callexp, 0);
      overflow = CALL_EXPR_ARG (callexp, 1);
    }
  else
    {
      x = CALL_EXPR_ARG (callexp, 0);
      y = CALL_EXPR_ARG (callexp, 1);
      overflow = CALL_EXPR_ARG (callexp, 2);
    }

  /* Which variant of *_OVERFLOW should we generate?  */
  internal_fn icode = (intrinsic == INTRINSIC_ADDS) ? IFN_ADD_OVERFLOW
    : (intrinsic == INTRINSIC_SUBS) ? IFN_SUB_OVERFLOW
    : (intrinsic == INTRINSIC_MULS) ? IFN_MUL_OVERFLOW
    : (intrinsic == INTRINSIC_NEGS) ? IFN_SUB_OVERFLOW
    : IFN_LAST;
  gcc_assert (icode != IFN_LAST);

  /* The internal function returns a complex value: the real part is the
     result and the imaginary part is the overflow flag.  */
  tree result
    = build_call_expr_internal_loc (EXPR_LOCATION (callexp), icode,
				    build_complex_type (type), 2, x, y);

  /* The overflow argument is a ref parameter; dereference it to get the
     lvalue to update.  */
  STRIP_NOPS (overflow);
  overflow = build_deref (overflow);

  /* Assign returned result to overflow parameter, however if overflow is
     already true, maintain its value.  save_expr ensures the overflow call
     is evaluated only once even though both parts are used.  */
  type = TREE_TYPE (overflow);
  result = save_expr (result);

  /* overflow |= (bool) imag(result);  */
  tree exp = fold_build2 (BIT_IOR_EXPR, type, overflow,
			  fold_convert (type, imaginary_part (result)));
  exp = modify_expr (overflow, exp);

  /* Return the value of result.  */
  return compound_expr (exp, real_part (result));
}
604 | ||
/* Expand a front-end intrinsic call to volatileLoad().  This takes one
   argument, the signature to which can be either:

	ubyte volatileLoad (ubyte* ptr);
	ushort volatileLoad (ushort* ptr);
	uint volatileLoad (uint* ptr);
	ulong volatileLoad (ulong* ptr);

   This reads a value from the memory location indicated by ptr.  Calls to
   them are guaranteed to not be removed (such as during DCE) or reordered
   in the same thread.  The original call expression is held in CALLEXP.  */

static tree
expand_volatile_load (tree callexp)
{
  tree ptr = CALL_EXPR_ARG (callexp, 0);
  tree ptrtype = TREE_TYPE (ptr);
  gcc_assert (POINTER_TYPE_P (ptrtype));

  /* (T) *(volatile T *) ptr;
     Qualifying the indirection as volatile is what prevents the load from
     being optimised away or reordered.  */
  tree type = build_qualified_type (TREE_TYPE (ptrtype), TYPE_QUAL_VOLATILE);
  tree result = indirect_ref (type, ptr);
  TREE_THIS_VOLATILE (result) = 1;

  return result;
}
631 | ||
632 | /* Expand a front-end instrinsic call to volatileStore(). This takes two | |
633 | arguments, the signature to which can be either: | |
634 | ||
635 | void volatileStore (ubyte* ptr, ubyte value); | |
636 | void volatileStore (ushort* ptr, ushort value); | |
637 | void volatileStore (uint* ptr, uint value); | |
638 | void volatileStore (ulong* ptr, ulong value); | |
639 | ||
640 | This writes a value to the memory location indicated by ptr. Calls to | |
641 | them are be guaranteed to not be removed (such as during DCE) or reordered | |
642 | in the same thread. The original call expression is held in CALLEXP. */ | |
643 | ||
644 | static tree | |
645 | expand_volatile_store (tree callexp) | |
646 | { | |
647 | tree ptr = CALL_EXPR_ARG (callexp, 0); | |
648 | tree ptrtype = TREE_TYPE (ptr); | |
649 | gcc_assert (POINTER_TYPE_P (ptrtype)); | |
650 | ||
651 | /* (T) *(volatile T *) ptr; */ | |
652 | tree type = build_qualified_type (TREE_TYPE (ptrtype), TYPE_QUAL_VOLATILE); | |
653 | tree result = indirect_ref (type, ptr); | |
654 | TREE_THIS_VOLATILE (result) = 1; | |
655 | ||
656 | /* (*(volatile T *) ptr) = value; */ | |
657 | tree value = CALL_EXPR_ARG (callexp, 1); | |
658 | return modify_expr (result, value); | |
659 | } | |
660 | ||
/* If CALLEXP is for an intrinsic, expand and return inlined compiler
   generated instructions.  Most map directly to GCC builtins, others
   require a little extra work around them.  */

tree
maybe_expand_intrinsic (tree callexp)
{
  tree callee = CALL_EXPR_FN (callexp);

  /* Calls are normally through the address of the function decl.  */
  if (TREE_CODE (callee) == ADDR_EXPR)
    callee = TREE_OPERAND (callee, 0);

  /* Indirect calls can never be intrinsics; leave them untouched.  */
  if (TREE_CODE (callee) != FUNCTION_DECL)
    return callexp;

  /* Don't expand CTFE-only intrinsics outside of semantic processing.  */
  if (DECL_BUILT_IN_CTFE (callee) && !doing_semantic_analysis_p)
    return callexp;

  intrinsic_code intrinsic = DECL_INTRINSIC_CODE (callee);
  built_in_function code;

  switch (intrinsic)
    {
    case INTRINSIC_NONE:
      return callexp;

    case INTRINSIC_BSF:
      return expand_intrinsic_bsf (callexp);

    case INTRINSIC_BSR:
      return expand_intrinsic_bsr (callexp);

    case INTRINSIC_BT:
    case INTRINSIC_BTC:
    case INTRINSIC_BTR:
    case INTRINSIC_BTS:
      return expand_intrinsic_bt (intrinsic, callexp);

    case INTRINSIC_BSWAP:
      return expand_intrinsic_bswap (callexp);

    case INTRINSIC_POPCNT:
      return expand_intrinsic_popcnt (callexp);

    /* The math intrinsics below are all defined on `real' in the D
       runtime, hence the long-double (`L') builtin variants.  */
    case INTRINSIC_COS:
      return call_builtin_fn (callexp, BUILT_IN_COSL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_SIN:
      return call_builtin_fn (callexp, BUILT_IN_SINL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_RNDTOL:
      /* Not sure if llroundl stands as a good replacement for the
	 expected behavior of rndtol.  */
      return call_builtin_fn (callexp, BUILT_IN_LLROUNDL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_SQRT:
    case INTRINSIC_SQRTF:
    case INTRINSIC_SQRTL:
      return expand_intrinsic_sqrt (intrinsic, callexp);

    case INTRINSIC_LDEXP:
      return call_builtin_fn (callexp, BUILT_IN_LDEXPL, 2,
			      CALL_EXPR_ARG (callexp, 0),
			      CALL_EXPR_ARG (callexp, 1));

    case INTRINSIC_FABS:
      return call_builtin_fn (callexp, BUILT_IN_FABSL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_RINT:
      return call_builtin_fn (callexp, BUILT_IN_RINTL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_TAN:
      return call_builtin_fn (callexp, BUILT_IN_TANL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_ISNAN:
      return call_builtin_fn (callexp, BUILT_IN_ISNAN, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_ISINFINITY:
      return call_builtin_fn (callexp, BUILT_IN_ISINF, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_ISFINITE:
      return call_builtin_fn (callexp, BUILT_IN_ISFINITE, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_EXP:
      return call_builtin_fn (callexp, BUILT_IN_EXPL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_EXPM1:
      return call_builtin_fn (callexp, BUILT_IN_EXPM1L, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_EXP2:
      return call_builtin_fn (callexp, BUILT_IN_EXP2L, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_LOG:
      return call_builtin_fn (callexp, BUILT_IN_LOGL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_LOG2:
      return call_builtin_fn (callexp, BUILT_IN_LOG2L, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_LOG10:
      return call_builtin_fn (callexp, BUILT_IN_LOG10L, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_ROUND:
      return call_builtin_fn (callexp, BUILT_IN_ROUNDL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_FLOORF:
    case INTRINSIC_FLOOR:
    case INTRINSIC_FLOORL:
      code = (intrinsic == INTRINSIC_FLOOR) ? BUILT_IN_FLOOR
	: (intrinsic == INTRINSIC_FLOORF) ? BUILT_IN_FLOORF
	: BUILT_IN_FLOORL;
      return call_builtin_fn (callexp, code, 1, CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_CEILF:
    case INTRINSIC_CEIL:
    case INTRINSIC_CEILL:
      code = (intrinsic == INTRINSIC_CEIL) ? BUILT_IN_CEIL
	: (intrinsic == INTRINSIC_CEILF) ? BUILT_IN_CEILF
	: BUILT_IN_CEILL;
      return call_builtin_fn (callexp, code, 1, CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_TRUNC:
      return call_builtin_fn (callexp, BUILT_IN_TRUNCL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_FMIN:
      return call_builtin_fn (callexp, BUILT_IN_FMINL, 2,
			      CALL_EXPR_ARG (callexp, 0),
			      CALL_EXPR_ARG (callexp, 1));

    case INTRINSIC_FMAX:
      return call_builtin_fn (callexp, BUILT_IN_FMAXL, 2,
			      CALL_EXPR_ARG (callexp, 0),
			      CALL_EXPR_ARG (callexp, 1));

    case INTRINSIC_COPYSIGN:
      return expand_intrinsic_copysign (callexp);

    case INTRINSIC_POW:
      return expand_intrinsic_pow (callexp);

    case INTRINSIC_FMA:
      return call_builtin_fn (callexp, BUILT_IN_FMAL, 3,
			      CALL_EXPR_ARG (callexp, 0),
			      CALL_EXPR_ARG (callexp, 1),
			      CALL_EXPR_ARG (callexp, 2));

    case INTRINSIC_VA_ARG:
    case INTRINSIC_C_VA_ARG:
      return expand_intrinsic_vaarg (callexp);

    case INTRINSIC_VASTART:
      return expand_intrinsic_vastart (callexp);

    case INTRINSIC_ADDS:
    case INTRINSIC_SUBS:
    case INTRINSIC_MULS:
    case INTRINSIC_NEGS:
      return expand_intrinsic_checkedint (intrinsic, callexp);

    case INTRINSIC_VLOAD:
      return expand_volatile_load (callexp);

    case INTRINSIC_VSTORE:
      return expand_volatile_store (callexp);

    default:
      gcc_unreachable ();
    }
}