/* Internal functions.
   Copyright (C) 2011-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "internal-fn.h"
#include "stor-layout.h"
#include "expr.h"
#include "optabs.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "ubsan.h"
#include "target.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "diagnostic-core.h"

/* The names of each internal function, indexed by function number.  */
const char *const internal_fn_name_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) #CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  "<invalid-fn>"
};
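
/* Editorial illustration (not in the original source): each DEF_INTERNAL_FN
   entry in internal-fn.def is expanded once per inclusion above.  Assuming an
   entry such as DEF_INTERNAL_FN (LOAD_LANES, ECF_CONST | ECF_LEAF, NULL),
   the name array above gains the string "LOAD_LANES" and the flags array
   below gains ECF_CONST | ECF_LEAF at the same index, so the tables stay in
   sync by construction.  */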

/* The ECF_* flags of each internal function, indexed by function number.  */
const int internal_fn_flags_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) FLAGS,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};

/* Fnspec of each internal function, indexed by function number.  */
const_tree internal_fn_fnspec_array[IFN_LAST + 1];

void
init_internal_fns ()
{
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) \
  if (FNSPEC) internal_fn_fnspec_array[IFN_##CODE] = \
    build_string ((int) sizeof (FNSPEC), FNSPEC ? FNSPEC : "");
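/* Editorial note: for a string literal FNSPEC, sizeof (FNSPEC) counts the
   terminating NUL, so the TREE_STRING built here carries that trailing byte
   as well.  */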
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  internal_fn_fnspec_array[IFN_LAST] = 0;
}

/* ARRAY_TYPE is an array of vector modes.  Return the associated insn
   for load-lanes-style optab OPTAB.  The insn must exist.  */

static enum insn_code
get_multi_vector_move (tree array_type, convert_optab optab)
{
  enum insn_code icode;
  enum machine_mode imode;
  enum machine_mode vmode;

  gcc_assert (TREE_CODE (array_type) == ARRAY_TYPE);
  imode = TYPE_MODE (array_type);
  vmode = TYPE_MODE (TREE_TYPE (array_type));

  icode = convert_optab_handler (optab, imode, vmode);
  gcc_assert (icode != CODE_FOR_nothing);
  return icode;
}
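
/* Editorial illustration: on a target that implements load/store-lanes
   (e.g. ARM/AArch64), ARRAY_TYPE might be an array of two V4SI vectors whose
   TYPE_MODE is a 256-bit integer mode such as OImode, while its element type
   has V4SImode; the expanders below then look up the target's
   vec_load_lanes / vec_store_lanes pattern for that mode pair.  */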

/* Expand LOAD_LANES call STMT.  */

static void
expand_LOAD_LANES (gimple stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, mem;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (lhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  mem = expand_normal (rhs);

  gcc_assert (MEM_P (mem));
  PUT_MODE (mem, TYPE_MODE (type));

  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  expand_insn (get_multi_vector_move (type, vec_load_lanes_optab), 2, ops);
}

/* Expand STORE_LANES call STMT.  */

static void
expand_STORE_LANES (gimple stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, reg;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (rhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  reg = expand_normal (rhs);

  gcc_assert (MEM_P (target));
  PUT_MODE (target, TYPE_MODE (type));

  create_fixed_operand (&ops[0], target);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  expand_insn (get_multi_vector_move (type, vec_store_lanes_optab), 2, ops);
}

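/* Editorial note: ANNOTATE calls carry loop annotations (e.g. from
   #pragma GCC ivdep) and are consumed earlier in compilation, so none
   should survive to expansion time.  */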
static void
expand_ANNOTATE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LANE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_VF (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LAST_LANE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_NULL (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_BOUNDS (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_OBJECT_SIZE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_ASAN_CHECK (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* Add sub/add overflow checking to the statement STMT.
   CODE says whether the operation is +, or -.  */

void
ubsan_expand_si_overflow_addsub_check (tree_code code, gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx_code_label *done_label, *do_error;
  rtx target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();
  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode
    = optab_handler (code == PLUS_EXPR ? addv4_optab : subv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx_insn *last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      rtx_code_label *sub_check = gen_label_rtx ();
      int pos_neg = 3;

      /* Compute the operation.  On RTL level, the addition is always
         unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
                          op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);

      /* If we can prove that one of the arguments (for MINUS_EXPR only
         the second operand, as subtraction is not commutative) is always
         non-negative or always negative, we can emit just one comparison
         and conditional jump at run time instead of two (three are present
         in the emitted code).  If one of the arguments is a CONST_INT, all
         we need is to make sure it is op1; the first
         emit_cmp_and_jump_insns will then be folded away.  Otherwise try
         to use range info if available.  */
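
      /* Worked example (editorial, not in the original): for a + b where
         b's value range is known to be non-negative, POS_NEG becomes 1 and
         only the res >= op0 comparison below is emitted.  In 8-bit
         arithmetic, 100 + 50 wraps to -106, and -106 < 100, so that single
         signed comparison catches the overflow.  */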
      if (code == PLUS_EXPR && CONST_INT_P (op0))
        {
          rtx tem = op0;
          op0 = op1;
          op1 = tem;
        }
      else if (CONST_INT_P (op1))
        ;
      else if (code == PLUS_EXPR && TREE_CODE (arg0) == SSA_NAME)
        {
          wide_int arg0_min, arg0_max;
          if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
            {
              if (!wi::neg_p (arg0_min, TYPE_SIGN (TREE_TYPE (arg0))))
                pos_neg = 1;
              else if (wi::neg_p (arg0_max, TYPE_SIGN (TREE_TYPE (arg0))))
                pos_neg = 2;
            }
          if (pos_neg != 3)
            {
              rtx tem = op0;
              op0 = op1;
              op1 = tem;
            }
        }
      if (pos_neg == 3 && !CONST_INT_P (op1) && TREE_CODE (arg1) == SSA_NAME)
        {
          wide_int arg1_min, arg1_max;
          if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
            {
              if (!wi::neg_p (arg1_min, TYPE_SIGN (TREE_TYPE (arg1))))
                pos_neg = 1;
              else if (wi::neg_p (arg1_max, TYPE_SIGN (TREE_TYPE (arg1))))
                pos_neg = 2;
            }
        }

      /* If op1 is negative, we have to use a different check.  */
      if (pos_neg == 3)
        emit_cmp_and_jump_insns (op1, const0_rtx, LT, NULL_RTX, mode,
                                 false, sub_check, PROB_EVEN);

      /* Compare the result of the operation with one of the operands.  */
      if (pos_neg & 1)
        emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? GE : LE,
                                 NULL_RTX, mode, false, done_label,
                                 PROB_VERY_LIKELY);

      /* If we get here, we have to print the error.  */
      if (pos_neg == 3)
        {
          emit_jump (do_error);

          emit_label (sub_check);
        }

      /* We have k = a + b for b < 0 here.  k <= a must hold.  */
      if (pos_neg & 2)
        emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? LE : GE,
                                 NULL_RTX, mode, false, done_label,
                                 PROB_VERY_LIKELY);
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (code, gimple_location (stmt),
                                     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}

/* Add negate overflow checking to the statement STMT.  */

void
ubsan_expand_si_overflow_neg_check (gimple stmt)
{
  rtx res, op1;
  tree lhs, fn, arg1;
  rtx_code_label *done_label, *do_error;
  rtx target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg1));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (negv3_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[3];
      rtx_insn *last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op1, mode);
      create_fixed_operand (&ops[2], do_error);
      if (maybe_expand_insn (icode, 3, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      /* Compute the operation.  On RTL level, the negation always wraps
         (it is effectively unsigned).  */
      res = expand_unop (mode, neg_optab, op1, NULL_RTX, false);

      /* Compare the operand with the most negative value.  */
      rtx minv = expand_normal (TYPE_MIN_VALUE (TREE_TYPE (arg1)));
      emit_cmp_and_jump_insns (op1, minv, NE, NULL_RTX, mode, false,
                               done_label, PROB_VERY_LIKELY);
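
      /* Editorial note: two's complement negation overflows only for the
         most negative value; e.g. for 32-bit int, only op1 == -2147483648
         overflows, since +2147483648 is not representable.  Hence the single
         NE comparison above suffices.  */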
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (NEGATE_EXPR, gimple_location (stmt),
                                     TREE_TYPE (arg1), arg1, NULL_TREE);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}

/* Add mul overflow checking to the statement STMT.  */

void
ubsan_expand_si_overflow_mul_check (gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx_code_label *done_label, *do_error;
  rtx target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (mulv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx_insn *last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      struct separate_ops ops;
      enum machine_mode hmode
        = mode_for_size (GET_MODE_PRECISION (mode) / 2, MODE_INT, 1);
      ops.op0 = arg0;
      ops.op1 = arg1;
      ops.op2 = NULL_TREE;
      ops.location = gimple_location (stmt);
      if (GET_MODE_2XWIDER_MODE (mode) != VOIDmode
          && targetm.scalar_mode_supported_p (GET_MODE_2XWIDER_MODE (mode)))
        {
          enum machine_mode wmode = GET_MODE_2XWIDER_MODE (mode);
          ops.code = WIDEN_MULT_EXPR;
          ops.type
            = build_nonstandard_integer_type (GET_MODE_PRECISION (wmode), 0);

          res = expand_expr_real_2 (&ops, NULL_RTX, wmode, EXPAND_NORMAL);
          rtx hipart = expand_shift (RSHIFT_EXPR, wmode, res,
                                     GET_MODE_PRECISION (mode), NULL_RTX, 0);
          hipart = gen_lowpart (mode, hipart);
          res = gen_lowpart (mode, res);
          rtx signbit = expand_shift (RSHIFT_EXPR, mode, res,
                                      GET_MODE_PRECISION (mode) - 1,
                                      NULL_RTX, 0);
          /* RES is the low half of the double-width result, HIPART
             the high half.  There was overflow if
             HIPART is different from RES < 0 ? -1 : 0.  */
          emit_cmp_and_jump_insns (signbit, hipart, EQ, NULL_RTX, mode,
                                   false, done_label, PROB_VERY_LIKELY);
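
          /* Worked example (editorial): for 8-bit operands multiplied via a
             16-bit WIDEN_MULT_EXPR, 100 * 3 = 300 = 0x012c, so HIPART is 1
             while the low half 0x2c is non-negative (expected high part 0);
             1 != 0, so overflow is reported.  For 100 * 1 = 100 the high
             part is 0 and matches, so no overflow.  */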
        }
      else if (hmode != BLKmode
               && 2 * GET_MODE_PRECISION (hmode) == GET_MODE_PRECISION (mode))
        {
          rtx_code_label *large_op0 = gen_label_rtx ();
          rtx_code_label *small_op0_large_op1 = gen_label_rtx ();
          rtx_code_label *one_small_one_large = gen_label_rtx ();
          rtx_code_label *both_ops_large = gen_label_rtx ();
          rtx_code_label *after_hipart_neg = gen_label_rtx ();
          rtx_code_label *after_lopart_neg = gen_label_rtx ();
          rtx_code_label *do_overflow = gen_label_rtx ();
          rtx_code_label *hipart_different = gen_label_rtx ();

          unsigned int hprec = GET_MODE_PRECISION (hmode);
          rtx hipart0 = expand_shift (RSHIFT_EXPR, mode, op0, hprec,
                                      NULL_RTX, 0);
          hipart0 = gen_lowpart (hmode, hipart0);
          rtx lopart0 = gen_lowpart (hmode, op0);
          rtx signbit0 = expand_shift (RSHIFT_EXPR, hmode, lopart0, hprec - 1,
                                       NULL_RTX, 0);
          rtx hipart1 = expand_shift (RSHIFT_EXPR, mode, op1, hprec,
                                      NULL_RTX, 0);
          hipart1 = gen_lowpart (hmode, hipart1);
          rtx lopart1 = gen_lowpart (hmode, op1);
          rtx signbit1 = expand_shift (RSHIFT_EXPR, hmode, lopart1, hprec - 1,
                                       NULL_RTX, 0);

          res = gen_reg_rtx (mode);

          /* True if op0 resp. op1 are known to be in the range of
             halfstype.  */
          bool op0_small_p = false;
          bool op1_small_p = false;
          /* True if op0 resp. op1 are known to have all zeros or all ones
             in the upper half of bits, but are not known to be
             op{0,1}_small_p.  */
          bool op0_medium_p = false;
          bool op1_medium_p = false;
          /* -1 if op{0,1} is known to be negative, 0 if it is known to be
             nonnegative, 1 if unknown.  */
          int op0_sign = 1;
          int op1_sign = 1;

          if (TREE_CODE (arg0) == SSA_NAME)
            {
              wide_int arg0_min, arg0_max;
              if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
                {
                  unsigned int mprec0 = wi::min_precision (arg0_min, SIGNED);
                  unsigned int mprec1 = wi::min_precision (arg0_max, SIGNED);
                  if (mprec0 <= hprec && mprec1 <= hprec)
                    op0_small_p = true;
                  else if (mprec0 <= hprec + 1 && mprec1 <= hprec + 1)
                    op0_medium_p = true;
                  if (!wi::neg_p (arg0_min, TYPE_SIGN (TREE_TYPE (arg0))))
                    op0_sign = 0;
                  else if (wi::neg_p (arg0_max, TYPE_SIGN (TREE_TYPE (arg0))))
                    op0_sign = -1;
                }
            }
          if (TREE_CODE (arg1) == SSA_NAME)
            {
              wide_int arg1_min, arg1_max;
              if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
                {
                  unsigned int mprec0 = wi::min_precision (arg1_min, SIGNED);
                  unsigned int mprec1 = wi::min_precision (arg1_max, SIGNED);
                  if (mprec0 <= hprec && mprec1 <= hprec)
                    op1_small_p = true;
                  else if (mprec0 <= hprec + 1 && mprec1 <= hprec + 1)
                    op1_medium_p = true;
                  if (!wi::neg_p (arg1_min, TYPE_SIGN (TREE_TYPE (arg1))))
                    op1_sign = 0;
                  else if (wi::neg_p (arg1_max, TYPE_SIGN (TREE_TYPE (arg1))))
                    op1_sign = -1;
                }
            }

          int smaller_sign = 1;
          int larger_sign = 1;
          if (op0_small_p)
            {
              smaller_sign = op0_sign;
              larger_sign = op1_sign;
            }
          else if (op1_small_p)
            {
              smaller_sign = op1_sign;
              larger_sign = op0_sign;
            }
          else if (op0_sign == op1_sign)
            {
              smaller_sign = op0_sign;
              larger_sign = op0_sign;
            }

          if (!op0_small_p)
            emit_cmp_and_jump_insns (signbit0, hipart0, NE, NULL_RTX, hmode,
                                     false, large_op0, PROB_UNLIKELY);

          if (!op1_small_p)
            emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
                                     false, small_op0_large_op1,
                                     PROB_UNLIKELY);

          /* If both op0 and op1 are sign extended from hmode to mode,
             the multiplication will never overflow.  We can do just one
             hmode x hmode => mode widening multiplication.  */
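          /* Editorial example: with a 32-bit mode and 16-bit hmode, two
             sign-extended operands lie in [-32768, 32767], so the product's
             magnitude is at most 2^30, which always fits in a signed 32-bit
             result.  */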
          if (GET_CODE (lopart0) == SUBREG)
            {
              SUBREG_PROMOTED_VAR_P (lopart0) = 1;
              SUBREG_PROMOTED_SET (lopart0, 0);
            }
          if (GET_CODE (lopart1) == SUBREG)
            {
              SUBREG_PROMOTED_VAR_P (lopart1) = 1;
              SUBREG_PROMOTED_SET (lopart1, 0);
            }
          tree halfstype = build_nonstandard_integer_type (hprec, 0);
          ops.op0 = make_tree (halfstype, lopart0);
          ops.op1 = make_tree (halfstype, lopart1);
          ops.code = WIDEN_MULT_EXPR;
          ops.type = TREE_TYPE (arg0);
          rtx thisres
            = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, thisres);
          emit_jump (done_label);

          emit_label (small_op0_large_op1);

          /* If op0 is sign extended from hmode to mode, but op1 is not,
             just swap the arguments and handle it as op1 sign extended,
             op0 not.  */
          rtx larger = gen_reg_rtx (mode);
          rtx hipart = gen_reg_rtx (hmode);
          rtx lopart = gen_reg_rtx (hmode);
          emit_move_insn (larger, op1);
          emit_move_insn (hipart, hipart1);
          emit_move_insn (lopart, lopart0);
          emit_jump (one_small_one_large);

          emit_label (large_op0);

          if (!op1_small_p)
            emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
                                     false, both_ops_large, PROB_UNLIKELY);

          /* If op1 is sign extended from hmode to mode, but op0 is not,
             prepare larger, hipart and lopart pseudos and handle it together
             with small_op0_large_op1.  */
          emit_move_insn (larger, op0);
          emit_move_insn (hipart, hipart0);
          emit_move_insn (lopart, lopart1);

          emit_label (one_small_one_large);

          /* lopart is the low part of the operand that is sign extended
             to mode, larger is the other operand, hipart is the
             high part of larger and lopart0 and lopart1 are the low parts
             of both operands.
             We perform lopart0 * lopart1 and lopart * hipart widening
             multiplications.  */
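          /* Editorial sketch of the algebra used below: with LARGER
             decomposed as (HIPART << hprec) + LOPART_OF_LARGER (low half
             taken unsigned), the exact product is

               small * larger = ((small * hipart) << hprec)
                                + small * lopart_of_larger.

             The two widening multiplies compute these terms from the
             unsigned low halves; the conditional subtractions that follow
             (for hipart < 0 and for lopart < 0) correct for the difference
             between those unsigned halves and the signed values.  */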
          tree halfutype = build_nonstandard_integer_type (hprec, 1);
          ops.op0 = make_tree (halfutype, lopart0);
          ops.op1 = make_tree (halfutype, lopart1);
          rtx lo0xlo1
            = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);

          ops.op0 = make_tree (halfutype, lopart);
          ops.op1 = make_tree (halfutype, hipart);
          rtx loxhi = gen_reg_rtx (mode);
          rtx tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (loxhi, tem);

          /* if (hipart < 0) loxhi -= lopart << (bitsize / 2);  */
          if (larger_sign == 0)
            emit_jump (after_hipart_neg);
          else if (larger_sign != -1)
            emit_cmp_and_jump_insns (hipart, const0_rtx, GE, NULL_RTX, hmode,
                                     false, after_hipart_neg, PROB_EVEN);

          tem = convert_modes (mode, hmode, lopart, 1);
          tem = expand_shift (LSHIFT_EXPR, mode, tem, hprec, NULL_RTX, 1);
          tem = expand_simple_binop (mode, MINUS, loxhi, tem, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);

          emit_label (after_hipart_neg);

          /* if (lopart < 0) loxhi -= larger;  */
          if (smaller_sign == 0)
            emit_jump (after_lopart_neg);
          else if (smaller_sign != -1)
            emit_cmp_and_jump_insns (lopart, const0_rtx, GE, NULL_RTX, hmode,
                                     false, after_lopart_neg, PROB_EVEN);

          tem = expand_simple_binop (mode, MINUS, loxhi, larger, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);

          emit_label (after_lopart_neg);

          /* loxhi += (uns) lo0xlo1 >> (bitsize / 2);  */
          tem = expand_shift (RSHIFT_EXPR, mode, lo0xlo1, hprec, NULL_RTX, 1);
          tem = expand_simple_binop (mode, PLUS, loxhi, tem, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);

          /* if (loxhi >> (bitsize / 2)
                 == (hmode) loxhi >> (bitsize / 2 - 1))  */
          rtx hipartloxhi = expand_shift (RSHIFT_EXPR, mode, loxhi, hprec,
                                          NULL_RTX, 0);
          hipartloxhi = gen_lowpart (hmode, hipartloxhi);
          rtx lopartloxhi = gen_lowpart (hmode, loxhi);
          rtx signbitloxhi = expand_shift (RSHIFT_EXPR, hmode, lopartloxhi,
                                           hprec - 1, NULL_RTX, 0);

          emit_cmp_and_jump_insns (signbitloxhi, hipartloxhi, NE, NULL_RTX,
                                   hmode, false, do_overflow,
                                   PROB_VERY_UNLIKELY);

          /* res = (loxhi << (bitsize / 2)) | (hmode) lo0xlo1;  */
          rtx loxhishifted = expand_shift (LSHIFT_EXPR, mode, loxhi, hprec,
                                           NULL_RTX, 1);
          tem = convert_modes (mode, hmode, gen_lowpart (hmode, lo0xlo1), 1);

          tem = expand_simple_binop (mode, IOR, loxhishifted, tem, res,
                                     1, OPTAB_DIRECT);
          if (tem != res)
            emit_move_insn (res, tem);
          emit_jump (done_label);

          emit_label (both_ops_large);

          /* If both operands are large (not sign extended from hmode),
             then perform the full multiplication which will be the result
             of the operation.  The only cases which don't overflow are
             some cases where both hipart0 and hipart1 are 0 or -1.  */
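          /* Editorial note: the "in {0, -1}" tests below use the standard
             trick that (x + 1) <= 1 as an unsigned comparison holds exactly
             when x is 0 or -1.  */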
          ops.code = MULT_EXPR;
          ops.op0 = make_tree (TREE_TYPE (arg0), op0);
          ops.op1 = make_tree (TREE_TYPE (arg0), op1);
          tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, tem);

          if (!op0_medium_p)
            {
              tem = expand_simple_binop (hmode, PLUS, hipart0, const1_rtx,
                                         NULL_RTX, 1, OPTAB_DIRECT);
              emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
                                       true, do_error, PROB_VERY_UNLIKELY);
            }

          if (!op1_medium_p)
            {
              tem = expand_simple_binop (hmode, PLUS, hipart1, const1_rtx,
                                         NULL_RTX, 1, OPTAB_DIRECT);
              emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
                                       true, do_error, PROB_VERY_UNLIKELY);
            }

          /* At this point hipart{0,1} are both in [-1, 0].  If they are the
             same, overflow happened if res is negative; if they are
             different, overflow happened if res is positive.  */
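          /* Editorial example (32-bit mode, 16-bit hmode): -50000 * -50000
             = 2500000000 wraps to -1794967296; the high halves are equal
             (-1, -1) and the wrapped result is negative, so overflow is
             reported.  -50000 * 50000 = -2500000000 wraps to +1794967296;
             the high halves differ (-1 vs 0) and the wrapped result is
             positive, so overflow is again reported.  */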
          if (op0_sign != 1 && op1_sign != 1 && op0_sign != op1_sign)
            emit_jump (hipart_different);
          else if (op0_sign == 1 || op1_sign == 1)
            emit_cmp_and_jump_insns (hipart0, hipart1, NE, NULL_RTX, hmode,
                                     true, hipart_different, PROB_EVEN);

          emit_cmp_and_jump_insns (res, const0_rtx, LT, NULL_RTX, mode, false,
                                   do_error, PROB_VERY_UNLIKELY);
          emit_jump (done_label);

          emit_label (hipart_different);

          emit_cmp_and_jump_insns (res, const0_rtx, GE, NULL_RTX, mode, false,
                                   do_error, PROB_VERY_UNLIKELY);
          emit_jump (done_label);

          emit_label (do_overflow);

          /* Overflow, do full multiplication and fallthru into do_error.  */
          ops.op0 = make_tree (TREE_TYPE (arg0), op0);
          ops.op1 = make_tree (TREE_TYPE (arg0), op1);
          tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, tem);
        }
      else
        {
          ops.code = MULT_EXPR;
          ops.type = TREE_TYPE (arg0);
          res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_jump (done_label);
        }
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (MULT_EXPR, gimple_location (stmt),
                                     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}

/* Expand UBSAN_CHECK_ADD call STMT.  */

static void
expand_UBSAN_CHECK_ADD (gimple stmt)
{
  ubsan_expand_si_overflow_addsub_check (PLUS_EXPR, stmt);
}

/* Expand UBSAN_CHECK_SUB call STMT.  */

static void
expand_UBSAN_CHECK_SUB (gimple stmt)
{
  if (integer_zerop (gimple_call_arg (stmt, 0)))
    ubsan_expand_si_overflow_neg_check (stmt);
  else
    ubsan_expand_si_overflow_addsub_check (MINUS_EXPR, stmt);
}

/* Expand UBSAN_CHECK_MUL call STMT.  */

static void
expand_UBSAN_CHECK_MUL (gimple stmt)
{
  ubsan_expand_si_overflow_mul_check (stmt);
}

/* This should get folded in tree-vectorizer.c.  */

static void
expand_LOOP_VECTORIZED (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

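/* Expand MASK_LOAD call STMT: load from the memory reference built from the
   first two call arguments, under control of the vector mask given by the
   third argument.  (Descriptive comment added editorially.)  */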
static void
expand_MASK_LOAD (gimple stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, target, mask;

  maskt = gimple_call_arg (stmt, 2);
  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    return;
  type = TREE_TYPE (lhs);
  rhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
                     gimple_call_arg (stmt, 1));

  mem = expand_expr (rhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskload_optab, TYPE_MODE (type)), 3, ops);
}

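/* Expand MASK_STORE call STMT: store the fourth call argument to the memory
   reference built from the first two arguments, under control of the vector
   mask given by the third argument.  (Descriptive comment added
   editorially.)  */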
static void
expand_MASK_STORE (gimple stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, reg, mask;

  maskt = gimple_call_arg (stmt, 2);
  rhs = gimple_call_arg (stmt, 3);
  type = TREE_TYPE (rhs);
  lhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
                     gimple_call_arg (stmt, 1));

  mem = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  reg = expand_normal (rhs);
  create_fixed_operand (&ops[0], mem);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskstore_optab, TYPE_MODE (type)), 3, ops);
}

static void
expand_ABNORMAL_DISPATCHER (gimple)
{
}

static void
expand_BUILTIN_EXPECT (gimple stmt)
{
  /* When guessing was done, the hints should already have been stripped
     away.  */
  gcc_assert (!flag_guess_branch_prob || optimize == 0 || seen_error ());

  rtx target;
  tree lhs = gimple_call_lhs (stmt);
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  else
    target = const0_rtx;
  rtx val = expand_expr (gimple_call_arg (stmt, 0), target, VOIDmode, EXPAND_NORMAL);
  if (lhs && val != target)
    emit_move_insn (target, val);
}

/* Routines to expand each internal function, indexed by function number.
   Each routine has the prototype:

       expand_<NAME> (gimple stmt)

   where STMT is the statement that performs the call.  */
static void (*const internal_fn_expanders[]) (gimple) = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) expand_##CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};

/* Expand STMT, which is a call to internal function FN.  */

void
expand_internal_call (gimple stmt)
{
  internal_fn_expanders[(int) gimple_call_internal_fn (stmt)] (stmt);
}