/* Internal functions.
   Copyright (C) 2011-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "internal-fn.h"
24#include "tree.h"
d8a2d370 25#include "stor-layout.h"
25583c4f
RS
26#include "expr.h"
27#include "optabs.h"
2fb9a547
AM
28#include "basic-block.h"
29#include "tree-ssa-alias.h"
30#include "internal-fn.h"
31#include "gimple-expr.h"
32#include "is-a.h"
25583c4f 33#include "gimple.h"
31e071ae
MP
34#include "ubsan.h"
35#include "target.h"
36#include "predict.h"
97286431
JJ
37#include "stringpool.h"
38#include "tree-ssanames.h"
ed9c79e1 39#include "diagnostic-core.h"
25583c4f
RS
40
/* The names of each internal function, indexed by function number.
   Generated from internal-fn.def; the trailing entry is a sentinel
   for out-of-range function codes.  */
const char *const internal_fn_name_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS) #CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  "<invalid-fn>"
};
48
/* The ECF_* flags of each internal function, indexed by function number.
   Generated from internal-fn.def; the trailing 0 is a sentinel matching
   the "<invalid-fn>" entry of internal_fn_name_array.  */
const int internal_fn_flags_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS) FLAGS,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};
56
272c6793
RS
57/* ARRAY_TYPE is an array of vector modes. Return the associated insn
58 for load-lanes-style optab OPTAB. The insn must exist. */
59
60static enum insn_code
61get_multi_vector_move (tree array_type, convert_optab optab)
62{
63 enum insn_code icode;
64 enum machine_mode imode;
65 enum machine_mode vmode;
66
67 gcc_assert (TREE_CODE (array_type) == ARRAY_TYPE);
68 imode = TYPE_MODE (array_type);
69 vmode = TYPE_MODE (TREE_TYPE (array_type));
70
71 icode = convert_optab_handler (optab, imode, vmode);
72 gcc_assert (icode != CODE_FOR_nothing);
73 return icode;
74}
75
76/* Expand LOAD_LANES call STMT. */
77
78static void
79expand_LOAD_LANES (gimple stmt)
80{
81 struct expand_operand ops[2];
82 tree type, lhs, rhs;
83 rtx target, mem;
84
85 lhs = gimple_call_lhs (stmt);
86 rhs = gimple_call_arg (stmt, 0);
87 type = TREE_TYPE (lhs);
88
89 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
90 mem = expand_normal (rhs);
91
92 gcc_assert (MEM_P (mem));
93 PUT_MODE (mem, TYPE_MODE (type));
94
95 create_output_operand (&ops[0], target, TYPE_MODE (type));
96 create_fixed_operand (&ops[1], mem);
97 expand_insn (get_multi_vector_move (type, vec_load_lanes_optab), 2, ops);
98}
99
100/* Expand STORE_LANES call STMT. */
101
102static void
103expand_STORE_LANES (gimple stmt)
104{
105 struct expand_operand ops[2];
106 tree type, lhs, rhs;
107 rtx target, reg;
108
109 lhs = gimple_call_lhs (stmt);
110 rhs = gimple_call_arg (stmt, 0);
111 type = TREE_TYPE (rhs);
112
113 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
114 reg = expand_normal (rhs);
115
116 gcc_assert (MEM_P (target));
117 PUT_MODE (target, TYPE_MODE (type));
118
119 create_fixed_operand (&ops[0], target);
120 create_input_operand (&ops[1], reg, TYPE_MODE (type));
121 expand_insn (get_multi_vector_move (type, vec_store_lanes_optab), 2, ops);
122}
123
8170608b
TB
/* ANNOTATE calls are expected to be removed before RTL expansion;
   reaching this point is a compiler bug.  (NOTE(review): presumably
   stripped during loop-annotation lowering -- confirm which pass.)  */

static void
expand_ANNOTATE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
129
74bf76ed
JJ
/* This should get expanded in adjust_simduid_builtins.  Any call that
   survives to expansion time indicates a pass-ordering bug.  */

static void
expand_GOMP_SIMD_LANE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
137
/* This should get expanded in adjust_simduid_builtins.  Any call that
   survives to expansion time indicates a pass-ordering bug.  */

static void
expand_GOMP_SIMD_VF (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
145
/* This should get expanded in adjust_simduid_builtins.  Any call that
   survives to expansion time indicates a pass-ordering bug.  */

static void
expand_GOMP_SIMD_LAST_LANE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
153
b9a55b13
MP
/* This should get expanded in the sanopt pass.  Any call that survives
   to expansion time indicates a pass-ordering bug.  */

static void
expand_UBSAN_NULL (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
161
31e071ae
MP
/* Add sub/add overflow checking to the statement STMT.
   CODE says whether the operation is +, or -.  STMT is the
   UBSAN_CHECK_ADD/UBSAN_CHECK_SUB internal call; on overflow the
   ubsan runtime reporting builtin is invoked, otherwise the plain
   result is stored into the call's lhs (if any).  */

void
ubsan_expand_si_overflow_addsub_check (tree_code code, gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();
  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* First try a target-provided signed add/sub-with-overflow pattern
     (addv4/subv4); its fourth operand is the label to branch to on
     overflow.  */
  enum insn_code icode
    = optab_handler (code == PLUS_EXPR ? addv4_optab : subv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
	{
	  last = get_last_insn ();
	  /* Mark the overflow branch as very unlikely unless a
	     probability note is already present.  */
	  if (profile_status_for_fn (cfun) != PROFILE_ABSENT
	      && JUMP_P (last)
	      && any_condjump_p (last)
	      && !find_reg_note (last, REG_BR_PROB, 0))
	    add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);
	}
      else
	{
	  /* Pattern failed to expand; discard partial RTL and fall back
	     to the generic comparison-based sequence below.  */
	  delete_insns_since (last);
	  icode = CODE_FOR_nothing;
	}
    }

  if (icode == CODE_FOR_nothing)
    {
      rtx sub_check = gen_label_rtx ();
      /* Bitmask of which signs of op1 are still possible:
	 bit 0 = may be non-negative, bit 1 = may be negative.  */
      int pos_neg = 3;

      /* Compute the operation.  On RTL level, the addition is always
	 unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
			  op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);

      /* If we can prove one of the arguments (for MINUS_EXPR only
	 the second operand, as subtraction is not commutative) is always
	 non-negative or always negative, we can do just one comparison
	 and conditional jump instead of 2 at runtime, 3 present in the
	 emitted code.  If one of the arguments is CONST_INT, all we
	 need is to make sure it is op1, then the first
	 emit_cmp_and_jump_insns will be just folded.  Otherwise try
	 to use range info if available.  */
      if (code == PLUS_EXPR && CONST_INT_P (op0))
	{
	  rtx tem = op0;
	  op0 = op1;
	  op1 = tem;
	}
      else if (CONST_INT_P (op1))
	;
      else if (code == PLUS_EXPR && TREE_CODE (arg0) == SSA_NAME)
	{
	  double_int arg0_min, arg0_max;
	  if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
	    {
	      if (!arg0_min.is_negative ())
		pos_neg = 1;
	      else if (arg0_max.is_negative ())
		pos_neg = 2;
	    }
	  if (pos_neg != 3)
	    {
	      /* Sign of arg0 is known; swap so the known-sign operand
		 is op1, as the checks below test op1.  */
	      rtx tem = op0;
	      op0 = op1;
	      op1 = tem;
	    }
	}
      if (pos_neg == 3 && !CONST_INT_P (op1) && TREE_CODE (arg1) == SSA_NAME)
	{
	  double_int arg1_min, arg1_max;
	  if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
	    {
	      if (!arg1_min.is_negative ())
		pos_neg = 1;
	      else if (arg1_max.is_negative ())
		pos_neg = 2;
	    }
	}

      /* If the op1 is negative, we have to use a different check.  */
      if (pos_neg == 3)
	emit_cmp_and_jump_insns (op1, const0_rtx, LT, NULL_RTX, mode,
				 false, sub_check, PROB_EVEN);

      /* Compare the result of the operation with one of the operands.  */
      if (pos_neg & 1)
	emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? GE : LE,
				 NULL_RTX, mode, false, done_label,
				 PROB_VERY_LIKELY);

      /* If we get here, we have to print the error.  */
      if (pos_neg == 3)
	{
	  emit_jump (do_error);

	  emit_label (sub_check);
	}

      /* We have k = a + b for b < 0 here.  k <= a must hold.  */
      if (pos_neg & 2)
	emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? LE : GE,
				 NULL_RTX, mode, false, done_label,
				 PROB_VERY_LIKELY);
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (code, gimple_location (stmt),
				     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}
310
/* Add negate overflow checking to the statement STMT.  STMT is a
   UBSAN_CHECK_SUB call whose first argument is zero; only argument 1
   (the negated operand) is used.  On overflow (negating the most
   negative value) the ubsan reporting builtin is invoked.  */

void
ubsan_expand_si_overflow_neg_check (gimple stmt)
{
  rtx res, op1;
  tree lhs, fn, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg1));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* First try a target-provided negate-with-overflow pattern (negv3);
     its third operand is the label to branch to on overflow.  */
  enum insn_code icode = optab_handler (negv3_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[3];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op1, mode);
      create_fixed_operand (&ops[2], do_error);
      if (maybe_expand_insn (icode, 3, ops))
	{
	  last = get_last_insn ();
	  /* Mark the overflow branch as very unlikely unless a
	     probability note is already present.  */
	  if (profile_status_for_fn (cfun) != PROFILE_ABSENT
	      && JUMP_P (last)
	      && any_condjump_p (last)
	      && !find_reg_note (last, REG_BR_PROB, 0))
	    add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);
	}
      else
	{
	  /* Pattern failed to expand; discard partial RTL and fall back
	     to the generic sequence below.  */
	  delete_insns_since (last);
	  icode = CODE_FOR_nothing;
	}
    }

  if (icode == CODE_FOR_nothing)
    {
      /* Compute the operation.  On RTL level, the negation is always
	 unsigned.  */
      res = expand_unop (mode, neg_optab, op1, NULL_RTX, false);

      /* Compare the operand with the most negative value; that is the
	 only input for which signed negation overflows.  */
      rtx minv = expand_normal (TYPE_MIN_VALUE (TREE_TYPE (arg1)));
      emit_cmp_and_jump_insns (op1, minv, NE, NULL_RTX, mode, false,
			       done_label, PROB_VERY_LIKELY);
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (NEGATE_EXPR, gimple_location (stmt),
				     TREE_TYPE (arg1), arg1, NULL_TREE);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}
386
/* Add mul overflow checking to the statement STMT.  STMT is a
   UBSAN_CHECK_MUL internal call.  Strategy, in order of preference:
   1) a target mulv4 pattern that branches on overflow itself;
   2) a double-width widening multiply, checking that the high half
      equals the sign-extension of the low half;
   3) a decomposition into half-mode operations;
   4) a plain multiply followed by the ubsan report (always "overflow"
      in the sense that no check is possible -- this branch only emits
      the multiplication and jumps to done_label).  */

void
ubsan_expand_si_overflow_mul_check (gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* First try a target-provided multiply-with-overflow pattern (mulv4);
     its fourth operand is the label to branch to on overflow.  */
  enum insn_code icode = optab_handler (mulv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
	{
	  last = get_last_insn ();
	  /* Mark the overflow branch as very unlikely unless a
	     probability note is already present.  */
	  if (profile_status_for_fn (cfun) != PROFILE_ABSENT
	      && JUMP_P (last)
	      && any_condjump_p (last)
	      && !find_reg_note (last, REG_BR_PROB, 0))
	    add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);
	}
      else
	{
	  /* Pattern failed to expand; discard partial RTL and fall back
	     to the generic strategies below.  */
	  delete_insns_since (last);
	  icode = CODE_FOR_nothing;
	}
    }

  if (icode == CODE_FOR_nothing)
    {
      struct separate_ops ops;
      /* hmode is the integer mode of half the precision of MODE,
	 used by the half-mode decomposition below.  */
      enum machine_mode hmode
	= mode_for_size (GET_MODE_PRECISION (mode) / 2, MODE_INT, 1);
      ops.op0 = arg0;
      ops.op1 = arg1;
      ops.op2 = NULL_TREE;
      ops.location = gimple_location (stmt);
      if (GET_MODE_2XWIDER_MODE (mode) != VOIDmode
	  && targetm.scalar_mode_supported_p (GET_MODE_2XWIDER_MODE (mode)))
	{
	  /* Strategy 2: widening multiply into the double-width mode.  */
	  enum machine_mode wmode = GET_MODE_2XWIDER_MODE (mode);
	  ops.code = WIDEN_MULT_EXPR;
	  ops.type
	    = build_nonstandard_integer_type (GET_MODE_PRECISION (wmode), 0);

	  res = expand_expr_real_2 (&ops, NULL_RTX, wmode, EXPAND_NORMAL);
	  rtx hipart = expand_shift (RSHIFT_EXPR, wmode, res,
				     GET_MODE_PRECISION (mode), NULL_RTX, 0);
	  hipart = gen_lowpart (mode, hipart);
	  res = gen_lowpart (mode, res);
	  rtx signbit = expand_shift (RSHIFT_EXPR, mode, res,
				      GET_MODE_PRECISION (mode) - 1,
				      NULL_RTX, 0);
	  /* RES is low half of the double width result, HIPART
	     the high half.  There was overflow if
	     HIPART is different from RES < 0 ? -1 : 0.  */
	  emit_cmp_and_jump_insns (signbit, hipart, EQ, NULL_RTX, mode,
				   false, done_label, PROB_VERY_LIKELY);
	}
      else if (hmode != BLKmode
	       && 2 * GET_MODE_PRECISION (hmode) == GET_MODE_PRECISION (mode))
	{
	  /* Strategy 3: split each operand into high and low halves and
	     multiply piecewise, with fast paths when range information
	     proves an operand fits in the half mode.  */
	  rtx large_op0 = gen_label_rtx ();
	  rtx small_op0_large_op1 = gen_label_rtx ();
	  rtx one_small_one_large = gen_label_rtx ();
	  rtx both_ops_large = gen_label_rtx ();
	  rtx after_hipart_neg = gen_label_rtx ();
	  rtx after_lopart_neg = gen_label_rtx ();
	  rtx do_overflow = gen_label_rtx ();
	  rtx hipart_different = gen_label_rtx ();

	  int hprec = GET_MODE_PRECISION (hmode);
	  rtx hipart0 = expand_shift (RSHIFT_EXPR, mode, op0, hprec,
				      NULL_RTX, 0);
	  hipart0 = gen_lowpart (hmode, hipart0);
	  rtx lopart0 = gen_lowpart (hmode, op0);
	  rtx signbit0 = expand_shift (RSHIFT_EXPR, hmode, lopart0, hprec - 1,
				       NULL_RTX, 0);
	  rtx hipart1 = expand_shift (RSHIFT_EXPR, mode, op1, hprec,
				      NULL_RTX, 0);
	  hipart1 = gen_lowpart (hmode, hipart1);
	  rtx lopart1 = gen_lowpart (hmode, op1);
	  rtx signbit1 = expand_shift (RSHIFT_EXPR, hmode, lopart1, hprec - 1,
				       NULL_RTX, 0);

	  res = gen_reg_rtx (mode);

	  /* True if op0 resp. op1 are known to be in the range of
	     halfstype.  */
	  bool op0_small_p = false;
	  bool op1_small_p = false;
	  /* True if op0 resp. op1 are known to have all zeros or all ones
	     in the upper half of bits, but are not known to be
	     op{0,1}_small_p.  */
	  bool op0_medium_p = false;
	  bool op1_medium_p = false;
	  /* -1 if op{0,1} is known to be negative, 0 if it is known to be
	     nonnegative, 1 if unknown.  */
	  int op0_sign = 1;
	  int op1_sign = 1;

	  if (TREE_CODE (arg0) == SSA_NAME)
	    {
	      double_int arg0_min, arg0_max;
	      if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
		{
		  if (arg0_max.sle (double_int::max_value (hprec, false))
		      && double_int::min_value (hprec, false).sle (arg0_min))
		    op0_small_p = true;
		  else if (arg0_max.sle (double_int::max_value (hprec, true))
			   && (~double_int::max_value (hprec,
						       true)).sle (arg0_min))
		    op0_medium_p = true;
		  if (!arg0_min.is_negative ())
		    op0_sign = 0;
		  else if (arg0_max.is_negative ())
		    op0_sign = -1;
		}
	    }
	  if (TREE_CODE (arg1) == SSA_NAME)
	    {
	      double_int arg1_min, arg1_max;
	      if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
		{
		  if (arg1_max.sle (double_int::max_value (hprec, false))
		      && double_int::min_value (hprec, false).sle (arg1_min))
		    op1_small_p = true;
		  else if (arg1_max.sle (double_int::max_value (hprec, true))
			   && (~double_int::max_value (hprec,
						       true)).sle (arg1_min))
		    op1_medium_p = true;
		  if (!arg1_min.is_negative ())
		    op1_sign = 0;
		  else if (arg1_max.is_negative ())
		    op1_sign = -1;
		}
	    }

	  int smaller_sign = 1;
	  int larger_sign = 1;
	  if (op0_small_p)
	    {
	      smaller_sign = op0_sign;
	      larger_sign = op1_sign;
	    }
	  else if (op1_small_p)
	    {
	      smaller_sign = op1_sign;
	      larger_sign = op0_sign;
	    }
	  else if (op0_sign == op1_sign)
	    {
	      smaller_sign = op0_sign;
	      larger_sign = op0_sign;
	    }

	  if (!op0_small_p)
	    emit_cmp_and_jump_insns (signbit0, hipart0, NE, NULL_RTX, hmode,
				     false, large_op0, PROB_UNLIKELY);

	  if (!op1_small_p)
	    emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
				     false, small_op0_large_op1,
				     PROB_UNLIKELY);

	  /* If both op0 and op1 are sign extended from hmode to mode,
	     the multiplication will never overflow.  We can do just one
	     hmode x hmode => mode widening multiplication.  */
	  if (GET_CODE (lopart0) == SUBREG)
	    {
	      SUBREG_PROMOTED_VAR_P (lopart0) = 1;
	      SUBREG_PROMOTED_UNSIGNED_SET (lopart0, 0);
	    }
	  if (GET_CODE (lopart1) == SUBREG)
	    {
	      SUBREG_PROMOTED_VAR_P (lopart1) = 1;
	      SUBREG_PROMOTED_UNSIGNED_SET (lopart1, 0);
	    }
	  tree halfstype = build_nonstandard_integer_type (hprec, 0);
	  ops.op0 = make_tree (halfstype, lopart0);
	  ops.op1 = make_tree (halfstype, lopart1);
	  ops.code = WIDEN_MULT_EXPR;
	  ops.type = TREE_TYPE (arg0);
	  rtx thisres
	    = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (res, thisres);
	  emit_jump (done_label);

	  emit_label (small_op0_large_op1);

	  /* If op0 is sign extended from hmode to mode, but op1 is not,
	     just swap the arguments and handle it as op1 sign extended,
	     op0 not.  */
	  rtx larger = gen_reg_rtx (mode);
	  rtx hipart = gen_reg_rtx (hmode);
	  rtx lopart = gen_reg_rtx (hmode);
	  emit_move_insn (larger, op1);
	  emit_move_insn (hipart, hipart1);
	  emit_move_insn (lopart, lopart0);
	  emit_jump (one_small_one_large);

	  emit_label (large_op0);

	  if (!op1_small_p)
	    emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
				     false, both_ops_large, PROB_UNLIKELY);

	  /* If op1 is sign extended from hmode to mode, but op0 is not,
	     prepare larger, hipart and lopart pseudos and handle it together
	     with small_op0_large_op1.  */
	  emit_move_insn (larger, op0);
	  emit_move_insn (hipart, hipart0);
	  emit_move_insn (lopart, lopart1);

	  emit_label (one_small_one_large);

	  /* lopart is the low part of the operand that is sign extended
	     to mode, larger is the other operand, hipart is the
	     high part of larger and lopart0 and lopart1 are the low parts
	     of both operands.
	     We perform lopart0 * lopart1 and lopart * hipart widening
	     multiplications.  */
	  tree halfutype = build_nonstandard_integer_type (hprec, 1);
	  ops.op0 = make_tree (halfutype, lopart0);
	  ops.op1 = make_tree (halfutype, lopart1);
	  rtx lo0xlo1
	    = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);

	  ops.op0 = make_tree (halfutype, lopart);
	  ops.op1 = make_tree (halfutype, hipart);
	  rtx loxhi = gen_reg_rtx (mode);
	  rtx tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (loxhi, tem);

	  /* if (hipart < 0) loxhi -= lopart << (bitsize / 2);  */
	  if (larger_sign == 0)
	    emit_jump (after_hipart_neg);
	  else if (larger_sign != -1)
	    emit_cmp_and_jump_insns (hipart, const0_rtx, GE, NULL_RTX, hmode,
				     false, after_hipart_neg, PROB_EVEN);

	  tem = convert_modes (mode, hmode, lopart, 1);
	  tem = expand_shift (LSHIFT_EXPR, mode, tem, hprec, NULL_RTX, 1);
	  tem = expand_simple_binop (mode, MINUS, loxhi, tem, NULL_RTX,
				     1, OPTAB_DIRECT);
	  emit_move_insn (loxhi, tem);

	  emit_label (after_hipart_neg);

	  /* if (lopart < 0) loxhi -= larger;  */
	  if (smaller_sign == 0)
	    emit_jump (after_lopart_neg);
	  else if (smaller_sign != -1)
	    emit_cmp_and_jump_insns (lopart, const0_rtx, GE, NULL_RTX, hmode,
				     false, after_lopart_neg, PROB_EVEN);

	  tem = expand_simple_binop (mode, MINUS, loxhi, larger, NULL_RTX,
				     1, OPTAB_DIRECT);
	  emit_move_insn (loxhi, tem);

	  emit_label (after_lopart_neg);

	  /* loxhi += (uns) lo0xlo1 >> (bitsize / 2);  */
	  tem = expand_shift (RSHIFT_EXPR, mode, lo0xlo1, hprec, NULL_RTX, 1);
	  tem = expand_simple_binop (mode, PLUS, loxhi, tem, NULL_RTX,
				     1, OPTAB_DIRECT);
	  emit_move_insn (loxhi, tem);

	  /* if (loxhi >> (bitsize / 2)
	     == (hmode) loxhi >> (bitsize / 2 - 1))  */
	  rtx hipartloxhi = expand_shift (RSHIFT_EXPR, mode, loxhi, hprec,
					  NULL_RTX, 0);
	  hipartloxhi = gen_lowpart (hmode, hipartloxhi);
	  rtx lopartloxhi = gen_lowpart (hmode, loxhi);
	  rtx signbitloxhi = expand_shift (RSHIFT_EXPR, hmode, lopartloxhi,
					   hprec - 1, NULL_RTX, 0);

	  emit_cmp_and_jump_insns (signbitloxhi, hipartloxhi, NE, NULL_RTX,
				   hmode, false, do_overflow,
				   PROB_VERY_UNLIKELY);

	  /* res = (loxhi << (bitsize / 2)) | (hmode) lo0xlo1;  */
	  rtx loxhishifted = expand_shift (LSHIFT_EXPR, mode, loxhi, hprec,
					   NULL_RTX, 1);
	  tem = convert_modes (mode, hmode, gen_lowpart (hmode, lo0xlo1), 1);

	  tem = expand_simple_binop (mode, IOR, loxhishifted, tem, res,
				     1, OPTAB_DIRECT);
	  if (tem != res)
	    emit_move_insn (res, tem);
	  emit_jump (done_label);

	  emit_label (both_ops_large);

	  /* If both operands are large (not sign extended from hmode),
	     then perform the full multiplication which will be the result
	     of the operation.  The only cases which don't overflow are
	     some cases where both hipart0 and highpart1 are 0 or -1.  */
	  ops.code = MULT_EXPR;
	  ops.op0 = make_tree (TREE_TYPE (arg0), op0);
	  ops.op1 = make_tree (TREE_TYPE (arg0), op1);
	  tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (res, tem);

	  if (!op0_medium_p)
	    {
	      tem = expand_simple_binop (hmode, PLUS, hipart0, const1_rtx,
					 NULL_RTX, 1, OPTAB_DIRECT);
	      emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
				       true, do_error, PROB_VERY_UNLIKELY);
	    }

	  if (!op1_medium_p)
	    {
	      tem = expand_simple_binop (hmode, PLUS, hipart1, const1_rtx,
					 NULL_RTX, 1, OPTAB_DIRECT);
	      emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
				       true, do_error, PROB_VERY_UNLIKELY);
	    }

	  /* At this point hipart{0,1} are both in [-1, 0].  If they are the
	     same, overflow happened if res is negative, if they are different,
	     overflow happened if res is positive.  */
	  if (op0_sign != 1 && op1_sign != 1 && op0_sign != op1_sign)
	    emit_jump (hipart_different);
	  else if (op0_sign == 1 || op1_sign == 1)
	    emit_cmp_and_jump_insns (hipart0, hipart1, NE, NULL_RTX, hmode,
				     true, hipart_different, PROB_EVEN);

	  emit_cmp_and_jump_insns (res, const0_rtx, LT, NULL_RTX, mode, false,
				   do_error, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);

	  emit_label (hipart_different);

	  emit_cmp_and_jump_insns (res, const0_rtx, GE, NULL_RTX, mode, false,
				   do_error, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);

	  emit_label (do_overflow);

	  /* Overflow, do full multiplication and fallthru into do_error.  */
	  ops.op0 = make_tree (TREE_TYPE (arg0), op0);
	  ops.op1 = make_tree (TREE_TYPE (arg0), op1);
	  tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (res, tem);
	}
      else
	{
	  /* Strategy 4: no way to check; just compute the product.  */
	  ops.code = MULT_EXPR;
	  ops.type = TREE_TYPE (arg0);
	  res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_jump (done_label);
	}
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (MULT_EXPR, gimple_location (stmt),
				     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}
781
/* Expand UBSAN_CHECK_ADD call STMT.  */

static void
expand_UBSAN_CHECK_ADD (gimple stmt)
{
  ubsan_expand_si_overflow_addsub_check (PLUS_EXPR, stmt);
}
789
/* Expand UBSAN_CHECK_SUB call STMT.  */

static void
expand_UBSAN_CHECK_SUB (gimple stmt)
{
  /* 0 - x is a negation; use the cheaper single-operand check.  */
  if (integer_zerop (gimple_call_arg (stmt, 0)))
    ubsan_expand_si_overflow_neg_check (stmt);
  else
    ubsan_expand_si_overflow_addsub_check (MINUS_EXPR, stmt);
}
800
/* Expand UBSAN_CHECK_MUL call STMT.  */

static void
expand_UBSAN_CHECK_MUL (gimple stmt)
{
  ubsan_expand_si_overflow_mul_check (stmt);
}
808
5ce9450f
JJ
/* This should get folded in tree-vectorizer.c.  Any call that survives
   to expansion time indicates a pass-ordering bug.  */

static void
expand_LOOP_VECTORIZED (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
816
817static void
818expand_MASK_LOAD (gimple stmt)
819{
820 struct expand_operand ops[3];
821 tree type, lhs, rhs, maskt;
822 rtx mem, target, mask;
823
824 maskt = gimple_call_arg (stmt, 2);
825 lhs = gimple_call_lhs (stmt);
8e91d222
JJ
826 if (lhs == NULL_TREE)
827 return;
5ce9450f
JJ
828 type = TREE_TYPE (lhs);
829 rhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
830 gimple_call_arg (stmt, 1));
831
832 mem = expand_expr (rhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
833 gcc_assert (MEM_P (mem));
834 mask = expand_normal (maskt);
835 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
836 create_output_operand (&ops[0], target, TYPE_MODE (type));
837 create_fixed_operand (&ops[1], mem);
838 create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
839 expand_insn (optab_handler (maskload_optab, TYPE_MODE (type)), 3, ops);
840}
841
842static void
843expand_MASK_STORE (gimple stmt)
844{
845 struct expand_operand ops[3];
846 tree type, lhs, rhs, maskt;
847 rtx mem, reg, mask;
848
849 maskt = gimple_call_arg (stmt, 2);
850 rhs = gimple_call_arg (stmt, 3);
851 type = TREE_TYPE (rhs);
852 lhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
853 gimple_call_arg (stmt, 1));
854
855 mem = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
856 gcc_assert (MEM_P (mem));
857 mask = expand_normal (maskt);
858 reg = expand_normal (rhs);
859 create_fixed_operand (&ops[0], mem);
860 create_input_operand (&ops[1], reg, TYPE_MODE (type));
861 create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
862 expand_insn (optab_handler (maskstore_optab, TYPE_MODE (type)), 3, ops);
863}
864
09b22f48
JJ
/* ABNORMAL_DISPATCHER expands to no RTL at all.  (NOTE(review): the
   call presumably only marks the abnormal-edge dispatcher block in the
   CFG -- confirm against the pass that creates it.)  */

static void
expand_ABNORMAL_DISPATCHER (gimple)
{
}
869
ed9c79e1
JJ
870static void
871expand_BUILTIN_EXPECT (gimple stmt)
872{
873 /* When guessing was done, the hints should be already stripped away. */
874 gcc_assert (!flag_guess_branch_prob || optimize == 0 || seen_error ());
875
876 rtx target;
877 tree lhs = gimple_call_lhs (stmt);
878 if (lhs)
879 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
880 else
881 target = const0_rtx;
882 rtx val = expand_expr (gimple_call_arg (stmt, 0), target, VOIDmode, EXPAND_NORMAL);
883 if (lhs && val != target)
884 emit_move_insn (target, val);
885}
886
25583c4f
RS
/* Routines to expand each internal function, indexed by function number.
   Each routine has the prototype:

       expand_<NAME> (gimple stmt)

   where STMT is the statement that performs the call. */
static void (*const internal_fn_expanders[]) (gimple) = {
#define DEF_INTERNAL_FN(CODE, FLAGS) expand_##CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  /* Trailing null sentinel, matching the sentinels in the name and
     flags arrays above.  */
  0
};
899
/* Expand STMT, which is a call to internal function FN.  */

void
expand_internal_call (gimple stmt)
{
  /* Dispatch through the table generated from internal-fn.def.  */
  internal_fn_expanders[(int) gimple_call_internal_fn (stmt)] (stmt);
}