]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/internal-fn.c
i386.c (standard_sse_constant_opcode): Use vpxord/vpternlog if avx512 is availible.
[thirdparty/gcc.git] / gcc / internal-fn.c
CommitLineData
25583c4f 1/* Internal functions.
23a5b65a 2 Copyright (C) 2011-2014 Free Software Foundation, Inc.
25583c4f
RS
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8Software Foundation; either version 3, or (at your option) any later
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
25583c4f 23#include "tree.h"
0e37a2f3 24#include "internal-fn.h"
d8a2d370 25#include "stor-layout.h"
25583c4f
RS
26#include "expr.h"
27#include "optabs.h"
2fb9a547
AM
28#include "basic-block.h"
29#include "tree-ssa-alias.h"
30#include "internal-fn.h"
31#include "gimple-expr.h"
32#include "is-a.h"
25583c4f 33#include "gimple.h"
31e071ae
MP
34#include "ubsan.h"
35#include "target.h"
36#include "predict.h"
97286431
JJ
37#include "stringpool.h"
38#include "tree-ssanames.h"
ed9c79e1 39#include "diagnostic-core.h"
25583c4f
RS
40
/* The names of each internal function, indexed by function number.
   Generated from internal-fn.def by stringizing each CODE; the final
   "<invalid-fn>" entry corresponds to IFN_LAST and serves as a sentinel
   for out-of-range lookups.  */
const char *const internal_fn_name_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) #CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  "<invalid-fn>"
};
48
/* The ECF_* flags of each internal function, indexed by function number.
   Generated from internal-fn.def; the trailing 0 corresponds to IFN_LAST
   (no flags) so the array covers every valid index.  */
const int internal_fn_flags_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) FLAGS,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};
56
b78475cf
YG
/* Fnspec of each internal function, indexed by function number.
   Entries are STRING_CST trees (or NULL for functions without a fnspec);
   populated lazily by init_internal_fns.  */
const_tree internal_fn_fnspec_array[IFN_LAST + 1];

/* Build the fnspec string trees for all internal functions that declare
   one in internal-fn.def, and terminate the table at IFN_LAST.
   NOTE(review): sizeof (FNSPEC) already includes the literal's
   terminating NUL, so "+ 1" looks like it reads one byte past the end of
   the string literal — confirm against build_string's length convention.  */
void
init_internal_fns ()
{
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) \
  if (FNSPEC) internal_fn_fnspec_array[IFN_##CODE] = \
    build_string ((int) sizeof (FNSPEC) + 1, FNSPEC ? FNSPEC : "");
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  internal_fn_fnspec_array[IFN_LAST] = 0;
}
70
272c6793
RS
71/* ARRAY_TYPE is an array of vector modes. Return the associated insn
72 for load-lanes-style optab OPTAB. The insn must exist. */
73
74static enum insn_code
75get_multi_vector_move (tree array_type, convert_optab optab)
76{
77 enum insn_code icode;
78 enum machine_mode imode;
79 enum machine_mode vmode;
80
81 gcc_assert (TREE_CODE (array_type) == ARRAY_TYPE);
82 imode = TYPE_MODE (array_type);
83 vmode = TYPE_MODE (TREE_TYPE (array_type));
84
85 icode = convert_optab_handler (optab, imode, vmode);
86 gcc_assert (icode != CODE_FOR_nothing);
87 return icode;
88}
89
90/* Expand LOAD_LANES call STMT. */
91
92static void
93expand_LOAD_LANES (gimple stmt)
94{
95 struct expand_operand ops[2];
96 tree type, lhs, rhs;
97 rtx target, mem;
98
99 lhs = gimple_call_lhs (stmt);
100 rhs = gimple_call_arg (stmt, 0);
101 type = TREE_TYPE (lhs);
102
103 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
104 mem = expand_normal (rhs);
105
106 gcc_assert (MEM_P (mem));
107 PUT_MODE (mem, TYPE_MODE (type));
108
109 create_output_operand (&ops[0], target, TYPE_MODE (type));
110 create_fixed_operand (&ops[1], mem);
111 expand_insn (get_multi_vector_move (type, vec_load_lanes_optab), 2, ops);
112}
113
114/* Expand STORE_LANES call STMT. */
115
116static void
117expand_STORE_LANES (gimple stmt)
118{
119 struct expand_operand ops[2];
120 tree type, lhs, rhs;
121 rtx target, reg;
122
123 lhs = gimple_call_lhs (stmt);
124 rhs = gimple_call_arg (stmt, 0);
125 type = TREE_TYPE (rhs);
126
127 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
128 reg = expand_normal (rhs);
129
130 gcc_assert (MEM_P (target));
131 PUT_MODE (target, TYPE_MODE (type));
132
133 create_fixed_operand (&ops[0], target);
134 create_input_operand (&ops[1], reg, TYPE_MODE (type));
135 expand_insn (get_multi_vector_move (type, vec_store_lanes_optab), 2, ops);
136}
137
8170608b
TB
/* ANNOTATE calls are removed before expansion, so reaching here is a
   compiler bug.  NOTE(review): unlike the sibling stubs below, the pass
   that strips this one isn't named here — presumably the loop annotation
   handling; confirm before documenting further.  */
static void
expand_ANNOTATE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
143
74bf76ed
JJ
/* This should get expanded in adjust_simduid_builtins.  Reaching RTL
   expansion with it still present is a compiler bug.  */

static void
expand_GOMP_SIMD_LANE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
151
/* This should get expanded in adjust_simduid_builtins.  Reaching RTL
   expansion with it still present is a compiler bug.  */

static void
expand_GOMP_SIMD_VF (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
159
/* This should get expanded in adjust_simduid_builtins.  Reaching RTL
   expansion with it still present is a compiler bug.  */

static void
expand_GOMP_SIMD_LAST_LANE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
167
b9a55b13
MP
/* This should get expanded in the sanopt pass.  Reaching RTL expansion
   with it still present is a compiler bug.  */

static void
expand_UBSAN_NULL (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
175
0e37a2f3
MP
/* This should get expanded in the sanopt pass.  Reaching RTL expansion
   with it still present is a compiler bug.  */

static void
expand_UBSAN_BOUNDS (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
183
c62ccb9a
YG
/* This should get expanded in the sanopt pass.  Reaching RTL expansion
   with it still present is a compiler bug.  */

static void
expand_ASAN_CHECK (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
191
31e071ae
MP
/* Add sub/add overflow checking to the statement STMT.
   CODE says whether the operation is +, or -.

   Emits RTL that performs the operation, jumps to DONE_LABEL on the
   non-overflow path, and falls into DO_ERROR (which calls the ubsan
   overflow builtin) when overflow is detected.  Prefers the target's
   {add,sub}v4 jump-on-overflow pattern; otherwise falls back to an
   unsigned RTL add/sub followed by explicit comparisons.  */

void
ubsan_expand_si_overflow_addsub_check (tree_code code, gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();
  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* First try the target's combined compute-and-branch-on-overflow
     insn, if it exists for this mode.  */
  enum insn_code icode
    = optab_handler (code == PLUS_EXPR ? addv4_optab : subv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
	{
	  last = get_last_insn ();
	  /* Annotate the overflow branch as very unlikely unless the
	     pattern already attached a probability.  */
	  if (profile_status_for_fn (cfun) != PROFILE_ABSENT
	      && JUMP_P (last)
	      && any_condjump_p (last)
	      && !find_reg_note (last, REG_BR_PROB, 0))
	    add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);
	}
      else
	{
	  /* Pattern refused these operands; discard the partial
	     expansion and use the generic fallback below.  */
	  delete_insns_since (last);
	  icode = CODE_FOR_nothing;
	}
    }

  if (icode == CODE_FOR_nothing)
    {
      rtx sub_check = gen_label_rtx ();
      /* pos_neg tracks what we know about op1's sign after possible
	 operand swapping: 1 = known non-negative, 2 = known negative,
	 3 = unknown (need both runtime checks).  */
      int pos_neg = 3;

      /* Compute the operation.  On RTL level, the addition is always
	 unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
			  op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);

      /* If we can prove one of the arguments (for MINUS_EXPR only
	 the second operand, as subtraction is not commutative) is always
	 non-negative or always negative, we can do just one comparison
	 and conditional jump instead of 2 at runtime, 3 present in the
	 emitted code.  If one of the arguments is CONST_INT, all we
	 need is to make sure it is op1, then the first
	 emit_cmp_and_jump_insns will be just folded.  Otherwise try
	 to use range info if available.  */
      if (code == PLUS_EXPR && CONST_INT_P (op0))
	{
	  rtx tem = op0;
	  op0 = op1;
	  op1 = tem;
	}
      else if (CONST_INT_P (op1))
	;
      else if (code == PLUS_EXPR && TREE_CODE (arg0) == SSA_NAME)
	{
	  wide_int arg0_min, arg0_max;
	  if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
	    {
	      if (!wi::neg_p (arg0_min, TYPE_SIGN (TREE_TYPE (arg0))))
		pos_neg = 1;
	      else if (wi::neg_p (arg0_max, TYPE_SIGN (TREE_TYPE (arg0))))
		pos_neg = 2;
	    }
	  /* arg0's sign is known, so swap it into the op1 slot where
	     the single-comparison logic below expects it.  */
	  if (pos_neg != 3)
	    {
	      rtx tem = op0;
	      op0 = op1;
	      op1 = tem;
	    }
	}
      if (pos_neg == 3 && !CONST_INT_P (op1) && TREE_CODE (arg1) == SSA_NAME)
	{
	  wide_int arg1_min, arg1_max;
	  if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
	    {
	      if (!wi::neg_p (arg1_min, TYPE_SIGN (TREE_TYPE (arg1))))
		pos_neg = 1;
	      else if (wi::neg_p (arg1_max, TYPE_SIGN (TREE_TYPE (arg1))))
		pos_neg = 2;
	    }
	}

      /* If the op1 is negative, we have to use a different check.  */
      if (pos_neg == 3)
	emit_cmp_and_jump_insns (op1, const0_rtx, LT, NULL_RTX, mode,
				 false, sub_check, PROB_EVEN);

      /* Compare the result of the operation with one of the operands.  */
      if (pos_neg & 1)
	emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? GE : LE,
				 NULL_RTX, mode, false, done_label,
				 PROB_VERY_LIKELY);

      /* If we get here, we have to print the error.  */
      if (pos_neg == 3)
	{
	  emit_jump (do_error);

	  emit_label (sub_check);
	}

      /* We have k = a + b for b < 0 here.  k <= a must hold.  */
      if (pos_neg & 2)
	emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? LE : GE,
				 NULL_RTX, mode, false, done_label,
				 PROB_VERY_LIKELY);
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (code, gimple_location (stmt),
				     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}
340
/* Add negate overflow checking to the statement STMT.

   Note the negated operand is the *second* call argument (the first is
   the literal zero of the original 0 - x subtraction).  Prefers the
   target's negv3 jump-on-overflow pattern; otherwise negates unsigned
   and compares the operand against the type's minimum value, the only
   input whose negation overflows in two's complement.  */

void
ubsan_expand_si_overflow_neg_check (gimple stmt)
{
  rtx res, op1;
  tree lhs, fn, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg1));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (negv3_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[3];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op1, mode);
      create_fixed_operand (&ops[2], do_error);
      if (maybe_expand_insn (icode, 3, ops))
	{
	  last = get_last_insn ();
	  /* Annotate the overflow branch as very unlikely unless the
	     pattern already attached a probability.  */
	  if (profile_status_for_fn (cfun) != PROFILE_ABSENT
	      && JUMP_P (last)
	      && any_condjump_p (last)
	      && !find_reg_note (last, REG_BR_PROB, 0))
	    add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);
	}
      else
	{
	  /* Pattern refused these operands; fall back below.  */
	  delete_insns_since (last);
	  icode = CODE_FOR_nothing;
	}
    }

  if (icode == CODE_FOR_nothing)
    {
      /* Compute the operation.  On RTL level, the negation is always
	 unsigned.  */
      res = expand_unop (mode, neg_optab, op1, NULL_RTX, false);

      /* Compare the operand with the most negative value.  */
      rtx minv = expand_normal (TYPE_MIN_VALUE (TREE_TYPE (arg1)));
      emit_cmp_and_jump_insns (op1, minv, NE, NULL_RTX, mode, false,
			       done_label, PROB_VERY_LIKELY);
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (NEGATE_EXPR, gimple_location (stmt),
				     TREE_TYPE (arg1), arg1, NULL_TREE);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}
416
/* Add mul overflow checking to the statement STMT.

   Strategy, in order of preference:
   1. The target's mulv4 jump-on-overflow pattern.
   2. A widening multiply into the double-width mode, checking that the
      high half equals the sign extension of the low half.
   3. If no double-width mode exists, decompose into half-mode widening
      multiplies, using range info to skip checks whose outcome is known.
   4. Otherwise, a plain MULT_EXPR with no overflow detection emitted
      here (jumps straight to done_label).  */

void
ubsan_expand_si_overflow_mul_check (gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (mulv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
	{
	  last = get_last_insn ();
	  /* Annotate the overflow branch as very unlikely unless the
	     pattern already attached a probability.  */
	  if (profile_status_for_fn (cfun) != PROFILE_ABSENT
	      && JUMP_P (last)
	      && any_condjump_p (last)
	      && !find_reg_note (last, REG_BR_PROB, 0))
	    add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);
	}
      else
	{
	  /* Pattern refused these operands; fall back below.  */
	  delete_insns_since (last);
	  icode = CODE_FOR_nothing;
	}
    }

  if (icode == CODE_FOR_nothing)
    {
      struct separate_ops ops;
      /* Half-width integer mode, used by strategy 3 below.  */
      enum machine_mode hmode
	= mode_for_size (GET_MODE_PRECISION (mode) / 2, MODE_INT, 1);
      ops.op0 = arg0;
      ops.op1 = arg1;
      ops.op2 = NULL_TREE;
      ops.location = gimple_location (stmt);
      if (GET_MODE_2XWIDER_MODE (mode) != VOIDmode
	  && targetm.scalar_mode_supported_p (GET_MODE_2XWIDER_MODE (mode)))
	{
	  /* Strategy 2: widen into the double-width mode.  */
	  enum machine_mode wmode = GET_MODE_2XWIDER_MODE (mode);
	  ops.code = WIDEN_MULT_EXPR;
	  ops.type
	    = build_nonstandard_integer_type (GET_MODE_PRECISION (wmode), 0);

	  res = expand_expr_real_2 (&ops, NULL_RTX, wmode, EXPAND_NORMAL);
	  rtx hipart = expand_shift (RSHIFT_EXPR, wmode, res,
				     GET_MODE_PRECISION (mode), NULL_RTX, 0);
	  hipart = gen_lowpart (mode, hipart);
	  res = gen_lowpart (mode, res);
	  rtx signbit = expand_shift (RSHIFT_EXPR, mode, res,
				      GET_MODE_PRECISION (mode) - 1,
				      NULL_RTX, 0);
	  /* RES is low half of the double width result, HIPART
	     the high half.  There was overflow if
	     HIPART is different from RES < 0 ? -1 : 0.  */
	  emit_cmp_and_jump_insns (signbit, hipart, EQ, NULL_RTX, mode,
				   false, done_label, PROB_VERY_LIKELY);
	}
      else if (hmode != BLKmode
	       && 2 * GET_MODE_PRECISION (hmode) == GET_MODE_PRECISION (mode))
	{
	  /* Strategy 3: decompose both operands into high/low halves
	     and use half-mode widening multiplies.  */
	  rtx large_op0 = gen_label_rtx ();
	  rtx small_op0_large_op1 = gen_label_rtx ();
	  rtx one_small_one_large = gen_label_rtx ();
	  rtx both_ops_large = gen_label_rtx ();
	  rtx after_hipart_neg = gen_label_rtx ();
	  rtx after_lopart_neg = gen_label_rtx ();
	  rtx do_overflow = gen_label_rtx ();
	  rtx hipart_different = gen_label_rtx ();

	  unsigned int hprec = GET_MODE_PRECISION (hmode);
	  rtx hipart0 = expand_shift (RSHIFT_EXPR, mode, op0, hprec,
				      NULL_RTX, 0);
	  hipart0 = gen_lowpart (hmode, hipart0);
	  rtx lopart0 = gen_lowpart (hmode, op0);
	  rtx signbit0 = expand_shift (RSHIFT_EXPR, hmode, lopart0, hprec - 1,
				       NULL_RTX, 0);
	  rtx hipart1 = expand_shift (RSHIFT_EXPR, mode, op1, hprec,
				      NULL_RTX, 0);
	  hipart1 = gen_lowpart (hmode, hipart1);
	  rtx lopart1 = gen_lowpart (hmode, op1);
	  rtx signbit1 = expand_shift (RSHIFT_EXPR, hmode, lopart1, hprec - 1,
				       NULL_RTX, 0);

	  res = gen_reg_rtx (mode);

	  /* True if op0 resp. op1 are known to be in the range of
	     halfstype.  */
	  bool op0_small_p = false;
	  bool op1_small_p = false;
	  /* True if op0 resp. op1 are known to have all zeros or all ones
	     in the upper half of bits, but are not known to be
	     op{0,1}_small_p.  */
	  bool op0_medium_p = false;
	  bool op1_medium_p = false;
	  /* -1 if op{0,1} is known to be negative, 0 if it is known to be
	     nonnegative, 1 if unknown.  */
	  int op0_sign = 1;
	  int op1_sign = 1;

	  /* Mine SSA range info so the runtime sign-extension checks
	     below can be folded away when the answer is known.  */
	  if (TREE_CODE (arg0) == SSA_NAME)
	    {
	      wide_int arg0_min, arg0_max;
	      if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
		{
		  unsigned int mprec0 = wi::min_precision (arg0_min, SIGNED);
		  unsigned int mprec1 = wi::min_precision (arg0_max, SIGNED);
		  if (mprec0 <= hprec && mprec1 <= hprec)
		    op0_small_p = true;
		  else if (mprec0 <= hprec + 1 && mprec1 <= hprec + 1)
		    op0_medium_p = true;
		  if (!wi::neg_p (arg0_min, TYPE_SIGN (TREE_TYPE (arg0))))
		    op0_sign = 0;
		  else if (wi::neg_p (arg0_max, TYPE_SIGN (TREE_TYPE (arg0))))
		    op0_sign = -1;
		}
	    }
	  if (TREE_CODE (arg1) == SSA_NAME)
	    {
	      wide_int arg1_min, arg1_max;
	      if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
		{
		  unsigned int mprec0 = wi::min_precision (arg1_min, SIGNED);
		  unsigned int mprec1 = wi::min_precision (arg1_max, SIGNED);
		  if (mprec0 <= hprec && mprec1 <= hprec)
		    op1_small_p = true;
		  else if (mprec0 <= hprec + 1 && mprec1 <= hprec + 1)
		    op1_medium_p = true;
		  if (!wi::neg_p (arg1_min, TYPE_SIGN (TREE_TYPE (arg1))))
		    op1_sign = 0;
		  else if (wi::neg_p (arg1_max, TYPE_SIGN (TREE_TYPE (arg1))))
		    op1_sign = -1;
		}
	    }

	  int smaller_sign = 1;
	  int larger_sign = 1;
	  if (op0_small_p)
	    {
	      smaller_sign = op0_sign;
	      larger_sign = op1_sign;
	    }
	  else if (op1_small_p)
	    {
	      smaller_sign = op1_sign;
	      larger_sign = op0_sign;
	    }
	  else if (op0_sign == op1_sign)
	    {
	      smaller_sign = op0_sign;
	      larger_sign = op0_sign;
	    }

	  if (!op0_small_p)
	    emit_cmp_and_jump_insns (signbit0, hipart0, NE, NULL_RTX, hmode,
				     false, large_op0, PROB_UNLIKELY);

	  if (!op1_small_p)
	    emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
				     false, small_op0_large_op1,
				     PROB_UNLIKELY);

	  /* If both op0 and op1 are sign extended from hmode to mode,
	     the multiplication will never overflow.  We can do just one
	     hmode x hmode => mode widening multiplication.  */
	  if (GET_CODE (lopart0) == SUBREG)
	    {
	      SUBREG_PROMOTED_VAR_P (lopart0) = 1;
	      SUBREG_PROMOTED_SET (lopart0, 0);
	    }
	  if (GET_CODE (lopart1) == SUBREG)
	    {
	      SUBREG_PROMOTED_VAR_P (lopart1) = 1;
	      SUBREG_PROMOTED_SET (lopart1, 0);
	    }
	  tree halfstype = build_nonstandard_integer_type (hprec, 0);
	  ops.op0 = make_tree (halfstype, lopart0);
	  ops.op1 = make_tree (halfstype, lopart1);
	  ops.code = WIDEN_MULT_EXPR;
	  ops.type = TREE_TYPE (arg0);
	  rtx thisres
	    = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (res, thisres);
	  emit_jump (done_label);

	  emit_label (small_op0_large_op1);

	  /* If op0 is sign extended from hmode to mode, but op1 is not,
	     just swap the arguments and handle it as op1 sign extended,
	     op0 not.  */
	  rtx larger = gen_reg_rtx (mode);
	  rtx hipart = gen_reg_rtx (hmode);
	  rtx lopart = gen_reg_rtx (hmode);
	  emit_move_insn (larger, op1);
	  emit_move_insn (hipart, hipart1);
	  emit_move_insn (lopart, lopart0);
	  emit_jump (one_small_one_large);

	  emit_label (large_op0);

	  if (!op1_small_p)
	    emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
				     false, both_ops_large, PROB_UNLIKELY);

	  /* If op1 is sign extended from hmode to mode, but op0 is not,
	     prepare larger, hipart and lopart pseudos and handle it together
	     with small_op0_large_op1.  */
	  emit_move_insn (larger, op0);
	  emit_move_insn (hipart, hipart0);
	  emit_move_insn (lopart, lopart1);

	  emit_label (one_small_one_large);

	  /* lopart is the low part of the operand that is sign extended
	     to mode, larger is the other operand, hipart is the
	     high part of larger and lopart0 and lopart1 are the low parts
	     of both operands.
	     We perform lopart0 * lopart1 and lopart * hipart widening
	     multiplications.  */
	  tree halfutype = build_nonstandard_integer_type (hprec, 1);
	  ops.op0 = make_tree (halfutype, lopart0);
	  ops.op1 = make_tree (halfutype, lopart1);
	  rtx lo0xlo1
	    = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);

	  ops.op0 = make_tree (halfutype, lopart);
	  ops.op1 = make_tree (halfutype, hipart);
	  rtx loxhi = gen_reg_rtx (mode);
	  rtx tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (loxhi, tem);

	  /* if (hipart < 0) loxhi -= lopart << (bitsize / 2);  */
	  if (larger_sign == 0)
	    emit_jump (after_hipart_neg);
	  else if (larger_sign != -1)
	    emit_cmp_and_jump_insns (hipart, const0_rtx, GE, NULL_RTX, hmode,
				     false, after_hipart_neg, PROB_EVEN);

	  tem = convert_modes (mode, hmode, lopart, 1);
	  tem = expand_shift (LSHIFT_EXPR, mode, tem, hprec, NULL_RTX, 1);
	  tem = expand_simple_binop (mode, MINUS, loxhi, tem, NULL_RTX,
				     1, OPTAB_DIRECT);
	  emit_move_insn (loxhi, tem);

	  emit_label (after_hipart_neg);

	  /* if (lopart < 0) loxhi -= larger;  */
	  if (smaller_sign == 0)
	    emit_jump (after_lopart_neg);
	  else if (smaller_sign != -1)
	    emit_cmp_and_jump_insns (lopart, const0_rtx, GE, NULL_RTX, hmode,
				     false, after_lopart_neg, PROB_EVEN);

	  tem = expand_simple_binop (mode, MINUS, loxhi, larger, NULL_RTX,
				     1, OPTAB_DIRECT);
	  emit_move_insn (loxhi, tem);

	  emit_label (after_lopart_neg);

	  /* loxhi += (uns) lo0xlo1 >> (bitsize / 2);  */
	  tem = expand_shift (RSHIFT_EXPR, mode, lo0xlo1, hprec, NULL_RTX, 1);
	  tem = expand_simple_binop (mode, PLUS, loxhi, tem, NULL_RTX,
				     1, OPTAB_DIRECT);
	  emit_move_insn (loxhi, tem);

	  /* if (loxhi >> (bitsize / 2)
	        == (hmode) loxhi >> (bitsize / 2 - 1))  */
	  rtx hipartloxhi = expand_shift (RSHIFT_EXPR, mode, loxhi, hprec,
					  NULL_RTX, 0);
	  hipartloxhi = gen_lowpart (hmode, hipartloxhi);
	  rtx lopartloxhi = gen_lowpart (hmode, loxhi);
	  rtx signbitloxhi = expand_shift (RSHIFT_EXPR, hmode, lopartloxhi,
					   hprec - 1, NULL_RTX, 0);

	  emit_cmp_and_jump_insns (signbitloxhi, hipartloxhi, NE, NULL_RTX,
				   hmode, false, do_overflow,
				   PROB_VERY_UNLIKELY);

	  /* res = (loxhi << (bitsize / 2)) | (hmode) lo0xlo1;  */
	  rtx loxhishifted = expand_shift (LSHIFT_EXPR, mode, loxhi, hprec,
					   NULL_RTX, 1);
	  tem = convert_modes (mode, hmode, gen_lowpart (hmode, lo0xlo1), 1);

	  tem = expand_simple_binop (mode, IOR, loxhishifted, tem, res,
				     1, OPTAB_DIRECT);
	  if (tem != res)
	    emit_move_insn (res, tem);
	  emit_jump (done_label);

	  emit_label (both_ops_large);

	  /* If both operands are large (not sign extended from hmode),
	     then perform the full multiplication which will be the result
	     of the operation.  The only cases which don't overflow are
	     some cases where both hipart0 and hipart1 are 0 or -1.  */
	  ops.code = MULT_EXPR;
	  ops.op0 = make_tree (TREE_TYPE (arg0), op0);
	  ops.op1 = make_tree (TREE_TYPE (arg0), op1);
	  tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (res, tem);

	  /* hipart0 + 1 must be in [0, 1] (i.e. hipart0 in [-1, 0]),
	     otherwise overflow; likewise for hipart1 below.  */
	  if (!op0_medium_p)
	    {
	      tem = expand_simple_binop (hmode, PLUS, hipart0, const1_rtx,
					 NULL_RTX, 1, OPTAB_DIRECT);
	      emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
				       true, do_error, PROB_VERY_UNLIKELY);
	    }

	  if (!op1_medium_p)
	    {
	      tem = expand_simple_binop (hmode, PLUS, hipart1, const1_rtx,
					 NULL_RTX, 1, OPTAB_DIRECT);
	      emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
				       true, do_error, PROB_VERY_UNLIKELY);
	    }

	  /* At this point hipart{0,1} are both in [-1, 0].  If they are the
	     same, overflow happened if res is negative, if they are different,
	     overflow happened if res is positive.  */
	  if (op0_sign != 1 && op1_sign != 1 && op0_sign != op1_sign)
	    emit_jump (hipart_different);
	  else if (op0_sign == 1 || op1_sign == 1)
	    emit_cmp_and_jump_insns (hipart0, hipart1, NE, NULL_RTX, hmode,
				     true, hipart_different, PROB_EVEN);

	  emit_cmp_and_jump_insns (res, const0_rtx, LT, NULL_RTX, mode, false,
				   do_error, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);

	  emit_label (hipart_different);

	  emit_cmp_and_jump_insns (res, const0_rtx, GE, NULL_RTX, mode, false,
				   do_error, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);

	  emit_label (do_overflow);

	  /* Overflow, do full multiplication and fallthru into do_error.  */
	  ops.op0 = make_tree (TREE_TYPE (arg0), op0);
	  ops.op1 = make_tree (TREE_TYPE (arg0), op1);
	  tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (res, tem);
	}
      else
	{
	  /* Strategy 4: no suitable wider or half mode; emit a plain
	     multiply with no overflow check.  */
	  ops.code = MULT_EXPR;
	  ops.type = TREE_TYPE (arg0);
	  res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_jump (done_label);
	}
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (MULT_EXPR, gimple_location (stmt),
				     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}
809
/* Expand UBSAN_CHECK_ADD call STMT: signed addition with a runtime
   overflow check that calls the ubsan overflow builtin on failure.  */

static void
expand_UBSAN_CHECK_ADD (gimple stmt)
{
  ubsan_expand_si_overflow_addsub_check (PLUS_EXPR, stmt);
}
817
818/* Expand UBSAN_CHECK_SUB call STMT. */
819
820static void
821expand_UBSAN_CHECK_SUB (gimple stmt)
822{
823 if (integer_zerop (gimple_call_arg (stmt, 0)))
824 ubsan_expand_si_overflow_neg_check (stmt);
825 else
826 ubsan_expand_si_overflow_addsub_check (MINUS_EXPR, stmt);
827}
828
/* Expand UBSAN_CHECK_MUL call STMT: signed multiplication with a runtime
   overflow check that calls the ubsan overflow builtin on failure.  */

static void
expand_UBSAN_CHECK_MUL (gimple stmt)
{
  ubsan_expand_si_overflow_mul_check (stmt);
}
836
5ce9450f
JJ
/* This should get folded in tree-vectorizer.c; reaching RTL expansion
   with it still present is a compiler bug.  */

static void
expand_LOOP_VECTORIZED (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
844
/* Expand MASK_LOAD call STMT using the target's maskload pattern.
   Arguments: 0 = base pointer, 1 = offset, 2 = mask vector.
   Lanes whose mask bit is clear are not loaded.  Does nothing when the
   result is unused.  */
static void
expand_MASK_LOAD (gimple stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, target, mask;

  maskt = gimple_call_arg (stmt, 2);
  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    return;
  type = TREE_TYPE (lhs);
  /* Rebuild the memory reference from the pointer + offset args.  */
  rhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
		     gimple_call_arg (stmt, 1));

  /* NOTE(review): EXPAND_WRITE on the source MEM_REF presumably just
     produces the memory operand without reading through it — confirm.  */
  mem = expand_expr (rhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskload_optab, TYPE_MODE (type)), 3, ops);
}
869
/* Expand MASK_STORE call STMT using the target's maskstore pattern.
   Arguments: 0 = base pointer, 1 = offset, 2 = mask vector,
   3 = value to store.  Lanes whose mask bit is clear are not written.  */
static void
expand_MASK_STORE (gimple stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, reg, mask;

  maskt = gimple_call_arg (stmt, 2);
  rhs = gimple_call_arg (stmt, 3);
  type = TREE_TYPE (rhs);
  /* Rebuild the destination memory reference from pointer + offset.  */
  lhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
		     gimple_call_arg (stmt, 1));

  mem = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  reg = expand_normal (rhs);
  create_fixed_operand (&ops[0], mem);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskstore_optab, TYPE_MODE (type)), 3, ops);
}
892
09b22f48
JJ
/* ABNORMAL_DISPATCHER is a pure CFG artifact marking abnormal edges;
   it expands to no code at all.  */
static void
expand_ABNORMAL_DISPATCHER (gimple)
{
}
897
ed9c79e1
JJ
898static void
899expand_BUILTIN_EXPECT (gimple stmt)
900{
901 /* When guessing was done, the hints should be already stripped away. */
902 gcc_assert (!flag_guess_branch_prob || optimize == 0 || seen_error ());
903
904 rtx target;
905 tree lhs = gimple_call_lhs (stmt);
906 if (lhs)
907 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
908 else
909 target = const0_rtx;
910 rtx val = expand_expr (gimple_call_arg (stmt, 0), target, VOIDmode, EXPAND_NORMAL);
911 if (lhs && val != target)
912 emit_move_insn (target, val);
913}
914
25583c4f
RS
/* Routines to expand each internal function, indexed by function number.
   Each routine has the prototype:

     expand_<NAME> (gimple stmt)

   where STMT is the statement that performs the call.
   Generated from internal-fn.def; the trailing 0 is the IFN_LAST slot
   and must never be called.  */
static void (*const internal_fn_expanders[]) (gimple) = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) expand_##CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};
927
/* Expand STMT, which is a call to internal function FN.
   Dispatches through the internal_fn_expanders table above.  */

void
expand_internal_call (gimple stmt)
{
  internal_fn_expanders[(int) gimple_call_internal_fn (stmt)] (stmt);
}
934}