/* Internal functions.
   Copyright (C) 2011-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "memmodel.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "internal-fn.h"
#include "stor-layout.h"
#include "dojump.h"
#include "expr.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "ubsan.h"
#include "recog.h"
#include "builtins.h"
#include "optabs-tree.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "explow.h"

/* The names of each internal function, indexed by function number.  */
const char *const internal_fn_name_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) #CODE,
#include "internal-fn.def"
  "<invalid-fn>"
};

/* The ECF_* flags of each internal function, indexed by function number.  */
const int internal_fn_flags_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) FLAGS,
#include "internal-fn.def"
  0
};

/* Return the internal function called NAME, or IFN_LAST if there's
   no such function.  */

internal_fn
lookup_internal_fn (const char *name)
{
  typedef hash_map<nofree_string_hash, internal_fn> name_to_fn_map_type;
  static name_to_fn_map_type *name_to_fn_map;

  if (!name_to_fn_map)
    {
      name_to_fn_map = new name_to_fn_map_type (IFN_LAST);
      for (unsigned int i = 0; i < IFN_LAST; ++i)
        name_to_fn_map->put (internal_fn_name (internal_fn (i)),
                             internal_fn (i));
    }
  internal_fn *entry = name_to_fn_map->get (name);
  return entry ? *entry : IFN_LAST;
}

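/* An illustrative use (not from this file): the GIMPLE front end can map
   the textual spelling of an internal function back to its code, e.g.
   lookup_internal_fn ("ADD_OVERFLOW") == IFN_ADD_OVERFLOW, with IFN_LAST
   serving as the "not found" sentinel.  */
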
/* Fnspec of each internal function, indexed by function number.  */
const_tree internal_fn_fnspec_array[IFN_LAST + 1];

void
init_internal_fns ()
{
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) \
  if (FNSPEC) internal_fn_fnspec_array[IFN_##CODE] = \
    build_string ((int) sizeof (FNSPEC), FNSPEC ? FNSPEC : "");
#include "internal-fn.def"
  internal_fn_fnspec_array[IFN_LAST] = 0;
}

/* Create static initializers for the information returned by
   direct_internal_fn.  */
#define not_direct { -2, -2, false }
#define mask_load_direct { -1, 2, false }
#define load_lanes_direct { -1, -1, false }
#define mask_load_lanes_direct { -1, -1, false }
#define gather_load_direct { 3, 1, false }
#define mask_store_direct { 3, 2, false }
#define store_lanes_direct { 0, 0, false }
#define mask_store_lanes_direct { 0, 0, false }
#define vec_cond_mask_direct { 0, 0, false }
#define vec_cond_direct { 0, 0, false }
#define vec_condu_direct { 0, 0, false }
#define vec_condeq_direct { 0, 0, false }
#define scatter_store_direct { 3, 1, false }
#define unary_direct { 0, 0, true }
#define binary_direct { 0, 0, true }
#define ternary_direct { 0, 0, true }
#define cond_unary_direct { 1, 1, true }
#define cond_binary_direct { 1, 1, true }
#define cond_ternary_direct { 1, 1, true }
#define while_direct { 0, 2, false }
#define fold_extract_direct { 2, 2, false }
#define fold_left_direct { 1, 1, false }
#define mask_fold_left_direct { 1, 1, false }
#define check_ptrs_direct { 0, 0, false }

const direct_internal_fn_info direct_internal_fn_array[IFN_LAST + 1] = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) not_direct,
#define DEF_INTERNAL_OPTAB_FN(CODE, FLAGS, OPTAB, TYPE) TYPE##_direct,
#define DEF_INTERNAL_SIGNED_OPTAB_FN(CODE, FLAGS, SELECTOR, SIGNED_OPTAB, \
                                     UNSIGNED_OPTAB, TYPE) TYPE##_direct,
#include "internal-fn.def"
  not_direct
};

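/* Reading the triples above: each one is { TYPE0, TYPE1, VECTORIZABLE }
   for a direct_internal_fn_info.  TYPE0/TYPE1 say where the optab's type
   arguments come from (a zero-based call argument index, or -1 for the
   return type; -2 marks functions that are not directly mapped), and the
   flag marks functions the vectorizer can use directly; see the
   structure's definition in internal-fn.h for the authoritative
   description.  */
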
/* ARRAY_TYPE is an array of vector modes.  Return the associated insn
   for load-lanes-style optab OPTAB, or CODE_FOR_nothing if none.  */

static enum insn_code
get_multi_vector_move (tree array_type, convert_optab optab)
{
  machine_mode imode;
  machine_mode vmode;

  gcc_assert (TREE_CODE (array_type) == ARRAY_TYPE);
  imode = TYPE_MODE (array_type);
  vmode = TYPE_MODE (TREE_TYPE (array_type));

  return convert_optab_handler (optab, imode, vmode);
}

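/* For example (illustrative and target-specific): on AArch64 an array of
   two V4SI vectors is given a 256-bit integer mode, so for IFN_LOAD_LANES
   the lookup above becomes convert_optab_handler (vec_load_lanes_optab,
   OImode, V4SImode), which resolves to the ld2 pattern when the target
   provides one.  */
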
/* Expand LOAD_LANES call STMT using optab OPTAB.  */

static void
expand_load_lanes_optab_fn (internal_fn, gcall *stmt, convert_optab optab)
{
  class expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, mem;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (lhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  mem = expand_normal (rhs);

  gcc_assert (MEM_P (mem));
  PUT_MODE (mem, TYPE_MODE (type));

  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  expand_insn (get_multi_vector_move (type, optab), 2, ops);
  if (!rtx_equal_p (target, ops[0].value))
    emit_move_insn (target, ops[0].value);
}

/* Expand STORE_LANES call STMT using optab OPTAB.  */

static void
expand_store_lanes_optab_fn (internal_fn, gcall *stmt, convert_optab optab)
{
  class expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, reg;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (rhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  reg = expand_normal (rhs);

  gcc_assert (MEM_P (target));
  PUT_MODE (target, TYPE_MODE (type));

  create_fixed_operand (&ops[0], target);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  expand_insn (get_multi_vector_move (type, optab), 2, ops);
}

static void
expand_ANNOTATE (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in omp_device_lower pass.  */

static void
expand_GOMP_USE_SIMT (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in omp_device_lower pass.  */

static void
expand_GOMP_SIMT_ENTER (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* Allocate per-lane storage and begin non-uniform execution region.  */

static void
expand_GOMP_SIMT_ENTER_ALLOC (internal_fn, gcall *stmt)
{
  rtx target;
  tree lhs = gimple_call_lhs (stmt);
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  else
    target = gen_reg_rtx (Pmode);
  rtx size = expand_normal (gimple_call_arg (stmt, 0));
  rtx align = expand_normal (gimple_call_arg (stmt, 1));
  class expand_operand ops[3];
  create_output_operand (&ops[0], target, Pmode);
  create_input_operand (&ops[1], size, Pmode);
  create_input_operand (&ops[2], align, Pmode);
  gcc_assert (targetm.have_omp_simt_enter ());
  expand_insn (targetm.code_for_omp_simt_enter, 3, ops);
}

/* Deallocate per-lane storage and leave non-uniform execution region.  */

static void
expand_GOMP_SIMT_EXIT (internal_fn, gcall *stmt)
{
  gcc_checking_assert (!gimple_call_lhs (stmt));
  rtx arg = expand_normal (gimple_call_arg (stmt, 0));
  class expand_operand ops[1];
  create_input_operand (&ops[0], arg, Pmode);
  gcc_assert (targetm.have_omp_simt_exit ());
  expand_insn (targetm.code_for_omp_simt_exit, 1, ops);
}

/* Lane index on SIMT targets: thread index in the warp on NVPTX.  On targets
   without SIMT execution this should be expanded in omp_device_lower pass.  */

static void
expand_GOMP_SIMT_LANE (internal_fn, gcall *stmt)
{
  tree lhs = gimple_call_lhs (stmt);
  if (!lhs)
    return;

  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (targetm.have_omp_simt_lane ());
  emit_insn (targetm.gen_omp_simt_lane (target));
}

/* This should get expanded in omp_device_lower pass.  */

static void
expand_GOMP_SIMT_VF (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* Lane index of the first SIMT lane that supplies a non-zero argument.
   This is a SIMT counterpart to GOMP_SIMD_LAST_LANE, used to represent the
   lane that executed the last iteration for handling OpenMP lastprivate.  */

static void
expand_GOMP_SIMT_LAST_LANE (internal_fn, gcall *stmt)
{
  tree lhs = gimple_call_lhs (stmt);
  if (!lhs)
    return;

  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  rtx cond = expand_normal (gimple_call_arg (stmt, 0));
  machine_mode mode = TYPE_MODE (TREE_TYPE (lhs));
  class expand_operand ops[2];
  create_output_operand (&ops[0], target, mode);
  create_input_operand (&ops[1], cond, mode);
  gcc_assert (targetm.have_omp_simt_last_lane ());
  expand_insn (targetm.code_for_omp_simt_last_lane, 2, ops);
}

/* Non-transparent predicate used in SIMT lowering of OpenMP "ordered".  */

static void
expand_GOMP_SIMT_ORDERED_PRED (internal_fn, gcall *stmt)
{
  tree lhs = gimple_call_lhs (stmt);
  if (!lhs)
    return;

  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  rtx ctr = expand_normal (gimple_call_arg (stmt, 0));
  machine_mode mode = TYPE_MODE (TREE_TYPE (lhs));
  class expand_operand ops[2];
  create_output_operand (&ops[0], target, mode);
  create_input_operand (&ops[1], ctr, mode);
  gcc_assert (targetm.have_omp_simt_ordered ());
  expand_insn (targetm.code_for_omp_simt_ordered, 2, ops);
}

/* "Or" boolean reduction across SIMT lanes: return non-zero in all lanes if
   any lane supplies a non-zero argument.  */

static void
expand_GOMP_SIMT_VOTE_ANY (internal_fn, gcall *stmt)
{
  tree lhs = gimple_call_lhs (stmt);
  if (!lhs)
    return;

  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  rtx cond = expand_normal (gimple_call_arg (stmt, 0));
  machine_mode mode = TYPE_MODE (TREE_TYPE (lhs));
  class expand_operand ops[2];
  create_output_operand (&ops[0], target, mode);
  create_input_operand (&ops[1], cond, mode);
  gcc_assert (targetm.have_omp_simt_vote_any ());
  expand_insn (targetm.code_for_omp_simt_vote_any, 2, ops);
}

/* Exchange between SIMT lanes with a "butterfly" pattern: source lane index
   is destination lane index XOR given offset.  */

static void
expand_GOMP_SIMT_XCHG_BFLY (internal_fn, gcall *stmt)
{
  tree lhs = gimple_call_lhs (stmt);
  if (!lhs)
    return;

  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  rtx src = expand_normal (gimple_call_arg (stmt, 0));
  rtx idx = expand_normal (gimple_call_arg (stmt, 1));
  machine_mode mode = TYPE_MODE (TREE_TYPE (lhs));
  class expand_operand ops[3];
  create_output_operand (&ops[0], target, mode);
  create_input_operand (&ops[1], src, mode);
  create_input_operand (&ops[2], idx, SImode);
  gcc_assert (targetm.have_omp_simt_xchg_bfly ());
  expand_insn (targetm.code_for_omp_simt_xchg_bfly, 3, ops);
}

/* Exchange between SIMT lanes according to given source lane index.  */

static void
expand_GOMP_SIMT_XCHG_IDX (internal_fn, gcall *stmt)
{
  tree lhs = gimple_call_lhs (stmt);
  if (!lhs)
    return;

  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  rtx src = expand_normal (gimple_call_arg (stmt, 0));
  rtx idx = expand_normal (gimple_call_arg (stmt, 1));
  machine_mode mode = TYPE_MODE (TREE_TYPE (lhs));
  class expand_operand ops[3];
  create_output_operand (&ops[0], target, mode);
  create_input_operand (&ops[1], src, mode);
  create_input_operand (&ops[2], idx, SImode);
  gcc_assert (targetm.have_omp_simt_xchg_idx ());
  expand_insn (targetm.code_for_omp_simt_xchg_idx, 3, ops);
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LANE (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_VF (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LAST_LANE (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_ORDERED_START (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_ORDERED_END (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_NULL (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_BOUNDS (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_VPTR (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_PTR (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_OBJECT_SIZE (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_ASAN_CHECK (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_ASAN_MARK (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_ASAN_POISON (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_ASAN_POISON_USE (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in the tsan pass.  */

static void
expand_TSAN_FUNC_EXIT (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in the lower pass.  */

static void
expand_FALLTHROUGH (internal_fn, gcall *call)
{
  error_at (gimple_location (call),
            "invalid use of attribute %<fallthrough%>");
}

/* Return minimum precision needed to represent all values
   of ARG in SIGNed integral type.  */

static int
get_min_precision (tree arg, signop sign)
{
  int prec = TYPE_PRECISION (TREE_TYPE (arg));
  int cnt = 0;
  signop orig_sign = sign;
  if (TREE_CODE (arg) == INTEGER_CST)
    {
      int p;
      if (TYPE_SIGN (TREE_TYPE (arg)) != sign)
        {
          widest_int w = wi::to_widest (arg);
          w = wi::ext (w, prec, sign);
          p = wi::min_precision (w, sign);
        }
      else
        p = wi::min_precision (wi::to_wide (arg), sign);
      return MIN (p, prec);
    }
  while (CONVERT_EXPR_P (arg)
         && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
         && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
    {
      arg = TREE_OPERAND (arg, 0);
      if (TYPE_PRECISION (TREE_TYPE (arg)) < prec)
        {
          if (TYPE_UNSIGNED (TREE_TYPE (arg)))
            sign = UNSIGNED;
          else if (sign == UNSIGNED && get_range_pos_neg (arg) != 1)
            return prec + (orig_sign != sign);
          prec = TYPE_PRECISION (TREE_TYPE (arg));
        }
      if (++cnt > 30)
        return prec + (orig_sign != sign);
    }
  if (TREE_CODE (arg) != SSA_NAME)
    return prec + (orig_sign != sign);
  wide_int arg_min, arg_max;
  while (get_range_info (arg, &arg_min, &arg_max) != VR_RANGE)
    {
      gimple *g = SSA_NAME_DEF_STMT (arg);
      if (is_gimple_assign (g)
          && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
        {
          tree t = gimple_assign_rhs1 (g);
          if (INTEGRAL_TYPE_P (TREE_TYPE (t))
              && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
            {
              arg = t;
              if (TYPE_PRECISION (TREE_TYPE (arg)) < prec)
                {
                  if (TYPE_UNSIGNED (TREE_TYPE (arg)))
                    sign = UNSIGNED;
                  else if (sign == UNSIGNED && get_range_pos_neg (arg) != 1)
                    return prec + (orig_sign != sign);
                  prec = TYPE_PRECISION (TREE_TYPE (arg));
                }
              if (++cnt > 30)
                return prec + (orig_sign != sign);
              continue;
            }
        }
      return prec + (orig_sign != sign);
    }
  if (sign == TYPE_SIGN (TREE_TYPE (arg)))
    {
      int p1 = wi::min_precision (arg_min, sign);
      int p2 = wi::min_precision (arg_max, sign);
      p1 = MAX (p1, p2);
      prec = MIN (prec, p1);
    }
  else if (sign == UNSIGNED && !wi::neg_p (arg_min, SIGNED))
    {
      int p = wi::min_precision (arg_max, UNSIGNED);
      prec = MIN (prec, p);
    }
  return prec + (orig_sign != sign);
}

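/* A worked illustration (assumed types, not from the sources): for
   ARG = (int) c with c an unsigned char SSA name without recorded range
   info, the conversion is stripped and PREC drops from 32 to 8, so
   SIGN == UNSIGNED yields 8, while SIGN == SIGNED yields 9: the
   "+ (orig_sign != sign)" term pays for the extra sign bit needed to
   keep all 8-bit unsigned values representable.  */
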
/* Helper for expand_*_overflow.  Set the __imag__ part to true
   (1 except for signed:1 type, in which case store -1).  */

static void
expand_arith_set_overflow (tree lhs, rtx target)
{
  if (TYPE_PRECISION (TREE_TYPE (TREE_TYPE (lhs))) == 1
      && !TYPE_UNSIGNED (TREE_TYPE (TREE_TYPE (lhs))))
    write_complex_part (target, constm1_rtx, true);
  else
    write_complex_part (target, const1_rtx, true);
}

/* Helper for expand_*_overflow.  Store RES into the __real__ part
   of TARGET.  If RES has larger MODE than __real__ part of TARGET,
   set the __imag__ part to 1 if RES doesn't fit into it.  Similarly
   if LHS has smaller precision than its mode.  */

static void
expand_arith_overflow_result_store (tree lhs, rtx target,
                                    scalar_int_mode mode, rtx res)
{
  scalar_int_mode tgtmode
    = as_a <scalar_int_mode> (GET_MODE_INNER (GET_MODE (target)));
  rtx lres = res;
  if (tgtmode != mode)
    {
      rtx_code_label *done_label = gen_label_rtx ();
      int uns = TYPE_UNSIGNED (TREE_TYPE (TREE_TYPE (lhs)));
      lres = convert_modes (tgtmode, mode, res, uns);
      gcc_assert (GET_MODE_PRECISION (tgtmode) < GET_MODE_PRECISION (mode));
      do_compare_rtx_and_jump (res, convert_modes (mode, tgtmode, lres, uns),
                               EQ, true, mode, NULL_RTX, NULL, done_label,
                               profile_probability::very_likely ());
      expand_arith_set_overflow (lhs, target);
      emit_label (done_label);
    }
  int prec = TYPE_PRECISION (TREE_TYPE (TREE_TYPE (lhs)));
  int tgtprec = GET_MODE_PRECISION (tgtmode);
  if (prec < tgtprec)
    {
      rtx_code_label *done_label = gen_label_rtx ();
      int uns = TYPE_UNSIGNED (TREE_TYPE (TREE_TYPE (lhs)));
      res = lres;
      if (uns)
        {
          rtx mask
            = immed_wide_int_const (wi::shifted_mask (0, prec, false, tgtprec),
                                    tgtmode);
          lres = expand_simple_binop (tgtmode, AND, res, mask, NULL_RTX,
                                      true, OPTAB_LIB_WIDEN);
        }
      else
        {
          lres = expand_shift (LSHIFT_EXPR, tgtmode, res, tgtprec - prec,
                               NULL_RTX, 1);
          lres = expand_shift (RSHIFT_EXPR, tgtmode, lres, tgtprec - prec,
                               NULL_RTX, 0);
        }
      do_compare_rtx_and_jump (res, lres,
                               EQ, true, tgtmode, NULL_RTX, NULL, done_label,
                               profile_probability::very_likely ());
      expand_arith_set_overflow (lhs, target);
      emit_label (done_label);
    }
  write_complex_part (target, lres, false);
}

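/* A small illustration (assumed types, not from the sources): when LHS
   is a 3-bit unsigned bit-field whose mode is QImode, PREC == 3 is less
   than TGTPREC == 8, so LRES is masked down to the low 3 bits and
   compared against RES; any dropped bit means the true result did not
   fit, and the __imag__ overflow flag is set.  */
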
/* Helper for expand_*_overflow.  Store RES into TARGET.  */

static void
expand_ubsan_result_store (rtx target, rtx res)
{
  if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    convert_move (SUBREG_REG (target), res, SUBREG_PROMOTED_SIGN (target));
  else
    emit_move_insn (target, res);
}

/* Add sub/add overflow checking to the statement STMT.
   CODE says whether the operation is +, or -.  */

static void
expand_addsub_overflow (location_t loc, tree_code code, tree lhs,
                        tree arg0, tree arg1, bool unsr_p, bool uns0_p,
                        bool uns1_p, bool is_ubsan, tree *datap)
{
  rtx res, target = NULL_RTX;
  tree fn;
  rtx_code_label *done_label = gen_label_rtx ();
  rtx_code_label *do_error = gen_label_rtx ();
  do_pending_stack_adjust ();
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  scalar_int_mode mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (arg0));
  int prec = GET_MODE_PRECISION (mode);
  rtx sgn = immed_wide_int_const (wi::min_value (prec, SIGNED), mode);
  bool do_xor = false;

  if (is_ubsan)
    gcc_assert (!unsr_p && !uns0_p && !uns1_p);

  if (lhs)
    {
      target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (!is_ubsan)
        write_complex_part (target, const0_rtx, true);
    }

  /* We assume both operands and result have the same precision
     here (GET_MODE_BITSIZE (mode)), S stands for signed type
     with that precision, U for unsigned type with that precision,
     sgn for unsigned most significant bit in that precision.
     s1 is signed first operand, u1 is unsigned first operand,
     s2 is signed second operand, u2 is unsigned second operand,
     sr is signed result, ur is unsigned result and the following
     rules say how to compute result (which is always result of
     the operands as if both were unsigned, cast to the right
     signedness) and how to compute whether operation overflowed.

     s1 + s2 -> sr
        res = (S) ((U) s1 + (U) s2)
        ovf = s2 < 0 ? res > s1 : res < s1 (or jump on overflow)
     s1 - s2 -> sr
        res = (S) ((U) s1 - (U) s2)
        ovf = s2 < 0 ? res < s1 : res > s1 (or jump on overflow)
     u1 + u2 -> ur
        res = u1 + u2
        ovf = res < u1 (or jump on carry, but RTL opts will handle it)
     u1 - u2 -> ur
        res = u1 - u2
        ovf = res > u1 (or jump on carry, but RTL opts will handle it)
     s1 + u2 -> sr
        res = (S) ((U) s1 + u2)
        ovf = ((U) res ^ sgn) < u2
     s1 + u2 -> ur
        t1 = (S) (u2 ^ sgn)
        t2 = s1 + t1
        res = (U) t2 ^ sgn
        ovf = t1 < 0 ? t2 > s1 : t2 < s1 (or jump on overflow)
     s1 - u2 -> sr
        res = (S) ((U) s1 - u2)
        ovf = u2 > ((U) s1 ^ sgn)
     s1 - u2 -> ur
        res = (U) s1 - u2
        ovf = s1 < 0 || u2 > (U) s1
     u1 - s2 -> sr
        res = u1 - (U) s2
        ovf = u1 >= ((U) s2 ^ sgn)
     u1 - s2 -> ur
        t1 = u1 ^ sgn
        t2 = t1 - (U) s2
        res = t2 ^ sgn
        ovf = s2 < 0 ? (S) t2 < (S) t1 : (S) t2 > (S) t1 (or jump on overflow)
     s1 + s2 -> ur
        res = (U) s1 + (U) s2
        ovf = s2 < 0 ? ((s1 | (S) res) < 0) : ((s1 & (S) res) < 0)
     u1 + u2 -> sr
        res = (S) (u1 + u2)
        ovf = (U) res < u2 || res < 0
     u1 - u2 -> sr
        res = (S) (u1 - u2)
        ovf = u1 >= u2 ? res < 0 : res >= 0
     s1 - s2 -> ur
        res = (U) s1 - (U) s2
        ovf = s2 >= 0 ? ((s1 | (S) res) < 0) : ((s1 & (S) res) < 0)  */

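  /* A concrete instance of the u1 + u2 -> ur rule above, in 8-bit
     precision (illustrative only): u1 = 200, u2 = 100 wraps to res = 44,
     and res < u1 detects the overflow.  The u1 - u2 -> ur rule is
     symmetric: 100 - 200 wraps to 156, and res > u1 flags it.  */
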
  if (code == PLUS_EXPR && uns0_p && !uns1_p)
    {
      /* PLUS_EXPR is commutative; if operand signedness differs,
         canonicalize to the first operand being signed and second
         unsigned to simplify following code.  */
      std::swap (op0, op1);
      std::swap (arg0, arg1);
      uns0_p = false;
      uns1_p = true;
    }

  /* u1 +- u2 -> ur */
  if (uns0_p && uns1_p && unsr_p)
    {
      insn_code icode = optab_handler (code == PLUS_EXPR ? uaddv4_optab
                                       : usubv4_optab, mode);
      if (icode != CODE_FOR_nothing)
        {
          class expand_operand ops[4];
          rtx_insn *last = get_last_insn ();

          res = gen_reg_rtx (mode);
          create_output_operand (&ops[0], res, mode);
          create_input_operand (&ops[1], op0, mode);
          create_input_operand (&ops[2], op1, mode);
          create_fixed_operand (&ops[3], do_error);
          if (maybe_expand_insn (icode, 4, ops))
            {
              last = get_last_insn ();
              if (profile_status_for_fn (cfun) != PROFILE_ABSENT
                  && JUMP_P (last)
                  && any_condjump_p (last)
                  && !find_reg_note (last, REG_BR_PROB, 0))
                add_reg_br_prob_note (last,
                                      profile_probability::very_unlikely ());
              emit_jump (done_label);
              goto do_error_label;
            }

          delete_insns_since (last);
        }

      /* Compute the operation.  On RTL level, the addition is always
         unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
                          op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);
      rtx tem = op0;
      /* For PLUS_EXPR, the operation is commutative, so we can pick
         operand to compare against.  For prec <= BITS_PER_WORD, I think
         preferring REG operand is better over CONST_INT, because
         the CONST_INT might enlarge the instruction or CSE would need
         to figure out we'd already loaded it into a register before.
         For prec > BITS_PER_WORD, I think CONST_INT might be more beneficial,
         as then the multi-word comparison can be perhaps simplified.  */
      if (code == PLUS_EXPR
          && (prec <= BITS_PER_WORD
              ? (CONST_SCALAR_INT_P (op0) && REG_P (op1))
              : CONST_SCALAR_INT_P (op1)))
        tem = op1;
      do_compare_rtx_and_jump (res, tem, code == PLUS_EXPR ? GEU : LEU,
                               true, mode, NULL_RTX, NULL, done_label,
                               profile_probability::very_likely ());
      goto do_error_label;
    }

  /* s1 +- u2 -> sr */
  if (!uns0_p && uns1_p && !unsr_p)
    {
      /* Compute the operation.  On RTL level, the addition is always
         unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
                          op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);
      rtx tem = expand_binop (mode, add_optab,
                              code == PLUS_EXPR ? res : op0, sgn,
                              NULL_RTX, false, OPTAB_LIB_WIDEN);
      do_compare_rtx_and_jump (tem, op1, GEU, true, mode, NULL_RTX, NULL,
                               done_label, profile_probability::very_likely ());
      goto do_error_label;
    }

  /* s1 + u2 -> ur */
  if (code == PLUS_EXPR && !uns0_p && uns1_p && unsr_p)
    {
      op1 = expand_binop (mode, add_optab, op1, sgn, NULL_RTX, false,
                          OPTAB_LIB_WIDEN);
      /* As we've changed op1, we have to avoid using the value range
         for the original argument.  */
      arg1 = error_mark_node;
      do_xor = true;
      goto do_signed;
    }

  /* u1 - s2 -> ur */
  if (code == MINUS_EXPR && uns0_p && !uns1_p && unsr_p)
    {
      op0 = expand_binop (mode, add_optab, op0, sgn, NULL_RTX, false,
                          OPTAB_LIB_WIDEN);
      /* As we've changed op0, we have to avoid using the value range
         for the original argument.  */
      arg0 = error_mark_node;
      do_xor = true;
      goto do_signed;
    }

  /* s1 - u2 -> ur */
  if (code == MINUS_EXPR && !uns0_p && uns1_p && unsr_p)
    {
      /* Compute the operation.  On RTL level, the addition is always
         unsigned.  */
      res = expand_binop (mode, sub_optab, op0, op1, NULL_RTX, false,
                          OPTAB_LIB_WIDEN);
      int pos_neg = get_range_pos_neg (arg0);
      if (pos_neg == 2)
        /* If ARG0 is known to be always negative, this is always overflow.  */
        emit_jump (do_error);
      else if (pos_neg == 3)
        /* If ARG0 is not known to be always positive, check at runtime.  */
        do_compare_rtx_and_jump (op0, const0_rtx, LT, false, mode, NULL_RTX,
                                 NULL, do_error,
                                 profile_probability::very_unlikely ());
      do_compare_rtx_and_jump (op1, op0, LEU, true, mode, NULL_RTX, NULL,
                               done_label, profile_probability::very_likely ());
      goto do_error_label;
    }

  /* u1 - s2 -> sr */
  if (code == MINUS_EXPR && uns0_p && !uns1_p && !unsr_p)
    {
      /* Compute the operation.  On RTL level, the addition is always
         unsigned.  */
      res = expand_binop (mode, sub_optab, op0, op1, NULL_RTX, false,
                          OPTAB_LIB_WIDEN);
      rtx tem = expand_binop (mode, add_optab, op1, sgn, NULL_RTX, false,
                              OPTAB_LIB_WIDEN);
      do_compare_rtx_and_jump (op0, tem, LTU, true, mode, NULL_RTX, NULL,
                               done_label, profile_probability::very_likely ());
      goto do_error_label;
    }

  /* u1 + u2 -> sr */
  if (code == PLUS_EXPR && uns0_p && uns1_p && !unsr_p)
    {
      /* Compute the operation.  On RTL level, the addition is always
         unsigned.  */
      res = expand_binop (mode, add_optab, op0, op1, NULL_RTX, false,
                          OPTAB_LIB_WIDEN);
      do_compare_rtx_and_jump (res, const0_rtx, LT, false, mode, NULL_RTX,
                               NULL, do_error,
                               profile_probability::very_unlikely ());
      rtx tem = op1;
      /* The operation is commutative, so we can pick operand to compare
         against.  For prec <= BITS_PER_WORD, I think preferring REG operand
         is better over CONST_INT, because the CONST_INT might enlarge the
         instruction or CSE would need to figure out we'd already loaded it
         into a register before.  For prec > BITS_PER_WORD, I think CONST_INT
         might be more beneficial, as then the multi-word comparison can be
         perhaps simplified.  */
      if (prec <= BITS_PER_WORD
          ? (CONST_SCALAR_INT_P (op1) && REG_P (op0))
          : CONST_SCALAR_INT_P (op0))
        tem = op0;
      do_compare_rtx_and_jump (res, tem, GEU, true, mode, NULL_RTX, NULL,
                               done_label, profile_probability::very_likely ());
      goto do_error_label;
    }

  /* s1 +- s2 -> ur */
  if (!uns0_p && !uns1_p && unsr_p)
    {
      /* Compute the operation.  On RTL level, the addition is always
         unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
                          op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);
      int pos_neg = get_range_pos_neg (arg1);
      if (code == PLUS_EXPR)
        {
          int pos_neg0 = get_range_pos_neg (arg0);
          if (pos_neg0 != 3 && pos_neg == 3)
            {
              std::swap (op0, op1);
              pos_neg = pos_neg0;
            }
        }
      rtx tem;
      if (pos_neg != 3)
        {
          tem = expand_binop (mode, ((pos_neg == 1) ^ (code == MINUS_EXPR))
                                    ? and_optab : ior_optab,
                              op0, res, NULL_RTX, false, OPTAB_LIB_WIDEN);
          do_compare_rtx_and_jump (tem, const0_rtx, GE, false, mode, NULL,
                                   NULL, done_label,
                                   profile_probability::very_likely ());
        }
      else
        {
          rtx_code_label *do_ior_label = gen_label_rtx ();
          do_compare_rtx_and_jump (op1, const0_rtx,
                                   code == MINUS_EXPR ? GE : LT, false, mode,
                                   NULL_RTX, NULL, do_ior_label,
                                   profile_probability::even ());
          tem = expand_binop (mode, and_optab, op0, res, NULL_RTX, false,
                              OPTAB_LIB_WIDEN);
          do_compare_rtx_and_jump (tem, const0_rtx, GE, false, mode, NULL_RTX,
                                   NULL, done_label,
                                   profile_probability::very_likely ());
          emit_jump (do_error);
          emit_label (do_ior_label);
          tem = expand_binop (mode, ior_optab, op0, res, NULL_RTX, false,
                              OPTAB_LIB_WIDEN);
          do_compare_rtx_and_jump (tem, const0_rtx, GE, false, mode, NULL_RTX,
                                   NULL, done_label,
                                   profile_probability::very_likely ());
        }
      goto do_error_label;
    }

  /* u1 - u2 -> sr */
  if (code == MINUS_EXPR && uns0_p && uns1_p && !unsr_p)
    {
      /* Compute the operation.  On RTL level, the addition is always
         unsigned.  */
      res = expand_binop (mode, sub_optab, op0, op1, NULL_RTX, false,
                          OPTAB_LIB_WIDEN);
      rtx_code_label *op0_geu_op1 = gen_label_rtx ();
      do_compare_rtx_and_jump (op0, op1, GEU, true, mode, NULL_RTX, NULL,
                               op0_geu_op1, profile_probability::even ());
      do_compare_rtx_and_jump (res, const0_rtx, LT, false, mode, NULL_RTX,
                               NULL, done_label,
                               profile_probability::very_likely ());
      emit_jump (do_error);
      emit_label (op0_geu_op1);
      do_compare_rtx_and_jump (res, const0_rtx, GE, false, mode, NULL_RTX,
                               NULL, done_label,
                               profile_probability::very_likely ());
      goto do_error_label;
    }

  gcc_assert (!uns0_p && !uns1_p && !unsr_p);

  /* s1 +- s2 -> sr */
 do_signed:
  {
    insn_code icode = optab_handler (code == PLUS_EXPR ? addv4_optab
                                     : subv4_optab, mode);
    if (icode != CODE_FOR_nothing)
      {
        class expand_operand ops[4];
        rtx_insn *last = get_last_insn ();

        res = gen_reg_rtx (mode);
        create_output_operand (&ops[0], res, mode);
        create_input_operand (&ops[1], op0, mode);
        create_input_operand (&ops[2], op1, mode);
        create_fixed_operand (&ops[3], do_error);
        if (maybe_expand_insn (icode, 4, ops))
          {
            last = get_last_insn ();
            if (profile_status_for_fn (cfun) != PROFILE_ABSENT
                && JUMP_P (last)
                && any_condjump_p (last)
                && !find_reg_note (last, REG_BR_PROB, 0))
              add_reg_br_prob_note (last,
                                    profile_probability::very_unlikely ());
            emit_jump (done_label);
            goto do_error_label;
          }

        delete_insns_since (last);
      }

    /* Compute the operation.  On RTL level, the addition is always
       unsigned.  */
    res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
                        op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);

    /* If we can prove that one of the arguments (for MINUS_EXPR only
       the second operand, as subtraction is not commutative) is always
       non-negative or always negative, we can do just one comparison
       and conditional jump.  */
    int pos_neg = get_range_pos_neg (arg1);
    if (code == PLUS_EXPR)
      {
        int pos_neg0 = get_range_pos_neg (arg0);
        if (pos_neg0 != 3 && pos_neg == 3)
          {
            std::swap (op0, op1);
            pos_neg = pos_neg0;
          }
      }

    /* Addition overflows if and only if the two operands have the same sign,
       and the result has the opposite sign.  Subtraction overflows if and
       only if the two operands have opposite sign, and the subtrahend has
       the same sign as the result.  Here 0 is counted as positive.  */
    if (pos_neg == 3)
      {
        /* Compute op0 ^ op1 (operands have opposite sign).  */
        rtx op_xor = expand_binop (mode, xor_optab, op0, op1, NULL_RTX, false,
                                   OPTAB_LIB_WIDEN);

        /* Compute res ^ op1 (result and 2nd operand have opposite sign).  */
        rtx res_xor = expand_binop (mode, xor_optab, res, op1, NULL_RTX, false,
                                    OPTAB_LIB_WIDEN);

        rtx tem;
        if (code == PLUS_EXPR)
          {
            /* Compute (res ^ op1) & ~(op0 ^ op1).  */
            tem = expand_unop (mode, one_cmpl_optab, op_xor, NULL_RTX, false);
            tem = expand_binop (mode, and_optab, res_xor, tem, NULL_RTX, false,
                                OPTAB_LIB_WIDEN);
          }
        else
          {
            /* Compute (op0 ^ op1) & ~(res ^ op1).  */
            tem = expand_unop (mode, one_cmpl_optab, res_xor, NULL_RTX, false);
            tem = expand_binop (mode, and_optab, op_xor, tem, NULL_RTX, false,
                                OPTAB_LIB_WIDEN);
          }

        /* No overflow if the result has the sign bit cleared.  */
        do_compare_rtx_and_jump (tem, const0_rtx, GE, false, mode, NULL_RTX,
                                 NULL, done_label,
                                 profile_probability::very_likely ());
      }

    /* Compare the result of the operation with the first operand.
       No overflow for addition if second operand is positive and result
       is larger or second operand is negative and result is smaller.
       Likewise for subtraction with sign of second operand flipped.  */
    else
      do_compare_rtx_and_jump (res, op0,
                               (pos_neg == 1) ^ (code == MINUS_EXPR) ? GE : LE,
                               false, mode, NULL_RTX, NULL, done_label,
                               profile_probability::very_likely ());
  }

 do_error_label:
  emit_label (do_error);
  if (is_ubsan)
    {
      /* Expand the ubsan builtin call.  */
      push_temp_slots ();
      fn = ubsan_build_overflow_builtin (code, loc, TREE_TYPE (arg0),
                                         arg0, arg1, datap);
      expand_normal (fn);
      pop_temp_slots ();
      do_pending_stack_adjust ();
    }
  else if (lhs)
    expand_arith_set_overflow (lhs, target);

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    {
      if (is_ubsan)
        expand_ubsan_result_store (target, res);
      else
        {
          if (do_xor)
            res = expand_binop (mode, add_optab, res, sgn, NULL_RTX, false,
                                OPTAB_LIB_WIDEN);

          expand_arith_overflow_result_store (lhs, target, mode, res);
        }
    }
}

/* Add negate overflow checking to the statement STMT.  */

static void
expand_neg_overflow (location_t loc, tree lhs, tree arg1, bool is_ubsan,
                     tree *datap)
{
  rtx res, op1;
  tree fn;
  rtx_code_label *done_label, *do_error;
  rtx target = NULL_RTX;

  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op1 = expand_normal (arg1);

  scalar_int_mode mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (arg1));
  if (lhs)
    {
      target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (!is_ubsan)
        write_complex_part (target, const0_rtx, true);
    }

  enum insn_code icode = optab_handler (negv3_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      class expand_operand ops[3];
      rtx_insn *last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op1, mode);
      create_fixed_operand (&ops[2], do_error);
      if (maybe_expand_insn (icode, 3, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_reg_br_prob_note (last,
                                  profile_probability::very_unlikely ());
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      /* Compute the operation.  On RTL level, the negation is always
         unsigned.  */
      res = expand_unop (mode, neg_optab, op1, NULL_RTX, false);

      /* Compare the operand with the most negative value.  */
      rtx minv = expand_normal (TYPE_MIN_VALUE (TREE_TYPE (arg1)));
      do_compare_rtx_and_jump (op1, minv, NE, true, mode, NULL_RTX, NULL,
                               done_label, profile_probability::very_likely ());
    }

  emit_label (do_error);
  if (is_ubsan)
    {
      /* Expand the ubsan builtin call.  */
      push_temp_slots ();
      fn = ubsan_build_overflow_builtin (NEGATE_EXPR, loc, TREE_TYPE (arg1),
                                         arg1, NULL_TREE, datap);
      expand_normal (fn);
      pop_temp_slots ();
      do_pending_stack_adjust ();
    }
  else if (lhs)
    expand_arith_set_overflow (lhs, target);

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    {
      if (is_ubsan)
        expand_ubsan_result_store (target, res);
      else
        expand_arith_overflow_result_store (lhs, target, mode, res);
    }
}

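/* A note on the fallback above: two's complement negation overflows
   only for the most negative value, e.g. in 8-bit precision only
   op1 == -128 overflows (128 is not representable), so when no negv3
   pattern exists a single NE comparison against TYPE_MIN_VALUE is
   enough.  */
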
/* Return true if UNS WIDEN_MULT_EXPR with result mode WMODE and operand
   mode MODE can be expanded without using a libcall.  */

static bool
can_widen_mult_without_libcall (scalar_int_mode wmode, scalar_int_mode mode,
                                rtx op0, rtx op1, bool uns)
{
  if (find_widening_optab_handler (umul_widen_optab, wmode, mode)
      != CODE_FOR_nothing)
    return true;

  if (find_widening_optab_handler (smul_widen_optab, wmode, mode)
      != CODE_FOR_nothing)
    return true;

  rtx_insn *last = get_last_insn ();
  if (CONSTANT_P (op0))
    op0 = convert_modes (wmode, mode, op0, uns);
  else
    op0 = gen_raw_REG (wmode, LAST_VIRTUAL_REGISTER + 1);
  if (CONSTANT_P (op1))
    op1 = convert_modes (wmode, mode, op1, uns);
  else
    op1 = gen_raw_REG (wmode, LAST_VIRTUAL_REGISTER + 2);
  rtx ret = expand_mult (wmode, op0, op1, NULL_RTX, uns, true);
  delete_insns_since (last);
  return ret != NULL_RTX;
}

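/* A note on the probe above: the widening multiply is trial-expanded
   into the instruction stream using throwaway registers past
   LAST_VIRTUAL_REGISTER and with the no_libcall argument of expand_mult
   set, and everything emitted is then deleted again; a NULL_RTX result
   means the expansion would have needed a libcall.  */
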
/* Add mul overflow checking to the statement STMT.  */

static void
expand_mul_overflow (location_t loc, tree lhs, tree arg0, tree arg1,
                     bool unsr_p, bool uns0_p, bool uns1_p, bool is_ubsan,
                     tree *datap)
{
  rtx res, op0, op1;
  tree fn, type;
  rtx_code_label *done_label, *do_error;
  rtx target = NULL_RTX;
  signop sign;
  enum insn_code icode;

  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  scalar_int_mode mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (arg0));
  bool uns = unsr_p;
  if (lhs)
    {
      target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (!is_ubsan)
        write_complex_part (target, const0_rtx, true);
    }

  if (is_ubsan)
    gcc_assert (!unsr_p && !uns0_p && !uns1_p);

  /* We assume both operands and result have the same precision
     here (GET_MODE_BITSIZE (mode)), S stands for signed type
     with that precision, U for unsigned type with that precision,
     sgn for unsigned most significant bit in that precision.
     s1 is signed first operand, u1 is unsigned first operand,
     s2 is signed second operand, u2 is unsigned second operand,
     sr is signed result, ur is unsigned result and the following
     rules say how to compute result (which is always result of
     the operands as if both were unsigned, cast to the right
     signedness) and how to compute whether operation overflowed.
     main_ovf (false) stands for jump on signed multiplication
     overflow or the main algorithm with uns == false.
     main_ovf (true) stands for jump on unsigned multiplication
     overflow or the main algorithm with uns == true.

     s1 * s2 -> sr
        res = (S) ((U) s1 * (U) s2)
        ovf = main_ovf (false)
     u1 * u2 -> ur
        res = u1 * u2
        ovf = main_ovf (true)
     s1 * u2 -> ur
        res = (U) s1 * u2
        ovf = (s1 < 0 && u2) || main_ovf (true)
     u1 * u2 -> sr
        res = (S) (u1 * u2)
        ovf = res < 0 || main_ovf (true)
     s1 * u2 -> sr
        res = (S) ((U) s1 * u2)
        ovf = (S) u2 >= 0 ? main_ovf (false)
                          : (s1 != 0 && (s1 != -1 || u2 != (U) res))
     s1 * s2 -> ur
        t1 = (s1 & s2) < 0 ? (-(U) s1) : ((U) s1)
        t2 = (s1 & s2) < 0 ? (-(U) s2) : ((U) s2)
        res = t1 * t2
        ovf = (s1 ^ s2) < 0 ? (s1 && s2) : main_ovf (true)  */

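  /* A concrete instance of the u1 * u2 -> sr rule above, in 8-bit
     precision (illustrative only): u1 = u2 = 16 wraps to res = 0, where
     res < 0 is false but main_ovf (true) still reports the overflow,
     since the true product 256 does not fit; u1 = 13, u2 = 11 gives
     res = 143 with no unsigned wrapping, yet (S) res = -113 < 0, an
     overflow only the res < 0 test catches.  */
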
  if (uns0_p && !uns1_p)
    {
      /* Multiplication is commutative; if operand signedness differs,
         canonicalize to the first operand being signed and second
         unsigned to simplify following code.  */
      std::swap (op0, op1);
      std::swap (arg0, arg1);
      uns0_p = false;
      uns1_p = true;
    }

  int pos_neg0 = get_range_pos_neg (arg0);
  int pos_neg1 = get_range_pos_neg (arg1);

  /* s1 * u2 -> ur */
  if (!uns0_p && uns1_p && unsr_p)
    {
      switch (pos_neg0)
        {
        case 1:
          /* If s1 is non-negative, just perform normal u1 * u2 -> ur.  */
          goto do_main;
        case 2:
          /* If s1 is negative, avoid the main code, just multiply and
             signal overflow if op1 is not 0.  */
          struct separate_ops ops;
          ops.code = MULT_EXPR;
          ops.type = TREE_TYPE (arg1);
          ops.op0 = make_tree (ops.type, op0);
          ops.op1 = make_tree (ops.type, op1);
          ops.op2 = NULL_TREE;
          ops.location = loc;
          res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          do_compare_rtx_and_jump (op1, const0_rtx, EQ, true, mode, NULL_RTX,
                                   NULL, done_label,
                                   profile_probability::very_likely ());
          goto do_error_label;
        case 3:
          rtx_code_label *do_main_label;
          do_main_label = gen_label_rtx ();
          do_compare_rtx_and_jump (op0, const0_rtx, GE, false, mode, NULL_RTX,
                                   NULL, do_main_label,
                                   profile_probability::very_likely ());
          do_compare_rtx_and_jump (op1, const0_rtx, EQ, true, mode, NULL_RTX,
                                   NULL, do_main_label,
                                   profile_probability::very_likely ());
          expand_arith_set_overflow (lhs, target);
          emit_label (do_main_label);
          goto do_main;
        default:
          gcc_unreachable ();
        }
    }

  /* u1 * u2 -> sr */
  if (uns0_p && uns1_p && !unsr_p)
    {
      uns = true;
      /* Rest of handling of this case after res is computed.  */
      goto do_main;
    }

  /* s1 * u2 -> sr */
  if (!uns0_p && uns1_p && !unsr_p)
    {
      switch (pos_neg1)
        {
        case 1:
          goto do_main;
        case 2:
          /* If (S) u2 is negative (i.e. u2 is larger than maximum of S),
             avoid the main code, just multiply and signal overflow
             unless 0 * u2 or -1 * ((U) Smin).  */
          struct separate_ops ops;
          ops.code = MULT_EXPR;
          ops.type = TREE_TYPE (arg1);
          ops.op0 = make_tree (ops.type, op0);
          ops.op1 = make_tree (ops.type, op1);
          ops.op2 = NULL_TREE;
          ops.location = loc;
          res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          do_compare_rtx_and_jump (op0, const0_rtx, EQ, true, mode, NULL_RTX,
                                   NULL, done_label,
                                   profile_probability::very_likely ());
          do_compare_rtx_and_jump (op0, constm1_rtx, NE, true, mode, NULL_RTX,
                                   NULL, do_error,
                                   profile_probability::very_unlikely ());
          int prec;
          prec = GET_MODE_PRECISION (mode);
          rtx sgn;
          sgn = immed_wide_int_const (wi::min_value (prec, SIGNED), mode);
          do_compare_rtx_and_jump (op1, sgn, EQ, true, mode, NULL_RTX,
                                   NULL, done_label,
                                   profile_probability::very_likely ());
          goto do_error_label;
        case 3:
          /* Rest of handling of this case after res is computed.  */
          goto do_main;
        default:
          gcc_unreachable ();
        }
    }

  /* s1 * s2 -> ur */
  if (!uns0_p && !uns1_p && unsr_p)
    {
      rtx tem;
      switch (pos_neg0 | pos_neg1)
        {
        case 1: /* Both operands known to be non-negative.  */
          goto do_main;
        case 2: /* Both operands known to be negative.  */
          op0 = expand_unop (mode, neg_optab, op0, NULL_RTX, false);
          op1 = expand_unop (mode, neg_optab, op1, NULL_RTX, false);
          /* Avoid looking at arg0/arg1 ranges, as we've changed
             the arguments.  */
          arg0 = error_mark_node;
          arg1 = error_mark_node;
          goto do_main;
        case 3:
          if ((pos_neg0 ^ pos_neg1) == 3)
            {
              /* If one operand is known to be negative and the other
                 non-negative, this overflows always, unless the non-negative
                 one is 0.  Just do normal multiply and set overflow
                 unless one of the operands is 0.  */
              struct separate_ops ops;
              ops.code = MULT_EXPR;
              ops.type
                = build_nonstandard_integer_type (GET_MODE_PRECISION (mode),
                                                  1);
              ops.op0 = make_tree (ops.type, op0);
              ops.op1 = make_tree (ops.type, op1);
              ops.op2 = NULL_TREE;
              ops.location = loc;
              res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
              do_compare_rtx_and_jump (pos_neg0 == 1 ? op0 : op1, const0_rtx,
                                       EQ, true, mode, NULL_RTX, NULL,
                                       done_label,
                                       profile_probability::very_likely ());
              goto do_error_label;
            }
          /* The general case, do all the needed comparisons at runtime.  */
          rtx_code_label *do_main_label, *after_negate_label;
          rtx rop0, rop1;
          rop0 = gen_reg_rtx (mode);
          rop1 = gen_reg_rtx (mode);
          emit_move_insn (rop0, op0);
          emit_move_insn (rop1, op1);
          op0 = rop0;
          op1 = rop1;
          do_main_label = gen_label_rtx ();
          after_negate_label = gen_label_rtx ();
          tem = expand_binop (mode, and_optab, op0, op1, NULL_RTX, false,
                              OPTAB_LIB_WIDEN);
          do_compare_rtx_and_jump (tem, const0_rtx, GE, false, mode, NULL_RTX,
                                   NULL, after_negate_label,
                                   profile_probability::very_likely ());
          /* Both arguments negative here, negate them and continue with
             normal unsigned overflow checking multiplication.  */
          emit_move_insn (op0, expand_unop (mode, neg_optab, op0,
                                            NULL_RTX, false));
          emit_move_insn (op1, expand_unop (mode, neg_optab, op1,
                                            NULL_RTX, false));
          /* Avoid looking at arg0/arg1 ranges, as we might have changed
             the arguments.  */
          arg0 = error_mark_node;
          arg1 = error_mark_node;
          emit_jump (do_main_label);
          emit_label (after_negate_label);
          tem = expand_binop (mode, xor_optab, op0, op1, NULL_RTX, false,
                              OPTAB_LIB_WIDEN);
          do_compare_rtx_and_jump (tem, const0_rtx, GE, false, mode, NULL_RTX,
                                   NULL, do_main_label,
                                   profile_probability::very_likely ());
          /* One argument is negative here, the other positive.  This
             overflows always, unless one of the arguments is 0.  But
             if e.g. s2 is 0, (U) s1 * 0 doesn't overflow, whatever s1
             is, thus we can keep do_main code oring in overflow as is.  */
          if (pos_neg0 != 2)
            do_compare_rtx_and_jump (op0, const0_rtx, EQ, true, mode, NULL_RTX,
                                     NULL, do_main_label,
                                     profile_probability::very_unlikely ());
          if (pos_neg1 != 2)
            do_compare_rtx_and_jump (op1, const0_rtx, EQ, true, mode, NULL_RTX,
                                     NULL, do_main_label,
                                     profile_probability::very_unlikely ());
          expand_arith_set_overflow (lhs, target);
          emit_label (do_main_label);
          goto do_main;
        default:
          gcc_unreachable ();
        }
    }

 do_main:
  type = build_nonstandard_integer_type (GET_MODE_PRECISION (mode), uns);
  sign = uns ? UNSIGNED : SIGNED;
  icode = optab_handler (uns ? umulv4_optab : mulv4_optab, mode);
  if (uns
      && (integer_pow2p (arg0) || integer_pow2p (arg1))
      && (optimize_insn_for_speed_p () || icode == CODE_FOR_nothing))
    {
      /* Optimize unsigned multiplication by power of 2 constant
         using 2 shifts, one for result, one to extract the shifted
         out bits to see if they are all zero.
         Don't do this if optimizing for size and we have umulv4_optab,
         in that case assume multiplication will be shorter.
         This is heuristics based on the single target that provides
         umulv4 right now (i?86/x86_64); if further targets add it, this
         might need to be revisited.
         Cases where both operands are constant should be folded already
         during GIMPLE, and cases where one operand is constant but not
         power of 2 are questionable, either the WIDEN_MULT_EXPR case
         below can be done without multiplication, just by shifts and adds,
         or we'd need to divide the result (and hope it actually doesn't
         really divide nor multiply) and compare the result of the division
         with the original operand.  */
      rtx opn0 = op0;
      rtx opn1 = op1;
      tree argn0 = arg0;
      tree argn1 = arg1;
      if (integer_pow2p (arg0))
        {
          std::swap (opn0, opn1);
          std::swap (argn0, argn1);
        }
      int cnt = tree_log2 (argn1);
      if (cnt >= 0 && cnt < GET_MODE_PRECISION (mode))
        {
          rtx upper = const0_rtx;
          res = expand_shift (LSHIFT_EXPR, mode, opn0, cnt, NULL_RTX, uns);
          if (cnt != 0)
            upper = expand_shift (RSHIFT_EXPR, mode, opn0,
                                  GET_MODE_PRECISION (mode) - cnt,
                                  NULL_RTX, uns);
          do_compare_rtx_and_jump (upper, const0_rtx, EQ, true, mode,
                                   NULL_RTX, NULL, done_label,
                                   profile_probability::very_likely ());
          goto do_error_label;
        }
    }
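  /* For instance (illustrative only): for x * 8 in 32-bit precision,
     cnt == 3, so res = x << 3 and upper = x >> 29; the multiplication
     overflowed iff one of the three shifted-out bits was set, i.e. iff
     upper != 0.  */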
31e071ae
MP
1553 if (icode != CODE_FOR_nothing)
1554 {
99b1c316 1555 class expand_operand ops[4];
da664544 1556 rtx_insn *last = get_last_insn ();
31e071ae
MP
1557
1558 res = gen_reg_rtx (mode);
1559 create_output_operand (&ops[0], res, mode);
1560 create_input_operand (&ops[1], op0, mode);
1561 create_input_operand (&ops[2], op1, mode);
1562 create_fixed_operand (&ops[3], do_error);
1563 if (maybe_expand_insn (icode, 4, ops))
1564 {
1565 last = get_last_insn ();
0a6a6ac9 1566 if (profile_status_for_fn (cfun) != PROFILE_ABSENT
31e071ae
MP
1567 && JUMP_P (last)
1568 && any_condjump_p (last)
1569 && !find_reg_note (last, REG_BR_PROB, 0))
5fa396ad
JH
1570 add_reg_br_prob_note (last,
1571 profile_probability::very_unlikely ());
31e071ae
MP
1572 emit_jump (done_label);
1573 }
1574 else
1575 {
1576 delete_insns_since (last);
1577 icode = CODE_FOR_nothing;
1578 }
1579 }

  if (icode == CODE_FOR_nothing)
    {
      struct separate_ops ops;
      int prec = GET_MODE_PRECISION (mode);
      scalar_int_mode hmode, wmode;
      ops.op0 = make_tree (type, op0);
      ops.op1 = make_tree (type, op1);
      ops.op2 = NULL_TREE;
      ops.location = loc;

      /* Optimize unsigned overflow check where we don't use the
	 multiplication result, just whether overflow happened.
	 If we can do MULT_HIGHPART_EXPR, that followed by
	 comparison of the result against zero is cheapest.
	 We'll still compute res, but it should be DCEd later.  */
      use_operand_p use;
      gimple *use_stmt;
      if (!is_ubsan
	  && lhs
	  && uns
	  && !(uns0_p && uns1_p && !unsr_p)
	  && can_mult_highpart_p (mode, uns) == 1
	  && single_imm_use (lhs, &use, &use_stmt)
	  && is_gimple_assign (use_stmt)
	  && gimple_assign_rhs_code (use_stmt) == IMAGPART_EXPR)
	goto highpart;

      if (GET_MODE_2XWIDER_MODE (mode).exists (&wmode)
	  && targetm.scalar_mode_supported_p (wmode)
	  && can_widen_mult_without_libcall (wmode, mode, op0, op1, uns))
	{
	twoxwider:
	  ops.code = WIDEN_MULT_EXPR;
	  ops.type
	    = build_nonstandard_integer_type (GET_MODE_PRECISION (wmode), uns);

	  res = expand_expr_real_2 (&ops, NULL_RTX, wmode, EXPAND_NORMAL);
	  rtx hipart = expand_shift (RSHIFT_EXPR, wmode, res, prec,
				     NULL_RTX, uns);
	  hipart = convert_modes (mode, wmode, hipart, uns);
	  res = convert_modes (mode, wmode, res, uns);
	  if (uns)
	    /* For the unsigned multiplication, there was overflow if
	       HIPART is non-zero.  */
	    do_compare_rtx_and_jump (hipart, const0_rtx, EQ, true, mode,
				     NULL_RTX, NULL, done_label,
				     profile_probability::very_likely ());
	  else
	    {
	      rtx signbit = expand_shift (RSHIFT_EXPR, mode, res, prec - 1,
					  NULL_RTX, 0);
	      /* RES is low half of the double width result, HIPART
		 the high half.  There was overflow if
		 HIPART is different from RES < 0 ? -1 : 0.  */
	      do_compare_rtx_and_jump (signbit, hipart, EQ, true, mode,
				       NULL_RTX, NULL, done_label,
				       profile_probability::very_likely ());
	    }
	}
      else if (can_mult_highpart_p (mode, uns) == 1)
	{
	highpart:
	  ops.code = MULT_HIGHPART_EXPR;
	  ops.type = type;

	  rtx hipart = expand_expr_real_2 (&ops, NULL_RTX, mode,
					   EXPAND_NORMAL);
	  ops.code = MULT_EXPR;
	  res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  if (uns)
	    /* For the unsigned multiplication, there was overflow if
	       HIPART is non-zero.  */
	    do_compare_rtx_and_jump (hipart, const0_rtx, EQ, true, mode,
				     NULL_RTX, NULL, done_label,
				     profile_probability::very_likely ());
	  else
	    {
	      rtx signbit = expand_shift (RSHIFT_EXPR, mode, res, prec - 1,
					  NULL_RTX, 0);
	      /* RES is low half of the double width result, HIPART
		 the high half.  There was overflow if
		 HIPART is different from RES < 0 ? -1 : 0.  */
	      do_compare_rtx_and_jump (signbit, hipart, EQ, true, mode,
				       NULL_RTX, NULL, done_label,
				       profile_probability::very_likely ());
	    }
	}
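      /* Illustrative sketch (not part of GCC): with 32-bit int and
	 64-bit long long (assumptions made for the example), the signed
	 widening path above corresponds to:

	   long long wide = (long long) x * y;	// WIDEN_MULT_EXPR
	   int res = (int) wide;		// low half
	   int hipart = (int) (wide >> 32);	// high half
	   if (hipart != (res < 0 ? -1 : 0))
	     overflow ();

	 i.e. there was no overflow iff the high half is just the sign
	 extension of the low half.  */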
      else if (int_mode_for_size (prec / 2, 1).exists (&hmode)
	       && 2 * GET_MODE_PRECISION (hmode) == prec)
	{
	  rtx_code_label *large_op0 = gen_label_rtx ();
	  rtx_code_label *small_op0_large_op1 = gen_label_rtx ();
	  rtx_code_label *one_small_one_large = gen_label_rtx ();
	  rtx_code_label *both_ops_large = gen_label_rtx ();
	  rtx_code_label *after_hipart_neg = uns ? NULL : gen_label_rtx ();
	  rtx_code_label *after_lopart_neg = uns ? NULL : gen_label_rtx ();
	  rtx_code_label *do_overflow = gen_label_rtx ();
	  rtx_code_label *hipart_different = uns ? NULL : gen_label_rtx ();

	  unsigned int hprec = GET_MODE_PRECISION (hmode);
	  rtx hipart0 = expand_shift (RSHIFT_EXPR, mode, op0, hprec,
				      NULL_RTX, uns);
	  hipart0 = convert_modes (hmode, mode, hipart0, uns);
	  rtx lopart0 = convert_modes (hmode, mode, op0, uns);
	  rtx signbit0 = const0_rtx;
	  if (!uns)
	    signbit0 = expand_shift (RSHIFT_EXPR, hmode, lopart0, hprec - 1,
				     NULL_RTX, 0);
	  rtx hipart1 = expand_shift (RSHIFT_EXPR, mode, op1, hprec,
				      NULL_RTX, uns);
	  hipart1 = convert_modes (hmode, mode, hipart1, uns);
	  rtx lopart1 = convert_modes (hmode, mode, op1, uns);
	  rtx signbit1 = const0_rtx;
	  if (!uns)
	    signbit1 = expand_shift (RSHIFT_EXPR, hmode, lopart1, hprec - 1,
				     NULL_RTX, 0);

	  res = gen_reg_rtx (mode);

	  /* True if op0 resp. op1 are known to be in the range of
	     halfstype.  */
	  bool op0_small_p = false;
	  bool op1_small_p = false;
	  /* True if op0 resp. op1 are known to have all zeros or all ones
	     in the upper half of bits, but are not known to be
	     op{0,1}_small_p.  */
	  bool op0_medium_p = false;
	  bool op1_medium_p = false;
	  /* -1 if op{0,1} is known to be negative, 0 if it is known to be
	     nonnegative, 1 if unknown.  */
	  int op0_sign = 1;
	  int op1_sign = 1;

	  if (pos_neg0 == 1)
	    op0_sign = 0;
	  else if (pos_neg0 == 2)
	    op0_sign = -1;
	  if (pos_neg1 == 1)
	    op1_sign = 0;
	  else if (pos_neg1 == 2)
	    op1_sign = -1;

	  unsigned int mprec0 = prec;
	  if (arg0 != error_mark_node)
	    mprec0 = get_min_precision (arg0, sign);
	  if (mprec0 <= hprec)
	    op0_small_p = true;
	  else if (!uns && mprec0 <= hprec + 1)
	    op0_medium_p = true;
	  unsigned int mprec1 = prec;
	  if (arg1 != error_mark_node)
	    mprec1 = get_min_precision (arg1, sign);
	  if (mprec1 <= hprec)
	    op1_small_p = true;
	  else if (!uns && mprec1 <= hprec + 1)
	    op1_medium_p = true;

	  int smaller_sign = 1;
	  int larger_sign = 1;
	  if (op0_small_p)
	    {
	      smaller_sign = op0_sign;
	      larger_sign = op1_sign;
	    }
	  else if (op1_small_p)
	    {
	      smaller_sign = op1_sign;
	      larger_sign = op0_sign;
	    }
	  else if (op0_sign == op1_sign)
	    {
	      smaller_sign = op0_sign;
	      larger_sign = op0_sign;
	    }

	  if (!op0_small_p)
	    do_compare_rtx_and_jump (signbit0, hipart0, NE, true, hmode,
				     NULL_RTX, NULL, large_op0,
				     profile_probability::unlikely ());

	  if (!op1_small_p)
	    do_compare_rtx_and_jump (signbit1, hipart1, NE, true, hmode,
				     NULL_RTX, NULL, small_op0_large_op1,
				     profile_probability::unlikely ());

	  /* If both op0 and op1 are sign (!uns) or zero (uns) extended from
	     hmode to mode, the multiplication will never overflow.  We can
	     do just one hmode x hmode => mode widening multiplication.  */
	  tree halfstype = build_nonstandard_integer_type (hprec, uns);
	  ops.op0 = make_tree (halfstype, lopart0);
	  ops.op1 = make_tree (halfstype, lopart1);
	  ops.code = WIDEN_MULT_EXPR;
	  ops.type = type;
	  rtx thisres
	    = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (res, thisres);
	  emit_jump (done_label);

	  emit_label (small_op0_large_op1);

	  /* If op0 is sign (!uns) or zero (uns) extended from hmode to mode,
	     but op1 is not, just swap the arguments and handle it as op1
	     sign/zero extended, op0 not.  */
	  rtx larger = gen_reg_rtx (mode);
	  rtx hipart = gen_reg_rtx (hmode);
	  rtx lopart = gen_reg_rtx (hmode);
	  emit_move_insn (larger, op1);
	  emit_move_insn (hipart, hipart1);
	  emit_move_insn (lopart, lopart0);
	  emit_jump (one_small_one_large);

	  emit_label (large_op0);

	  if (!op1_small_p)
	    do_compare_rtx_and_jump (signbit1, hipart1, NE, true, hmode,
				     NULL_RTX, NULL, both_ops_large,
				     profile_probability::unlikely ());

	  /* If op1 is sign (!uns) or zero (uns) extended from hmode to mode,
	     but op0 is not, prepare larger, hipart and lopart pseudos and
	     handle it together with small_op0_large_op1.  */
	  emit_move_insn (larger, op0);
	  emit_move_insn (hipart, hipart0);
	  emit_move_insn (lopart, lopart1);

	  emit_label (one_small_one_large);

	  /* lopart is the low part of the operand that is sign extended
	     to mode, larger is the other operand, hipart is the
	     high part of larger and lopart0 and lopart1 are the low parts
	     of both operands.
	     We perform lopart0 * lopart1 and lopart * hipart widening
	     multiplications.  */
	  tree halfutype = build_nonstandard_integer_type (hprec, 1);
	  ops.op0 = make_tree (halfutype, lopart0);
	  ops.op1 = make_tree (halfutype, lopart1);
	  rtx lo0xlo1
	    = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);

	  ops.op0 = make_tree (halfutype, lopart);
	  ops.op1 = make_tree (halfutype, hipart);
	  rtx loxhi = gen_reg_rtx (mode);
	  rtx tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (loxhi, tem);

	  if (!uns)
	    {
	      /* if (hipart < 0) loxhi -= lopart << (bitsize / 2);  */
	      if (larger_sign == 0)
		emit_jump (after_hipart_neg);
	      else if (larger_sign != -1)
		do_compare_rtx_and_jump (hipart, const0_rtx, GE, false, hmode,
					 NULL_RTX, NULL, after_hipart_neg,
					 profile_probability::even ());

	      tem = convert_modes (mode, hmode, lopart, 1);
	      tem = expand_shift (LSHIFT_EXPR, mode, tem, hprec, NULL_RTX, 1);
	      tem = expand_simple_binop (mode, MINUS, loxhi, tem, NULL_RTX,
					 1, OPTAB_WIDEN);
	      emit_move_insn (loxhi, tem);

	      emit_label (after_hipart_neg);

	      /* if (lopart < 0) loxhi -= larger;  */
	      if (smaller_sign == 0)
		emit_jump (after_lopart_neg);
	      else if (smaller_sign != -1)
		do_compare_rtx_and_jump (lopart, const0_rtx, GE, false, hmode,
					 NULL_RTX, NULL, after_lopart_neg,
					 profile_probability::even ());

	      tem = expand_simple_binop (mode, MINUS, loxhi, larger, NULL_RTX,
					 1, OPTAB_WIDEN);
	      emit_move_insn (loxhi, tem);

	      emit_label (after_lopart_neg);
	    }

	  /* loxhi += (uns) lo0xlo1 >> (bitsize / 2);  */
	  tem = expand_shift (RSHIFT_EXPR, mode, lo0xlo1, hprec, NULL_RTX, 1);
	  tem = expand_simple_binop (mode, PLUS, loxhi, tem, NULL_RTX,
				     1, OPTAB_WIDEN);
	  emit_move_insn (loxhi, tem);

	  /* if (loxhi >> (bitsize / 2)
		 == (hmode) loxhi >> (bitsize / 2 - 1))  (if !uns)
	     if (loxhi >> (bitsize / 2) == 0)  (if uns).  */
	  rtx hipartloxhi = expand_shift (RSHIFT_EXPR, mode, loxhi, hprec,
					  NULL_RTX, 0);
	  hipartloxhi = convert_modes (hmode, mode, hipartloxhi, 0);
	  rtx signbitloxhi = const0_rtx;
	  if (!uns)
	    signbitloxhi = expand_shift (RSHIFT_EXPR, hmode,
					 convert_modes (hmode, mode,
							loxhi, 0),
					 hprec - 1, NULL_RTX, 0);

	  do_compare_rtx_and_jump (signbitloxhi, hipartloxhi, NE, true, hmode,
				   NULL_RTX, NULL, do_overflow,
				   profile_probability::very_unlikely ());

	  /* res = (loxhi << (bitsize / 2)) | (hmode) lo0xlo1;  */
	  rtx loxhishifted = expand_shift (LSHIFT_EXPR, mode, loxhi, hprec,
					   NULL_RTX, 1);
	  tem = convert_modes (mode, hmode,
			       convert_modes (hmode, mode, lo0xlo1, 1), 1);

	  tem = expand_simple_binop (mode, IOR, loxhishifted, tem, res,
				     1, OPTAB_WIDEN);
	  if (tem != res)
	    emit_move_insn (res, tem);
	  emit_jump (done_label);

	  emit_label (both_ops_large);

	  /* If both operands are large (not sign (!uns) or zero (uns)
	     extended from hmode), then perform the full multiplication
	     which will be the result of the operation.
	     The only cases which don't overflow are some signed
	     multiplication cases where both hipart0 and hipart1 are
	     0 or -1.  For unsigned multiplication this always overflows
	     when both high parts are non-zero.  */
	  ops.code = MULT_EXPR;
	  ops.op0 = make_tree (type, op0);
	  ops.op1 = make_tree (type, op1);
	  tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (res, tem);

	  if (!uns)
	    {
	      if (!op0_medium_p)
		{
		  tem = expand_simple_binop (hmode, PLUS, hipart0, const1_rtx,
					     NULL_RTX, 1, OPTAB_WIDEN);
		  do_compare_rtx_and_jump (tem, const1_rtx, GTU, true, hmode,
					   NULL_RTX, NULL, do_error,
					   profile_probability::very_unlikely ());
		}

	      if (!op1_medium_p)
		{
		  tem = expand_simple_binop (hmode, PLUS, hipart1, const1_rtx,
					     NULL_RTX, 1, OPTAB_WIDEN);
		  do_compare_rtx_and_jump (tem, const1_rtx, GTU, true, hmode,
					   NULL_RTX, NULL, do_error,
					   profile_probability::very_unlikely ());
		}

	      /* At this point hipart{0,1} are both in [-1, 0].  If they are
		 the same, overflow happened if res is non-positive; if they
		 are different, overflow happened if res is positive.  */
	      if (op0_sign != 1 && op1_sign != 1 && op0_sign != op1_sign)
		emit_jump (hipart_different);
	      else if (op0_sign == 1 || op1_sign == 1)
		do_compare_rtx_and_jump (hipart0, hipart1, NE, true, hmode,
					 NULL_RTX, NULL, hipart_different,
					 profile_probability::even ());

	      do_compare_rtx_and_jump (res, const0_rtx, LE, false, mode,
				       NULL_RTX, NULL, do_error,
				       profile_probability::very_unlikely ());
	      emit_jump (done_label);

	      emit_label (hipart_different);

	      do_compare_rtx_and_jump (res, const0_rtx, GE, false, mode,
				       NULL_RTX, NULL, do_error,
				       profile_probability::very_unlikely ());
	      emit_jump (done_label);
	    }

	  emit_label (do_overflow);

	  /* Overflow, do full multiplication and fallthru into do_error.  */
	  ops.op0 = make_tree (type, op0);
	  ops.op1 = make_tree (type, op1);
	  tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (res, tem);
	}
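      /* Illustrative sketch (not part of GCC) of the half-word
	 decomposition above, for 32-bit operands split into 16-bit
	 halves (the widths are assumptions made for the example).
	 Writing x = xh * 2^16 + xl and y = yh * 2^16 + yl:

	   x * y = (xh * yh) * 2^32 + (xh * yl + xl * yh) * 2^16
		   + xl * yl

	 When at least one operand fits in a half word (its high part is
	 just sign/zero extension of the low part), only xl * yl and
	 lopart * hipart need to be computed, each as a 16x16->32
	 widening multiply; the combined top half is then checked for a
	 carry out, and the full 32x32 multiply is needed only when both
	 operands are large.  */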
      else if (GET_MODE_2XWIDER_MODE (mode).exists (&wmode)
	       && targetm.scalar_mode_supported_p (wmode))
	/* Even emitting a libcall is better than not detecting overflow
	   at all.  */
	goto twoxwider;
      else
	{
	  gcc_assert (!is_ubsan);
	  ops.code = MULT_EXPR;
	  ops.type = type;
	  res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_jump (done_label);
	}
    }

 do_error_label:
  emit_label (do_error);
  if (is_ubsan)
    {
      /* Expand the ubsan builtin call.  */
      push_temp_slots ();
      fn = ubsan_build_overflow_builtin (MULT_EXPR, loc, TREE_TYPE (arg0),
					 arg0, arg1, datap);
      expand_normal (fn);
      pop_temp_slots ();
      do_pending_stack_adjust ();
    }
  else if (lhs)
    expand_arith_set_overflow (lhs, target);

  /* We're done.  */
  emit_label (done_label);

  /* u1 * u2 -> sr  */
  if (uns0_p && uns1_p && !unsr_p)
    {
      rtx_code_label *all_done_label = gen_label_rtx ();
      do_compare_rtx_and_jump (res, const0_rtx, GE, false, mode, NULL_RTX,
			       NULL, all_done_label,
			       profile_probability::very_likely ());
      expand_arith_set_overflow (lhs, target);
      emit_label (all_done_label);
    }

  /* s1 * u2 -> sr  */
  if (!uns0_p && uns1_p && !unsr_p && pos_neg1 == 3)
    {
      rtx_code_label *all_done_label = gen_label_rtx ();
      rtx_code_label *set_noovf = gen_label_rtx ();
      do_compare_rtx_and_jump (op1, const0_rtx, GE, false, mode, NULL_RTX,
			       NULL, all_done_label,
			       profile_probability::very_likely ());
      expand_arith_set_overflow (lhs, target);
      do_compare_rtx_and_jump (op0, const0_rtx, EQ, true, mode, NULL_RTX,
			       NULL, set_noovf,
			       profile_probability::very_likely ());
      do_compare_rtx_and_jump (op0, constm1_rtx, NE, true, mode, NULL_RTX,
			       NULL, all_done_label,
			       profile_probability::very_unlikely ());
      do_compare_rtx_and_jump (op1, res, NE, true, mode, NULL_RTX, NULL,
			       all_done_label,
			       profile_probability::very_unlikely ());
      emit_label (set_noovf);
      write_complex_part (target, const0_rtx, true);
      emit_label (all_done_label);
    }

  if (lhs)
    {
      if (is_ubsan)
	expand_ubsan_result_store (target, res);
      else
	expand_arith_overflow_result_store (lhs, target, mode, res);
    }
}

/* Expand UBSAN_CHECK_* internal function if it has vector operands.  */

static void
expand_vector_ubsan_overflow (location_t loc, enum tree_code code, tree lhs,
			      tree arg0, tree arg1)
{
  poly_uint64 cnt = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
  rtx_code_label *loop_lab = NULL;
  rtx cntvar = NULL_RTX;
  tree cntv = NULL_TREE;
  tree eltype = TREE_TYPE (TREE_TYPE (arg0));
  tree sz = TYPE_SIZE (eltype);
  tree data = NULL_TREE;
  tree resv = NULL_TREE;
  rtx lhsr = NULL_RTX;
  rtx resvr = NULL_RTX;
  unsigned HOST_WIDE_INT const_cnt = 0;
  bool use_loop_p = (!cnt.is_constant (&const_cnt) || const_cnt > 4);

  if (lhs)
    {
      optab op;
      lhsr = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (!VECTOR_MODE_P (GET_MODE (lhsr))
	  || (op = optab_for_tree_code (code, TREE_TYPE (arg0),
					optab_default)) == unknown_optab
	  || (optab_handler (op, TYPE_MODE (TREE_TYPE (arg0)))
	      == CODE_FOR_nothing))
	{
	  if (MEM_P (lhsr))
	    resv = make_tree (TREE_TYPE (lhs), lhsr);
	  else
	    {
	      resvr = assign_temp (TREE_TYPE (lhs), 1, 1);
	      resv = make_tree (TREE_TYPE (lhs), resvr);
	    }
	}
    }
  if (use_loop_p)
    {
      do_pending_stack_adjust ();
      loop_lab = gen_label_rtx ();
      cntvar = gen_reg_rtx (TYPE_MODE (sizetype));
      cntv = make_tree (sizetype, cntvar);
      emit_move_insn (cntvar, const0_rtx);
      emit_label (loop_lab);
    }
  if (TREE_CODE (arg0) != VECTOR_CST)
    {
      rtx arg0r = expand_normal (arg0);
      arg0 = make_tree (TREE_TYPE (arg0), arg0r);
    }
  if (TREE_CODE (arg1) != VECTOR_CST)
    {
      rtx arg1r = expand_normal (arg1);
      arg1 = make_tree (TREE_TYPE (arg1), arg1r);
    }
  for (unsigned int i = 0; i < (use_loop_p ? 1 : const_cnt); i++)
    {
      tree op0, op1, res = NULL_TREE;
      if (use_loop_p)
	{
	  tree atype = build_array_type_nelts (eltype, cnt);
	  op0 = uniform_vector_p (arg0);
	  if (op0 == NULL_TREE)
	    {
	      op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, atype, arg0);
	      op0 = build4_loc (loc, ARRAY_REF, eltype, op0, cntv,
				NULL_TREE, NULL_TREE);
	    }
	  op1 = uniform_vector_p (arg1);
	  if (op1 == NULL_TREE)
	    {
	      op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, atype, arg1);
	      op1 = build4_loc (loc, ARRAY_REF, eltype, op1, cntv,
				NULL_TREE, NULL_TREE);
	    }
	  if (resv)
	    {
	      res = fold_build1_loc (loc, VIEW_CONVERT_EXPR, atype, resv);
	      res = build4_loc (loc, ARRAY_REF, eltype, res, cntv,
				NULL_TREE, NULL_TREE);
	    }
	}
      else
	{
	  tree bitpos = bitsize_int (tree_to_uhwi (sz) * i);
	  op0 = fold_build3_loc (loc, BIT_FIELD_REF, eltype, arg0, sz, bitpos);
	  op1 = fold_build3_loc (loc, BIT_FIELD_REF, eltype, arg1, sz, bitpos);
	  if (resv)
	    res = fold_build3_loc (loc, BIT_FIELD_REF, eltype, resv, sz,
				   bitpos);
	}
      switch (code)
	{
	case PLUS_EXPR:
	  expand_addsub_overflow (loc, PLUS_EXPR, res, op0, op1,
				  false, false, false, true, &data);
	  break;
	case MINUS_EXPR:
	  if (use_loop_p ? integer_zerop (arg0) : integer_zerop (op0))
	    expand_neg_overflow (loc, res, op1, true, &data);
	  else
	    expand_addsub_overflow (loc, MINUS_EXPR, res, op0, op1,
				    false, false, false, true, &data);
	  break;
	case MULT_EXPR:
	  expand_mul_overflow (loc, res, op0, op1, false, false, false,
			       true, &data);
	  break;
	default:
	  gcc_unreachable ();
	}
    }
  if (use_loop_p)
    {
      struct separate_ops ops;
      ops.code = PLUS_EXPR;
      ops.type = TREE_TYPE (cntv);
      ops.op0 = cntv;
      ops.op1 = build_int_cst (TREE_TYPE (cntv), 1);
      ops.op2 = NULL_TREE;
      ops.location = loc;
      rtx ret = expand_expr_real_2 (&ops, cntvar, TYPE_MODE (sizetype),
				    EXPAND_NORMAL);
      if (ret != cntvar)
	emit_move_insn (cntvar, ret);
      rtx cntrtx = gen_int_mode (cnt, TYPE_MODE (sizetype));
      do_compare_rtx_and_jump (cntvar, cntrtx, NE, false,
			       TYPE_MODE (sizetype), NULL_RTX, NULL, loop_lab,
			       profile_probability::very_likely ());
    }
  if (lhs && resv == NULL_TREE)
    {
      struct separate_ops ops;
      ops.code = code;
      ops.type = TREE_TYPE (arg0);
      ops.op0 = arg0;
      ops.op1 = arg1;
      ops.op2 = NULL_TREE;
      ops.location = loc;
      rtx ret = expand_expr_real_2 (&ops, lhsr, TYPE_MODE (TREE_TYPE (arg0)),
				    EXPAND_NORMAL);
      if (ret != lhsr)
	emit_move_insn (lhsr, ret);
    }
  else if (resvr)
    emit_move_insn (lhsr, resvr);
}
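
/* Example (illustrative, not part of GCC): with
   -fsanitize=signed-integer-overflow, an addition of GCC
   vector-extension values such as the one below is instrumented as an
   IFN_UBSAN_CHECK_ADD on whole vectors, which the function above then
   expands element by element (unrolled when the element count is a
   constant not above 4, otherwise as a loop).  */

typedef int v4si __attribute__ ((vector_size (16)));

v4si
add_v4si (v4si a, v4si b)
{
  return a + b;	/* becomes .UBSAN_CHECK_ADD (a, b) when instrumented */
}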

/* Expand UBSAN_CHECK_ADD call STMT.  */

static void
expand_UBSAN_CHECK_ADD (internal_fn, gcall *stmt)
{
  location_t loc = gimple_location (stmt);
  tree lhs = gimple_call_lhs (stmt);
  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  if (VECTOR_TYPE_P (TREE_TYPE (arg0)))
    expand_vector_ubsan_overflow (loc, PLUS_EXPR, lhs, arg0, arg1);
  else
    expand_addsub_overflow (loc, PLUS_EXPR, lhs, arg0, arg1,
			    false, false, false, true, NULL);
}

/* Expand UBSAN_CHECK_SUB call STMT.  */

static void
expand_UBSAN_CHECK_SUB (internal_fn, gcall *stmt)
{
  location_t loc = gimple_location (stmt);
  tree lhs = gimple_call_lhs (stmt);
  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  if (VECTOR_TYPE_P (TREE_TYPE (arg0)))
    expand_vector_ubsan_overflow (loc, MINUS_EXPR, lhs, arg0, arg1);
  else if (integer_zerop (arg0))
    expand_neg_overflow (loc, lhs, arg1, true, NULL);
  else
    expand_addsub_overflow (loc, MINUS_EXPR, lhs, arg0, arg1,
			    false, false, false, true, NULL);
}

/* Expand UBSAN_CHECK_MUL call STMT.  */

static void
expand_UBSAN_CHECK_MUL (internal_fn, gcall *stmt)
{
  location_t loc = gimple_location (stmt);
  tree lhs = gimple_call_lhs (stmt);
  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  if (VECTOR_TYPE_P (TREE_TYPE (arg0)))
    expand_vector_ubsan_overflow (loc, MULT_EXPR, lhs, arg0, arg1);
  else
    expand_mul_overflow (loc, lhs, arg0, arg1, false, false, false, true,
			 NULL);
}

/* Helper function for {ADD,SUB,MUL}_OVERFLOW call stmt expansion.  */

static void
expand_arith_overflow (enum tree_code code, gimple *stmt)
{
  tree lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    return;
  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  tree type = TREE_TYPE (TREE_TYPE (lhs));
  int uns0_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  int uns1_p = TYPE_UNSIGNED (TREE_TYPE (arg1));
  int unsr_p = TYPE_UNSIGNED (type);
  int prec0 = TYPE_PRECISION (TREE_TYPE (arg0));
  int prec1 = TYPE_PRECISION (TREE_TYPE (arg1));
  int precres = TYPE_PRECISION (type);
  location_t loc = gimple_location (stmt);
  if (!uns0_p && get_range_pos_neg (arg0) == 1)
    uns0_p = true;
  if (!uns1_p && get_range_pos_neg (arg1) == 1)
    uns1_p = true;
  int pr = get_min_precision (arg0, uns0_p ? UNSIGNED : SIGNED);
  prec0 = MIN (prec0, pr);
  pr = get_min_precision (arg1, uns1_p ? UNSIGNED : SIGNED);
  prec1 = MIN (prec1, pr);

  /* If uns0_p && uns1_p, precop is the minimum precision of an unsigned
     type needed to hold the exact result; otherwise it is the minimum
     precision of a signed type needed to hold the exact result.  */
  int precop;
  if (code == MULT_EXPR)
    precop = prec0 + prec1 + (uns0_p != uns1_p);
  else
    {
      if (uns0_p == uns1_p)
	precop = MAX (prec0, prec1) + 1;
      else if (uns0_p)
	precop = MAX (prec0 + 1, prec1) + 1;
      else
	precop = MAX (prec0, prec1 + 1) + 1;
    }
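
  /* Worked example (illustrative): for unsigned char operands and an
     int result of a MULT_EXPR, prec0 = prec1 = 8 and uns0_p == uns1_p,
     so precop = 16.  Since precres = 32 satisfies
     precop + !unsr_p = 17 <= 32, the loop below computes the product
     directly in the result type and the overflow flag is simply 0.  */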
  int orig_precres = precres;

  do
    {
      if ((uns0_p && uns1_p)
	  ? ((precop + !unsr_p) <= precres
	     /* u1 - u2 -> ur can overflow, no matter what precision
		the result has.  */
	     && (code != MINUS_EXPR || !unsr_p))
	  : (!unsr_p && precop <= precres))
	{
	  /* The infinite precision result will always fit into the
	     result type.  */
	  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
	  write_complex_part (target, const0_rtx, true);
	  scalar_int_mode mode = SCALAR_INT_TYPE_MODE (type);
	  struct separate_ops ops;
	  ops.code = code;
	  ops.type = type;
	  ops.op0 = fold_convert_loc (loc, type, arg0);
	  ops.op1 = fold_convert_loc (loc, type, arg1);
	  ops.op2 = NULL_TREE;
	  ops.location = loc;
	  rtx tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  expand_arith_overflow_result_store (lhs, target, mode, tem);
	  return;
	}

      /* For operations with low precision, if the target doesn't have
	 them, start with precres widening right away, otherwise do it
	 only if the simplest cases can't be used.  */
      const int min_precision = targetm.min_arithmetic_precision ();
      if (orig_precres == precres && precres < min_precision)
	;
      else if ((uns0_p && uns1_p && unsr_p && prec0 <= precres
		&& prec1 <= precres)
	       || ((!uns0_p || !uns1_p) && !unsr_p
		   && prec0 + uns0_p <= precres
		   && prec1 + uns1_p <= precres))
	{
	  arg0 = fold_convert_loc (loc, type, arg0);
	  arg1 = fold_convert_loc (loc, type, arg1);
	  switch (code)
	    {
	    case MINUS_EXPR:
	      if (integer_zerop (arg0) && !unsr_p)
		{
		  expand_neg_overflow (loc, lhs, arg1, false, NULL);
		  return;
		}
	      /* FALLTHRU */
	    case PLUS_EXPR:
	      expand_addsub_overflow (loc, code, lhs, arg0, arg1, unsr_p,
				      unsr_p, unsr_p, false, NULL);
	      return;
	    case MULT_EXPR:
	      expand_mul_overflow (loc, lhs, arg0, arg1, unsr_p,
				   unsr_p, unsr_p, false, NULL);
	      return;
	    default:
	      gcc_unreachable ();
	    }
	}

      /* For sub-word operations, retry with a wider type first.  */
      if (orig_precres == precres && precop <= BITS_PER_WORD)
	{
	  int p = MAX (min_precision, precop);
	  scalar_int_mode m = smallest_int_mode_for_size (p);
	  tree optype = build_nonstandard_integer_type (GET_MODE_PRECISION (m),
							uns0_p && uns1_p
							&& unsr_p);
	  p = TYPE_PRECISION (optype);
	  if (p > precres)
	    {
	      precres = p;
	      unsr_p = TYPE_UNSIGNED (optype);
	      type = optype;
	      continue;
	    }
	}

      if (prec0 <= precres && prec1 <= precres)
	{
	  tree types[2];
	  if (unsr_p)
	    {
	      types[0] = build_nonstandard_integer_type (precres, 0);
	      types[1] = type;
	    }
	  else
	    {
	      types[0] = type;
	      types[1] = build_nonstandard_integer_type (precres, 1);
	    }
	  arg0 = fold_convert_loc (loc, types[uns0_p], arg0);
	  arg1 = fold_convert_loc (loc, types[uns1_p], arg1);
	  if (code != MULT_EXPR)
	    expand_addsub_overflow (loc, code, lhs, arg0, arg1, unsr_p,
				    uns0_p, uns1_p, false, NULL);
	  else
	    expand_mul_overflow (loc, lhs, arg0, arg1, unsr_p,
				 uns0_p, uns1_p, false, NULL);
	  return;
	}

      /* Retry with a wider type.  */
      if (orig_precres == precres)
	{
	  int p = MAX (prec0, prec1);
	  scalar_int_mode m = smallest_int_mode_for_size (p);
	  tree optype = build_nonstandard_integer_type (GET_MODE_PRECISION (m),
							uns0_p && uns1_p
							&& unsr_p);
	  p = TYPE_PRECISION (optype);
	  if (p > precres)
	    {
	      precres = p;
	      unsr_p = TYPE_UNSIGNED (optype);
	      type = optype;
	      continue;
	    }
	}

      gcc_unreachable ();
    }
  while (1);
}

/* Expand ADD_OVERFLOW STMT.  */

static void
expand_ADD_OVERFLOW (internal_fn, gcall *stmt)
{
  expand_arith_overflow (PLUS_EXPR, stmt);
}

/* Expand SUB_OVERFLOW STMT.  */

static void
expand_SUB_OVERFLOW (internal_fn, gcall *stmt)
{
  expand_arith_overflow (MINUS_EXPR, stmt);
}

/* Expand MUL_OVERFLOW STMT.  */

static void
expand_MUL_OVERFLOW (internal_fn, gcall *stmt)
{
  expand_arith_overflow (MULT_EXPR, stmt);
}
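
/* Example (illustrative, not part of GCC): the expanders above
   implement the __builtin_{add,sub,mul}_overflow builtins.  A call
   such as the one below is gimplified to
     _1 = .ADD_OVERFLOW (a, b);
   where the real part of the complex result is the sum and the
   imaginary part is the overflow flag.  */

int
checked_add (int a, int b, int *sum)
{
  return __builtin_add_overflow (a, b, sum);
}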

/* This should get folded in tree-vectorizer.c.  */

static void
expand_LOOP_VECTORIZED (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This should get folded in tree-vectorizer.c.  */

static void
expand_LOOP_DIST_ALIAS (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* Return a memory reference of type TYPE for argument INDEX of STMT.
   Use argument INDEX + 1 to derive the second (TBAA) operand.  */

static tree
expand_call_mem_ref (tree type, gcall *stmt, int index)
{
  tree addr = gimple_call_arg (stmt, index);
  tree alias_ptr_type = TREE_TYPE (gimple_call_arg (stmt, index + 1));
  unsigned int align = tree_to_shwi (gimple_call_arg (stmt, index + 1));
  if (TYPE_ALIGN (type) != align)
    type = build_aligned_type (type, align);

  tree tmp = addr;
  if (TREE_CODE (tmp) == SSA_NAME)
    {
      gimple *def = SSA_NAME_DEF_STMT (tmp);
      if (gimple_assign_single_p (def))
	tmp = gimple_assign_rhs1 (def);
    }

  if (TREE_CODE (tmp) == ADDR_EXPR)
    {
      tree mem = TREE_OPERAND (tmp, 0);
      if (TREE_CODE (mem) == TARGET_MEM_REF
	  && types_compatible_p (TREE_TYPE (mem), type))
	{
	  tree offset = TMR_OFFSET (mem);
	  if (type != TREE_TYPE (mem)
	      || alias_ptr_type != TREE_TYPE (offset)
	      || !integer_zerop (offset))
	    {
	      mem = copy_node (mem);
	      TMR_OFFSET (mem) = wide_int_to_tree (alias_ptr_type,
						   wi::to_poly_wide (offset));
	      TREE_TYPE (mem) = type;
	    }
	  return mem;
	}
    }

  return fold_build2 (MEM_REF, type, addr, build_int_cst (alias_ptr_type, 0));
}
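
/* Example (illustrative): for a call such as
     .MASK_LOAD (&a[i_1], 32B, mask_2)
   the function above strips the ADDR_EXPR and reuses the underlying
   TARGET_MEM_REF directly when its type matches, rewriting the offset
   to the recorded alias pointer type; otherwise it falls back to
   building a plain MEM_REF from the address.  */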

/* Expand MASK_LOAD{,_LANES} call STMT using optab OPTAB.  */

static void
expand_mask_load_optab_fn (internal_fn, gcall *stmt, convert_optab optab)
{
  class expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, target, mask;
  insn_code icode;

  maskt = gimple_call_arg (stmt, 2);
  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    return;
  type = TREE_TYPE (lhs);
  rhs = expand_call_mem_ref (type, stmt, 0);

  if (optab == vec_mask_load_lanes_optab)
    icode = get_multi_vector_move (type, optab);
  else
    icode = convert_optab_handler (optab, TYPE_MODE (type),
				   TYPE_MODE (TREE_TYPE (maskt)));

  mem = expand_expr (rhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (icode, 3, ops);
  if (!rtx_equal_p (target, ops[0].value))
    emit_move_insn (target, ops[0].value);
}
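
/* Example (illustrative): the vectorizer and if-conversion emit gimple
   such as
     vect__1 = .MASK_LOAD (ptr_2, 32B, mask_3);
   for conditional loads; the function above expands it through the
   target's maskload (or vec_mask_load_lanes) pattern, with alignment
   and TBAA information taken from the pointer arguments.  */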

#define expand_mask_load_lanes_optab_fn expand_mask_load_optab_fn

/* Expand MASK_STORE{,_LANES} call STMT using optab OPTAB.  */

static void
expand_mask_store_optab_fn (internal_fn, gcall *stmt, convert_optab optab)
{
  class expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, reg, mask;
  insn_code icode;

  maskt = gimple_call_arg (stmt, 2);
  rhs = gimple_call_arg (stmt, 3);
  type = TREE_TYPE (rhs);
  lhs = expand_call_mem_ref (type, stmt, 0);

  if (optab == vec_mask_store_lanes_optab)
    icode = get_multi_vector_move (type, optab);
  else
    icode = convert_optab_handler (optab, TYPE_MODE (type),
				   TYPE_MODE (TREE_TYPE (maskt)));

  mem = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  reg = expand_normal (rhs);
  create_fixed_operand (&ops[0], mem);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (icode, 3, ops);
}

#define expand_mask_store_lanes_optab_fn expand_mask_store_optab_fn

/* Expand VCOND, VCONDU and VCONDEQ optab internal functions;
   STMT is expanded using its associated OPTAB.  */

static void
expand_vect_cond_optab_fn (internal_fn, gcall *stmt, convert_optab optab)
{
  class expand_operand ops[6];
  insn_code icode;
  tree lhs = gimple_call_lhs (stmt);
  tree op0a = gimple_call_arg (stmt, 0);
  tree op0b = gimple_call_arg (stmt, 1);
  tree op1 = gimple_call_arg (stmt, 2);
  tree op2 = gimple_call_arg (stmt, 3);
  enum tree_code tcode = (tree_code) int_cst_value (gimple_call_arg (stmt, 4));

  tree vec_cond_type = TREE_TYPE (lhs);
  tree op_mode = TREE_TYPE (op0a);
  bool unsignedp = TYPE_UNSIGNED (op_mode);

  machine_mode mode = TYPE_MODE (vec_cond_type);
  machine_mode cmp_op_mode = TYPE_MODE (op_mode);

  icode = convert_optab_handler (optab, mode, cmp_op_mode);
  rtx comparison
    = vector_compare_rtx (VOIDmode, tcode, op0a, op0b, unsignedp, icode, 4);
  rtx rtx_op1 = expand_normal (op1);
  rtx rtx_op2 = expand_normal (op2);

  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  create_output_operand (&ops[0], target, mode);
  create_input_operand (&ops[1], rtx_op1, mode);
  create_input_operand (&ops[2], rtx_op2, mode);
  create_fixed_operand (&ops[3], comparison);
  create_fixed_operand (&ops[4], XEXP (comparison, 0));
  create_fixed_operand (&ops[5], XEXP (comparison, 1));
  expand_insn (icode, 6, ops);
  if (!rtx_equal_p (ops[0].value, target))
    emit_move_insn (target, ops[0].value);
}

#define expand_vec_cond_optab_fn expand_vect_cond_optab_fn
#define expand_vec_condu_optab_fn expand_vect_cond_optab_fn
#define expand_vec_condeq_optab_fn expand_vect_cond_optab_fn

/* Expand VCOND_MASK optab internal function;
   STMT is expanded using its associated OPTAB.  */

static void
expand_vect_cond_mask_optab_fn (internal_fn, gcall *stmt, convert_optab optab)
{
  class expand_operand ops[4];

  tree lhs = gimple_call_lhs (stmt);
  tree op0 = gimple_call_arg (stmt, 0);
  tree op1 = gimple_call_arg (stmt, 1);
  tree op2 = gimple_call_arg (stmt, 2);
  tree vec_cond_type = TREE_TYPE (lhs);

  machine_mode mode = TYPE_MODE (vec_cond_type);
  machine_mode mask_mode = TYPE_MODE (TREE_TYPE (op0));
  enum insn_code icode = convert_optab_handler (optab, mode, mask_mode);
  rtx mask, rtx_op1, rtx_op2;

  gcc_assert (icode != CODE_FOR_nothing);

  mask = expand_normal (op0);
  rtx_op1 = expand_normal (op1);
  rtx_op2 = expand_normal (op2);

  mask = force_reg (mask_mode, mask);
  rtx_op1 = force_reg (GET_MODE (rtx_op1), rtx_op1);

  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  create_output_operand (&ops[0], target, mode);
  create_input_operand (&ops[1], rtx_op1, mode);
  create_input_operand (&ops[2], rtx_op2, mode);
  create_input_operand (&ops[3], mask, mask_mode);
  expand_insn (icode, 4, ops);
  if (!rtx_equal_p (ops[0].value, target))
    emit_move_insn (target, ops[0].value);
}

#define expand_vec_cond_mask_optab_fn expand_vect_cond_mask_optab_fn

static void
expand_ABNORMAL_DISPATCHER (internal_fn, gcall *)
{
}

static void
expand_BUILTIN_EXPECT (internal_fn, gcall *stmt)
{
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob || optimize == 0 || seen_error ());

  rtx target;
  tree lhs = gimple_call_lhs (stmt);
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  else
    target = const0_rtx;
  rtx val = expand_expr (gimple_call_arg (stmt, 0), target, VOIDmode,
			 EXPAND_NORMAL);
  if (lhs && val != target)
    emit_move_insn (target, val);
}

/* IFN_VA_ARG is supposed to be expanded at pass_stdarg.  So this dummy
   function should never be called.  */

static void
expand_VA_ARG (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* IFN_VEC_CONVERT is supposed to be expanded at pass_lower_vector.  So this
   dummy function should never be called.  */

static void
expand_VEC_CONVERT (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* Expand the IFN_UNIQUE function according to its first argument.  */

static void
expand_UNIQUE (internal_fn, gcall *stmt)
{
  rtx pattern = NULL_RTX;
  enum ifn_unique_kind kind
    = (enum ifn_unique_kind) TREE_INT_CST_LOW (gimple_call_arg (stmt, 0));

  switch (kind)
    {
    default:
      gcc_unreachable ();

    case IFN_UNIQUE_UNSPEC:
      if (targetm.have_unique ())
	pattern = targetm.gen_unique ();
      break;

    case IFN_UNIQUE_OACC_FORK:
    case IFN_UNIQUE_OACC_JOIN:
      if (targetm.have_oacc_fork () && targetm.have_oacc_join ())
	{
	  tree lhs = gimple_call_lhs (stmt);
	  rtx target = const0_rtx;

	  if (lhs)
	    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

	  rtx data_dep = expand_normal (gimple_call_arg (stmt, 1));
	  rtx axis = expand_normal (gimple_call_arg (stmt, 2));

	  if (kind == IFN_UNIQUE_OACC_FORK)
	    pattern = targetm.gen_oacc_fork (target, data_dep, axis);
	  else
	    pattern = targetm.gen_oacc_join (target, data_dep, axis);
	}
      else
	gcc_unreachable ();
      break;
    }

  if (pattern)
    emit_insn (pattern);
}

/* The size of an OpenACC compute dimension.  */

static void
expand_GOACC_DIM_SIZE (internal_fn, gcall *stmt)
{
  tree lhs = gimple_call_lhs (stmt);

  if (!lhs)
    return;

  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  if (targetm.have_oacc_dim_size ())
    {
      rtx dim = expand_expr (gimple_call_arg (stmt, 0), NULL_RTX,
			     VOIDmode, EXPAND_NORMAL);
      emit_insn (targetm.gen_oacc_dim_size (target, dim));
    }
  else
    emit_move_insn (target, GEN_INT (1));
}

/* The position of an OpenACC execution engine along one compute axis.  */

static void
expand_GOACC_DIM_POS (internal_fn, gcall *stmt)
{
  tree lhs = gimple_call_lhs (stmt);

  if (!lhs)
    return;

  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  if (targetm.have_oacc_dim_pos ())
    {
      rtx dim = expand_expr (gimple_call_arg (stmt, 0), NULL_RTX,
			     VOIDmode, EXPAND_NORMAL);
      emit_insn (targetm.gen_oacc_dim_pos (target, dim));
    }
  else
    emit_move_insn (target, const0_rtx);
}

/* This is expanded by oacc_device_lower pass.  */

static void
expand_GOACC_LOOP (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This is expanded by oacc_device_lower pass.  */

static void
expand_GOACC_REDUCTION (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This is expanded by oacc_device_lower pass.  */

static void
expand_GOACC_TILE (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* Set errno to EDOM.  */

static void
expand_SET_EDOM (internal_fn, gcall *)
{
#ifdef TARGET_EDOM
#ifdef GEN_ERRNO_RTX
  rtx errno_rtx = GEN_ERRNO_RTX;
#else
  rtx errno_rtx = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
  emit_move_insn (errno_rtx,
		  gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
#else
  gcc_unreachable ();
#endif
}

/* Expand atomic bit test and set.  */

static void
expand_ATOMIC_BIT_TEST_AND_SET (internal_fn, gcall *call)
{
  expand_ifn_atomic_bit_test_and (call);
}

/* Expand atomic bit test and complement.  */

static void
expand_ATOMIC_BIT_TEST_AND_COMPLEMENT (internal_fn, gcall *call)
{
  expand_ifn_atomic_bit_test_and (call);
}

/* Expand atomic bit test and reset.  */

static void
expand_ATOMIC_BIT_TEST_AND_RESET (internal_fn, gcall *call)
{
  expand_ifn_atomic_bit_test_and (call);
}

/* Expand atomic compare and exchange.  */

static void
expand_ATOMIC_COMPARE_EXCHANGE (internal_fn, gcall *call)
{
  expand_ifn_atomic_compare_exchange (call);
}

/* Expand LAUNDER to assignment, lhs = arg0.  */

static void
expand_LAUNDER (internal_fn, gcall *call)
{
  tree lhs = gimple_call_lhs (call);

  if (!lhs)
    return;

  expand_assignment (lhs, gimple_call_arg (call, 0), false);
}

/* Expand {MASK_,}SCATTER_STORE{S,U} call CALL using optab OPTAB.  */

static void
expand_scatter_store_optab_fn (internal_fn, gcall *stmt, direct_optab optab)
{
  internal_fn ifn = gimple_call_internal_fn (stmt);
  int rhs_index = internal_fn_stored_value_index (ifn);
  int mask_index = internal_fn_mask_index (ifn);
  tree base = gimple_call_arg (stmt, 0);
  tree offset = gimple_call_arg (stmt, 1);
  tree scale = gimple_call_arg (stmt, 2);
  tree rhs = gimple_call_arg (stmt, rhs_index);

  rtx base_rtx = expand_normal (base);
  rtx offset_rtx = expand_normal (offset);
  HOST_WIDE_INT scale_int = tree_to_shwi (scale);
  rtx rhs_rtx = expand_normal (rhs);

  class expand_operand ops[6];
  int i = 0;
  create_address_operand (&ops[i++], base_rtx);
  create_input_operand (&ops[i++], offset_rtx, TYPE_MODE (TREE_TYPE (offset)));
  create_integer_operand (&ops[i++], TYPE_UNSIGNED (TREE_TYPE (offset)));
  create_integer_operand (&ops[i++], scale_int);
  create_input_operand (&ops[i++], rhs_rtx, TYPE_MODE (TREE_TYPE (rhs)));
  if (mask_index >= 0)
    {
      tree mask = gimple_call_arg (stmt, mask_index);
      rtx mask_rtx = expand_normal (mask);
      create_input_operand (&ops[i++], mask_rtx, TYPE_MODE (TREE_TYPE (mask)));
    }

  insn_code icode = convert_optab_handler (optab, TYPE_MODE (TREE_TYPE (rhs)),
					   TYPE_MODE (TREE_TYPE (offset)));
  expand_insn (icode, i, ops);
}

/* Expand {MASK_,}GATHER_LOAD call CALL using optab OPTAB.  */

static void
expand_gather_load_optab_fn (internal_fn, gcall *stmt, direct_optab optab)
{
  tree lhs = gimple_call_lhs (stmt);
  tree base = gimple_call_arg (stmt, 0);
  tree offset = gimple_call_arg (stmt, 1);
  tree scale = gimple_call_arg (stmt, 2);

  rtx lhs_rtx = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  rtx base_rtx = expand_normal (base);
  rtx offset_rtx = expand_normal (offset);
  HOST_WIDE_INT scale_int = tree_to_shwi (scale);

  int i = 0;
  class expand_operand ops[6];
  create_output_operand (&ops[i++], lhs_rtx, TYPE_MODE (TREE_TYPE (lhs)));
  create_address_operand (&ops[i++], base_rtx);
  create_input_operand (&ops[i++], offset_rtx, TYPE_MODE (TREE_TYPE (offset)));
  create_integer_operand (&ops[i++], TYPE_UNSIGNED (TREE_TYPE (offset)));
  create_integer_operand (&ops[i++], scale_int);
  if (optab == mask_gather_load_optab)
    {
      tree mask = gimple_call_arg (stmt, 4);
      rtx mask_rtx = expand_normal (mask);
      create_input_operand (&ops[i++], mask_rtx, TYPE_MODE (TREE_TYPE (mask)));
    }
  insn_code icode = convert_optab_handler (optab, TYPE_MODE (TREE_TYPE (lhs)),
					   TYPE_MODE (TREE_TYPE (offset)));
  expand_insn (icode, i, ops);
  if (!rtx_equal_p (lhs_rtx, ops[0].value))
    emit_move_insn (lhs_rtx, ops[0].value);
}

/* Expand DIVMOD () using:
   a) optab handler for udivmod/sdivmod if it is available.
   b) If optab_handler doesn't exist, generate call to
      target-specific divmod libfunc.  */

static void
expand_DIVMOD (internal_fn, gcall *call_stmt)
{
  tree lhs = gimple_call_lhs (call_stmt);
  tree arg0 = gimple_call_arg (call_stmt, 0);
  tree arg1 = gimple_call_arg (call_stmt, 1);

  gcc_assert (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE);
  tree type = TREE_TYPE (TREE_TYPE (lhs));
  machine_mode mode = TYPE_MODE (type);
  bool unsignedp = TYPE_UNSIGNED (type);
  optab tab = (unsignedp) ? udivmod_optab : sdivmod_optab;

  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  rtx quotient, remainder, libfunc;

  /* Check if optab_handler exists for divmod_optab for given mode.  */
  if (optab_handler (tab, mode) != CODE_FOR_nothing)
    {
      quotient = gen_reg_rtx (mode);
      remainder = gen_reg_rtx (mode);
      expand_twoval_binop (tab, op0, op1, quotient, remainder, unsignedp);
    }

  /* Generate call to divmod libfunc if it exists.  */
  else if ((libfunc = optab_libfunc (tab, mode)) != NULL_RTX)
    targetm.expand_divmod_libfunc (libfunc, mode, op0, op1,
				   &quotient, &remainder);

  else
    gcc_unreachable ();

  /* Wrap the return value (quotient, remainder) within COMPLEX_EXPR.  */
  expand_expr (build2 (COMPLEX_EXPR, TREE_TYPE (lhs),
		       make_tree (TREE_TYPE (arg0), quotient),
		       make_tree (TREE_TYPE (arg1), remainder)),
	       target, VOIDmode, EXPAND_NORMAL);
}
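
/* Example (illustrative, not part of GCC): when a function computes
   both the quotient and the remainder of the same operands, the
   widening_mul pass can merge the two operations into a single
     _1 = .DIVMOD (a, b);
   call, which the expander above maps to a udivmod/sdivmod insn or a
   target divmod libfunc.  */

int
quot_rem (int a, int b, int *rem)
{
  *rem = a % b;
  return a / b;
}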

/* Expand a NOP.  */

static void
expand_NOP (internal_fn, gcall *)
{
  /* Nothing.  But it shouldn't really prevail.  */
}

/* Coroutines, all should have been processed at this stage.  */

static void
expand_CO_FRAME (internal_fn, gcall *)
{
  gcc_unreachable ();
}

static void
expand_CO_YIELD (internal_fn, gcall *)
{
  gcc_unreachable ();
}

static void
expand_CO_SUSPN (internal_fn, gcall *)
{
  gcc_unreachable ();
}

static void
expand_CO_ACTOR (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* Expand a call to FN using the operands in STMT.  FN has a single
   output operand and NARGS input operands.  */

static void
expand_direct_optab_fn (internal_fn fn, gcall *stmt, direct_optab optab,
			unsigned int nargs)
{
  expand_operand *ops = XALLOCAVEC (expand_operand, nargs + 1);

  tree_pair types = direct_internal_fn_types (fn, stmt);
  insn_code icode = direct_optab_handler (optab, TYPE_MODE (types.first));
  gcc_assert (icode != CODE_FOR_nothing);

  tree lhs = gimple_call_lhs (stmt);
  rtx lhs_rtx = NULL_RTX;
  if (lhs)
    lhs_rtx = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* Do not assign directly to a promoted subreg, since there is no
     guarantee that the instruction will leave the upper bits of the
     register in the state required by SUBREG_PROMOTED_SIGN.  */
  rtx dest = lhs_rtx;
  if (dest && GET_CODE (dest) == SUBREG && SUBREG_PROMOTED_VAR_P (dest))
    dest = NULL_RTX;

  create_output_operand (&ops[0], dest, insn_data[icode].operand[0].mode);

  for (unsigned int i = 0; i < nargs; ++i)
    {
      tree rhs = gimple_call_arg (stmt, i);
      tree rhs_type = TREE_TYPE (rhs);
      rtx rhs_rtx = expand_normal (rhs);
      if (INTEGRAL_TYPE_P (rhs_type))
	create_convert_operand_from (&ops[i + 1], rhs_rtx,
				     TYPE_MODE (rhs_type),
				     TYPE_UNSIGNED (rhs_type));
      else
	create_input_operand (&ops[i + 1], rhs_rtx, TYPE_MODE (rhs_type));
    }

  expand_insn (icode, nargs + 1, ops);
  if (lhs_rtx && !rtx_equal_p (lhs_rtx, ops[0].value))
    {
      /* If the return value has an integral type, convert the instruction
	 result to that type.  This is useful for things that return an
	 int regardless of the size of the input.  If the instruction result
	 is smaller than required, assume that it is signed.

	 If the return value has a nonintegral type, its mode must match
	 the instruction result.  */
      if (GET_CODE (lhs_rtx) == SUBREG && SUBREG_PROMOTED_VAR_P (lhs_rtx))
	{
	  /* If this is a scalar in a register that is stored in a wider
	     mode than the declared mode, compute the result into its
	     declared mode and then convert to the wider mode.  */
	  gcc_checking_assert (INTEGRAL_TYPE_P (TREE_TYPE (lhs)));
	  rtx tmp = convert_to_mode (GET_MODE (lhs_rtx), ops[0].value, 0);
	  convert_move (SUBREG_REG (lhs_rtx), tmp,
			SUBREG_PROMOTED_SIGN (lhs_rtx));
	}
      else if (GET_MODE (lhs_rtx) == GET_MODE (ops[0].value))
	emit_move_insn (lhs_rtx, ops[0].value);
      else
	{
	  gcc_checking_assert (INTEGRAL_TYPE_P (TREE_TYPE (lhs)));
	  convert_move (lhs_rtx, ops[0].value, 0);
	}
    }
}

/* Expand WHILE_ULT call STMT using optab OPTAB.  */

static void
expand_while_optab_fn (internal_fn, gcall *stmt, convert_optab optab)
{
  expand_operand ops[3];
  tree rhs_type[2];

  tree lhs = gimple_call_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  rtx lhs_rtx = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  create_output_operand (&ops[0], lhs_rtx, TYPE_MODE (lhs_type));

  for (unsigned int i = 0; i < 2; ++i)
    {
      tree rhs = gimple_call_arg (stmt, i);
      rhs_type[i] = TREE_TYPE (rhs);
      rtx rhs_rtx = expand_normal (rhs);
      create_input_operand (&ops[i + 1], rhs_rtx, TYPE_MODE (rhs_type[i]));
    }

  insn_code icode = convert_optab_handler (optab, TYPE_MODE (rhs_type[0]),
					   TYPE_MODE (lhs_type));

  expand_insn (icode, 3, ops);
  if (!rtx_equal_p (lhs_rtx, ops[0].value))
    emit_move_insn (lhs_rtx, ops[0].value);
}
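
/* Illustrative semantics (not part of GCC): WHILE_ULT (a, b) produces
   a vector mask whose element I is true iff a + I < b, roughly

     for (i = 0; i < nunits; i++)
       mask[i] = (a + i < b);

   which the vectorizer uses to build fully-masked loops on targets
   such as SVE.  */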

/* Expanders for optabs that can use expand_direct_optab_fn.  */

#define expand_unary_optab_fn(FN, STMT, OPTAB) \
  expand_direct_optab_fn (FN, STMT, OPTAB, 1)

#define expand_binary_optab_fn(FN, STMT, OPTAB) \
  expand_direct_optab_fn (FN, STMT, OPTAB, 2)

#define expand_ternary_optab_fn(FN, STMT, OPTAB) \
  expand_direct_optab_fn (FN, STMT, OPTAB, 3)

#define expand_cond_unary_optab_fn(FN, STMT, OPTAB) \
  expand_direct_optab_fn (FN, STMT, OPTAB, 3)

#define expand_cond_binary_optab_fn(FN, STMT, OPTAB) \
  expand_direct_optab_fn (FN, STMT, OPTAB, 4)

#define expand_cond_ternary_optab_fn(FN, STMT, OPTAB) \
  expand_direct_optab_fn (FN, STMT, OPTAB, 5)

#define expand_fold_extract_optab_fn(FN, STMT, OPTAB) \
  expand_direct_optab_fn (FN, STMT, OPTAB, 3)

#define expand_fold_left_optab_fn(FN, STMT, OPTAB) \
  expand_direct_optab_fn (FN, STMT, OPTAB, 2)

#define expand_mask_fold_left_optab_fn(FN, STMT, OPTAB) \
  expand_direct_optab_fn (FN, STMT, OPTAB, 3)

#define expand_check_ptrs_optab_fn(FN, STMT, OPTAB) \
  expand_direct_optab_fn (FN, STMT, OPTAB, 4)

/* RETURN_TYPE and ARGS are a return type and argument list that are
   in principle compatible with FN (which satisfies direct_internal_fn_p).
   Return the types that should be used to determine whether the
   target supports FN.  */

tree_pair
direct_internal_fn_types (internal_fn fn, tree return_type, tree *args)
{
  const direct_internal_fn_info &info = direct_internal_fn (fn);
  tree type0 = (info.type0 < 0 ? return_type : TREE_TYPE (args[info.type0]));
  tree type1 = (info.type1 < 0 ? return_type : TREE_TYPE (args[info.type1]));
  return tree_pair (type0, type1);
}

/* CALL is a call whose return type and arguments are in principle
   compatible with FN (which satisfies direct_internal_fn_p).  Return the
   types that should be used to determine whether the target supports FN.  */

tree_pair
direct_internal_fn_types (internal_fn fn, gcall *call)
{
  const direct_internal_fn_info &info = direct_internal_fn (fn);
  tree op0 = (info.type0 < 0
	      ? gimple_call_lhs (call)
	      : gimple_call_arg (call, info.type0));
  tree op1 = (info.type1 < 0
	      ? gimple_call_lhs (call)
	      : gimple_call_arg (call, info.type1));
  return tree_pair (TREE_TYPE (op0), TREE_TYPE (op1));
}

/* Return true if OPTAB is supported for TYPES (whose modes should be
   the same) when the optimization type is OPT_TYPE.  Used for simple
   direct optabs.  */

static bool
direct_optab_supported_p (direct_optab optab, tree_pair types,
			  optimization_type opt_type)
{
  machine_mode mode = TYPE_MODE (types.first);
  gcc_checking_assert (mode == TYPE_MODE (types.second));
  return direct_optab_handler (optab, mode, opt_type) != CODE_FOR_nothing;
}

/* Return true if OPTAB is supported for TYPES, where the first type
   is the destination and the second type is the source.  Used for
   convert optabs.  */

static bool
convert_optab_supported_p (convert_optab optab, tree_pair types,
			   optimization_type opt_type)
{
  return (convert_optab_handler (optab, TYPE_MODE (types.first),
				 TYPE_MODE (types.second), opt_type)
	  != CODE_FOR_nothing);
}

/* Return true if load/store lanes optab OPTAB is supported for
   array type TYPES.first when the optimization type is OPT_TYPE.  */

static bool
multi_vector_optab_supported_p (convert_optab optab, tree_pair types,
				optimization_type opt_type)
{
  gcc_assert (TREE_CODE (types.first) == ARRAY_TYPE);
  machine_mode imode = TYPE_MODE (types.first);
  machine_mode vmode = TYPE_MODE (TREE_TYPE (types.first));
  return (convert_optab_handler (optab, imode, vmode, opt_type)
	  != CODE_FOR_nothing);
}

#define direct_unary_optab_supported_p direct_optab_supported_p
#define direct_binary_optab_supported_p direct_optab_supported_p
#define direct_ternary_optab_supported_p direct_optab_supported_p
#define direct_cond_unary_optab_supported_p direct_optab_supported_p
#define direct_cond_binary_optab_supported_p direct_optab_supported_p
#define direct_cond_ternary_optab_supported_p direct_optab_supported_p
#define direct_mask_load_optab_supported_p convert_optab_supported_p
#define direct_load_lanes_optab_supported_p multi_vector_optab_supported_p
#define direct_mask_load_lanes_optab_supported_p multi_vector_optab_supported_p
#define direct_gather_load_optab_supported_p convert_optab_supported_p
#define direct_mask_store_optab_supported_p convert_optab_supported_p
#define direct_store_lanes_optab_supported_p multi_vector_optab_supported_p
#define direct_mask_store_lanes_optab_supported_p multi_vector_optab_supported_p
#define direct_vec_cond_mask_optab_supported_p multi_vector_optab_supported_p
#define direct_vec_cond_optab_supported_p multi_vector_optab_supported_p
#define direct_vec_condu_optab_supported_p multi_vector_optab_supported_p
#define direct_vec_condeq_optab_supported_p multi_vector_optab_supported_p
#define direct_scatter_store_optab_supported_p convert_optab_supported_p
#define direct_while_optab_supported_p convert_optab_supported_p
#define direct_fold_extract_optab_supported_p direct_optab_supported_p
#define direct_fold_left_optab_supported_p direct_optab_supported_p
#define direct_mask_fold_left_optab_supported_p direct_optab_supported_p
#define direct_check_ptrs_optab_supported_p direct_optab_supported_p

/* Return the optab used by internal function FN, using TYPES to choose
   between the signed and unsigned optabs of signed-optab functions.  */

static optab
direct_internal_fn_optab (internal_fn fn, tree_pair types)
{
  switch (fn)
    {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) \
    case IFN_##CODE: break;
#define DEF_INTERNAL_OPTAB_FN(CODE, FLAGS, OPTAB, TYPE) \
    case IFN_##CODE: return OPTAB##_optab;
#define DEF_INTERNAL_SIGNED_OPTAB_FN(CODE, FLAGS, SELECTOR, SIGNED_OPTAB, \
				     UNSIGNED_OPTAB, TYPE) \
    case IFN_##CODE: return (TYPE_UNSIGNED (types.SELECTOR) \
			     ? UNSIGNED_OPTAB ## _optab \
			     : SIGNED_OPTAB ## _optab);
#include "internal-fn.def"

    case IFN_LAST:
      break;
    }
  gcc_unreachable ();
}
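
/* For example, IFN_AVG_FLOOR is defined via DEF_INTERNAL_SIGNED_OPTAB_FN
   in internal-fn.def, so the switch above picks between a signed and an
   unsigned optab based on the signedness of the selector type
   (a sketch, assuming CALL is some gcall to IFN_AVG_FLOOR):

     tree_pair types = direct_internal_fn_types (IFN_AVG_FLOOR, call);
     optab which = direct_internal_fn_optab (IFN_AVG_FLOOR, types);

   yielding the signed optab for signed element types and the unsigned
   optab otherwise.  */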

/* Return the optab used by internal function FN.  */

static optab
direct_internal_fn_optab (internal_fn fn)
{
  switch (fn)
    {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) \
    case IFN_##CODE: break;
#define DEF_INTERNAL_OPTAB_FN(CODE, FLAGS, OPTAB, TYPE) \
    case IFN_##CODE: return OPTAB##_optab;
#include "internal-fn.def"

    case IFN_LAST:
      break;
    }
  gcc_unreachable ();
}

/* Return true if FN is supported for the types in TYPES when the
   optimization type is OPT_TYPE.  The types are those associated with
   the "type0" and "type1" fields of FN's direct_internal_fn_info
   structure.  */

bool
direct_internal_fn_supported_p (internal_fn fn, tree_pair types,
				optimization_type opt_type)
{
  switch (fn)
    {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) \
    case IFN_##CODE: break;
#define DEF_INTERNAL_OPTAB_FN(CODE, FLAGS, OPTAB, TYPE) \
    case IFN_##CODE: \
      return direct_##TYPE##_optab_supported_p (OPTAB##_optab, types, \
						 opt_type);
#define DEF_INTERNAL_SIGNED_OPTAB_FN(CODE, FLAGS, SELECTOR, SIGNED_OPTAB, \
				     UNSIGNED_OPTAB, TYPE) \
    case IFN_##CODE: \
      { \
	optab which_optab = (TYPE_UNSIGNED (types.SELECTOR) \
			     ? UNSIGNED_OPTAB ## _optab \
			     : SIGNED_OPTAB ## _optab); \
	return direct_##TYPE##_optab_supported_p (which_optab, types, \
						  opt_type); \
      }
#include "internal-fn.def"

    case IFN_LAST:
      break;
    }
  gcc_unreachable ();
}
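
/* Usage sketch (hypothetical caller, not part of this file): a pass
   that wants to emit IFN_SQRT for values of type TYPE when optimizing
   for speed could guard the transformation with:

     if (direct_internal_fn_supported_p (IFN_SQRT, tree_pair (type, type),
					 OPTIMIZE_FOR_SPEED))
       ...emit a call to IFN_SQRT...

   The single-type overload below performs the same query when the
   "type0" and "type1" fields are known to match.  */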

/* Return true if FN is supported for type TYPE when the optimization
   type is OPT_TYPE.  The caller knows that the "type0" and "type1"
   fields of FN's direct_internal_fn_info structure are the same.  */

bool
direct_internal_fn_supported_p (internal_fn fn, tree type,
				optimization_type opt_type)
{
  const direct_internal_fn_info &info = direct_internal_fn (fn);
  gcc_checking_assert (info.type0 == info.type1);
  return direct_internal_fn_supported_p (fn, tree_pair (type, type), opt_type);
}

/* Return true if STMT is supported when the optimization type is OPT_TYPE,
   given that STMT is a call to a direct internal function.  */

bool
direct_internal_fn_supported_p (gcall *stmt, optimization_type opt_type)
{
  internal_fn fn = gimple_call_internal_fn (stmt);
  tree_pair types = direct_internal_fn_types (fn, stmt);
  return direct_internal_fn_supported_p (fn, types, opt_type);
}

/* If FN is commutative in two consecutive arguments, return the
   index of the first, otherwise return -1.  */

int
first_commutative_argument (internal_fn fn)
{
  switch (fn)
    {
    case IFN_FMA:
    case IFN_FMS:
    case IFN_FNMA:
    case IFN_FNMS:
    case IFN_AVG_FLOOR:
    case IFN_AVG_CEIL:
    case IFN_MULHS:
    case IFN_MULHRS:
    case IFN_FMIN:
    case IFN_FMAX:
      return 0;

    case IFN_COND_ADD:
    case IFN_COND_MUL:
    case IFN_COND_MIN:
    case IFN_COND_MAX:
    case IFN_COND_AND:
    case IFN_COND_IOR:
    case IFN_COND_XOR:
    case IFN_COND_FMA:
    case IFN_COND_FMS:
    case IFN_COND_FNMA:
    case IFN_COND_FNMS:
      return 1;

    default:
      return -1;
    }
}
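
/* For example, IFN_FMA (A, B, C) computes A * B + C, so its first two
   arguments commute and the function above returns 0, while
   IFN_COND_ADD (COND, A, B, ELSE) commutes in A and B at indices
   1 and 2, giving a return value of 1.  */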

/* Return true if IFN_SET_EDOM is supported.  */

bool
set_edom_supported_p (void)
{
#ifdef TARGET_EDOM
  return true;
#else
  return false;
#endif
}

#define DEF_INTERNAL_OPTAB_FN(CODE, FLAGS, OPTAB, TYPE) \
  static void \
  expand_##CODE (internal_fn fn, gcall *stmt) \
  { \
    expand_##TYPE##_optab_fn (fn, stmt, OPTAB##_optab); \
  }
#define DEF_INTERNAL_SIGNED_OPTAB_FN(CODE, FLAGS, SELECTOR, SIGNED_OPTAB, \
				     UNSIGNED_OPTAB, TYPE) \
  static void \
  expand_##CODE (internal_fn fn, gcall *stmt) \
  { \
    tree_pair types = direct_internal_fn_types (fn, stmt); \
    optab which_optab = direct_internal_fn_optab (fn, types); \
    expand_##TYPE##_optab_fn (fn, stmt, which_optab); \
  }
#include "internal-fn.def"

/* Routines to expand each internal function, indexed by function number.
   Each routine has the prototype:

       expand_<NAME> (internal_fn fn, gcall *stmt)

   where STMT is the statement that performs the call.  */
static void (*const internal_fn_expanders[]) (internal_fn, gcall *) = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) expand_##CODE,
#include "internal-fn.def"
  0
};

/* Invoke T(CODE, IFN) for each conditional function IFN that maps to a
   tree code CODE.  */
#define FOR_EACH_CODE_MAPPING(T) \
  T (PLUS_EXPR, IFN_COND_ADD) \
  T (MINUS_EXPR, IFN_COND_SUB) \
  T (MULT_EXPR, IFN_COND_MUL) \
  T (TRUNC_DIV_EXPR, IFN_COND_DIV) \
  T (TRUNC_MOD_EXPR, IFN_COND_MOD) \
  T (RDIV_EXPR, IFN_COND_RDIV) \
  T (MIN_EXPR, IFN_COND_MIN) \
  T (MAX_EXPR, IFN_COND_MAX) \
  T (BIT_AND_EXPR, IFN_COND_AND) \
  T (BIT_IOR_EXPR, IFN_COND_IOR) \
  T (BIT_XOR_EXPR, IFN_COND_XOR) \
  T (LSHIFT_EXPR, IFN_COND_SHL) \
  T (RSHIFT_EXPR, IFN_COND_SHR)

/* Return a function that only performs CODE when a certain condition is met
   and that uses a given fallback value otherwise.  For example, if CODE is
   a binary operation associated with conditional function FN:

     LHS = FN (COND, A, B, ELSE)

   is equivalent to the C expression:

     LHS = COND ? A CODE B : ELSE;

   operating elementwise if the operands are vectors.

   Return IFN_LAST if no such function exists.  */

internal_fn
get_conditional_internal_fn (tree_code code)
{
  switch (code)
    {
#define CASE(CODE, IFN) case CODE: return IFN;
      FOR_EACH_CODE_MAPPING(CASE)
#undef CASE
    default:
      return IFN_LAST;
    }
}
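
/* For instance, get_conditional_internal_fn (PLUS_EXPR) returns
   IFN_COND_ADD, so a predicated addition can be represented as
   (sketch):

     LHS = IFN_COND_ADD (COND, A, B, ELSE);

   which computes COND ? A + B : ELSE, elementwise for vectors.  */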

/* If IFN implements the conditional form of a tree code, return that
   tree code, otherwise return ERROR_MARK.  */

tree_code
conditional_internal_fn_code (internal_fn ifn)
{
  switch (ifn)
    {
#define CASE(CODE, IFN) case IFN: return CODE;
      FOR_EACH_CODE_MAPPING(CASE)
#undef CASE
    default:
      return ERROR_MARK;
    }
}

/* Invoke T(IFN) for each internal function IFN that also has an
   IFN_COND_* form.  */
#define FOR_EACH_COND_FN_PAIR(T) \
  T (FMA) \
  T (FMS) \
  T (FNMA) \
  T (FNMS)

/* Return a function that only performs internal function FN when a
   certain condition is met and that uses a given fallback value otherwise.
   In other words, the returned function FN' is such that:

     LHS = FN' (COND, A1, ..., An, ELSE)

   is equivalent to the C expression:

     LHS = COND ? FN (A1, ..., An) : ELSE;

   operating elementwise if the operands are vectors.

   Return IFN_LAST if no such function exists.  */

internal_fn
get_conditional_internal_fn (internal_fn fn)
{
  switch (fn)
    {
#define CASE(NAME) case IFN_##NAME: return IFN_COND_##NAME;
      FOR_EACH_COND_FN_PAIR(CASE)
#undef CASE
    default:
      return IFN_LAST;
    }
}
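
/* E.g. get_conditional_internal_fn (IFN_FMA) returns IFN_COND_FMA, so a
   masked fused multiply-add can be written as (sketch):

     LHS = IFN_COND_FMA (COND, A, B, C, ELSE);

   which computes COND ? A * B + C : ELSE, elementwise for vectors.  */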

/* If IFN implements the conditional form of an unconditional internal
   function, return that unconditional function, otherwise return IFN_LAST.  */

internal_fn
get_unconditional_internal_fn (internal_fn ifn)
{
  switch (ifn)
    {
#define CASE(NAME) case IFN_COND_##NAME: return IFN_##NAME;
      FOR_EACH_COND_FN_PAIR(CASE)
#undef CASE
    default:
      return IFN_LAST;
    }
}

/* Return true if STMT can be interpreted as a conditional tree code
   operation of the form:

     LHS = COND ? OP (RHS1, ...) : ELSE;

   operating elementwise if the operands are vectors.  This includes
   the case of an all-true COND, so that the operation always happens.

   When returning true, set:

   - *COND_OUT to the condition COND, or to NULL_TREE if the condition
     is known to be all-true
   - *CODE_OUT to the tree code
   - OPS[I] to operand I of *CODE_OUT
   - *ELSE_OUT to the fallback value ELSE, or to NULL_TREE if the
     condition is known to be all true.  */

bool
can_interpret_as_conditional_op_p (gimple *stmt, tree *cond_out,
				   tree_code *code_out,
				   tree (&ops)[3], tree *else_out)
{
  if (gassign *assign = dyn_cast <gassign *> (stmt))
    {
      *cond_out = NULL_TREE;
      *code_out = gimple_assign_rhs_code (assign);
      ops[0] = gimple_assign_rhs1 (assign);
      ops[1] = gimple_assign_rhs2 (assign);
      ops[2] = gimple_assign_rhs3 (assign);
      *else_out = NULL_TREE;
      return true;
    }
  if (gcall *call = dyn_cast <gcall *> (stmt))
    if (gimple_call_internal_p (call))
      {
	internal_fn ifn = gimple_call_internal_fn (call);
	tree_code code = conditional_internal_fn_code (ifn);
	if (code != ERROR_MARK)
	  {
	    *cond_out = gimple_call_arg (call, 0);
	    *code_out = code;
	    unsigned int nops = gimple_call_num_args (call) - 2;
	    for (unsigned int i = 0; i < 3; ++i)
	      ops[i] = i < nops ? gimple_call_arg (call, i + 1) : NULL_TREE;
	    *else_out = gimple_call_arg (call, nops + 1);
	    if (integer_truep (*cond_out))
	      {
		*cond_out = NULL_TREE;
		*else_out = NULL_TREE;
	      }
	    return true;
	  }
      }
  return false;
}
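
/* Usage sketch (hypothetical caller, not part of this file): a pattern
   matcher that wants to handle conditional and unconditional additions
   uniformly might write:

     tree cond, els, ops[3];
     tree_code code;
     if (can_interpret_as_conditional_op_p (stmt, &cond, &code, ops, &els)
	 && code == PLUS_EXPR)
       ...

   where OPS[0] + OPS[1] is performed where COND is true, ELS supplies
   the remaining elements, and a null COND means the operation is
   unconditional.  */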

/* Return true if IFN is some form of load from memory.  */

bool
internal_load_fn_p (internal_fn fn)
{
  switch (fn)
    {
    case IFN_MASK_LOAD:
    case IFN_LOAD_LANES:
    case IFN_MASK_LOAD_LANES:
    case IFN_GATHER_LOAD:
    case IFN_MASK_GATHER_LOAD:
      return true;

    default:
      return false;
    }
}

/* Return true if IFN is some form of store to memory.  */

bool
internal_store_fn_p (internal_fn fn)
{
  switch (fn)
    {
    case IFN_MASK_STORE:
    case IFN_STORE_LANES:
    case IFN_MASK_STORE_LANES:
    case IFN_SCATTER_STORE:
    case IFN_MASK_SCATTER_STORE:
      return true;

    default:
      return false;
    }
}

/* Return true if IFN is some form of gather load or scatter store.  */

bool
internal_gather_scatter_fn_p (internal_fn fn)
{
  switch (fn)
    {
    case IFN_GATHER_LOAD:
    case IFN_MASK_GATHER_LOAD:
    case IFN_SCATTER_STORE:
    case IFN_MASK_SCATTER_STORE:
      return true;

    default:
      return false;
    }
}

/* If FN takes a vector mask argument, return the index of that argument,
   otherwise return -1.  */

int
internal_fn_mask_index (internal_fn fn)
{
  switch (fn)
    {
    case IFN_MASK_LOAD:
    case IFN_MASK_LOAD_LANES:
    case IFN_MASK_STORE:
    case IFN_MASK_STORE_LANES:
      return 2;

    case IFN_MASK_GATHER_LOAD:
    case IFN_MASK_SCATTER_STORE:
      return 4;

    default:
      return (conditional_internal_fn_code (fn) != ERROR_MARK
	      || get_unconditional_internal_fn (fn) != IFN_LAST ? 0 : -1);
    }
}

/* If FN takes a value that should be stored to memory, return the index
   of that argument, otherwise return -1.  */

int
internal_fn_stored_value_index (internal_fn fn)
{
  switch (fn)
    {
    case IFN_MASK_STORE:
    case IFN_SCATTER_STORE:
    case IFN_MASK_SCATTER_STORE:
      return 3;

    default:
      return -1;
    }
}
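
/* Illustrative sketch (hypothetical snippet, not part of this file):
   for a call CALL to a store function IFN identified above, the stored
   value and any mask can be fetched positionally:

     int value_index = internal_fn_stored_value_index (ifn);
     int mask_index = internal_fn_mask_index (ifn);
     tree value = gimple_call_arg (call, value_index);
     tree mask = (mask_index >= 0
		  ? gimple_call_arg (call, mask_index)
		  : NULL_TREE);

   with VALUE_INDEX likewise checked against -1 for functions that do
   not store anything.  */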

/* Return true if the target supports gather load or scatter store function
   IFN.  For loads, VECTOR_TYPE is the vector type of the load result,
   while for stores it is the vector type of the stored data argument.
   MEMORY_ELEMENT_TYPE is the type of the memory elements being loaded
   or stored.  OFFSET_VECTOR_TYPE is the vector type that holds the
   offset from the shared base address of each loaded or stored element.
   SCALE is the amount by which these offsets should be multiplied
   *after* they have been extended to address width.  */

bool
internal_gather_scatter_fn_supported_p (internal_fn ifn, tree vector_type,
					tree memory_element_type,
					tree offset_vector_type, int scale)
{
  if (!tree_int_cst_equal (TYPE_SIZE (TREE_TYPE (vector_type)),
			   TYPE_SIZE (memory_element_type)))
    return false;
  if (maybe_ne (TYPE_VECTOR_SUBPARTS (vector_type),
		TYPE_VECTOR_SUBPARTS (offset_vector_type)))
    return false;
  optab optab = direct_internal_fn_optab (ifn);
  insn_code icode = convert_optab_handler (optab, TYPE_MODE (vector_type),
					   TYPE_MODE (offset_vector_type));
  int output_ops = internal_load_fn_p (ifn) ? 1 : 0;
  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (offset_vector_type));
  return (icode != CODE_FOR_nothing
	  && insn_operand_matches (icode, 2 + output_ops, GEN_INT (unsigned_p))
	  && insn_operand_matches (icode, 3 + output_ops, GEN_INT (scale)));
}
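
/* To unpack the operand arithmetic above: gather loads have a single
   output operand before the inputs, so their signedness flag and scale
   are insn operands 3 and 4, whereas scatter stores have no output and
   use operands 2 and 3 for the same purposes.  */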

/* Return true if the target supports IFN_CHECK_{RAW,WAR}_PTRS function IFN
   for pointers of type TYPE when the accesses have LENGTH bytes and their
   common byte alignment is ALIGN.  */

bool
internal_check_ptrs_fn_supported_p (internal_fn ifn, tree type,
				    poly_uint64 length, unsigned int align)
{
  machine_mode mode = TYPE_MODE (type);
  optab optab = direct_internal_fn_optab (ifn);
  insn_code icode = direct_optab_handler (optab, mode);
  if (icode == CODE_FOR_nothing)
    return false;
  rtx length_rtx = immed_wide_int_const (length, mode);
  return (insn_operand_matches (icode, 3, length_rtx)
	  && insn_operand_matches (icode, 4, GEN_INT (align)));
}

/* Expand STMT as though it were a call to internal function FN.  */

void
expand_internal_call (internal_fn fn, gcall *stmt)
{
  internal_fn_expanders[fn] (fn, stmt);
}

/* Expand STMT, which is a call to an internal function.  */

void
expand_internal_call (gcall *stmt)
{
  expand_internal_call (gimple_call_internal_fn (stmt), stmt);
}

/* If TYPE is a vector type, return true if IFN is a direct internal
   function that is supported for that type.  If TYPE is a scalar type,
   return true if IFN is a direct internal function that is supported for
   the target's preferred vector version of TYPE.  */

bool
vectorized_internal_fn_supported_p (internal_fn ifn, tree type)
{
  scalar_mode smode;
  if (!VECTOR_TYPE_P (type) && is_a <scalar_mode> (TYPE_MODE (type), &smode))
    {
      machine_mode vmode = targetm.vectorize.preferred_simd_mode (smode);
      if (VECTOR_MODE_P (vmode))
	type = build_vector_type_for_mode (type, vmode);
    }

  return (VECTOR_MODE_P (TYPE_MODE (type))
	  && direct_internal_fn_supported_p (ifn, type, OPTIMIZE_FOR_SPEED));
}
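
/* For example (illustrative only), a caller could test whether square
   root vectorizes for double on the current target with:

     if (vectorized_internal_fn_supported_p (IFN_SQRT, double_type_node))
       ...a vector square root is available...

   Here the scalar type is first promoted to the target's preferred
   SIMD mode before the support check.  */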

/* Calls to IFN_PHI represent PHI nodes and are expected to be eliminated
   before RTL expansion, so reaching this expander is a bug.  */

void
expand_PHI (internal_fn, gcall *)
{
  gcc_unreachable ();
}