1 /* Lower _BitInt(N) operations to scalar operations.
2 Copyright (C) 2023-2024 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
29 #include "tree-pass.h"
31 #include "fold-const.h"
33 #include "gimple-iterator.h"
39 #include "tree-ssa-live.h"
40 #include "tree-ssa-coalesce.h"
45 #include "gimple-range.h"
46 #include "value-range.h"
47 #include "langhooks.h"
48 #include "gimplify-me.h"
49 #include "diagnostic-core.h"
51 #include "tree-pretty-print.h"
52 #include "alloc-pool.h"
53 #include "tree-into-ssa.h"
54 #include "tree-cfgcleanup.h"
55 #include "tree-switch-conversion.h"
57 #include "gimple-lower-bitint.h"
59 /* Split BITINT_TYPE precisions in 4 categories. Small _BitInt, where
60 target hook says it is a single limb, middle _BitInt which per ABI
61 does not, but there is some INTEGER_TYPE in which arithmetics can be
62 performed (operations on such _BitInt are lowered to casts to that
63 arithmetic type and cast back; e.g. on x86_64 limb is DImode, but
64 target supports TImode, so _BitInt(65) to _BitInt(128) are middle
65 ones), large _BitInt which should be handled by straight line code and
66 finally huge _BitInt which should be handled by loops over the limbs. */
/* The four lowering strategies for a given _BitInt precision; see the
   comment above for what each category means.  */
enum bitint_prec_kind
{
  bitint_prec_small,
  bitint_prec_middle,
  bitint_prec_large,
  bitint_prec_huge
};

/* Caches to speed up bitint_precision_kind.  Zero means "not yet
   computed"; otherwise they hold precision thresholds discovered from
   the target hook on earlier queries.  */
static int small_max_prec, mid_min_prec, large_min_prec, huge_min_prec;

/* Number of bits in one limb, set lazily by bitint_precision_kind.
   NOTE(review): this declaration was lost in the extraction; it is
   referenced throughout the file, so it is restored here — confirm
   against the upstream file.  */
static int limb_prec;
80 /* Categorize _BitInt(PREC) as small, middle, large or huge. */
82 static bitint_prec_kind
83 bitint_precision_kind (int prec
)
85 if (prec
<= small_max_prec
)
86 return bitint_prec_small
;
87 if (huge_min_prec
&& prec
>= huge_min_prec
)
88 return bitint_prec_huge
;
89 if (large_min_prec
&& prec
>= large_min_prec
)
90 return bitint_prec_large
;
91 if (mid_min_prec
&& prec
>= mid_min_prec
)
92 return bitint_prec_middle
;
94 struct bitint_info info
;
95 bool ok
= targetm
.c
.bitint_type_info (prec
, &info
);
97 scalar_int_mode limb_mode
= as_a
<scalar_int_mode
> (info
.limb_mode
);
98 if (prec
<= GET_MODE_PRECISION (limb_mode
))
100 small_max_prec
= prec
;
101 return bitint_prec_small
;
104 && GET_MODE_PRECISION (limb_mode
) < MAX_FIXED_MODE_SIZE
)
105 large_min_prec
= MAX_FIXED_MODE_SIZE
+ 1;
107 limb_prec
= GET_MODE_PRECISION (limb_mode
);
110 if (4 * limb_prec
>= MAX_FIXED_MODE_SIZE
)
111 huge_min_prec
= 4 * limb_prec
;
113 huge_min_prec
= MAX_FIXED_MODE_SIZE
+ 1;
115 if (prec
<= MAX_FIXED_MODE_SIZE
)
117 if (!mid_min_prec
|| prec
< mid_min_prec
)
119 return bitint_prec_middle
;
121 if (large_min_prec
&& prec
<= large_min_prec
)
122 return bitint_prec_large
;
123 return bitint_prec_huge
;
126 /* Same for a TYPE. */
128 static bitint_prec_kind
129 bitint_precision_kind (tree type
)
131 return bitint_precision_kind (TYPE_PRECISION (type
));
134 /* Return minimum precision needed to describe INTEGER_CST
135 CST. All bits above that precision up to precision of
136 TREE_TYPE (CST) are cleared if EXT is set to 0, or set
137 if EXT is set to -1. */
140 bitint_min_cst_precision (tree cst
, int &ext
)
142 ext
= tree_int_cst_sgn (cst
) < 0 ? -1 : 0;
143 wide_int w
= wi::to_wide (cst
);
144 unsigned min_prec
= wi::min_precision (w
, TYPE_SIGN (TREE_TYPE (cst
)));
145 /* For signed values, we don't need to count the sign bit,
146 we'll use constant 0 or -1 for the upper bits. */
147 if (!TYPE_UNSIGNED (TREE_TYPE (cst
)))
151 /* For unsigned values, also try signed min_precision
152 in case the constant has lots of most significant bits set. */
153 unsigned min_prec2
= wi::min_precision (w
, SIGNED
) - 1;
154 if (min_prec2
< min_prec
)
165 /* If OP is middle _BitInt, cast it to corresponding INTEGER_TYPE
166 cached in TYPE and return it. */
169 maybe_cast_middle_bitint (gimple_stmt_iterator
*gsi
, tree op
, tree
&type
)
172 || TREE_CODE (TREE_TYPE (op
)) != BITINT_TYPE
173 || bitint_precision_kind (TREE_TYPE (op
)) != bitint_prec_middle
)
176 int prec
= TYPE_PRECISION (TREE_TYPE (op
));
177 int uns
= TYPE_UNSIGNED (TREE_TYPE (op
));
178 if (type
== NULL_TREE
179 || TYPE_PRECISION (type
) != prec
180 || TYPE_UNSIGNED (type
) != uns
)
181 type
= build_nonstandard_integer_type (prec
, uns
);
183 if (TREE_CODE (op
) != SSA_NAME
)
185 tree nop
= fold_convert (type
, op
);
186 if (is_gimple_val (nop
))
190 tree nop
= make_ssa_name (type
);
191 gimple
*g
= gimple_build_assign (nop
, NOP_EXPR
, op
);
192 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
196 /* Return true if STMT can be handled in a loop from least to most
197 significant limb together with its dependencies. */
200 mergeable_op (gimple
*stmt
)
202 if (!is_gimple_assign (stmt
))
204 switch (gimple_assign_rhs_code (stmt
))
218 tree cnt
= gimple_assign_rhs2 (stmt
);
219 if (tree_fits_uhwi_p (cnt
)
220 && tree_to_uhwi (cnt
) < (unsigned HOST_WIDE_INT
) limb_prec
)
225 case VIEW_CONVERT_EXPR
:
227 tree lhs_type
= TREE_TYPE (gimple_assign_lhs (stmt
));
228 tree rhs_type
= TREE_TYPE (gimple_assign_rhs1 (stmt
));
229 if (TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
230 && TREE_CODE (lhs_type
) == BITINT_TYPE
231 && TREE_CODE (rhs_type
) == BITINT_TYPE
232 && bitint_precision_kind (lhs_type
) >= bitint_prec_large
233 && bitint_precision_kind (rhs_type
) >= bitint_prec_large
234 && tree_int_cst_equal (TYPE_SIZE (lhs_type
), TYPE_SIZE (rhs_type
)))
236 if (TYPE_PRECISION (rhs_type
) >= TYPE_PRECISION (lhs_type
))
238 if ((unsigned) TYPE_PRECISION (lhs_type
) % (2 * limb_prec
) != 0)
240 if (bitint_precision_kind (lhs_type
) == bitint_prec_large
)
251 /* Return non-zero if stmt is .{ADD,SUB,MUL}_OVERFLOW call with
252 _Complex large/huge _BitInt lhs which has at most two immediate uses,
253 at most one use in REALPART_EXPR stmt in the same bb and exactly one
254 IMAGPART_EXPR use in the same bb with a single use which casts it to
255 non-BITINT_TYPE integral type. If there is a REALPART_EXPR use,
256 return 2. Such cases (most common uses of those builtins) can be
257 optimized by marking their lhs and lhs of IMAGPART_EXPR and maybe lhs
258 of REALPART_EXPR as not needed to be backed up by a stack variable.
259 For .UBSAN_CHECK_{ADD,SUB,MUL} return 3. */
262 optimizable_arith_overflow (gimple
*stmt
)
264 bool is_ubsan
= false;
265 if (!is_gimple_call (stmt
) || !gimple_call_internal_p (stmt
))
267 switch (gimple_call_internal_fn (stmt
))
269 case IFN_ADD_OVERFLOW
:
270 case IFN_SUB_OVERFLOW
:
271 case IFN_MUL_OVERFLOW
:
273 case IFN_UBSAN_CHECK_ADD
:
274 case IFN_UBSAN_CHECK_SUB
:
275 case IFN_UBSAN_CHECK_MUL
:
281 tree lhs
= gimple_call_lhs (stmt
);
284 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs
))
286 tree type
= is_ubsan
? TREE_TYPE (lhs
) : TREE_TYPE (TREE_TYPE (lhs
));
287 if (TREE_CODE (type
) != BITINT_TYPE
288 || bitint_precision_kind (type
) < bitint_prec_large
)
295 if (!single_imm_use (lhs
, &use_p
, &use_stmt
)
296 || gimple_bb (use_stmt
) != gimple_bb (stmt
)
297 || !gimple_store_p (use_stmt
)
298 || !is_gimple_assign (use_stmt
)
299 || gimple_has_volatile_ops (use_stmt
)
300 || stmt_ends_bb_p (use_stmt
))
308 FOR_EACH_IMM_USE_FAST (use_p
, ui
, lhs
)
310 gimple
*g
= USE_STMT (use_p
);
311 if (is_gimple_debug (g
))
313 if (!is_gimple_assign (g
) || gimple_bb (g
) != gimple_bb (stmt
))
315 if (gimple_assign_rhs_code (g
) == REALPART_EXPR
)
321 else if (gimple_assign_rhs_code (g
) == IMAGPART_EXPR
)
327 use_operand_p use2_p
;
329 tree lhs2
= gimple_assign_lhs (g
);
330 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs2
))
332 if (!single_imm_use (lhs2
, &use2_p
, &use_stmt
)
333 || gimple_bb (use_stmt
) != gimple_bb (stmt
)
334 || !gimple_assign_cast_p (use_stmt
))
337 lhs2
= gimple_assign_lhs (use_stmt
);
338 if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs2
))
339 || TREE_CODE (TREE_TYPE (lhs2
)) == BITINT_TYPE
)
347 return seen
== 3 ? 2 : 1;
350 /* If STMT is some kind of comparison (GIMPLE_COND, comparison assignment)
351 comparing large/huge _BitInt types, return the comparison code and if
352 non-NULL fill in the comparison operands to *POP1 and *POP2. */
355 comparison_op (gimple
*stmt
, tree
*pop1
, tree
*pop2
)
357 tree op1
= NULL_TREE
, op2
= NULL_TREE
;
358 tree_code code
= ERROR_MARK
;
359 if (gimple_code (stmt
) == GIMPLE_COND
)
361 code
= gimple_cond_code (stmt
);
362 op1
= gimple_cond_lhs (stmt
);
363 op2
= gimple_cond_rhs (stmt
);
365 else if (is_gimple_assign (stmt
))
367 code
= gimple_assign_rhs_code (stmt
);
368 op1
= gimple_assign_rhs1 (stmt
);
369 if (TREE_CODE_CLASS (code
) == tcc_comparison
370 || TREE_CODE_CLASS (code
) == tcc_binary
)
371 op2
= gimple_assign_rhs2 (stmt
);
373 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
375 tree type
= TREE_TYPE (op1
);
376 if (TREE_CODE (type
) != BITINT_TYPE
377 || bitint_precision_kind (type
) < bitint_prec_large
)
387 /* Class used during large/huge _BitInt lowering containing all the
388 state for the methods. */
390 struct bitint_large_huge
393 : m_names (NULL
), m_loads (NULL
), m_preserved (NULL
),
394 m_single_use_names (NULL
), m_map (NULL
), m_vars (NULL
),
395 m_limb_type (NULL_TREE
), m_data (vNULL
) {}
397 ~bitint_large_huge ();
399 void insert_before (gimple
*);
400 tree
limb_access_type (tree
, tree
);
401 tree
limb_access (tree
, tree
, tree
, bool);
402 void if_then (gimple
*, profile_probability
, edge
&, edge
&);
403 void if_then_else (gimple
*, profile_probability
, edge
&, edge
&);
404 void if_then_if_then_else (gimple
*g
, gimple
*,
405 profile_probability
, profile_probability
,
406 edge
&, edge
&, edge
&);
407 tree
handle_operand (tree
, tree
);
408 tree
prepare_data_in_out (tree
, tree
, tree
*, tree
= NULL_TREE
);
409 tree
add_cast (tree
, tree
);
410 tree
handle_plus_minus (tree_code
, tree
, tree
, tree
);
411 tree
handle_lshift (tree
, tree
, tree
);
412 tree
handle_cast (tree
, tree
, tree
);
413 tree
handle_load (gimple
*, tree
);
414 tree
handle_stmt (gimple
*, tree
);
415 tree
handle_operand_addr (tree
, gimple
*, int *, int *);
416 tree
create_loop (tree
, tree
*);
417 tree
lower_mergeable_stmt (gimple
*, tree_code
&, tree
, tree
);
418 tree
lower_comparison_stmt (gimple
*, tree_code
&, tree
, tree
);
419 void lower_shift_stmt (tree
, gimple
*);
420 void lower_muldiv_stmt (tree
, gimple
*);
421 void lower_float_conv_stmt (tree
, gimple
*);
422 tree
arith_overflow_extract_bits (unsigned int, unsigned int, tree
,
424 void finish_arith_overflow (tree
, tree
, tree
, tree
, tree
, tree
, gimple
*,
426 void lower_addsub_overflow (tree
, gimple
*);
427 void lower_mul_overflow (tree
, gimple
*);
428 void lower_cplxpart_stmt (tree
, gimple
*);
429 void lower_complexexpr_stmt (gimple
*);
430 void lower_bit_query (gimple
*);
431 void lower_call (tree
, gimple
*);
432 void lower_asm (gimple
*);
433 void lower_stmt (gimple
*);
435 /* Bitmap of large/huge _BitInt SSA_NAMEs except those can be
436 merged with their uses. */
438 /* Subset of those for lhs of load statements. These will be
439 cleared in m_names if the loads will be mergeable with all
442 /* Bitmap of large/huge _BitInt SSA_NAMEs that should survive
443 to later passes (arguments or return values of calls). */
445 /* Subset of m_names which have a single use. As the lowering
446 can replace various original statements with their lowered
447 form even before it is done iterating over all basic blocks,
448 testing has_single_use for the purpose of emitting clobbers
449 doesn't work properly. */
450 bitmap m_single_use_names
;
451 /* Used for coalescing/partitioning of large/huge _BitInt SSA_NAMEs
454 /* Mapping of the partitions to corresponding decls. */
456 /* Unsigned integer type with limb precision. */
458 /* Its TYPE_SIZE_UNIT. */
459 unsigned HOST_WIDE_INT m_limb_size
;
460 /* Location of a gimple stmt which is being currently lowered. */
462 /* Current stmt iterator where code is being lowered currently. */
463 gimple_stmt_iterator m_gsi
;
464 /* Statement after which any clobbers should be added if non-NULL. */
465 gimple
*m_after_stmt
;
466 /* Set when creating loops to the loop header bb and its preheader. */
467 basic_block m_bb
, m_preheader_bb
;
468 /* Stmt iterator after which initialization statements should be emitted. */
469 gimple_stmt_iterator m_init_gsi
;
470 /* Decl into which a mergeable statement stores result. */
472 /* handle_operand/handle_stmt can be invoked in various ways.
474 lower_mergeable_stmt for large _BitInt calls those with constant
475 idx only, expanding to straight line code, for huge _BitInt
476 emits a loop from least significant limb upwards, where each loop
477 iteration handles 2 limbs, plus there can be up to one full limb
478 and one partial limb processed after the loop, where handle_operand
479 and/or handle_stmt are called with constant idx. m_upwards_2limb
480 is set for this case, false otherwise. m_upwards is true if it
481 is either large or huge _BitInt handled by lower_mergeable_stmt,
482 i.e. indexes always increase.
484 Another way is used by lower_comparison_stmt, which walks limbs
485 from most significant to least significant, partial limb if any
486 processed first with constant idx and then loop processing a single
487 limb per iteration with non-constant idx.
489 Another way is used in lower_shift_stmt, where for LSHIFT_EXPR
490 destination limbs are processed from most significant to least
491 significant or for RSHIFT_EXPR the other way around, in loops or
492 straight line code, but idx usually is non-constant (so from
493 handle_operand/handle_stmt POV random access). The LSHIFT_EXPR
494 handling there can access even partial limbs using non-constant
495 idx (then m_var_msb should be true, for all the other cases
496 including lower_mergeable_stmt/lower_comparison_stmt that is
497 not the case and so m_var_msb should be false.
499 m_first should be set the first time handle_operand/handle_stmt
500 is called and clear when it is called for some other limb with
501 the same argument. If the lowering of an operand (e.g. INTEGER_CST)
502 or statement (e.g. +/-/<< with < limb_prec constant) needs some
503 state between the different calls, when m_first is true it should
504 push some trees to m_data vector and also make sure m_data_cnt is
505 incremented by how many trees were pushed, and when m_first is
506 false, it can use the m_data[m_data_cnt] etc. data or update them,
507 just needs to bump m_data_cnt by the same amount as when it was
508 called with m_first set. The toplevel calls to
509 handle_operand/handle_stmt should set m_data_cnt to 0 and truncate
510 m_data vector when setting m_first to true.
512 m_cast_conditional and m_bitfld_load are used when handling a
513 bit-field load inside of a widening cast. handle_cast sometimes
514 needs to do runtime comparisons and handle_operand only conditionally
515 or even in two separate conditional blocks for one idx (once with
516 constant index after comparing the runtime one for equality with the
517 constant). In these cases, m_cast_conditional is set to true and
518 the bit-field load then communicates its m_data_cnt to handle_cast
519 using m_bitfld_load. */
522 unsigned m_upwards_2limb
;
524 bool m_cast_conditional
;
525 unsigned m_bitfld_load
;
527 unsigned int m_data_cnt
;
530 bitint_large_huge::~bitint_large_huge ()
532 BITMAP_FREE (m_names
);
533 BITMAP_FREE (m_loads
);
534 BITMAP_FREE (m_preserved
);
535 BITMAP_FREE (m_single_use_names
);
537 delete_var_map (m_map
);
542 /* Insert gimple statement G before current location
543 and set its gimple_location. */
546 bitint_large_huge::insert_before (gimple
*g
)
548 gimple_set_location (g
, m_loc
);
549 gsi_insert_before (&m_gsi
, g
, GSI_SAME_STMT
);
552 /* Return type for accessing limb IDX of BITINT_TYPE TYPE.
553 This is normally m_limb_type, except for a partial most
554 significant limb if any. */
557 bitint_large_huge::limb_access_type (tree type
, tree idx
)
559 if (type
== NULL_TREE
)
561 unsigned HOST_WIDE_INT i
= tree_to_uhwi (idx
);
562 unsigned int prec
= TYPE_PRECISION (type
);
563 gcc_assert (i
* limb_prec
< prec
);
564 if ((i
+ 1) * limb_prec
<= prec
)
567 return build_nonstandard_integer_type (prec
% limb_prec
,
568 TYPE_UNSIGNED (type
));
571 /* Return a tree how to access limb IDX of VAR corresponding to BITINT_TYPE
572 TYPE. If WRITE_P is true, it will be a store, otherwise a read. */
575 bitint_large_huge::limb_access (tree type
, tree var
, tree idx
, bool write_p
)
577 tree atype
= (tree_fits_uhwi_p (idx
)
578 ? limb_access_type (type
, idx
) : m_limb_type
);
580 if (DECL_P (var
) && tree_fits_uhwi_p (idx
))
582 tree ptype
= build_pointer_type (strip_array_types (TREE_TYPE (var
)));
583 unsigned HOST_WIDE_INT off
= tree_to_uhwi (idx
) * m_limb_size
;
584 ret
= build2 (MEM_REF
, m_limb_type
,
585 build_fold_addr_expr (var
),
586 build_int_cst (ptype
, off
));
587 TREE_THIS_VOLATILE (ret
) = TREE_THIS_VOLATILE (var
);
588 TREE_SIDE_EFFECTS (ret
) = TREE_SIDE_EFFECTS (var
);
590 else if (TREE_CODE (var
) == MEM_REF
&& tree_fits_uhwi_p (idx
))
593 = build2 (MEM_REF
, m_limb_type
, TREE_OPERAND (var
, 0),
594 size_binop (PLUS_EXPR
, TREE_OPERAND (var
, 1),
595 build_int_cst (TREE_TYPE (TREE_OPERAND (var
, 1)),
598 TREE_THIS_VOLATILE (ret
) = TREE_THIS_VOLATILE (var
);
599 TREE_SIDE_EFFECTS (ret
) = TREE_SIDE_EFFECTS (var
);
600 TREE_THIS_NOTRAP (ret
) = TREE_THIS_NOTRAP (var
);
604 var
= unshare_expr (var
);
605 if (TREE_CODE (TREE_TYPE (var
)) != ARRAY_TYPE
606 || !useless_type_conversion_p (m_limb_type
,
607 TREE_TYPE (TREE_TYPE (var
))))
609 unsigned HOST_WIDE_INT nelts
610 = CEIL (tree_to_uhwi (TYPE_SIZE (type
)), limb_prec
);
611 tree atype
= build_array_type_nelts (m_limb_type
, nelts
);
612 var
= build1 (VIEW_CONVERT_EXPR
, atype
, var
);
614 ret
= build4 (ARRAY_REF
, m_limb_type
, var
, idx
, NULL_TREE
, NULL_TREE
);
616 if (!write_p
&& !useless_type_conversion_p (atype
, m_limb_type
))
618 gimple
*g
= gimple_build_assign (make_ssa_name (m_limb_type
), ret
);
620 ret
= gimple_assign_lhs (g
);
621 ret
= build1 (NOP_EXPR
, atype
, ret
);
626 /* Emit a half diamond,
635 or if (COND) new_bb1;
636 PROB is the probability that the condition is true.
637 Updates m_gsi to start of new_bb1.
638 Sets EDGE_TRUE to edge from new_bb1 to successor and
639 EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb. */
642 bitint_large_huge::if_then (gimple
*cond
, profile_probability prob
,
643 edge
&edge_true
, edge
&edge_false
)
645 insert_before (cond
);
646 edge e1
= split_block (gsi_bb (m_gsi
), cond
);
647 edge e2
= split_block (e1
->dest
, (gimple
*) NULL
);
648 edge e3
= make_edge (e1
->src
, e2
->dest
, EDGE_FALSE_VALUE
);
649 e1
->flags
= EDGE_TRUE_VALUE
;
650 e1
->probability
= prob
;
651 e3
->probability
= prob
.invert ();
652 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e1
->src
);
655 m_gsi
= gsi_after_labels (e1
->dest
);
658 /* Emit a full diamond,
667 or if (COND) new_bb2; else new_bb1;
668 PROB is the probability that the condition is true.
669 Updates m_gsi to start of new_bb2.
670 Sets EDGE_TRUE to edge from new_bb1 to successor and
671 EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb. */
674 bitint_large_huge::if_then_else (gimple
*cond
, profile_probability prob
,
675 edge
&edge_true
, edge
&edge_false
)
677 insert_before (cond
);
678 edge e1
= split_block (gsi_bb (m_gsi
), cond
);
679 edge e2
= split_block (e1
->dest
, (gimple
*) NULL
);
680 basic_block bb
= create_empty_bb (e1
->dest
);
681 add_bb_to_loop (bb
, e1
->dest
->loop_father
);
682 edge e3
= make_edge (e1
->src
, bb
, EDGE_TRUE_VALUE
);
683 e1
->flags
= EDGE_FALSE_VALUE
;
684 e3
->probability
= prob
;
685 e1
->probability
= prob
.invert ();
686 bb
->count
= e1
->src
->count
.apply_probability (prob
);
687 set_immediate_dominator (CDI_DOMINATORS
, bb
, e1
->src
);
688 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e1
->src
);
689 edge_true
= make_single_succ_edge (bb
, e2
->dest
, EDGE_FALLTHRU
);
691 m_gsi
= gsi_after_labels (bb
);
694 /* Emit a half diamond with full diamond in it
708 or if (COND1) { if (COND2) new_bb2; else new_bb1; }
709 PROB1 is the probability that the condition 1 is true.
710 PROB2 is the probability that the condition 2 is true.
711 Updates m_gsi to start of new_bb1.
712 Sets EDGE_TRUE_TRUE to edge from new_bb2 to successor,
713 EDGE_TRUE_FALSE to edge from new_bb1 to successor and
714 EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND1) bb.
715 If COND2 is NULL, this is equivalent to
716 if_then (COND1, PROB1, EDGE_TRUE_FALSE, EDGE_FALSE);
717 EDGE_TRUE_TRUE = NULL; */
720 bitint_large_huge::if_then_if_then_else (gimple
*cond1
, gimple
*cond2
,
721 profile_probability prob1
,
722 profile_probability prob2
,
723 edge
&edge_true_true
,
724 edge
&edge_true_false
,
727 edge e2
, e3
, e4
= NULL
;
728 if_then (cond1
, prob1
, e2
, e3
);
731 edge_true_true
= NULL
;
732 edge_true_false
= e2
;
736 insert_before (cond2
);
737 e2
= split_block (gsi_bb (m_gsi
), cond2
);
738 basic_block bb
= create_empty_bb (e2
->dest
);
739 add_bb_to_loop (bb
, e2
->dest
->loop_father
);
740 e4
= make_edge (e2
->src
, bb
, EDGE_TRUE_VALUE
);
741 set_immediate_dominator (CDI_DOMINATORS
, bb
, e2
->src
);
742 e4
->probability
= prob2
;
743 e2
->flags
= EDGE_FALSE_VALUE
;
744 e2
->probability
= prob2
.invert ();
745 bb
->count
= e2
->src
->count
.apply_probability (prob2
);
746 e4
= make_single_succ_edge (bb
, e3
->dest
, EDGE_FALLTHRU
);
747 e2
= find_edge (e2
->dest
, e3
->dest
);
749 edge_true_false
= e2
;
751 m_gsi
= gsi_after_labels (e2
->src
);
754 /* Emit code to access limb IDX from OP. */
757 bitint_large_huge::handle_operand (tree op
, tree idx
)
759 switch (TREE_CODE (op
))
763 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (op
)))
765 if (SSA_NAME_IS_DEFAULT_DEF (op
))
769 tree v
= create_tmp_reg (m_limb_type
);
770 if (SSA_NAME_VAR (op
) && VAR_P (SSA_NAME_VAR (op
)))
772 DECL_NAME (v
) = DECL_NAME (SSA_NAME_VAR (op
));
773 DECL_SOURCE_LOCATION (v
)
774 = DECL_SOURCE_LOCATION (SSA_NAME_VAR (op
));
776 v
= get_or_create_ssa_default_def (cfun
, v
);
777 m_data
.safe_push (v
);
779 tree ret
= m_data
[m_data_cnt
];
781 if (tree_fits_uhwi_p (idx
))
783 tree type
= limb_access_type (TREE_TYPE (op
), idx
);
784 ret
= add_cast (type
, ret
);
788 location_t loc_save
= m_loc
;
789 m_loc
= gimple_location (SSA_NAME_DEF_STMT (op
));
790 tree ret
= handle_stmt (SSA_NAME_DEF_STMT (op
), idx
);
797 p
= var_to_partition (m_map
, op
);
798 gcc_assert (m_vars
[p
] != NULL_TREE
);
799 t
= limb_access (TREE_TYPE (op
), m_vars
[p
], idx
, false);
800 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (t
)), t
);
802 t
= gimple_assign_lhs (g
);
804 && m_single_use_names
805 && m_vars
[p
] != m_lhs
807 && bitmap_bit_p (m_single_use_names
, SSA_NAME_VERSION (op
)))
809 tree clobber
= build_clobber (TREE_TYPE (m_vars
[p
]),
810 CLOBBER_STORAGE_END
);
811 g
= gimple_build_assign (m_vars
[p
], clobber
);
812 gimple_stmt_iterator gsi
= gsi_for_stmt (m_after_stmt
);
813 gsi_insert_after (&gsi
, g
, GSI_SAME_STMT
);
817 if (tree_fits_uhwi_p (idx
))
819 tree c
, type
= limb_access_type (TREE_TYPE (op
), idx
);
820 unsigned HOST_WIDE_INT i
= tree_to_uhwi (idx
);
823 m_data
.safe_push (NULL_TREE
);
824 m_data
.safe_push (NULL_TREE
);
826 if (limb_prec
!= HOST_BITS_PER_WIDE_INT
)
828 wide_int w
= wi::rshift (wi::to_wide (op
), i
* limb_prec
,
829 TYPE_SIGN (TREE_TYPE (op
)));
830 c
= wide_int_to_tree (type
,
831 wide_int::from (w
, TYPE_PRECISION (type
),
834 else if (i
>= TREE_INT_CST_EXT_NUNITS (op
))
835 c
= build_int_cst (type
,
836 tree_int_cst_sgn (op
) < 0 ? -1 : 0);
838 c
= build_int_cst (type
, TREE_INT_CST_ELT (op
, i
));
843 || (m_data
[m_data_cnt
] == NULL_TREE
844 && m_data
[m_data_cnt
+ 1] == NULL_TREE
))
846 unsigned int prec
= TYPE_PRECISION (TREE_TYPE (op
));
847 unsigned int rem
= prec
% (2 * limb_prec
);
849 unsigned min_prec
= bitint_min_cst_precision (op
, ext
);
852 m_data
.safe_push (NULL_TREE
);
853 m_data
.safe_push (NULL_TREE
);
855 if (integer_zerop (op
))
857 tree c
= build_zero_cst (m_limb_type
);
858 m_data
[m_data_cnt
] = c
;
859 m_data
[m_data_cnt
+ 1] = c
;
861 else if (integer_all_onesp (op
))
863 tree c
= build_all_ones_cst (m_limb_type
);
864 m_data
[m_data_cnt
] = c
;
865 m_data
[m_data_cnt
+ 1] = c
;
867 else if (m_upwards_2limb
&& min_prec
<= (unsigned) limb_prec
)
869 /* Single limb constant. Use a phi with that limb from
870 the preheader edge and 0 or -1 constant from the other edge
871 and for the second limb in the loop. */
873 gcc_assert (m_first
);
876 prepare_data_in_out (fold_convert (m_limb_type
, op
), idx
, &out
,
877 build_int_cst (m_limb_type
, ext
));
879 else if (min_prec
> prec
- rem
- 2 * limb_prec
)
881 /* Constant which has enough significant bits that it isn't
882 worth trying to save .rodata space by extending from smaller
886 type
= TREE_TYPE (op
);
888 /* If we have a guarantee the most significant partial limb
889 (if any) will be only accessed through handle_operand
890 with INTEGER_CST idx, we don't need to include the partial
892 type
= build_bitint_type (prec
- rem
, 1);
893 tree c
= tree_output_constant_def (fold_convert (type
, op
));
894 m_data
[m_data_cnt
] = c
;
895 m_data
[m_data_cnt
+ 1] = NULL_TREE
;
897 else if (m_upwards_2limb
)
899 /* Constant with smaller number of bits. Trade conditional
900 code for .rodata space by extending from smaller number. */
901 min_prec
= CEIL (min_prec
, 2 * limb_prec
) * (2 * limb_prec
);
902 tree type
= build_bitint_type (min_prec
, 1);
903 tree c
= tree_output_constant_def (fold_convert (type
, op
));
904 tree idx2
= make_ssa_name (sizetype
);
905 g
= gimple_build_assign (idx2
, PLUS_EXPR
, idx
, size_one_node
);
907 g
= gimple_build_cond (LT_EXPR
, idx
,
908 size_int (min_prec
/ limb_prec
),
909 NULL_TREE
, NULL_TREE
);
910 edge edge_true
, edge_false
;
911 if_then (g
, (min_prec
>= (prec
- rem
) / 2
912 ? profile_probability::likely ()
913 : profile_probability::unlikely ()),
914 edge_true
, edge_false
);
915 tree c1
= limb_access (TREE_TYPE (op
), c
, idx
, false);
916 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (c1
)), c1
);
918 c1
= gimple_assign_lhs (g
);
919 tree c2
= limb_access (TREE_TYPE (op
), c
, idx2
, false);
920 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (c2
)), c2
);
922 c2
= gimple_assign_lhs (g
);
923 tree c3
= build_int_cst (m_limb_type
, ext
);
924 m_gsi
= gsi_after_labels (edge_true
->dest
);
925 m_data
[m_data_cnt
] = make_ssa_name (m_limb_type
);
926 m_data
[m_data_cnt
+ 1] = make_ssa_name (m_limb_type
);
927 gphi
*phi
= create_phi_node (m_data
[m_data_cnt
],
929 add_phi_arg (phi
, c1
, edge_true
, UNKNOWN_LOCATION
);
930 add_phi_arg (phi
, c3
, edge_false
, UNKNOWN_LOCATION
);
931 phi
= create_phi_node (m_data
[m_data_cnt
+ 1], edge_true
->dest
);
932 add_phi_arg (phi
, c2
, edge_true
, UNKNOWN_LOCATION
);
933 add_phi_arg (phi
, c3
, edge_false
, UNKNOWN_LOCATION
);
937 /* Constant with smaller number of bits. Trade conditional
938 code for .rodata space by extending from smaller number.
939 Version for loops with random access to the limbs or
941 min_prec
= CEIL (min_prec
, limb_prec
) * limb_prec
;
943 if (min_prec
<= (unsigned) limb_prec
)
944 c
= fold_convert (m_limb_type
, op
);
947 tree type
= build_bitint_type (min_prec
, 1);
948 c
= tree_output_constant_def (fold_convert (type
, op
));
950 m_data
[m_data_cnt
] = c
;
951 m_data
[m_data_cnt
+ 1] = integer_type_node
;
953 t
= m_data
[m_data_cnt
];
954 if (m_data
[m_data_cnt
+ 1] == NULL_TREE
)
956 t
= limb_access (TREE_TYPE (op
), t
, idx
, false);
957 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (t
)), t
);
959 t
= gimple_assign_lhs (g
);
962 else if (m_data
[m_data_cnt
+ 1] == NULL_TREE
)
964 t
= limb_access (TREE_TYPE (op
), m_data
[m_data_cnt
], idx
, false);
965 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (t
)), t
);
967 t
= gimple_assign_lhs (g
);
970 t
= m_data
[m_data_cnt
+ 1];
971 if (m_data
[m_data_cnt
+ 1] == integer_type_node
)
973 unsigned int prec
= TYPE_PRECISION (TREE_TYPE (op
));
974 unsigned rem
= prec
% (2 * limb_prec
);
975 int ext
= tree_int_cst_sgn (op
) < 0 ? -1 : 0;
976 tree c
= m_data
[m_data_cnt
];
977 unsigned min_prec
= TYPE_PRECISION (TREE_TYPE (c
));
978 g
= gimple_build_cond (LT_EXPR
, idx
,
979 size_int (min_prec
/ limb_prec
),
980 NULL_TREE
, NULL_TREE
);
981 edge edge_true
, edge_false
;
982 if_then (g
, (min_prec
>= (prec
- rem
) / 2
983 ? profile_probability::likely ()
984 : profile_probability::unlikely ()),
985 edge_true
, edge_false
);
986 if (min_prec
> (unsigned) limb_prec
)
988 c
= limb_access (TREE_TYPE (op
), c
, idx
, false);
989 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (c
)), c
);
991 c
= gimple_assign_lhs (g
);
993 tree c2
= build_int_cst (m_limb_type
, ext
);
994 m_gsi
= gsi_after_labels (edge_true
->dest
);
995 t
= make_ssa_name (m_limb_type
);
996 gphi
*phi
= create_phi_node (t
, edge_true
->dest
);
997 add_phi_arg (phi
, c
, edge_true
, UNKNOWN_LOCATION
);
998 add_phi_arg (phi
, c2
, edge_false
, UNKNOWN_LOCATION
);
1007 /* Helper method, add a PHI node with VAL from preheader edge if
1008 inside of a loop and m_first. Keep state in a pair of m_data
1009 elements. If VAL_OUT is non-NULL, use that as PHI argument from
1010 the latch edge, otherwise create a new SSA_NAME for it and let
1011 caller initialize it. */
1014 bitint_large_huge::prepare_data_in_out (tree val
, tree idx
, tree
*data_out
,
1019 *data_out
= tree_fits_uhwi_p (idx
) ? NULL_TREE
: m_data
[m_data_cnt
+ 1];
1020 return m_data
[m_data_cnt
];
1023 *data_out
= NULL_TREE
;
1024 if (tree_fits_uhwi_p (idx
))
1026 m_data
.safe_push (val
);
1027 m_data
.safe_push (NULL_TREE
);
1031 tree in
= make_ssa_name (TREE_TYPE (val
));
1032 gphi
*phi
= create_phi_node (in
, m_bb
);
1033 edge e1
= find_edge (m_preheader_bb
, m_bb
);
1034 edge e2
= EDGE_PRED (m_bb
, 0);
1036 e2
= EDGE_PRED (m_bb
, 1);
1037 add_phi_arg (phi
, val
, e1
, UNKNOWN_LOCATION
);
1038 tree out
= val_out
? val_out
: make_ssa_name (TREE_TYPE (val
));
1039 add_phi_arg (phi
, out
, e2
, UNKNOWN_LOCATION
);
1040 m_data
.safe_push (in
);
1041 m_data
.safe_push (out
);
1045 /* Return VAL cast to TYPE. If VAL is INTEGER_CST, just
1046 convert it without emitting any code, otherwise emit
1047 the conversion statement before the current location. */
1050 bitint_large_huge::add_cast (tree type
, tree val
)
1052 if (TREE_CODE (val
) == INTEGER_CST
)
1053 return fold_convert (type
, val
);
1055 tree lhs
= make_ssa_name (type
);
1056 gimple
*g
= gimple_build_assign (lhs
, NOP_EXPR
, val
);
1061 /* Helper of handle_stmt method, handle PLUS_EXPR or MINUS_EXPR. */
1064 bitint_large_huge::handle_plus_minus (tree_code code
, tree rhs1
, tree rhs2
,
1067 tree lhs
, data_out
, ctype
;
1068 tree rhs1_type
= TREE_TYPE (rhs1
);
1070 tree data_in
= prepare_data_in_out (build_zero_cst (m_limb_type
), idx
,
1073 if (optab_handler (code
== PLUS_EXPR
? uaddc5_optab
: usubc5_optab
,
1074 TYPE_MODE (m_limb_type
)) != CODE_FOR_nothing
)
1076 ctype
= build_complex_type (m_limb_type
);
1077 if (!types_compatible_p (rhs1_type
, m_limb_type
))
1079 if (!TYPE_UNSIGNED (rhs1_type
))
1081 tree type
= unsigned_type_for (rhs1_type
);
1082 rhs1
= add_cast (type
, rhs1
);
1083 rhs2
= add_cast (type
, rhs2
);
1085 rhs1
= add_cast (m_limb_type
, rhs1
);
1086 rhs2
= add_cast (m_limb_type
, rhs2
);
1088 lhs
= make_ssa_name (ctype
);
1089 g
= gimple_build_call_internal (code
== PLUS_EXPR
1090 ? IFN_UADDC
: IFN_USUBC
,
1091 3, rhs1
, rhs2
, data_in
);
1092 gimple_call_set_lhs (g
, lhs
);
1094 if (data_out
== NULL_TREE
)
1095 data_out
= make_ssa_name (m_limb_type
);
1096 g
= gimple_build_assign (data_out
, IMAGPART_EXPR
,
1097 build1 (IMAGPART_EXPR
, m_limb_type
, lhs
));
1100 else if (types_compatible_p (rhs1_type
, m_limb_type
))
1102 ctype
= build_complex_type (m_limb_type
);
1103 lhs
= make_ssa_name (ctype
);
1104 g
= gimple_build_call_internal (code
== PLUS_EXPR
1105 ? IFN_ADD_OVERFLOW
: IFN_SUB_OVERFLOW
,
1107 gimple_call_set_lhs (g
, lhs
);
1109 if (data_out
== NULL_TREE
)
1110 data_out
= make_ssa_name (m_limb_type
);
1111 if (!integer_zerop (data_in
))
1113 rhs1
= make_ssa_name (m_limb_type
);
1114 g
= gimple_build_assign (rhs1
, REALPART_EXPR
,
1115 build1 (REALPART_EXPR
, m_limb_type
, lhs
));
1117 rhs2
= make_ssa_name (m_limb_type
);
1118 g
= gimple_build_assign (rhs2
, IMAGPART_EXPR
,
1119 build1 (IMAGPART_EXPR
, m_limb_type
, lhs
));
1121 lhs
= make_ssa_name (ctype
);
1122 g
= gimple_build_call_internal (code
== PLUS_EXPR
1126 gimple_call_set_lhs (g
, lhs
);
1128 data_in
= make_ssa_name (m_limb_type
);
1129 g
= gimple_build_assign (data_in
, IMAGPART_EXPR
,
1130 build1 (IMAGPART_EXPR
, m_limb_type
, lhs
));
1132 g
= gimple_build_assign (data_out
, PLUS_EXPR
, rhs2
, data_in
);
1137 g
= gimple_build_assign (data_out
, IMAGPART_EXPR
,
1138 build1 (IMAGPART_EXPR
, m_limb_type
, lhs
));
1144 tree in
= add_cast (rhs1_type
, data_in
);
1145 lhs
= make_ssa_name (rhs1_type
);
1146 g
= gimple_build_assign (lhs
, code
, rhs1
, rhs2
);
1148 rhs1
= make_ssa_name (rhs1_type
);
1149 g
= gimple_build_assign (rhs1
, code
, lhs
, in
);
1151 m_data
[m_data_cnt
] = NULL_TREE
;
1155 rhs1
= make_ssa_name (m_limb_type
);
1156 g
= gimple_build_assign (rhs1
, REALPART_EXPR
,
1157 build1 (REALPART_EXPR
, m_limb_type
, lhs
));
1159 if (!types_compatible_p (rhs1_type
, m_limb_type
))
1160 rhs1
= add_cast (rhs1_type
, rhs1
);
1161 m_data
[m_data_cnt
] = data_out
;
1166 /* Helper function for handle_stmt method, handle LSHIFT_EXPR by
1167 count in [0, limb_prec - 1] range. */
1170 bitint_large_huge::handle_lshift (tree rhs1
, tree rhs2
, tree idx
)
1172 unsigned HOST_WIDE_INT cnt
= tree_to_uhwi (rhs2
);
1173 gcc_checking_assert (cnt
< (unsigned) limb_prec
);
1177 tree lhs
, data_out
, rhs1_type
= TREE_TYPE (rhs1
);
1179 tree data_in
= prepare_data_in_out (build_zero_cst (m_limb_type
), idx
,
1182 if (!integer_zerop (data_in
))
1184 lhs
= make_ssa_name (m_limb_type
);
1185 g
= gimple_build_assign (lhs
, RSHIFT_EXPR
, data_in
,
1186 build_int_cst (unsigned_type_node
,
1189 if (!types_compatible_p (rhs1_type
, m_limb_type
))
1190 lhs
= add_cast (rhs1_type
, lhs
);
1193 if (types_compatible_p (rhs1_type
, m_limb_type
))
1195 if (data_out
== NULL_TREE
)
1196 data_out
= make_ssa_name (m_limb_type
);
1197 g
= gimple_build_assign (data_out
, rhs1
);
1200 if (cnt
< (unsigned) TYPE_PRECISION (rhs1_type
))
1202 lhs
= make_ssa_name (rhs1_type
);
1203 g
= gimple_build_assign (lhs
, LSHIFT_EXPR
, rhs1
, rhs2
);
1205 if (!integer_zerop (data_in
))
1208 lhs
= make_ssa_name (rhs1_type
);
1209 g
= gimple_build_assign (lhs
, BIT_IOR_EXPR
, rhs1
, data_in
);
1215 m_data
[m_data_cnt
] = data_out
;
1220 /* Helper function for handle_stmt method, handle an integral
1221 to integral conversion. */
1224 bitint_large_huge::handle_cast (tree lhs_type
, tree rhs1
, tree idx
)
1226 tree rhs_type
= TREE_TYPE (rhs1
);
1228 if (TREE_CODE (rhs1
) == SSA_NAME
1229 && TREE_CODE (lhs_type
) == BITINT_TYPE
1230 && TREE_CODE (rhs_type
) == BITINT_TYPE
1231 && bitint_precision_kind (lhs_type
) >= bitint_prec_large
1232 && bitint_precision_kind (rhs_type
) >= bitint_prec_large
)
1234 if (TYPE_PRECISION (rhs_type
) >= TYPE_PRECISION (lhs_type
)
1235 /* If lhs has bigger precision than rhs, we can use
1236 the simple case only if there is a guarantee that
1237 the most significant limb is handled in straight
1238 line code. If m_var_msb (on left shifts) or
1239 if m_upwards_2limb * limb_prec is equal to
1240 lhs precision that is not the case. */
1242 && tree_int_cst_equal (TYPE_SIZE (rhs_type
),
1243 TYPE_SIZE (lhs_type
))
1244 && (!m_upwards_2limb
1245 || (m_upwards_2limb
* limb_prec
1246 < TYPE_PRECISION (lhs_type
)))))
1248 rhs1
= handle_operand (rhs1
, idx
);
1249 if (tree_fits_uhwi_p (idx
))
1251 tree type
= limb_access_type (lhs_type
, idx
);
1252 if (!types_compatible_p (type
, TREE_TYPE (rhs1
)))
1253 rhs1
= add_cast (type
, rhs1
);
1258 /* Indexes lower than this don't need any special processing. */
1259 unsigned low
= ((unsigned) TYPE_PRECISION (rhs_type
)
1260 - !TYPE_UNSIGNED (rhs_type
)) / limb_prec
;
1261 /* Indexes >= than this always contain an extension. */
1262 unsigned high
= CEIL ((unsigned) TYPE_PRECISION (rhs_type
), limb_prec
);
1263 bool save_first
= m_first
;
1266 m_data
.safe_push (NULL_TREE
);
1267 m_data
.safe_push (NULL_TREE
);
1268 m_data
.safe_push (NULL_TREE
);
1269 if (TYPE_UNSIGNED (rhs_type
))
1270 /* No need to keep state between iterations. */
1272 else if (m_upwards
&& !m_upwards_2limb
)
1273 /* We need to keep state between iterations, but
1274 not within any loop, everything is straight line
1275 code with only increasing indexes. */
1277 else if (!m_upwards_2limb
)
1279 unsigned save_data_cnt
= m_data_cnt
;
1280 gimple_stmt_iterator save_gsi
= m_gsi
;
1282 if (gsi_end_p (m_gsi
))
1283 m_gsi
= gsi_after_labels (gsi_bb (m_gsi
));
1286 m_data_cnt
= save_data_cnt
+ 3;
1287 t
= handle_operand (rhs1
, size_int (low
));
1289 m_data
[save_data_cnt
+ 2]
1290 = build_int_cst (NULL_TREE
, m_data_cnt
);
1291 m_data_cnt
= save_data_cnt
;
1292 t
= add_cast (signed_type_for (m_limb_type
), t
);
1293 tree lpm1
= build_int_cst (unsigned_type_node
, limb_prec
- 1);
1294 tree n
= make_ssa_name (TREE_TYPE (t
));
1295 g
= gimple_build_assign (n
, RSHIFT_EXPR
, t
, lpm1
);
1297 m_data
[save_data_cnt
+ 1] = add_cast (m_limb_type
, n
);
1299 if (gsi_end_p (m_init_gsi
))
1300 m_init_gsi
= gsi_last_bb (gsi_bb (m_init_gsi
));
1302 gsi_prev (&m_init_gsi
);
1305 else if (m_upwards_2limb
* limb_prec
< TYPE_PRECISION (rhs_type
))
1306 /* We need to keep state between iterations, but
1307 fortunately not within the loop, only afterwards. */
1312 m_data
.truncate (m_data_cnt
);
1313 prepare_data_in_out (build_zero_cst (m_limb_type
), idx
, &out
);
1314 m_data
.safe_push (NULL_TREE
);
1318 unsigned save_data_cnt
= m_data_cnt
;
1320 if (!tree_fits_uhwi_p (idx
))
1323 && (m_upwards_2limb
* limb_prec
1324 <= ((unsigned) TYPE_PRECISION (rhs_type
)
1325 - !TYPE_UNSIGNED (rhs_type
))))
1327 rhs1
= handle_operand (rhs1
, idx
);
1329 m_data
[save_data_cnt
+ 2]
1330 = build_int_cst (NULL_TREE
, m_data_cnt
);
1331 m_first
= save_first
;
1334 bool single_comparison
1335 = low
== high
|| (m_upwards_2limb
&& (low
& 1) == m_first
);
1336 g
= gimple_build_cond (single_comparison
? LT_EXPR
: LE_EXPR
,
1337 idx
, size_int (low
), NULL_TREE
, NULL_TREE
);
1338 edge edge_true_true
, edge_true_false
, edge_false
;
1339 if_then_if_then_else (g
, (single_comparison
? NULL
1340 : gimple_build_cond (EQ_EXPR
, idx
,
1344 profile_probability::likely (),
1345 profile_probability::unlikely (),
1346 edge_true_true
, edge_true_false
, edge_false
);
1347 bool save_cast_conditional
= m_cast_conditional
;
1348 m_cast_conditional
= true;
1350 tree t1
= handle_operand (rhs1
, idx
), t2
= NULL_TREE
;
1352 m_data
[save_data_cnt
+ 2]
1353 = build_int_cst (NULL_TREE
, m_data_cnt
);
1354 tree ext
= NULL_TREE
;
1355 tree bitfld
= NULL_TREE
;
1356 if (!single_comparison
)
1358 m_gsi
= gsi_after_labels (edge_true_true
->src
);
1360 m_data_cnt
= save_data_cnt
+ 3;
1363 bitfld
= m_data
[m_bitfld_load
];
1364 m_data
[m_bitfld_load
] = m_data
[m_bitfld_load
+ 2];
1367 t2
= handle_operand (rhs1
, size_int (low
));
1368 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (t2
)))
1369 t2
= add_cast (m_limb_type
, t2
);
1370 if (!TYPE_UNSIGNED (rhs_type
) && m_upwards_2limb
)
1372 ext
= add_cast (signed_type_for (m_limb_type
), t2
);
1373 tree lpm1
= build_int_cst (unsigned_type_node
,
1375 tree n
= make_ssa_name (TREE_TYPE (ext
));
1376 g
= gimple_build_assign (n
, RSHIFT_EXPR
, ext
, lpm1
);
1378 ext
= add_cast (m_limb_type
, n
);
1382 if (TYPE_UNSIGNED (rhs_type
))
1383 t3
= build_zero_cst (m_limb_type
);
1384 else if (m_upwards_2limb
&& (save_first
|| ext
!= NULL_TREE
))
1385 t3
= m_data
[save_data_cnt
];
1387 t3
= m_data
[save_data_cnt
+ 1];
1388 m_gsi
= gsi_after_labels (edge_true_false
->dest
);
1389 t
= make_ssa_name (m_limb_type
);
1390 gphi
*phi
= create_phi_node (t
, edge_true_false
->dest
);
1391 add_phi_arg (phi
, t1
, edge_true_false
, UNKNOWN_LOCATION
);
1392 add_phi_arg (phi
, t3
, edge_false
, UNKNOWN_LOCATION
);
1394 add_phi_arg (phi
, t2
, edge_true_true
, UNKNOWN_LOCATION
);
1397 tree t4
= make_ssa_name (m_limb_type
);
1398 phi
= create_phi_node (t4
, edge_true_false
->dest
);
1399 add_phi_arg (phi
, build_zero_cst (m_limb_type
), edge_true_false
,
1401 add_phi_arg (phi
, m_data
[save_data_cnt
], edge_false
,
1403 add_phi_arg (phi
, ext
, edge_true_true
, UNKNOWN_LOCATION
);
1404 if (!save_cast_conditional
)
1406 g
= gimple_build_assign (m_data
[save_data_cnt
+ 1], t4
);
1410 for (basic_block bb
= gsi_bb (m_gsi
);;)
1412 edge e1
= single_succ_edge (bb
);
1413 edge e2
= find_edge (e1
->dest
, m_bb
), e3
;
1414 tree t5
= (e2
? m_data
[save_data_cnt
+ 1]
1415 : make_ssa_name (m_limb_type
));
1416 phi
= create_phi_node (t5
, e1
->dest
);
1418 FOR_EACH_EDGE (e3
, ei
, e1
->dest
->preds
)
1419 add_phi_arg (phi
, (e3
== e1
? t4
1420 : build_zero_cst (m_limb_type
)),
1421 e3
, UNKNOWN_LOCATION
);
1432 t4
= m_data
[m_bitfld_load
+ 1];
1434 t4
= make_ssa_name (m_limb_type
);
1435 phi
= create_phi_node (t4
, edge_true_false
->dest
);
1437 edge_true_true
? bitfld
: m_data
[m_bitfld_load
],
1438 edge_true_false
, UNKNOWN_LOCATION
);
1439 add_phi_arg (phi
, m_data
[m_bitfld_load
+ 2],
1440 edge_false
, UNKNOWN_LOCATION
);
1442 add_phi_arg (phi
, m_data
[m_bitfld_load
], edge_true_true
,
1444 m_data
[m_bitfld_load
] = t4
;
1445 m_data
[m_bitfld_load
+ 2] = t4
;
1448 m_cast_conditional
= save_cast_conditional
;
1449 m_first
= save_first
;
1454 if (tree_to_uhwi (idx
) < low
)
1456 t
= handle_operand (rhs1
, idx
);
1458 m_data
[save_data_cnt
+ 2]
1459 = build_int_cst (NULL_TREE
, m_data_cnt
);
1461 else if (tree_to_uhwi (idx
) < high
)
1463 t
= handle_operand (rhs1
, size_int (low
));
1465 m_data
[save_data_cnt
+ 2]
1466 = build_int_cst (NULL_TREE
, m_data_cnt
);
1467 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (t
)))
1468 t
= add_cast (m_limb_type
, t
);
1469 tree ext
= NULL_TREE
;
1470 if (!TYPE_UNSIGNED (rhs_type
) && m_upwards
)
1472 ext
= add_cast (signed_type_for (m_limb_type
), t
);
1473 tree lpm1
= build_int_cst (unsigned_type_node
,
1475 tree n
= make_ssa_name (TREE_TYPE (ext
));
1476 g
= gimple_build_assign (n
, RSHIFT_EXPR
, ext
, lpm1
);
1478 ext
= add_cast (m_limb_type
, n
);
1479 m_data
[save_data_cnt
+ 1] = ext
;
1484 if (TYPE_UNSIGNED (rhs_type
) && m_first
)
1486 handle_operand (rhs1
, size_zero_node
);
1487 m_data
[save_data_cnt
+ 2]
1488 = build_int_cst (NULL_TREE
, m_data_cnt
);
1491 m_data_cnt
= tree_to_uhwi (m_data
[save_data_cnt
+ 2]);
1492 if (TYPE_UNSIGNED (rhs_type
))
1493 t
= build_zero_cst (m_limb_type
);
1494 else if (m_bb
&& m_data
[save_data_cnt
])
1495 t
= m_data
[save_data_cnt
];
1497 t
= m_data
[save_data_cnt
+ 1];
1499 tree type
= limb_access_type (lhs_type
, idx
);
1500 if (!useless_type_conversion_p (type
, m_limb_type
))
1501 t
= add_cast (type
, t
);
1502 m_first
= save_first
;
1506 else if (TREE_CODE (lhs_type
) == BITINT_TYPE
1507 && bitint_precision_kind (lhs_type
) >= bitint_prec_large
1508 && INTEGRAL_TYPE_P (rhs_type
))
1510 /* Add support for 3 or more limbs filled in from normal integral
1511 type if this assert fails. If no target chooses limb mode smaller
1512 than half of largest supported normal integral type, this will not
1514 gcc_assert (TYPE_PRECISION (rhs_type
) <= 2 * limb_prec
);
1515 tree r1
= NULL_TREE
, r2
= NULL_TREE
, rext
= NULL_TREE
;
1518 gimple_stmt_iterator save_gsi
= m_gsi
;
1520 if (gsi_end_p (m_gsi
))
1521 m_gsi
= gsi_after_labels (gsi_bb (m_gsi
));
1524 if (TREE_CODE (rhs_type
) == BITINT_TYPE
1525 && bitint_precision_kind (rhs_type
) == bitint_prec_middle
)
1527 tree type
= NULL_TREE
;
1528 rhs1
= maybe_cast_middle_bitint (&m_gsi
, rhs1
, type
);
1529 rhs_type
= TREE_TYPE (rhs1
);
1532 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (rhs1
)))
1533 r1
= add_cast (m_limb_type
, rhs1
);
1534 if (TYPE_PRECISION (rhs_type
) > limb_prec
)
1536 g
= gimple_build_assign (make_ssa_name (rhs_type
),
1538 build_int_cst (unsigned_type_node
,
1541 r2
= add_cast (m_limb_type
, gimple_assign_lhs (g
));
1543 if (TYPE_UNSIGNED (rhs_type
))
1544 rext
= build_zero_cst (m_limb_type
);
1547 rext
= add_cast (signed_type_for (m_limb_type
), r2
? r2
: r1
);
1548 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (rext
)),
1550 build_int_cst (unsigned_type_node
,
1553 rext
= add_cast (m_limb_type
, gimple_assign_lhs (g
));
1556 if (gsi_end_p (m_init_gsi
))
1557 m_init_gsi
= gsi_last_bb (gsi_bb (m_init_gsi
));
1559 gsi_prev (&m_init_gsi
);
1563 if (m_upwards_2limb
)
1568 prepare_data_in_out (r1
, idx
, &out1
, rext
);
1569 if (TYPE_PRECISION (rhs_type
) > limb_prec
)
1571 prepare_data_in_out (r2
, idx
, &out2
, rext
);
1574 m_data
[m_data_cnt
+ 1] = t
;
1577 m_data
[m_data_cnt
+ 1] = rext
;
1578 m_data
.safe_push (rext
);
1579 t
= m_data
[m_data_cnt
];
1581 else if (!tree_fits_uhwi_p (idx
))
1582 t
= m_data
[m_data_cnt
+ 1];
1585 tree type
= limb_access_type (lhs_type
, idx
);
1586 t
= m_data
[m_data_cnt
+ 2];
1587 if (!useless_type_conversion_p (type
, m_limb_type
))
1588 t
= add_cast (type
, t
);
1595 m_data
.safe_push (r1
);
1596 m_data
.safe_push (r2
);
1597 m_data
.safe_push (rext
);
1599 if (tree_fits_uhwi_p (idx
))
1601 tree type
= limb_access_type (lhs_type
, idx
);
1602 if (integer_zerop (idx
))
1603 t
= m_data
[m_data_cnt
];
1604 else if (TYPE_PRECISION (rhs_type
) > limb_prec
1605 && integer_onep (idx
))
1606 t
= m_data
[m_data_cnt
+ 1];
1608 t
= m_data
[m_data_cnt
+ 2];
1609 if (!useless_type_conversion_p (type
, m_limb_type
))
1610 t
= add_cast (type
, t
);
1614 g
= gimple_build_cond (NE_EXPR
, idx
, size_zero_node
,
1615 NULL_TREE
, NULL_TREE
);
1616 edge e2
, e3
, e4
= NULL
;
1617 if_then (g
, profile_probability::likely (), e2
, e3
);
1618 if (m_data
[m_data_cnt
+ 1])
1620 g
= gimple_build_cond (EQ_EXPR
, idx
, size_one_node
,
1621 NULL_TREE
, NULL_TREE
);
1623 edge e5
= split_block (gsi_bb (m_gsi
), g
);
1624 e4
= make_edge (e5
->src
, e2
->dest
, EDGE_TRUE_VALUE
);
1625 e2
= find_edge (e5
->dest
, e2
->dest
);
1626 e4
->probability
= profile_probability::unlikely ();
1627 e5
->flags
= EDGE_FALSE_VALUE
;
1628 e5
->probability
= e4
->probability
.invert ();
1630 m_gsi
= gsi_after_labels (e2
->dest
);
1631 t
= make_ssa_name (m_limb_type
);
1632 gphi
*phi
= create_phi_node (t
, e2
->dest
);
1633 add_phi_arg (phi
, m_data
[m_data_cnt
+ 2], e2
, UNKNOWN_LOCATION
);
1634 add_phi_arg (phi
, m_data
[m_data_cnt
], e3
, UNKNOWN_LOCATION
);
1636 add_phi_arg (phi
, m_data
[m_data_cnt
+ 1], e4
, UNKNOWN_LOCATION
);
1643 /* Helper function for handle_stmt method, handle a load from memory. */
1646 bitint_large_huge::handle_load (gimple
*stmt
, tree idx
)
1648 tree rhs1
= gimple_assign_rhs1 (stmt
);
1649 tree rhs_type
= TREE_TYPE (rhs1
);
1650 bool eh
= stmt_ends_bb_p (stmt
);
1651 edge eh_edge
= NULL
;
1657 basic_block bb
= gimple_bb (stmt
);
1659 FOR_EACH_EDGE (eh_edge
, ei
, bb
->succs
)
1660 if (eh_edge
->flags
& EDGE_EH
)
1664 if (TREE_CODE (rhs1
) == COMPONENT_REF
1665 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1
, 1)))
1667 tree fld
= TREE_OPERAND (rhs1
, 1);
1668 /* For little-endian, we can allow as inputs bit-fields
1669 which start at a limb boundary. */
1670 gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld
)));
1671 if (DECL_OFFSET_ALIGN (fld
) >= TYPE_ALIGN (TREE_TYPE (rhs1
))
1672 && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld
)) % limb_prec
) == 0)
1674 /* Even if DECL_FIELD_BIT_OFFSET (fld) is a multiple of UNITS_PER_BIT,
1675 handle it normally for now. */
1676 if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld
)) % BITS_PER_UNIT
) == 0)
1678 tree repr
= DECL_BIT_FIELD_REPRESENTATIVE (fld
);
1679 poly_int64 bitoffset
;
1680 poly_uint64 field_offset
, repr_offset
;
1681 bool var_field_off
= false;
1682 if (poly_int_tree_p (DECL_FIELD_OFFSET (fld
), &field_offset
)
1683 && poly_int_tree_p (DECL_FIELD_OFFSET (repr
), &repr_offset
))
1684 bitoffset
= (field_offset
- repr_offset
) * BITS_PER_UNIT
;
1688 var_field_off
= true;
1690 bitoffset
+= (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld
))
1691 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr
)));
1692 tree nrhs1
= build3 (COMPONENT_REF
, TREE_TYPE (repr
),
1693 TREE_OPERAND (rhs1
, 0), repr
,
1694 var_field_off
? TREE_OPERAND (rhs1
, 2) : NULL_TREE
);
1695 HOST_WIDE_INT bo
= bitoffset
.to_constant ();
1696 unsigned bo_idx
= (unsigned HOST_WIDE_INT
) bo
/ limb_prec
;
1697 unsigned bo_bit
= (unsigned HOST_WIDE_INT
) bo
% limb_prec
;
1702 gimple_stmt_iterator save_gsi
= m_gsi
;
1704 if (gsi_end_p (m_gsi
))
1705 m_gsi
= gsi_after_labels (gsi_bb (m_gsi
));
1708 tree t
= limb_access (rhs_type
, nrhs1
, size_int (bo_idx
), true);
1709 tree iv
= make_ssa_name (m_limb_type
);
1710 g
= gimple_build_assign (iv
, t
);
1714 maybe_duplicate_eh_stmt (g
, stmt
);
1717 edge e
= split_block (gsi_bb (m_gsi
), g
);
1718 make_edge (e
->src
, eh_edge
->dest
, EDGE_EH
)->probability
1719 = profile_probability::very_unlikely ();
1720 m_gsi
= gsi_after_labels (e
->dest
);
1721 if (gsi_bb (save_gsi
) == e
->src
)
1723 if (gsi_end_p (save_gsi
))
1724 save_gsi
= gsi_end_bb (e
->dest
);
1726 save_gsi
= gsi_for_stmt (gsi_stmt (save_gsi
));
1728 if (m_preheader_bb
== e
->src
)
1729 m_preheader_bb
= e
->dest
;
1733 if (gsi_end_p (m_init_gsi
))
1734 m_init_gsi
= gsi_last_bb (gsi_bb (m_init_gsi
));
1736 gsi_prev (&m_init_gsi
);
1739 prepare_data_in_out (iv
, idx
, &out
);
1740 out
= m_data
[m_data_cnt
];
1741 m_data
.safe_push (out
);
1745 m_data
.safe_push (NULL_TREE
);
1746 m_data
.safe_push (NULL_TREE
);
1747 m_data
.safe_push (NULL_TREE
);
1751 tree nidx0
= NULL_TREE
, nidx1
;
1752 tree iv
= m_data
[m_data_cnt
];
1753 if (m_cast_conditional
&& iv
)
1755 gcc_assert (!m_bitfld_load
);
1756 m_bitfld_load
= m_data_cnt
;
1758 if (tree_fits_uhwi_p (idx
))
1760 unsigned prec
= TYPE_PRECISION (rhs_type
);
1761 unsigned HOST_WIDE_INT i
= tree_to_uhwi (idx
);
1762 gcc_assert (i
* limb_prec
< prec
);
1763 nidx1
= size_int (i
+ bo_idx
+ 1);
1764 if ((i
+ 1) * limb_prec
> prec
)
1767 if (prec
+ bo_bit
<= (unsigned) limb_prec
)
1771 nidx0
= size_int (i
+ bo_idx
);
1781 nidx0
= make_ssa_name (sizetype
);
1782 g
= gimple_build_assign (nidx0
, PLUS_EXPR
, idx
,
1787 nidx1
= make_ssa_name (sizetype
);
1788 g
= gimple_build_assign (nidx1
, PLUS_EXPR
, idx
,
1789 size_int (bo_idx
+ 1));
1793 tree iv2
= NULL_TREE
;
1796 tree t
= limb_access (rhs_type
, nrhs1
, nidx0
, true);
1797 iv
= make_ssa_name (m_limb_type
);
1798 g
= gimple_build_assign (iv
, t
);
1804 bool conditional
= m_var_msb
&& !tree_fits_uhwi_p (idx
);
1805 unsigned prec
= TYPE_PRECISION (rhs_type
);
1808 if ((prec
% limb_prec
) == 0
1809 || ((prec
% limb_prec
) + bo_bit
> (unsigned) limb_prec
))
1810 conditional
= false;
1812 edge edge_true
= NULL
, edge_false
= NULL
;
1815 g
= gimple_build_cond (NE_EXPR
, idx
,
1816 size_int (prec
/ limb_prec
),
1817 NULL_TREE
, NULL_TREE
);
1818 if_then (g
, profile_probability::likely (),
1819 edge_true
, edge_false
);
1821 tree t
= limb_access (rhs_type
, nrhs1
, nidx1
, true);
1825 && !tree_fits_uhwi_p (idx
))
1826 iv2
= m_data
[m_data_cnt
+ 1];
1828 iv2
= make_ssa_name (m_limb_type
);
1829 g
= gimple_build_assign (iv2
, t
);
1833 maybe_duplicate_eh_stmt (g
, stmt
);
1836 edge e
= split_block (gsi_bb (m_gsi
), g
);
1837 m_gsi
= gsi_after_labels (e
->dest
);
1838 make_edge (e
->src
, eh_edge
->dest
, EDGE_EH
)->probability
1839 = profile_probability::very_unlikely ();
1844 tree iv3
= make_ssa_name (m_limb_type
);
1846 edge_true
= find_edge (gsi_bb (m_gsi
), edge_false
->dest
);
1847 gphi
*phi
= create_phi_node (iv3
, edge_true
->dest
);
1848 add_phi_arg (phi
, iv2
, edge_true
, UNKNOWN_LOCATION
);
1849 add_phi_arg (phi
, build_zero_cst (m_limb_type
),
1850 edge_false
, UNKNOWN_LOCATION
);
1851 m_gsi
= gsi_after_labels (edge_true
->dest
);
1854 g
= gimple_build_assign (make_ssa_name (m_limb_type
), RSHIFT_EXPR
,
1855 iv
, build_int_cst (unsigned_type_node
, bo_bit
));
1857 iv
= gimple_assign_lhs (g
);
1860 g
= gimple_build_assign (make_ssa_name (m_limb_type
), LSHIFT_EXPR
,
1861 iv2
, build_int_cst (unsigned_type_node
,
1862 limb_prec
- bo_bit
));
1864 g
= gimple_build_assign (make_ssa_name (m_limb_type
), BIT_IOR_EXPR
,
1865 gimple_assign_lhs (g
), iv
);
1867 iv
= gimple_assign_lhs (g
);
1868 if (m_data
[m_data_cnt
])
1869 m_data
[m_data_cnt
] = iv2
;
1871 if (tree_fits_uhwi_p (idx
))
1873 tree atype
= limb_access_type (rhs_type
, idx
);
1874 if (!useless_type_conversion_p (atype
, TREE_TYPE (iv
)))
1875 iv
= add_cast (atype
, iv
);
1882 /* Use write_p = true for loads with EH edges to make
1883 sure limb_access doesn't add a cast as separate
1884 statement after it. */
1885 rhs1
= limb_access (rhs_type
, rhs1
, idx
, eh
);
1886 tree ret
= make_ssa_name (TREE_TYPE (rhs1
));
1887 g
= gimple_build_assign (ret
, rhs1
);
1891 maybe_duplicate_eh_stmt (g
, stmt
);
1894 edge e
= split_block (gsi_bb (m_gsi
), g
);
1895 m_gsi
= gsi_after_labels (e
->dest
);
1896 make_edge (e
->src
, eh_edge
->dest
, EDGE_EH
)->probability
1897 = profile_probability::very_unlikely ();
1899 if (tree_fits_uhwi_p (idx
))
1901 tree atype
= limb_access_type (rhs_type
, idx
);
1902 if (!useless_type_conversion_p (atype
, TREE_TYPE (rhs1
)))
1903 ret
= add_cast (atype
, ret
);
1909 /* Return a limb IDX from a mergeable statement STMT. */
1912 bitint_large_huge::handle_stmt (gimple
*stmt
, tree idx
)
1914 tree lhs
, rhs1
, rhs2
= NULL_TREE
;
1916 switch (gimple_code (stmt
))
1919 if (gimple_assign_load_p (stmt
))
1920 return handle_load (stmt
, idx
);
1921 switch (gimple_assign_rhs_code (stmt
))
1926 rhs2
= handle_operand (gimple_assign_rhs2 (stmt
), idx
);
1929 rhs1
= handle_operand (gimple_assign_rhs1 (stmt
), idx
);
1930 lhs
= make_ssa_name (TREE_TYPE (rhs1
));
1931 g
= gimple_build_assign (lhs
, gimple_assign_rhs_code (stmt
),
1937 rhs1
= handle_operand (gimple_assign_rhs1 (stmt
), idx
);
1938 rhs2
= handle_operand (gimple_assign_rhs2 (stmt
), idx
);
1939 return handle_plus_minus (gimple_assign_rhs_code (stmt
),
1942 rhs2
= handle_operand (gimple_assign_rhs1 (stmt
), idx
);
1943 rhs1
= build_zero_cst (TREE_TYPE (rhs2
));
1944 return handle_plus_minus (MINUS_EXPR
, rhs1
, rhs2
, idx
);
1946 return handle_lshift (handle_operand (gimple_assign_rhs1 (stmt
),
1948 gimple_assign_rhs2 (stmt
), idx
);
1951 return handle_operand (gimple_assign_rhs1 (stmt
), idx
);
1953 case VIEW_CONVERT_EXPR
:
1954 return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt
)),
1955 gimple_assign_rhs1 (stmt
), idx
);
1966 /* Return minimum precision of OP at STMT.
1967 Positive value is minimum precision above which all bits
1968 are zero, negative means all bits above negation of the
1969 value are copies of the sign bit. */
1972 range_to_prec (tree op
, gimple
*stmt
)
1976 tree type
= TREE_TYPE (op
);
1977 unsigned int prec
= TYPE_PRECISION (type
);
1980 || !get_range_query (cfun
)->range_of_expr (r
, op
, stmt
)
1981 || r
.undefined_p ())
1983 if (TYPE_UNSIGNED (type
))
1986 return MIN ((int) -prec
, -2);
1989 if (!TYPE_UNSIGNED (TREE_TYPE (op
)))
1991 w
= r
.lower_bound ();
1994 int min_prec1
= wi::min_precision (w
, SIGNED
);
1995 w
= r
.upper_bound ();
1996 int min_prec2
= wi::min_precision (w
, SIGNED
);
1997 int min_prec
= MAX (min_prec1
, min_prec2
);
1998 return MIN (-min_prec
, -2);
2002 w
= r
.upper_bound ();
2003 int min_prec
= wi::min_precision (w
, UNSIGNED
);
2004 return MAX (min_prec
, 1);
2007 /* Return address of the first limb of OP and write into *PREC
2008 its precision. If positive, the operand is zero extended
2009 from that precision, if it is negative, the operand is sign-extended
2010 from -*PREC. If PREC_STORED is NULL, it is the toplevel call,
2011 otherwise *PREC_STORED is prec from the innermost call without
2012 range optimizations. */
2015 bitint_large_huge::handle_operand_addr (tree op
, gimple
*stmt
,
2016 int *prec_stored
, int *prec
)
2019 location_t loc_save
= m_loc
;
2020 if ((TREE_CODE (TREE_TYPE (op
)) != BITINT_TYPE
2021 || bitint_precision_kind (TREE_TYPE (op
)) < bitint_prec_large
)
2022 && TREE_CODE (op
) != INTEGER_CST
)
2025 *prec
= range_to_prec (op
, stmt
);
2026 bitint_prec_kind kind
= bitint_prec_small
;
2027 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (op
)));
2028 if (TREE_CODE (TREE_TYPE (op
)) == BITINT_TYPE
)
2029 kind
= bitint_precision_kind (TREE_TYPE (op
));
2030 if (kind
== bitint_prec_middle
)
2032 tree type
= NULL_TREE
;
2033 op
= maybe_cast_middle_bitint (&m_gsi
, op
, type
);
2035 tree op_type
= TREE_TYPE (op
);
2036 unsigned HOST_WIDE_INT nelts
2037 = CEIL (TYPE_PRECISION (op_type
), limb_prec
);
2038 /* Add support for 3 or more limbs filled in from normal
2039 integral type if this assert fails. If no target chooses
2040 limb mode smaller than half of largest supported normal
2041 integral type, this will not be needed. */
2042 gcc_assert (nelts
<= 2);
2044 *prec_stored
= (TYPE_UNSIGNED (op_type
)
2045 ? TYPE_PRECISION (op_type
)
2046 : -TYPE_PRECISION (op_type
));
2047 if (*prec
<= limb_prec
&& *prec
>= -limb_prec
)
2052 if (TYPE_UNSIGNED (op_type
))
2054 if (*prec_stored
> limb_prec
)
2055 *prec_stored
= limb_prec
;
2057 else if (*prec_stored
< -limb_prec
)
2058 *prec_stored
= -limb_prec
;
2061 tree atype
= build_array_type_nelts (m_limb_type
, nelts
);
2062 tree var
= create_tmp_var (atype
);
2064 if (!useless_type_conversion_p (m_limb_type
, op_type
))
2065 t1
= add_cast (m_limb_type
, t1
);
2066 tree v
= build4 (ARRAY_REF
, m_limb_type
, var
, size_zero_node
,
2067 NULL_TREE
, NULL_TREE
);
2068 gimple
*g
= gimple_build_assign (v
, t1
);
2072 tree lp
= build_int_cst (unsigned_type_node
, limb_prec
);
2073 g
= gimple_build_assign (make_ssa_name (op_type
),
2074 RSHIFT_EXPR
, op
, lp
);
2076 tree t2
= gimple_assign_lhs (g
);
2077 t2
= add_cast (m_limb_type
, t2
);
2078 v
= build4 (ARRAY_REF
, m_limb_type
, var
, size_one_node
,
2079 NULL_TREE
, NULL_TREE
);
2080 g
= gimple_build_assign (v
, t2
);
2083 tree ret
= build_fold_addr_expr (var
);
2084 if (!stmt_ends_bb_p (gsi_stmt (m_gsi
)))
2086 tree clobber
= build_clobber (atype
, CLOBBER_STORAGE_END
);
2087 g
= gimple_build_assign (var
, clobber
);
2088 gsi_insert_after (&m_gsi
, g
, GSI_SAME_STMT
);
2093 switch (TREE_CODE (op
))
2097 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (op
)))
2099 gimple
*g
= SSA_NAME_DEF_STMT (op
);
2101 m_loc
= gimple_location (g
);
2102 if (gimple_assign_load_p (g
))
2104 *prec
= range_to_prec (op
, NULL
);
2106 *prec_stored
= (TYPE_UNSIGNED (TREE_TYPE (op
))
2107 ? TYPE_PRECISION (TREE_TYPE (op
))
2108 : -TYPE_PRECISION (TREE_TYPE (op
)));
2109 ret
= build_fold_addr_expr (gimple_assign_rhs1 (g
));
2110 ret
= force_gimple_operand_gsi (&m_gsi
, ret
, true,
2111 NULL_TREE
, true, GSI_SAME_STMT
);
2113 else if (gimple_code (g
) == GIMPLE_NOP
)
2115 *prec
= TYPE_UNSIGNED (TREE_TYPE (op
)) ? limb_prec
: -limb_prec
;
2117 *prec_stored
= *prec
;
2118 tree var
= create_tmp_var (m_limb_type
);
2119 TREE_ADDRESSABLE (var
) = 1;
2120 ret
= build_fold_addr_expr (var
);
2121 if (!stmt_ends_bb_p (gsi_stmt (m_gsi
)))
2123 tree clobber
= build_clobber (m_limb_type
,
2124 CLOBBER_STORAGE_END
);
2125 g
= gimple_build_assign (var
, clobber
);
2126 gsi_insert_after (&m_gsi
, g
, GSI_SAME_STMT
);
2131 gcc_assert (gimple_assign_cast_p (g
));
2132 tree rhs1
= gimple_assign_rhs1 (g
);
2133 bitint_prec_kind kind
= bitint_prec_small
;
2134 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (rhs1
)));
2135 if (TREE_CODE (TREE_TYPE (rhs1
)) == BITINT_TYPE
)
2136 kind
= bitint_precision_kind (TREE_TYPE (rhs1
));
2137 if (kind
>= bitint_prec_large
)
2139 tree lhs_type
= TREE_TYPE (op
);
2140 tree rhs_type
= TREE_TYPE (rhs1
);
2141 int prec_stored_val
= 0;
2142 ret
= handle_operand_addr (rhs1
, g
, &prec_stored_val
, prec
);
2143 if (TYPE_PRECISION (lhs_type
) > TYPE_PRECISION (rhs_type
))
2145 if (TYPE_UNSIGNED (lhs_type
)
2146 && !TYPE_UNSIGNED (rhs_type
))
2147 gcc_assert (*prec
>= 0 || prec_stored
== NULL
);
2151 if (*prec
> 0 && *prec
< TYPE_PRECISION (lhs_type
))
2153 else if (TYPE_UNSIGNED (lhs_type
))
2155 gcc_assert (*prec
> 0
2156 || prec_stored_val
> 0
2157 || (-prec_stored_val
2158 >= TYPE_PRECISION (lhs_type
)));
2159 *prec
= TYPE_PRECISION (lhs_type
);
2161 else if (*prec
< 0 && -*prec
< TYPE_PRECISION (lhs_type
))
2164 *prec
= -TYPE_PRECISION (lhs_type
);
2179 int p
= var_to_partition (m_map
, op
);
2180 gcc_assert (m_vars
[p
] != NULL_TREE
);
2181 *prec
= range_to_prec (op
, stmt
);
2183 *prec_stored
= (TYPE_UNSIGNED (TREE_TYPE (op
))
2184 ? TYPE_PRECISION (TREE_TYPE (op
))
2185 : -TYPE_PRECISION (TREE_TYPE (op
)));
2186 return build_fold_addr_expr (m_vars
[p
]);
2189 unsigned int min_prec
, mp
;
2191 w
= wi::to_wide (op
);
2192 if (tree_int_cst_sgn (op
) >= 0)
2194 min_prec
= wi::min_precision (w
, UNSIGNED
);
2195 *prec
= MAX (min_prec
, 1);
2199 min_prec
= wi::min_precision (w
, SIGNED
);
2200 *prec
= MIN ((int) -min_prec
, -2);
2202 mp
= CEIL (min_prec
, limb_prec
) * limb_prec
;
2205 if (mp
>= (unsigned) TYPE_PRECISION (TREE_TYPE (op
)))
2206 type
= TREE_TYPE (op
);
2208 type
= build_bitint_type (mp
, 1);
2209 if (TREE_CODE (type
) != BITINT_TYPE
2210 || bitint_precision_kind (type
) == bitint_prec_small
)
2212 if (TYPE_PRECISION (type
) <= limb_prec
)
2215 /* This case is for targets which e.g. have 64-bit
2216 limb but categorize up to 128-bits _BitInts as
2217 small. We could use type of m_limb_type[2] and
2218 similar instead to save space. */
2219 type
= build_bitint_type (mid_min_prec
, 1);
2223 if (tree_int_cst_sgn (op
) >= 0)
2224 *prec_stored
= MAX (TYPE_PRECISION (type
), 1);
2226 *prec_stored
= MIN ((int) -TYPE_PRECISION (type
), -2);
2228 op
= tree_output_constant_def (fold_convert (type
, op
));
2229 return build_fold_addr_expr (op
);
2235 /* Helper function, create a loop before the current location,
2236 start with sizetype INIT value from the preheader edge. Return
2237 a PHI result and set *IDX_NEXT to SSA_NAME it creates and uses
2238 from the latch edge. */
2241 bitint_large_huge::create_loop (tree init
, tree
*idx_next
)
2243 if (!gsi_end_p (m_gsi
))
2246 m_gsi
= gsi_last_bb (gsi_bb (m_gsi
));
2247 edge e1
= split_block (gsi_bb (m_gsi
), gsi_stmt (m_gsi
));
2248 edge e2
= split_block (e1
->dest
, (gimple
*) NULL
);
2249 edge e3
= make_edge (e1
->dest
, e1
->dest
, EDGE_TRUE_VALUE
);
2250 e3
->probability
= profile_probability::very_unlikely ();
2251 e2
->flags
= EDGE_FALSE_VALUE
;
2252 e2
->probability
= e3
->probability
.invert ();
2253 tree idx
= make_ssa_name (sizetype
);
2254 gphi
*phi
= create_phi_node (idx
, e1
->dest
);
2255 add_phi_arg (phi
, init
, e1
, UNKNOWN_LOCATION
);
2256 *idx_next
= make_ssa_name (sizetype
);
2257 add_phi_arg (phi
, *idx_next
, e3
, UNKNOWN_LOCATION
);
2258 m_gsi
= gsi_after_labels (e1
->dest
);
2260 m_preheader_bb
= e1
->src
;
2261 class loop
*loop
= alloc_loop ();
2262 loop
->header
= e1
->dest
;
2263 add_loop (loop
, e1
->src
->loop_father
);
/* NOTE(review): corrupted extraction — original line numbers are embedded
   as prefixes, statements are split across lines, and many original lines
   (conditions, braces, gsi insertions) are missing.  Preserved verbatim;
   comments only added.  Do not attempt to compile this text as-is.  */
2267 /* Lower large/huge _BitInt statement mergeable or similar STMT which can be
2268 lowered using iteration from the least significant limb up to the most
2269 significant limb. For large _BitInt it is emitted as straight line code
2270 before current location, for huge _BitInt as a loop handling two limbs
2271 at once, followed by handling up to limbs in straight line code (at most
2272 one full and one partial limb). It can also handle EQ_EXPR/NE_EXPR
2273 comparisons, in that case CMP_CODE should be the comparison code and
2274 CMP_OP1/CMP_OP2 the comparison operands. */
2277 bitint_large_huge::lower_mergeable_stmt (gimple
*stmt
, tree_code
&cmp_code
,
2278 tree cmp_op1
, tree cmp_op2
)
/* eq_p: true when lowering an EQ_EXPR/NE_EXPR comparison rather than an
   arithmetic statement; the type is taken from the comparison operand
   in that case, otherwise from the assignment lhs.  */
2280 bool eq_p
= cmp_code
!= ERROR_MARK
;
2283 type
= TREE_TYPE (cmp_op1
);
2285 type
= TREE_TYPE (gimple_assign_lhs (stmt
));
2286 gcc_assert (TREE_CODE (type
) == BITINT_TYPE
);
2287 bitint_prec_kind kind
= bitint_precision_kind (type
);
2288 gcc_assert (kind
>= bitint_prec_large
);
2290 tree lhs
= gimple_get_lhs (stmt
);
2291 tree rhs1
, lhs_type
= lhs
? TREE_TYPE (lhs
) : NULL_TREE
;
/* For a large/huge _BitInt SSA lhs, redirect the store into the
   coalesced partition variable m_vars[p].  */
2293 && TREE_CODE (lhs
) == SSA_NAME
2294 && TREE_CODE (TREE_TYPE (lhs
)) == BITINT_TYPE
2295 && bitint_precision_kind (TREE_TYPE (lhs
)) >= bitint_prec_large
)
2297 int p
= var_to_partition (m_map
, lhs
);
2298 gcc_assert (m_vars
[p
] != NULL_TREE
);
2299 m_lhs
= lhs
= m_vars
[p
];
2301 unsigned cnt
, rem
= 0, end
= 0, prec
= TYPE_PRECISION (type
);
2303 tree ext
= NULL_TREE
, store_operand
= NULL_TREE
;
/* bo_idx/bo_bit: limb index and in-limb bit offset of a bit-field
   destination; bf_cur/bf_next carry partial limbs across iterations.  */
2305 basic_block eh_pad
= NULL
;
2306 tree nlhs
= NULL_TREE
;
2307 unsigned HOST_WIDE_INT bo_idx
= 0;
2308 unsigned HOST_WIDE_INT bo_bit
= 0;
2309 tree bf_cur
= NULL_TREE
, bf_next
= NULL_TREE
;
2310 if (gimple_store_p (stmt
))
2312 store_operand
= gimple_assign_rhs1 (stmt
);
2313 eh
= stmt_ends_bb_p (stmt
);
/* Find the EH landing pad successor if the store can throw, so each
   emitted store can be given an EH edge to it below.  */
2318 basic_block bb
= gimple_bb (stmt
);
2320 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
2321 if (e
->flags
& EDGE_EH
)
/* Stores into bit-fields: rewrite the access in terms of the
   DECL_BIT_FIELD_REPRESENTATIVE and compute bo_idx/bo_bit.  */
2327 if (TREE_CODE (lhs
) == COMPONENT_REF
2328 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs
, 1)))
2330 tree fld
= TREE_OPERAND (lhs
, 1);
2331 gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld
)));
2332 tree repr
= DECL_BIT_FIELD_REPRESENTATIVE (fld
);
2333 poly_int64 bitoffset
;
2334 poly_uint64 field_offset
, repr_offset
;
2335 if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld
)) % BITS_PER_UNIT
) == 0)
2339 bool var_field_off
= false;
2340 if (poly_int_tree_p (DECL_FIELD_OFFSET (fld
), &field_offset
)
2341 && poly_int_tree_p (DECL_FIELD_OFFSET (repr
), &repr_offset
))
2342 bitoffset
= (field_offset
- repr_offset
) * BITS_PER_UNIT
;
2346 var_field_off
= true;
2348 bitoffset
+= (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld
))
2349 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr
)));
2350 nlhs
= build3 (COMPONENT_REF
, TREE_TYPE (repr
),
2351 TREE_OPERAND (lhs
, 0), repr
,
2353 ? TREE_OPERAND (lhs
, 2) : NULL_TREE
);
2354 HOST_WIDE_INT bo
= bitoffset
.to_constant ();
2355 bo_idx
= (unsigned HOST_WIDE_INT
) bo
/ limb_prec
;
2356 bo_bit
= (unsigned HOST_WIDE_INT
) bo
% limb_prec
;
/* If the (stored) value is produced by a cast, peel it off here.  */
2361 && TREE_CODE (store_operand
) == SSA_NAME
2363 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (store_operand
)))
2364 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (store_operand
)))
2365 || gimple_assign_cast_p (stmt
))
2367 rhs1
= gimple_assign_rhs1 (store_operand
2368 ? SSA_NAME_DEF_STMT (store_operand
)
2370 /* Optimize mergeable ops ending with widening cast to _BitInt
2371 (or followed by store). We can lower just the limbs of the
2372 cast operand and widen afterwards. */
2373 if (TREE_CODE (rhs1
) == SSA_NAME
2375 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (rhs1
)))
2376 && TREE_CODE (TREE_TYPE (rhs1
)) == BITINT_TYPE
2377 && bitint_precision_kind (TREE_TYPE (rhs1
)) >= bitint_prec_large
2378 && (CEIL ((unsigned) TYPE_PRECISION (TREE_TYPE (rhs1
)),
2379 limb_prec
) < CEIL (prec
, limb_prec
)
2380 || (kind
== bitint_prec_huge
2381 && TYPE_PRECISION (TREE_TYPE (rhs1
)) < prec
)))
2383 store_operand
= rhs1
;
2384 prec
= TYPE_PRECISION (TREE_TYPE (rhs1
));
2385 kind
= bitint_precision_kind (TREE_TYPE (rhs1
));
2386 if (!TYPE_UNSIGNED (TREE_TYPE (rhs1
)))
/* Iteration setup: large => straight-line over cnt limbs; huge =>
   a 2-limbs-at-a-time loop over `end' limbs (created by create_loop)
   plus up to two trailing limbs handled straight-line.  */
2390 tree idx
= NULL_TREE
, idx_first
= NULL_TREE
, idx_next
= NULL_TREE
;
2391 if (kind
== bitint_prec_large
)
2392 cnt
= CEIL (prec
, limb_prec
);
2395 rem
= (prec
% (2 * limb_prec
));
2396 end
= (prec
- rem
) / limb_prec
;
2397 cnt
= 2 + CEIL (rem
, limb_prec
);
2398 idx
= idx_first
= create_loop (size_zero_node
, &idx_next
);
2401 basic_block edge_bb
= NULL
;
2404 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
2406 edge e
= split_block (gsi_bb (gsi
), gsi_stmt (gsi
));
2408 if (kind
== bitint_prec_large
)
2409 m_gsi
= gsi_end_bb (edge_bb
);
2412 m_after_stmt
= stmt
;
2413 if (kind
!= bitint_prec_large
)
2414 m_upwards_2limb
= end
;
/* sext: true when the lowered value must be sign/zero-extended into
   limbs of TYPE beyond the (narrower) lowered precision.  */
2418 = (prec
!= (unsigned) TYPE_PRECISION (type
)
2419 && (CEIL ((unsigned) TYPE_PRECISION (type
), limb_prec
)
2420 > CEIL (prec
, limb_prec
)));
/* Main per-limb lowering loop.  */
2422 for (unsigned i
= 0; i
< cnt
; i
++)
2425 if (kind
== bitint_prec_large
)
2428 idx
= size_int (end
+ (i
> 2));
/* Comparison lowering: compare the two limbs; on inequality branch
   straight to the final block (probability unlikely).  */
2431 rhs1
= handle_operand (cmp_op1
, idx
);
2432 tree rhs2
= handle_operand (cmp_op2
, idx
);
2433 g
= gimple_build_cond (NE_EXPR
, rhs1
, rhs2
, NULL_TREE
, NULL_TREE
);
2435 edge e1
= split_block (gsi_bb (m_gsi
), g
);
2436 e1
->flags
= EDGE_FALSE_VALUE
;
2437 edge e2
= make_edge (e1
->src
, gimple_bb (stmt
), EDGE_TRUE_VALUE
);
2438 e1
->probability
= profile_probability::unlikely ();
2439 e2
->probability
= e1
->probability
.invert ();
2441 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e2
->src
);
2442 m_gsi
= gsi_after_labels (e1
->dest
);
2447 rhs1
= handle_operand (store_operand
, idx
);
2449 rhs1
= handle_stmt (stmt
, idx
);
2450 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (rhs1
)))
2451 rhs1
= add_cast (m_limb_type
, rhs1
);
2452 if (sext
&& i
== cnt
- 1)
/* nidx = idx + bo_idx (constant-folded when idx is a constant).  */
2457 if (tree_fits_uhwi_p (idx
))
2458 nidx
= size_int (tree_to_uhwi (idx
) + bo_idx
)
;
2461 nidx
= make_ssa_name (sizetype
);
2462 g
= gimple_build_assign (nidx
, PLUS_EXPR
, idx
,
2468 basic_block new_bb
= NULL
;
2469 /* Handle stores into bit-fields. */
2475 if (kind
!= bitint_prec_large
)
2477 prepare_data_in_out (build_zero_cst (m_limb_type
),
2479 bf_next
= m_data
.pop ();
2480 bf_cur
= m_data
.pop ();
2481 g
= gimple_build_cond (EQ_EXPR
, idx
, size_zero_node
,
2482 NULL_TREE
, NULL_TREE
);
2484 if_then_else (g
, profile_probability::unlikely (),
/* First partial limb of a bit-field store: store the upper
   limb_prec - bo_bit bits via a BIT_FIELD_REF of the representative.  */
2489 = build_nonstandard_integer_type (limb_prec
- bo_bit
, 1);
2490 tree bfr
= build3 (BIT_FIELD_REF
, ftype
, unshare_expr (nlhs
),
2491 bitsize_int (limb_prec
- bo_bit
),
2492 bitsize_int (bo_idx
* limb_prec
+ bo_bit
));
2493 tree t
= add_cast (ftype
, rhs1
);
2494 g
= gimple_build_assign (bfr
, t
);
/* Possibly-throwing store: duplicate EH info and add an EH edge
   to the landing pad found above.  */
2498 maybe_duplicate_eh_stmt (g
, stmt
);
2501 edge e
= split_block (gsi_bb (m_gsi
), g
);
2502 m_gsi
= gsi_after_labels (e
->dest
);
2503 make_edge (e
->src
, eh_pad
, EDGE_EH
)->probability
2504 = profile_probability::very_unlikely ();
2507 if (kind
== bitint_prec_large
)
2513 m_gsi
= gsi_after_labels (e2
->src
);
/* Combine the previous partial limb with the new one:
   t3 = (bf_cur >> (limb_prec - bo_bit)) | (rhs1 << bo_bit).  */
2517 tree t1
= make_ssa_name (m_limb_type
);
2518 tree t2
= make_ssa_name (m_limb_type
);
2519 tree t3
= make_ssa_name (m_limb_type
);
2520 g
= gimple_build_assign (t1
, RSHIFT_EXPR
, bf_cur
,
2521 build_int_cst (unsigned_type_node
,
2522 limb_prec
- bo_bit
));
2524 g
= gimple_build_assign (t2
, LSHIFT_EXPR
, rhs1
,
2525 build_int_cst (unsigned_type_node
,
2529 g
= gimple_build_assign (t3
, BIT_IOR_EXPR
, t1
, t2
);
2532 if (bf_next
&& i
== 1)
2534 g
= gimple_build_assign (bf_next
, bf_cur
);
2541 /* Handle bit-field access to partial last limb if needed. */
2545 && tree_fits_uhwi_p (idx
))
2547 unsigned int tprec
= TYPE_PRECISION (type
);
2548 unsigned int rprec
= tprec
% limb_prec
;
2549 if (rprec
+ bo_bit
< (unsigned) limb_prec
)
2552 = build_nonstandard_integer_type (rprec
+ bo_bit
, 1);
2553 tree bfr
= build3 (BIT_FIELD_REF
, ftype
,
2554 unshare_expr (nlhs
),
2555 bitsize_int (rprec
+ bo_bit
),
2556 bitsize_int ((bo_idx
2557 + tprec
/ limb_prec
)
2559 tree t
= add_cast (ftype
, rhs1
);
2560 g
= gimple_build_assign (bfr
, t
);
2564 else if (rprec
+ bo_bit
== (unsigned) limb_prec
)
2567 /* Otherwise, stores to any other lhs. */
2570 tree l
= limb_access (lhs_type
, nlhs
? nlhs
: lhs
,
2572 g
= gimple_build_assign (l
, rhs1
);
2577 maybe_duplicate_eh_stmt (g
, stmt
);
2580 edge e
= split_block (gsi_bb (m_gsi
), g
);
2581 m_gsi
= gsi_after_labels (e
->dest
);
2582 make_edge (e
->src
, eh_pad
, EDGE_EH
)->probability
2583 = profile_probability::very_unlikely ();
2587 m_gsi
= gsi_after_labels (new_bb
);
/* Huge kind, first two iterations: advance the loop index by 2 and
   close the loop with idx_next != end back-edge condition.  */
2591 if (kind
== bitint_prec_huge
&& i
<= 1)
2595 idx
= make_ssa_name (sizetype
);
2596 g
= gimple_build_assign (idx
, PLUS_EXPR
, idx_first
,
2602 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx_first
,
2605 g
= gimple_build_cond (NE_EXPR
, idx_next
, size_int (end
),
2606 NULL_TREE
, NULL_TREE
);
2609 m_gsi
= gsi_after_labels (edge_bb
);
2611 m_gsi
= gsi_for_stmt (stmt
);
/* Widening after lowering a narrower operand: compute the extension
   limb ext = (signed) msb >> (limb_prec - 1) for signed types, or
   zero for unsigned, then store it into the remaining limbs.  */
2621 ext
= add_cast (signed_type_for (m_limb_type
), ext
);
2622 tree lpm1
= build_int_cst (unsigned_type_node
,
2624 tree n
= make_ssa_name (TREE_TYPE (ext
));
2625 g
= gimple_build_assign (n
, RSHIFT_EXPR
, ext
, lpm1
);
2627 ext
= add_cast (m_limb_type
, n
);
2630 ext
= build_zero_cst (m_limb_type
);
2631 kind
= bitint_precision_kind (type
);
2632 unsigned start
= CEIL (prec
, limb_prec
);
2633 prec
= TYPE_PRECISION (type
);
2634 idx
= idx_first
= idx_next
= NULL_TREE
;
2635 if (prec
<= (start
+ 2 + (bo_bit
!= 0)) * limb_prec
)
2636 kind
= bitint_prec_large
;
2637 if (kind
== bitint_prec_large
)
2638 cnt
= CEIL (prec
, limb_prec
) - start
;
2641 rem
= prec
% limb_prec
;
2642 end
= (prec
- rem
) / limb_prec
;
2643 cnt
= (bo_bit
!= 0) + 1 + (rem
!= 0);
/* Second loop: fill limbs [start, end) with the extension value,
   again as straight-line code or a loop depending on KIND.  */
2645 for (unsigned i
= 0; i
< cnt
; i
++)
2647 if (kind
== bitint_prec_large
|| (i
== 0 && bo_bit
!= 0))
2648 idx
= size_int (start
+ i
);
2649 else if (i
== cnt
- 1 && (rem
!= 0))
2650 idx
= size_int (end
);
2651 else if (i
== (bo_bit
!= 0))
2652 idx
= create_loop (size_int (start
+ i
), &idx_next
);
2654 if (bf_cur
!= NULL_TREE
&& bf_cur
!= ext
)
2656 tree t1
= make_ssa_name (m_limb_type
);
2657 g
= gimple_build_assign (t1
, RSHIFT_EXPR
, bf_cur
,
2658 build_int_cst (unsigned_type_node
,
2659 limb_prec
- bo_bit
));
2661 if (integer_zerop (ext
))
2665 tree t2
= make_ssa_name (m_limb_type
);
2666 rhs1
= make_ssa_name (m_limb_type
);
2667 g
= gimple_build_assign (t2
, LSHIFT_EXPR
, ext
,
2668 build_int_cst (unsigned_type_node
,
2671 g
= gimple_build_assign (rhs1
, BIT_IOR_EXPR
, t1
, t2
);
2679 if (tree_fits_uhwi_p (idx
))
2680 nidx
= size_int (tree_to_uhwi (idx
) + bo_idx
);
2683 nidx
= make_ssa_name (sizetype
);
2684 g
= gimple_build_assign (nidx
, PLUS_EXPR
, idx
,
2690 /* Handle bit-field access to partial last limb if needed. */
2691 if (nlhs
&& i
== cnt
- 1)
2693 unsigned int tprec
= TYPE_PRECISION (type
);
2694 unsigned int rprec
= tprec
% limb_prec
;
2695 if (rprec
+ bo_bit
< (unsigned) limb_prec
)
2698 = build_nonstandard_integer_type (rprec
+ bo_bit
, 1);
2699 tree bfr
= build3 (BIT_FIELD_REF
, ftype
,
2700 unshare_expr (nlhs
),
2701 bitsize_int (rprec
+ bo_bit
),
2702 bitsize_int ((bo_idx
+ tprec
/ limb_prec
)
2704 tree t
= add_cast (ftype
, rhs1
);
2705 g
= gimple_build_assign (bfr
, t
);
2709 else if (rprec
+ bo_bit
== (unsigned) limb_prec
)
2712 /* Otherwise, stores to any other lhs. */
2715 tree l
= limb_access (lhs_type
, nlhs
? nlhs
: lhs
, nidx
, true);
2716 g
= gimple_build_assign (l
, rhs1
);
2721 maybe_duplicate_eh_stmt (g
, stmt
);
2724 edge e
= split_block (gsi_bb (m_gsi
), g
);
2725 m_gsi
= gsi_after_labels (e
->dest
);
2726 make_edge (e
->src
, eh_pad
, EDGE_EH
)->probability
2727 = profile_probability::very_unlikely ();
2730 if (kind
== bitint_prec_huge
&& i
== (bo_bit
!= 0))
2732 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
,
2735 g
= gimple_build_cond (NE_EXPR
, idx_next
, size_int (end
),
2736 NULL_TREE
, NULL_TREE
);
2738 m_gsi
= gsi_for_stmt (stmt
);
/* Flush the final partial bit-field limb left in bf_cur.  */
2743 if (bf_cur
!= NULL_TREE
)
2745 unsigned int tprec
= TYPE_PRECISION (type
);
2746 unsigned int rprec
= tprec
% limb_prec
;
2747 tree ftype
= build_nonstandard_integer_type (rprec
+ bo_bit
, 1);
2748 tree bfr
= build3 (BIT_FIELD_REF
, ftype
, unshare_expr (nlhs
),
2749 bitsize_int (rprec
+ bo_bit
),
2750 bitsize_int ((bo_idx
+ tprec
/ limb_prec
)
2755 rhs1
= make_ssa_name (TREE_TYPE (rhs1
));
2756 g
= gimple_build_assign (rhs1
, RSHIFT_EXPR
, bf_cur
,
2757 build_int_cst (unsigned_type_node
,
2758 limb_prec
- bo_bit
));
2761 rhs1
= add_cast (ftype
, rhs1
);
2762 g
= gimple_build_assign (bfr
, rhs1
);
2766 maybe_duplicate_eh_stmt (g
, stmt
);
2769 edge e
= split_block (gsi_bb (m_gsi
), g
);
2770 m_gsi
= gsi_after_labels (e
->dest
);
2771 make_edge (e
->src
, eh_pad
, EDGE_EH
)->probability
2772 = profile_probability::very_unlikely ();
/* The original store is now fully replaced; remove it and its vdef.  */
2777 if (gimple_store_p (stmt
))
2779 unlink_stmt_vdef (stmt
);
2780 release_ssa_name (gimple_vdef (stmt
));
2781 gsi_remove (&m_gsi
, true);
/* Comparison result: a boolean PHI in the final block — true only on
   the fall-through edge where all limbs compared equal; CMP_CODE is
   flipped so the caller tests the PHI accordingly.  */
2785 lhs
= make_ssa_name (boolean_type_node
);
2786 basic_block bb
= gimple_bb (stmt
);
2787 gphi
*phi
= create_phi_node (lhs
, bb
);
2788 edge e
= find_edge (gsi_bb (m_gsi
), bb
);
2789 unsigned int n
= EDGE_COUNT (bb
->preds
);
2790 for (unsigned int i
= 0; i
< n
; i
++)
2792 edge e2
= EDGE_PRED (bb
, i
);
2793 add_phi_arg (phi
, e
== e2
? boolean_true_node
: boolean_false_node
,
2794 e2
, UNKNOWN_LOCATION
);
2796 cmp_code
= cmp_code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
;
/* NOTE(review): corrupted extraction — embedded numbers are the original
   file's line numbers, statements are split across lines and some lines
   are missing (e.g. 2814-2816, 2878).  Preserved verbatim; comments only.  */
2803 /* Handle a large/huge _BitInt comparison statement STMT other than
2804 EQ_EXPR/NE_EXPR. CMP_CODE, CMP_OP1 and CMP_OP2 meaning is like in
2805 lower_mergeable_stmt. The {GT,GE,LT,LE}_EXPR comparisons are
2806 lowered by iteration from the most significant limb downwards to
2807 the least significant one, for large _BitInt in straight line code,
2808 otherwise with most significant limb handled in
2809 straight line code followed by a loop handling one limb at a time.
2810 Comparisons with unsigned huge _BitInt with precisions which are
2811 multiples of limb precision can use just the loop and don't need to
2812 handle most significant limb before the loop. The loop or straight
2813 line code jumps to final basic block if a particular pair of limbs
2817 bitint_large_huge::lower_comparison_stmt (gimple
*stmt
, tree_code
&cmp_code
,
2818 tree cmp_op1
, tree cmp_op2
)
2820 tree type
= TREE_TYPE (cmp_op1
);
2821 gcc_assert (TREE_CODE (type
) == BITINT_TYPE
);
2822 bitint_prec_kind kind
= bitint_precision_kind (type
);
2823 gcc_assert (kind
>= bitint_prec_large
);
/* Fast path: signed x >= 0 / x < 0 only needs the sign of the most
   significant limb — compare that limb (cast to signed) against zero.  */
2825 if (!TYPE_UNSIGNED (type
)
2826 && integer_zerop (cmp_op2
)
2827 && (cmp_code
== GE_EXPR
|| cmp_code
== LT_EXPR
))
2829 unsigned end
= CEIL ((unsigned) TYPE_PRECISION (type
), limb_prec
) - 1;
2830 tree idx
= size_int (end
);
2832 tree rhs1
= handle_operand (cmp_op1
, idx
);
2833 if (TYPE_UNSIGNED (TREE_TYPE (rhs1
)))
2835 tree stype
= signed_type_for (TREE_TYPE (rhs1
));
2836 rhs1
= add_cast (stype
, rhs1
);
2838 tree lhs
= make_ssa_name (boolean_type_node
);
2839 g
= gimple_build_assign (lhs
, cmp_code
, rhs1
,
2840 build_zero_cst (TREE_TYPE (rhs1
)));
/* General case: iterate from the most significant limb downwards.  */
2846 unsigned cnt
, rem
= 0, end
= 0;
2847 tree idx
= NULL_TREE
, idx_next
= NULL_TREE
;
2848 if (kind
== bitint_prec_large
)
2849 cnt
= CEIL ((unsigned) TYPE_PRECISION (type
), limb_prec
);
2852 rem
= ((unsigned) TYPE_PRECISION (type
) % limb_prec
);
2853 if (rem
== 0 && !TYPE_UNSIGNED (type
))
2855 end
= ((unsigned) TYPE_PRECISION (type
) - rem
) / limb_prec
;
2856 cnt
= 1 + (rem
!= 0);
2859 basic_block edge_bb
= NULL
;
2860 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
2862 edge e
= split_block (gsi_bb (gsi
), gsi_stmt (gsi
));
2864 m_gsi
= gsi_end_bb (edge_bb
);
/* edges[2i]/edges[2i+1] record the GT/LT early-exit edges of step i;
   they feed the boolean PHI constructed at the end.  */
2866 edge
*edges
= XALLOCAVEC (edge
, cnt
* 2);
2867 for (unsigned i
= 0; i
< cnt
; i
++)
2870 if (kind
== bitint_prec_large
)
2871 idx
= size_int (cnt
- i
- 1);
2872 else if (i
== cnt
- 1)
2873 idx
= create_loop (size_int (end
- 1), &idx_next
);
2875 idx
= size_int (end
);
2876 tree rhs1
= handle_operand (cmp_op1
, idx
);
2877 tree rhs2
= handle_operand (cmp_op2
, idx
);
/* Most significant limb of a signed type: compare as signed values.  */
2879 && !TYPE_UNSIGNED (type
)
2880 && TYPE_UNSIGNED (TREE_TYPE (rhs1
)))
2882 tree stype
= signed_type_for (TREE_TYPE (rhs1
));
2883 rhs1
= add_cast (stype
, rhs1
);
2884 rhs2
= add_cast (stype
, rhs2
);
/* If rhs1 > rhs2 at this limb, the whole comparison is decided:
   branch straight to the final block.  */
2886 g
= gimple_build_cond (GT_EXPR
, rhs1
, rhs2
, NULL_TREE
, NULL_TREE
);
2888 edge e1
= split_block (gsi_bb (m_gsi
), g
);
2889 e1
->flags
= EDGE_FALSE_VALUE
;
2890 edge e2
= make_edge (e1
->src
, gimple_bb (stmt
), EDGE_TRUE_VALUE
);
2891 e1
->probability
= profile_probability::likely ();
2892 e2
->probability
= e1
->probability
.invert ();
2894 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e2
->src
);
2895 m_gsi
= gsi_after_labels (e1
->dest
);
/* Likewise for rhs1 < rhs2.  */
2897 g
= gimple_build_cond (LT_EXPR
, rhs1
, rhs2
, NULL_TREE
, NULL_TREE
);
2899 e1
= split_block (gsi_bb (m_gsi
), g
);
2900 e1
->flags
= EDGE_FALSE_VALUE
;
2901 e2
= make_edge (e1
->src
, gimple_bb (stmt
), EDGE_TRUE_VALUE
);
2902 e1
->probability
= profile_probability::unlikely ();
2903 e2
->probability
= e1
->probability
.invert ();
2904 m_gsi
= gsi_after_labels (e1
->dest
);
2905 edges
[2 * i
+ 1] = e2
;
/* Huge kind: close the downward loop, decrementing the index until 0.  */
2907 if (kind
== bitint_prec_huge
&& i
== cnt
- 1)
2909 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
, size_int (-1));
2911 g
= gimple_build_cond (NE_EXPR
, idx
, size_zero_node
,
2912 NULL_TREE
, NULL_TREE
);
2914 edge true_edge
, false_edge
;
2915 extract_true_false_edges_from_block (gsi_bb (m_gsi
),
2916 &true_edge
, &false_edge
);
2917 m_gsi
= gsi_after_labels (false_edge
->dest
);
/* Result PHI: GT-exit edges yield true for GT/GE, LT-exit edges for
   LT/LE; the all-equal fall-through edge yields true for GE/LE.  */
2922 tree lhs
= make_ssa_name (boolean_type_node
);
2923 basic_block bb
= gimple_bb (stmt
);
2924 gphi
*phi
= create_phi_node (lhs
, bb
);
2925 for (unsigned int i
= 0; i
< cnt
* 2; i
++)
2927 tree val
= ((cmp_code
== GT_EXPR
|| cmp_code
== GE_EXPR
)
2928 ^ (i
& 1)) ? boolean_true_node
: boolean_false_node
;
2929 add_phi_arg (phi
, val
, edges
[i
], UNKNOWN_LOCATION
);
2931 add_phi_arg (phi
, (cmp_code
== GE_EXPR
|| cmp_code
== LE_EXPR
)
2932 ? boolean_true_node
: boolean_false_node
,
2933 find_edge (gsi_bb (m_gsi
), bb
), UNKNOWN_LOCATION
);
/* NOTE(review): corrupted extraction — embedded numbers are the original
   file's line numbers, statements are split across lines and numerous
   lines are missing (insertions, braces, several conditions).  Preserved
   verbatim; comments only added.  */
2938 /* Lower large/huge _BitInt left and right shift except for left
2939 shift by < limb_prec constant. */
2942 bitint_large_huge::lower_shift_stmt (tree obj
, gimple
*stmt
)
2944 tree rhs1
= gimple_assign_rhs1 (stmt
);
2945 tree lhs
= gimple_assign_lhs (stmt
);
2946 tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
2947 tree type
= TREE_TYPE (rhs1
);
2948 gimple
*final_stmt
= gsi_stmt (m_gsi
);
2949 gcc_assert (TREE_CODE (type
) == BITINT_TYPE
2950 && bitint_precision_kind (type
) >= bitint_prec_large
);
2951 int prec
= TYPE_PRECISION (type
);
2952 tree n
= gimple_assign_rhs2 (stmt
), n1
, n2
, n3
, n4
;
/* When no destination object was supplied, use the coalesced partition
   variable of the lhs.  */
2954 if (obj
== NULL_TREE
)
2956 int part
= var_to_partition (m_map
, lhs
);
2957 gcc_assert (m_vars
[part
] != NULL_TREE
);
2960 /* Preparation code common for both left and right shifts.
2961 unsigned n1 = n % limb_prec;
2962 size_t n2 = n / limb_prec;
2963 size_t n3 = n1 != 0;
2964 unsigned n4 = (limb_prec - n1) % limb_prec;
2965 (for power of 2 limb_prec n4 can be -n1 & (limb_prec)). */
2966 if (TREE_CODE (n
) == INTEGER_CST
)
2968 tree lp
= build_int_cst (TREE_TYPE (n
), limb_prec
);
2969 n1
= int_const_binop (TRUNC_MOD_EXPR
, n
, lp
);
2970 n2
= fold_convert (sizetype
, int_const_binop (TRUNC_DIV_EXPR
, n
, lp
));
2971 n3
= size_int (!integer_zerop (n1
));
2972 n4
= int_const_binop (TRUNC_MOD_EXPR
,
2973 int_const_binop (MINUS_EXPR
, lp
, n1
), lp
);
/* Non-constant shift count: emit the n1..n4 computations as gimple,
   using cheap masks/shifts when limb_prec is a power of two.  */
2977 n1
= make_ssa_name (TREE_TYPE (n
));
2978 n2
= make_ssa_name (sizetype
);
2979 n3
= make_ssa_name (sizetype
);
2980 n4
= make_ssa_name (TREE_TYPE (n
));
2981 if (pow2p_hwi (limb_prec
))
2983 tree lpm1
= build_int_cst (TREE_TYPE (n
), limb_prec
- 1);
2984 g
= gimple_build_assign (n1
, BIT_AND_EXPR
, n
, lpm1
);
2986 g
= gimple_build_assign (useless_type_conversion_p (sizetype
,
2988 ? n2
: make_ssa_name (TREE_TYPE (n
)),
2990 build_int_cst (TREE_TYPE (n
),
2991 exact_log2 (limb_prec
)));
2993 if (gimple_assign_lhs (g
) != n2
)
2995 g
= gimple_build_assign (n2
, NOP_EXPR
, gimple_assign_lhs (g
));
2998 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (n
)),
3001 g
= gimple_build_assign (n4
, BIT_AND_EXPR
, gimple_assign_lhs (g
),
3007 tree lp
= build_int_cst (TREE_TYPE (n
), limb_prec
);
3008 g
= gimple_build_assign (n1
, TRUNC_MOD_EXPR
, n
, lp
);
3010 g
= gimple_build_assign (useless_type_conversion_p (sizetype
,
3012 ? n2
: make_ssa_name (TREE_TYPE (n
)),
3013 TRUNC_DIV_EXPR
, n
, lp
);
3015 if (gimple_assign_lhs (g
) != n2
)
3017 g
= gimple_build_assign (n2
, NOP_EXPR
, gimple_assign_lhs (g
));
3020 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (n
)),
3021 MINUS_EXPR
, lp
, n1
);
3023 g
= gimple_build_assign (n4
, TRUNC_MOD_EXPR
, gimple_assign_lhs (g
),
3027 g
= gimple_build_assign (make_ssa_name (boolean_type_node
), NE_EXPR
, n1
,
3028 build_zero_cst (TREE_TYPE (n
)));
3030 g
= gimple_build_assign (n3
, NOP_EXPR
, gimple_assign_lhs (g
));
/* p: index of the most significant (possibly partial) limb.  */
3033 tree p
= build_int_cst (sizetype
,
3034 prec
/ limb_prec
- (prec
% limb_prec
== 0));
3035 if (rhs_code
== RSHIFT_EXPR
)
/* Right shift pseudo-code the emitted gimple follows:  */
3040 unsigned n1 = n % limb_prec;
3041 size_t n2 = n / limb_prec;
3042 size_t n3 = n1 != 0;
3043 unsigned n4 = (limb_prec - n1) % limb_prec;
3045 size_t p = prec / limb_prec - (prec % limb_prec == 0);
3046 int signed_p = (typeof (src) -1) < 0;
3047 for (idx = n2; idx < ((!signed_p && (prec % limb_prec == 0))
3048 ? p : p - n3); ++idx)
3049 dst[idx - n2] = (src[idx] >> n1) | (src[idx + n3] << n4);
3051 if (prec % limb_prec == 0)
3054 ext = ((signed limb_type) (src[p] << (limb_prec
3055 - (prec % limb_prec))))
3056 >> (limb_prec - (prec % limb_prec));
3058 ext = src[p] & (((limb_type) 1 << (prec % limb_prec)) - 1);
3059 if (!signed_p && (prec % limb_prec == 0))
3061 else if (idx < prec / 64)
3063 dst[idx - n2] = (src[idx] >> n1) | (ext << n4);
3069 dst[idx] = ((signed limb_type) ext) >> n1;
3070 ext = ((signed limb_type) ext) >> (limb_prec - 1);
3074 dst[idx] = ext >> n1;
3077 for (++idx; idx <= p; ++idx)
/* Loop bound: p for unsigned full-limb precision, else p - n3.  */
3080 if (TYPE_UNSIGNED (type
) && prec
% limb_prec
== 0)
3082 else if (TREE_CODE (n3
) == INTEGER_CST
)
3083 pmn3
= int_const_binop (MINUS_EXPR
, p
, n3
);
3086 pmn3
= make_ssa_name (sizetype
);
3087 g
= gimple_build_assign (pmn3
, MINUS_EXPR
, p
, n3
);
3090 g
= gimple_build_cond (LT_EXPR
, n2
, pmn3
, NULL_TREE
, NULL_TREE
);
3091 edge edge_true
, edge_false
;
3092 if_then (g
, profile_probability::likely (), edge_true
, edge_false
);
/* Main right-shift loop: dst[idx-n2] = (src[idx] >> n1)
   | (src[idx+n3] << n4).  */
3094 tree idx
= create_loop (n2
, &idx_next
);
3095 tree idxmn2
= make_ssa_name (sizetype
);
3096 tree idxpn3
= make_ssa_name (sizetype
);
3097 g
= gimple_build_assign (idxmn2
, MINUS_EXPR
, idx
, n2
);
3099 g
= gimple_build_assign (idxpn3
, PLUS_EXPR
, idx
, n3
);
3102 tree t1
= handle_operand (rhs1
, idx
);
3104 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3105 RSHIFT_EXPR
, t1
, n1
);
3107 t1
= gimple_assign_lhs (g
);
3108 if (!integer_zerop (n3
))
3111 tree t2
= handle_operand (rhs1
, idxpn3
);
3112 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3113 LSHIFT_EXPR
, t2
, n4
);
3115 t2
= gimple_assign_lhs (g
);
3116 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3117 BIT_IOR_EXPR
, t1
, t2
);
3119 t1
= gimple_assign_lhs (g
);
3121 tree l
= limb_access (TREE_TYPE (lhs
), obj
, idxmn2
, true);
3122 g
= gimple_build_assign (l
, t1
);
3124 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
, size_one_node
);
3126 g
= gimple_build_cond (LT_EXPR
, idx_next
, pmn3
, NULL_TREE
, NULL_TREE
);
/* PHI merging the loop exit index with the not-taken path's n2.  */
3128 idx
= make_ssa_name (sizetype
);
3129 m_gsi
= gsi_for_stmt (final_stmt
);
3130 gphi
*phi
= create_phi_node (idx
, gsi_bb (m_gsi
));
3131 edge_false
= find_edge (edge_false
->src
, gsi_bb (m_gsi
));
3132 edge_true
= EDGE_PRED (gsi_bb (m_gsi
),
3133 EDGE_PRED (gsi_bb (m_gsi
), 0) == edge_false
);
3134 add_phi_arg (phi
, n2
, edge_false
, UNKNOWN_LOCATION
);
3135 add_phi_arg (phi
, idx_next
, edge_true
, UNKNOWN_LOCATION
);
/* ext: most significant limb (sign- or zero-masked per the pseudo-code).  */
3137 tree ms
= handle_operand (rhs1
, p
);
3139 if (!types_compatible_p (TREE_TYPE (ms
), m_limb_type
))
3140 ext
= add_cast (m_limb_type
, ms
);
3141 if (!(TYPE_UNSIGNED (type
) && prec
% limb_prec
== 0)
3142 && !integer_zerop (n3
))
3144 g
= gimple_build_cond (LT_EXPR
, idx
, p
, NULL_TREE
, NULL_TREE
);
3145 if_then (g
, profile_probability::likely (), edge_true
, edge_false
);
/* Conditional step combining src[idx] with ext for the limb just
   below the most significant one.  */
3147 t1
= handle_operand (rhs1
, idx
);
3148 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3149 RSHIFT_EXPR
, t1
, n1
);
3151 t1
= gimple_assign_lhs (g
);
3152 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3153 LSHIFT_EXPR
, ext
, n4
);
3155 tree t2
= gimple_assign_lhs (g
);
3156 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3157 BIT_IOR_EXPR
, t1
, t2
);
3159 t1
= gimple_assign_lhs (g
);
3160 idxmn2
= make_ssa_name (sizetype
);
3161 g
= gimple_build_assign (idxmn2
, MINUS_EXPR
, idx
, n2
);
3163 l
= limb_access (TREE_TYPE (lhs
), obj
, idxmn2
, true);
3164 g
= gimple_build_assign (l
, t1
);
3166 idx_next
= make_ssa_name (sizetype
);
3167 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
, size_one_node
);
3169 m_gsi
= gsi_for_stmt (final_stmt
);
3170 tree nidx
= make_ssa_name (sizetype
);
3171 phi
= create_phi_node (nidx
, gsi_bb (m_gsi
));
3172 edge_false
= find_edge (edge_false
->src
, gsi_bb (m_gsi
));
3173 edge_true
= EDGE_PRED (gsi_bb (m_gsi
),
3174 EDGE_PRED (gsi_bb (m_gsi
), 0) == edge_false
);
3175 add_phi_arg (phi
, idx
, edge_false
, UNKNOWN_LOCATION
);
3176 add_phi_arg (phi
, idx_next
, edge_true
, UNKNOWN_LOCATION
);
3179 g
= gimple_build_assign (make_ssa_name (sizetype
), MINUS_EXPR
, idx
, n2
);
3181 idx
= gimple_assign_lhs (g
);
/* Store the shifted extension limb; for signed types the shift is
   arithmetic (done in the signed variant sext of ext).  */
3183 if (!TYPE_UNSIGNED (type
))
3184 sext
= add_cast (signed_type_for (m_limb_type
), ext
);
3185 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (sext
)),
3186 RSHIFT_EXPR
, sext
, n1
);
3188 t1
= gimple_assign_lhs (g
);
3189 if (!TYPE_UNSIGNED (type
))
3191 t1
= add_cast (m_limb_type
, t1
);
3192 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (sext
)),
3194 build_int_cst (TREE_TYPE (n
),
3197 ext
= add_cast (m_limb_type
, gimple_assign_lhs (g
));
3200 ext
= build_zero_cst (m_limb_type
);
3201 l
= limb_access (TREE_TYPE (lhs
), obj
, idx
, true);
3202 g
= gimple_build_assign (l
, t1
);
/* Final fill loop: remaining high limbs of dst all get ext.  */
3204 g
= gimple_build_assign (make_ssa_name (sizetype
), PLUS_EXPR
, idx
,
3207 idx
= gimple_assign_lhs (g
);
3208 g
= gimple_build_cond (LE_EXPR
, idx
, p
, NULL_TREE
, NULL_TREE
);
3209 if_then (g
, profile_probability::likely (), edge_true
, edge_false
);
3210 idx
= create_loop (idx
, &idx_next
);
3211 l
= limb_access (TREE_TYPE (lhs
), obj
, idx
, true);
3212 g
= gimple_build_assign (l
, ext
);
3214 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
, size_one_node
);
3216 g
= gimple_build_cond (LE_EXPR
, idx_next
, p
, NULL_TREE
, NULL_TREE
);
/* Left shift pseudo-code the emitted gimple follows:  */
3224 unsigned n1 = n % limb_prec;
3225 size_t n2 = n / limb_prec;
3226 size_t n3 = n1 != 0;
3227 unsigned n4 = (limb_prec - n1) % limb_prec;
3229 size_t p = prec / limb_prec - (prec % limb_prec == 0);
3230 for (idx = p; (ssize_t) idx >= (ssize_t) (n2 + n3); --idx)
3231 dst[idx] = (src[idx - n2] << n1) | (src[idx - n2 - n3] >> n4);
3234 dst[idx] = src[idx - n2] << n1;
3237 for (; (ssize_t) idx >= 0; --idx)
3240 if (TREE_CODE (n2
) == INTEGER_CST
&& TREE_CODE (n3
) == INTEGER_CST
)
3241 n2pn3
= int_const_binop (PLUS_EXPR
, n2
, n3
);
3244 n2pn3
= make_ssa_name (sizetype
);
3245 g
= gimple_build_assign (n2pn3
, PLUS_EXPR
, n2
, n3
);
3248 /* For LSHIFT_EXPR, we can use handle_operand with non-INTEGER_CST
3249 idx even to access the most significant partial limb. */
3251 if (integer_zerop (n3
))
3252 /* For n3 == 0 p >= n2 + n3 is always true for all valid shift
3253 counts. Emit if (true) condition that can be optimized later. */
3254 g
= gimple_build_cond (NE_EXPR
, boolean_true_node
, boolean_false_node
,
3255 NULL_TREE
, NULL_TREE
);
3257 g
= gimple_build_cond (LE_EXPR
, n2pn3
, p
, NULL_TREE
, NULL_TREE
);
3258 edge edge_true
, edge_false
;
3259 if_then (g
, profile_probability::likely (), edge_true
, edge_false
);
/* Main left-shift loop, downwards from p:
   dst[idx] = (src[idx-n2] << n1) | (src[idx-n2-n3] >> n4).  */
3261 tree idx
= create_loop (p
, &idx_next
);
3262 tree idxmn2
= make_ssa_name (sizetype
);
3263 tree idxmn2mn3
= make_ssa_name (sizetype
);
3264 g
= gimple_build_assign (idxmn2
, MINUS_EXPR
, idx
, n2
);
3266 g
= gimple_build_assign (idxmn2mn3
, MINUS_EXPR
, idxmn2
, n3
);
3269 tree t1
= handle_operand (rhs1
, idxmn2
);
3271 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3272 LSHIFT_EXPR
, t1
, n1
);
3274 t1
= gimple_assign_lhs (g
);
3275 if (!integer_zerop (n3
))
3278 tree t2
= handle_operand (rhs1
, idxmn2mn3
);
3279 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3280 RSHIFT_EXPR
, t2
, n4
);
3282 t2
= gimple_assign_lhs (g
);
3283 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3284 BIT_IOR_EXPR
, t1
, t2
);
3286 t1
= gimple_assign_lhs (g
);
3288 tree l
= limb_access (TREE_TYPE (lhs
), obj
, idx
, true);
3289 g
= gimple_build_assign (l
, t1
);
3291 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
, size_int (-1));
/* Downward loops compare in ssizetype so the >= test is signed.  */
3293 tree sn2pn3
= add_cast (ssizetype
, n2pn3
);
3294 g
= gimple_build_cond (GE_EXPR
, add_cast (ssizetype
, idx_next
), sn2pn3
,
3295 NULL_TREE
, NULL_TREE
);
3297 idx
= make_ssa_name (sizetype
);
3298 m_gsi
= gsi_for_stmt (final_stmt
);
3299 gphi
*phi
= create_phi_node (idx
, gsi_bb (m_gsi
));
3300 edge_false
= find_edge (edge_false
->src
, gsi_bb (m_gsi
));
3301 edge_true
= EDGE_PRED (gsi_bb (m_gsi
),
3302 EDGE_PRED (gsi_bb (m_gsi
), 0) == edge_false
);
3303 add_phi_arg (phi
, p
, edge_false
, UNKNOWN_LOCATION
);
3304 add_phi_arg (phi
, idx_next
, edge_true
, UNKNOWN_LOCATION
);
/* Conditional single limb dst[idx] = src[idx-n2] << n1 when n3 != 0.  */
3306 if (!integer_zerop (n3
))
3308 g
= gimple_build_cond (NE_EXPR
, n3
, size_zero_node
,
3309 NULL_TREE
, NULL_TREE
);
3310 if_then (g
, profile_probability::likely (), edge_true
, edge_false
);
3311 idxmn2
= make_ssa_name (sizetype
);
3312 g
= gimple_build_assign (idxmn2
, MINUS_EXPR
, idx
, n2
);
3315 t1
= handle_operand (rhs1
, idxmn2
);
3316 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3317 LSHIFT_EXPR
, t1
, n1
);
3319 t1
= gimple_assign_lhs (g
);
3320 l
= limb_access (TREE_TYPE (lhs
), obj
, idx
, true);
3321 g
= gimple_build_assign (l
, t1
);
3323 idx_next
= make_ssa_name (sizetype
);
3324 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
, size_int (-1));
3326 m_gsi
= gsi_for_stmt (final_stmt
);
3327 tree nidx
= make_ssa_name (sizetype
);
3328 phi
= create_phi_node (nidx
, gsi_bb (m_gsi
));
3329 edge_false
= find_edge (edge_false
->src
, gsi_bb (m_gsi
));
3330 edge_true
= EDGE_PRED (gsi_bb (m_gsi
),
3331 EDGE_PRED (gsi_bb (m_gsi
), 0) == edge_false
);
3332 add_phi_arg (phi
, idx
, edge_false
, UNKNOWN_LOCATION
);
3333 add_phi_arg (phi
, idx_next
, edge_true
, UNKNOWN_LOCATION
);
/* Final zero-fill loop for the low limbs of dst.  */
3336 g
= gimple_build_cond (GE_EXPR
, add_cast (ssizetype
, idx
),
3337 ssize_int (0), NULL_TREE
, NULL_TREE
);
3338 if_then (g
, profile_probability::likely (), edge_true
, edge_false
);
3339 idx
= create_loop (idx
, &idx_next
);
3340 l
= limb_access (TREE_TYPE (lhs
), obj
, idx
, true);
3341 g
= gimple_build_assign (l
, build_zero_cst (m_limb_type
));
3343 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
, size_int (-1));
3345 g
= gimple_build_cond (GE_EXPR
, add_cast (ssizetype
, idx_next
),
3346 ssize_int (0), NULL_TREE
, NULL_TREE
);
/* NOTE(review): corrupted extraction — embedded numbers are the original
   file's line numbers; several lines (e.g. 3380-3383, 3409-3413) are
   missing.  Preserved verbatim; comments only added.  */
3351 /* Lower large/huge _BitInt multiplication or division. */
3354 bitint_large_huge::lower_muldiv_stmt (tree obj
, gimple
*stmt
)
3356 tree rhs1
= gimple_assign_rhs1 (stmt
);
3357 tree rhs2
= gimple_assign_rhs2 (stmt
);
3358 tree lhs
= gimple_assign_lhs (stmt
);
3359 tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3360 tree type
= TREE_TYPE (rhs1
);
3361 gcc_assert (TREE_CODE (type
) == BITINT_TYPE
3362 && bitint_precision_kind (type
) >= bitint_prec_large
);
/* handle_operand_addr yields an address for each operand plus its
   effective precision (prec1/prec2) for the libgcc entry points.  */
3363 int prec
= TYPE_PRECISION (type
), prec1
, prec2
;
3364 rhs1
= handle_operand_addr (rhs1
, stmt
, NULL
, &prec1
);
3365 rhs2
= handle_operand_addr (rhs2
, stmt
, NULL
, &prec2
);
/* No destination supplied: use the lhs's coalesced partition variable.  */
3366 if (obj
== NULL_TREE
)
3368 int part
= var_to_partition (m_map
, lhs
);
3369 gcc_assert (m_vars
[part
] != NULL_TREE
);
3371 lhs
= build_fold_addr_expr (obj
);
3375 lhs
= build_fold_addr_expr (obj
);
3376 lhs
= force_gimple_operand_gsi (&m_gsi
, lhs
, true,
3377 NULL_TREE
, true, GSI_SAME_STMT
);
3379 tree sitype
= lang_hooks
.types
.type_for_mode (SImode
, 0);
/* Multiplication: result pointer + precision, then the two
   (pointer, precision) operand pairs.  */
3384 g
= gimple_build_call_internal (IFN_MULBITINT
, 6,
3385 lhs
, build_int_cst (sitype
, prec
),
3386 rhs1
, build_int_cst (sitype
, prec1
),
3387 rhs2
, build_int_cst (sitype
, prec2
));
/* Division: quotient into lhs, no remainder (null/0 slot).  */
3390 case TRUNC_DIV_EXPR
:
3391 g
= gimple_build_call_internal (IFN_DIVMODBITINT
, 8,
3392 lhs
, build_int_cst (sitype
, prec
),
3394 build_int_cst (sitype
, 0),
3395 rhs1
, build_int_cst (sitype
, prec1
),
3396 rhs2
, build_int_cst (sitype
, prec2
));
3397 if (!stmt_ends_bb_p (stmt
))
3398 gimple_call_set_nothrow (as_a
<gcall
*> (g
), true);
/* Modulo: remainder into lhs, quotient discarded (null pointer).  */
3401 case TRUNC_MOD_EXPR
:
3402 g
= gimple_build_call_internal (IFN_DIVMODBITINT
, 8, null_pointer_node
,
3403 build_int_cst (sitype
, 0),
3404 lhs
, build_int_cst (sitype
, prec
),
3405 rhs1
, build_int_cst (sitype
, prec1
),
3406 rhs2
, build_int_cst (sitype
, prec2
));
3407 if (!stmt_ends_bb_p (stmt
))
3408 gimple_call_set_nothrow (as_a
<gcall
*> (g
), true);
/* If the original statement could throw (e.g. -fnon-call-exceptions
   division), keep the EH edge on the replacement call.  */
3414 if (stmt_ends_bb_p (stmt
))
3416 maybe_duplicate_eh_stmt (g
, stmt
);
3419 basic_block bb
= gimple_bb (stmt
);
3421 FOR_EACH_EDGE (e1
, ei
, bb
->succs
)
3422 if (e1
->flags
& EDGE_EH
)
3426 edge e2
= split_block (gsi_bb (m_gsi
), g
);
3427 m_gsi
= gsi_after_labels (e2
->dest
);
3428 make_edge (e2
->src
, e1
->dest
, EDGE_EH
)->probability
3429 = profile_probability::very_unlikely ();
3434 /* Lower large/huge _BitInt conversion to/from floating point. */
3437 bitint_large_huge::lower_float_conv_stmt (tree obj
, gimple
*stmt
)
3439 tree rhs1
= gimple_assign_rhs1 (stmt
);
3440 tree lhs
= gimple_assign_lhs (stmt
);
3441 tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3442 tree sitype
= lang_hooks
.types
.type_for_mode (SImode
, 0);
3444 if (rhs_code
== FIX_TRUNC_EXPR
)
3446 int prec
= TYPE_PRECISION (TREE_TYPE (lhs
));
3447 if (!TYPE_UNSIGNED (TREE_TYPE (lhs
)))
3449 if (obj
== NULL_TREE
)
3451 int part
= var_to_partition (m_map
, lhs
);
3452 gcc_assert (m_vars
[part
] != NULL_TREE
);
3454 lhs
= build_fold_addr_expr (obj
);
3458 lhs
= build_fold_addr_expr (obj
);
3459 lhs
= force_gimple_operand_gsi (&m_gsi
, lhs
, true,
3460 NULL_TREE
, true, GSI_SAME_STMT
);
3462 scalar_mode from_mode
3463 = as_a
<scalar_mode
> (TYPE_MODE (TREE_TYPE (rhs1
)));
3465 /* IEEE single is a full superset of both IEEE half and
3466 bfloat formats, convert to float first and then to _BitInt
3467 to avoid the need of another 2 library routines. */
3468 if ((REAL_MODE_FORMAT (from_mode
) == &arm_bfloat_half_format
3469 || REAL_MODE_FORMAT (from_mode
) == &ieee_half_format
)
3470 && REAL_MODE_FORMAT (SFmode
) == &ieee_single_format
)
3472 tree type
= lang_hooks
.types
.type_for_mode (SFmode
, 0);
3474 rhs1
= add_cast (type
, rhs1
);
3477 g
= gimple_build_call_internal (IFN_FLOATTOBITINT
, 3,
3478 lhs
, build_int_cst (sitype
, prec
),
3485 rhs1
= handle_operand_addr (rhs1
, stmt
, NULL
, &prec
);
3486 g
= gimple_build_call_internal (IFN_BITINTTOFLOAT
, 2,
3487 rhs1
, build_int_cst (sitype
, prec
));
3488 gimple_call_set_lhs (g
, lhs
);
3489 if (!stmt_ends_bb_p (stmt
))
3490 gimple_call_set_nothrow (as_a
<gcall
*> (g
), true);
3491 gsi_replace (&m_gsi
, g
, true);
3495 /* Helper method for lower_addsub_overflow and lower_mul_overflow.
3496 If check_zero is true, caller wants to check if all bits in [start, end)
3497 are zero, otherwise if bits in [start, end) are either all zero or
3498 all ones. L is the limb with index LIMB, START and END are measured
3502 bitint_large_huge::arith_overflow_extract_bits (unsigned int start
,
3503 unsigned int end
, tree l
,
3507 unsigned startlimb
= start
/ limb_prec
;
3508 unsigned endlimb
= (end
- 1) / limb_prec
;
3511 if ((start
% limb_prec
) == 0 && (end
% limb_prec
) == 0)
3513 if (startlimb
== endlimb
&& limb
== startlimb
)
3517 wide_int w
= wi::shifted_mask (start
% limb_prec
,
3518 end
- start
, false, limb_prec
);
3519 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3521 wide_int_to_tree (m_limb_type
, w
));
3523 return gimple_assign_lhs (g
);
3525 unsigned int shift
= start
% limb_prec
;
3526 if ((end
% limb_prec
) != 0)
3528 unsigned int lshift
= (-end
) % limb_prec
;
3530 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3532 build_int_cst (unsigned_type_node
,
3535 l
= gimple_assign_lhs (g
);
3537 l
= add_cast (signed_type_for (m_limb_type
), l
);
3538 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (l
)),
3540 build_int_cst (unsigned_type_node
, shift
));
3542 return add_cast (m_limb_type
, gimple_assign_lhs (g
));
3544 else if (limb
== startlimb
)
3546 if ((start
% limb_prec
) == 0)
3549 l
= add_cast (signed_type_for (m_limb_type
), l
);
3550 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (l
)),
3552 build_int_cst (unsigned_type_node
,
3553 start
% limb_prec
));
3555 l
= gimple_assign_lhs (g
);
3557 l
= add_cast (m_limb_type
, l
);
3560 else if (limb
== endlimb
)
3562 if ((end
% limb_prec
) == 0)
3566 wide_int w
= wi::mask (end
% limb_prec
, false, limb_prec
);
3567 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3569 wide_int_to_tree (m_limb_type
, w
));
3571 return gimple_assign_lhs (g
);
3573 unsigned int shift
= (-end
) % limb_prec
;
3574 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
3576 build_int_cst (unsigned_type_node
, shift
));
3578 l
= add_cast (signed_type_for (m_limb_type
), gimple_assign_lhs (g
));
3579 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (l
)),
3581 build_int_cst (unsigned_type_node
, shift
));
3583 return add_cast (m_limb_type
, gimple_assign_lhs (g
));
3588 /* Helper method for lower_addsub_overflow and lower_mul_overflow. Store
3589 result including overflow flag into the right locations. */
3592 bitint_large_huge::finish_arith_overflow (tree var
, tree obj
, tree type
,
3593 tree ovf
, tree lhs
, tree orig_obj
,
3594 gimple
*stmt
, tree_code code
)
3598 if (obj
== NULL_TREE
3599 && (TREE_CODE (type
) != BITINT_TYPE
3600 || bitint_precision_kind (type
) < bitint_prec_large
))
3602 /* Add support for 3 or more limbs filled in from normal integral
3603 type if this assert fails. If no target chooses limb mode smaller
3604 than half of largest supported normal integral type, this will not
3606 gcc_assert (TYPE_PRECISION (type
) <= 2 * limb_prec
);
3607 tree lhs_type
= type
;
3608 if (TREE_CODE (type
) == BITINT_TYPE
3609 && bitint_precision_kind (type
) == bitint_prec_middle
)
3610 lhs_type
= build_nonstandard_integer_type (TYPE_PRECISION (type
),
3611 TYPE_UNSIGNED (type
));
3612 tree r1
= limb_access (NULL_TREE
, var
, size_int (0), true);
3613 g
= gimple_build_assign (make_ssa_name (m_limb_type
), r1
);
3615 r1
= gimple_assign_lhs (g
);
3616 if (!useless_type_conversion_p (lhs_type
, TREE_TYPE (r1
)))
3617 r1
= add_cast (lhs_type
, r1
);
3618 if (TYPE_PRECISION (lhs_type
) > limb_prec
)
3620 tree r2
= limb_access (NULL_TREE
, var
, size_int (1), true);
3621 g
= gimple_build_assign (make_ssa_name (m_limb_type
), r2
);
3623 r2
= gimple_assign_lhs (g
);
3624 r2
= add_cast (lhs_type
, r2
);
3625 g
= gimple_build_assign (make_ssa_name (lhs_type
), LSHIFT_EXPR
, r2
,
3626 build_int_cst (unsigned_type_node
,
3629 g
= gimple_build_assign (make_ssa_name (lhs_type
), BIT_IOR_EXPR
, r1
,
3630 gimple_assign_lhs (g
));
3632 r1
= gimple_assign_lhs (g
);
3634 if (lhs_type
!= type
)
3635 r1
= add_cast (type
, r1
);
3636 ovf
= add_cast (lhs_type
, ovf
);
3637 if (lhs_type
!= type
)
3638 ovf
= add_cast (type
, ovf
);
3639 g
= gimple_build_assign (lhs
, COMPLEX_EXPR
, r1
, ovf
);
3640 m_gsi
= gsi_for_stmt (stmt
);
3641 gsi_replace (&m_gsi
, g
, true);
3645 unsigned HOST_WIDE_INT nelts
= 0;
3646 tree atype
= NULL_TREE
;
3649 nelts
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj
))) / limb_prec
;
3650 if (orig_obj
== NULL_TREE
)
3652 atype
= build_array_type_nelts (m_limb_type
, nelts
);
3658 if (orig_obj
== NULL_TREE
)
3660 zero
= build_zero_cst (build_pointer_type (TREE_TYPE (obj
)));
3661 v1
= build2 (MEM_REF
, atype
,
3662 build_fold_addr_expr (unshare_expr (obj
)), zero
);
3664 else if (!useless_type_conversion_p (atype
, TREE_TYPE (obj
)))
3665 v1
= build1 (VIEW_CONVERT_EXPR
, atype
, unshare_expr (obj
));
3667 v1
= unshare_expr (obj
);
3668 zero
= build_zero_cst (build_pointer_type (TREE_TYPE (var
)));
3669 v2
= build2 (MEM_REF
, atype
, build_fold_addr_expr (var
), zero
);
3670 g
= gimple_build_assign (v1
, v2
);
3673 if (orig_obj
== NULL_TREE
&& obj
)
3675 ovf
= add_cast (m_limb_type
, ovf
);
3676 tree l
= limb_access (NULL_TREE
, obj
, size_int (nelts
), true);
3677 g
= gimple_build_assign (l
, ovf
);
3681 atype
= build_array_type_nelts (m_limb_type
, nelts
- 1);
3682 tree off
= build_int_cst (build_pointer_type (TREE_TYPE (obj
)),
3683 (nelts
+ 1) * m_limb_size
);
3684 tree v1
= build2 (MEM_REF
, atype
,
3685 build_fold_addr_expr (unshare_expr (obj
)),
3687 g
= gimple_build_assign (v1
, build_zero_cst (atype
));
3691 else if (TREE_CODE (TREE_TYPE (lhs
)) == COMPLEX_TYPE
)
3693 imm_use_iterator ui
;
3694 use_operand_p use_p
;
3695 FOR_EACH_IMM_USE_FAST (use_p
, ui
, lhs
)
3697 g
= USE_STMT (use_p
);
3698 if (!is_gimple_assign (g
)
3699 || gimple_assign_rhs_code (g
) != IMAGPART_EXPR
)
3701 tree lhs2
= gimple_assign_lhs (g
);
3703 single_imm_use (lhs2
, &use_p
, &use_stmt
);
3704 lhs2
= gimple_assign_lhs (use_stmt
);
3705 gimple_stmt_iterator gsi
= gsi_for_stmt (use_stmt
);
3706 if (useless_type_conversion_p (TREE_TYPE (lhs2
), TREE_TYPE (ovf
)))
3707 g
= gimple_build_assign (lhs2
, ovf
);
3709 g
= gimple_build_assign (lhs2
, NOP_EXPR
, ovf
);
3710 gsi_replace (&gsi
, g
, true);
3711 if (gsi_stmt (m_gsi
) == use_stmt
)
3712 m_gsi
= gsi_for_stmt (g
);
3716 else if (ovf
!= boolean_false_node
)
3718 g
= gimple_build_cond (NE_EXPR
, ovf
, boolean_false_node
,
3719 NULL_TREE
, NULL_TREE
);
3720 edge edge_true
, edge_false
;
3721 if_then (g
, profile_probability::very_unlikely (),
3722 edge_true
, edge_false
);
3723 tree zero
= build_zero_cst (TREE_TYPE (lhs
));
3724 tree fn
= ubsan_build_overflow_builtin (code
, m_loc
,
3727 force_gimple_operand_gsi (&m_gsi
, fn
, true, NULL_TREE
,
3728 true, GSI_SAME_STMT
);
3729 m_gsi
= gsi_after_labels (edge_true
->dest
);
3734 tree clobber
= build_clobber (TREE_TYPE (var
), CLOBBER_STORAGE_END
);
3735 g
= gimple_build_assign (var
, clobber
);
3736 gsi_insert_after (&m_gsi
, g
, GSI_SAME_STMT
);
3740 /* Helper function for lower_addsub_overflow and lower_mul_overflow.
3741 Given precisions of result TYPE (PREC), argument 0 precision PREC0,
3742 argument 1 precision PREC1 and minimum precision for the result
3743 PREC2, compute *START, *END, *CHECK_ZERO and return OVF. */
3746 arith_overflow (tree_code code
, tree type
, int prec
, int prec0
, int prec1
,
3747 int prec2
, unsigned *start
, unsigned *end
, bool *check_zero
)
3752 /* Ignore this special rule for subtraction, even if both
3753 prec0 >= 0 and prec1 >= 0, their subtraction can be negative
3754 in infinite precision. */
3755 if (code
!= MINUS_EXPR
&& prec0
>= 0 && prec1
>= 0)
3757 /* Result in [0, prec2) is unsigned, if prec > prec2,
3758 all bits above it will be zero. */
3759 if ((prec
- !TYPE_UNSIGNED (type
)) >= prec2
)
3760 return boolean_false_node
;
3763 /* ovf if any of bits in [start, end) is non-zero. */
3764 *start
= prec
- !TYPE_UNSIGNED (type
);
3768 else if (TYPE_UNSIGNED (type
))
3770 /* If result in [0, prec2) is signed and if prec > prec2,
3771 all bits above it will be sign bit copies. */
3774 /* ovf if bit prec - 1 is non-zero. */
3780 /* ovf if any of bits in [start, end) is non-zero. */
3785 else if (prec
>= prec2
)
3786 return boolean_false_node
;
3789 /* ovf if [start, end) bits aren't all zeros or all ones. */
3792 *check_zero
= false;
3797 /* Lower a .{ADD,SUB}_OVERFLOW call with at least one large/huge _BitInt
3798 argument or return type _Complex large/huge _BitInt. */
3801 bitint_large_huge::lower_addsub_overflow (tree obj
, gimple
*stmt
)
3803 tree arg0
= gimple_call_arg (stmt
, 0);
3804 tree arg1
= gimple_call_arg (stmt
, 1);
3805 tree lhs
= gimple_call_lhs (stmt
);
3810 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
3811 gsi_remove (&gsi
, true);
3814 gimple
*final_stmt
= gsi_stmt (m_gsi
);
3815 tree type
= TREE_TYPE (lhs
);
3816 if (TREE_CODE (type
) == COMPLEX_TYPE
)
3817 type
= TREE_TYPE (type
);
3818 int prec
= TYPE_PRECISION (type
);
3819 int prec0
= range_to_prec (arg0
, stmt
);
3820 int prec1
= range_to_prec (arg1
, stmt
);
3821 /* If PREC0 >= 0 && PREC1 >= 0 and CODE is not MINUS_EXPR, PREC2 is
3822 the be minimum unsigned precision of any possible operation's
3823 result, otherwise it is minimum signed precision.
3825 If PREC0 or PREC1 is 8, it means that argument is [0, 0xff],
3826 if PREC0 or PREC1 is 10, it means that argument is [0, 0x3ff],
3827 if PREC0 or PREC1 is -8, it means that argument is [-0x80, 0x7f],
3828 if PREC0 or PREC1 is -10, it means that argument is [-0x200, 0x1ff].
3829 PREC0 CODE PREC1 RESULT PREC2 SIGNED vs. UNSIGNED
3830 8 + 8 [0, 0x1fe] 9 UNSIGNED
3831 8 + 10 [0, 0x4fe] 11 UNSIGNED
3832 -8 + -8 [-0x100, 0xfe] 9 SIGNED
3833 -8 + -10 [-0x280, 0x27e] 11 SIGNED
3834 8 + -8 [-0x80, 0x17e] 10 SIGNED
3835 8 + -10 [-0x200, 0x2fe] 11 SIGNED
3836 10 + -8 [-0x80, 0x47e] 12 SIGNED
3837 8 - 8 [-0xff, 0xff] 9 SIGNED
3838 8 - 10 [-0x3ff, 0xff] 11 SIGNED
3839 10 - 8 [-0xff, 0x3ff] 11 SIGNED
3840 -8 - -8 [-0xff, 0xff] 9 SIGNED
3841 -8 - -10 [-0x27f, 0x27f] 11 SIGNED
3842 -10 - -8 [-0x27f, 0x27f] 11 SIGNED
3843 8 - -8 [-0x7f, 0x17f] 10 SIGNED
3844 8 - -10 [-0x1ff, 0x2ff] 11 SIGNED
3845 10 - -8 [-0x7f, 0x47f] 12 SIGNED
3846 -8 - 8 [-0x17f, 0x7f] 10 SIGNED
3847 -8 - 10 [-0x47f, 0x7f] 12 SIGNED
3848 -10 - 8 [-0x2ff, 0x1ff] 11 SIGNED */
3849 int prec2
= MAX (prec0
< 0 ? -prec0
: prec0
,
3850 prec1
< 0 ? -prec1
: prec1
);
3851 /* If operands are either both signed or both unsigned,
3852 we need just one additional bit. */
3853 prec2
= (((prec0
< 0) == (prec1
< 0)
3854 /* If one operand is signed and one unsigned and
3855 the signed one has larger precision, we need
3856 just one extra bit, otherwise two. */
3857 || (prec0
< 0 ? (prec2
== -prec0
&& prec2
!= prec1
)
3858 : (prec2
== -prec1
&& prec2
!= prec0
)))
3859 ? prec2
+ 1 : prec2
+ 2);
3860 int prec3
= MAX (prec0
< 0 ? -prec0
: prec0
,
3861 prec1
< 0 ? -prec1
: prec1
);
3862 prec3
= MAX (prec3
, prec
);
3863 tree var
= NULL_TREE
;
3864 tree orig_obj
= obj
;
3865 if (obj
== NULL_TREE
3866 && TREE_CODE (type
) == BITINT_TYPE
3867 && bitint_precision_kind (type
) >= bitint_prec_large
3869 && bitmap_bit_p (m_names
, SSA_NAME_VERSION (lhs
)))
3871 int part
= var_to_partition (m_map
, lhs
);
3872 gcc_assert (m_vars
[part
] != NULL_TREE
);
3874 if (TREE_TYPE (lhs
) == type
)
3877 if (TREE_CODE (type
) != BITINT_TYPE
3878 || bitint_precision_kind (type
) < bitint_prec_large
)
3880 unsigned HOST_WIDE_INT nelts
= CEIL (prec
, limb_prec
);
3881 tree atype
= build_array_type_nelts (m_limb_type
, nelts
);
3882 var
= create_tmp_var (atype
);
3885 enum tree_code code
;
3886 switch (gimple_call_internal_fn (stmt
))
3888 case IFN_ADD_OVERFLOW
:
3889 case IFN_UBSAN_CHECK_ADD
:
3892 case IFN_SUB_OVERFLOW
:
3893 case IFN_UBSAN_CHECK_SUB
:
3899 unsigned start
, end
;
3901 tree ovf
= arith_overflow (code
, type
, prec
, prec0
, prec1
, prec2
,
3902 &start
, &end
, &check_zero
);
3904 unsigned startlimb
, endlimb
;
3912 startlimb
= start
/ limb_prec
;
3913 endlimb
= (end
- 1) / limb_prec
;
3916 int prec4
= ovf
!= NULL_TREE
? prec
: prec3
;
3917 bitint_prec_kind kind
= bitint_precision_kind (prec4
);
3918 unsigned cnt
, rem
= 0, fin
= 0;
3919 tree idx
= NULL_TREE
, idx_first
= NULL_TREE
, idx_next
= NULL_TREE
;
3920 bool last_ovf
= (ovf
== NULL_TREE
3921 && CEIL (prec2
, limb_prec
) > CEIL (prec3
, limb_prec
));
3922 if (kind
!= bitint_prec_huge
)
3923 cnt
= CEIL (prec4
, limb_prec
) + last_ovf
;
3926 rem
= (prec4
% (2 * limb_prec
));
3927 fin
= (prec4
- rem
) / limb_prec
;
3928 cnt
= 2 + CEIL (rem
, limb_prec
) + last_ovf
;
3929 idx
= idx_first
= create_loop (size_zero_node
, &idx_next
);
3932 if (kind
== bitint_prec_huge
)
3933 m_upwards_2limb
= fin
;
3936 tree type0
= TREE_TYPE (arg0
);
3937 tree type1
= TREE_TYPE (arg1
);
3939 if (bitint_precision_kind (prec5
) < bitint_prec_large
)
3940 prec5
= MAX (TYPE_PRECISION (type0
), TYPE_PRECISION (type1
));
3941 if (TYPE_PRECISION (type0
) < prec5
)
3943 type0
= build_bitint_type (prec5
, TYPE_UNSIGNED (type0
));
3944 if (TREE_CODE (arg0
) == INTEGER_CST
)
3945 arg0
= fold_convert (type0
, arg0
);
3947 if (TYPE_PRECISION (type1
) < prec5
)
3949 type1
= build_bitint_type (prec5
, TYPE_UNSIGNED (type1
));
3950 if (TREE_CODE (arg1
) == INTEGER_CST
)
3951 arg1
= fold_convert (type1
, arg1
);
3953 unsigned int data_cnt
= 0;
3954 tree last_rhs1
= NULL_TREE
, last_rhs2
= NULL_TREE
;
3955 tree cmp
= build_zero_cst (m_limb_type
);
3956 unsigned prec_limbs
= CEIL ((unsigned) prec
, limb_prec
);
3957 tree ovf_out
= NULL_TREE
, cmp_out
= NULL_TREE
;
3958 for (unsigned i
= 0; i
< cnt
; i
++)
3962 if (kind
!= bitint_prec_huge
)
3965 idx
= size_int (fin
+ (i
> 2));
3966 if (!last_ovf
|| i
< cnt
- 1)
3968 if (type0
!= TREE_TYPE (arg0
))
3969 rhs1
= handle_cast (type0
, arg0
, idx
);
3971 rhs1
= handle_operand (arg0
, idx
);
3972 if (type1
!= TREE_TYPE (arg1
))
3973 rhs2
= handle_cast (type1
, arg1
, idx
);
3975 rhs2
= handle_operand (arg1
, idx
);
3977 data_cnt
= m_data_cnt
;
3978 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (rhs1
)))
3979 rhs1
= add_cast (m_limb_type
, rhs1
);
3980 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (rhs2
)))
3981 rhs2
= add_cast (m_limb_type
, rhs2
);
3987 m_data_cnt
= data_cnt
;
3988 if (TYPE_UNSIGNED (type0
))
3989 rhs1
= build_zero_cst (m_limb_type
);
3992 rhs1
= add_cast (signed_type_for (m_limb_type
), last_rhs1
);
3993 if (TREE_CODE (rhs1
) == INTEGER_CST
)
3994 rhs1
= build_int_cst (m_limb_type
,
3995 tree_int_cst_sgn (rhs1
) < 0 ? -1 : 0);
3998 tree lpm1
= build_int_cst (unsigned_type_node
,
4000 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1
)),
4001 RSHIFT_EXPR
, rhs1
, lpm1
);
4003 rhs1
= add_cast (m_limb_type
, gimple_assign_lhs (g
));
4006 if (TYPE_UNSIGNED (type1
))
4007 rhs2
= build_zero_cst (m_limb_type
);
4010 rhs2
= add_cast (signed_type_for (m_limb_type
), last_rhs2
);
4011 if (TREE_CODE (rhs2
) == INTEGER_CST
)
4012 rhs2
= build_int_cst (m_limb_type
,
4013 tree_int_cst_sgn (rhs2
) < 0 ? -1 : 0);
4016 tree lpm1
= build_int_cst (unsigned_type_node
,
4018 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (rhs2
)),
4019 RSHIFT_EXPR
, rhs2
, lpm1
);
4021 rhs2
= add_cast (m_limb_type
, gimple_assign_lhs (g
));
4025 tree rhs
= handle_plus_minus (code
, rhs1
, rhs2
, idx
);
4026 if (ovf
!= boolean_false_node
)
4028 if (tree_fits_uhwi_p (idx
))
4030 unsigned limb
= tree_to_uhwi (idx
);
4031 if (limb
>= startlimb
&& limb
<= endlimb
)
4033 tree l
= arith_overflow_extract_bits (start
, end
, rhs
,
4035 tree this_ovf
= make_ssa_name (boolean_type_node
);
4036 if (ovf
== NULL_TREE
&& !check_zero
)
4039 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
4041 build_int_cst (m_limb_type
, 1));
4043 g
= gimple_build_assign (this_ovf
, GT_EXPR
,
4044 gimple_assign_lhs (g
),
4045 build_int_cst (m_limb_type
, 1));
4048 g
= gimple_build_assign (this_ovf
, NE_EXPR
, l
, cmp
);
4050 if (ovf
== NULL_TREE
)
4054 tree b
= make_ssa_name (boolean_type_node
);
4055 g
= gimple_build_assign (b
, BIT_IOR_EXPR
, ovf
, this_ovf
);
4061 else if (startlimb
< fin
)
4063 if (m_first
&& startlimb
+ 2 < fin
)
4066 ovf
= prepare_data_in_out (boolean_false_node
, idx
, &data_out
);
4067 ovf_out
= m_data
.pop ();
4071 cmp
= prepare_data_in_out (cmp
, idx
, &data_out
);
4072 cmp_out
= m_data
.pop ();
4076 if (i
!= 0 || startlimb
!= fin
- 1)
4079 bool single_comparison
4080 = (startlimb
+ 2 >= fin
|| (startlimb
& 1) != (i
& 1));
4081 if (!single_comparison
)
4084 if (!check_zero
&& (start
% limb_prec
) == 0)
4085 single_comparison
= true;
4087 else if ((startlimb
& 1) == (i
& 1))
4091 g
= gimple_build_cond (cmp_code
, idx
, size_int (startlimb
),
4092 NULL_TREE
, NULL_TREE
);
4093 edge edge_true_true
, edge_true_false
, edge_false
;
4095 if (!single_comparison
)
4096 g2
= gimple_build_cond (NE_EXPR
, idx
,
4097 size_int (startlimb
), NULL_TREE
,
4099 if_then_if_then_else (g
, g2
, profile_probability::likely (),
4100 profile_probability::likely (),
4101 edge_true_true
, edge_true_false
,
4103 unsigned tidx
= startlimb
+ (cmp_code
== GT_EXPR
);
4104 tree l
= arith_overflow_extract_bits (start
, end
, rhs
, tidx
,
4106 tree this_ovf
= make_ssa_name (boolean_type_node
);
4107 if (cmp_code
!= GT_EXPR
&& !check_zero
)
4109 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
4111 build_int_cst (m_limb_type
, 1));
4113 g
= gimple_build_assign (this_ovf
, GT_EXPR
,
4114 gimple_assign_lhs (g
),
4115 build_int_cst (m_limb_type
, 1));
4118 g
= gimple_build_assign (this_ovf
, NE_EXPR
, l
, cmp
);
4120 if (cmp_code
== GT_EXPR
)
4122 tree t
= make_ssa_name (boolean_type_node
);
4123 g
= gimple_build_assign (t
, BIT_IOR_EXPR
, ovf
, this_ovf
);
4127 tree this_ovf2
= NULL_TREE
;
4128 if (!single_comparison
)
4130 m_gsi
= gsi_after_labels (edge_true_true
->src
);
4131 tree t
= make_ssa_name (boolean_type_node
);
4132 g
= gimple_build_assign (t
, NE_EXPR
, rhs
, cmp
);
4134 this_ovf2
= make_ssa_name (boolean_type_node
);
4135 g
= gimple_build_assign (this_ovf2
, BIT_IOR_EXPR
,
4139 m_gsi
= gsi_after_labels (edge_true_false
->dest
);
4141 if (i
== 1 && ovf_out
)
4144 t
= make_ssa_name (boolean_type_node
);
4145 gphi
*phi
= create_phi_node (t
, edge_true_false
->dest
);
4146 add_phi_arg (phi
, this_ovf
, edge_true_false
,
4148 add_phi_arg (phi
, ovf
? ovf
4149 : boolean_false_node
, edge_false
,
4152 add_phi_arg (phi
, this_ovf2
, edge_true_true
,
4155 if (!check_zero
&& cmp_code
!= GT_EXPR
)
4157 t
= cmp_out
? cmp_out
: make_ssa_name (m_limb_type
);
4158 phi
= create_phi_node (t
, edge_true_false
->dest
);
4159 add_phi_arg (phi
, l
, edge_true_false
, UNKNOWN_LOCATION
);
4160 add_phi_arg (phi
, cmp
, edge_false
, UNKNOWN_LOCATION
);
4162 add_phi_arg (phi
, cmp
, edge_true_true
,
4172 if (tree_fits_uhwi_p (idx
) && tree_to_uhwi (idx
) >= prec_limbs
)
4174 else if (!tree_fits_uhwi_p (idx
)
4175 && (unsigned) prec
< (fin
- (i
== 0)) * limb_prec
)
4177 bool single_comparison
4178 = (((unsigned) prec
% limb_prec
) == 0
4179 || prec_limbs
+ 1 >= fin
4180 || (prec_limbs
& 1) == (i
& 1));
4181 g
= gimple_build_cond (LE_EXPR
, idx
, size_int (prec_limbs
- 1),
4182 NULL_TREE
, NULL_TREE
);
4184 if (!single_comparison
)
4185 g2
= gimple_build_cond (LT_EXPR
, idx
,
4186 size_int (prec_limbs
- 1),
4187 NULL_TREE
, NULL_TREE
);
4188 edge edge_true_true
, edge_true_false
, edge_false
;
4189 if_then_if_then_else (g
, g2
, profile_probability::likely (),
4190 profile_probability::likely (),
4191 edge_true_true
, edge_true_false
,
4193 tree l
= limb_access (type
, var
? var
: obj
, idx
, true);
4194 g
= gimple_build_assign (l
, rhs
);
4196 if (!single_comparison
)
4198 m_gsi
= gsi_after_labels (edge_true_true
->src
);
4199 l
= limb_access (type
, var
? var
: obj
,
4200 size_int (prec_limbs
- 1), true);
4201 if (!useless_type_conversion_p (TREE_TYPE (l
),
4203 rhs
= add_cast (TREE_TYPE (l
), rhs
);
4204 g
= gimple_build_assign (l
, rhs
);
4207 m_gsi
= gsi_after_labels (edge_true_false
->dest
);
4211 tree l
= limb_access (type
, var
? var
: obj
, idx
, true);
4212 if (!useless_type_conversion_p (TREE_TYPE (l
), TREE_TYPE (rhs
)))
4213 rhs
= add_cast (TREE_TYPE (l
), rhs
);
4214 g
= gimple_build_assign (l
, rhs
);
4219 if (kind
== bitint_prec_huge
&& i
<= 1)
4223 idx
= make_ssa_name (sizetype
);
4224 g
= gimple_build_assign (idx
, PLUS_EXPR
, idx_first
,
4230 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx_first
,
4233 g
= gimple_build_cond (NE_EXPR
, idx_next
, size_int (fin
),
4234 NULL_TREE
, NULL_TREE
);
4236 m_gsi
= gsi_for_stmt (final_stmt
);
4242 finish_arith_overflow (var
, obj
, type
, ovf
, lhs
, orig_obj
, stmt
, code
);
4245 /* Lower a .MUL_OVERFLOW call with at least one large/huge _BitInt
4246 argument or return type _Complex large/huge _BitInt. */
4249 bitint_large_huge::lower_mul_overflow (tree obj
, gimple
*stmt
)
4251 tree arg0
= gimple_call_arg (stmt
, 0);
4252 tree arg1
= gimple_call_arg (stmt
, 1);
4253 tree lhs
= gimple_call_lhs (stmt
);
4256 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
4257 gsi_remove (&gsi
, true);
4260 gimple
*final_stmt
= gsi_stmt (m_gsi
);
4261 tree type
= TREE_TYPE (lhs
);
4262 if (TREE_CODE (type
) == COMPLEX_TYPE
)
4263 type
= TREE_TYPE (type
);
4264 int prec
= TYPE_PRECISION (type
), prec0
, prec1
;
4265 arg0
= handle_operand_addr (arg0
, stmt
, NULL
, &prec0
);
4266 arg1
= handle_operand_addr (arg1
, stmt
, NULL
, &prec1
);
4267 int prec2
= ((prec0
< 0 ? -prec0
: prec0
)
4268 + (prec1
< 0 ? -prec1
: prec1
));
4269 if (prec0
== 1 || prec1
== 1)
4271 tree var
= NULL_TREE
;
4272 tree orig_obj
= obj
;
4273 bool force_var
= false;
4274 if (obj
== NULL_TREE
4275 && TREE_CODE (type
) == BITINT_TYPE
4276 && bitint_precision_kind (type
) >= bitint_prec_large
4278 && bitmap_bit_p (m_names
, SSA_NAME_VERSION (lhs
)))
4280 int part
= var_to_partition (m_map
, lhs
);
4281 gcc_assert (m_vars
[part
] != NULL_TREE
);
4283 if (TREE_TYPE (lhs
) == type
)
4286 else if (obj
!= NULL_TREE
&& DECL_P (obj
))
4288 for (int i
= 0; i
< 2; ++i
)
4290 tree arg
= i
? arg1
: arg0
;
4291 if (TREE_CODE (arg
) == ADDR_EXPR
)
4292 arg
= TREE_OPERAND (arg
, 0);
4293 if (get_base_address (arg
) == obj
)
4300 if (obj
== NULL_TREE
4302 || TREE_CODE (type
) != BITINT_TYPE
4303 || bitint_precision_kind (type
) < bitint_prec_large
4304 || prec2
> (CEIL (prec
, limb_prec
) * limb_prec
* (orig_obj
? 1 : 2)))
4306 unsigned HOST_WIDE_INT nelts
= CEIL (MAX (prec
, prec2
), limb_prec
);
4307 tree atype
= build_array_type_nelts (m_limb_type
, nelts
);
4308 var
= create_tmp_var (atype
);
4310 tree addr
= build_fold_addr_expr (var
? var
: obj
);
4311 addr
= force_gimple_operand_gsi (&m_gsi
, addr
, true,
4312 NULL_TREE
, true, GSI_SAME_STMT
);
4313 tree sitype
= lang_hooks
.types
.type_for_mode (SImode
, 0);
4315 = gimple_build_call_internal (IFN_MULBITINT
, 6,
4316 addr
, build_int_cst (sitype
,
4318 arg0
, build_int_cst (sitype
, prec0
),
4319 arg1
, build_int_cst (sitype
, prec1
));
4322 unsigned start
, end
;
4324 tree ovf
= arith_overflow (MULT_EXPR
, type
, prec
, prec0
, prec1
, prec2
,
4325 &start
, &end
, &check_zero
);
4326 if (ovf
== NULL_TREE
)
4328 unsigned startlimb
= start
/ limb_prec
;
4329 unsigned endlimb
= (end
- 1) / limb_prec
;
4331 bool use_loop
= false;
4332 if (startlimb
== endlimb
)
4334 else if (startlimb
+ 1 == endlimb
)
4336 else if ((end
% limb_prec
) == 0)
4344 use_loop
= startlimb
+ 2 < endlimb
;
4348 tree l
= limb_access (NULL_TREE
, var
? var
: obj
,
4349 size_int (startlimb
), true);
4350 g
= gimple_build_assign (make_ssa_name (m_limb_type
), l
);
4352 l
= arith_overflow_extract_bits (start
, end
, gimple_assign_lhs (g
),
4353 startlimb
, check_zero
);
4354 ovf
= make_ssa_name (boolean_type_node
);
4356 g
= gimple_build_assign (ovf
, NE_EXPR
, l
,
4357 build_zero_cst (m_limb_type
));
4360 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
4362 build_int_cst (m_limb_type
, 1));
4364 g
= gimple_build_assign (ovf
, GT_EXPR
, gimple_assign_lhs (g
),
4365 build_int_cst (m_limb_type
, 1));
4371 basic_block edge_bb
= NULL
;
4372 gimple_stmt_iterator gsi
= m_gsi
;
4374 edge e
= split_block (gsi_bb (gsi
), gsi_stmt (gsi
));
4376 m_gsi
= gsi_end_bb (edge_bb
);
4378 tree cmp
= build_zero_cst (m_limb_type
);
4379 for (unsigned i
= 0; i
< cnt
; i
++)
4381 tree idx
, idx_next
= NULL_TREE
;
4383 idx
= size_int (startlimb
);
4385 idx
= size_int (endlimb
);
4387 idx
= create_loop (size_int (startlimb
+ 1), &idx_next
);
4389 idx
= size_int (startlimb
+ 1);
4390 tree l
= limb_access (NULL_TREE
, var
? var
: obj
, idx
, true);
4391 g
= gimple_build_assign (make_ssa_name (m_limb_type
), l
);
4393 l
= gimple_assign_lhs (g
);
4394 if (i
== 0 || i
== 2)
4395 l
= arith_overflow_extract_bits (start
, end
, l
,
4398 if (i
== 0 && !check_zero
)
4401 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
4403 build_int_cst (m_limb_type
, 1));
4405 g
= gimple_build_cond (GT_EXPR
, gimple_assign_lhs (g
),
4406 build_int_cst (m_limb_type
, 1),
4407 NULL_TREE
, NULL_TREE
);
4410 g
= gimple_build_cond (NE_EXPR
, l
, cmp
, NULL_TREE
, NULL_TREE
);
4412 edge e1
= split_block (gsi_bb (m_gsi
), g
);
4413 e1
->flags
= EDGE_FALSE_VALUE
;
4414 edge e2
= make_edge (e1
->src
, gimple_bb (final_stmt
),
4416 e1
->probability
= profile_probability::likely ();
4417 e2
->probability
= e1
->probability
.invert ();
4419 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e2
->src
);
4420 m_gsi
= gsi_after_labels (e1
->dest
);
4421 if (i
== 1 && use_loop
)
4423 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
,
4426 g
= gimple_build_cond (NE_EXPR
, idx_next
,
4427 size_int (endlimb
+ (cnt
== 1)),
4428 NULL_TREE
, NULL_TREE
);
4430 edge true_edge
, false_edge
;
4431 extract_true_false_edges_from_block (gsi_bb (m_gsi
),
4434 m_gsi
= gsi_after_labels (false_edge
->dest
);
4439 ovf
= make_ssa_name (boolean_type_node
);
4440 basic_block bb
= gimple_bb (final_stmt
);
4441 gphi
*phi
= create_phi_node (ovf
, bb
);
4442 edge e1
= find_edge (gsi_bb (m_gsi
), bb
);
4444 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
4446 tree val
= e
== e1
? boolean_false_node
: boolean_true_node
;
4447 add_phi_arg (phi
, val
, e
, UNKNOWN_LOCATION
);
4449 m_gsi
= gsi_for_stmt (final_stmt
);
4453 finish_arith_overflow (var
, obj
, type
, ovf
, lhs
, orig_obj
, stmt
, MULT_EXPR
);
4456 /* Lower REALPART_EXPR or IMAGPART_EXPR stmt extracting part of result from
4457 .{ADD,SUB,MUL}_OVERFLOW call. */
4460 bitint_large_huge::lower_cplxpart_stmt (tree obj
, gimple
*stmt
)
4462 tree rhs1
= gimple_assign_rhs1 (stmt
);
4463 rhs1
= TREE_OPERAND (rhs1
, 0);
4464 if (obj
== NULL_TREE
)
4466 int part
= var_to_partition (m_map
, gimple_assign_lhs (stmt
));
4467 gcc_assert (m_vars
[part
] != NULL_TREE
);
4470 if (TREE_CODE (rhs1
) == SSA_NAME
4472 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (rhs1
))))
4474 lower_call (obj
, SSA_NAME_DEF_STMT (rhs1
));
4477 int part
= var_to_partition (m_map
, rhs1
);
4478 gcc_assert (m_vars
[part
] != NULL_TREE
);
4479 tree var
= m_vars
[part
];
4480 unsigned HOST_WIDE_INT nelts
4481 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj
))) / limb_prec
;
4482 tree atype
= build_array_type_nelts (m_limb_type
, nelts
);
4483 if (!useless_type_conversion_p (atype
, TREE_TYPE (obj
)))
4484 obj
= build1 (VIEW_CONVERT_EXPR
, atype
, obj
);
4485 tree off
= build_int_cst (build_pointer_type (TREE_TYPE (var
)),
4486 gimple_assign_rhs_code (stmt
) == REALPART_EXPR
4487 ? 0 : nelts
* m_limb_size
);
4488 tree v2
= build2 (MEM_REF
, atype
, build_fold_addr_expr (var
), off
);
4489 gimple
*g
= gimple_build_assign (obj
, v2
);
4493 /* Lower COMPLEX_EXPR stmt. */
4496 bitint_large_huge::lower_complexexpr_stmt (gimple
*stmt
)
4498 tree lhs
= gimple_assign_lhs (stmt
);
4499 tree rhs1
= gimple_assign_rhs1 (stmt
);
4500 tree rhs2
= gimple_assign_rhs2 (stmt
);
4501 int part
= var_to_partition (m_map
, lhs
);
4502 gcc_assert (m_vars
[part
] != NULL_TREE
);
4504 unsigned HOST_WIDE_INT nelts
4505 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (rhs1
))) / limb_prec
;
4506 tree atype
= build_array_type_nelts (m_limb_type
, nelts
);
4507 tree zero
= build_zero_cst (build_pointer_type (TREE_TYPE (lhs
)));
4508 tree v1
= build2 (MEM_REF
, atype
, build_fold_addr_expr (lhs
), zero
);
4510 if (TREE_CODE (rhs1
) == SSA_NAME
)
4512 part
= var_to_partition (m_map
, rhs1
);
4513 gcc_assert (m_vars
[part
] != NULL_TREE
);
4516 else if (integer_zerop (rhs1
))
4517 v2
= build_zero_cst (atype
);
4519 v2
= tree_output_constant_def (rhs1
);
4520 if (!useless_type_conversion_p (atype
, TREE_TYPE (v2
)))
4521 v2
= build1 (VIEW_CONVERT_EXPR
, atype
, v2
);
4522 gimple
*g
= gimple_build_assign (v1
, v2
);
4524 tree off
= fold_convert (build_pointer_type (TREE_TYPE (lhs
)),
4525 TYPE_SIZE_UNIT (atype
));
4526 v1
= build2 (MEM_REF
, atype
, build_fold_addr_expr (lhs
), off
);
4527 if (TREE_CODE (rhs2
) == SSA_NAME
)
4529 part
= var_to_partition (m_map
, rhs2
);
4530 gcc_assert (m_vars
[part
] != NULL_TREE
);
4533 else if (integer_zerop (rhs2
))
4534 v2
= build_zero_cst (atype
);
4536 v2
= tree_output_constant_def (rhs2
);
4537 if (!useless_type_conversion_p (atype
, TREE_TYPE (v2
)))
4538 v2
= build1 (VIEW_CONVERT_EXPR
, atype
, v2
);
4539 g
= gimple_build_assign (v1
, v2
);
4543 /* Lower a .{CLZ,CTZ,CLRSB,FFS,PARITY,POPCOUNT} call with one large/huge _BitInt
4547 bitint_large_huge::lower_bit_query (gimple
*stmt
)
4549 tree arg0
= gimple_call_arg (stmt
, 0);
4550 tree arg1
= (gimple_call_num_args (stmt
) == 2
4551 ? gimple_call_arg (stmt
, 1) : NULL_TREE
);
4552 tree lhs
= gimple_call_lhs (stmt
);
4557 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
4558 gsi_remove (&gsi
, true);
4561 tree type
= TREE_TYPE (arg0
);
4562 gcc_assert (TREE_CODE (type
) == BITINT_TYPE
);
4563 bitint_prec_kind kind
= bitint_precision_kind (type
);
4564 gcc_assert (kind
>= bitint_prec_large
);
4565 enum internal_fn ifn
= gimple_call_internal_fn (stmt
);
4566 enum built_in_function fcode
= END_BUILTINS
;
4567 gcc_assert (TYPE_PRECISION (unsigned_type_node
) == limb_prec
4568 || TYPE_PRECISION (long_unsigned_type_node
) == limb_prec
4569 || TYPE_PRECISION (long_long_unsigned_type_node
) == limb_prec
);
4573 if (TYPE_PRECISION (unsigned_type_node
) == limb_prec
)
4574 fcode
= BUILT_IN_CLZ
;
4575 else if (TYPE_PRECISION (long_unsigned_type_node
) == limb_prec
)
4576 fcode
= BUILT_IN_CLZL
;
4578 fcode
= BUILT_IN_CLZLL
;
4581 /* .FFS (X) is .CTZ (X, -1) + 1, though under the hood
4582 we don't add the addend at the end. */
4583 arg1
= integer_zero_node
;
4586 if (TYPE_PRECISION (unsigned_type_node
) == limb_prec
)
4587 fcode
= BUILT_IN_CTZ
;
4588 else if (TYPE_PRECISION (long_unsigned_type_node
) == limb_prec
)
4589 fcode
= BUILT_IN_CTZL
;
4591 fcode
= BUILT_IN_CTZLL
;
4595 if (TYPE_PRECISION (unsigned_type_node
) == limb_prec
)
4596 fcode
= BUILT_IN_CLRSB
;
4597 else if (TYPE_PRECISION (long_unsigned_type_node
) == limb_prec
)
4598 fcode
= BUILT_IN_CLRSBL
;
4600 fcode
= BUILT_IN_CLRSBLL
;
4603 if (TYPE_PRECISION (unsigned_type_node
) == limb_prec
)
4604 fcode
= BUILT_IN_PARITY
;
4605 else if (TYPE_PRECISION (long_unsigned_type_node
) == limb_prec
)
4606 fcode
= BUILT_IN_PARITYL
;
4608 fcode
= BUILT_IN_PARITYLL
;
4612 if (TYPE_PRECISION (unsigned_type_node
) == limb_prec
)
4613 fcode
= BUILT_IN_POPCOUNT
;
4614 else if (TYPE_PRECISION (long_unsigned_type_node
) == limb_prec
)
4615 fcode
= BUILT_IN_POPCOUNTL
;
4617 fcode
= BUILT_IN_POPCOUNTLL
;
4623 tree fndecl
= builtin_decl_explicit (fcode
), res
= NULL_TREE
;
4624 unsigned cnt
= 0, rem
= 0, end
= 0, prec
= TYPE_PRECISION (type
);
4625 struct bq_details
{ edge e
; tree val
, addend
; } *bqp
= NULL
;
4626 basic_block edge_bb
= NULL
;
4629 tree idx
= NULL_TREE
, idx_first
= NULL_TREE
, idx_next
= NULL_TREE
;
4630 if (kind
== bitint_prec_large
)
4631 cnt
= CEIL (prec
, limb_prec
);
4634 rem
= (prec
% (2 * limb_prec
));
4635 end
= (prec
- rem
) / limb_prec
;
4636 cnt
= 2 + CEIL (rem
, limb_prec
);
4637 idx
= idx_first
= create_loop (size_zero_node
, &idx_next
);
4640 if (ifn
== IFN_CTZ
|| ifn
== IFN_FFS
)
4642 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
4644 edge e
= split_block (gsi_bb (gsi
), gsi_stmt (gsi
));
4646 if (kind
== bitint_prec_large
)
4647 m_gsi
= gsi_end_bb (edge_bb
);
4648 bqp
= XALLOCAVEC (struct bq_details
, cnt
);
4651 m_after_stmt
= stmt
;
4652 if (kind
!= bitint_prec_large
)
4653 m_upwards_2limb
= end
;
4655 for (unsigned i
= 0; i
< cnt
; i
++)
4658 if (kind
== bitint_prec_large
)
4661 idx
= size_int (end
+ (i
> 2));
4663 tree rhs1
= handle_operand (arg0
, idx
);
4664 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (rhs1
)))
4666 if (!TYPE_UNSIGNED (TREE_TYPE (rhs1
)))
4667 rhs1
= add_cast (unsigned_type_for (TREE_TYPE (rhs1
)), rhs1
);
4668 rhs1
= add_cast (m_limb_type
, rhs1
);
4672 if (ifn
== IFN_PARITY
)
4673 in
= prepare_data_in_out (build_zero_cst (m_limb_type
), idx
, &out
);
4674 else if (ifn
== IFN_FFS
)
4675 in
= prepare_data_in_out (integer_one_node
, idx
, &out
);
4677 in
= prepare_data_in_out (integer_zero_node
, idx
, &out
);
4683 g
= gimple_build_cond (NE_EXPR
, rhs1
,
4684 build_zero_cst (m_limb_type
),
4685 NULL_TREE
, NULL_TREE
);
4688 e1
= split_block (gsi_bb (m_gsi
), g
);
4689 e1
->flags
= EDGE_FALSE_VALUE
;
4690 e2
= make_edge (e1
->src
, gimple_bb (stmt
), EDGE_TRUE_VALUE
);
4691 e1
->probability
= profile_probability::unlikely ();
4692 e2
->probability
= e1
->probability
.invert ();
4694 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e2
->src
);
4695 m_gsi
= gsi_after_labels (e1
->dest
);
4698 if (tree_fits_uhwi_p (idx
))
4700 = build_int_cst (integer_type_node
,
4701 tree_to_uhwi (idx
) * limb_prec
4702 + (ifn
== IFN_FFS
));
4709 res
= make_ssa_name (integer_type_node
);
4710 g
= gimple_build_assign (res
, PLUS_EXPR
, in
,
4711 build_int_cst (integer_type_node
,
4714 m_data
[m_data_cnt
] = res
;
4718 if (!integer_zerop (in
))
4720 if (kind
== bitint_prec_huge
&& i
== 1)
4723 res
= make_ssa_name (m_limb_type
);
4724 g
= gimple_build_assign (res
, BIT_XOR_EXPR
, in
, rhs1
);
4729 m_data
[m_data_cnt
] = res
;
4732 g
= gimple_build_call (fndecl
, 1, rhs1
);
4733 tem
= make_ssa_name (integer_type_node
);
4734 gimple_call_set_lhs (g
, tem
);
4736 if (!integer_zerop (in
))
4738 if (kind
== bitint_prec_huge
&& i
== 1)
4741 res
= make_ssa_name (integer_type_node
);
4742 g
= gimple_build_assign (res
, PLUS_EXPR
, in
, tem
);
4747 m_data
[m_data_cnt
] = res
;
4754 if (kind
== bitint_prec_huge
&& i
<= 1)
4758 idx
= make_ssa_name (sizetype
);
4759 g
= gimple_build_assign (idx
, PLUS_EXPR
, idx_first
,
4765 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx_first
,
4768 g
= gimple_build_cond (NE_EXPR
, idx_next
, size_int (end
),
4769 NULL_TREE
, NULL_TREE
);
4771 if (ifn
== IFN_CTZ
|| ifn
== IFN_FFS
)
4772 m_gsi
= gsi_after_labels (edge_bb
);
4774 m_gsi
= gsi_for_stmt (stmt
);
4782 tree idx
= NULL_TREE
, idx_next
= NULL_TREE
, first
= NULL_TREE
;
4784 if (kind
== bitint_prec_large
)
4785 cnt
= CEIL (prec
, limb_prec
);
4788 rem
= prec
% limb_prec
;
4789 if (rem
== 0 && (!TYPE_UNSIGNED (type
) || ifn
== IFN_CLRSB
))
4791 end
= (prec
- rem
) / limb_prec
;
4792 cnt
= 1 + (rem
!= 0);
4793 if (ifn
== IFN_CLRSB
)
4797 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
4799 edge e
= split_block (gsi_bb (gsi
), gsi_stmt (gsi
));
4801 m_gsi
= gsi_end_bb (edge_bb
);
4804 bqp
= XALLOCAVEC (struct bq_details
, cnt
);
4807 gsi
= gsi_for_stmt (stmt
);
4809 e
= split_block (gsi_bb (gsi
), gsi_stmt (gsi
));
4811 bqp
= XALLOCAVEC (struct bq_details
, 2 * cnt
);
4814 for (unsigned i
= 0; i
< cnt
; i
++)
4817 if (kind
== bitint_prec_large
)
4818 idx
= size_int (cnt
- i
- 1);
4819 else if (i
== cnt
- 1)
4820 idx
= create_loop (size_int (end
- 1), &idx_next
);
4822 idx
= size_int (end
);
4824 tree rhs1
= handle_operand (arg0
, idx
);
4825 if (!useless_type_conversion_p (m_limb_type
, TREE_TYPE (rhs1
)))
4827 if (ifn
== IFN_CLZ
&& !TYPE_UNSIGNED (TREE_TYPE (rhs1
)))
4828 rhs1
= add_cast (unsigned_type_for (TREE_TYPE (rhs1
)), rhs1
);
4829 else if (ifn
== IFN_CLRSB
&& TYPE_UNSIGNED (TREE_TYPE (rhs1
)))
4830 rhs1
= add_cast (signed_type_for (TREE_TYPE (rhs1
)), rhs1
);
4831 rhs1
= add_cast (m_limb_type
, rhs1
);
4836 g
= gimple_build_cond (NE_EXPR
, rhs1
,
4837 build_zero_cst (m_limb_type
),
4838 NULL_TREE
, NULL_TREE
);
4840 edge e1
= split_block (gsi_bb (m_gsi
), g
);
4841 e1
->flags
= EDGE_FALSE_VALUE
;
4842 edge e2
= make_edge (e1
->src
, gimple_bb (stmt
), EDGE_TRUE_VALUE
);
4843 e1
->probability
= profile_probability::unlikely ();
4844 e2
->probability
= e1
->probability
.invert ();
4846 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e2
->src
);
4847 m_gsi
= gsi_after_labels (e1
->dest
);
4856 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
4858 build_int_cst (m_limb_type
, 1));
4860 g
= gimple_build_cond (GT_EXPR
, gimple_assign_lhs (g
),
4861 build_int_cst (m_limb_type
, 1),
4862 NULL_TREE
, NULL_TREE
);
4867 g
= gimple_build_assign (make_ssa_name (m_limb_type
),
4868 BIT_XOR_EXPR
, rhs1
, first
);
4870 tree stype
= signed_type_for (m_limb_type
);
4871 g
= gimple_build_cond (LT_EXPR
,
4873 gimple_assign_lhs (g
)),
4874 build_zero_cst (stype
),
4875 NULL_TREE
, NULL_TREE
);
4877 edge e1
= split_block (gsi_bb (m_gsi
), g
);
4878 e1
->flags
= EDGE_FALSE_VALUE
;
4879 edge e2
= make_edge (e1
->src
, gimple_bb (stmt
),
4881 e1
->probability
= profile_probability::unlikely ();
4882 e2
->probability
= e1
->probability
.invert ();
4884 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
,
4886 m_gsi
= gsi_after_labels (e1
->dest
);
4888 g
= gimple_build_cond (NE_EXPR
, rhs1
, first
,
4889 NULL_TREE
, NULL_TREE
);
4892 edge e1
= split_block (gsi_bb (m_gsi
), g
);
4893 e1
->flags
= EDGE_FALSE_VALUE
;
4894 edge e2
= make_edge (e1
->src
, edge_bb
, EDGE_TRUE_VALUE
);
4895 e1
->probability
= profile_probability::unlikely ();
4896 e2
->probability
= e1
->probability
.invert ();
4898 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e2
->src
);
4899 m_gsi
= gsi_after_labels (e1
->dest
);
4900 bqp
[2 * i
+ 1].e
= e2
;
4903 if (tree_fits_uhwi_p (idx
))
4905 = build_int_cst (integer_type_node
,
4907 - (((int) tree_to_uhwi (idx
) + 1)
4908 * limb_prec
) - sub_one
);
4912 in
= build_int_cst (integer_type_node
, rem
- sub_one
);
4914 in
= prepare_data_in_out (in
, idx
, &out
);
4915 out
= m_data
[m_data_cnt
+ 1];
4917 g
= gimple_build_assign (out
, PLUS_EXPR
, in
,
4918 build_int_cst (integer_type_node
,
4921 m_data
[m_data_cnt
] = out
;
4925 if (kind
== bitint_prec_huge
&& i
== cnt
- 1)
4927 g
= gimple_build_assign (idx_next
, PLUS_EXPR
, idx
,
4930 g
= gimple_build_cond (NE_EXPR
, idx
, size_zero_node
,
4931 NULL_TREE
, NULL_TREE
);
4933 edge true_edge
, false_edge
;
4934 extract_true_false_edges_from_block (gsi_bb (m_gsi
),
4935 &true_edge
, &false_edge
);
4936 m_gsi
= gsi_after_labels (false_edge
->dest
);
4946 gphi
*phi1
, *phi2
, *phi3
;
4948 bb
= gsi_bb (m_gsi
);
4949 remove_edge (find_edge (bb
, gimple_bb (stmt
)));
4950 phi1
= create_phi_node (make_ssa_name (m_limb_type
),
4952 phi2
= create_phi_node (make_ssa_name (integer_type_node
),
4954 for (unsigned i
= 0; i
< cnt
; i
++)
4956 add_phi_arg (phi1
, bqp
[i
].val
, bqp
[i
].e
, UNKNOWN_LOCATION
);
4957 add_phi_arg (phi2
, bqp
[i
].addend
, bqp
[i
].e
, UNKNOWN_LOCATION
);
4959 if (arg1
== NULL_TREE
)
4961 g
= gimple_build_builtin_unreachable (m_loc
);
4964 m_gsi
= gsi_for_stmt (stmt
);
4965 g
= gimple_build_call (fndecl
, 1, gimple_phi_result (phi1
));
4966 gimple_call_set_lhs (g
, make_ssa_name (integer_type_node
));
4968 if (arg1
== NULL_TREE
)
4969 g
= gimple_build_assign (lhs
, PLUS_EXPR
,
4970 gimple_phi_result (phi2
),
4971 gimple_call_lhs (g
));
4974 g
= gimple_build_assign (make_ssa_name (integer_type_node
),
4975 PLUS_EXPR
, gimple_phi_result (phi2
),
4976 gimple_call_lhs (g
));
4978 edge e1
= split_block (gimple_bb (stmt
), g
);
4979 edge e2
= make_edge (bb
, e1
->dest
, EDGE_FALLTHRU
);
4980 e2
->probability
= profile_probability::always ();
4981 set_immediate_dominator (CDI_DOMINATORS
, e1
->dest
,
4982 get_immediate_dominator (CDI_DOMINATORS
,
4984 phi3
= create_phi_node (make_ssa_name (integer_type_node
), e1
->dest
);
4985 add_phi_arg (phi3
, gimple_assign_lhs (g
), e1
, UNKNOWN_LOCATION
);
4986 add_phi_arg (phi3
, arg1
, e2
, UNKNOWN_LOCATION
);
4987 m_gsi
= gsi_for_stmt (stmt
);
4988 g
= gimple_build_assign (lhs
, gimple_phi_result (phi3
));
4990 gsi_replace (&m_gsi
, g
, true);
4993 bb
= gsi_bb (m_gsi
);
4994 remove_edge (find_edge (bb
, edge_bb
));
4996 e
= make_edge (bb
, gimple_bb (stmt
), EDGE_FALLTHRU
);
4997 e
->probability
= profile_probability::always ();
4998 set_immediate_dominator (CDI_DOMINATORS
, gimple_bb (stmt
),
4999 get_immediate_dominator (CDI_DOMINATORS
,
5001 phi1
= create_phi_node (make_ssa_name (m_limb_type
),
5003 phi2
= create_phi_node (make_ssa_name (integer_type_node
),
5005 phi3
= create_phi_node (make_ssa_name (integer_type_node
),
5007 for (unsigned i
= 0; i
< cnt
; i
++)
5009 add_phi_arg (phi1
, bqp
[i
].val
, bqp
[2 * i
+ 1].e
, UNKNOWN_LOCATION
);
5010 add_phi_arg (phi2
, bqp
[i
].addend
, bqp
[2 * i
+ 1].e
,
5012 tree a
= bqp
[i
].addend
;
5013 if (i
&& kind
== bitint_prec_large
)
5014 a
= int_const_binop (PLUS_EXPR
, a
, integer_minus_one_node
);
5016 add_phi_arg (phi3
, a
, bqp
[2 * i
].e
, UNKNOWN_LOCATION
);
5018 add_phi_arg (phi3
, build_int_cst (integer_type_node
, prec
- 1), e
,
5020 m_gsi
= gsi_after_labels (edge_bb
);
5021 g
= gimple_build_call (fndecl
, 1,
5022 add_cast (signed_type_for (m_limb_type
),
5023 gimple_phi_result (phi1
)));
5024 gimple_call_set_lhs (g
, make_ssa_name (integer_type_node
));
5026 g
= gimple_build_assign (make_ssa_name (integer_type_node
),
5027 PLUS_EXPR
, gimple_call_lhs (g
),
5028 gimple_phi_result (phi2
));
5030 if (kind
!= bitint_prec_large
)
5032 g
= gimple_build_assign (make_ssa_name (integer_type_node
),
5033 PLUS_EXPR
, gimple_assign_lhs (g
),
5037 add_phi_arg (phi3
, gimple_assign_lhs (g
),
5038 find_edge (edge_bb
, gimple_bb (stmt
)), UNKNOWN_LOCATION
);
5039 m_gsi
= gsi_for_stmt (stmt
);
5040 g
= gimple_build_assign (lhs
, gimple_phi_result (phi3
));
5041 gsi_replace (&m_gsi
, g
, true);
5044 g
= gimple_build_call (fndecl
, 1, res
);
5045 gimple_call_set_lhs (g
, lhs
);
5046 gsi_replace (&m_gsi
, g
, true);
5049 g
= gimple_build_assign (lhs
, res
);
5050 gsi_replace (&m_gsi
, g
, true);
5057 /* Lower a call statement with one or more large/huge _BitInt
5058 arguments or large/huge _BitInt return value. */
5061 bitint_large_huge::lower_call (tree obj
, gimple
*stmt
)
5063 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
5064 unsigned int nargs
= gimple_call_num_args (stmt
);
5065 if (gimple_call_internal_p (stmt
))
5066 switch (gimple_call_internal_fn (stmt
))
5068 case IFN_ADD_OVERFLOW
:
5069 case IFN_SUB_OVERFLOW
:
5070 case IFN_UBSAN_CHECK_ADD
:
5071 case IFN_UBSAN_CHECK_SUB
:
5072 lower_addsub_overflow (obj
, stmt
);
5074 case IFN_MUL_OVERFLOW
:
5075 case IFN_UBSAN_CHECK_MUL
:
5076 lower_mul_overflow (obj
, stmt
);
5084 lower_bit_query (stmt
);
5089 for (unsigned int i
= 0; i
< nargs
; ++i
)
5091 tree arg
= gimple_call_arg (stmt
, i
);
5092 if (TREE_CODE (arg
) != SSA_NAME
5093 || TREE_CODE (TREE_TYPE (arg
)) != BITINT_TYPE
5094 || bitint_precision_kind (TREE_TYPE (arg
)) <= bitint_prec_middle
)
5096 int p
= var_to_partition (m_map
, arg
);
5098 gcc_assert (v
!= NULL_TREE
);
5099 if (!types_compatible_p (TREE_TYPE (arg
), TREE_TYPE (v
)))
5100 v
= build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (arg
), v
);
5101 arg
= make_ssa_name (TREE_TYPE (arg
));
5102 gimple
*g
= gimple_build_assign (arg
, v
);
5103 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5104 gimple_call_set_arg (stmt
, i
, arg
);
5105 if (m_preserved
== NULL
)
5106 m_preserved
= BITMAP_ALLOC (NULL
);
5107 bitmap_set_bit (m_preserved
, SSA_NAME_VERSION (arg
));
5109 tree lhs
= gimple_call_lhs (stmt
);
5111 && TREE_CODE (lhs
) == SSA_NAME
5112 && TREE_CODE (TREE_TYPE (lhs
)) == BITINT_TYPE
5113 && bitint_precision_kind (TREE_TYPE (lhs
)) >= bitint_prec_large
)
5115 int p
= var_to_partition (m_map
, lhs
);
5117 gcc_assert (v
!= NULL_TREE
);
5118 if (!types_compatible_p (TREE_TYPE (lhs
), TREE_TYPE (v
)))
5119 v
= build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (lhs
), v
);
5120 gimple_call_set_lhs (stmt
, v
);
5121 SSA_NAME_DEF_STMT (lhs
) = gimple_build_nop ();
5126 /* Lower __asm STMT which involves large/huge _BitInt values. */
5129 bitint_large_huge::lower_asm (gimple
*stmt
)
5131 gasm
*g
= as_a
<gasm
*> (stmt
);
5132 unsigned noutputs
= gimple_asm_noutputs (g
);
5133 unsigned ninputs
= gimple_asm_ninputs (g
);
5135 for (unsigned i
= 0; i
< noutputs
; ++i
)
5137 tree t
= gimple_asm_output_op (g
, i
);
5138 tree s
= TREE_VALUE (t
);
5139 if (TREE_CODE (s
) == SSA_NAME
5140 && TREE_CODE (TREE_TYPE (s
)) == BITINT_TYPE
5141 && bitint_precision_kind (TREE_TYPE (s
)) >= bitint_prec_large
)
5143 int part
= var_to_partition (m_map
, s
);
5144 gcc_assert (m_vars
[part
] != NULL_TREE
);
5145 TREE_VALUE (t
) = m_vars
[part
];
5148 for (unsigned i
= 0; i
< ninputs
; ++i
)
5150 tree t
= gimple_asm_input_op (g
, i
);
5151 tree s
= TREE_VALUE (t
);
5152 if (TREE_CODE (s
) == SSA_NAME
5153 && TREE_CODE (TREE_TYPE (s
)) == BITINT_TYPE
5154 && bitint_precision_kind (TREE_TYPE (s
)) >= bitint_prec_large
)
5156 int part
= var_to_partition (m_map
, s
);
5157 gcc_assert (m_vars
[part
] != NULL_TREE
);
5158 TREE_VALUE (t
) = m_vars
[part
];
5164 /* Lower statement STMT which involves large/huge _BitInt values
5165 into code accessing individual limbs. */
5168 bitint_large_huge::lower_stmt (gimple
*stmt
)
5172 m_data
.truncate (0);
5174 m_gsi
= gsi_for_stmt (stmt
);
5175 m_after_stmt
= NULL
;
5178 gsi_prev (&m_init_gsi
);
5179 m_preheader_bb
= NULL
;
5180 m_upwards_2limb
= 0;
5183 m_cast_conditional
= false;
5185 m_loc
= gimple_location (stmt
);
5186 if (is_gimple_call (stmt
))
5188 lower_call (NULL_TREE
, stmt
);
5191 if (gimple_code (stmt
) == GIMPLE_ASM
)
5196 tree lhs
= NULL_TREE
, cmp_op1
= NULL_TREE
, cmp_op2
= NULL_TREE
;
5197 tree_code cmp_code
= comparison_op (stmt
, &cmp_op1
, &cmp_op2
);
5198 bool eq_p
= (cmp_code
== EQ_EXPR
|| cmp_code
== NE_EXPR
);
5199 bool mergeable_cast_p
= false;
5200 bool final_cast_p
= false;
5201 if (gimple_assign_cast_p (stmt
))
5203 lhs
= gimple_assign_lhs (stmt
);
5204 tree rhs1
= gimple_assign_rhs1 (stmt
);
5205 if (TREE_CODE (TREE_TYPE (lhs
)) == BITINT_TYPE
5206 && bitint_precision_kind (TREE_TYPE (lhs
)) >= bitint_prec_large
5207 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1
)))
5208 mergeable_cast_p
= true;
5209 else if (TREE_CODE (TREE_TYPE (rhs1
)) == BITINT_TYPE
5210 && bitint_precision_kind (TREE_TYPE (rhs1
)) >= bitint_prec_large
5211 && INTEGRAL_TYPE_P (TREE_TYPE (lhs
)))
5213 final_cast_p
= true;
5214 if (TREE_CODE (rhs1
) == SSA_NAME
5216 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (rhs1
))))
5218 gimple
*g
= SSA_NAME_DEF_STMT (rhs1
);
5219 if (is_gimple_assign (g
)
5220 && gimple_assign_rhs_code (g
) == IMAGPART_EXPR
)
5222 tree rhs2
= TREE_OPERAND (gimple_assign_rhs1 (g
), 0);
5223 if (TREE_CODE (rhs2
) == SSA_NAME
5225 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (rhs2
))))
5227 g
= SSA_NAME_DEF_STMT (rhs2
);
5228 int ovf
= optimizable_arith_overflow (g
);
5230 /* If .{ADD,SUB,MUL}_OVERFLOW has both REALPART_EXPR
5231 and IMAGPART_EXPR uses, where the latter is cast to
5232 non-_BitInt, it will be optimized when handling
5233 the REALPART_EXPR. */
5237 lower_call (NULL_TREE
, g
);
5245 if (gimple_store_p (stmt
))
5247 tree rhs1
= gimple_assign_rhs1 (stmt
);
5248 if (TREE_CODE (rhs1
) == SSA_NAME
5250 || !bitmap_bit_p (m_names
, SSA_NAME_VERSION (rhs1
))))
5252 gimple
*g
= SSA_NAME_DEF_STMT (rhs1
);
5253 m_loc
= gimple_location (g
);
5254 lhs
= gimple_assign_lhs (stmt
);
5255 if (is_gimple_assign (g
) && !mergeable_op (g
))
5256 switch (gimple_assign_rhs_code (g
))
5260 lower_shift_stmt (lhs
, g
);
5262 m_gsi
= gsi_for_stmt (stmt
);
5263 unlink_stmt_vdef (stmt
);
5264 release_ssa_name (gimple_vdef (stmt
));
5265 gsi_remove (&m_gsi
, true);
5268 case TRUNC_DIV_EXPR
:
5269 case TRUNC_MOD_EXPR
:
5270 lower_muldiv_stmt (lhs
, g
);
5272 case FIX_TRUNC_EXPR
:
5273 lower_float_conv_stmt (lhs
, g
);
5277 lower_cplxpart_stmt (lhs
, g
);
5282 else if (optimizable_arith_overflow (g
) == 3)
5284 lower_call (lhs
, g
);
5287 m_loc
= gimple_location (stmt
);
5290 if (mergeable_op (stmt
)
5291 || gimple_store_p (stmt
)
5292 || gimple_assign_load_p (stmt
)
5294 || mergeable_cast_p
)
5296 lhs
= lower_mergeable_stmt (stmt
, cmp_code
, cmp_op1
, cmp_op2
);
5300 else if (cmp_code
!= ERROR_MARK
)
5301 lhs
= lower_comparison_stmt (stmt
, cmp_code
, cmp_op1
, cmp_op2
);
5302 if (cmp_code
!= ERROR_MARK
)
5304 if (gimple_code (stmt
) == GIMPLE_COND
)
5306 gcond
*cstmt
= as_a
<gcond
*> (stmt
);
5307 gimple_cond_set_lhs (cstmt
, lhs
);
5308 gimple_cond_set_rhs (cstmt
, boolean_false_node
);
5309 gimple_cond_set_code (cstmt
, cmp_code
);
5313 if (gimple_assign_rhs_code (stmt
) == COND_EXPR
)
5315 tree cond
= build2 (cmp_code
, boolean_type_node
, lhs
,
5316 boolean_false_node
);
5317 gimple_assign_set_rhs1 (stmt
, cond
);
5318 lhs
= gimple_assign_lhs (stmt
);
5319 gcc_assert (TREE_CODE (TREE_TYPE (lhs
)) != BITINT_TYPE
5320 || (bitint_precision_kind (TREE_TYPE (lhs
))
5321 <= bitint_prec_middle
));
5325 gimple_assign_set_rhs1 (stmt
, lhs
);
5326 gimple_assign_set_rhs2 (stmt
, boolean_false_node
);
5327 gimple_assign_set_rhs_code (stmt
, cmp_code
);
5333 tree lhs_type
= TREE_TYPE (lhs
);
5334 /* Add support for 3 or more limbs filled in from normal integral
5335 type if this assert fails. If no target chooses limb mode smaller
5336 than half of largest supported normal integral type, this will not
5338 gcc_assert (TYPE_PRECISION (lhs_type
) <= 2 * limb_prec
);
5340 if (TREE_CODE (lhs_type
) == BITINT_TYPE
5341 && bitint_precision_kind (lhs_type
) == bitint_prec_middle
)
5342 lhs_type
= build_nonstandard_integer_type (TYPE_PRECISION (lhs_type
),
5343 TYPE_UNSIGNED (lhs_type
));
5345 tree rhs1
= gimple_assign_rhs1 (stmt
);
5346 tree r1
= handle_operand (rhs1
, size_int (0));
5347 if (!useless_type_conversion_p (lhs_type
, TREE_TYPE (r1
)))
5348 r1
= add_cast (lhs_type
, r1
);
5349 if (TYPE_PRECISION (lhs_type
) > limb_prec
)
5353 tree r2
= handle_operand (rhs1
, size_int (1));
5354 r2
= add_cast (lhs_type
, r2
);
5355 g
= gimple_build_assign (make_ssa_name (lhs_type
), LSHIFT_EXPR
, r2
,
5356 build_int_cst (unsigned_type_node
,
5359 g
= gimple_build_assign (make_ssa_name (lhs_type
), BIT_IOR_EXPR
, r1
,
5360 gimple_assign_lhs (g
));
5362 r1
= gimple_assign_lhs (g
);
5364 if (lhs_type
!= TREE_TYPE (lhs
))
5365 g
= gimple_build_assign (lhs
, NOP_EXPR
, r1
);
5367 g
= gimple_build_assign (lhs
, r1
);
5368 gsi_replace (&m_gsi
, g
, true);
5371 if (is_gimple_assign (stmt
))
5372 switch (gimple_assign_rhs_code (stmt
))
5376 lower_shift_stmt (NULL_TREE
, stmt
);
5379 case TRUNC_DIV_EXPR
:
5380 case TRUNC_MOD_EXPR
:
5381 lower_muldiv_stmt (NULL_TREE
, stmt
);
5383 case FIX_TRUNC_EXPR
:
5385 lower_float_conv_stmt (NULL_TREE
, stmt
);
5389 lower_cplxpart_stmt (NULL_TREE
, stmt
);
5392 lower_complexexpr_stmt (stmt
);
5400 /* Helper for walk_non_aliased_vuses. Determine if we arrived at
5401 the desired memory state. */
5404 vuse_eq (ao_ref
*, tree vuse1
, void *data
)
5406 tree vuse2
= (tree
) data
;
5413 /* Return true if STMT uses a library function and needs to take
5414 address of its inputs. We need to avoid bit-fields in those
5418 stmt_needs_operand_addr (gimple
*stmt
)
5420 if (is_gimple_assign (stmt
))
5421 switch (gimple_assign_rhs_code (stmt
))
5424 case TRUNC_DIV_EXPR
:
5425 case TRUNC_MOD_EXPR
:
5431 else if (gimple_call_internal_p (stmt
, IFN_MUL_OVERFLOW
)
5432 || gimple_call_internal_p (stmt
, IFN_UBSAN_CHECK_MUL
))
5437 /* Dominator walker used to discover which large/huge _BitInt
5438 loads could be sunk into all their uses. */
5440 class bitint_dom_walker
: public dom_walker
5443 bitint_dom_walker (bitmap names
, bitmap loads
)
5444 : dom_walker (CDI_DOMINATORS
), m_names (names
), m_loads (loads
) {}
5446 edge
before_dom_children (basic_block
) final override
;
5449 bitmap m_names
, m_loads
;
5453 bitint_dom_walker::before_dom_children (basic_block bb
)
5455 gphi
*phi
= get_virtual_phi (bb
);
5458 vop
= gimple_phi_result (phi
);
5459 else if (bb
== ENTRY_BLOCK_PTR_FOR_FN (cfun
))
5462 vop
= (tree
) get_immediate_dominator (CDI_DOMINATORS
, bb
)->aux
;
5464 auto_vec
<tree
, 16> worklist
;
5465 for (gimple_stmt_iterator gsi
= gsi_start_bb (bb
);
5466 !gsi_end_p (gsi
); gsi_next (&gsi
))
5468 gimple
*stmt
= gsi_stmt (gsi
);
5469 if (is_gimple_debug (stmt
))
5472 if (!vop
&& gimple_vuse (stmt
))
5473 vop
= gimple_vuse (stmt
);
5476 if (gimple_vdef (stmt
))
5477 vop
= gimple_vdef (stmt
);
5479 tree lhs
= gimple_get_lhs (stmt
);
5481 && TREE_CODE (lhs
) == SSA_NAME
5482 && TREE_CODE (TREE_TYPE (lhs
)) != BITINT_TYPE
5483 && bitint_precision_kind (TREE_TYPE (lhs
)) >= bitint_prec_large
5484 && !bitmap_bit_p (m_names
, SSA_NAME_VERSION (lhs
)))
5485 /* If lhs of stmt is large/huge _BitInt SSA_NAME not in m_names,
5486 it means it will be handled in a loop or straight line code
5487 at the location of its (ultimate) immediate use, so for
5488 vop checking purposes check these only at the ultimate
5493 use_operand_p use_p
;
5494 FOR_EACH_SSA_USE_OPERAND (use_p
, stmt
, oi
, SSA_OP_USE
)
5496 tree s
= USE_FROM_PTR (use_p
);
5497 if (TREE_CODE (TREE_TYPE (s
)) == BITINT_TYPE
5498 && bitint_precision_kind (TREE_TYPE (s
)) >= bitint_prec_large
)
5499 worklist
.safe_push (s
);
5502 bool needs_operand_addr
= stmt_needs_operand_addr (stmt
);
5503 while (worklist
.length () > 0)
5505 tree s
= worklist
.pop ();
5507 if (!bitmap_bit_p (m_names
, SSA_NAME_VERSION (s
)))
5509 gimple
*g
= SSA_NAME_DEF_STMT (s
);
5510 needs_operand_addr
|= stmt_needs_operand_addr (g
);
5511 FOR_EACH_SSA_USE_OPERAND (use_p
, g
, oi
, SSA_OP_USE
)
5513 tree s2
= USE_FROM_PTR (use_p
);
5514 if (TREE_CODE (TREE_TYPE (s2
)) == BITINT_TYPE
5515 && (bitint_precision_kind (TREE_TYPE (s2
))
5516 >= bitint_prec_large
))
5517 worklist
.safe_push (s2
);
5521 if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s
)
5522 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (s
)))
5524 tree rhs
= gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s
));
5525 if (TREE_CODE (rhs
) == SSA_NAME
5526 && bitmap_bit_p (m_loads
, SSA_NAME_VERSION (rhs
)))
5531 else if (!bitmap_bit_p (m_loads
, SSA_NAME_VERSION (s
)))
5534 tree rhs1
= gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s
));
5535 if (needs_operand_addr
5536 && TREE_CODE (rhs1
) == COMPONENT_REF
5537 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1
, 1)))
5539 tree fld
= TREE_OPERAND (rhs1
, 1);
5540 /* For little-endian, we can allow as inputs bit-fields
5541 which start at a limb boundary. */
5542 if (DECL_OFFSET_ALIGN (fld
) >= TYPE_ALIGN (TREE_TYPE (rhs1
))
5543 && tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld
))
5544 && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld
))
5549 bitmap_clear_bit (m_loads
, SSA_NAME_VERSION (s
));
5555 ao_ref_init (&ref
, rhs1
);
5556 tree lvop
= gimple_vuse (SSA_NAME_DEF_STMT (s
));
5557 unsigned limit
= 64;
5560 && is_gimple_assign (stmt
)
5561 && gimple_store_p (stmt
)
5562 && !operand_equal_p (lhs
,
5563 gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s
)),
5567 && walk_non_aliased_vuses (&ref
, vuse
, false, vuse_eq
,
5568 NULL
, NULL
, limit
, lvop
) == NULL
)
5569 bitmap_clear_bit (m_loads
, SSA_NAME_VERSION (s
));
5573 bb
->aux
= (void *) vop
;
5579 /* Replacement for normal processing of STMT in tree-ssa-coalesce.cc
5580 build_ssa_conflict_graph.
5581 The differences are:
5582 1) don't process assignments with large/huge _BitInt lhs not in NAMES
5583 2) for large/huge _BitInt multiplication/division/modulo process def
5584 only after processing uses rather than before to make uses conflict
5586 3) for large/huge _BitInt uses not in NAMES mark the uses of their
5587 SSA_NAME_DEF_STMT (recursively), because those uses will be sunk into
5588 the final statement. */
5591 build_bitint_stmt_ssa_conflicts (gimple
*stmt
, live_track
*live
,
5592 ssa_conflicts
*graph
, bitmap names
,
5593 void (*def
) (live_track
*, tree
,
5595 void (*use
) (live_track
*, tree
))
5597 bool muldiv_p
= false;
5598 tree lhs
= NULL_TREE
;
5599 if (is_gimple_assign (stmt
))
5601 lhs
= gimple_assign_lhs (stmt
);
5602 if (TREE_CODE (lhs
) == SSA_NAME
5603 && TREE_CODE (TREE_TYPE (lhs
)) == BITINT_TYPE
5604 && bitint_precision_kind (TREE_TYPE (lhs
)) >= bitint_prec_large
)
5606 if (!bitmap_bit_p (names
, SSA_NAME_VERSION (lhs
)))
5608 switch (gimple_assign_rhs_code (stmt
))
5611 case TRUNC_DIV_EXPR
:
5612 case TRUNC_MOD_EXPR
:
5624 /* For stmts with more than one SSA_NAME definition pretend all the
5625 SSA_NAME outputs but the first one are live at this point, so
5626 that conflicts are added in between all those even when they are
5627 actually not really live after the asm, because expansion might
5628 copy those into pseudos after the asm and if multiple outputs
5629 share the same partition, it might overwrite those that should
5631 asm volatile (".." : "=r" (a) : "=r" (b) : "0" (a), "1" (a));
5635 FOR_EACH_SSA_TREE_OPERAND (var
, stmt
, iter
, SSA_OP_DEF
)
5641 FOR_EACH_SSA_TREE_OPERAND (var
, stmt
, iter
, SSA_OP_DEF
)
5642 def (live
, var
, graph
);
5645 auto_vec
<tree
, 16> worklist
;
5646 FOR_EACH_SSA_TREE_OPERAND (var
, stmt
, iter
, SSA_OP_USE
)
5647 if (TREE_CODE (TREE_TYPE (var
)) == BITINT_TYPE
5648 && bitint_precision_kind (TREE_TYPE (var
)) >= bitint_prec_large
)
5650 if (bitmap_bit_p (names
, SSA_NAME_VERSION (var
)))
5653 worklist
.safe_push (var
);
5656 while (worklist
.length () > 0)
5658 tree s
= worklist
.pop ();
5659 FOR_EACH_SSA_TREE_OPERAND (var
, SSA_NAME_DEF_STMT (s
), iter
, SSA_OP_USE
)
5660 if (TREE_CODE (TREE_TYPE (var
)) == BITINT_TYPE
5661 && bitint_precision_kind (TREE_TYPE (var
)) >= bitint_prec_large
)
5663 if (bitmap_bit_p (names
, SSA_NAME_VERSION (var
)))
5666 worklist
.safe_push (var
);
5671 def (live
, lhs
, graph
);
5674 /* If STMT is .{ADD,SUB,MUL}_OVERFLOW with INTEGER_CST arguments,
5675 return the largest bitint_prec_kind of them, otherwise return
5676 bitint_prec_small. */
5678 static bitint_prec_kind
5679 arith_overflow_arg_kind (gimple
*stmt
)
5681 bitint_prec_kind ret
= bitint_prec_small
;
5682 if (is_gimple_call (stmt
) && gimple_call_internal_p (stmt
))
5683 switch (gimple_call_internal_fn (stmt
))
5685 case IFN_ADD_OVERFLOW
:
5686 case IFN_SUB_OVERFLOW
:
5687 case IFN_MUL_OVERFLOW
:
5688 for (int i
= 0; i
< 2; ++i
)
5690 tree a
= gimple_call_arg (stmt
, i
);
5691 if (TREE_CODE (a
) == INTEGER_CST
5692 && TREE_CODE (TREE_TYPE (a
)) == BITINT_TYPE
)
5694 bitint_prec_kind kind
= bitint_precision_kind (TREE_TYPE (a
));
5695 ret
= MAX (ret
, kind
);
5705 /* Entry point for _BitInt(N) operation lowering during optimization. */
5708 gimple_lower_bitint (void)
5710 small_max_prec
= mid_min_prec
= large_min_prec
= huge_min_prec
= 0;
5714 for (i
= 0; i
< num_ssa_names
; ++i
)
5716 tree s
= ssa_name (i
);
5719 tree type
= TREE_TYPE (s
);
5720 if (TREE_CODE (type
) == COMPLEX_TYPE
)
5722 if (arith_overflow_arg_kind (SSA_NAME_DEF_STMT (s
))
5723 != bitint_prec_small
)
5725 type
= TREE_TYPE (type
);
5727 if (TREE_CODE (type
) == BITINT_TYPE
5728 && bitint_precision_kind (type
) != bitint_prec_small
)
5730 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
5731 into memory. Such functions could have no large/huge SSA_NAMEs. */
5732 if (SSA_NAME_IS_VIRTUAL_OPERAND (s
))
5734 gimple
*g
= SSA_NAME_DEF_STMT (s
);
5735 if (is_gimple_assign (g
) && gimple_store_p (g
))
5737 tree t
= gimple_assign_rhs1 (g
);
5738 if (TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
5739 && (bitint_precision_kind (TREE_TYPE (t
))
5740 >= bitint_prec_large
))
5744 /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
5745 to floating point types need to be rewritten. */
5746 else if (SCALAR_FLOAT_TYPE_P (type
))
5748 gimple
*g
= SSA_NAME_DEF_STMT (s
);
5749 if (is_gimple_assign (g
) && gimple_assign_rhs_code (g
) == FLOAT_EXPR
)
5751 tree t
= gimple_assign_rhs1 (g
);
5752 if (TREE_CODE (t
) == INTEGER_CST
5753 && TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
5754 && (bitint_precision_kind (TREE_TYPE (t
))
5755 != bitint_prec_small
))
5760 if (i
== num_ssa_names
)
5764 auto_vec
<gimple
*, 4> switch_statements
;
5765 FOR_EACH_BB_FN (bb
, cfun
)
5767 if (gswitch
*swtch
= safe_dyn_cast
<gswitch
*> (*gsi_last_bb (bb
)))
5769 tree idx
= gimple_switch_index (swtch
);
5770 if (TREE_CODE (TREE_TYPE (idx
)) != BITINT_TYPE
5771 || bitint_precision_kind (TREE_TYPE (idx
)) < bitint_prec_large
)
5775 group_case_labels_stmt (swtch
);
5776 switch_statements
.safe_push (swtch
);
5780 if (!switch_statements
.is_empty ())
5782 bool expanded
= false;
5786 FOR_EACH_VEC_ELT (switch_statements
, j
, stmt
)
5788 gswitch
*swtch
= as_a
<gswitch
*> (stmt
);
5789 tree_switch_conversion::switch_decision_tree
dt (swtch
);
5790 expanded
|= dt
.analyze_switch_statement ();
5795 free_dominance_info (CDI_DOMINATORS
);
5796 free_dominance_info (CDI_POST_DOMINATORS
);
5797 mark_virtual_operands_for_renaming (cfun
);
5798 cleanup_tree_cfg (TODO_update_ssa
);
5802 struct bitint_large_huge large_huge
;
5803 bool has_large_huge_parm_result
= false;
5804 bool has_large_huge
= false;
5805 unsigned int ret
= 0, first_large_huge
= ~0U;
5806 bool edge_insertions
= false;
5807 for (; i
< num_ssa_names
; ++i
)
5809 tree s
= ssa_name (i
);
5812 tree type
= TREE_TYPE (s
);
5813 if (TREE_CODE (type
) == COMPLEX_TYPE
)
5815 if (arith_overflow_arg_kind (SSA_NAME_DEF_STMT (s
))
5816 >= bitint_prec_large
)
5817 has_large_huge
= true;
5818 type
= TREE_TYPE (type
);
5820 if (TREE_CODE (type
) == BITINT_TYPE
5821 && bitint_precision_kind (type
) >= bitint_prec_large
)
5823 if (first_large_huge
== ~0U)
5824 first_large_huge
= i
;
5825 gimple
*stmt
= SSA_NAME_DEF_STMT (s
), *g
;
5826 gimple_stmt_iterator gsi
;
5828 /* Unoptimize certain constructs to simpler alternatives to
5829 avoid having to lower all of them. */
5830 if (is_gimple_assign (stmt
) && gimple_bb (stmt
))
5831 switch (rhs_code
= gimple_assign_rhs_code (stmt
))
5838 first_large_huge
= 0;
5839 location_t loc
= gimple_location (stmt
);
5840 gsi
= gsi_for_stmt (stmt
);
5841 tree rhs1
= gimple_assign_rhs1 (stmt
);
5842 tree type
= TREE_TYPE (rhs1
);
5843 tree n
= gimple_assign_rhs2 (stmt
), m
;
5844 tree p
= build_int_cst (TREE_TYPE (n
),
5845 TYPE_PRECISION (type
));
5846 if (TREE_CODE (n
) == INTEGER_CST
)
5847 m
= fold_build2 (MINUS_EXPR
, TREE_TYPE (n
), p
, n
);
5850 m
= make_ssa_name (TREE_TYPE (n
));
5851 g
= gimple_build_assign (m
, MINUS_EXPR
, p
, n
);
5852 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5853 gimple_set_location (g
, loc
);
5855 if (!TYPE_UNSIGNED (type
))
5857 tree utype
= build_bitint_type (TYPE_PRECISION (type
),
5859 if (TREE_CODE (rhs1
) == INTEGER_CST
)
5860 rhs1
= fold_convert (utype
, rhs1
);
5863 tree t
= make_ssa_name (type
);
5864 g
= gimple_build_assign (t
, NOP_EXPR
, rhs1
);
5865 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5866 gimple_set_location (g
, loc
);
5869 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1
)),
5870 rhs_code
== LROTATE_EXPR
5871 ? LSHIFT_EXPR
: RSHIFT_EXPR
,
5873 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5874 gimple_set_location (g
, loc
);
5875 tree op1
= gimple_assign_lhs (g
);
5876 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1
)),
5877 rhs_code
== LROTATE_EXPR
5878 ? RSHIFT_EXPR
: LSHIFT_EXPR
,
5880 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5881 gimple_set_location (g
, loc
);
5882 tree op2
= gimple_assign_lhs (g
);
5883 tree lhs
= gimple_assign_lhs (stmt
);
5884 if (!TYPE_UNSIGNED (type
))
5886 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (op1
)),
5887 BIT_IOR_EXPR
, op1
, op2
);
5888 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5889 gimple_set_location (g
, loc
);
5890 g
= gimple_build_assign (lhs
, NOP_EXPR
,
5891 gimple_assign_lhs (g
));
5894 g
= gimple_build_assign (lhs
, BIT_IOR_EXPR
, op1
, op2
);
5895 gsi_replace (&gsi
, g
, true);
5896 gimple_set_location (g
, loc
);
5904 first_large_huge
= 0;
5905 gsi
= gsi_for_stmt (stmt
);
5906 tree lhs
= gimple_assign_lhs (stmt
);
5907 tree rhs1
= gimple_assign_rhs1 (stmt
), rhs2
= NULL_TREE
;
5908 location_t loc
= gimple_location (stmt
);
5909 if (rhs_code
== ABS_EXPR
)
5910 g
= gimple_build_cond (LT_EXPR
, rhs1
,
5911 build_zero_cst (TREE_TYPE (rhs1
)),
5912 NULL_TREE
, NULL_TREE
);
5913 else if (rhs_code
== ABSU_EXPR
)
5915 rhs2
= make_ssa_name (TREE_TYPE (lhs
));
5916 g
= gimple_build_assign (rhs2
, NOP_EXPR
, rhs1
);
5917 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5918 gimple_set_location (g
, loc
);
5919 g
= gimple_build_cond (LT_EXPR
, rhs1
,
5920 build_zero_cst (TREE_TYPE (rhs1
)),
5921 NULL_TREE
, NULL_TREE
);
5924 else if (rhs_code
== MIN_EXPR
|| rhs_code
== MAX_EXPR
)
5926 rhs2
= gimple_assign_rhs2 (stmt
);
5927 if (TREE_CODE (rhs1
) == INTEGER_CST
)
5928 std::swap (rhs1
, rhs2
);
5929 g
= gimple_build_cond (LT_EXPR
, rhs1
, rhs2
,
5930 NULL_TREE
, NULL_TREE
);
5931 if (rhs_code
== MAX_EXPR
)
5932 std::swap (rhs1
, rhs2
);
5936 g
= gimple_build_cond (NE_EXPR
, rhs1
,
5937 build_zero_cst (TREE_TYPE (rhs1
)),
5938 NULL_TREE
, NULL_TREE
);
5939 rhs1
= gimple_assign_rhs2 (stmt
);
5940 rhs2
= gimple_assign_rhs3 (stmt
);
5942 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5943 gimple_set_location (g
, loc
);
5944 edge e1
= split_block (gsi_bb (gsi
), g
);
5945 edge e2
= split_block (e1
->dest
, (gimple
*) NULL
);
5946 edge e3
= make_edge (e1
->src
, e2
->dest
, EDGE_FALSE_VALUE
);
5947 e3
->probability
= profile_probability::even ();
5948 e1
->flags
= EDGE_TRUE_VALUE
;
5949 e1
->probability
= e3
->probability
.invert ();
5950 if (dom_info_available_p (CDI_DOMINATORS
))
5951 set_immediate_dominator (CDI_DOMINATORS
, e2
->dest
, e1
->src
);
5952 if (rhs_code
== ABS_EXPR
|| rhs_code
== ABSU_EXPR
)
5954 gsi
= gsi_after_labels (e1
->dest
);
5955 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1
)),
5957 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
5958 gimple_set_location (g
, loc
);
5959 rhs2
= gimple_assign_lhs (g
);
5960 std::swap (rhs1
, rhs2
);
5962 gsi
= gsi_for_stmt (stmt
);
5963 gsi_remove (&gsi
, true);
5964 gphi
*phi
= create_phi_node (lhs
, e2
->dest
);
5965 add_phi_arg (phi
, rhs1
, e2
, UNKNOWN_LOCATION
);
5966 add_phi_arg (phi
, rhs2
, e3
, UNKNOWN_LOCATION
);
5970 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
5971 into memory. Such functions could have no large/huge SSA_NAMEs. */
5972 else if (SSA_NAME_IS_VIRTUAL_OPERAND (s
))
5974 gimple
*g
= SSA_NAME_DEF_STMT (s
);
5975 if (is_gimple_assign (g
) && gimple_store_p (g
))
5977 tree t
= gimple_assign_rhs1 (g
);
5978 if (TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
5979 && (bitint_precision_kind (TREE_TYPE (t
))
5980 >= bitint_prec_large
))
5981 has_large_huge
= true;
5984 /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
5985 to floating point types need to be rewritten. */
5986 else if (SCALAR_FLOAT_TYPE_P (type
))
5988 gimple
*g
= SSA_NAME_DEF_STMT (s
);
5989 if (is_gimple_assign (g
) && gimple_assign_rhs_code (g
) == FLOAT_EXPR
)
5991 tree t
= gimple_assign_rhs1 (g
);
5992 if (TREE_CODE (t
) == INTEGER_CST
5993 && TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
5994 && (bitint_precision_kind (TREE_TYPE (t
))
5995 >= bitint_prec_large
))
5996 has_large_huge
= true;
6000 for (i
= first_large_huge
; i
< num_ssa_names
; ++i
)
6002 tree s
= ssa_name (i
);
6005 tree type
= TREE_TYPE (s
);
6006 if (TREE_CODE (type
) == COMPLEX_TYPE
)
6007 type
= TREE_TYPE (type
);
6008 if (TREE_CODE (type
) == BITINT_TYPE
6009 && bitint_precision_kind (type
) >= bitint_prec_large
)
6011 use_operand_p use_p
;
6013 has_large_huge
= true;
6015 && optimizable_arith_overflow (SSA_NAME_DEF_STMT (s
)))
6017 /* Ignore large/huge _BitInt SSA_NAMEs which have single use in
6018 the same bb and could be handled in the same loop with the
6021 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s
)
6022 && single_imm_use (s
, &use_p
, &use_stmt
)
6023 && gimple_bb (SSA_NAME_DEF_STMT (s
)) == gimple_bb (use_stmt
))
6025 if (mergeable_op (SSA_NAME_DEF_STMT (s
)))
6027 if (mergeable_op (use_stmt
))
6029 tree_code cmp_code
= comparison_op (use_stmt
, NULL
, NULL
);
6030 if (cmp_code
== EQ_EXPR
|| cmp_code
== NE_EXPR
)
6032 if (gimple_assign_cast_p (use_stmt
))
6034 tree lhs
= gimple_assign_lhs (use_stmt
);
6035 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs
)))
6038 else if (gimple_store_p (use_stmt
)
6039 && is_gimple_assign (use_stmt
)
6040 && !gimple_has_volatile_ops (use_stmt
)
6041 && !stmt_ends_bb_p (use_stmt
))
6044 if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (s
)))
6046 tree rhs1
= gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s
));
6047 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1
))
6048 && ((is_gimple_assign (use_stmt
)
6049 && (gimple_assign_rhs_code (use_stmt
)
6051 || gimple_code (use_stmt
) == GIMPLE_COND
)
6052 && (!gimple_store_p (use_stmt
)
6053 || (is_gimple_assign (use_stmt
)
6054 && !gimple_has_volatile_ops (use_stmt
)
6055 && !stmt_ends_bb_p (use_stmt
)))
6056 && (TREE_CODE (rhs1
) != SSA_NAME
6057 || !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1
)))
6059 if (TREE_CODE (TREE_TYPE (rhs1
)) != BITINT_TYPE
6060 || (bitint_precision_kind (TREE_TYPE (rhs1
))
6061 < bitint_prec_large
))
6063 if (is_gimple_assign (use_stmt
))
6064 switch (gimple_assign_rhs_code (use_stmt
))
6067 case TRUNC_DIV_EXPR
:
6068 case TRUNC_MOD_EXPR
:
6070 /* Uses which use handle_operand_addr can't
6071 deal with nested casts. */
6072 if (TREE_CODE (rhs1
) == SSA_NAME
6073 && gimple_assign_cast_p
6074 (SSA_NAME_DEF_STMT (rhs1
))
6075 && has_single_use (rhs1
)
6076 && (gimple_bb (SSA_NAME_DEF_STMT (rhs1
))
6077 == gimple_bb (SSA_NAME_DEF_STMT (s
))))
6083 if ((TYPE_PRECISION (TREE_TYPE (rhs1
))
6084 >= TYPE_PRECISION (TREE_TYPE (s
)))
6085 && mergeable_op (use_stmt
))
6087 /* Prevent merging a widening non-mergeable cast
6088 on result of some narrower mergeable op
6089 together with later mergeable operations. E.g.
6090 result of _BitInt(223) addition shouldn't be
6091 sign-extended to _BitInt(513) and have another
6092 _BitInt(513) added to it, as handle_plus_minus
6093 with its PHI node handling inside of handle_cast
6094 will not work correctly. An exception is if
6095 use_stmt is a store, this is handled directly
6096 in lower_mergeable_stmt. */
6097 if (TREE_CODE (rhs1
) != SSA_NAME
6098 || !has_single_use (rhs1
)
6099 || (gimple_bb (SSA_NAME_DEF_STMT (rhs1
))
6100 != gimple_bb (SSA_NAME_DEF_STMT (s
)))
6101 || !mergeable_op (SSA_NAME_DEF_STMT (rhs1
))
6102 || gimple_store_p (use_stmt
))
6104 if ((TYPE_PRECISION (TREE_TYPE (rhs1
))
6105 < TYPE_PRECISION (TREE_TYPE (s
)))
6106 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (rhs1
)))
6108 /* Another exception is if the widening cast is
6109 from mergeable same precision cast from something
6112 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (rhs1
));
6113 if (TREE_CODE (TREE_TYPE (rhs2
)) == BITINT_TYPE
6114 && (TYPE_PRECISION (TREE_TYPE (rhs1
))
6115 == TYPE_PRECISION (TREE_TYPE (rhs2
))))
6117 if (TREE_CODE (rhs2
) != SSA_NAME
6118 || !has_single_use (rhs2
)
6119 || (gimple_bb (SSA_NAME_DEF_STMT (rhs2
))
6120 != gimple_bb (SSA_NAME_DEF_STMT (s
)))
6121 || !mergeable_op (SSA_NAME_DEF_STMT (rhs2
)))
6127 if (is_gimple_assign (SSA_NAME_DEF_STMT (s
)))
6128 switch (gimple_assign_rhs_code (SSA_NAME_DEF_STMT (s
)))
6132 tree rhs1
= gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s
));
6133 rhs1
= TREE_OPERAND (rhs1
, 0);
6134 if (TREE_CODE (rhs1
) == SSA_NAME
)
6136 gimple
*g
= SSA_NAME_DEF_STMT (rhs1
);
6137 if (optimizable_arith_overflow (g
))
6145 case TRUNC_DIV_EXPR
:
6146 case TRUNC_MOD_EXPR
:
6147 case FIX_TRUNC_EXPR
:
6149 if (gimple_store_p (use_stmt
)
6150 && is_gimple_assign (use_stmt
)
6151 && !gimple_has_volatile_ops (use_stmt
)
6152 && !stmt_ends_bb_p (use_stmt
))
6154 tree lhs
= gimple_assign_lhs (use_stmt
);
6155 /* As multiply/division passes address of the lhs
6156 to library function and that assumes it can extend
6157 it to whole number of limbs, avoid merging those
6158 with bit-field stores. Don't allow it for
6159 shifts etc. either, so that the bit-field store
6160 handling doesn't have to be done everywhere. */
6161 if (TREE_CODE (lhs
) == COMPONENT_REF
6162 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs
, 1)))
6172 /* Also ignore uninitialized uses. */
6173 if (SSA_NAME_IS_DEFAULT_DEF (s
)
6174 && (!SSA_NAME_VAR (s
) || VAR_P (SSA_NAME_VAR (s
))))
6178 if (!large_huge
.m_names
)
6179 large_huge
.m_names
= BITMAP_ALLOC (NULL
);
6180 bitmap_set_bit (large_huge
.m_names
, SSA_NAME_VERSION (s
));
6181 if (has_single_use (s
))
6183 if (!large_huge
.m_single_use_names
)
6184 large_huge
.m_single_use_names
= BITMAP_ALLOC (NULL
);
6185 bitmap_set_bit (large_huge
.m_single_use_names
,
6186 SSA_NAME_VERSION (s
));
6188 if (SSA_NAME_VAR (s
)
6189 && ((TREE_CODE (SSA_NAME_VAR (s
)) == PARM_DECL
6190 && SSA_NAME_IS_DEFAULT_DEF (s
))
6191 || TREE_CODE (SSA_NAME_VAR (s
)) == RESULT_DECL
))
6192 has_large_huge_parm_result
= true;
6194 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s
)
6195 && gimple_assign_load_p (SSA_NAME_DEF_STMT (s
))
6196 && !gimple_has_volatile_ops (SSA_NAME_DEF_STMT (s
))
6197 && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s
)))
6199 use_operand_p use_p
;
6200 imm_use_iterator iter
;
6201 bool optimizable_load
= true;
6202 FOR_EACH_IMM_USE_FAST (use_p
, iter
, s
)
6204 gimple
*use_stmt
= USE_STMT (use_p
);
6205 if (is_gimple_debug (use_stmt
))
6207 if (gimple_code (use_stmt
) == GIMPLE_PHI
6208 || is_gimple_call (use_stmt
))
6210 optimizable_load
= false;
6216 FOR_EACH_SSA_USE_OPERAND (use_p
, SSA_NAME_DEF_STMT (s
),
6219 tree s2
= USE_FROM_PTR (use_p
);
6220 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s2
))
6222 optimizable_load
= false;
6227 if (optimizable_load
&& !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s
)))
6229 if (!large_huge
.m_loads
)
6230 large_huge
.m_loads
= BITMAP_ALLOC (NULL
);
6231 bitmap_set_bit (large_huge
.m_loads
, SSA_NAME_VERSION (s
));
6235 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
6236 into memory. Such functions could have no large/huge SSA_NAMEs. */
6237 else if (SSA_NAME_IS_VIRTUAL_OPERAND (s
))
6239 gimple
*g
= SSA_NAME_DEF_STMT (s
);
6240 if (is_gimple_assign (g
) && gimple_store_p (g
))
6242 tree t
= gimple_assign_rhs1 (g
);
6243 if (TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
6244 && bitint_precision_kind (TREE_TYPE (t
)) >= bitint_prec_large
)
6245 has_large_huge
= true;
6250 if (large_huge
.m_names
|| has_large_huge
)
6252 ret
= TODO_update_ssa_only_virtuals
| TODO_cleanup_cfg
;
6253 calculate_dominance_info (CDI_DOMINATORS
);
6255 enable_ranger (cfun
);
6256 if (large_huge
.m_loads
)
6258 basic_block entry
= ENTRY_BLOCK_PTR_FOR_FN (cfun
);
6260 bitint_dom_walker (large_huge
.m_names
,
6261 large_huge
.m_loads
).walk (entry
);
6262 bitmap_and_compl_into (large_huge
.m_names
, large_huge
.m_loads
);
6263 clear_aux_for_blocks ();
6264 BITMAP_FREE (large_huge
.m_loads
);
6266 large_huge
.m_limb_type
= build_nonstandard_integer_type (limb_prec
, 1);
6267 large_huge
.m_limb_size
6268 = tree_to_uhwi (TYPE_SIZE_UNIT (large_huge
.m_limb_type
));
6270 if (large_huge
.m_names
)
6273 = init_var_map (num_ssa_names
, NULL
, large_huge
.m_names
);
6274 coalesce_ssa_name (large_huge
.m_map
);
6275 partition_view_normal (large_huge
.m_map
);
6276 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6278 fprintf (dump_file
, "After Coalescing:\n");
6279 dump_var_map (dump_file
, large_huge
.m_map
);
6282 = XCNEWVEC (tree
, num_var_partitions (large_huge
.m_map
));
6284 if (has_large_huge_parm_result
)
6285 EXECUTE_IF_SET_IN_BITMAP (large_huge
.m_names
, 0, i
, bi
)
6287 tree s
= ssa_name (i
);
6288 if (SSA_NAME_VAR (s
)
6289 && ((TREE_CODE (SSA_NAME_VAR (s
)) == PARM_DECL
6290 && SSA_NAME_IS_DEFAULT_DEF (s
))
6291 || TREE_CODE (SSA_NAME_VAR (s
)) == RESULT_DECL
))
6293 int p
= var_to_partition (large_huge
.m_map
, s
);
6294 if (large_huge
.m_vars
[p
] == NULL_TREE
)
6296 large_huge
.m_vars
[p
] = SSA_NAME_VAR (s
);
6297 mark_addressable (SSA_NAME_VAR (s
));
6301 tree atype
= NULL_TREE
;
6302 EXECUTE_IF_SET_IN_BITMAP (large_huge
.m_names
, 0, i
, bi
)
6304 tree s
= ssa_name (i
);
6305 int p
= var_to_partition (large_huge
.m_map
, s
);
6306 if (large_huge
.m_vars
[p
] != NULL_TREE
)
6308 if (atype
== NULL_TREE
6309 || !tree_int_cst_equal (TYPE_SIZE (atype
),
6310 TYPE_SIZE (TREE_TYPE (s
))))
6312 unsigned HOST_WIDE_INT nelts
6313 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (s
))) / limb_prec
;
6314 atype
= build_array_type_nelts (large_huge
.m_limb_type
, nelts
);
6316 large_huge
.m_vars
[p
] = create_tmp_var (atype
, "bitint");
6317 mark_addressable (large_huge
.m_vars
[p
]);
6321 FOR_EACH_BB_REVERSE_FN (bb
, cfun
)
6323 gimple_stmt_iterator prev
;
6324 for (gimple_stmt_iterator gsi
= gsi_last_bb (bb
); !gsi_end_p (gsi
);
6330 gimple
*stmt
= gsi_stmt (gsi
);
6331 if (is_gimple_debug (stmt
))
6333 bitint_prec_kind kind
= bitint_prec_small
;
6335 FOR_EACH_SSA_TREE_OPERAND (t
, stmt
, iter
, SSA_OP_ALL_OPERANDS
)
6336 if (TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
)
6338 bitint_prec_kind this_kind
6339 = bitint_precision_kind (TREE_TYPE (t
));
6340 kind
= MAX (kind
, this_kind
);
6342 if (is_gimple_assign (stmt
) && gimple_store_p (stmt
))
6344 t
= gimple_assign_rhs1 (stmt
);
6345 if (TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
)
6347 bitint_prec_kind this_kind
6348 = bitint_precision_kind (TREE_TYPE (t
));
6349 kind
= MAX (kind
, this_kind
);
6352 if (is_gimple_assign (stmt
)
6353 && gimple_assign_rhs_code (stmt
) == FLOAT_EXPR
)
6355 t
= gimple_assign_rhs1 (stmt
);
6356 if (TREE_CODE (TREE_TYPE (t
)) == BITINT_TYPE
6357 && TREE_CODE (t
) == INTEGER_CST
)
6359 bitint_prec_kind this_kind
6360 = bitint_precision_kind (TREE_TYPE (t
));
6361 kind
= MAX (kind
, this_kind
);
6364 if (is_gimple_call (stmt
))
6366 t
= gimple_call_lhs (stmt
);
6367 if (t
&& TREE_CODE (TREE_TYPE (t
)) == COMPLEX_TYPE
)
6369 bitint_prec_kind this_kind
= arith_overflow_arg_kind (stmt
);
6370 kind
= MAX (kind
, this_kind
);
6371 if (TREE_CODE (TREE_TYPE (TREE_TYPE (t
))) == BITINT_TYPE
)
6374 = bitint_precision_kind (TREE_TYPE (TREE_TYPE (t
)));
6375 kind
= MAX (kind
, this_kind
);
6379 if (kind
== bitint_prec_small
)
6381 switch (gimple_code (stmt
))
6384 /* For now. We'll need to handle some internal functions and
6385 perhaps some builtins. */
6386 if (kind
== bitint_prec_middle
)
6390 if (kind
== bitint_prec_middle
)
6396 if (gimple_clobber_p (stmt
))
6398 if (kind
>= bitint_prec_large
)
6400 if (gimple_assign_single_p (stmt
))
6401 /* No need to lower copies, loads or stores. */
6403 if (gimple_assign_cast_p (stmt
))
6405 tree lhs
= gimple_assign_lhs (stmt
);
6406 tree rhs
= gimple_assign_rhs1 (stmt
);
6407 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs
))
6408 && INTEGRAL_TYPE_P (TREE_TYPE (rhs
))
6409 && (TYPE_PRECISION (TREE_TYPE (lhs
))
6410 == TYPE_PRECISION (TREE_TYPE (rhs
))))
6411 /* No need to lower casts to same precision. */
6419 if (kind
== bitint_prec_middle
)
6421 tree type
= NULL_TREE
;
6422 /* Middle _BitInt(N) is rewritten to casts to INTEGER_TYPEs
6423 with the same precision and back. */
6424 unsigned int nops
= gimple_num_ops (stmt
);
6425 for (unsigned int i
= is_gimple_assign (stmt
) ? 1 : 0;
6427 if (tree op
= gimple_op (stmt
, i
))
6429 tree nop
= maybe_cast_middle_bitint (&gsi
, op
, type
);
6431 gimple_set_op (stmt
, i
, nop
);
6432 else if (COMPARISON_CLASS_P (op
))
6434 TREE_OPERAND (op
, 0)
6435 = maybe_cast_middle_bitint (&gsi
,
6436 TREE_OPERAND (op
, 0),
6438 TREE_OPERAND (op
, 1)
6439 = maybe_cast_middle_bitint (&gsi
,
6440 TREE_OPERAND (op
, 1),
6443 else if (TREE_CODE (op
) == CASE_LABEL_EXPR
)
6446 = maybe_cast_middle_bitint (&gsi
, CASE_LOW (op
),
6449 = maybe_cast_middle_bitint (&gsi
, CASE_HIGH (op
),
6453 if (tree lhs
= gimple_get_lhs (stmt
))
6454 if (TREE_CODE (TREE_TYPE (lhs
)) == BITINT_TYPE
6455 && (bitint_precision_kind (TREE_TYPE (lhs
))
6456 == bitint_prec_middle
))
6458 int prec
= TYPE_PRECISION (TREE_TYPE (lhs
));
6459 int uns
= TYPE_UNSIGNED (TREE_TYPE (lhs
));
6460 type
= build_nonstandard_integer_type (prec
, uns
);
6461 tree lhs2
= make_ssa_name (type
);
6462 gimple_set_lhs (stmt
, lhs2
);
6463 gimple
*g
= gimple_build_assign (lhs
, NOP_EXPR
, lhs2
);
6464 if (stmt_ends_bb_p (stmt
))
6466 edge e
= find_fallthru_edge (gsi_bb (gsi
)->succs
);
6467 gsi_insert_on_edge_immediate (e
, g
);
6470 gsi_insert_after (&gsi
, g
, GSI_SAME_STMT
);
6476 if (tree lhs
= gimple_get_lhs (stmt
))
6477 if (TREE_CODE (lhs
) == SSA_NAME
)
6479 tree type
= TREE_TYPE (lhs
);
6480 if (TREE_CODE (type
) == COMPLEX_TYPE
)
6481 type
= TREE_TYPE (type
);
6482 if (TREE_CODE (type
) == BITINT_TYPE
6483 && bitint_precision_kind (type
) >= bitint_prec_large
6484 && (large_huge
.m_names
== NULL
6485 || !bitmap_bit_p (large_huge
.m_names
,
6486 SSA_NAME_VERSION (lhs
))))
6490 large_huge
.lower_stmt (stmt
);
6493 tree atype
= NULL_TREE
;
6494 for (gphi_iterator gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
);
6497 gphi
*phi
= gsi
.phi ();
6498 tree lhs
= gimple_phi_result (phi
);
6499 if (TREE_CODE (TREE_TYPE (lhs
)) != BITINT_TYPE
6500 || bitint_precision_kind (TREE_TYPE (lhs
)) < bitint_prec_large
)
6502 int p1
= var_to_partition (large_huge
.m_map
, lhs
);
6503 gcc_assert (large_huge
.m_vars
[p1
] != NULL_TREE
);
6504 tree v1
= large_huge
.m_vars
[p1
];
6505 for (unsigned i
= 0; i
< gimple_phi_num_args (phi
); ++i
)
6507 tree arg
= gimple_phi_arg_def (phi
, i
);
6508 edge e
= gimple_phi_arg_edge (phi
, i
);
6510 switch (TREE_CODE (arg
))
6513 if (integer_zerop (arg
) && VAR_P (v1
))
6515 tree zero
= build_zero_cst (TREE_TYPE (v1
));
6516 g
= gimple_build_assign (v1
, zero
);
6517 gsi_insert_on_edge (e
, g
);
6518 edge_insertions
= true;
6522 unsigned int min_prec
, prec
, rem
;
6524 prec
= TYPE_PRECISION (TREE_TYPE (arg
));
6525 rem
= prec
% (2 * limb_prec
);
6526 min_prec
= bitint_min_cst_precision (arg
, ext
);
6527 if (min_prec
> prec
- rem
- 2 * limb_prec
6528 && min_prec
> (unsigned) limb_prec
)
6529 /* Constant which has enough significant bits that it
6530 isn't worth trying to save .rodata space by extending
6531 from smaller number. */
6534 min_prec
= CEIL (min_prec
, limb_prec
) * limb_prec
;
6537 else if (min_prec
== prec
)
6538 c
= tree_output_constant_def (arg
);
6539 else if (min_prec
== (unsigned) limb_prec
)
6540 c
= fold_convert (large_huge
.m_limb_type
, arg
);
6543 tree ctype
= build_bitint_type (min_prec
, 1);
6544 c
= tree_output_constant_def (fold_convert (ctype
, arg
));
6548 if (VAR_P (v1
) && min_prec
== prec
)
6550 tree v2
= build1 (VIEW_CONVERT_EXPR
,
6552 g
= gimple_build_assign (v1
, v2
);
6553 gsi_insert_on_edge (e
, g
);
6554 edge_insertions
= true;
6557 if (TREE_CODE (TREE_TYPE (c
)) == INTEGER_TYPE
)
6558 g
= gimple_build_assign (build1 (VIEW_CONVERT_EXPR
,
6563 unsigned HOST_WIDE_INT nelts
6564 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (c
)))
6567 = build_array_type_nelts (large_huge
.m_limb_type
,
6569 g
= gimple_build_assign (build1 (VIEW_CONVERT_EXPR
,
6571 build1 (VIEW_CONVERT_EXPR
,
6574 gsi_insert_on_edge (e
, g
);
6578 unsigned HOST_WIDE_INT nelts
6579 = (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (v1
)))
6580 - min_prec
) / limb_prec
;
6582 = build_array_type_nelts (large_huge
.m_limb_type
,
6584 tree ptype
= build_pointer_type (TREE_TYPE (v1
));
6585 tree off
= fold_convert (ptype
,
6586 TYPE_SIZE_UNIT (TREE_TYPE (c
)));
6587 tree vd
= build2 (MEM_REF
, vtype
,
6588 build_fold_addr_expr (v1
), off
);
6589 g
= gimple_build_assign (vd
, build_zero_cst (vtype
));
6596 tree ptype
= build_pointer_type (TREE_TYPE (v1
));
6598 = fold_convert (ptype
,
6599 TYPE_SIZE_UNIT (TREE_TYPE (c
)));
6600 vd
= build2 (MEM_REF
, large_huge
.m_limb_type
,
6601 build_fold_addr_expr (v1
), off
);
6603 vd
= build_fold_addr_expr (vd
);
6604 unsigned HOST_WIDE_INT nbytes
6605 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (v1
)));
6608 -= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (c
)));
6609 tree fn
= builtin_decl_implicit (BUILT_IN_MEMSET
);
6610 g
= gimple_build_call (fn
, 3, vd
,
6611 integer_minus_one_node
,
6612 build_int_cst (sizetype
,
6615 gsi_insert_on_edge (e
, g
);
6616 edge_insertions
= true;
6621 if (gimple_code (SSA_NAME_DEF_STMT (arg
)) == GIMPLE_NOP
)
6623 if (large_huge
.m_names
== NULL
6624 || !bitmap_bit_p (large_huge
.m_names
,
6625 SSA_NAME_VERSION (arg
)))
6628 int p2
= var_to_partition (large_huge
.m_map
, arg
);
6631 gcc_assert (large_huge
.m_vars
[p2
] != NULL_TREE
);
6632 tree v2
= large_huge
.m_vars
[p2
];
6633 if (VAR_P (v1
) && VAR_P (v2
))
6634 g
= gimple_build_assign (v1
, v2
);
6635 else if (VAR_P (v1
))
6636 g
= gimple_build_assign (v1
, build1 (VIEW_CONVERT_EXPR
,
6637 TREE_TYPE (v1
), v2
));
6638 else if (VAR_P (v2
))
6639 g
= gimple_build_assign (build1 (VIEW_CONVERT_EXPR
,
6640 TREE_TYPE (v2
), v1
), v2
);
6643 if (atype
== NULL_TREE
6644 || !tree_int_cst_equal (TYPE_SIZE (atype
),
6645 TYPE_SIZE (TREE_TYPE (lhs
))))
6647 unsigned HOST_WIDE_INT nelts
6648 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs
)))
6651 = build_array_type_nelts (large_huge
.m_limb_type
,
6654 g
= gimple_build_assign (build1 (VIEW_CONVERT_EXPR
,
6656 build1 (VIEW_CONVERT_EXPR
,
6659 gsi_insert_on_edge (e
, g
);
6660 edge_insertions
= true;
6667 if (large_huge
.m_names
|| has_large_huge
)
6670 for (i
= 0; i
< num_ssa_names
; ++i
)
6672 tree s
= ssa_name (i
);
6675 tree type
= TREE_TYPE (s
);
6676 if (TREE_CODE (type
) == COMPLEX_TYPE
)
6677 type
= TREE_TYPE (type
);
6678 if (TREE_CODE (type
) == BITINT_TYPE
6679 && bitint_precision_kind (type
) >= bitint_prec_large
)
6681 if (large_huge
.m_preserved
6682 && bitmap_bit_p (large_huge
.m_preserved
,
6683 SSA_NAME_VERSION (s
)))
6685 gimple
*g
= SSA_NAME_DEF_STMT (s
);
6686 if (gimple_code (g
) == GIMPLE_NOP
)
6688 if (SSA_NAME_VAR (s
))
6689 set_ssa_default_def (cfun
, SSA_NAME_VAR (s
), NULL_TREE
);
6690 release_ssa_name (s
);
6693 if (gimple_bb (g
) == NULL
)
6695 release_ssa_name (s
);
6698 if (gimple_code (g
) != GIMPLE_ASM
)
6700 gimple_stmt_iterator gsi
= gsi_for_stmt (g
);
6701 bool save_vta
= flag_var_tracking_assignments
;
6702 flag_var_tracking_assignments
= false;
6703 gsi_remove (&gsi
, true);
6704 flag_var_tracking_assignments
= save_vta
;
6707 nop
= gimple_build_nop ();
6708 SSA_NAME_DEF_STMT (s
) = nop
;
6709 release_ssa_name (s
);
6713 disable_ranger (cfun
);
6716 if (edge_insertions
)
6717 gsi_commit_edge_inserts ();
6724 const pass_data pass_data_lower_bitint
=
6726 GIMPLE_PASS
, /* type */
6727 "bitintlower", /* name */
6728 OPTGROUP_NONE
, /* optinfo_flags */
6729 TV_NONE
, /* tv_id */
6730 PROP_ssa
, /* properties_required */
6731 PROP_gimple_lbitint
, /* properties_provided */
6732 0, /* properties_destroyed */
6733 0, /* todo_flags_start */
6734 0, /* todo_flags_finish */
6737 class pass_lower_bitint
: public gimple_opt_pass
6740 pass_lower_bitint (gcc::context
*ctxt
)
6741 : gimple_opt_pass (pass_data_lower_bitint
, ctxt
)
6744 /* opt_pass methods: */
6745 opt_pass
* clone () final override
{ return new pass_lower_bitint (m_ctxt
); }
6746 unsigned int execute (function
*) final override
6748 return gimple_lower_bitint ();
6751 }; // class pass_lower_bitint
6756 make_pass_lower_bitint (gcc::context
*ctxt
)
6758 return new pass_lower_bitint (ctxt
);
6764 const pass_data pass_data_lower_bitint_O0
=
6766 GIMPLE_PASS
, /* type */
6767 "bitintlower0", /* name */
6768 OPTGROUP_NONE
, /* optinfo_flags */
6769 TV_NONE
, /* tv_id */
6770 PROP_cfg
, /* properties_required */
6771 PROP_gimple_lbitint
, /* properties_provided */
6772 0, /* properties_destroyed */
6773 0, /* todo_flags_start */
6774 0, /* todo_flags_finish */
6777 class pass_lower_bitint_O0
: public gimple_opt_pass
6780 pass_lower_bitint_O0 (gcc::context
*ctxt
)
6781 : gimple_opt_pass (pass_data_lower_bitint_O0
, ctxt
)
6784 /* opt_pass methods: */
6785 bool gate (function
*fun
) final override
6787 /* With errors, normal optimization passes are not run. If we don't
6788 lower bitint operations at all, rtl expansion will abort. */
6789 return !(fun
->curr_properties
& PROP_gimple_lbitint
);
6792 unsigned int execute (function
*) final override
6794 return gimple_lower_bitint ();
6797 }; // class pass_lower_bitint_O0
6802 make_pass_lower_bitint_O0 (gcc::context
*ctxt
)
6804 return new pass_lower_bitint_O0 (ctxt
);