/* Lower _BitInt(N) operations to scalar operations.
   Copyright (C) 2023-2024 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "fold-const.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "cfgloop.h"
#include "cfganal.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "tree-ssa-coalesce.h"
#include "domwalk.h"
#include "memmodel.h"
#include "optabs.h"
#include "varasm.h"
#include "gimple-range.h"
#include "value-range.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "diagnostic-core.h"
#include "tree-eh.h"
#include "tree-pretty-print.h"
#include "alloc-pool.h"
#include "tree-into-ssa.h"
#include "tree-cfgcleanup.h"
#include "tree-switch-conversion.h"
#include "ubsan.h"
#include "gimple-lower-bitint.h"
/* Split BITINT_TYPE precisions into 4 categories.  Small _BitInt, where
   the target hook says it is a single limb, middle _BitInt which per ABI
   is not, but there is some INTEGER_TYPE in which arithmetic can be
   performed (operations on such _BitInt are lowered to casts to that
   arithmetic type and cast back; e.g. on x86_64 the limb is DImode, but
   the target supports TImode, so _BitInt(65) to _BitInt(128) are middle
   ones), large _BitInt which should be handled by straight line code and
   finally huge _BitInt which should be handled by loops over the limbs.  */

enum bitint_prec_kind {
  bitint_prec_small,
  bitint_prec_middle,
  bitint_prec_large,
  bitint_prec_huge
};
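
/* For example, with a 64-bit limb and TImode as the widest fixed-size
   integer mode (the x86_64 case above), the categories work out to:
   _BitInt(2) .. _BitInt(64) are small, _BitInt(65) .. _BitInt(128)
   middle, _BitInt(129) .. _BitInt(255) large, and _BitInt(256) and
   wider huge, since huge_min_prec below becomes 4 * limb_prec == 256.  */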

/* Caches to speed up bitint_precision_kind.  */

static int small_max_prec, mid_min_prec, large_min_prec, huge_min_prec;
static int limb_prec;

/* Categorize _BitInt(PREC) as small, middle, large or huge.  */

static bitint_prec_kind
bitint_precision_kind (int prec)
{
  if (prec <= small_max_prec)
    return bitint_prec_small;
  if (huge_min_prec && prec >= huge_min_prec)
    return bitint_prec_huge;
  if (large_min_prec && prec >= large_min_prec)
    return bitint_prec_large;
  if (mid_min_prec && prec >= mid_min_prec)
    return bitint_prec_middle;

  struct bitint_info info;
  bool ok = targetm.c.bitint_type_info (prec, &info);
  gcc_assert (ok);
  scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
  if (prec <= GET_MODE_PRECISION (limb_mode))
    {
      small_max_prec = prec;
      return bitint_prec_small;
    }
  if (!large_min_prec
      && GET_MODE_PRECISION (limb_mode) < MAX_FIXED_MODE_SIZE)
    large_min_prec = MAX_FIXED_MODE_SIZE + 1;
  if (!limb_prec)
    limb_prec = GET_MODE_PRECISION (limb_mode);
  if (!huge_min_prec)
    {
      if (4 * limb_prec >= MAX_FIXED_MODE_SIZE)
        huge_min_prec = 4 * limb_prec;
      else
        huge_min_prec = MAX_FIXED_MODE_SIZE + 1;
    }
  if (prec <= MAX_FIXED_MODE_SIZE)
    {
      if (!mid_min_prec || prec < mid_min_prec)
        mid_min_prec = prec;
      return bitint_prec_middle;
    }
  if (prec >= huge_min_prec)
    return bitint_prec_huge;
  return bitint_prec_large;
}

/* Same for a TYPE.  */

static bitint_prec_kind
bitint_precision_kind (tree type)
{
  return bitint_precision_kind (TYPE_PRECISION (type));
}

/* Return minimum precision needed to describe INTEGER_CST
   CST.  All bits above that precision up to precision of
   TREE_TYPE (CST) are cleared if EXT is set to 0, or set
   if EXT is set to -1.  */

static unsigned
bitint_min_cst_precision (tree cst, int &ext)
{
  ext = tree_int_cst_sgn (cst) < 0 ? -1 : 0;
  wide_int w = wi::to_wide (cst);
  unsigned min_prec = wi::min_precision (w, TYPE_SIGN (TREE_TYPE (cst)));
  /* For signed values, we don't need to count the sign bit,
     we'll use constant 0 or -1 for the upper bits.  */
  if (!TYPE_UNSIGNED (TREE_TYPE (cst)))
    --min_prec;
  else
    {
      /* For unsigned values, also try signed min_precision
         in case the constant has lots of most significant bits set.  */
      unsigned min_prec2 = wi::min_precision (w, SIGNED) - 1;
      if (min_prec2 < min_prec)
        {
          ext = -1;
          return min_prec2;
        }
    }
  return min_prec;
}
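
/* E.g. a signed _BitInt(256) constant of -2 yields EXT -1 and minimum
   precision 1 (a single explicit zero bit, all more significant bits
   implied to be set), while an unsigned _BitInt(256) constant of 5
   yields EXT 0 and minimum precision 3.  */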

namespace {

/* If OP is middle _BitInt, cast it to corresponding INTEGER_TYPE
   cached in TYPE and return it.  */

tree
maybe_cast_middle_bitint (gimple_stmt_iterator *gsi, tree op, tree &type)
{
  if (op == NULL_TREE
      || TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
      || bitint_precision_kind (TREE_TYPE (op)) != bitint_prec_middle)
    return op;

  int prec = TYPE_PRECISION (TREE_TYPE (op));
  int uns = TYPE_UNSIGNED (TREE_TYPE (op));
  if (type == NULL_TREE
      || TYPE_PRECISION (type) != prec
      || TYPE_UNSIGNED (type) != uns)
    type = build_nonstandard_integer_type (prec, uns);

  if (TREE_CODE (op) != SSA_NAME)
    {
      tree nop = fold_convert (type, op);
      if (is_gimple_val (nop))
        return nop;
    }

  tree nop = make_ssa_name (type);
  gimple *g = gimple_build_assign (nop, NOP_EXPR, op);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  return nop;
}
/* Return true if STMT can be handled in a loop from least to most
   significant limb together with its dependencies.  */

bool
mergeable_op (gimple *stmt)
{
  if (!is_gimple_assign (stmt))
    return false;
  switch (gimple_assign_rhs_code (stmt))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case NEGATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_NOT_EXPR:
    case SSA_NAME:
    case INTEGER_CST:
      return true;
    case LSHIFT_EXPR:
      {
        tree cnt = gimple_assign_rhs2 (stmt);
        if (tree_fits_uhwi_p (cnt)
            && tree_to_uhwi (cnt) < (unsigned HOST_WIDE_INT) limb_prec)
          return true;
      }
      break;
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
      {
        tree lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
        tree rhs_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
        if (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
            && TREE_CODE (lhs_type) == BITINT_TYPE
            && TREE_CODE (rhs_type) == BITINT_TYPE
            && bitint_precision_kind (lhs_type) >= bitint_prec_large
            && bitint_precision_kind (rhs_type) >= bitint_prec_large
            && tree_int_cst_equal (TYPE_SIZE (lhs_type), TYPE_SIZE (rhs_type)))
          {
            if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type))
              return true;
            if ((unsigned) TYPE_PRECISION (lhs_type) % (2 * limb_prec) != 0)
              return true;
            if (bitint_precision_kind (lhs_type) == bitint_prec_large)
              return true;
          }
        break;
      }
    default:
      break;
    }
  return false;
}
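
/* E.g. in
     x = a + b;
     y = x ^ c;
   with _BitInt(512) operands both statements are mergeable, so the
   lowering can evaluate them in a single loop over the limbs of
   a, b and c.  */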

/* Return non-zero if STMT is a .{ADD,SUB,MUL}_OVERFLOW call with
   _Complex large/huge _BitInt lhs which has at most two immediate uses,
   at most one use in REALPART_EXPR stmt in the same bb and exactly one
   IMAGPART_EXPR use in the same bb with a single use which casts it to
   non-BITINT_TYPE integral type.  If there is a REALPART_EXPR use,
   return 2.  Such cases (most common uses of those builtins) can be
   optimized by marking their lhs and lhs of IMAGPART_EXPR and maybe lhs
   of REALPART_EXPR as not needed to be backed up by a stack variable.
   For .UBSAN_CHECK_{ADD,SUB,MUL} return 3.  */

int
optimizable_arith_overflow (gimple *stmt)
{
  bool is_ubsan = false;
  if (!is_gimple_call (stmt) || !gimple_call_internal_p (stmt))
    return 0;
  switch (gimple_call_internal_fn (stmt))
    {
    case IFN_ADD_OVERFLOW:
    case IFN_SUB_OVERFLOW:
    case IFN_MUL_OVERFLOW:
      break;
    case IFN_UBSAN_CHECK_ADD:
    case IFN_UBSAN_CHECK_SUB:
    case IFN_UBSAN_CHECK_MUL:
      is_ubsan = true;
      break;
    default:
      return 0;
    }
  tree lhs = gimple_call_lhs (stmt);
  if (!lhs)
    return 0;
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
    return 0;
  tree type = is_ubsan ? TREE_TYPE (lhs) : TREE_TYPE (TREE_TYPE (lhs));
  if (TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large)
    return 0;

  if (is_ubsan)
    {
      use_operand_p use_p;
      gimple *use_stmt;
      if (!single_imm_use (lhs, &use_p, &use_stmt)
          || gimple_bb (use_stmt) != gimple_bb (stmt)
          || !gimple_store_p (use_stmt)
          || !is_gimple_assign (use_stmt)
          || gimple_has_volatile_ops (use_stmt)
          || stmt_ends_bb_p (use_stmt))
        return 0;
      return 3;
    }

  imm_use_iterator ui;
  use_operand_p use_p;
  int seen = 0;
  FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
    {
      gimple *g = USE_STMT (use_p);
      if (is_gimple_debug (g))
        continue;
      if (!is_gimple_assign (g) || gimple_bb (g) != gimple_bb (stmt))
        return 0;
      if (gimple_assign_rhs_code (g) == REALPART_EXPR)
        {
          if ((seen & 1) != 0)
            return 0;
          seen |= 1;
        }
      else if (gimple_assign_rhs_code (g) == IMAGPART_EXPR)
        {
          if ((seen & 2) != 0)
            return 0;
          seen |= 2;

          use_operand_p use2_p;
          gimple *use_stmt;
          tree lhs2 = gimple_assign_lhs (g);
          if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs2))
            return 0;
          if (!single_imm_use (lhs2, &use2_p, &use_stmt)
              || gimple_bb (use_stmt) != gimple_bb (stmt)
              || !gimple_assign_cast_p (use_stmt))
            return 0;

          lhs2 = gimple_assign_lhs (use_stmt);
          if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs2))
              || TREE_CODE (TREE_TYPE (lhs2)) == BITINT_TYPE)
            return 0;
        }
      else
        return 0;
    }
  if ((seen & 2) == 0)
    return 0;
  return seen == 3 ? 2 : 1;
}
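
/* The pattern that returns 2 looks roughly like
     _Complex _BitInt(256) t = .ADD_OVERFLOW (a, b);
     _BitInt(256) sum = REALPART_EXPR <t>;
     _BitInt(256) o = IMAGPART_EXPR <t>;
     int ovf = (int) o;
   with everything in one basic block and o used only in the cast.  */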

/* If STMT is some kind of comparison (GIMPLE_COND, comparison assignment)
   comparing large/huge _BitInt types, return the comparison code and if
   non-NULL fill in the comparison operands to *POP1 and *POP2.  */

tree_code
comparison_op (gimple *stmt, tree *pop1, tree *pop2)
{
  tree op1 = NULL_TREE, op2 = NULL_TREE;
  tree_code code = ERROR_MARK;
  if (gimple_code (stmt) == GIMPLE_COND)
    {
      code = gimple_cond_code (stmt);
      op1 = gimple_cond_lhs (stmt);
      op2 = gimple_cond_rhs (stmt);
    }
  else if (is_gimple_assign (stmt))
    {
      code = gimple_assign_rhs_code (stmt);
      op1 = gimple_assign_rhs1 (stmt);
      if (TREE_CODE_CLASS (code) == tcc_comparison
          || TREE_CODE_CLASS (code) == tcc_binary)
        op2 = gimple_assign_rhs2 (stmt);
    }
  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return ERROR_MARK;
  tree type = TREE_TYPE (op1);
  if (TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large)
    return ERROR_MARK;
  if (pop1)
    {
      *pop1 = op1;
      *pop2 = op2;
    }
  return code;
}

/* Class used during large/huge _BitInt lowering containing all the
   state for the methods.  */

struct bitint_large_huge
{
  bitint_large_huge ()
    : m_names (NULL), m_loads (NULL), m_preserved (NULL),
      m_single_use_names (NULL), m_map (NULL), m_vars (NULL),
      m_limb_type (NULL_TREE), m_data (vNULL) {}

  ~bitint_large_huge ();

  void insert_before (gimple *);
  tree limb_access_type (tree, tree);
  tree limb_access (tree, tree, tree, bool);
  void if_then (gimple *, profile_probability, edge &, edge &);
  void if_then_else (gimple *, profile_probability, edge &, edge &);
  void if_then_if_then_else (gimple *g, gimple *,
                             profile_probability, profile_probability,
                             edge &, edge &, edge &);
  tree handle_operand (tree, tree);
  tree prepare_data_in_out (tree, tree, tree *, tree = NULL_TREE);
  tree add_cast (tree, tree);
  tree handle_plus_minus (tree_code, tree, tree, tree);
  tree handle_lshift (tree, tree, tree);
  tree handle_cast (tree, tree, tree);
  tree handle_load (gimple *, tree);
  tree handle_stmt (gimple *, tree);
  tree handle_operand_addr (tree, gimple *, int *, int *);
  tree create_loop (tree, tree *);
  tree lower_mergeable_stmt (gimple *, tree_code &, tree, tree);
  tree lower_comparison_stmt (gimple *, tree_code &, tree, tree);
  void lower_shift_stmt (tree, gimple *);
  void lower_muldiv_stmt (tree, gimple *);
  void lower_float_conv_stmt (tree, gimple *);
  tree arith_overflow_extract_bits (unsigned int, unsigned int, tree,
                                    unsigned int, bool);
  void finish_arith_overflow (tree, tree, tree, tree, tree, tree, gimple *,
                              tree_code);
  void lower_addsub_overflow (tree, gimple *);
  void lower_mul_overflow (tree, gimple *);
  void lower_cplxpart_stmt (tree, gimple *);
  void lower_complexexpr_stmt (gimple *);
  void lower_bit_query (gimple *);
  void lower_call (tree, gimple *);
  void lower_asm (gimple *);
  void lower_stmt (gimple *);

  /* Bitmap of large/huge _BitInt SSA_NAMEs except those that can be
     merged with their uses.  */
  bitmap m_names;
  /* Subset of those for lhs of load statements.  These will be
     cleared in m_names if the loads will be mergeable with all
     their uses.  */
  bitmap m_loads;
  /* Bitmap of large/huge _BitInt SSA_NAMEs that should survive
     to later passes (arguments or return values of calls).  */
  bitmap m_preserved;
  /* Subset of m_names which have a single use.  As the lowering
     can replace various original statements with their lowered
     form even before it is done iterating over all basic blocks,
     testing has_single_use for the purpose of emitting clobbers
     doesn't work properly.  */
  bitmap m_single_use_names;
  /* Used for coalescing/partitioning of large/huge _BitInt SSA_NAMEs
     set in m_names.  */
  var_map m_map;
  /* Mapping of the partitions to corresponding decls.  */
  tree *m_vars;
  /* Unsigned integer type with limb precision.  */
  tree m_limb_type;
  /* Its TYPE_SIZE_UNIT.  */
  unsigned HOST_WIDE_INT m_limb_size;
  /* Location of a gimple stmt which is being currently lowered.  */
  location_t m_loc;
  /* Current stmt iterator where code is being lowered.  */
  gimple_stmt_iterator m_gsi;
  /* Statement after which any clobbers should be added if non-NULL.  */
  gimple *m_after_stmt;
  /* Set when creating loops to the loop header bb and its preheader.  */
  basic_block m_bb, m_preheader_bb;
  /* Stmt iterator after which initialization statements should be emitted.  */
  gimple_stmt_iterator m_init_gsi;
  /* Decl into which a mergeable statement stores result.  */
  tree m_lhs;
  /* handle_operand/handle_stmt can be invoked in various ways.

     lower_mergeable_stmt for large _BitInt calls those with constant
     idx only, expanding to straight line code; for huge _BitInt it
     emits a loop from least significant limb upwards, where each loop
     iteration handles 2 limbs, plus there can be up to one full limb
     and one partial limb processed after the loop, where handle_operand
     and/or handle_stmt are called with constant idx.  m_upwards_2limb
     is set for this case, zero otherwise.  m_upwards is true if it
     is either large or huge _BitInt handled by lower_mergeable_stmt,
     i.e. indexes always increase.

     Another way is used by lower_comparison_stmt, which walks limbs
     from most significant to least significant, partial limb if any
     processed first with constant idx and then loop processing a single
     limb per iteration with non-constant idx.

     Another way is used in lower_shift_stmt, where for LSHIFT_EXPR
     destination limbs are processed from most significant to least
     significant or for RSHIFT_EXPR the other way around, in loops or
     straight line code, but idx usually is non-constant (so from
     handle_operand/handle_stmt POV random access).  The LSHIFT_EXPR
     handling there can access even partial limbs using non-constant
     idx (then m_var_msb should be true); for all the other cases,
     including lower_mergeable_stmt/lower_comparison_stmt, that is
     not the case and so m_var_msb should be false.

     m_first should be set the first time handle_operand/handle_stmt
     is called and clear when it is called for some other limb with
     the same argument.  If the lowering of an operand (e.g. INTEGER_CST)
     or statement (e.g. +/-/<< with < limb_prec constant) needs some
     state between the different calls, when m_first is true it should
     push some trees to m_data vector and also make sure m_data_cnt is
     incremented by how many trees were pushed, and when m_first is
     false, it can use the m_data[m_data_cnt] etc. data or update them,
     just needs to bump m_data_cnt by the same amount as when it was
     called with m_first set.  The toplevel calls to
     handle_operand/handle_stmt should set m_data_cnt to 0 and truncate
     m_data vector when setting m_first to true.

     m_cast_conditional and m_bitfld_load are used when handling a
     bit-field load inside of a widening cast.  handle_cast sometimes
     needs to do runtime comparisons and handle_operand only conditionally
     or even in two separate conditional blocks for one idx (once with
     constant index after comparing the runtime one for equality with the
     constant).  In these cases, m_cast_conditional is set to true and
     the bit-field load then communicates its m_data_cnt to handle_cast
     using m_bitfld_load.  */
  bool m_first;
  bool m_var_msb;
  unsigned m_upwards_2limb;
  bool m_upwards;
  bool m_cast_conditional;
  unsigned m_bitfld_load;
  vec<tree> m_data;
  unsigned int m_data_cnt;
};
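
/* For instance, handle_operand lowering an INTEGER_CST pushes two trees
   and bumps m_data_cnt by 2 when m_first is true; every later call for
   another limb of the same constant reads m_data[m_data_cnt] and
   m_data[m_data_cnt + 1] and bumps m_data_cnt by the same 2, so the
   per-operand state stays aligned across limbs.  */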

bitint_large_huge::~bitint_large_huge ()
{
  BITMAP_FREE (m_names);
  BITMAP_FREE (m_loads);
  BITMAP_FREE (m_preserved);
  BITMAP_FREE (m_single_use_names);
  if (m_map)
    delete_var_map (m_map);
  XDELETEVEC (m_vars);
  m_data.release ();
}

/* Insert gimple statement G before current location
   and set its gimple_location.  */

void
bitint_large_huge::insert_before (gimple *g)
{
  gimple_set_location (g, m_loc);
  gsi_insert_before (&m_gsi, g, GSI_SAME_STMT);
}
/* Return type for accessing limb IDX of BITINT_TYPE TYPE.
   This is normally m_limb_type, except for a partial most
   significant limb if any.  */

tree
bitint_large_huge::limb_access_type (tree type, tree idx)
{
  if (type == NULL_TREE)
    return m_limb_type;
  unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
  unsigned int prec = TYPE_PRECISION (type);
  gcc_assert (i * limb_prec < prec);
  if ((i + 1) * limb_prec <= prec)
    return m_limb_type;
  else
    return build_nonstandard_integer_type (prec % limb_prec,
                                           TYPE_UNSIGNED (type));
}

/* Return a tree how to access limb IDX of VAR corresponding to BITINT_TYPE
   TYPE.  If WRITE_P is true, it will be a store, otherwise a read.  */

tree
bitint_large_huge::limb_access (tree type, tree var, tree idx, bool write_p)
{
  tree atype = (tree_fits_uhwi_p (idx)
                ? limb_access_type (type, idx) : m_limb_type);
  tree ret;
  if (DECL_P (var) && tree_fits_uhwi_p (idx))
    {
      tree ptype = build_pointer_type (strip_array_types (TREE_TYPE (var)));
      unsigned HOST_WIDE_INT off = tree_to_uhwi (idx) * m_limb_size;
      ret = build2 (MEM_REF, m_limb_type,
                    build_fold_addr_expr (var),
                    build_int_cst (ptype, off));
      TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
      TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
    }
  else if (TREE_CODE (var) == MEM_REF && tree_fits_uhwi_p (idx))
    {
      ret
        = build2 (MEM_REF, m_limb_type, TREE_OPERAND (var, 0),
                  size_binop (PLUS_EXPR, TREE_OPERAND (var, 1),
                              build_int_cst (TREE_TYPE (TREE_OPERAND (var, 1)),
                                             tree_to_uhwi (idx)
                                             * m_limb_size)));
      TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
      TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
      TREE_THIS_NOTRAP (ret) = TREE_THIS_NOTRAP (var);
    }
  else
    {
      var = unshare_expr (var);
      if (TREE_CODE (TREE_TYPE (var)) != ARRAY_TYPE
          || !useless_type_conversion_p (m_limb_type,
                                         TREE_TYPE (TREE_TYPE (var))))
        {
          unsigned HOST_WIDE_INT nelts
            = CEIL (tree_to_uhwi (TYPE_SIZE (type)), limb_prec);
          tree atype = build_array_type_nelts (m_limb_type, nelts);
          var = build1 (VIEW_CONVERT_EXPR, atype, var);
        }
      ret = build4 (ARRAY_REF, m_limb_type, var, idx, NULL_TREE, NULL_TREE);
    }
  if (!write_p && !useless_type_conversion_p (atype, m_limb_type))
    {
      gimple *g = gimple_build_assign (make_ssa_name (m_limb_type), ret);
      insert_before (g);
      ret = gimple_assign_lhs (g);
      ret = build1 (NOP_EXPR, atype, ret);
    }
  return ret;
}
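/* E.g. accessing limb 2 of a _BitInt(192) DECL with 64-bit limbs yields
   a MEM_REF at byte offset 16 (2 * m_limb_size) of m_limb_type; only a
   most significant limb of a type whose precision is not a multiple of
   limb_prec gets the narrower access type from limb_access_type.  */
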
/* Emit a half diamond,
   if (COND)
   |\
   | \
   |  \
   |   new_bb1
   |  /
   | /
   |/
   or if (COND) new_bb1;
   PROB is the probability that the condition is true.
   Updates m_gsi to start of new_bb1.
   Sets EDGE_TRUE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb.  */

void
bitint_large_huge::if_then (gimple *cond, profile_probability prob,
                            edge &edge_true, edge &edge_false)
{
  insert_before (cond);
  edge e1 = split_block (gsi_bb (m_gsi), cond);
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
  e1->flags = EDGE_TRUE_VALUE;
  e1->probability = prob;
  e3->probability = prob.invert ();
  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
  edge_true = e2;
  edge_false = e3;
  m_gsi = gsi_after_labels (e1->dest);
}
/* Emit a full diamond,
   if (COND)
       /\
      /  \
     /    \
   new_bb1 new_bb2
     \    /
      \  /
       \/
   or if (COND) new_bb2; else new_bb1;
   PROB is the probability that the condition is true.
   Updates m_gsi to start of new_bb2.
   Sets EDGE_TRUE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb.  */

void
bitint_large_huge::if_then_else (gimple *cond, profile_probability prob,
                                 edge &edge_true, edge &edge_false)
{
  insert_before (cond);
  edge e1 = split_block (gsi_bb (m_gsi), cond);
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  basic_block bb = create_empty_bb (e1->dest);
  add_bb_to_loop (bb, e1->dest->loop_father);
  edge e3 = make_edge (e1->src, bb, EDGE_TRUE_VALUE);
  e1->flags = EDGE_FALSE_VALUE;
  e3->probability = prob;
  e1->probability = prob.invert ();
  bb->count = e1->src->count.apply_probability (prob);
  set_immediate_dominator (CDI_DOMINATORS, bb, e1->src);
  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
  edge_true = make_single_succ_edge (bb, e2->dest, EDGE_FALLTHRU);
  edge_false = e2;
  m_gsi = gsi_after_labels (bb);
}
/* Emit a half diamond with full diamond in it
   if (COND1)
   |\
   | \
   |  \
   |   if (COND2)
   |      /  \
   |     /    \
   |  new_bb1 new_bb2
   |     |    /
    \    |   /
     \   |  /
      \  | /
       \ |/
   or if (COND1) { if (COND2) new_bb2; else new_bb1; }
   PROB1 is the probability that the condition 1 is true.
   PROB2 is the probability that the condition 2 is true.
   Updates m_gsi to start of new_bb1.
   Sets EDGE_TRUE_TRUE to edge from new_bb2 to successor,
   EDGE_TRUE_FALSE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND1) bb.
   If COND2 is NULL, this is equivalent to
   if_then (COND1, PROB1, EDGE_TRUE_FALSE, EDGE_FALSE);
   EDGE_TRUE_TRUE = NULL;  */

void
bitint_large_huge::if_then_if_then_else (gimple *cond1, gimple *cond2,
                                         profile_probability prob1,
                                         profile_probability prob2,
                                         edge &edge_true_true,
                                         edge &edge_true_false,
                                         edge &edge_false)
{
  edge e2, e3, e4 = NULL;
  if_then (cond1, prob1, e2, e3);
  if (cond2 == NULL)
    {
      edge_true_true = NULL;
      edge_true_false = e2;
      edge_false = e3;
      return;
    }
  insert_before (cond2);
  e2 = split_block (gsi_bb (m_gsi), cond2);
  basic_block bb = create_empty_bb (e2->dest);
  add_bb_to_loop (bb, e2->dest->loop_father);
  e4 = make_edge (e2->src, bb, EDGE_TRUE_VALUE);
  set_immediate_dominator (CDI_DOMINATORS, bb, e2->src);
  e4->probability = prob2;
  e2->flags = EDGE_FALSE_VALUE;
  e2->probability = prob2.invert ();
  bb->count = e2->src->count.apply_probability (prob2);
  e4 = make_single_succ_edge (bb, e3->dest, EDGE_FALLTHRU);
  e2 = find_edge (e2->dest, e3->dest);
  edge_true_true = e4;
  edge_true_false = e2;
  edge_false = e3;
  m_gsi = gsi_after_labels (e2->src);
}
/* Emit code to access limb IDX from OP.  */

tree
bitint_large_huge::handle_operand (tree op, tree idx)
{
  switch (TREE_CODE (op))
    {
    case SSA_NAME:
      if (m_names == NULL
          || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
        {
          if (SSA_NAME_IS_DEFAULT_DEF (op))
            {
              if (m_first)
                {
                  tree v = create_tmp_reg (m_limb_type);
                  if (SSA_NAME_VAR (op) && VAR_P (SSA_NAME_VAR (op)))
                    {
                      DECL_NAME (v) = DECL_NAME (SSA_NAME_VAR (op));
                      DECL_SOURCE_LOCATION (v)
                        = DECL_SOURCE_LOCATION (SSA_NAME_VAR (op));
                    }
                  v = get_or_create_ssa_default_def (cfun, v);
                  m_data.safe_push (v);
                }
              tree ret = m_data[m_data_cnt];
              m_data_cnt++;
              if (tree_fits_uhwi_p (idx))
                {
                  tree type = limb_access_type (TREE_TYPE (op), idx);
                  ret = add_cast (type, ret);
                }
              return ret;
            }
          location_t loc_save = m_loc;
          m_loc = gimple_location (SSA_NAME_DEF_STMT (op));
          tree ret = handle_stmt (SSA_NAME_DEF_STMT (op), idx);
          m_loc = loc_save;
          return ret;
        }
      int p;
      gimple *g;
      tree t;
      p = var_to_partition (m_map, op);
      gcc_assert (m_vars[p] != NULL_TREE);
      t = limb_access (TREE_TYPE (op), m_vars[p], idx, false);
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
      insert_before (g);
      t = gimple_assign_lhs (g);
      if (m_first
          && m_single_use_names
          && m_vars[p] != m_lhs
          && m_after_stmt
          && bitmap_bit_p (m_single_use_names, SSA_NAME_VERSION (op)))
        {
          tree clobber = build_clobber (TREE_TYPE (m_vars[p]),
                                        CLOBBER_STORAGE_END);
          g = gimple_build_assign (m_vars[p], clobber);
          gimple_stmt_iterator gsi = gsi_for_stmt (m_after_stmt);
          gsi_insert_after (&gsi, g, GSI_SAME_STMT);
        }
      return t;
    case INTEGER_CST:
      if (tree_fits_uhwi_p (idx))
        {
          tree c, type = limb_access_type (TREE_TYPE (op), idx);
          unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
          if (m_first)
            {
              m_data.safe_push (NULL_TREE);
              m_data.safe_push (NULL_TREE);
            }
          if (limb_prec != HOST_BITS_PER_WIDE_INT)
            {
              wide_int w = wi::rshift (wi::to_wide (op), i * limb_prec,
                                       TYPE_SIGN (TREE_TYPE (op)));
              c = wide_int_to_tree (type,
                                    wide_int::from (w, TYPE_PRECISION (type),
                                                    UNSIGNED));
            }
          else if (i >= TREE_INT_CST_EXT_NUNITS (op))
            c = build_int_cst (type,
                               tree_int_cst_sgn (op) < 0 ? -1 : 0);
          else
            c = build_int_cst (type, TREE_INT_CST_ELT (op, i));
          m_data_cnt += 2;
          return c;
        }
      if (m_first
          || (m_data[m_data_cnt] == NULL_TREE
              && m_data[m_data_cnt + 1] == NULL_TREE))
        {
          unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
          unsigned int rem = prec % (2 * limb_prec);
          int ext;
          unsigned min_prec = bitint_min_cst_precision (op, ext);
          if (m_first)
            {
              m_data.safe_push (NULL_TREE);
              m_data.safe_push (NULL_TREE);
            }
          if (integer_zerop (op))
            {
              tree c = build_zero_cst (m_limb_type);
              m_data[m_data_cnt] = c;
              m_data[m_data_cnt + 1] = c;
            }
          else if (integer_all_onesp (op))
            {
              tree c = build_all_ones_cst (m_limb_type);
              m_data[m_data_cnt] = c;
              m_data[m_data_cnt + 1] = c;
            }
          else if (m_upwards_2limb && min_prec <= (unsigned) limb_prec)
            {
              /* Single limb constant.  Use a phi with that limb from
                 the preheader edge and 0 or -1 constant from the other edge
                 and for the second limb in the loop.  */
              tree out;
              gcc_assert (m_first);
              m_data.pop ();
              m_data.pop ();
              prepare_data_in_out (fold_convert (m_limb_type, op), idx, &out,
                                   build_int_cst (m_limb_type, ext));
            }
          else if (min_prec > prec - rem - 2 * limb_prec)
            {
              /* Constant which has enough significant bits that it isn't
                 worth trying to save .rodata space by extending from smaller
                 number.  */
              tree type;
              if (m_var_msb)
                type = TREE_TYPE (op);
              else
                /* If we have a guarantee the most significant partial limb
                   (if any) will be only accessed through handle_operand
                   with INTEGER_CST idx, we don't need to include the partial
                   limb in .rodata.  */
                type = build_bitint_type (prec - rem, 1);
              tree c = tree_output_constant_def (fold_convert (type, op));
              m_data[m_data_cnt] = c;
              m_data[m_data_cnt + 1] = NULL_TREE;
            }
          else if (m_upwards_2limb)
            {
              /* Constant with smaller number of bits.  Trade conditional
                 code for .rodata space by extending from smaller number.  */
              min_prec = CEIL (min_prec, 2 * limb_prec) * (2 * limb_prec);
              tree type = build_bitint_type (min_prec, 1);
              tree c = tree_output_constant_def (fold_convert (type, op));
              tree idx2 = make_ssa_name (sizetype);
              g = gimple_build_assign (idx2, PLUS_EXPR, idx, size_one_node);
              insert_before (g);
              g = gimple_build_cond (LT_EXPR, idx,
                                     size_int (min_prec / limb_prec),
                                     NULL_TREE, NULL_TREE);
              edge edge_true, edge_false;
              if_then (g, (min_prec >= (prec - rem) / 2
                           ? profile_probability::likely ()
                           : profile_probability::unlikely ()),
                       edge_true, edge_false);
              tree c1 = limb_access (TREE_TYPE (op), c, idx, false);
              g = gimple_build_assign (make_ssa_name (TREE_TYPE (c1)), c1);
              insert_before (g);
              c1 = gimple_assign_lhs (g);
              tree c2 = limb_access (TREE_TYPE (op), c, idx2, false);
              g = gimple_build_assign (make_ssa_name (TREE_TYPE (c2)), c2);
              insert_before (g);
              c2 = gimple_assign_lhs (g);
              tree c3 = build_int_cst (m_limb_type, ext);
              m_gsi = gsi_after_labels (edge_true->dest);
              m_data[m_data_cnt] = make_ssa_name (m_limb_type);
              m_data[m_data_cnt + 1] = make_ssa_name (m_limb_type);
              gphi *phi = create_phi_node (m_data[m_data_cnt],
                                           edge_true->dest);
              add_phi_arg (phi, c1, edge_true, UNKNOWN_LOCATION);
              add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
              phi = create_phi_node (m_data[m_data_cnt + 1], edge_true->dest);
              add_phi_arg (phi, c2, edge_true, UNKNOWN_LOCATION);
              add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
            }
          else
            {
              /* Constant with smaller number of bits.  Trade conditional
                 code for .rodata space by extending from smaller number.
                 Version for loops with random access to the limbs or
                 downwards loops.  */
              min_prec = CEIL (min_prec, limb_prec) * limb_prec;
              tree c;
              if (min_prec <= (unsigned) limb_prec)
                c = fold_convert (m_limb_type, op);
              else
                {
                  tree type = build_bitint_type (min_prec, 1);
                  c = tree_output_constant_def (fold_convert (type, op));
                }
              m_data[m_data_cnt] = c;
              m_data[m_data_cnt + 1] = integer_type_node;
            }
          t = m_data[m_data_cnt];
          if (m_data[m_data_cnt + 1] == NULL_TREE)
            {
              t = limb_access (TREE_TYPE (op), t, idx, false);
              g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
              insert_before (g);
              t = gimple_assign_lhs (g);
            }
        }
      else if (m_data[m_data_cnt + 1] == NULL_TREE)
        {
          t = limb_access (TREE_TYPE (op), m_data[m_data_cnt], idx, false);
          g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
          insert_before (g);
          t = gimple_assign_lhs (g);
        }
      else
        t = m_data[m_data_cnt + 1];
      if (m_data[m_data_cnt + 1] == integer_type_node)
        {
          unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
          unsigned rem = prec % (2 * limb_prec);
          int ext = tree_int_cst_sgn (op) < 0 ? -1 : 0;
          tree c = m_data[m_data_cnt];
          unsigned min_prec = TYPE_PRECISION (TREE_TYPE (c));
          g = gimple_build_cond (LT_EXPR, idx,
                                 size_int (min_prec / limb_prec),
                                 NULL_TREE, NULL_TREE);
          edge edge_true, edge_false;
          if_then (g, (min_prec >= (prec - rem) / 2
                       ? profile_probability::likely ()
                       : profile_probability::unlikely ()),
                   edge_true, edge_false);
          if (min_prec > (unsigned) limb_prec)
            {
              c = limb_access (TREE_TYPE (op), c, idx, false);
              g = gimple_build_assign (make_ssa_name (TREE_TYPE (c)), c);
              insert_before (g);
              c = gimple_assign_lhs (g);
            }
          tree c2 = build_int_cst (m_limb_type, ext);
          m_gsi = gsi_after_labels (edge_true->dest);
          t = make_ssa_name (m_limb_type);
          gphi *phi = create_phi_node (t, edge_true->dest);
          add_phi_arg (phi, c, edge_true, UNKNOWN_LOCATION);
          add_phi_arg (phi, c2, edge_false, UNKNOWN_LOCATION);
        }
      m_data_cnt += 2;
      return t;
    default:
      gcc_unreachable ();
    }
}

/* Helper method, add a PHI node with VAL from preheader edge if
   inside of a loop and m_first.  Keep state in a pair of m_data
   elements.  If VAL_OUT is non-NULL, use that as PHI argument from
   the latch edge, otherwise create a new SSA_NAME for it and let
   caller initialize it.  */

tree
bitint_large_huge::prepare_data_in_out (tree val, tree idx, tree *data_out,
                                        tree val_out)
{
  if (!m_first)
    {
      *data_out = tree_fits_uhwi_p (idx) ? NULL_TREE : m_data[m_data_cnt + 1];
      return m_data[m_data_cnt];
    }

  *data_out = NULL_TREE;
  if (tree_fits_uhwi_p (idx))
    {
      m_data.safe_push (val);
      m_data.safe_push (NULL_TREE);
      return val;
    }

  tree in = make_ssa_name (TREE_TYPE (val));
  gphi *phi = create_phi_node (in, m_bb);
  edge e1 = find_edge (m_preheader_bb, m_bb);
  edge e2 = EDGE_PRED (m_bb, 0);
  if (e1 == e2)
    e2 = EDGE_PRED (m_bb, 1);
  add_phi_arg (phi, val, e1, UNKNOWN_LOCATION);
  tree out = val_out ? val_out : make_ssa_name (TREE_TYPE (val));
  add_phi_arg (phi, out, e2, UNKNOWN_LOCATION);
  m_data.safe_push (in);
  m_data.safe_push (out);
  return in;
}
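/* Schematically, for non-constant IDX inside the lowering loop this
   creates
     in = PHI <val (preheader), out (latch)>
   and records both in m_data, so the caller can later assign the value
   for the next iteration to out.  */
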
/* Return VAL cast to TYPE.  If VAL is INTEGER_CST, just
   convert it without emitting any code, otherwise emit
   the conversion statement before the current location.  */

tree
bitint_large_huge::add_cast (tree type, tree val)
{
  if (TREE_CODE (val) == INTEGER_CST)
    return fold_convert (type, val);

  tree lhs = make_ssa_name (type);
  gimple *g = gimple_build_assign (lhs, NOP_EXPR, val);
  insert_before (g);
  return lhs;
}
/* Helper of handle_stmt method, handle PLUS_EXPR or MINUS_EXPR.  */

tree
bitint_large_huge::handle_plus_minus (tree_code code, tree rhs1, tree rhs2,
                                      tree idx)
{
  tree lhs, data_out, ctype;
  tree rhs1_type = TREE_TYPE (rhs1);
  gimple *g;
  tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
                                      &data_out);

  if (optab_handler (code == PLUS_EXPR ? uaddc5_optab : usubc5_optab,
                     TYPE_MODE (m_limb_type)) != CODE_FOR_nothing)
    {
      ctype = build_complex_type (m_limb_type);
      if (!types_compatible_p (rhs1_type, m_limb_type))
        {
          if (!TYPE_UNSIGNED (rhs1_type))
            {
              tree type = unsigned_type_for (rhs1_type);
              rhs1 = add_cast (type, rhs1);
              rhs2 = add_cast (type, rhs2);
            }
          rhs1 = add_cast (m_limb_type, rhs1);
          rhs2 = add_cast (m_limb_type, rhs2);
        }
      lhs = make_ssa_name (ctype);
      g = gimple_build_call_internal (code == PLUS_EXPR
                                      ? IFN_UADDC : IFN_USUBC,
                                      3, rhs1, rhs2, data_in);
      gimple_call_set_lhs (g, lhs);
      insert_before (g);
      if (data_out == NULL_TREE)
        data_out = make_ssa_name (m_limb_type);
      g = gimple_build_assign (data_out, IMAGPART_EXPR,
                               build1 (IMAGPART_EXPR, m_limb_type, lhs));
      insert_before (g);
    }
  else if (types_compatible_p (rhs1_type, m_limb_type))
    {
      ctype = build_complex_type (m_limb_type);
      lhs = make_ssa_name (ctype);
      g = gimple_build_call_internal (code == PLUS_EXPR
                                      ? IFN_ADD_OVERFLOW : IFN_SUB_OVERFLOW,
                                      2, rhs1, rhs2);
      gimple_call_set_lhs (g, lhs);
      insert_before (g);
      if (data_out == NULL_TREE)
        data_out = make_ssa_name (m_limb_type);
      if (!integer_zerop (data_in))
        {
          rhs1 = make_ssa_name (m_limb_type);
          g = gimple_build_assign (rhs1, REALPART_EXPR,
                                   build1 (REALPART_EXPR, m_limb_type, lhs));
          insert_before (g);
          rhs2 = make_ssa_name (m_limb_type);
          g = gimple_build_assign (rhs2, IMAGPART_EXPR,
                                   build1 (IMAGPART_EXPR, m_limb_type, lhs));
          insert_before (g);
          lhs = make_ssa_name (ctype);
          g = gimple_build_call_internal (code == PLUS_EXPR
                                          ? IFN_ADD_OVERFLOW
                                          : IFN_SUB_OVERFLOW,
                                          2, rhs1, data_in);
          gimple_call_set_lhs (g, lhs);
          insert_before (g);
          data_in = make_ssa_name (m_limb_type);
          g = gimple_build_assign (data_in, IMAGPART_EXPR,
                                   build1 (IMAGPART_EXPR, m_limb_type, lhs));
          insert_before (g);
          g = gimple_build_assign (data_out, PLUS_EXPR, rhs2, data_in);
          insert_before (g);
        }
      else
        {
          g = gimple_build_assign (data_out, IMAGPART_EXPR,
                                   build1 (IMAGPART_EXPR, m_limb_type, lhs));
          insert_before (g);
        }
    }
  else
    {
      tree in = add_cast (rhs1_type, data_in);
      lhs = make_ssa_name (rhs1_type);
      g = gimple_build_assign (lhs, code, rhs1, rhs2);
      insert_before (g);
      rhs1 = make_ssa_name (rhs1_type);
      g = gimple_build_assign (rhs1, code, lhs, in);
      insert_before (g);
      m_data[m_data_cnt] = NULL_TREE;
      m_data_cnt += 2;
      return rhs1;
    }
  rhs1 = make_ssa_name (m_limb_type);
  g = gimple_build_assign (rhs1, REALPART_EXPR,
                           build1 (REALPART_EXPR, m_limb_type, lhs));
  insert_before (g);
  if (!types_compatible_p (rhs1_type, m_limb_type))
    rhs1 = add_cast (rhs1_type, rhs1);
  m_data[m_data_cnt] = data_out;
  m_data_cnt += 2;
  return rhs1;
}
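
/* With the uaddc5 optab available, one loop iteration adding limb I of
   A and B thus looks roughly like
     _Complex limb t = .UADDC (a[I], b[I], carry_in);
     res[I] = REALPART_EXPR <t>;
     carry_out = IMAGPART_EXPR <t>;
   where carry_in/carry_out is the PHI pair created by
   prepare_data_in_out.  */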

/* Helper function for handle_stmt method, handle LSHIFT_EXPR by
   count in [0, limb_prec - 1] range.  */

tree
bitint_large_huge::handle_lshift (tree rhs1, tree rhs2, tree idx)
{
  unsigned HOST_WIDE_INT cnt = tree_to_uhwi (rhs2);
  gcc_checking_assert (cnt < (unsigned) limb_prec);
  if (cnt == 0)
    return rhs1;

  tree lhs, data_out, rhs1_type = TREE_TYPE (rhs1);
  gimple *g;
  tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
                                      &data_out);

  if (!integer_zerop (data_in))
    {
      lhs = make_ssa_name (m_limb_type);
      g = gimple_build_assign (lhs, RSHIFT_EXPR, data_in,
                               build_int_cst (unsigned_type_node,
                                              limb_prec - cnt));
      insert_before (g);
      if (!types_compatible_p (rhs1_type, m_limb_type))
        lhs = add_cast (rhs1_type, lhs);
      data_in = lhs;
    }
  if (types_compatible_p (rhs1_type, m_limb_type))
    {
      if (data_out == NULL_TREE)
        data_out = make_ssa_name (m_limb_type);
      g = gimple_build_assign (data_out, rhs1);
      insert_before (g);
    }
  if (cnt < (unsigned) TYPE_PRECISION (rhs1_type))
    {
      lhs = make_ssa_name (rhs1_type);
      g = gimple_build_assign (lhs, LSHIFT_EXPR, rhs1, rhs2);
      insert_before (g);
      if (!integer_zerop (data_in))
        {
          rhs1 = lhs;
          lhs = make_ssa_name (rhs1_type);
          g = gimple_build_assign (lhs, BIT_IOR_EXPR, rhs1, data_in);
          insert_before (g);
        }
    }
  else
    lhs = data_in;
  m_data[m_data_cnt] = data_out;
  m_data_cnt += 2;
  return lhs;
}
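
/* E.g. for CNT 3 each produced limb is effectively
     (rhs1 << 3) | (prev_limb >> (limb_prec - 3))
   with prev_limb the value of the previously processed (less
   significant) limb carried over through data_in/data_out.  */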

/* Helper function for handle_stmt method, handle an integral
   to integral conversion.  */

tree
bitint_large_huge::handle_cast (tree lhs_type, tree rhs1, tree idx)
{
  tree rhs_type = TREE_TYPE (rhs1);
  gimple *g;
  if (TREE_CODE (rhs1) == SSA_NAME
      && TREE_CODE (lhs_type) == BITINT_TYPE
      && TREE_CODE (rhs_type) == BITINT_TYPE
      && bitint_precision_kind (lhs_type) >= bitint_prec_large
      && bitint_precision_kind (rhs_type) >= bitint_prec_large)
    {
      if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type)
          /* If lhs has bigger precision than rhs, we can use
             the simple case only if there is a guarantee that
             the most significant limb is handled in straight
             line code.  If m_var_msb (on left shifts) or
             if m_upwards_2limb * limb_prec is equal to
             lhs precision, that is not the case.  */
          || (!m_var_msb
              && tree_int_cst_equal (TYPE_SIZE (rhs_type),
                                     TYPE_SIZE (lhs_type))
              && (!m_upwards_2limb
                  || (m_upwards_2limb * limb_prec
                      < TYPE_PRECISION (lhs_type)))))
        {
          rhs1 = handle_operand (rhs1, idx);
          if (tree_fits_uhwi_p (idx))
            {
              tree type = limb_access_type (lhs_type, idx);
              if (!types_compatible_p (type, TREE_TYPE (rhs1)))
                rhs1 = add_cast (type, rhs1);
            }
          return rhs1;
        }
      tree t;
      /* Indexes lower than this don't need any special processing.  */
      unsigned low = ((unsigned) TYPE_PRECISION (rhs_type)
                      - !TYPE_UNSIGNED (rhs_type)) / limb_prec;
      /* Indexes >= this always contain an extension.  */
      unsigned high = CEIL ((unsigned) TYPE_PRECISION (rhs_type), limb_prec);
      bool save_first = m_first;
      if (m_first)
        {
          m_data.safe_push (NULL_TREE);
          m_data.safe_push (NULL_TREE);
          m_data.safe_push (NULL_TREE);
          if (TYPE_UNSIGNED (rhs_type))
            /* No need to keep state between iterations.  */
            ;
          else if (m_upwards && !m_upwards_2limb)
            /* We need to keep state between iterations, but
               not within any loop, everything is straight line
               code with only increasing indexes.  */
            ;
          else if (!m_upwards_2limb)
            {
              unsigned save_data_cnt = m_data_cnt;
              gimple_stmt_iterator save_gsi = m_gsi;
              m_gsi = m_init_gsi;
              if (gsi_end_p (m_gsi))
                m_gsi = gsi_after_labels (gsi_bb (m_gsi));
              else
                gsi_next (&m_gsi);
              m_data_cnt = save_data_cnt + 3;
              t = handle_operand (rhs1, size_int (low));
              m_first = false;
              m_data[save_data_cnt + 2]
                = build_int_cst (NULL_TREE, m_data_cnt);
              m_data_cnt = save_data_cnt;
              t = add_cast (signed_type_for (m_limb_type), t);
              tree lpm1 = build_int_cst (unsigned_type_node, limb_prec - 1);
              tree n = make_ssa_name (TREE_TYPE (t));
              g = gimple_build_assign (n, RSHIFT_EXPR, t, lpm1);
              insert_before (g);
              m_data[save_data_cnt + 1] = add_cast (m_limb_type, n);
              m_init_gsi = m_gsi;
              if (gsi_end_p (m_init_gsi))
                m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
              else
                gsi_prev (&m_init_gsi);
              m_gsi = save_gsi;
            }
          else if (m_upwards_2limb * limb_prec < TYPE_PRECISION (rhs_type))
            /* We need to keep state between iterations, but
               fortunately not within the loop, only afterwards.  */
            ;
          else
            {
              tree out;
              m_data.truncate (m_data_cnt);
              prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
              m_data.safe_push (NULL_TREE);
            }
        }

      unsigned save_data_cnt = m_data_cnt;
      m_data_cnt += 3;
      if (!tree_fits_uhwi_p (idx))
        {
          if (m_upwards_2limb
              && (m_upwards_2limb * limb_prec
                  <= ((unsigned) TYPE_PRECISION (rhs_type)
                      - !TYPE_UNSIGNED (rhs_type))))
            {
              rhs1 = handle_operand (rhs1, idx);
              if (m_first)
                m_data[save_data_cnt + 2]
                  = build_int_cst (NULL_TREE, m_data_cnt);
              m_first = save_first;
              return rhs1;
            }
          bool single_comparison
            = low == high || (m_upwards_2limb && (low & 1) == m_first);
          g = gimple_build_cond (single_comparison ? LT_EXPR : LE_EXPR,
                                 idx, size_int (low), NULL_TREE, NULL_TREE);
          edge edge_true_true, edge_true_false, edge_false;
          if_then_if_then_else (g, (single_comparison ? NULL
                                    : gimple_build_cond (EQ_EXPR, idx,
                                                         size_int (low),
                                                         NULL_TREE,
                                                         NULL_TREE)),
                                profile_probability::likely (),
                                profile_probability::unlikely (),
                                edge_true_true, edge_true_false, edge_false);
          bool save_cast_conditional = m_cast_conditional;
          m_cast_conditional = true;
          m_bitfld_load = 0;
          tree t1 = handle_operand (rhs1, idx), t2 = NULL_TREE;
          if (m_first)
            m_data[save_data_cnt + 2]
              = build_int_cst (NULL_TREE, m_data_cnt);
          tree ext = NULL_TREE;
          tree bitfld = NULL_TREE;
          if (!single_comparison)
            {
              m_gsi = gsi_after_labels (edge_true_true->src);
              m_first = false;
              m_data_cnt = save_data_cnt + 3;
              if (m_bitfld_load)
                {
                  bitfld = m_data[m_bitfld_load];
                  m_data[m_bitfld_load] = m_data[m_bitfld_load + 2];
                  m_bitfld_load = 0;
                }
              t2 = handle_operand (rhs1, size_int (low));
              if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t2)))
                t2 = add_cast (m_limb_type, t2);
              if (!TYPE_UNSIGNED (rhs_type) && m_upwards_2limb)
                {
                  ext = add_cast (signed_type_for (m_limb_type), t2);
                  tree lpm1 = build_int_cst (unsigned_type_node,
                                             limb_prec - 1);
                  tree n = make_ssa_name (TREE_TYPE (ext));
                  g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
                  insert_before (g);
                  ext = add_cast (m_limb_type, n);
                }
            }
          tree t3;
          if (TYPE_UNSIGNED (rhs_type))
            t3 = build_zero_cst (m_limb_type);
          else if (m_upwards_2limb && (save_first || ext != NULL_TREE))
            t3 = m_data[save_data_cnt];
          else
            t3 = m_data[save_data_cnt + 1];
          m_gsi = gsi_after_labels (edge_true_false->dest);
          t = make_ssa_name (m_limb_type);
          gphi *phi = create_phi_node (t, edge_true_false->dest);
          add_phi_arg (phi, t1, edge_true_false, UNKNOWN_LOCATION);
          add_phi_arg (phi, t3, edge_false, UNKNOWN_LOCATION);
          if (edge_true_true)
            add_phi_arg (phi, t2, edge_true_true, UNKNOWN_LOCATION);
          if (ext)
            {
              tree t4 = make_ssa_name (m_limb_type);
              phi = create_phi_node (t4, edge_true_false->dest);
              add_phi_arg (phi, build_zero_cst (m_limb_type), edge_true_false,
                           UNKNOWN_LOCATION);
              add_phi_arg (phi, m_data[save_data_cnt], edge_false,
                           UNKNOWN_LOCATION);
              add_phi_arg (phi, ext, edge_true_true, UNKNOWN_LOCATION);
              if (!save_cast_conditional)
                {
                  g = gimple_build_assign (m_data[save_data_cnt + 1], t4);
                  insert_before (g);
                }
              else
                for (basic_block bb = gsi_bb (m_gsi);;)
                  {
                    edge e1 = single_succ_edge (bb);
                    edge e2 = find_edge (e1->dest, m_bb), e3;
                    tree t5 = (e2 ? m_data[save_data_cnt + 1]
                               : make_ssa_name (m_limb_type));
                    phi = create_phi_node (t5, e1->dest);
                    edge_iterator ei;
                    FOR_EACH_EDGE (e3, ei, e1->dest->preds)
                      add_phi_arg (phi, (e3 == e1 ? t4
                                         : build_zero_cst (m_limb_type)),
                                   e3, UNKNOWN_LOCATION);
                    if (e2)
                      break;
                    t4 = t5;
                    bb = e1->dest;
                  }
            }
          if (m_bitfld_load)
            {
              tree t4;
              if (!m_first)
                t4 = m_data[m_bitfld_load + 1];
              else
                t4 = make_ssa_name (m_limb_type);
              phi = create_phi_node (t4, edge_true_false->dest);
              add_phi_arg (phi,
                           edge_true_true ? bitfld : m_data[m_bitfld_load],
                           edge_true_false, UNKNOWN_LOCATION);
              add_phi_arg (phi, m_data[m_bitfld_load + 2],
                           edge_false, UNKNOWN_LOCATION);
              if (edge_true_true)
                add_phi_arg (phi, m_data[m_bitfld_load], edge_true_true,
                             UNKNOWN_LOCATION);
              m_data[m_bitfld_load] = t4;
              m_data[m_bitfld_load + 2] = t4;
              m_bitfld_load = 0;
            }
          m_cast_conditional = save_cast_conditional;
          m_first = save_first;
          return t;
        }
      else
        {
          if (tree_to_uhwi (idx) < low)
            {
              t = handle_operand (rhs1, idx);
              if (m_first)
                m_data[save_data_cnt + 2]
                  = build_int_cst (NULL_TREE, m_data_cnt);
            }
          else if (tree_to_uhwi (idx) < high)
            {
              t = handle_operand (rhs1, size_int (low));
              if (m_first)
                m_data[save_data_cnt + 2]
                  = build_int_cst (NULL_TREE, m_data_cnt);
              if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t)))
                t = add_cast (m_limb_type, t);
              tree ext = NULL_TREE;
              if (!TYPE_UNSIGNED (rhs_type) && m_upwards)
                {
                  ext = add_cast (signed_type_for (m_limb_type), t);
                  tree lpm1 = build_int_cst (unsigned_type_node,
                                             limb_prec - 1);
                  tree n = make_ssa_name (TREE_TYPE (ext));
                  g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
                  insert_before (g);
                  ext = add_cast (m_limb_type, n);
                  m_data[save_data_cnt + 1] = ext;
                }
            }
          else
            {
              if (TYPE_UNSIGNED (rhs_type) && m_first)
                {
                  handle_operand (rhs1, size_zero_node);
                  m_data[save_data_cnt + 2]
                    = build_int_cst (NULL_TREE, m_data_cnt);
                }
              else
                m_data_cnt = tree_to_uhwi (m_data[save_data_cnt + 2]);
              if (TYPE_UNSIGNED (rhs_type))
                t = build_zero_cst (m_limb_type);
              else if (m_bb && m_data[save_data_cnt])
                t = m_data[save_data_cnt];
              else
                t = m_data[save_data_cnt + 1];
            }
          tree type = limb_access_type (lhs_type, idx);
          if (!useless_type_conversion_p (type, m_limb_type))
            t = add_cast (type, t);
          m_first = save_first;
          return t;
        }
    }
  else if (TREE_CODE (lhs_type) == BITINT_TYPE
           && bitint_precision_kind (lhs_type) >= bitint_prec_large
           && INTEGRAL_TYPE_P (rhs_type))
    {
      /* Add support for 3 or more limbs filled in from normal integral
         type if this assert fails.  If no target chooses limb mode smaller
         than half of largest supported normal integral type, this will not
         be needed.  */
      gcc_assert (TYPE_PRECISION (rhs_type) <= 2 * limb_prec);
      tree r1 = NULL_TREE, r2 = NULL_TREE, rext = NULL_TREE;
      if (m_first)
        {
          gimple_stmt_iterator save_gsi = m_gsi;
          m_gsi = m_init_gsi;
          if (gsi_end_p (m_gsi))
            m_gsi = gsi_after_labels (gsi_bb (m_gsi));
          else
            gsi_next (&m_gsi);
          if (TREE_CODE (rhs_type) == BITINT_TYPE
              && bitint_precision_kind (rhs_type) == bitint_prec_middle)
            {
              tree type = NULL_TREE;
              rhs1 = maybe_cast_middle_bitint (&m_gsi, rhs1, type);
              rhs_type = TREE_TYPE (rhs1);
            }
          r1 = rhs1;
          if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
            r1 = add_cast (m_limb_type, rhs1);
          if (TYPE_PRECISION (rhs_type) > limb_prec)
            {
              g = gimple_build_assign (make_ssa_name (rhs_type),
                                       RSHIFT_EXPR, rhs1,
                                       build_int_cst (unsigned_type_node,
                                                      limb_prec));
              insert_before (g);
              r2 = add_cast (m_limb_type, gimple_assign_lhs (g));
            }
          if (TYPE_UNSIGNED (rhs_type))
            rext = build_zero_cst (m_limb_type);
          else
            {
              rext = add_cast (signed_type_for (m_limb_type), r2 ? r2 : r1);
              g = gimple_build_assign (make_ssa_name (TREE_TYPE (rext)),
                                       RSHIFT_EXPR, rext,
                                       build_int_cst (unsigned_type_node,
                                                      limb_prec - 1));
              insert_before (g);
              rext = add_cast (m_limb_type, gimple_assign_lhs (g));
            }
          m_init_gsi = m_gsi;
          if (gsi_end_p (m_init_gsi))
            m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
          else
            gsi_prev (&m_init_gsi);
          m_gsi = save_gsi;
        }
      tree t;
      if (m_upwards_2limb)
        {
          if (m_first)
            {
              tree out1, out2;
              prepare_data_in_out (r1, idx, &out1, rext);
              if (TYPE_PRECISION (rhs_type) > limb_prec)
                {
                  prepare_data_in_out (r2, idx, &out2, rext);
                  m_data.pop ();
                  t = m_data.pop ();
                  m_data[m_data_cnt + 1] = t;
                }
              else
                m_data[m_data_cnt + 1] = rext;
              m_data.safe_push (rext);
              t = m_data[m_data_cnt];
            }
          else if (!tree_fits_uhwi_p (idx))
            t = m_data[m_data_cnt + 1];
          else
            {
              tree type = limb_access_type (lhs_type, idx);
              t = m_data[m_data_cnt + 2];
              if (!useless_type_conversion_p (type, m_limb_type))
                t = add_cast (type, t);
            }
          m_data_cnt += 3;
          return t;
        }
      else if (m_first)
        {
          m_data.safe_push (r1);
          m_data.safe_push (r2);
          m_data.safe_push (rext);
        }
      if (tree_fits_uhwi_p (idx))
        {
          tree type = limb_access_type (lhs_type, idx);
          if (integer_zerop (idx))
            t = m_data[m_data_cnt];
          else if (TYPE_PRECISION (rhs_type) > limb_prec
                   && integer_onep (idx))
            t = m_data[m_data_cnt + 1];
          else
            t = m_data[m_data_cnt + 2];
          if (!useless_type_conversion_p (type, m_limb_type))
            t = add_cast (type, t);
          m_data_cnt += 3;
          return t;
        }
      g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
                             NULL_TREE, NULL_TREE);
      edge e2, e3, e4 = NULL;
      if_then (g, profile_probability::likely (), e2, e3);
      if (m_data[m_data_cnt + 1])
        {
          g = gimple_build_cond (EQ_EXPR, idx, size_one_node,
                                 NULL_TREE, NULL_TREE);
          insert_before (g);
          edge e5 = split_block (gsi_bb (m_gsi), g);
          e4 = make_edge (e5->src, e2->dest, EDGE_TRUE_VALUE);
          e2 = find_edge (e5->dest, e2->dest);
          e4->probability = profile_probability::unlikely ();
          e5->flags = EDGE_FALSE_VALUE;
          e5->probability = e4->probability.invert ();
        }
      m_gsi = gsi_after_labels (e2->dest);
      t = make_ssa_name (m_limb_type);
      gphi *phi = create_phi_node (t, e2->dest);
      add_phi_arg (phi, m_data[m_data_cnt + 2], e2, UNKNOWN_LOCATION);
      add_phi_arg (phi, m_data[m_data_cnt], e3, UNKNOWN_LOCATION);
      if (e4)
        add_phi_arg (phi, m_data[m_data_cnt + 1], e4, UNKNOWN_LOCATION);
      m_data_cnt += 3;
      return t;
    }
  return NULL_TREE;
}
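
/* E.g. when casting a _BitInt(128) value X to _BitInt(256) with 64-bit
   limbs, limbs 0 and 1 of the result come from X and limbs 2 and 3 are
   the extension: zero if X's type is unsigned, otherwise the limb
   computed by arithmetically shifting X's most significant limb right
   by limb_prec - 1.  */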

/* Helper function for handle_stmt method, handle a load from memory.  */

tree
bitint_large_huge::handle_load (gimple *stmt, tree idx)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs_type = TREE_TYPE (rhs1);
  bool eh = stmt_ends_bb_p (stmt);
  edge eh_edge = NULL;
  gimple *g;

  if (eh)
    {
      edge_iterator ei;
      basic_block bb = gimple_bb (stmt);

      FOR_EACH_EDGE (eh_edge, ei, bb->succs)
        if (eh_edge->flags & EDGE_EH)
          break;
    }

  if (TREE_CODE (rhs1) == COMPONENT_REF
      && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
    {
      tree fld = TREE_OPERAND (rhs1, 1);
      /* For little-endian, we can allow as inputs bit-fields
         which start at a limb boundary.  */
      gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
      if (DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
          && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % limb_prec) == 0)
        goto normal_load;
      /* Even if DECL_FIELD_BIT_OFFSET (fld) is a multiple of BITS_PER_UNIT,
         handle it normally for now.  */
      if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
        goto normal_load;
      tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
      poly_int64 bitoffset;
      poly_uint64 field_offset, repr_offset;
      bool var_field_off = false;
      if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
          && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
        bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
      else
        {
          bitoffset = 0;
          var_field_off = true;
        }
      bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
                    - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
      tree nrhs1 = build3 (COMPONENT_REF, TREE_TYPE (repr),
                           TREE_OPERAND (rhs1, 0), repr,
                           var_field_off ? TREE_OPERAND (rhs1, 2) : NULL_TREE);
      HOST_WIDE_INT bo = bitoffset.to_constant ();
      unsigned bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
      unsigned bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
      if (m_first)
        {
          if (m_upwards)
            {
              gimple_stmt_iterator save_gsi = m_gsi;
              m_gsi = m_init_gsi;
              if (gsi_end_p (m_gsi))
                m_gsi = gsi_after_labels (gsi_bb (m_gsi));
              else
                gsi_next (&m_gsi);
              tree t = limb_access (rhs_type, nrhs1, size_int (bo_idx), true);
              tree iv = make_ssa_name (m_limb_type);
              g = gimple_build_assign (iv, t);
              insert_before (g);
              if (eh)
                {
                  maybe_duplicate_eh_stmt (g, stmt);
                  if (eh_edge)
                    {
                      edge e = split_block (gsi_bb (m_gsi), g);
                      make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
                        = profile_probability::very_unlikely ();
                      m_gsi = gsi_after_labels (e->dest);
                      if (gsi_bb (save_gsi) == e->src)
                        {
                          if (gsi_end_p (save_gsi))
                            save_gsi = gsi_end_bb (e->dest);
                          else
                            save_gsi = gsi_for_stmt (gsi_stmt (save_gsi));
                        }
                      if (m_preheader_bb == e->src)
                        m_preheader_bb = e->dest;
                    }
                }
              m_init_gsi = m_gsi;
              if (gsi_end_p (m_init_gsi))
                m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
              else
                gsi_prev (&m_init_gsi);
              m_gsi = save_gsi;
              tree out;
              prepare_data_in_out (iv, idx, &out);
              out = m_data[m_data_cnt];
              m_data.safe_push (out);
            }
          else
            {
              m_data.safe_push (NULL_TREE);
              m_data.safe_push (NULL_TREE);
              m_data.safe_push (NULL_TREE);
            }
        }

      tree nidx0 = NULL_TREE, nidx1;
      tree iv = m_data[m_data_cnt];
      if (m_cast_conditional && iv)
        {
          gcc_assert (!m_bitfld_load);
          m_bitfld_load = m_data_cnt;
        }
      if (tree_fits_uhwi_p (idx))
        {
          unsigned prec = TYPE_PRECISION (rhs_type);
          unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
          gcc_assert (i * limb_prec < prec);
          nidx1 = size_int (i + bo_idx + 1);
          if ((i + 1) * limb_prec > prec)
            {
              prec %= limb_prec;
              if (prec + bo_bit <= (unsigned) limb_prec)
                nidx1 = NULL_TREE;
            }
          if (!iv)
            nidx0 = size_int (i + bo_idx);
        }
      else
        {
          if (!iv)
            {
              if (bo_idx == 0)
                nidx0 = idx;
              else
                {
                  nidx0 = make_ssa_name (sizetype);
                  g = gimple_build_assign (nidx0, PLUS_EXPR, idx,
                                           size_int (bo_idx));
                  insert_before (g);
                }
            }
          nidx1 = make_ssa_name (sizetype);
          g = gimple_build_assign (nidx1, PLUS_EXPR, idx,
                                   size_int (bo_idx + 1));
          insert_before (g);
        }

      tree iv2 = NULL_TREE;
      if (nidx0)
        {
          tree t = limb_access (rhs_type, nrhs1, nidx0, true);
          iv = make_ssa_name (m_limb_type);
          g = gimple_build_assign (iv, t);
          insert_before (g);
          gcc_assert (!eh);
        }
      if (nidx1)
        {
          bool conditional = m_var_msb && !tree_fits_uhwi_p (idx);
          unsigned prec = TYPE_PRECISION (rhs_type);
          if (conditional)
            {
              if ((prec % limb_prec) == 0
                  || ((prec % limb_prec) + bo_bit > (unsigned) limb_prec))
                conditional = false;
            }
          edge edge_true = NULL, edge_false = NULL;
          if (conditional)
            {
              g = gimple_build_cond (NE_EXPR, idx,
                                     size_int (prec / limb_prec),
                                     NULL_TREE, NULL_TREE);
              if_then (g, profile_probability::likely (),
                       edge_true, edge_false);
            }
          tree t = limb_access (rhs_type, nrhs1, nidx1, true);
          if (m_upwards_2limb
              && !m_first
              && !m_bitfld_load
              && !tree_fits_uhwi_p (idx))
            iv2 = m_data[m_data_cnt + 1];
          else
            iv2 = make_ssa_name (m_limb_type);
          g = gimple_build_assign (iv2, t);
          insert_before (g);
          if (eh)
            {
              maybe_duplicate_eh_stmt (g, stmt);
              if (eh_edge)
                {
                  edge e = split_block (gsi_bb (m_gsi), g);
                  m_gsi = gsi_after_labels (e->dest);
                  make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
                    = profile_probability::very_unlikely ();
                }
            }
          if (conditional)
            {
              tree iv3 = make_ssa_name (m_limb_type);
              if (eh)
                edge_true = find_edge (gsi_bb (m_gsi), edge_false->dest);
              gphi *phi = create_phi_node (iv3, edge_true->dest);
              add_phi_arg (phi, iv2, edge_true, UNKNOWN_LOCATION);
1849 add_phi_arg (phi, build_zero_cst (m_limb_type),
1850 edge_false, UNKNOWN_LOCATION);
1851 m_gsi = gsi_after_labels (edge_true->dest);
1852 }
1853 }
1854 g = gimple_build_assign (make_ssa_name (m_limb_type), RSHIFT_EXPR,
1855 iv, build_int_cst (unsigned_type_node, bo_bit));
1856 insert_before (g);
1857 iv = gimple_assign_lhs (g);
1858 if (iv2)
1859 {
1860 g = gimple_build_assign (make_ssa_name (m_limb_type), LSHIFT_EXPR,
1861 iv2, build_int_cst (unsigned_type_node,
1862 limb_prec - bo_bit));
1863 insert_before (g);
1864 g = gimple_build_assign (make_ssa_name (m_limb_type), BIT_IOR_EXPR,
1865 gimple_assign_lhs (g), iv);
1866 insert_before (g);
1867 iv = gimple_assign_lhs (g);
1868 if (m_data[m_data_cnt])
1869 m_data[m_data_cnt] = iv2;
1870 }
1871 if (tree_fits_uhwi_p (idx))
1872 {
1873 tree atype = limb_access_type (rhs_type, idx);
1874 if (!useless_type_conversion_p (atype, TREE_TYPE (iv)))
1875 iv = add_cast (atype, iv);
1876 }
1877 m_data_cnt += 3;
1878 return iv;
1879 }
1880
1881 normal_load:
1882 /* Use write_p = true for loads with EH edges to make
1883 sure limb_access doesn't add a cast as a separate
1884 statement after it. */
1885 rhs1 = limb_access (rhs_type, rhs1, idx, eh);
1886 tree ret = make_ssa_name (TREE_TYPE (rhs1));
1887 g = gimple_build_assign (ret, rhs1);
1888 insert_before (g);
1889 if (eh)
1890 {
1891 maybe_duplicate_eh_stmt (g, stmt);
1892 if (eh_edge)
1893 {
1894 edge e = split_block (gsi_bb (m_gsi), g);
1895 m_gsi = gsi_after_labels (e->dest);
1896 make_edge (e->src, eh_edge->dest, EDGE_EH)->probability
1897 = profile_probability::very_unlikely ();
1898 }
1899 if (tree_fits_uhwi_p (idx))
1900 {
1901 tree atype = limb_access_type (rhs_type, idx);
1902 if (!useless_type_conversion_p (atype, TREE_TYPE (rhs1)))
1903 ret = add_cast (atype, ret);
1904 }
1905 }
1906 return ret;
1907 }
1908
1909 /* Return a limb IDX from a mergeable statement STMT. */
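/* E.g. for a mergeable statement like c = a ^ b on a large/huge
   _BitInt, limb IDX of the result is computed roughly as (a sketch,
   with the operands lowered limb by limb):
     c[idx] = a[idx] ^ b[idx];
   while PLUS_EXPR/MINUS_EXPR additionally propagate a carry/borrow
   between the limbs via handle_plus_minus.  */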
1910
1911 tree
1912 bitint_large_huge::handle_stmt (gimple *stmt, tree idx)
1913 {
1914 tree lhs, rhs1, rhs2 = NULL_TREE;
1915 gimple *g;
1916 switch (gimple_code (stmt))
1917 {
1918 case GIMPLE_ASSIGN:
1919 if (gimple_assign_load_p (stmt))
1920 return handle_load (stmt, idx);
1921 switch (gimple_assign_rhs_code (stmt))
1922 {
1923 case BIT_AND_EXPR:
1924 case BIT_IOR_EXPR:
1925 case BIT_XOR_EXPR:
1926 rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
1927 /* FALLTHRU */
1928 case BIT_NOT_EXPR:
1929 rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
1930 lhs = make_ssa_name (TREE_TYPE (rhs1));
1931 g = gimple_build_assign (lhs, gimple_assign_rhs_code (stmt),
1932 rhs1, rhs2);
1933 insert_before (g);
1934 return lhs;
1935 case PLUS_EXPR:
1936 case MINUS_EXPR:
1937 rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
1938 rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
1939 return handle_plus_minus (gimple_assign_rhs_code (stmt),
1940 rhs1, rhs2, idx);
1941 case NEGATE_EXPR:
1942 rhs2 = handle_operand (gimple_assign_rhs1 (stmt), idx);
1943 rhs1 = build_zero_cst (TREE_TYPE (rhs2));
1944 return handle_plus_minus (MINUS_EXPR, rhs1, rhs2, idx);
1945 case LSHIFT_EXPR:
1946 return handle_lshift (handle_operand (gimple_assign_rhs1 (stmt),
1947 idx),
1948 gimple_assign_rhs2 (stmt), idx);
1949 case SSA_NAME:
1950 case INTEGER_CST:
1951 return handle_operand (gimple_assign_rhs1 (stmt), idx);
1952 CASE_CONVERT:
1953 case VIEW_CONVERT_EXPR:
1954 return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt)),
1955 gimple_assign_rhs1 (stmt), idx);
1956 default:
1957 break;
1958 }
1959 break;
1960 default:
1961 break;
1962 }
1963 gcc_unreachable ();
1964 }
1965
1966 /* Return minimum precision of OP at STMT.
1967 A positive value means all bits above that precision are zero;
1968 a negative value means all bits above the negated value are
1969 copies of the sign bit. */
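/* E.g. if OP is known to be in [0, 100], this returns 7 (all bits
   above the low 7 are zero); if OP is known to be in [-8, 7], it
   returns -4 (all bits above the low 4 are copies of the sign
   bit).  */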
1970
1971 static int
1972 range_to_prec (tree op, gimple *stmt)
1973 {
1974 int_range_max r;
1975 wide_int w;
1976 tree type = TREE_TYPE (op);
1977 unsigned int prec = TYPE_PRECISION (type);
1978
1979 if (!optimize
1980 || !get_range_query (cfun)->range_of_expr (r, op, stmt)
1981 || r.undefined_p ())
1982 {
1983 if (TYPE_UNSIGNED (type))
1984 return prec;
1985 else
1986 return MIN ((int) -prec, -2);
1987 }
1988
1989 if (!TYPE_UNSIGNED (TREE_TYPE (op)))
1990 {
1991 w = r.lower_bound ();
1992 if (wi::neg_p (w))
1993 {
1994 int min_prec1 = wi::min_precision (w, SIGNED);
1995 w = r.upper_bound ();
1996 int min_prec2 = wi::min_precision (w, SIGNED);
1997 int min_prec = MAX (min_prec1, min_prec2);
1998 return MIN (-min_prec, -2);
1999 }
2000 }
2001
2002 w = r.upper_bound ();
2003 int min_prec = wi::min_precision (w, UNSIGNED);
2004 return MAX (min_prec, 1);
2005 }
2006
2007 /* Return address of the first limb of OP and write into *PREC
2008 its precision. If positive, the operand is zero-extended
2009 from that precision; if negative, the operand is sign-extended
2010 from -*PREC. If PREC_STORED is NULL, this is the toplevel call;
2011 otherwise *PREC_STORED is the precision from the innermost call
2012 without range optimizations. */
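/* E.g. for a plain 32-bit signed int operand and 64-bit limbs (as on
   x86_64), the code below spills it into a one-element limb array,
   roughly (a sketch, not the literal emitted GIMPLE):
     unsigned long long tmp[1];
     tmp[0] = (unsigned long long) (long long) op;  // sign-extend
     return &tmp[0];  // *PREC describes the known precision
   For a two-limb case the upper element is filled from
   op >> limb_prec.  */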
2013
2014 tree
2015 bitint_large_huge::handle_operand_addr (tree op, gimple *stmt,
2016 int *prec_stored, int *prec)
2017 {
2018 wide_int w;
2019 location_t loc_save = m_loc;
2020 if ((TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
2021 || bitint_precision_kind (TREE_TYPE (op)) < bitint_prec_large)
2022 && TREE_CODE (op) != INTEGER_CST)
2023 {
2024 do_int:
2025 *prec = range_to_prec (op, stmt);
2026 bitint_prec_kind kind = bitint_prec_small;
2027 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (op)));
2028 if (TREE_CODE (TREE_TYPE (op)) == BITINT_TYPE)
2029 kind = bitint_precision_kind (TREE_TYPE (op));
2030 if (kind == bitint_prec_middle)
2031 {
2032 tree type = NULL_TREE;
2033 op = maybe_cast_middle_bitint (&m_gsi, op, type);
2034 }
2035 tree op_type = TREE_TYPE (op);
2036 unsigned HOST_WIDE_INT nelts
2037 = CEIL (TYPE_PRECISION (op_type), limb_prec);
2038 /* Add support for 3 or more limbs filled in from a normal
2039 integral type if this assert fails. If no target chooses
2040 a limb mode smaller than half of the largest supported normal
2041 integral type, this will not be needed. */
2042 gcc_assert (nelts <= 2);
2043 if (prec_stored)
2044 *prec_stored = (TYPE_UNSIGNED (op_type)
2045 ? TYPE_PRECISION (op_type)
2046 : -TYPE_PRECISION (op_type));
2047 if (*prec <= limb_prec && *prec >= -limb_prec)
2048 {
2049 nelts = 1;
2050 if (prec_stored)
2051 {
2052 if (TYPE_UNSIGNED (op_type))
2053 {
2054 if (*prec_stored > limb_prec)
2055 *prec_stored = limb_prec;
2056 }
2057 else if (*prec_stored < -limb_prec)
2058 *prec_stored = -limb_prec;
2059 }
2060 }
2061 tree atype = build_array_type_nelts (m_limb_type, nelts);
2062 tree var = create_tmp_var (atype);
2063 tree t1 = op;
2064 if (!useless_type_conversion_p (m_limb_type, op_type))
2065 t1 = add_cast (m_limb_type, t1);
2066 tree v = build4 (ARRAY_REF, m_limb_type, var, size_zero_node,
2067 NULL_TREE, NULL_TREE);
2068 gimple *g = gimple_build_assign (v, t1);
2069 insert_before (g);
2070 if (nelts > 1)
2071 {
2072 tree lp = build_int_cst (unsigned_type_node, limb_prec);
2073 g = gimple_build_assign (make_ssa_name (op_type),
2074 RSHIFT_EXPR, op, lp);
2075 insert_before (g);
2076 tree t2 = gimple_assign_lhs (g);
2077 t2 = add_cast (m_limb_type, t2);
2078 v = build4 (ARRAY_REF, m_limb_type, var, size_one_node,
2079 NULL_TREE, NULL_TREE);
2080 g = gimple_build_assign (v, t2);
2081 insert_before (g);
2082 }
2083 tree ret = build_fold_addr_expr (var);
2084 if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
2085 {
2086 tree clobber = build_clobber (atype, CLOBBER_STORAGE_END);
2087 g = gimple_build_assign (var, clobber);
2088 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
2089 }
2090 m_loc = loc_save;
2091 return ret;
2092 }
2093 switch (TREE_CODE (op))
2094 {
2095 case SSA_NAME:
2096 if (m_names == NULL
2097 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
2098 {
2099 gimple *g = SSA_NAME_DEF_STMT (op);
2100 tree ret;
2101 m_loc = gimple_location (g);
2102 if (gimple_assign_load_p (g))
2103 {
2104 *prec = range_to_prec (op, NULL);
2105 if (prec_stored)
2106 *prec_stored = (TYPE_UNSIGNED (TREE_TYPE (op))
2107 ? TYPE_PRECISION (TREE_TYPE (op))
2108 : -TYPE_PRECISION (TREE_TYPE (op)));
2109 ret = build_fold_addr_expr (gimple_assign_rhs1 (g));
2110 ret = force_gimple_operand_gsi (&m_gsi, ret, true,
2111 NULL_TREE, true, GSI_SAME_STMT);
2112 }
2113 else if (gimple_code (g) == GIMPLE_NOP)
2114 {
2115 *prec = TYPE_UNSIGNED (TREE_TYPE (op)) ? limb_prec : -limb_prec;
2116 if (prec_stored)
2117 *prec_stored = *prec;
2118 tree var = create_tmp_var (m_limb_type);
2119 TREE_ADDRESSABLE (var) = 1;
2120 ret = build_fold_addr_expr (var);
2121 if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
2122 {
2123 tree clobber = build_clobber (m_limb_type,
2124 CLOBBER_STORAGE_END);
2125 g = gimple_build_assign (var, clobber);
2126 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
2127 }
2128 }
2129 else
2130 {
2131 gcc_assert (gimple_assign_cast_p (g));
2132 tree rhs1 = gimple_assign_rhs1 (g);
2133 bitint_prec_kind kind = bitint_prec_small;
2134 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (rhs1)));
2135 if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE)
2136 kind = bitint_precision_kind (TREE_TYPE (rhs1));
2137 if (kind >= bitint_prec_large)
2138 {
2139 tree lhs_type = TREE_TYPE (op);
2140 tree rhs_type = TREE_TYPE (rhs1);
2141 int prec_stored_val = 0;
2142 ret = handle_operand_addr (rhs1, g, &prec_stored_val, prec);
2143 if (TYPE_PRECISION (lhs_type) > TYPE_PRECISION (rhs_type))
2144 {
2145 if (TYPE_UNSIGNED (lhs_type)
2146 && !TYPE_UNSIGNED (rhs_type))
2147 gcc_assert (*prec >= 0 || prec_stored == NULL);
2148 }
2149 else
2150 {
2151 if (*prec > 0 && *prec < TYPE_PRECISION (lhs_type))
2152 ;
2153 else if (TYPE_UNSIGNED (lhs_type))
2154 {
2155 gcc_assert (*prec > 0
2156 || prec_stored_val > 0
2157 || (-prec_stored_val
2158 >= TYPE_PRECISION (lhs_type)));
2159 *prec = TYPE_PRECISION (lhs_type);
2160 }
2161 else if (*prec < 0 && -*prec < TYPE_PRECISION (lhs_type))
2162 ;
2163 else
2164 *prec = -TYPE_PRECISION (lhs_type);
2165 }
2166 }
2167 else
2168 {
2169 op = rhs1;
2170 stmt = g;
2171 goto do_int;
2172 }
2173 }
2174 m_loc = loc_save;
2175 return ret;
2176 }
2177 else
2178 {
2179 int p = var_to_partition (m_map, op);
2180 gcc_assert (m_vars[p] != NULL_TREE);
2181 *prec = range_to_prec (op, stmt);
2182 if (prec_stored)
2183 *prec_stored = (TYPE_UNSIGNED (TREE_TYPE (op))
2184 ? TYPE_PRECISION (TREE_TYPE (op))
2185 : -TYPE_PRECISION (TREE_TYPE (op)));
2186 return build_fold_addr_expr (m_vars[p]);
2187 }
2188 case INTEGER_CST:
2189 unsigned int min_prec, mp;
2190 tree type;
2191 w = wi::to_wide (op);
2192 if (tree_int_cst_sgn (op) >= 0)
2193 {
2194 min_prec = wi::min_precision (w, UNSIGNED);
2195 *prec = MAX (min_prec, 1);
2196 }
2197 else
2198 {
2199 min_prec = wi::min_precision (w, SIGNED);
2200 *prec = MIN ((int) -min_prec, -2);
2201 }
2202 mp = CEIL (min_prec, limb_prec) * limb_prec;
2203 if (mp == 0)
2204 mp = 1;
2205 if (mp >= (unsigned) TYPE_PRECISION (TREE_TYPE (op)))
2206 type = TREE_TYPE (op);
2207 else
2208 type = build_bitint_type (mp, 1);
2209 if (TREE_CODE (type) != BITINT_TYPE
2210 || bitint_precision_kind (type) == bitint_prec_small)
2211 {
2212 if (TYPE_PRECISION (type) <= limb_prec)
2213 type = m_limb_type;
2214 else
2215 /* This case is for targets which e.g. have a 64-bit
2216 limb but categorize up to 128-bit _BitInts as
2217 small. We could use a type like m_limb_type[2] and
2218 similar instead to save space. */
2219 type = build_bitint_type (mid_min_prec, 1);
2220 }
2221 if (prec_stored)
2222 {
2223 if (tree_int_cst_sgn (op) >= 0)
2224 *prec_stored = MAX (TYPE_PRECISION (type), 1);
2225 else
2226 *prec_stored = MIN ((int) -TYPE_PRECISION (type), -2);
2227 }
2228 op = tree_output_constant_def (fold_convert (type, op));
2229 return build_fold_addr_expr (op);
2230 default:
2231 gcc_unreachable ();
2232 }
2233 }
2234
2235 /* Helper function: create a loop before the current location,
2236 starting with a sizetype INIT value on the preheader edge. Return
2237 the PHI result and set *IDX_NEXT to the SSA_NAME it creates and uses
2238 from the latch edge. */
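/* Sketch of the CFG shape this produces (the caller then emits the
   loop body, the computation of *IDX_NEXT and the latch test):
     preheader:
       goto header;
     header:
       idx = PHI <INIT (preheader), idx_next (latch)>
       ... caller-emitted body ...
       if (cond)       // caller-emitted latch test
         goto header;  // latch edge
     exit:  */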
2239
2240 tree
2241 bitint_large_huge::create_loop (tree init, tree *idx_next)
2242 {
2243 if (!gsi_end_p (m_gsi))
2244 gsi_prev (&m_gsi);
2245 else
2246 m_gsi = gsi_last_bb (gsi_bb (m_gsi));
2247 edge e1 = split_block (gsi_bb (m_gsi), gsi_stmt (m_gsi));
2248 edge e2 = split_block (e1->dest, (gimple *) NULL);
2249 edge e3 = make_edge (e1->dest, e1->dest, EDGE_TRUE_VALUE);
2250 e3->probability = profile_probability::very_unlikely ();
2251 e2->flags = EDGE_FALSE_VALUE;
2252 e2->probability = e3->probability.invert ();
2253 tree idx = make_ssa_name (sizetype);
2254 gphi *phi = create_phi_node (idx, e1->dest);
2255 add_phi_arg (phi, init, e1, UNKNOWN_LOCATION);
2256 *idx_next = make_ssa_name (sizetype);
2257 add_phi_arg (phi, *idx_next, e3, UNKNOWN_LOCATION);
2258 m_gsi = gsi_after_labels (e1->dest);
2259 m_bb = e1->dest;
2260 m_preheader_bb = e1->src;
2261 class loop *loop = alloc_loop ();
2262 loop->header = e1->dest;
2263 add_loop (loop, e1->src->loop_father);
2264 return idx;
2265 }
2266
2267 /* Lower a mergeable or similar large/huge _BitInt statement STMT which can
2268 be lowered by iterating from the least significant limb up to the most
2269 significant limb. For large _BitInt it is emitted as straight line code
2270 before the current location, for huge _BitInt as a loop handling two limbs
2271 at once, followed by straight line code for the remaining limbs (at most
2272 one full and one partial limb). It can also handle EQ_EXPR/NE_EXPR
2273 comparisons, in which case CMP_CODE should be the comparison code and
2274 CMP_OP1/CMP_OP2 the comparison operands. */
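/* E.g. for an unsigned _BitInt(255) copy with 64-bit limbs, rem is
   127 and end is 2: a loop handles limbs 0 and 1 (two limbs per
   iteration), then limb 2 and the partial most significant limb 3
   are handled in straight line code.  */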
2275
2276 tree
2277 bitint_large_huge::lower_mergeable_stmt (gimple *stmt, tree_code &cmp_code,
2278 tree cmp_op1, tree cmp_op2)
2279 {
2280 bool eq_p = cmp_code != ERROR_MARK;
2281 tree type;
2282 if (eq_p)
2283 type = TREE_TYPE (cmp_op1);
2284 else
2285 type = TREE_TYPE (gimple_assign_lhs (stmt));
2286 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
2287 bitint_prec_kind kind = bitint_precision_kind (type);
2288 gcc_assert (kind >= bitint_prec_large);
2289 gimple *g;
2290 tree lhs = gimple_get_lhs (stmt);
2291 tree rhs1, lhs_type = lhs ? TREE_TYPE (lhs) : NULL_TREE;
2292 if (lhs
2293 && TREE_CODE (lhs) == SSA_NAME
2294 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
2295 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
2296 {
2297 int p = var_to_partition (m_map, lhs);
2298 gcc_assert (m_vars[p] != NULL_TREE);
2299 m_lhs = lhs = m_vars[p];
2300 }
2301 unsigned cnt, rem = 0, end = 0, prec = TYPE_PRECISION (type);
2302 bool sext = false;
2303 tree ext = NULL_TREE, store_operand = NULL_TREE;
2304 bool eh = false;
2305 basic_block eh_pad = NULL;
2306 tree nlhs = NULL_TREE;
2307 unsigned HOST_WIDE_INT bo_idx = 0;
2308 unsigned HOST_WIDE_INT bo_bit = 0;
2309 tree bf_cur = NULL_TREE, bf_next = NULL_TREE;
2310 if (gimple_store_p (stmt))
2311 {
2312 store_operand = gimple_assign_rhs1 (stmt);
2313 eh = stmt_ends_bb_p (stmt);
2314 if (eh)
2315 {
2316 edge e;
2317 edge_iterator ei;
2318 basic_block bb = gimple_bb (stmt);
2319
2320 FOR_EACH_EDGE (e, ei, bb->succs)
2321 if (e->flags & EDGE_EH)
2322 {
2323 eh_pad = e->dest;
2324 break;
2325 }
2326 }
2327 if (TREE_CODE (lhs) == COMPONENT_REF
2328 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
2329 {
2330 tree fld = TREE_OPERAND (lhs, 1);
2331 gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
2332 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
2333 poly_int64 bitoffset;
2334 poly_uint64 field_offset, repr_offset;
2335 if ((tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
2336 nlhs = lhs;
2337 else
2338 {
2339 bool var_field_off = false;
2340 if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
2341 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
2342 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
2343 else
2344 {
2345 bitoffset = 0;
2346 var_field_off = true;
2347 }
2348 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
2349 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
2350 nlhs = build3 (COMPONENT_REF, TREE_TYPE (repr),
2351 TREE_OPERAND (lhs, 0), repr,
2352 var_field_off
2353 ? TREE_OPERAND (lhs, 2) : NULL_TREE);
2354 HOST_WIDE_INT bo = bitoffset.to_constant ();
2355 bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
2356 bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
2357 }
2358 }
2359 }
2360 if ((store_operand
2361 && TREE_CODE (store_operand) == SSA_NAME
2362 && (m_names == NULL
2363 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (store_operand)))
2364 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (store_operand)))
2365 || gimple_assign_cast_p (stmt))
2366 {
2367 rhs1 = gimple_assign_rhs1 (store_operand
2368 ? SSA_NAME_DEF_STMT (store_operand)
2369 : stmt);
2370 /* Optimize mergeable ops ending with widening cast to _BitInt
2371 (or followed by store). We can lower just the limbs of the
2372 cast operand and widen afterwards. */
2373 if (TREE_CODE (rhs1) == SSA_NAME
2374 && (m_names == NULL
2375 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1)))
2376 && TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
2377 && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
2378 && (CEIL ((unsigned) TYPE_PRECISION (TREE_TYPE (rhs1)),
2379 limb_prec) < CEIL (prec, limb_prec)
2380 || (kind == bitint_prec_huge
2381 && TYPE_PRECISION (TREE_TYPE (rhs1)) < prec)))
2382 {
2383 store_operand = rhs1;
2384 prec = TYPE_PRECISION (TREE_TYPE (rhs1));
2385 kind = bitint_precision_kind (TREE_TYPE (rhs1));
2386 if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2387 sext = true;
2388 }
2389 }
2390 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
2391 if (kind == bitint_prec_large)
2392 cnt = CEIL (prec, limb_prec);
2393 else
2394 {
2395 rem = (prec % (2 * limb_prec));
2396 end = (prec - rem) / limb_prec;
2397 cnt = 2 + CEIL (rem, limb_prec);
2398 idx = idx_first = create_loop (size_zero_node, &idx_next);
2399 }
2400
2401 basic_block edge_bb = NULL;
2402 if (eq_p)
2403 {
2404 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2405 gsi_prev (&gsi);
2406 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
2407 edge_bb = e->src;
2408 if (kind == bitint_prec_large)
2409 m_gsi = gsi_end_bb (edge_bb);
2410 }
2411 else
2412 m_after_stmt = stmt;
2413 if (kind != bitint_prec_large)
2414 m_upwards_2limb = end;
2415 m_upwards = true;
2416
2417 bool separate_ext
2418 = (prec != (unsigned) TYPE_PRECISION (type)
2419 && (CEIL ((unsigned) TYPE_PRECISION (type), limb_prec)
2420 > CEIL (prec, limb_prec)));
2421
2422 for (unsigned i = 0; i < cnt; i++)
2423 {
2424 m_data_cnt = 0;
2425 if (kind == bitint_prec_large)
2426 idx = size_int (i);
2427 else if (i >= 2)
2428 idx = size_int (end + (i > 2));
2429 if (eq_p)
2430 {
2431 rhs1 = handle_operand (cmp_op1, idx);
2432 tree rhs2 = handle_operand (cmp_op2, idx);
2433 g = gimple_build_cond (NE_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2434 insert_before (g);
2435 edge e1 = split_block (gsi_bb (m_gsi), g);
2436 e1->flags = EDGE_FALSE_VALUE;
2437 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2438 e1->probability = profile_probability::unlikely ();
2439 e2->probability = e1->probability.invert ();
2440 if (i == 0)
2441 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
2442 m_gsi = gsi_after_labels (e1->dest);
2443 }
2444 else
2445 {
2446 if (store_operand)
2447 rhs1 = handle_operand (store_operand, idx);
2448 else
2449 rhs1 = handle_stmt (stmt, idx);
2450 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
2451 rhs1 = add_cast (m_limb_type, rhs1);
2452 if (sext && i == cnt - 1)
2453 ext = rhs1;
2454 tree nidx = idx;
2455 if (bo_idx)
2456 {
2457 if (tree_fits_uhwi_p (idx))
2458 nidx = size_int (tree_to_uhwi (idx) + bo_idx);
2459 else
2460 {
2461 nidx = make_ssa_name (sizetype);
2462 g = gimple_build_assign (nidx, PLUS_EXPR, idx,
2463 size_int (bo_idx));
2464 insert_before (g);
2465 }
2466 }
2467 bool done = false;
2468 basic_block new_bb = NULL;
2469 /* Handle stores into bit-fields. */
2470 if (bo_bit)
2471 {
2472 if (i == 0)
2473 {
2474 edge e2 = NULL;
2475 if (kind != bitint_prec_large)
2476 {
2477 prepare_data_in_out (build_zero_cst (m_limb_type),
2478 idx, &bf_next);
2479 bf_next = m_data.pop ();
2480 bf_cur = m_data.pop ();
2481 g = gimple_build_cond (EQ_EXPR, idx, size_zero_node,
2482 NULL_TREE, NULL_TREE);
2483 edge edge_true;
2484 if_then_else (g, profile_probability::unlikely (),
2485 edge_true, e2);
2486 new_bb = e2->dest;
2487 }
2488 tree ftype
2489 = build_nonstandard_integer_type (limb_prec - bo_bit, 1);
2490 tree bfr = build3 (BIT_FIELD_REF, ftype, unshare_expr (nlhs),
2491 bitsize_int (limb_prec - bo_bit),
2492 bitsize_int (bo_idx * limb_prec + bo_bit));
2493 tree t = add_cast (ftype, rhs1);
2494 g = gimple_build_assign (bfr, t);
2495 insert_before (g);
2496 if (eh)
2497 {
2498 maybe_duplicate_eh_stmt (g, stmt);
2499 if (eh_pad)
2500 {
2501 edge e = split_block (gsi_bb (m_gsi), g);
2502 m_gsi = gsi_after_labels (e->dest);
2503 make_edge (e->src, eh_pad, EDGE_EH)->probability
2504 = profile_probability::very_unlikely ();
2505 }
2506 }
2507 if (kind == bitint_prec_large)
2508 {
2509 bf_cur = rhs1;
2510 done = true;
2511 }
2512 else if (e2)
2513 m_gsi = gsi_after_labels (e2->src);
2514 }
2515 if (!done)
2516 {
2517 tree t1 = make_ssa_name (m_limb_type);
2518 tree t2 = make_ssa_name (m_limb_type);
2519 tree t3 = make_ssa_name (m_limb_type);
2520 g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
2521 build_int_cst (unsigned_type_node,
2522 limb_prec - bo_bit));
2523 insert_before (g);
2524 g = gimple_build_assign (t2, LSHIFT_EXPR, rhs1,
2525 build_int_cst (unsigned_type_node,
2526 bo_bit));
2527 insert_before (g);
2528 bf_cur = rhs1;
2529 g = gimple_build_assign (t3, BIT_IOR_EXPR, t1, t2);
2530 insert_before (g);
2531 rhs1 = t3;
2532 if (bf_next && i == 1)
2533 {
2534 g = gimple_build_assign (bf_next, bf_cur);
2535 insert_before (g);
2536 }
2537 }
2538 }
2539 if (!done)
2540 {
2541 /* Handle bit-field access to partial last limb if needed. */
2542 if (nlhs
2543 && i == cnt - 1
2544 && !separate_ext
2545 && tree_fits_uhwi_p (idx))
2546 {
2547 unsigned int tprec = TYPE_PRECISION (type);
2548 unsigned int rprec = tprec % limb_prec;
2549 if (rprec + bo_bit < (unsigned) limb_prec)
2550 {
2551 tree ftype
2552 = build_nonstandard_integer_type (rprec + bo_bit, 1);
2553 tree bfr = build3 (BIT_FIELD_REF, ftype,
2554 unshare_expr (nlhs),
2555 bitsize_int (rprec + bo_bit),
2556 bitsize_int ((bo_idx
2557 + tprec / limb_prec)
2558 * limb_prec));
2559 tree t = add_cast (ftype, rhs1);
2560 g = gimple_build_assign (bfr, t);
2561 done = true;
2562 bf_cur = NULL_TREE;
2563 }
2564 else if (rprec + bo_bit == (unsigned) limb_prec)
2565 bf_cur = NULL_TREE;
2566 }
2567 /* Otherwise, stores to any other lhs. */
2568 if (!done)
2569 {
2570 tree l = limb_access (lhs_type, nlhs ? nlhs : lhs,
2571 nidx, true);
2572 g = gimple_build_assign (l, rhs1);
2573 }
2574 insert_before (g);
2575 if (eh)
2576 {
2577 maybe_duplicate_eh_stmt (g, stmt);
2578 if (eh_pad)
2579 {
2580 edge e = split_block (gsi_bb (m_gsi), g);
2581 m_gsi = gsi_after_labels (e->dest);
2582 make_edge (e->src, eh_pad, EDGE_EH)->probability
2583 = profile_probability::very_unlikely ();
2584 }
2585 }
2586 if (new_bb)
2587 m_gsi = gsi_after_labels (new_bb);
2588 }
2589 }
2590 m_first = false;
2591 if (kind == bitint_prec_huge && i <= 1)
2592 {
2593 if (i == 0)
2594 {
2595 idx = make_ssa_name (sizetype);
2596 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
2597 size_one_node);
2598 insert_before (g);
2599 }
2600 else
2601 {
2602 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
2603 size_int (2));
2604 insert_before (g);
2605 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
2606 NULL_TREE, NULL_TREE);
2607 insert_before (g);
2608 if (eq_p)
2609 m_gsi = gsi_after_labels (edge_bb);
2610 else
2611 m_gsi = gsi_for_stmt (stmt);
2612 m_bb = NULL;
2613 }
2614 }
2615 }
2616
2617 if (separate_ext)
2618 {
2619 if (sext)
2620 {
2621 ext = add_cast (signed_type_for (m_limb_type), ext);
2622 tree lpm1 = build_int_cst (unsigned_type_node,
2623 limb_prec - 1);
2624 tree n = make_ssa_name (TREE_TYPE (ext));
2625 g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
2626 insert_before (g);
2627 ext = add_cast (m_limb_type, n);
2628 }
2629 else
2630 ext = build_zero_cst (m_limb_type);
2631 kind = bitint_precision_kind (type);
2632 unsigned start = CEIL (prec, limb_prec);
2633 prec = TYPE_PRECISION (type);
2634 idx = idx_first = idx_next = NULL_TREE;
2635 if (prec <= (start + 2 + (bo_bit != 0)) * limb_prec)
2636 kind = bitint_prec_large;
2637 if (kind == bitint_prec_large)
2638 cnt = CEIL (prec, limb_prec) - start;
2639 else
2640 {
2641 rem = prec % limb_prec;
2642 end = (prec - rem) / limb_prec;
2643 cnt = (bo_bit != 0) + 1 + (rem != 0);
2644 }
2645 for (unsigned i = 0; i < cnt; i++)
2646 {
2647 if (kind == bitint_prec_large || (i == 0 && bo_bit != 0))
2648 idx = size_int (start + i);
2649 else if (i == cnt - 1 && (rem != 0))
2650 idx = size_int (end);
2651 else if (i == (bo_bit != 0))
2652 idx = create_loop (size_int (start + i), &idx_next);
2653 rhs1 = ext;
2654 if (bf_cur != NULL_TREE && bf_cur != ext)
2655 {
2656 tree t1 = make_ssa_name (m_limb_type);
2657 g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
2658 build_int_cst (unsigned_type_node,
2659 limb_prec - bo_bit));
2660 insert_before (g);
2661 if (integer_zerop (ext))
2662 rhs1 = t1;
2663 else
2664 {
2665 tree t2 = make_ssa_name (m_limb_type);
2666 rhs1 = make_ssa_name (m_limb_type);
2667 g = gimple_build_assign (t2, LSHIFT_EXPR, ext,
2668 build_int_cst (unsigned_type_node,
2669 bo_bit));
2670 insert_before (g);
2671 g = gimple_build_assign (rhs1, BIT_IOR_EXPR, t1, t2);
2672 insert_before (g);
2673 }
2674 bf_cur = ext;
2675 }
2676 tree nidx = idx;
2677 if (bo_idx)
2678 {
2679 if (tree_fits_uhwi_p (idx))
2680 nidx = size_int (tree_to_uhwi (idx) + bo_idx);
2681 else
2682 {
2683 nidx = make_ssa_name (sizetype);
2684 g = gimple_build_assign (nidx, PLUS_EXPR, idx,
2685 size_int (bo_idx));
2686 insert_before (g);
2687 }
2688 }
2689 bool done = false;
2690 /* Handle bit-field access to partial last limb if needed. */
2691 if (nlhs && i == cnt - 1)
2692 {
2693 unsigned int tprec = TYPE_PRECISION (type);
2694 unsigned int rprec = tprec % limb_prec;
2695 if (rprec + bo_bit < (unsigned) limb_prec)
2696 {
2697 tree ftype
2698 = build_nonstandard_integer_type (rprec + bo_bit, 1);
2699 tree bfr = build3 (BIT_FIELD_REF, ftype,
2700 unshare_expr (nlhs),
2701 bitsize_int (rprec + bo_bit),
2702 bitsize_int ((bo_idx + tprec / limb_prec)
2703 * limb_prec));
2704 tree t = add_cast (ftype, rhs1);
2705 g = gimple_build_assign (bfr, t);
2706 done = true;
2707 bf_cur = NULL_TREE;
2708 }
2709 else if (rprec + bo_bit == (unsigned) limb_prec)
2710 bf_cur = NULL_TREE;
2711 }
2712 /* Otherwise, stores to any other lhs. */
2713 if (!done)
2714 {
2715 tree l = limb_access (lhs_type, nlhs ? nlhs : lhs, nidx, true);
2716 g = gimple_build_assign (l, rhs1);
2717 }
2718 insert_before (g);
2719 if (eh)
2720 {
2721 maybe_duplicate_eh_stmt (g, stmt);
2722 if (eh_pad)
2723 {
2724 edge e = split_block (gsi_bb (m_gsi), g);
2725 m_gsi = gsi_after_labels (e->dest);
2726 make_edge (e->src, eh_pad, EDGE_EH)->probability
2727 = profile_probability::very_unlikely ();
2728 }
2729 }
2730 if (kind == bitint_prec_huge && i == (bo_bit != 0))
2731 {
2732 g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
2733 size_one_node);
2734 insert_before (g);
2735 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
2736 NULL_TREE, NULL_TREE);
2737 insert_before (g);
2738 m_gsi = gsi_for_stmt (stmt);
2739 m_bb = NULL;
2740 }
2741 }
2742 }
2743 if (bf_cur != NULL_TREE)
2744 {
2745 unsigned int tprec = TYPE_PRECISION (type);
2746 unsigned int rprec = tprec % limb_prec;
2747 tree ftype = build_nonstandard_integer_type (rprec + bo_bit, 1);
2748 tree bfr = build3 (BIT_FIELD_REF, ftype, unshare_expr (nlhs),
2749 bitsize_int (rprec + bo_bit),
2750 bitsize_int ((bo_idx + tprec / limb_prec)
2751 * limb_prec));
2752 rhs1 = bf_cur;
2753 if (bf_cur != ext)
2754 {
2755 rhs1 = make_ssa_name (TREE_TYPE (rhs1));
2756 g = gimple_build_assign (rhs1, RSHIFT_EXPR, bf_cur,
2757 build_int_cst (unsigned_type_node,
2758 limb_prec - bo_bit));
2759 insert_before (g);
2760 }
2761 rhs1 = add_cast (ftype, rhs1);
2762 g = gimple_build_assign (bfr, rhs1);
2763 insert_before (g);
2764 if (eh)
2765 {
2766 maybe_duplicate_eh_stmt (g, stmt);
2767 if (eh_pad)
2768 {
2769 edge e = split_block (gsi_bb (m_gsi), g);
2770 m_gsi = gsi_after_labels (e->dest);
2771 make_edge (e->src, eh_pad, EDGE_EH)->probability
2772 = profile_probability::very_unlikely ();
2773 }
2774 }
2775 }
2776
2777 if (gimple_store_p (stmt))
2778 {
2779 unlink_stmt_vdef (stmt);
2780 release_ssa_name (gimple_vdef (stmt));
2781 gsi_remove (&m_gsi, true);
2782 }
2783 if (eq_p)
2784 {
2785 lhs = make_ssa_name (boolean_type_node);
2786 basic_block bb = gimple_bb (stmt);
2787 gphi *phi = create_phi_node (lhs, bb);
2788 edge e = find_edge (gsi_bb (m_gsi), bb);
2789 unsigned int n = EDGE_COUNT (bb->preds);
2790 for (unsigned int i = 0; i < n; i++)
2791 {
2792 edge e2 = EDGE_PRED (bb, i);
2793 add_phi_arg (phi, e == e2 ? boolean_true_node : boolean_false_node,
2794 e2, UNKNOWN_LOCATION);
2795 }
2796 cmp_code = cmp_code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2797 return lhs;
2798 }
2799 else
2800 return NULL_TREE;
2801 }
2802
2803 /* Handle a large/huge _BitInt comparison statement STMT other than
2804 EQ_EXPR/NE_EXPR. CMP_CODE, CMP_OP1 and CMP_OP2 have the same meaning
2805 as in lower_mergeable_stmt. The {GT,GE,LT,LE}_EXPR comparisons are
2806 lowered by iterating from the most significant limb downwards to
2807 the least significant one, for large _BitInt in straight line code,
2808 otherwise with the most significant limb handled in
2809 straight line code followed by a loop handling one limb at a time.
2810 Comparisons with unsigned huge _BitInt with precisions which are
2811 multiples of the limb precision can use just the loop and don't need
2812 to handle the most significant limb before the loop. The loop or
2813 straight line code jumps to the final basic block if a particular
2814 pair of limbs is not equal. */
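/* Illustrative C sketch of the lowering of a <= b for signed _BitInt
   with 64-bit limbs (not the literal emitted GIMPLE):
     size_t idx = nlimbs - 1;
     // Most significant limbs are compared as signed.
     if ((signed long long) a[idx] > (signed long long) b[idx])
       goto result_false;
     if ((signed long long) a[idx] < (signed long long) b[idx])
       goto result_true;
     while (idx-- > 0)
       {
         if (a[idx] > b[idx]) goto result_false;  // unsigned compares
         if (a[idx] < b[idx]) goto result_true;
       }
     goto result_true;  // all limbs equal, a <= b holds  */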
2815
2816 tree
2817 bitint_large_huge::lower_comparison_stmt (gimple *stmt, tree_code &cmp_code,
2818 tree cmp_op1, tree cmp_op2)
2819 {
2820 tree type = TREE_TYPE (cmp_op1);
2821 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
2822 bitint_prec_kind kind = bitint_precision_kind (type);
2823 gcc_assert (kind >= bitint_prec_large);
2824 gimple *g;
2825 if (!TYPE_UNSIGNED (type)
2826 && integer_zerop (cmp_op2)
2827 && (cmp_code == GE_EXPR || cmp_code == LT_EXPR))
2828 {
2829 unsigned end = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec) - 1;
2830 tree idx = size_int (end);
2831 m_data_cnt = 0;
2832 tree rhs1 = handle_operand (cmp_op1, idx);
2833 if (TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2834 {
2835 tree stype = signed_type_for (TREE_TYPE (rhs1));
2836 rhs1 = add_cast (stype, rhs1);
2837 }
2838 tree lhs = make_ssa_name (boolean_type_node);
2839 g = gimple_build_assign (lhs, cmp_code, rhs1,
2840 build_zero_cst (TREE_TYPE (rhs1)));
2841 insert_before (g);
2842 cmp_code = NE_EXPR;
2843 return lhs;
2844 }
2845
2846 unsigned cnt, rem = 0, end = 0;
2847 tree idx = NULL_TREE, idx_next = NULL_TREE;
2848 if (kind == bitint_prec_large)
2849 cnt = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec);
2850 else
2851 {
2852 rem = ((unsigned) TYPE_PRECISION (type) % limb_prec);
2853 if (rem == 0 && !TYPE_UNSIGNED (type))
2854 rem = limb_prec;
2855 end = ((unsigned) TYPE_PRECISION (type) - rem) / limb_prec;
2856 cnt = 1 + (rem != 0);
2857 }
2858
2859 basic_block edge_bb = NULL;
2860 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2861 gsi_prev (&gsi);
2862 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
2863 edge_bb = e->src;
2864 m_gsi = gsi_end_bb (edge_bb);
2865
2866 edge *edges = XALLOCAVEC (edge, cnt * 2);
2867 for (unsigned i = 0; i < cnt; i++)
2868 {
2869 m_data_cnt = 0;
2870 if (kind == bitint_prec_large)
2871 idx = size_int (cnt - i - 1);
2872 else if (i == cnt - 1)
2873 idx = create_loop (size_int (end - 1), &idx_next);
2874 else
2875 idx = size_int (end);
2876 tree rhs1 = handle_operand (cmp_op1, idx);
2877 tree rhs2 = handle_operand (cmp_op2, idx);
2878 if (i == 0
2879 && !TYPE_UNSIGNED (type)
2880 && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2881 {
2882 tree stype = signed_type_for (TREE_TYPE (rhs1));
2883 rhs1 = add_cast (stype, rhs1);
2884 rhs2 = add_cast (stype, rhs2);
2885 }
2886 g = gimple_build_cond (GT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2887 insert_before (g);
2888 edge e1 = split_block (gsi_bb (m_gsi), g);
2889 e1->flags = EDGE_FALSE_VALUE;
2890 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2891 e1->probability = profile_probability::likely ();
2892 e2->probability = e1->probability.invert ();
2893 if (i == 0)
2894 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
2895 m_gsi = gsi_after_labels (e1->dest);
2896 edges[2 * i] = e2;
2897 g = gimple_build_cond (LT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2898 insert_before (g);
2899 e1 = split_block (gsi_bb (m_gsi), g);
2900 e1->flags = EDGE_FALSE_VALUE;
2901 e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2902 e1->probability = profile_probability::unlikely ();
2903 e2->probability = e1->probability.invert ();
2904 m_gsi = gsi_after_labels (e1->dest);
2905 edges[2 * i + 1] = e2;
2906 m_first = false;
2907 if (kind == bitint_prec_huge && i == cnt - 1)
2908 {
2909 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
2910 insert_before (g);
2911 g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
2912 NULL_TREE, NULL_TREE);
2913 insert_before (g);
2914 edge true_edge, false_edge;
2915 extract_true_false_edges_from_block (gsi_bb (m_gsi),
2916 &true_edge, &false_edge);
2917 m_gsi = gsi_after_labels (false_edge->dest);
2918 m_bb = NULL;
2919 }
2920 }
2921
2922 tree lhs = make_ssa_name (boolean_type_node);
2923 basic_block bb = gimple_bb (stmt);
2924 gphi *phi = create_phi_node (lhs, bb);
2925 for (unsigned int i = 0; i < cnt * 2; i++)
2926 {
2927 tree val = ((cmp_code == GT_EXPR || cmp_code == GE_EXPR)
2928 ^ (i & 1)) ? boolean_true_node : boolean_false_node;
2929 add_phi_arg (phi, val, edges[i], UNKNOWN_LOCATION);
2930 }
2931 add_phi_arg (phi, (cmp_code == GE_EXPR || cmp_code == LE_EXPR)
2932 ? boolean_true_node : boolean_false_node,
2933 find_edge (gsi_bb (m_gsi), bb), UNKNOWN_LOCATION);
2934 cmp_code = NE_EXPR;
2935 return lhs;
2936 }
2937
2938 /* Lower large/huge _BitInt left and right shifts except for left
2939 shifts by a constant smaller than limb_prec. */
2940
2941 void
2942 bitint_large_huge::lower_shift_stmt (tree obj, gimple *stmt)
2943 {
2944 tree rhs1 = gimple_assign_rhs1 (stmt);
2945 tree lhs = gimple_assign_lhs (stmt);
2946 tree_code rhs_code = gimple_assign_rhs_code (stmt);
2947 tree type = TREE_TYPE (rhs1);
2948 gimple *final_stmt = gsi_stmt (m_gsi);
2949 gcc_assert (TREE_CODE (type) == BITINT_TYPE
2950 && bitint_precision_kind (type) >= bitint_prec_large);
2951 int prec = TYPE_PRECISION (type);
2952 tree n = gimple_assign_rhs2 (stmt), n1, n2, n3, n4;
2953 gimple *g;
2954 if (obj == NULL_TREE)
2955 {
2956 int part = var_to_partition (m_map, lhs);
2957 gcc_assert (m_vars[part] != NULL_TREE);
2958 obj = m_vars[part];
2959 }
2960 /* Preparation code common for both left and right shifts.
2961 unsigned n1 = n % limb_prec;
2962 size_t n2 = n / limb_prec;
2963 size_t n3 = n1 != 0;
2964 unsigned n4 = (limb_prec - n1) % limb_prec;
2965 (for power of 2 limb_prec, n4 can be -n1 & (limb_prec - 1)). */
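/* E.g. for n == 135 with 64-bit limbs: n1 == 7, n2 == 2, n3 == 1
   and n4 == 57, i.e. each result limb combines bits from two
   adjacent source limbs shifted by 7 and 57.  */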
2966 if (TREE_CODE (n) == INTEGER_CST)
2967 {
2968 tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
2969 n1 = int_const_binop (TRUNC_MOD_EXPR, n, lp);
2970 n2 = fold_convert (sizetype, int_const_binop (TRUNC_DIV_EXPR, n, lp));
2971 n3 = size_int (!integer_zerop (n1));
2972 n4 = int_const_binop (TRUNC_MOD_EXPR,
2973 int_const_binop (MINUS_EXPR, lp, n1), lp);
2974 }
2975 else
2976 {
2977 n1 = make_ssa_name (TREE_TYPE (n));
2978 n2 = make_ssa_name (sizetype);
2979 n3 = make_ssa_name (sizetype);
2980 n4 = make_ssa_name (TREE_TYPE (n));
2981 if (pow2p_hwi (limb_prec))
2982 {
2983 tree lpm1 = build_int_cst (TREE_TYPE (n), limb_prec - 1);
2984 g = gimple_build_assign (n1, BIT_AND_EXPR, n, lpm1);
2985 insert_before (g);
2986 g = gimple_build_assign (useless_type_conversion_p (sizetype,
2987 TREE_TYPE (n))
2988 ? n2 : make_ssa_name (TREE_TYPE (n)),
2989 RSHIFT_EXPR, n,
2990 build_int_cst (TREE_TYPE (n),
2991 exact_log2 (limb_prec)));
2992 insert_before (g);
2993 if (gimple_assign_lhs (g) != n2)
2994 {
2995 g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
2996 insert_before (g);
2997 }
2998 g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
2999 NEGATE_EXPR, n1);
3000 insert_before (g);
3001 g = gimple_build_assign (n4, BIT_AND_EXPR, gimple_assign_lhs (g),
3002 lpm1);
3003 insert_before (g);
3004 }
3005 else
3006 {
3007 tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
3008 g = gimple_build_assign (n1, TRUNC_MOD_EXPR, n, lp);
3009 insert_before (g);
3010 g = gimple_build_assign (useless_type_conversion_p (sizetype,
3011 TREE_TYPE (n))
3012 ? n2 : make_ssa_name (TREE_TYPE (n)),
3013 TRUNC_DIV_EXPR, n, lp);
3014 insert_before (g);
3015 if (gimple_assign_lhs (g) != n2)
3016 {
3017 g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
3018 insert_before (g);
3019 }
3020 g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
3021 MINUS_EXPR, lp, n1);
3022 insert_before (g);
3023 g = gimple_build_assign (n4, TRUNC_MOD_EXPR, gimple_assign_lhs (g),
3024 lp);
3025 insert_before (g);
3026 }
3027 g = gimple_build_assign (make_ssa_name (boolean_type_node), NE_EXPR, n1,
3028 build_zero_cst (TREE_TYPE (n)));
3029 insert_before (g);
3030 g = gimple_build_assign (n3, NOP_EXPR, gimple_assign_lhs (g));
3031 insert_before (g);
3032 }
3033 tree p = build_int_cst (sizetype,
3034 prec / limb_prec - (prec % limb_prec == 0));
3035 if (rhs_code == RSHIFT_EXPR)
3036 {
3037 /* Lower
3038 dst = src >> n;
3039 as
3040 unsigned n1 = n % limb_prec;
3041 size_t n2 = n / limb_prec;
3042 size_t n3 = n1 != 0;
3043 unsigned n4 = (limb_prec - n1) % limb_prec;
3044 size_t idx;
3045 size_t p = prec / limb_prec - (prec % limb_prec == 0);
3046 int signed_p = (typeof (src) -1) < 0;
3047 for (idx = n2; idx < ((!signed_p && (prec % limb_prec == 0))
3048 ? p : p - n3); ++idx)
3049 dst[idx - n2] = (src[idx] >> n1) | (src[idx + n3] << n4);
3050 limb_type ext;
3051 if (prec % limb_prec == 0)
3052 ext = src[p];
3053 else if (signed_p)
3054 ext = ((signed limb_type) (src[p] << (limb_prec
3055 - (prec % limb_prec))))
3056 >> (limb_prec - (prec % limb_prec));
3057 else
3058 ext = src[p] & (((limb_type) 1 << (prec % limb_prec)) - 1);
3059 if (!signed_p && (prec % limb_prec == 0))
3060 ;
3061 else if (idx < p)
3062 {
3063 dst[idx - n2] = (src[idx] >> n1) | (ext << n4);
3064 ++idx;
3065 }
3066 idx -= n2;
3067 if (signed_p)
3068 {
3069 dst[idx] = ((signed limb_type) ext) >> n1;
3070 ext = ((signed limb_type) ext) >> (limb_prec - 1);
3071 }
3072 else
3073 {
3074 dst[idx] = ext >> n1;
3075 ext = 0;
3076 }
3077 for (++idx; idx <= p; ++idx)
3078 dst[idx] = ext; */
3079 tree pmn3;
3080 if (TYPE_UNSIGNED (type) && prec % limb_prec == 0)
3081 pmn3 = p;
3082 else if (TREE_CODE (n3) == INTEGER_CST)
3083 pmn3 = int_const_binop (MINUS_EXPR, p, n3);
3084 else
3085 {
3086 pmn3 = make_ssa_name (sizetype);
3087 g = gimple_build_assign (pmn3, MINUS_EXPR, p, n3);
3088 insert_before (g);
3089 }
3090 g = gimple_build_cond (LT_EXPR, n2, pmn3, NULL_TREE, NULL_TREE);
3091 edge edge_true, edge_false;
3092 if_then (g, profile_probability::likely (), edge_true, edge_false);
3093 tree idx_next;
3094 tree idx = create_loop (n2, &idx_next);
3095 tree idxmn2 = make_ssa_name (sizetype);
3096 tree idxpn3 = make_ssa_name (sizetype);
3097 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3098 insert_before (g);
3099 g = gimple_build_assign (idxpn3, PLUS_EXPR, idx, n3);
3100 insert_before (g);
3101 m_data_cnt = 0;
3102 tree t1 = handle_operand (rhs1, idx);
3103 m_first = false;
3104 g = gimple_build_assign (make_ssa_name (m_limb_type),
3105 RSHIFT_EXPR, t1, n1);
3106 insert_before (g);
3107 t1 = gimple_assign_lhs (g);
3108 if (!integer_zerop (n3))
3109 {
3110 m_data_cnt = 0;
3111 tree t2 = handle_operand (rhs1, idxpn3);
3112 g = gimple_build_assign (make_ssa_name (m_limb_type),
3113 LSHIFT_EXPR, t2, n4);
3114 insert_before (g);
3115 t2 = gimple_assign_lhs (g);
3116 g = gimple_build_assign (make_ssa_name (m_limb_type),
3117 BIT_IOR_EXPR, t1, t2);
3118 insert_before (g);
3119 t1 = gimple_assign_lhs (g);
3120 }
3121 tree l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
3122 g = gimple_build_assign (l, t1);
3123 insert_before (g);
3124 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3125 insert_before (g);
3126 g = gimple_build_cond (LT_EXPR, idx_next, pmn3, NULL_TREE, NULL_TREE);
3127 insert_before (g);
3128 idx = make_ssa_name (sizetype);
3129 m_gsi = gsi_for_stmt (final_stmt);
3130 gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
3131 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3132 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3133 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3134 add_phi_arg (phi, n2, edge_false, UNKNOWN_LOCATION);
3135 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3136 m_data_cnt = 0;
3137 tree ms = handle_operand (rhs1, p);
3138 tree ext = ms;
3139 if (!types_compatible_p (TREE_TYPE (ms), m_limb_type))
3140 ext = add_cast (m_limb_type, ms);
3141 if (!(TYPE_UNSIGNED (type) && prec % limb_prec == 0)
3142 && !integer_zerop (n3))
3143 {
3144 g = gimple_build_cond (LT_EXPR, idx, p, NULL_TREE, NULL_TREE);
3145 if_then (g, profile_probability::likely (), edge_true, edge_false);
3146 m_data_cnt = 0;
3147 t1 = handle_operand (rhs1, idx);
3148 g = gimple_build_assign (make_ssa_name (m_limb_type),
3149 RSHIFT_EXPR, t1, n1);
3150 insert_before (g);
3151 t1 = gimple_assign_lhs (g);
3152 g = gimple_build_assign (make_ssa_name (m_limb_type),
3153 LSHIFT_EXPR, ext, n4);
3154 insert_before (g);
3155 tree t2 = gimple_assign_lhs (g);
3156 g = gimple_build_assign (make_ssa_name (m_limb_type),
3157 BIT_IOR_EXPR, t1, t2);
3158 insert_before (g);
3159 t1 = gimple_assign_lhs (g);
3160 idxmn2 = make_ssa_name (sizetype);
3161 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3162 insert_before (g);
3163 l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
3164 g = gimple_build_assign (l, t1);
3165 insert_before (g);
3166 idx_next = make_ssa_name (sizetype);
3167 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3168 insert_before (g);
3169 m_gsi = gsi_for_stmt (final_stmt);
3170 tree nidx = make_ssa_name (sizetype);
3171 phi = create_phi_node (nidx, gsi_bb (m_gsi));
3172 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3173 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3174 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3175 add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
3176 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3177 idx = nidx;
3178 }
3179 g = gimple_build_assign (make_ssa_name (sizetype), MINUS_EXPR, idx, n2);
3180 insert_before (g);
3181 idx = gimple_assign_lhs (g);
3182 tree sext = ext;
3183 if (!TYPE_UNSIGNED (type))
3184 sext = add_cast (signed_type_for (m_limb_type), ext);
3185 g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
3186 RSHIFT_EXPR, sext, n1);
3187 insert_before (g);
3188 t1 = gimple_assign_lhs (g);
3189 if (!TYPE_UNSIGNED (type))
3190 {
3191 t1 = add_cast (m_limb_type, t1);
3192 g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
3193 RSHIFT_EXPR, sext,
3194 build_int_cst (TREE_TYPE (n),
3195 limb_prec - 1));
3196 insert_before (g);
3197 ext = add_cast (m_limb_type, gimple_assign_lhs (g));
3198 }
3199 else
3200 ext = build_zero_cst (m_limb_type);
3201 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3202 g = gimple_build_assign (l, t1);
3203 insert_before (g);
3204 g = gimple_build_assign (make_ssa_name (sizetype), PLUS_EXPR, idx,
3205 size_one_node);
3206 insert_before (g);
3207 idx = gimple_assign_lhs (g);
3208 g = gimple_build_cond (LE_EXPR, idx, p, NULL_TREE, NULL_TREE);
3209 if_then (g, profile_probability::likely (), edge_true, edge_false);
3210 idx = create_loop (idx, &idx_next);
3211 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3212 g = gimple_build_assign (l, ext);
3213 insert_before (g);
3214 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_one_node);
3215 insert_before (g);
3216 g = gimple_build_cond (LE_EXPR, idx_next, p, NULL_TREE, NULL_TREE);
3217 insert_before (g);
3218 }
3219 else
3220 {
3221 /* Lower
3222 dst = src << n;
3223 as
3224 unsigned n1 = n % limb_prec;
3225 size_t n2 = n / limb_prec;
3226 size_t n3 = n1 != 0;
3227 unsigned n4 = (limb_prec - n1) % limb_prec;
3228 size_t idx;
3229 size_t p = prec / limb_prec - (prec % limb_prec == 0);
3230 for (idx = p; (ssize_t) idx >= (ssize_t) (n2 + n3); --idx)
3231 dst[idx] = (src[idx - n2] << n1) | (src[idx - n2 - n3] >> n4);
3232 if (n1)
3233 {
3234 dst[idx] = src[idx - n2] << n1;
3235 --idx;
3236 }
3237 for (; (ssize_t) idx >= 0; --idx)
3238 dst[idx] = 0; */
3239 tree n2pn3;
3240 if (TREE_CODE (n2) == INTEGER_CST && TREE_CODE (n3) == INTEGER_CST)
3241 n2pn3 = int_const_binop (PLUS_EXPR, n2, n3);
3242 else
3243 {
3244 n2pn3 = make_ssa_name (sizetype);
3245 g = gimple_build_assign (n2pn3, PLUS_EXPR, n2, n3);
3246 insert_before (g);
3247 }
3248 /* For LSHIFT_EXPR, we can use handle_operand with non-INTEGER_CST
3249 idx even to access the most significant partial limb. */
3250 m_var_msb = true;
3251 if (integer_zerop (n3))
3252 /* For n3 == 0, p >= n2 + n3 is always true for all valid shift
3253 counts. Emit an if (true) condition that can be optimized away later. */
3254 g = gimple_build_cond (NE_EXPR, boolean_true_node, boolean_false_node,
3255 NULL_TREE, NULL_TREE);
3256 else
3257 g = gimple_build_cond (LE_EXPR, n2pn3, p, NULL_TREE, NULL_TREE);
3258 edge edge_true, edge_false;
3259 if_then (g, profile_probability::likely (), edge_true, edge_false);
3260 tree idx_next;
3261 tree idx = create_loop (p, &idx_next);
3262 tree idxmn2 = make_ssa_name (sizetype);
3263 tree idxmn2mn3 = make_ssa_name (sizetype);
3264 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3265 insert_before (g);
3266 g = gimple_build_assign (idxmn2mn3, MINUS_EXPR, idxmn2, n3);
3267 insert_before (g);
3268 m_data_cnt = 0;
3269 tree t1 = handle_operand (rhs1, idxmn2);
3270 m_first = false;
3271 g = gimple_build_assign (make_ssa_name (m_limb_type),
3272 LSHIFT_EXPR, t1, n1);
3273 insert_before (g);
3274 t1 = gimple_assign_lhs (g);
3275 if (!integer_zerop (n3))
3276 {
3277 m_data_cnt = 0;
3278 tree t2 = handle_operand (rhs1, idxmn2mn3);
3279 g = gimple_build_assign (make_ssa_name (m_limb_type),
3280 RSHIFT_EXPR, t2, n4);
3281 insert_before (g);
3282 t2 = gimple_assign_lhs (g);
3283 g = gimple_build_assign (make_ssa_name (m_limb_type),
3284 BIT_IOR_EXPR, t1, t2);
3285 insert_before (g);
3286 t1 = gimple_assign_lhs (g);
3287 }
3288 tree l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3289 g = gimple_build_assign (l, t1);
3290 insert_before (g);
3291 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3292 insert_before (g);
3293 tree sn2pn3 = add_cast (ssizetype, n2pn3);
3294 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next), sn2pn3,
3295 NULL_TREE, NULL_TREE);
3296 insert_before (g);
3297 idx = make_ssa_name (sizetype);
3298 m_gsi = gsi_for_stmt (final_stmt);
3299 gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
3300 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3301 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3302 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3303 add_phi_arg (phi, p, edge_false, UNKNOWN_LOCATION);
3304 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3305 m_data_cnt = 0;
3306 if (!integer_zerop (n3))
3307 {
3308 g = gimple_build_cond (NE_EXPR, n3, size_zero_node,
3309 NULL_TREE, NULL_TREE);
3310 if_then (g, profile_probability::likely (), edge_true, edge_false);
3311 idxmn2 = make_ssa_name (sizetype);
3312 g = gimple_build_assign (idxmn2, MINUS_EXPR, idx, n2);
3313 insert_before (g);
3314 m_data_cnt = 0;
3315 t1 = handle_operand (rhs1, idxmn2);
3316 g = gimple_build_assign (make_ssa_name (m_limb_type),
3317 LSHIFT_EXPR, t1, n1);
3318 insert_before (g);
3319 t1 = gimple_assign_lhs (g);
3320 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3321 g = gimple_build_assign (l, t1);
3322 insert_before (g);
3323 idx_next = make_ssa_name (sizetype);
3324 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3325 insert_before (g);
3326 m_gsi = gsi_for_stmt (final_stmt);
3327 tree nidx = make_ssa_name (sizetype);
3328 phi = create_phi_node (nidx, gsi_bb (m_gsi));
3329 edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3330 edge_true = EDGE_PRED (gsi_bb (m_gsi),
3331 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3332 add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
3333 add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3334 idx = nidx;
3335 }
3336 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx),
3337 ssize_int (0), NULL_TREE, NULL_TREE);
3338 if_then (g, profile_probability::likely (), edge_true, edge_false);
3339 idx = create_loop (idx, &idx_next);
3340 l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3341 g = gimple_build_assign (l, build_zero_cst (m_limb_type));
3342 insert_before (g);
3343 g = gimple_build_assign (idx_next, PLUS_EXPR, idx, size_int (-1));
3344 insert_before (g);
3345 g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next),
3346 ssize_int (0), NULL_TREE, NULL_TREE);
3347 insert_before (g);
3348 }
3349 }
3350
3351 /* Lower large/huge _BitInt multiplication or division. */
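/* E.g. a _BitInt(256) multiplication x = y * z is lowered to an
   internal call of roughly this shape (a sketch):
     .MULBITINT (&x, 256, &y, prec1, &z, prec2);
   where prec1/prec2 describe the precisions the operands are
   zero/sign-extended from; division and modulo similarly use
   .DIVMODBITINT with separate quotient and remainder arguments.  */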
3352
3353 void
3354 bitint_large_huge::lower_muldiv_stmt (tree obj, gimple *stmt)
3355 {
3356 tree rhs1 = gimple_assign_rhs1 (stmt);
3357 tree rhs2 = gimple_assign_rhs2 (stmt);
3358 tree lhs = gimple_assign_lhs (stmt);
3359 tree_code rhs_code = gimple_assign_rhs_code (stmt);
3360 tree type = TREE_TYPE (rhs1);
3361 gcc_assert (TREE_CODE (type) == BITINT_TYPE
3362 && bitint_precision_kind (type) >= bitint_prec_large);
3363 int prec = TYPE_PRECISION (type), prec1, prec2;
3364 rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec1);
3365 rhs2 = handle_operand_addr (rhs2, stmt, NULL, &prec2);
3366 if (obj == NULL_TREE)
3367 {
3368 int part = var_to_partition (m_map, lhs);
3369 gcc_assert (m_vars[part] != NULL_TREE);
3370 obj = m_vars[part];
3371 lhs = build_fold_addr_expr (obj);
3372 }
3373 else
3374 {
3375 lhs = build_fold_addr_expr (obj);
3376 lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
3377 NULL_TREE, true, GSI_SAME_STMT);
3378 }
3379 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
3380 gimple *g;
3381 switch (rhs_code)
3382 {
3383 case MULT_EXPR:
3384 g = gimple_build_call_internal (IFN_MULBITINT, 6,
3385 lhs, build_int_cst (sitype, prec),
3386 rhs1, build_int_cst (sitype, prec1),
3387 rhs2, build_int_cst (sitype, prec2));
3388 insert_before (g);
3389 break;
3390 case TRUNC_DIV_EXPR:
3391 g = gimple_build_call_internal (IFN_DIVMODBITINT, 8,
3392 lhs, build_int_cst (sitype, prec),
3393 null_pointer_node,
3394 build_int_cst (sitype, 0),
3395 rhs1, build_int_cst (sitype, prec1),
3396 rhs2, build_int_cst (sitype, prec2));
3397 if (!stmt_ends_bb_p (stmt))
3398 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3399 insert_before (g);
3400 break;
3401 case TRUNC_MOD_EXPR:
3402 g = gimple_build_call_internal (IFN_DIVMODBITINT, 8, null_pointer_node,
3403 build_int_cst (sitype, 0),
3404 lhs, build_int_cst (sitype, prec),
3405 rhs1, build_int_cst (sitype, prec1),
3406 rhs2, build_int_cst (sitype, prec2));
3407 if (!stmt_ends_bb_p (stmt))
3408 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3409 insert_before (g);
3410 break;
3411 default:
3412 gcc_unreachable ();
3413 }
3414 if (stmt_ends_bb_p (stmt))
3415 {
3416 maybe_duplicate_eh_stmt (g, stmt);
3417 edge e1;
3418 edge_iterator ei;
3419 basic_block bb = gimple_bb (stmt);
3420
3421 FOR_EACH_EDGE (e1, ei, bb->succs)
3422 if (e1->flags & EDGE_EH)
3423 break;
3424 if (e1)
3425 {
3426 edge e2 = split_block (gsi_bb (m_gsi), g);
3427 m_gsi = gsi_after_labels (e2->dest);
3428 make_edge (e2->src, e1->dest, EDGE_EH)->probability
3429 = profile_probability::very_unlikely ();
3430 }
3431 }
3432 }
3433
3434 /* Lower large/huge _BitInt conversion to/from floating point. */
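/* E.g. (a sketch of the emitted internal calls) d = (double) big
   becomes
     d = .BITINTTOFLOAT (&big, prec);
   and big = (_BitInt(N)) d becomes
     .FLOATTOBITINT (&big, prec, d);
   with a negative prec denoting a signed _BitInt.  */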
3435
3436 void
3437 bitint_large_huge::lower_float_conv_stmt (tree obj, gimple *stmt)
3438 {
3439 tree rhs1 = gimple_assign_rhs1 (stmt);
3440 tree lhs = gimple_assign_lhs (stmt);
3441 tree_code rhs_code = gimple_assign_rhs_code (stmt);
3442 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
3443 gimple *g;
3444 if (rhs_code == FIX_TRUNC_EXPR)
3445 {
3446 int prec = TYPE_PRECISION (TREE_TYPE (lhs));
3447 if (!TYPE_UNSIGNED (TREE_TYPE (lhs)))
3448 prec = -prec;
3449 if (obj == NULL_TREE)
3450 {
3451 int part = var_to_partition (m_map, lhs);
3452 gcc_assert (m_vars[part] != NULL_TREE);
3453 obj = m_vars[part];
3454 lhs = build_fold_addr_expr (obj);
3455 }
3456 else
3457 {
3458 lhs = build_fold_addr_expr (obj);
3459 lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
3460 NULL_TREE, true, GSI_SAME_STMT);
3461 }
3462 scalar_mode from_mode
3463 = as_a <scalar_mode> (TYPE_MODE (TREE_TYPE (rhs1)));
3464 #ifdef HAVE_SFmode
3465 /* IEEE single is a full superset of both IEEE half and
3466 bfloat formats, so convert to float first and then to _BitInt
3467 to avoid the need for another 2 library routines. */
3468 if ((REAL_MODE_FORMAT (from_mode) == &arm_bfloat_half_format
3469 || REAL_MODE_FORMAT (from_mode) == &ieee_half_format)
3470 && REAL_MODE_FORMAT (SFmode) == &ieee_single_format)
3471 {
3472 tree type = lang_hooks.types.type_for_mode (SFmode, 0);
3473 if (type)
3474 rhs1 = add_cast (type, rhs1);
3475 }
3476 #endif
3477 g = gimple_build_call_internal (IFN_FLOATTOBITINT, 3,
3478 lhs, build_int_cst (sitype, prec),
3479 rhs1);
3480 insert_before (g);
3481 }
3482 else
3483 {
3484 int prec;
3485 rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec);
3486 g = gimple_build_call_internal (IFN_BITINTTOFLOAT, 2,
3487 rhs1, build_int_cst (sitype, prec));
3488 gimple_call_set_lhs (g, lhs);
3489 if (!stmt_ends_bb_p (stmt))
3490 gimple_call_set_nothrow (as_a <gcall *> (g), true);
3491 gsi_replace (&m_gsi, g, true);
3492 }
3493 }
3494
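/* For illustration (informal sketch): the precision argument is
   negated above for signed results, so with
     signed _BitInt(256) c; double d;
   the conversion c = d is lowered to roughly
     .FLOATTOBITINT (&c, -256, d);
   and the opposite direction d = c to
     d = .BITINTTOFLOAT (&c, -256);  */
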
3495 /* Helper method for lower_addsub_overflow and lower_mul_overflow.
3496 If CHECK_ZERO is true, the caller wants to check whether all bits in
3497 [start, end) are zero, otherwise whether the bits in [start, end) are
3498 either all zero or all ones. L is the limb with index LIMB, START and
3499 END are measured in bits. */
3500
3501 tree
3502 bitint_large_huge::arith_overflow_extract_bits (unsigned int start,
3503 unsigned int end, tree l,
3504 unsigned int limb,
3505 bool check_zero)
3506 {
3507 unsigned startlimb = start / limb_prec;
3508 unsigned endlimb = (end - 1) / limb_prec;
3509 gimple *g;
3510
3511 if ((start % limb_prec) == 0 && (end % limb_prec) == 0)
3512 return l;
3513 if (startlimb == endlimb && limb == startlimb)
3514 {
3515 if (check_zero)
3516 {
3517 wide_int w = wi::shifted_mask (start % limb_prec,
3518 end - start, false, limb_prec);
3519 g = gimple_build_assign (make_ssa_name (m_limb_type),
3520 BIT_AND_EXPR, l,
3521 wide_int_to_tree (m_limb_type, w));
3522 insert_before (g);
3523 return gimple_assign_lhs (g);
3524 }
3525 unsigned int shift = start % limb_prec;
3526 if ((end % limb_prec) != 0)
3527 {
3528 unsigned int lshift = (-end) % limb_prec;
3529 shift += lshift;
3530 g = gimple_build_assign (make_ssa_name (m_limb_type),
3531 LSHIFT_EXPR, l,
3532 build_int_cst (unsigned_type_node,
3533 lshift));
3534 insert_before (g);
3535 l = gimple_assign_lhs (g);
3536 }
3537 l = add_cast (signed_type_for (m_limb_type), l);
3538 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3539 RSHIFT_EXPR, l,
3540 build_int_cst (unsigned_type_node, shift));
3541 insert_before (g);
3542 return add_cast (m_limb_type, gimple_assign_lhs (g));
3543 }
3544 else if (limb == startlimb)
3545 {
3546 if ((start % limb_prec) == 0)
3547 return l;
3548 if (!check_zero)
3549 l = add_cast (signed_type_for (m_limb_type), l);
3550 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3551 RSHIFT_EXPR, l,
3552 build_int_cst (unsigned_type_node,
3553 start % limb_prec));
3554 insert_before (g);
3555 l = gimple_assign_lhs (g);
3556 if (!check_zero)
3557 l = add_cast (m_limb_type, l);
3558 return l;
3559 }
3560 else if (limb == endlimb)
3561 {
3562 if ((end % limb_prec) == 0)
3563 return l;
3564 if (check_zero)
3565 {
3566 wide_int w = wi::mask (end % limb_prec, false, limb_prec);
3567 g = gimple_build_assign (make_ssa_name (m_limb_type),
3568 BIT_AND_EXPR, l,
3569 wide_int_to_tree (m_limb_type, w));
3570 insert_before (g);
3571 return gimple_assign_lhs (g);
3572 }
3573 unsigned int shift = (-end) % limb_prec;
3574 g = gimple_build_assign (make_ssa_name (m_limb_type),
3575 LSHIFT_EXPR, l,
3576 build_int_cst (unsigned_type_node, shift));
3577 insert_before (g);
3578 l = add_cast (signed_type_for (m_limb_type), gimple_assign_lhs (g));
3579 g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
3580 RSHIFT_EXPR, l,
3581 build_int_cst (unsigned_type_node, shift));
3582 insert_before (g);
3583 return add_cast (m_limb_type, gimple_assign_lhs (g));
3584 }
3585 return l;
3586 }
3587
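/* A worked example of the single-limb case above (informal): with
   64-bit limbs, START = 66 and END = 71 both fall into limb 1.  For
   CHECK_ZERO the extraction is a single masking with
   wi::shifted_mask (2, 5, false, 64):
     l & 0x7c
   For !CHECK_ZERO the interesting bits are shifted up to the most
   significant end and arithmetically shifted back, so the result is 0
   or all ones exactly when bits [66, 71) were:
     ((signed long) (l << 57)) >> 59  */
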
3588 /* Helper method for lower_addsub_overflow and lower_mul_overflow. Store
3589 the result, including the overflow flag, into the right locations. */
3590
3591 void
3592 bitint_large_huge::finish_arith_overflow (tree var, tree obj, tree type,
3593 tree ovf, tree lhs, tree orig_obj,
3594 gimple *stmt, tree_code code)
3595 {
3596 gimple *g;
3597
3598 if (obj == NULL_TREE
3599 && (TREE_CODE (type) != BITINT_TYPE
3600 || bitint_precision_kind (type) < bitint_prec_large))
3601 {
3602 /* Add support for 3 or more limbs filled in from a normal integral
3603 type if this assert fails. If no target chooses a limb mode smaller
3604 than half of the largest supported normal integral type, this will
3605 not be needed. */
3606 gcc_assert (TYPE_PRECISION (type) <= 2 * limb_prec);
3607 tree lhs_type = type;
3608 if (TREE_CODE (type) == BITINT_TYPE
3609 && bitint_precision_kind (type) == bitint_prec_middle)
3610 lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (type),
3611 TYPE_UNSIGNED (type));
3612 tree r1 = limb_access (NULL_TREE, var, size_int (0), true);
3613 g = gimple_build_assign (make_ssa_name (m_limb_type), r1);
3614 insert_before (g);
3615 r1 = gimple_assign_lhs (g);
3616 if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
3617 r1 = add_cast (lhs_type, r1);
3618 if (TYPE_PRECISION (lhs_type) > limb_prec)
3619 {
3620 tree r2 = limb_access (NULL_TREE, var, size_int (1), true);
3621 g = gimple_build_assign (make_ssa_name (m_limb_type), r2);
3622 insert_before (g);
3623 r2 = gimple_assign_lhs (g);
3624 r2 = add_cast (lhs_type, r2);
3625 g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
3626 build_int_cst (unsigned_type_node,
3627 limb_prec));
3628 insert_before (g);
3629 g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
3630 gimple_assign_lhs (g));
3631 insert_before (g);
3632 r1 = gimple_assign_lhs (g);
3633 }
3634 if (lhs_type != type)
3635 r1 = add_cast (type, r1);
3636 ovf = add_cast (lhs_type, ovf);
3637 if (lhs_type != type)
3638 ovf = add_cast (type, ovf);
3639 g = gimple_build_assign (lhs, COMPLEX_EXPR, r1, ovf);
3640 m_gsi = gsi_for_stmt (stmt);
3641 gsi_replace (&m_gsi, g, true);
3642 }
3643 else
3644 {
3645 unsigned HOST_WIDE_INT nelts = 0;
3646 tree atype = NULL_TREE;
3647 if (obj)
3648 {
3649 nelts = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
3650 if (orig_obj == NULL_TREE)
3651 nelts >>= 1;
3652 atype = build_array_type_nelts (m_limb_type, nelts);
3653 }
3654 if (var && obj)
3655 {
3656 tree v1, v2;
3657 tree zero;
3658 if (orig_obj == NULL_TREE)
3659 {
3660 zero = build_zero_cst (build_pointer_type (TREE_TYPE (obj)));
3661 v1 = build2 (MEM_REF, atype,
3662 build_fold_addr_expr (unshare_expr (obj)), zero);
3663 }
3664 else if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
3665 v1 = build1 (VIEW_CONVERT_EXPR, atype, unshare_expr (obj));
3666 else
3667 v1 = unshare_expr (obj);
3668 zero = build_zero_cst (build_pointer_type (TREE_TYPE (var)));
3669 v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), zero);
3670 g = gimple_build_assign (v1, v2);
3671 insert_before (g);
3672 }
3673 if (orig_obj == NULL_TREE && obj)
3674 {
3675 ovf = add_cast (m_limb_type, ovf);
3676 tree l = limb_access (NULL_TREE, obj, size_int (nelts), true);
3677 g = gimple_build_assign (l, ovf);
3678 insert_before (g);
3679 if (nelts > 1)
3680 {
3681 atype = build_array_type_nelts (m_limb_type, nelts - 1);
3682 tree off = build_int_cst (build_pointer_type (TREE_TYPE (obj)),
3683 (nelts + 1) * m_limb_size);
3684 tree v1 = build2 (MEM_REF, atype,
3685 build_fold_addr_expr (unshare_expr (obj)),
3686 off);
3687 g = gimple_build_assign (v1, build_zero_cst (atype));
3688 insert_before (g);
3689 }
3690 }
3691 else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE)
3692 {
3693 imm_use_iterator ui;
3694 use_operand_p use_p;
3695 FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
3696 {
3697 g = USE_STMT (use_p);
3698 if (!is_gimple_assign (g)
3699 || gimple_assign_rhs_code (g) != IMAGPART_EXPR)
3700 continue;
3701 tree lhs2 = gimple_assign_lhs (g);
3702 gimple *use_stmt;
3703 single_imm_use (lhs2, &use_p, &use_stmt);
3704 lhs2 = gimple_assign_lhs (use_stmt);
3705 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
3706 if (useless_type_conversion_p (TREE_TYPE (lhs2), TREE_TYPE (ovf)))
3707 g = gimple_build_assign (lhs2, ovf);
3708 else
3709 g = gimple_build_assign (lhs2, NOP_EXPR, ovf);
3710 gsi_replace (&gsi, g, true);
3711 if (gsi_stmt (m_gsi) == use_stmt)
3712 m_gsi = gsi_for_stmt (g);
3713 break;
3714 }
3715 }
3716 else if (ovf != boolean_false_node)
3717 {
3718 g = gimple_build_cond (NE_EXPR, ovf, boolean_false_node,
3719 NULL_TREE, NULL_TREE);
3720 edge edge_true, edge_false;
3721 if_then (g, profile_probability::very_unlikely (),
3722 edge_true, edge_false);
3723 tree zero = build_zero_cst (TREE_TYPE (lhs));
3724 tree fn = ubsan_build_overflow_builtin (code, m_loc,
3725 TREE_TYPE (lhs),
3726 zero, zero, NULL);
3727 force_gimple_operand_gsi (&m_gsi, fn, true, NULL_TREE,
3728 true, GSI_SAME_STMT);
3729 m_gsi = gsi_after_labels (edge_true->dest);
3730 }
3731 }
3732 if (var)
3733 {
3734 tree clobber = build_clobber (TREE_TYPE (var), CLOBBER_STORAGE_END);
3735 g = gimple_build_assign (var, clobber);
3736 gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
3737 }
3738 }
3739
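/* E.g. (informal sketch) with 64-bit limbs, when the result of an
   .ADD_OVERFLOW lowering has type _Complex _BitInt(128) and there is
   no backing object, the code above reassembles it from the two
   computed limbs roughly as
     re = (T) limb0 | ((T) limb1 << 64);
     lhs = COMPLEX_EXPR <re, (T) ovf>;
   where T stands for the 128-bit arithmetic type used for middle
   _BitInt.  */
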
3740 /* Helper function for lower_addsub_overflow and lower_mul_overflow.
3741 Given precisions of result TYPE (PREC), argument 0 precision PREC0,
3742 argument 1 precision PREC1 and minimum precision for the result
3743 PREC2, compute *START, *END, *CHECK_ZERO and return OVF. */
3744
3745 static tree
3746 arith_overflow (tree_code code, tree type, int prec, int prec0, int prec1,
3747 int prec2, unsigned *start, unsigned *end, bool *check_zero)
3748 {
3749 *start = 0;
3750 *end = 0;
3751 *check_zero = true;
3752 /* Ignore this special rule for subtraction: even if both
3753 prec0 >= 0 and prec1 >= 0, their subtraction can be negative
3754 in infinite precision. */
3755 if (code != MINUS_EXPR && prec0 >= 0 && prec1 >= 0)
3756 {
3757 /* The result is unsigned with all value bits in [0, prec2);
3758 if prec > prec2, all bits above that range will be zero. */
3759 if ((prec - !TYPE_UNSIGNED (type)) >= prec2)
3760 return boolean_false_node;
3761 else
3762 {
3763 /* ovf if any of the bits in [start, end) is non-zero. */
3764 *start = prec - !TYPE_UNSIGNED (type);
3765 *end = prec2;
3766 }
3767 }
3768 else if (TYPE_UNSIGNED (type))
3769 {
3770 /* The result is signed with all value bits in [0, prec2);
3771 if prec > prec2, all bits above that range will be sign bit copies. */
3772 if (prec >= prec2)
3773 {
3774 /* ovf if bit prec - 1 is non-zero. */
3775 *start = prec - 1;
3776 *end = prec;
3777 }
3778 else
3779 {
3780 /* ovf if any of the bits in [start, end) is non-zero. */
3781 *start = prec;
3782 *end = prec2;
3783 }
3784 }
3785 else if (prec >= prec2)
3786 return boolean_false_node;
3787 else
3788 {
3789 /* ovf if [start, end) bits aren't all zeros or all ones. */
3790 *start = prec - 1;
3791 *end = prec2;
3792 *check_zero = false;
3793 }
3794 return NULL_TREE;
3795 }
3796
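/* Worked examples for arith_overflow (informal): for
     unsigned _BitInt(16) result, PREC0 8, PREC1 10, PLUS_EXPR
   PREC2 is 11 and prec - 0 >= prec2, so overflow is impossible and
   boolean_false_node is returned.  For
     signed _BitInt(8) result, PREC0 8, PREC1 10, PLUS_EXPR
   prec - 1 = 7 < prec2 = 11 with both operands unsigned, so overflow
   happens iff any of the bits [7, 11) of the infinite precision
   result is non-zero.  */
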
3797 /* Lower a .{ADD,SUB}_OVERFLOW call with at least one large/huge _BitInt
3798 argument or with a _Complex large/huge _BitInt return type. */
3799
3800 void
3801 bitint_large_huge::lower_addsub_overflow (tree obj, gimple *stmt)
3802 {
3803 tree arg0 = gimple_call_arg (stmt, 0);
3804 tree arg1 = gimple_call_arg (stmt, 1);
3805 tree lhs = gimple_call_lhs (stmt);
3806 gimple *g;
3807
3808 if (!lhs)
3809 {
3810 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
3811 gsi_remove (&gsi, true);
3812 return;
3813 }
3814 gimple *final_stmt = gsi_stmt (m_gsi);
3815 tree type = TREE_TYPE (lhs);
3816 if (TREE_CODE (type) == COMPLEX_TYPE)
3817 type = TREE_TYPE (type);
3818 int prec = TYPE_PRECISION (type);
3819 int prec0 = range_to_prec (arg0, stmt);
3820 int prec1 = range_to_prec (arg1, stmt);
3821 /* If PREC0 >= 0 && PREC1 >= 0 and CODE is not MINUS_EXPR, PREC2 is
3822 the minimum unsigned precision of any possible operation's
3823 result, otherwise it is the minimum signed precision.
3824 Some examples:
3825 If PREC0 or PREC1 is 8, it means that argument is [0, 0xff],
3826 if PREC0 or PREC1 is 10, it means that argument is [0, 0x3ff],
3827 if PREC0 or PREC1 is -8, it means that argument is [-0x80, 0x7f],
3828 if PREC0 or PREC1 is -10, it means that argument is [-0x200, 0x1ff].
3829 PREC0 CODE PREC1 RESULT PREC2 SIGNED vs. UNSIGNED
3830 8 + 8 [0, 0x1fe] 9 UNSIGNED
3831 8 + 10 [0, 0x4fe] 11 UNSIGNED
3832 -8 + -8 [-0x100, 0xfe] 9 SIGNED
3833 -8 + -10 [-0x280, 0x27e] 11 SIGNED
3834 8 + -8 [-0x80, 0x17e] 10 SIGNED
3835 8 + -10 [-0x200, 0x2fe] 11 SIGNED
3836 10 + -8 [-0x80, 0x47e] 12 SIGNED
3837 8 - 8 [-0xff, 0xff] 9 SIGNED
3838 8 - 10 [-0x3ff, 0xff] 11 SIGNED
3839 10 - 8 [-0xff, 0x3ff] 11 SIGNED
3840 -8 - -8 [-0xff, 0xff] 9 SIGNED
3841 -8 - -10 [-0x27f, 0x27f] 11 SIGNED
3842 -10 - -8 [-0x27f, 0x27f] 11 SIGNED
3843 8 - -8 [-0x7f, 0x17f] 10 SIGNED
3844 8 - -10 [-0x1ff, 0x2ff] 11 SIGNED
3845 10 - -8 [-0x7f, 0x47f] 12 SIGNED
3846 -8 - 8 [-0x17f, 0x7f] 10 SIGNED
3847 -8 - 10 [-0x47f, 0x7f] 12 SIGNED
3848 -10 - 8 [-0x2ff, 0x1ff] 11 SIGNED */
3849 int prec2 = MAX (prec0 < 0 ? -prec0 : prec0,
3850 prec1 < 0 ? -prec1 : prec1);
3851 /* If operands are either both signed or both unsigned,
3852 we need just one additional bit. */
3853 prec2 = (((prec0 < 0) == (prec1 < 0)
3854 /* If one operand is signed and one unsigned and
3855 the signed one has larger precision, we need
3856 just one extra bit, otherwise two. */
3857 || (prec0 < 0 ? (prec2 == -prec0 && prec2 != prec1)
3858 : (prec2 == -prec1 && prec2 != prec0)))
3859 ? prec2 + 1 : prec2 + 2);
3860 int prec3 = MAX (prec0 < 0 ? -prec0 : prec0,
3861 prec1 < 0 ? -prec1 : prec1);
3862 prec3 = MAX (prec3, prec);
3863 tree var = NULL_TREE;
3864 tree orig_obj = obj;
3865 if (obj == NULL_TREE
3866 && TREE_CODE (type) == BITINT_TYPE
3867 && bitint_precision_kind (type) >= bitint_prec_large
3868 && m_names
3869 && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
3870 {
3871 int part = var_to_partition (m_map, lhs);
3872 gcc_assert (m_vars[part] != NULL_TREE);
3873 obj = m_vars[part];
3874 if (TREE_TYPE (lhs) == type)
3875 orig_obj = obj;
3876 }
3877 if (TREE_CODE (type) != BITINT_TYPE
3878 || bitint_precision_kind (type) < bitint_prec_large)
3879 {
3880 unsigned HOST_WIDE_INT nelts = CEIL (prec, limb_prec);
3881 tree atype = build_array_type_nelts (m_limb_type, nelts);
3882 var = create_tmp_var (atype);
3883 }
3884
3885 enum tree_code code;
3886 switch (gimple_call_internal_fn (stmt))
3887 {
3888 case IFN_ADD_OVERFLOW:
3889 case IFN_UBSAN_CHECK_ADD:
3890 code = PLUS_EXPR;
3891 break;
3892 case IFN_SUB_OVERFLOW:
3893 case IFN_UBSAN_CHECK_SUB:
3894 code = MINUS_EXPR;
3895 break;
3896 default:
3897 gcc_unreachable ();
3898 }
3899 unsigned start, end;
3900 bool check_zero;
3901 tree ovf = arith_overflow (code, type, prec, prec0, prec1, prec2,
3902 &start, &end, &check_zero);
3903
3904 unsigned startlimb, endlimb;
3905 if (ovf)
3906 {
3907 startlimb = ~0U;
3908 endlimb = ~0U;
3909 }
3910 else
3911 {
3912 startlimb = start / limb_prec;
3913 endlimb = (end - 1) / limb_prec;
3914 }
3915
3916 int prec4 = ovf != NULL_TREE ? prec : prec3;
3917 bitint_prec_kind kind = bitint_precision_kind (prec4);
3918 unsigned cnt, rem = 0, fin = 0;
3919 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
3920 bool last_ovf = (ovf == NULL_TREE
3921 && CEIL (prec2, limb_prec) > CEIL (prec3, limb_prec));
3922 if (kind != bitint_prec_huge)
3923 cnt = CEIL (prec4, limb_prec) + last_ovf;
3924 else
3925 {
3926 rem = (prec4 % (2 * limb_prec));
3927 fin = (prec4 - rem) / limb_prec;
3928 cnt = 2 + CEIL (rem, limb_prec) + last_ovf;
3929 idx = idx_first = create_loop (size_zero_node, &idx_next);
3930 }
3931
3932 if (kind == bitint_prec_huge)
3933 m_upwards_2limb = fin;
3934 m_upwards = true;
3935
3936 tree type0 = TREE_TYPE (arg0);
3937 tree type1 = TREE_TYPE (arg1);
3938 int prec5 = prec3;
3939 if (bitint_precision_kind (prec5) < bitint_prec_large)
3940 prec5 = MAX (TYPE_PRECISION (type0), TYPE_PRECISION (type1));
3941 if (TYPE_PRECISION (type0) < prec5)
3942 {
3943 type0 = build_bitint_type (prec5, TYPE_UNSIGNED (type0));
3944 if (TREE_CODE (arg0) == INTEGER_CST)
3945 arg0 = fold_convert (type0, arg0);
3946 }
3947 if (TYPE_PRECISION (type1) < prec5)
3948 {
3949 type1 = build_bitint_type (prec5, TYPE_UNSIGNED (type1));
3950 if (TREE_CODE (arg1) == INTEGER_CST)
3951 arg1 = fold_convert (type1, arg1);
3952 }
3953 unsigned int data_cnt = 0;
3954 tree last_rhs1 = NULL_TREE, last_rhs2 = NULL_TREE;
3955 tree cmp = build_zero_cst (m_limb_type);
3956 unsigned prec_limbs = CEIL ((unsigned) prec, limb_prec);
3957 tree ovf_out = NULL_TREE, cmp_out = NULL_TREE;
3958 for (unsigned i = 0; i < cnt; i++)
3959 {
3960 m_data_cnt = 0;
3961 tree rhs1, rhs2;
3962 if (kind != bitint_prec_huge)
3963 idx = size_int (i);
3964 else if (i >= 2)
3965 idx = size_int (fin + (i > 2));
3966 if (!last_ovf || i < cnt - 1)
3967 {
3968 if (type0 != TREE_TYPE (arg0))
3969 rhs1 = handle_cast (type0, arg0, idx);
3970 else
3971 rhs1 = handle_operand (arg0, idx);
3972 if (type1 != TREE_TYPE (arg1))
3973 rhs2 = handle_cast (type1, arg1, idx);
3974 else
3975 rhs2 = handle_operand (arg1, idx);
3976 if (i == 0)
3977 data_cnt = m_data_cnt;
3978 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
3979 rhs1 = add_cast (m_limb_type, rhs1);
3980 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs2)))
3981 rhs2 = add_cast (m_limb_type, rhs2);
3982 last_rhs1 = rhs1;
3983 last_rhs2 = rhs2;
3984 }
3985 else
3986 {
3987 m_data_cnt = data_cnt;
3988 if (TYPE_UNSIGNED (type0))
3989 rhs1 = build_zero_cst (m_limb_type);
3990 else
3991 {
3992 rhs1 = add_cast (signed_type_for (m_limb_type), last_rhs1);
3993 if (TREE_CODE (rhs1) == INTEGER_CST)
3994 rhs1 = build_int_cst (m_limb_type,
3995 tree_int_cst_sgn (rhs1) < 0 ? -1 : 0);
3996 else
3997 {
3998 tree lpm1 = build_int_cst (unsigned_type_node,
3999 limb_prec - 1);
4000 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
4001 RSHIFT_EXPR, rhs1, lpm1);
4002 insert_before (g);
4003 rhs1 = add_cast (m_limb_type, gimple_assign_lhs (g));
4004 }
4005 }
4006 if (TYPE_UNSIGNED (type1))
4007 rhs2 = build_zero_cst (m_limb_type);
4008 else
4009 {
4010 rhs2 = add_cast (signed_type_for (m_limb_type), last_rhs2);
4011 if (TREE_CODE (rhs2) == INTEGER_CST)
4012 rhs2 = build_int_cst (m_limb_type,
4013 tree_int_cst_sgn (rhs2) < 0 ? -1 : 0);
4014 else
4015 {
4016 tree lpm1 = build_int_cst (unsigned_type_node,
4017 limb_prec - 1);
4018 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs2)),
4019 RSHIFT_EXPR, rhs2, lpm1);
4020 insert_before (g);
4021 rhs2 = add_cast (m_limb_type, gimple_assign_lhs (g));
4022 }
4023 }
4024 }
4025 tree rhs = handle_plus_minus (code, rhs1, rhs2, idx);
4026 if (ovf != boolean_false_node)
4027 {
4028 if (tree_fits_uhwi_p (idx))
4029 {
4030 unsigned limb = tree_to_uhwi (idx);
4031 if (limb >= startlimb && limb <= endlimb)
4032 {
4033 tree l = arith_overflow_extract_bits (start, end, rhs,
4034 limb, check_zero);
4035 tree this_ovf = make_ssa_name (boolean_type_node);
4036 if (ovf == NULL_TREE && !check_zero)
4037 {
4038 cmp = l;
4039 g = gimple_build_assign (make_ssa_name (m_limb_type),
4040 PLUS_EXPR, l,
4041 build_int_cst (m_limb_type, 1));
4042 insert_before (g);
4043 g = gimple_build_assign (this_ovf, GT_EXPR,
4044 gimple_assign_lhs (g),
4045 build_int_cst (m_limb_type, 1));
4046 }
4047 else
4048 g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
4049 insert_before (g);
4050 if (ovf == NULL_TREE)
4051 ovf = this_ovf;
4052 else
4053 {
4054 tree b = make_ssa_name (boolean_type_node);
4055 g = gimple_build_assign (b, BIT_IOR_EXPR, ovf, this_ovf);
4056 insert_before (g);
4057 ovf = b;
4058 }
4059 }
4060 }
4061 else if (startlimb < fin)
4062 {
4063 if (m_first && startlimb + 2 < fin)
4064 {
4065 tree data_out;
4066 ovf = prepare_data_in_out (boolean_false_node, idx, &data_out);
4067 ovf_out = m_data.pop ();
4068 m_data.pop ();
4069 if (!check_zero)
4070 {
4071 cmp = prepare_data_in_out (cmp, idx, &data_out);
4072 cmp_out = m_data.pop ();
4073 m_data.pop ();
4074 }
4075 }
4076 if (i != 0 || startlimb != fin - 1)
4077 {
4078 tree_code cmp_code;
4079 bool single_comparison
4080 = (startlimb + 2 >= fin || (startlimb & 1) != (i & 1));
4081 if (!single_comparison)
4082 {
4083 cmp_code = GE_EXPR;
4084 if (!check_zero && (start % limb_prec) == 0)
4085 single_comparison = true;
4086 }
4087 else if ((startlimb & 1) == (i & 1))
4088 cmp_code = EQ_EXPR;
4089 else
4090 cmp_code = GT_EXPR;
4091 g = gimple_build_cond (cmp_code, idx, size_int (startlimb),
4092 NULL_TREE, NULL_TREE);
4093 edge edge_true_true, edge_true_false, edge_false;
4094 gimple *g2 = NULL;
4095 if (!single_comparison)
4096 g2 = gimple_build_cond (NE_EXPR, idx,
4097 size_int (startlimb), NULL_TREE,
4098 NULL_TREE);
4099 if_then_if_then_else (g, g2, profile_probability::likely (),
4100 profile_probability::likely (),
4101 edge_true_true, edge_true_false,
4102 edge_false);
4103 unsigned tidx = startlimb + (cmp_code == GT_EXPR);
4104 tree l = arith_overflow_extract_bits (start, end, rhs, tidx,
4105 check_zero);
4106 tree this_ovf = make_ssa_name (boolean_type_node);
4107 if (cmp_code != GT_EXPR && !check_zero)
4108 {
4109 g = gimple_build_assign (make_ssa_name (m_limb_type),
4110 PLUS_EXPR, l,
4111 build_int_cst (m_limb_type, 1));
4112 insert_before (g);
4113 g = gimple_build_assign (this_ovf, GT_EXPR,
4114 gimple_assign_lhs (g),
4115 build_int_cst (m_limb_type, 1));
4116 }
4117 else
4118 g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
4119 insert_before (g);
4120 if (cmp_code == GT_EXPR)
4121 {
4122 tree t = make_ssa_name (boolean_type_node);
4123 g = gimple_build_assign (t, BIT_IOR_EXPR, ovf, this_ovf);
4124 insert_before (g);
4125 this_ovf = t;
4126 }
4127 tree this_ovf2 = NULL_TREE;
4128 if (!single_comparison)
4129 {
4130 m_gsi = gsi_after_labels (edge_true_true->src);
4131 tree t = make_ssa_name (boolean_type_node);
4132 g = gimple_build_assign (t, NE_EXPR, rhs, cmp);
4133 insert_before (g);
4134 this_ovf2 = make_ssa_name (boolean_type_node);
4135 g = gimple_build_assign (this_ovf2, BIT_IOR_EXPR,
4136 ovf, t);
4137 insert_before (g);
4138 }
4139 m_gsi = gsi_after_labels (edge_true_false->dest);
4140 tree t;
4141 if (i == 1 && ovf_out)
4142 t = ovf_out;
4143 else
4144 t = make_ssa_name (boolean_type_node);
4145 gphi *phi = create_phi_node (t, edge_true_false->dest);
4146 add_phi_arg (phi, this_ovf, edge_true_false,
4147 UNKNOWN_LOCATION);
4148 add_phi_arg (phi, ovf ? ovf
4149 : boolean_false_node, edge_false,
4150 UNKNOWN_LOCATION);
4151 if (edge_true_true)
4152 add_phi_arg (phi, this_ovf2, edge_true_true,
4153 UNKNOWN_LOCATION);
4154 ovf = t;
4155 if (!check_zero && cmp_code != GT_EXPR)
4156 {
4157 t = cmp_out ? cmp_out : make_ssa_name (m_limb_type);
4158 phi = create_phi_node (t, edge_true_false->dest);
4159 add_phi_arg (phi, l, edge_true_false, UNKNOWN_LOCATION);
4160 add_phi_arg (phi, cmp, edge_false, UNKNOWN_LOCATION);
4161 if (edge_true_true)
4162 add_phi_arg (phi, cmp, edge_true_true,
4163 UNKNOWN_LOCATION);
4164 cmp = t;
4165 }
4166 }
4167 }
4168 }
4169
4170 if (var || obj)
4171 {
4172 if (tree_fits_uhwi_p (idx) && tree_to_uhwi (idx) >= prec_limbs)
4173 ;
4174 else if (!tree_fits_uhwi_p (idx)
4175 && (unsigned) prec < (fin - (i == 0)) * limb_prec)
4176 {
4177 bool single_comparison
4178 = (((unsigned) prec % limb_prec) == 0
4179 || prec_limbs + 1 >= fin
4180 || (prec_limbs & 1) == (i & 1));
4181 g = gimple_build_cond (LE_EXPR, idx, size_int (prec_limbs - 1),
4182 NULL_TREE, NULL_TREE);
4183 gimple *g2 = NULL;
4184 if (!single_comparison)
4185 g2 = gimple_build_cond (LT_EXPR, idx,
4186 size_int (prec_limbs - 1),
4187 NULL_TREE, NULL_TREE);
4188 edge edge_true_true, edge_true_false, edge_false;
4189 if_then_if_then_else (g, g2, profile_probability::likely (),
4190 profile_probability::likely (),
4191 edge_true_true, edge_true_false,
4192 edge_false);
4193 tree l = limb_access (type, var ? var : obj, idx, true);
4194 g = gimple_build_assign (l, rhs);
4195 insert_before (g);
4196 if (!single_comparison)
4197 {
4198 m_gsi = gsi_after_labels (edge_true_true->src);
4199 l = limb_access (type, var ? var : obj,
4200 size_int (prec_limbs - 1), true);
4201 if (!useless_type_conversion_p (TREE_TYPE (l),
4202 TREE_TYPE (rhs)))
4203 rhs = add_cast (TREE_TYPE (l), rhs);
4204 g = gimple_build_assign (l, rhs);
4205 insert_before (g);
4206 }
4207 m_gsi = gsi_after_labels (edge_true_false->dest);
4208 }
4209 else
4210 {
4211 tree l = limb_access (type, var ? var : obj, idx, true);
4212 if (!useless_type_conversion_p (TREE_TYPE (l), TREE_TYPE (rhs)))
4213 rhs = add_cast (TREE_TYPE (l), rhs);
4214 g = gimple_build_assign (l, rhs);
4215 insert_before (g);
4216 }
4217 }
4218 m_first = false;
4219 if (kind == bitint_prec_huge && i <= 1)
4220 {
4221 if (i == 0)
4222 {
4223 idx = make_ssa_name (sizetype);
4224 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
4225 size_one_node);
4226 insert_before (g);
4227 }
4228 else
4229 {
4230 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
4231 size_int (2));
4232 insert_before (g);
4233 g = gimple_build_cond (NE_EXPR, idx_next, size_int (fin),
4234 NULL_TREE, NULL_TREE);
4235 insert_before (g);
4236 m_gsi = gsi_for_stmt (final_stmt);
4237 m_bb = NULL;
4238 }
4239 }
4240 }
4241
4242 finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt, code);
4243 }
4244
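/* Informal sketch of the code built above for the huge case: the main
   loop processes two limbs per iteration, with the carry carried
   across iterations through m_data,
     idx = 0;
     do { handle limbs idx and idx + 1; idx += 2; }
     while (idx != fin);
   followed by CEIL (rem, limb_prec) peeled limbs and, when LAST_OVF,
   one extra sign-extension limb used only for the overflow check.  */
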
4245 /* Lower a .MUL_OVERFLOW call with at least one large/huge _BitInt
4246 argument or with a _Complex large/huge _BitInt return type. */
4247
4248 void
4249 bitint_large_huge::lower_mul_overflow (tree obj, gimple *stmt)
4250 {
4251 tree arg0 = gimple_call_arg (stmt, 0);
4252 tree arg1 = gimple_call_arg (stmt, 1);
4253 tree lhs = gimple_call_lhs (stmt);
4254 if (!lhs)
4255 {
4256 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4257 gsi_remove (&gsi, true);
4258 return;
4259 }
4260 gimple *final_stmt = gsi_stmt (m_gsi);
4261 tree type = TREE_TYPE (lhs);
4262 if (TREE_CODE (type) == COMPLEX_TYPE)
4263 type = TREE_TYPE (type);
4264 int prec = TYPE_PRECISION (type), prec0, prec1;
4265 arg0 = handle_operand_addr (arg0, stmt, NULL, &prec0);
4266 arg1 = handle_operand_addr (arg1, stmt, NULL, &prec1);
4267 int prec2 = ((prec0 < 0 ? -prec0 : prec0)
4268 + (prec1 < 0 ? -prec1 : prec1));
4269 if (prec0 == 1 || prec1 == 1)
4270 --prec2;
4271 tree var = NULL_TREE;
4272 tree orig_obj = obj;
4273 bool force_var = false;
4274 if (obj == NULL_TREE
4275 && TREE_CODE (type) == BITINT_TYPE
4276 && bitint_precision_kind (type) >= bitint_prec_large
4277 && m_names
4278 && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
4279 {
4280 int part = var_to_partition (m_map, lhs);
4281 gcc_assert (m_vars[part] != NULL_TREE);
4282 obj = m_vars[part];
4283 if (TREE_TYPE (lhs) == type)
4284 orig_obj = obj;
4285 }
4286 else if (obj != NULL_TREE && DECL_P (obj))
4287 {
4288 for (int i = 0; i < 2; ++i)
4289 {
4290 tree arg = i ? arg1 : arg0;
4291 if (TREE_CODE (arg) == ADDR_EXPR)
4292 arg = TREE_OPERAND (arg, 0);
4293 if (get_base_address (arg) == obj)
4294 {
4295 force_var = true;
4296 break;
4297 }
4298 }
4299 }
4300 if (obj == NULL_TREE
4301 || force_var
4302 || TREE_CODE (type) != BITINT_TYPE
4303 || bitint_precision_kind (type) < bitint_prec_large
4304 || prec2 > (CEIL (prec, limb_prec) * limb_prec * (orig_obj ? 1 : 2)))
4305 {
4306 unsigned HOST_WIDE_INT nelts = CEIL (MAX (prec, prec2), limb_prec);
4307 tree atype = build_array_type_nelts (m_limb_type, nelts);
4308 var = create_tmp_var (atype);
4309 }
4310 tree addr = build_fold_addr_expr (var ? var : obj);
4311 addr = force_gimple_operand_gsi (&m_gsi, addr, true,
4312 NULL_TREE, true, GSI_SAME_STMT);
4313 tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
4314 gimple *g
4315 = gimple_build_call_internal (IFN_MULBITINT, 6,
4316 addr, build_int_cst (sitype,
4317 MAX (prec2, prec)),
4318 arg0, build_int_cst (sitype, prec0),
4319 arg1, build_int_cst (sitype, prec1));
4320 insert_before (g);
4321
4322 unsigned start, end;
4323 bool check_zero;
4324 tree ovf = arith_overflow (MULT_EXPR, type, prec, prec0, prec1, prec2,
4325 &start, &end, &check_zero);
4326 if (ovf == NULL_TREE)
4327 {
4328 unsigned startlimb = start / limb_prec;
4329 unsigned endlimb = (end - 1) / limb_prec;
4330 unsigned cnt;
4331 bool use_loop = false;
4332 if (startlimb == endlimb)
4333 cnt = 1;
4334 else if (startlimb + 1 == endlimb)
4335 cnt = 2;
4336 else if ((end % limb_prec) == 0)
4337 {
4338 cnt = 2;
4339 use_loop = true;
4340 }
4341 else
4342 {
4343 cnt = 3;
4344 use_loop = startlimb + 2 < endlimb;
4345 }
4346 if (cnt == 1)
4347 {
4348 tree l = limb_access (NULL_TREE, var ? var : obj,
4349 size_int (startlimb), true);
4350 g = gimple_build_assign (make_ssa_name (m_limb_type), l);
4351 insert_before (g);
4352 l = arith_overflow_extract_bits (start, end, gimple_assign_lhs (g),
4353 startlimb, check_zero);
4354 ovf = make_ssa_name (boolean_type_node);
4355 if (check_zero)
4356 g = gimple_build_assign (ovf, NE_EXPR, l,
4357 build_zero_cst (m_limb_type));
4358 else
4359 {
4360 g = gimple_build_assign (make_ssa_name (m_limb_type),
4361 PLUS_EXPR, l,
4362 build_int_cst (m_limb_type, 1));
4363 insert_before (g);
4364 g = gimple_build_assign (ovf, GT_EXPR, gimple_assign_lhs (g),
4365 build_int_cst (m_limb_type, 1));
4366 }
4367 insert_before (g);
4368 }
4369 else
4370 {
4371 basic_block edge_bb = NULL;
4372 gimple_stmt_iterator gsi = m_gsi;
4373 gsi_prev (&gsi);
4374 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4375 edge_bb = e->src;
4376 m_gsi = gsi_end_bb (edge_bb);
4377
4378 tree cmp = build_zero_cst (m_limb_type);
4379 for (unsigned i = 0; i < cnt; i++)
4380 {
4381 tree idx, idx_next = NULL_TREE;
4382 if (i == 0)
4383 idx = size_int (startlimb);
4384 else if (i == 2)
4385 idx = size_int (endlimb);
4386 else if (use_loop)
4387 idx = create_loop (size_int (startlimb + 1), &idx_next);
4388 else
4389 idx = size_int (startlimb + 1);
4390 tree l = limb_access (NULL_TREE, var ? var : obj, idx, true);
4391 g = gimple_build_assign (make_ssa_name (m_limb_type), l);
4392 insert_before (g);
4393 l = gimple_assign_lhs (g);
4394 if (i == 0 || i == 2)
4395 l = arith_overflow_extract_bits (start, end, l,
4396 tree_to_uhwi (idx),
4397 check_zero);
4398 if (i == 0 && !check_zero)
4399 {
4400 cmp = l;
4401 g = gimple_build_assign (make_ssa_name (m_limb_type),
4402 PLUS_EXPR, l,
4403 build_int_cst (m_limb_type, 1));
4404 insert_before (g);
4405 g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
4406 build_int_cst (m_limb_type, 1),
4407 NULL_TREE, NULL_TREE);
4408 }
4409 else
4410 g = gimple_build_cond (NE_EXPR, l, cmp, NULL_TREE, NULL_TREE);
4411 insert_before (g);
4412 edge e1 = split_block (gsi_bb (m_gsi), g);
4413 e1->flags = EDGE_FALSE_VALUE;
4414 edge e2 = make_edge (e1->src, gimple_bb (final_stmt),
4415 EDGE_TRUE_VALUE);
4416 e1->probability = profile_probability::likely ();
4417 e2->probability = e1->probability.invert ();
4418 if (i == 0)
4419 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4420 m_gsi = gsi_after_labels (e1->dest);
4421 if (i == 1 && use_loop)
4422 {
4423 g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
4424 size_one_node);
4425 insert_before (g);
4426 g = gimple_build_cond (NE_EXPR, idx_next,
4427 size_int (endlimb + (cnt == 1)),
4428 NULL_TREE, NULL_TREE);
4429 insert_before (g);
4430 edge true_edge, false_edge;
4431 extract_true_false_edges_from_block (gsi_bb (m_gsi),
4432 &true_edge,
4433 &false_edge);
4434 m_gsi = gsi_after_labels (false_edge->dest);
4435 m_bb = NULL;
4436 }
4437 }
4438
4439 ovf = make_ssa_name (boolean_type_node);
4440 basic_block bb = gimple_bb (final_stmt);
4441 gphi *phi = create_phi_node (ovf, bb);
4442 edge e1 = find_edge (gsi_bb (m_gsi), bb);
4443 edge_iterator ei;
4444 FOR_EACH_EDGE (e, ei, bb->preds)
4445 {
4446 tree val = e == e1 ? boolean_false_node : boolean_true_node;
4447 add_phi_arg (phi, val, e, UNKNOWN_LOCATION);
4448 }
4449 m_gsi = gsi_for_stmt (final_stmt);
4450 }
4451 }
4452
4453 finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt, MULT_EXPR);
4454 }
4455
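/* E.g. (informal sketch) for __builtin_mul_overflow with two
   full-range signed _BitInt(256) operands, the code above computes the
   whole product into a wide enough buffer,
     .MULBITINT (&buf, 512, &a, -256, &b, -256);
   and derives the overflow flag by checking whether bits [255, 512)
   of buf are all copies of the sign bit.  */
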
4456 /* Lower REALPART_EXPR or IMAGPART_EXPR stmt extracting part of result from
4457 .{ADD,SUB,MUL}_OVERFLOW call. */
4458
4459 void
4460 bitint_large_huge::lower_cplxpart_stmt (tree obj, gimple *stmt)
4461 {
4462 tree rhs1 = gimple_assign_rhs1 (stmt);
4463 rhs1 = TREE_OPERAND (rhs1, 0);
4464 if (obj == NULL_TREE)
4465 {
4466 int part = var_to_partition (m_map, gimple_assign_lhs (stmt));
4467 gcc_assert (m_vars[part] != NULL_TREE);
4468 obj = m_vars[part];
4469 }
4470 if (TREE_CODE (rhs1) == SSA_NAME
4471 && (m_names == NULL
4472 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
4473 {
4474 lower_call (obj, SSA_NAME_DEF_STMT (rhs1));
4475 return;
4476 }
4477 int part = var_to_partition (m_map, rhs1);
4478 gcc_assert (m_vars[part] != NULL_TREE);
4479 tree var = m_vars[part];
4480 unsigned HOST_WIDE_INT nelts
4481 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
4482 tree atype = build_array_type_nelts (m_limb_type, nelts);
4483 if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
4484 obj = build1 (VIEW_CONVERT_EXPR, atype, obj);
4485 tree off = build_int_cst (build_pointer_type (TREE_TYPE (var)),
4486 gimple_assign_rhs_code (stmt) == REALPART_EXPR
4487 ? 0 : nelts * m_limb_size);
4488 tree v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), off);
4489 gimple *g = gimple_build_assign (obj, v2);
4490 insert_before (g);
4491 }
4492
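/* I.e. REALPART_EXPR copies the first NELTS limbs of the underlying
   variable of the .{ADD,SUB,MUL}_OVERFLOW result into OBJ, while
   IMAGPART_EXPR copies the following NELTS limbs holding the overflow
   flag.  */
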
4493 /* Lower COMPLEX_EXPR stmt. */
4494
4495 void
4496 bitint_large_huge::lower_complexexpr_stmt (gimple *stmt)
4497 {
4498 tree lhs = gimple_assign_lhs (stmt);
4499 tree rhs1 = gimple_assign_rhs1 (stmt);
4500 tree rhs2 = gimple_assign_rhs2 (stmt);
4501 int part = var_to_partition (m_map, lhs);
4502 gcc_assert (m_vars[part] != NULL_TREE);
4503 lhs = m_vars[part];
4504 unsigned HOST_WIDE_INT nelts
4505 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (rhs1))) / limb_prec;
4506 tree atype = build_array_type_nelts (m_limb_type, nelts);
4507 tree zero = build_zero_cst (build_pointer_type (TREE_TYPE (lhs)));
4508 tree v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), zero);
4509 tree v2;
4510 if (TREE_CODE (rhs1) == SSA_NAME)
4511 {
4512 part = var_to_partition (m_map, rhs1);
4513 gcc_assert (m_vars[part] != NULL_TREE);
4514 v2 = m_vars[part];
4515 }
4516 else if (integer_zerop (rhs1))
4517 v2 = build_zero_cst (atype);
4518 else
4519 v2 = tree_output_constant_def (rhs1);
4520 if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
4521 v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
4522 gimple *g = gimple_build_assign (v1, v2);
4523 insert_before (g);
4524 tree off = fold_convert (build_pointer_type (TREE_TYPE (lhs)),
4525 TYPE_SIZE_UNIT (atype));
4526 v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), off);
4527 if (TREE_CODE (rhs2) == SSA_NAME)
4528 {
4529 part = var_to_partition (m_map, rhs2);
4530 gcc_assert (m_vars[part] != NULL_TREE);
4531 v2 = m_vars[part];
4532 }
4533 else if (integer_zerop (rhs2))
4534 v2 = build_zero_cst (atype);
4535 else
4536 v2 = tree_output_constant_def (rhs2);
4537 if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
4538 v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
4539 g = gimple_build_assign (v1, v2);
4540 insert_before (g);
4541 }
4542
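/* I.e. a COMPLEX_EXPR is lowered to two limb-array block copies, the
   real part into the first half of the destination variable and the
   imaginary part into the second half.  */
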
4543 /* Lower a .{CLZ,CTZ,CLRSB,FFS,PARITY,POPCOUNT} call with one large/huge _BitInt
4544 argument. */
4545
4546 void
4547 bitint_large_huge::lower_bit_query (gimple *stmt)
4548 {
4549 tree arg0 = gimple_call_arg (stmt, 0);
4550 tree arg1 = (gimple_call_num_args (stmt) == 2
4551 ? gimple_call_arg (stmt, 1) : NULL_TREE);
4552 tree lhs = gimple_call_lhs (stmt);
4553 gimple *g;
4554
4555 if (!lhs)
4556 {
4557 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4558 gsi_remove (&gsi, true);
4559 return;
4560 }
4561 tree type = TREE_TYPE (arg0);
4562 gcc_assert (TREE_CODE (type) == BITINT_TYPE);
4563 bitint_prec_kind kind = bitint_precision_kind (type);
4564 gcc_assert (kind >= bitint_prec_large);
4565 enum internal_fn ifn = gimple_call_internal_fn (stmt);
4566 enum built_in_function fcode = END_BUILTINS;
4567 gcc_assert (TYPE_PRECISION (unsigned_type_node) == limb_prec
4568 || TYPE_PRECISION (long_unsigned_type_node) == limb_prec
4569 || TYPE_PRECISION (long_long_unsigned_type_node) == limb_prec);
4570 switch (ifn)
4571 {
4572 case IFN_CLZ:
4573 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4574 fcode = BUILT_IN_CLZ;
4575 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4576 fcode = BUILT_IN_CLZL;
4577 else
4578 fcode = BUILT_IN_CLZLL;
4579 break;
4580 case IFN_FFS:
4581 /* .FFS (X) is .CTZ (X, -1) + 1, though under the hood
4582 we don't add the addend at the end. */
4583 arg1 = integer_zero_node;
4584 /* FALLTHRU */
4585 case IFN_CTZ:
4586 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4587 fcode = BUILT_IN_CTZ;
4588 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4589 fcode = BUILT_IN_CTZL;
4590 else
4591 fcode = BUILT_IN_CTZLL;
4592 m_upwards = true;
4593 break;
4594 case IFN_CLRSB:
4595 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4596 fcode = BUILT_IN_CLRSB;
4597 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4598 fcode = BUILT_IN_CLRSBL;
4599 else
4600 fcode = BUILT_IN_CLRSBLL;
4601 break;
4602 case IFN_PARITY:
4603 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4604 fcode = BUILT_IN_PARITY;
4605 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4606 fcode = BUILT_IN_PARITYL;
4607 else
4608 fcode = BUILT_IN_PARITYLL;
4609 m_upwards = true;
4610 break;
4611 case IFN_POPCOUNT:
4612 if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
4613 fcode = BUILT_IN_POPCOUNT;
4614 else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
4615 fcode = BUILT_IN_POPCOUNTL;
4616 else
4617 fcode = BUILT_IN_POPCOUNTLL;
4618 m_upwards = true;
4619 break;
4620 default:
4621 gcc_unreachable ();
4622 }
4623 tree fndecl = builtin_decl_explicit (fcode), res = NULL_TREE;
4624 unsigned cnt = 0, rem = 0, end = 0, prec = TYPE_PRECISION (type);
4625 struct bq_details { edge e; tree val, addend; } *bqp = NULL;
4626 basic_block edge_bb = NULL;
4627 if (m_upwards)
4628 {
4629 tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
4630 if (kind == bitint_prec_large)
4631 cnt = CEIL (prec, limb_prec);
4632 else
4633 {
4634 rem = (prec % (2 * limb_prec));
4635 end = (prec - rem) / limb_prec;
4636 cnt = 2 + CEIL (rem, limb_prec);
4637 idx = idx_first = create_loop (size_zero_node, &idx_next);
4638 }
4639
4640 if (ifn == IFN_CTZ || ifn == IFN_FFS)
4641 {
4642 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4643 gsi_prev (&gsi);
4644 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4645 edge_bb = e->src;
4646 if (kind == bitint_prec_large)
4647 m_gsi = gsi_end_bb (edge_bb);
4648 bqp = XALLOCAVEC (struct bq_details, cnt);
4649 }
4650 else
4651 m_after_stmt = stmt;
4652 if (kind != bitint_prec_large)
4653 m_upwards_2limb = end;
4654
4655 for (unsigned i = 0; i < cnt; i++)
4656 {
4657 m_data_cnt = 0;
4658 if (kind == bitint_prec_large)
4659 idx = size_int (i);
4660 else if (i >= 2)
4661 idx = size_int (end + (i > 2));
4662
4663 tree rhs1 = handle_operand (arg0, idx);
4664 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
4665 {
4666 if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
4667 rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
4668 rhs1 = add_cast (m_limb_type, rhs1);
4669 }
4670
4671 tree in, out, tem;
4672 if (ifn == IFN_PARITY)
4673 in = prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
4674 else if (ifn == IFN_FFS)
4675 in = prepare_data_in_out (integer_one_node, idx, &out);
4676 else
4677 in = prepare_data_in_out (integer_zero_node, idx, &out);
4678
4679 switch (ifn)
4680 {
4681 case IFN_CTZ:
4682 case IFN_FFS:
4683 g = gimple_build_cond (NE_EXPR, rhs1,
4684 build_zero_cst (m_limb_type),
4685 NULL_TREE, NULL_TREE);
4686 insert_before (g);
4687 edge e1, e2;
4688 e1 = split_block (gsi_bb (m_gsi), g);
4689 e1->flags = EDGE_FALSE_VALUE;
4690 e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
4691 e1->probability = profile_probability::unlikely ();
4692 e2->probability = e1->probability.invert ();
4693 if (i == 0)
4694 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4695 m_gsi = gsi_after_labels (e1->dest);
4696 bqp[i].e = e2;
4697 bqp[i].val = rhs1;
4698 if (tree_fits_uhwi_p (idx))
4699 bqp[i].addend
4700 = build_int_cst (integer_type_node,
4701 tree_to_uhwi (idx) * limb_prec
4702 + (ifn == IFN_FFS));
4703 else
4704 {
4705 bqp[i].addend = in;
4706 if (i == 1)
4707 res = out;
4708 else
4709 res = make_ssa_name (integer_type_node);
4710 g = gimple_build_assign (res, PLUS_EXPR, in,
4711 build_int_cst (integer_type_node,
4712 limb_prec));
4713 insert_before (g);
4714 m_data[m_data_cnt] = res;
4715 }
4716 break;
4717 case IFN_PARITY:
4718 if (!integer_zerop (in))
4719 {
4720 if (kind == bitint_prec_huge && i == 1)
4721 res = out;
4722 else
4723 res = make_ssa_name (m_limb_type);
4724 g = gimple_build_assign (res, BIT_XOR_EXPR, in, rhs1);
4725 insert_before (g);
4726 }
4727 else
4728 res = rhs1;
4729 m_data[m_data_cnt] = res;
4730 break;
4731 case IFN_POPCOUNT:
4732 g = gimple_build_call (fndecl, 1, rhs1);
4733 tem = make_ssa_name (integer_type_node);
4734 gimple_call_set_lhs (g, tem);
4735 insert_before (g);
4736 if (!integer_zerop (in))
4737 {
4738 if (kind == bitint_prec_huge && i == 1)
4739 res = out;
4740 else
4741 res = make_ssa_name (integer_type_node);
4742 g = gimple_build_assign (res, PLUS_EXPR, in, tem);
4743 insert_before (g);
4744 }
4745 else
4746 res = tem;
4747 m_data[m_data_cnt] = res;
4748 break;
4749 default:
4750 gcc_unreachable ();
4751 }
4752
4753 m_first = false;
4754 if (kind == bitint_prec_huge && i <= 1)
4755 {
4756 if (i == 0)
4757 {
4758 idx = make_ssa_name (sizetype);
4759 g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
4760 size_one_node);
4761 insert_before (g);
4762 }
4763 else
4764 {
4765 g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
4766 size_int (2));
4767 insert_before (g);
4768 g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
4769 NULL_TREE, NULL_TREE);
4770 insert_before (g);
4771 if (ifn == IFN_CTZ || ifn == IFN_FFS)
4772 m_gsi = gsi_after_labels (edge_bb);
4773 else
4774 m_gsi = gsi_for_stmt (stmt);
4775 m_bb = NULL;
4776 }
4777 }
4778 }
4779 }
4780 else
4781 {
4782 tree idx = NULL_TREE, idx_next = NULL_TREE, first = NULL_TREE;
4783 int sub_one = 0;
4784 if (kind == bitint_prec_large)
4785 cnt = CEIL (prec, limb_prec);
4786 else
4787 {
4788 rem = prec % limb_prec;
4789 if (rem == 0 && (!TYPE_UNSIGNED (type) || ifn == IFN_CLRSB))
4790 rem = limb_prec;
4791 end = (prec - rem) / limb_prec;
4792 cnt = 1 + (rem != 0);
4793 if (ifn == IFN_CLRSB)
4794 sub_one = 1;
4795 }
4796
4797 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4798 gsi_prev (&gsi);
4799 edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4800 edge_bb = e->src;
4801 m_gsi = gsi_end_bb (edge_bb);
4802
4803 if (ifn == IFN_CLZ)
4804 bqp = XALLOCAVEC (struct bq_details, cnt);
4805 else
4806 {
4807 gsi = gsi_for_stmt (stmt);
4808 gsi_prev (&gsi);
4809 e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
4810 edge_bb = e->src;
4811 bqp = XALLOCAVEC (struct bq_details, 2 * cnt);
4812 }
4813
4814 for (unsigned i = 0; i < cnt; i++)
4815 {
4816 m_data_cnt = 0;
4817 if (kind == bitint_prec_large)
4818 idx = size_int (cnt - i - 1);
4819 else if (i == cnt - 1)
4820 idx = create_loop (size_int (end - 1), &idx_next);
4821 else
4822 idx = size_int (end);
4823
4824 tree rhs1 = handle_operand (arg0, idx);
4825 if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
4826 {
4827 if (ifn == IFN_CLZ && !TYPE_UNSIGNED (TREE_TYPE (rhs1)))
4828 rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
4829 else if (ifn == IFN_CLRSB && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
4830 rhs1 = add_cast (signed_type_for (TREE_TYPE (rhs1)), rhs1);
4831 rhs1 = add_cast (m_limb_type, rhs1);
4832 }
4833
4834 if (ifn == IFN_CLZ)
4835 {
4836 g = gimple_build_cond (NE_EXPR, rhs1,
4837 build_zero_cst (m_limb_type),
4838 NULL_TREE, NULL_TREE);
4839 insert_before (g);
4840 edge e1 = split_block (gsi_bb (m_gsi), g);
4841 e1->flags = EDGE_FALSE_VALUE;
4842 edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
4843 e1->probability = profile_probability::unlikely ();
4844 e2->probability = e1->probability.invert ();
4845 if (i == 0)
4846 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4847 m_gsi = gsi_after_labels (e1->dest);
4848 bqp[i].e = e2;
4849 bqp[i].val = rhs1;
4850 }
4851 else
4852 {
4853 if (i == 0)
4854 {
4855 first = rhs1;
4856 g = gimple_build_assign (make_ssa_name (m_limb_type),
4857 PLUS_EXPR, rhs1,
4858 build_int_cst (m_limb_type, 1));
4859 insert_before (g);
4860 g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
4861 build_int_cst (m_limb_type, 1),
4862 NULL_TREE, NULL_TREE);
4863 insert_before (g);
4864 }
4865 else
4866 {
4867 g = gimple_build_assign (make_ssa_name (m_limb_type),
4868 BIT_XOR_EXPR, rhs1, first);
4869 insert_before (g);
4870 tree stype = signed_type_for (m_limb_type);
4871 g = gimple_build_cond (LT_EXPR,
4872 add_cast (stype,
4873 gimple_assign_lhs (g)),
4874 build_zero_cst (stype),
4875 NULL_TREE, NULL_TREE);
4876 insert_before (g);
4877 edge e1 = split_block (gsi_bb (m_gsi), g);
4878 e1->flags = EDGE_FALSE_VALUE;
4879 edge e2 = make_edge (e1->src, gimple_bb (stmt),
4880 EDGE_TRUE_VALUE);
4881 e1->probability = profile_probability::unlikely ();
4882 e2->probability = e1->probability.invert ();
4883 if (i == 1)
4884 set_immediate_dominator (CDI_DOMINATORS, e2->dest,
4885 e2->src);
4886 m_gsi = gsi_after_labels (e1->dest);
4887 bqp[2 * i].e = e2;
4888 g = gimple_build_cond (NE_EXPR, rhs1, first,
4889 NULL_TREE, NULL_TREE);
4890 insert_before (g);
4891 }
4892 edge e1 = split_block (gsi_bb (m_gsi), g);
4893 e1->flags = EDGE_FALSE_VALUE;
4894 edge e2 = make_edge (e1->src, edge_bb, EDGE_TRUE_VALUE);
4895 e1->probability = profile_probability::unlikely ();
4896 e2->probability = e1->probability.invert ();
4897 if (i == 0)
4898 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
4899 m_gsi = gsi_after_labels (e1->dest);
4900 bqp[2 * i + 1].e = e2;
4901 bqp[i].val = rhs1;
4902 }
4903 if (tree_fits_uhwi_p (idx))
4904 bqp[i].addend
4905 = build_int_cst (integer_type_node,
4906 (int) prec
4907 - (((int) tree_to_uhwi (idx) + 1)
4908 * limb_prec) - sub_one);
4909 else
4910 {
4911 tree in, out;
4912 in = build_int_cst (integer_type_node, rem - sub_one);
4913 m_first = true;
4914 in = prepare_data_in_out (in, idx, &out);
4915 out = m_data[m_data_cnt + 1];
4916 bqp[i].addend = in;
4917 g = gimple_build_assign (out, PLUS_EXPR, in,
4918 build_int_cst (integer_type_node,
4919 limb_prec));
4920 insert_before (g);
4921 m_data[m_data_cnt] = out;
4922 }
4923
4924 m_first = false;
4925 if (kind == bitint_prec_huge && i == cnt - 1)
4926 {
4927 g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
4928 size_int (-1));
4929 insert_before (g);
4930 g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
4931 NULL_TREE, NULL_TREE);
4932 insert_before (g);
4933 edge true_edge, false_edge;
4934 extract_true_false_edges_from_block (gsi_bb (m_gsi),
4935 &true_edge, &false_edge);
4936 m_gsi = gsi_after_labels (false_edge->dest);
4937 m_bb = NULL;
4938 }
4939 }
4940 }
4941 switch (ifn)
4942 {
4943 case IFN_CLZ:
4944 case IFN_CTZ:
4945 case IFN_FFS:
4946 gphi *phi1, *phi2, *phi3;
4947 basic_block bb;
4948 bb = gsi_bb (m_gsi);
4949 remove_edge (find_edge (bb, gimple_bb (stmt)));
4950 phi1 = create_phi_node (make_ssa_name (m_limb_type),
4951 gimple_bb (stmt));
4952 phi2 = create_phi_node (make_ssa_name (integer_type_node),
4953 gimple_bb (stmt));
4954 for (unsigned i = 0; i < cnt; i++)
4955 {
4956 add_phi_arg (phi1, bqp[i].val, bqp[i].e, UNKNOWN_LOCATION);
4957 add_phi_arg (phi2, bqp[i].addend, bqp[i].e, UNKNOWN_LOCATION);
4958 }
4959 if (arg1 == NULL_TREE)
4960 {
4961 g = gimple_build_builtin_unreachable (m_loc);
4962 insert_before (g);
4963 }
4964 m_gsi = gsi_for_stmt (stmt);
4965 g = gimple_build_call (fndecl, 1, gimple_phi_result (phi1));
4966 gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
4967 insert_before (g);
4968 if (arg1 == NULL_TREE)
4969 g = gimple_build_assign (lhs, PLUS_EXPR,
4970 gimple_phi_result (phi2),
4971 gimple_call_lhs (g));
4972 else
4973 {
4974 g = gimple_build_assign (make_ssa_name (integer_type_node),
4975 PLUS_EXPR, gimple_phi_result (phi2),
4976 gimple_call_lhs (g));
4977 insert_before (g);
4978 edge e1 = split_block (gimple_bb (stmt), g);
4979 edge e2 = make_edge (bb, e1->dest, EDGE_FALLTHRU);
4980 e2->probability = profile_probability::always ();
4981 set_immediate_dominator (CDI_DOMINATORS, e1->dest,
4982 get_immediate_dominator (CDI_DOMINATORS,
4983 e1->src));
4984 phi3 = create_phi_node (make_ssa_name (integer_type_node), e1->dest);
4985 add_phi_arg (phi3, gimple_assign_lhs (g), e1, UNKNOWN_LOCATION);
4986 add_phi_arg (phi3, arg1, e2, UNKNOWN_LOCATION);
4987 m_gsi = gsi_for_stmt (stmt);
4988 g = gimple_build_assign (lhs, gimple_phi_result (phi3));
4989 }
4990 gsi_replace (&m_gsi, g, true);
4991 break;
4992 case IFN_CLRSB:
4993 bb = gsi_bb (m_gsi);
4994 remove_edge (find_edge (bb, edge_bb));
4995 edge e;
4996 e = make_edge (bb, gimple_bb (stmt), EDGE_FALLTHRU);
4997 e->probability = profile_probability::always ();
4998 set_immediate_dominator (CDI_DOMINATORS, gimple_bb (stmt),
4999 get_immediate_dominator (CDI_DOMINATORS,
5000 edge_bb));
5001 phi1 = create_phi_node (make_ssa_name (m_limb_type),
5002 edge_bb);
5003 phi2 = create_phi_node (make_ssa_name (integer_type_node),
5004 edge_bb);
5005 phi3 = create_phi_node (make_ssa_name (integer_type_node),
5006 gimple_bb (stmt));
5007 for (unsigned i = 0; i < cnt; i++)
5008 {
5009 add_phi_arg (phi1, bqp[i].val, bqp[2 * i + 1].e, UNKNOWN_LOCATION);
5010 add_phi_arg (phi2, bqp[i].addend, bqp[2 * i + 1].e,
5011 UNKNOWN_LOCATION);
5012 tree a = bqp[i].addend;
5013 if (i && kind == bitint_prec_large)
5014 a = int_const_binop (PLUS_EXPR, a, integer_minus_one_node);
5015 if (i)
5016 add_phi_arg (phi3, a, bqp[2 * i].e, UNKNOWN_LOCATION);
5017 }
5018 add_phi_arg (phi3, build_int_cst (integer_type_node, prec - 1), e,
5019 UNKNOWN_LOCATION);
5020 m_gsi = gsi_after_labels (edge_bb);
5021 g = gimple_build_call (fndecl, 1,
5022 add_cast (signed_type_for (m_limb_type),
5023 gimple_phi_result (phi1)));
5024 gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
5025 insert_before (g);
5026 g = gimple_build_assign (make_ssa_name (integer_type_node),
5027 PLUS_EXPR, gimple_call_lhs (g),
5028 gimple_phi_result (phi2));
5029 insert_before (g);
5030 if (kind != bitint_prec_large)
5031 {
5032 g = gimple_build_assign (make_ssa_name (integer_type_node),
5033 PLUS_EXPR, gimple_assign_lhs (g),
5034 integer_one_node);
5035 insert_before (g);
5036 }
5037 add_phi_arg (phi3, gimple_assign_lhs (g),
5038 find_edge (edge_bb, gimple_bb (stmt)), UNKNOWN_LOCATION);
5039 m_gsi = gsi_for_stmt (stmt);
5040 g = gimple_build_assign (lhs, gimple_phi_result (phi3));
5041 gsi_replace (&m_gsi, g, true);
5042 break;
5043 case IFN_PARITY:
5044 g = gimple_build_call (fndecl, 1, res);
5045 gimple_call_set_lhs (g, lhs);
5046 gsi_replace (&m_gsi, g, true);
5047 break;
5048 case IFN_POPCOUNT:
5049 g = gimple_build_assign (lhs, res);
5050 gsi_replace (&m_gsi, g, true);
5051 break;
5052 default:
5053 gcc_unreachable ();
5054 }
5055 }
5056
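/* Informal sketch of the .CLZ lowering above with 64-bit limbs:
     for (i = nlimbs - 1; ; i--)
       if (limb[i] != 0)
         {
           res = __builtin_clzll (limb[i]) + (prec - (i + 1) * 64);
           break;
         }
   where an all-zero input reaches either __builtin_unreachable () or
   the value of the optional second .CLZ argument.  */
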
5057 /* Lower a call statement with one or more large/huge _BitInt
5058 arguments or a large/huge _BitInt return value. */
5059
5060 void
5061 bitint_large_huge::lower_call (tree obj, gimple *stmt)
5062 {
5063 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5064 unsigned int nargs = gimple_call_num_args (stmt);
5065 if (gimple_call_internal_p (stmt))
5066 switch (gimple_call_internal_fn (stmt))
5067 {
5068 case IFN_ADD_OVERFLOW:
5069 case IFN_SUB_OVERFLOW:
5070 case IFN_UBSAN_CHECK_ADD:
5071 case IFN_UBSAN_CHECK_SUB:
5072 lower_addsub_overflow (obj, stmt);
5073 return;
5074 case IFN_MUL_OVERFLOW:
5075 case IFN_UBSAN_CHECK_MUL:
5076 lower_mul_overflow (obj, stmt);
5077 return;
5078 case IFN_CLZ:
5079 case IFN_CTZ:
5080 case IFN_CLRSB:
5081 case IFN_FFS:
5082 case IFN_PARITY:
5083 case IFN_POPCOUNT:
5084 lower_bit_query (stmt);
5085 return;
5086 default:
5087 break;
5088 }
5089 for (unsigned int i = 0; i < nargs; ++i)
5090 {
5091 tree arg = gimple_call_arg (stmt, i);
5092 if (TREE_CODE (arg) != SSA_NAME
5093 || TREE_CODE (TREE_TYPE (arg)) != BITINT_TYPE
5094 || bitint_precision_kind (TREE_TYPE (arg)) <= bitint_prec_middle)
5095 continue;
5096 int p = var_to_partition (m_map, arg);
5097 tree v = m_vars[p];
5098 gcc_assert (v != NULL_TREE);
5099 if (!types_compatible_p (TREE_TYPE (arg), TREE_TYPE (v)))
5100 v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (arg), v);
5101 arg = make_ssa_name (TREE_TYPE (arg));
5102 gimple *g = gimple_build_assign (arg, v);
5103 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5104 gimple_call_set_arg (stmt, i, arg);
5105 if (m_preserved == NULL)
5106 m_preserved = BITMAP_ALLOC (NULL);
5107 bitmap_set_bit (m_preserved, SSA_NAME_VERSION (arg));
5108 }
5109 tree lhs = gimple_call_lhs (stmt);
5110 if (lhs
5111 && TREE_CODE (lhs) == SSA_NAME
5112 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5113 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
5114 {
5115 int p = var_to_partition (m_map, lhs);
5116 tree v = m_vars[p];
5117 gcc_assert (v != NULL_TREE);
5118 if (!types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (v)))
5119 v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), v);
5120 gimple_call_set_lhs (stmt, v);
5121 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
5122 }
5123 update_stmt (stmt);
5124 }
5125
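/* E.g. (informal sketch) a call foo (a_5) with a_5 of type
   _BitInt(256) is rewritten by the above as
     _7 = VIEW_CONVERT_EXPR <_BitInt(256)> (bitint.3);
     foo (_7);
   where bitint.3 stands for the limb-array variable of a_5's
   partition; a large/huge _BitInt lhs is similarly redirected to store
   directly into its partition variable.  */
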
5126 /* Lower __asm STMT which involves large/huge _BitInt values. */
5127
5128 void
5129 bitint_large_huge::lower_asm (gimple *stmt)
5130 {
5131 gasm *g = as_a <gasm *> (stmt);
5132 unsigned noutputs = gimple_asm_noutputs (g);
5133 unsigned ninputs = gimple_asm_ninputs (g);
5134
5135 for (unsigned i = 0; i < noutputs; ++i)
5136 {
5137 tree t = gimple_asm_output_op (g, i);
5138 tree s = TREE_VALUE (t);
5139 if (TREE_CODE (s) == SSA_NAME
5140 && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5141 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5142 {
5143 int part = var_to_partition (m_map, s);
5144 gcc_assert (m_vars[part] != NULL_TREE);
5145 TREE_VALUE (t) = m_vars[part];
5146 }
5147 }
5148 for (unsigned i = 0; i < ninputs; ++i)
5149 {
5150 tree t = gimple_asm_input_op (g, i);
5151 tree s = TREE_VALUE (t);
5152 if (TREE_CODE (s) == SSA_NAME
5153 && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5154 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5155 {
5156 int part = var_to_partition (m_map, s);
5157 gcc_assert (m_vars[part] != NULL_TREE);
5158 TREE_VALUE (t) = m_vars[part];
5159 }
5160 }
5161 update_stmt (stmt);
5162 }
5163
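/* I.e. any large/huge _BitInt SSA name appearing as an __asm operand
   is replaced above by its partition VAR_DECL, so the asm accesses
   the limb array in memory directly.  */
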
5164 /* Lower statement STMT which involves large/huge _BitInt values
5165 into code accessing individual limbs. */
5166
5167 void
5168 bitint_large_huge::lower_stmt (gimple *stmt)
5169 {
5170 m_first = true;
5171 m_lhs = NULL_TREE;
5172 m_data.truncate (0);
5173 m_data_cnt = 0;
5174 m_gsi = gsi_for_stmt (stmt);
5175 m_after_stmt = NULL;
5176 m_bb = NULL;
5177 m_init_gsi = m_gsi;
5178 gsi_prev (&m_init_gsi);
5179 m_preheader_bb = NULL;
5180 m_upwards_2limb = 0;
5181 m_upwards = false;
5182 m_var_msb = false;
5183 m_cast_conditional = false;
5184 m_bitfld_load = 0;
5185 m_loc = gimple_location (stmt);
5186 if (is_gimple_call (stmt))
5187 {
5188 lower_call (NULL_TREE, stmt);
5189 return;
5190 }
5191 if (gimple_code (stmt) == GIMPLE_ASM)
5192 {
5193 lower_asm (stmt);
5194 return;
5195 }
5196 tree lhs = NULL_TREE, cmp_op1 = NULL_TREE, cmp_op2 = NULL_TREE;
5197 tree_code cmp_code = comparison_op (stmt, &cmp_op1, &cmp_op2);
5198 bool eq_p = (cmp_code == EQ_EXPR || cmp_code == NE_EXPR);
5199 bool mergeable_cast_p = false;
5200 bool final_cast_p = false;
5201 if (gimple_assign_cast_p (stmt))
5202 {
5203 lhs = gimple_assign_lhs (stmt);
5204 tree rhs1 = gimple_assign_rhs1 (stmt);
5205 if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5206 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
5207 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
5208 mergeable_cast_p = true;
5209 else if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
5210 && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
5211 && INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
5212 {
5213 final_cast_p = true;
5214 if (TREE_CODE (rhs1) == SSA_NAME
5215 && (m_names == NULL
5216 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
5217 {
5218 gimple *g = SSA_NAME_DEF_STMT (rhs1);
5219 if (is_gimple_assign (g)
5220 && gimple_assign_rhs_code (g) == IMAGPART_EXPR)
5221 {
5222 tree rhs2 = TREE_OPERAND (gimple_assign_rhs1 (g), 0);
5223 if (TREE_CODE (rhs2) == SSA_NAME
5224 && (m_names == NULL
5225 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs2))))
5226 {
5227 g = SSA_NAME_DEF_STMT (rhs2);
5228 int ovf = optimizable_arith_overflow (g);
5229 if (ovf == 2)
5230 /* If .{ADD,SUB,MUL}_OVERFLOW has both REALPART_EXPR
5231 and IMAGPART_EXPR uses, where the latter is cast to
5232 non-_BitInt, it will be optimized when handling
5233 the REALPART_EXPR. */
5234 return;
5235 if (ovf == 1)
5236 {
5237 lower_call (NULL_TREE, g);
5238 return;
5239 }
5240 }
5241 }
5242 }
5243 }
5244 }
5245 if (gimple_store_p (stmt))
5246 {
5247 tree rhs1 = gimple_assign_rhs1 (stmt);
5248 if (TREE_CODE (rhs1) == SSA_NAME
5249 && (m_names == NULL
5250 || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
5251 {
5252 gimple *g = SSA_NAME_DEF_STMT (rhs1);
5253 m_loc = gimple_location (g);
5254 lhs = gimple_assign_lhs (stmt);
5255 if (is_gimple_assign (g) && !mergeable_op (g))
5256 switch (gimple_assign_rhs_code (g))
5257 {
5258 case LSHIFT_EXPR:
5259 case RSHIFT_EXPR:
5260 lower_shift_stmt (lhs, g);
5261 handled:
5262 m_gsi = gsi_for_stmt (stmt);
5263 unlink_stmt_vdef (stmt);
5264 release_ssa_name (gimple_vdef (stmt));
5265 gsi_remove (&m_gsi, true);
5266 return;
5267 case MULT_EXPR:
5268 case TRUNC_DIV_EXPR:
5269 case TRUNC_MOD_EXPR:
5270 lower_muldiv_stmt (lhs, g);
5271 goto handled;
5272 case FIX_TRUNC_EXPR:
5273 lower_float_conv_stmt (lhs, g);
5274 goto handled;
5275 case REALPART_EXPR:
5276 case IMAGPART_EXPR:
5277 lower_cplxpart_stmt (lhs, g);
5278 goto handled;
5279 default:
5280 break;
5281 }
5282 else if (optimizable_arith_overflow (g) == 3)
5283 {
5284 lower_call (lhs, g);
5285 goto handled;
5286 }
5287 m_loc = gimple_location (stmt);
5288 }
5289 }
5290 if (mergeable_op (stmt)
5291 || gimple_store_p (stmt)
5292 || gimple_assign_load_p (stmt)
5293 || eq_p
5294 || mergeable_cast_p)
5295 {
5296 lhs = lower_mergeable_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
5297 if (!eq_p)
5298 return;
5299 }
5300 else if (cmp_code != ERROR_MARK)
5301 lhs = lower_comparison_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
5302 if (cmp_code != ERROR_MARK)
5303 {
5304 if (gimple_code (stmt) == GIMPLE_COND)
5305 {
5306 gcond *cstmt = as_a <gcond *> (stmt);
5307 gimple_cond_set_lhs (cstmt, lhs);
5308 gimple_cond_set_rhs (cstmt, boolean_false_node);
5309 gimple_cond_set_code (cstmt, cmp_code);
5310 update_stmt (stmt);
5311 return;
5312 }
5313 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
5314 {
5315 tree cond = build2 (cmp_code, boolean_type_node, lhs,
5316 boolean_false_node);
5317 gimple_assign_set_rhs1 (stmt, cond);
5318 lhs = gimple_assign_lhs (stmt);
5319 gcc_assert (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
5320 || (bitint_precision_kind (TREE_TYPE (lhs))
5321 <= bitint_prec_middle));
5322 update_stmt (stmt);
5323 return;
5324 }
5325 gimple_assign_set_rhs1 (stmt, lhs);
5326 gimple_assign_set_rhs2 (stmt, boolean_false_node);
5327 gimple_assign_set_rhs_code (stmt, cmp_code);
5328 update_stmt (stmt);
5329 return;
5330 }
5331 if (final_cast_p)
5332 {
5333 tree lhs_type = TREE_TYPE (lhs);
5334 /* Add support for 3 or more limbs filled in from normal integral
5335 type if this assert fails. If no target chooses limb mode smaller
5336 than half of largest supported normal integral type, this will not
5337 be needed. */
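/* Illustrative example, not from the original comments: on x86_64 with
   64-bit limbs, a final cast like
     unsigned __int128 x = (unsigned __int128) y;
   with _BitInt(256) y fetches limbs 0 and 1 of y and combines them
   below as r1 | ((unsigned __int128) r2 << 64).  */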
5338 gcc_assert (TYPE_PRECISION (lhs_type) <= 2 * limb_prec);
5339 gimple *g;
5340 if (TREE_CODE (lhs_type) == BITINT_TYPE
5341 && bitint_precision_kind (lhs_type) == bitint_prec_middle)
5342 lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (lhs_type),
5343 TYPE_UNSIGNED (lhs_type));
5344 m_data_cnt = 0;
5345 tree rhs1 = gimple_assign_rhs1 (stmt);
5346 tree r1 = handle_operand (rhs1, size_int (0));
5347 if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
5348 r1 = add_cast (lhs_type, r1);
5349 if (TYPE_PRECISION (lhs_type) > limb_prec)
5350 {
5351 m_data_cnt = 0;
5352 m_first = false;
5353 tree r2 = handle_operand (rhs1, size_int (1));
5354 r2 = add_cast (lhs_type, r2);
5355 g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
5356 build_int_cst (unsigned_type_node,
5357 limb_prec));
5358 insert_before (g);
5359 g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
5360 gimple_assign_lhs (g));
5361 insert_before (g);
5362 r1 = gimple_assign_lhs (g);
5363 }
5364 if (lhs_type != TREE_TYPE (lhs))
5365 g = gimple_build_assign (lhs, NOP_EXPR, r1);
5366 else
5367 g = gimple_build_assign (lhs, r1);
5368 gsi_replace (&m_gsi, g, true);
5369 return;
5370 }
5371 if (is_gimple_assign (stmt))
5372 switch (gimple_assign_rhs_code (stmt))
5373 {
5374 case LSHIFT_EXPR:
5375 case RSHIFT_EXPR:
5376 lower_shift_stmt (NULL_TREE, stmt);
5377 return;
5378 case MULT_EXPR:
5379 case TRUNC_DIV_EXPR:
5380 case TRUNC_MOD_EXPR:
5381 lower_muldiv_stmt (NULL_TREE, stmt);
5382 return;
5383 case FIX_TRUNC_EXPR:
5384 case FLOAT_EXPR:
5385 lower_float_conv_stmt (NULL_TREE, stmt);
5386 return;
5387 case REALPART_EXPR:
5388 case IMAGPART_EXPR:
5389 lower_cplxpart_stmt (NULL_TREE, stmt);
5390 return;
5391 case COMPLEX_EXPR:
5392 lower_complexexpr_stmt (stmt);
5393 return;
5394 default:
5395 break;
5396 }
5397 gcc_unreachable ();
5398 }
5399
5400 /* Helper for walk_non_aliased_vuses. Determine if we arrived at
5401 the desired memory state. */
5402
5403 void *
5404 vuse_eq (ao_ref *, tree vuse1, void *data)
5405 {
5406 tree vuse2 = (tree) data;
5407 if (vuse1 == vuse2)
5408 return data;
5409
5410 return NULL;
5411 }
5412
5413 /* Return true if STMT uses a library function and needs to take
5414 address of its inputs. We need to avoid bit-fields in those
5415 cases. */
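/* E.g. large/huge multiplication, division and conversions to floating
   point aren't expanded inline but emitted as libgcc calls (such as
   __mulbitint3 or __divmodbitint4) which take addresses of the operand
   limb arrays; bit-field loads therefore mustn't be sunk into such
   statements.  */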
5416
5417 bool
5418 stmt_needs_operand_addr (gimple *stmt)
5419 {
5420 if (is_gimple_assign (stmt))
5421 switch (gimple_assign_rhs_code (stmt))
5422 {
5423 case MULT_EXPR:
5424 case TRUNC_DIV_EXPR:
5425 case TRUNC_MOD_EXPR:
5426 case FLOAT_EXPR:
5427 return true;
5428 default:
5429 break;
5430 }
5431 else if (gimple_call_internal_p (stmt, IFN_MUL_OVERFLOW)
5432 || gimple_call_internal_p (stmt, IFN_UBSAN_CHECK_MUL))
5433 return true;
5434 return false;
5435 }
5436
5437 /* Dominator walker used to discover which large/huge _BitInt
5438 loads could be sunk into all their uses. */
5439
5440 class bitint_dom_walker : public dom_walker
5441 {
5442 public:
5443 bitint_dom_walker (bitmap names, bitmap loads)
5444 : dom_walker (CDI_DOMINATORS), m_names (names), m_loads (loads) {}
5445
5446 edge before_dom_children (basic_block) final override;
5447
5448 private:
5449 bitmap m_names, m_loads;
5450 };
5451
5452 edge
5453 bitint_dom_walker::before_dom_children (basic_block bb)
5454 {
5455 gphi *phi = get_virtual_phi (bb);
5456 tree vop;
5457 if (phi)
5458 vop = gimple_phi_result (phi);
5459 else if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
5460 vop = NULL_TREE;
5461 else
5462 vop = (tree) get_immediate_dominator (CDI_DOMINATORS, bb)->aux;
5463
5464 auto_vec<tree, 16> worklist;
5465 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5466 !gsi_end_p (gsi); gsi_next (&gsi))
5467 {
5468 gimple *stmt = gsi_stmt (gsi);
5469 if (is_gimple_debug (stmt))
5470 continue;
5471
5472 if (!vop && gimple_vuse (stmt))
5473 vop = gimple_vuse (stmt);
5474
5475 tree cvop = vop;
5476 if (gimple_vdef (stmt))
5477 vop = gimple_vdef (stmt);
5478
5479 tree lhs = gimple_get_lhs (stmt);
5480 if (lhs
5481 && TREE_CODE (lhs) == SSA_NAME
5482 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5483 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
5484 && !bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
5485 /* If lhs of stmt is large/huge _BitInt SSA_NAME not in m_names,
5486 it means it will be handled in a loop or straight line code
5487 at the location of its (ultimate) immediate use, so for
5488 vop checking purposes check these only at the ultimate
5489 immediate use. */
5490 continue;
5491
5492 ssa_op_iter oi;
5493 use_operand_p use_p;
5494 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
5495 {
5496 tree s = USE_FROM_PTR (use_p);
5497 if (TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5498 && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5499 worklist.safe_push (s);
5500 }
5501
5502 bool needs_operand_addr = stmt_needs_operand_addr (stmt);
5503 while (worklist.length () > 0)
5504 {
5505 tree s = worklist.pop ();
5506
5507 if (!bitmap_bit_p (m_names, SSA_NAME_VERSION (s)))
5508 {
5509 gimple *g = SSA_NAME_DEF_STMT (s);
5510 needs_operand_addr |= stmt_needs_operand_addr (g);
5511 FOR_EACH_SSA_USE_OPERAND (use_p, g, oi, SSA_OP_USE)
5512 {
5513 tree s2 = USE_FROM_PTR (use_p);
5514 if (TREE_CODE (TREE_TYPE (s2)) == BITINT_TYPE
5515 && (bitint_precision_kind (TREE_TYPE (s2))
5516 >= bitint_prec_large))
5517 worklist.safe_push (s2);
5518 }
5519 continue;
5520 }
5521 if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
5522 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
5523 {
5524 tree rhs = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
5525 if (TREE_CODE (rhs) == SSA_NAME
5526 && bitmap_bit_p (m_loads, SSA_NAME_VERSION (rhs)))
5527 s = rhs;
5528 else
5529 continue;
5530 }
5531 else if (!bitmap_bit_p (m_loads, SSA_NAME_VERSION (s)))
5532 continue;
5533
5534 tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
5535 if (needs_operand_addr
5536 && TREE_CODE (rhs1) == COMPONENT_REF
5537 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
5538 {
5539 tree fld = TREE_OPERAND (rhs1, 1);
5540 /* For little-endian, we can allow as inputs bit-fields
5541 which start at a limb boundary. */
5542 if (DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
5543 && tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld))
5544 && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
5545 % limb_prec) == 0)
5546 ;
5547 else
5548 {
5549 bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
5550 continue;
5551 }
5552 }
5553
5554 ao_ref ref;
5555 ao_ref_init (&ref, rhs1);
5556 tree lvop = gimple_vuse (SSA_NAME_DEF_STMT (s));
5557 unsigned limit = 64;
5558 tree vuse = cvop;
5559 if (vop != cvop
5560 && is_gimple_assign (stmt)
5561 && gimple_store_p (stmt)
5562 && !operand_equal_p (lhs,
5563 gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s)),
5564 0))
5565 vuse = vop;
5566 if (vuse != lvop
5567 && walk_non_aliased_vuses (&ref, vuse, false, vuse_eq,
5568 NULL, NULL, limit, lvop) == NULL)
5569 bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
5570 }
5571 }
5572
5573 bb->aux = (void *) vop;
5574 return NULL;
5575 }
5576
5577 }
5578
5579 /* Replacement for normal processing of STMT in tree-ssa-coalesce.cc
5580 build_ssa_conflict_graph.
5581 The differences are:
5582 1) don't process assignments with large/huge _BitInt lhs not in NAMES
5583 2) for large/huge _BitInt multiplication/division/modulo process def
5584 only after processing uses rather than before to make uses conflict
5585 with the definition
5586 3) for large/huge _BitInt uses not in NAMES mark the uses of their
5587 SSA_NAME_DEF_STMT (recursively), because those uses will be sunk into
5588 the final statement. */
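/* Example for 2), added for illustration: in _BitInt(512)
   d = a * b; the uses of a and b are processed before the def of d,
   so d conflicts with both operands and won't be coalesced with
   either; the library routine implementing the multiplication may
   write destination limbs before it has read all source limbs.  */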
5589
5590 void
5591 build_bitint_stmt_ssa_conflicts (gimple *stmt, live_track *live,
5592 ssa_conflicts *graph, bitmap names,
5593 void (*def) (live_track *, tree,
5594 ssa_conflicts *),
5595 void (*use) (live_track *, tree))
5596 {
5597 bool muldiv_p = false;
5598 tree lhs = NULL_TREE;
5599 if (is_gimple_assign (stmt))
5600 {
5601 lhs = gimple_assign_lhs (stmt);
5602 if (TREE_CODE (lhs) == SSA_NAME
5603 && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5604 && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
5605 {
5606 if (!bitmap_bit_p (names, SSA_NAME_VERSION (lhs)))
5607 return;
5608 switch (gimple_assign_rhs_code (stmt))
5609 {
5610 case MULT_EXPR:
5611 case TRUNC_DIV_EXPR:
5612 case TRUNC_MOD_EXPR:
5613 muldiv_p = true;
5614 default:
5615 break;
5616 }
5617 }
5618 }
5619
5620 ssa_op_iter iter;
5621 tree var;
5622 if (!muldiv_p)
5623 {
5624 /* For stmts with more than one SSA_NAME definition pretend all the
5625 SSA_NAME outputs but the first one are live at this point, so
5626 that conflicts are added in between all those even when they are
5627 actually not really live after the asm, because expansion might
5628 copy those into pseudos after the asm and if multiple outputs
5629 share the same partition, it might overwrite those that should
5630 be live. E.g.
5631 asm volatile (".." : "=r" (a), "=r" (b) : "0" (a), "1" (b));
5632 return a;
5633 See PR70593. */
5634 bool first = true;
5635 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
5636 if (first)
5637 first = false;
5638 else
5639 use (live, var);
5640
5641 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
5642 def (live, var, graph);
5643 }
5644
5645 auto_vec<tree, 16> worklist;
5646 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
5647 if (TREE_CODE (TREE_TYPE (var)) == BITINT_TYPE
5648 && bitint_precision_kind (TREE_TYPE (var)) >= bitint_prec_large)
5649 {
5650 if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
5651 use (live, var);
5652 else
5653 worklist.safe_push (var);
5654 }
5655
5656 while (worklist.length () > 0)
5657 {
5658 tree s = worklist.pop ();
5659 FOR_EACH_SSA_TREE_OPERAND (var, SSA_NAME_DEF_STMT (s), iter, SSA_OP_USE)
5660 if (TREE_CODE (TREE_TYPE (var)) == BITINT_TYPE
5661 && bitint_precision_kind (TREE_TYPE (var)) >= bitint_prec_large)
5662 {
5663 if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
5664 use (live, var);
5665 else
5666 worklist.safe_push (var);
5667 }
5668 }
5669
5670 if (muldiv_p)
5671 def (live, lhs, graph);
5672 }
5673
5674 /* If STMT is .{ADD,SUB,MUL}_OVERFLOW with INTEGER_CST arguments,
5675 return the largest bitint_prec_kind of them, otherwise return
5676 bitint_prec_small. */
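/* E.g., illustratively, for _3 = .ADD_OVERFLOW (x_1, cst) where cst is
   a _BitInt(256) INTEGER_CST, this returns the precision kind of the
   constant's type even when the other argument and the result would
   otherwise be small.  */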
5677
5678 static bitint_prec_kind
5679 arith_overflow_arg_kind (gimple *stmt)
5680 {
5681 bitint_prec_kind ret = bitint_prec_small;
5682 if (is_gimple_call (stmt) && gimple_call_internal_p (stmt))
5683 switch (gimple_call_internal_fn (stmt))
5684 {
5685 case IFN_ADD_OVERFLOW:
5686 case IFN_SUB_OVERFLOW:
5687 case IFN_MUL_OVERFLOW:
5688 for (int i = 0; i < 2; ++i)
5689 {
5690 tree a = gimple_call_arg (stmt, i);
5691 if (TREE_CODE (a) == INTEGER_CST
5692 && TREE_CODE (TREE_TYPE (a)) == BITINT_TYPE)
5693 {
5694 bitint_prec_kind kind = bitint_precision_kind (TREE_TYPE (a));
5695 ret = MAX (ret, kind);
5696 }
5697 }
5698 break;
5699 default:
5700 break;
5701 }
5702 return ret;
5703 }
5704
5705 /* Entry point for _BitInt(N) operation lowering during optimization. */
5706
5707 static unsigned int
5708 gimple_lower_bitint (void)
5709 {
5710 small_max_prec = mid_min_prec = large_min_prec = huge_min_prec = 0;
5711 limb_prec = 0;
5712
5713 unsigned int i;
5714 for (i = 0; i < num_ssa_names; ++i)
5715 {
5716 tree s = ssa_name (i);
5717 if (s == NULL)
5718 continue;
5719 tree type = TREE_TYPE (s);
5720 if (TREE_CODE (type) == COMPLEX_TYPE)
5721 {
5722 if (arith_overflow_arg_kind (SSA_NAME_DEF_STMT (s))
5723 != bitint_prec_small)
5724 break;
5725 type = TREE_TYPE (type);
5726 }
5727 if (TREE_CODE (type) == BITINT_TYPE
5728 && bitint_precision_kind (type) != bitint_prec_small)
5729 break;
5730 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
5731 into memory. Such functions could have no large/huge SSA_NAMEs. */
5732 if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
5733 {
5734 gimple *g = SSA_NAME_DEF_STMT (s);
5735 if (is_gimple_assign (g) && gimple_store_p (g))
5736 {
5737 tree t = gimple_assign_rhs1 (g);
5738 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
5739 && (bitint_precision_kind (TREE_TYPE (t))
5740 >= bitint_prec_large))
5741 break;
5742 }
5743 }
5744 /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
5745 to floating point types need to be rewritten. */
5746 else if (SCALAR_FLOAT_TYPE_P (type))
5747 {
5748 gimple *g = SSA_NAME_DEF_STMT (s);
5749 if (is_gimple_assign (g) && gimple_assign_rhs_code (g) == FLOAT_EXPR)
5750 {
5751 tree t = gimple_assign_rhs1 (g);
5752 if (TREE_CODE (t) == INTEGER_CST
5753 && TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
5754 && (bitint_precision_kind (TREE_TYPE (t))
5755 != bitint_prec_small))
5756 break;
5757 }
5758 }
5759 }
5760 if (i == num_ssa_names)
5761 return 0;
5762
5763 basic_block bb;
5764 auto_vec<gimple *, 4> switch_statements;
5765 FOR_EACH_BB_FN (bb, cfun)
5766 {
5767 if (gswitch *swtch = safe_dyn_cast <gswitch *> (*gsi_last_bb (bb)))
5768 {
5769 tree idx = gimple_switch_index (swtch);
5770 if (TREE_CODE (TREE_TYPE (idx)) != BITINT_TYPE
5771 || bitint_precision_kind (TREE_TYPE (idx)) < bitint_prec_large)
5772 continue;
5773
5774 if (optimize)
5775 group_case_labels_stmt (swtch);
5776 switch_statements.safe_push (swtch);
5777 }
5778 }
5779
5780 if (!switch_statements.is_empty ())
5781 {
5782 bool expanded = false;
5783 gimple *stmt;
5784 unsigned int j;
5785 i = 0;
5786 FOR_EACH_VEC_ELT (switch_statements, j, stmt)
5787 {
5788 gswitch *swtch = as_a<gswitch *> (stmt);
5789 tree_switch_conversion::switch_decision_tree dt (swtch);
5790 expanded |= dt.analyze_switch_statement ();
5791 }
5792
5793 if (expanded)
5794 {
5795 free_dominance_info (CDI_DOMINATORS);
5796 free_dominance_info (CDI_POST_DOMINATORS);
5797 mark_virtual_operands_for_renaming (cfun);
5798 cleanup_tree_cfg (TODO_update_ssa);
5799 }
5800 }
5801
5802 struct bitint_large_huge large_huge;
5803 bool has_large_huge_parm_result = false;
5804 bool has_large_huge = false;
5805 unsigned int ret = 0, first_large_huge = ~0U;
5806 bool edge_insertions = false;
5807 for (; i < num_ssa_names; ++i)
5808 {
5809 tree s = ssa_name (i);
5810 if (s == NULL)
5811 continue;
5812 tree type = TREE_TYPE (s);
5813 if (TREE_CODE (type) == COMPLEX_TYPE)
5814 {
5815 if (arith_overflow_arg_kind (SSA_NAME_DEF_STMT (s))
5816 >= bitint_prec_large)
5817 has_large_huge = true;
5818 type = TREE_TYPE (type);
5819 }
5820 if (TREE_CODE (type) == BITINT_TYPE
5821 && bitint_precision_kind (type) >= bitint_prec_large)
5822 {
5823 if (first_large_huge == ~0U)
5824 first_large_huge = i;
5825 gimple *stmt = SSA_NAME_DEF_STMT (s), *g;
5826 gimple_stmt_iterator gsi;
5827 tree_code rhs_code;
5828 /* Unoptimize certain constructs to simpler alternatives to
5829 avoid having to lower all of them. */
5830 if (is_gimple_assign (stmt) && gimple_bb (stmt))
5831 switch (rhs_code = gimple_assign_rhs_code (stmt))
5832 {
5833 default:
5834 break;
5835 case LROTATE_EXPR:
5836 case RROTATE_EXPR:
5837 {
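/* Illustrative note: the rotate is expanded via the usual identity
     x lrotate n == (x << n) | (x >> (prec - n))
   performed in the corresponding unsigned type (and similarly for
   rrotate), so that only shifts remain to be lowered later.  */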
5838 first_large_huge = 0;
5839 location_t loc = gimple_location (stmt);
5840 gsi = gsi_for_stmt (stmt);
5841 tree rhs1 = gimple_assign_rhs1 (stmt);
5842 tree type = TREE_TYPE (rhs1);
5843 tree n = gimple_assign_rhs2 (stmt), m;
5844 tree p = build_int_cst (TREE_TYPE (n),
5845 TYPE_PRECISION (type));
5846 if (TREE_CODE (n) == INTEGER_CST)
5847 m = fold_build2 (MINUS_EXPR, TREE_TYPE (n), p, n);
5848 else
5849 {
5850 m = make_ssa_name (TREE_TYPE (n));
5851 g = gimple_build_assign (m, MINUS_EXPR, p, n);
5852 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5853 gimple_set_location (g, loc);
5854 }
5855 if (!TYPE_UNSIGNED (type))
5856 {
5857 tree utype = build_bitint_type (TYPE_PRECISION (type),
5858 1);
5859 if (TREE_CODE (rhs1) == INTEGER_CST)
5860 rhs1 = fold_convert (utype, rhs1);
5861 else
5862 {
5863 tree t = make_ssa_name (utype);
5864 g = gimple_build_assign (t, NOP_EXPR, rhs1);
5865 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5866 gimple_set_location (g, loc);
rhs1 = t;
5867 }
5868 }
5869 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
5870 rhs_code == LROTATE_EXPR
5871 ? LSHIFT_EXPR : RSHIFT_EXPR,
5872 rhs1, n);
5873 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5874 gimple_set_location (g, loc);
5875 tree op1 = gimple_assign_lhs (g);
5876 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
5877 rhs_code == LROTATE_EXPR
5878 ? RSHIFT_EXPR : LSHIFT_EXPR,
5879 rhs1, m);
5880 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5881 gimple_set_location (g, loc);
5882 tree op2 = gimple_assign_lhs (g);
5883 tree lhs = gimple_assign_lhs (stmt);
5884 if (!TYPE_UNSIGNED (type))
5885 {
5886 g = gimple_build_assign (make_ssa_name (TREE_TYPE (op1)),
5887 BIT_IOR_EXPR, op1, op2);
5888 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5889 gimple_set_location (g, loc);
5890 g = gimple_build_assign (lhs, NOP_EXPR,
5891 gimple_assign_lhs (g));
5892 }
5893 else
5894 g = gimple_build_assign (lhs, BIT_IOR_EXPR, op1, op2);
5895 gsi_replace (&gsi, g, true);
5896 gimple_set_location (g, loc);
5897 }
5898 break;
5899 case ABS_EXPR:
5900 case ABSU_EXPR:
5901 case MIN_EXPR:
5902 case MAX_EXPR:
5903 case COND_EXPR:
5904 first_large_huge = 0;
5905 gsi = gsi_for_stmt (stmt);
5906 tree lhs = gimple_assign_lhs (stmt);
5907 tree rhs1 = gimple_assign_rhs1 (stmt), rhs2 = NULL_TREE;
5908 location_t loc = gimple_location (stmt);
5909 if (rhs_code == ABS_EXPR)
5910 g = gimple_build_cond (LT_EXPR, rhs1,
5911 build_zero_cst (TREE_TYPE (rhs1)),
5912 NULL_TREE, NULL_TREE);
5913 else if (rhs_code == ABSU_EXPR)
5914 {
5915 rhs2 = make_ssa_name (TREE_TYPE (lhs));
5916 g = gimple_build_assign (rhs2, NOP_EXPR, rhs1);
5917 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5918 gimple_set_location (g, loc);
5919 g = gimple_build_cond (LT_EXPR, rhs1,
5920 build_zero_cst (TREE_TYPE (rhs1)),
5921 NULL_TREE, NULL_TREE);
5922 rhs1 = rhs2;
5923 }
5924 else if (rhs_code == MIN_EXPR || rhs_code == MAX_EXPR)
5925 {
5926 rhs2 = gimple_assign_rhs2 (stmt);
5927 if (TREE_CODE (rhs1) == INTEGER_CST)
5928 std::swap (rhs1, rhs2);
5929 g = gimple_build_cond (LT_EXPR, rhs1, rhs2,
5930 NULL_TREE, NULL_TREE);
5931 if (rhs_code == MAX_EXPR)
5932 std::swap (rhs1, rhs2);
5933 }
5934 else
5935 {
5936 g = gimple_build_cond (NE_EXPR, rhs1,
5937 build_zero_cst (TREE_TYPE (rhs1)),
5938 NULL_TREE, NULL_TREE);
5939 rhs1 = gimple_assign_rhs2 (stmt);
5940 rhs2 = gimple_assign_rhs3 (stmt);
5941 }
5942 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5943 gimple_set_location (g, loc);
5944 edge e1 = split_block (gsi_bb (gsi), g);
5945 edge e2 = split_block (e1->dest, (gimple *) NULL);
5946 edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
5947 e3->probability = profile_probability::even ();
5948 e1->flags = EDGE_TRUE_VALUE;
5949 e1->probability = e3->probability.invert ();
5950 if (dom_info_available_p (CDI_DOMINATORS))
5951 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
5952 if (rhs_code == ABS_EXPR || rhs_code == ABSU_EXPR)
5953 {
5954 gsi = gsi_after_labels (e1->dest);
5955 g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
5956 NEGATE_EXPR, rhs1);
5957 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
5958 gimple_set_location (g, loc);
5959 rhs2 = gimple_assign_lhs (g);
5960 std::swap (rhs1, rhs2);
5961 }
5962 gsi = gsi_for_stmt (stmt);
5963 gsi_remove (&gsi, true);
5964 gphi *phi = create_phi_node (lhs, e2->dest);
5965 add_phi_arg (phi, rhs1, e2, UNKNOWN_LOCATION);
5966 add_phi_arg (phi, rhs2, e3, UNKNOWN_LOCATION);
5967 break;
5968 }
5969 }
5970 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
5971 into memory. Such functions could have no large/huge SSA_NAMEs. */
5972 else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
5973 {
5974 gimple *g = SSA_NAME_DEF_STMT (s);
5975 if (is_gimple_assign (g) && gimple_store_p (g))
5976 {
5977 tree t = gimple_assign_rhs1 (g);
5978 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
5979 && (bitint_precision_kind (TREE_TYPE (t))
5980 >= bitint_prec_large))
5981 has_large_huge = true;
5982 }
5983 }
5984 /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
5985 to floating point types need to be rewritten. */
5986 else if (SCALAR_FLOAT_TYPE_P (type))
5987 {
5988 gimple *g = SSA_NAME_DEF_STMT (s);
5989 if (is_gimple_assign (g) && gimple_assign_rhs_code (g) == FLOAT_EXPR)
5990 {
5991 tree t = gimple_assign_rhs1 (g);
5992 if (TREE_CODE (t) == INTEGER_CST
5993 && TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
5994 && (bitint_precision_kind (TREE_TYPE (t))
5995 >= bitint_prec_large))
5996 has_large_huge = true;
5997 }
5998 }
5999 }
6000 for (i = first_large_huge; i < num_ssa_names; ++i)
6001 {
6002 tree s = ssa_name (i);
6003 if (s == NULL)
6004 continue;
6005 tree type = TREE_TYPE (s);
6006 if (TREE_CODE (type) == COMPLEX_TYPE)
6007 type = TREE_TYPE (type);
6008 if (TREE_CODE (type) == BITINT_TYPE
6009 && bitint_precision_kind (type) >= bitint_prec_large)
6010 {
6011 use_operand_p use_p;
6012 gimple *use_stmt;
6013 has_large_huge = true;
6014 if (optimize
6015 && optimizable_arith_overflow (SSA_NAME_DEF_STMT (s)))
6016 continue;
6017 /* Ignore large/huge _BitInt SSA_NAMEs which have single use in
6018 the same bb and could be handled in the same loop with the
6019 immediate use. */
6020 if (optimize
6021 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
6022 && single_imm_use (s, &use_p, &use_stmt)
6023 && gimple_bb (SSA_NAME_DEF_STMT (s)) == gimple_bb (use_stmt))
6024 {
6025 if (mergeable_op (SSA_NAME_DEF_STMT (s)))
6026 {
6027 if (mergeable_op (use_stmt))
6028 continue;
6029 tree_code cmp_code = comparison_op (use_stmt, NULL, NULL);
6030 if (cmp_code == EQ_EXPR || cmp_code == NE_EXPR)
6031 continue;
6032 if (gimple_assign_cast_p (use_stmt))
6033 {
6034 tree lhs = gimple_assign_lhs (use_stmt);
6035 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
6036 continue;
6037 }
6038 else if (gimple_store_p (use_stmt)
6039 && is_gimple_assign (use_stmt)
6040 && !gimple_has_volatile_ops (use_stmt)
6041 && !stmt_ends_bb_p (use_stmt))
6042 continue;
6043 }
6044 if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
6045 {
6046 tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
6047 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
6048 && ((is_gimple_assign (use_stmt)
6049 && (gimple_assign_rhs_code (use_stmt)
6050 != COMPLEX_EXPR))
6051 || gimple_code (use_stmt) == GIMPLE_COND)
6052 && (!gimple_store_p (use_stmt)
6053 || (is_gimple_assign (use_stmt)
6054 && !gimple_has_volatile_ops (use_stmt)
6055 && !stmt_ends_bb_p (use_stmt)))
6056 && (TREE_CODE (rhs1) != SSA_NAME
6057 || !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
6058 {
6059 if (TREE_CODE (TREE_TYPE (rhs1)) != BITINT_TYPE
6060 || (bitint_precision_kind (TREE_TYPE (rhs1))
6061 < bitint_prec_large))
6062 continue;
6063 if (is_gimple_assign (use_stmt))
6064 switch (gimple_assign_rhs_code (use_stmt))
6065 {
6066 case MULT_EXPR:
6067 case TRUNC_DIV_EXPR:
6068 case TRUNC_MOD_EXPR:
6069 case FLOAT_EXPR:
6070 /* Uses which use handle_operand_addr can't
6071 deal with nested casts. */
6072 if (TREE_CODE (rhs1) == SSA_NAME
6073 && gimple_assign_cast_p
6074 (SSA_NAME_DEF_STMT (rhs1))
6075 && has_single_use (rhs1)
6076 && (gimple_bb (SSA_NAME_DEF_STMT (rhs1))
6077 == gimple_bb (SSA_NAME_DEF_STMT (s))))
6078 goto force_name;
6079 break;
6080 default:
6081 break;
6082 }
6083 if ((TYPE_PRECISION (TREE_TYPE (rhs1))
6084 >= TYPE_PRECISION (TREE_TYPE (s)))
6085 && mergeable_op (use_stmt))
6086 continue;
6087 /* Prevent merging a widening non-mergeable cast
6088 on result of some narrower mergeable op
6089 together with later mergeable operations. E.g.
6090 result of _BitInt(223) addition shouldn't be
6091 sign-extended to _BitInt(513) and have another
6092 _BitInt(513) added to it, as handle_plus_minus
6093 with its PHI node handling inside of handle_cast
6094 will not work correctly. An exception is if
6095 use_stmt is a store, this is handled directly
6096 in lower_mergeable_stmt. */
6097 if (TREE_CODE (rhs1) != SSA_NAME
6098 || !has_single_use (rhs1)
6099 || (gimple_bb (SSA_NAME_DEF_STMT (rhs1))
6100 != gimple_bb (SSA_NAME_DEF_STMT (s)))
6101 || !mergeable_op (SSA_NAME_DEF_STMT (rhs1))
6102 || gimple_store_p (use_stmt))
6103 continue;
6104 if ((TYPE_PRECISION (TREE_TYPE (rhs1))
6105 < TYPE_PRECISION (TREE_TYPE (s)))
6106 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (rhs1)))
6107 {
6108 /* Another exception is if the widening cast is
6109 from mergeable same precision cast from something
6110 not mergeable. */
6111 tree rhs2
6112 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (rhs1));
6113 if (TREE_CODE (TREE_TYPE (rhs2)) == BITINT_TYPE
6114 && (TYPE_PRECISION (TREE_TYPE (rhs1))
6115 == TYPE_PRECISION (TREE_TYPE (rhs2))))
6116 {
6117 if (TREE_CODE (rhs2) != SSA_NAME
6118 || !has_single_use (rhs2)
6119 || (gimple_bb (SSA_NAME_DEF_STMT (rhs2))
6120 != gimple_bb (SSA_NAME_DEF_STMT (s)))
6121 || !mergeable_op (SSA_NAME_DEF_STMT (rhs2)))
6122 continue;
6123 }
6124 }
6125 }
6126 }
6127 if (is_gimple_assign (SSA_NAME_DEF_STMT (s)))
6128 switch (gimple_assign_rhs_code (SSA_NAME_DEF_STMT (s)))
6129 {
6130 case IMAGPART_EXPR:
6131 {
6132 tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
6133 rhs1 = TREE_OPERAND (rhs1, 0);
6134 if (TREE_CODE (rhs1) == SSA_NAME)
6135 {
6136 gimple *g = SSA_NAME_DEF_STMT (rhs1);
6137 if (optimizable_arith_overflow (g))
6138 continue;
6139 }
6140 }
6141 /* FALLTHRU */
6142 case LSHIFT_EXPR:
6143 case RSHIFT_EXPR:
6144 case MULT_EXPR:
6145 case TRUNC_DIV_EXPR:
6146 case TRUNC_MOD_EXPR:
6147 case FIX_TRUNC_EXPR:
6148 case REALPART_EXPR:
6149 if (gimple_store_p (use_stmt)
6150 && is_gimple_assign (use_stmt)
6151 && !gimple_has_volatile_ops (use_stmt)
6152 && !stmt_ends_bb_p (use_stmt))
6153 {
6154 tree lhs = gimple_assign_lhs (use_stmt);
6155 /* As multiply/division passes address of the lhs
6156 to library function and that assumes it can extend
6157 it to whole number of limbs, avoid merging those
6158 with bit-field stores. Don't allow it for
6159 shifts etc. either, so that the bit-field store
6160 handling doesn't have to be done everywhere. */
6161 if (TREE_CODE (lhs) == COMPONENT_REF
6162 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
6163 break;
6164 continue;
6165 }
6166 break;
6167 default:
6168 break;
6169 }
6170 }
6171
6172 /* Also ignore uninitialized uses. */
6173 if (SSA_NAME_IS_DEFAULT_DEF (s)
6174 && (!SSA_NAME_VAR (s) || VAR_P (SSA_NAME_VAR (s))))
6175 continue;
6176
6177 force_name:
6178 if (!large_huge.m_names)
6179 large_huge.m_names = BITMAP_ALLOC (NULL);
6180 bitmap_set_bit (large_huge.m_names, SSA_NAME_VERSION (s));
6181 if (has_single_use (s))
6182 {
6183 if (!large_huge.m_single_use_names)
6184 large_huge.m_single_use_names = BITMAP_ALLOC (NULL);
6185 bitmap_set_bit (large_huge.m_single_use_names,
6186 SSA_NAME_VERSION (s));
6187 }
6188 if (SSA_NAME_VAR (s)
6189 && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
6190 && SSA_NAME_IS_DEFAULT_DEF (s))
6191 || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
6192 has_large_huge_parm_result = true;
6193 if (optimize
6194 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
6195 && gimple_assign_load_p (SSA_NAME_DEF_STMT (s))
6196 && !gimple_has_volatile_ops (SSA_NAME_DEF_STMT (s))
6197 && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
6198 {
6199 use_operand_p use_p;
6200 imm_use_iterator iter;
6201 bool optimizable_load = true;
6202 FOR_EACH_IMM_USE_FAST (use_p, iter, s)
6203 {
6204 gimple *use_stmt = USE_STMT (use_p);
6205 if (is_gimple_debug (use_stmt))
6206 continue;
6207 if (gimple_code (use_stmt) == GIMPLE_PHI
6208 || is_gimple_call (use_stmt))
6209 {
6210 optimizable_load = false;
6211 break;
6212 }
6213 }
6214
6215 ssa_op_iter oi;
6216 FOR_EACH_SSA_USE_OPERAND (use_p, SSA_NAME_DEF_STMT (s),
6217 oi, SSA_OP_USE)
6218 {
6219 tree s2 = USE_FROM_PTR (use_p);
6220 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s2))
6221 {
6222 optimizable_load = false;
6223 break;
6224 }
6225 }
6226
6227 if (optimizable_load && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
6228 {
6229 if (!large_huge.m_loads)
6230 large_huge.m_loads = BITMAP_ALLOC (NULL);
6231 bitmap_set_bit (large_huge.m_loads, SSA_NAME_VERSION (s));
6232 }
6233 }
6234 }
6235 /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
6236 into memory. Such functions could have no large/huge SSA_NAMEs. */
6237 else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
6238 {
6239 gimple *g = SSA_NAME_DEF_STMT (s);
6240 if (is_gimple_assign (g) && gimple_store_p (g))
6241 {
6242 tree t = gimple_assign_rhs1 (g);
6243 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6244 && bitint_precision_kind (TREE_TYPE (t)) >= bitint_prec_large)
6245 has_large_huge = true;
6246 }
6247 }
6248 }
6249
6250 if (large_huge.m_names || has_large_huge)
6251 {
6252 ret = TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
6253 calculate_dominance_info (CDI_DOMINATORS);
6254 if (optimize)
6255 enable_ranger (cfun);
6256 if (large_huge.m_loads)
6257 {
6258 basic_block entry = ENTRY_BLOCK_PTR_FOR_FN (cfun);
6259 entry->aux = NULL;
6260 bitint_dom_walker (large_huge.m_names,
6261 large_huge.m_loads).walk (entry);
6262 bitmap_and_compl_into (large_huge.m_names, large_huge.m_loads);
6263 clear_aux_for_blocks ();
6264 BITMAP_FREE (large_huge.m_loads);
6265 }
6266 large_huge.m_limb_type = build_nonstandard_integer_type (limb_prec, 1);
6267 large_huge.m_limb_size
6268 = tree_to_uhwi (TYPE_SIZE_UNIT (large_huge.m_limb_type));
6269 }
6270 if (large_huge.m_names)
6271 {
6272 large_huge.m_map
6273 = init_var_map (num_ssa_names, NULL, large_huge.m_names);
6274 coalesce_ssa_name (large_huge.m_map);
6275 partition_view_normal (large_huge.m_map);
6276 if (dump_file && (dump_flags & TDF_DETAILS))
6277 {
6278 fprintf (dump_file, "After Coalescing:\n");
6279 dump_var_map (dump_file, large_huge.m_map);
6280 }
6281 large_huge.m_vars
6282 = XCNEWVEC (tree, num_var_partitions (large_huge.m_map));
6283 bitmap_iterator bi;
6284 if (has_large_huge_parm_result)
6285 EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
6286 {
6287 tree s = ssa_name (i);
6288 if (SSA_NAME_VAR (s)
6289 && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
6290 && SSA_NAME_IS_DEFAULT_DEF (s))
6291 || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
6292 {
6293 int p = var_to_partition (large_huge.m_map, s);
6294 if (large_huge.m_vars[p] == NULL_TREE)
6295 {
6296 large_huge.m_vars[p] = SSA_NAME_VAR (s);
6297 mark_addressable (SSA_NAME_VAR (s));
6298 }
6299 }
6300 }
6301 tree atype = NULL_TREE;
6302 EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
6303 {
6304 tree s = ssa_name (i);
6305 int p = var_to_partition (large_huge.m_map, s);
6306 if (large_huge.m_vars[p] != NULL_TREE)
6307 continue;
6308 if (atype == NULL_TREE
6309 || !tree_int_cst_equal (TYPE_SIZE (atype),
6310 TYPE_SIZE (TREE_TYPE (s))))
6311 {
6312 unsigned HOST_WIDE_INT nelts
6313 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (s))) / limb_prec;
6314 atype = build_array_type_nelts (large_huge.m_limb_type, nelts);
6315 }
6316 large_huge.m_vars[p] = create_tmp_var (atype, "bitint");
6317 mark_addressable (large_huge.m_vars[p]);
6318 }
6319 }
6320
6321 FOR_EACH_BB_REVERSE_FN (bb, cfun)
6322 {
6323 gimple_stmt_iterator prev;
6324 for (gimple_stmt_iterator gsi = gsi_last_bb (bb); !gsi_end_p (gsi);
6325 gsi = prev)
6326 {
6327 prev = gsi;
6328 gsi_prev (&prev);
6329 ssa_op_iter iter;
6330 gimple *stmt = gsi_stmt (gsi);
6331 if (is_gimple_debug (stmt))
6332 continue;
6333 bitint_prec_kind kind = bitint_prec_small;
6334 tree t;
6335 FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, SSA_OP_ALL_OPERANDS)
6336 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
6337 {
6338 bitint_prec_kind this_kind
6339 = bitint_precision_kind (TREE_TYPE (t));
6340 kind = MAX (kind, this_kind);
6341 }
6342 if (is_gimple_assign (stmt) && gimple_store_p (stmt))
6343 {
6344 t = gimple_assign_rhs1 (stmt);
6345 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
6346 {
6347 bitint_prec_kind this_kind
6348 = bitint_precision_kind (TREE_TYPE (t));
6349 kind = MAX (kind, this_kind);
6350 }
6351 }
6352 if (is_gimple_assign (stmt)
6353 && gimple_assign_rhs_code (stmt) == FLOAT_EXPR)
6354 {
6355 t = gimple_assign_rhs1 (stmt);
6356 if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6357 && TREE_CODE (t) == INTEGER_CST)
6358 {
6359 bitint_prec_kind this_kind
6360 = bitint_precision_kind (TREE_TYPE (t));
6361 kind = MAX (kind, this_kind);
6362 }
6363 }
6364 if (is_gimple_call (stmt))
6365 {
6366 t = gimple_call_lhs (stmt);
6367 if (t && TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE)
6368 {
6369 bitint_prec_kind this_kind = arith_overflow_arg_kind (stmt);
6370 kind = MAX (kind, this_kind);
6371 if (TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == BITINT_TYPE)
6372 {
6373 this_kind
6374 = bitint_precision_kind (TREE_TYPE (TREE_TYPE (t)));
6375 kind = MAX (kind, this_kind);
6376 }
6377 }
6378 }
6379 if (kind == bitint_prec_small)
6380 continue;
6381 switch (gimple_code (stmt))
6382 {
6383 case GIMPLE_CALL:
6384 /* For now. We'll need to handle some internal functions and
6385 perhaps some builtins. */
6386 if (kind == bitint_prec_middle)
6387 continue;
6388 break;
6389 case GIMPLE_ASM:
6390 if (kind == bitint_prec_middle)
6391 continue;
6392 break;
6393 case GIMPLE_RETURN:
6394 continue;
6395 case GIMPLE_ASSIGN:
6396 if (gimple_clobber_p (stmt))
6397 continue;
6398 if (kind >= bitint_prec_large)
6399 break;
6400 if (gimple_assign_single_p (stmt))
6401 /* No need to lower copies, loads or stores. */
6402 continue;
6403 if (gimple_assign_cast_p (stmt))
6404 {
6405 tree lhs = gimple_assign_lhs (stmt);
6406 tree rhs = gimple_assign_rhs1 (stmt);
6407 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
6408 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
6409 && (TYPE_PRECISION (TREE_TYPE (lhs))
6410 == TYPE_PRECISION (TREE_TYPE (rhs))))
6411 /* No need to lower casts to same precision. */
6412 continue;
6413 }
6414 break;
6415 default:
6416 break;
6417 }
6418
6419 if (kind == bitint_prec_middle)
6420 {
6421 tree type = NULL_TREE;
6422 /* Middle _BitInt(N) is rewritten to casts to INTEGER_TYPEs
6423 with the same precision and back. */
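/* Illustrative example, assuming x86_64 where _BitInt(65) to
   _BitInt(128) are middle: c = a + b with _BitInt(100) operands is
   rewritten to
     a' = (int:100) a; b' = (int:100) b;
     c' = a' + b'; c = (_BitInt(100)) c';
   with int:100 being the nonstandard INTEGER_TYPE built below.  */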
6424 unsigned int nops = gimple_num_ops (stmt);
6425 for (unsigned int i = is_gimple_assign (stmt) ? 1 : 0;
6426 i < nops; ++i)
6427 if (tree op = gimple_op (stmt, i))
6428 {
6429 tree nop = maybe_cast_middle_bitint (&gsi, op, type);
6430 if (nop != op)
6431 gimple_set_op (stmt, i, nop);
6432 else if (COMPARISON_CLASS_P (op))
6433 {
6434 TREE_OPERAND (op, 0)
6435 = maybe_cast_middle_bitint (&gsi,
6436 TREE_OPERAND (op, 0),
6437 type);
6438 TREE_OPERAND (op, 1)
6439 = maybe_cast_middle_bitint (&gsi,
6440 TREE_OPERAND (op, 1),
6441 type);
6442 }
6443 else if (TREE_CODE (op) == CASE_LABEL_EXPR)
6444 {
6445 CASE_LOW (op)
6446 = maybe_cast_middle_bitint (&gsi, CASE_LOW (op),
6447 type);
6448 CASE_HIGH (op)
6449 = maybe_cast_middle_bitint (&gsi, CASE_HIGH (op),
6450 type);
6451 }
6452 }
6453 if (tree lhs = gimple_get_lhs (stmt))
6454 if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
6455 && (bitint_precision_kind (TREE_TYPE (lhs))
6456 == bitint_prec_middle))
6457 {
6458 int prec = TYPE_PRECISION (TREE_TYPE (lhs));
6459 int uns = TYPE_UNSIGNED (TREE_TYPE (lhs));
6460 type = build_nonstandard_integer_type (prec, uns);
6461 tree lhs2 = make_ssa_name (type);
6462 gimple_set_lhs (stmt, lhs2);
6463 gimple *g = gimple_build_assign (lhs, NOP_EXPR, lhs2);
6464 if (stmt_ends_bb_p (stmt))
6465 {
6466 edge e = find_fallthru_edge (gsi_bb (gsi)->succs);
6467 gsi_insert_on_edge_immediate (e, g);
6468 }
6469 else
6470 gsi_insert_after (&gsi, g, GSI_SAME_STMT);
6471 }
6472 update_stmt (stmt);
6473 continue;
6474 }
6475
6476 if (tree lhs = gimple_get_lhs (stmt))
6477 if (TREE_CODE (lhs) == SSA_NAME)
6478 {
6479 tree type = TREE_TYPE (lhs);
6480 if (TREE_CODE (type) == COMPLEX_TYPE)
6481 type = TREE_TYPE (type);
6482 if (TREE_CODE (type) == BITINT_TYPE
6483 && bitint_precision_kind (type) >= bitint_prec_large
6484 && (large_huge.m_names == NULL
6485 || !bitmap_bit_p (large_huge.m_names,
6486 SSA_NAME_VERSION (lhs))))
6487 continue;
6488 }
6489
6490 large_huge.lower_stmt (stmt);
6491 }
6492
6493 tree atype = NULL_TREE;
6494 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
6495 gsi_next (&gsi))
6496 {
6497 gphi *phi = gsi.phi ();
6498 tree lhs = gimple_phi_result (phi);
6499 if (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
6500 || bitint_precision_kind (TREE_TYPE (lhs)) < bitint_prec_large)
6501 continue;
6502 int p1 = var_to_partition (large_huge.m_map, lhs);
6503 gcc_assert (large_huge.m_vars[p1] != NULL_TREE);
6504 tree v1 = large_huge.m_vars[p1];
6505 for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
6506 {
6507 tree arg = gimple_phi_arg_def (phi, i);
6508 edge e = gimple_phi_arg_edge (phi, i);
6509 gimple *g;
6510 switch (TREE_CODE (arg))
6511 {
6512 case INTEGER_CST:
6513 if (integer_zerop (arg) && VAR_P (v1))
6514 {
6515 tree zero = build_zero_cst (TREE_TYPE (v1));
6516 g = gimple_build_assign (v1, zero);
6517 gsi_insert_on_edge (e, g);
6518 edge_insertions = true;
6519 break;
6520 }
6521 int ext;
6522 unsigned int min_prec, prec, rem;
6523 tree c;
6524 prec = TYPE_PRECISION (TREE_TYPE (arg));
6525 rem = prec % (2 * limb_prec);
6526 min_prec = bitint_min_cst_precision (arg, ext);
6527 if (min_prec > prec - rem - 2 * limb_prec
6528 && min_prec > (unsigned) limb_prec)
6529 /* Constant which has enough significant bits that it
6530 isn't worth trying to save .rodata space by extending
6531 from smaller number. */
6532 min_prec = prec;
6533 else
6534 min_prec = CEIL (min_prec, limb_prec) * limb_prec;
6535 if (min_prec == 0)
6536 c = NULL_TREE;
6537 else if (min_prec == prec)
6538 c = tree_output_constant_def (arg);
6539 else if (min_prec == (unsigned) limb_prec)
6540 c = fold_convert (large_huge.m_limb_type, arg);
6541 else
6542 {
6543 tree ctype = build_bitint_type (min_prec, 1);
6544 c = tree_output_constant_def (fold_convert (ctype, arg));
6545 }
6546 if (c)
6547 {
6548 if (VAR_P (v1) && min_prec == prec)
6549 {
6550 tree v2 = build1 (VIEW_CONVERT_EXPR,
6551 TREE_TYPE (v1), c);
6552 g = gimple_build_assign (v1, v2);
6553 gsi_insert_on_edge (e, g);
6554 edge_insertions = true;
6555 break;
6556 }
6557 if (TREE_CODE (TREE_TYPE (c)) == INTEGER_TYPE)
6558 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6559 TREE_TYPE (c), v1),
6560 c);
6561 else
6562 {
6563 unsigned HOST_WIDE_INT nelts
6564 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (c)))
6565 / limb_prec;
6566 tree vtype
6567 = build_array_type_nelts (large_huge.m_limb_type,
6568 nelts);
6569 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6570 vtype, v1),
6571 build1 (VIEW_CONVERT_EXPR,
6572 vtype, c));
6573 }
6574 gsi_insert_on_edge (e, g);
6575 }
6576 if (ext == 0)
6577 {
6578 unsigned HOST_WIDE_INT nelts
6579 = (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (v1)))
6580 - min_prec) / limb_prec;
6581 tree vtype
6582 = build_array_type_nelts (large_huge.m_limb_type,
6583 nelts);
6584 tree ptype = build_pointer_type (TREE_TYPE (v1));
6585 tree off = fold_convert (ptype,
6586 TYPE_SIZE_UNIT (TREE_TYPE (c)));
6587 tree vd = build2 (MEM_REF, vtype,
6588 build_fold_addr_expr (v1), off);
6589 g = gimple_build_assign (vd, build_zero_cst (vtype));
6590 }
6591 else
6592 {
6593 tree vd = v1;
6594 if (c)
6595 {
6596 tree ptype = build_pointer_type (TREE_TYPE (v1));
6597 tree off
6598 = fold_convert (ptype,
6599 TYPE_SIZE_UNIT (TREE_TYPE (c)));
6600 vd = build2 (MEM_REF, large_huge.m_limb_type,
6601 build_fold_addr_expr (v1), off);
6602 }
6603 vd = build_fold_addr_expr (vd);
6604 unsigned HOST_WIDE_INT nbytes
6605 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (v1)));
6606 if (c)
6607 nbytes
6608 -= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (c)));
6609 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
6610 g = gimple_build_call (fn, 3, vd,
6611 integer_minus_one_node,
6612 build_int_cst (sizetype,
6613 nbytes));
6614 }
6615 gsi_insert_on_edge (e, g);
6616 edge_insertions = true;
6617 break;
6618 default:
6619 gcc_unreachable ();
6620 case SSA_NAME:
6621 if (gimple_code (SSA_NAME_DEF_STMT (arg)) == GIMPLE_NOP)
6622 {
6623 if (large_huge.m_names == NULL
6624 || !bitmap_bit_p (large_huge.m_names,
6625 SSA_NAME_VERSION (arg)))
6626 continue;
6627 }
6628 int p2 = var_to_partition (large_huge.m_map, arg);
6629 if (p1 == p2)
6630 continue;
6631 gcc_assert (large_huge.m_vars[p2] != NULL_TREE);
6632 tree v2 = large_huge.m_vars[p2];
6633 if (VAR_P (v1) && VAR_P (v2))
6634 g = gimple_build_assign (v1, v2);
6635 else if (VAR_P (v1))
6636 g = gimple_build_assign (v1, build1 (VIEW_CONVERT_EXPR,
6637 TREE_TYPE (v1), v2));
6638 else if (VAR_P (v2))
6639 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6640 TREE_TYPE (v2), v1), v2);
6641 else
6642 {
6643 if (atype == NULL_TREE
6644 || !tree_int_cst_equal (TYPE_SIZE (atype),
6645 TYPE_SIZE (TREE_TYPE (lhs))))
6646 {
6647 unsigned HOST_WIDE_INT nelts
6648 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))
6649 / limb_prec;
6650 atype
6651 = build_array_type_nelts (large_huge.m_limb_type,
6652 nelts);
6653 }
6654 g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
6655 atype, v1),
6656 build1 (VIEW_CONVERT_EXPR,
6657 atype, v2));
6658 }
6659 gsi_insert_on_edge (e, g);
6660 edge_insertions = true;
6661 break;
6662 }
6663 }
6664 }
6665 }
6666
6667 if (large_huge.m_names || has_large_huge)
6668 {
6669 gimple *nop = NULL;
6670 for (i = 0; i < num_ssa_names; ++i)
6671 {
6672 tree s = ssa_name (i);
6673 if (s == NULL_TREE)
6674 continue;
6675 tree type = TREE_TYPE (s);
6676 if (TREE_CODE (type) == COMPLEX_TYPE)
6677 type = TREE_TYPE (type);
6678 if (TREE_CODE (type) == BITINT_TYPE
6679 && bitint_precision_kind (type) >= bitint_prec_large)
6680 {
6681 if (large_huge.m_preserved
6682 && bitmap_bit_p (large_huge.m_preserved,
6683 SSA_NAME_VERSION (s)))
6684 continue;
6685 gimple *g = SSA_NAME_DEF_STMT (s);
6686 if (gimple_code (g) == GIMPLE_NOP)
6687 {
6688 if (SSA_NAME_VAR (s))
6689 set_ssa_default_def (cfun, SSA_NAME_VAR (s), NULL_TREE);
6690 release_ssa_name (s);
6691 continue;
6692 }
6693 if (gimple_bb (g) == NULL)
6694 {
6695 release_ssa_name (s);
6696 continue;
6697 }
6698 if (gimple_code (g) != GIMPLE_ASM)
6699 {
6700 gimple_stmt_iterator gsi = gsi_for_stmt (g);
6701 bool save_vta = flag_var_tracking_assignments;
6702 flag_var_tracking_assignments = false;
6703 gsi_remove (&gsi, true);
6704 flag_var_tracking_assignments = save_vta;
6705 }
6706 if (nop == NULL)
6707 nop = gimple_build_nop ();
6708 SSA_NAME_DEF_STMT (s) = nop;
6709 release_ssa_name (s);
6710 }
6711 }
6712 if (optimize)
6713 disable_ranger (cfun);
6714 }
6715
6716 if (edge_insertions)
6717 gsi_commit_edge_inserts ();
6718
6719 return ret;
6720 }
6721
6722 namespace {
6723
6724 const pass_data pass_data_lower_bitint =
6725 {
6726 GIMPLE_PASS, /* type */
6727 "bitintlower", /* name */
6728 OPTGROUP_NONE, /* optinfo_flags */
6729 TV_NONE, /* tv_id */
6730 PROP_ssa, /* properties_required */
6731 PROP_gimple_lbitint, /* properties_provided */
6732 0, /* properties_destroyed */
6733 0, /* todo_flags_start */
6734 0, /* todo_flags_finish */
6735 };
6736
6737 class pass_lower_bitint : public gimple_opt_pass
6738 {
6739 public:
6740 pass_lower_bitint (gcc::context *ctxt)
6741 : gimple_opt_pass (pass_data_lower_bitint, ctxt)
6742 {}
6743
6744 /* opt_pass methods: */
6745 opt_pass * clone () final override { return new pass_lower_bitint (m_ctxt); }
6746 unsigned int execute (function *) final override
6747 {
6748 return gimple_lower_bitint ();
6749 }
6750
6751 }; // class pass_lower_bitint
6752
6753 } // anon namespace
6754
6755 gimple_opt_pass *
6756 make_pass_lower_bitint (gcc::context *ctxt)
6757 {
6758 return new pass_lower_bitint (ctxt);
6759 }
6760
6761 \f
6762 namespace {
6763
6764 const pass_data pass_data_lower_bitint_O0 =
6765 {
6766 GIMPLE_PASS, /* type */
6767 "bitintlower0", /* name */
6768 OPTGROUP_NONE, /* optinfo_flags */
6769 TV_NONE, /* tv_id */
6770 PROP_cfg, /* properties_required */
6771 PROP_gimple_lbitint, /* properties_provided */
6772 0, /* properties_destroyed */
6773 0, /* todo_flags_start */
6774 0, /* todo_flags_finish */
6775 };
6776
6777 class pass_lower_bitint_O0 : public gimple_opt_pass
6778 {
6779 public:
6780 pass_lower_bitint_O0 (gcc::context *ctxt)
6781 : gimple_opt_pass (pass_data_lower_bitint_O0, ctxt)
6782 {}
6783
6784 /* opt_pass methods: */
6785 bool gate (function *fun) final override
6786 {
6787 /* With errors, normal optimization passes are not run. If we don't
6788 lower bitint operations at all, rtl expansion will abort. */
6789 return !(fun->curr_properties & PROP_gimple_lbitint);
6790 }
6791
6792 unsigned int execute (function *) final override
6793 {
6794 return gimple_lower_bitint ();
6795 }
6796
6797 }; // class pass_lower_bitint_O0
6798
6799 } // anon namespace
6800
6801 gimple_opt_pass *
6802 make_pass_lower_bitint_O0 (gcc::context *ctxt)
6803 {
6804 return new pass_lower_bitint_O0 (ctxt);
6805 }