]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/tree-complex.c
re PR tree-optimization/22022 (ACATS ICE cxg1002 tree_split_edge, at tree-cfg.c:3025)
[thirdparty/gcc.git] / gcc / tree-complex.c
1 /* Lower complex number operations to scalar operations.
2 Copyright (C) 2004, 2005 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the
8 Free Software Foundation; either version 2, or (at your option) any
9 later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
19 02111-1307, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "real.h"
28 #include "flags.h"
29 #include "tree-flow.h"
30 #include "tree-gimple.h"
31 #include "tree-iterator.h"
32 #include "tree-pass.h"
33 #include "tree-ssa-propagate.h"
34
35
/* For each complex ssa name, a lattice value.  We're interested in finding
   out whether a complex number is degenerate in some way, having only real
   or only complex parts.  */

typedef enum
{
  UNINITIALIZED = 0,
  ONLY_REAL = 1,
  ONLY_IMAG = 2,
  VARYING = 3
} complex_lattice_t;

/* Pack two lattice values into a single switchable index; relies on each
   lattice value fitting in two bits.  */
#define PAIR(a, b)  ((a) << 2 | (b))

DEF_VEC_I(complex_lattice_t);
DEF_VEC_ALLOC_I(complex_lattice_t, heap);

/* Lattice value for each complex SSA name, indexed by SSA_NAME_VERSION.  */
static VEC(complex_lattice_t, heap) *complex_lattice_values;

/* For each complex variable, a pair of variables for the components.
   Slot 2*k holds the real part and 2*k+1 the imaginary part; k is the
   referenced-vars index (see create_components) — extract_component reads
   the same slots via var_ann()->uid, which is assumed to match.  */
static VEC(tree, heap) *complex_variable_components;
58
59 /* Return true if T is not a zero constant. In the case of real values,
60 we're only interested in +0.0. */
61
62 static int
63 some_nonzerop (tree t)
64 {
65 int zerop = false;
66
67 if (TREE_CODE (t) == REAL_CST)
68 zerop = REAL_VALUES_IDENTICAL (TREE_REAL_CST (t), dconst0);
69 else if (TREE_CODE (t) == INTEGER_CST)
70 zerop = integer_zerop (t);
71
72 return !zerop;
73 }
74
75 /* Compute a lattice value from T. It may be a gimple_val, or, as a
76 special exception, a COMPLEX_EXPR. */
77
78 static complex_lattice_t
79 find_lattice_value (tree t)
80 {
81 tree real, imag;
82 int r, i;
83 complex_lattice_t ret;
84
85 switch (TREE_CODE (t))
86 {
87 case SSA_NAME:
88 return VEC_index (complex_lattice_t, complex_lattice_values,
89 SSA_NAME_VERSION (t));
90
91 case COMPLEX_CST:
92 real = TREE_REALPART (t);
93 imag = TREE_IMAGPART (t);
94 break;
95
96 case COMPLEX_EXPR:
97 real = TREE_OPERAND (t, 0);
98 imag = TREE_OPERAND (t, 1);
99 break;
100
101 default:
102 gcc_unreachable ();
103 }
104
105 r = some_nonzerop (real);
106 i = some_nonzerop (imag);
107 ret = r*ONLY_REAL + i*ONLY_IMAG;
108
109 /* ??? On occasion we could do better than mapping 0+0i to real, but we
110 certainly don't want to leave it UNINITIALIZED, which eventually gets
111 mapped to VARYING. */
112 if (ret == UNINITIALIZED)
113 ret = ONLY_REAL;
114
115 return ret;
116 }
117
118 /* Determine if LHS is something for which we're interested in seeing
119 simulation results. */
120
121 static bool
122 is_complex_reg (tree lhs)
123 {
124 return TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE && is_gimple_reg (lhs);
125 }
126
127 /* Mark the incoming parameters to the function as VARYING. */
128
129 static void
130 init_parameter_lattice_values (void)
131 {
132 tree parm;
133
134 for (parm = DECL_ARGUMENTS (cfun->decl); parm ; parm = TREE_CHAIN (parm))
135 if (is_complex_reg (parm) && var_ann (parm) != NULL)
136 {
137 tree ssa_name = default_def (parm);
138 VEC_replace (complex_lattice_t, complex_lattice_values,
139 SSA_NAME_VERSION (ssa_name), VARYING);
140 }
141 }
142
/* Initialize DONT_SIMULATE_AGAIN for each stmt and phi.  Return false if
   we found no statements we want to simulate, and thus there's nothing for
   the entire pass to do.  */

static bool
init_dont_simulate_again (void)
{
  basic_block bb;
  block_stmt_iterator bsi;
  tree phi;
  bool saw_a_complex_op = false;

  FOR_EACH_BB (bb)
    {
      /* Simulate only PHIs that produce a complex gimple register.  */
      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
	DONT_SIMULATE_AGAIN (phi) = !is_complex_reg (PHI_RESULT (phi));

      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  tree orig_stmt, stmt, rhs = NULL;
	  bool dsa = true;

	  orig_stmt = stmt = bsi_stmt (bsi);
	  switch (TREE_CODE (stmt))
	    {
	    case RETURN_EXPR:
	      /* Look through "return lhs = rhs" to the assignment within.  */
	      stmt = TREE_OPERAND (stmt, 0);
	      if (!stmt || TREE_CODE (stmt) != MODIFY_EXPR)
		break;
	      /* FALLTHRU */

	    case MODIFY_EXPR:
	      /* Simulate the statement iff it defines a complex register.  */
	      dsa = !is_complex_reg (TREE_OPERAND (stmt, 0));
	      rhs = TREE_OPERAND (stmt, 1);
	      break;

	    case COND_EXPR:
	      rhs = TREE_OPERAND (stmt, 0);
	      break;

	    default:
	      break;
	    }

	  if (rhs)
	    switch (TREE_CODE (rhs))
	      {
	      case EQ_EXPR:
	      case NE_EXPR:
		/* Comparisons have boolean type; check the operand's type
		   for complexness instead.  */
		rhs = TREE_OPERAND (rhs, 0);
		/* FALLTHRU */

	      case PLUS_EXPR:
	      case MINUS_EXPR:
	      case MULT_EXPR:
	      case TRUNC_DIV_EXPR:
	      case CEIL_DIV_EXPR:
	      case FLOOR_DIV_EXPR:
	      case ROUND_DIV_EXPR:
	      case RDIV_EXPR:
	      case NEGATE_EXPR:
	      case CONJ_EXPR:
		/* Any complex arithmetic at all gives the pass work to do.  */
		if (TREE_CODE (TREE_TYPE (rhs)) == COMPLEX_TYPE)
		  saw_a_complex_op = true;
		break;

	      default:
		break;
	      }

	  DONT_SIMULATE_AGAIN (orig_stmt) = dsa;
	}
    }

  return saw_a_complex_op;
}
219
220
/* Evaluate statement STMT against the complex lattice defined above.
   Stores the SSA name being defined in *RESULT_P and returns the
   propagator disposition for the new lattice value.  */

static enum ssa_prop_result
complex_visit_stmt (tree stmt, edge *taken_edge_p ATTRIBUTE_UNUSED,
		    tree *result_p)
{
  complex_lattice_t new_l, old_l, op1_l, op2_l;
  unsigned int ver;
  tree lhs, rhs;

  /* These conditions should be satisfied due to the initial filter
     set up in init_dont_simulate_again.  */
  if (TREE_CODE (stmt) == RETURN_EXPR)
    stmt = TREE_OPERAND (stmt, 0);
  gcc_assert (TREE_CODE (stmt) == MODIFY_EXPR);

  lhs = TREE_OPERAND (stmt, 0);
  rhs = TREE_OPERAND (stmt, 1);

  gcc_assert (TREE_CODE (lhs) == SSA_NAME);
  gcc_assert (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE);

  *result_p = lhs;
  ver = SSA_NAME_VERSION (lhs);
  old_l = VEC_index (complex_lattice_t, complex_lattice_values, ver);

  switch (TREE_CODE (rhs))
    {
    case SSA_NAME:
    case COMPLEX_EXPR:
    case COMPLEX_CST:
      /* Copies and constants take the lattice value of the source.  */
      new_l = find_lattice_value (rhs);
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
      op1_l = find_lattice_value (TREE_OPERAND (rhs, 0));
      op2_l = find_lattice_value (TREE_OPERAND (rhs, 1));

      /* We've set up the lattice values such that IOR neatly
	 models addition.  */
      new_l = op1_l | op2_l;
      break;

    case MULT_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      op1_l = find_lattice_value (TREE_OPERAND (rhs, 0));
      op2_l = find_lattice_value (TREE_OPERAND (rhs, 1));

      /* Obviously, if either varies, so does the result.  */
      if (op1_l == VARYING || op2_l == VARYING)
	new_l = VARYING;
      /* Don't prematurely promote variables if we've not yet seen
	 their inputs.  */
      else if (op1_l == UNINITIALIZED)
	new_l = op2_l;
      else if (op2_l == UNINITIALIZED)
	new_l = op1_l;
      else
	{
	  /* At this point both numbers have only one component. If the
	     numbers are of opposite kind, the result is imaginary,
	     otherwise the result is real. The add/subtract translates
	     the real/imag from/to 0/1; the ^ performs the comparison. */
	  new_l = ((op1_l - ONLY_REAL) ^ (op2_l - ONLY_REAL)) + ONLY_REAL;

	  /* Don't allow the lattice value to flip-flop indefinitely. */
	  new_l |= old_l;
	}
      break;

    case NEGATE_EXPR:
    case CONJ_EXPR:
      /* Negation and conjugation don't change which components are
	 nonzero.  */
      new_l = find_lattice_value (TREE_OPERAND (rhs, 0));
      break;

    default:
      new_l = VARYING;
      break;
    }

  /* If nothing changed this round, let the propagator know.  */
  if (new_l == old_l)
    return SSA_PROP_NOT_INTERESTING;

  VEC_replace (complex_lattice_t, complex_lattice_values, ver, new_l);
  return new_l == VARYING ? SSA_PROP_VARYING : SSA_PROP_INTERESTING;
}
313
314 /* Evaluate a PHI node against the complex lattice defined above. */
315
316 static enum ssa_prop_result
317 complex_visit_phi (tree phi)
318 {
319 complex_lattice_t new_l, old_l;
320 unsigned int ver;
321 tree lhs;
322 int i;
323
324 lhs = PHI_RESULT (phi);
325
326 /* This condition should be satisfied due to the initial filter
327 set up in init_dont_simulate_again. */
328 gcc_assert (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE);
329
330 /* We've set up the lattice values such that IOR neatly models PHI meet. */
331 new_l = UNINITIALIZED;
332 for (i = PHI_NUM_ARGS (phi) - 1; i >= 0; --i)
333 new_l |= find_lattice_value (PHI_ARG_DEF (phi, i));
334
335 ver = SSA_NAME_VERSION (lhs);
336 old_l = VEC_index (complex_lattice_t, complex_lattice_values, ver);
337
338 if (new_l == old_l)
339 return SSA_PROP_NOT_INTERESTING;
340
341 VEC_replace (complex_lattice_t, complex_lattice_values, ver, new_l);
342 return new_l == VARYING ? SSA_PROP_VARYING : SSA_PROP_INTERESTING;
343 }
344
/* For each referenced complex gimple register, set up a pair of registers
   to hold the components of the complex value.  The pair for the variable
   at referenced-vars index K is stored at slots 2*K (real) and 2*K+1
   (imaginary) of complex_variable_components; non-complex slots stay
   NULL.  */

static void
create_components (void)
{
  size_t k, n;

  n = num_referenced_vars;
  if (n == 0)
    return;

  complex_variable_components = VEC_alloc (tree, heap, 2*n);
  VEC_safe_grow (tree, heap, complex_variable_components, 2*n);

  for (k = 0; k < n; ++k)
    {
      tree var = referenced_var (k);
      tree r = NULL, i = NULL;

      if (var != NULL
	  && TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
	  && is_gimple_reg (var))
	{
	  tree inner_type = TREE_TYPE (TREE_TYPE (var));

	  /* "CR" names the real component, "CI" the imaginary one.  */
	  r = make_rename_temp (inner_type, "CR");
	  i = make_rename_temp (inner_type, "CI");
	  DECL_SOURCE_LOCATION (r) = DECL_SOURCE_LOCATION (var);
	  DECL_SOURCE_LOCATION (i) = DECL_SOURCE_LOCATION (var);
	  DECL_ARTIFICIAL (r) = 1;
	  DECL_ARTIFICIAL (i) = 1;

	  if (DECL_NAME (var) && !DECL_IGNORED_P (var))
	    {
	      /* A user-visible variable: give the components debugger
		 friendly names ("x$real"/"x$imag") and debug expressions
		 tying them back to the original variable.  */
	      const char *name = IDENTIFIER_POINTER (DECL_NAME (var));

	      DECL_NAME (r) = get_identifier (ACONCAT ((name, "$real", NULL)));
	      DECL_NAME (i) = get_identifier (ACONCAT ((name, "$imag", NULL)));

	      SET_DECL_DEBUG_EXPR (r, build1 (REALPART_EXPR, inner_type, var));
	      SET_DECL_DEBUG_EXPR (i, build1 (IMAGPART_EXPR, inner_type, var));
	      DECL_DEBUG_EXPR_IS_FROM (r) = 1;
	      DECL_DEBUG_EXPR_IS_FROM (i) = 1;

	      DECL_IGNORED_P (r) = 0;
	      DECL_IGNORED_P (i) = 0;

	      /* Inherit the original variable's warning suppression.  */
	      TREE_NO_WARNING (r) = TREE_NO_WARNING (var);
	      TREE_NO_WARNING (i) = TREE_NO_WARNING (var);
	    }
	  else
	    {
	      /* An anonymous temporary: no debug info, no warnings.  */
	      DECL_IGNORED_P (r) = 1;
	      DECL_IGNORED_P (i) = 1;
	      TREE_NO_WARNING (r) = 1;
	      TREE_NO_WARNING (i) = 1;
	    }
	}

      VEC_replace (tree, complex_variable_components, 2*k, r);
      VEC_replace (tree, complex_variable_components, 2*k + 1, i);
    }
}
409
/* Extract the real or imaginary part of a complex variable or constant.
   Make sure that it's a proper gimple_val and gimplify it if not.
   Emit any new code before BSI.  IMAGPART_P selects the component;
   GIMPLE_P requests gimplification of the extracted expression.  */

static tree
extract_component (block_stmt_iterator *bsi, tree t, bool imagpart_p,
		   bool gimple_p)
{
  switch (TREE_CODE (t))
    {
    case COMPLEX_CST:
      return imagpart_p ? TREE_IMAGPART (t) : TREE_REALPART (t);

    case COMPLEX_EXPR:
      /* Operand 0 is the real part, operand 1 the imaginary part, so
	 IMAGPART_P doubles as the operand index.  */
      return TREE_OPERAND (t, imagpart_p);

    case VAR_DECL:
    case PARM_DECL:
    case INDIRECT_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
      {
	/* A memory reference: wrap an unshared copy in a
	   REALPART/IMAGPART_EXPR.  */
	tree inner_type = TREE_TYPE (TREE_TYPE (t));

	t = build1 ((imagpart_p ? IMAGPART_EXPR : REALPART_EXPR),
		    inner_type, unshare_expr (t));

	if (gimple_p)
	  t = gimplify_val (bsi, inner_type, t);

	return t;
      }

    case SSA_NAME:
      {
	tree def = SSA_NAME_DEF_STMT (t);

	/* If the name is defined by a constant, or by a COMPLEX_EXPR
	   whose selected component is constant, use that directly.  */
	if (TREE_CODE (def) == MODIFY_EXPR)
	  {
	    def = TREE_OPERAND (def, 1);
	    if (TREE_CODE (def) == COMPLEX_CST)
	      return imagpart_p ? TREE_IMAGPART (def) : TREE_REALPART (def);
	    if (TREE_CODE (def) == COMPLEX_EXPR)
	      {
		def = TREE_OPERAND (def, imagpart_p);
		if (TREE_CONSTANT (def))
		  return def;
	      }
	  }

	/* Otherwise use the component variable set up by
	   create_components for the underlying variable.  */
	return VEC_index (tree, complex_variable_components,
			  var_ann (SSA_NAME_VAR (t))->uid * 2 + imagpart_p);
      }

    default:
      gcc_unreachable ();
    }
}
468
469 /* Update the complex components of the ssa name on the lhs of STMT. */
470
471 static void
472 update_complex_components (block_stmt_iterator *bsi, tree stmt, tree r, tree i)
473 {
474 unsigned int uid = var_ann (SSA_NAME_VAR (TREE_OPERAND (stmt, 0)))->uid;
475 tree v, x;
476
477 v = VEC_index (tree, complex_variable_components, 2*uid);
478 x = build2 (MODIFY_EXPR, TREE_TYPE (v), v, r);
479 SET_EXPR_LOCUS (x, EXPR_LOCUS (stmt));
480 TREE_BLOCK (x) = TREE_BLOCK (stmt);
481 bsi_insert_after (bsi, x, BSI_NEW_STMT);
482
483 v = VEC_index (tree, complex_variable_components, 2*uid + 1);
484 x = build2 (MODIFY_EXPR, TREE_TYPE (v), v, i);
485 SET_EXPR_LOCUS (x, EXPR_LOCUS (stmt));
486 TREE_BLOCK (x) = TREE_BLOCK (stmt);
487 bsi_insert_after (bsi, x, BSI_NEW_STMT);
488 }
489
490 static void
491 update_complex_components_on_edge (edge e, tree stmt, tree lhs, tree r, tree i)
492 {
493 unsigned int uid = var_ann (SSA_NAME_VAR (lhs))->uid;
494 tree v, x;
495
496 v = VEC_index (tree, complex_variable_components, 2*uid);
497 x = build2 (MODIFY_EXPR, TREE_TYPE (v), v, r);
498 if (stmt)
499 {
500 SET_EXPR_LOCUS (x, EXPR_LOCUS (stmt));
501 TREE_BLOCK (x) = TREE_BLOCK (stmt);
502 }
503 bsi_insert_on_edge (e, x);
504
505 v = VEC_index (tree, complex_variable_components, 2*uid + 1);
506 x = build2 (MODIFY_EXPR, TREE_TYPE (v), v, i);
507 if (stmt)
508 {
509 SET_EXPR_LOCUS (x, EXPR_LOCUS (stmt));
510 TREE_BLOCK (x) = TREE_BLOCK (stmt);
511 }
512 bsi_insert_on_edge (e, x);
513 }
514
515 /* Update an assignment to a complex variable in place. */
516
517 static void
518 update_complex_assignment (block_stmt_iterator *bsi, tree r, tree i)
519 {
520 tree stmt, mod;
521 tree type;
522
523 mod = stmt = bsi_stmt (*bsi);
524 if (TREE_CODE (stmt) == RETURN_EXPR)
525 mod = TREE_OPERAND (mod, 0);
526 else if (in_ssa_p)
527 update_complex_components (bsi, stmt, r, i);
528
529 type = TREE_TYPE (TREE_OPERAND (mod, 1));
530 TREE_OPERAND (mod, 1) = build (COMPLEX_EXPR, type, r, i);
531 update_stmt (stmt);
532 }
533
534 /* Generate code at the entry point of the function to initialize the
535 component variables for a complex parameter. */
536
537 static void
538 update_parameter_components (void)
539 {
540 edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR);
541 tree parm;
542
543 for (parm = DECL_ARGUMENTS (cfun->decl); parm ; parm = TREE_CHAIN (parm))
544 {
545 tree type = TREE_TYPE (parm);
546 tree ssa_name, r, i;
547
548 if (TREE_CODE (type) != COMPLEX_TYPE || !is_gimple_reg (parm))
549 continue;
550
551 type = TREE_TYPE (type);
552 ssa_name = default_def (parm);
553
554 r = build1 (REALPART_EXPR, type, ssa_name);
555 i = build1 (IMAGPART_EXPR, type, ssa_name);
556 update_complex_components_on_edge (entry_edge, NULL, ssa_name, r, i);
557 }
558 }
559
560 /* Generate code to set the component variables of a complex variable
561 to match the PHI statements in block BB. */
562
563 static void
564 update_phi_components (basic_block bb)
565 {
566 tree phi;
567
568 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
569 if (is_complex_reg (PHI_RESULT (phi)))
570 {
571 unsigned int i, n;
572 tree lhs = PHI_RESULT (phi);
573
574 for (i = 0, n = PHI_NUM_ARGS (phi); i < n; ++i)
575 {
576 edge e = PHI_ARG_EDGE (phi, i);
577 tree arg = PHI_ARG_DEF (phi, i);
578 tree r, i;
579
580 /* Avoid no-op assignments. This also prevents insertting stmts
581 onto abnormal edges, assuming the PHI isn't already broken. */
582 if (TREE_CODE (arg) == SSA_NAME
583 && SSA_NAME_VAR (arg) == SSA_NAME_VAR (lhs))
584 continue;
585
586 r = extract_component (NULL, arg, 0, false);
587 i = extract_component (NULL, arg, 1, false);
588 update_complex_components_on_edge (e, NULL, lhs, r, i);
589 }
590 }
591 }
592
593 /* Mark each virtual op in STMT for ssa update. */
594
595 static void
596 update_all_vops (tree stmt)
597 {
598 ssa_op_iter iter;
599 tree sym;
600
601 FOR_EACH_SSA_TREE_OPERAND (sym, stmt, iter, SSA_OP_ALL_VIRTUALS)
602 {
603 if (TREE_CODE (sym) == SSA_NAME)
604 sym = SSA_NAME_VAR (sym);
605 mark_sym_for_renaming (sym);
606 }
607 }
608
/* Expand a complex move to scalars.  STMT assigns RHS of complex TYPE
   to LHS; depending on whether LHS is a register or memory, rewrite the
   statement or emit separate component stores.  */

static void
expand_complex_move (block_stmt_iterator *bsi, tree stmt, tree type,
		     tree lhs, tree rhs)
{
  tree inner_type = TREE_TYPE (type);
  tree r, i;

  if (TREE_CODE (lhs) == SSA_NAME)
    {
      if (is_ctrl_altering_stmt (bsi_stmt (*bsi)))
	{
	  edge_iterator ei;
	  edge e;

	  /* The value is not assigned on the exception edges, so we need not
	     concern ourselves there.  We do need to update on the fallthru
	     edge.  Find it.  */
	  FOR_EACH_EDGE (e, ei, bsi->bb->succs)
	    if (e->flags & EDGE_FALLTHRU)
	      goto found_fallthru;
	  gcc_unreachable ();
	found_fallthru:

	  r = build1 (REALPART_EXPR, inner_type, lhs);
	  i = build1 (IMAGPART_EXPR, inner_type, lhs);
	  update_complex_components_on_edge (e, stmt, lhs, r, i);
	}
      else if (TREE_CODE (rhs) == CALL_EXPR || TREE_SIDE_EFFECTS (rhs))
	{
	  /* The RHS must be kept whole (a call, or it has side effects);
	     leave the statement alone and refresh the component variables
	     from the result afterwards.  */
	  r = build1 (REALPART_EXPR, inner_type, lhs);
	  i = build1 (IMAGPART_EXPR, inner_type, lhs);
	  update_complex_components (bsi, stmt, r, i);
	}
      else
	{
	  /* A plain move: extract the two scalar components of the RHS
	     and rewrite the statement as a COMPLEX_EXPR of them.  */
	  update_all_vops (bsi_stmt (*bsi));
	  r = extract_component (bsi, rhs, 0, true);
	  i = extract_component (bsi, rhs, 1, true);
	  update_complex_assignment (bsi, r, i);
	}
    }
  else if (TREE_CODE (rhs) == SSA_NAME && !TREE_SIDE_EFFECTS (lhs))
    {
      tree x;

      /* A store to memory: write the two components separately.  The
	 real-part store goes before the statement; the statement itself
	 becomes (or is followed by) the imaginary-part store.  */
      r = extract_component (bsi, rhs, 0, false);
      i = extract_component (bsi, rhs, 1, false);

      x = build1 (REALPART_EXPR, inner_type, unshare_expr (lhs));
      x = build2 (MODIFY_EXPR, inner_type, x, r);
      bsi_insert_before (bsi, x, BSI_SAME_STMT);

      if (stmt == bsi_stmt (*bsi))
	{
	  /* STMT is the assignment itself: turn it into the
	     imaginary-part store in place.  */
	  x = build1 (IMAGPART_EXPR, inner_type, unshare_expr (lhs));
	  TREE_OPERAND (stmt, 0) = x;
	  TREE_OPERAND (stmt, 1) = i;
	  TREE_TYPE (stmt) = inner_type;
	}
      else
	{
	  /* STMT is a RETURN_EXPR wrapping the assignment (asserted
	     below): emit the imaginary-part store separately and have
	     the return yield LHS.  */
	  x = build1 (IMAGPART_EXPR, inner_type, unshare_expr (lhs));
	  x = build2 (MODIFY_EXPR, inner_type, x, i);
	  bsi_insert_before (bsi, x, BSI_SAME_STMT);

	  stmt = bsi_stmt (*bsi);
	  gcc_assert (TREE_CODE (stmt) == RETURN_EXPR);
	  TREE_OPERAND (stmt, 0) = lhs;
	}

      update_all_vops (stmt);
      update_stmt (stmt);
    }
}
685
686 /* Expand complex addition to scalars:
687 a + b = (ar + br) + i(ai + bi)
688 a - b = (ar - br) + i(ai + bi)
689 */
690
691 static void
692 expand_complex_addition (block_stmt_iterator *bsi, tree inner_type,
693 tree ar, tree ai, tree br, tree bi,
694 enum tree_code code,
695 complex_lattice_t al, complex_lattice_t bl)
696 {
697 tree rr, ri;
698
699 switch (PAIR (al, bl))
700 {
701 case PAIR (ONLY_REAL, ONLY_REAL):
702 rr = gimplify_build2 (bsi, code, inner_type, ar, br);
703 ri = ai;
704 break;
705
706 case PAIR (ONLY_REAL, ONLY_IMAG):
707 rr = ar;
708 if (code == MINUS_EXPR)
709 ri = gimplify_build2 (bsi, MINUS_EXPR, inner_type, ai, bi);
710 else
711 ri = bi;
712 break;
713
714 case PAIR (ONLY_IMAG, ONLY_REAL):
715 if (code == MINUS_EXPR)
716 rr = gimplify_build2 (bsi, MINUS_EXPR, inner_type, ar, br);
717 else
718 rr = br;
719 ri = ai;
720 break;
721
722 case PAIR (ONLY_IMAG, ONLY_IMAG):
723 rr = ar;
724 ri = gimplify_build2 (bsi, code, inner_type, ai, bi);
725 break;
726
727 case PAIR (VARYING, ONLY_REAL):
728 rr = gimplify_build2 (bsi, code, inner_type, ar, br);
729 ri = ai;
730 break;
731
732 case PAIR (VARYING, ONLY_IMAG):
733 rr = ar;
734 ri = gimplify_build2 (bsi, MINUS_EXPR, inner_type, ai, bi);
735 break;
736
737 case PAIR (ONLY_REAL, VARYING):
738 if (code == MINUS_EXPR)
739 goto general;
740 rr = gimplify_build2 (bsi, code, inner_type, ar, br);
741 ri = bi;
742 break;
743
744 case PAIR (ONLY_IMAG, VARYING):
745 if (code == MINUS_EXPR)
746 goto general;
747 rr = br;
748 ri = gimplify_build2 (bsi, MINUS_EXPR, inner_type, ai, bi);
749 break;
750
751 case PAIR (VARYING, VARYING):
752 general:
753 rr = gimplify_build2 (bsi, code, inner_type, ar, br);
754 ri = gimplify_build2 (bsi, code, inner_type, ai, bi);
755 break;
756
757 default:
758 gcc_unreachable ();
759 }
760
761 update_complex_assignment (bsi, rr, ri);
762 }
763
764 /* Expand a complex multiplication or division to a libcall to the c99
765 compliant routines. */
766
767 static void
768 expand_complex_libcall (block_stmt_iterator *bsi, tree ar, tree ai,
769 tree br, tree bi, enum tree_code code)
770 {
771 enum machine_mode mode;
772 enum built_in_function bcode;
773 tree args, fn, stmt, type;
774
775 args = tree_cons (NULL, bi, NULL);
776 args = tree_cons (NULL, br, args);
777 args = tree_cons (NULL, ai, args);
778 args = tree_cons (NULL, ar, args);
779
780 stmt = bsi_stmt (*bsi);
781 type = TREE_TYPE (TREE_OPERAND (stmt, 1));
782
783 mode = TYPE_MODE (type);
784 gcc_assert (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT);
785 if (code == MULT_EXPR)
786 bcode = BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT;
787 else if (code == RDIV_EXPR)
788 bcode = BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT;
789 else
790 gcc_unreachable ();
791 fn = built_in_decls[bcode];
792
793 TREE_OPERAND (stmt, 1)
794 = build3 (CALL_EXPR, type, build_fold_addr_expr (fn), args, NULL);
795 update_stmt (stmt);
796
797 if (in_ssa_p)
798 {
799 tree lhs = TREE_OPERAND (stmt, 0);
800 update_complex_components (bsi, stmt,
801 build1 (REALPART_EXPR, type, lhs),
802 build1 (IMAGPART_EXPR, type, lhs));
803 }
804 }
805
/* Expand complex multiplication to scalars:
	a * b = (ar*br - ai*bi) + i(ar*bi + br*ai)
   AL/BL are the operands' lattice values, used to skip arithmetic on
   components known to be zero.  */

static void
expand_complex_multiplication (block_stmt_iterator *bsi, tree inner_type,
			       tree ar, tree ai, tree br, tree bi,
			       complex_lattice_t al, complex_lattice_t bl)
{
  tree rr, ri;

  if (al < bl)
    {
      /* Multiplication is commutative: swap the operands so that
	 AL >= BL, halving the number of cases below.  */
      complex_lattice_t tl;
      rr = ar, ar = br, br = rr;
      ri = ai, ai = bi, bi = ri;
      tl = al, al = bl, bl = tl;
    }

  switch (PAIR (al, bl))
    {
    case PAIR (ONLY_REAL, ONLY_REAL):
      rr = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, br);
      ri = ai;
      break;

    case PAIR (ONLY_IMAG, ONLY_REAL):
      rr = ar;
      /* Skip the multiply when the imaginary factor is exactly 1.0.  */
      if (TREE_CODE (ai) == REAL_CST
	  && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst1))
	ri = br;
      else
	ri = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, br);
      break;

    case PAIR (ONLY_IMAG, ONLY_IMAG):
      /* (i*ai) * (i*bi) = -(ai*bi): the product is purely real.  */
      rr = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, bi);
      rr = gimplify_build1 (bsi, NEGATE_EXPR, inner_type, rr);
      ri = ar;
      break;

    case PAIR (VARYING, ONLY_REAL):
      rr = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, br);
      ri = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, br);
      break;

    case PAIR (VARYING, ONLY_IMAG):
      rr = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, bi);
      rr = gimplify_build1 (bsi, NEGATE_EXPR, inner_type, rr);
      ri = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, bi);
      break;

    case PAIR (VARYING, VARYING):
      /* flag_complex_method == 2 selects the C99 library routine for
	 floating-point multiplies.  */
      if (flag_complex_method == 2 && SCALAR_FLOAT_TYPE_P (inner_type))
	{
	  expand_complex_libcall (bsi, ar, ai, br, bi, MULT_EXPR);
	  return;
	}
      else
	{
	  tree t1, t2, t3, t4;

	  t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, br);
	  t2 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, bi);
	  t3 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, bi);

	  /* Avoid expanding redundant multiplication for the common
	     case of squaring a complex number.  */
	  if (ar == br && ai == bi)
	    t4 = t3;
	  else
	    t4 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, br);

	  rr = gimplify_build2 (bsi, MINUS_EXPR, inner_type, t1, t2);
	  ri = gimplify_build2 (bsi, PLUS_EXPR, inner_type, t3, t4);
	}
      break;

    default:
      gcc_unreachable ();
    }

  update_complex_assignment (bsi, rr, ri);
}
890
891 /* Expand complex division to scalars, straightforward algorithm.
892 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
893 t = br*br + bi*bi
894 */
895
896 static void
897 expand_complex_div_straight (block_stmt_iterator *bsi, tree inner_type,
898 tree ar, tree ai, tree br, tree bi,
899 enum tree_code code)
900 {
901 tree rr, ri, div, t1, t2, t3;
902
903 t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, br, br);
904 t2 = gimplify_build2 (bsi, MULT_EXPR, inner_type, bi, bi);
905 div = gimplify_build2 (bsi, PLUS_EXPR, inner_type, t1, t2);
906
907 t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, br);
908 t2 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, bi);
909 t3 = gimplify_build2 (bsi, PLUS_EXPR, inner_type, t1, t2);
910 rr = gimplify_build2 (bsi, code, inner_type, t3, div);
911
912 t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, br);
913 t2 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, bi);
914 t3 = gimplify_build2 (bsi, MINUS_EXPR, inner_type, t1, t2);
915 ri = gimplify_build2 (bsi, code, inner_type, t3, div);
916
917 update_complex_assignment (bsi, rr, ri);
918 }
919
/* Expand complex division to scalars, modified algorithm to minimize
   overflow with wide input ranges.  The division is scaled by the ratio
   of the divisor's components, choosing br/bi or bi/br depending on
   which component has the larger magnitude.  When the |br| < |bi|
   comparison doesn't fold at compile time, a diamond CFG is built with
   one branch computing each variant.  */

static void
expand_complex_div_wide (block_stmt_iterator *bsi, tree inner_type,
			 tree ar, tree ai, tree br, tree bi,
			 enum tree_code code)
{
  tree rr, ri, ratio, div, t1, t2, tr, ti, cond;
  basic_block bb_cond, bb_true, bb_false, bb_join;

  /* Examine |br| < |bi|, and branch.  */
  t1 = gimplify_build1 (bsi, ABS_EXPR, inner_type, br);
  t2 = gimplify_build1 (bsi, ABS_EXPR, inner_type, bi);
  cond = fold (build (LT_EXPR, boolean_type_node, t1, t2));
  STRIP_NOPS (cond);

  bb_cond = bb_true = bb_false = bb_join = NULL;
  rr = ri = tr = ti = NULL;
  if (!TREE_CONSTANT (cond))
    {
      edge e;

      cond = build (COND_EXPR, void_type_node, cond, NULL, NULL);
      bsi_insert_before (bsi, cond, BSI_SAME_STMT);

      /* Split the original block, and create the TRUE and FALSE blocks.  */
      e = split_block (bsi->bb, cond);
      bb_cond = e->src;
      bb_join = e->dest;
      bb_true = create_empty_bb (bb_cond);
      bb_false = create_empty_bb (bb_true);

      t1 = build (GOTO_EXPR, void_type_node, tree_block_label (bb_true));
      t2 = build (GOTO_EXPR, void_type_node, tree_block_label (bb_false));
      COND_EXPR_THEN (cond) = t1;
      COND_EXPR_ELSE (cond) = t2;

      /* Wire the blocks together.  */
      e->flags = EDGE_TRUE_VALUE;
      redirect_edge_succ (e, bb_true);
      make_edge (bb_cond, bb_false, EDGE_FALSE_VALUE);
      make_edge (bb_true, bb_join, EDGE_FALLTHRU);
      make_edge (bb_false, bb_join, EDGE_FALLTHRU);

      /* Update dominance info.  Note that bb_join's data was
	 updated by split_block.  */
      if (dom_info_available_p (CDI_DOMINATORS))
	{
	  set_immediate_dominator (CDI_DOMINATORS, bb_true, bb_cond);
	  set_immediate_dominator (CDI_DOMINATORS, bb_false, bb_cond);
	}

      /* Temporaries carrying each branch's result into bb_join.  */
      rr = make_rename_temp (inner_type, NULL);
      ri = make_rename_temp (inner_type, NULL);
    }

  /* In the TRUE branch, we compute
      ratio = br/bi;
      div = (br * ratio) + bi;
      tr = (ar * ratio) + ai;
      ti = (ai * ratio) - ar;
      tr = tr / div;
      ti = ti / div;  */
  if (bb_true || integer_nonzerop (cond))
    {
      if (bb_true)
	{
	  /* Point BSI at a placeholder statement in bb_true so the
	     gimplified computations are inserted there.  */
	  *bsi = bsi_last (bb_true);
	  bsi_insert_after (bsi, build_empty_stmt (), BSI_NEW_STMT);
	}

      ratio = gimplify_build2 (bsi, code, inner_type, br, bi);

      t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, br, ratio);
      div = gimplify_build2 (bsi, PLUS_EXPR, inner_type, t1, bi);

      t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, ratio);
      tr = gimplify_build2 (bsi, PLUS_EXPR, inner_type, t1, ai);

      t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, ratio);
      ti = gimplify_build2 (bsi, MINUS_EXPR, inner_type, t1, ar);

      tr = gimplify_build2 (bsi, code, inner_type, tr, div);
      ti = gimplify_build2 (bsi, code, inner_type, ti, div);

      if (bb_true)
	{
	  /* Store the results and remove the placeholder.  */
	  t1 = build (MODIFY_EXPR, inner_type, rr, tr);
	  bsi_insert_before (bsi, t1, BSI_SAME_STMT);
	  t1 = build (MODIFY_EXPR, inner_type, ri, ti);
	  bsi_insert_before (bsi, t1, BSI_SAME_STMT);
	  bsi_remove (bsi);
	}
    }

  /* In the FALSE branch, we compute
      ratio = d/c;
      divisor = (d * ratio) + c;
      tr = (b * ratio) + a;
      ti = b - (a * ratio);
      tr = tr / div;
      ti = ti / div;  */
  if (bb_false || integer_zerop (cond))
    {
      if (bb_false)
	{
	  *bsi = bsi_last (bb_false);
	  bsi_insert_after (bsi, build_empty_stmt (), BSI_NEW_STMT);
	}

      ratio = gimplify_build2 (bsi, code, inner_type, bi, br);

      t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, bi, ratio);
      div = gimplify_build2 (bsi, PLUS_EXPR, inner_type, t1, br);

      t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, ratio);
      tr = gimplify_build2 (bsi, PLUS_EXPR, inner_type, t1, ar);

      t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, ratio);
      ti = gimplify_build2 (bsi, MINUS_EXPR, inner_type, ai, t1);

      tr = gimplify_build2 (bsi, code, inner_type, tr, div);
      ti = gimplify_build2 (bsi, code, inner_type, ti, div);

      if (bb_false)
	{
	  t1 = build (MODIFY_EXPR, inner_type, rr, tr);
	  bsi_insert_before (bsi, t1, BSI_SAME_STMT);
	  t1 = build (MODIFY_EXPR, inner_type, ri, ti);
	  bsi_insert_before (bsi, t1, BSI_SAME_STMT);
	  bsi_remove (bsi);
	}
    }

  /* If we constructed the diamond, continue in the join block and use
     the temporaries; otherwise the straight-line results are final.  */
  if (bb_join)
    *bsi = bsi_start (bb_join);
  else
    rr = tr, ri = ti;

  update_complex_assignment (bsi, rr, ri);
}
1062
1063 /* Expand complex division to scalars. */
1064
1065 static void
1066 expand_complex_division (block_stmt_iterator *bsi, tree inner_type,
1067 tree ar, tree ai, tree br, tree bi,
1068 enum tree_code code,
1069 complex_lattice_t al, complex_lattice_t bl)
1070 {
1071 tree rr, ri;
1072
1073 switch (PAIR (al, bl))
1074 {
1075 case PAIR (ONLY_REAL, ONLY_REAL):
1076 rr = gimplify_build2 (bsi, code, inner_type, ar, br);
1077 ri = ai;
1078 break;
1079
1080 case PAIR (ONLY_REAL, ONLY_IMAG):
1081 rr = ai;
1082 ri = gimplify_build2 (bsi, code, inner_type, ar, bi);
1083 ri = gimplify_build1 (bsi, NEGATE_EXPR, inner_type, ri);
1084 break;
1085
1086 case PAIR (ONLY_IMAG, ONLY_REAL):
1087 rr = ar;
1088 ri = gimplify_build2 (bsi, code, inner_type, ai, br);
1089 break;
1090
1091 case PAIR (ONLY_IMAG, ONLY_IMAG):
1092 rr = gimplify_build2 (bsi, code, inner_type, ai, bi);
1093 ri = ar;
1094 break;
1095
1096 case PAIR (VARYING, ONLY_REAL):
1097 rr = gimplify_build2 (bsi, code, inner_type, ar, br);
1098 ri = gimplify_build2 (bsi, code, inner_type, ai, br);
1099 break;
1100
1101 case PAIR (VARYING, ONLY_IMAG):
1102 rr = gimplify_build2 (bsi, code, inner_type, ai, bi);
1103 ri = gimplify_build2 (bsi, code, inner_type, ar, bi);
1104 ri = gimplify_build1 (bsi, NEGATE_EXPR, inner_type, ri);
1105
1106 case PAIR (ONLY_REAL, VARYING):
1107 case PAIR (ONLY_IMAG, VARYING):
1108 case PAIR (VARYING, VARYING):
1109 switch (flag_complex_method)
1110 {
1111 case 0:
1112 /* straightforward implementation of complex divide acceptable. */
1113 expand_complex_div_straight (bsi, inner_type, ar, ai, br, bi, code);
1114 break;
1115
1116 case 2:
1117 if (SCALAR_FLOAT_TYPE_P (inner_type))
1118 {
1119 expand_complex_libcall (bsi, ar, ai, br, bi, code);
1120 break;
1121 }
1122 /* FALLTHRU */
1123
1124 case 1:
1125 /* wide ranges of inputs must work for complex divide. */
1126 expand_complex_div_wide (bsi, inner_type, ar, ai, br, bi, code);
1127 break;
1128
1129 default:
1130 gcc_unreachable ();
1131 }
1132 return;
1133
1134 default:
1135 gcc_unreachable ();
1136 }
1137
1138 update_complex_assignment (bsi, rr, ri);
1139 }
1140
1141 /* Expand complex negation to scalars:
1142 -a = (-ar) + i(-ai)
1143 */
1144
1145 static void
1146 expand_complex_negation (block_stmt_iterator *bsi, tree inner_type,
1147 tree ar, tree ai)
1148 {
1149 tree rr, ri;
1150
1151 rr = gimplify_build1 (bsi, NEGATE_EXPR, inner_type, ar);
1152 ri = gimplify_build1 (bsi, NEGATE_EXPR, inner_type, ai);
1153
1154 update_complex_assignment (bsi, rr, ri);
1155 }
1156
1157 /* Expand complex conjugate to scalars:
1158 ~a = (ar) + i(-ai)
1159 */
1160
1161 static void
1162 expand_complex_conjugate (block_stmt_iterator *bsi, tree inner_type,
1163 tree ar, tree ai)
1164 {
1165 tree ri;
1166
1167 ri = gimplify_build1 (bsi, NEGATE_EXPR, inner_type, ai);
1168
1169 update_complex_assignment (bsi, ar, ri);
1170 }
1171
1172 /* Expand complex comparison (EQ or NE only). */
1173
static void
expand_complex_comparison (block_stmt_iterator *bsi, tree ar, tree ai,
			   tree br, tree bi, enum tree_code code)
{
  tree cr, ci, cc, stmt, expr, type;

  /* Compare real parts and imaginary parts separately.  */
  cr = gimplify_build2 (bsi, code, boolean_type_node, ar, br);
  ci = gimplify_build2 (bsi, code, boolean_type_node, ai, bi);
  /* Equality needs both component comparisons to hold; inequality
     holds if either component differs.  */
  cc = gimplify_build2 (bsi,
			(code == EQ_EXPR ? TRUTH_AND_EXPR : TRUTH_OR_EXPR),
			boolean_type_node, cr, ci);

  stmt = expr = bsi_stmt (*bsi);

  /* Splice the combined scalar result back into the statement that
     consumed the complex comparison.  */
  switch (TREE_CODE (stmt))
    {
    case RETURN_EXPR:
      /* For a return, the MODIFY_EXPR setting the result lives in
	 operand 0; fall through to rewrite its RHS.  */
      expr = TREE_OPERAND (stmt, 0);
      /* FALLTHRU */
    case MODIFY_EXPR:
      type = TREE_TYPE (TREE_OPERAND (expr, 1));
      TREE_OPERAND (expr, 1) = fold_convert (type, cc);
      break;
    case COND_EXPR:
      TREE_OPERAND (stmt, 0) = cc;
      break;
    default:
      gcc_unreachable ();
    }

  update_stmt (stmt);
}
1206
1207 /* Process one statement. If we identify a complex operation, expand it. */
1208
static void
expand_complex_operations_1 (block_stmt_iterator *bsi)
{
  tree stmt = bsi_stmt (*bsi);
  tree rhs, type, inner_type;
  tree ac, ar, ai, bc, br, bi;
  complex_lattice_t al, bl;
  enum tree_code code;

  /* Locate the expression this statement computes, if any.  */
  switch (TREE_CODE (stmt))
    {
    case RETURN_EXPR:
      /* A return either carries no value or wraps a MODIFY_EXPR that
	 sets the result; only the latter can contain a complex op.  */
      stmt = TREE_OPERAND (stmt, 0);
      if (!stmt)
	return;
      if (TREE_CODE (stmt) != MODIFY_EXPR)
	return;
      /* FALLTHRU */

    case MODIFY_EXPR:
      rhs = TREE_OPERAND (stmt, 1);
      break;

    case COND_EXPR:
      rhs = TREE_OPERAND (stmt, 0);
      break;

    default:
      return;
    }

  type = TREE_TYPE (rhs);
  code = TREE_CODE (rhs);

  /* Initial filter for operations we handle.  */
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case RDIV_EXPR:
    case NEGATE_EXPR:
    case CONJ_EXPR:
      if (TREE_CODE (type) != COMPLEX_TYPE)
	return;
      inner_type = TREE_TYPE (type);
      break;

    case EQ_EXPR:
    case NE_EXPR:
      /* Comparisons yield a scalar, so look at an operand's type to
	 tell whether complex values are being compared.  */
      inner_type = TREE_TYPE (TREE_OPERAND (rhs, 1));
      if (TREE_CODE (inner_type) != COMPLEX_TYPE)
	return;
      break;

    default:
      {
	tree lhs = TREE_OPERAND (stmt, 0);
	tree rhs = TREE_OPERAND (stmt, 1);

	/* Not an arithmetic op we expand, but plain moves of complex
	   values and REALPART/IMAGPART extractions from SSA names
	   still need rewriting in terms of the component variables.  */
	if (TREE_CODE (type) == COMPLEX_TYPE)
	  expand_complex_move (bsi, stmt, type, lhs, rhs);
	else if ((TREE_CODE (rhs) == REALPART_EXPR
		  || TREE_CODE (rhs) == IMAGPART_EXPR)
		 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
	  {
	    TREE_OPERAND (stmt, 1)
	      = extract_component (bsi, TREE_OPERAND (rhs, 0),
				   TREE_CODE (rhs) == IMAGPART_EXPR, false);
	    update_stmt (stmt);
	  }
      }
      return;
    }

  /* Extract the components of the two complex values.  Make sure and
     handle the common case of the same value used twice specially.  */
  ac = TREE_OPERAND (rhs, 0);
  ar = extract_component (bsi, ac, 0, true);
  ai = extract_component (bsi, ac, 1, true);

  if (TREE_CODE_CLASS (code) == tcc_unary)
    bc = br = bi = NULL;
  else
    {
      bc = TREE_OPERAND (rhs, 1);
      if (ac == bc)
	br = ar, bi = ai;
      else
	{
	  br = extract_component (bsi, bc, 0, true);
	  bi = extract_component (bsi, bc, 1, true);
	}
    }

  /* Fetch lattice values so the expanders can exploit operands known
     to be purely real or purely imaginary; without SSA information
     everything is treated as VARYING.  */
  if (in_ssa_p)
    {
      al = find_lattice_value (ac);
      if (al == UNINITIALIZED)
	al = VARYING;

      if (TREE_CODE_CLASS (code) == tcc_unary)
	bl = UNINITIALIZED;
      else if (ac == bc)
	bl = al;
      else
	{
	  bl = find_lattice_value (bc);
	  if (bl == UNINITIALIZED)
	    bl = VARYING;
	}
    }
  else
    al = bl = VARYING;

  /* Dispatch to the per-operation expander.  */
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      expand_complex_addition (bsi, inner_type, ar, ai, br, bi, code, al, bl);
      break;

    case MULT_EXPR:
      expand_complex_multiplication (bsi, inner_type, ar, ai, br, bi, al, bl);
      break;

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case RDIV_EXPR:
      expand_complex_division (bsi, inner_type, ar, ai, br, bi, code, al, bl);
      break;

    case NEGATE_EXPR:
      expand_complex_negation (bsi, inner_type, ar, ai);
      break;

    case CONJ_EXPR:
      expand_complex_conjugate (bsi, inner_type, ar, ai);
      break;

    case EQ_EXPR:
    case NE_EXPR:
      expand_complex_comparison (bsi, ar, ai, br, bi, code);
      break;

    default:
      gcc_unreachable ();
    }
}
1364
1365 \f
1366 /* Entry point for complex operation lowering during optimization. */
1367
static void
tree_lower_complex (void)
{
  int old_last_basic_block;
  block_stmt_iterator bsi;
  basic_block bb;

  /* Nothing to do if the function contains no complex operations
     worth simulating.  */
  if (!init_dont_simulate_again ())
    return;

  /* Allocate one lattice slot per SSA name, zeroed so every entry
     starts as UNINITIALIZED.  */
  complex_lattice_values = VEC_alloc (complex_lattice_t, heap, num_ssa_names);
  VEC_safe_grow (complex_lattice_t, heap,
		 complex_lattice_values, num_ssa_names);
  memset (VEC_address (complex_lattice_t, complex_lattice_values), 0,
	  num_ssa_names * sizeof(complex_lattice_t));
  init_parameter_lattice_values ();

  /* Run the SSA propagation engine to classify each complex SSA name
     as ONLY_REAL, ONLY_IMAG, or VARYING.  */
  ssa_propagate (complex_visit_stmt, complex_visit_phi);

  create_components ();
  update_parameter_components ();

  /* Snapshot the block count first: blocks added while expanding get
     indexes past the original count and need no further lowering.  */
  old_last_basic_block = last_basic_block;
  FOR_EACH_BB (bb)
    {
      if (bb->index >= old_last_basic_block)
	continue;
      update_phi_components (bb);
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	expand_complex_operations_1 (&bsi);
    }

  bsi_commit_edge_inserts ();

  VEC_free (tree, heap, complex_variable_components);
  VEC_free (complex_lattice_t, heap, complex_lattice_values);
}
1405
/* Pass descriptor for complex lowering of optimized (SSA-form) code;
   no gate, so it always runs once SSA properties are available.  */
struct tree_opt_pass pass_lower_complex =
{
  "cplxlower",				/* name */
  0,					/* gate */
  tree_lower_complex,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_ssa,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_ggc_collect
    | TODO_update_ssa
    | TODO_verify_stmts,		/* todo_flags_finish */
  0					/* letter */
};
1424
1425 \f
1426 /* Entry point for complex operation lowering without optimization. */
1427
1428 static void
1429 tree_lower_complex_O0 (void)
1430 {
1431 int old_last_basic_block = last_basic_block;
1432 block_stmt_iterator bsi;
1433 basic_block bb;
1434
1435 FOR_EACH_BB (bb)
1436 {
1437 if (bb->index >= old_last_basic_block)
1438 continue;
1439 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1440 expand_complex_operations_1 (&bsi);
1441 }
1442 }
1443
1444 static bool
1445 gate_no_optimization (void)
1446 {
1447 return optimize == 0;
1448 }
1449
/* Pass descriptor for complex lowering at -O0, gated on optimization
   being disabled; requires only a CFG, not SSA form.  */
struct tree_opt_pass pass_lower_complex_O0 =
{
  "cplxlower0",				/* name */
  gate_no_optimization,			/* gate */
  tree_lower_complex_O0,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_ggc_collect
    | TODO_verify_stmts,		/* todo_flags_finish */
  0					/* letter */
};