]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/gimplify.c
268bef184cfe548406fdefc4a77ad7a9904b5528
[thirdparty/gcc.git] / gcc / gimplify.c
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
5 Major work done by Sebastian Pop <s.pop@laposte.net>,
6 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 2, or (at your option) any later
13 version.
14
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING. If not, write to the Free
22 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
23 02110-1301, USA. */
24
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "tm.h"
29 #include "tree.h"
30 #include "rtl.h"
31 #include "varray.h"
32 #include "tree-gimple.h"
33 #include "tree-inline.h"
34 #include "diagnostic.h"
35 #include "langhooks.h"
36 #include "langhooks-def.h"
37 #include "tree-flow.h"
38 #include "cgraph.h"
39 #include "timevar.h"
40 #include "except.h"
41 #include "hashtab.h"
42 #include "flags.h"
43 #include "real.h"
44 #include "function.h"
45 #include "output.h"
46 #include "expr.h"
47 #include "ggc.h"
48 #include "toplev.h"
49 #include "target.h"
50 #include "optabs.h"
51 #include "pointer-set.h"
52 #include "splay-tree.h"
53
54
55 enum gimplify_omp_var_data
56 {
57 GOVD_SEEN = 1,
58 GOVD_EXPLICIT = 2,
59 GOVD_SHARED = 4,
60 GOVD_PRIVATE = 8,
61 GOVD_FIRSTPRIVATE = 16,
62 GOVD_LASTPRIVATE = 32,
63 GOVD_REDUCTION = 64,
64 GOVD_LOCAL = 128,
65 GOVD_DEBUG_PRIVATE = 256,
66 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
67 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL)
68 };
69
70 struct gimplify_omp_ctx
71 {
72 struct gimplify_omp_ctx *outer_context;
73 splay_tree variables;
74 struct pointer_set_t *privatized_types;
75 location_t location;
76 enum omp_clause_default_kind default_kind;
77 bool is_parallel;
78 bool is_combined_parallel;
79 };
80
81 struct gimplify_ctx
82 {
83 struct gimplify_ctx *prev_context;
84
85 tree current_bind_expr;
86 tree temps;
87 tree conditional_cleanups;
88 tree exit_label;
89 tree return_temp;
90
91 VEC(tree,heap) *case_labels;
92 /* The formal temporary table. Should this be persistent? */
93 htab_t temp_htab;
94
95 int conditions;
96 bool save_stack;
97 bool into_ssa;
98 };
99
100 static struct gimplify_ctx *gimplify_ctxp;
101 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
102
103
104
105 /* Formal (expression) temporary table handling: Multiple occurrences of
106 the same scalar expression are evaluated into the same temporary. */
107
108 typedef struct gimple_temp_hash_elt
109 {
110 tree val; /* Key */
111 tree temp; /* Value */
112 } elt_t;
113
114 /* Forward declarations. */
115 static enum gimplify_status gimplify_compound_expr (tree *, tree *, bool);
116 #ifdef ENABLE_CHECKING
117 static bool cpt_same_type (tree a, tree b);
118 #endif
119
120
121 /* Return a hash value for a formal temporary table entry. */
122
123 static hashval_t
124 gimple_tree_hash (const void *p)
125 {
126 tree t = ((const elt_t *) p)->val;
127 return iterative_hash_expr (t, 0);
128 }
129
130 /* Compare two formal temporary table entries. */
131
132 static int
133 gimple_tree_eq (const void *p1, const void *p2)
134 {
135 tree t1 = ((const elt_t *) p1)->val;
136 tree t2 = ((const elt_t *) p2)->val;
137 enum tree_code code = TREE_CODE (t1);
138
139 if (TREE_CODE (t2) != code
140 || TREE_TYPE (t1) != TREE_TYPE (t2))
141 return 0;
142
143 if (!operand_equal_p (t1, t2, 0))
144 return 0;
145
146 /* Only allow them to compare equal if they also hash equal; otherwise
147 results are nondeterminate, and we fail bootstrap comparison. */
148 gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2));
149
150 return 1;
151 }
152
153 /* Set up a context for the gimplifier. */
154
155 void
156 push_gimplify_context (void)
157 {
158 struct gimplify_ctx *c;
159
160 c = (struct gimplify_ctx *) xcalloc (1, sizeof (struct gimplify_ctx));
161 c->prev_context = gimplify_ctxp;
162 if (optimize)
163 c->temp_htab = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free);
164
165 gimplify_ctxp = c;
166 }
167
168 /* Tear down a context for the gimplifier. If BODY is non-null, then
169 put the temporaries into the outer BIND_EXPR. Otherwise, put them
170 in the unexpanded_var_list. */
171
172 void
173 pop_gimplify_context (tree body)
174 {
175 struct gimplify_ctx *c = gimplify_ctxp;
176 tree t;
177
178 gcc_assert (c && !c->current_bind_expr);
179 gimplify_ctxp = c->prev_context;
180
181 for (t = c->temps; t ; t = TREE_CHAIN (t))
182 DECL_GIMPLE_FORMAL_TEMP_P (t) = 0;
183
184 if (body)
185 declare_vars (c->temps, body, false);
186 else
187 record_vars (c->temps);
188
189 if (optimize)
190 htab_delete (c->temp_htab);
191 free (c);
192 }
193
194 static void
195 gimple_push_bind_expr (tree bind)
196 {
197 TREE_CHAIN (bind) = gimplify_ctxp->current_bind_expr;
198 gimplify_ctxp->current_bind_expr = bind;
199 }
200
201 static void
202 gimple_pop_bind_expr (void)
203 {
204 gimplify_ctxp->current_bind_expr
205 = TREE_CHAIN (gimplify_ctxp->current_bind_expr);
206 }
207
208 tree
209 gimple_current_bind_expr (void)
210 {
211 return gimplify_ctxp->current_bind_expr;
212 }
213
214 /* Returns true iff there is a COND_EXPR between us and the innermost
215 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
216
217 static bool
218 gimple_conditional_context (void)
219 {
220 return gimplify_ctxp->conditions > 0;
221 }
222
223 /* Note that we've entered a COND_EXPR. */
224
225 static void
226 gimple_push_condition (void)
227 {
228 #ifdef ENABLE_CHECKING
229 if (gimplify_ctxp->conditions == 0)
230 gcc_assert (!gimplify_ctxp->conditional_cleanups);
231 #endif
232 ++(gimplify_ctxp->conditions);
233 }
234
235 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
236 now, add any conditional cleanups we've seen to the prequeue. */
237
238 static void
239 gimple_pop_condition (tree *pre_p)
240 {
241 int conds = --(gimplify_ctxp->conditions);
242
243 gcc_assert (conds >= 0);
244 if (conds == 0)
245 {
246 append_to_statement_list (gimplify_ctxp->conditional_cleanups, pre_p);
247 gimplify_ctxp->conditional_cleanups = NULL_TREE;
248 }
249 }
250
251 /* A stable comparison routine for use with splay trees and DECLs. */
252
253 static int
254 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
255 {
256 tree a = (tree) xa;
257 tree b = (tree) xb;
258
259 return DECL_UID (a) - DECL_UID (b);
260 }
261
262 /* Create a new omp construct that deals with variable remapping. */
263
264 static struct gimplify_omp_ctx *
265 new_omp_context (bool is_parallel, bool is_combined_parallel)
266 {
267 struct gimplify_omp_ctx *c;
268
269 c = XCNEW (struct gimplify_omp_ctx);
270 c->outer_context = gimplify_omp_ctxp;
271 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
272 c->privatized_types = pointer_set_create ();
273 c->location = input_location;
274 c->is_parallel = is_parallel;
275 c->is_combined_parallel = is_combined_parallel;
276 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
277
278 return c;
279 }
280
281 /* Destroy an omp construct that deals with variable remapping. */
282
283 static void
284 delete_omp_context (struct gimplify_omp_ctx *c)
285 {
286 splay_tree_delete (c->variables);
287 pointer_set_destroy (c->privatized_types);
288 XDELETE (c);
289 }
290
291 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
292 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
293
294 /* A subroutine of append_to_statement_list{,_force}. T is not NULL. */
295
296 static void
297 append_to_statement_list_1 (tree t, tree *list_p)
298 {
299 tree list = *list_p;
300 tree_stmt_iterator i;
301
302 if (!list)
303 {
304 if (t && TREE_CODE (t) == STATEMENT_LIST)
305 {
306 *list_p = t;
307 return;
308 }
309 *list_p = list = alloc_stmt_list ();
310 }
311
312 i = tsi_last (list);
313 tsi_link_after (&i, t, TSI_CONTINUE_LINKING);
314 }
315
316 /* Add T to the end of the list container pointed to by LIST_P.
317 If T is an expression with no effects, it is ignored. */
318
319 void
320 append_to_statement_list (tree t, tree *list_p)
321 {
322 if (t && TREE_SIDE_EFFECTS (t))
323 append_to_statement_list_1 (t, list_p);
324 }
325
326 /* Similar, but the statement is always added, regardless of side effects. */
327
328 void
329 append_to_statement_list_force (tree t, tree *list_p)
330 {
331 if (t != NULL_TREE)
332 append_to_statement_list_1 (t, list_p);
333 }
334
335 /* Both gimplify the statement T and append it to LIST_P. */
336
337 void
338 gimplify_and_add (tree t, tree *list_p)
339 {
340 gimplify_stmt (&t);
341 append_to_statement_list (t, list_p);
342 }
343
344 /* Strip off a legitimate source ending from the input string NAME of
345 length LEN. Rather than having to know the names used by all of
346 our front ends, we strip off an ending of a period followed by
347 up to five characters. (Java uses ".class".) */
348
349 static inline void
350 remove_suffix (char *name, int len)
351 {
352 int i;
353
354 for (i = 2; i < 8 && len > i; i++)
355 {
356 if (name[len - i] == '.')
357 {
358 name[len - i] = '\0';
359 break;
360 }
361 }
362 }
363
364 /* Create a nameless artificial label and put it in the current function
365 context. Returns the newly created label. */
366
367 tree
368 create_artificial_label (void)
369 {
370 tree lab = build_decl (LABEL_DECL, NULL_TREE, void_type_node);
371
372 DECL_ARTIFICIAL (lab) = 1;
373 DECL_IGNORED_P (lab) = 1;
374 DECL_CONTEXT (lab) = current_function_decl;
375 return lab;
376 }
377
378 /* Subroutine for find_single_pointer_decl. */
379
380 static tree
381 find_single_pointer_decl_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
382 void *data)
383 {
384 tree *pdecl = (tree *) data;
385
386 if (DECL_P (*tp) && POINTER_TYPE_P (TREE_TYPE (*tp)))
387 {
388 if (*pdecl)
389 {
390 /* We already found a pointer decl; return anything other
391 than NULL_TREE to unwind from walk_tree signalling that
392 we have a duplicate. */
393 return *tp;
394 }
395 *pdecl = *tp;
396 }
397
398 return NULL_TREE;
399 }
400
401 /* Find the single DECL of pointer type in the tree T and return it.
402 If there are zero or more than one such DECLs, return NULL. */
403
404 static tree
405 find_single_pointer_decl (tree t)
406 {
407 tree decl = NULL_TREE;
408
409 if (walk_tree (&t, find_single_pointer_decl_1, &decl, NULL))
410 {
411 /* find_single_pointer_decl_1 returns a nonzero value, causing
412 walk_tree to return a nonzero value, to indicate that it
413 found more than one pointer DECL. */
414 return NULL_TREE;
415 }
416
417 return decl;
418 }
419
420 /* Create a new temporary name with PREFIX. Returns an identifier. */
421
422 static GTY(()) unsigned int tmp_var_id_num;
423
424 tree
425 create_tmp_var_name (const char *prefix)
426 {
427 char *tmp_name;
428
429 if (prefix)
430 {
431 char *preftmp = ASTRDUP (prefix);
432
433 remove_suffix (preftmp, strlen (preftmp));
434 prefix = preftmp;
435 }
436
437 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
438 return get_identifier (tmp_name);
439 }
440
441
442 /* Create a new temporary variable declaration of type TYPE.
443 Does NOT push it into the current binding. */
444
445 tree
446 create_tmp_var_raw (tree type, const char *prefix)
447 {
448 tree tmp_var;
449 tree new_type;
450
451 /* Make the type of the variable writable. */
452 new_type = build_type_variant (type, 0, 0);
453 TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);
454
455 tmp_var = build_decl (VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
456 type);
457
458 /* The variable was declared by the compiler. */
459 DECL_ARTIFICIAL (tmp_var) = 1;
460 /* And we don't want debug info for it. */
461 DECL_IGNORED_P (tmp_var) = 1;
462
463 /* Make the variable writable. */
464 TREE_READONLY (tmp_var) = 0;
465
466 DECL_EXTERNAL (tmp_var) = 0;
467 TREE_STATIC (tmp_var) = 0;
468 TREE_USED (tmp_var) = 1;
469
470 return tmp_var;
471 }
472
473 /* Create a new temporary variable declaration of type TYPE. DOES push the
474 variable into the current binding. Further, assume that this is called
475 only from gimplification or optimization, at which point the creation of
476 certain types are bugs. */
477
478 tree
479 create_tmp_var (tree type, const char *prefix)
480 {
481 tree tmp_var;
482
483 /* We don't allow types that are addressable (meaning we can't make copies),
484 or incomplete. We also used to reject every variable size objects here,
485 but now support those for which a constant upper bound can be obtained.
486 The processing for variable sizes is performed in gimple_add_tmp_var,
487 point at which it really matters and possibly reached via paths not going
488 through this function, e.g. after direct calls to create_tmp_var_raw. */
489 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
490
491 tmp_var = create_tmp_var_raw (type, prefix);
492 gimple_add_tmp_var (tmp_var);
493 return tmp_var;
494 }
495
496 /* Given a tree, try to return a useful variable name that we can use
497 to prefix a temporary that is being assigned the value of the tree.
498 I.E. given <temp> = &A, return A. */
499
500 const char *
501 get_name (tree t)
502 {
503 tree stripped_decl;
504
505 stripped_decl = t;
506 STRIP_NOPS (stripped_decl);
507 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
508 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
509 else
510 {
511 switch (TREE_CODE (stripped_decl))
512 {
513 case ADDR_EXPR:
514 return get_name (TREE_OPERAND (stripped_decl, 0));
515 default:
516 return NULL;
517 }
518 }
519 }
520
521 /* Create a temporary with a name derived from VAL. Subroutine of
522 lookup_tmp_var; nobody else should call this function. */
523
524 static inline tree
525 create_tmp_from_val (tree val)
526 {
527 return create_tmp_var (TYPE_MAIN_VARIANT (TREE_TYPE (val)), get_name (val));
528 }
529
530 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
531 an existing expression temporary. */
532
533 static tree
534 lookup_tmp_var (tree val, bool is_formal)
535 {
536 tree ret;
537
538 /* If not optimizing, never really reuse a temporary. local-alloc
539 won't allocate any variable that is used in more than one basic
540 block, which means it will go into memory, causing much extra
541 work in reload and final and poorer code generation, outweighing
542 the extra memory allocation here. */
543 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
544 ret = create_tmp_from_val (val);
545 else
546 {
547 elt_t elt, *elt_p;
548 void **slot;
549
550 elt.val = val;
551 slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *)&elt, INSERT);
552 if (*slot == NULL)
553 {
554 elt_p = XNEW (elt_t);
555 elt_p->val = val;
556 elt_p->temp = ret = create_tmp_from_val (val);
557 *slot = (void *) elt_p;
558 }
559 else
560 {
561 elt_p = (elt_t *) *slot;
562 ret = elt_p->temp;
563 }
564 }
565
566 if (is_formal)
567 DECL_GIMPLE_FORMAL_TEMP_P (ret) = 1;
568
569 return ret;
570 }
571
572 /* Returns a formal temporary variable initialized with VAL. PRE_P is as
573 in gimplify_expr. Only use this function if:
574
575 1) The value of the unfactored expression represented by VAL will not
576 change between the initialization and use of the temporary, and
577 2) The temporary will not be otherwise modified.
578
579 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
580 and #2 means it is inappropriate for && temps.
581
582 For other cases, use get_initialized_tmp_var instead. */
583
584 static tree
585 internal_get_tmp_var (tree val, tree *pre_p, tree *post_p, bool is_formal)
586 {
587 tree t, mod;
588
589 gimplify_expr (&val, pre_p, post_p, is_gimple_formal_tmp_rhs, fb_rvalue);
590
591 t = lookup_tmp_var (val, is_formal);
592
593 if (is_formal)
594 {
595 tree u = find_single_pointer_decl (val);
596
597 if (u && TREE_CODE (u) == VAR_DECL && DECL_BASED_ON_RESTRICT_P (u))
598 u = DECL_GET_RESTRICT_BASE (u);
599 if (u && TYPE_RESTRICT (TREE_TYPE (u)))
600 {
601 if (DECL_BASED_ON_RESTRICT_P (t))
602 gcc_assert (u == DECL_GET_RESTRICT_BASE (t));
603 else
604 {
605 DECL_BASED_ON_RESTRICT_P (t) = 1;
606 SET_DECL_RESTRICT_BASE (t, u);
607 }
608 }
609 }
610
611 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
612 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
613 DECL_GIMPLE_REG_P (t) = 1;
614
615 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
616
617 if (EXPR_HAS_LOCATION (val))
618 SET_EXPR_LOCUS (mod, EXPR_LOCUS (val));
619 else
620 SET_EXPR_LOCATION (mod, input_location);
621
622 /* gimplify_modify_expr might want to reduce this further. */
623 gimplify_and_add (mod, pre_p);
624
625 /* If we're gimplifying into ssa, gimplify_modify_expr will have
626 given our temporary an ssa name. Find and return it. */
627 if (gimplify_ctxp->into_ssa)
628 t = TREE_OPERAND (mod, 0);
629
630 return t;
631 }
632
633 /* Returns a formal temporary variable initialized with VAL. PRE_P
634 points to a statement list where side-effects needed to compute VAL
635 should be stored. */
636
637 tree
638 get_formal_tmp_var (tree val, tree *pre_p)
639 {
640 return internal_get_tmp_var (val, pre_p, NULL, true);
641 }
642
643 /* Returns a temporary variable initialized with VAL. PRE_P and POST_P
644 are as in gimplify_expr. */
645
646 tree
647 get_initialized_tmp_var (tree val, tree *pre_p, tree *post_p)
648 {
649 return internal_get_tmp_var (val, pre_p, post_p, false);
650 }
651
652 /* Declares all the variables in VARS in SCOPE. If DEBUG_INFO is
653 true, generate debug info for them; otherwise don't. */
654
655 void
656 declare_vars (tree vars, tree scope, bool debug_info)
657 {
658 tree last = vars;
659 if (last)
660 {
661 tree temps, block;
662
663 /* C99 mode puts the default 'return 0;' for main outside the outer
664 braces. So drill down until we find an actual scope. */
665 while (TREE_CODE (scope) == COMPOUND_EXPR)
666 scope = TREE_OPERAND (scope, 0);
667
668 gcc_assert (TREE_CODE (scope) == BIND_EXPR);
669
670 temps = nreverse (last);
671
672 block = BIND_EXPR_BLOCK (scope);
673 if (!block || !debug_info)
674 {
675 TREE_CHAIN (last) = BIND_EXPR_VARS (scope);
676 BIND_EXPR_VARS (scope) = temps;
677 }
678 else
679 {
680 /* We need to attach the nodes both to the BIND_EXPR and to its
681 associated BLOCK for debugging purposes. The key point here
682 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
683 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
684 if (BLOCK_VARS (block))
685 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
686 else
687 {
688 BIND_EXPR_VARS (scope) = chainon (BIND_EXPR_VARS (scope), temps);
689 BLOCK_VARS (block) = temps;
690 }
691 }
692 }
693 }
694
695 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
696 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
697 no such upper bound can be obtained. */
698
699 static void
700 force_constant_size (tree var)
701 {
702 /* The only attempt we make is by querying the maximum size of objects
703 of the variable's type. */
704
705 HOST_WIDE_INT max_size;
706
707 gcc_assert (TREE_CODE (var) == VAR_DECL);
708
709 max_size = max_int_size_in_bytes (TREE_TYPE (var));
710
711 gcc_assert (max_size >= 0);
712
713 DECL_SIZE_UNIT (var)
714 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
715 DECL_SIZE (var)
716 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
717 }
718
719 void
720 gimple_add_tmp_var (tree tmp)
721 {
722 gcc_assert (!TREE_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
723
724 /* Later processing assumes that the object size is constant, which might
725 not be true at this point. Force the use of a constant upper bound in
726 this case. */
727 if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
728 force_constant_size (tmp);
729
730 DECL_CONTEXT (tmp) = current_function_decl;
731 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
732
733 if (gimplify_ctxp)
734 {
735 TREE_CHAIN (tmp) = gimplify_ctxp->temps;
736 gimplify_ctxp->temps = tmp;
737
738 /* Mark temporaries local within the nearest enclosing parallel. */
739 if (gimplify_omp_ctxp)
740 {
741 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
742 while (ctx && !ctx->is_parallel)
743 ctx = ctx->outer_context;
744 if (ctx)
745 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
746 }
747 }
748 else if (cfun)
749 record_vars (tmp);
750 else
751 declare_vars (tmp, DECL_SAVED_TREE (current_function_decl), false);
752 }
753
754 /* Determines whether to assign a locus to the statement STMT. */
755
756 static bool
757 should_carry_locus_p (tree stmt)
758 {
759 /* Don't emit a line note for a label. We particularly don't want to
760 emit one for the break label, since it doesn't actually correspond
761 to the beginning of the loop/switch. */
762 if (TREE_CODE (stmt) == LABEL_EXPR)
763 return false;
764
765 /* Do not annotate empty statements, since it confuses gcov. */
766 if (!TREE_SIDE_EFFECTS (stmt))
767 return false;
768
769 return true;
770 }
771
772 static void
773 annotate_one_with_locus (tree t, location_t locus)
774 {
775 if (CAN_HAVE_LOCATION_P (t)
776 && ! EXPR_HAS_LOCATION (t) && should_carry_locus_p (t))
777 SET_EXPR_LOCATION (t, locus);
778 }
779
780 void
781 annotate_all_with_locus (tree *stmt_p, location_t locus)
782 {
783 tree_stmt_iterator i;
784
785 if (!*stmt_p)
786 return;
787
788 for (i = tsi_start (*stmt_p); !tsi_end_p (i); tsi_next (&i))
789 {
790 tree t = tsi_stmt (i);
791
792 /* Assuming we've already been gimplified, we shouldn't
793 see nested chaining constructs anymore. */
794 gcc_assert (TREE_CODE (t) != STATEMENT_LIST
795 && TREE_CODE (t) != COMPOUND_EXPR);
796
797 annotate_one_with_locus (t, locus);
798 }
799 }
800
801 /* Similar to copy_tree_r() but do not copy SAVE_EXPR or TARGET_EXPR nodes.
802 These nodes model computations that should only be done once. If we
803 were to unshare something like SAVE_EXPR(i++), the gimplification
804 process would create wrong code. */
805
806 static tree
807 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
808 {
809 enum tree_code code = TREE_CODE (*tp);
810 /* Don't unshare types, decls, constants and SAVE_EXPR nodes. */
811 if (TREE_CODE_CLASS (code) == tcc_type
812 || TREE_CODE_CLASS (code) == tcc_declaration
813 || TREE_CODE_CLASS (code) == tcc_constant
814 || code == SAVE_EXPR || code == TARGET_EXPR
815 /* We can't do anything sensible with a BLOCK used as an expression,
816 but we also can't just die when we see it because of non-expression
817 uses. So just avert our eyes and cross our fingers. Silly Java. */
818 || code == BLOCK)
819 *walk_subtrees = 0;
820 else
821 {
822 gcc_assert (code != BIND_EXPR);
823 copy_tree_r (tp, walk_subtrees, data);
824 }
825
826 return NULL_TREE;
827 }
828
829 /* Callback for walk_tree to unshare most of the shared trees rooted at
830 *TP. If *TP has been visited already (i.e., TREE_VISITED (*TP) == 1),
831 then *TP is deep copied by calling copy_tree_r.
832
833 This unshares the same trees as copy_tree_r with the exception of
834 SAVE_EXPR nodes. These nodes model computations that should only be
835 done once. If we were to unshare something like SAVE_EXPR(i++), the
836 gimplification process would create wrong code. */
837
838 static tree
839 copy_if_shared_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
840 void *data ATTRIBUTE_UNUSED)
841 {
842 tree t = *tp;
843 enum tree_code code = TREE_CODE (t);
844
845 /* Skip types, decls, and constants. But we do want to look at their
846 types and the bounds of types. Mark them as visited so we properly
847 unmark their subtrees on the unmark pass. If we've already seen them,
848 don't look down further. */
849 if (TREE_CODE_CLASS (code) == tcc_type
850 || TREE_CODE_CLASS (code) == tcc_declaration
851 || TREE_CODE_CLASS (code) == tcc_constant)
852 {
853 if (TREE_VISITED (t))
854 *walk_subtrees = 0;
855 else
856 TREE_VISITED (t) = 1;
857 }
858
859 /* If this node has been visited already, unshare it and don't look
860 any deeper. */
861 else if (TREE_VISITED (t))
862 {
863 walk_tree (tp, mostly_copy_tree_r, NULL, NULL);
864 *walk_subtrees = 0;
865 }
866
867 /* Otherwise, mark the tree as visited and keep looking. */
868 else
869 TREE_VISITED (t) = 1;
870
871 return NULL_TREE;
872 }
873
874 static tree
875 unmark_visited_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
876 void *data ATTRIBUTE_UNUSED)
877 {
878 if (TREE_VISITED (*tp))
879 TREE_VISITED (*tp) = 0;
880 else
881 *walk_subtrees = 0;
882
883 return NULL_TREE;
884 }
885
886 /* Unshare all the trees in BODY_P, a pointer into the body of FNDECL, and the
887 bodies of any nested functions if we are unsharing the entire body of
888 FNDECL. */
889
890 static void
891 unshare_body (tree *body_p, tree fndecl)
892 {
893 struct cgraph_node *cgn = cgraph_node (fndecl);
894
895 walk_tree (body_p, copy_if_shared_r, NULL, NULL);
896 if (body_p == &DECL_SAVED_TREE (fndecl))
897 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
898 unshare_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
899 }
900
901 /* Likewise, but mark all trees as not visited. */
902
903 static void
904 unvisit_body (tree *body_p, tree fndecl)
905 {
906 struct cgraph_node *cgn = cgraph_node (fndecl);
907
908 walk_tree (body_p, unmark_visited_r, NULL, NULL);
909 if (body_p == &DECL_SAVED_TREE (fndecl))
910 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
911 unvisit_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
912 }
913
914 /* Unshare T and all the trees reached from T via TREE_CHAIN. */
915
916 static void
917 unshare_all_trees (tree t)
918 {
919 walk_tree (&t, copy_if_shared_r, NULL, NULL);
920 walk_tree (&t, unmark_visited_r, NULL, NULL);
921 }
922
923 /* Unconditionally make an unshared copy of EXPR. This is used when using
924 stored expressions which span multiple functions, such as BINFO_VTABLE,
925 as the normal unsharing process can't tell that they're shared. */
926
927 tree
928 unshare_expr (tree expr)
929 {
930 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
931 return expr;
932 }
933
934 /* A terser interface for building a representation of an exception
935 specification. */
936
937 tree
938 gimple_build_eh_filter (tree body, tree allowed, tree failure)
939 {
940 tree t;
941
942 /* FIXME should the allowed types go in TREE_TYPE? */
943 t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
944 append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
945
946 t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
947 append_to_statement_list (body, &TREE_OPERAND (t, 0));
948
949 return t;
950 }
951
952 \f
953 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
954 contain statements and have a value. Assign its value to a temporary
955 and give it void_type_node. Returns the temporary, or NULL_TREE if
956 WRAPPER was already void. */
957
958 tree
959 voidify_wrapper_expr (tree wrapper, tree temp)
960 {
961 tree type = TREE_TYPE (wrapper);
962 if (type && !VOID_TYPE_P (type))
963 {
964 tree *p;
965
966 /* Set p to point to the body of the wrapper. Loop until we find
967 something that isn't a wrapper. */
968 for (p = &wrapper; p && *p; )
969 {
970 switch (TREE_CODE (*p))
971 {
972 case BIND_EXPR:
973 TREE_SIDE_EFFECTS (*p) = 1;
974 TREE_TYPE (*p) = void_type_node;
975 /* For a BIND_EXPR, the body is operand 1. */
976 p = &BIND_EXPR_BODY (*p);
977 break;
978
979 case CLEANUP_POINT_EXPR:
980 case TRY_FINALLY_EXPR:
981 case TRY_CATCH_EXPR:
982 TREE_SIDE_EFFECTS (*p) = 1;
983 TREE_TYPE (*p) = void_type_node;
984 p = &TREE_OPERAND (*p, 0);
985 break;
986
987 case STATEMENT_LIST:
988 {
989 tree_stmt_iterator i = tsi_last (*p);
990 TREE_SIDE_EFFECTS (*p) = 1;
991 TREE_TYPE (*p) = void_type_node;
992 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
993 }
994 break;
995
996 case COMPOUND_EXPR:
997 /* Advance to the last statement. Set all container types to void. */
998 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
999 {
1000 TREE_SIDE_EFFECTS (*p) = 1;
1001 TREE_TYPE (*p) = void_type_node;
1002 }
1003 break;
1004
1005 default:
1006 goto out;
1007 }
1008 }
1009
1010 out:
1011 if (p == NULL || IS_EMPTY_STMT (*p))
1012 temp = NULL_TREE;
1013 else if (temp)
1014 {
1015 /* The wrapper is on the RHS of an assignment that we're pushing
1016 down. */
1017 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1018 || TREE_CODE (temp) == GIMPLE_MODIFY_STMT
1019 || TREE_CODE (temp) == MODIFY_EXPR);
1020 GENERIC_TREE_OPERAND (temp, 1) = *p;
1021 *p = temp;
1022 }
1023 else
1024 {
1025 temp = create_tmp_var (type, "retval");
1026 *p = build2 (INIT_EXPR, type, temp, *p);
1027 }
1028
1029 return temp;
1030 }
1031
1032 return NULL_TREE;
1033 }
1034
1035 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1036 a temporary through which they communicate. */
1037
1038 static void
1039 build_stack_save_restore (tree *save, tree *restore)
1040 {
1041 tree save_call, tmp_var;
1042
1043 save_call =
1044 build_call_expr (implicit_built_in_decls[BUILT_IN_STACK_SAVE], 0);
1045 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1046
1047 *save = build_gimple_modify_stmt (tmp_var, save_call);
1048 *restore =
1049 build_call_expr (implicit_built_in_decls[BUILT_IN_STACK_RESTORE],
1050 1, tmp_var);
1051 }
1052
1053 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1054
1055 static enum gimplify_status
1056 gimplify_bind_expr (tree *expr_p, tree *pre_p)
1057 {
1058 tree bind_expr = *expr_p;
1059 bool old_save_stack = gimplify_ctxp->save_stack;
1060 tree t;
1061
1062 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1063
1064 /* Mark variables seen in this bind expr. */
1065 for (t = BIND_EXPR_VARS (bind_expr); t ; t = TREE_CHAIN (t))
1066 {
1067 if (TREE_CODE (t) == VAR_DECL)
1068 {
1069 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1070
1071 /* Mark variable as local. */
1072 if (ctx && !is_global_var (t)
1073 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1074 || splay_tree_lookup (ctx->variables,
1075 (splay_tree_key) t) == NULL))
1076 omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);
1077
1078 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1079 }
1080
1081 /* Preliminarily mark non-addressed complex variables as eligible
1082 for promotion to gimple registers. We'll transform their uses
1083 as we find them. */
1084 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1085 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1086 && !TREE_THIS_VOLATILE (t)
1087 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1088 && !needs_to_live_in_memory (t))
1089 DECL_GIMPLE_REG_P (t) = 1;
1090 }
1091
1092 gimple_push_bind_expr (bind_expr);
1093 gimplify_ctxp->save_stack = false;
1094
1095 gimplify_to_stmt_list (&BIND_EXPR_BODY (bind_expr));
1096
1097 if (gimplify_ctxp->save_stack)
1098 {
1099 tree stack_save, stack_restore;
1100
1101 /* Save stack on entry and restore it on exit. Add a try_finally
1102 block to achieve this. Note that mudflap depends on the
1103 format of the emitted code: see mx_register_decls(). */
1104 build_stack_save_restore (&stack_save, &stack_restore);
1105
1106 t = build2 (TRY_FINALLY_EXPR, void_type_node,
1107 BIND_EXPR_BODY (bind_expr), NULL_TREE);
1108 append_to_statement_list (stack_restore, &TREE_OPERAND (t, 1));
1109
1110 BIND_EXPR_BODY (bind_expr) = NULL_TREE;
1111 append_to_statement_list (stack_save, &BIND_EXPR_BODY (bind_expr));
1112 append_to_statement_list (t, &BIND_EXPR_BODY (bind_expr));
1113 }
1114
1115 gimplify_ctxp->save_stack = old_save_stack;
1116 gimple_pop_bind_expr ();
1117
1118 if (temp)
1119 {
1120 *expr_p = temp;
1121 append_to_statement_list (bind_expr, pre_p);
1122 return GS_OK;
1123 }
1124 else
1125 return GS_ALL_DONE;
1126 }
1127
1128 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1129 GIMPLE value, it is assigned to a new temporary and the statement is
1130 re-written to return the temporary.
1131
1132 PRE_P points to the list where side effects that must happen before
1133 STMT should be stored. */
1134
1135 static enum gimplify_status
1136 gimplify_return_expr (tree stmt, tree *pre_p)
1137 {
1138 tree ret_expr = TREE_OPERAND (stmt, 0);
1139 tree result_decl, result;
1140
1141 if (!ret_expr || TREE_CODE (ret_expr) == RESULT_DECL
1142 || ret_expr == error_mark_node)
1143 return GS_ALL_DONE;
1144
1145 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1146 result_decl = NULL_TREE;
1147 else
1148 {
1149 result_decl = GENERIC_TREE_OPERAND (ret_expr, 0);
1150 if (TREE_CODE (result_decl) == INDIRECT_REF)
1151 /* See through a return by reference. */
1152 result_decl = TREE_OPERAND (result_decl, 0);
1153
1154 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1155 || TREE_CODE (ret_expr) == GIMPLE_MODIFY_STMT
1156 || TREE_CODE (ret_expr) == INIT_EXPR)
1157 && TREE_CODE (result_decl) == RESULT_DECL);
1158 }
1159
1160 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1161 Recall that aggregate_value_p is FALSE for any aggregate type that is
1162 returned in registers. If we're returning values in registers, then
1163 we don't want to extend the lifetime of the RESULT_DECL, particularly
1164 across another call. In addition, for those aggregates for which
1165 hard_function_value generates a PARALLEL, we'll die during normal
1166 expansion of structure assignments; there's special code in expand_return
1167 to handle this case that does not exist in expand_expr. */
1168 if (!result_decl
1169 || aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1170 result = result_decl;
1171 else if (gimplify_ctxp->return_temp)
1172 result = gimplify_ctxp->return_temp;
1173 else
1174 {
1175 result = create_tmp_var (TREE_TYPE (result_decl), NULL);
1176 if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
1177 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
1178 DECL_GIMPLE_REG_P (result) = 1;
1179
1180 /* ??? With complex control flow (usually involving abnormal edges),
1181 we can wind up warning about an uninitialized value for this. Due
1182 to how this variable is constructed and initialized, this is never
1183 true. Give up and never warn. */
1184 TREE_NO_WARNING (result) = 1;
1185
1186 gimplify_ctxp->return_temp = result;
1187 }
1188
1189 /* Smash the lhs of the GIMPLE_MODIFY_STMT to the temporary we plan to use.
1190 Then gimplify the whole thing. */
1191 if (result != result_decl)
1192 GENERIC_TREE_OPERAND (ret_expr, 0) = result;
1193
1194 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1195
1196 /* If we didn't use a temporary, then the result is just the result_decl.
1197 Otherwise we need a simple copy. This should already be gimple. */
1198 if (result == result_decl)
1199 ret_expr = result;
1200 else
1201 ret_expr = build_gimple_modify_stmt (result_decl, result);
1202 TREE_OPERAND (stmt, 0) = ret_expr;
1203
1204 return GS_ALL_DONE;
1205 }
1206
1207 /* Gimplifies a DECL_EXPR node *STMT_P by making any necessary allocation
1208 and initialization explicit. */
1209
1210 static enum gimplify_status
1211 gimplify_decl_expr (tree *stmt_p)
1212 {
1213 tree stmt = *stmt_p;
1214 tree decl = DECL_EXPR_DECL (stmt);
1215
1216 *stmt_p = NULL_TREE;
1217
1218 if (TREE_TYPE (decl) == error_mark_node)
1219 return GS_ERROR;
1220
1221 if ((TREE_CODE (decl) == TYPE_DECL
1222 || TREE_CODE (decl) == VAR_DECL)
1223 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1224 gimplify_type_sizes (TREE_TYPE (decl), stmt_p);
1225
1226 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
1227 {
1228 tree init = DECL_INITIAL (decl);
1229
1230 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1231 {
1232 /* This is a variable-sized decl. Simplify its size and mark it
1233 for deferred expansion. Note that mudflap depends on the format
1234 of the emitted code: see mx_register_decls(). */
1235 tree t, addr, ptr_type;
1236
1237 gimplify_one_sizepos (&DECL_SIZE (decl), stmt_p);
1238 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), stmt_p);
1239
1240 /* All occurrences of this decl in final gimplified code will be
1241 replaced by indirection. Setting DECL_VALUE_EXPR does two
1242 things: First, it lets the rest of the gimplifier know what
1243 replacement to use. Second, it lets the debug info know
1244 where to find the value. */
1245 ptr_type = build_pointer_type (TREE_TYPE (decl));
1246 addr = create_tmp_var (ptr_type, get_name (decl));
1247 DECL_IGNORED_P (addr) = 0;
1248 t = build_fold_indirect_ref (addr);
1249 SET_DECL_VALUE_EXPR (decl, t);
1250 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1251
1252 t = built_in_decls[BUILT_IN_ALLOCA];
1253 t = build_call_expr (t, 1, DECL_SIZE_UNIT (decl));
1254 t = fold_convert (ptr_type, t);
1255 t = build_gimple_modify_stmt (addr, t);
1256
1257 gimplify_and_add (t, stmt_p);
1258
1259 /* Indicate that we need to restore the stack level when the
1260 enclosing BIND_EXPR is exited. */
1261 gimplify_ctxp->save_stack = true;
1262 }
1263
1264 if (init && init != error_mark_node)
1265 {
1266 if (!TREE_STATIC (decl))
1267 {
1268 DECL_INITIAL (decl) = NULL_TREE;
1269 init = build2 (INIT_EXPR, void_type_node, decl, init);
1270 gimplify_and_add (init, stmt_p);
1271 }
1272 else
1273 /* We must still examine initializers for static variables
1274 as they may contain a label address. */
1275 walk_tree (&init, force_labels_r, NULL, NULL);
1276 }
1277
1278 /* Some front ends do not explicitly declare all anonymous
1279 artificial variables. We compensate here by declaring the
1280 variables, though it would be better if the front ends would
1281 explicitly declare them. */
1282 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1283 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1284 gimple_add_tmp_var (decl);
1285 }
1286
1287 return GS_ALL_DONE;
1288 }
1289
1290 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1291 and replacing the LOOP_EXPR with goto, but if the loop contains an
1292 EXIT_EXPR, we need to append a label for it to jump to. */
1293
1294 static enum gimplify_status
1295 gimplify_loop_expr (tree *expr_p, tree *pre_p)
1296 {
1297 tree saved_label = gimplify_ctxp->exit_label;
1298 tree start_label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
1299 tree jump_stmt = build_and_jump (&LABEL_EXPR_LABEL (start_label));
1300
1301 append_to_statement_list (start_label, pre_p);
1302
1303 gimplify_ctxp->exit_label = NULL_TREE;
1304
1305 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1306
1307 if (gimplify_ctxp->exit_label)
1308 {
1309 append_to_statement_list (jump_stmt, pre_p);
1310 *expr_p = build1 (LABEL_EXPR, void_type_node, gimplify_ctxp->exit_label);
1311 }
1312 else
1313 *expr_p = jump_stmt;
1314
1315 gimplify_ctxp->exit_label = saved_label;
1316
1317 return GS_ALL_DONE;
1318 }
1319
1320 /* Compare two case labels. Because the front end should already have
1321 made sure that case ranges do not overlap, it is enough to only compare
1322 the CASE_LOW values of each case label. */
1323
1324 static int
1325 compare_case_labels (const void *p1, const void *p2)
1326 {
1327 tree case1 = *(tree *)p1;
1328 tree case2 = *(tree *)p2;
1329
1330 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
1331 }
1332
1333 /* Sort the case labels in LABEL_VEC in place in ascending order. */
1334
1335 void
1336 sort_case_labels (tree label_vec)
1337 {
1338 size_t len = TREE_VEC_LENGTH (label_vec);
1339 tree default_case = TREE_VEC_ELT (label_vec, len - 1);
1340
1341 if (CASE_LOW (default_case))
1342 {
1343 size_t i;
1344
1345 /* The last label in the vector should be the default case
1346 but it is not. */
1347 for (i = 0; i < len; ++i)
1348 {
1349 tree t = TREE_VEC_ELT (label_vec, i);
1350 if (!CASE_LOW (t))
1351 {
1352 default_case = t;
1353 TREE_VEC_ELT (label_vec, i) = TREE_VEC_ELT (label_vec, len - 1);
1354 TREE_VEC_ELT (label_vec, len - 1) = default_case;
1355 break;
1356 }
1357 }
1358 }
1359
1360 qsort (&TREE_VEC_ELT (label_vec, 0), len - 1, sizeof (tree),
1361 compare_case_labels);
1362 }
1363
1364 /* Gimplify a SWITCH_EXPR, and collect a TREE_VEC of the labels it can
1365 branch to. */
1366
1367 static enum gimplify_status
1368 gimplify_switch_expr (tree *expr_p, tree *pre_p)
1369 {
1370 tree switch_expr = *expr_p;
1371 enum gimplify_status ret;
1372
1373 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL,
1374 is_gimple_val, fb_rvalue);
1375
1376 if (SWITCH_BODY (switch_expr))
1377 {
1378 VEC(tree,heap) *labels, *saved_labels;
1379 tree label_vec, default_case = NULL_TREE;
1380 size_t i, len;
1381
1382 /* If someone can be bothered to fill in the labels, they can
1383 be bothered to null out the body too. */
1384 gcc_assert (!SWITCH_LABELS (switch_expr));
1385
1386 saved_labels = gimplify_ctxp->case_labels;
1387 gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8);
1388
1389 gimplify_to_stmt_list (&SWITCH_BODY (switch_expr));
1390
1391 labels = gimplify_ctxp->case_labels;
1392 gimplify_ctxp->case_labels = saved_labels;
1393
1394 i = 0;
1395 while (i < VEC_length (tree, labels))
1396 {
1397 tree elt = VEC_index (tree, labels, i);
1398 tree low = CASE_LOW (elt);
1399 bool remove_element = FALSE;
1400
1401 if (low)
1402 {
1403 /* Discard empty ranges. */
1404 tree high = CASE_HIGH (elt);
1405 if (high && INT_CST_LT (high, low))
1406 remove_element = TRUE;
1407 }
1408 else
1409 {
1410 /* The default case must be the last label in the list. */
1411 gcc_assert (!default_case);
1412 default_case = elt;
1413 remove_element = TRUE;
1414 }
1415
1416 if (remove_element)
1417 VEC_ordered_remove (tree, labels, i);
1418 else
1419 i++;
1420 }
1421 len = i;
1422
1423 label_vec = make_tree_vec (len + 1);
1424 SWITCH_LABELS (*expr_p) = label_vec;
1425 append_to_statement_list (switch_expr, pre_p);
1426
1427 if (! default_case)
1428 {
1429 /* If the switch has no default label, add one, so that we jump
1430 around the switch body. */
1431 default_case = build3 (CASE_LABEL_EXPR, void_type_node, NULL_TREE,
1432 NULL_TREE, create_artificial_label ());
1433 append_to_statement_list (SWITCH_BODY (switch_expr), pre_p);
1434 *expr_p = build1 (LABEL_EXPR, void_type_node,
1435 CASE_LABEL (default_case));
1436 }
1437 else
1438 *expr_p = SWITCH_BODY (switch_expr);
1439
1440 for (i = 0; i < len; ++i)
1441 TREE_VEC_ELT (label_vec, i) = VEC_index (tree, labels, i);
1442 TREE_VEC_ELT (label_vec, len) = default_case;
1443
1444 VEC_free (tree, heap, labels);
1445
1446 sort_case_labels (label_vec);
1447
1448 SWITCH_BODY (switch_expr) = NULL;
1449 }
1450 else
1451 gcc_assert (SWITCH_LABELS (switch_expr));
1452
1453 return ret;
1454 }
1455
1456 static enum gimplify_status
1457 gimplify_case_label_expr (tree *expr_p)
1458 {
1459 tree expr = *expr_p;
1460 struct gimplify_ctx *ctxp;
1461
1462 /* Invalid OpenMP programs can play Duff's Device type games with
1463 #pragma omp parallel. At least in the C front end, we don't
1464 detect such invalid branches until after gimplification. */
1465 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1466 if (ctxp->case_labels)
1467 break;
1468
1469 VEC_safe_push (tree, heap, ctxp->case_labels, expr);
1470 *expr_p = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (expr));
1471 return GS_ALL_DONE;
1472 }
1473
1474 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1475 if necessary. */
1476
1477 tree
1478 build_and_jump (tree *label_p)
1479 {
1480 if (label_p == NULL)
1481 /* If there's nowhere to jump, just fall through. */
1482 return NULL_TREE;
1483
1484 if (*label_p == NULL_TREE)
1485 {
1486 tree label = create_artificial_label ();
1487 *label_p = label;
1488 }
1489
1490 return build1 (GOTO_EXPR, void_type_node, *label_p);
1491 }
1492
1493 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1494 This also involves building a label to jump to and communicating it to
1495 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1496
1497 static enum gimplify_status
1498 gimplify_exit_expr (tree *expr_p)
1499 {
1500 tree cond = TREE_OPERAND (*expr_p, 0);
1501 tree expr;
1502
1503 expr = build_and_jump (&gimplify_ctxp->exit_label);
1504 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1505 *expr_p = expr;
1506
1507 return GS_OK;
1508 }
1509
1510 /* A helper function to be called via walk_tree. Mark all labels under *TP
1511 as being forced. To be called for DECL_INITIAL of static variables. */
1512
1513 tree
1514 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1515 {
1516 if (TYPE_P (*tp))
1517 *walk_subtrees = 0;
1518 if (TREE_CODE (*tp) == LABEL_DECL)
1519 FORCED_LABEL (*tp) = 1;
1520
1521 return NULL_TREE;
1522 }
1523
1524 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1525 different from its canonical type, wrap the whole thing inside a
1526 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1527 type.
1528
1529 The canonical type of a COMPONENT_REF is the type of the field being
1530 referenced--unless the field is a bit-field which can be read directly
1531 in a smaller mode, in which case the canonical type is the
1532 sign-appropriate type corresponding to that mode. */
1533
1534 static void
1535 canonicalize_component_ref (tree *expr_p)
1536 {
1537 tree expr = *expr_p;
1538 tree type;
1539
1540 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
1541
1542 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1543 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1544 else
1545 type = TREE_TYPE (TREE_OPERAND (expr, 1));
1546
1547 if (TREE_TYPE (expr) != type)
1548 {
1549 tree old_type = TREE_TYPE (expr);
1550
1551 /* Set the type of the COMPONENT_REF to the underlying type. */
1552 TREE_TYPE (expr) = type;
1553
1554 /* And wrap the whole thing inside a NOP_EXPR. */
1555 expr = build1 (NOP_EXPR, old_type, expr);
1556
1557 *expr_p = expr;
1558 }
1559 }
1560
1561 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1562 to foo, embed that change in the ADDR_EXPR by converting
1563 T array[U];
1564 (T *)&array
1565 ==>
1566 &array[L]
1567 where L is the lower bound. For simplicity, only do this for constant
1568 lower bound. */
1569
1570 static void
1571 canonicalize_addr_expr (tree *expr_p)
1572 {
1573 tree expr = *expr_p;
1574 tree ctype = TREE_TYPE (expr);
1575 tree addr_expr = TREE_OPERAND (expr, 0);
1576 tree atype = TREE_TYPE (addr_expr);
1577 tree dctype, datype, ddatype, otype, obj_expr;
1578
1579 /* Both cast and addr_expr types should be pointers. */
1580 if (!POINTER_TYPE_P (ctype) || !POINTER_TYPE_P (atype))
1581 return;
1582
1583 /* The addr_expr type should be a pointer to an array. */
1584 datype = TREE_TYPE (atype);
1585 if (TREE_CODE (datype) != ARRAY_TYPE)
1586 return;
1587
1588 /* Both cast and addr_expr types should address the same object type. */
1589 dctype = TREE_TYPE (ctype);
1590 ddatype = TREE_TYPE (datype);
1591 if (!lang_hooks.types_compatible_p (ddatype, dctype))
1592 return;
1593
1594 /* The addr_expr and the object type should match. */
1595 obj_expr = TREE_OPERAND (addr_expr, 0);
1596 otype = TREE_TYPE (obj_expr);
1597 if (!lang_hooks.types_compatible_p (otype, datype))
1598 return;
1599
1600 /* The lower bound and element sizes must be constant. */
1601 if (!TYPE_SIZE_UNIT (dctype)
1602 || TREE_CODE (TYPE_SIZE_UNIT (dctype)) != INTEGER_CST
1603 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1604 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1605 return;
1606
1607 /* All checks succeeded. Build a new node to merge the cast. */
1608 *expr_p = build4 (ARRAY_REF, dctype, obj_expr,
1609 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1610 NULL_TREE, NULL_TREE);
1611 *expr_p = build1 (ADDR_EXPR, ctype, *expr_p);
1612 }
1613
1614 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1615 underneath as appropriate. */
1616
1617 static enum gimplify_status
1618 gimplify_conversion (tree *expr_p)
1619 {
1620 tree tem;
1621 gcc_assert (TREE_CODE (*expr_p) == NOP_EXPR
1622 || TREE_CODE (*expr_p) == CONVERT_EXPR);
1623
1624 /* Then strip away all but the outermost conversion. */
1625 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1626
1627 /* And remove the outermost conversion if it's useless. */
1628 if (tree_ssa_useless_type_conversion (*expr_p))
1629 *expr_p = TREE_OPERAND (*expr_p, 0);
1630
1631 /* Attempt to avoid NOP_EXPR by producing reference to a subtype.
1632 For example this fold (subclass *)&A into &A->subclass avoiding
1633 a need for statement. */
1634 if (TREE_CODE (*expr_p) == NOP_EXPR
1635 && POINTER_TYPE_P (TREE_TYPE (*expr_p))
1636 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (*expr_p, 0)))
1637 && (tem = maybe_fold_offset_to_reference
1638 (TREE_OPERAND (*expr_p, 0),
1639 integer_zero_node, TREE_TYPE (TREE_TYPE (*expr_p)))))
1640 *expr_p = build_fold_addr_expr_with_type (tem, TREE_TYPE (*expr_p));
1641
1642 /* If we still have a conversion at the toplevel,
1643 then canonicalize some constructs. */
1644 if (TREE_CODE (*expr_p) == NOP_EXPR || TREE_CODE (*expr_p) == CONVERT_EXPR)
1645 {
1646 tree sub = TREE_OPERAND (*expr_p, 0);
1647
1648 /* If a NOP conversion is changing the type of a COMPONENT_REF
1649 expression, then canonicalize its type now in order to expose more
1650 redundant conversions. */
1651 if (TREE_CODE (sub) == COMPONENT_REF)
1652 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
1653
1654 /* If a NOP conversion is changing a pointer to array of foo
1655 to a pointer to foo, embed that change in the ADDR_EXPR. */
1656 else if (TREE_CODE (sub) == ADDR_EXPR)
1657 canonicalize_addr_expr (expr_p);
1658 }
1659
1660 return GS_OK;
1661 }
1662
1663 /* Gimplify a VAR_DECL or PARM_DECL. Returns GS_OK if we expanded a
1664 DECL_VALUE_EXPR, and it's worth re-examining things. */
1665
1666 static enum gimplify_status
1667 gimplify_var_or_parm_decl (tree *expr_p)
1668 {
1669 tree decl = *expr_p;
1670
1671 /* ??? If this is a local variable, and it has not been seen in any
1672 outer BIND_EXPR, then it's probably the result of a duplicate
1673 declaration, for which we've already issued an error. It would
1674 be really nice if the front end wouldn't leak these at all.
1675 Currently the only known culprit is C++ destructors, as seen
1676 in g++.old-deja/g++.jason/binding.C. */
1677 if (TREE_CODE (decl) == VAR_DECL
1678 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1679 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1680 && decl_function_context (decl) == current_function_decl)
1681 {
1682 gcc_assert (errorcount || sorrycount);
1683 return GS_ERROR;
1684 }
1685
1686 /* When within an OpenMP context, notice uses of variables. */
1687 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1688 return GS_ALL_DONE;
1689
1690 /* If the decl is an alias for another expression, substitute it now. */
1691 if (DECL_HAS_VALUE_EXPR_P (decl))
1692 {
1693 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
1694 return GS_OK;
1695 }
1696
1697 return GS_ALL_DONE;
1698 }
1699
1700
1701 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1702 node pointed to by EXPR_P.
1703
1704 compound_lval
1705 : min_lval '[' val ']'
1706 | min_lval '.' ID
1707 | compound_lval '[' val ']'
1708 | compound_lval '.' ID
1709
1710 This is not part of the original SIMPLE definition, which separates
1711 array and member references, but it seems reasonable to handle them
1712 together. Also, this way we don't run into problems with union
1713 aliasing; gcc requires that for accesses through a union to alias, the
1714 union reference must be explicit, which was not always the case when we
1715 were splitting up array and member refs.
1716
1717 PRE_P points to the list where side effects that must happen before
1718 *EXPR_P should be stored.
1719
1720 POST_P points to the list where side effects that must happen after
1721 *EXPR_P should be stored. */
1722
1723 static enum gimplify_status
1724 gimplify_compound_lval (tree *expr_p, tree *pre_p,
1725 tree *post_p, fallback_t fallback)
1726 {
1727 tree *p;
1728 VEC(tree,heap) *stack;
1729 enum gimplify_status ret = GS_OK, tret;
1730 int i;
1731
1732 /* Create a stack of the subexpressions so later we can walk them in
1733 order from inner to outer. */
1734 stack = VEC_alloc (tree, heap, 10);
1735
1736 /* We can handle anything that get_inner_reference can deal with. */
1737 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
1738 {
1739 restart:
1740 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
1741 if (TREE_CODE (*p) == INDIRECT_REF)
1742 *p = fold_indirect_ref (*p);
1743
1744 if (handled_component_p (*p))
1745 ;
1746 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
1747 additional COMPONENT_REFs. */
1748 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
1749 && gimplify_var_or_parm_decl (p) == GS_OK)
1750 goto restart;
1751 else
1752 break;
1753
1754 VEC_safe_push (tree, heap, stack, *p);
1755 }
1756
1757 gcc_assert (VEC_length (tree, stack));
1758
1759 /* Now STACK is a stack of pointers to all the refs we've walked through
1760 and P points to the innermost expression.
1761
1762 Java requires that we elaborated nodes in source order. That
1763 means we must gimplify the inner expression followed by each of
1764 the indices, in order. But we can't gimplify the inner
1765 expression until we deal with any variable bounds, sizes, or
1766 positions in order to deal with PLACEHOLDER_EXPRs.
1767
1768 So we do this in three steps. First we deal with the annotations
1769 for any variables in the components, then we gimplify the base,
1770 then we gimplify any indices, from left to right. */
1771 for (i = VEC_length (tree, stack) - 1; i >= 0; i--)
1772 {
1773 tree t = VEC_index (tree, stack, i);
1774
1775 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1776 {
1777 /* Gimplify the low bound and element type size and put them into
1778 the ARRAY_REF. If these values are set, they have already been
1779 gimplified. */
1780 if (!TREE_OPERAND (t, 2))
1781 {
1782 tree low = unshare_expr (array_ref_low_bound (t));
1783 if (!is_gimple_min_invariant (low))
1784 {
1785 TREE_OPERAND (t, 2) = low;
1786 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1787 is_gimple_formal_tmp_reg, fb_rvalue);
1788 ret = MIN (ret, tret);
1789 }
1790 }
1791
1792 if (!TREE_OPERAND (t, 3))
1793 {
1794 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
1795 tree elmt_size = unshare_expr (array_ref_element_size (t));
1796 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
1797
1798 /* Divide the element size by the alignment of the element
1799 type (above). */
1800 elmt_size = size_binop (EXACT_DIV_EXPR, elmt_size, factor);
1801
1802 if (!is_gimple_min_invariant (elmt_size))
1803 {
1804 TREE_OPERAND (t, 3) = elmt_size;
1805 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
1806 is_gimple_formal_tmp_reg, fb_rvalue);
1807 ret = MIN (ret, tret);
1808 }
1809 }
1810 }
1811 else if (TREE_CODE (t) == COMPONENT_REF)
1812 {
1813 /* Set the field offset into T and gimplify it. */
1814 if (!TREE_OPERAND (t, 2))
1815 {
1816 tree offset = unshare_expr (component_ref_field_offset (t));
1817 tree field = TREE_OPERAND (t, 1);
1818 tree factor
1819 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
1820
1821 /* Divide the offset by its alignment. */
1822 offset = size_binop (EXACT_DIV_EXPR, offset, factor);
1823
1824 if (!is_gimple_min_invariant (offset))
1825 {
1826 TREE_OPERAND (t, 2) = offset;
1827 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1828 is_gimple_formal_tmp_reg, fb_rvalue);
1829 ret = MIN (ret, tret);
1830 }
1831 }
1832 }
1833 }
1834
1835 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
1836 so as to match the min_lval predicate. Failure to do so may result
1837 in the creation of large aggregate temporaries. */
1838 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
1839 fallback | fb_lvalue);
1840 ret = MIN (ret, tret);
1841
1842 /* And finally, the indices and operands to BIT_FIELD_REF. During this
1843 loop we also remove any useless conversions. */
1844 for (; VEC_length (tree, stack) > 0; )
1845 {
1846 tree t = VEC_pop (tree, stack);
1847
1848 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1849 {
1850 /* Gimplify the dimension.
1851 Temporary fix for gcc.c-torture/execute/20040313-1.c.
1852 Gimplify non-constant array indices into a temporary
1853 variable.
1854 FIXME - The real fix is to gimplify post-modify
1855 expressions into a minimal gimple lvalue. However, that
1856 exposes bugs in alias analysis. The alias analyzer does
1857 not handle &PTR->FIELD very well. Will fix after the
1858 branch is merged into mainline (dnovillo 2004-05-03). */
1859 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
1860 {
1861 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
1862 is_gimple_formal_tmp_reg, fb_rvalue);
1863 ret = MIN (ret, tret);
1864 }
1865 }
1866 else if (TREE_CODE (t) == BIT_FIELD_REF)
1867 {
1868 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
1869 is_gimple_val, fb_rvalue);
1870 ret = MIN (ret, tret);
1871 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1872 is_gimple_val, fb_rvalue);
1873 ret = MIN (ret, tret);
1874 }
1875
1876 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
1877
1878 /* The innermost expression P may have originally had TREE_SIDE_EFFECTS
1879 set which would have caused all the outer expressions in EXPR_P
1880 leading to P to also have had TREE_SIDE_EFFECTS set. */
1881 recalculate_side_effects (t);
1882 }
1883
1884 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval, fallback);
1885 ret = MIN (ret, tret);
1886
1887 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
1888 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
1889 {
1890 canonicalize_component_ref (expr_p);
1891 ret = MIN (ret, GS_OK);
1892 }
1893
1894 VEC_free (tree, heap, stack);
1895
1896 return ret;
1897 }
1898
1899 /* Gimplify the self modifying expression pointed to by EXPR_P
1900 (++, --, +=, -=).
1901
1902 PRE_P points to the list where side effects that must happen before
1903 *EXPR_P should be stored.
1904
1905 POST_P points to the list where side effects that must happen after
1906 *EXPR_P should be stored.
1907
1908 WANT_VALUE is nonzero iff we want to use the value of this expression
1909 in another expression. */
1910
1911 static enum gimplify_status
1912 gimplify_self_mod_expr (tree *expr_p, tree *pre_p, tree *post_p,
1913 bool want_value)
1914 {
1915 enum tree_code code;
1916 tree lhs, lvalue, rhs, t1, post = NULL, *orig_post_p = post_p;
1917 bool postfix;
1918 enum tree_code arith_code;
1919 enum gimplify_status ret;
1920
1921 code = TREE_CODE (*expr_p);
1922
1923 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
1924 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
1925
1926 /* Prefix or postfix? */
1927 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
1928 /* Faster to treat as prefix if result is not used. */
1929 postfix = want_value;
1930 else
1931 postfix = false;
1932
1933 /* For postfix, make sure the inner expression's post side effects
1934 are executed after side effects from this expression. */
1935 if (postfix)
1936 post_p = &post;
1937
1938 /* Add or subtract? */
1939 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
1940 arith_code = PLUS_EXPR;
1941 else
1942 arith_code = MINUS_EXPR;
1943
1944 /* Gimplify the LHS into a GIMPLE lvalue. */
1945 lvalue = TREE_OPERAND (*expr_p, 0);
1946 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
1947 if (ret == GS_ERROR)
1948 return ret;
1949
1950 /* Extract the operands to the arithmetic operation. */
1951 lhs = lvalue;
1952 rhs = TREE_OPERAND (*expr_p, 1);
1953
1954 /* For postfix operator, we evaluate the LHS to an rvalue and then use
1955 that as the result value and in the postqueue operation. */
1956 if (postfix)
1957 {
1958 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
1959 if (ret == GS_ERROR)
1960 return ret;
1961 }
1962
1963 t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);
1964 t1 = build_gimple_modify_stmt (lvalue, t1);
1965
1966 if (postfix)
1967 {
1968 gimplify_and_add (t1, orig_post_p);
1969 append_to_statement_list (post, orig_post_p);
1970 *expr_p = lhs;
1971 return GS_ALL_DONE;
1972 }
1973 else
1974 {
1975 *expr_p = t1;
1976 return GS_OK;
1977 }
1978 }
1979
1980 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
1981
1982 static void
1983 maybe_with_size_expr (tree *expr_p)
1984 {
1985 tree expr = *expr_p;
1986 tree type = TREE_TYPE (expr);
1987 tree size;
1988
1989 /* If we've already wrapped this or the type is error_mark_node, we can't do
1990 anything. */
1991 if (TREE_CODE (expr) == WITH_SIZE_EXPR
1992 || type == error_mark_node)
1993 return;
1994
1995 /* If the size isn't known or is a constant, we have nothing to do. */
1996 size = TYPE_SIZE_UNIT (type);
1997 if (!size || TREE_CODE (size) == INTEGER_CST)
1998 return;
1999
2000 /* Otherwise, make a WITH_SIZE_EXPR. */
2001 size = unshare_expr (size);
2002 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2003 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2004 }
2005
2006 /* Subroutine of gimplify_call_expr: Gimplify a single argument. */
2007
2008 static enum gimplify_status
2009 gimplify_arg (tree *expr_p, tree *pre_p)
2010 {
2011 bool (*test) (tree);
2012 fallback_t fb;
2013
2014 /* In general, we allow lvalues for function arguments to avoid
2015 extra overhead of copying large aggregates out of even larger
2016 aggregates into temporaries only to copy the temporaries to
2017 the argument list. Make optimizers happy by pulling out to
2018 temporaries those types that fit in registers. */
2019 if (is_gimple_reg_type (TREE_TYPE (*expr_p)))
2020 test = is_gimple_val, fb = fb_rvalue;
2021 else
2022 test = is_gimple_lvalue, fb = fb_either;
2023
2024 /* If this is a variable sized type, we must remember the size. */
2025 maybe_with_size_expr (expr_p);
2026
2027 /* There is a sequence point before a function call. Side effects in
2028 the argument list must occur before the actual call. So, when
2029 gimplifying arguments, force gimplify_expr to use an internal
2030 post queue which is then appended to the end of PRE_P. */
2031 return gimplify_expr (expr_p, pre_p, NULL, test, fb);
2032 }
2033
2034 /* Gimplify the CALL_EXPR node pointed to by EXPR_P. PRE_P points to the
2035 list where side effects that must happen before *EXPR_P should be stored.
2036 WANT_VALUE is true if the result of the call is desired. */
2037
2038 static enum gimplify_status
2039 gimplify_call_expr (tree *expr_p, tree *pre_p, bool want_value)
2040 {
2041 tree decl;
2042 enum gimplify_status ret;
2043 int i, nargs;
2044
2045 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2046
2047 /* For reliable diagnostics during inlining, it is necessary that
2048 every call_expr be annotated with file and line. */
2049 if (! EXPR_HAS_LOCATION (*expr_p))
2050 SET_EXPR_LOCATION (*expr_p, input_location);
2051
2052 /* This may be a call to a builtin function.
2053
2054 Builtin function calls may be transformed into different
2055 (and more efficient) builtin function calls under certain
2056 circumstances. Unfortunately, gimplification can muck things
2057 up enough that the builtin expanders are not aware that certain
2058 transformations are still valid.
2059
2060 So we attempt transformation/gimplification of the call before
2061 we gimplify the CALL_EXPR. At this time we do not manage to
2062 transform all calls in the same manner as the expanders do, but
2063 we do transform most of them. */
2064 decl = get_callee_fndecl (*expr_p);
2065 if (decl && DECL_BUILT_IN (decl))
2066 {
2067 tree new = fold_call_expr (*expr_p, !want_value);
2068
2069 if (new && new != *expr_p)
2070 {
2071 /* There was a transformation of this call which computes the
2072 same value, but in a more efficient way. Return and try
2073 again. */
2074 *expr_p = new;
2075 return GS_OK;
2076 }
2077
2078 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2079 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_START)
2080 {
2081 if (call_expr_nargs (*expr_p) < 2)
2082 {
2083 error ("too few arguments to function %<va_start%>");
2084 *expr_p = build_empty_stmt ();
2085 return GS_OK;
2086 }
2087
2088 if (fold_builtin_next_arg (*expr_p, true))
2089 {
2090 *expr_p = build_empty_stmt ();
2091 return GS_OK;
2092 }
2093 /* Avoid gimplifying the second argument to va_start, which needs
2094 to be the plain PARM_DECL. */
2095 return gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p);
2096 }
2097 }
2098
2099 /* There is a sequence point before the call, so any side effects in
2100 the calling expression must occur before the actual call. Force
2101 gimplify_expr to use an internal post queue. */
2102 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
2103 is_gimple_call_addr, fb_rvalue);
2104
2105 nargs = call_expr_nargs (*expr_p);
2106
2107 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2108 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2109 PUSH_ARGS_REVERSED ? i-- : i++)
2110 {
2111 enum gimplify_status t;
2112
2113 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p);
2114
2115 if (t == GS_ERROR)
2116 ret = GS_ERROR;
2117 }
2118
2119 /* Try this again in case gimplification exposed something. */
2120 if (ret != GS_ERROR)
2121 {
2122 tree new = fold_call_expr (*expr_p, !want_value);
2123
2124 if (new && new != *expr_p)
2125 {
2126 /* There was a transformation of this call which computes the
2127 same value, but in a more efficient way. Return and try
2128 again. */
2129 *expr_p = new;
2130 return GS_OK;
2131 }
2132 }
2133
2134 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
2135 decl. This allows us to eliminate redundant or useless
2136 calls to "const" functions. */
2137 if (TREE_CODE (*expr_p) == CALL_EXPR
2138 && (call_expr_flags (*expr_p) & (ECF_CONST | ECF_PURE)))
2139 TREE_SIDE_EFFECTS (*expr_p) = 0;
2140
2141 return ret;
2142 }
2143
2144 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2145 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2146
2147 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2148 condition is true or false, respectively. If null, we should generate
2149 our own to skip over the evaluation of this specific expression.
2150
2151 This function is the tree equivalent of do_jump.
2152
2153 shortcut_cond_r should only be called by shortcut_cond_expr. */
2154
2155 static tree
2156 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p)
2157 {
2158 tree local_label = NULL_TREE;
2159 tree t, expr = NULL;
2160
2161 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2162 retain the shortcut semantics. Just insert the gotos here;
2163 shortcut_cond_expr will append the real blocks later. */
2164 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2165 {
2166 /* Turn if (a && b) into
2167
2168 if (a); else goto no;
2169 if (b) goto yes; else goto no;
2170 (no:) */
2171
2172 if (false_label_p == NULL)
2173 false_label_p = &local_label;
2174
2175 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p);
2176 append_to_statement_list (t, &expr);
2177
2178 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2179 false_label_p);
2180 append_to_statement_list (t, &expr);
2181 }
2182 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2183 {
2184 /* Turn if (a || b) into
2185
2186 if (a) goto yes;
2187 if (b) goto yes; else goto no;
2188 (yes:) */
2189
2190 if (true_label_p == NULL)
2191 true_label_p = &local_label;
2192
2193 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL);
2194 append_to_statement_list (t, &expr);
2195
2196 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2197 false_label_p);
2198 append_to_statement_list (t, &expr);
2199 }
2200 else if (TREE_CODE (pred) == COND_EXPR)
2201 {
2202 /* As long as we're messing with gotos, turn if (a ? b : c) into
2203 if (a)
2204 if (b) goto yes; else goto no;
2205 else
2206 if (c) goto yes; else goto no; */
2207 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2208 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2209 false_label_p),
2210 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2211 false_label_p));
2212 }
2213 else
2214 {
2215 expr = build3 (COND_EXPR, void_type_node, pred,
2216 build_and_jump (true_label_p),
2217 build_and_jump (false_label_p));
2218 }
2219
2220 if (local_label)
2221 {
2222 t = build1 (LABEL_EXPR, void_type_node, local_label);
2223 append_to_statement_list (t, &expr);
2224 }
2225
2226 return expr;
2227 }
2228
2229 static tree
2230 shortcut_cond_expr (tree expr)
2231 {
2232 tree pred = TREE_OPERAND (expr, 0);
2233 tree then_ = TREE_OPERAND (expr, 1);
2234 tree else_ = TREE_OPERAND (expr, 2);
2235 tree true_label, false_label, end_label, t;
2236 tree *true_label_p;
2237 tree *false_label_p;
2238 bool emit_end, emit_false, jump_over_else;
2239 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2240 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2241
2242 /* First do simple transformations. */
2243 if (!else_se)
2244 {
2245 /* If there is no 'else', turn (a && b) into if (a) if (b). */
2246 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2247 {
2248 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2249 then_ = shortcut_cond_expr (expr);
2250 then_se = then_ && TREE_SIDE_EFFECTS (then_);
2251 pred = TREE_OPERAND (pred, 0);
2252 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2253 }
2254 }
2255 if (!then_se)
2256 {
2257 /* If there is no 'then', turn
2258 if (a || b); else d
2259 into
2260 if (a); else if (b); else d. */
2261 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2262 {
2263 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2264 else_ = shortcut_cond_expr (expr);
2265 else_se = else_ && TREE_SIDE_EFFECTS (else_);
2266 pred = TREE_OPERAND (pred, 0);
2267 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2268 }
2269 }
2270
2271 /* If we're done, great. */
2272 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2273 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2274 return expr;
2275
2276 /* Otherwise we need to mess with gotos. Change
2277 if (a) c; else d;
2278 to
2279 if (a); else goto no;
2280 c; goto end;
2281 no: d; end:
2282 and recursively gimplify the condition. */
2283
2284 true_label = false_label = end_label = NULL_TREE;
2285
2286 /* If our arms just jump somewhere, hijack those labels so we don't
2287 generate jumps to jumps. */
2288
2289 if (then_
2290 && TREE_CODE (then_) == GOTO_EXPR
2291 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2292 {
2293 true_label = GOTO_DESTINATION (then_);
2294 then_ = NULL;
2295 then_se = false;
2296 }
2297
2298 if (else_
2299 && TREE_CODE (else_) == GOTO_EXPR
2300 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2301 {
2302 false_label = GOTO_DESTINATION (else_);
2303 else_ = NULL;
2304 else_se = false;
2305 }
2306
2307 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
2308 if (true_label)
2309 true_label_p = &true_label;
2310 else
2311 true_label_p = NULL;
2312
2313 /* The 'else' branch also needs a label if it contains interesting code. */
2314 if (false_label || else_se)
2315 false_label_p = &false_label;
2316 else
2317 false_label_p = NULL;
2318
2319 /* If there was nothing else in our arms, just forward the label(s). */
2320 if (!then_se && !else_se)
2321 return shortcut_cond_r (pred, true_label_p, false_label_p);
2322
2323 /* If our last subexpression already has a terminal label, reuse it. */
2324 if (else_se)
2325 expr = expr_last (else_);
2326 else if (then_se)
2327 expr = expr_last (then_);
2328 else
2329 expr = NULL;
2330 if (expr && TREE_CODE (expr) == LABEL_EXPR)
2331 end_label = LABEL_EXPR_LABEL (expr);
2332
2333 /* If we don't care about jumping to the 'else' branch, jump to the end
2334 if the condition is false. */
2335 if (!false_label_p)
2336 false_label_p = &end_label;
2337
2338 /* We only want to emit these labels if we aren't hijacking them. */
2339 emit_end = (end_label == NULL_TREE);
2340 emit_false = (false_label == NULL_TREE);
2341
2342 /* We only emit the jump over the else clause if we have to--if the
2343 then clause may fall through. Otherwise we can wind up with a
2344 useless jump and a useless label at the end of gimplified code,
2345 which will cause us to think that this conditional as a whole
2346 falls through even if it doesn't. If we then inline a function
2347 which ends with such a condition, that can cause us to issue an
2348 inappropriate warning about control reaching the end of a
2349 non-void function. */
2350 jump_over_else = block_may_fallthru (then_);
2351
2352 pred = shortcut_cond_r (pred, true_label_p, false_label_p);
2353
2354 expr = NULL;
2355 append_to_statement_list (pred, &expr);
2356
2357 append_to_statement_list (then_, &expr);
2358 if (else_se)
2359 {
2360 if (jump_over_else)
2361 {
2362 t = build_and_jump (&end_label);
2363 append_to_statement_list (t, &expr);
2364 }
2365 if (emit_false)
2366 {
2367 t = build1 (LABEL_EXPR, void_type_node, false_label);
2368 append_to_statement_list (t, &expr);
2369 }
2370 append_to_statement_list (else_, &expr);
2371 }
2372 if (emit_end && end_label)
2373 {
2374 t = build1 (LABEL_EXPR, void_type_node, end_label);
2375 append_to_statement_list (t, &expr);
2376 }
2377
2378 return expr;
2379 }
2380
2381 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
2382
2383 tree
2384 gimple_boolify (tree expr)
2385 {
2386 tree type = TREE_TYPE (expr);
2387
2388 if (TREE_CODE (type) == BOOLEAN_TYPE)
2389 return expr;
2390
2391 switch (TREE_CODE (expr))
2392 {
2393 case TRUTH_AND_EXPR:
2394 case TRUTH_OR_EXPR:
2395 case TRUTH_XOR_EXPR:
2396 case TRUTH_ANDIF_EXPR:
2397 case TRUTH_ORIF_EXPR:
2398 /* Also boolify the arguments of truth exprs. */
2399 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2400 /* FALLTHRU */
2401
2402 case TRUTH_NOT_EXPR:
2403 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2404 /* FALLTHRU */
2405
2406 case EQ_EXPR: case NE_EXPR:
2407 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
2408 /* These expressions always produce boolean results. */
2409 TREE_TYPE (expr) = boolean_type_node;
2410 return expr;
2411
2412 default:
2413 /* Other expressions that get here must have boolean values, but
2414 might need to be converted to the appropriate mode. */
2415 return fold_convert (boolean_type_node, expr);
2416 }
2417 }
2418
2419 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
2420 into
2421
2422 if (p) if (p)
2423 t1 = a; a;
2424 else or else
2425 t1 = b; b;
2426 t1;
2427
2428 The second form is used when *EXPR_P is of type void.
2429
2430 TARGET is the tree for T1 above.
2431
2432 PRE_P points to the list where side effects that must happen before
2433 *EXPR_P should be stored. */
2434
2435 static enum gimplify_status
2436 gimplify_cond_expr (tree *expr_p, tree *pre_p, fallback_t fallback)
2437 {
2438 tree expr = *expr_p;
2439 tree tmp, tmp2, type;
2440 enum gimplify_status ret;
2441
2442 type = TREE_TYPE (expr);
2443
2444 /* If this COND_EXPR has a value, copy the values into a temporary within
2445 the arms. */
2446 if (! VOID_TYPE_P (type))
2447 {
2448 tree result;
2449
2450 if ((fallback & fb_lvalue) == 0)
2451 {
2452 result = tmp2 = tmp = create_tmp_var (TREE_TYPE (expr), "iftmp");
2453 ret = GS_ALL_DONE;
2454 }
2455 else
2456 {
2457 tree type = build_pointer_type (TREE_TYPE (expr));
2458
2459 if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
2460 TREE_OPERAND (expr, 1) =
2461 build_fold_addr_expr (TREE_OPERAND (expr, 1));
2462
2463 if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2464 TREE_OPERAND (expr, 2) =
2465 build_fold_addr_expr (TREE_OPERAND (expr, 2));
2466
2467 tmp2 = tmp = create_tmp_var (type, "iftmp");
2468
2469 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (expr, 0),
2470 TREE_OPERAND (expr, 1), TREE_OPERAND (expr, 2));
2471
2472 result = build_fold_indirect_ref (tmp);
2473 ret = GS_ALL_DONE;
2474 }
2475
2476 /* Build the then clause, 't1 = a;'. But don't build an assignment
2477 if this branch is void; in C++ it can be, if it's a throw. */
2478 if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
2479 TREE_OPERAND (expr, 1)
2480 = build_gimple_modify_stmt (tmp, TREE_OPERAND (expr, 1));
2481
2482 /* Build the else clause, 't1 = b;'. */
2483 if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2484 TREE_OPERAND (expr, 2)
2485 = build_gimple_modify_stmt (tmp2, TREE_OPERAND (expr, 2));
2486
2487 TREE_TYPE (expr) = void_type_node;
2488 recalculate_side_effects (expr);
2489
2490 /* Move the COND_EXPR to the prequeue. */
2491 gimplify_and_add (expr, pre_p);
2492
2493 *expr_p = result;
2494 return ret;
2495 }
2496
2497 /* Make sure the condition has BOOLEAN_TYPE. */
2498 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2499
2500 /* Break apart && and || conditions. */
2501 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
2502 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
2503 {
2504 expr = shortcut_cond_expr (expr);
2505
2506 if (expr != *expr_p)
2507 {
2508 *expr_p = expr;
2509
2510 /* We can't rely on gimplify_expr to re-gimplify the expanded
2511 form properly, as cleanups might cause the target labels to be
2512 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
2513 set up a conditional context. */
2514 gimple_push_condition ();
2515 gimplify_stmt (expr_p);
2516 gimple_pop_condition (pre_p);
2517
2518 return GS_ALL_DONE;
2519 }
2520 }
2521
2522 /* Now do the normal gimplification. */
2523 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
2524 is_gimple_condexpr, fb_rvalue);
2525
2526 gimple_push_condition ();
2527
2528 gimplify_to_stmt_list (&TREE_OPERAND (expr, 1));
2529 gimplify_to_stmt_list (&TREE_OPERAND (expr, 2));
2530 recalculate_side_effects (expr);
2531
2532 gimple_pop_condition (pre_p);
2533
2534 if (ret == GS_ERROR)
2535 ;
2536 else if (TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 1)))
2537 ret = GS_ALL_DONE;
2538 else if (TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 2)))
2539 /* Rewrite "if (a); else b" to "if (!a) b" */
2540 {
2541 TREE_OPERAND (expr, 0) = invert_truthvalue (TREE_OPERAND (expr, 0));
2542 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
2543 is_gimple_condexpr, fb_rvalue);
2544
2545 tmp = TREE_OPERAND (expr, 1);
2546 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 2);
2547 TREE_OPERAND (expr, 2) = tmp;
2548 }
2549 else
2550 /* Both arms are empty; replace the COND_EXPR with its predicate. */
2551 expr = TREE_OPERAND (expr, 0);
2552
2553 *expr_p = expr;
2554 return ret;
2555 }
2556
2557 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
2558 a call to __builtin_memcpy. */
2559
2560 static enum gimplify_status
2561 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value)
2562 {
2563 tree t, to, to_ptr, from, from_ptr;
2564
2565 to = GENERIC_TREE_OPERAND (*expr_p, 0);
2566 from = GENERIC_TREE_OPERAND (*expr_p, 1);
2567
2568 from_ptr = build_fold_addr_expr (from);
2569
2570 to_ptr = build_fold_addr_expr (to);
2571 t = implicit_built_in_decls[BUILT_IN_MEMCPY];
2572 t = build_call_expr (t, 3, to_ptr, from_ptr, size);
2573
2574 if (want_value)
2575 {
2576 t = build1 (NOP_EXPR, TREE_TYPE (to_ptr), t);
2577 t = build1 (INDIRECT_REF, TREE_TYPE (to), t);
2578 }
2579
2580 *expr_p = t;
2581 return GS_OK;
2582 }
2583
2584 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
2585 a call to __builtin_memset. In this case we know that the RHS is
2586 a CONSTRUCTOR with an empty element list. */
2587
2588 static enum gimplify_status
2589 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value)
2590 {
2591 tree t, to, to_ptr;
2592
2593 to = GENERIC_TREE_OPERAND (*expr_p, 0);
2594
2595 to_ptr = build_fold_addr_expr (to);
2596 t = implicit_built_in_decls[BUILT_IN_MEMSET];
2597 t = build_call_expr (t, 3, to_ptr, integer_zero_node, size);
2598
2599 if (want_value)
2600 {
2601 t = build1 (NOP_EXPR, TREE_TYPE (to_ptr), t);
2602 t = build1 (INDIRECT_REF, TREE_TYPE (to), t);
2603 }
2604
2605 *expr_p = t;
2606 return GS_OK;
2607 }
2608
2609 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
2610 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
2611 assignment. Returns non-null if we detect a potential overlap. */
2612
2613 struct gimplify_init_ctor_preeval_data
2614 {
2615 /* The base decl of the lhs object. May be NULL, in which case we
2616 have to assume the lhs is indirect. */
2617 tree lhs_base_decl;
2618
2619 /* The alias set of the lhs object. */
2620 int lhs_alias_set;
2621 };
2622
2623 static tree
2624 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
2625 {
2626 struct gimplify_init_ctor_preeval_data *data
2627 = (struct gimplify_init_ctor_preeval_data *) xdata;
2628 tree t = *tp;
2629
2630 /* If we find the base object, obviously we have overlap. */
2631 if (data->lhs_base_decl == t)
2632 return t;
2633
2634 /* If the constructor component is indirect, determine if we have a
2635 potential overlap with the lhs. The only bits of information we
2636 have to go on at this point are addressability and alias sets. */
2637 if (TREE_CODE (t) == INDIRECT_REF
2638 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
2639 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
2640 return t;
2641
2642 /* If the constructor component is a call, determine if it can hide a
2643 potential overlap with the lhs through an INDIRECT_REF like above. */
2644 if (TREE_CODE (t) == CALL_EXPR)
2645 {
2646 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
2647
2648 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
2649 if (POINTER_TYPE_P (TREE_VALUE (type))
2650 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
2651 && alias_sets_conflict_p (data->lhs_alias_set,
2652 get_alias_set
2653 (TREE_TYPE (TREE_VALUE (type)))))
2654 return t;
2655 }
2656
2657 if (IS_TYPE_OR_DECL_P (t))
2658 *walk_subtrees = 0;
2659 return NULL;
2660 }
2661
2662 /* A subroutine of gimplify_init_constructor. Pre-evaluate *EXPR_P,
2663 force values that overlap with the lhs (as described by *DATA)
2664 into temporaries. */
2665
2666 static void
2667 gimplify_init_ctor_preeval (tree *expr_p, tree *pre_p, tree *post_p,
2668 struct gimplify_init_ctor_preeval_data *data)
2669 {
2670 enum gimplify_status one;
2671
2672 /* If the value is invariant, then there's nothing to pre-evaluate.
2673 But ensure it doesn't have any side-effects since a SAVE_EXPR is
2674 invariant but has side effects and might contain a reference to
2675 the object we're initializing. */
2676 if (TREE_INVARIANT (*expr_p) && !TREE_SIDE_EFFECTS (*expr_p))
2677 return;
2678
2679 /* If the type has non-trivial constructors, we can't pre-evaluate. */
2680 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
2681 return;
2682
2683 /* Recurse for nested constructors. */
2684 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
2685 {
2686 unsigned HOST_WIDE_INT ix;
2687 constructor_elt *ce;
2688 VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p);
2689
2690 for (ix = 0; VEC_iterate (constructor_elt, v, ix, ce); ix++)
2691 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
2692 return;
2693 }
2694
2695 /* If this is a variable sized type, we must remember the size. */
2696 maybe_with_size_expr (expr_p);
2697
2698 /* Gimplify the constructor element to something appropriate for the rhs
2699 of a MODIFY_EXPR. Given that we know the lhs is an aggregate, we know
2700 the gimplifier will consider this a store to memory. Doing this
2701 gimplification now means that we won't have to deal with complicated
2702 language-specific trees, nor trees like SAVE_EXPR that can induce
2703 exponential search behavior. */
2704 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
2705 if (one == GS_ERROR)
2706 {
2707 *expr_p = NULL;
2708 return;
2709 }
2710
2711 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
2712 with the lhs, since "a = { .x=a }" doesn't make sense. This will
2713 always be true for all scalars, since is_gimple_mem_rhs insists on a
2714 temporary variable for them. */
2715 if (DECL_P (*expr_p))
2716 return;
2717
2718 /* If this is of variable size, we have no choice but to assume it doesn't
2719 overlap since we can't make a temporary for it. */
2720 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
2721 return;
2722
2723 /* Otherwise, we must search for overlap ... */
2724 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
2725 return;
2726
2727 /* ... and if found, force the value into a temporary. */
2728 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
2729 }
2730
2731 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
2732 a RANGE_EXPR in a CONSTRUCTOR for an array.
2733
2734 var = lower;
2735 loop_entry:
2736 object[var] = value;
2737 if (var == upper)
2738 goto loop_exit;
2739 var = var + 1;
2740 goto loop_entry;
2741 loop_exit:
2742
2743 We increment var _after_ the loop exit check because we might otherwise
2744 fail if upper == TYPE_MAX_VALUE (type for upper).
2745
2746 Note that we never have to deal with SAVE_EXPRs here, because this has
2747 already been taken care of for us, in gimplify_init_ctor_preeval(). */
2748
2749 static void gimplify_init_ctor_eval (tree, VEC(constructor_elt,gc) *,
2750 tree *, bool);
2751
2752 static void
2753 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
2754 tree value, tree array_elt_type,
2755 tree *pre_p, bool cleared)
2756 {
2757 tree loop_entry_label, loop_exit_label;
2758 tree var, var_type, cref, tmp;
2759
2760 loop_entry_label = create_artificial_label ();
2761 loop_exit_label = create_artificial_label ();
2762
2763 /* Create and initialize the index variable. */
2764 var_type = TREE_TYPE (upper);
2765 var = create_tmp_var (var_type, NULL);
2766 append_to_statement_list (build_gimple_modify_stmt (var, lower), pre_p);
2767
2768 /* Add the loop entry label. */
2769 append_to_statement_list (build1 (LABEL_EXPR,
2770 void_type_node,
2771 loop_entry_label),
2772 pre_p);
2773
2774 /* Build the reference. */
2775 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
2776 var, NULL_TREE, NULL_TREE);
2777
2778 /* If we are a constructor, just call gimplify_init_ctor_eval to do
2779 the store. Otherwise just assign value to the reference. */
2780
2781 if (TREE_CODE (value) == CONSTRUCTOR)
2782 /* NB we might have to call ourself recursively through
2783 gimplify_init_ctor_eval if the value is a constructor. */
2784 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
2785 pre_p, cleared);
2786 else
2787 append_to_statement_list (build_gimple_modify_stmt (cref, value), pre_p);
2788
2789 /* We exit the loop when the index var is equal to the upper bound. */
2790 gimplify_and_add (build3 (COND_EXPR, void_type_node,
2791 build2 (EQ_EXPR, boolean_type_node,
2792 var, upper),
2793 build1 (GOTO_EXPR,
2794 void_type_node,
2795 loop_exit_label),
2796 NULL_TREE),
2797 pre_p);
2798
2799 /* Otherwise, increment the index var... */
2800 tmp = build2 (PLUS_EXPR, var_type, var,
2801 fold_convert (var_type, integer_one_node));
2802 append_to_statement_list (build_gimple_modify_stmt (var, tmp), pre_p);
2803
2804 /* ...and jump back to the loop entry. */
2805 append_to_statement_list (build1 (GOTO_EXPR,
2806 void_type_node,
2807 loop_entry_label),
2808 pre_p);
2809
2810 /* Add the loop exit label. */
2811 append_to_statement_list (build1 (LABEL_EXPR,
2812 void_type_node,
2813 loop_exit_label),
2814 pre_p);
2815 }
2816
2817 /* Return true if FDECL is accessing a field that is zero sized. */
2818
2819 static bool
2820 zero_sized_field_decl (tree fdecl)
2821 {
2822 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
2823 && integer_zerop (DECL_SIZE (fdecl)))
2824 return true;
2825 return false;
2826 }
2827
2828 /* Return true if TYPE is zero sized. */
2829
2830 static bool
2831 zero_sized_type (tree type)
2832 {
2833 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
2834 && integer_zerop (TYPE_SIZE (type)))
2835 return true;
2836 return false;
2837 }
2838
2839 /* A subroutine of gimplify_init_constructor. Generate individual
2840 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
2841 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
2842 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
2843 zeroed first. */
2844
2845 static void
2846 gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
2847 tree *pre_p, bool cleared)
2848 {
2849 tree array_elt_type = NULL;
2850 unsigned HOST_WIDE_INT ix;
2851 tree purpose, value;
2852
2853 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
2854 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
2855
2856 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
2857 {
2858 tree cref, init;
2859
2860 /* NULL values are created above for gimplification errors. */
2861 if (value == NULL)
2862 continue;
2863
2864 if (cleared && initializer_zerop (value))
2865 continue;
2866
2867 /* ??? Here's to hoping the front end fills in all of the indices,
2868 so we don't have to figure out what's missing ourselves. */
2869 gcc_assert (purpose);
2870
2871 /* Skip zero-sized fields, unless value has side-effects. This can
2872 happen with calls to functions returning a zero-sized type, which
2873 we shouldn't discard. As a number of downstream passes don't
2874 expect sets of zero-sized fields, we rely on the gimplification of
2875 the MODIFY_EXPR we make below to drop the assignment statement. */
2876 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
2877 continue;
2878
2879 /* If we have a RANGE_EXPR, we have to build a loop to assign the
2880 whole range. */
2881 if (TREE_CODE (purpose) == RANGE_EXPR)
2882 {
2883 tree lower = TREE_OPERAND (purpose, 0);
2884 tree upper = TREE_OPERAND (purpose, 1);
2885
2886 /* If the lower bound is equal to upper, just treat it as if
2887 upper was the index. */
2888 if (simple_cst_equal (lower, upper))
2889 purpose = upper;
2890 else
2891 {
2892 gimplify_init_ctor_eval_range (object, lower, upper, value,
2893 array_elt_type, pre_p, cleared);
2894 continue;
2895 }
2896 }
2897
2898 if (array_elt_type)
2899 {
2900 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
2901 purpose, NULL_TREE, NULL_TREE);
2902 }
2903 else
2904 {
2905 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
2906 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
2907 unshare_expr (object), purpose, NULL_TREE);
2908 }
2909
2910 if (TREE_CODE (value) == CONSTRUCTOR
2911 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
2912 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
2913 pre_p, cleared);
2914 else
2915 {
2916 init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
2917 gimplify_and_add (init, pre_p);
2918 }
2919 }
2920 }
2921
2922 /* A subroutine of gimplify_modify_expr. Break out elements of a
2923 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
2924
2925 Note that we still need to clear any elements that don't have explicit
2926 initializers, so if not all elements are initialized we keep the
2927 original MODIFY_EXPR, we just remove all of the constructor elements. */
2928
2929 static enum gimplify_status
2930 gimplify_init_constructor (tree *expr_p, tree *pre_p,
2931 tree *post_p, bool want_value)
2932 {
2933 tree object;
2934 tree ctor = GENERIC_TREE_OPERAND (*expr_p, 1);
2935 tree type = TREE_TYPE (ctor);
2936 enum gimplify_status ret;
2937 VEC(constructor_elt,gc) *elts;
2938
2939 if (TREE_CODE (ctor) != CONSTRUCTOR)
2940 return GS_UNHANDLED;
2941
2942 ret = gimplify_expr (&GENERIC_TREE_OPERAND (*expr_p, 0), pre_p, post_p,
2943 is_gimple_lvalue, fb_lvalue);
2944 if (ret == GS_ERROR)
2945 return ret;
2946 object = GENERIC_TREE_OPERAND (*expr_p, 0);
2947
2948 elts = CONSTRUCTOR_ELTS (ctor);
2949
2950 ret = GS_ALL_DONE;
2951 switch (TREE_CODE (type))
2952 {
2953 case RECORD_TYPE:
2954 case UNION_TYPE:
2955 case QUAL_UNION_TYPE:
2956 case ARRAY_TYPE:
2957 {
2958 struct gimplify_init_ctor_preeval_data preeval_data;
2959 HOST_WIDE_INT num_type_elements, num_ctor_elements;
2960 HOST_WIDE_INT num_nonzero_elements;
2961 bool cleared, valid_const_initializer;
2962
2963 /* Aggregate types must lower constructors to initialization of
2964 individual elements. The exception is that a CONSTRUCTOR node
2965 with no elements indicates zero-initialization of the whole. */
2966 if (VEC_empty (constructor_elt, elts))
2967 break;
2968
2969 /* Fetch information about the constructor to direct later processing.
2970 We might want to make static versions of it in various cases, and
2971 can only do so if it known to be a valid constant initializer. */
2972 valid_const_initializer
2973 = categorize_ctor_elements (ctor, &num_nonzero_elements,
2974 &num_ctor_elements, &cleared);
2975
2976 /* If a const aggregate variable is being initialized, then it
2977 should never be a lose to promote the variable to be static. */
2978 if (valid_const_initializer
2979 && num_nonzero_elements > 1
2980 && TREE_READONLY (object)
2981 && TREE_CODE (object) == VAR_DECL)
2982 {
2983 DECL_INITIAL (object) = ctor;
2984 TREE_STATIC (object) = 1;
2985 if (!DECL_NAME (object))
2986 DECL_NAME (object) = create_tmp_var_name ("C");
2987 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
2988
2989 /* ??? C++ doesn't automatically append a .<number> to the
2990 assembler name, and even when it does, it looks a FE private
2991 data structures to figure out what that number should be,
2992 which are not set for this variable. I suppose this is
2993 important for local statics for inline functions, which aren't
2994 "local" in the object file sense. So in order to get a unique
2995 TU-local symbol, we must invoke the lhd version now. */
2996 lhd_set_decl_assembler_name (object);
2997
2998 *expr_p = NULL_TREE;
2999 break;
3000 }
3001
3002 /* If there are "lots" of initialized elements, even discounting
3003 those that are not address constants (and thus *must* be
3004 computed at runtime), then partition the constructor into
3005 constant and non-constant parts. Block copy the constant
3006 parts in, then generate code for the non-constant parts. */
3007 /* TODO. There's code in cp/typeck.c to do this. */
3008
3009 num_type_elements = count_type_elements (type, true);
3010
3011 /* If count_type_elements could not determine number of type elements
3012 for a constant-sized object, assume clearing is needed.
3013 Don't do this for variable-sized objects, as store_constructor
3014 will ignore the clearing of variable-sized objects. */
3015 if (num_type_elements < 0 && int_size_in_bytes (type) >= 0)
3016 cleared = true;
3017 /* If there are "lots" of zeros, then block clear the object first. */
3018 else if (num_type_elements - num_nonzero_elements > CLEAR_RATIO
3019 && num_nonzero_elements < num_type_elements/4)
3020 cleared = true;
3021 /* ??? This bit ought not be needed. For any element not present
3022 in the initializer, we should simply set them to zero. Except
3023 we'd need to *find* the elements that are not present, and that
3024 requires trickery to avoid quadratic compile-time behavior in
3025 large cases or excessive memory use in small cases. */
3026 else if (num_ctor_elements < num_type_elements)
3027 cleared = true;
3028
3029 /* If there are "lots" of initialized elements, and all of them
3030 are valid address constants, then the entire initializer can
3031 be dropped to memory, and then memcpy'd out. Don't do this
3032 for sparse arrays, though, as it's more efficient to follow
3033 the standard CONSTRUCTOR behavior of memset followed by
3034 individual element initialization. */
3035 if (valid_const_initializer && !cleared)
3036 {
3037 HOST_WIDE_INT size = int_size_in_bytes (type);
3038 unsigned int align;
3039
3040 /* ??? We can still get unbounded array types, at least
3041 from the C++ front end. This seems wrong, but attempt
3042 to work around it for now. */
3043 if (size < 0)
3044 {
3045 size = int_size_in_bytes (TREE_TYPE (object));
3046 if (size >= 0)
3047 TREE_TYPE (ctor) = type = TREE_TYPE (object);
3048 }
3049
3050 /* Find the maximum alignment we can assume for the object. */
3051 /* ??? Make use of DECL_OFFSET_ALIGN. */
3052 if (DECL_P (object))
3053 align = DECL_ALIGN (object);
3054 else
3055 align = TYPE_ALIGN (type);
3056
3057 if (size > 0 && !can_move_by_pieces (size, align))
3058 {
3059 tree new = create_tmp_var_raw (type, "C");
3060
3061 gimple_add_tmp_var (new);
3062 TREE_STATIC (new) = 1;
3063 TREE_READONLY (new) = 1;
3064 DECL_INITIAL (new) = ctor;
3065 if (align > DECL_ALIGN (new))
3066 {
3067 DECL_ALIGN (new) = align;
3068 DECL_USER_ALIGN (new) = 1;
3069 }
3070 walk_tree (&DECL_INITIAL (new), force_labels_r, NULL, NULL);
3071
3072 GENERIC_TREE_OPERAND (*expr_p, 1) = new;
3073
3074 /* This is no longer an assignment of a CONSTRUCTOR, but
3075 we still may have processing to do on the LHS. So
3076 pretend we didn't do anything here to let that happen. */
3077 return GS_UNHANDLED;
3078 }
3079 }
3080
3081 /* If there are nonzero elements, pre-evaluate to capture elements
3082 overlapping with the lhs into temporaries. We must do this before
3083 clearing to fetch the values before they are zeroed-out. */
3084 if (num_nonzero_elements > 0)
3085 {
3086 preeval_data.lhs_base_decl = get_base_address (object);
3087 if (!DECL_P (preeval_data.lhs_base_decl))
3088 preeval_data.lhs_base_decl = NULL;
3089 preeval_data.lhs_alias_set = get_alias_set (object);
3090
3091 gimplify_init_ctor_preeval (&GENERIC_TREE_OPERAND (*expr_p, 1),
3092 pre_p, post_p, &preeval_data);
3093 }
3094
3095 if (cleared)
3096 {
3097 /* Zap the CONSTRUCTOR element list, which simplifies this case.
3098 Note that we still have to gimplify, in order to handle the
3099 case of variable sized types. Avoid shared tree structures. */
3100 CONSTRUCTOR_ELTS (ctor) = NULL;
3101 object = unshare_expr (object);
3102 gimplify_stmt (expr_p);
3103 append_to_statement_list (*expr_p, pre_p);
3104 }
3105
3106 /* If we have not block cleared the object, or if there are nonzero
3107 elements in the constructor, add assignments to the individual
3108 scalar fields of the object. */
3109 if (!cleared || num_nonzero_elements > 0)
3110 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
3111
3112 *expr_p = NULL_TREE;
3113 }
3114 break;
3115
3116 case COMPLEX_TYPE:
3117 {
3118 tree r, i;
3119
3120 /* Extract the real and imaginary parts out of the ctor. */
3121 gcc_assert (VEC_length (constructor_elt, elts) == 2);
3122 r = VEC_index (constructor_elt, elts, 0)->value;
3123 i = VEC_index (constructor_elt, elts, 1)->value;
3124 if (r == NULL || i == NULL)
3125 {
3126 tree zero = fold_convert (TREE_TYPE (type), integer_zero_node);
3127 if (r == NULL)
3128 r = zero;
3129 if (i == NULL)
3130 i = zero;
3131 }
3132
3133 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
3134 represent creation of a complex value. */
3135 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
3136 {
3137 ctor = build_complex (type, r, i);
3138 TREE_OPERAND (*expr_p, 1) = ctor;
3139 }
3140 else
3141 {
3142 ctor = build2 (COMPLEX_EXPR, type, r, i);
3143 TREE_OPERAND (*expr_p, 1) = ctor;
3144 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
3145 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
3146 fb_rvalue);
3147 }
3148 }
3149 break;
3150
3151 case VECTOR_TYPE:
3152 {
3153 unsigned HOST_WIDE_INT ix;
3154 constructor_elt *ce;
3155
3156 /* Go ahead and simplify constant constructors to VECTOR_CST. */
3157 if (TREE_CONSTANT (ctor))
3158 {
3159 bool constant_p = true;
3160 tree value;
3161
3162 /* Even when ctor is constant, it might contain non-*_CST
3163 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
3164 belong into VECTOR_CST nodes. */
3165 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
3166 if (!CONSTANT_CLASS_P (value))
3167 {
3168 constant_p = false;
3169 break;
3170 }
3171
3172 if (constant_p)
3173 {
3174 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
3175 break;
3176 }
3177
3178 /* Don't reduce a TREE_CONSTANT vector ctor even if we can't
3179 make a VECTOR_CST. It won't do anything for us, and it'll
3180 prevent us from representing it as a single constant. */
3181 break;
3182 }
3183
3184 /* Vector types use CONSTRUCTOR all the way through gimple
3185 compilation as a general initializer. */
3186 for (ix = 0; VEC_iterate (constructor_elt, elts, ix, ce); ix++)
3187 {
3188 enum gimplify_status tret;
3189 tret = gimplify_expr (&ce->value, pre_p, post_p,
3190 is_gimple_val, fb_rvalue);
3191 if (tret == GS_ERROR)
3192 ret = GS_ERROR;
3193 }
3194 if (!is_gimple_reg (GENERIC_TREE_OPERAND (*expr_p, 0)))
3195 GENERIC_TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
3196 }
3197 break;
3198
3199 default:
3200 /* So how did we get a CONSTRUCTOR for a scalar type? */
3201 gcc_unreachable ();
3202 }
3203
3204 if (ret == GS_ERROR)
3205 return GS_ERROR;
3206 else if (want_value)
3207 {
3208 append_to_statement_list (*expr_p, pre_p);
3209 *expr_p = object;
3210 return GS_OK;
3211 }
3212 else
3213 return GS_ALL_DONE;
3214 }
3215
3216 /* Given a pointer value OP0, return a simplified version of an
3217 indirection through OP0, or NULL_TREE if no simplification is
3218 possible. This may only be applied to a rhs of an expression.
3219 Note that the resulting type may be different from the type pointed
3220 to in the sense that it is still compatible from the langhooks
3221 point of view. */
3222
3223 static tree
3224 fold_indirect_ref_rhs (tree t)
3225 {
3226 tree type = TREE_TYPE (TREE_TYPE (t));
3227 tree sub = t;
3228 tree subtype;
3229
3230 STRIP_USELESS_TYPE_CONVERSION (sub);
3231 subtype = TREE_TYPE (sub);
3232 if (!POINTER_TYPE_P (subtype))
3233 return NULL_TREE;
3234
3235 if (TREE_CODE (sub) == ADDR_EXPR)
3236 {
3237 tree op = TREE_OPERAND (sub, 0);
3238 tree optype = TREE_TYPE (op);
3239 /* *&p => p */
3240 if (lang_hooks.types_compatible_p (type, optype))
3241 return op;
3242 /* *(foo *)&fooarray => fooarray[0] */
3243 else if (TREE_CODE (optype) == ARRAY_TYPE
3244 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
3245 {
3246 tree type_domain = TYPE_DOMAIN (optype);
3247 tree min_val = size_zero_node;
3248 if (type_domain && TYPE_MIN_VALUE (type_domain))
3249 min_val = TYPE_MIN_VALUE (type_domain);
3250 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
3251 }
3252 }
3253
3254 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
3255 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
3256 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
3257 {
3258 tree type_domain;
3259 tree min_val = size_zero_node;
3260 tree osub = sub;
3261 sub = fold_indirect_ref_rhs (sub);
3262 if (! sub)
3263 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
3264 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
3265 if (type_domain && TYPE_MIN_VALUE (type_domain))
3266 min_val = TYPE_MIN_VALUE (type_domain);
3267 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
3268 }
3269
3270 return NULL_TREE;
3271 }
3272
3273 /* Subroutine of gimplify_modify_expr to do simplifications of MODIFY_EXPRs
3274 based on the code of the RHS. We loop for as long as something changes. */
3275
3276 static enum gimplify_status
3277 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p, tree *pre_p,
3278 tree *post_p, bool want_value)
3279 {
3280 enum gimplify_status ret = GS_OK;
3281
3282 while (ret != GS_UNHANDLED)
3283 switch (TREE_CODE (*from_p))
3284 {
3285 case INDIRECT_REF:
3286 {
3287 /* If we have code like
3288
3289 *(const A*)(A*)&x
3290
3291 where the type of "x" is a (possibly cv-qualified variant
3292 of "A"), treat the entire expression as identical to "x".
3293 This kind of code arises in C++ when an object is bound
3294 to a const reference, and if "x" is a TARGET_EXPR we want
3295 to take advantage of the optimization below. */
3296 tree t = fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
3297 if (t)
3298 {
3299 *from_p = t;
3300 ret = GS_OK;
3301 }
3302 else
3303 ret = GS_UNHANDLED;
3304 break;
3305 }
3306
3307 case TARGET_EXPR:
3308 {
3309 /* If we are initializing something from a TARGET_EXPR, strip the
3310 TARGET_EXPR and initialize it directly, if possible. This can't
3311 be done if the initializer is void, since that implies that the
3312 temporary is set in some non-trivial way.
3313
3314 ??? What about code that pulls out the temp and uses it
3315 elsewhere? I think that such code never uses the TARGET_EXPR as
3316 an initializer. If I'm wrong, we'll die because the temp won't
3317 have any RTL. In that case, I guess we'll need to replace
3318 references somehow. */
3319 tree init = TARGET_EXPR_INITIAL (*from_p);
3320
3321 if (!VOID_TYPE_P (TREE_TYPE (init)))
3322 {
3323 *from_p = init;
3324 ret = GS_OK;
3325 }
3326 else
3327 ret = GS_UNHANDLED;
3328 }
3329 break;
3330
3331 case COMPOUND_EXPR:
3332 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
3333 caught. */
3334 gimplify_compound_expr (from_p, pre_p, true);
3335 ret = GS_OK;
3336 break;
3337
3338 case CONSTRUCTOR:
3339 /* If we're initializing from a CONSTRUCTOR, break this into
3340 individual MODIFY_EXPRs. */
3341 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value);
3342
3343 case COND_EXPR:
3344 /* If we're assigning to a non-register type, push the assignment
3345 down into the branches. This is mandatory for ADDRESSABLE types,
3346 since we cannot generate temporaries for such, but it saves a
3347 copy in other cases as well. */
3348 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
3349 {
3350 /* This code should mirror the code in gimplify_cond_expr. */
3351 enum tree_code code = TREE_CODE (*expr_p);
3352 tree cond = *from_p;
3353 tree result = *to_p;
3354
3355 ret = gimplify_expr (&result, pre_p, post_p,
3356 is_gimple_min_lval, fb_lvalue);
3357 if (ret != GS_ERROR)
3358 ret = GS_OK;
3359
3360 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
3361 TREE_OPERAND (cond, 1)
3362 = build2 (code, void_type_node, result,
3363 TREE_OPERAND (cond, 1));
3364 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
3365 TREE_OPERAND (cond, 2)
3366 = build2 (code, void_type_node, unshare_expr (result),
3367 TREE_OPERAND (cond, 2));
3368
3369 TREE_TYPE (cond) = void_type_node;
3370 recalculate_side_effects (cond);
3371
3372 if (want_value)
3373 {
3374 gimplify_and_add (cond, pre_p);
3375 *expr_p = unshare_expr (result);
3376 }
3377 else
3378 *expr_p = cond;
3379 return ret;
3380 }
3381 else
3382 ret = GS_UNHANDLED;
3383 break;
3384
3385 case CALL_EXPR:
3386 /* For calls that return in memory, give *to_p as the CALL_EXPR's
3387 return slot so that we don't generate a temporary. */
3388 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
3389 && aggregate_value_p (*from_p, *from_p))
3390 {
3391 bool use_target;
3392
3393 if (!(rhs_predicate_for (*to_p))(*from_p))
3394 /* If we need a temporary, *to_p isn't accurate. */
3395 use_target = false;
3396 else if (TREE_CODE (*to_p) == RESULT_DECL
3397 && DECL_NAME (*to_p) == NULL_TREE
3398 && needs_to_live_in_memory (*to_p))
3399 /* It's OK to use the return slot directly unless it's an NRV. */
3400 use_target = true;
3401 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
3402 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
3403 /* Don't force regs into memory. */
3404 use_target = false;
3405 else if (TREE_CODE (*to_p) == VAR_DECL
3406 && DECL_GIMPLE_FORMAL_TEMP_P (*to_p))
3407 /* Don't use the original target if it's a formal temp; we
3408 don't want to take their addresses. */
3409 use_target = false;
3410 else if (TREE_CODE (*expr_p) == INIT_EXPR)
3411 /* It's OK to use the target directly if it's being
3412 initialized. */
3413 use_target = true;
3414 else if (!is_gimple_non_addressable (*to_p))
3415 /* Don't use the original target if it's already addressable;
3416 if its address escapes, and the called function uses the
3417 NRV optimization, a conforming program could see *to_p
3418 change before the called function returns; see c++/19317.
3419 When optimizing, the return_slot pass marks more functions
3420 as safe after we have escape info. */
3421 use_target = false;
3422 else
3423 use_target = true;
3424
3425 if (use_target)
3426 {
3427 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
3428 lang_hooks.mark_addressable (*to_p);
3429 }
3430 }
3431
3432 ret = GS_UNHANDLED;
3433 break;
3434
3435 /* If we're initializing from a container, push the initialization
3436 inside it. */
3437 case CLEANUP_POINT_EXPR:
3438 case BIND_EXPR:
3439 case STATEMENT_LIST:
3440 {
3441 tree wrap = *from_p;
3442 tree t;
3443
3444 ret = gimplify_expr (to_p, pre_p, post_p,
3445 is_gimple_min_lval, fb_lvalue);
3446 if (ret != GS_ERROR)
3447 ret = GS_OK;
3448
3449 t = voidify_wrapper_expr (wrap, *expr_p);
3450 gcc_assert (t == *expr_p);
3451
3452 if (want_value)
3453 {
3454 gimplify_and_add (wrap, pre_p);
3455 *expr_p = unshare_expr (*to_p);
3456 }
3457 else
3458 *expr_p = wrap;
3459 return GS_OK;
3460 }
3461
3462 default:
3463 ret = GS_UNHANDLED;
3464 break;
3465 }
3466
3467 return ret;
3468 }
3469
3470 /* Destructively convert the TREE pointer in TP into a gimple tuple if
3471 appropriate. */
3472
3473 static void
3474 tree_to_gimple_tuple (tree *tp)
3475 {
3476
3477 switch (TREE_CODE (*tp))
3478 {
3479 case GIMPLE_MODIFY_STMT:
3480 return;
3481 case MODIFY_EXPR:
3482 {
3483 struct gimple_stmt *gs;
3484 tree lhs = TREE_OPERAND (*tp, 0);
3485 bool def_stmt_self_p = false;
3486
3487 if (TREE_CODE (lhs) == SSA_NAME)
3488 {
3489 if (SSA_NAME_DEF_STMT (lhs) == *tp)
3490 def_stmt_self_p = true;
3491 }
3492
3493 gs = &make_node (GIMPLE_MODIFY_STMT)->gstmt;
3494 gs->base = (*tp)->base;
3495 /* The set to base above overwrites the CODE. */
3496 TREE_SET_CODE ((tree) gs, GIMPLE_MODIFY_STMT);
3497
3498 gs->locus = EXPR_LOCUS (*tp);
3499 gs->operands[0] = TREE_OPERAND (*tp, 0);
3500 gs->operands[1] = TREE_OPERAND (*tp, 1);
3501 gs->block = TREE_BLOCK (*tp);
3502 *tp = (tree)gs;
3503
3504 /* If we re-gimplify a set to an SSA_NAME, we must change the
3505 SSA name's DEF_STMT link. */
3506 if (def_stmt_self_p)
3507 SSA_NAME_DEF_STMT (GIMPLE_STMT_OPERAND (*tp, 0)) = *tp;
3508
3509 return;
3510 }
3511 default:
3512 break;
3513 }
3514 }
3515
3516 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
3517 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
3518 DECL_GIMPLE_REG_P set. */
3519
3520 static enum gimplify_status
3521 gimplify_modify_expr_complex_part (tree *expr_p, tree *pre_p, bool want_value)
3522 {
3523 enum tree_code code, ocode;
3524 tree lhs, rhs, new_rhs, other, realpart, imagpart;
3525
3526 lhs = GENERIC_TREE_OPERAND (*expr_p, 0);
3527 rhs = GENERIC_TREE_OPERAND (*expr_p, 1);
3528 code = TREE_CODE (lhs);
3529 lhs = TREE_OPERAND (lhs, 0);
3530
3531 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
3532 other = build1 (ocode, TREE_TYPE (rhs), lhs);
3533 other = get_formal_tmp_var (other, pre_p);
3534
3535 realpart = code == REALPART_EXPR ? rhs : other;
3536 imagpart = code == REALPART_EXPR ? other : rhs;
3537
3538 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
3539 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
3540 else
3541 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
3542
3543 GENERIC_TREE_OPERAND (*expr_p, 0) = lhs;
3544 GENERIC_TREE_OPERAND (*expr_p, 1) = new_rhs;
3545
3546 if (want_value)
3547 {
3548 tree_to_gimple_tuple (expr_p);
3549
3550 append_to_statement_list (*expr_p, pre_p);
3551 *expr_p = rhs;
3552 }
3553
3554 return GS_ALL_DONE;
3555 }
3556
3557 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
3558
3559 modify_expr
3560 : varname '=' rhs
3561 | '*' ID '=' rhs
3562
3563 PRE_P points to the list where side effects that must happen before
3564 *EXPR_P should be stored.
3565
3566 POST_P points to the list where side effects that must happen after
3567 *EXPR_P should be stored.
3568
3569 WANT_VALUE is nonzero iff we want to use the value of this expression
3570 in another expression. */
3571
3572 static enum gimplify_status
3573 gimplify_modify_expr (tree *expr_p, tree *pre_p, tree *post_p, bool want_value)
3574 {
3575 tree *from_p = &GENERIC_TREE_OPERAND (*expr_p, 1);
3576 tree *to_p = &GENERIC_TREE_OPERAND (*expr_p, 0);
3577 enum gimplify_status ret = GS_UNHANDLED;
3578
3579 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
3580 || TREE_CODE (*expr_p) == GIMPLE_MODIFY_STMT
3581 || TREE_CODE (*expr_p) == INIT_EXPR);
3582
3583 /* For zero sized types only gimplify the left hand side and right hand side
3584 as statements and throw away the assignment. */
3585 if (zero_sized_type (TREE_TYPE (*from_p)))
3586 {
3587 gimplify_stmt (from_p);
3588 gimplify_stmt (to_p);
3589 append_to_statement_list (*from_p, pre_p);
3590 append_to_statement_list (*to_p, pre_p);
3591 *expr_p = NULL_TREE;
3592 return GS_ALL_DONE;
3593 }
3594
3595 /* See if any simplifications can be done based on what the RHS is. */
3596 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
3597 want_value);
3598 if (ret != GS_UNHANDLED)
3599 return ret;
3600
3601 /* If the value being copied is of variable width, compute the length
3602 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
3603 before gimplifying any of the operands so that we can resolve any
3604 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
3605 the size of the expression to be copied, not of the destination, so
3606 that is what we must here. */
3607 maybe_with_size_expr (from_p);
3608
3609 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3610 if (ret == GS_ERROR)
3611 return ret;
3612
3613 ret = gimplify_expr (from_p, pre_p, post_p,
3614 rhs_predicate_for (*to_p), fb_rvalue);
3615 if (ret == GS_ERROR)
3616 return ret;
3617
3618 /* Now see if the above changed *from_p to something we handle specially. */
3619 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
3620 want_value);
3621 if (ret != GS_UNHANDLED)
3622 return ret;
3623
3624 /* If we've got a variable sized assignment between two lvalues (i.e. does
3625 not involve a call), then we can make things a bit more straightforward
3626 by converting the assignment to memcpy or memset. */
3627 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
3628 {
3629 tree from = TREE_OPERAND (*from_p, 0);
3630 tree size = TREE_OPERAND (*from_p, 1);
3631
3632 if (TREE_CODE (from) == CONSTRUCTOR)
3633 return gimplify_modify_expr_to_memset (expr_p, size, want_value);
3634 if (is_gimple_addressable (from))
3635 {
3636 *from_p = from;
3637 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value);
3638 }
3639 }
3640
3641 /* Transform partial stores to non-addressable complex variables into
3642 total stores. This allows us to use real instead of virtual operands
3643 for these variables, which improves optimization. */
3644 if ((TREE_CODE (*to_p) == REALPART_EXPR
3645 || TREE_CODE (*to_p) == IMAGPART_EXPR)
3646 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
3647 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
3648
3649 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
3650 {
3651 /* If we've somehow already got an SSA_NAME on the LHS, then
3652 we're probably modified it twice. Not good. */
3653 gcc_assert (TREE_CODE (*to_p) != SSA_NAME);
3654 *to_p = make_ssa_name (*to_p, *expr_p);
3655 }
3656
3657 /* Try to alleviate the effects of the gimplification creating artificial
3658 temporaries (see for example is_gimple_reg_rhs) on the debug info. */
3659 if (!gimplify_ctxp->into_ssa
3660 && DECL_P (*from_p) && DECL_IGNORED_P (*from_p)
3661 && DECL_P (*to_p) && !DECL_IGNORED_P (*to_p))
3662 {
3663 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
3664 DECL_NAME (*from_p)
3665 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
3666 DECL_DEBUG_EXPR_IS_FROM (*from_p) = 1;
3667 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
3668 }
3669
3670 if (want_value)
3671 {
3672 tree_to_gimple_tuple (expr_p);
3673
3674 append_to_statement_list (*expr_p, pre_p);
3675 *expr_p = *to_p;
3676 return GS_OK;
3677 }
3678
3679 return GS_ALL_DONE;
3680 }
3681
3682 /* Gimplify a comparison between two variable-sized objects. Do this
3683 with a call to BUILT_IN_MEMCMP. */
3684
3685 static enum gimplify_status
3686 gimplify_variable_sized_compare (tree *expr_p)
3687 {
3688 tree op0 = TREE_OPERAND (*expr_p, 0);
3689 tree op1 = TREE_OPERAND (*expr_p, 1);
3690 tree t, arg, dest, src;
3691
3692 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
3693 arg = unshare_expr (arg);
3694 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
3695 src = build_fold_addr_expr (op1);
3696 dest = build_fold_addr_expr (op0);
3697 t = implicit_built_in_decls[BUILT_IN_MEMCMP];
3698 t = build_call_expr (t, 3, dest, src, arg);
3699 *expr_p
3700 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
3701
3702 return GS_OK;
3703 }
3704
3705 /* Gimplify a comparison between two aggregate objects of integral scalar
3706 mode as a comparison between the bitwise equivalent scalar values. */
3707
3708 static enum gimplify_status
3709 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
3710 {
3711 tree op0 = TREE_OPERAND (*expr_p, 0);
3712 tree op1 = TREE_OPERAND (*expr_p, 1);
3713
3714 tree type = TREE_TYPE (op0);
3715 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
3716
3717 op0 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op0);
3718 op1 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op1);
3719
3720 *expr_p
3721 = fold_build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
3722
3723 return GS_OK;
3724 }
3725
3726 /* Gimplify TRUTH_ANDIF_EXPR and TRUTH_ORIF_EXPR expressions. EXPR_P
3727 points to the expression to gimplify.
3728
3729 Expressions of the form 'a && b' are gimplified to:
3730
3731 a && b ? true : false
3732
3733 gimplify_cond_expr will do the rest.
3734
3735 PRE_P points to the list where side effects that must happen before
3736 *EXPR_P should be stored. */
3737
3738 static enum gimplify_status
3739 gimplify_boolean_expr (tree *expr_p)
3740 {
3741 /* Preserve the original type of the expression. */
3742 tree type = TREE_TYPE (*expr_p);
3743
3744 *expr_p = build3 (COND_EXPR, type, *expr_p,
3745 fold_convert (type, boolean_true_node),
3746 fold_convert (type, boolean_false_node));
3747
3748 return GS_OK;
3749 }
3750
3751 /* Gimplifies an expression sequence. This function gimplifies each
3752 expression and re-writes the original expression with the last
3753 expression of the sequence in GIMPLE form.
3754
3755 PRE_P points to the list where the side effects for all the
3756 expressions in the sequence will be emitted.
3757
3758 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
3759 /* ??? Should rearrange to share the pre-queue with all the indirect
3760 invocations of gimplify_expr. Would probably save on creations
3761 of statement_list nodes. */
3762
3763 static enum gimplify_status
3764 gimplify_compound_expr (tree *expr_p, tree *pre_p, bool want_value)
3765 {
3766 tree t = *expr_p;
3767
3768 do
3769 {
3770 tree *sub_p = &TREE_OPERAND (t, 0);
3771
3772 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
3773 gimplify_compound_expr (sub_p, pre_p, false);
3774 else
3775 gimplify_stmt (sub_p);
3776 append_to_statement_list (*sub_p, pre_p);
3777
3778 t = TREE_OPERAND (t, 1);
3779 }
3780 while (TREE_CODE (t) == COMPOUND_EXPR);
3781
3782 *expr_p = t;
3783 if (want_value)
3784 return GS_OK;
3785 else
3786 {
3787 gimplify_stmt (expr_p);
3788 return GS_ALL_DONE;
3789 }
3790 }
3791
3792 /* Gimplifies a statement list. These may be created either by an
3793 enlightened front-end, or by shortcut_cond_expr. */
3794
3795 static enum gimplify_status
3796 gimplify_statement_list (tree *expr_p, tree *pre_p)
3797 {
3798 tree temp = voidify_wrapper_expr (*expr_p, NULL);
3799
3800 tree_stmt_iterator i = tsi_start (*expr_p);
3801
3802 while (!tsi_end_p (i))
3803 {
3804 tree t;
3805
3806 gimplify_stmt (tsi_stmt_ptr (i));
3807
3808 t = tsi_stmt (i);
3809 if (t == NULL)
3810 tsi_delink (&i);
3811 else if (TREE_CODE (t) == STATEMENT_LIST)
3812 {
3813 tsi_link_before (&i, t, TSI_SAME_STMT);
3814 tsi_delink (&i);
3815 }
3816 else
3817 tsi_next (&i);
3818 }
3819
3820 if (temp)
3821 {
3822 append_to_statement_list (*expr_p, pre_p);
3823 *expr_p = temp;
3824 return GS_OK;
3825 }
3826
3827 return GS_ALL_DONE;
3828 }
3829
3830 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
3831 gimplify. After gimplification, EXPR_P will point to a new temporary
3832 that holds the original value of the SAVE_EXPR node.
3833
3834 PRE_P points to the list where side effects that must happen before
3835 *EXPR_P should be stored. */
3836
3837 static enum gimplify_status
3838 gimplify_save_expr (tree *expr_p, tree *pre_p, tree *post_p)
3839 {
3840 enum gimplify_status ret = GS_ALL_DONE;
3841 tree val;
3842
3843 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
3844 val = TREE_OPERAND (*expr_p, 0);
3845
3846 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
3847 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
3848 {
3849 /* The operand may be a void-valued expression such as SAVE_EXPRs
3850 generated by the Java frontend for class initialization. It is
3851 being executed only for its side-effects. */
3852 if (TREE_TYPE (val) == void_type_node)
3853 {
3854 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3855 is_gimple_stmt, fb_none);
3856 append_to_statement_list (TREE_OPERAND (*expr_p, 0), pre_p);
3857 val = NULL;
3858 }
3859 else
3860 val = get_initialized_tmp_var (val, pre_p, post_p);
3861
3862 TREE_OPERAND (*expr_p, 0) = val;
3863 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
3864 }
3865
3866 *expr_p = val;
3867
3868 return ret;
3869 }
3870
3871 /* Re-write the ADDR_EXPR node pointed to by EXPR_P
3872
3873 unary_expr
3874 : ...
3875 | '&' varname
3876 ...
3877
3878 PRE_P points to the list where side effects that must happen before
3879 *EXPR_P should be stored.
3880
3881 POST_P points to the list where side effects that must happen after
3882 *EXPR_P should be stored. */
3883
3884 static enum gimplify_status
3885 gimplify_addr_expr (tree *expr_p, tree *pre_p, tree *post_p)
3886 {
3887 tree expr = *expr_p;
3888 tree op0 = TREE_OPERAND (expr, 0);
3889 enum gimplify_status ret;
3890
3891 switch (TREE_CODE (op0))
3892 {
3893 case INDIRECT_REF:
3894 case MISALIGNED_INDIRECT_REF:
3895 do_indirect_ref:
3896 /* Check if we are dealing with an expression of the form '&*ptr'.
3897 While the front end folds away '&*ptr' into 'ptr', these
3898 expressions may be generated internally by the compiler (e.g.,
3899 builtins like __builtin_va_end). */
3900 /* Caution: the silent array decomposition semantics we allow for
3901 ADDR_EXPR means we can't always discard the pair. */
3902 /* Gimplification of the ADDR_EXPR operand may drop
3903 cv-qualification conversions, so make sure we add them if
3904 needed. */
3905 {
3906 tree op00 = TREE_OPERAND (op0, 0);
3907 tree t_expr = TREE_TYPE (expr);
3908 tree t_op00 = TREE_TYPE (op00);
3909
3910 if (!lang_hooks.types_compatible_p (t_expr, t_op00))
3911 {
3912 #ifdef ENABLE_CHECKING
3913 tree t_op0 = TREE_TYPE (op0);
3914 gcc_assert (POINTER_TYPE_P (t_expr)
3915 && cpt_same_type (TREE_CODE (t_op0) == ARRAY_TYPE
3916 ? TREE_TYPE (t_op0) : t_op0,
3917 TREE_TYPE (t_expr))
3918 && POINTER_TYPE_P (t_op00)
3919 && cpt_same_type (t_op0, TREE_TYPE (t_op00)));
3920 #endif
3921 op00 = fold_convert (TREE_TYPE (expr), op00);
3922 }
3923 *expr_p = op00;
3924 ret = GS_OK;
3925 }
3926 break;
3927
3928 case VIEW_CONVERT_EXPR:
3929 /* Take the address of our operand and then convert it to the type of
3930 this ADDR_EXPR.
3931
3932 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
3933 all clear. The impact of this transformation is even less clear. */
3934
3935 /* If the operand is a useless conversion, look through it. Doing so
3936 guarantees that the ADDR_EXPR and its operand will remain of the
3937 same type. */
3938 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
3939 op0 = TREE_OPERAND (op0, 0);
3940
3941 *expr_p = fold_convert (TREE_TYPE (expr),
3942 build_fold_addr_expr (TREE_OPERAND (op0, 0)));
3943 ret = GS_OK;
3944 break;
3945
3946 default:
3947 /* We use fb_either here because the C frontend sometimes takes
3948 the address of a call that returns a struct; see
3949 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
3950 the implied temporary explicit. */
3951 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
3952 is_gimple_addressable, fb_either);
3953 if (ret != GS_ERROR)
3954 {
3955 op0 = TREE_OPERAND (expr, 0);
3956
3957 /* For various reasons, the gimplification of the expression
3958 may have made a new INDIRECT_REF. */
3959 if (TREE_CODE (op0) == INDIRECT_REF)
3960 goto do_indirect_ref;
3961
3962 /* Make sure TREE_INVARIANT, TREE_CONSTANT, and TREE_SIDE_EFFECTS
3963 is set properly. */
3964 recompute_tree_invariant_for_addr_expr (expr);
3965
3966 /* Mark the RHS addressable. */
3967 lang_hooks.mark_addressable (TREE_OPERAND (expr, 0));
3968 }
3969 break;
3970 }
3971
3972 return ret;
3973 }
3974
3975 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
3976 value; output operands should be a gimple lvalue. */
3977
3978 static enum gimplify_status
3979 gimplify_asm_expr (tree *expr_p, tree *pre_p, tree *post_p)
3980 {
3981 tree expr = *expr_p;
3982 int noutputs = list_length (ASM_OUTPUTS (expr));
3983 const char **oconstraints
3984 = (const char **) alloca ((noutputs) * sizeof (const char *));
3985 int i;
3986 tree link;
3987 const char *constraint;
3988 bool allows_mem, allows_reg, is_inout;
3989 enum gimplify_status ret, tret;
3990
3991 ret = GS_ALL_DONE;
3992 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = TREE_CHAIN (link))
3993 {
3994 size_t constraint_len;
3995 oconstraints[i] = constraint
3996 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3997 constraint_len = strlen (constraint);
3998 if (constraint_len == 0)
3999 continue;
4000
4001 parse_output_constraint (&constraint, i, 0, 0,
4002 &allows_mem, &allows_reg, &is_inout);
4003
4004 if (!allows_reg && allows_mem)
4005 lang_hooks.mark_addressable (TREE_VALUE (link));
4006
4007 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4008 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
4009 fb_lvalue | fb_mayfail);
4010 if (tret == GS_ERROR)
4011 {
4012 error ("invalid lvalue in asm output %d", i);
4013 ret = tret;
4014 }
4015
4016 if (is_inout)
4017 {
4018 /* An input/output operand. To give the optimizers more
4019 flexibility, split it into separate input and output
4020 operands. */
4021 tree input;
4022 char buf[10];
4023
4024 /* Turn the in/out constraint into an output constraint. */
4025 char *p = xstrdup (constraint);
4026 p[0] = '=';
4027 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
4028
4029 /* And add a matching input constraint. */
4030 if (allows_reg)
4031 {
4032 sprintf (buf, "%d", i);
4033
4034 /* If there are multiple alternatives in the constraint,
4035 handle each of them individually. Those that allow register
4036 will be replaced with operand number, the others will stay
4037 unchanged. */
4038 if (strchr (p, ',') != NULL)
4039 {
4040 size_t len = 0, buflen = strlen (buf);
4041 char *beg, *end, *str, *dst;
4042
4043 for (beg = p + 1;;)
4044 {
4045 end = strchr (beg, ',');
4046 if (end == NULL)
4047 end = strchr (beg, '\0');
4048 if ((size_t) (end - beg) < buflen)
4049 len += buflen + 1;
4050 else
4051 len += end - beg + 1;
4052 if (*end)
4053 beg = end + 1;
4054 else
4055 break;
4056 }
4057
4058 str = (char *) alloca (len);
4059 for (beg = p + 1, dst = str;;)
4060 {
4061 const char *tem;
4062 bool mem_p, reg_p, inout_p;
4063
4064 end = strchr (beg, ',');
4065 if (end)
4066 *end = '\0';
4067 beg[-1] = '=';
4068 tem = beg - 1;
4069 parse_output_constraint (&tem, i, 0, 0,
4070 &mem_p, &reg_p, &inout_p);
4071 if (dst != str)
4072 *dst++ = ',';
4073 if (reg_p)
4074 {
4075 memcpy (dst, buf, buflen);
4076 dst += buflen;
4077 }
4078 else
4079 {
4080 if (end)
4081 len = end - beg;
4082 else
4083 len = strlen (beg);
4084 memcpy (dst, beg, len);
4085 dst += len;
4086 }
4087 if (end)
4088 beg = end + 1;
4089 else
4090 break;
4091 }
4092 *dst = '\0';
4093 input = build_string (dst - str, str);
4094 }
4095 else
4096 input = build_string (strlen (buf), buf);
4097 }
4098 else
4099 input = build_string (constraint_len - 1, constraint + 1);
4100
4101 free (p);
4102
4103 input = build_tree_list (build_tree_list (NULL_TREE, input),
4104 unshare_expr (TREE_VALUE (link)));
4105 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
4106 }
4107 }
4108
4109 for (link = ASM_INPUTS (expr); link; ++i, link = TREE_CHAIN (link))
4110 {
4111 constraint
4112 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4113 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4114 oconstraints, &allows_mem, &allows_reg);
4115
4116 /* If the operand is a memory input, it should be an lvalue. */
4117 if (!allows_reg && allows_mem)
4118 {
4119 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4120 is_gimple_lvalue, fb_lvalue | fb_mayfail);
4121 lang_hooks.mark_addressable (TREE_VALUE (link));
4122 if (tret == GS_ERROR)
4123 {
4124 error ("memory input %d is not directly addressable", i);
4125 ret = tret;
4126 }
4127 }
4128 else
4129 {
4130 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4131 is_gimple_asm_val, fb_rvalue);
4132 if (tret == GS_ERROR)
4133 ret = tret;
4134 }
4135 }
4136
4137 return ret;
4138 }
4139
4140 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
4141 WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
4142 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
4143 return to this function.
4144
4145 FIXME should we complexify the prequeue handling instead? Or use flags
4146 for all the cleanups and let the optimizer tighten them up? The current
4147 code seems pretty fragile; it will break on a cleanup within any
4148 non-conditional nesting. But any such nesting would be broken, anyway;
4149 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
4150 and continues out of it. We can do that at the RTL level, though, so
4151 having an optimizer to tighten up try/finally regions would be a Good
4152 Thing. */
4153
4154 static enum gimplify_status
4155 gimplify_cleanup_point_expr (tree *expr_p, tree *pre_p)
4156 {
4157 tree_stmt_iterator iter;
4158 tree body;
4159
4160 tree temp = voidify_wrapper_expr (*expr_p, NULL);
4161
4162 /* We only care about the number of conditions between the innermost
4163 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
4164 any cleanups collected outside the CLEANUP_POINT_EXPR. */
4165 int old_conds = gimplify_ctxp->conditions;
4166 tree old_cleanups = gimplify_ctxp->conditional_cleanups;
4167 gimplify_ctxp->conditions = 0;
4168 gimplify_ctxp->conditional_cleanups = NULL_TREE;
4169
4170 body = TREE_OPERAND (*expr_p, 0);
4171 gimplify_to_stmt_list (&body);
4172
4173 gimplify_ctxp->conditions = old_conds;
4174 gimplify_ctxp->conditional_cleanups = old_cleanups;
4175
4176 for (iter = tsi_start (body); !tsi_end_p (iter); )
4177 {
4178 tree *wce_p = tsi_stmt_ptr (iter);
4179 tree wce = *wce_p;
4180
4181 if (TREE_CODE (wce) == WITH_CLEANUP_EXPR)
4182 {
4183 if (tsi_one_before_end_p (iter))
4184 {
4185 tsi_link_before (&iter, TREE_OPERAND (wce, 0), TSI_SAME_STMT);
4186 tsi_delink (&iter);
4187 break;
4188 }
4189 else
4190 {
4191 tree sl, tfe;
4192 enum tree_code code;
4193
4194 if (CLEANUP_EH_ONLY (wce))
4195 code = TRY_CATCH_EXPR;
4196 else
4197 code = TRY_FINALLY_EXPR;
4198
4199 sl = tsi_split_statement_list_after (&iter);
4200 tfe = build2 (code, void_type_node, sl, NULL_TREE);
4201 append_to_statement_list (TREE_OPERAND (wce, 0),
4202 &TREE_OPERAND (tfe, 1));
4203 *wce_p = tfe;
4204 iter = tsi_start (sl);
4205 }
4206 }
4207 else
4208 tsi_next (&iter);
4209 }
4210
4211 if (temp)
4212 {
4213 *expr_p = temp;
4214 append_to_statement_list (body, pre_p);
4215 return GS_OK;
4216 }
4217 else
4218 {
4219 *expr_p = body;
4220 return GS_ALL_DONE;
4221 }
4222 }
4223
4224 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
4225 is the cleanup action required. */
4226
4227 static void
4228 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, tree *pre_p)
4229 {
4230 tree wce;
4231
4232 /* Errors can result in improperly nested cleanups. Which results in
4233 confusion when trying to resolve the WITH_CLEANUP_EXPR. */
4234 if (errorcount || sorrycount)
4235 return;
4236
4237 if (gimple_conditional_context ())
4238 {
4239 /* If we're in a conditional context, this is more complex. We only
4240 want to run the cleanup if we actually ran the initialization that
4241 necessitates it, but we want to run it after the end of the
4242 conditional context. So we wrap the try/finally around the
4243 condition and use a flag to determine whether or not to actually
4244 run the destructor. Thus
4245
4246 test ? f(A()) : 0
4247
4248 becomes (approximately)
4249
4250 flag = 0;
4251 try {
4252 if (test) { A::A(temp); flag = 1; val = f(temp); }
4253 else { val = 0; }
4254 } finally {
4255 if (flag) A::~A(temp);
4256 }
4257 val
4258 */
4259
4260 tree flag = create_tmp_var (boolean_type_node, "cleanup");
4261 tree ffalse = build_gimple_modify_stmt (flag, boolean_false_node);
4262 tree ftrue = build_gimple_modify_stmt (flag, boolean_true_node);
4263 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
4264 wce = build1 (WITH_CLEANUP_EXPR, void_type_node, cleanup);
4265 append_to_statement_list (ffalse, &gimplify_ctxp->conditional_cleanups);
4266 append_to_statement_list (wce, &gimplify_ctxp->conditional_cleanups);
4267 append_to_statement_list (ftrue, pre_p);
4268
4269 /* Because of this manipulation, and the EH edges that jump
4270 threading cannot redirect, the temporary (VAR) will appear
4271 to be used uninitialized. Don't warn. */
4272 TREE_NO_WARNING (var) = 1;
4273 }
4274 else
4275 {
4276 wce = build1 (WITH_CLEANUP_EXPR, void_type_node, cleanup);
4277 CLEANUP_EH_ONLY (wce) = eh_only;
4278 append_to_statement_list (wce, pre_p);
4279 }
4280
4281 gimplify_stmt (&TREE_OPERAND (wce, 0));
4282 }
4283
4284 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
4285
4286 static enum gimplify_status
4287 gimplify_target_expr (tree *expr_p, tree *pre_p, tree *post_p)
4288 {
4289 tree targ = *expr_p;
4290 tree temp = TARGET_EXPR_SLOT (targ);
4291 tree init = TARGET_EXPR_INITIAL (targ);
4292 enum gimplify_status ret;
4293
4294 if (init)
4295 {
4296 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
4297 to the temps list. */
4298 gimple_add_tmp_var (temp);
4299
4300 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
4301 expression is supposed to initialize the slot. */
4302 if (VOID_TYPE_P (TREE_TYPE (init)))
4303 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
4304 else
4305 {
4306 init = build2 (INIT_EXPR, void_type_node, temp, init);
4307 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt,
4308 fb_none);
4309 }
4310 if (ret == GS_ERROR)
4311 {
4312 /* PR c++/28266 Make sure this is expanded only once. */
4313 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
4314 return GS_ERROR;
4315 }
4316 append_to_statement_list (init, pre_p);
4317
4318 /* If needed, push the cleanup for the temp. */
4319 if (TARGET_EXPR_CLEANUP (targ))
4320 {
4321 gimplify_stmt (&TARGET_EXPR_CLEANUP (targ));
4322 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
4323 CLEANUP_EH_ONLY (targ), pre_p);
4324 }
4325
4326 /* Only expand this once. */
4327 TREE_OPERAND (targ, 3) = init;
4328 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
4329 }
4330 else
4331 /* We should have expanded this before. */
4332 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
4333
4334 *expr_p = temp;
4335 return GS_OK;
4336 }
4337
4338 /* Gimplification of expression trees. */
4339
4340 /* Gimplify an expression which appears at statement context; usually, this
4341 means replacing it with a suitably gimple STATEMENT_LIST. */
4342
4343 void
4344 gimplify_stmt (tree *stmt_p)
4345 {
4346 gimplify_expr (stmt_p, NULL, NULL, is_gimple_stmt, fb_none);
4347 }
4348
4349 /* Similarly, but force the result to be a STATEMENT_LIST. */
4350
4351 void
4352 gimplify_to_stmt_list (tree *stmt_p)
4353 {
4354 gimplify_stmt (stmt_p);
4355 if (!*stmt_p)
4356 *stmt_p = alloc_stmt_list ();
4357 else if (TREE_CODE (*stmt_p) != STATEMENT_LIST)
4358 {
4359 tree t = *stmt_p;
4360 *stmt_p = alloc_stmt_list ();
4361 append_to_statement_list (t, stmt_p);
4362 }
4363 }
4364
4365
4366 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
4367 to CTX. If entries already exist, force them to be some flavor of private.
4368 If there is no enclosing parallel, do nothing. */
4369
4370 void
4371 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
4372 {
4373 splay_tree_node n;
4374
4375 if (decl == NULL || !DECL_P (decl))
4376 return;
4377
4378 do
4379 {
4380 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4381 if (n != NULL)
4382 {
4383 if (n->value & GOVD_SHARED)
4384 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
4385 else
4386 return;
4387 }
4388 else if (ctx->is_parallel)
4389 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
4390
4391 ctx = ctx->outer_context;
4392 }
4393 while (ctx);
4394 }
4395
4396 /* Similarly for each of the type sizes of TYPE. */
4397
4398 static void
4399 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
4400 {
4401 if (type == NULL || type == error_mark_node)
4402 return;
4403 type = TYPE_MAIN_VARIANT (type);
4404
4405 if (pointer_set_insert (ctx->privatized_types, type))
4406 return;
4407
4408 switch (TREE_CODE (type))
4409 {
4410 case INTEGER_TYPE:
4411 case ENUMERAL_TYPE:
4412 case BOOLEAN_TYPE:
4413 case REAL_TYPE:
4414 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
4415 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
4416 break;
4417
4418 case ARRAY_TYPE:
4419 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
4420 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
4421 break;
4422
4423 case RECORD_TYPE:
4424 case UNION_TYPE:
4425 case QUAL_UNION_TYPE:
4426 {
4427 tree field;
4428 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4429 if (TREE_CODE (field) == FIELD_DECL)
4430 {
4431 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
4432 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
4433 }
4434 }
4435 break;
4436
4437 case POINTER_TYPE:
4438 case REFERENCE_TYPE:
4439 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
4440 break;
4441
4442 default:
4443 break;
4444 }
4445
4446 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
4447 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
4448 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
4449 }
4450
4451 /* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
4452
4453 static void
4454 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
4455 {
4456 splay_tree_node n;
4457 unsigned int nflags;
4458 tree t;
4459
4460 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4461 return;
4462
4463 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
4464 there are constructors involved somewhere. */
4465 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
4466 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
4467 flags |= GOVD_SEEN;
4468
4469 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4470 if (n != NULL)
4471 {
4472 /* We shouldn't be re-adding the decl with the same data
4473 sharing class. */
4474 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
4475 /* The only combination of data sharing classes we should see is
4476 FIRSTPRIVATE and LASTPRIVATE. */
4477 nflags = n->value | flags;
4478 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
4479 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE));
4480 n->value = nflags;
4481 return;
4482 }
4483
4484 /* When adding a variable-sized variable, we have to handle all sorts
4485 of additional bits of data: the pointer replacement variable, and
4486 the parameters of the type. */
4487 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
4488 {
4489 /* Add the pointer replacement variable as PRIVATE if the variable
4490 replacement is private, else FIRSTPRIVATE since we'll need the
4491 address of the original variable either for SHARED, or for the
4492 copy into or out of the context. */
4493 if (!(flags & GOVD_LOCAL))
4494 {
4495 nflags = flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
4496 nflags |= flags & GOVD_SEEN;
4497 t = DECL_VALUE_EXPR (decl);
4498 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
4499 t = TREE_OPERAND (t, 0);
4500 gcc_assert (DECL_P (t));
4501 omp_add_variable (ctx, t, nflags);
4502 }
4503
4504 /* Add all of the variable and type parameters (which should have
4505 been gimplified to a formal temporary) as FIRSTPRIVATE. */
4506 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
4507 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
4508 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
4509
4510 /* The variable-sized variable itself is never SHARED, only some form
4511 of PRIVATE. The sharing would take place via the pointer variable
4512 which we remapped above. */
4513 if (flags & GOVD_SHARED)
4514 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
4515 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
4516
4517 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
4518 alloca statement we generate for the variable, so make sure it
4519 is available. This isn't automatically needed for the SHARED
4520 case, since we won't be allocating local storage then.
4521 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
4522 in this case omp_notice_variable will be called later
4523 on when it is gimplified. */
4524 else if (! (flags & GOVD_LOCAL))
4525 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
4526 }
4527 else if (lang_hooks.decls.omp_privatize_by_reference (decl))
4528 {
4529 gcc_assert ((flags & GOVD_LOCAL) == 0);
4530 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
4531
4532 /* Similar to the direct variable sized case above, we'll need the
4533 size of references being privatized. */
4534 if ((flags & GOVD_SHARED) == 0)
4535 {
4536 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
4537 if (TREE_CODE (t) != INTEGER_CST)
4538 omp_notice_variable (ctx, t, true);
4539 }
4540 }
4541
4542 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
4543 }
4544
4545 /* Record the fact that DECL was used within the OpenMP context CTX.
4546 IN_CODE is true when real code uses DECL, and false when we should
4547 merely emit default(none) errors. Return true if DECL is going to
4548 be remapped and thus DECL shouldn't be gimplified into its
4549 DECL_VALUE_EXPR (if any). */
4550
4551 static bool
4552 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
4553 {
4554 splay_tree_node n;
4555 unsigned flags = in_code ? GOVD_SEEN : 0;
4556 bool ret = false, shared;
4557
4558 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4559 return false;
4560
4561 /* Threadprivate variables are predetermined. */
4562 if (is_global_var (decl))
4563 {
4564 if (DECL_THREAD_LOCAL_P (decl))
4565 return false;
4566
4567 if (DECL_HAS_VALUE_EXPR_P (decl))
4568 {
4569 tree value = get_base_address (DECL_VALUE_EXPR (decl));
4570
4571 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
4572 return false;
4573 }
4574 }
4575
4576 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4577 if (n == NULL)
4578 {
4579 enum omp_clause_default_kind default_kind, kind;
4580
4581 if (!ctx->is_parallel)
4582 goto do_outer;
4583
4584 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
4585 remapped firstprivate instead of shared. To some extent this is
4586 addressed in omp_firstprivatize_type_sizes, but not effectively. */
4587 default_kind = ctx->default_kind;
4588 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
4589 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
4590 default_kind = kind;
4591
4592 switch (default_kind)
4593 {
4594 case OMP_CLAUSE_DEFAULT_NONE:
4595 error ("%qs not specified in enclosing parallel",
4596 IDENTIFIER_POINTER (DECL_NAME (decl)));
4597 error ("%Henclosing parallel", &ctx->location);
4598 /* FALLTHRU */
4599 case OMP_CLAUSE_DEFAULT_SHARED:
4600 flags |= GOVD_SHARED;
4601 break;
4602 case OMP_CLAUSE_DEFAULT_PRIVATE:
4603 flags |= GOVD_PRIVATE;
4604 break;
4605 default:
4606 gcc_unreachable ();
4607 }
4608
4609 omp_add_variable (ctx, decl, flags);
4610
4611 shared = (flags & GOVD_SHARED) != 0;
4612 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
4613 goto do_outer;
4614 }
4615
4616 shared = ((flags | n->value) & GOVD_SHARED) != 0;
4617 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
4618
4619 /* If nothing changed, there's nothing left to do. */
4620 if ((n->value & flags) == flags)
4621 return ret;
4622 flags |= n->value;
4623 n->value = flags;
4624
4625 do_outer:
4626 /* If the variable is private in the current context, then we don't
4627 need to propagate anything to an outer context. */
4628 if (flags & GOVD_PRIVATE)
4629 return ret;
4630 if (ctx->outer_context
4631 && omp_notice_variable (ctx->outer_context, decl, in_code))
4632 return true;
4633 return ret;
4634 }
4635
4636 /* Verify that DECL is private within CTX. If there's specific information
4637 to the contrary in the innermost scope, generate an error. */
4638
4639 static bool
4640 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl)
4641 {
4642 splay_tree_node n;
4643
4644 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4645 if (n != NULL)
4646 {
4647 if (n->value & GOVD_SHARED)
4648 {
4649 if (ctx == gimplify_omp_ctxp)
4650 {
4651 error ("iteration variable %qs should be private",
4652 IDENTIFIER_POINTER (DECL_NAME (decl)));
4653 n->value = GOVD_PRIVATE;
4654 return true;
4655 }
4656 else
4657 return false;
4658 }
4659 else if ((n->value & GOVD_EXPLICIT) != 0
4660 && (ctx == gimplify_omp_ctxp
4661 || (ctx->is_combined_parallel
4662 && gimplify_omp_ctxp->outer_context == ctx)))
4663 {
4664 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
4665 error ("iteration variable %qs should not be firstprivate",
4666 IDENTIFIER_POINTER (DECL_NAME (decl)));
4667 else if ((n->value & GOVD_REDUCTION) != 0)
4668 error ("iteration variable %qs should not be reduction",
4669 IDENTIFIER_POINTER (DECL_NAME (decl)));
4670 }
4671 return true;
4672 }
4673
4674 if (ctx->is_parallel)
4675 return false;
4676 else if (ctx->outer_context)
4677 return omp_is_private (ctx->outer_context, decl);
4678 else
4679 return !is_global_var (decl);
4680 }
4681
4682 /* Return true if DECL is private within a parallel region
4683 that binds to the current construct's context or in parallel
4684 region's REDUCTION clause. */
4685
4686 static bool
4687 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
4688 {
4689 splay_tree_node n;
4690
4691 do
4692 {
4693 ctx = ctx->outer_context;
4694 if (ctx == NULL)
4695 return !(is_global_var (decl)
4696 /* References might be private, but might be shared too. */
4697 || lang_hooks.decls.omp_privatize_by_reference (decl));
4698
4699 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
4700 if (n != NULL)
4701 return (n->value & GOVD_SHARED) == 0;
4702 }
4703 while (!ctx->is_parallel);
4704 return false;
4705 }
4706
4707 /* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
4708 and previous omp contexts. */
4709
4710 static void
4711 gimplify_scan_omp_clauses (tree *list_p, tree *pre_p, bool in_parallel,
4712 bool in_combined_parallel)
4713 {
4714 struct gimplify_omp_ctx *ctx, *outer_ctx;
4715 tree c;
4716
4717 ctx = new_omp_context (in_parallel, in_combined_parallel);
4718 outer_ctx = ctx->outer_context;
4719
4720 while ((c = *list_p) != NULL)
4721 {
4722 enum gimplify_status gs;
4723 bool remove = false;
4724 bool notice_outer = true;
4725 const char *check_non_private = NULL;
4726 unsigned int flags;
4727 tree decl;
4728
4729 switch (OMP_CLAUSE_CODE (c))
4730 {
4731 case OMP_CLAUSE_PRIVATE:
4732 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
4733 notice_outer = false;
4734 goto do_add;
4735 case OMP_CLAUSE_SHARED:
4736 flags = GOVD_SHARED | GOVD_EXPLICIT;
4737 goto do_add;
4738 case OMP_CLAUSE_FIRSTPRIVATE:
4739 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
4740 check_non_private = "firstprivate";
4741 goto do_add;
4742 case OMP_CLAUSE_LASTPRIVATE:
4743 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
4744 check_non_private = "lastprivate";
4745 goto do_add;
4746 case OMP_CLAUSE_REDUCTION:
4747 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
4748 check_non_private = "reduction";
4749 goto do_add;
4750
4751 do_add:
4752 decl = OMP_CLAUSE_DECL (c);
4753 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4754 {
4755 remove = true;
4756 break;
4757 }
4758 omp_add_variable (ctx, decl, flags);
4759 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
4760 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4761 {
4762 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
4763 GOVD_LOCAL | GOVD_SEEN);
4764 gimplify_omp_ctxp = ctx;
4765 push_gimplify_context ();
4766 gimplify_stmt (&OMP_CLAUSE_REDUCTION_INIT (c));
4767 pop_gimplify_context (OMP_CLAUSE_REDUCTION_INIT (c));
4768 push_gimplify_context ();
4769 gimplify_stmt (&OMP_CLAUSE_REDUCTION_MERGE (c));
4770 pop_gimplify_context (OMP_CLAUSE_REDUCTION_MERGE (c));
4771 gimplify_omp_ctxp = outer_ctx;
4772 }
4773 if (notice_outer)
4774 goto do_notice;
4775 break;
4776
4777 case OMP_CLAUSE_COPYIN:
4778 case OMP_CLAUSE_COPYPRIVATE:
4779 decl = OMP_CLAUSE_DECL (c);
4780 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4781 {
4782 remove = true;
4783 break;
4784 }
4785 do_notice:
4786 if (outer_ctx)
4787 omp_notice_variable (outer_ctx, decl, true);
4788 if (check_non_private
4789 && !in_parallel
4790 && omp_check_private (ctx, decl))
4791 {
4792 error ("%s variable %qs is private in outer context",
4793 check_non_private, IDENTIFIER_POINTER (DECL_NAME (decl)));
4794 remove = true;
4795 }
4796 break;
4797
4798 case OMP_CLAUSE_IF:
4799 OMP_CLAUSE_OPERAND (c, 0)
4800 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
4801 /* Fall through. */
4802
4803 case OMP_CLAUSE_SCHEDULE:
4804 case OMP_CLAUSE_NUM_THREADS:
4805 gs = gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
4806 is_gimple_val, fb_rvalue);
4807 if (gs == GS_ERROR)
4808 remove = true;
4809 break;
4810
4811 case OMP_CLAUSE_NOWAIT:
4812 case OMP_CLAUSE_ORDERED:
4813 break;
4814
4815 case OMP_CLAUSE_DEFAULT:
4816 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
4817 break;
4818
4819 default:
4820 gcc_unreachable ();
4821 }
4822
4823 if (remove)
4824 *list_p = OMP_CLAUSE_CHAIN (c);
4825 else
4826 list_p = &OMP_CLAUSE_CHAIN (c);
4827 }
4828
4829 gimplify_omp_ctxp = ctx;
4830 }
4831
4832 /* For all variables that were not actually used within the context,
4833 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
4834
4835 static int
4836 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
4837 {
4838 tree *list_p = (tree *) data;
4839 tree decl = (tree) n->key;
4840 unsigned flags = n->value;
4841 enum omp_clause_code code;
4842 tree clause;
4843 bool private_debug;
4844
4845 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
4846 return 0;
4847 if ((flags & GOVD_SEEN) == 0)
4848 return 0;
4849 if (flags & GOVD_DEBUG_PRIVATE)
4850 {
4851 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
4852 private_debug = true;
4853 }
4854 else
4855 private_debug
4856 = lang_hooks.decls.omp_private_debug_clause (decl,
4857 !!(flags & GOVD_SHARED));
4858 if (private_debug)
4859 code = OMP_CLAUSE_PRIVATE;
4860 else if (flags & GOVD_SHARED)
4861 {
4862 if (is_global_var (decl))
4863 return 0;
4864 code = OMP_CLAUSE_SHARED;
4865 }
4866 else if (flags & GOVD_PRIVATE)
4867 code = OMP_CLAUSE_PRIVATE;
4868 else if (flags & GOVD_FIRSTPRIVATE)
4869 code = OMP_CLAUSE_FIRSTPRIVATE;
4870 else
4871 gcc_unreachable ();
4872
4873 clause = build_omp_clause (code);
4874 OMP_CLAUSE_DECL (clause) = decl;
4875 OMP_CLAUSE_CHAIN (clause) = *list_p;
4876 if (private_debug)
4877 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
4878 *list_p = clause;
4879
4880 return 0;
4881 }
4882
4883 static void
4884 gimplify_adjust_omp_clauses (tree *list_p)
4885 {
4886 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
4887 tree c, decl;
4888
4889 while ((c = *list_p) != NULL)
4890 {
4891 splay_tree_node n;
4892 bool remove = false;
4893
4894 switch (OMP_CLAUSE_CODE (c))
4895 {
4896 case OMP_CLAUSE_PRIVATE:
4897 case OMP_CLAUSE_SHARED:
4898 case OMP_CLAUSE_FIRSTPRIVATE:
4899 decl = OMP_CLAUSE_DECL (c);
4900 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
4901 remove = !(n->value & GOVD_SEEN);
4902 if (! remove)
4903 {
4904 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
4905 if ((n->value & GOVD_DEBUG_PRIVATE)
4906 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
4907 {
4908 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
4909 || ((n->value & GOVD_DATA_SHARE_CLASS)
4910 == GOVD_PRIVATE));
4911 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
4912 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
4913 }
4914 }
4915 break;
4916
4917 case OMP_CLAUSE_LASTPRIVATE:
4918 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
4919 accurately reflect the presence of a FIRSTPRIVATE clause. */
4920 decl = OMP_CLAUSE_DECL (c);
4921 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
4922 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4923 = (n->value & GOVD_FIRSTPRIVATE) != 0;
4924 break;
4925
4926 case OMP_CLAUSE_REDUCTION:
4927 case OMP_CLAUSE_COPYIN:
4928 case OMP_CLAUSE_COPYPRIVATE:
4929 case OMP_CLAUSE_IF:
4930 case OMP_CLAUSE_NUM_THREADS:
4931 case OMP_CLAUSE_SCHEDULE:
4932 case OMP_CLAUSE_NOWAIT:
4933 case OMP_CLAUSE_ORDERED:
4934 case OMP_CLAUSE_DEFAULT:
4935 break;
4936
4937 default:
4938 gcc_unreachable ();
4939 }
4940
4941 if (remove)
4942 *list_p = OMP_CLAUSE_CHAIN (c);
4943 else
4944 list_p = &OMP_CLAUSE_CHAIN (c);
4945 }
4946
4947 /* Add in any implicit data sharing. */
4948 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);
4949
4950 gimplify_omp_ctxp = ctx->outer_context;
4951 delete_omp_context (ctx);
4952 }
4953
4954 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
4955 gimplification of the body, as well as scanning the body for used
4956 variables. We need to do this scan now, because variable-sized
4957 decls will be decomposed during gimplification. */
4958
4959 static enum gimplify_status
4960 gimplify_omp_parallel (tree *expr_p, tree *pre_p)
4961 {
4962 tree expr = *expr_p;
4963
4964 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p, true,
4965 OMP_PARALLEL_COMBINED (expr));
4966
4967 push_gimplify_context ();
4968
4969 gimplify_stmt (&OMP_PARALLEL_BODY (expr));
4970
4971 if (TREE_CODE (OMP_PARALLEL_BODY (expr)) == BIND_EXPR)
4972 pop_gimplify_context (OMP_PARALLEL_BODY (expr));
4973 else
4974 pop_gimplify_context (NULL_TREE);
4975
4976 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
4977
4978 return GS_ALL_DONE;
4979 }
4980
4981 /* Gimplify the gross structure of an OMP_FOR statement. */
4982
4983 static enum gimplify_status
4984 gimplify_omp_for (tree *expr_p, tree *pre_p)
4985 {
4986 tree for_stmt, decl, t;
4987 enum gimplify_status ret = GS_OK;
4988
4989 for_stmt = *expr_p;
4990
4991 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, false, false);
4992
4993 t = OMP_FOR_INIT (for_stmt);
4994 gcc_assert (TREE_CODE (t) == MODIFY_EXPR
4995 || TREE_CODE (t) == GIMPLE_MODIFY_STMT);
4996 decl = GENERIC_TREE_OPERAND (t, 0);
4997 gcc_assert (DECL_P (decl));
4998 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl)));
4999
5000 /* Make sure the iteration variable is private. */
5001 if (omp_is_private (gimplify_omp_ctxp, decl))
5002 omp_notice_variable (gimplify_omp_ctxp, decl, true);
5003 else
5004 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
5005
5006 ret |= gimplify_expr (&GENERIC_TREE_OPERAND (t, 1),
5007 &OMP_FOR_PRE_BODY (for_stmt),
5008 NULL, is_gimple_val, fb_rvalue);
5009
5010 tree_to_gimple_tuple (&OMP_FOR_INIT (for_stmt));
5011
5012 t = OMP_FOR_COND (for_stmt);
5013 gcc_assert (COMPARISON_CLASS_P (t));
5014 gcc_assert (GENERIC_TREE_OPERAND (t, 0) == decl);
5015
5016 ret |= gimplify_expr (&GENERIC_TREE_OPERAND (t, 1),
5017 &OMP_FOR_PRE_BODY (for_stmt),
5018 NULL, is_gimple_val, fb_rvalue);
5019
5020 tree_to_gimple_tuple (&OMP_FOR_INCR (for_stmt));
5021 t = OMP_FOR_INCR (for_stmt);
5022 switch (TREE_CODE (t))
5023 {
5024 case PREINCREMENT_EXPR:
5025 case POSTINCREMENT_EXPR:
5026 t = build_int_cst (TREE_TYPE (decl), 1);
5027 t = build2 (PLUS_EXPR, TREE_TYPE (decl), decl, t);
5028 t = build_gimple_modify_stmt (decl, t);
5029 OMP_FOR_INCR (for_stmt) = t;
5030 break;
5031
5032 case PREDECREMENT_EXPR:
5033 case POSTDECREMENT_EXPR:
5034 t = build_int_cst (TREE_TYPE (decl), -1);
5035 t = build2 (PLUS_EXPR, TREE_TYPE (decl), decl, t);
5036 t = build_gimple_modify_stmt (decl, t);
5037 OMP_FOR_INCR (for_stmt) = t;
5038 break;
5039
5040 case GIMPLE_MODIFY_STMT:
5041 gcc_assert (GIMPLE_STMT_OPERAND (t, 0) == decl);
5042 t = GIMPLE_STMT_OPERAND (t, 1);
5043 switch (TREE_CODE (t))
5044 {
5045 case PLUS_EXPR:
5046 if (TREE_OPERAND (t, 1) == decl)
5047 {
5048 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
5049 TREE_OPERAND (t, 0) = decl;
5050 break;
5051 }
5052 case MINUS_EXPR:
5053 gcc_assert (TREE_OPERAND (t, 0) == decl);
5054 break;
5055 default:
5056 gcc_unreachable ();
5057 }
5058
5059 ret |= gimplify_expr (&TREE_OPERAND (t, 1), &OMP_FOR_PRE_BODY (for_stmt),
5060 NULL, is_gimple_val, fb_rvalue);
5061 break;
5062
5063 default:
5064 gcc_unreachable ();
5065 }
5066
5067 gimplify_to_stmt_list (&OMP_FOR_BODY (for_stmt));
5068 gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));
5069
5070 return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
5071 }
5072
5073 /* Gimplify the gross structure of other OpenMP worksharing constructs.
5074 In particular, OMP_SECTIONS and OMP_SINGLE. */
5075
5076 static enum gimplify_status
5077 gimplify_omp_workshare (tree *expr_p, tree *pre_p)
5078 {
5079 tree stmt = *expr_p;
5080
5081 gimplify_scan_omp_clauses (&OMP_CLAUSES (stmt), pre_p, false, false);
5082 gimplify_to_stmt_list (&OMP_BODY (stmt));
5083 gimplify_adjust_omp_clauses (&OMP_CLAUSES (stmt));
5084
5085 return GS_ALL_DONE;
5086 }
5087
5088 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
5089 stabilized the lhs of the atomic operation as *ADDR. Return true if
5090 EXPR is this stabilized form. */
5091
5092 static bool
5093 goa_lhs_expr_p (tree expr, tree addr)
5094 {
5095 /* Also include casts to other type variants. The C front end is fond
5096 of adding these for e.g. volatile variables. This is like
5097 STRIP_TYPE_NOPS but includes the main variant lookup. */
5098 while ((TREE_CODE (expr) == NOP_EXPR
5099 || TREE_CODE (expr) == CONVERT_EXPR
5100 || TREE_CODE (expr) == NON_LVALUE_EXPR)
5101 && TREE_OPERAND (expr, 0) != error_mark_node
5102 && (TYPE_MAIN_VARIANT (TREE_TYPE (expr))
5103 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))))
5104 expr = TREE_OPERAND (expr, 0);
5105
5106 if (TREE_CODE (expr) == INDIRECT_REF && TREE_OPERAND (expr, 0) == addr)
5107 return true;
5108 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
5109 return true;
5110 return false;
5111 }
5112
5113 /* A subroutine of gimplify_omp_atomic. Attempt to implement the atomic
5114 operation as a __sync_fetch_and_op builtin. INDEX is log2 of the
5115 size of the data type, and thus usable to find the index of the builtin
5116 decl. Returns GS_UNHANDLED if the expression is not of the proper form. */
5117
5118 static enum gimplify_status
5119 gimplify_omp_atomic_fetch_op (tree *expr_p, tree addr, tree rhs, int index)
5120 {
5121 enum built_in_function base;
5122 tree decl, itype;
5123 enum insn_code *optab;
5124
5125 /* Check for one of the supported fetch-op operations. */
5126 switch (TREE_CODE (rhs))
5127 {
5128 case PLUS_EXPR:
5129 base = BUILT_IN_FETCH_AND_ADD_N;
5130 optab = sync_add_optab;
5131 break;
5132 case MINUS_EXPR:
5133 base = BUILT_IN_FETCH_AND_SUB_N;
5134 optab = sync_add_optab;
5135 break;
5136 case BIT_AND_EXPR:
5137 base = BUILT_IN_FETCH_AND_AND_N;
5138 optab = sync_and_optab;
5139 break;
5140 case BIT_IOR_EXPR:
5141 base = BUILT_IN_FETCH_AND_OR_N;
5142 optab = sync_ior_optab;
5143 break;
5144 case BIT_XOR_EXPR:
5145 base = BUILT_IN_FETCH_AND_XOR_N;
5146 optab = sync_xor_optab;
5147 break;
5148 default:
5149 return GS_UNHANDLED;
5150 }
5151
5152 /* Make sure the expression is of the proper form. */
5153 if (goa_lhs_expr_p (TREE_OPERAND (rhs, 0), addr))
5154 rhs = TREE_OPERAND (rhs, 1);
5155 else if (commutative_tree_code (TREE_CODE (rhs))
5156 && goa_lhs_expr_p (TREE_OPERAND (rhs, 1), addr))
5157 rhs = TREE_OPERAND (rhs, 0);
5158 else
5159 return GS_UNHANDLED;
5160
5161 decl = built_in_decls[base + index + 1];
5162 itype = TREE_TYPE (TREE_TYPE (decl));
5163
5164 if (optab[TYPE_MODE (itype)] == CODE_FOR_nothing)
5165 return GS_UNHANDLED;
5166
5167 *expr_p = build_call_expr (decl, 2, addr, fold_convert (itype, rhs));
5168 return GS_OK;
5169 }
5170
5171 /* A subroutine of gimplify_omp_atomic_pipeline. Walk *EXPR_P and replace
5172 appearances of *LHS_ADDR with LHS_VAR. If an expression does not involve
5173 the lhs, evaluate it into a temporary. Return 1 if the lhs appeared as
5174 a subexpression, 0 if it did not, or -1 if an error was encountered. */
5175
5176 static int
5177 goa_stabilize_expr (tree *expr_p, tree *pre_p, tree lhs_addr, tree lhs_var)
5178 {
5179 tree expr = *expr_p;
5180 int saw_lhs;
5181
5182 if (goa_lhs_expr_p (expr, lhs_addr))
5183 {
5184 *expr_p = lhs_var;
5185 return 1;
5186 }
5187 if (is_gimple_val (expr))
5188 return 0;
5189
5190 saw_lhs = 0;
5191 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
5192 {
5193 case tcc_binary:
5194 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
5195 lhs_addr, lhs_var);
5196 case tcc_unary:
5197 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
5198 lhs_addr, lhs_var);
5199 break;
5200 default:
5201 break;
5202 }
5203
5204 if (saw_lhs == 0)
5205 {
5206 enum gimplify_status gs;
5207 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
5208 if (gs != GS_ALL_DONE)
5209 saw_lhs = -1;
5210 }
5211
5212 return saw_lhs;
5213 }
5214
5215 /* A subroutine of gimplify_omp_atomic. Implement the atomic operation as:
5216
5217 oldval = *addr;
5218 repeat:
5219 newval = rhs; // with oldval replacing *addr in rhs
5220 oldval = __sync_val_compare_and_swap (addr, oldval, newval);
5221 if (oldval != newval)
5222 goto repeat;
5223
5224 INDEX is log2 of the size of the data type, and thus usable to find the
5225 index of the builtin decl. */
5226
5227 static enum gimplify_status
5228 gimplify_omp_atomic_pipeline (tree *expr_p, tree *pre_p, tree addr,
5229 tree rhs, int index)
5230 {
5231 tree oldval, oldival, oldival2, newval, newival, label;
5232 tree type, itype, cmpxchg, x, iaddr;
5233
5234 cmpxchg = built_in_decls[BUILT_IN_VAL_COMPARE_AND_SWAP_N + index + 1];
5235 type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
5236 itype = TREE_TYPE (TREE_TYPE (cmpxchg));
5237
5238 if (sync_compare_and_swap[TYPE_MODE (itype)] == CODE_FOR_nothing)
5239 return GS_UNHANDLED;
5240
5241 oldval = create_tmp_var (type, NULL);
5242 newval = create_tmp_var (type, NULL);
5243
5244 /* Precompute as much of RHS as possible. In the same walk, replace
5245 occurrences of the lhs value with our temporary. */
5246 if (goa_stabilize_expr (&rhs, pre_p, addr, oldval) < 0)
5247 return GS_ERROR;
5248
5249 x = build_fold_indirect_ref (addr);
5250 x = build_gimple_modify_stmt (oldval, x);
5251 gimplify_and_add (x, pre_p);
5252
5253 /* For floating-point values, we'll need to view-convert them to integers
5254 so that we can perform the atomic compare and swap. Simplify the
5255 following code by always setting up the "i"ntegral variables. */
5256 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
5257 {
5258 oldival = oldval;
5259 newival = newval;
5260 iaddr = addr;
5261 }
5262 else
5263 {
5264 oldival = create_tmp_var (itype, NULL);
5265 newival = create_tmp_var (itype, NULL);
5266
5267 x = build1 (VIEW_CONVERT_EXPR, itype, oldval);
5268 x = build_gimple_modify_stmt (oldival, x);
5269 gimplify_and_add (x, pre_p);
5270 iaddr = fold_convert (build_pointer_type (itype), addr);
5271 }
5272
5273 oldival2 = create_tmp_var (itype, NULL);
5274
5275 label = create_artificial_label ();
5276 x = build1 (LABEL_EXPR, void_type_node, label);
5277 gimplify_and_add (x, pre_p);
5278
5279 x = build_gimple_modify_stmt (newval, rhs);
5280 gimplify_and_add (x, pre_p);
5281
5282 if (newval != newival)
5283 {
5284 x = build1 (VIEW_CONVERT_EXPR, itype, newval);
5285 x = build_gimple_modify_stmt (newival, x);
5286 gimplify_and_add (x, pre_p);
5287 }
5288
5289 x = build_gimple_modify_stmt (oldival2, fold_convert (itype, oldival));
5290 gimplify_and_add (x, pre_p);
5291
5292 x = build_call_expr (cmpxchg, 3, iaddr, fold_convert (itype, oldival),
5293 fold_convert (itype, newival));
5294 if (oldval == oldival)
5295 x = fold_convert (type, x);
5296 x = build_gimple_modify_stmt (oldival, x);
5297 gimplify_and_add (x, pre_p);
5298
5299 /* For floating point, be prepared for the loop backedge. */
5300 if (oldval != oldival)
5301 {
5302 x = build1 (VIEW_CONVERT_EXPR, type, oldival);
5303 x = build_gimple_modify_stmt (oldval, x);
5304 gimplify_and_add (x, pre_p);
5305 }
5306
5307 /* Note that we always perform the comparison as an integer, even for
5308 floating point. This allows the atomic operation to properly
5309 succeed even with NaNs and -0.0. */
5310 x = build3 (COND_EXPR, void_type_node,
5311 build2 (NE_EXPR, boolean_type_node,
5312 fold_convert (itype, oldival), oldival2),
5313 build1 (GOTO_EXPR, void_type_node, label), NULL);
5314 gimplify_and_add (x, pre_p);
5315
5316 *expr_p = NULL;
5317 return GS_ALL_DONE;
5318 }
5319
5320 /* A subroutine of gimplify_omp_atomic. Implement the atomic operation as:
5321
5322 GOMP_atomic_start ();
5323 *addr = rhs;
5324 GOMP_atomic_end ();
5325
5326 The result is not globally atomic, but works so long as all parallel
5327 references are within #pragma omp atomic directives. According to
5328 responses received from omp@openmp.org, appears to be within spec.
5329 Which makes sense, since that's how several other compilers handle
5330 this situation as well. */
5331
5332 static enum gimplify_status
5333 gimplify_omp_atomic_mutex (tree *expr_p, tree *pre_p, tree addr, tree rhs)
5334 {
5335 tree t;
5336
5337 t = built_in_decls[BUILT_IN_GOMP_ATOMIC_START];
5338 t = build_call_expr (t, 0);
5339 gimplify_and_add (t, pre_p);
5340
5341 t = build_fold_indirect_ref (addr);
5342 t = build_gimple_modify_stmt (t, rhs);
5343 gimplify_and_add (t, pre_p);
5344
5345 t = built_in_decls[BUILT_IN_GOMP_ATOMIC_END];
5346 t = build_call_expr (t, 0);
5347 gimplify_and_add (t, pre_p);
5348
5349 *expr_p = NULL;
5350 return GS_ALL_DONE;
5351 }
5352
5353 /* Gimplify an OMP_ATOMIC statement. */
5354
5355 static enum gimplify_status
5356 gimplify_omp_atomic (tree *expr_p, tree *pre_p)
5357 {
5358 tree addr = TREE_OPERAND (*expr_p, 0);
5359 tree rhs = TREE_OPERAND (*expr_p, 1);
5360 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
5361 HOST_WIDE_INT index;
5362
5363 /* Make sure the type is one of the supported sizes. */
5364 index = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
5365 index = exact_log2 (index);
5366 if (index >= 0 && index <= 4)
5367 {
5368 enum gimplify_status gs;
5369 unsigned int align;
5370
5371 if (DECL_P (TREE_OPERAND (addr, 0)))
5372 align = DECL_ALIGN_UNIT (TREE_OPERAND (addr, 0));
5373 else if (TREE_CODE (TREE_OPERAND (addr, 0)) == COMPONENT_REF
5374 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (addr, 0), 1))
5375 == FIELD_DECL)
5376 align = DECL_ALIGN_UNIT (TREE_OPERAND (TREE_OPERAND (addr, 0), 1));
5377 else
5378 align = TYPE_ALIGN_UNIT (type);
5379
5380 /* __sync builtins require strict data alignment. */
5381 if (exact_log2 (align) >= index)
5382 {
5383 /* When possible, use specialized atomic update functions. */
5384 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
5385 {
5386 gs = gimplify_omp_atomic_fetch_op (expr_p, addr, rhs, index);
5387 if (gs != GS_UNHANDLED)
5388 return gs;
5389 }
5390
5391 /* If we don't have specialized __sync builtins, try and implement
5392 as a compare and swap loop. */
5393 gs = gimplify_omp_atomic_pipeline (expr_p, pre_p, addr, rhs, index);
5394 if (gs != GS_UNHANDLED)
5395 return gs;
5396 }
5397 }
5398
5399 /* The ultimate fallback is wrapping the operation in a mutex. */
5400 return gimplify_omp_atomic_mutex (expr_p, pre_p, addr, rhs);
5401 }
5402
5403 /* Gimplifies the expression tree pointed to by EXPR_P. Return 0 if
5404 gimplification failed.
5405
5406 PRE_P points to the list where side effects that must happen before
5407 EXPR should be stored.
5408
5409 POST_P points to the list where side effects that must happen after
5410 EXPR should be stored, or NULL if there is no suitable list. In
5411 that case, we copy the result to a temporary, emit the
5412 post-effects, and then return the temporary.
5413
5414 GIMPLE_TEST_F points to a function that takes a tree T and
5415 returns nonzero if T is in the GIMPLE form requested by the
5416 caller. The GIMPLE predicates are in tree-gimple.c.
5417
5418 This test is used twice. Before gimplification, the test is
5419 invoked to determine whether *EXPR_P is already gimple enough. If
5420 that fails, *EXPR_P is gimplified according to its code and
5421 GIMPLE_TEST_F is called again. If the test still fails, then a new
5422 temporary variable is created and assigned the value of the
5423 gimplified expression.
5424
5425 FALLBACK tells the function what sort of a temporary we want. If the 1
5426 bit is set, an rvalue is OK. If the 2 bit is set, an lvalue is OK.
5427 If both are set, either is OK, but an lvalue is preferable.
5428
5429 The return value is either GS_ERROR or GS_ALL_DONE, since this function
5430 iterates until solution. */
5431
5432 enum gimplify_status
5433 gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
5434 bool (* gimple_test_f) (tree), fallback_t fallback)
5435 {
5436 tree tmp;
5437 tree internal_pre = NULL_TREE;
5438 tree internal_post = NULL_TREE;
5439 tree save_expr;
5440 int is_statement = (pre_p == NULL);
5441 location_t saved_location;
5442 enum gimplify_status ret;
5443
5444 save_expr = *expr_p;
5445 if (save_expr == NULL_TREE)
5446 return GS_ALL_DONE;
5447
5448 /* We used to check the predicate here and return immediately if it
5449 succeeds. This is wrong; the design is for gimplification to be
5450 idempotent, and for the predicates to only test for valid forms, not
5451 whether they are fully simplified. */
5452
5453 /* Set up our internal queues if needed. */
5454 if (pre_p == NULL)
5455 pre_p = &internal_pre;
5456 if (post_p == NULL)
5457 post_p = &internal_post;
5458
5459 saved_location = input_location;
5460 if (save_expr != error_mark_node
5461 && EXPR_HAS_LOCATION (*expr_p))
5462 input_location = EXPR_LOCATION (*expr_p);
5463
5464 /* Loop over the specific gimplifiers until the toplevel node
5465 remains the same. */
5466 do
5467 {
5468 /* Strip away as many useless type conversions as possible
5469 at the toplevel. */
5470 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
5471
5472 /* Remember the expr. */
5473 save_expr = *expr_p;
5474
5475 /* Die, die, die, my darling. */
5476 if (save_expr == error_mark_node
5477 || (!GIMPLE_STMT_P (save_expr)
5478 && TREE_TYPE (save_expr)
5479 && TREE_TYPE (save_expr) == error_mark_node))
5480 {
5481 ret = GS_ERROR;
5482 break;
5483 }
5484
5485 /* Do any language-specific gimplification. */
5486 ret = lang_hooks.gimplify_expr (expr_p, pre_p, post_p);
5487 if (ret == GS_OK)
5488 {
5489 if (*expr_p == NULL_TREE)
5490 break;
5491 if (*expr_p != save_expr)
5492 continue;
5493 }
5494 else if (ret != GS_UNHANDLED)
5495 break;
5496
5497 ret = GS_OK;
5498 switch (TREE_CODE (*expr_p))
5499 {
5500 /* First deal with the special cases. */
5501
5502 case POSTINCREMENT_EXPR:
5503 case POSTDECREMENT_EXPR:
5504 case PREINCREMENT_EXPR:
5505 case PREDECREMENT_EXPR:
5506 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
5507 fallback != fb_none);
5508 break;
5509
5510 case ARRAY_REF:
5511 case ARRAY_RANGE_REF:
5512 case REALPART_EXPR:
5513 case IMAGPART_EXPR:
5514 case COMPONENT_REF:
5515 case VIEW_CONVERT_EXPR:
5516 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
5517 fallback ? fallback : fb_rvalue);
5518 break;
5519
5520 case COND_EXPR:
5521 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
5522 /* C99 code may assign to an array in a structure value of a
5523 conditional expression, and this has undefined behavior
5524 only on execution, so create a temporary if an lvalue is
5525 required. */
5526 if (fallback == fb_lvalue)
5527 {
5528 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
5529 lang_hooks.mark_addressable (*expr_p);
5530 }
5531 break;
5532
5533 case CALL_EXPR:
5534 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
5535 /* C99 code may assign to an array in a structure returned
5536 from a function, and this has undefined behavior only on
5537 execution, so create a temporary if an lvalue is
5538 required. */
5539 if (fallback == fb_lvalue)
5540 {
5541 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
5542 lang_hooks.mark_addressable (*expr_p);
5543 }
5544 break;
5545
5546 case TREE_LIST:
5547 gcc_unreachable ();
5548
5549 case COMPOUND_EXPR:
5550 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
5551 break;
5552
5553 case MODIFY_EXPR:
5554 case GIMPLE_MODIFY_STMT:
5555 case INIT_EXPR:
5556 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
5557 fallback != fb_none);
5558
5559 if (*expr_p)
5560 {
5561 /* The distinction between MODIFY_EXPR and INIT_EXPR is no longer
5562 useful. */
5563 if (TREE_CODE (*expr_p) == INIT_EXPR)
5564 TREE_SET_CODE (*expr_p, MODIFY_EXPR);
5565
5566 /* Convert MODIFY_EXPR to GIMPLE_MODIFY_STMT. */
5567 if (TREE_CODE (*expr_p) == MODIFY_EXPR)
5568 tree_to_gimple_tuple (expr_p);
5569 }
5570
5571 break;
5572
5573 case TRUTH_ANDIF_EXPR:
5574 case TRUTH_ORIF_EXPR:
5575 ret = gimplify_boolean_expr (expr_p);
5576 break;
5577
5578 case TRUTH_NOT_EXPR:
5579 TREE_OPERAND (*expr_p, 0)
5580 = gimple_boolify (TREE_OPERAND (*expr_p, 0));
5581 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5582 is_gimple_val, fb_rvalue);
5583 recalculate_side_effects (*expr_p);
5584 break;
5585
5586 case ADDR_EXPR:
5587 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
5588 break;
5589
5590 case VA_ARG_EXPR:
5591 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
5592 break;
5593
5594 case CONVERT_EXPR:
5595 case NOP_EXPR:
5596 if (IS_EMPTY_STMT (*expr_p))
5597 {
5598 ret = GS_ALL_DONE;
5599 break;
5600 }
5601
5602 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
5603 || fallback == fb_none)
5604 {
5605 /* Just strip a conversion to void (or in void context) and
5606 try again. */
5607 *expr_p = TREE_OPERAND (*expr_p, 0);
5608 break;
5609 }
5610
5611 ret = gimplify_conversion (expr_p);
5612 if (ret == GS_ERROR)
5613 break;
5614 if (*expr_p != save_expr)
5615 break;
5616 /* FALLTHRU */
5617
5618 case FIX_TRUNC_EXPR:
5619 /* unary_expr: ... | '(' cast ')' val | ... */
5620 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5621 is_gimple_val, fb_rvalue);
5622 recalculate_side_effects (*expr_p);
5623 break;
5624
5625 case INDIRECT_REF:
5626 *expr_p = fold_indirect_ref (*expr_p);
5627 if (*expr_p != save_expr)
5628 break;
5629 /* else fall through. */
5630 case ALIGN_INDIRECT_REF:
5631 case MISALIGNED_INDIRECT_REF:
5632 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5633 is_gimple_reg, fb_rvalue);
5634 recalculate_side_effects (*expr_p);
5635 break;
5636
5637 /* Constants need not be gimplified. */
5638 case INTEGER_CST:
5639 case REAL_CST:
5640 case STRING_CST:
5641 case COMPLEX_CST:
5642 case VECTOR_CST:
5643 ret = GS_ALL_DONE;
5644 break;
5645
5646 case CONST_DECL:
5647 /* If we require an lvalue, such as for ADDR_EXPR, retain the
5648 CONST_DECL node. Otherwise the decl is replaceable by its
5649 value. */
5650 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
5651 if (fallback & fb_lvalue)
5652 ret = GS_ALL_DONE;
5653 else
5654 *expr_p = DECL_INITIAL (*expr_p);
5655 break;
5656
5657 case DECL_EXPR:
5658 ret = gimplify_decl_expr (expr_p);
5659 break;
5660
5661 case EXC_PTR_EXPR:
5662 /* FIXME make this a decl. */
5663 ret = GS_ALL_DONE;
5664 break;
5665
5666 case BIND_EXPR:
5667 ret = gimplify_bind_expr (expr_p, pre_p);
5668 break;
5669
5670 case LOOP_EXPR:
5671 ret = gimplify_loop_expr (expr_p, pre_p);
5672 break;
5673
5674 case SWITCH_EXPR:
5675 ret = gimplify_switch_expr (expr_p, pre_p);
5676 break;
5677
5678 case EXIT_EXPR:
5679 ret = gimplify_exit_expr (expr_p);
5680 break;
5681
5682 case GOTO_EXPR:
5683 /* If the target is not LABEL, then it is a computed jump
5684 and the target needs to be gimplified. */
5685 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
5686 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
5687 NULL, is_gimple_val, fb_rvalue);
5688 break;
5689
5690 case LABEL_EXPR:
5691 ret = GS_ALL_DONE;
5692 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
5693 == current_function_decl);
5694 break;
5695
5696 case CASE_LABEL_EXPR:
5697 ret = gimplify_case_label_expr (expr_p);
5698 break;
5699
5700 case RETURN_EXPR:
5701 ret = gimplify_return_expr (*expr_p, pre_p);
5702 break;
5703
5704 case CONSTRUCTOR:
5705 /* Don't reduce this in place; let gimplify_init_constructor work its
5706 magic. Buf if we're just elaborating this for side effects, just
5707 gimplify any element that has side-effects. */
5708 if (fallback == fb_none)
5709 {
5710 unsigned HOST_WIDE_INT ix;
5711 constructor_elt *ce;
5712 tree temp = NULL_TREE;
5713 for (ix = 0;
5714 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (*expr_p),
5715 ix, ce);
5716 ix++)
5717 if (TREE_SIDE_EFFECTS (ce->value))
5718 append_to_statement_list (ce->value, &temp);
5719
5720 *expr_p = temp;
5721 ret = GS_OK;
5722 }
5723 /* C99 code may assign to an array in a constructed
5724 structure or union, and this has undefined behavior only
5725 on execution, so create a temporary if an lvalue is
5726 required. */
5727 else if (fallback == fb_lvalue)
5728 {
5729 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
5730 lang_hooks.mark_addressable (*expr_p);
5731 }
5732 else
5733 ret = GS_ALL_DONE;
5734 break;
5735
5736 /* The following are special cases that are not handled by the
5737 original GIMPLE grammar. */
5738
5739 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
5740 eliminated. */
5741 case SAVE_EXPR:
5742 ret = gimplify_save_expr (expr_p, pre_p, post_p);
5743 break;
5744
5745 case BIT_FIELD_REF:
5746 {
5747 enum gimplify_status r0, r1, r2;
5748
5749 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5750 is_gimple_lvalue, fb_either);
5751 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
5752 is_gimple_val, fb_rvalue);
5753 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p, post_p,
5754 is_gimple_val, fb_rvalue);
5755 recalculate_side_effects (*expr_p);
5756
5757 ret = MIN (r0, MIN (r1, r2));
5758 }
5759 break;
5760
5761 case NON_LVALUE_EXPR:
5762 /* This should have been stripped above. */
5763 gcc_unreachable ();
5764
5765 case ASM_EXPR:
5766 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
5767 break;
5768
5769 case TRY_FINALLY_EXPR:
5770 case TRY_CATCH_EXPR:
5771 gimplify_to_stmt_list (&TREE_OPERAND (*expr_p, 0));
5772 gimplify_to_stmt_list (&TREE_OPERAND (*expr_p, 1));
5773 ret = GS_ALL_DONE;
5774 break;
5775
5776 case CLEANUP_POINT_EXPR:
5777 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
5778 break;
5779
5780 case TARGET_EXPR:
5781 ret = gimplify_target_expr (expr_p, pre_p, post_p);
5782 break;
5783
5784 case CATCH_EXPR:
5785 gimplify_to_stmt_list (&CATCH_BODY (*expr_p));
5786 ret = GS_ALL_DONE;
5787 break;
5788
5789 case EH_FILTER_EXPR:
5790 gimplify_to_stmt_list (&EH_FILTER_FAILURE (*expr_p));
5791 ret = GS_ALL_DONE;
5792 break;
5793
5794 case CHANGE_DYNAMIC_TYPE_EXPR:
5795 ret = gimplify_expr (&CHANGE_DYNAMIC_TYPE_LOCATION (*expr_p),
5796 pre_p, post_p, is_gimple_reg, fb_lvalue);
5797 break;
5798
5799 case OBJ_TYPE_REF:
5800 {
5801 enum gimplify_status r0, r1;
5802 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p, post_p,
5803 is_gimple_val, fb_rvalue);
5804 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p, post_p,
5805 is_gimple_val, fb_rvalue);
5806 ret = MIN (r0, r1);
5807 }
5808 break;
5809
5810 case LABEL_DECL:
5811 /* We get here when taking the address of a label. We mark
5812 the label as "forced"; meaning it can never be removed and
5813 it is a potential target for any computed goto. */
5814 FORCED_LABEL (*expr_p) = 1;
5815 ret = GS_ALL_DONE;
5816 break;
5817
5818 case STATEMENT_LIST:
5819 ret = gimplify_statement_list (expr_p, pre_p);
5820 break;
5821
5822 case WITH_SIZE_EXPR:
5823 {
5824 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
5825 post_p == &internal_post ? NULL : post_p,
5826 gimple_test_f, fallback);
5827 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
5828 is_gimple_val, fb_rvalue);
5829 }
5830 break;
5831
5832 case VAR_DECL:
5833 case PARM_DECL:
5834 ret = gimplify_var_or_parm_decl (expr_p);
5835 break;
5836
5837 case RESULT_DECL:
5838 /* When within an OpenMP context, notice uses of variables. */
5839 if (gimplify_omp_ctxp)
5840 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
5841 ret = GS_ALL_DONE;
5842 break;
5843
5844 case SSA_NAME:
5845 /* Allow callbacks into the gimplifier during optimization. */
5846 ret = GS_ALL_DONE;
5847 break;
5848
5849 case OMP_PARALLEL:
5850 ret = gimplify_omp_parallel (expr_p, pre_p);
5851 break;
5852
5853 case OMP_FOR:
5854 ret = gimplify_omp_for (expr_p, pre_p);
5855 break;
5856
5857 case OMP_SECTIONS:
5858 case OMP_SINGLE:
5859 ret = gimplify_omp_workshare (expr_p, pre_p);
5860 break;
5861
5862 case OMP_SECTION:
5863 case OMP_MASTER:
5864 case OMP_ORDERED:
5865 case OMP_CRITICAL:
5866 gimplify_to_stmt_list (&OMP_BODY (*expr_p));
5867 break;
5868
5869 case OMP_ATOMIC:
5870 ret = gimplify_omp_atomic (expr_p, pre_p);
5871 break;
5872
5873 case OMP_RETURN:
5874 case OMP_CONTINUE:
5875 ret = GS_ALL_DONE;
5876 break;
5877
5878 case PLUS_EXPR:
5879 /* Convert ((type *)A)+offset into &A->field_of_type_and_offset.
5880 The second is gimple immediate saving a need for extra statement.
5881 */
5882 if (POINTER_TYPE_P (TREE_TYPE (*expr_p))
5883 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
5884 && (tmp = maybe_fold_offset_to_reference
5885 (TREE_OPERAND (*expr_p, 0), TREE_OPERAND (*expr_p, 1),
5886 TREE_TYPE (TREE_TYPE (*expr_p)))))
5887 {
5888 *expr_p = build_fold_addr_expr_with_type (tmp,
5889 TREE_TYPE (*expr_p));
5890 break;
5891 }
5892 /* Convert (void *)&a + 4 into (void *)&a[1]. */
5893 if (POINTER_TYPE_P (TREE_TYPE (*expr_p))
5894 && TREE_CODE (TREE_OPERAND (*expr_p, 0)) == NOP_EXPR
5895 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
5896 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p,
5897 0),0)))
5898 && (tmp = maybe_fold_offset_to_reference
5899 (TREE_OPERAND (TREE_OPERAND (*expr_p, 0), 0),
5900 TREE_OPERAND (*expr_p, 1),
5901 TREE_TYPE (TREE_TYPE
5902 (TREE_OPERAND (TREE_OPERAND (*expr_p, 0),
5903 0))))))
5904 {
5905 tmp = build_fold_addr_expr (tmp);
5906 *expr_p = fold_convert (TREE_TYPE (*expr_p), tmp);
5907 break;
5908 }
5909 /* FALLTHRU */
5910 default:
5911 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
5912 {
5913 case tcc_comparison:
5914 /* Handle comparison of objects of non scalar mode aggregates
5915 with a call to memcmp. It would be nice to only have to do
5916 this for variable-sized objects, but then we'd have to allow
5917 the same nest of reference nodes we allow for MODIFY_EXPR and
5918 that's too complex.
5919
5920 Compare scalar mode aggregates as scalar mode values. Using
5921 memcmp for them would be very inefficient at best, and is
5922 plain wrong if bitfields are involved. */
5923
5924 {
5925 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
5926
5927 if (!AGGREGATE_TYPE_P (type))
5928 goto expr_2;
5929 else if (TYPE_MODE (type) != BLKmode)
5930 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
5931 else
5932 ret = gimplify_variable_sized_compare (expr_p);
5933
5934 break;
5935 }
5936
5937 /* If *EXPR_P does not need to be special-cased, handle it
5938 according to its class. */
5939 case tcc_unary:
5940 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
5941 post_p, is_gimple_val, fb_rvalue);
5942 break;
5943
5944 case tcc_binary:
5945 expr_2:
5946 {
5947 enum gimplify_status r0, r1;
5948
5949 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
5950 post_p, is_gimple_val, fb_rvalue);
5951 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
5952 post_p, is_gimple_val, fb_rvalue);
5953
5954 ret = MIN (r0, r1);
5955 break;
5956 }
5957
5958 case tcc_declaration:
5959 case tcc_constant:
5960 ret = GS_ALL_DONE;
5961 goto dont_recalculate;
5962
5963 default:
5964 gcc_assert (TREE_CODE (*expr_p) == TRUTH_AND_EXPR
5965 || TREE_CODE (*expr_p) == TRUTH_OR_EXPR
5966 || TREE_CODE (*expr_p) == TRUTH_XOR_EXPR);
5967 goto expr_2;
5968 }
5969
5970 recalculate_side_effects (*expr_p);
5971 dont_recalculate:
5972 break;
5973 }
5974
5975 /* If we replaced *expr_p, gimplify again. */
5976 if (ret == GS_OK && (*expr_p == NULL || *expr_p == save_expr))
5977 ret = GS_ALL_DONE;
5978 }
5979 while (ret == GS_OK);
5980
5981 /* If we encountered an error_mark somewhere nested inside, either
5982 stub out the statement or propagate the error back out. */
5983 if (ret == GS_ERROR)
5984 {
5985 if (is_statement)
5986 *expr_p = NULL;
5987 goto out;
5988 }
5989
5990 /* This was only valid as a return value from the langhook, which
5991 we handled. Make sure it doesn't escape from any other context. */
5992 gcc_assert (ret != GS_UNHANDLED);
5993
5994 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
5995 {
5996 /* We aren't looking for a value, and we don't have a valid
5997 statement. If it doesn't have side-effects, throw it away. */
5998 if (!TREE_SIDE_EFFECTS (*expr_p))
5999 *expr_p = NULL;
6000 else if (!TREE_THIS_VOLATILE (*expr_p))
6001 {
6002 /* This is probably a _REF that contains something nested that
6003 has side effects. Recurse through the operands to find it. */
6004 enum tree_code code = TREE_CODE (*expr_p);
6005
6006 switch (code)
6007 {
6008 case COMPONENT_REF:
6009 case REALPART_EXPR:
6010 case IMAGPART_EXPR:
6011 case VIEW_CONVERT_EXPR:
6012 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6013 gimple_test_f, fallback);
6014 break;
6015
6016 case ARRAY_REF:
6017 case ARRAY_RANGE_REF:
6018 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6019 gimple_test_f, fallback);
6020 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
6021 gimple_test_f, fallback);
6022 break;
6023
6024 default:
6025 /* Anything else with side-effects must be converted to
6026 a valid statement before we get here. */
6027 gcc_unreachable ();
6028 }
6029
6030 *expr_p = NULL;
6031 }
6032 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
6033 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
6034 {
6035 /* Historically, the compiler has treated a bare reference
6036 to a non-BLKmode volatile lvalue as forcing a load. */
6037 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
6038 /* Normally, we do not want to create a temporary for a
6039 TREE_ADDRESSABLE type because such a type should not be
6040 copied by bitwise-assignment. However, we make an
6041 exception here, as all we are doing here is ensuring that
6042 we read the bytes that make up the type. We use
6043 create_tmp_var_raw because create_tmp_var will abort when
6044 given a TREE_ADDRESSABLE type. */
6045 tree tmp = create_tmp_var_raw (type, "vol");
6046 gimple_add_tmp_var (tmp);
6047 *expr_p = build_gimple_modify_stmt (tmp, *expr_p);
6048 }
6049 else
6050 /* We can't do anything useful with a volatile reference to
6051 an incomplete type, so just throw it away. Likewise for
6052 a BLKmode type, since any implicit inner load should
6053 already have been turned into an explicit one by the
6054 gimplification process. */
6055 *expr_p = NULL;
6056 }
6057
6058 /* If we are gimplifying at the statement level, we're done. Tack
6059 everything together and replace the original statement with the
6060 gimplified form. */
6061 if (fallback == fb_none || is_statement)
6062 {
6063 if (internal_pre || internal_post)
6064 {
6065 append_to_statement_list (*expr_p, &internal_pre);
6066 append_to_statement_list (internal_post, &internal_pre);
6067 annotate_all_with_locus (&internal_pre, input_location);
6068 *expr_p = internal_pre;
6069 }
6070 else if (!*expr_p)
6071 ;
6072 else if (TREE_CODE (*expr_p) == STATEMENT_LIST)
6073 annotate_all_with_locus (expr_p, input_location);
6074 else
6075 annotate_one_with_locus (*expr_p, input_location);
6076 goto out;
6077 }
6078
6079 /* Otherwise we're gimplifying a subexpression, so the resulting value is
6080 interesting. */
6081
6082 /* If it's sufficiently simple already, we're done. Unless we are
6083 handling some post-effects internally; if that's the case, we need to
6084 copy into a temp before adding the post-effects to the tree. */
6085 if (!internal_post && (*gimple_test_f) (*expr_p))
6086 goto out;
6087
6088 /* Otherwise, we need to create a new temporary for the gimplified
6089 expression. */
6090
6091 /* We can't return an lvalue if we have an internal postqueue. The
6092 object the lvalue refers to would (probably) be modified by the
6093 postqueue; we need to copy the value out first, which means an
6094 rvalue. */
6095 if ((fallback & fb_lvalue) && !internal_post
6096 && is_gimple_addressable (*expr_p))
6097 {
6098 /* An lvalue will do. Take the address of the expression, store it
6099 in a temporary, and replace the expression with an INDIRECT_REF of
6100 that temporary. */
6101 tmp = build_fold_addr_expr (*expr_p);
6102 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
6103 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (tmp)), tmp);
6104 }
6105 else if ((fallback & fb_rvalue) && is_gimple_formal_tmp_rhs (*expr_p))
6106 {
6107 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
6108
6109 /* An rvalue will do. Assign the gimplified expression into a new
6110 temporary TMP and replace the original expression with TMP. */
6111
6112 if (internal_post || (fallback & fb_lvalue))
6113 /* The postqueue might change the value of the expression between
6114 the initialization and use of the temporary, so we can't use a
6115 formal temp. FIXME do we care? */
6116 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6117 else
6118 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
6119
6120 if (TREE_CODE (*expr_p) != SSA_NAME)
6121 DECL_GIMPLE_FORMAL_TEMP_P (*expr_p) = 1;
6122 }
6123 else
6124 {
6125 #ifdef ENABLE_CHECKING
6126 if (!(fallback & fb_mayfail))
6127 {
6128 fprintf (stderr, "gimplification failed:\n");
6129 print_generic_expr (stderr, *expr_p, 0);
6130 debug_tree (*expr_p);
6131 internal_error ("gimplification failed");
6132 }
6133 #endif
6134 gcc_assert (fallback & fb_mayfail);
6135 /* If this is an asm statement, and the user asked for the
6136 impossible, don't die. Fail and let gimplify_asm_expr
6137 issue an error. */
6138 ret = GS_ERROR;
6139 goto out;
6140 }
6141
6142 /* Make sure the temporary matches our predicate. */
6143 gcc_assert ((*gimple_test_f) (*expr_p));
6144
6145 if (internal_post)
6146 {
6147 annotate_all_with_locus (&internal_post, input_location);
6148 append_to_statement_list (internal_post, pre_p);
6149 }
6150
6151 out:
6152 input_location = saved_location;
6153 return ret;
6154 }
6155
6156 /* Look through TYPE for variable-sized objects and gimplify each such
6157 size that we find. Add to LIST_P any statements generated. */
6158
6159 void
6160 gimplify_type_sizes (tree type, tree *list_p)
6161 {
6162 tree field, t;
6163
6164 if (type == NULL || type == error_mark_node)
6165 return;
6166
6167 /* We first do the main variant, then copy into any other variants. */
6168 type = TYPE_MAIN_VARIANT (type);
6169
6170 /* Avoid infinite recursion. */
6171 if (TYPE_SIZES_GIMPLIFIED (type))
6172 return;
6173
6174 TYPE_SIZES_GIMPLIFIED (type) = 1;
6175
6176 switch (TREE_CODE (type))
6177 {
6178 case INTEGER_TYPE:
6179 case ENUMERAL_TYPE:
6180 case BOOLEAN_TYPE:
6181 case REAL_TYPE:
6182 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
6183 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
6184
6185 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6186 {
6187 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
6188 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
6189 }
6190 break;
6191
6192 case ARRAY_TYPE:
6193 /* These types may not have declarations, so handle them here. */
6194 gimplify_type_sizes (TREE_TYPE (type), list_p);
6195 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
6196 break;
6197
6198 case RECORD_TYPE:
6199 case UNION_TYPE:
6200 case QUAL_UNION_TYPE:
6201 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
6202 if (TREE_CODE (field) == FIELD_DECL)
6203 {
6204 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
6205 gimplify_type_sizes (TREE_TYPE (field), list_p);
6206 }
6207 break;
6208
6209 case POINTER_TYPE:
6210 case REFERENCE_TYPE:
6211 /* We used to recurse on the pointed-to type here, which turned out to
6212 be incorrect because its definition might refer to variables not
6213 yet initialized at this point if a forward declaration is involved.
6214
6215 It was actually useful for anonymous pointed-to types to ensure
6216 that the sizes evaluation dominates every possible later use of the
6217 values. Restricting to such types here would be safe since there
6218 is no possible forward declaration around, but would introduce an
6219 undesirable middle-end semantic to anonymity. We then defer to
6220 front-ends the responsibility of ensuring that the sizes are
6221 evaluated both early and late enough, e.g. by attaching artificial
6222 type declarations to the tree. */
6223 break;
6224
6225 default:
6226 break;
6227 }
6228
6229 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
6230 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
6231
6232 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6233 {
6234 TYPE_SIZE (t) = TYPE_SIZE (type);
6235 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
6236 TYPE_SIZES_GIMPLIFIED (t) = 1;
6237 }
6238 }
6239
6240 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
6241 a size or position, has had all of its SAVE_EXPRs evaluated.
6242 We add any required statements to STMT_P. */
6243
6244 void
6245 gimplify_one_sizepos (tree *expr_p, tree *stmt_p)
6246 {
6247 tree type, expr = *expr_p;
6248
6249 /* We don't do anything if the value isn't there, is constant, or contains
6250 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
6251 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
6252 will want to replace it with a new variable, but that will cause problems
6253 if this type is from outside the function. It's OK to have that here. */
6254 if (expr == NULL_TREE || TREE_CONSTANT (expr)
6255 || TREE_CODE (expr) == VAR_DECL
6256 || CONTAINS_PLACEHOLDER_P (expr))
6257 return;
6258
6259 type = TREE_TYPE (expr);
6260 *expr_p = unshare_expr (expr);
6261
6262 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
6263 expr = *expr_p;
6264
6265 /* Verify that we've an exact type match with the original expression.
6266 In particular, we do not wish to drop a "sizetype" in favour of a
6267 type of similar dimensions. We don't want to pollute the generic
6268 type-stripping code with this knowledge because it doesn't matter
6269 for the bulk of GENERIC/GIMPLE. It only matters that TYPE_SIZE_UNIT
6270 and friends retain their "sizetype-ness". */
6271 if (TREE_TYPE (expr) != type
6272 && TREE_CODE (type) == INTEGER_TYPE
6273 && TYPE_IS_SIZETYPE (type))
6274 {
6275 tree tmp;
6276
6277 *expr_p = create_tmp_var (type, NULL);
6278 tmp = build1 (NOP_EXPR, type, expr);
6279 tmp = build_gimple_modify_stmt (*expr_p, tmp);
6280 if (EXPR_HAS_LOCATION (expr))
6281 SET_EXPR_LOCUS (tmp, EXPR_LOCUS (expr));
6282 else
6283 SET_EXPR_LOCATION (tmp, input_location);
6284
6285 gimplify_and_add (tmp, stmt_p);
6286 }
6287 }
6288 \f
6289 #ifdef ENABLE_CHECKING
6290 /* Compare types A and B for a "close enough" match. */
6291
6292 static bool
6293 cpt_same_type (tree a, tree b)
6294 {
6295 if (lang_hooks.types_compatible_p (a, b))
6296 return true;
6297
6298 /* ??? The C++ FE decomposes METHOD_TYPES to FUNCTION_TYPES and doesn't
6299 link them together. This routine is intended to catch type errors
6300 that will affect the optimizers, and the optimizers don't add new
6301 dereferences of function pointers, so ignore it. */
6302 if ((TREE_CODE (a) == FUNCTION_TYPE || TREE_CODE (a) == METHOD_TYPE)
6303 && (TREE_CODE (b) == FUNCTION_TYPE || TREE_CODE (b) == METHOD_TYPE))
6304 return true;
6305
6306 /* ??? The C FE pushes type qualifiers after the fact into the type of
6307 the element from the type of the array. See build_unary_op's handling
6308 of ADDR_EXPR. This seems wrong -- if we were going to do this, we
6309 should have done it when creating the variable in the first place.
6310 Alternately, why aren't the two array types made variants? */
6311 if (TREE_CODE (a) == ARRAY_TYPE && TREE_CODE (b) == ARRAY_TYPE)
6312 return cpt_same_type (TREE_TYPE (a), TREE_TYPE (b));
6313
6314 /* And because of those, we have to recurse down through pointers. */
6315 if (POINTER_TYPE_P (a) && POINTER_TYPE_P (b))
6316 return cpt_same_type (TREE_TYPE (a), TREE_TYPE (b));
6317
6318 return false;
6319 }
6320
6321 /* Check for some cases of the front end missing cast expressions.
6322 The type of a dereference should correspond to the pointer type;
6323 similarly the type of an address should match its object. */
6324
6325 static tree
6326 check_pointer_types_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
6327 void *data ATTRIBUTE_UNUSED)
6328 {
6329 tree t = *tp;
6330 tree ptype, otype, dtype;
6331
6332 switch (TREE_CODE (t))
6333 {
6334 case INDIRECT_REF:
6335 case ARRAY_REF:
6336 otype = TREE_TYPE (t);
6337 ptype = TREE_TYPE (TREE_OPERAND (t, 0));
6338 dtype = TREE_TYPE (ptype);
6339 gcc_assert (cpt_same_type (otype, dtype));
6340 break;
6341
6342 case ADDR_EXPR:
6343 ptype = TREE_TYPE (t);
6344 otype = TREE_TYPE (TREE_OPERAND (t, 0));
6345 dtype = TREE_TYPE (ptype);
6346 if (!cpt_same_type (otype, dtype))
6347 {
6348 /* &array is allowed to produce a pointer to the element, rather than
6349 a pointer to the array type. We must allow this in order to
6350 properly represent assigning the address of an array in C into
6351 pointer to the element type. */
6352 gcc_assert (TREE_CODE (otype) == ARRAY_TYPE
6353 && POINTER_TYPE_P (ptype)
6354 && cpt_same_type (TREE_TYPE (otype), dtype));
6355 break;
6356 }
6357 break;
6358
6359 default:
6360 return NULL_TREE;
6361 }
6362
6363
6364 return NULL_TREE;
6365 }
6366 #endif
6367
6368 /* Gimplify the body of statements pointed to by BODY_P. FNDECL is the
6369 function decl containing BODY. */
6370
6371 void
6372 gimplify_body (tree *body_p, tree fndecl, bool do_parms)
6373 {
6374 location_t saved_location = input_location;
6375 tree body, parm_stmts;
6376
6377 timevar_push (TV_TREE_GIMPLIFY);
6378
6379 gcc_assert (gimplify_ctxp == NULL);
6380 push_gimplify_context ();
6381
6382 /* Unshare most shared trees in the body and in that of any nested functions.
6383 It would seem we don't have to do this for nested functions because
6384 they are supposed to be output and then the outer function gimplified
6385 first, but the g++ front end doesn't always do it that way. */
6386 unshare_body (body_p, fndecl);
6387 unvisit_body (body_p, fndecl);
6388
6389 /* Make sure input_location isn't set to something wierd. */
6390 input_location = DECL_SOURCE_LOCATION (fndecl);
6391
6392 /* Resolve callee-copies. This has to be done before processing
6393 the body so that DECL_VALUE_EXPR gets processed correctly. */
6394 parm_stmts = do_parms ? gimplify_parameters () : NULL;
6395
6396 /* Gimplify the function's body. */
6397 gimplify_stmt (body_p);
6398 body = *body_p;
6399
6400 if (!body)
6401 body = alloc_stmt_list ();
6402 else if (TREE_CODE (body) == STATEMENT_LIST)
6403 {
6404 tree t = expr_only (*body_p);
6405 if (t)
6406 body = t;
6407 }
6408
6409 /* If there isn't an outer BIND_EXPR, add one. */
6410 if (TREE_CODE (body) != BIND_EXPR)
6411 {
6412 tree b = build3 (BIND_EXPR, void_type_node, NULL_TREE,
6413 NULL_TREE, NULL_TREE);
6414 TREE_SIDE_EFFECTS (b) = 1;
6415 append_to_statement_list_force (body, &BIND_EXPR_BODY (b));
6416 body = b;
6417 }
6418
6419 /* If we had callee-copies statements, insert them at the beginning
6420 of the function. */
6421 if (parm_stmts)
6422 {
6423 append_to_statement_list_force (BIND_EXPR_BODY (body), &parm_stmts);
6424 BIND_EXPR_BODY (body) = parm_stmts;
6425 }
6426
6427 /* Unshare again, in case gimplification was sloppy. */
6428 unshare_all_trees (body);
6429
6430 *body_p = body;
6431
6432 pop_gimplify_context (body);
6433 gcc_assert (gimplify_ctxp == NULL);
6434
6435 #ifdef ENABLE_CHECKING
6436 walk_tree (body_p, check_pointer_types_r, NULL, NULL);
6437 #endif
6438
6439 timevar_pop (TV_TREE_GIMPLIFY);
6440 input_location = saved_location;
6441 }
6442
6443 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
6444 node for the function we want to gimplify. */
6445
6446 void
6447 gimplify_function_tree (tree fndecl)
6448 {
6449 tree oldfn, parm, ret;
6450
6451 oldfn = current_function_decl;
6452 current_function_decl = fndecl;
6453 cfun = DECL_STRUCT_FUNCTION (fndecl);
6454 if (cfun == NULL)
6455 allocate_struct_function (fndecl);
6456
6457 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = TREE_CHAIN (parm))
6458 {
6459 /* Preliminarily mark non-addressed complex variables as eligible
6460 for promotion to gimple registers. We'll transform their uses
6461 as we find them. */
6462 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
6463 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
6464 && !TREE_THIS_VOLATILE (parm)
6465 && !needs_to_live_in_memory (parm))
6466 DECL_GIMPLE_REG_P (parm) = 1;
6467 }
6468
6469 ret = DECL_RESULT (fndecl);
6470 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
6471 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
6472 && !needs_to_live_in_memory (ret))
6473 DECL_GIMPLE_REG_P (ret) = 1;
6474
6475 gimplify_body (&DECL_SAVED_TREE (fndecl), fndecl, true);
6476
6477 /* If we're instrumenting function entry/exit, then prepend the call to
6478 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
6479 catch the exit hook. */
6480 /* ??? Add some way to ignore exceptions for this TFE. */
6481 if (flag_instrument_function_entry_exit
6482 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl))
6483 {
6484 tree tf, x, bind;
6485
6486 tf = build2 (TRY_FINALLY_EXPR, void_type_node, NULL, NULL);
6487 TREE_SIDE_EFFECTS (tf) = 1;
6488 x = DECL_SAVED_TREE (fndecl);
6489 append_to_statement_list (x, &TREE_OPERAND (tf, 0));
6490 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_EXIT];
6491 x = build_call_expr (x, 0);
6492 append_to_statement_list (x, &TREE_OPERAND (tf, 1));
6493
6494 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
6495 TREE_SIDE_EFFECTS (bind) = 1;
6496 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_ENTER];
6497 x = build_call_expr (x, 0);
6498 append_to_statement_list (x, &BIND_EXPR_BODY (bind));
6499 append_to_statement_list (tf, &BIND_EXPR_BODY (bind));
6500
6501 DECL_SAVED_TREE (fndecl) = bind;
6502 }
6503
6504 cfun->gimplified = true;
6505 current_function_decl = oldfn;
6506 cfun = oldfn ? DECL_STRUCT_FUNCTION (oldfn) : NULL;
6507 }
6508 \f
6509 /* Expands EXPR to list of gimple statements STMTS. If SIMPLE is true,
6510 force the result to be either ssa_name or an invariant, otherwise
6511 just force it to be a rhs expression. If VAR is not NULL, make the
6512 base variable of the final destination be VAR if suitable. */
6513
6514 tree
6515 force_gimple_operand (tree expr, tree *stmts, bool simple, tree var)
6516 {
6517 tree t;
6518 enum gimplify_status ret;
6519 gimple_predicate gimple_test_f;
6520
6521 *stmts = NULL_TREE;
6522
6523 if (is_gimple_val (expr))
6524 return expr;
6525
6526 gimple_test_f = simple ? is_gimple_val : is_gimple_reg_rhs;
6527
6528 push_gimplify_context ();
6529 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
6530
6531 if (var)
6532 expr = build_gimple_modify_stmt (var, expr);
6533
6534 ret = gimplify_expr (&expr, stmts, NULL,
6535 gimple_test_f, fb_rvalue);
6536 gcc_assert (ret != GS_ERROR);
6537
6538 if (gimple_referenced_vars (cfun))
6539 {
6540 for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
6541 add_referenced_var (t);
6542 }
6543
6544 pop_gimplify_context (NULL);
6545
6546 return expr;
6547 }
6548
6549 /* Invokes force_gimple_operand for EXPR with parameters SIMPLE_P and VAR. If
6550 some statements are produced, emits them before BSI. */
6551
6552 tree
6553 force_gimple_operand_bsi (block_stmt_iterator *bsi, tree expr,
6554 bool simple_p, tree var)
6555 {
6556 tree stmts;
6557
6558 expr = force_gimple_operand (expr, &stmts, simple_p, var);
6559 if (stmts)
6560 bsi_insert_before (bsi, stmts, BSI_SAME_STMT);
6561
6562 return expr;
6563 }
6564
6565 #include "gt-gimplify.h"