/* gcc/gimplify.c — tree lowering pass (GENERIC -> GIMPLE).  */
1/* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
d1e082c2 3 Copyright (C) 2002-2013 Free Software Foundation, Inc.
6de9cd9a
DN
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
6
7This file is part of GCC.
8
9GCC is free software; you can redistribute it and/or modify it under
10the terms of the GNU General Public License as published by the Free
9dcd6f09 11Software Foundation; either version 3, or (at your option) any later
6de9cd9a
DN
12version.
13
14GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15WARRANTY; without even the implied warranty of MERCHANTABILITY or
16FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17for more details.
18
19You should have received a copy of the GNU General Public License
9dcd6f09
NC
20along with GCC; see the file COPYING3. If not see
21<http://www.gnu.org/licenses/>. */
6de9cd9a
DN
22
23#include "config.h"
24#include "system.h"
25#include "coretypes.h"
6de9cd9a 26#include "tree.h"
d8a2d370 27#include "expr.h"
2fb9a547
AM
28#include "pointer-set.h"
29#include "hash-table.h"
30#include "basic-block.h"
31#include "tree-ssa-alias.h"
32#include "internal-fn.h"
33#include "gimple-fold.h"
34#include "tree-eh.h"
35#include "gimple-expr.h"
36#include "is-a.h"
18f429e2 37#include "gimple.h"
45b0be94 38#include "gimplify.h"
5be5c238 39#include "gimple-iterator.h"
d8a2d370
DN
40#include "stringpool.h"
41#include "calls.h"
42#include "varasm.h"
43#include "stor-layout.h"
44#include "stmt.h"
45#include "print-tree.h"
726a989a 46#include "tree-iterator.h"
6de9cd9a 47#include "tree-inline.h"
cf835838 48#include "tree-pretty-print.h"
6de9cd9a 49#include "langhooks.h"
442b4905
AM
50#include "bitmap.h"
51#include "gimple-ssa.h"
44de5aeb 52#include "cgraph.h"
442b4905
AM
53#include "tree-cfg.h"
54#include "tree-ssanames.h"
55#include "tree-ssa.h"
718f9c0f 56#include "diagnostic-core.h"
cd3ce9b4 57#include "target.h"
6be42dd4 58#include "splay-tree.h"
0645c1a2 59#include "omp-low.h"
4484a35a 60#include "gimple-low.h"
939b37da 61#include "cilk.h"
6de9cd9a 62
7ee2468b
SB
63#include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
64#include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
953ff289
DN
65
/* Per-variable data-sharing flags recorded while gimplifying OpenMP
   constructs.  These are bit flags OR'ed together in the splay-tree
   value for each variable in a gimplify_omp_ctx.  */

enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,		/* Variable referenced in the region.  */
  GOVD_EXPLICIT = 2,		/* Sharing specified by an explicit clause.  */
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,		/* Temporary declared inside the region.  */
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,
  GOVD_MAP_TO_ONLY = 8192,

  /* Mask selecting the bits that describe the data-sharing class.  */
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
86
726a989a 87
a68ab351
JJ
/* Kind of OpenMP region being gimplified.  Some values double as bit
   masks: ORT_COMBINED_PARALLEL includes the ORT_PARALLEL bit, and
   ORT_UNTIED_TASK includes the ORT_TASK bit, so (type & ORT_TASK)
   style tests match both variants.  */

enum omp_region_type
{
  ORT_WORKSHARE = 0,
  ORT_SIMD = 1,
  ORT_PARALLEL = 2,
  ORT_COMBINED_PARALLEL = 3,
  ORT_TASK = 4,
  ORT_UNTIED_TASK = 5,
  ORT_TEAMS = 8,
  ORT_TARGET_DATA = 16,
  ORT_TARGET = 32
};
100
45852dcc
AM
/* Gimplify hashtable helper.  Describes how the formal-temporary table
   hashes and compares its elt_t entries (value expression -> temporary
   pairs); entries are released with plain free().  */

struct gimplify_hasher : typed_free_remove <elt_t>
{
  typedef elt_t value_type;
  typedef elt_t compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};
110
/* State for gimplifying one function.  Contexts are stacked (one per
   nested gimplification) through prev_context and recycled via the
   ctx_pool free list below.  */

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;	/* Enclosing context, if any.  */

  vec<gimple> bind_expr_stack;	/* Stack of open GIMPLE_BINDs, innermost last.  */
  tree temps;			/* Chain of temporaries created so far.  */
  gimple_seq conditional_cleanups; /* Cleanups seen while inside a COND_EXPR.  */
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table <gimplify_hasher> temp_htab;

  int conditions;		/* Nesting depth of COND_EXPRs.  */
  bool save_stack;		/* A stack save/restore pair is needed.  */
  bool into_ssa;		/* Create SSA names for register temporaries.  */
  bool allow_rhs_cond_expr;
  bool in_cleanup_point_expr;
};
131
/* State for the innermost OpenMP region being gimplified.  Like
   gimplify_ctx, these form a stack via outer_context.  */

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;		/* DECL -> gimplify_omp_var_data flags.  */
  struct pointer_set_t *privatized_types; /* Types already privatized here.  */
  location_t location;		/* Location of the construct, for diagnostics.  */
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  bool combined_loop;		/* True for e.g. "parallel for" combined form.  */
};
142
45852dcc 143static struct gimplify_ctx *gimplify_ctxp;
953ff289
DN
144static struct gimplify_omp_ctx *gimplify_omp_ctxp;
145
ad19c4be 146/* Forward declaration. */
726a989a 147static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
eb6127a4 148
a1a6c5b2
JJ
149/* Shorter alias name for the above function for use in gimplify.c
150 only. */
151
152static inline void
153gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
154{
155 gimple_seq_add_stmt_without_update (seq_p, gs);
156}
157
726a989a
RB
158/* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
159 NULL, a new sequence is allocated. This function is
160 similar to gimple_seq_add_seq, but does not scan the operands.
161 During gimplification, we need to manipulate statement sequences
162 before the def/use vectors have been constructed. */
163
164static void
165gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
166{
167 gimple_stmt_iterator si;
168
169 if (src == NULL)
170 return;
171
726a989a
RB
172 si = gsi_last (*dst_p);
173 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
174}
175
45852dcc
AM
176
177/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
178 and popping gimplify contexts. */
179
180static struct gimplify_ctx *ctx_pool = NULL;
181
182/* Return a gimplify context struct from the pool. */
183
184static inline struct gimplify_ctx *
185ctx_alloc (void)
186{
187 struct gimplify_ctx * c = ctx_pool;
188
189 if (c)
190 ctx_pool = c->prev_context;
191 else
192 c = XNEW (struct gimplify_ctx);
193
194 memset (c, '\0', sizeof (*c));
195 return c;
196}
197
198/* Put gimplify context C back into the pool. */
199
200static inline void
201ctx_free (struct gimplify_ctx *c)
202{
203 c->prev_context = ctx_pool;
204 ctx_pool = c;
205}
206
207/* Free allocated ctx stack memory. */
208
209void
210free_gimplify_stack (void)
211{
212 struct gimplify_ctx *c;
213
214 while ((c = ctx_pool))
215 {
216 ctx_pool = c->prev_context;
217 free (c);
218 }
219}
220
221
6de9cd9a
DN
222/* Set up a context for the gimplifier. */
223
224void
45852dcc 225push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
6de9cd9a 226{
45852dcc
AM
227 struct gimplify_ctx *c = ctx_alloc ();
228
953ff289 229 c->prev_context = gimplify_ctxp;
953ff289 230 gimplify_ctxp = c;
45852dcc
AM
231 gimplify_ctxp->into_ssa = in_ssa;
232 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
6de9cd9a
DN
233}
234
/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  /* All GIMPLE_BINDs must have been closed by now.  */
  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  /* Hand the accumulated temporaries either to BODY's bind or to the
     function's local declarations.  */
  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  /* Dispose of the formal-temporary table before recycling C.  */
  if (c->temp_htab.is_created ())
    c->temp_htab.dispose ();
  ctx_free (c);
}
261
ad19c4be
EB
262/* Push a GIMPLE_BIND tuple onto the stack of bindings. */
263
c24b7de9 264static void
726a989a 265gimple_push_bind_expr (gimple gimple_bind)
6de9cd9a 266{
9771b263
DN
267 gimplify_ctxp->bind_expr_stack.reserve (8);
268 gimplify_ctxp->bind_expr_stack.safe_push (gimple_bind);
6de9cd9a
DN
269}
270
ad19c4be
EB
271/* Pop the first element off the stack of bindings. */
272
c24b7de9 273static void
6de9cd9a
DN
274gimple_pop_bind_expr (void)
275{
9771b263 276 gimplify_ctxp->bind_expr_stack.pop ();
6de9cd9a
DN
277}
278
ad19c4be
EB
279/* Return the first element of the stack of bindings. */
280
726a989a 281gimple
6de9cd9a
DN
282gimple_current_bind_expr (void)
283{
9771b263 284 return gimplify_ctxp->bind_expr_stack.last ();
726a989a
RB
285}
286
ad19c4be 287/* Return the stack of bindings created during gimplification. */
726a989a 288
9771b263 289vec<gimple>
726a989a
RB
290gimple_bind_expr_stack (void)
291{
292 return gimplify_ctxp->bind_expr_stack;
6de9cd9a
DN
293}
294
ad19c4be 295/* Return true iff there is a COND_EXPR between us and the innermost
6de9cd9a
DN
296 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
297
298static bool
299gimple_conditional_context (void)
300{
301 return gimplify_ctxp->conditions > 0;
302}
303
/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  /* On entry to the outermost conditional, no conditional cleanups may
     be pending — they should have been flushed by gimple_pop_condition.  */
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}
315
316/* Note that we've left a COND_EXPR. If we're back at unconditional scope
317 now, add any conditional cleanups we've seen to the prequeue. */
318
319static void
726a989a 320gimple_pop_condition (gimple_seq *pre_p)
6de9cd9a
DN
321{
322 int conds = --(gimplify_ctxp->conditions);
aa4a53af 323
282899df 324 gcc_assert (conds >= 0);
6de9cd9a
DN
325 if (conds == 0)
326 {
726a989a
RB
327 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
328 gimplify_ctxp->conditional_cleanups = NULL;
6de9cd9a 329 }
6de9cd9a
DN
330}
331
953ff289
DN
332/* A stable comparison routine for use with splay trees and DECLs. */
333
334static int
335splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
336{
337 tree a = (tree) xa;
338 tree b = (tree) xb;
339
340 return DECL_UID (a) - DECL_UID (b);
341}
342
/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = pointer_set_create ();
  c->location = input_location;
  c->region_type = region_type;
  /* Task regions default to "unspecified"; everything else to shared.
     ORT_TASK is tested as a bit so ORT_UNTIED_TASK matches as well.  */
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;

  return c;
}
363
364/* Destroy an omp construct that deals with variable remapping. */
365
366static void
367delete_omp_context (struct gimplify_omp_ctx *c)
368{
369 splay_tree_delete (c->variables);
370 pointer_set_destroy (c->privatized_types);
371 XDELETE (c);
372}
373
374static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
375static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
376
726a989a
RB
377/* Both gimplify the statement T and append it to *SEQ_P. This function
378 behaves exactly as gimplify_stmt, but you don't have to pass T as a
379 reference. */
cd3ce9b4
JM
380
381void
726a989a
RB
382gimplify_and_add (tree t, gimple_seq *seq_p)
383{
384 gimplify_stmt (&t, seq_p);
385}
386
387/* Gimplify statement T into sequence *SEQ_P, and return the first
388 tuple in the sequence of generated tuples for this statement.
389 Return NULL if gimplifying T produced no tuples. */
390
391static gimple
392gimplify_and_return_first (tree t, gimple_seq *seq_p)
cd3ce9b4 393{
726a989a
RB
394 gimple_stmt_iterator last = gsi_last (*seq_p);
395
396 gimplify_and_add (t, seq_p);
397
398 if (!gsi_end_p (last))
399 {
400 gsi_next (&last);
401 return gsi_stmt (last);
402 }
403 else
404 return gimple_seq_first_stmt (*seq_p);
cd3ce9b4
JM
405}
406
216820a4
RG
407/* Returns true iff T is a valid RHS for an assignment to an un-renamed
408 LHS, or for a call argument. */
409
410static bool
411is_gimple_mem_rhs (tree t)
412{
413 /* If we're dealing with a renamable type, either source or dest must be
414 a renamed variable. */
415 if (is_gimple_reg_type (TREE_TYPE (t)))
416 return is_gimple_val (t);
417 else
418 return is_gimple_val (t) || is_gimple_lvalue (t);
419}
420
726a989a 421/* Return true if T is a CALL_EXPR or an expression that can be
12947319 422 assigned to a temporary. Note that this predicate should only be
726a989a
RB
423 used during gimplification. See the rationale for this in
424 gimplify_modify_expr. */
425
426static bool
ba4d8f9d 427is_gimple_reg_rhs_or_call (tree t)
726a989a 428{
ba4d8f9d
RG
429 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
430 || TREE_CODE (t) == CALL_EXPR);
726a989a
RB
431}
432
433/* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
434 this predicate should only be used during gimplification. See the
435 rationale for this in gimplify_modify_expr. */
436
437static bool
ba4d8f9d 438is_gimple_mem_rhs_or_call (tree t)
726a989a
RB
439{
440 /* If we're dealing with a renamable type, either source or dest must be
050bbfeb
RG
441 a renamed variable. */
442 if (is_gimple_reg_type (TREE_TYPE (t)))
726a989a
RB
443 return is_gimple_val (t);
444 else
ba4d8f9d
RG
445 return (is_gimple_val (t) || is_gimple_lvalue (t)
446 || TREE_CODE (t) == CALL_EXPR);
726a989a
RB
447}
448
2ad728d2
RG
449/* Create a temporary with a name derived from VAL. Subroutine of
450 lookup_tmp_var; nobody else should call this function. */
451
452static inline tree
453create_tmp_from_val (tree val, bool is_formal)
454{
455 /* Drop all qualifiers and address-space information from the value type. */
456 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
457 tree var = create_tmp_var (type, get_name (val));
458 if (is_formal
459 && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
460 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE))
461 DECL_GIMPLE_REG_P (var) = 1;
462 return var;
463}
464
/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val, is_formal);
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      /* Key the lookup by the value expression; the table itself is
	 created lazily on the first reuse attempt.  */
      elt.val = val;
      if (!gimplify_ctxp->temp_htab.is_created ())
	gimplify_ctxp->temp_htab.create (1000);
      slot = gimplify_ctxp->temp_htab.find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  /* First occurrence of VAL: create a fresh temporary and
	     remember it for later reuse.  */
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val, is_formal);
	  *slot = elt_p;
	}
      else
	{
	  /* Reuse the temporary already created for this value.  */
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}
505
/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  Gimplify
   VAL into a simple rvalue, assign it to a (possibly reused) temporary
   or SSA name, emit the initialization into *PRE_P, and return the
   temporary.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  /* In SSA mode a register-type value gets a fresh SSA name rather
     than a VAR_DECL temporary.  */
  if (gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)), NULL);
  else
    t = lookup_tmp_var (val, is_formal);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  /* MOD was only a carrier for the initialization; release it.  */
  ggc_free (mod);

  return t;
}
535
ad19c4be 536/* Return a formal temporary variable initialized with VAL. PRE_P is as
ba4d8f9d
RG
537 in gimplify_expr. Only use this function if:
538
539 1) The value of the unfactored expression represented by VAL will not
540 change between the initialization and use of the temporary, and
541 2) The temporary will not be otherwise modified.
542
543 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
544 and #2 means it is inappropriate for && temps.
545
546 For other cases, use get_initialized_tmp_var instead. */
50674e96 547
6de9cd9a 548tree
726a989a 549get_formal_tmp_var (tree val, gimple_seq *pre_p)
6de9cd9a
DN
550{
551 return internal_get_tmp_var (val, pre_p, NULL, true);
552}
553
ad19c4be 554/* Return a temporary variable initialized with VAL. PRE_P and POST_P
6de9cd9a
DN
555 are as in gimplify_expr. */
556
557tree
726a989a 558get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
6de9cd9a
DN
559{
560 return internal_get_tmp_var (val, pre_p, post_p, false);
561}
562
ad19c4be
EB
/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  SCOPE must be a
   GIMPLE_BIND; VARS is a DECL_CHAIN-linked list, built most-recent
   first, which is reversed before being attached.  */

void
declare_vars (tree vars, gimple scope, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gcc_assert (gimple_code (scope) == GIMPLE_BIND);

      /* VARS was accumulated in reverse; restore declaration order.  */
      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  /* No debug info wanted: just prepend TEMPS to the bind's vars.
	     LAST is now the tail of TEMPS after nreverse.  */
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}
602
a441447f
OH
603/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
604 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
605 no such upper bound can be obtained. */
606
607static void
608force_constant_size (tree var)
609{
610 /* The only attempt we make is by querying the maximum size of objects
611 of the variable's type. */
612
613 HOST_WIDE_INT max_size;
614
615 gcc_assert (TREE_CODE (var) == VAR_DECL);
616
617 max_size = max_int_size_in_bytes (TREE_TYPE (var));
618
619 gcc_assert (max_size >= 0);
620
621 DECL_SIZE_UNIT (var)
622 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
623 DECL_SIZE (var)
624 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
625}
626
ad19c4be
EB
/* Push the temporary variable TMP into the current binding.  TMP must
   not already be chained anywhere (checked below).  Depending on the
   current state, TMP lands in the gimplify context's temporaries, the
   current function's local decls, or — for nested functions — the
   outermost GIMPLE_BIND of the current function's body.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      /* Chain onto the current gimplify context's temporaries.  */
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  /* Worksharing and simd regions don't own variables; walk out
	     to the enclosing parallel/task/target region, if any.  */
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD))
	    ctx = ctx->outer_context;
	  if (ctx)
	    omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}
672
726a989a 673
616f1431
EB
674\f
675/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
676 nodes that are referenced more than once in GENERIC functions. This is
677 necessary because gimplification (translation into GIMPLE) is performed
678 by modifying tree nodes in-place, so gimplication of a shared node in a
679 first context could generate an invalid GIMPLE form in a second context.
680
681 This is achieved with a simple mark/copy/unmark algorithm that walks the
682 GENERIC representation top-down, marks nodes with TREE_VISITED the first
683 time it encounters them, duplicates them if they already have TREE_VISITED
684 set, and finally removes the TREE_VISITED marks it has set.
685
686 The algorithm works only at the function level, i.e. it generates a GENERIC
687 representation of a function with no nodes shared within the function when
688 passed a GENERIC function (except for nodes that are allowed to be shared).
689
690 At the global level, it is also necessary to unshare tree nodes that are
691 referenced in more than one function, for the same aforementioned reason.
692 This requires some cooperation from the front-end. There are 2 strategies:
693
694 1. Manual unsharing. The front-end needs to call unshare_expr on every
695 expression that might end up being shared across functions.
696
697 2. Deep unsharing. This is an extension of regular unsharing. Instead
698 of calling unshare_expr on expressions that might be shared across
699 functions, the front-end pre-marks them with TREE_VISITED. This will
700 ensure that they are unshared on the first reference within functions
701 when the regular unsharing algorithm runs. The counterpart is that
702 this algorithm must look deeper than for manual unsharing, which is
703 specified by LANG_HOOKS_DEEP_UNSHARING.
704
705 If there are only few specific cases of node sharing across functions, it is
706 probably easier for a front-end to unshare the expressions manually. On the
707 contrary, if the expressions generated at the global level are as widespread
708 as expressions generated within functions, deep unsharing is very likely the
709 way to go. */
710
/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.

   This is a walk_tree callback: *WALK_SUBTREES is cleared to stop the
   walk from descending, and the return value is always NULL_TREE
   (continue walking).  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      /* pointer_set_insert returns false on first insertion, so the
	 subtrees are walked exactly once per node.  */
      if (data && !pointer_set_insert ((struct pointer_set_t *)data, t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant
	   /* We can't do anything sensible with a BLOCK used as an
	      expression, but we also can't just die when we see it
	      because of non-expression uses.  So we avert our eyes
	      and cross our fingers.  Silly Java.  */
	   || code == BLOCK)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}
754
3ad065ef
EB
/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.

   This implements the "mark" and "copy" phases of the mark/copy/unmark
   unsharing algorithm described at the top of this page; TREE_VISITED is
   the mark.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      /* mostly_copy_tree_r replaces *TP with a fresh copy; the copy is
	 unvisited, so the walk over the copy starts clean.  */
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}
793
3ad065ef
EB
794/* Unshare most of the shared trees rooted at *TP. DATA is passed to the
795 copy_if_shared_r callback unmodified. */
6de9cd9a 796
616f1431 797static inline void
3ad065ef 798copy_if_shared (tree *tp, void *data)
616f1431 799{
3ad065ef 800 walk_tree (tp, copy_if_shared_r, data, NULL);
6de9cd9a
DN
801}
802
3ad065ef
EB
/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_get_node (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  struct pointer_set_t *visited
    = lang_hooks.deep_unsharing ? pointer_set_create () : NULL;

  /* Unshare the body itself, plus the (possibly variable) size trees of
     the return value.  */
  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  if (visited)
    pointer_set_destroy (visited);

  /* Recurse into functions nested inside FNDECL.  */
  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unshare_body (cgn->decl);
}
826
616f1431
EB
827/* Callback for walk_tree to unmark the visited trees rooted at *TP.
828 Subtrees are walked until the first unvisited node is encountered. */
829
830static tree
831unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
832{
833 tree t = *tp;
834
835 /* If this node has been visited, unmark it and keep looking. */
836 if (TREE_VISITED (t))
837 TREE_VISITED (t) = 0;
838
839 /* Otherwise, don't look any deeper. */
840 else
841 *walk_subtrees = 0;
842
843 return NULL_TREE;
844}
845
846/* Unmark the visited trees rooted at *TP. */
847
848static inline void
849unmark_visited (tree *tp)
850{
851 walk_tree (tp, unmark_visited_r, NULL, NULL);
852}
853
44de5aeb
RK
854/* Likewise, but mark all trees as not visited. */
855
856static void
3ad065ef 857unvisit_body (tree fndecl)
44de5aeb 858{
9f9ebcdf 859 struct cgraph_node *cgn = cgraph_get_node (fndecl);
44de5aeb 860
3ad065ef
EB
861 unmark_visited (&DECL_SAVED_TREE (fndecl));
862 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
863 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
616f1431 864
3ad065ef 865 if (cgn)
48eb4e53 866 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
67348ccc 867 unvisit_body (cgn->decl);
44de5aeb
RK
868}
869
6de9cd9a
DN
870/* Unconditionally make an unshared copy of EXPR. This is used when using
871 stored expressions which span multiple functions, such as BINFO_VTABLE,
872 as the normal unsharing process can't tell that they're shared. */
873
874tree
875unshare_expr (tree expr)
876{
877 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
878 return expr;
879}
d1f98542
RB
880
881/* Worker for unshare_expr_without_location. */
882
883static tree
884prune_expr_location (tree *tp, int *walk_subtrees, void *)
885{
886 if (EXPR_P (*tp))
887 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
888 else
889 *walk_subtrees = 0;
890 return NULL_TREE;
891}
892
893/* Similar to unshare_expr but also prune all expression locations
894 from EXPR. */
895
896tree
897unshare_expr_without_location (tree expr)
898{
899 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
900 if (EXPR_P (expr))
901 walk_tree (&expr, prune_expr_location, NULL, NULL);
902 return expr;
903}
6de9cd9a
DN
904\f
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.

   TEMP, if non-null, is an INIT_EXPR/MODIFY_EXPR whose RHS slot receives
   the wrapper's value expression; otherwise a fresh "retval" temporary
   is created.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  Each wrapper passed through is
	 voidified on the way down.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		/* The value, if any, is produced by the last statement.  */
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  /* No assignment supplied: capture the value in a fresh
	     temporary via an INIT_EXPR spliced in place of the body.  */
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
1001
1002/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1ea7e6ad 1003 a temporary through which they communicate. */
6de9cd9a
DN
1004
1005static void
726a989a 1006build_stack_save_restore (gimple *save, gimple *restore)
6de9cd9a 1007{
726a989a 1008 tree tmp_var;
6de9cd9a 1009
e79983f4 1010 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
6de9cd9a 1011 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
726a989a 1012 gimple_call_set_lhs (*save, tmp_var);
6de9cd9a 1013
ad19c4be 1014 *restore
e79983f4 1015 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
ad19c4be 1016 1, tmp_var);
6de9cd9a
DN
1017}
1018
1019/* Gimplify a BIND_EXPR. Just voidify and recurse. */
1020
1021static enum gimplify_status
726a989a 1022gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a
DN
1023{
1024 tree bind_expr = *expr_p;
6de9cd9a
DN
1025 bool old_save_stack = gimplify_ctxp->save_stack;
1026 tree t;
726a989a 1027 gimple gimple_bind;
47598145
MM
1028 gimple_seq body, cleanup;
1029 gimple stack_save;
6de9cd9a 1030
c6c7698d 1031 tree temp = voidify_wrapper_expr (bind_expr, NULL);
325c3691 1032
6de9cd9a 1033 /* Mark variables seen in this bind expr. */
910ad8de 1034 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
e41d82f5 1035 {
820cc88f 1036 if (TREE_CODE (t) == VAR_DECL)
8cb86b65
JJ
1037 {
1038 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1039
1040 /* Mark variable as local. */
144f4153 1041 if (ctx && !DECL_EXTERNAL (t)
8cb86b65
JJ
1042 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1043 || splay_tree_lookup (ctx->variables,
1044 (splay_tree_key) t) == NULL))
1045 omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);
1046
1047 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
fc3103e7
JJ
1048
1049 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1050 cfun->has_local_explicit_reg_vars = true;
8cb86b65 1051 }
e41d82f5
RH
1052
1053 /* Preliminarily mark non-addressed complex variables as eligible
1054 for promotion to gimple registers. We'll transform their uses
bd2e63a1
RG
1055 as we find them. */
1056 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1057 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
e41d82f5
RH
1058 && !TREE_THIS_VOLATILE (t)
1059 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1060 && !needs_to_live_in_memory (t))
0890b981 1061 DECL_GIMPLE_REG_P (t) = 1;
e41d82f5 1062 }
6de9cd9a 1063
726a989a
RB
1064 gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1065 BIND_EXPR_BLOCK (bind_expr));
1066 gimple_push_bind_expr (gimple_bind);
1067
6de9cd9a
DN
1068 gimplify_ctxp->save_stack = false;
1069
726a989a
RB
1070 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1071 body = NULL;
1072 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1073 gimple_bind_set_body (gimple_bind, body);
6de9cd9a 1074
47598145
MM
1075 cleanup = NULL;
1076 stack_save = NULL;
6de9cd9a
DN
1077 if (gimplify_ctxp->save_stack)
1078 {
47598145 1079 gimple stack_restore;
6de9cd9a
DN
1080
1081 /* Save stack on entry and restore it on exit. Add a try_finally
98906124 1082 block to achieve this. */
6de9cd9a
DN
1083 build_stack_save_restore (&stack_save, &stack_restore);
1084
726a989a 1085 gimplify_seq_add_stmt (&cleanup, stack_restore);
47598145
MM
1086 }
1087
1088 /* Add clobbers for all variables that go out of scope. */
1089 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1090 {
1091 if (TREE_CODE (t) == VAR_DECL
1092 && !is_global_var (t)
1093 && DECL_CONTEXT (t) == current_function_decl
1094 && !DECL_HARD_REGISTER (t)
1095 && !TREE_THIS_VOLATILE (t)
1096 && !DECL_HAS_VALUE_EXPR_P (t)
1097 /* Only care for variables that have to be in memory. Others
1098 will be rewritten into SSA names, hence moved to the top-level. */
87e2a8fd
XDL
1099 && !is_gimple_reg (t)
1100 && flag_stack_reuse != SR_NONE)
47598145 1101 {
9771b263
DN
1102 tree clobber = build_constructor (TREE_TYPE (t),
1103 NULL);
47598145
MM
1104 TREE_THIS_VOLATILE (clobber) = 1;
1105 gimplify_seq_add_stmt (&cleanup, gimple_build_assign (t, clobber));
1106 }
1107 }
1108
1109 if (cleanup)
1110 {
1111 gimple gs;
1112 gimple_seq new_body;
1113
1114 new_body = NULL;
726a989a
RB
1115 gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
1116 GIMPLE_TRY_FINALLY);
6de9cd9a 1117
47598145
MM
1118 if (stack_save)
1119 gimplify_seq_add_stmt (&new_body, stack_save);
726a989a
RB
1120 gimplify_seq_add_stmt (&new_body, gs);
1121 gimple_bind_set_body (gimple_bind, new_body);
6de9cd9a
DN
1122 }
1123
1124 gimplify_ctxp->save_stack = old_save_stack;
1125 gimple_pop_bind_expr ();
1126
726a989a
RB
1127 gimplify_seq_add_stmt (pre_p, gimple_bind);
1128
6de9cd9a
DN
1129 if (temp)
1130 {
1131 *expr_p = temp;
6de9cd9a
DN
1132 return GS_OK;
1133 }
726a989a
RB
1134
1135 *expr_p = NULL_TREE;
1136 return GS_ALL_DONE;
6de9cd9a
DN
1137}
1138
1139/* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1140 GIMPLE value, it is assigned to a new temporary and the statement is
1141 re-written to return the temporary.
1142
726a989a 1143 PRE_P points to the sequence where side effects that must happen before
6de9cd9a
DN
1144 STMT should be stored. */
1145
1146static enum gimplify_status
726a989a 1147gimplify_return_expr (tree stmt, gimple_seq *pre_p)
6de9cd9a 1148{
726a989a 1149 gimple ret;
6de9cd9a 1150 tree ret_expr = TREE_OPERAND (stmt, 0);
71877985 1151 tree result_decl, result;
6de9cd9a 1152
726a989a
RB
1153 if (ret_expr == error_mark_node)
1154 return GS_ERROR;
1155
939b37da
BI
1156 /* Implicit _Cilk_sync must be inserted right before any return statement
1157 if there is a _Cilk_spawn in the function. If the user has provided a
1158 _Cilk_sync, the optimizer should remove this duplicate one. */
1159 if (fn_contains_cilk_spawn_p (cfun))
1160 {
1161 tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
1162 gimplify_and_add (impl_sync, pre_p);
1163 }
1164
726a989a
RB
1165 if (!ret_expr
1166 || TREE_CODE (ret_expr) == RESULT_DECL
55e99d52 1167 || ret_expr == error_mark_node)
726a989a
RB
1168 {
1169 gimple ret = gimple_build_return (ret_expr);
1170 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1171 gimplify_seq_add_stmt (pre_p, ret);
1172 return GS_ALL_DONE;
1173 }
6de9cd9a 1174
6de9cd9a 1175 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
71877985 1176 result_decl = NULL_TREE;
6de9cd9a
DN
1177 else
1178 {
726a989a
RB
1179 result_decl = TREE_OPERAND (ret_expr, 0);
1180
1181 /* See through a return by reference. */
cc77ae10 1182 if (TREE_CODE (result_decl) == INDIRECT_REF)
cc77ae10 1183 result_decl = TREE_OPERAND (result_decl, 0);
282899df
NS
1184
1185 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1186 || TREE_CODE (ret_expr) == INIT_EXPR)
1187 && TREE_CODE (result_decl) == RESULT_DECL);
6de9cd9a
DN
1188 }
1189
71877985
RH
1190 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1191 Recall that aggregate_value_p is FALSE for any aggregate type that is
1192 returned in registers. If we're returning values in registers, then
1193 we don't want to extend the lifetime of the RESULT_DECL, particularly
d3147f64 1194 across another call. In addition, for those aggregates for which
535a42b1 1195 hard_function_value generates a PARALLEL, we'll die during normal
71877985
RH
1196 expansion of structure assignments; there's special code in expand_return
1197 to handle this case that does not exist in expand_expr. */
ca361dec
EB
1198 if (!result_decl)
1199 result = NULL_TREE;
1200 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1201 {
1202 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1203 {
1204 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1205 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1206 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1207 should be effectively allocated by the caller, i.e. all calls to
1208 this function must be subject to the Return Slot Optimization. */
1209 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1210 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1211 }
1212 result = result_decl;
1213 }
71877985
RH
1214 else if (gimplify_ctxp->return_temp)
1215 result = gimplify_ctxp->return_temp;
1216 else
1217 {
acd63801 1218 result = create_tmp_reg (TREE_TYPE (result_decl), NULL);
ff98621c
RH
1219
1220 /* ??? With complex control flow (usually involving abnormal edges),
1221 we can wind up warning about an uninitialized value for this. Due
1222 to how this variable is constructed and initialized, this is never
1223 true. Give up and never warn. */
1224 TREE_NO_WARNING (result) = 1;
1225
71877985
RH
1226 gimplify_ctxp->return_temp = result;
1227 }
1228
726a989a 1229 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
71877985
RH
1230 Then gimplify the whole thing. */
1231 if (result != result_decl)
726a989a 1232 TREE_OPERAND (ret_expr, 0) = result;
fff34d35
RK
1233
1234 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
6de9cd9a 1235
726a989a
RB
1236 ret = gimple_build_return (result);
1237 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1238 gimplify_seq_add_stmt (pre_p, ret);
6de9cd9a 1239
6de9cd9a
DN
1240 return GS_ALL_DONE;
1241}
1242
ad19c4be
EB
1243/* Gimplify a variable-length array DECL. */
1244
786025ea 1245static void
726a989a 1246gimplify_vla_decl (tree decl, gimple_seq *seq_p)
786025ea
JJ
1247{
1248 /* This is a variable-sized decl. Simplify its size and mark it
98906124 1249 for deferred expansion. */
786025ea
JJ
1250 tree t, addr, ptr_type;
1251
726a989a
RB
1252 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1253 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
786025ea 1254
0138d6b2
JM
1255 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1256 if (DECL_HAS_VALUE_EXPR_P (decl))
1257 return;
1258
786025ea
JJ
1259 /* All occurrences of this decl in final gimplified code will be
1260 replaced by indirection. Setting DECL_VALUE_EXPR does two
1261 things: First, it lets the rest of the gimplifier know what
1262 replacement to use. Second, it lets the debug info know
1263 where to find the value. */
1264 ptr_type = build_pointer_type (TREE_TYPE (decl));
1265 addr = create_tmp_var (ptr_type, get_name (decl));
1266 DECL_IGNORED_P (addr) = 0;
1267 t = build_fold_indirect_ref (addr);
31408f60 1268 TREE_THIS_NOTRAP (t) = 1;
786025ea
JJ
1269 SET_DECL_VALUE_EXPR (decl, t);
1270 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1271
e79983f4 1272 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
13e49da9
TV
1273 t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
1274 size_int (DECL_ALIGN (decl)));
d3c12306 1275 /* The call has been built for a variable-sized object. */
63d2a353 1276 CALL_ALLOCA_FOR_VAR_P (t) = 1;
786025ea 1277 t = fold_convert (ptr_type, t);
726a989a 1278 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
786025ea 1279
726a989a 1280 gimplify_and_add (t, seq_p);
786025ea
JJ
1281
1282 /* Indicate that we need to restore the stack level when the
1283 enclosing BIND_EXPR is exited. */
1284 gimplify_ctxp->save_stack = true;
1285}
1286
45b0be94
AM
1287/* A helper function to be called via walk_tree. Mark all labels under *TP
1288 as being forced. To be called for DECL_INITIAL of static variables. */
1289
1290static tree
1291force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1292{
1293 if (TYPE_P (*tp))
1294 *walk_subtrees = 0;
1295 if (TREE_CODE (*tp) == LABEL_DECL)
1296 FORCED_LABEL (*tp) = 1;
1297
1298 return NULL_TREE;
1299}
1300
ad19c4be 1301/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
350fae66
RK
1302 and initialization explicit. */
1303
1304static enum gimplify_status
726a989a 1305gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
350fae66
RK
1306{
1307 tree stmt = *stmt_p;
1308 tree decl = DECL_EXPR_DECL (stmt);
1309
1310 *stmt_p = NULL_TREE;
1311
1312 if (TREE_TYPE (decl) == error_mark_node)
1313 return GS_ERROR;
1314
8e0a600b
JJ
1315 if ((TREE_CODE (decl) == TYPE_DECL
1316 || TREE_CODE (decl) == VAR_DECL)
1317 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
726a989a 1318 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
350fae66 1319
d400d17e
EB
1320 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1321 in case its size expressions contain problematic nodes like CALL_EXPR. */
1322 if (TREE_CODE (decl) == TYPE_DECL
1323 && DECL_ORIGINAL_TYPE (decl)
1324 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1325 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1326
8e0a600b 1327 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
350fae66
RK
1328 {
1329 tree init = DECL_INITIAL (decl);
1330
b38f3813
EB
1331 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1332 || (!TREE_STATIC (decl)
1333 && flag_stack_check == GENERIC_STACK_CHECK
1334 && compare_tree_int (DECL_SIZE_UNIT (decl),
1335 STACK_CHECK_MAX_VAR_SIZE) > 0))
726a989a 1336 gimplify_vla_decl (decl, seq_p);
350fae66 1337
22192559
JM
1338 /* Some front ends do not explicitly declare all anonymous
1339 artificial variables. We compensate here by declaring the
1340 variables, though it would be better if the front ends would
1341 explicitly declare them. */
1342 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1343 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1344 gimple_add_tmp_var (decl);
1345
350fae66
RK
1346 if (init && init != error_mark_node)
1347 {
1348 if (!TREE_STATIC (decl))
1349 {
1350 DECL_INITIAL (decl) = NULL_TREE;
dae7ec87 1351 init = build2 (INIT_EXPR, void_type_node, decl, init);
726a989a
RB
1352 gimplify_and_add (init, seq_p);
1353 ggc_free (init);
350fae66
RK
1354 }
1355 else
1356 /* We must still examine initializers for static variables
1357 as they may contain a label address. */
1358 walk_tree (&init, force_labels_r, NULL, NULL);
1359 }
350fae66
RK
1360 }
1361
1362 return GS_ALL_DONE;
1363}
1364
6de9cd9a
DN
1365/* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1366 and replacing the LOOP_EXPR with goto, but if the loop contains an
1367 EXIT_EXPR, we need to append a label for it to jump to. */
1368
1369static enum gimplify_status
726a989a 1370gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a
DN
1371{
1372 tree saved_label = gimplify_ctxp->exit_label;
c2255bc4 1373 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
6de9cd9a 1374
726a989a 1375 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
6de9cd9a
DN
1376
1377 gimplify_ctxp->exit_label = NULL_TREE;
1378
fff34d35 1379 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
6de9cd9a 1380
726a989a
RB
1381 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1382
6de9cd9a 1383 if (gimplify_ctxp->exit_label)
ad19c4be
EB
1384 gimplify_seq_add_stmt (pre_p,
1385 gimple_build_label (gimplify_ctxp->exit_label));
726a989a
RB
1386
1387 gimplify_ctxp->exit_label = saved_label;
1388
1389 *expr_p = NULL;
1390 return GS_ALL_DONE;
1391}
1392
ad19c4be 1393/* Gimplify a statement list onto a sequence. These may be created either
726a989a
RB
1394 by an enlightened front-end, or by shortcut_cond_expr. */
1395
1396static enum gimplify_status
1397gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1398{
1399 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1400
1401 tree_stmt_iterator i = tsi_start (*expr_p);
1402
1403 while (!tsi_end_p (i))
6de9cd9a 1404 {
726a989a
RB
1405 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1406 tsi_delink (&i);
6de9cd9a 1407 }
6de9cd9a 1408
726a989a
RB
1409 if (temp)
1410 {
1411 *expr_p = temp;
1412 return GS_OK;
1413 }
6de9cd9a
DN
1414
1415 return GS_ALL_DONE;
1416}
0f1f6967 1417
68e72840
SB
1418\f
1419/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
6de9cd9a
DN
1420 branch to. */
1421
1422static enum gimplify_status
726a989a 1423gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a
DN
1424{
1425 tree switch_expr = *expr_p;
726a989a 1426 gimple_seq switch_body_seq = NULL;
6de9cd9a 1427 enum gimplify_status ret;
0cd2402d
SB
1428 tree index_type = TREE_TYPE (switch_expr);
1429 if (index_type == NULL_TREE)
1430 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
6de9cd9a 1431
726a989a
RB
1432 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
1433 fb_rvalue);
1434 if (ret == GS_ERROR || ret == GS_UNHANDLED)
1435 return ret;
6de9cd9a
DN
1436
1437 if (SWITCH_BODY (switch_expr))
1438 {
9771b263
DN
1439 vec<tree> labels;
1440 vec<tree> saved_labels;
726a989a 1441 tree default_case = NULL_TREE;
726a989a 1442 gimple gimple_switch;
b8698a0f 1443
6de9cd9a
DN
1444 /* If someone can be bothered to fill in the labels, they can
1445 be bothered to null out the body too. */
282899df 1446 gcc_assert (!SWITCH_LABELS (switch_expr));
6de9cd9a 1447
0cd2402d 1448 /* Save old labels, get new ones from body, then restore the old
726a989a 1449 labels. Save all the things from the switch body to append after. */
6de9cd9a 1450 saved_labels = gimplify_ctxp->case_labels;
9771b263 1451 gimplify_ctxp->case_labels.create (8);
6de9cd9a 1452
726a989a 1453 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
6de9cd9a
DN
1454 labels = gimplify_ctxp->case_labels;
1455 gimplify_ctxp->case_labels = saved_labels;
b8698a0f 1456
68e72840
SB
1457 preprocess_case_label_vec_for_gimple (labels, index_type,
1458 &default_case);
32f579f6 1459
726a989a 1460 if (!default_case)
6de9cd9a 1461 {
68e72840 1462 gimple new_default;
6de9cd9a 1463
68e72840
SB
1464 default_case
1465 = build_case_label (NULL_TREE, NULL_TREE,
1466 create_artificial_label (UNKNOWN_LOCATION));
1467 new_default = gimple_build_label (CASE_LABEL (default_case));
1468 gimplify_seq_add_stmt (&switch_body_seq, new_default);
32f579f6 1469 }
f667741c 1470
fd8d363e
SB
1471 gimple_switch = gimple_build_switch (SWITCH_COND (switch_expr),
1472 default_case, labels);
726a989a
RB
1473 gimplify_seq_add_stmt (pre_p, gimple_switch);
1474 gimplify_seq_add_seq (pre_p, switch_body_seq);
9771b263 1475 labels.release ();
6de9cd9a 1476 }
282899df
NS
1477 else
1478 gcc_assert (SWITCH_LABELS (switch_expr));
6de9cd9a 1479
726a989a 1480 return GS_ALL_DONE;
6de9cd9a
DN
1481}
1482
ad19c4be 1483/* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
726a989a 1484
6de9cd9a 1485static enum gimplify_status
726a989a 1486gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a 1487{
953ff289 1488 struct gimplify_ctx *ctxp;
726a989a 1489 gimple gimple_label;
953ff289
DN
1490
1491 /* Invalid OpenMP programs can play Duff's Device type games with
1492 #pragma omp parallel. At least in the C front end, we don't
1493 detect such invalid branches until after gimplification. */
1494 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
9771b263 1495 if (ctxp->case_labels.exists ())
953ff289 1496 break;
282899df 1497
726a989a 1498 gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
9771b263 1499 ctxp->case_labels.safe_push (*expr_p);
726a989a
RB
1500 gimplify_seq_add_stmt (pre_p, gimple_label);
1501
6de9cd9a
DN
1502 return GS_ALL_DONE;
1503}
1504
6de9cd9a
DN
1505/* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1506 if necessary. */
1507
1508tree
1509build_and_jump (tree *label_p)
1510{
1511 if (label_p == NULL)
1512 /* If there's nowhere to jump, just fall through. */
65355d53 1513 return NULL_TREE;
6de9cd9a
DN
1514
1515 if (*label_p == NULL_TREE)
1516 {
c2255bc4 1517 tree label = create_artificial_label (UNKNOWN_LOCATION);
6de9cd9a
DN
1518 *label_p = label;
1519 }
1520
1521 return build1 (GOTO_EXPR, void_type_node, *label_p);
1522}
1523
1524/* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1525 This also involves building a label to jump to and communicating it to
1526 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1527
1528static enum gimplify_status
1529gimplify_exit_expr (tree *expr_p)
1530{
1531 tree cond = TREE_OPERAND (*expr_p, 0);
1532 tree expr;
1533
1534 expr = build_and_jump (&gimplify_ctxp->exit_label);
b4257cfc 1535 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
6de9cd9a
DN
1536 *expr_p = expr;
1537
1538 return GS_OK;
1539}
1540
26d44ae2
RH
1541/* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1542 different from its canonical type, wrap the whole thing inside a
1543 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1544 type.
6de9cd9a 1545
26d44ae2
RH
1546 The canonical type of a COMPONENT_REF is the type of the field being
1547 referenced--unless the field is a bit-field which can be read directly
1548 in a smaller mode, in which case the canonical type is the
1549 sign-appropriate type corresponding to that mode. */
6de9cd9a 1550
26d44ae2
RH
1551static void
1552canonicalize_component_ref (tree *expr_p)
6de9cd9a 1553{
26d44ae2
RH
1554 tree expr = *expr_p;
1555 tree type;
6de9cd9a 1556
282899df 1557 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
6de9cd9a 1558
26d44ae2
RH
1559 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1560 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1561 else
1562 type = TREE_TYPE (TREE_OPERAND (expr, 1));
6de9cd9a 1563
b26c6d55
RG
1564 /* One could argue that all the stuff below is not necessary for
1565 the non-bitfield case and declare it a FE error if type
1566 adjustment would be needed. */
26d44ae2 1567 if (TREE_TYPE (expr) != type)
6de9cd9a 1568 {
b26c6d55 1569#ifdef ENABLE_TYPES_CHECKING
26d44ae2 1570 tree old_type = TREE_TYPE (expr);
b26c6d55
RG
1571#endif
1572 int type_quals;
1573
1574 /* We need to preserve qualifiers and propagate them from
1575 operand 0. */
1576 type_quals = TYPE_QUALS (type)
1577 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1578 if (TYPE_QUALS (type) != type_quals)
1579 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
6de9cd9a 1580
26d44ae2
RH
1581 /* Set the type of the COMPONENT_REF to the underlying type. */
1582 TREE_TYPE (expr) = type;
6de9cd9a 1583
b26c6d55
RG
1584#ifdef ENABLE_TYPES_CHECKING
1585 /* It is now a FE error, if the conversion from the canonical
1586 type to the original expression type is not useless. */
1587 gcc_assert (useless_type_conversion_p (old_type, type));
1588#endif
26d44ae2
RH
1589 }
1590}
6de9cd9a 1591
26d44ae2 1592/* If a NOP conversion is changing a pointer to array of foo to a pointer
d3147f64 1593 to foo, embed that change in the ADDR_EXPR by converting
26d44ae2
RH
1594 T array[U];
1595 (T *)&array
1596 ==>
1597 &array[L]
1598 where L is the lower bound. For simplicity, only do this for constant
04d86531
RG
1599 lower bound.
1600 The constraint is that the type of &array[L] is trivially convertible
1601 to T *. */
6de9cd9a 1602
26d44ae2
RH
1603static void
1604canonicalize_addr_expr (tree *expr_p)
1605{
1606 tree expr = *expr_p;
26d44ae2 1607 tree addr_expr = TREE_OPERAND (expr, 0);
04d86531 1608 tree datype, ddatype, pddatype;
6de9cd9a 1609
04d86531
RG
1610 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
1611 if (!POINTER_TYPE_P (TREE_TYPE (expr))
1612 || TREE_CODE (addr_expr) != ADDR_EXPR)
26d44ae2 1613 return;
6de9cd9a 1614
26d44ae2 1615 /* The addr_expr type should be a pointer to an array. */
04d86531 1616 datype = TREE_TYPE (TREE_TYPE (addr_expr));
26d44ae2
RH
1617 if (TREE_CODE (datype) != ARRAY_TYPE)
1618 return;
6de9cd9a 1619
04d86531
RG
1620 /* The pointer to element type shall be trivially convertible to
1621 the expression pointer type. */
26d44ae2 1622 ddatype = TREE_TYPE (datype);
04d86531 1623 pddatype = build_pointer_type (ddatype);
e5fdcd8c
RG
1624 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
1625 pddatype))
26d44ae2 1626 return;
6de9cd9a 1627
26d44ae2 1628 /* The lower bound and element sizes must be constant. */
04d86531
RG
1629 if (!TYPE_SIZE_UNIT (ddatype)
1630 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
26d44ae2
RH
1631 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1632 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1633 return;
6de9cd9a 1634
26d44ae2 1635 /* All checks succeeded. Build a new node to merge the cast. */
04d86531 1636 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
26d44ae2 1637 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
5852948c 1638 NULL_TREE, NULL_TREE);
04d86531 1639 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
e5fdcd8c
RG
1640
1641 /* We can have stripped a required restrict qualifier above. */
1642 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
1643 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
26d44ae2 1644}
6de9cd9a 1645
26d44ae2
RH
1646/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1647 underneath as appropriate. */
6de9cd9a 1648
26d44ae2
RH
1649static enum gimplify_status
1650gimplify_conversion (tree *expr_p)
d3147f64 1651{
db3927fb 1652 location_t loc = EXPR_LOCATION (*expr_p);
1043771b 1653 gcc_assert (CONVERT_EXPR_P (*expr_p));
c2255bc4 1654
0710ccff
NS
1655 /* Then strip away all but the outermost conversion. */
1656 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1657
1658 /* And remove the outermost conversion if it's useless. */
1659 if (tree_ssa_useless_type_conversion (*expr_p))
1660 *expr_p = TREE_OPERAND (*expr_p, 0);
6de9cd9a 1661
26d44ae2
RH
1662 /* If we still have a conversion at the toplevel,
1663 then canonicalize some constructs. */
1043771b 1664 if (CONVERT_EXPR_P (*expr_p))
26d44ae2
RH
1665 {
1666 tree sub = TREE_OPERAND (*expr_p, 0);
6de9cd9a 1667
26d44ae2
RH
1668 /* If a NOP conversion is changing the type of a COMPONENT_REF
1669 expression, then canonicalize its type now in order to expose more
1670 redundant conversions. */
1671 if (TREE_CODE (sub) == COMPONENT_REF)
1672 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
6de9cd9a 1673
26d44ae2
RH
1674 /* If a NOP conversion is changing a pointer to array of foo
1675 to a pointer to foo, embed that change in the ADDR_EXPR. */
1676 else if (TREE_CODE (sub) == ADDR_EXPR)
1677 canonicalize_addr_expr (expr_p);
1678 }
6de9cd9a 1679
8b17cc05
RG
1680 /* If we have a conversion to a non-register type force the
1681 use of a VIEW_CONVERT_EXPR instead. */
4f934809 1682 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
db3927fb 1683 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
4f934809 1684 TREE_OPERAND (*expr_p, 0));
8b17cc05 1685
6de9cd9a
DN
1686 return GS_OK;
1687}
1688
77f2a970
JJ
1689/* Nonlocal VLAs seen in the current function. */
1690static struct pointer_set_t *nonlocal_vlas;
1691
ad19c4be 1692/* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
a9f7c570
RH
1693 DECL_VALUE_EXPR, and it's worth re-examining things. */
1694
1695static enum gimplify_status
1696gimplify_var_or_parm_decl (tree *expr_p)
1697{
1698 tree decl = *expr_p;
1699
1700 /* ??? If this is a local variable, and it has not been seen in any
1701 outer BIND_EXPR, then it's probably the result of a duplicate
1702 declaration, for which we've already issued an error. It would
1703 be really nice if the front end wouldn't leak these at all.
1704 Currently the only known culprit is C++ destructors, as seen
1705 in g++.old-deja/g++.jason/binding.C. */
1706 if (TREE_CODE (decl) == VAR_DECL
1707 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1708 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1709 && decl_function_context (decl) == current_function_decl)
1710 {
1da2ed5f 1711 gcc_assert (seen_error ());
a9f7c570
RH
1712 return GS_ERROR;
1713 }
1714
953ff289
DN
1715 /* When within an OpenMP context, notice uses of variables. */
1716 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1717 return GS_ALL_DONE;
1718
a9f7c570
RH
1719 /* If the decl is an alias for another expression, substitute it now. */
1720 if (DECL_HAS_VALUE_EXPR_P (decl))
1721 {
77f2a970
JJ
1722 tree value_expr = DECL_VALUE_EXPR (decl);
1723
1724 /* For referenced nonlocal VLAs add a decl for debugging purposes
1725 to the current function. */
1726 if (TREE_CODE (decl) == VAR_DECL
1727 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1728 && nonlocal_vlas != NULL
1729 && TREE_CODE (value_expr) == INDIRECT_REF
1730 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
1731 && decl_function_context (decl) != current_function_decl)
1732 {
1733 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
74bf76ed
JJ
1734 while (ctx
1735 && (ctx->region_type == ORT_WORKSHARE
1736 || ctx->region_type == ORT_SIMD))
77f2a970
JJ
1737 ctx = ctx->outer_context;
1738 if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
1739 {
1740 tree copy = copy_node (decl), block;
1741
1742 lang_hooks.dup_lang_specific_decl (copy);
2eb79bbb 1743 SET_DECL_RTL (copy, 0);
77f2a970
JJ
1744 TREE_USED (copy) = 1;
1745 block = DECL_INITIAL (current_function_decl);
910ad8de 1746 DECL_CHAIN (copy) = BLOCK_VARS (block);
77f2a970
JJ
1747 BLOCK_VARS (block) = copy;
1748 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
1749 DECL_HAS_VALUE_EXPR_P (copy) = 1;
1750 }
1751 }
1752
1753 *expr_p = unshare_expr (value_expr);
a9f7c570
RH
1754 return GS_OK;
1755 }
1756
1757 return GS_ALL_DONE;
1758}
1759
2fb9a547
AM
1760void
1761recalculate_side_effects (tree t)
1762{
1763 enum tree_code code = TREE_CODE (t);
1764 int len = TREE_OPERAND_LENGTH (t);
1765 int i;
1766
1767 switch (TREE_CODE_CLASS (code))
1768 {
1769 case tcc_expression:
1770 switch (code)
1771 {
1772 case INIT_EXPR:
1773 case MODIFY_EXPR:
1774 case VA_ARG_EXPR:
1775 case PREDECREMENT_EXPR:
1776 case PREINCREMENT_EXPR:
1777 case POSTDECREMENT_EXPR:
1778 case POSTINCREMENT_EXPR:
1779 /* All of these have side-effects, no matter what their
1780 operands are. */
1781 return;
1782
1783 default:
1784 break;
1785 }
1786 /* Fall through. */
1787
1788 case tcc_comparison: /* a comparison expression */
1789 case tcc_unary: /* a unary arithmetic expression */
1790 case tcc_binary: /* a binary arithmetic expression */
1791 case tcc_reference: /* a reference */
1792 case tcc_vl_exp: /* a function call */
1793 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
1794 for (i = 0; i < len; ++i)
1795 {
1796 tree op = TREE_OPERAND (t, i);
1797 if (op && TREE_SIDE_EFFECTS (op))
1798 TREE_SIDE_EFFECTS (t) = 1;
1799 }
1800 break;
1801
1802 case tcc_constant:
1803 /* No side-effects. */
1804 return;
1805
1806 default:
1807 gcc_unreachable ();
1808 }
1809}
1810
/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
   node *EXPR_P.

      compound_lval
	      : min_lval '[' val ']'
	      | min_lval '.' ID
	      | compound_lval '[' val ']'
	      | compound_lval '.' ID

   This is not part of the original SIMPLE definition, which separates
   array and member references, but it seems reasonable to handle them
   together.  Also, this way we don't run into problems with union
   aliasing; gcc requires that for accesses through a union to alias, the
   union reference must be explicit, which was not always the case when we
   were splitting up array and member refs.

   PRE_P points to the sequence where side effects that must happen before
     *EXPR_P should be stored.

   POST_P points to the sequence where side effects that must happen after
     *EXPR_P should be stored.

   FALLBACK says what kind of gimple value the caller can accept; the
   fb_lvalue bit is forced on for the base so that no aggregate temporary
   is created.  Returns the usual gimplify_status (GS_ALL_DONE if the
   whole reference was already in gimple form).  */

static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			fallback_t fallback)
{
  tree *p;
  enum gimplify_status ret = GS_ALL_DONE, tret;
  int i;
  location_t loc = EXPR_LOCATION (*expr_p);
  tree expr = *expr_p;

  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  stack_vec<tree, 10> expr_stack;

  /* We can handle anything that get_inner_reference can deal with.  */
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
    {
    restart:
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
      if (TREE_CODE (*p) == INDIRECT_REF)
	*p = fold_indirect_ref_loc (loc, *p);

      if (handled_component_p (*p))
	;
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
	 additional COMPONENT_REFs.  */
      else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
	       && gimplify_var_or_parm_decl (p) == GS_OK)
	goto restart;
      else
	break;

      expr_stack.safe_push (*p);
    }

  /* The caller only invokes us on a handled_component_p node, so the
     stack cannot be empty.  */
  gcc_assert (expr_stack.length ());

  /* Now EXPR_STACK is a stack of pointers to all the refs we've
     walked through and P points to the innermost expression.

     Java requires that we elaborated nodes in source order.  That
     means we must gimplify the inner expression followed by each of
     the indices, in order.  But we can't gimplify the inner
     expression until we deal with any variable bounds, sizes, or
     positions in order to deal with PLACEHOLDER_EXPRs.

     So we do this in three steps.  First we deal with the annotations
     for any variables in the components, then we gimplify the base,
     then we gimplify any indices, from left to right.  */
  for (i = expr_stack.length () - 1; i >= 0; i--)
    {
      tree t = expr_stack[i];

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the low bound and element type size and put them into
	     the ARRAY_REF.  If these values are set, they have already been
	     gimplified.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree low = unshare_expr (array_ref_low_bound (t));
	      if (!is_gimple_min_invariant (low))
		{
		  TREE_OPERAND (t, 2) = low;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }

	  if (TREE_OPERAND (t, 3) == NULL_TREE)
	    {
	      tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
	      tree elmt_size = unshare_expr (array_ref_element_size (t));
	      tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

	      /* Divide the element size by the alignment of the element
		 type (above).  */
	      elmt_size
		= size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);

	      if (!is_gimple_min_invariant (elmt_size))
		{
		  TREE_OPERAND (t, 3) = elmt_size;
		  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  /* Set the field offset into T and gimplify it.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree offset = unshare_expr (component_ref_field_offset (t));
	      tree field = TREE_OPERAND (t, 1);
	      tree factor
		= size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);

	      /* Divide the offset by its alignment.  */
	      offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);

	      if (!is_gimple_min_invariant (offset))
		{
		  TREE_OPERAND (t, 2) = offset;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
    }

  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
     so as to match the min_lval predicate.  Failure to do so may result
     in the creation of large aggregate temporaries.  */
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
			fallback | fb_lvalue);
  ret = MIN (ret, tret);

  /* And finally, the indices and operands of ARRAY_REF.  During this
     loop we also remove any useless conversions.  */
  for (; expr_stack.length () > 0; )
    {
      tree t = expr_stack.pop ();

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the dimension.  */
	  if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				    is_gimple_val, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}

      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));

      /* The innermost expression P may have originally had
	 TREE_SIDE_EFFECTS set which would have caused all the outer
	 expressions in *EXPR_P leading to P to also have had
	 TREE_SIDE_EFFECTS set.  */
      recalculate_side_effects (t);
    }

  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
    {
      canonicalize_component_ref (expr_p);
    }

  expr_stack.release ();

  /* If nothing changed, the caller's expression must already have been
     fully gimple; otherwise some sub-gimplification must have reported
     progress or an error.  */
  gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);

  return ret;
}
2013
206048bd
VR
2014/* Gimplify the self modifying expression pointed to by EXPR_P
2015 (++, --, +=, -=).
6de9cd9a
DN
2016
2017 PRE_P points to the list where side effects that must happen before
2018 *EXPR_P should be stored.
2019
2020 POST_P points to the list where side effects that must happen after
2021 *EXPR_P should be stored.
2022
2023 WANT_VALUE is nonzero iff we want to use the value of this expression
cc3c4f62 2024 in another expression.
6de9cd9a 2025
cc3c4f62
RB
2026 ARITH_TYPE is the type the computation should be performed in. */
2027
2028enum gimplify_status
726a989a 2029gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
cc3c4f62 2030 bool want_value, tree arith_type)
6de9cd9a
DN
2031{
2032 enum tree_code code;
726a989a
RB
2033 tree lhs, lvalue, rhs, t1;
2034 gimple_seq post = NULL, *orig_post_p = post_p;
6de9cd9a
DN
2035 bool postfix;
2036 enum tree_code arith_code;
2037 enum gimplify_status ret;
db3927fb 2038 location_t loc = EXPR_LOCATION (*expr_p);
6de9cd9a
DN
2039
2040 code = TREE_CODE (*expr_p);
2041
282899df
NS
2042 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2043 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
6de9cd9a
DN
2044
2045 /* Prefix or postfix? */
2046 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2047 /* Faster to treat as prefix if result is not used. */
2048 postfix = want_value;
2049 else
2050 postfix = false;
2051
82181741
JJ
2052 /* For postfix, make sure the inner expression's post side effects
2053 are executed after side effects from this expression. */
2054 if (postfix)
2055 post_p = &post;
2056
6de9cd9a
DN
2057 /* Add or subtract? */
2058 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2059 arith_code = PLUS_EXPR;
2060 else
2061 arith_code = MINUS_EXPR;
2062
2063 /* Gimplify the LHS into a GIMPLE lvalue. */
2064 lvalue = TREE_OPERAND (*expr_p, 0);
2065 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2066 if (ret == GS_ERROR)
2067 return ret;
2068
2069 /* Extract the operands to the arithmetic operation. */
2070 lhs = lvalue;
2071 rhs = TREE_OPERAND (*expr_p, 1);
2072
2073 /* For postfix operator, we evaluate the LHS to an rvalue and then use
d97c9b22 2074 that as the result value and in the postqueue operation. */
6de9cd9a
DN
2075 if (postfix)
2076 {
2077 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2078 if (ret == GS_ERROR)
2079 return ret;
6de9cd9a 2080
d97c9b22
JJ
2081 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
2082 }
cc3c4f62 2083
5be014d5
AP
2084 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
2085 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2086 {
0d82a1c8 2087 rhs = convert_to_ptrofftype_loc (loc, rhs);
5be014d5 2088 if (arith_code == MINUS_EXPR)
db3927fb 2089 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
cc3c4f62 2090 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
5be014d5 2091 }
cc3c4f62
RB
2092 else
2093 t1 = fold_convert (TREE_TYPE (*expr_p),
2094 fold_build2 (arith_code, arith_type,
2095 fold_convert (arith_type, lhs),
2096 fold_convert (arith_type, rhs)));
5be014d5 2097
6de9cd9a
DN
2098 if (postfix)
2099 {
cf1867a0 2100 gimplify_assign (lvalue, t1, pre_p);
726a989a 2101 gimplify_seq_add_seq (orig_post_p, post);
cc3c4f62 2102 *expr_p = lhs;
6de9cd9a
DN
2103 return GS_ALL_DONE;
2104 }
2105 else
2106 {
726a989a 2107 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
6de9cd9a
DN
2108 return GS_OK;
2109 }
2110}
2111
d25cee4d
RH
2112/* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2113
2114static void
2115maybe_with_size_expr (tree *expr_p)
2116{
61025d1b
RK
2117 tree expr = *expr_p;
2118 tree type = TREE_TYPE (expr);
2119 tree size;
d25cee4d 2120
61025d1b
RK
2121 /* If we've already wrapped this or the type is error_mark_node, we can't do
2122 anything. */
2123 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2124 || type == error_mark_node)
d25cee4d
RH
2125 return;
2126
61025d1b 2127 /* If the size isn't known or is a constant, we have nothing to do. */
d25cee4d 2128 size = TYPE_SIZE_UNIT (type);
61025d1b
RK
2129 if (!size || TREE_CODE (size) == INTEGER_CST)
2130 return;
2131
2132 /* Otherwise, make a WITH_SIZE_EXPR. */
2133 size = unshare_expr (size);
2134 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2135 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
d25cee4d
RH
2136}
2137
726a989a 2138/* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
1282697f
AH
2139 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2140 the CALL_EXPR. */
e4f78bd4
JM
2141
2142static enum gimplify_status
1282697f 2143gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
e4f78bd4
JM
2144{
2145 bool (*test) (tree);
2146 fallback_t fb;
2147
2148 /* In general, we allow lvalues for function arguments to avoid
2149 extra overhead of copying large aggregates out of even larger
2150 aggregates into temporaries only to copy the temporaries to
2151 the argument list. Make optimizers happy by pulling out to
2152 temporaries those types that fit in registers. */
726a989a 2153 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
e4f78bd4
JM
2154 test = is_gimple_val, fb = fb_rvalue;
2155 else
b4ef8aac
JM
2156 {
2157 test = is_gimple_lvalue, fb = fb_either;
2158 /* Also strip a TARGET_EXPR that would force an extra copy. */
2159 if (TREE_CODE (*arg_p) == TARGET_EXPR)
2160 {
2161 tree init = TARGET_EXPR_INITIAL (*arg_p);
2162 if (init
2163 && !VOID_TYPE_P (TREE_TYPE (init)))
2164 *arg_p = init;
2165 }
2166 }
e4f78bd4 2167
d25cee4d 2168 /* If this is a variable sized type, we must remember the size. */
726a989a 2169 maybe_with_size_expr (arg_p);
d25cee4d 2170
c2255bc4 2171 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
1282697f
AH
2172 /* Make sure arguments have the same location as the function call
2173 itself. */
2174 protected_set_expr_location (*arg_p, call_location);
2175
e4f78bd4
JM
2176 /* There is a sequence point before a function call. Side effects in
2177 the argument list must occur before the actual call. So, when
2178 gimplifying arguments, force gimplify_expr to use an internal
2179 post queue which is then appended to the end of PRE_P. */
726a989a 2180 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
e4f78bd4
JM
2181}
2182
/* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
   WANT_VALUE is true if the result of the call is desired.

   Returns GS_OK when the call was transformed and should be regimplified,
   GS_ERROR on failure, and otherwise the combined status of gimplifying
   the callee and arguments.  When !WANT_VALUE the GIMPLE_CALL is emitted
   into PRE_P and *EXPR_P is cleared.  */

static enum gimplify_status
gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
{
  tree fndecl, parms, p, fnptrtype;
  enum gimplify_status ret;
  int i, nargs;
  gimple call;
  bool builtin_va_start_p = FALSE;
  location_t loc = EXPR_LOCATION (*expr_p);

  gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);

  /* For reliable diagnostics during inlining, it is necessary that
     every call_expr be annotated with file and line.  */
  if (! EXPR_HAS_LOCATION (*expr_p))
    SET_EXPR_LOCATION (*expr_p, input_location);

  /* Let the Cilk Plus front-end hook rewrite _Cilk_spawn calls first.  */
  if (fn_contains_cilk_spawn_p (cfun)
      && lang_hooks.cilkplus.cilk_detect_spawn_and_unwrap (expr_p)
      && !seen_error ())
    return (enum gimplify_status)
      lang_hooks.cilkplus.gimplify_cilk_spawn (expr_p, pre_p, NULL);

  /* This may be a call to a builtin function.

     Builtin function calls may be transformed into different
     (and more efficient) builtin function calls under certain
     circumstances.  Unfortunately, gimplification can muck things
     up enough that the builtin expanders are not aware that certain
     transformations are still valid.

     So we attempt transformation/gimplification of the call before
     we gimplify the CALL_EXPR.  At this time we do not manage to
     transform all calls in the same manner as the expanders do, but
     we do transform most of them.  */
  fndecl = get_callee_fndecl (*expr_p);
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      case BUILT_IN_VA_START:
	{
	  builtin_va_start_p = TRUE;
	  if (call_expr_nargs (*expr_p) < 2)
	    {
	      error ("too few arguments to function %<va_start%>");
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }

	  if (fold_builtin_next_arg (*expr_p, true))
	    {
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }
	  break;
	}
      case BUILT_IN_LINE:
	{
	  /* __builtin_LINE () folds to the line of the call site.  */
	  expanded_location loc = expand_location (EXPR_LOCATION (*expr_p));
	  *expr_p = build_int_cst (TREE_TYPE (*expr_p), loc.line);
	  return GS_OK;
	}
      case BUILT_IN_FILE:
	{
	  /* __builtin_FILE () folds to the file name of the call site.  */
	  expanded_location loc = expand_location (EXPR_LOCATION (*expr_p));
	  *expr_p = build_string_literal (strlen (loc.file) + 1, loc.file);
	  return GS_OK;
	}
      case BUILT_IN_FUNCTION:
	{
	  /* __builtin_FUNCTION () folds to the enclosing function name.  */
	  const char *function;
	  function = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
	  *expr_p = build_string_literal (strlen (function) + 1, function);
	  return GS_OK;
	}
      default:
	;
      }
  if (fndecl && DECL_BUILT_IN (fndecl))
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }

  /* Remember the original function pointer type.  */
  fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));

  /* There is a sequence point before the call, so any side effects in
     the calling expression must occur before the actual call.  Force
     gimplify_expr to use an internal post queue.  */
  ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
		       is_gimple_call_addr, fb_rvalue);

  nargs = call_expr_nargs (*expr_p);

  /* Get argument types for verification.  */
  fndecl = get_callee_fndecl (*expr_p);
  parms = NULL_TREE;
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
    parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));

  if (fndecl && DECL_ARGUMENTS (fndecl))
    p = DECL_ARGUMENTS (fndecl);
  else if (parms)
    p = parms;
  else
    p = NULL_TREE;
  /* Count how many arguments are consumed by named parameters; after
     this loop P is non-null iff the call passes fewer than the named
     parameters.  */
  for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
    ;

  /* If the last argument is __builtin_va_arg_pack () and it is not
     passed as a named argument, decrease the number of CALL_EXPR
     arguments and set instead the CALL_EXPR_VA_ARG_PACK flag.  */
  if (!p
      && i < nargs
      && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
    {
      tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
      tree last_arg_fndecl = get_callee_fndecl (last_arg);

      if (last_arg_fndecl
	  && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
	{
	  tree call = *expr_p;

	  --nargs;
	  *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
					  CALL_EXPR_FN (call),
					  nargs, CALL_EXPR_ARGP (call));

	  /* Copy all CALL_EXPR flags, location and block, except
	     CALL_EXPR_VA_ARG_PACK flag.  */
	  CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
	  CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
	  CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
	    = CALL_EXPR_RETURN_SLOT_OPT (call);
	  CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
	  SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));

	  /* Set CALL_EXPR_VA_ARG_PACK.  */
	  CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
	}
    }

  /* Finally, gimplify the function arguments.  */
  if (nargs > 0)
    {
      /* Walk in argument-push order so temporaries are created in the
	 order the target pushes arguments.  */
      for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
	   PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
	   PUSH_ARGS_REVERSED ? i-- : i++)
	{
	  enum gimplify_status t;

	  /* Avoid gimplifying the second argument to va_start, which needs to
	     be the plain PARM_DECL.  */
	  if ((i != 1) || !builtin_va_start_p)
	    {
	      t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
				EXPR_LOCATION (*expr_p));

	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
    }

  /* Verify the function result.  */
  if (want_value && fndecl
      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
    {
      error_at (loc, "using result of function returning %<void%>");
      ret = GS_ERROR;
    }

  /* Try this again in case gimplification exposed something.  */
  if (ret != GS_ERROR)
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);

      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }
  else
    {
      *expr_p = error_mark_node;
      return GS_ERROR;
    }

  /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
     decl.  This allows us to eliminate redundant or useless
     calls to "const" functions.  */
  if (TREE_CODE (*expr_p) == CALL_EXPR)
    {
      int flags = call_expr_flags (*expr_p);
      if (flags & (ECF_CONST | ECF_PURE)
	  /* An infinite loop is considered a side effect.  */
	  && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
	TREE_SIDE_EFFECTS (*expr_p) = 0;
    }

  /* If the value is not needed by the caller, emit a new GIMPLE_CALL
     and clear *EXPR_P.  Otherwise, leave *EXPR_P in its gimplified
     form and delegate the creation of a GIMPLE_CALL to
     gimplify_modify_expr.  This is always possible because when
     WANT_VALUE is true, the caller wants the result of this call into
     a temporary, which means that we will emit an INIT_EXPR in
     internal_get_tmp_var which will then be handled by
     gimplify_modify_expr.  */
  if (!want_value)
    {
      /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
	 have to do is replicate it as a GIMPLE_CALL tuple.  */
      gimple_stmt_iterator gsi;
      call = gimple_build_call_from_tree (*expr_p);
      gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
      notice_special_calls (call);
      gimplify_seq_add_stmt (pre_p, call);
      gsi = gsi_last (*pre_p);
      /* Don't fold stmts inside of target construct.  We'll do it
	 during omplower pass instead.  */
      struct gimplify_omp_ctx *ctx;
      for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
	if (ctx->region_type == ORT_TARGET)
	  break;
      if (ctx == NULL)
	fold_stmt (&gsi);
      *expr_p = NULL_TREE;
    }
  else
    /* Remember the original function type.  */
    CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
				     CALL_EXPR_FN (*expr_p));

  return ret;
}
2439
/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
   rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.

   TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
   condition is true or false, respectively.  If null, we should generate
   our own to skip over the evaluation of this specific expression.

   LOCUS is the source location of the COND_EXPR.

   This function is the tree equivalent of do_jump.

   shortcut_cond_r should only be called by shortcut_cond_expr.  */

static tree
shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
		 location_t locus)
{
  tree local_label = NULL_TREE;
  tree t, expr = NULL;

  /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
     retain the shortcut semantics.  Just insert the gotos here;
     shortcut_cond_expr will append the real blocks later.  */
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a && b) into

	 if (a); else goto no;
	 if (b) goto yes; else goto no;
	 (no:) */

      if (false_label_p == NULL)
	false_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the && on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a || b) into

	 if (a) goto yes;
	 if (b) goto yes; else goto no;
	 (yes:) */

      if (true_label_p == NULL)
	true_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the || on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == COND_EXPR
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
    {
      location_t new_locus;

      /* As long as we're messing with gotos, turn if (a ? b : c) into
	 if (a)
	   if (b) goto yes; else goto no;
	 else
	   if (c) goto yes; else goto no;

	 Don't do this if one of the arms has void type, which can happen
	 in C++ when the arm is throw.  */

      /* Keep the original source location on the first 'if'.  Set the source
	 location of the ? on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
		     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
				      false_label_p, locus),
		     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
				      false_label_p, new_locus));
    }
  else
    {
      /* Base case: a simple predicate becomes a two-way conditional
	 jump.  build_and_jump creates the label on demand when the
	 caller passed a pointer to a NULL label.  */
      expr = build3 (COND_EXPR, void_type_node, pred,
		     build_and_jump (true_label_p),
		     build_and_jump (false_label_p));
      SET_EXPR_LOCATION (expr, locus);
    }

  /* If we generated a label locally (to skip just this subexpression),
     emit it at the end of the expansion.  */
  if (local_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, local_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
2549
/* Given a conditional expression EXPR with short-circuit boolean
   predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
   predicate apart into the equivalent sequence of conditionals.

   Returns either a simplified COND_EXPR (when the shortcut operators
   could be absorbed by nesting 'if's) or a STATEMENT_LIST of
   conditional jumps and labels equivalent to EXPR.  */

static tree
shortcut_cond_expr (tree expr)
{
  tree pred = TREE_OPERAND (expr, 0);
  tree then_ = TREE_OPERAND (expr, 1);
  tree else_ = TREE_OPERAND (expr, 2);
  tree true_label, false_label, end_label, t;
  tree *true_label_p;
  tree *false_label_p;
  bool emit_end, emit_false, jump_over_else;
  bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
  bool else_se = else_ && TREE_SIDE_EFFECTS (else_);

  /* First do simple transformations.  */
  if (!else_se)
    {
      /* If there is no 'else', turn
	   if (a && b) then c
	 into
	   if (a) if (b) then c.  */
      while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the && on the second 'if'.  */
	  if (EXPR_HAS_LOCATION (pred))
	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
	  then_ = shortcut_cond_expr (expr);
	  then_se = then_ && TREE_SIDE_EFFECTS (then_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  if (!then_se)
    {
      /* If there is no 'then', turn
	   if (a || b); else d
	 into
	   if (a); else if (b); else d.  */
      while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the || on the second 'if'.  */
	  if (EXPR_HAS_LOCATION (pred))
	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
	  else_ = shortcut_cond_expr (expr);
	  else_se = else_ && TREE_SIDE_EFFECTS (else_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  /* If we're done, great.  */
  if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
      && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
    return expr;

  /* Otherwise we need to mess with gotos.  Change
       if (a) c; else d;
     to
       if (a); else goto no;
       c; goto end;
       no: d; end:
     and recursively gimplify the condition.  */

  true_label = false_label = end_label = NULL_TREE;

  /* If our arms just jump somewhere, hijack those labels so we don't
     generate jumps to jumps.  */

  if (then_
      && TREE_CODE (then_) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
    {
      true_label = GOTO_DESTINATION (then_);
      then_ = NULL;
      then_se = false;
    }

  if (else_
      && TREE_CODE (else_) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
    {
      false_label = GOTO_DESTINATION (else_);
      else_ = NULL;
      else_se = false;
    }

  /* If we aren't hijacking a label for the 'then' branch, it falls through.  */
  if (true_label)
    true_label_p = &true_label;
  else
    true_label_p = NULL;

  /* The 'else' branch also needs a label if it contains interesting code.  */
  if (false_label || else_se)
    false_label_p = &false_label;
  else
    false_label_p = NULL;

  /* If there was nothing else in our arms, just forward the label(s).  */
  if (!then_se && !else_se)
    return shortcut_cond_r (pred, true_label_p, false_label_p,
			    EXPR_LOC_OR_LOC (expr, input_location));

  /* If our last subexpression already has a terminal label, reuse it.  */
  if (else_se)
    t = expr_last (else_);
  else if (then_se)
    t = expr_last (then_);
  else
    t = NULL;
  if (t && TREE_CODE (t) == LABEL_EXPR)
    end_label = LABEL_EXPR_LABEL (t);

  /* If we don't care about jumping to the 'else' branch, jump to the end
     if the condition is false.  */
  if (!false_label_p)
    false_label_p = &end_label;

  /* We only want to emit these labels if we aren't hijacking them.  */
  emit_end = (end_label == NULL_TREE);
  emit_false = (false_label == NULL_TREE);

  /* We only emit the jump over the else clause if we have to--if the
     then clause may fall through.  Otherwise we can wind up with a
     useless jump and a useless label at the end of gimplified code,
     which will cause us to think that this conditional as a whole
     falls through even if it doesn't.  If we then inline a function
     which ends with such a condition, that can cause us to issue an
     inappropriate warning about control reaching the end of a
     non-void function.  */
  jump_over_else = block_may_fallthru (then_);

  pred = shortcut_cond_r (pred, true_label_p, false_label_p,
			  EXPR_LOC_OR_LOC (expr, input_location));

  expr = NULL;
  append_to_statement_list (pred, &expr);

  append_to_statement_list (then_, &expr);
  if (else_se)
    {
      if (jump_over_else)
	{
	  tree last = expr_last (expr);
	  t = build_and_jump (&end_label);
	  /* Give the skip-over jump the location of the last statement
	     of the then arm, so the debugger steps sensibly.  */
	  if (EXPR_HAS_LOCATION (last))
	    SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
	  append_to_statement_list (t, &expr);
	}
      if (emit_false)
	{
	  t = build1 (LABEL_EXPR, void_type_node, false_label);
	  append_to_statement_list (t, &expr);
	}
      append_to_statement_list (else_, &expr);
    }
  if (emit_end && end_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, end_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
2726
/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE.  */

tree
gimple_boolify (tree expr)
{
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);

  /* Special-case 'call != 0' so that a truth-valued argument of
     __builtin_expect is boolified as well, not just the comparison.  */
  if (TREE_CODE (expr) == NE_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
      && integer_zerop (TREE_OPERAND (expr, 1)))
    {
      tree call = TREE_OPERAND (expr, 0);
      tree fn = get_callee_fndecl (call);

      /* For __builtin_expect ((long) (x), y) recurse into x as well
	 if x is truth_value_p.  */
      if (fn
	  && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
	  && call_expr_nargs (call) == 2)
	{
	  tree arg = CALL_EXPR_ARG (call, 0);
	  if (arg)
	    {
	      /* Look through the (long) cast the front end wraps the
		 first argument in.  */
	      if (TREE_CODE (arg) == NOP_EXPR
		  && TREE_TYPE (arg) == TREE_TYPE (call))
		arg = TREE_OPERAND (arg, 0);
	      if (truth_value_p (TREE_CODE (arg)))
		{
		  arg = gimple_boolify (arg);
		  CALL_EXPR_ARG (call, 0)
		    = fold_convert_loc (loc, TREE_TYPE (call), arg);
		}
	    }
	}
    }

  switch (TREE_CODE (expr))
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Also boolify the arguments of truth exprs.  */
      TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
      /* FALLTHRU */

    case TRUTH_NOT_EXPR:
      TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

      /* These expressions always produce boolean results.  */
      if (TREE_CODE (type) != BOOLEAN_TYPE)
	TREE_TYPE (expr) = boolean_type_node;
      return expr;

    case ANNOTATE_EXPR:
      /* An ivdep annotation wraps the loop condition; boolify the
	 wrapped expression and retype the annotation itself.  */
      if ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1))
	  == annot_expr_ivdep_kind)
	{
	  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
	  if (TREE_CODE (type) != BOOLEAN_TYPE)
	    TREE_TYPE (expr) = boolean_type_node;
	  return expr;
	}
      /* FALLTHRU */

    default:
      if (COMPARISON_CLASS_P (expr))
	{
	  /* These expressions always produce boolean results.  */
	  if (TREE_CODE (type) != BOOLEAN_TYPE)
	    TREE_TYPE (expr) = boolean_type_node;
	  return expr;
	}
      /* Other expressions that get here must have boolean values, but
	 might need to be converted to the appropriate mode.  */
      if (TREE_CODE (type) == BOOLEAN_TYPE)
	return expr;
      return fold_convert_loc (loc, boolean_type_node, expr);
    }
}
2810
/* Given a conditional expression *EXPR_P without side effects, gimplify
   its operands.  New statements are inserted to PRE_P.  */

static enum gimplify_status
gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p, cond;
  enum gimplify_status ret, tret;
  enum tree_code code;

  cond = gimple_boolify (COND_EXPR_COND (expr));

  /* We need to handle && and || specially, as their gimplification
     creates pure cond_expr, thus leading to an infinite cycle otherwise.  */
  code = TREE_CODE (cond);
  if (code == TRUTH_ANDIF_EXPR)
    TREE_SET_CODE (cond, TRUTH_AND_EXPR);
  else if (code == TRUTH_ORIF_EXPR)
    TREE_SET_CODE (cond, TRUTH_OR_EXPR);
  ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
  COND_EXPR_COND (*expr_p) = cond;

  /* Gimplify both arms to gimple values; the overall status is the
     worst (minimum) of the individual results.  */
  tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
			is_gimple_val, fb_rvalue);
  ret = MIN (ret, tret);
  tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
			is_gimple_val, fb_rvalue);

  return MIN (ret, tret);
}
2841
ad19c4be 2842/* Return true if evaluating EXPR could trap.
aea74440
JJ
2843 EXPR is GENERIC, while tree_could_trap_p can be called
2844 only on GIMPLE. */
2845
2846static bool
2847generic_expr_could_trap_p (tree expr)
2848{
2849 unsigned i, n;
2850
2851 if (!expr || is_gimple_val (expr))
2852 return false;
2853
2854 if (!EXPR_P (expr) || tree_could_trap_p (expr))
2855 return true;
2856
2857 n = TREE_OPERAND_LENGTH (expr);
2858 for (i = 0; i < n; i++)
2859 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
2860 return true;
2861
2862 return false;
2863}
2864
206048bd 2865/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
6de9cd9a
DN
2866 into
2867
2868 if (p) if (p)
2869 t1 = a; a;
2870 else or else
2871 t1 = b; b;
2872 t1;
2873
2874 The second form is used when *EXPR_P is of type void.
2875
2876 PRE_P points to the list where side effects that must happen before
dae7ec87 2877 *EXPR_P should be stored. */
6de9cd9a
DN
2878
2879static enum gimplify_status
726a989a 2880gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
6de9cd9a
DN
2881{
2882 tree expr = *expr_p;
06ec59e6
EB
2883 tree type = TREE_TYPE (expr);
2884 location_t loc = EXPR_LOCATION (expr);
2885 tree tmp, arm1, arm2;
6de9cd9a 2886 enum gimplify_status ret;
726a989a
RB
2887 tree label_true, label_false, label_cont;
2888 bool have_then_clause_p, have_else_clause_p;
2889 gimple gimple_cond;
2890 enum tree_code pred_code;
2891 gimple_seq seq = NULL;
26d44ae2
RH
2892
2893 /* If this COND_EXPR has a value, copy the values into a temporary within
2894 the arms. */
06ec59e6 2895 if (!VOID_TYPE_P (type))
26d44ae2 2896 {
06ec59e6 2897 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
aff98faf
AO
2898 tree result;
2899
06ec59e6
EB
2900 /* If either an rvalue is ok or we do not require an lvalue, create the
2901 temporary. But we cannot do that if the type is addressable. */
2902 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
c3e203cf 2903 && !TREE_ADDRESSABLE (type))
aff98faf 2904 {
aea74440
JJ
2905 if (gimplify_ctxp->allow_rhs_cond_expr
2906 /* If either branch has side effects or could trap, it can't be
2907 evaluated unconditionally. */
06ec59e6
EB
2908 && !TREE_SIDE_EFFECTS (then_)
2909 && !generic_expr_could_trap_p (then_)
2910 && !TREE_SIDE_EFFECTS (else_)
2911 && !generic_expr_could_trap_p (else_))
aea74440
JJ
2912 return gimplify_pure_cond_expr (expr_p, pre_p);
2913
06ec59e6
EB
2914 tmp = create_tmp_var (type, "iftmp");
2915 result = tmp;
aff98faf 2916 }
06ec59e6
EB
2917
2918 /* Otherwise, only create and copy references to the values. */
26d44ae2
RH
2919 else
2920 {
06ec59e6 2921 type = build_pointer_type (type);
aff98faf 2922
06ec59e6
EB
2923 if (!VOID_TYPE_P (TREE_TYPE (then_)))
2924 then_ = build_fold_addr_expr_loc (loc, then_);
aff98faf 2925
06ec59e6
EB
2926 if (!VOID_TYPE_P (TREE_TYPE (else_)))
2927 else_ = build_fold_addr_expr_loc (loc, else_);
2928
2929 expr
2930 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
aea74440 2931
726a989a 2932 tmp = create_tmp_var (type, "iftmp");
70f34814 2933 result = build_simple_mem_ref_loc (loc, tmp);
26d44ae2
RH
2934 }
2935
06ec59e6
EB
2936 /* Build the new then clause, `tmp = then_;'. But don't build the
2937 assignment if the value is void; in C++ it can be if it's a throw. */
2938 if (!VOID_TYPE_P (TREE_TYPE (then_)))
2939 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
26d44ae2 2940
06ec59e6
EB
2941 /* Similarly, build the new else clause, `tmp = else_;'. */
2942 if (!VOID_TYPE_P (TREE_TYPE (else_)))
2943 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
26d44ae2
RH
2944
2945 TREE_TYPE (expr) = void_type_node;
2946 recalculate_side_effects (expr);
2947
d91ba7b0 2948 /* Move the COND_EXPR to the prequeue. */
726a989a 2949 gimplify_stmt (&expr, pre_p);
26d44ae2 2950
aff98faf 2951 *expr_p = result;
726a989a 2952 return GS_ALL_DONE;
26d44ae2
RH
2953 }
2954
f2f81d57
EB
2955 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
2956 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
2957 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
2958 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
2959
26d44ae2
RH
2960 /* Make sure the condition has BOOLEAN_TYPE. */
2961 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2962
2963 /* Break apart && and || conditions. */
2964 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
2965 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
2966 {
2967 expr = shortcut_cond_expr (expr);
2968
2969 if (expr != *expr_p)
2970 {
2971 *expr_p = expr;
2972
2973 /* We can't rely on gimplify_expr to re-gimplify the expanded
2974 form properly, as cleanups might cause the target labels to be
2975 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
2976 set up a conditional context. */
2977 gimple_push_condition ();
726a989a 2978 gimplify_stmt (expr_p, &seq);
26d44ae2 2979 gimple_pop_condition (pre_p);
726a989a 2980 gimple_seq_add_seq (pre_p, seq);
26d44ae2
RH
2981
2982 return GS_ALL_DONE;
2983 }
2984 }
2985
2986 /* Now do the normal gimplification. */
26d44ae2 2987
726a989a
RB
2988 /* Gimplify condition. */
2989 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
2990 fb_rvalue);
26d44ae2 2991 if (ret == GS_ERROR)
726a989a
RB
2992 return GS_ERROR;
2993 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
2994
2995 gimple_push_condition ();
26d44ae2 2996
726a989a
RB
2997 have_then_clause_p = have_else_clause_p = false;
2998 if (TREE_OPERAND (expr, 1) != NULL
2999 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
3000 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
3001 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
3002 == current_function_decl)
3003 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3004 have different locations, otherwise we end up with incorrect
3005 location information on the branches. */
3006 && (optimize
3007 || !EXPR_HAS_LOCATION (expr)
3008 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3009 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
3010 {
3011 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
3012 have_then_clause_p = true;
26d44ae2
RH
3013 }
3014 else
c2255bc4 3015 label_true = create_artificial_label (UNKNOWN_LOCATION);
726a989a
RB
3016 if (TREE_OPERAND (expr, 2) != NULL
3017 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3018 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3019 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3020 == current_function_decl)
3021 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3022 have different locations, otherwise we end up with incorrect
3023 location information on the branches. */
3024 && (optimize
3025 || !EXPR_HAS_LOCATION (expr)
3026 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3027 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3028 {
3029 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3030 have_else_clause_p = true;
3031 }
3032 else
c2255bc4 3033 label_false = create_artificial_label (UNKNOWN_LOCATION);
26d44ae2 3034
726a989a
RB
3035 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3036 &arm2);
26d44ae2 3037
726a989a
RB
3038 gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
3039 label_false);
26d44ae2 3040
726a989a
RB
3041 gimplify_seq_add_stmt (&seq, gimple_cond);
3042 label_cont = NULL_TREE;
3043 if (!have_then_clause_p)
3044 {
3045 /* For if (...) {} else { code; } put label_true after
3046 the else block. */
3047 if (TREE_OPERAND (expr, 1) == NULL_TREE
3048 && !have_else_clause_p
3049 && TREE_OPERAND (expr, 2) != NULL_TREE)
3050 label_cont = label_true;
3051 else
3052 {
3053 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
3054 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
3055 /* For if (...) { code; } else {} or
3056 if (...) { code; } else goto label; or
3057 if (...) { code; return; } else { ... }
3058 label_cont isn't needed. */
3059 if (!have_else_clause_p
3060 && TREE_OPERAND (expr, 2) != NULL_TREE
3061 && gimple_seq_may_fallthru (seq))
3062 {
3063 gimple g;
c2255bc4 3064 label_cont = create_artificial_label (UNKNOWN_LOCATION);
726a989a
RB
3065
3066 g = gimple_build_goto (label_cont);
3067
3068 /* GIMPLE_COND's are very low level; they have embedded
3069 gotos. This particular embedded goto should not be marked
3070 with the location of the original COND_EXPR, as it would
3071 correspond to the COND_EXPR's condition, not the ELSE or the
3072 THEN arms. To avoid marking it with the wrong location, flag
3073 it as "no location". */
3074 gimple_set_do_not_emit_location (g);
3075
3076 gimplify_seq_add_stmt (&seq, g);
3077 }
3078 }
3079 }
3080 if (!have_else_clause_p)
3081 {
3082 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
3083 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
3084 }
3085 if (label_cont)
3086 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
3087
3088 gimple_pop_condition (pre_p);
3089 gimple_seq_add_seq (pre_p, seq);
3090
3091 if (ret == GS_ERROR)
3092 ; /* Do nothing. */
3093 else if (have_then_clause_p || have_else_clause_p)
3094 ret = GS_ALL_DONE;
3095 else
3096 {
3097 /* Both arms are empty; replace the COND_EXPR with its predicate. */
3098 expr = TREE_OPERAND (expr, 0);
3099 gimplify_stmt (&expr, pre_p);
3100 }
3101
3102 *expr_p = NULL;
3103 return ret;
3104}
3105
f76d6e6f
EB
3106/* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3107 to be marked addressable.
3108
3109 We cannot rely on such an expression being directly markable if a temporary
3110 has been created by the gimplification. In this case, we create another
3111 temporary and initialize it with a copy, which will become a store after we
3112 mark it addressable. This can happen if the front-end passed us something
3113 that it could not mark addressable yet, like a Fortran pass-by-reference
3114 parameter (int) floatvar. */
3115
3116static void
3117prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3118{
3119 while (handled_component_p (*expr_p))
3120 expr_p = &TREE_OPERAND (*expr_p, 0);
3121 if (is_gimple_reg (*expr_p))
3122 *expr_p = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3123}
3124
/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memcpy.

   *EXPR_P is the MODIFY_EXPR being rewritten; SIZE is the byte count to
   copy.  If WANT_VALUE is true, the result of the assignment is needed
   as a value, so *EXPR_P is replaced with a dereference of the
   destination pointer; otherwise *EXPR_P is cleared.  New statements go
   to SEQ_P.  */

static enum gimplify_status
gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
				gimple_seq *seq_p)
{
  tree t, to, to_ptr, from, from_ptr;
  gimple gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  to = TREE_OPERAND (*expr_p, 0);
  from = TREE_OPERAND (*expr_p, 1);

  /* Mark the RHS addressable.  Beware that it may not be possible to do so
     directly if a temporary has been created by the gimplification.  */
  prepare_gimple_addressable (&from, seq_p);

  mark_addressable (from);
  from_ptr = build_fold_addr_expr_loc (loc, from);
  gimplify_arg (&from_ptr, seq_p, loc);

  mark_addressable (to);
  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);

  t = builtin_decl_implicit (BUILT_IN_MEMCPY);

  gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);

  if (want_value)
    {
      /* tmp = memcpy() */
      t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      /* memcpy returns the destination; hand back *tmp as the value.  */
      *expr_p = build_simple_mem_ref (t);
      return GS_ALL_DONE;
    }

  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}
3170
/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memset.  In this case we know that the RHS is
   a CONSTRUCTOR with an empty element list.

   SIZE is the number of bytes to clear.  If WANT_VALUE is true, *EXPR_P
   is replaced with a dereference of the destination pointer; otherwise
   *EXPR_P is cleared.  New statements go to SEQ_P.  */

static enum gimplify_status
gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
				gimple_seq *seq_p)
{
  tree t, from, to, to_ptr;
  gimple gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Assert our assumptions, to abort instead of producing wrong code
     silently if they are not met.  Beware that the RHS CONSTRUCTOR might
     not be immediately exposed.  */
  from = TREE_OPERAND (*expr_p, 1);
  if (TREE_CODE (from) == WITH_SIZE_EXPR)
    from = TREE_OPERAND (from, 0);

  gcc_assert (TREE_CODE (from) == CONSTRUCTOR
	      && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));

  /* Now proceed.  */
  to = TREE_OPERAND (*expr_p, 0);

  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);
  t = builtin_decl_implicit (BUILT_IN_MEMSET);

  gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);

  if (want_value)
    {
      /* tmp = memset() */
      t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      /* memset returns the destination; hand back *tmp as the value.  */
      *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
      return GS_ALL_DONE;
    }

  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}
3217
/* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
   determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
   assignment.  Return non-null if we detect a potential overlap.  */

struct gimplify_init_ctor_preeval_data
{
  /* The base decl of the lhs object.  May be NULL, in which case we
     have to assume the lhs is indirect.  */
  tree lhs_base_decl;

  /* The alias set of the lhs object.  */
  alias_set_type lhs_alias_set;
};

static tree
gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
{
  struct gimplify_init_ctor_preeval_data *data
    = (struct gimplify_init_ctor_preeval_data *) xdata;
  tree t = *tp;

  /* If we find the base object, obviously we have overlap.  */
  if (data->lhs_base_decl == t)
    return t;

  /* If the constructor component is indirect, determine if we have a
     potential overlap with the lhs.  The only bits of information we
     have to go on at this point are addressability and alias sets.  */
  if ((INDIRECT_REF_P (t)
       || TREE_CODE (t) == MEM_REF)
      && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
      && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
    return t;

  /* If the constructor component is a call, determine if it can hide a
     potential overlap with the lhs through an INDIRECT_REF like above.
     ??? Ugh - this is completely broken.  In fact this whole analysis
     doesn't look conservative.  */
  if (TREE_CODE (t) == CALL_EXPR)
    {
      tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));

      /* Check every pointer parameter type for an alias conflict with
	 the lhs.  */
      for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
	if (POINTER_TYPE_P (TREE_VALUE (type))
	    && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
	    && alias_sets_conflict_p (data->lhs_alias_set,
				      get_alias_set
					(TREE_TYPE (TREE_VALUE (type)))))
	  return t;
    }

  /* Types and decls cannot themselves contain an overlapping reference;
     do not walk into them.  */
  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  return NULL;
}
3273
/* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
   force values that overlap with the lhs (as described by *DATA)
   into temporaries.

   *EXPR_P is a constructor element value; PRE_P/POST_P collect the
   statements produced by its gimplification.  */

static void
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			    struct gimplify_init_ctor_preeval_data *data)
{
  enum gimplify_status one;

  /* If the value is constant, then there's nothing to pre-evaluate.  */
  if (TREE_CONSTANT (*expr_p))
    {
      /* Ensure it does not have side effects, it might contain a reference to
	 the object we're initializing.  */
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
      return;
    }

  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
    return;

  /* Recurse for nested constructors.  */
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT ix;
      constructor_elt *ce;
      vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);

      FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
	gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);

      return;
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (expr_p);

  /* Gimplify the constructor element to something appropriate for the rhs
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
     the gimplifier will consider this a store to memory.  Doing this
     gimplification now means that we won't have to deal with complicated
     language-specific trees, nor trees like SAVE_EXPR that can induce
     exponential search behavior.  */
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
  if (one == GS_ERROR)
    {
      *expr_p = NULL;
      return;
    }

  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
     always be true for all scalars, since is_gimple_mem_rhs insists on a
     temporary variable for them.  */
  if (DECL_P (*expr_p))
    return;

  /* If this is of variable size, we have no choice but to assume it doesn't
     overlap since we can't make a temporary for it.  */
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
    return;

  /* Otherwise, we must search for overlap ...  */
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
    return;

  /* ... and if found, force the value into a temporary.  */
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
}
3345
/* A subroutine of gimplify_init_ctor_eval.  Create a loop for
   a RANGE_EXPR in a CONSTRUCTOR for an array.

      var = lower;
    loop_entry:
      object[var] = value;
      if (var == upper)
	goto loop_exit;
      var = var + 1;
      goto loop_entry;
    loop_exit:

   We increment var _after_ the loop exit check because we might otherwise
   fail if upper == TYPE_MAX_VALUE (type for upper).

   Note that we never have to deal with SAVE_EXPRs here, because this has
   already been taken care of for us, in gimplify_init_ctor_preeval().  */

static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
				     gimple_seq *, bool);

static void
gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
			       tree value, tree array_elt_type,
			       gimple_seq *pre_p, bool cleared)
{
  tree loop_entry_label, loop_exit_label, fall_thru_label;
  tree var, var_type, cref, tmp;

  loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
  loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
  fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);

  /* Create and initialize the index variable.  */
  var_type = TREE_TYPE (upper);
  var = create_tmp_var (var_type, NULL);
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));

  /* Add the loop entry label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));

  /* Build the reference.  */
  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
		 var, NULL_TREE, NULL_TREE);

  /* If we are a constructor, just call gimplify_init_ctor_eval to do
     the store.  Otherwise just assign value to the reference.  */

  if (TREE_CODE (value) == CONSTRUCTOR)
    /* NB we might have to call ourself recursively through
       gimplify_init_ctor_eval if the value is a constructor.  */
    gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
			     pre_p, cleared);
  else
    gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));

  /* We exit the loop when the index var is equal to the upper bound.  */
  gimplify_seq_add_stmt (pre_p,
			 gimple_build_cond (EQ_EXPR, var, upper,
					    loop_exit_label, fall_thru_label));

  gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));

  /* Otherwise, increment the index var...  */
  tmp = build2 (PLUS_EXPR, var_type, var,
		fold_convert (var_type, integer_one_node));
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));

  /* ...and jump back to the loop entry.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));

  /* Add the loop exit label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
}
3420
292a398f 3421/* Return true if FDECL is accessing a field that is zero sized. */
b8698a0f 3422
292a398f 3423static bool
22ea9ec0 3424zero_sized_field_decl (const_tree fdecl)
292a398f 3425{
b8698a0f 3426 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
292a398f
DB
3427 && integer_zerop (DECL_SIZE (fdecl)))
3428 return true;
3429 return false;
3430}
3431
d06526b7 3432/* Return true if TYPE is zero sized. */
b8698a0f 3433
d06526b7 3434static bool
22ea9ec0 3435zero_sized_type (const_tree type)
d06526b7
AP
3436{
3437 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3438 && integer_zerop (TYPE_SIZE (type)))
3439 return true;
3440 return false;
3441}
3442
/* A subroutine of gimplify_init_constructor.  Generate individual
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
   zeroed first.  */

static void
gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
			 gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
	continue;

      /* If the object was already zeroed, storing a zero is redundant.  */
      if (cleared && initializer_zerop (value))
	continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
	 so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
	 happen with calls to functions returning a zero-sized type, which
	 we shouldn't discard.  As a number of downstream passes don't
	 expect sets of zero-sized fields, we rely on the gimplification of
	 the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
	continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
	 whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lower = TREE_OPERAND (purpose, 0);
	  tree upper = TREE_OPERAND (purpose, 1);

	  /* If the lower bound is equal to upper, just treat it as if
	     upper was the index.  */
	  if (simple_cst_equal (lower, upper))
	    purpose = upper;
	  else
	    {
	      gimplify_init_ctor_eval_range (object, lower, upper, value,
					     array_elt_type, pre_p, cleared);
	      continue;
	    }
	}

      if (array_elt_type)
	{
	  /* Do not use bitsizetype for ARRAY_REF indices.  */
	  if (TYPE_DOMAIN (TREE_TYPE (object)))
	    purpose
	      = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
			      purpose);
	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
			 purpose, NULL_TREE, NULL_TREE);
	}
      else
	{
	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
			 unshare_expr (object), purpose, NULL_TREE);
	}

      /* Recurse for nested constructors, except vector constants which
	 are handled as a whole.  */
      if (TREE_CODE (value) == CONSTRUCTOR
	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
				 pre_p, cleared);
      else
	{
	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
	  gimplify_and_add (init, pre_p);
	  ggc_free (init);
	}
    }
}
3531
ad19c4be 3532/* Return the appropriate RHS predicate for this LHS. */
726a989a 3533
18f429e2 3534gimple_predicate
726a989a
RB
3535rhs_predicate_for (tree lhs)
3536{
ba4d8f9d
RG
3537 if (is_gimple_reg (lhs))
3538 return is_gimple_reg_rhs_or_call;
726a989a 3539 else
ba4d8f9d 3540 return is_gimple_mem_rhs_or_call;
726a989a
RB
3541}
3542
/* Gimplify a C99 compound literal expression.  This just means adding
   the DECL_EXPR before the current statement and using its anonymous
   decl instead.

   *EXPR_P is the COMPOUND_LITERAL_EXPR; PRE_P receives any statements
   that must be emitted before it.  GIMPLE_TEST_F is the predicate the
   caller needs the result to satisfy, and FALLBACK says which value
   kinds (rvalue/lvalue) are acceptable.  Always returns GS_OK.  */

static enum gimplify_status
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
				bool (*gimple_test_f) (tree),
				fallback_t fallback)
{
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
  tree decl = DECL_EXPR_DECL (decl_s);
  tree init = DECL_INITIAL (decl);
  /* Mark the decl as addressable if the compound literal
     expression is addressable now, otherwise it is marked too late
     after we gimplify the initialization expression.  */
  if (TREE_ADDRESSABLE (*expr_p))
    TREE_ADDRESSABLE (decl) = 1;
  /* Otherwise, if we don't need an lvalue and have a literal directly
     substitute it.  Check if it matches the gimple predicate, as
     otherwise we'd generate a new temporary, and we can as well just
     use the decl we already have.  */
  else if (!TREE_ADDRESSABLE (decl)
	   && init
	   && (fallback & fb_lvalue) == 0
	   && gimple_test_f (init))
    {
      *expr_p = init;
      return GS_OK;
    }

  /* Preliminarily mark non-addressed complex variables as eligible
     for promotion to gimple registers.  We'll transform their uses
     as we find them.  */
  if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (decl)
      && !needs_to_live_in_memory (decl))
    DECL_GIMPLE_REG_P (decl) = 1;

  /* If the decl is not addressable, then it is being used in some
     expression or on the right hand side of a statement, and it can
     be put into a readonly data section.  */
  if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
    TREE_READONLY (decl) = 1;

  /* This decl isn't mentioned in the enclosing block, so add it to the
     list of temps.  FIXME it seems a bit of a kludge to say that
     anonymous artificial vars aren't pushed, but everything else is.  */
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
    gimple_add_tmp_var (decl);

  /* Emit the DECL_EXPR (which gimplifies the initializer) ahead of the
     current statement, then stand in the decl for the literal.  */
  gimplify_and_add (decl_s, pre_p);
  *expr_p = decl;
  return GS_OK;
}
3598
/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
   return a new CONSTRUCTOR if something changed.  The rewrite is
   copy-on-write: ORIG_CTOR is returned unchanged unless at least one
   element was replaced, in which case a copied node is built so the
   original tree is never mutated.  */

static tree
optimize_compound_literals_in_ctor (tree orig_ctor)
{
  tree ctor = orig_ctor;
  vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
  unsigned int idx, num = vec_safe_length (elts);

  for (idx = 0; idx < num; idx++)
    {
      tree value = (*elts)[idx].value;
      tree newval = value;
      /* Recurse into nested constructors.  */
      if (TREE_CODE (value) == CONSTRUCTOR)
	newval = optimize_compound_literals_in_ctor (value);
      else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
	{
	  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
	  tree decl = DECL_EXPR_DECL (decl_s);
	  tree init = DECL_INITIAL (decl);

	  /* Only drop the compound literal's decl if its address was
	     never needed and it has a constructor initializer we can
	     substitute (recursively optimized) in its place.  */
	  if (!TREE_ADDRESSABLE (value)
	      && !TREE_ADDRESSABLE (decl)
	      && init
	      && TREE_CODE (init) == CONSTRUCTOR)
	    newval = optimize_compound_literals_in_ctor (init);
	}
      if (newval == value)
	continue;

      /* First change: unshare the constructor before writing to it.  */
      if (ctor == orig_ctor)
	{
	  ctor = copy_node (orig_ctor);
	  CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
	  elts = CONSTRUCTOR_ELTS (ctor);
	}
      (*elts)[idx].value = newval;
    }
  return ctor;
}
3640
/* A subroutine of gimplify_modify_expr.  Break out elements of a
   CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.

   Note that we still need to clear any elements that don't have explicit
   initializers, so if not all elements are initialized we keep the
   original MODIFY_EXPR, we just remove all of the constructor elements.

   If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
   GS_ERROR if we would have to create a temporary when gimplifying
   this constructor.  Otherwise, return GS_OK.

   If NOTIFY_TEMP_CREATION is false, just do the gimplification.  */

static enum gimplify_status
gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			   bool want_value, bool notify_temp_creation)
{
  tree object, ctor, type;
  enum gimplify_status ret;
  vec<constructor_elt, va_gc> *elts;

  gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);

  /* Gimplify the LHS first, unless we are only probing for temporary
     creation (in which case PRE_P/POST_P may be NULL).  */
  if (!notify_temp_creation)
    {
      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  object = TREE_OPERAND (*expr_p, 0);
  ctor = TREE_OPERAND (*expr_p, 1) =
    optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
  type = TREE_TYPE (ctor);
  elts = CONSTRUCTOR_ELTS (ctor);
  ret = GS_ALL_DONE;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case ARRAY_TYPE:
      {
	struct gimplify_init_ctor_preeval_data preeval_data;
	HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
	bool cleared, complete_p, valid_const_initializer;

	/* Aggregate types must lower constructors to initialization of
	   individual elements.  The exception is that a CONSTRUCTOR node
	   with no elements indicates zero-initialization of the whole.  */
	if (vec_safe_is_empty (elts))
	  {
	    if (notify_temp_creation)
	      return GS_OK;
	    break;
	  }

	/* Fetch information about the constructor to direct later processing.
	   We might want to make static versions of it in various cases, and
	   can only do so if it known to be a valid constant initializer.  */
	valid_const_initializer
	  = categorize_ctor_elements (ctor, &num_nonzero_elements,
				      &num_ctor_elements, &complete_p);

	/* If a const aggregate variable is being initialized, then it
	   should never be a lose to promote the variable to be static.  */
	if (valid_const_initializer
	    && num_nonzero_elements > 1
	    && TREE_READONLY (object)
	    && TREE_CODE (object) == VAR_DECL
	    && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
	  {
	    if (notify_temp_creation)
	      return GS_ERROR;
	    DECL_INITIAL (object) = ctor;
	    TREE_STATIC (object) = 1;
	    if (!DECL_NAME (object))
	      DECL_NAME (object) = create_tmp_var_name ("C");
	    walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);

	    /* ??? C++ doesn't automatically append a .<number> to the
	       assembler name, and even when it does, it looks at FE private
	       data structures to figure out what that number should be,
	       which are not set for this variable.  I suppose this is
	       important for local statics for inline functions, which aren't
	       "local" in the object file sense.  So in order to get a unique
	       TU-local symbol, we must invoke the lhd version now.  */
	    lhd_set_decl_assembler_name (object);

	    *expr_p = NULL_TREE;
	    break;
	  }

	/* If there are "lots" of initialized elements, even discounting
	   those that are not address constants (and thus *must* be
	   computed at runtime), then partition the constructor into
	   constant and non-constant parts.  Block copy the constant
	   parts in, then generate code for the non-constant parts.  */
	/* TODO.  There's code in cp/typeck.c to do this.  */

	if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
	  /* store_constructor will ignore the clearing of variable-sized
	     objects.  Initializers for such objects must explicitly set
	     every field that needs to be set.  */
	  cleared = false;
	else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
	  /* If the constructor isn't complete, clear the whole object
	     beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.

	     ??? This ought not to be needed.  For any element not present
	     in the initializer, we should simply set them to zero.  Except
	     we'd need to *find* the elements that are not present, and that
	     requires trickery to avoid quadratic compile-time behavior in
	     large cases or excessive memory use in small cases.  */
	  cleared = true;
	else if (num_ctor_elements - num_nonzero_elements
		 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
		 && num_nonzero_elements < num_ctor_elements / 4)
	  /* If there are "lots" of zeros, it's more efficient to clear
	     the memory and then set the nonzero elements.  */
	  cleared = true;
	else
	  cleared = false;

	/* If there are "lots" of initialized elements, and all of them
	   are valid address constants, then the entire initializer can
	   be dropped to memory, and then memcpy'd out.  Don't do this
	   for sparse arrays, though, as it's more efficient to follow
	   the standard CONSTRUCTOR behavior of memset followed by
	   individual element initialization.  Also don't do this for small
	   all-zero initializers (which aren't big enough to merit
	   clearing), and don't try to make bitwise copies of
	   TREE_ADDRESSABLE types.

	   We cannot apply such transformation when compiling chkp static
	   initializer because creation of initializer image in the memory
	   will require static initialization of bounds for it.  It should
	   result in another gimplification of similar initializer and we
	   may fall into infinite loop.  */
	if (valid_const_initializer
	    && !(cleared || num_nonzero_elements == 0)
	    && !TREE_ADDRESSABLE (type)
	    && (!current_function_decl
		|| !lookup_attribute ("chkp ctor",
				      DECL_ATTRIBUTES (current_function_decl))))
	  {
	    HOST_WIDE_INT size = int_size_in_bytes (type);
	    unsigned int align;

	    /* ??? We can still get unbounded array types, at least
	       from the C++ front end.  This seems wrong, but attempt
	       to work around it for now.  */
	    if (size < 0)
	      {
		size = int_size_in_bytes (TREE_TYPE (object));
		if (size >= 0)
		  TREE_TYPE (ctor) = type = TREE_TYPE (object);
	      }

	    /* Find the maximum alignment we can assume for the object.  */
	    /* ??? Make use of DECL_OFFSET_ALIGN.  */
	    if (DECL_P (object))
	      align = DECL_ALIGN (object);
	    else
	      align = TYPE_ALIGN (type);

	    /* Do a block move either if the size is so small as to make
	       each individual move a sub-unit move on average, or if it
	       is so large as to make individual moves inefficient.  */
	    if (size > 0
		&& num_nonzero_elements > 1
		&& (size < num_nonzero_elements
		    || !can_move_by_pieces (size, align)))
	      {
		if (notify_temp_creation)
		  return GS_ERROR;

		walk_tree (&ctor, force_labels_r, NULL, NULL);
		ctor = tree_output_constant_def (ctor);
		if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
		  ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
		TREE_OPERAND (*expr_p, 1) = ctor;

		/* This is no longer an assignment of a CONSTRUCTOR, but
		   we still may have processing to do on the LHS.  So
		   pretend we didn't do anything here to let that happen.  */
		return GS_UNHANDLED;
	      }
	  }

	/* If the target is volatile, we have non-zero elements and more than
	   one field to assign, initialize the target from a temporary.  */
	if (TREE_THIS_VOLATILE (object)
	    && !TREE_ADDRESSABLE (type)
	    && num_nonzero_elements > 0
	    && vec_safe_length (elts) > 1)
	  {
	    tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type), NULL);
	    TREE_OPERAND (*expr_p, 0) = temp;
	    *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
			      *expr_p,
			      build2 (MODIFY_EXPR, void_type_node,
				      object, temp));
	    return GS_OK;
	  }

	if (notify_temp_creation)
	  return GS_OK;

	/* If there are nonzero elements and if needed, pre-evaluate to capture
	   elements overlapping with the lhs into temporaries.  We must do this
	   before clearing to fetch the values before they are zeroed-out.  */
	if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
	  {
	    preeval_data.lhs_base_decl = get_base_address (object);
	    if (!DECL_P (preeval_data.lhs_base_decl))
	      preeval_data.lhs_base_decl = NULL;
	    preeval_data.lhs_alias_set = get_alias_set (object);

	    gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
					pre_p, post_p, &preeval_data);
	  }

	if (cleared)
	  {
	    /* Zap the CONSTRUCTOR element list, which simplifies this case.
	       Note that we still have to gimplify, in order to handle the
	       case of variable sized types.  Avoid shared tree structures.  */
	    CONSTRUCTOR_ELTS (ctor) = NULL;
	    TREE_SIDE_EFFECTS (ctor) = 0;
	    object = unshare_expr (object);
	    gimplify_stmt (expr_p, pre_p);
	  }

	/* If we have not block cleared the object, or if there are nonzero
	   elements in the constructor, add assignments to the individual
	   scalar fields of the object.  */
	if (!cleared || num_nonzero_elements > 0)
	  gimplify_init_ctor_eval (object, elts, pre_p, cleared);

	*expr_p = NULL_TREE;
      }
      break;

    case COMPLEX_TYPE:
      {
	tree r, i;

	if (notify_temp_creation)
	  return GS_OK;

	/* Extract the real and imaginary parts out of the ctor.  */
	gcc_assert (elts->length () == 2);
	r = (*elts)[0].value;
	i = (*elts)[1].value;
	/* A missing part is implicitly zero.  */
	if (r == NULL || i == NULL)
	  {
	    tree zero = build_zero_cst (TREE_TYPE (type));
	    if (r == NULL)
	      r = zero;
	    if (i == NULL)
	      i = zero;
	  }

	/* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
	   represent creation of a complex value.  */
	if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
	  {
	    ctor = build_complex (type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	  }
	else
	  {
	    ctor = build2 (COMPLEX_EXPR, type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
				 pre_p,
				 post_p,
				 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
				 fb_rvalue);
	  }
      }
      break;

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT ix;
	constructor_elt *ce;

	if (notify_temp_creation)
	  return GS_OK;

	/* Go ahead and simplify constant constructors to VECTOR_CST.  */
	if (TREE_CONSTANT (ctor))
	  {
	    bool constant_p = true;
	    tree value;

	    /* Even when ctor is constant, it might contain non-*_CST
	       elements, such as addresses or trapping values like
	       1.0/0.0 - 1.0/0.0.  Such expressions don't belong
	       in VECTOR_CST nodes.  */
	    FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
	      if (!CONSTANT_CLASS_P (value))
		{
		  constant_p = false;
		  break;
		}

	    if (constant_p)
	      {
		TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
		break;
	      }

	    /* Don't reduce an initializer constant even if we can't
	       make a VECTOR_CST.  It won't do anything for us, and it'll
	       prevent us from representing it as a single constant.  */
	    if (initializer_constant_valid_p (ctor, type))
	      break;

	    TREE_CONSTANT (ctor) = 0;
	  }

	/* Vector types use CONSTRUCTOR all the way through gimple
	   compilation as a general initializer.  */
	FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
	  {
	    enum gimplify_status tret;
	    tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
				  fb_rvalue);
	    if (tret == GS_ERROR)
	      ret = GS_ERROR;
	  }
	if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
      }
      break;

    default:
      /* So how did we get a CONSTRUCTOR for a scalar type?  */
      gcc_unreachable ();
    }

  if (ret == GS_ERROR)
    return GS_ERROR;
  else if (want_value)
    {
      *expr_p = object;
      return GS_OK;
    }
  else
    {
      /* If we have gimplified both sides of the initializer but have
	 not emitted an assignment, do so now.  */
      if (*expr_p)
	{
	  tree lhs = TREE_OPERAND (*expr_p, 0);
	  tree rhs = TREE_OPERAND (*expr_p, 1);
	  gimple init = gimple_build_assign (lhs, rhs);
	  gimplify_seq_add_stmt (pre_p, init);
	  *expr_p = NULL;
	}

      return GS_ALL_DONE;
    }
}
6de9cd9a 4010
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  This may only be applied to a rhs of an expression.
   Note that the resulting type may be different from the type pointed
   to in the sense that it is still compatible from the langhooks
   point of view.

   NOTE(review): currently a thin wrapper that just delegates to
   gimple_fold_indirect_ref; presumably kept separate so rhs-specific
   restrictions can be added later — confirm before collapsing it.  */

static tree
gimple_fold_indirect_ref_rhs (tree t)
{
  return gimple_fold_indirect_ref (t);
}
4023
/* Subroutine of gimplify_modify_expr to do simplifications of
   MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
   something changes.

   *EXPR_P is the assignment; FROM_P/TO_P point at its RHS and LHS
   operands.  PRE_P/POST_P collect emitted statements.  WANT_VALUE is
   true when the caller uses the assignment's value.  Returns
   GS_UNHANDLED when no simplification applied, otherwise the status of
   the simplification performed.  */

static enum gimplify_status
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
			  gimple_seq *pre_p, gimple_seq *post_p,
			  bool want_value)
{
  enum gimplify_status ret = GS_UNHANDLED;
  bool changed;

  /* Iterate to a fixed point: each simplification may expose another
     (e.g. stripping a COMPOUND_EXPR may reveal a TARGET_EXPR).  */
  do
    {
      changed = false;
      switch (TREE_CODE (*from_p))
	{
	case VAR_DECL:
	  /* If we're assigning from a read-only variable initialized with
	     a constructor, do the direct assignment from the constructor,
	     but only if neither source nor target are volatile since this
	     latter assignment might end up being done on a per-field basis.  */
	  if (DECL_INITIAL (*from_p)
	      && TREE_READONLY (*from_p)
	      && !TREE_THIS_VOLATILE (*from_p)
	      && !TREE_THIS_VOLATILE (*to_p)
	      && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
	    {
	      tree old_from = *from_p;
	      enum gimplify_status subret;

	      /* Move the constructor into the RHS.  */
	      *from_p = unshare_expr (DECL_INITIAL (*from_p));

	      /* Let's see if gimplify_init_constructor will need to put
		 it in memory.  */
	      subret = gimplify_init_constructor (expr_p, NULL, NULL,
						  false, true);
	      if (subret == GS_ERROR)
		{
		  /* If so, revert the change.  */
		  *from_p = old_from;
		}
	      else
		{
		  ret = GS_OK;
		  changed = true;
		}
	    }
	  break;
	case INDIRECT_REF:
	  {
	    /* If we have code like

	     *(const A*)(A*)&x

	     where the type of "x" is a (possibly cv-qualified variant
	     of "A"), treat the entire expression as identical to "x".
	     This kind of code arises in C++ when an object is bound
	     to a const reference, and if "x" is a TARGET_EXPR we want
	     to take advantage of the optimization below.  */
	    bool volatile_p = TREE_THIS_VOLATILE (*from_p);
	    tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
	    if (t)
	      {
		/* Preserve the volatility of the original indirection on
		   the folded replacement.  */
		if (TREE_THIS_VOLATILE (t) != volatile_p)
		  {
		    if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
		      t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
						    build_fold_addr_expr (t));
		    if (REFERENCE_CLASS_P (t))
		      TREE_THIS_VOLATILE (t) = volatile_p;
		  }
		*from_p = t;
		ret = GS_OK;
		changed = true;
	      }
	    break;
	  }

	case TARGET_EXPR:
	  {
	    /* If we are initializing something from a TARGET_EXPR, strip the
	       TARGET_EXPR and initialize it directly, if possible.  This can't
	       be done if the initializer is void, since that implies that the
	       temporary is set in some non-trivial way.

	       ??? What about code that pulls out the temp and uses it
	       elsewhere? I think that such code never uses the TARGET_EXPR as
	       an initializer.  If I'm wrong, we'll die because the temp won't
	       have any RTL.  In that case, I guess we'll need to replace
	       references somehow.  */
	    tree init = TARGET_EXPR_INITIAL (*from_p);

	    if (init
		&& !VOID_TYPE_P (TREE_TYPE (init)))
	      {
		*from_p = init;
		ret = GS_OK;
		changed = true;
	      }
	  }
	  break;

	case COMPOUND_EXPR:
	  /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
	     caught.  */
	  gimplify_compound_expr (from_p, pre_p, true);
	  ret = GS_OK;
	  changed = true;
	  break;

	case CONSTRUCTOR:
	  /* If we already made some changes, let the front end have a
	     crack at this before we break it down.  */
	  if (ret != GS_UNHANDLED)
	    break;
	  /* If we're initializing from a CONSTRUCTOR, break this into
	     individual MODIFY_EXPRs.  */
	  return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
					    false);

	case COND_EXPR:
	  /* If we're assigning to a non-register type, push the assignment
	     down into the branches.  This is mandatory for ADDRESSABLE types,
	     since we cannot generate temporaries for such, but it saves a
	     copy in other cases as well.  */
	  if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
	    {
	      /* This code should mirror the code in gimplify_cond_expr.  */
	      enum tree_code code = TREE_CODE (*expr_p);
	      tree cond = *from_p;
	      tree result = *to_p;

	      ret = gimplify_expr (&result, pre_p, post_p,
				   is_gimple_lvalue, fb_lvalue);
	      if (ret != GS_ERROR)
		ret = GS_OK;

	      /* Wrap each non-void arm in its own assignment to RESULT.  */
	      if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
		TREE_OPERAND (cond, 1)
		  = build2 (code, void_type_node, result,
			    TREE_OPERAND (cond, 1));
	      if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_OPERAND (cond, 2)
		  = build2 (code, void_type_node, unshare_expr (result),
			    TREE_OPERAND (cond, 2));

	      TREE_TYPE (cond) = void_type_node;
	      recalculate_side_effects (cond);

	      if (want_value)
		{
		  gimplify_and_add (cond, pre_p);
		  *expr_p = unshare_expr (result);
		}
	      else
		*expr_p = cond;
	      return ret;
	    }
	  break;

	case CALL_EXPR:
	  /* For calls that return in memory, give *to_p as the CALL_EXPR's
	     return slot so that we don't generate a temporary.  */
	  if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
	      && aggregate_value_p (*from_p, *from_p))
	    {
	      bool use_target;

	      if (!(rhs_predicate_for (*to_p))(*from_p))
		/* If we need a temporary, *to_p isn't accurate.  */
		use_target = false;
	      /* It's OK to use the return slot directly unless it's an NRV. */
	      else if (TREE_CODE (*to_p) == RESULT_DECL
		       && DECL_NAME (*to_p) == NULL_TREE
		       && needs_to_live_in_memory (*to_p))
		use_target = true;
	      else if (is_gimple_reg_type (TREE_TYPE (*to_p))
		       || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
		/* Don't force regs into memory.  */
		use_target = false;
	      else if (TREE_CODE (*expr_p) == INIT_EXPR)
		/* It's OK to use the target directly if it's being
		   initialized.  */
		use_target = true;
	      else if (variably_modified_type_p (TREE_TYPE (*to_p), NULL_TREE))
		/* Always use the target and thus RSO for variable-sized types.
		   GIMPLE cannot deal with a variable-sized assignment
		   embedded in a call statement.  */
		use_target = true;
	      else if (TREE_CODE (*to_p) != SSA_NAME
		      && (!is_gimple_variable (*to_p)
			  || needs_to_live_in_memory (*to_p)))
		/* Don't use the original target if it's already addressable;
		   if its address escapes, and the called function uses the
		   NRV optimization, a conforming program could see *to_p
		   change before the called function returns; see c++/19317.
		   When optimizing, the return_slot pass marks more functions
		   as safe after we have escape info.  */
		use_target = false;
	      else
		use_target = true;

	      if (use_target)
		{
		  CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
		  mark_addressable (*to_p);
		}
	    }
	  break;

	case WITH_SIZE_EXPR:
	  /* Likewise for calls that return an aggregate of non-constant size,
	     since we would not be able to generate a temporary at all.  */
	  if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
	    {
	      *from_p = TREE_OPERAND (*from_p, 0);
	      /* We don't change ret in this case because the
		 WITH_SIZE_EXPR might have been added in
		 gimplify_modify_expr, so returning GS_OK would lead to an
		 infinite loop.  */
	      changed = true;
	    }
	  break;

	/* If we're initializing from a container, push the initialization
	   inside it.  */
	case CLEANUP_POINT_EXPR:
	case BIND_EXPR:
	case STATEMENT_LIST:
	  {
	    tree wrap = *from_p;
	    tree t;

	    ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
				 fb_lvalue);
	    if (ret != GS_ERROR)
	      ret = GS_OK;

	    /* Push the assignment into the wrapper, voiding its value.  */
	    t = voidify_wrapper_expr (wrap, *expr_p);
	    gcc_assert (t == *expr_p);

	    if (want_value)
	      {
		gimplify_and_add (wrap, pre_p);
		*expr_p = unshare_expr (*to_p);
	      }
	    else
	      *expr_p = wrap;
	    return GS_OK;
	  }

	case COMPOUND_LITERAL_EXPR:
	  {
	    tree complit = TREE_OPERAND (*expr_p, 1);
	    tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
	    tree decl = DECL_EXPR_DECL (decl_s);
	    tree init = DECL_INITIAL (decl);

	    /* struct T x = (struct T) { 0, 1, 2 } can be optimized
	       into struct T x = { 0, 1, 2 } if the address of the
	       compound literal has never been taken.  */
	    if (!TREE_ADDRESSABLE (complit)
		&& !TREE_ADDRESSABLE (decl)
		&& init)
	      {
		*expr_p = copy_node (*expr_p);
		TREE_OPERAND (*expr_p, 1) = init;
		return GS_OK;
	      }
	  }
	  /* FALLTHRU */

	default:
	  break;
	}
    }
  while (changed);

  return ret;
}
4305
216820a4
RG
4306
4307/* Return true if T looks like a valid GIMPLE statement. */
4308
4309static bool
4310is_gimple_stmt (tree t)
4311{
4312 const enum tree_code code = TREE_CODE (t);
4313
4314 switch (code)
4315 {
4316 case NOP_EXPR:
4317 /* The only valid NOP_EXPR is the empty statement. */
4318 return IS_EMPTY_STMT (t);
4319
4320 case BIND_EXPR:
4321 case COND_EXPR:
4322 /* These are only valid if they're void. */
4323 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
4324
4325 case SWITCH_EXPR:
4326 case GOTO_EXPR:
4327 case RETURN_EXPR:
4328 case LABEL_EXPR:
4329 case CASE_LABEL_EXPR:
4330 case TRY_CATCH_EXPR:
4331 case TRY_FINALLY_EXPR:
4332 case EH_FILTER_EXPR:
4333 case CATCH_EXPR:
4334 case ASM_EXPR:
4335 case STATEMENT_LIST:
4336 case OMP_PARALLEL:
4337 case OMP_FOR:
74bf76ed 4338 case OMP_SIMD:
c02065fc 4339 case CILK_SIMD:
acf0174b 4340 case OMP_DISTRIBUTE:
216820a4
RG
4341 case OMP_SECTIONS:
4342 case OMP_SECTION:
4343 case OMP_SINGLE:
4344 case OMP_MASTER:
acf0174b 4345 case OMP_TASKGROUP:
216820a4
RG
4346 case OMP_ORDERED:
4347 case OMP_CRITICAL:
4348 case OMP_TASK:
4349 /* These are always void. */
4350 return true;
4351
4352 case CALL_EXPR:
4353 case MODIFY_EXPR:
4354 case PREDICT_EXPR:
4355 /* These are valid regardless of their type. */
4356 return true;
4357
4358 default:
4359 return false;
4360 }
4361}
4362
4363
/* Promote partial stores to COMPLEX variables to total stores.  *EXPR_P is
   a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
   DECL_GIMPLE_REG_P set.

   IMPORTANT NOTE: This promotion is performed by introducing a load of the
   other, unmodified part of the complex object just before the total store.
   As a consequence, if the object is still uninitialized, an undefined value
   will be loaded into a register, which may result in a spurious exception
   if the register is floating-point and the value happens to be a signaling
   NaN for example.  Then the fully-fledged complex operations lowering pass
   followed by a DCE pass are necessary in order to fix things up.

   PRE_P receives the emitted statements; WANT_VALUE makes the stored RHS
   the value of the expression.  Always returns GS_ALL_DONE.  */

static enum gimplify_status
gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
				   bool want_value)
{
  enum tree_code code, ocode;
  tree lhs, rhs, new_rhs, other, realpart, imagpart;

  lhs = TREE_OPERAND (*expr_p, 0);
  rhs = TREE_OPERAND (*expr_p, 1);
  /* CODE is REALPART_EXPR or IMAGPART_EXPR; strip it to reach the
     underlying complex variable.  */
  code = TREE_CODE (lhs);
  lhs = TREE_OPERAND (lhs, 0);

  /* Load the part NOT being stored into a formal temporary; suppress
     uninitialized-use warnings on that artificial load.  */
  ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
  other = build1 (ocode, TREE_TYPE (rhs), lhs);
  TREE_NO_WARNING (other) = 1;
  other = get_formal_tmp_var (other, pre_p);

  realpart = code == REALPART_EXPR ? rhs : other;
  imagpart = code == REALPART_EXPR ? other : rhs;

  /* Build the full complex value: a COMPLEX_CST if both parts are
     constant, otherwise a COMPLEX_EXPR.  */
  if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
    new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
  else
    new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);

  gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
  *expr_p = (want_value) ? rhs : NULL_TREE;

  return GS_ALL_DONE;
}
4406
/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.

      modify_expr
	      : varname '=' rhs
	      | '*' ID '=' rhs

   PRE_P points to the list where side effects that must happen before
       *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
       *EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
       in another expression.  */

static enum gimplify_status
gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		      bool want_value)
{
  tree *from_p = &TREE_OPERAND (*expr_p, 1);
  tree *to_p = &TREE_OPERAND (*expr_p, 0);
  enum gimplify_status ret = GS_UNHANDLED;
  gimple assign;
  location_t loc = EXPR_LOCATION (*expr_p);
  gimple_stmt_iterator gsi;

  gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
	      || TREE_CODE (*expr_p) == INIT_EXPR);

  /* Let the Cilk Plus hooks take over entirely if the RHS wraps a
     _Cilk_spawn.  */
  if (fn_contains_cilk_spawn_p (cfun)
      && lang_hooks.cilkplus.cilk_detect_spawn_and_unwrap (expr_p)
      && !seen_error ())
    return (enum gimplify_status)
      lang_hooks.cilkplus.gimplify_cilk_spawn (expr_p, pre_p, post_p);

  /* Trying to simplify a clobber using normal logic doesn't work,
     so handle it here.  */
  if (TREE_CLOBBER_P (*from_p))
    {
      ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
      gcc_assert (!want_value
		  && (TREE_CODE (*to_p) == VAR_DECL
		      || TREE_CODE (*to_p) == MEM_REF));
      gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
      *expr_p = NULL;
      return GS_ALL_DONE;
    }

  /* Insert pointer conversions required by the middle-end that are not
     required by the frontend.  This fixes middle-end type checking for
     for example gcc.dg/redecl-6.c.  */
  if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
    {
      STRIP_USELESS_TYPE_CONVERSION (*from_p);
      if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
	*from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
    }

  /* See if any simplifications can be done based on what the RHS is.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* For zero sized types only gimplify the left hand side and right hand
     side as statements and throw away the assignment.  Do this after
     gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
     types properly.  */
  if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
    {
      gimplify_stmt (from_p, pre_p);
      gimplify_stmt (to_p, pre_p);
      *expr_p = NULL_TREE;
      return GS_ALL_DONE;
    }

  /* If the value being copied is of variable width, compute the length
     of the copy into a WITH_SIZE_EXPR.   Note that we need to do this
     before gimplifying any of the operands so that we can resolve any
     PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
     the size of the expression to be copied, not of the destination, so
     that is what we must do here.  */
  maybe_with_size_expr (from_p);

  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* As a special case, we have to temporarily allow for assignments
     with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
     a toplevel statement, when gimplifying the GENERIC expression
     MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
     GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.

     Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
     prevent gimplify_expr from trying to create a new temporary for
     foo's LHS, we tell it that it should only gimplify until it
     reaches the CALL_EXPR.  On return from gimplify_expr, the newly
     created GIMPLE_CALL <foo> will be the last statement in *PRE_P
     and all we need to do here is set 'a' to be its LHS.  */
  ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
		       fb_rvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Now see if the above changed *from_p to something we handle specially.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* If we've got a variable sized assignment between two lvalues (i.e. does
     not involve a call), then we can make things a bit more straightforward
     by converting the assignment to memcpy or memset.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree from = TREE_OPERAND (*from_p, 0);
      tree size = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (from) == CONSTRUCTOR)
	return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);

      if (is_gimple_addressable (from))
	{
	  *from_p = from;
	  return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
						 pre_p);
	}
    }

  /* Transform partial stores to non-addressable complex variables into
     total stores.  This allows us to use real instead of virtual operands
     for these variables, which improves optimization.  */
  if ((TREE_CODE (*to_p) == REALPART_EXPR
       || TREE_CODE (*to_p) == IMAGPART_EXPR)
      && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
    return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);

  /* Try to alleviate the effects of the gimplification creating artificial
     temporaries (see for example is_gimple_reg_rhs) on the debug info.
     Borrow the destination's name for the ignored temporary and record
     the destination as its DECL_DEBUG_EXPR.  */
  if (!gimplify_ctxp->into_ssa
      && TREE_CODE (*from_p) == VAR_DECL
      && DECL_IGNORED_P (*from_p)
      && DECL_P (*to_p)
      && !DECL_IGNORED_P (*to_p))
    {
      if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
	DECL_NAME (*from_p)
	  = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
      DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
      SET_DECL_DEBUG_EXPR (*from_p, *to_p);
    }

  /* A volatile destination must not be re-read to produce the value of
     the expression; evaluate the RHS into a temporary instead.  */
  if (want_value && TREE_THIS_VOLATILE (*to_p))
    *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);

  if (TREE_CODE (*from_p) == CALL_EXPR)
    {
      /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
	 instead of a GIMPLE_ASSIGN.  */
      tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
      CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
      STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
      assign = gimple_build_call_from_tree (*from_p);
      gimple_call_set_fntype (assign, TREE_TYPE (fnptrtype));
      notice_special_calls (assign);
      if (!gimple_call_noreturn_p (assign))
	gimple_call_set_lhs (assign, *to_p);
    }
  else
    {
      assign = gimple_build_assign (*to_p, *from_p);
      gimple_set_location (assign, EXPR_LOCATION (*expr_p));
    }

  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
    {
      /* We should have got an SSA name from the start.  */
      gcc_assert (TREE_CODE (*to_p) == SSA_NAME);
    }

  gimplify_seq_add_stmt (pre_p, assign);
  gsi = gsi_last (*pre_p);
  /* Don't fold stmts inside of target construct.  We'll do it
     during omplower pass instead.  */
  struct gimplify_omp_ctx *ctx;
  for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
    if (ctx->region_type == ORT_TARGET)
      break;
  if (ctx == NULL)
    fold_stmt (&gsi);

  if (want_value)
    {
      /* For a volatile LHS, the value is the temporary computed above;
	 otherwise re-use (an unshared copy of) the LHS.  */
      *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
      return GS_OK;
    }
  else
    *expr_p = NULL;

  return GS_ALL_DONE;
}
4611
ad19c4be
EB
/* Gimplify a comparison between two variable-sized objects.  Do this
   with a call to BUILT_IN_MEMCMP.

   The comparison *EXPR_P is rewritten as
       <code> (memcmp (&op0, &op1, sizeof (op0)), 0)
   where <code> is the original comparison code.  Always returns GS_OK.  */

static enum gimplify_status
gimplify_variable_sized_compare (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  tree op0 = TREE_OPERAND (*expr_p, 0);
  tree op1 = TREE_OPERAND (*expr_p, 1);
  tree t, arg, dest, src, expr;

  /* The length argument is the size of OP0's type; resolve any
     PLACEHOLDER_EXPRs in that size against OP0 itself.  */
  arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
  arg = unshare_expr (arg);
  arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
  src = build_fold_addr_expr_loc (loc, op1);
  dest = build_fold_addr_expr_loc (loc, op0);
  t = builtin_decl_implicit (BUILT_IN_MEMCMP);
  t = build_call_expr_loc (loc, t, 3, dest, src, arg);

  /* Compare memcmp's result against zero with the original comparison
     code and result type.  */
  expr
    = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
  SET_EXPR_LOCATION (expr, loc);
  *expr_p = expr;

  return GS_OK;
}
4638
ad19c4be
EB
/* Gimplify a comparison between two aggregate objects of integral scalar
   mode as a comparison between the bitwise equivalent scalar values.

   Both operands are reinterpreted (VIEW_CONVERT_EXPR) as the unsigned
   integer type the language hook provides for the aggregate's machine
   mode, and the comparison is rebuilt on those values.  Returns GS_OK.  */

static enum gimplify_status
gimplify_scalar_mode_aggregate_compare (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  tree op0 = TREE_OPERAND (*expr_p, 0);
  tree op1 = TREE_OPERAND (*expr_p, 1);

  tree type = TREE_TYPE (op0);
  /* Second argument 1 requests the unsigned variant of the mode's type.  */
  tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);

  op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
  op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);

  *expr_p
    = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);

  return GS_OK;
}
4660
ad19c4be
EB
/* Gimplify an expression sequence.  This function gimplifies each
   expression and rewrites the original expression with the last
   expression of the sequence in GIMPLE form.

   PRE_P points to the list where the side effects for all the
       expressions in the sequence will be emitted.

   WANT_VALUE is true when the result of the last COMPOUND_EXPR is used.  */

static enum gimplify_status
gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
{
  tree t = *expr_p;

  /* Walk down the chain of COMPOUND_EXPRs, gimplifying each left-hand
     operand as a statement.  Left-nested COMPOUND_EXPRs recurse with
     want_value == false since their value is discarded.  */
  do
    {
      tree *sub_p = &TREE_OPERAND (t, 0);

      if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
	gimplify_compound_expr (sub_p, pre_p, false);
      else
	gimplify_stmt (sub_p, pre_p);

      t = TREE_OPERAND (t, 1);
    }
  while (TREE_CODE (t) == COMPOUND_EXPR);

  /* T is now the last (non-compound) expression of the sequence.  */
  *expr_p = t;
  if (want_value)
    return GS_OK;
  else
    {
      /* Value unused: gimplify the trailing expression as a statement
	 too, and the whole COMPOUND_EXPR is done.  */
      gimplify_stmt (expr_p, pre_p);
      return GS_ALL_DONE;
    }
}
4697
726a989a
RB
/* Gimplify a SAVE_EXPR node.  EXPR_P points to the expression to
   gimplify.  After gimplification, EXPR_P will point to a new temporary
   that holds the original value of the SAVE_EXPR node.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  enum gimplify_status ret = GS_ALL_DONE;
  tree val;

  gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
  val = TREE_OPERAND (*expr_p, 0);

  /* If the SAVE_EXPR has not been resolved, then evaluate it once.  */
  if (!SAVE_EXPR_RESOLVED_P (*expr_p))
    {
      /* The operand may be a void-valued expression such as SAVE_EXPRs
	 generated by the Java frontend for class initialization.  It is
	 being executed only for its side-effects.  */
      if (TREE_TYPE (val) == void_type_node)
	{
	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			       is_gimple_stmt, fb_none);
	  val = NULL;
	}
      else
	val = get_initialized_tmp_var (val, pre_p, post_p);

      /* Cache the result and mark the node resolved so later references
	 to the same SAVE_EXPR reuse the temporary.  */
      TREE_OPERAND (*expr_p, 0) = val;
      SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
    }

  *expr_p = val;

  return ret;
}
4737
/* Rewrite the ADDR_EXPR node pointed to by EXPR_P

      unary_expr
	      : ...
	      | '&' varname
	      ...

   PRE_P points to the list where side effects that must happen before
	   *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
	   *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr = *expr_p;
  tree op0 = TREE_OPERAND (expr, 0);
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  switch (TREE_CODE (op0))
    {
    case INDIRECT_REF:
    do_indirect_ref:
      /* Check if we are dealing with an expression of the form '&*ptr'.
	 While the front end folds away '&*ptr' into 'ptr', these
	 expressions may be generated internally by the compiler (e.g.,
	 builtins like __builtin_va_end).  */
      /* Caution: the silent array decomposition semantics we allow for
	 ADDR_EXPR means we can't always discard the pair.  */
      /* Gimplification of the ADDR_EXPR operand may drop
	 cv-qualification conversions, so make sure we add them if
	 needed.  */
      {
	tree op00 = TREE_OPERAND (op0, 0);
	tree t_expr = TREE_TYPE (expr);
	tree t_op00 = TREE_TYPE (op00);

        if (!useless_type_conversion_p (t_expr, t_op00))
	  op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
	*expr_p = op00;
	ret = GS_OK;
      }
      break;

    case VIEW_CONVERT_EXPR:
      /* Take the address of our operand and then convert it to the type of
	 this ADDR_EXPR.

	 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
	 all clear.  The impact of this transformation is even less clear.  */

      /* If the operand is a useless conversion, look through it.  Doing so
	 guarantees that the ADDR_EXPR and its operand will remain of the
	 same type.  */
      if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
	op0 = TREE_OPERAND (op0, 0);

      /* &VIEW_CONVERT_EXPR <T> (X)  -->  (T *) &X.  */
      *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
				  build_fold_addr_expr_loc (loc,
							TREE_OPERAND (op0, 0)));
      ret = GS_OK;
      break;

    default:
      /* We use fb_either here because the C frontend sometimes takes
	 the address of a call that returns a struct; see
	 gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
	 the implied temporary explicit.  */

      /* Make the operand addressable.  */
      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
			   is_gimple_addressable, fb_either);
      if (ret == GS_ERROR)
	break;

      /* Then mark it.  Beware that it may not be possible to do so directly
	 if a temporary has been created by the gimplification.  */
      prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);

      op0 = TREE_OPERAND (expr, 0);

      /* For various reasons, the gimplification of the expression
	 may have made a new INDIRECT_REF.  */
      if (TREE_CODE (op0) == INDIRECT_REF)
	goto do_indirect_ref;

      mark_addressable (TREE_OPERAND (expr, 0));

      /* The FEs may end up building ADDR_EXPRs early on a decl with
	 an incomplete type.  Re-build ADDR_EXPRs in canonical form
	 here.  */
      if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
	*expr_p = build_fold_addr_expr (op0);

      /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
      recompute_tree_invariant_for_addr_expr (*expr_p);

      /* If we re-built the ADDR_EXPR add a conversion to the original type
	 if required.  */
      if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
	*expr_p = fold_convert (TREE_TYPE (expr), *expr_p);

      break;
    }

  return ret;
}
4847
/* Gimplify the operands of an ASM_EXPR.  Input operands should be a gimple
   value; output operands should be a gimple lvalue.

   On success a GIMPLE_ASM is appended to PRE_P; asms containing operand
   errors are diagnosed and dropped from the IL (GS_ERROR returned).  */

static enum gimplify_status
gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr;
  int noutputs;
  const char **oconstraints;
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;
  enum gimplify_status ret, tret;
  gimple stmt;
  vec<tree, va_gc> *inputs;
  vec<tree, va_gc> *outputs;
  vec<tree, va_gc> *clobbers;
  vec<tree, va_gc> *labels;
  tree link_next;

  expr = *expr_p;
  noutputs = list_length (ASM_OUTPUTS (expr));
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  inputs = NULL;
  outputs = NULL;
  clobbers = NULL;
  labels = NULL;

  ret = GS_ALL_DONE;
  link_next = NULL_TREE;
  /* First pass: gimplify the outputs, splitting "+" (in/out) operands
     into a pure output plus a matching input.  */
  for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
    {
      bool ok;
      size_t constraint_len;

      link_next = TREE_CHAIN (link);

      oconstraints[i]
	= constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      constraint_len = strlen (constraint);
      if (constraint_len == 0)
	continue;

      ok = parse_output_constraint (&constraint, i, 0, 0,
				    &allows_mem, &allows_reg, &is_inout);
      if (!ok)
	{
	  ret = GS_ERROR;
	  is_inout = false;
	}

      if (!allows_reg && allows_mem)
	mark_addressable (TREE_VALUE (link));

      tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
			    is_inout ? is_gimple_min_lval : is_gimple_lvalue,
			    fb_lvalue | fb_mayfail);
      if (tret == GS_ERROR)
	{
	  error ("invalid lvalue in asm output %d", i);
	  ret = tret;
	}

      vec_safe_push (outputs, link);
      TREE_CHAIN (link) = NULL_TREE;

      if (is_inout)
	{
	  /* An input/output operand.  To give the optimizers more
	     flexibility, split it into separate input and output
	     operands.  */
	  tree input;
	  char buf[10];

	  /* Turn the in/out constraint into an output constraint.  */
	  char *p = xstrdup (constraint);
	  p[0] = '=';
	  TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);

	  /* And add a matching input constraint.  */
	  if (allows_reg)
	    {
	      sprintf (buf, "%d", i);

	      /* If there are multiple alternatives in the constraint,
		 handle each of them individually.  Those that allow register
		 will be replaced with operand number, the others will stay
		 unchanged.  */
	      if (strchr (p, ',') != NULL)
		{
		  size_t len = 0, buflen = strlen (buf);
		  char *beg, *end, *str, *dst;

		  /* First pass over the alternatives: compute the length
		     of the rewritten constraint string.  */
		  for (beg = p + 1;;)
		    {
		      end = strchr (beg, ',');
		      if (end == NULL)
			end = strchr (beg, '\0');
		      if ((size_t) (end - beg) < buflen)
			len += buflen + 1;
		      else
			len += end - beg + 1;
		      if (*end)
			beg = end + 1;
		      else
			break;
		    }

		  /* Second pass: build the rewritten string, replacing
		     register-allowing alternatives with the operand
		     number.  */
		  str = (char *) alloca (len);
		  for (beg = p + 1, dst = str;;)
		    {
		      const char *tem;
		      bool mem_p, reg_p, inout_p;

		      end = strchr (beg, ',');
		      if (end)
			*end = '\0';
		      beg[-1] = '=';
		      tem = beg - 1;
		      parse_output_constraint (&tem, i, 0, 0,
					       &mem_p, &reg_p, &inout_p);
		      if (dst != str)
			*dst++ = ',';
		      if (reg_p)
			{
			  memcpy (dst, buf, buflen);
			  dst += buflen;
			}
		      else
			{
			  if (end)
			    len = end - beg;
			  else
			    len = strlen (beg);
			  memcpy (dst, beg, len);
			  dst += len;
			}
		      if (end)
			beg = end + 1;
		      else
			break;
		    }
		  *dst = '\0';
		  input = build_string (dst - str, str);
		}
	      else
		input = build_string (strlen (buf), buf);
	    }
	  else
	    input = build_string (constraint_len - 1, constraint + 1);

	  free (p);

	  input = build_tree_list (build_tree_list (NULL_TREE, input),
				   unshare_expr (TREE_VALUE (link)));
	  ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
	}
    }

  /* Second pass: gimplify the inputs.  */
  link_next = NULL_TREE;
  for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* If we can't make copies, we can only accept memory.  */
      if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
	{
	  if (allows_mem)
	    allows_reg = 0;
	  else
	    {
	      error ("impossible constraint in %<asm%>");
	      error ("non-memory input %d must stay in memory", i);
	      return GS_ERROR;
	    }
	}

      /* If the operand is a memory input, it should be an lvalue.  */
      if (!allows_reg && allows_mem)
	{
	  tree inputv = TREE_VALUE (link);
	  STRIP_NOPS (inputv);
	  /* Pre/post increment and decrement are not valid memory inputs;
	     poison them so later passes reject the operand.  */
	  if (TREE_CODE (inputv) == PREDECREMENT_EXPR
	      || TREE_CODE (inputv) == PREINCREMENT_EXPR
	      || TREE_CODE (inputv) == POSTDECREMENT_EXPR
	      || TREE_CODE (inputv) == POSTINCREMENT_EXPR)
	    TREE_VALUE (link) = error_mark_node;
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_lvalue, fb_lvalue | fb_mayfail);
	  mark_addressable (TREE_VALUE (link));
	  if (tret == GS_ERROR)
	    {
	      if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
		input_location = EXPR_LOCATION (TREE_VALUE (link));
	      error ("memory input %d is not directly addressable", i);
	      ret = tret;
	    }
	}
      else
	{
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_asm_val, fb_rvalue);
	  if (tret == GS_ERROR)
	    ret = tret;
	}

      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (inputs, link);
    }

  /* Collect the clobbers and the goto-labels into vectors for the
     GIMPLE_ASM tuple.  */
  link_next = NULL_TREE;
  for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (clobbers, link);
    }

  link_next = NULL_TREE;
  for (link = ASM_LABELS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (labels, link);
    }

  /* Do not add ASMs with errors to the gimple IL stream.  */
  if (ret != GS_ERROR)
    {
      stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
				   inputs, outputs, clobbers, labels);

      gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
      gimple_asm_set_input (stmt, ASM_INPUT_P (expr));

      gimplify_seq_add_stmt (pre_p, stmt);
    }

  return ret;
}
5094
/* Gimplify a CLEANUP_POINT_EXPR.  Currently this works by adding
   GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
   gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
   return to this function.

   FIXME should we complexify the prequeue handling instead?  Or use flags
   for all the cleanups and let the optimizer tighten them up?  The current
   code seems pretty fragile; it will break on a cleanup within any
   non-conditional nesting.  But any such nesting would be broken, anyway;
   we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
   and continues out of it.  We can do that at the RTL level, though, so
   having an optimizer to tighten up try/finally regions would be a Good
   Thing.  */

static enum gimplify_status
gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
{
  gimple_stmt_iterator iter;
  gimple_seq body_sequence = NULL;

  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  /* We only care about the number of conditions between the innermost
     CLEANUP_POINT_EXPR and the cleanup.  So save and reset the count and
     any cleanups collected outside the CLEANUP_POINT_EXPR.  */
  int old_conds = gimplify_ctxp->conditions;
  gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
  bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
  gimplify_ctxp->conditions = 0;
  gimplify_ctxp->conditional_cleanups = NULL;
  gimplify_ctxp->in_cleanup_point_expr = true;

  gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);

  gimplify_ctxp->conditions = old_conds;
  gimplify_ctxp->conditional_cleanups = old_cleanups;
  gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;

  /* Walk the gimplified body, turning each GIMPLE_WITH_CLEANUP_EXPR into
     a GIMPLE_TRY whose eval part is the rest of the sequence.  */
  for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
    {
      gimple wce = gsi_stmt (iter);

      if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
	{
	  if (gsi_one_before_end_p (iter))
	    {
	      /* The WCE is the last statement: nothing can follow it, so
		 no GIMPLE_TRY is needed; just emit the cleanup inline
		 (unless it is EH-only, in which case it is dropped).
		 Note that gsi_insert_seq_before and gsi_remove do not
		 scan operands, unlike some other sequence mutators.  */
	      if (!gimple_wce_cleanup_eh_only (wce))
		gsi_insert_seq_before_without_update (&iter,
						      gimple_wce_cleanup (wce),
						      GSI_SAME_STMT);
	      gsi_remove (&iter, true);
	      break;
	    }
	  else
	    {
	      gimple_statement_try *gtry;
	      gimple_seq seq;
	      enum gimple_try_flags kind;

	      if (gimple_wce_cleanup_eh_only (wce))
		kind = GIMPLE_TRY_CATCH;
	      else
		kind = GIMPLE_TRY_FINALLY;
	      seq = gsi_split_seq_after (iter);

	      gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
	      /* Do not use gsi_replace here, as it may scan operands.
		 We want to do a simple structural modification only.  */
	      gsi_set_stmt (&iter, gtry);
	      /* Continue scanning inside the new try's eval sequence.  */
	      iter = gsi_start (gtry->eval);
	    }
	}
      else
	gsi_next (&iter);
    }

  gimplify_seq_add_seq (pre_p, body_sequence);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}
5185
/* Insert a cleanup marker for gimplify_cleanup_point_expr.  CLEANUP
   is the cleanup action required.  EH_ONLY is true if the cleanup should
   only be executed if an exception is thrown, not on normal exit.

   VAR is the temporary the cleanup protects; it is only used to suppress
   uninitialized-use warnings in the conditional case.  */

static void
gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
{
  gimple wce;
  gimple_seq cleanup_stmts = NULL;

  /* Errors can result in improperly nested cleanups.  Which results in
     confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
  if (seen_error ())
    return;

  if (gimple_conditional_context ())
    {
      /* If we're in a conditional context, this is more complex.  We only
	 want to run the cleanup if we actually ran the initialization that
	 necessitates it, but we want to run it after the end of the
	 conditional context.  So we wrap the try/finally around the
	 condition and use a flag to determine whether or not to actually
	 run the destructor.  Thus

	   test ? f(A()) : 0

	 becomes (approximately)

	   flag = 0;
	   try {
	     if (test) { A::A(temp); flag = 1; val = f(temp); }
	     else { val = 0; }
	   } finally {
	     if (flag) A::~A(temp);
	   }
	   val
      */
      tree flag = create_tmp_var (boolean_type_node, "cleanup");
      gimple ffalse = gimple_build_assign (flag, boolean_false_node);
      gimple ftrue = gimple_build_assign (flag, boolean_true_node);

      /* Guard the cleanup with the flag.  */
      cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);

      /* "flag = false" and the guarded cleanup go to the enclosing
	 cleanup point; "flag = true" is emitted here, on the path that
	 actually runs the initialization.  */
      gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
      gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
      gimplify_seq_add_stmt (pre_p, ftrue);

      /* Because of this manipulation, and the EH edges that jump
	 threading cannot redirect, the temporary (VAR) will appear
	 to be used uninitialized.  Don't warn.  */
      TREE_NO_WARNING (var) = 1;
    }
  else
    {
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);
      gimple_wce_set_cleanup_eh_only (wce, eh_only);
      gimplify_seq_add_stmt (pre_p, wce);
    }
}
5248
/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR.

   Evaluates the initializer into the TARGET_EXPR_SLOT temporary, pushes
   any required cleanup (plus, when stack reuse is enabled, a clobber for
   the temporary going out of scope), and replaces *EXPR_P with the
   temporary.  */

static enum gimplify_status
gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree targ = *expr_p;
  tree temp = TARGET_EXPR_SLOT (targ);
  tree init = TARGET_EXPR_INITIAL (targ);
  enum gimplify_status ret;

  if (init)
    {
      tree cleanup = NULL_TREE;

      /* TARGET_EXPR temps aren't part of the enclosing block, so add it
	 to the temps list.  Handle also variable length TARGET_EXPRs.  */
      if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
	    gimplify_type_sizes (TREE_TYPE (temp), pre_p);
	  gimplify_vla_decl (temp, pre_p);
	}
      else
	gimple_add_tmp_var (temp);

      /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
	 expression is supposed to initialize the slot.  */
      if (VOID_TYPE_P (TREE_TYPE (init)))
	ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
      else
	{
	  /* Otherwise build an explicit initialization of the slot and
	     gimplify that; the INIT_EXPR shell can be freed afterwards.  */
	  tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
	  init = init_expr;
	  ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
	  init = NULL;
	  ggc_free (init_expr);
	}
      if (ret == GS_ERROR)
	{
	  /* PR c++/28266 Make sure this is expanded only once. */
	  TARGET_EXPR_INITIAL (targ) = NULL_TREE;
	  return GS_ERROR;
	}
      if (init)
	gimplify_and_add (init, pre_p);

      /* If needed, push the cleanup for the temp.  EH-only cleanups are
	 pushed immediately; normal cleanups are combined with the clobber
	 below first.  */
      if (TARGET_EXPR_CLEANUP (targ))
	{
	  if (CLEANUP_EH_ONLY (targ))
	    gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
				 CLEANUP_EH_ONLY (targ), pre_p);
	  else
	    cleanup = TARGET_EXPR_CLEANUP (targ);
	}

      /* Add a clobber for the temporary going out of scope, like
	 gimplify_bind_expr.  */
      if (gimplify_ctxp->in_cleanup_point_expr
	  && needs_to_live_in_memory (temp)
	  && flag_stack_reuse == SR_ALL)
	{
	  tree clobber = build_constructor (TREE_TYPE (temp),
					    NULL);
	  TREE_THIS_VOLATILE (clobber) = true;
	  clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
	  if (cleanup)
	    cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
			      clobber);
	  else
	    cleanup = clobber;
	}

      if (cleanup)
	gimple_push_cleanup (temp, cleanup, false, pre_p);

      /* Only expand this once.  */
      TREE_OPERAND (targ, 3) = init;
      TARGET_EXPR_INITIAL (targ) = NULL_TREE;
    }
  else
    /* We should have expanded this before.  */
    gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));

  *expr_p = temp;
  return GS_OK;
}
5336
5337/* Gimplification of expression trees. */
5338
726a989a
RB
5339/* Gimplify an expression which appears at statement context. The
5340 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5341 NULL, a new sequence is allocated.
6de9cd9a 5342
726a989a
RB
5343 Return true if we actually added a statement to the queue. */
5344
5345bool
5346gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6de9cd9a 5347{
726a989a 5348 gimple_seq_node last;
6de9cd9a 5349
726a989a
RB
5350 last = gimple_seq_last (*seq_p);
5351 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5352 return last != gimple_seq_last (*seq_p);
6de9cd9a
DN
5353}
/* Add FIRSTPRIVATE entries for DECL in the OpenMP parallels surrounding
   CTX.  If entries already exist, force them to be some flavor of private.
   If there is no enclosing parallel, do nothing.  */

void
omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
{
  splay_tree_node n;

  /* Only actual declarations can carry data-sharing attributes.  */
  if (decl == NULL || !DECL_P (decl))
    return;

  /* Walk outward through the enclosing OpenMP contexts.  */
  do
    {
      n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
      if (n != NULL)
	{
	  if (n->value & GOVD_SHARED)
	    /* Demote an existing SHARED entry to FIRSTPRIVATE, keeping
	       only its SEEN bit.  */
	    n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
	  else if (n->value & GOVD_MAP)
	    n->value |= GOVD_MAP_TO_ONLY;
	  else
	    /* Already some flavor of private; stop walking.  */
	    return;
	}
      else if (ctx->region_type == ORT_TARGET)
	/* In a target region the size becomes a to-only mapping.  */
	omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
      else if (ctx->region_type != ORT_WORKSHARE
	       && ctx->region_type != ORT_SIMD
	       && ctx->region_type != ORT_TARGET_DATA)
	/* Worksharing, simd and target-data regions are skipped; other
	   regions get a fresh FIRSTPRIVATE entry.  */
	omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);

      ctx = ctx->outer_context;
    }
  while (ctx);
}
/* Similarly for each of the type sizes of TYPE: recursively firstprivatize
   every size/bound/offset expression reachable from TYPE so those values
   are available inside the OpenMP region.  */

static void
omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
{
  if (type == NULL || type == error_mark_node)
    return;
  type = TYPE_MAIN_VARIANT (type);

  /* Visit each main variant at most once per context; the set doubles
     as the recursion guard for self-referential types.  */
  if (pointer_set_insert (ctx->privatized_types, type))
    return;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      /* Scalar types: their bounds may be variable (e.g. VLA domains).  */
      omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
      omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
      break;

    case ARRAY_TYPE:
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
      omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree field;
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL)
	    {
	      omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
	      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
	    }
      }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
      break;

    default:
      break;
    }

  /* Finally the overall size expressions of TYPE itself, plus whatever
     the front end wants to add.  */
  omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
  omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
  lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
}
/* Add an entry for DECL in the OpenMP context CTX with FLAGS.  Handles
   merging with a pre-existing entry, and the extra bookkeeping needed for
   variable-sized decls and by-reference privatization.  */

static void
omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
{
  splay_tree_node n;
  unsigned int nflags;
  tree t;

  if (error_operand_p (decl))
    return;

  /* Never elide decls whose type has TREE_ADDRESSABLE set.  This means
     there are constructors involved somewhere.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (decl))
      || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
    flags |= GOVD_SEEN;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL && n->value != GOVD_ALIGNED)
    {
      /* We shouldn't be re-adding the decl with the same data
	 sharing class.  */
      gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
      /* The only combination of data sharing classes we should see is
	 FIRSTPRIVATE and LASTPRIVATE.  */
      nflags = n->value | flags;
      gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
		  == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE)
		  || (flags & GOVD_DATA_SHARE_CLASS) == 0);
      n->value = nflags;
      return;
    }

  /* When adding a variable-sized variable, we have to handle all sorts
     of additional bits of data: the pointer replacement variable, and
     the parameters of the type.  */
  if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      /* Add the pointer replacement variable as PRIVATE if the variable
	 replacement is private, else FIRSTPRIVATE since we'll need the
	 address of the original variable either for SHARED, or for the
	 copy into or out of the context.  */
      if (!(flags & GOVD_LOCAL))
	{
	  nflags = flags & GOVD_MAP
		   ? GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT
		   : flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
	  nflags |= flags & GOVD_SEEN;
	  /* The VLA's DECL_VALUE_EXPR is *ptr; recurse on the pointer.  */
	  t = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  omp_add_variable (ctx, t, nflags);
	}

      /* Add all of the variable and type parameters (which should have
	 been gimplified to a formal temporary) as FIRSTPRIVATE.  */
      omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
      omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* The variable-sized variable itself is never SHARED, only some form
	 of PRIVATE.  The sharing would take place via the pointer variable
	 which we remapped above.  */
      if (flags & GOVD_SHARED)
	flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
		| (flags & (GOVD_SEEN | GOVD_EXPLICIT));

      /* We're going to make use of the TYPE_SIZE_UNIT at least in the
	 alloca statement we generate for the variable, so make sure it
	 is available.  This isn't automatically needed for the SHARED
	 case, since we won't be allocating local storage then.
	 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
	 in this case omp_notice_variable will be called later
	 on when it is gimplified.  */
      else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
	       && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
	omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
    }
  else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
	   && lang_hooks.decls.omp_privatize_by_reference (decl))
    {
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* Similar to the direct variable sized case above, we'll need the
	 size of references being privatized.  */
      if ((flags & GOVD_SHARED) == 0)
	{
	  t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  if (TREE_CODE (t) != INTEGER_CST)
	    omp_notice_variable (ctx, t, true);
	}
    }

  /* Merge into an existing GOVD_ALIGNED entry, otherwise insert anew.  */
  if (n != NULL)
    n->value |= flags;
  else
    splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
}
/* Notice a threadprivate variable DECL used in OpenMP context CTX.
   This just prints out diagnostics about threadprivate variable uses
   in untied tasks and target regions.  If DECL2 is non-NULL, prevent
   this warning on that variable.  Always returns false (the decl is
   never remapped on this path).  */

static bool
omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
				   tree decl2)
{
  splay_tree_node n;
  struct gimplify_omp_ctx *octx;

  /* Diagnose a threadprivate decl inside any enclosing target region,
     inserting a 0-valued entry so each decl is reported only once.  */
  for (octx = ctx; octx; octx = octx->outer_context)
    if (octx->region_type == ORT_TARGET)
      {
	n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
	if (n == NULL)
	  {
	    error ("threadprivate variable %qE used in target region",
		   DECL_NAME (decl));
	    error_at (octx->location, "enclosing target region");
	    splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
	  }
	if (decl2)
	  splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
      }

  /* The untied-task diagnostic applies only to the innermost context.  */
  if (ctx->region_type != ORT_UNTIED_TASK)
    return false;
  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n == NULL)
    {
      error ("threadprivate variable %qE used in untied task",
	     DECL_NAME (decl));
      error_at (ctx->location, "enclosing task");
      splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
    }
  if (decl2)
    splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
  return false;
}
/* Record the fact that DECL was used within the OpenMP context CTX.
   IN_CODE is true when real code uses DECL, and false when we should
   merely emit default(none) errors.  Return true if DECL is going to
   be remapped and thus DECL shouldn't be gimplified into its
   DECL_VALUE_EXPR (if any).  */

static bool
omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
{
  splay_tree_node n;
  unsigned flags = in_code ? GOVD_SEEN : 0;
  bool ret = false, shared;

  if (error_operand_p (decl))
    return false;

  /* Threadprivate variables are predetermined.  */
  if (is_global_var (decl))
    {
      if (DECL_THREAD_LOCAL_P (decl))
	return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);

      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree value = get_base_address (DECL_VALUE_EXPR (decl));

	  if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
	    return omp_notice_threadprivate_variable (ctx, decl, value);
	}
    }

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (ctx->region_type == ORT_TARGET)
    {
      /* In a target region every referenced variable must be mappable;
	 add an implicit map clause entry (or just merge the SEEN bit).  */
      if (n == NULL)
	{
	  if (!lang_hooks.types.omp_mappable_type (TREE_TYPE (decl)))
	    {
	      error ("%qD referenced in target region does not have "
		     "a mappable type", decl);
	      omp_add_variable (ctx, decl, GOVD_MAP | GOVD_EXPLICIT | flags);
	    }
	  else
	    omp_add_variable (ctx, decl, GOVD_MAP | flags);
	}
      else
	n->value |= flags;
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
      goto do_outer;
    }

  if (n == NULL)
    {
      enum omp_clause_default_kind default_kind, kind;
      struct gimplify_omp_ctx *octx;

      /* These region types don't determine sharing themselves; defer
	 to the enclosing context.  */
      if (ctx->region_type == ORT_WORKSHARE
	  || ctx->region_type == ORT_SIMD
	  || ctx->region_type == ORT_TARGET_DATA)
	goto do_outer;

      /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
	 remapped firstprivate instead of shared.  To some extent this is
	 addressed in omp_firstprivatize_type_sizes, but not effectively.  */
      default_kind = ctx->default_kind;
      kind = lang_hooks.decls.omp_predetermined_sharing (decl);
      if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
	default_kind = kind;

      switch (default_kind)
	{
	case OMP_CLAUSE_DEFAULT_NONE:
	  /* default(none): diagnose, then fall through and treat the
	     decl as shared so gimplification can continue.  */
	  if ((ctx->region_type & ORT_TASK) != 0)
	    {
	      error ("%qE not specified in enclosing task",
		     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
	      error_at (ctx->location, "enclosing task");
	    }
	  else if (ctx->region_type == ORT_TEAMS)
	    {
	      error ("%qE not specified in enclosing teams construct",
		     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
	      error_at (ctx->location, "enclosing teams construct");
	    }
	  else
	    {
	      error ("%qE not specified in enclosing parallel",
		     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
	      error_at (ctx->location, "enclosing parallel");
	    }
	  /* FALLTHRU */
	case OMP_CLAUSE_DEFAULT_SHARED:
	  flags |= GOVD_SHARED;
	  break;
	case OMP_CLAUSE_DEFAULT_PRIVATE:
	  flags |= GOVD_PRIVATE;
	  break;
	case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
	  flags |= GOVD_FIRSTPRIVATE;
	  break;
	case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
	  /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
	  gcc_assert ((ctx->region_type & ORT_TASK) != 0);
	  if (ctx->outer_context)
	    omp_notice_variable (ctx->outer_context, decl, in_code);
	  /* Scan outward: if the decl is non-shared in any enclosing
	     context up to the innermost parallel/teams, it must be
	     firstprivate in this task.  */
	  for (octx = ctx->outer_context; octx; octx = octx->outer_context)
	    {
	      splay_tree_node n2;

	      if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0)
		continue;
	      n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
		{
		  flags |= GOVD_FIRSTPRIVATE;
		  break;
		}
	      if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
		break;
	    }
	  if (flags & GOVD_FIRSTPRIVATE)
	    break;
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!is_global_var (decl)
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    {
	      flags |= GOVD_FIRSTPRIVATE;
	      break;
	    }
	  flags |= GOVD_SHARED;
	  break;
	default:
	  gcc_unreachable ();
	}

      if ((flags & GOVD_PRIVATE)
	  && lang_hooks.decls.omp_private_outer_ref (decl))
	flags |= GOVD_PRIVATE_OUTER_REF;

      omp_add_variable (ctx, decl, flags);

      shared = (flags & GOVD_SHARED) != 0;
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
      goto do_outer;
    }

  /* First real use of a variable-sized decl: mark its replacement
     pointer variable (from DECL_VALUE_EXPR) as seen too.  */
  if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
      && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
      && DECL_SIZE (decl)
      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      splay_tree_node n2;
      tree t = DECL_VALUE_EXPR (decl);
      gcc_assert (TREE_CODE (t) == INDIRECT_REF);
      t = TREE_OPERAND (t, 0);
      gcc_assert (DECL_P (t));
      n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
      n2->value |= GOVD_SEEN;
    }

  shared = ((flags | n->value) & GOVD_SHARED) != 0;
  ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);

  /* If nothing changed, there's nothing left to do.  */
  if ((n->value & flags) == flags)
    return ret;
  flags |= n->value;
  n->value = flags;

 do_outer:
  /* If the variable is private in the current context, then we don't
     need to propagate anything to an outer context.  */
  if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
    return ret;
  if (ctx->outer_context
      && omp_notice_variable (ctx->outer_context, decl, in_code))
    return true;
  return ret;
}
/* Verify that DECL is private within CTX.  If there's specific information
   to the contrary in the innermost scope, generate an error.  SIMD is true
   when checking an OpenMP simd construct's iteration variable, which
   enables the linear/lastprivate/private diagnostics.  Returns true when
   CTX is the context that determines DECL's privatization.  */

static bool
omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, bool simd)
{
  splay_tree_node n;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL)
    {
      if (n->value & GOVD_SHARED)
	{
	  if (ctx == gimplify_omp_ctxp)
	    {
	      /* Innermost context: an iteration variable may not be
		 shared; diagnose and force it private so gimplification
		 can continue.  */
	      if (simd)
		error ("iteration variable %qE is predetermined linear",
		       DECL_NAME (decl));
	      else
		error ("iteration variable %qE should be private",
		       DECL_NAME (decl));
	      n->value = GOVD_PRIVATE;
	      return true;
	    }
	  else
	    return false;
	}
      else if ((n->value & GOVD_EXPLICIT) != 0
	       && (ctx == gimplify_omp_ctxp
		   || (ctx->region_type == ORT_COMBINED_PARALLEL
		       && gimplify_omp_ctxp->outer_context == ctx)))
	{
	  /* Diagnose explicit clauses that conflict with the
	     predetermined sharing of an iteration variable.  */
	  if ((n->value & GOVD_FIRSTPRIVATE) != 0)
	    error ("iteration variable %qE should not be firstprivate",
		   DECL_NAME (decl));
	  else if ((n->value & GOVD_REDUCTION) != 0)
	    error ("iteration variable %qE should not be reduction",
		   DECL_NAME (decl));
	  else if (simd && (n->value & GOVD_LASTPRIVATE) != 0)
	    error ("iteration variable %qE should not be lastprivate",
		   DECL_NAME (decl));
	  else if (simd && (n->value & GOVD_PRIVATE) != 0)
	    error ("iteration variable %qE should not be private",
		   DECL_NAME (decl));
	  else if (simd && (n->value & GOVD_LINEAR) != 0)
	    error ("iteration variable %qE is predetermined linear",
		   DECL_NAME (decl));
	}
      return (ctx == gimplify_omp_ctxp
	      || (ctx->region_type == ORT_COMBINED_PARALLEL
		  && gimplify_omp_ctxp->outer_context == ctx));
    }

  /* No entry here: only worksharing/simd regions delegate the question
     to their enclosing context.  */
  if (ctx->region_type != ORT_WORKSHARE
      && ctx->region_type != ORT_SIMD)
    return false;
  else if (ctx->outer_context)
    return omp_is_private (ctx->outer_context, decl, simd);
  return false;
}
/* Return true if DECL is private within a parallel region
   that binds to the current construct's context or in parallel
   region's REDUCTION clause.  */

static bool
omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
{
  splay_tree_node n;

  /* Walk outward from CTX, stopping at the first context that records
     DECL, skipping target/target-data contexts along the way.  */
  do
    {
      ctx = ctx->outer_context;
      if (ctx == NULL)
	/* No enclosing region mentions DECL: a function-local decl is
	   effectively private.  */
	return !(is_global_var (decl)
		 /* References might be private, but might be shared too.  */
		 || lang_hooks.decls.omp_privatize_by_reference (decl));

      if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
	continue;

      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
      if (n != NULL)
	return (n->value & GOVD_SHARED) == 0;
    }
  while (ctx->region_type == ORT_WORKSHARE
	 || ctx->region_type == ORT_SIMD);
  return false;
}
/* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
   and previous omp contexts.  PRE_P receives any statements produced by
   gimplifying clause operands; REGION_TYPE selects the kind of context
   pushed (parallel, task, target, ...).  Clauses whose operands fail to
   gimplify are removed from the list.  */

static void
gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
			   enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *ctx, *outer_ctx;
  tree c;

  ctx = new_omp_context (region_type);
  outer_ctx = ctx->outer_context;

  while ((c = *list_p) != NULL)
    {
      bool remove = false;
      bool notice_outer = true;
      /* Clause name for the "private in outer context" diagnostic, or
	 NULL when that check does not apply to this clause kind.  */
      const char *check_non_private = NULL;
      unsigned int flags;
      tree decl;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  flags = GOVD_PRIVATE | GOVD_EXPLICIT;
	  if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
	    {
	      flags |= GOVD_PRIVATE_OUTER_REF;
	      OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
	    }
	  else
	    notice_outer = false;
	  goto do_add;
	case OMP_CLAUSE_SHARED:
	  flags = GOVD_SHARED | GOVD_EXPLICIT;
	  goto do_add;
	case OMP_CLAUSE_FIRSTPRIVATE:
	  flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
	  check_non_private = "firstprivate";
	  goto do_add;
	case OMP_CLAUSE_LASTPRIVATE:
	  flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
	  check_non_private = "lastprivate";
	  goto do_add;
	case OMP_CLAUSE_REDUCTION:
	  flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
	  check_non_private = "reduction";
	  goto do_add;
	case OMP_CLAUSE_LINEAR:
	  if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  flags = GOVD_LINEAR | GOVD_EXPLICIT;
	  goto do_add;

	case OMP_CLAUSE_MAP:
	  if (OMP_CLAUSE_SIZE (c)
	      && gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
				NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  decl = OMP_CLAUSE_DECL (c);
	  /* Non-decl map operands (e.g. array sections) are gimplified
	     as lvalues but get no context entry.  */
	  if (!DECL_P (decl))
	    {
	      if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
				 NULL, is_gimple_lvalue, fb_lvalue)
		  == GS_ERROR)
		{
		  remove = true;
		  break;
		}
	      break;
	    }
	  flags = GOVD_MAP | GOVD_EXPLICIT;
	  goto do_add;

	case OMP_CLAUSE_DEPEND:
	  if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
	    {
	      /* Evaluate the COMPOUND_EXPR's side effect, then keep only
		 its value operand as the dependence address.  */
	      gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
			     NULL, is_gimple_val, fb_rvalue);
	      OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
	    }
	  if (error_operand_p (OMP_CLAUSE_DECL (c)))
	    {
	      remove = true;
	      break;
	    }
	  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
	  if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	  if (OMP_CLAUSE_SIZE (c)
	      && gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
				NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  if (!DECL_P (decl))
	    {
	      if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
				 NULL, is_gimple_lvalue, fb_lvalue)
		  == GS_ERROR)
		{
		  remove = true;
		  break;
		}
	      break;
	    }
	  goto do_notice;

	do_add:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  omp_add_variable (ctx, decl, flags);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      /* Gimplify the reduction init and merge expressions inside
		 the new context, each in its own gimplify context, and
		 stash the results in the clause's GIMPLE sequences.  */
	      omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
				GOVD_LOCAL | GOVD_SEEN);
	      gimplify_omp_ctxp = ctx;
	      push_gimplify_context ();

	      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;

	      gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
				&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
	      push_gimplify_context ();
	      gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
				&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
	      OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
	      OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		   && OMP_CLAUSE_LASTPRIVATE_STMT (c))
	    {
	      gimplify_omp_ctxp = ctx;
	      push_gimplify_context ();
	      /* Wrap a bare statement in a BIND_EXPR so temporaries it
		 creates have somewhere to live.  */
	      if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
		{
		  tree bind = build3 (BIND_EXPR, void_type_node, NULL,
				      NULL, NULL);
		  TREE_SIDE_EFFECTS (bind) = 1;
		  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
		  OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
		}
	      gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
				&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
	      OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
	  if (notice_outer)
	    goto do_notice;
	  break;

	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	do_notice:
	  if (outer_ctx)
	    omp_notice_variable (outer_ctx, decl, true);
	  if (check_non_private
	      && region_type == ORT_WORKSHARE
	      && omp_check_private (ctx, decl))
	    {
	      error ("%s variable %qE is private in outer context",
		     check_non_private, DECL_NAME (decl));
	      remove = true;
	    }
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	  OMP_CLAUSE_OPERAND (c, 0)
	    = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
	  /* Fall through.  */

	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEVICE:
	  if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    remove = true;
	  break;

	/* Clauses with no operands to gimplify.  */
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  if (!is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
	    omp_add_variable (ctx, decl, GOVD_ALIGNED);
	  break;

	case OMP_CLAUSE_DEFAULT:
	  ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
	  break;

	default:
	  gcc_unreachable ();
	}

      /* Unlink removed clauses, otherwise advance to the next one.  */
      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  gimplify_omp_ctxp = ctx;
}
/* For all variables that were not actually used within the context,
   remove PRIVATE, SHARED, and FIRSTPRIVATE clauses.

   This is the splay_tree_foreach callback: N is one (decl, flags) entry
   from the context's variable table and DATA points to the clause list.
   For implicitly-determined variables that were actually seen, build the
   corresponding explicit clause and prepend it to *LIST_P.  */

static int
gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
{
  tree *list_p = (tree *) data;
  tree decl = (tree) n->key;
  unsigned flags = n->value;
  enum omp_clause_code code;
  tree clause;
  bool private_debug;

  /* Explicit clauses already exist; purely local decls need none.  */
  if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
    return 0;
  if ((flags & GOVD_SEEN) == 0)
    return 0;
  if (flags & GOVD_DEBUG_PRIVATE)
    {
      gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
      private_debug = true;
    }
  else if (flags & GOVD_MAP)
    private_debug = false;
  else
    private_debug
      = lang_hooks.decls.omp_private_debug_clause (decl,
						   !!(flags & GOVD_SHARED));
  /* Translate the GOVD_* sharing class into a clause code.  */
  if (private_debug)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_MAP)
    code = OMP_CLAUSE_MAP;
  else if (flags & GOVD_SHARED)
    {
      if (is_global_var (decl))
	{
	  /* A global is only worth an explicit SHARED clause if some
	     enclosing context privatizes it in some way.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	  while (ctx != NULL)
	    {
	      splay_tree_node on
		= splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
				      | GOVD_PRIVATE | GOVD_REDUCTION
				      | GOVD_LINEAR)) != 0)
		break;
	      ctx = ctx->outer_context;
	    }
	  if (ctx == NULL)
	    return 0;
	}
      code = OMP_CLAUSE_SHARED;
    }
  else if (flags & GOVD_PRIVATE)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_FIRSTPRIVATE)
    code = OMP_CLAUSE_FIRSTPRIVATE;
  else if (flags & GOVD_LASTPRIVATE)
    code = OMP_CLAUSE_LASTPRIVATE;
  else if (flags & GOVD_ALIGNED)
    return 0;
  else
    gcc_unreachable ();

  clause = build_omp_clause (input_location, code);
  OMP_CLAUSE_DECL (clause) = decl;
  OMP_CLAUSE_CHAIN (clause) = *list_p;
  if (private_debug)
    OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
  else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
    OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
  else if (code == OMP_CLAUSE_MAP)
    {
      OMP_CLAUSE_MAP_KIND (clause) = flags & GOVD_MAP_TO_ONLY
				     ? OMP_CLAUSE_MAP_TO
				     : OMP_CLAUSE_MAP_TOFROM;
      if (DECL_SIZE (decl)
	  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	{
	  /* Variable-sized decl: map the pointed-to storage (via the
	     replacement pointer from DECL_VALUE_EXPR) and chain an extra
	     POINTER map clause for the decl itself.  */
	  tree decl2 = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	  decl2 = TREE_OPERAND (decl2, 0);
	  gcc_assert (DECL_P (decl2));
	  tree mem = build_simple_mem_ref (decl2);
	  OMP_CLAUSE_DECL (clause) = mem;
	  OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	  if (gimplify_omp_ctxp->outer_context)
	    {
	      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	      omp_notice_variable (ctx, decl2, true);
	      omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
	    }
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  OMP_CLAUSE_MAP_KIND (nc) = OMP_CLAUSE_MAP_POINTER;
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
    }
  *list_p = clause;
  /* Let the front end finalize the new clause.  */
  lang_hooks.decls.omp_finish_clause (clause);

  return 0;
}
6230
/* Post-process the clause list *LIST_P after the body of an OpenMP
   construct has been gimplified.  Using the data-sharing information
   collected in the current gimplify_omp_ctx (gimplify_omp_ctxp), remove
   clauses for variables that were never referenced (no GOVD_SEEN bit),
   rewrite others to match what the region actually needs, append any
   implicit data-sharing clauses, then pop and free the context.  */

static void
gimplify_adjust_omp_clauses (tree *list_p)
{
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  tree c, decl;

  while ((c = *list_p) != NULL)
    {
      splay_tree_node n;
      bool remove = false;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  /* Drop the clause if the variable was never seen in the region.  */
	  remove = !(n->value & GOVD_SEEN);
	  if (! remove)
	    {
	      bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
	      if ((n->value & GOVD_DEBUG_PRIVATE)
		  || lang_hooks.decls.omp_private_debug_clause (decl, shared))
		{
		  /* Rewrite as a debug-only PRIVATE clause; the assert
		     checks the two legitimate ways of reaching here.  */
		  gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
			      || ((n->value & GOVD_DATA_SHARE_CLASS)
				  == GOVD_PRIVATE));
		  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
		  OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
		}
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		  && ctx->outer_context
		  && !(OMP_CLAUSE_LINEAR_NO_COPYIN (c)
		       && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		  && !is_global_var (decl))
		{
		  /* A linear variable with copy-in or copy-out also needs
		     a data-sharing entry in the enclosing context.  */
		  if (ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
		    {
		      n = splay_tree_lookup (ctx->outer_context->variables,
					     (splay_tree_key) decl);
		      if (n == NULL
			  || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
			{
			  int flags = OMP_CLAUSE_LINEAR_NO_COPYIN (c)
				      ? GOVD_LASTPRIVATE : GOVD_SHARED;
			  if (n == NULL)
			    omp_add_variable (ctx->outer_context, decl,
					      flags | GOVD_SEEN);
			  else
			    n->value |= flags | GOVD_SEEN;
			}
		    }
		  else
		    omp_notice_variable (ctx->outer_context, decl, true);
		}
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
	     accurately reflect the presence of a FIRSTPRIVATE clause.  */
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	    = (n->value & GOVD_FIRSTPRIVATE) != 0;
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_global_var (decl))
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      remove = n == NULL || !(n->value & GOVD_SEEN);
	      if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
		{
		  struct gimplify_omp_ctx *octx;
		  if (n != NULL
		      && (n->value & (GOVD_DATA_SHARE_CLASS
				      & ~GOVD_FIRSTPRIVATE)))
		    remove = true;
		  else
		    /* Walk outward to see how the pointer is shared.  */
		    for (octx = ctx->outer_context; octx;
			 octx = octx->outer_context)
		      {
			n = splay_tree_lookup (octx->variables,
					       (splay_tree_key) decl);
			if (n == NULL)
			  continue;
			if (n->value & GOVD_LOCAL)
			  break;
			/* We have to avoid assigning a shared variable
			   to itself when trying to add
			   __builtin_assume_aligned.  */
			if (n->value & GOVD_SHARED)
			  {
			    remove = true;
			    break;
			  }
		      }
		}
	    }
	  else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
		remove = true;
	    }
	  break;

	case OMP_CLAUSE_MAP:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!DECL_P (decl))
	    break;
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  if (ctx->region_type == ORT_TARGET && !(n->value & GOVD_SEEN))
	    remove = true;
	  else if (DECL_SIZE (decl)
		   && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
		   && OMP_CLAUSE_MAP_KIND (c) != OMP_CLAUSE_MAP_POINTER)
	    {
	      /* Variable-sized decl: its DECL_VALUE_EXPR is *ptr.  Map
		 the pointed-to storage with its run-time size, and chain
		 an extra zero-sized POINTER map for the pointer itself.  */
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      tree mem = build_simple_mem_ref (decl2);
	      OMP_CLAUSE_DECL (c) = mem;
	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	      if (ctx->outer_context)
		{
		  omp_notice_variable (ctx->outer_context, decl2, true);
		  omp_notice_variable (ctx->outer_context,
				       OMP_CLAUSE_SIZE (c), true);
		}
	      tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
					  OMP_CLAUSE_MAP);
	      OMP_CLAUSE_DECL (nc) = decl;
	      OMP_CLAUSE_SIZE (nc) = size_zero_node;
	      OMP_CLAUSE_MAP_KIND (nc) = OMP_CLAUSE_MAP_POINTER;
	      OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
	      OMP_CLAUSE_CHAIN (c) = nc;
	      /* Advance past the clause we just inserted.  */
	      c = nc;
	    }
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!DECL_P (decl))
	    break;
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      /* Same variable-sized rewrite as for OMP_CLAUSE_MAP above,
		 but no companion POINTER map is needed here.  */
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      tree mem = build_simple_mem_ref (decl2);
	      OMP_CLAUSE_DECL (c) = mem;
	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	      if (ctx->outer_context)
		{
		  omp_notice_variable (ctx->outer_context, decl2, true);
		  omp_notice_variable (ctx->outer_context,
				       OMP_CLAUSE_SIZE (c), true);
		}
	    }
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_DEPEND:
	  /* These clauses need no adjustment after gimplification.  */
	  break;

	default:
	  gcc_unreachable ();
	}

      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  /* Add in any implicit data sharing.  */
  splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);

  gimplify_omp_ctxp = ctx->outer_context;
  delete_omp_context (ctx);
}
6439
6440/* Gimplify the contents of an OMP_PARALLEL statement. This involves
6441 gimplification of the body, as well as scanning the body for used
6442 variables. We need to do this scan now, because variable-sized
6443 decls will be decomposed during gimplification. */
6444
726a989a
RB
6445static void
6446gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
953ff289
DN
6447{
6448 tree expr = *expr_p;
726a989a
RB
6449 gimple g;
6450 gimple_seq body = NULL;
953ff289 6451
a68ab351
JJ
6452 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
6453 OMP_PARALLEL_COMBINED (expr)
6454 ? ORT_COMBINED_PARALLEL
6455 : ORT_PARALLEL);
953ff289 6456
45852dcc 6457 push_gimplify_context ();
953ff289 6458
726a989a
RB
6459 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
6460 if (gimple_code (g) == GIMPLE_BIND)
6461 pop_gimplify_context (g);
50674e96 6462 else
726a989a 6463 pop_gimplify_context (NULL);
953ff289
DN
6464
6465 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
6466
726a989a
RB
6467 g = gimple_build_omp_parallel (body,
6468 OMP_PARALLEL_CLAUSES (expr),
6469 NULL_TREE, NULL_TREE);
6470 if (OMP_PARALLEL_COMBINED (expr))
6471 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
6472 gimplify_seq_add_stmt (pre_p, g);
6473 *expr_p = NULL_TREE;
953ff289
DN
6474}
6475
a68ab351
JJ
6476/* Gimplify the contents of an OMP_TASK statement. This involves
6477 gimplification of the body, as well as scanning the body for used
6478 variables. We need to do this scan now, because variable-sized
6479 decls will be decomposed during gimplification. */
953ff289 6480
726a989a
RB
6481static void
6482gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
953ff289 6483{
a68ab351 6484 tree expr = *expr_p;
726a989a
RB
6485 gimple g;
6486 gimple_seq body = NULL;
953ff289 6487
f22f4340
JJ
6488 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
6489 find_omp_clause (OMP_TASK_CLAUSES (expr),
6490 OMP_CLAUSE_UNTIED)
6491 ? ORT_UNTIED_TASK : ORT_TASK);
953ff289 6492
45852dcc 6493 push_gimplify_context ();
953ff289 6494
726a989a
RB
6495 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
6496 if (gimple_code (g) == GIMPLE_BIND)
6497 pop_gimplify_context (g);
953ff289 6498 else
726a989a 6499 pop_gimplify_context (NULL);
953ff289 6500
a68ab351 6501 gimplify_adjust_omp_clauses (&OMP_TASK_CLAUSES (expr));
917948d3 6502
726a989a
RB
6503 g = gimple_build_omp_task (body,
6504 OMP_TASK_CLAUSES (expr),
6505 NULL_TREE, NULL_TREE,
6506 NULL_TREE, NULL_TREE, NULL_TREE);
6507 gimplify_seq_add_stmt (pre_p, g);
6508 *expr_p = NULL_TREE;
a68ab351
JJ
6509}
6510
acf0174b
JJ
6511/* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
6512 with non-NULL OMP_FOR_INIT. */
6513
6514static tree
6515find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
6516{
6517 *walk_subtrees = 0;
6518 switch (TREE_CODE (*tp))
6519 {
6520 case OMP_FOR:
6521 *walk_subtrees = 1;
6522 /* FALLTHRU */
6523 case OMP_SIMD:
6524 if (OMP_FOR_INIT (*tp) != NULL_TREE)
6525 return *tp;
6526 break;
6527 case BIND_EXPR:
6528 case STATEMENT_LIST:
6529 case OMP_PARALLEL:
6530 *walk_subtrees = 1;
6531 break;
6532 default:
6533 break;
6534 }
6535 return NULL_TREE;
6536}
6537
a68ab351
JJ
6538/* Gimplify the gross structure of an OMP_FOR statement. */
6539
6540static enum gimplify_status
726a989a 6541gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
a68ab351 6542{
acf0174b 6543 tree for_stmt, orig_for_stmt, decl, var, t;
32e8bb8e
ILT
6544 enum gimplify_status ret = GS_ALL_DONE;
6545 enum gimplify_status tret;
726a989a
RB
6546 gimple gfor;
6547 gimple_seq for_body, for_pre_body;
a68ab351 6548 int i;
74bf76ed
JJ
6549 bool simd;
6550 bitmap has_decl_expr = NULL;
a68ab351 6551
acf0174b 6552 orig_for_stmt = for_stmt = *expr_p;
a68ab351 6553
c02065fc
AH
6554 simd = TREE_CODE (for_stmt) == OMP_SIMD
6555 || TREE_CODE (for_stmt) == CILK_SIMD;
a68ab351 6556 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
74bf76ed 6557 simd ? ORT_SIMD : ORT_WORKSHARE);
917948d3 6558
726a989a
RB
6559 /* Handle OMP_FOR_INIT. */
6560 for_pre_body = NULL;
74bf76ed
JJ
6561 if (simd && OMP_FOR_PRE_BODY (for_stmt))
6562 {
6563 has_decl_expr = BITMAP_ALLOC (NULL);
6564 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
6565 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
acf0174b 6566 == VAR_DECL)
74bf76ed
JJ
6567 {
6568 t = OMP_FOR_PRE_BODY (for_stmt);
6569 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
6570 }
6571 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
6572 {
6573 tree_stmt_iterator si;
6574 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
6575 tsi_next (&si))
6576 {
6577 t = tsi_stmt (si);
6578 if (TREE_CODE (t) == DECL_EXPR
6579 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
6580 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
6581 }
6582 }
6583 }
726a989a
RB
6584 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
6585 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
a68ab351 6586
acf0174b
JJ
6587 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
6588 {
6589 for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt), find_combined_omp_for,
6590 NULL, NULL);
6591 gcc_assert (for_stmt != NULL_TREE);
6592 gimplify_omp_ctxp->combined_loop = true;
6593 }
6594
355a7673 6595 for_body = NULL;
a68ab351
JJ
6596 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6597 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
6598 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6599 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
6600 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6601 {
6602 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
726a989a
RB
6603 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6604 decl = TREE_OPERAND (t, 0);
a68ab351
JJ
6605 gcc_assert (DECL_P (decl));
6606 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
6607 || POINTER_TYPE_P (TREE_TYPE (decl)));
6608
6609 /* Make sure the iteration variable is private. */
74bf76ed 6610 tree c = NULL_TREE;
acf0174b
JJ
6611 if (orig_for_stmt != for_stmt)
6612 /* Do this only on innermost construct for combined ones. */;
6613 else if (simd)
74bf76ed
JJ
6614 {
6615 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
6616 (splay_tree_key)decl);
6617 omp_is_private (gimplify_omp_ctxp, decl, simd);
6618 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6619 omp_notice_variable (gimplify_omp_ctxp, decl, true);
6620 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
6621 {
6622 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
6623 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
6624 if (has_decl_expr
6625 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
6626 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
6627 OMP_CLAUSE_DECL (c) = decl;
6628 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
6629 OMP_FOR_CLAUSES (for_stmt) = c;
6630 omp_add_variable (gimplify_omp_ctxp, decl,
6631 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
6632 }
6633 else
6634 {
6635 bool lastprivate
6636 = (!has_decl_expr
6637 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
6638 c = build_omp_clause (input_location,
6639 lastprivate ? OMP_CLAUSE_LASTPRIVATE
6640 : OMP_CLAUSE_PRIVATE);
6641 OMP_CLAUSE_DECL (c) = decl;
6642 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
6643 omp_add_variable (gimplify_omp_ctxp, decl,
6644 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
6645 | GOVD_SEEN);
6646 c = NULL_TREE;
6647 }
6648 }
6649 else if (omp_is_private (gimplify_omp_ctxp, decl, simd))
a68ab351
JJ
6650 omp_notice_variable (gimplify_omp_ctxp, decl, true);
6651 else
6652 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
6653
6654 /* If DECL is not a gimple register, create a temporary variable to act
6655 as an iteration counter. This is valid, since DECL cannot be
6656 modified in the body of the loop. */
acf0174b
JJ
6657 if (orig_for_stmt != for_stmt)
6658 var = decl;
6659 else if (!is_gimple_reg (decl))
a68ab351
JJ
6660 {
6661 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
726a989a 6662 TREE_OPERAND (t, 0) = var;
b8698a0f 6663
726a989a 6664 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
953ff289 6665
a68ab351
JJ
6666 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
6667 }
6668 else
6669 var = decl;
07beea0d 6670
32e8bb8e 6671 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
726a989a 6672 is_gimple_val, fb_rvalue);
32e8bb8e 6673 ret = MIN (ret, tret);
726a989a
RB
6674 if (ret == GS_ERROR)
6675 return ret;
953ff289 6676
726a989a 6677 /* Handle OMP_FOR_COND. */
a68ab351
JJ
6678 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6679 gcc_assert (COMPARISON_CLASS_P (t));
726a989a 6680 gcc_assert (TREE_OPERAND (t, 0) == decl);
b56b9fe3 6681
32e8bb8e 6682 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
726a989a 6683 is_gimple_val, fb_rvalue);
32e8bb8e 6684 ret = MIN (ret, tret);
917948d3 6685
726a989a 6686 /* Handle OMP_FOR_INCR. */
a68ab351 6687 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
953ff289
DN
6688 switch (TREE_CODE (t))
6689 {
a68ab351
JJ
6690 case PREINCREMENT_EXPR:
6691 case POSTINCREMENT_EXPR:
c02065fc
AH
6692 {
6693 tree decl = TREE_OPERAND (t, 0);
6694 // c_omp_for_incr_canonicalize_ptr() should have been
6695 // called to massage things appropriately.
6696 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
6697
6698 if (orig_for_stmt != for_stmt)
6699 break;
6700 t = build_int_cst (TREE_TYPE (decl), 1);
6701 if (c)
6702 OMP_CLAUSE_LINEAR_STEP (c) = t;
6703 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6704 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6705 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
acf0174b 6706 break;
c02065fc 6707 }
a68ab351
JJ
6708
6709 case PREDECREMENT_EXPR:
6710 case POSTDECREMENT_EXPR:
acf0174b
JJ
6711 if (orig_for_stmt != for_stmt)
6712 break;
a68ab351 6713 t = build_int_cst (TREE_TYPE (decl), -1);
74bf76ed
JJ
6714 if (c)
6715 OMP_CLAUSE_LINEAR_STEP (c) = t;
a68ab351 6716 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
726a989a 6717 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
a68ab351
JJ
6718 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6719 break;
6720
726a989a
RB
6721 case MODIFY_EXPR:
6722 gcc_assert (TREE_OPERAND (t, 0) == decl);
6723 TREE_OPERAND (t, 0) = var;
a68ab351 6724
726a989a 6725 t = TREE_OPERAND (t, 1);
a68ab351 6726 switch (TREE_CODE (t))
953ff289 6727 {
a68ab351
JJ
6728 case PLUS_EXPR:
6729 if (TREE_OPERAND (t, 1) == decl)
6730 {
6731 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
6732 TREE_OPERAND (t, 0) = var;
6733 break;
6734 }
6735
6736 /* Fallthru. */
6737 case MINUS_EXPR:
6738 case POINTER_PLUS_EXPR:
6739 gcc_assert (TREE_OPERAND (t, 0) == decl);
917948d3 6740 TREE_OPERAND (t, 0) = var;
953ff289 6741 break;
a68ab351
JJ
6742 default:
6743 gcc_unreachable ();
953ff289 6744 }
917948d3 6745
32e8bb8e 6746 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
726a989a 6747 is_gimple_val, fb_rvalue);
32e8bb8e 6748 ret = MIN (ret, tret);
74bf76ed
JJ
6749 if (c)
6750 {
6751 OMP_CLAUSE_LINEAR_STEP (c) = TREE_OPERAND (t, 1);
6752 if (TREE_CODE (t) == MINUS_EXPR)
6753 {
6754 t = TREE_OPERAND (t, 1);
6755 OMP_CLAUSE_LINEAR_STEP (c)
6756 = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
6757 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
6758 &for_pre_body, NULL,
6759 is_gimple_val, fb_rvalue);
6760 ret = MIN (ret, tret);
6761 }
6762 }
953ff289 6763 break;
a68ab351 6764
953ff289
DN
6765 default:
6766 gcc_unreachable ();
6767 }
6768
acf0174b
JJ
6769 if ((var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
6770 && orig_for_stmt == for_stmt)
a68ab351 6771 {
a68ab351 6772 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
726a989a
RB
6773 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6774 && OMP_CLAUSE_DECL (c) == decl
6775 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
6776 {
6777 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6778 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6779 gcc_assert (TREE_OPERAND (t, 0) == var);
6780 t = TREE_OPERAND (t, 1);
6781 gcc_assert (TREE_CODE (t) == PLUS_EXPR
6782 || TREE_CODE (t) == MINUS_EXPR
6783 || TREE_CODE (t) == POINTER_PLUS_EXPR);
6784 gcc_assert (TREE_OPERAND (t, 0) == var);
6785 t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
6786 TREE_OPERAND (t, 1));
6787 gimplify_assign (decl, t,
6788 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
a68ab351
JJ
6789 }
6790 }
953ff289
DN
6791 }
6792
74bf76ed
JJ
6793 BITMAP_FREE (has_decl_expr);
6794
acf0174b 6795 gimplify_and_add (OMP_FOR_BODY (orig_for_stmt), &for_body);
726a989a 6796
acf0174b
JJ
6797 if (orig_for_stmt != for_stmt)
6798 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6799 {
6800 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6801 decl = TREE_OPERAND (t, 0);
6802 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
6803 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
6804 TREE_OPERAND (t, 0) = var;
6805 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6806 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
6807 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
6808 }
6809
6810 gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt));
953ff289 6811
74bf76ed 6812 int kind;
acf0174b 6813 switch (TREE_CODE (orig_for_stmt))
74bf76ed
JJ
6814 {
6815 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
6816 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
c02065fc 6817 case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
acf0174b 6818 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
74bf76ed
JJ
6819 default:
6820 gcc_unreachable ();
6821 }
acf0174b 6822 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
726a989a
RB
6823 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
6824 for_pre_body);
acf0174b
JJ
6825 if (orig_for_stmt != for_stmt)
6826 gimple_omp_for_set_combined_p (gfor, true);
6827 if (gimplify_omp_ctxp
6828 && (gimplify_omp_ctxp->combined_loop
6829 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
6830 && gimplify_omp_ctxp->outer_context
6831 && gimplify_omp_ctxp->outer_context->combined_loop)))
6832 {
6833 gimple_omp_for_set_combined_into_p (gfor, true);
6834 if (gimplify_omp_ctxp->combined_loop)
6835 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
6836 else
6837 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
6838 }
726a989a
RB
6839
6840 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6841 {
6842 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6843 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
6844 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
6845 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6846 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
6847 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
6848 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6849 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
6850 }
6851
6852 gimplify_seq_add_stmt (pre_p, gfor);
74bf76ed
JJ
6853 if (ret != GS_ALL_DONE)
6854 return GS_ERROR;
6855 *expr_p = NULL_TREE;
6856 return GS_ALL_DONE;
953ff289
DN
6857}
6858
acf0174b
JJ
/* Gimplify the gross structure of other OpenMP constructs.
   In particular, OMP_SECTIONS, OMP_SINGLE, OMP_TARGET, OMP_TARGET_DATA
   and OMP_TEAMS.  */

static void
gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple stmt;
  gimple_seq body = NULL;
  enum omp_region_type ort = ORT_WORKSHARE;

  /* Select the region type used while scanning the clauses.  */
  switch (TREE_CODE (expr))
    {
    case OMP_SECTIONS:
    case OMP_SINGLE:
      break;
    case OMP_TARGET:
      ort = ORT_TARGET;
      break;
    case OMP_TARGET_DATA:
      ort = ORT_TARGET_DATA;
      break;
    case OMP_TEAMS:
      ort = ORT_TEAMS;
      break;
    default:
      gcc_unreachable ();
    }
  gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort);
  if (ort == ORT_TARGET || ort == ORT_TARGET_DATA)
    {
      /* Target regions get their own gimplification context; a
	 resulting GIMPLE_BIND receives the new temporaries.  */
      push_gimplify_context ();
      gimple g = gimplify_and_return_first (OMP_BODY (expr), &body);
      if (gimple_code (g) == GIMPLE_BIND)
	pop_gimplify_context (g);
      else
	pop_gimplify_context (NULL);
      if (ort == ORT_TARGET_DATA)
	{
	  /* Wrap the body in a try/finally that calls
	     GOMP_target_end_data, so the mapping is undone on every
	     exit path from the region.  */
	  gimple_seq cleanup = NULL;
	  tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TARGET_END_DATA);
	  g = gimple_build_call (fn, 0);
	  gimple_seq_add_stmt (&cleanup, g);
	  g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
	  body = NULL;
	  gimple_seq_add_stmt (&body, g);
	}
    }
  else
    gimplify_and_add (OMP_BODY (expr), &body);
  gimplify_adjust_omp_clauses (&OMP_CLAUSES (expr));

  /* Build the GIMPLE statement matching the construct.  */
  switch (TREE_CODE (expr))
    {
    case OMP_SECTIONS:
      stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
      break;
    case OMP_SINGLE:
      stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
      break;
    case OMP_TARGET:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
				      OMP_CLAUSES (expr));
      break;
    case OMP_TARGET_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OMP_TEAMS:
      stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_seq_add_stmt (pre_p, stmt);
  *expr_p = NULL_TREE;
}
6938
6939/* Gimplify the gross structure of OpenMP target update construct. */
6940
6941static void
6942gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
6943{
6944 tree expr = *expr_p;
6945 gimple stmt;
6946
6947 gimplify_scan_omp_clauses (&OMP_TARGET_UPDATE_CLAUSES (expr), pre_p,
6948 ORT_WORKSHARE);
6949 gimplify_adjust_omp_clauses (&OMP_TARGET_UPDATE_CLAUSES (expr));
6950 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_UPDATE,
6951 OMP_TARGET_UPDATE_CLAUSES (expr));
726a989a
RB
6952
6953 gimplify_seq_add_stmt (pre_p, stmt);
acf0174b 6954 *expr_p = NULL_TREE;
953ff289
DN
6955}
6956
/* A subroutine of gimplify_omp_atomic.  The front end is supposed to have
   stabilized the lhs of the atomic operation as *ADDR.  Return true if
   EXPR is this stabilized form.  */

static bool
goa_lhs_expr_p (tree expr, tree addr)
{
  /* Also include casts to other type variants.  The C front end is fond
     of adding these for e.g. volatile variables.  This is like
     STRIP_TYPE_NOPS but includes the main variant lookup.  */
  STRIP_USELESS_TYPE_CONVERSION (expr);

  if (TREE_CODE (expr) == INDIRECT_REF)
    {
      expr = TREE_OPERAND (expr, 0);
      /* Peel matching conversions off EXPR and ADDR in lock step until
	 they either meet or diverge.  */
      while (expr != addr
	     && (CONVERT_EXPR_P (expr)
		 || TREE_CODE (expr) == NON_LVALUE_EXPR)
	     && TREE_CODE (expr) == TREE_CODE (addr)
	     && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
	{
	  expr = TREE_OPERAND (expr, 0);
	  addr = TREE_OPERAND (addr, 0);
	}
      if (expr == addr)
	return true;
      /* Also accept &X vs &X compared by the identity of X itself.  */
      return (TREE_CODE (addr) == ADDR_EXPR
	      && TREE_CODE (expr) == ADDR_EXPR
	      && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
    }
  /* Direct reference: EXPR is the object ADDR points to.  */
  if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
    return true;
  return false;
}
6991
ad19c4be
EB
/* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR.  If an
   expression does not involve the lhs, evaluate it into a temporary.
   Return 1 if the lhs appeared as a subexpression, 0 if it did not,
   or -1 if an error was encountered.  */

static int
goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
		    tree lhs_var)
{
  tree expr = *expr_p;
  int saw_lhs;

  if (goa_lhs_expr_p (expr, lhs_addr))
    {
      *expr_p = lhs_var;
      return 1;
    }
  if (is_gimple_val (expr))
    return 0;

  saw_lhs = 0;
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_binary:
    case tcc_comparison:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
				     lhs_var);
      /* FALLTHRU: binary ops also recurse into operand 0 below.  */
    case tcc_unary:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
				     lhs_var);
      break;
    case tcc_expression:
      switch (TREE_CODE (expr))
	{
	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var);
	  /* FALLTHRU: the two-operand truth ops also visit operand 0.  */
	case TRUTH_NOT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var);
	  break;
	case COMPOUND_EXPR:
	  /* Break out any preevaluations from cp_build_modify_expr.  */
	  for (; TREE_CODE (expr) == COMPOUND_EXPR;
	       expr = TREE_OPERAND (expr, 1))
	    gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
	  *expr_p = expr;
	  return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
	default:
	  break;
	}
      break;
    default:
      break;
    }

  if (saw_lhs == 0)
    {
      /* The lhs did not occur anywhere in EXPR: evaluate the whole
	 expression into a temporary before the atomic operation.  */
      enum gimplify_status gs;
      gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
      if (gs != GS_ALL_DONE)
	saw_lhs = -1;
    }

  return saw_lhs;
}
7062
953ff289
DN
/* Gimplify an OMP_ATOMIC statement.  Emits a GIMPLE_OMP_ATOMIC_LOAD /
   GIMPLE_OMP_ATOMIC_STORE pair into PRE_P; for read/capture forms, the
   expression's value becomes the loaded (old) or stored (new) value.  */

static enum gimplify_status
gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
{
  tree addr = TREE_OPERAND (*expr_p, 0);
  /* An atomic read has no rhs expression.  */
  tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
	     ? NULL : TREE_OPERAND (*expr_p, 1);
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
  tree tmp_load;
  gimple loadstmt, storestmt;

  /* TMP_LOAD receives the prior value of *ADDR; the rhs is rewritten
     in terms of it by goa_stabilize_expr.  */
  tmp_load = create_tmp_reg (type, NULL);
  if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
    return GS_ERROR;

  if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  /* Emit the atomic load, then gimplify the (rewritten) rhs.  */
  loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
  gimplify_seq_add_stmt (pre_p, loadstmt);
  if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  /* A pure read stores back the value just loaded.  */
  if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
    rhs = tmp_load;
  storestmt = gimple_build_omp_atomic_store (rhs);
  gimplify_seq_add_stmt (pre_p, storestmt);
  if (OMP_ATOMIC_SEQ_CST (*expr_p))
    {
      /* Propagate the seq_cst flag from the source construct.  */
      gimple_omp_atomic_set_seq_cst (loadstmt);
      gimple_omp_atomic_set_seq_cst (storestmt);
    }
  /* For read/capture forms the expression yields the old or the new
     value; mark the statement whose value is actually needed.  */
  switch (TREE_CODE (*expr_p))
    {
    case OMP_ATOMIC_READ:
    case OMP_ATOMIC_CAPTURE_OLD:
      *expr_p = tmp_load;
      gimple_omp_atomic_set_need_value (loadstmt);
      break;
    case OMP_ATOMIC_CAPTURE_NEW:
      *expr_p = rhs;
      gimple_omp_atomic_set_need_value (storestmt);
      break;
    default:
      *expr_p = NULL;
      break;
    }

  return GS_ALL_DONE;
}
6de9cd9a 7116
0a35513e
AH
7117/* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
7118 body, and adding some EH bits. */
7119
7120static enum gimplify_status
7121gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
7122{
7123 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
7124 gimple g;
7125 gimple_seq body = NULL;
0a35513e
AH
7126 int subcode = 0;
7127
7128 /* Wrap the transaction body in a BIND_EXPR so we have a context
7129 where to put decls for OpenMP. */
7130 if (TREE_CODE (tbody) != BIND_EXPR)
7131 {
7132 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
7133 TREE_SIDE_EFFECTS (bind) = 1;
7134 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
7135 TRANSACTION_EXPR_BODY (expr) = bind;
7136 }
7137
45852dcc 7138 push_gimplify_context ();
0a35513e
AH
7139 temp = voidify_wrapper_expr (*expr_p, NULL);
7140
7141 g = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
7142 pop_gimplify_context (g);
7143
7144 g = gimple_build_transaction (body, NULL);
7145 if (TRANSACTION_EXPR_OUTER (expr))
7146 subcode = GTMA_IS_OUTER;
7147 else if (TRANSACTION_EXPR_RELAXED (expr))
7148 subcode = GTMA_IS_RELAXED;
7149 gimple_transaction_set_subcode (g, subcode);
7150
7151 gimplify_seq_add_stmt (pre_p, g);
7152
7153 if (temp)
7154 {
7155 *expr_p = temp;
7156 return GS_OK;
7157 }
7158
7159 *expr_p = NULL_TREE;
7160 return GS_ALL_DONE;
7161}
7162
/* Convert the GENERIC expression tree *EXPR_P to GIMPLE.  If the
   expression produces a value to be used as an operand inside a GIMPLE
   statement, the value will be stored back in *EXPR_P.  This value will
   be a tree of class tcc_declaration, tcc_constant, tcc_reference or
   an SSA_NAME.  The corresponding sequence of GIMPLE statements is
   emitted in PRE_P and POST_P.

   Additionally, this process may overwrite parts of the input
   expression during gimplification.  Ideally, it should be
   possible to do non-destructive gimplification.

   EXPR_P points to the GENERIC expression to convert to GIMPLE.  If
      the expression needs to evaluate to a value to be used as
      an operand in a GIMPLE statement, this value will be stored in
      *EXPR_P on exit.  This happens when the caller specifies one
      of fb_lvalue or fb_rvalue fallback flags.

   PRE_P will contain the sequence of GIMPLE statements corresponding
       to the evaluation of EXPR and all the side-effects that must
       be executed before the main expression.  On exit, the last
       statement of PRE_P is the core statement being gimplified.  For
       instance, when gimplifying 'if (++a)' the last statement in
       PRE_P will be 'if (t.1)' where t.1 is the result of
       pre-incrementing 'a'.

   POST_P will contain the sequence of GIMPLE statements corresponding
       to the evaluation of all the side-effects that must be executed
       after the main expression.  If this is NULL, the post
       side-effects are stored at the end of PRE_P.

   The reason why the output is split in two is to handle post
   side-effects explicitly.  In some cases, an expression may have
   inner and outer post side-effects which need to be emitted in
   an order different from the one given by the recursive
   traversal.  For instance, for the expression (*p--)++ the post
   side-effects of '--' must actually occur *after* the post
   side-effects of '++'.  However, gimplification will first visit
   the inner expression, so if a separate POST sequence was not
   used, the resulting sequence would be:

       1	t.1 = *p
       2	p = p - 1
       3	t.2 = t.1 + 1
       4	*p = t.2

   However, the post-decrement operation in line #2 must not be
   evaluated until after the store to *p at line #4, so the
   correct sequence should be:

       1	t.1 = *p
       2	t.2 = t.1 + 1
       3	*p = t.2
       4	p = p - 1

   So, by specifying a separate post queue, it is possible
   to emit the post side-effects in the correct order.
   If POST_P is NULL, an internal queue will be used.  Before
   returning to the caller, the sequence POST_P is appended to
   the main output sequence PRE_P.

   GIMPLE_TEST_F points to a function that takes a tree T and
       returns nonzero if T is in the GIMPLE form requested by the
       caller.  The GIMPLE predicates are in gimple.c.

   FALLBACK tells the function what sort of a temporary we want if
       gimplification cannot produce an expression that complies with
       GIMPLE_TEST_F.

   fb_none means that no temporary should be generated
   fb_rvalue means that an rvalue is OK to generate
   fb_lvalue means that an lvalue is OK to generate
   fb_either means that either is OK, but an lvalue is preferable.
   fb_mayfail means that gimplification may fail (in which case
   GS_ERROR will be returned)

   The return value is either GS_ERROR or GS_ALL_DONE, since this
   function iterates until EXPR is completely gimplified or an error
   occurs.  */
6de9cd9a
DN
7241
7242enum gimplify_status
726a989a
RB
7243gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
7244 bool (*gimple_test_f) (tree), fallback_t fallback)
6de9cd9a
DN
7245{
7246 tree tmp;
726a989a
RB
7247 gimple_seq internal_pre = NULL;
7248 gimple_seq internal_post = NULL;
6de9cd9a 7249 tree save_expr;
726a989a 7250 bool is_statement;
6de9cd9a
DN
7251 location_t saved_location;
7252 enum gimplify_status ret;
726a989a 7253 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
6de9cd9a
DN
7254
7255 save_expr = *expr_p;
7256 if (save_expr == NULL_TREE)
7257 return GS_ALL_DONE;
7258
726a989a
RB
7259 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
7260 is_statement = gimple_test_f == is_gimple_stmt;
7261 if (is_statement)
7262 gcc_assert (pre_p);
7263
7264 /* Consistency checks. */
7265 if (gimple_test_f == is_gimple_reg)
7266 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
7267 else if (gimple_test_f == is_gimple_val
726a989a
RB
7268 || gimple_test_f == is_gimple_call_addr
7269 || gimple_test_f == is_gimple_condexpr
7270 || gimple_test_f == is_gimple_mem_rhs
ba4d8f9d 7271 || gimple_test_f == is_gimple_mem_rhs_or_call
726a989a 7272 || gimple_test_f == is_gimple_reg_rhs
ba4d8f9d 7273 || gimple_test_f == is_gimple_reg_rhs_or_call
70f34814
RG
7274 || gimple_test_f == is_gimple_asm_val
7275 || gimple_test_f == is_gimple_mem_ref_addr)
726a989a
RB
7276 gcc_assert (fallback & fb_rvalue);
7277 else if (gimple_test_f == is_gimple_min_lval
7278 || gimple_test_f == is_gimple_lvalue)
7279 gcc_assert (fallback & fb_lvalue);
7280 else if (gimple_test_f == is_gimple_addressable)
7281 gcc_assert (fallback & fb_either);
7282 else if (gimple_test_f == is_gimple_stmt)
7283 gcc_assert (fallback == fb_none);
7284 else
7285 {
7286 /* We should have recognized the GIMPLE_TEST_F predicate to
7287 know what kind of fallback to use in case a temporary is
7288 needed to hold the value or address of *EXPR_P. */
7289 gcc_unreachable ();
7290 }
7291
6de9cd9a
DN
7292 /* We used to check the predicate here and return immediately if it
7293 succeeds. This is wrong; the design is for gimplification to be
7294 idempotent, and for the predicates to only test for valid forms, not
7295 whether they are fully simplified. */
6de9cd9a
DN
7296 if (pre_p == NULL)
7297 pre_p = &internal_pre;
726a989a 7298
6de9cd9a
DN
7299 if (post_p == NULL)
7300 post_p = &internal_post;
7301
726a989a
RB
7302 /* Remember the last statements added to PRE_P and POST_P. Every
7303 new statement added by the gimplification helpers needs to be
7304 annotated with location information. To centralize the
7305 responsibility, we remember the last statement that had been
7306 added to both queues before gimplifying *EXPR_P. If
7307 gimplification produces new statements in PRE_P and POST_P, those
7308 statements will be annotated with the same location information
7309 as *EXPR_P. */
7310 pre_last_gsi = gsi_last (*pre_p);
7311 post_last_gsi = gsi_last (*post_p);
7312
6de9cd9a 7313 saved_location = input_location;
a281759f
PB
7314 if (save_expr != error_mark_node
7315 && EXPR_HAS_LOCATION (*expr_p))
7316 input_location = EXPR_LOCATION (*expr_p);
6de9cd9a
DN
7317
7318 /* Loop over the specific gimplifiers until the toplevel node
7319 remains the same. */
7320 do
7321 {
73d6ddef
RK
7322 /* Strip away as many useless type conversions as possible
7323 at the toplevel. */
7324 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
6de9cd9a
DN
7325
7326 /* Remember the expr. */
7327 save_expr = *expr_p;
7328
7329 /* Die, die, die, my darling. */
7330 if (save_expr == error_mark_node
726a989a 7331 || (TREE_TYPE (save_expr)
65355d53 7332 && TREE_TYPE (save_expr) == error_mark_node))
6de9cd9a
DN
7333 {
7334 ret = GS_ERROR;
7335 break;
7336 }
7337
7338 /* Do any language-specific gimplification. */
32e8bb8e
ILT
7339 ret = ((enum gimplify_status)
7340 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
6de9cd9a
DN
7341 if (ret == GS_OK)
7342 {
7343 if (*expr_p == NULL_TREE)
7344 break;
7345 if (*expr_p != save_expr)
7346 continue;
7347 }
7348 else if (ret != GS_UNHANDLED)
7349 break;
7350
941f78d1
JM
7351 /* Make sure that all the cases set 'ret' appropriately. */
7352 ret = GS_UNHANDLED;
6de9cd9a
DN
7353 switch (TREE_CODE (*expr_p))
7354 {
7355 /* First deal with the special cases. */
7356
7357 case POSTINCREMENT_EXPR:
7358 case POSTDECREMENT_EXPR:
7359 case PREINCREMENT_EXPR:
7360 case PREDECREMENT_EXPR:
7361 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
cc3c4f62
RB
7362 fallback != fb_none,
7363 TREE_TYPE (*expr_p));
6de9cd9a
DN
7364 break;
7365
7366 case ARRAY_REF:
44de5aeb
RK
7367 case ARRAY_RANGE_REF:
7368 case REALPART_EXPR:
7369 case IMAGPART_EXPR:
6de9cd9a 7370 case COMPONENT_REF:
9e51aaf5 7371 case VIEW_CONVERT_EXPR:
6de9cd9a 7372 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
90051e16 7373 fallback ? fallback : fb_rvalue);
6de9cd9a
DN
7374 break;
7375
7376 case COND_EXPR:
dae7ec87 7377 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
726a989a 7378
0223e4f5
JM
7379 /* C99 code may assign to an array in a structure value of a
7380 conditional expression, and this has undefined behavior
7381 only on execution, so create a temporary if an lvalue is
7382 required. */
7383 if (fallback == fb_lvalue)
7384 {
7385 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
936d04b6 7386 mark_addressable (*expr_p);
941f78d1 7387 ret = GS_OK;
0223e4f5 7388 }
6de9cd9a
DN
7389 break;
7390
939b37da
BI
7391 case CILK_SPAWN_STMT:
7392 gcc_assert
7393 (fn_contains_cilk_spawn_p (cfun)
7394 && lang_hooks.cilkplus.cilk_detect_spawn_and_unwrap (expr_p));
7395 if (!seen_error ())
7396 {
7397 ret = (enum gimplify_status)
7398 lang_hooks.cilkplus.gimplify_cilk_spawn (expr_p, pre_p,
7399 post_p);
7400 break;
7401 }
7402 /* If errors are seen, then just process it as a CALL_EXPR. */
7403
6de9cd9a 7404 case CALL_EXPR:
90051e16 7405 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
726a989a 7406
0223e4f5
JM
7407 /* C99 code may assign to an array in a structure returned
7408 from a function, and this has undefined behavior only on
7409 execution, so create a temporary if an lvalue is
7410 required. */
7411 if (fallback == fb_lvalue)
7412 {
7413 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
936d04b6 7414 mark_addressable (*expr_p);
941f78d1 7415 ret = GS_OK;
0223e4f5 7416 }
6de9cd9a
DN
7417 break;
7418
7419 case TREE_LIST:
282899df 7420 gcc_unreachable ();
6de9cd9a
DN
7421
7422 case COMPOUND_EXPR:
7423 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
7424 break;
7425
2ec5deb5 7426 case COMPOUND_LITERAL_EXPR:
4c53d183
MM
7427 ret = gimplify_compound_literal_expr (expr_p, pre_p,
7428 gimple_test_f, fallback);
2ec5deb5
PB
7429 break;
7430
6de9cd9a
DN
7431 case MODIFY_EXPR:
7432 case INIT_EXPR:
ebad5233
JM
7433 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
7434 fallback != fb_none);
6de9cd9a
DN
7435 break;
7436
7437 case TRUTH_ANDIF_EXPR:
7438 case TRUTH_ORIF_EXPR:
1d15f620
KT
7439 {
7440 /* Preserve the original type of the expression and the
7441 source location of the outer expression. */
7442 tree org_type = TREE_TYPE (*expr_p);
7443 *expr_p = gimple_boolify (*expr_p);
4b4455e5 7444 *expr_p = build3_loc (input_location, COND_EXPR,
1d15f620
KT
7445 org_type, *expr_p,
7446 fold_convert_loc
4b4455e5 7447 (input_location,
1d15f620
KT
7448 org_type, boolean_true_node),
7449 fold_convert_loc
4b4455e5 7450 (input_location,
1d15f620
KT
7451 org_type, boolean_false_node));
7452 ret = GS_OK;
7453 break;
7454 }
6de9cd9a
DN
7455
7456 case TRUTH_NOT_EXPR:
3c6cbf7a 7457 {
53020648
RG
7458 tree type = TREE_TYPE (*expr_p);
7459 /* The parsers are careful to generate TRUTH_NOT_EXPR
7460 only with operands that are always zero or one.
7461 We do not fold here but handle the only interesting case
7462 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
3c6cbf7a 7463 *expr_p = gimple_boolify (*expr_p);
53020648
RG
7464 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
7465 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
7466 TREE_TYPE (*expr_p),
7467 TREE_OPERAND (*expr_p, 0));
7468 else
7469 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
7470 TREE_TYPE (*expr_p),
7471 TREE_OPERAND (*expr_p, 0),
7472 build_int_cst (TREE_TYPE (*expr_p), 1));
7473 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
7474 *expr_p = fold_convert_loc (input_location, type, *expr_p);
7475 ret = GS_OK;
bd5d002e 7476 break;
3c6cbf7a 7477 }
67339062 7478
6de9cd9a
DN
7479 case ADDR_EXPR:
7480 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
7481 break;
7482
8170608b
TB
7483 case ANNOTATE_EXPR:
7484 {
7485 tree cond = TREE_OPERAND (*expr_p, 0);
7486 tree id = TREE_OPERAND (*expr_p, 1);
7487 tree tmp = create_tmp_var_raw (TREE_TYPE(cond), NULL);
7488 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
7489 gimple call = gimple_build_call_internal (IFN_ANNOTATE, 2,
7490 cond, id);
7491 gimple_call_set_lhs (call, tmp);
7492 gimplify_seq_add_stmt (pre_p, call);
7493 *expr_p = tmp;
7494 ret = GS_ALL_DONE;
7495 break;
7496 }
7497
6de9cd9a 7498 case VA_ARG_EXPR:
cd3ce9b4 7499 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
6de9cd9a
DN
7500 break;
7501
1043771b 7502 CASE_CONVERT:
6de9cd9a
DN
7503 if (IS_EMPTY_STMT (*expr_p))
7504 {
7505 ret = GS_ALL_DONE;
7506 break;
7507 }
7508
7509 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
7510 || fallback == fb_none)
7511 {
7512 /* Just strip a conversion to void (or in void context) and
7513 try again. */
7514 *expr_p = TREE_OPERAND (*expr_p, 0);
941f78d1 7515 ret = GS_OK;
6de9cd9a
DN
7516 break;
7517 }
7518
7519 ret = gimplify_conversion (expr_p);
7520 if (ret == GS_ERROR)
7521 break;
7522 if (*expr_p != save_expr)
7523 break;
7524 /* FALLTHRU */
7525
7526 case FIX_TRUNC_EXPR:
6de9cd9a
DN
7527 /* unary_expr: ... | '(' cast ')' val | ... */
7528 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7529 is_gimple_val, fb_rvalue);
7530 recalculate_side_effects (*expr_p);
7531 break;
7532
6a720599 7533 case INDIRECT_REF:
70f34814
RG
7534 {
7535 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
3748f5c9 7536 bool notrap = TREE_THIS_NOTRAP (*expr_p);
70f34814
RG
7537 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
7538
7539 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
7540 if (*expr_p != save_expr)
7541 {
7542 ret = GS_OK;
7543 break;
7544 }
7545
7546 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7547 is_gimple_reg, fb_rvalue);
dca26746
RG
7548 if (ret == GS_ERROR)
7549 break;
70f34814 7550
dca26746 7551 recalculate_side_effects (*expr_p);
70f34814
RG
7552 *expr_p = fold_build2_loc (input_location, MEM_REF,
7553 TREE_TYPE (*expr_p),
7554 TREE_OPERAND (*expr_p, 0),
7555 build_int_cst (saved_ptr_type, 0));
7556 TREE_THIS_VOLATILE (*expr_p) = volatilep;
3748f5c9 7557 TREE_THIS_NOTRAP (*expr_p) = notrap;
70f34814
RG
7558 ret = GS_OK;
7559 break;
7560 }
7561
7562 /* We arrive here through the various re-gimplifcation paths. */
7563 case MEM_REF:
7564 /* First try re-folding the whole thing. */
7565 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
7566 TREE_OPERAND (*expr_p, 0),
7567 TREE_OPERAND (*expr_p, 1));
7568 if (tmp)
941f78d1 7569 {
70f34814
RG
7570 *expr_p = tmp;
7571 recalculate_side_effects (*expr_p);
941f78d1
JM
7572 ret = GS_OK;
7573 break;
7574 }
01718e96
RG
7575 /* Avoid re-gimplifying the address operand if it is already
7576 in suitable form. Re-gimplifying would mark the address
7577 operand addressable. Always gimplify when not in SSA form
7578 as we still may have to gimplify decls with value-exprs. */
7579 if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
7580 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
7581 {
7582 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7583 is_gimple_mem_ref_addr, fb_rvalue);
7584 if (ret == GS_ERROR)
7585 break;
7586 }
6de9cd9a 7587 recalculate_side_effects (*expr_p);
70f34814 7588 ret = GS_ALL_DONE;
6de9cd9a
DN
7589 break;
7590
01718e96 7591 /* Constants need not be gimplified. */
6de9cd9a
DN
7592 case INTEGER_CST:
7593 case REAL_CST:
325217ed 7594 case FIXED_CST:
6de9cd9a
DN
7595 case STRING_CST:
7596 case COMPLEX_CST:
7597 case VECTOR_CST:
3f5c390d
RB
7598 /* Drop the overflow flag on constants, we do not want
7599 that in the GIMPLE IL. */
7600 if (TREE_OVERFLOW_P (*expr_p))
7601 *expr_p = drop_tree_overflow (*expr_p);
6de9cd9a
DN
7602 ret = GS_ALL_DONE;
7603 break;
7604
7605 case CONST_DECL:
0534fa56 7606 /* If we require an lvalue, such as for ADDR_EXPR, retain the
2a7e31df 7607 CONST_DECL node. Otherwise the decl is replaceable by its
0534fa56
RH
7608 value. */
7609 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
7610 if (fallback & fb_lvalue)
7611 ret = GS_ALL_DONE;
7612 else
941f78d1
JM
7613 {
7614 *expr_p = DECL_INITIAL (*expr_p);
7615 ret = GS_OK;
7616 }
6de9cd9a
DN
7617 break;
7618
350fae66 7619 case DECL_EXPR:
726a989a 7620 ret = gimplify_decl_expr (expr_p, pre_p);
350fae66
RK
7621 break;
7622
6de9cd9a 7623 case BIND_EXPR:
c6c7698d 7624 ret = gimplify_bind_expr (expr_p, pre_p);
6de9cd9a
DN
7625 break;
7626
7627 case LOOP_EXPR:
7628 ret = gimplify_loop_expr (expr_p, pre_p);
7629 break;
7630
7631 case SWITCH_EXPR:
7632 ret = gimplify_switch_expr (expr_p, pre_p);
7633 break;
7634
6de9cd9a
DN
7635 case EXIT_EXPR:
7636 ret = gimplify_exit_expr (expr_p);
7637 break;
7638
7639 case GOTO_EXPR:
7640 /* If the target is not LABEL, then it is a computed jump
7641 and the target needs to be gimplified. */
7642 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
8c50b495
JJ
7643 {
7644 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
7645 NULL, is_gimple_val, fb_rvalue);
7646 if (ret == GS_ERROR)
7647 break;
7648 }
726a989a
RB
7649 gimplify_seq_add_stmt (pre_p,
7650 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
941f78d1 7651 ret = GS_ALL_DONE;
6de9cd9a
DN
7652 break;
7653
2e28e797 7654 case PREDICT_EXPR:
726a989a
RB
7655 gimplify_seq_add_stmt (pre_p,
7656 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
7657 PREDICT_EXPR_OUTCOME (*expr_p)));
7658 ret = GS_ALL_DONE;
7659 break;
2e28e797 7660
6de9cd9a
DN
7661 case LABEL_EXPR:
7662 ret = GS_ALL_DONE;
282899df
NS
7663 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
7664 == current_function_decl);
726a989a
RB
7665 gimplify_seq_add_stmt (pre_p,
7666 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
6de9cd9a
DN
7667 break;
7668
7669 case CASE_LABEL_EXPR:
726a989a 7670 ret = gimplify_case_label_expr (expr_p, pre_p);
6de9cd9a
DN
7671 break;
7672
7673 case RETURN_EXPR:
7674 ret = gimplify_return_expr (*expr_p, pre_p);
7675 break;
7676
7677 case CONSTRUCTOR:
48eb4e53
RK
7678 /* Don't reduce this in place; let gimplify_init_constructor work its
7679 magic. Buf if we're just elaborating this for side effects, just
7680 gimplify any element that has side-effects. */
7681 if (fallback == fb_none)
7682 {
4038c495 7683 unsigned HOST_WIDE_INT ix;
ac47786e 7684 tree val;
08330ec2 7685 tree temp = NULL_TREE;
ac47786e
NF
7686 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
7687 if (TREE_SIDE_EFFECTS (val))
7688 append_to_statement_list (val, &temp);
48eb4e53 7689
08330ec2 7690 *expr_p = temp;
941f78d1 7691 ret = temp ? GS_OK : GS_ALL_DONE;
48eb4e53 7692 }
ca0b7d18
AP
7693 /* C99 code may assign to an array in a constructed
7694 structure or union, and this has undefined behavior only
7695 on execution, so create a temporary if an lvalue is
7696 required. */
7697 else if (fallback == fb_lvalue)
7698 {
7699 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
936d04b6 7700 mark_addressable (*expr_p);
941f78d1 7701 ret = GS_OK;
ca0b7d18 7702 }
08330ec2
AP
7703 else
7704 ret = GS_ALL_DONE;
6de9cd9a
DN
7705 break;
7706
7707 /* The following are special cases that are not handled by the
7708 original GIMPLE grammar. */
7709
7710 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
7711 eliminated. */
7712 case SAVE_EXPR:
7713 ret = gimplify_save_expr (expr_p, pre_p, post_p);
7714 break;
7715
7716 case BIT_FIELD_REF:
ea814c66
EB
7717 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7718 post_p, is_gimple_lvalue, fb_either);
7719 recalculate_side_effects (*expr_p);
6de9cd9a
DN
7720 break;
7721
150e3929
RG
7722 case TARGET_MEM_REF:
7723 {
7724 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
7725
23a534a1 7726 if (TMR_BASE (*expr_p))
150e3929 7727 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
4d948885 7728 post_p, is_gimple_mem_ref_addr, fb_either);
150e3929
RG
7729 if (TMR_INDEX (*expr_p))
7730 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
7731 post_p, is_gimple_val, fb_rvalue);
4d948885
RG
7732 if (TMR_INDEX2 (*expr_p))
7733 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
7734 post_p, is_gimple_val, fb_rvalue);
150e3929
RG
7735 /* TMR_STEP and TMR_OFFSET are always integer constants. */
7736 ret = MIN (r0, r1);
7737 }
7738 break;
7739
6de9cd9a
DN
7740 case NON_LVALUE_EXPR:
7741 /* This should have been stripped above. */
282899df 7742 gcc_unreachable ();
6de9cd9a
DN
7743
7744 case ASM_EXPR:
7745 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
7746 break;
7747
7748 case TRY_FINALLY_EXPR:
7749 case TRY_CATCH_EXPR:
726a989a
RB
7750 {
7751 gimple_seq eval, cleanup;
7752 gimple try_;
7753
820055a0
DC
7754 /* Calls to destructors are generated automatically in FINALLY/CATCH
7755 block. They should have location as UNKNOWN_LOCATION. However,
7756 gimplify_call_expr will reset these call stmts to input_location
7757 if it finds stmt's location is unknown. To prevent resetting for
7758 destructors, we set the input_location to unknown.
7759 Note that this only affects the destructor calls in FINALLY/CATCH
7760 block, and will automatically reset to its original value by the
7761 end of gimplify_expr. */
7762 input_location = UNKNOWN_LOCATION;
726a989a
RB
7763 eval = cleanup = NULL;
7764 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
7765 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
cc8b343d
JJ
7766 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
7767 if (gimple_seq_empty_p (cleanup))
7768 {
7769 gimple_seq_add_seq (pre_p, eval);
7770 ret = GS_ALL_DONE;
7771 break;
7772 }
726a989a
RB
7773 try_ = gimple_build_try (eval, cleanup,
7774 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
7775 ? GIMPLE_TRY_FINALLY
7776 : GIMPLE_TRY_CATCH);
e368f44f
DC
7777 if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
7778 gimple_set_location (try_, saved_location);
7779 else
7780 gimple_set_location (try_, EXPR_LOCATION (save_expr));
726a989a
RB
7781 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
7782 gimple_try_set_catch_is_cleanup (try_,
7783 TRY_CATCH_IS_CLEANUP (*expr_p));
7784 gimplify_seq_add_stmt (pre_p, try_);
7785 ret = GS_ALL_DONE;
7786 break;
7787 }
6de9cd9a
DN
7788
7789 case CLEANUP_POINT_EXPR:
7790 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
7791 break;
7792
7793 case TARGET_EXPR:
7794 ret = gimplify_target_expr (expr_p, pre_p, post_p);
7795 break;
7796
7797 case CATCH_EXPR:
726a989a
RB
7798 {
7799 gimple c;
7800 gimple_seq handler = NULL;
7801 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
7802 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
7803 gimplify_seq_add_stmt (pre_p, c);
7804 ret = GS_ALL_DONE;
7805 break;
7806 }
6de9cd9a
DN
7807
7808 case EH_FILTER_EXPR:
726a989a
RB
7809 {
7810 gimple ehf;
7811 gimple_seq failure = NULL;
7812
7813 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
7814 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
d665b6e5 7815 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
726a989a
RB
7816 gimplify_seq_add_stmt (pre_p, ehf);
7817 ret = GS_ALL_DONE;
7818 break;
7819 }
6de9cd9a 7820
0f59171d
RH
7821 case OBJ_TYPE_REF:
7822 {
7823 enum gimplify_status r0, r1;
726a989a
RB
7824 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
7825 post_p, is_gimple_val, fb_rvalue);
7826 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
7827 post_p, is_gimple_val, fb_rvalue);
0f3a057a 7828 TREE_SIDE_EFFECTS (*expr_p) = 0;
0f59171d
RH
7829 ret = MIN (r0, r1);
7830 }
6de9cd9a
DN
7831 break;
7832
6de9cd9a
DN
7833 case LABEL_DECL:
7834 /* We get here when taking the address of a label. We mark
7835 the label as "forced"; meaning it can never be removed and
7836 it is a potential target for any computed goto. */
7837 FORCED_LABEL (*expr_p) = 1;
7838 ret = GS_ALL_DONE;
7839 break;
7840
7841 case STATEMENT_LIST:
c6c7698d 7842 ret = gimplify_statement_list (expr_p, pre_p);
6de9cd9a
DN
7843 break;
7844
d25cee4d
RH
7845 case WITH_SIZE_EXPR:
7846 {
70e2829d
KH
7847 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7848 post_p == &internal_post ? NULL : post_p,
7849 gimple_test_f, fallback);
7850 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
7851 is_gimple_val, fb_rvalue);
941f78d1 7852 ret = GS_ALL_DONE;
d25cee4d
RH
7853 }
7854 break;
7855
6de9cd9a 7856 case VAR_DECL:
4744afba 7857 case PARM_DECL:
a9f7c570 7858 ret = gimplify_var_or_parm_decl (expr_p);
6de9cd9a
DN
7859 break;
7860
077b0dfb
JJ
7861 case RESULT_DECL:
7862 /* When within an OpenMP context, notice uses of variables. */
7863 if (gimplify_omp_ctxp)
7864 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
7865 ret = GS_ALL_DONE;
7866 break;
7867
71956db3
RH
7868 case SSA_NAME:
7869 /* Allow callbacks into the gimplifier during optimization. */
7870 ret = GS_ALL_DONE;
7871 break;
7872
953ff289 7873 case OMP_PARALLEL:
726a989a
RB
7874 gimplify_omp_parallel (expr_p, pre_p);
7875 ret = GS_ALL_DONE;
953ff289
DN
7876 break;
7877
a68ab351 7878 case OMP_TASK:
726a989a
RB
7879 gimplify_omp_task (expr_p, pre_p);
7880 ret = GS_ALL_DONE;
a68ab351
JJ
7881 break;
7882
953ff289 7883 case OMP_FOR:
74bf76ed 7884 case OMP_SIMD:
c02065fc 7885 case CILK_SIMD:
acf0174b 7886 case OMP_DISTRIBUTE:
953ff289
DN
7887 ret = gimplify_omp_for (expr_p, pre_p);
7888 break;
7889
7890 case OMP_SECTIONS:
7891 case OMP_SINGLE:
acf0174b
JJ
7892 case OMP_TARGET:
7893 case OMP_TARGET_DATA:
7894 case OMP_TEAMS:
726a989a
RB
7895 gimplify_omp_workshare (expr_p, pre_p);
7896 ret = GS_ALL_DONE;
953ff289
DN
7897 break;
7898
acf0174b
JJ
7899 case OMP_TARGET_UPDATE:
7900 gimplify_omp_target_update (expr_p, pre_p);
7901 ret = GS_ALL_DONE;
7902 break;
7903
953ff289
DN
7904 case OMP_SECTION:
7905 case OMP_MASTER:
acf0174b 7906 case OMP_TASKGROUP:
953ff289
DN
7907 case OMP_ORDERED:
7908 case OMP_CRITICAL:
726a989a
RB
7909 {
7910 gimple_seq body = NULL;
7911 gimple g;
7912
7913 gimplify_and_add (OMP_BODY (*expr_p), &body);
7914 switch (TREE_CODE (*expr_p))
7915 {
7916 case OMP_SECTION:
7917 g = gimple_build_omp_section (body);
7918 break;
7919 case OMP_MASTER:
7920 g = gimple_build_omp_master (body);
7921 break;
acf0174b
JJ
7922 case OMP_TASKGROUP:
7923 {
7924 gimple_seq cleanup = NULL;
7925 tree fn
7926 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
7927 g = gimple_build_call (fn, 0);
7928 gimple_seq_add_stmt (&cleanup, g);
7929 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
7930 body = NULL;
7931 gimple_seq_add_stmt (&body, g);
7932 g = gimple_build_omp_taskgroup (body);
7933 }
7934 break;
726a989a
RB
7935 case OMP_ORDERED:
7936 g = gimple_build_omp_ordered (body);
7937 break;
7938 case OMP_CRITICAL:
7939 g = gimple_build_omp_critical (body,
7940 OMP_CRITICAL_NAME (*expr_p));
7941 break;
7942 default:
7943 gcc_unreachable ();
7944 }
7945 gimplify_seq_add_stmt (pre_p, g);
7946 ret = GS_ALL_DONE;
7947 break;
7948 }
953ff289
DN
7949
7950 case OMP_ATOMIC:
20906c66
JJ
7951 case OMP_ATOMIC_READ:
7952 case OMP_ATOMIC_CAPTURE_OLD:
7953 case OMP_ATOMIC_CAPTURE_NEW:
953ff289
DN
7954 ret = gimplify_omp_atomic (expr_p, pre_p);
7955 break;
7956
0a35513e
AH
7957 case TRANSACTION_EXPR:
7958 ret = gimplify_transaction (expr_p, pre_p);
7959 break;
7960
16949072
RG
7961 case TRUTH_AND_EXPR:
7962 case TRUTH_OR_EXPR:
7963 case TRUTH_XOR_EXPR:
1d15f620 7964 {
bd5d002e 7965 tree orig_type = TREE_TYPE (*expr_p);
fc1f4caf 7966 tree new_type, xop0, xop1;
1d15f620 7967 *expr_p = gimple_boolify (*expr_p);
fc1f4caf
KT
7968 new_type = TREE_TYPE (*expr_p);
7969 if (!useless_type_conversion_p (orig_type, new_type))
1d15f620 7970 {
4b4455e5 7971 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
1d15f620
KT
7972 ret = GS_OK;
7973 break;
7974 }
da5fb469 7975
bd5d002e
RG
7976 /* Boolified binary truth expressions are semantically equivalent
7977 to bitwise binary expressions. Canonicalize them to the
7978 bitwise variant. */
7979 switch (TREE_CODE (*expr_p))
7980 {
7981 case TRUTH_AND_EXPR:
7982 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
7983 break;
7984 case TRUTH_OR_EXPR:
7985 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
7986 break;
7987 case TRUTH_XOR_EXPR:
7988 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
7989 break;
7990 default:
7991 break;
7992 }
fc1f4caf
KT
7993 /* Now make sure that operands have compatible type to
7994 expression's new_type. */
7995 xop0 = TREE_OPERAND (*expr_p, 0);
7996 xop1 = TREE_OPERAND (*expr_p, 1);
7997 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
7998 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
7999 new_type,
8000 xop0);
8001 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
8002 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
8003 new_type,
8004 xop1);
bd5d002e
RG
8005 /* Continue classified as tcc_binary. */
8006 goto expr_2;
da5fb469 8007 }
16949072
RG
8008
8009 case FMA_EXPR:
e6ed43b0 8010 case VEC_COND_EXPR:
2205ed25 8011 case VEC_PERM_EXPR:
16949072
RG
8012 /* Classified as tcc_expression. */
8013 goto expr_3;
8014
5be014d5 8015 case POINTER_PLUS_EXPR:
315f5f1b
RG
8016 {
8017 enum gimplify_status r0, r1;
8018 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8019 post_p, is_gimple_val, fb_rvalue);
8020 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8021 post_p, is_gimple_val, fb_rvalue);
8022 recalculate_side_effects (*expr_p);
8023 ret = MIN (r0, r1);
8024 /* Convert &X + CST to invariant &MEM[&X, CST]. Do this
8025 after gimplifying operands - this is similar to how
8026 it would be folding all gimplified stmts on creation
8027 to have them canonicalized, which is what we eventually
8028 should do anyway. */
8029 if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
8030 && is_gimple_min_invariant (TREE_OPERAND (*expr_p, 0)))
8031 {
8032 *expr_p = build_fold_addr_expr_with_type_loc
8033 (input_location,
8034 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (*expr_p)),
8035 TREE_OPERAND (*expr_p, 0),
8036 fold_convert (ptr_type_node,
8037 TREE_OPERAND (*expr_p, 1))),
8038 TREE_TYPE (*expr_p));
8039 ret = MIN (ret, GS_OK);
8040 }
8041 break;
8042 }
726a989a 8043
939b37da
BI
8044 case CILK_SYNC_STMT:
8045 {
8046 if (!fn_contains_cilk_spawn_p (cfun))
8047 {
8048 error_at (EXPR_LOCATION (*expr_p),
8049 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
8050 ret = GS_ERROR;
8051 }
8052 else
8053 {
8054 gimplify_cilk_sync (expr_p, pre_p);
8055 ret = GS_ALL_DONE;
8056 }
8057 break;
8058 }
8059
6de9cd9a 8060 default:
282899df 8061 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
6de9cd9a 8062 {
6615c446 8063 case tcc_comparison:
61c25908
OH
8064 /* Handle comparison of objects of non scalar mode aggregates
8065 with a call to memcmp. It would be nice to only have to do
8066 this for variable-sized objects, but then we'd have to allow
8067 the same nest of reference nodes we allow for MODIFY_EXPR and
8068 that's too complex.
8069
8070 Compare scalar mode aggregates as scalar mode values. Using
8071 memcmp for them would be very inefficient at best, and is
8072 plain wrong if bitfields are involved. */
726a989a
RB
8073 {
8074 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
61c25908 8075
544d960a
AS
8076 /* Vector comparisons need no boolification. */
8077 if (TREE_CODE (type) == VECTOR_TYPE)
8078 goto expr_2;
8079 else if (!AGGREGATE_TYPE_P (type))
7f3ff782
KT
8080 {
8081 tree org_type = TREE_TYPE (*expr_p);
8082 *expr_p = gimple_boolify (*expr_p);
8083 if (!useless_type_conversion_p (org_type,
8084 TREE_TYPE (*expr_p)))
8085 {
8086 *expr_p = fold_convert_loc (input_location,
8087 org_type, *expr_p);
8088 ret = GS_OK;
8089 }
8090 else
8091 goto expr_2;
8092 }
726a989a
RB
8093 else if (TYPE_MODE (type) != BLKmode)
8094 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
8095 else
8096 ret = gimplify_variable_sized_compare (expr_p);
61c25908 8097
726a989a 8098 break;
61c25908 8099 }
d3147f64 8100
282899df
NS
8101 /* If *EXPR_P does not need to be special-cased, handle it
8102 according to its class. */
6615c446 8103 case tcc_unary:
282899df
NS
8104 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8105 post_p, is_gimple_val, fb_rvalue);
8106 break;
6de9cd9a 8107
6615c446 8108 case tcc_binary:
282899df
NS
8109 expr_2:
8110 {
8111 enum gimplify_status r0, r1;
d3147f64 8112
282899df 8113 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
726a989a 8114 post_p, is_gimple_val, fb_rvalue);
282899df
NS
8115 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8116 post_p, is_gimple_val, fb_rvalue);
d3147f64 8117
282899df
NS
8118 ret = MIN (r0, r1);
8119 break;
8120 }
d3147f64 8121
16949072
RG
8122 expr_3:
8123 {
8124 enum gimplify_status r0, r1, r2;
8125
8126 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8127 post_p, is_gimple_val, fb_rvalue);
8128 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8129 post_p, is_gimple_val, fb_rvalue);
8130 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
8131 post_p, is_gimple_val, fb_rvalue);
8132
8133 ret = MIN (MIN (r0, r1), r2);
8134 break;
8135 }
8136
6615c446
JO
8137 case tcc_declaration:
8138 case tcc_constant:
6de9cd9a 8139 ret = GS_ALL_DONE;
282899df 8140 goto dont_recalculate;
d3147f64 8141
282899df 8142 default:
16949072 8143 gcc_unreachable ();
6de9cd9a 8144 }
6de9cd9a
DN
8145
8146 recalculate_side_effects (*expr_p);
726a989a 8147
282899df 8148 dont_recalculate:
6de9cd9a
DN
8149 break;
8150 }
d3147f64 8151
941f78d1 8152 gcc_assert (*expr_p || ret != GS_OK);
6de9cd9a
DN
8153 }
8154 while (ret == GS_OK);
8155
8156 /* If we encountered an error_mark somewhere nested inside, either
8157 stub out the statement or propagate the error back out. */
8158 if (ret == GS_ERROR)
8159 {
8160 if (is_statement)
65355d53 8161 *expr_p = NULL;
6de9cd9a
DN
8162 goto out;
8163 }
8164
6de9cd9a
DN
8165 /* This was only valid as a return value from the langhook, which
8166 we handled. Make sure it doesn't escape from any other context. */
282899df 8167 gcc_assert (ret != GS_UNHANDLED);
6de9cd9a 8168
65355d53 8169 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
6de9cd9a
DN
8170 {
8171 /* We aren't looking for a value, and we don't have a valid
8172 statement. If it doesn't have side-effects, throw it away. */
8173 if (!TREE_SIDE_EFFECTS (*expr_p))
65355d53 8174 *expr_p = NULL;
6de9cd9a 8175 else if (!TREE_THIS_VOLATILE (*expr_p))
44de5aeb
RK
8176 {
8177 /* This is probably a _REF that contains something nested that
8178 has side effects. Recurse through the operands to find it. */
8179 enum tree_code code = TREE_CODE (*expr_p);
8180
282899df 8181 switch (code)
44de5aeb 8182 {
282899df 8183 case COMPONENT_REF:
02a5eac4
EB
8184 case REALPART_EXPR:
8185 case IMAGPART_EXPR:
8186 case VIEW_CONVERT_EXPR:
282899df
NS
8187 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8188 gimple_test_f, fallback);
8189 break;
8190
a9e64c63
EB
8191 case ARRAY_REF:
8192 case ARRAY_RANGE_REF:
44de5aeb
RK
8193 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8194 gimple_test_f, fallback);
8195 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
282899df
NS
8196 gimple_test_f, fallback);
8197 break;
8198
8199 default:
8200 /* Anything else with side-effects must be converted to
a9e64c63 8201 a valid statement before we get here. */
282899df 8202 gcc_unreachable ();
44de5aeb 8203 }
44de5aeb 8204
65355d53 8205 *expr_p = NULL;
44de5aeb 8206 }
a9e64c63
EB
8207 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
8208 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
6de9cd9a 8209 {
a9e64c63
EB
8210 /* Historically, the compiler has treated a bare reference
8211 to a non-BLKmode volatile lvalue as forcing a load. */
af62f6f9 8212 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
726a989a 8213
c22b1771 8214 /* Normally, we do not want to create a temporary for a
a38578e1
MM
8215 TREE_ADDRESSABLE type because such a type should not be
8216 copied by bitwise-assignment. However, we make an
8217 exception here, as all we are doing here is ensuring that
8218 we read the bytes that make up the type. We use
8219 create_tmp_var_raw because create_tmp_var will abort when
57b51d4d 8220 given a TREE_ADDRESSABLE type. */
a38578e1
MM
8221 tree tmp = create_tmp_var_raw (type, "vol");
8222 gimple_add_tmp_var (tmp);
726a989a
RB
8223 gimplify_assign (tmp, *expr_p, pre_p);
8224 *expr_p = NULL;
6de9cd9a
DN
8225 }
8226 else
8227 /* We can't do anything useful with a volatile reference to
a9e64c63
EB
8228 an incomplete type, so just throw it away. Likewise for
8229 a BLKmode type, since any implicit inner load should
8230 already have been turned into an explicit one by the
8231 gimplification process. */
65355d53 8232 *expr_p = NULL;
6de9cd9a
DN
8233 }
8234
8235 /* If we are gimplifying at the statement level, we're done. Tack
726a989a 8236 everything together and return. */
325c3691 8237 if (fallback == fb_none || is_statement)
6de9cd9a 8238 {
726a989a
RB
8239 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
8240 it out for GC to reclaim it. */
8241 *expr_p = NULL_TREE;
8242
8243 if (!gimple_seq_empty_p (internal_pre)
8244 || !gimple_seq_empty_p (internal_post))
be00f578 8245 {
726a989a
RB
8246 gimplify_seq_add_seq (&internal_pre, internal_post);
8247 gimplify_seq_add_seq (pre_p, internal_pre);
be00f578 8248 }
726a989a
RB
8249
8250 /* The result of gimplifying *EXPR_P is going to be the last few
8251 statements in *PRE_P and *POST_P. Add location information
8252 to all the statements that were added by the gimplification
8253 helpers. */
8254 if (!gimple_seq_empty_p (*pre_p))
8255 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
8256
8257 if (!gimple_seq_empty_p (*post_p))
8258 annotate_all_with_location_after (*post_p, post_last_gsi,
8259 input_location);
8260
6de9cd9a
DN
8261 goto out;
8262 }
8263
726a989a
RB
8264#ifdef ENABLE_GIMPLE_CHECKING
8265 if (*expr_p)
8266 {
8267 enum tree_code code = TREE_CODE (*expr_p);
8268 /* These expressions should already be in gimple IR form. */
8269 gcc_assert (code != MODIFY_EXPR
8270 && code != ASM_EXPR
8271 && code != BIND_EXPR
8272 && code != CATCH_EXPR
6fc4fb06 8273 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
726a989a
RB
8274 && code != EH_FILTER_EXPR
8275 && code != GOTO_EXPR
8276 && code != LABEL_EXPR
8277 && code != LOOP_EXPR
726a989a
RB
8278 && code != SWITCH_EXPR
8279 && code != TRY_FINALLY_EXPR
8280 && code != OMP_CRITICAL
8281 && code != OMP_FOR
8282 && code != OMP_MASTER
acf0174b 8283 && code != OMP_TASKGROUP
726a989a
RB
8284 && code != OMP_ORDERED
8285 && code != OMP_PARALLEL
8286 && code != OMP_SECTIONS
8287 && code != OMP_SECTION
8288 && code != OMP_SINGLE);
8289 }
8290#endif
6de9cd9a 8291
726a989a
RB
8292 /* Otherwise we're gimplifying a subexpression, so the resulting
8293 value is interesting. If it's a valid operand that matches
8294 GIMPLE_TEST_F, we're done. Unless we are handling some
8295 post-effects internally; if that's the case, we need to copy into
8296 a temporary before adding the post-effects to POST_P. */
8297 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
6de9cd9a
DN
8298 goto out;
8299
8300 /* Otherwise, we need to create a new temporary for the gimplified
8301 expression. */
8302
8303 /* We can't return an lvalue if we have an internal postqueue. The
8304 object the lvalue refers to would (probably) be modified by the
8305 postqueue; we need to copy the value out first, which means an
8306 rvalue. */
726a989a
RB
8307 if ((fallback & fb_lvalue)
8308 && gimple_seq_empty_p (internal_post)
e847cc68 8309 && is_gimple_addressable (*expr_p))
6de9cd9a
DN
8310 {
8311 /* An lvalue will do. Take the address of the expression, store it
8312 in a temporary, and replace the expression with an INDIRECT_REF of
8313 that temporary. */
db3927fb 8314 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
6de9cd9a 8315 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
7f5ad6d7 8316 *expr_p = build_simple_mem_ref (tmp);
6de9cd9a 8317 }
ba4d8f9d 8318 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
6de9cd9a 8319 {
726a989a
RB
8320 /* An rvalue will do. Assign the gimplified expression into a
8321 new temporary TMP and replace the original expression with
8322 TMP. First, make sure that the expression has a type so that
8323 it can be assigned into a temporary. */
282899df 8324 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
412a1d9e 8325 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
6de9cd9a 8326 }
282899df 8327 else
6de9cd9a 8328 {
726a989a 8329#ifdef ENABLE_GIMPLE_CHECKING
282899df
NS
8330 if (!(fallback & fb_mayfail))
8331 {
8332 fprintf (stderr, "gimplification failed:\n");
8333 print_generic_expr (stderr, *expr_p, 0);
8334 debug_tree (*expr_p);
8335 internal_error ("gimplification failed");
8336 }
8337#endif
8338 gcc_assert (fallback & fb_mayfail);
726a989a 8339
282899df 8340 /* If this is an asm statement, and the user asked for the
535a42b1 8341 impossible, don't die. Fail and let gimplify_asm_expr
282899df 8342 issue an error. */
6de9cd9a
DN
8343 ret = GS_ERROR;
8344 goto out;
8345 }
6de9cd9a 8346
6de9cd9a 8347 /* Make sure the temporary matches our predicate. */
282899df 8348 gcc_assert ((*gimple_test_f) (*expr_p));
6de9cd9a 8349
726a989a 8350 if (!gimple_seq_empty_p (internal_post))
6de9cd9a 8351 {
726a989a
RB
8352 annotate_all_with_location (internal_post, input_location);
8353 gimplify_seq_add_seq (pre_p, internal_post);
6de9cd9a
DN
8354 }
8355
8356 out:
8357 input_location = saved_location;
8358 return ret;
8359}
8360
/* Look through TYPE for variable-sized objects and gimplify each such
   size that we find.  Add to LIST_P any statements generated.  */

void
gimplify_type_sizes (tree type, gimple_seq *list_p)
{
  tree field, t;

  /* Nothing to do for an absent or erroneous type.  */
  if (type == NULL || type == error_mark_node)
    return;

  /* We first do the main variant, then copy into any other variants.  */
  type = TYPE_MAIN_VARIANT (type);

  /* Avoid infinite recursion.  */
  if (TYPE_SIZES_GIMPLIFIED (type))
    return;

  /* Mark before recursing, so self-referential types terminate.  */
  TYPE_SIZES_GIMPLIFIED (type) = 1;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      /* Scalar types carry (possibly variable) bounds.  */
      gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
      gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);

      /* Propagate the gimplified bounds to the other variants.  */
      for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
	{
	  TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
	  TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
	}
      break;

    case ARRAY_TYPE:
      /* These types may not have declarations, so handle them here.  */
      gimplify_type_sizes (TREE_TYPE (type), list_p);
      gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
      /* Ensure VLA bounds aren't removed, for -O0 they should be variables
	 with assigned stack slots, for -O1+ -g they should be tracked
	 by VTA.  */
      if (!(TYPE_NAME (type)
	    && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	    && DECL_IGNORED_P (TYPE_NAME (type)))
	  && TYPE_DOMAIN (type)
	  && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
	{
	  /* Un-ignore artificial bound variables so debug info keeps them.  */
	  t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	  t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* Gimplify each field's position and sizes, and recurse into the
	 field's own type.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
	    gimplify_type_sizes (TREE_TYPE (field), list_p);
	  }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
	/* We used to recurse on the pointed-to type here, which turned out to
	   be incorrect because its definition might refer to variables not
	   yet initialized at this point if a forward declaration is involved.

	   It was actually useful for anonymous pointed-to types to ensure
	   that the sizes evaluation dominates every possible later use of the
	   values.  Restricting to such types here would be safe since there
	   is no possible forward declaration around, but would introduce an
	   undesirable middle-end semantic to anonymity.  We then defer to
	   front-ends the responsibility of ensuring that the sizes are
	   evaluated both early and late enough, e.g. by attaching artificial
	   type declarations to the tree.  */
      break;

    default:
      break;
    }

  /* Finally the size of the type object itself, in both bits and units.  */
  gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
  gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);

  /* Copy the gimplified sizes to every variant and mark them done too,
     so they won't be re-gimplified independently.  */
  for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
    {
      TYPE_SIZE (t) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
      TYPE_SIZES_GIMPLIFIED (t) = 1;
    }
}
8463
8464/* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
8465 a size or position, has had all of its SAVE_EXPRs evaluated.
726a989a 8466 We add any required statements to *STMT_P. */
44de5aeb
RK
8467
8468void
726a989a 8469gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
44de5aeb 8470{
3ac8781c 8471 tree expr = *expr_p;
a9c5ddf9 8472
44de5aeb 8473 /* We don't do anything if the value isn't there, is constant, or contains
1e748a2b 8474 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
aabcd309 8475 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
1e748a2b
RK
8476 will want to replace it with a new variable, but that will cause problems
8477 if this type is from outside the function. It's OK to have that here. */
848be094 8478 if (is_gimple_sizepos (expr))
44de5aeb
RK
8479 return;
8480
a9c5ddf9
RH
8481 *expr_p = unshare_expr (expr);
8482
ad50bc8d 8483 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
44de5aeb 8484}
6de9cd9a 8485
/* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
   containing the sequence of corresponding GIMPLE statements.  If DO_PARMS
   is true, also gimplify the parameters.  */

gimple
gimplify_body (tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, seq;
  gimple outer_bind;
  struct cgraph_node *cgn;

  timevar_push (TV_TREE_GIMPLIFY);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  /* Gimplification contexts must be balanced: none on entry...  */
  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context ();

  if (flag_openmp)
    {
      /* Bodies marked "omp declare target" are gimplified inside an
	 OpenMP target-region context.  */
      gcc_assert (gimplify_omp_ctxp == NULL);
      if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
	gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
    }

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (fndecl);
  unvisit_body (fndecl);

  /* Nested functions may reference VLA sizes of the enclosing function;
     track such uses in NONLOCAL_VLAS.  */
  cgn = cgraph_get_node (fndecl);
  if (cgn && cgn->origin)
    nonlocal_vlas = pointer_set_create ();

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = do_parms ? gimplify_parameters () : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
  outer_bind = gimple_seq_first_stmt (seq);
  if (!outer_bind)
    {
      /* An empty body still needs a statement to anchor the bind.  */
      outer_bind = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_bind);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_bind) == GIMPLE_BIND
      && gimple_seq_first (seq) == gimple_seq_last (seq))
    ;
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  /* The GENERIC body is no longer needed once it has been gimplified.  */
  DECL_SAVED_TREE (fndecl) = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function and clear DECL_VALUE_EXPR_P on the parameters.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      tree parm;

      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      gimple_bind_set_body (outer_bind, parm_stmts);

      for (parm = DECL_ARGUMENTS (current_function_decl);
	   parm; parm = DECL_CHAIN (parm))
	if (DECL_HAS_VALUE_EXPR_P (parm))
	  {
	    DECL_HAS_VALUE_EXPR_P (parm) = 0;
	    DECL_IGNORED_P (parm) = 0;
	  }
    }

  if (nonlocal_vlas)
    {
      pointer_set_destroy (nonlocal_vlas);
      nonlocal_vlas = NULL;
    }

  /* Tear down any OpenMP context opened above (or left open by the body).  */
  if ((flag_openmp || flag_openmp_simd) && gimplify_omp_ctxp)
    {
      delete_omp_context (gimplify_omp_ctxp);
      gimplify_omp_ctxp = NULL;
    }

  /* ...and none on exit either.  */
  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

#ifdef ENABLE_CHECKING
  if (!seen_error ())
    verify_gimple_in_seq (gimple_bind_body (outer_bind));
#endif

  timevar_pop (TV_TREE_GIMPLIFY);
  input_location = saved_location;

  return outer_bind;
}
8595
6a1f6c9c 8596typedef char *char_p; /* For DEF_VEC_P. */
6a1f6c9c
JM
8597
8598/* Return whether we should exclude FNDECL from instrumentation. */
8599
8600static bool
8601flag_instrument_functions_exclude_p (tree fndecl)
8602{
9771b263 8603 vec<char_p> *v;
6a1f6c9c 8604
9771b263
DN
8605 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
8606 if (v && v->length () > 0)
6a1f6c9c
JM
8607 {
8608 const char *name;
8609 int i;
8610 char *s;
8611
8612 name = lang_hooks.decl_printable_name (fndecl, 0);
9771b263 8613 FOR_EACH_VEC_ELT (*v, i, s)
6a1f6c9c
JM
8614 if (strstr (name, s) != NULL)
8615 return true;
8616 }
8617
9771b263
DN
8618 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
8619 if (v && v->length () > 0)
6a1f6c9c
JM
8620 {
8621 const char *name;
8622 int i;
8623 char *s;
8624
8625 name = DECL_SOURCE_FILE (fndecl);
9771b263 8626 FOR_EACH_VEC_ELT (*v, i, s)
6a1f6c9c
JM
8627 if (strstr (name, s) != NULL)
8628 return true;
8629 }
8630
8631 return false;
8632}
8633
/* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
   node for the function we want to gimplify.

   Return the sequence of GIMPLE statements corresponding to the body
   of FNDECL.  */

void
gimplify_function_tree (tree fndecl)
{
  tree parm, ret;
  gimple_seq seq;
  gimple bind;

  /* FNDECL must not have been gimplified already.  */
  gcc_assert (!gimple_body (fndecl));

  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
    {
      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (parm)
	  && !needs_to_live_in_memory (parm))
	DECL_GIMPLE_REG_P (parm) = 1;
    }

  /* Likewise for the return value.  */
  ret = DECL_RESULT (fndecl);
  if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
      && !needs_to_live_in_memory (ret))
    DECL_GIMPLE_REG_P (ret) = 1;

  bind = gimplify_body (fndecl, true);

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = NULL;
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      tree x;
      gimple new_bind;
      gimple tf;
      gimple_seq cleanup = NULL, body = NULL;
      tree tmp_var;
      gimple call;

      /* Build the finally (exit) sequence:
	   return_addr = __builtin_return_address (0);
	   __cyg_profile_func_exit (current_function, return_addr);  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);

      /* Build the entry sequence analogously, calling the enter hook
	 before the try/finally that protects the original body.  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&body, call);
      gimplify_seq_add_stmt (&body, tf);
      new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
      /* Clear the block for BIND, since it is no longer directly inside
	 the function, but within a try block.  */
      gimple_bind_set_block (bind, NULL);

      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties = PROP_gimple_any;

  pop_cfun ();
}
726a989a 8735
4a7cb16f
AM
8736/* Return a dummy expression of type TYPE in order to keep going after an
8737 error. */
b184c8f1 8738
4a7cb16f
AM
8739static tree
8740dummy_object (tree type)
b184c8f1 8741{
4a7cb16f
AM
8742 tree t = build_int_cst (build_pointer_type (type), 0);
8743 return build2 (MEM_REF, type, t, t);
b184c8f1
AM
8744}
8745
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);

  if (have_va_type == NULL_TREE)
    {
      error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
      return GS_ERROR;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      /* Emit the "so you should pass..." help only once per compilation.  */
      static bool gave_help;
      bool warned;

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (loc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (loc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }
  else
    {
      /* Make it easier for the backends by protecting the valist argument
	 from multiple evaluations.  */
      if (TREE_CODE (have_va_type) == ARRAY_TYPE)
	{
	  /* For this case, the backends will be expecting a pointer to
	     TREE_TYPE (abi), but it's possible we've
	     actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
	     So fix it.  */
	  if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	    {
	      tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
	      valist = fold_convert_loc (loc, p1,
					 build_fold_addr_expr_loc (loc, valist));
	    }

	  gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
	}
      else
	gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);

      if (!targetm.gimplify_va_arg_expr)
	/* FIXME: Once most targets are converted we should merely
	   assert this is non-null.  */
	return GS_ALL_DONE;

      /* Let the target expand the va_arg access; the replacement
	 expression still needs gimplifying, hence GS_OK.  */
      *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
      return GS_OK;
    }
}
bcf71673 8838
45b0be94
AM
8839/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
8840
8841 DST/SRC are the destination and source respectively. You can pass
8842 ungimplified trees in DST or SRC, in which case they will be
8843 converted to a gimple operand if necessary.
8844
8845 This function returns the newly created GIMPLE_ASSIGN tuple. */
8846
8847gimple
8848gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
8849{
8850 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
8851 gimplify_and_add (t, seq_p);
8852 ggc_free (t);
8853 return gimple_seq_last_stmt (*seq_p);
8854}
8855
18f429e2
AM
8856inline hashval_t
8857gimplify_hasher::hash (const value_type *p)
8858{
8859 tree t = p->val;
8860 return iterative_hash_expr (t, 0);
8861}
8862
8863inline bool
8864gimplify_hasher::equal (const value_type *p1, const compare_type *p2)
8865{
8866 tree t1 = p1->val;
8867 tree t2 = p2->val;
8868 enum tree_code code = TREE_CODE (t1);
8869
8870 if (TREE_CODE (t2) != code
8871 || TREE_TYPE (t1) != TREE_TYPE (t2))
8872 return false;
8873
8874 if (!operand_equal_p (t1, t2, 0))
8875 return false;
8876
8877#ifdef ENABLE_CHECKING
8878 /* Only allow them to compare equal if they also hash equal; otherwise
8879 results are nondeterminate, and we fail bootstrap comparison. */
8880 gcc_assert (hash (p1) == hash (p2));
8881#endif
8882
8883 return true;
8884}