]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/gimplify.c
genattrtab.c (write_header): Include hash-set.h...
[thirdparty/gcc.git] / gcc / gimplify.c
CommitLineData
6de9cd9a
DN
1/* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
5624e564 3 Copyright (C) 2002-2015 Free Software Foundation, Inc.
6de9cd9a
DN
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
6
7This file is part of GCC.
8
9GCC is free software; you can redistribute it and/or modify it under
10the terms of the GNU General Public License as published by the Free
9dcd6f09 11Software Foundation; either version 3, or (at your option) any later
6de9cd9a
DN
12version.
13
14GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15WARRANTY; without even the implied warranty of MERCHANTABILITY or
16FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17for more details.
18
19You should have received a copy of the GNU General Public License
9dcd6f09
NC
20along with GCC; see the file COPYING3. If not see
21<http://www.gnu.org/licenses/>. */
6de9cd9a
DN
22
23#include "config.h"
24#include "system.h"
25#include "coretypes.h"
40e23961
MC
26#include "hash-set.h"
27#include "machmode.h"
28#include "vec.h"
29#include "double-int.h"
30#include "input.h"
31#include "alias.h"
32#include "symtab.h"
33#include "options.h"
34#include "wide-int.h"
35#include "inchash.h"
6de9cd9a 36#include "tree.h"
40e23961 37#include "fold-const.h"
d8a2d370 38#include "expr.h"
60393bbc 39#include "predict.h"
60393bbc
AM
40#include "tm.h"
41#include "hard-reg-set.h"
42#include "input.h"
43#include "function.h"
2fb9a547
AM
44#include "basic-block.h"
45#include "tree-ssa-alias.h"
46#include "internal-fn.h"
47#include "gimple-fold.h"
48#include "tree-eh.h"
49#include "gimple-expr.h"
50#include "is-a.h"
18f429e2 51#include "gimple.h"
45b0be94 52#include "gimplify.h"
5be5c238 53#include "gimple-iterator.h"
d8a2d370
DN
54#include "stringpool.h"
55#include "calls.h"
56#include "varasm.h"
57#include "stor-layout.h"
58#include "stmt.h"
59#include "print-tree.h"
726a989a 60#include "tree-iterator.h"
6de9cd9a 61#include "tree-inline.h"
cf835838 62#include "tree-pretty-print.h"
6de9cd9a 63#include "langhooks.h"
442b4905
AM
64#include "bitmap.h"
65#include "gimple-ssa.h"
c582198b
AM
66#include "hash-map.h"
67#include "plugin-api.h"
68#include "ipa-ref.h"
44de5aeb 69#include "cgraph.h"
442b4905
AM
70#include "tree-cfg.h"
71#include "tree-ssanames.h"
72#include "tree-ssa.h"
718f9c0f 73#include "diagnostic-core.h"
cd3ce9b4 74#include "target.h"
6be42dd4 75#include "splay-tree.h"
0645c1a2 76#include "omp-low.h"
4484a35a 77#include "gimple-low.h"
939b37da 78#include "cilk.h"
6de9cd9a 79
7ee2468b
SB
80#include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
81#include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
9b2b7279 82#include "builtins.h"
953ff289
DN
83
84enum gimplify_omp_var_data
85{
86 GOVD_SEEN = 1,
87 GOVD_EXPLICIT = 2,
88 GOVD_SHARED = 4,
89 GOVD_PRIVATE = 8,
90 GOVD_FIRSTPRIVATE = 16,
91 GOVD_LASTPRIVATE = 32,
92 GOVD_REDUCTION = 64,
93 GOVD_LOCAL = 128,
acf0174b
JJ
94 GOVD_MAP = 256,
95 GOVD_DEBUG_PRIVATE = 512,
96 GOVD_PRIVATE_OUTER_REF = 1024,
74bf76ed 97 GOVD_LINEAR = 2048,
acf0174b
JJ
98 GOVD_ALIGNED = 4096,
99 GOVD_MAP_TO_ONLY = 8192,
953ff289 100 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
74bf76ed
JJ
101 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
102 | GOVD_LOCAL)
953ff289
DN
103};
104
726a989a 105
a68ab351
JJ
106enum omp_region_type
107{
108 ORT_WORKSHARE = 0,
74bf76ed 109 ORT_SIMD = 1,
a68ab351 110 ORT_PARALLEL = 2,
f22f4340
JJ
111 ORT_COMBINED_PARALLEL = 3,
112 ORT_TASK = 4,
acf0174b
JJ
113 ORT_UNTIED_TASK = 5,
114 ORT_TEAMS = 8,
115 ORT_TARGET_DATA = 16,
116 ORT_TARGET = 32
a68ab351
JJ
117};
118
45852dcc
AM
119/* Gimplify hashtable helper. */
120
121struct gimplify_hasher : typed_free_remove <elt_t>
122{
123 typedef elt_t value_type;
124 typedef elt_t compare_type;
125 static inline hashval_t hash (const value_type *);
126 static inline bool equal (const value_type *, const compare_type *);
127};
128
129struct gimplify_ctx
130{
131 struct gimplify_ctx *prev_context;
132
538dd0b7 133 vec<gbind *> bind_expr_stack;
45852dcc
AM
134 tree temps;
135 gimple_seq conditional_cleanups;
136 tree exit_label;
137 tree return_temp;
138
139 vec<tree> case_labels;
140 /* The formal temporary table. Should this be persistent? */
c203e8a7 141 hash_table<gimplify_hasher> *temp_htab;
45852dcc
AM
142
143 int conditions;
144 bool save_stack;
145 bool into_ssa;
146 bool allow_rhs_cond_expr;
147 bool in_cleanup_point_expr;
148};
149
953ff289 150struct gimplify_omp_ctx
6de9cd9a 151{
953ff289
DN
152 struct gimplify_omp_ctx *outer_context;
153 splay_tree variables;
6e2830c3 154 hash_set<tree> *privatized_types;
953ff289
DN
155 location_t location;
156 enum omp_clause_default_kind default_kind;
a68ab351 157 enum omp_region_type region_type;
acf0174b 158 bool combined_loop;
9cf32741 159 bool distribute;
953ff289
DN
160};
161
45852dcc 162static struct gimplify_ctx *gimplify_ctxp;
953ff289
DN
163static struct gimplify_omp_ctx *gimplify_omp_ctxp;
164
ad19c4be 165/* Forward declaration. */
726a989a 166static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
eb6127a4 167
a1a6c5b2
JJ
168/* Shorter alias name for the above function for use in gimplify.c
169 only. */
170
171static inline void
172gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
173{
174 gimple_seq_add_stmt_without_update (seq_p, gs);
175}
176
726a989a
RB
177/* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
178 NULL, a new sequence is allocated. This function is
179 similar to gimple_seq_add_seq, but does not scan the operands.
180 During gimplification, we need to manipulate statement sequences
181 before the def/use vectors have been constructed. */
182
183static void
184gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
185{
186 gimple_stmt_iterator si;
187
188 if (src == NULL)
189 return;
190
726a989a
RB
191 si = gsi_last (*dst_p);
192 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
193}
194
45852dcc
AM
195
196/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
197 and popping gimplify contexts. */
198
199static struct gimplify_ctx *ctx_pool = NULL;
200
201/* Return a gimplify context struct from the pool. */
202
203static inline struct gimplify_ctx *
204ctx_alloc (void)
205{
206 struct gimplify_ctx * c = ctx_pool;
207
208 if (c)
209 ctx_pool = c->prev_context;
210 else
211 c = XNEW (struct gimplify_ctx);
212
213 memset (c, '\0', sizeof (*c));
214 return c;
215}
216
217/* Put gimplify context C back into the pool. */
218
219static inline void
220ctx_free (struct gimplify_ctx *c)
221{
222 c->prev_context = ctx_pool;
223 ctx_pool = c;
224}
225
226/* Free allocated ctx stack memory. */
227
228void
229free_gimplify_stack (void)
230{
231 struct gimplify_ctx *c;
232
233 while ((c = ctx_pool))
234 {
235 ctx_pool = c->prev_context;
236 free (c);
237 }
238}
239
240
6de9cd9a
DN
241/* Set up a context for the gimplifier. */
242
243void
45852dcc 244push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
6de9cd9a 245{
45852dcc
AM
246 struct gimplify_ctx *c = ctx_alloc ();
247
953ff289 248 c->prev_context = gimplify_ctxp;
953ff289 249 gimplify_ctxp = c;
45852dcc
AM
250 gimplify_ctxp->into_ssa = in_ssa;
251 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
6de9cd9a
DN
252}
253
254/* Tear down a context for the gimplifier. If BODY is non-null, then
255 put the temporaries into the outer BIND_EXPR. Otherwise, put them
726a989a
RB
256 in the local_decls.
257
258 BODY is not a sequence, but the first tuple in a sequence. */
6de9cd9a
DN
259
260void
726a989a 261pop_gimplify_context (gimple body)
6de9cd9a 262{
953ff289 263 struct gimplify_ctx *c = gimplify_ctxp;
17ad5b5e 264
9771b263
DN
265 gcc_assert (c
266 && (!c->bind_expr_stack.exists ()
267 || c->bind_expr_stack.is_empty ()));
268 c->bind_expr_stack.release ();
953ff289 269 gimplify_ctxp = c->prev_context;
6de9cd9a
DN
270
271 if (body)
5123ad09 272 declare_vars (c->temps, body, false);
6de9cd9a 273 else
953ff289 274 record_vars (c->temps);
6de9cd9a 275
c203e8a7
TS
276 delete c->temp_htab;
277 c->temp_htab = NULL;
45852dcc 278 ctx_free (c);
6de9cd9a
DN
279}
280
ad19c4be
EB
281/* Push a GIMPLE_BIND tuple onto the stack of bindings. */
282
c24b7de9 283static void
538dd0b7 284gimple_push_bind_expr (gbind *bind_stmt)
6de9cd9a 285{
9771b263 286 gimplify_ctxp->bind_expr_stack.reserve (8);
538dd0b7 287 gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
6de9cd9a
DN
288}
289
ad19c4be
EB
290/* Pop the first element off the stack of bindings. */
291
c24b7de9 292static void
6de9cd9a
DN
293gimple_pop_bind_expr (void)
294{
9771b263 295 gimplify_ctxp->bind_expr_stack.pop ();
6de9cd9a
DN
296}
297
ad19c4be
EB
298/* Return the first element of the stack of bindings. */
299
538dd0b7 300gbind *
6de9cd9a
DN
301gimple_current_bind_expr (void)
302{
9771b263 303 return gimplify_ctxp->bind_expr_stack.last ();
726a989a
RB
304}
305
ad19c4be 306/* Return the stack of bindings created during gimplification. */
726a989a 307
538dd0b7 308vec<gbind *>
726a989a
RB
309gimple_bind_expr_stack (void)
310{
311 return gimplify_ctxp->bind_expr_stack;
6de9cd9a
DN
312}
313
ad19c4be 314/* Return true iff there is a COND_EXPR between us and the innermost
6de9cd9a
DN
315 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
316
317static bool
318gimple_conditional_context (void)
319{
320 return gimplify_ctxp->conditions > 0;
321}
322
323/* Note that we've entered a COND_EXPR. */
324
325static void
326gimple_push_condition (void)
327{
726a989a 328#ifdef ENABLE_GIMPLE_CHECKING
d775bc45 329 if (gimplify_ctxp->conditions == 0)
726a989a 330 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
d775bc45 331#endif
6de9cd9a
DN
332 ++(gimplify_ctxp->conditions);
333}
334
335/* Note that we've left a COND_EXPR. If we're back at unconditional scope
336 now, add any conditional cleanups we've seen to the prequeue. */
337
338static void
726a989a 339gimple_pop_condition (gimple_seq *pre_p)
6de9cd9a
DN
340{
341 int conds = --(gimplify_ctxp->conditions);
aa4a53af 342
282899df 343 gcc_assert (conds >= 0);
6de9cd9a
DN
344 if (conds == 0)
345 {
726a989a
RB
346 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
347 gimplify_ctxp->conditional_cleanups = NULL;
6de9cd9a 348 }
6de9cd9a
DN
349}
350
953ff289
DN
351/* A stable comparison routine for use with splay trees and DECLs. */
352
353static int
354splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
355{
356 tree a = (tree) xa;
357 tree b = (tree) xb;
358
359 return DECL_UID (a) - DECL_UID (b);
360}
361
362/* Create a new omp construct that deals with variable remapping. */
363
364static struct gimplify_omp_ctx *
a68ab351 365new_omp_context (enum omp_region_type region_type)
953ff289
DN
366{
367 struct gimplify_omp_ctx *c;
368
369 c = XCNEW (struct gimplify_omp_ctx);
370 c->outer_context = gimplify_omp_ctxp;
371 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
6e2830c3 372 c->privatized_types = new hash_set<tree>;
953ff289 373 c->location = input_location;
a68ab351 374 c->region_type = region_type;
f22f4340 375 if ((region_type & ORT_TASK) == 0)
a68ab351
JJ
376 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
377 else
378 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
953ff289
DN
379
380 return c;
381}
382
383/* Destroy an omp construct that deals with variable remapping. */
384
385static void
386delete_omp_context (struct gimplify_omp_ctx *c)
387{
388 splay_tree_delete (c->variables);
6e2830c3 389 delete c->privatized_types;
953ff289
DN
390 XDELETE (c);
391}
392
393static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
394static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
395
726a989a
RB
396/* Both gimplify the statement T and append it to *SEQ_P. This function
397 behaves exactly as gimplify_stmt, but you don't have to pass T as a
398 reference. */
cd3ce9b4
JM
399
400void
726a989a
RB
401gimplify_and_add (tree t, gimple_seq *seq_p)
402{
403 gimplify_stmt (&t, seq_p);
404}
405
406/* Gimplify statement T into sequence *SEQ_P, and return the first
407 tuple in the sequence of generated tuples for this statement.
408 Return NULL if gimplifying T produced no tuples. */
409
410static gimple
411gimplify_and_return_first (tree t, gimple_seq *seq_p)
cd3ce9b4 412{
726a989a
RB
413 gimple_stmt_iterator last = gsi_last (*seq_p);
414
415 gimplify_and_add (t, seq_p);
416
417 if (!gsi_end_p (last))
418 {
419 gsi_next (&last);
420 return gsi_stmt (last);
421 }
422 else
423 return gimple_seq_first_stmt (*seq_p);
cd3ce9b4
JM
424}
425
216820a4
RG
426/* Returns true iff T is a valid RHS for an assignment to an un-renamed
427 LHS, or for a call argument. */
428
429static bool
430is_gimple_mem_rhs (tree t)
431{
432 /* If we're dealing with a renamable type, either source or dest must be
433 a renamed variable. */
434 if (is_gimple_reg_type (TREE_TYPE (t)))
435 return is_gimple_val (t);
436 else
437 return is_gimple_val (t) || is_gimple_lvalue (t);
438}
439
726a989a 440/* Return true if T is a CALL_EXPR or an expression that can be
12947319 441 assigned to a temporary. Note that this predicate should only be
726a989a
RB
442 used during gimplification. See the rationale for this in
443 gimplify_modify_expr. */
444
445static bool
ba4d8f9d 446is_gimple_reg_rhs_or_call (tree t)
726a989a 447{
ba4d8f9d
RG
448 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
449 || TREE_CODE (t) == CALL_EXPR);
726a989a
RB
450}
451
452/* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
453 this predicate should only be used during gimplification. See the
454 rationale for this in gimplify_modify_expr. */
455
456static bool
ba4d8f9d 457is_gimple_mem_rhs_or_call (tree t)
726a989a
RB
458{
459 /* If we're dealing with a renamable type, either source or dest must be
050bbfeb
RG
460 a renamed variable. */
461 if (is_gimple_reg_type (TREE_TYPE (t)))
726a989a
RB
462 return is_gimple_val (t);
463 else
ba4d8f9d
RG
464 return (is_gimple_val (t) || is_gimple_lvalue (t)
465 || TREE_CODE (t) == CALL_EXPR);
726a989a
RB
466}
467
2ad728d2
RG
468/* Create a temporary with a name derived from VAL. Subroutine of
469 lookup_tmp_var; nobody else should call this function. */
470
471static inline tree
947ca6a0 472create_tmp_from_val (tree val)
2ad728d2
RG
473{
474 /* Drop all qualifiers and address-space information from the value type. */
475 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
476 tree var = create_tmp_var (type, get_name (val));
947ca6a0
RB
477 if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
478 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
2ad728d2
RG
479 DECL_GIMPLE_REG_P (var) = 1;
480 return var;
481}
482
483/* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
484 an existing expression temporary. */
485
486static tree
487lookup_tmp_var (tree val, bool is_formal)
488{
489 tree ret;
490
491 /* If not optimizing, never really reuse a temporary. local-alloc
492 won't allocate any variable that is used in more than one basic
493 block, which means it will go into memory, causing much extra
494 work in reload and final and poorer code generation, outweighing
495 the extra memory allocation here. */
496 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
947ca6a0 497 ret = create_tmp_from_val (val);
2ad728d2
RG
498 else
499 {
500 elt_t elt, *elt_p;
4a8fb1a1 501 elt_t **slot;
2ad728d2
RG
502
503 elt.val = val;
c203e8a7
TS
504 if (!gimplify_ctxp->temp_htab)
505 gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
506 slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
2ad728d2
RG
507 if (*slot == NULL)
508 {
509 elt_p = XNEW (elt_t);
510 elt_p->val = val;
947ca6a0 511 elt_p->temp = ret = create_tmp_from_val (val);
4a8fb1a1 512 *slot = elt_p;
2ad728d2
RG
513 }
514 else
515 {
4a8fb1a1 516 elt_p = *slot;
2ad728d2
RG
517 ret = elt_p->temp;
518 }
519 }
520
521 return ret;
522}
523
ba4d8f9d 524/* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
6de9cd9a
DN
525
526static tree
726a989a
RB
527internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
528 bool is_formal)
6de9cd9a
DN
529{
530 tree t, mod;
6de9cd9a 531
726a989a
RB
532 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
533 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
ba4d8f9d 534 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
726a989a 535 fb_rvalue);
6de9cd9a 536
2ad728d2
RG
537 if (gimplify_ctxp->into_ssa
538 && is_gimple_reg_type (TREE_TYPE (val)))
b731b390 539 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
2ad728d2
RG
540 else
541 t = lookup_tmp_var (val, is_formal);
e41d82f5 542
2e929cf3 543 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
6de9cd9a 544
8400e75e 545 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
6de9cd9a 546
fff34d35
RK
547 /* gimplify_modify_expr might want to reduce this further. */
548 gimplify_and_add (mod, pre_p);
726a989a 549 ggc_free (mod);
8b11a64c 550
6de9cd9a
DN
551 return t;
552}
553
ad19c4be 554/* Return a formal temporary variable initialized with VAL. PRE_P is as
ba4d8f9d
RG
555 in gimplify_expr. Only use this function if:
556
557 1) The value of the unfactored expression represented by VAL will not
558 change between the initialization and use of the temporary, and
559 2) The temporary will not be otherwise modified.
560
561 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
562 and #2 means it is inappropriate for && temps.
563
564 For other cases, use get_initialized_tmp_var instead. */
50674e96 565
6de9cd9a 566tree
726a989a 567get_formal_tmp_var (tree val, gimple_seq *pre_p)
6de9cd9a
DN
568{
569 return internal_get_tmp_var (val, pre_p, NULL, true);
570}
571
ad19c4be 572/* Return a temporary variable initialized with VAL. PRE_P and POST_P
6de9cd9a
DN
573 are as in gimplify_expr. */
574
575tree
726a989a 576get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
6de9cd9a
DN
577{
578 return internal_get_tmp_var (val, pre_p, post_p, false);
579}
580
ad19c4be
EB
581/* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
582 generate debug info for them; otherwise don't. */
6de9cd9a
DN
583
584void
538dd0b7 585declare_vars (tree vars, gimple gs, bool debug_info)
6de9cd9a
DN
586{
587 tree last = vars;
588 if (last)
589 {
5123ad09 590 tree temps, block;
6de9cd9a 591
538dd0b7 592 gbind *scope = as_a <gbind *> (gs);
6de9cd9a
DN
593
594 temps = nreverse (last);
5123ad09 595
524d9a45 596 block = gimple_bind_block (scope);
726a989a 597 gcc_assert (!block || TREE_CODE (block) == BLOCK);
5123ad09
EB
598 if (!block || !debug_info)
599 {
910ad8de 600 DECL_CHAIN (last) = gimple_bind_vars (scope);
726a989a 601 gimple_bind_set_vars (scope, temps);
5123ad09
EB
602 }
603 else
604 {
605 /* We need to attach the nodes both to the BIND_EXPR and to its
606 associated BLOCK for debugging purposes. The key point here
607 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
608 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
609 if (BLOCK_VARS (block))
610 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
611 else
612 {
726a989a
RB
613 gimple_bind_set_vars (scope,
614 chainon (gimple_bind_vars (scope), temps));
5123ad09
EB
615 BLOCK_VARS (block) = temps;
616 }
617 }
6de9cd9a
DN
618 }
619}
620
a441447f
OH
621/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
622 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
623 no such upper bound can be obtained. */
624
625static void
626force_constant_size (tree var)
627{
628 /* The only attempt we make is by querying the maximum size of objects
629 of the variable's type. */
630
631 HOST_WIDE_INT max_size;
632
633 gcc_assert (TREE_CODE (var) == VAR_DECL);
634
635 max_size = max_int_size_in_bytes (TREE_TYPE (var));
636
637 gcc_assert (max_size >= 0);
638
639 DECL_SIZE_UNIT (var)
640 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
641 DECL_SIZE (var)
642 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
643}
644
ad19c4be
EB
645/* Push the temporary variable TMP into the current binding. */
646
45b62594
RB
647void
648gimple_add_tmp_var_fn (struct function *fn, tree tmp)
649{
650 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
651
652 /* Later processing assumes that the object size is constant, which might
653 not be true at this point. Force the use of a constant upper bound in
654 this case. */
655 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
656 force_constant_size (tmp);
657
658 DECL_CONTEXT (tmp) = fn->decl;
659 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
660
661 record_vars_into (tmp, fn->decl);
662}
663
664/* Push the temporary variable TMP into the current binding. */
665
6de9cd9a
DN
666void
667gimple_add_tmp_var (tree tmp)
668{
910ad8de 669 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
6de9cd9a 670
a441447f
OH
671 /* Later processing assumes that the object size is constant, which might
672 not be true at this point. Force the use of a constant upper bound in
673 this case. */
cc269bb6 674 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
a441447f
OH
675 force_constant_size (tmp);
676
6de9cd9a 677 DECL_CONTEXT (tmp) = current_function_decl;
48eb4e53 678 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
6de9cd9a
DN
679
680 if (gimplify_ctxp)
681 {
910ad8de 682 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
6de9cd9a 683 gimplify_ctxp->temps = tmp;
953ff289
DN
684
685 /* Mark temporaries local within the nearest enclosing parallel. */
686 if (gimplify_omp_ctxp)
687 {
688 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
74bf76ed
JJ
689 while (ctx
690 && (ctx->region_type == ORT_WORKSHARE
691 || ctx->region_type == ORT_SIMD))
953ff289
DN
692 ctx = ctx->outer_context;
693 if (ctx)
694 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
695 }
6de9cd9a
DN
696 }
697 else if (cfun)
698 record_vars (tmp);
699 else
726a989a
RB
700 {
701 gimple_seq body_seq;
702
703 /* This case is for nested functions. We need to expose the locals
704 they create. */
705 body_seq = gimple_body (current_function_decl);
706 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
707 }
708}
709
726a989a 710
616f1431
EB
711\f
712/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
713 nodes that are referenced more than once in GENERIC functions. This is
714 necessary because gimplification (translation into GIMPLE) is performed
715 by modifying tree nodes in-place, so gimplication of a shared node in a
716 first context could generate an invalid GIMPLE form in a second context.
717
718 This is achieved with a simple mark/copy/unmark algorithm that walks the
719 GENERIC representation top-down, marks nodes with TREE_VISITED the first
720 time it encounters them, duplicates them if they already have TREE_VISITED
721 set, and finally removes the TREE_VISITED marks it has set.
722
723 The algorithm works only at the function level, i.e. it generates a GENERIC
724 representation of a function with no nodes shared within the function when
725 passed a GENERIC function (except for nodes that are allowed to be shared).
726
727 At the global level, it is also necessary to unshare tree nodes that are
728 referenced in more than one function, for the same aforementioned reason.
729 This requires some cooperation from the front-end. There are 2 strategies:
730
731 1. Manual unsharing. The front-end needs to call unshare_expr on every
732 expression that might end up being shared across functions.
733
734 2. Deep unsharing. This is an extension of regular unsharing. Instead
735 of calling unshare_expr on expressions that might be shared across
736 functions, the front-end pre-marks them with TREE_VISITED. This will
737 ensure that they are unshared on the first reference within functions
738 when the regular unsharing algorithm runs. The counterpart is that
739 this algorithm must look deeper than for manual unsharing, which is
740 specified by LANG_HOOKS_DEEP_UNSHARING.
741
742 If there are only few specific cases of node sharing across functions, it is
743 probably easier for a front-end to unshare the expressions manually. On the
744 contrary, if the expressions generated at the global level are as widespread
745 as expressions generated within functions, deep unsharing is very likely the
746 way to go. */
747
748/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
3ad065ef
EB
749 These nodes model computations that must be done once. If we were to
750 unshare something like SAVE_EXPR(i++), the gimplification process would
751 create wrong code. However, if DATA is non-null, it must hold a pointer
752 set that is used to unshare the subtrees of these nodes. */
6de9cd9a
DN
753
754static tree
755mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
756{
616f1431
EB
757 tree t = *tp;
758 enum tree_code code = TREE_CODE (t);
759
6687b740
EB
760 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
761 copy their subtrees if we can make sure to do it only once. */
762 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
616f1431 763 {
6e2830c3 764 if (data && !((hash_set<tree> *)data)->add (t))
616f1431
EB
765 ;
766 else
767 *walk_subtrees = 0;
768 }
769
770 /* Stop at types, decls, constants like copy_tree_r. */
771 else if (TREE_CODE_CLASS (code) == tcc_type
772 || TREE_CODE_CLASS (code) == tcc_declaration
773 || TREE_CODE_CLASS (code) == tcc_constant
774 /* We can't do anything sensible with a BLOCK used as an
775 expression, but we also can't just die when we see it
776 because of non-expression uses. So we avert our eyes
777 and cross our fingers. Silly Java. */
778 || code == BLOCK)
6de9cd9a 779 *walk_subtrees = 0;
616f1431
EB
780
781 /* Cope with the statement expression extension. */
782 else if (code == STATEMENT_LIST)
783 ;
784
785 /* Leave the bulk of the work to copy_tree_r itself. */
6de9cd9a 786 else
6687b740 787 copy_tree_r (tp, walk_subtrees, NULL);
6de9cd9a
DN
788
789 return NULL_TREE;
790}
791
3ad065ef
EB
792/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
793 If *TP has been visited already, then *TP is deeply copied by calling
794 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
6de9cd9a
DN
795
796static tree
616f1431 797copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
6de9cd9a 798{
f0638e1d
RH
799 tree t = *tp;
800 enum tree_code code = TREE_CODE (t);
801
44de5aeb
RK
802 /* Skip types, decls, and constants. But we do want to look at their
803 types and the bounds of types. Mark them as visited so we properly
804 unmark their subtrees on the unmark pass. If we've already seen them,
805 don't look down further. */
6615c446
JO
806 if (TREE_CODE_CLASS (code) == tcc_type
807 || TREE_CODE_CLASS (code) == tcc_declaration
808 || TREE_CODE_CLASS (code) == tcc_constant)
44de5aeb
RK
809 {
810 if (TREE_VISITED (t))
811 *walk_subtrees = 0;
812 else
813 TREE_VISITED (t) = 1;
814 }
f0638e1d 815
6de9cd9a
DN
816 /* If this node has been visited already, unshare it and don't look
817 any deeper. */
f0638e1d 818 else if (TREE_VISITED (t))
6de9cd9a 819 {
616f1431 820 walk_tree (tp, mostly_copy_tree_r, data, NULL);
6de9cd9a
DN
821 *walk_subtrees = 0;
822 }
f0638e1d 823
616f1431 824 /* Otherwise, mark the node as visited and keep looking. */
6de9cd9a 825 else
77c9db77 826 TREE_VISITED (t) = 1;
f0638e1d 827
6de9cd9a
DN
828 return NULL_TREE;
829}
830
3ad065ef
EB
831/* Unshare most of the shared trees rooted at *TP. DATA is passed to the
832 copy_if_shared_r callback unmodified. */
6de9cd9a 833
616f1431 834static inline void
3ad065ef 835copy_if_shared (tree *tp, void *data)
616f1431 836{
3ad065ef 837 walk_tree (tp, copy_if_shared_r, data, NULL);
6de9cd9a
DN
838}
839
3ad065ef
EB
840/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
841 any nested functions. */
44de5aeb
RK
842
843static void
3ad065ef 844unshare_body (tree fndecl)
44de5aeb 845{
d52f5295 846 struct cgraph_node *cgn = cgraph_node::get (fndecl);
3ad065ef
EB
847 /* If the language requires deep unsharing, we need a pointer set to make
848 sure we don't repeatedly unshare subtrees of unshareable nodes. */
6e2830c3
TS
849 hash_set<tree> *visited
850 = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
44de5aeb 851
3ad065ef
EB
852 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
853 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
854 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
855
6e2830c3 856 delete visited;
616f1431 857
3ad065ef 858 if (cgn)
48eb4e53 859 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
67348ccc 860 unshare_body (cgn->decl);
44de5aeb
RK
861}
862
616f1431
EB
863/* Callback for walk_tree to unmark the visited trees rooted at *TP.
864 Subtrees are walked until the first unvisited node is encountered. */
865
866static tree
867unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
868{
869 tree t = *tp;
870
871 /* If this node has been visited, unmark it and keep looking. */
872 if (TREE_VISITED (t))
873 TREE_VISITED (t) = 0;
874
875 /* Otherwise, don't look any deeper. */
876 else
877 *walk_subtrees = 0;
878
879 return NULL_TREE;
880}
881
882/* Unmark the visited trees rooted at *TP. */
883
884static inline void
885unmark_visited (tree *tp)
886{
887 walk_tree (tp, unmark_visited_r, NULL, NULL);
888}
889
44de5aeb
RK
890/* Likewise, but mark all trees as not visited. */
891
892static void
3ad065ef 893unvisit_body (tree fndecl)
44de5aeb 894{
d52f5295 895 struct cgraph_node *cgn = cgraph_node::get (fndecl);
44de5aeb 896
3ad065ef
EB
897 unmark_visited (&DECL_SAVED_TREE (fndecl));
898 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
899 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
616f1431 900
3ad065ef 901 if (cgn)
48eb4e53 902 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
67348ccc 903 unvisit_body (cgn->decl);
44de5aeb
RK
904}
905
6de9cd9a
DN
906/* Unconditionally make an unshared copy of EXPR. This is used when using
907 stored expressions which span multiple functions, such as BINFO_VTABLE,
908 as the normal unsharing process can't tell that they're shared. */
909
910tree
911unshare_expr (tree expr)
912{
913 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
914 return expr;
915}
d1f98542
RB
916
917/* Worker for unshare_expr_without_location. */
918
919static tree
920prune_expr_location (tree *tp, int *walk_subtrees, void *)
921{
922 if (EXPR_P (*tp))
923 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
924 else
925 *walk_subtrees = 0;
926 return NULL_TREE;
927}
928
929/* Similar to unshare_expr but also prune all expression locations
930 from EXPR. */
931
932tree
933unshare_expr_without_location (tree expr)
934{
935 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
936 if (EXPR_P (expr))
937 walk_tree (&expr, prune_expr_location, NULL, NULL);
938 return expr;
939}
6de9cd9a
DN
940\f
941/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
942 contain statements and have a value. Assign its value to a temporary
ad19c4be 943 and give it void_type_node. Return the temporary, or NULL_TREE if
6de9cd9a
DN
944 WRAPPER was already void. */
945
946tree
325c3691 947voidify_wrapper_expr (tree wrapper, tree temp)
6de9cd9a 948{
4832214a
JM
949 tree type = TREE_TYPE (wrapper);
950 if (type && !VOID_TYPE_P (type))
6de9cd9a 951 {
c6c7698d 952 tree *p;
6de9cd9a 953
c6c7698d
JM
954 /* Set p to point to the body of the wrapper. Loop until we find
955 something that isn't a wrapper. */
956 for (p = &wrapper; p && *p; )
d3147f64 957 {
c6c7698d 958 switch (TREE_CODE (*p))
6de9cd9a 959 {
c6c7698d
JM
960 case BIND_EXPR:
961 TREE_SIDE_EFFECTS (*p) = 1;
962 TREE_TYPE (*p) = void_type_node;
963 /* For a BIND_EXPR, the body is operand 1. */
964 p = &BIND_EXPR_BODY (*p);
965 break;
966
967 case CLEANUP_POINT_EXPR:
968 case TRY_FINALLY_EXPR:
969 case TRY_CATCH_EXPR:
6de9cd9a
DN
970 TREE_SIDE_EFFECTS (*p) = 1;
971 TREE_TYPE (*p) = void_type_node;
c6c7698d
JM
972 p = &TREE_OPERAND (*p, 0);
973 break;
974
975 case STATEMENT_LIST:
976 {
977 tree_stmt_iterator i = tsi_last (*p);
978 TREE_SIDE_EFFECTS (*p) = 1;
979 TREE_TYPE (*p) = void_type_node;
980 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
981 }
982 break;
983
984 case COMPOUND_EXPR:
ad19c4be
EB
985 /* Advance to the last statement. Set all container types to
986 void. */
c6c7698d
JM
987 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
988 {
989 TREE_SIDE_EFFECTS (*p) = 1;
990 TREE_TYPE (*p) = void_type_node;
991 }
992 break;
993
0a35513e
AH
994 case TRANSACTION_EXPR:
995 TREE_SIDE_EFFECTS (*p) = 1;
996 TREE_TYPE (*p) = void_type_node;
997 p = &TRANSACTION_EXPR_BODY (*p);
998 break;
999
c6c7698d 1000 default:
5f23640f
TR
1001 /* Assume that any tree upon which voidify_wrapper_expr is
1002 directly called is a wrapper, and that its body is op0. */
1003 if (p == &wrapper)
1004 {
1005 TREE_SIDE_EFFECTS (*p) = 1;
1006 TREE_TYPE (*p) = void_type_node;
1007 p = &TREE_OPERAND (*p, 0);
1008 break;
1009 }
c6c7698d 1010 goto out;
6de9cd9a
DN
1011 }
1012 }
1013
c6c7698d 1014 out:
325c3691 1015 if (p == NULL || IS_EMPTY_STMT (*p))
c6c7698d
JM
1016 temp = NULL_TREE;
1017 else if (temp)
6de9cd9a 1018 {
c6c7698d
JM
1019 /* The wrapper is on the RHS of an assignment that we're pushing
1020 down. */
1021 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1022 || TREE_CODE (temp) == MODIFY_EXPR);
726a989a 1023 TREE_OPERAND (temp, 1) = *p;
c6c7698d 1024 *p = temp;
6de9cd9a
DN
1025 }
1026 else
1027 {
c6c7698d
JM
1028 temp = create_tmp_var (type, "retval");
1029 *p = build2 (INIT_EXPR, type, temp, *p);
6de9cd9a
DN
1030 }
1031
6de9cd9a
DN
1032 return temp;
1033 }
1034
1035 return NULL_TREE;
1036}
1037
1038/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1ea7e6ad 1039 a temporary through which they communicate. */
6de9cd9a
DN
1040
1041static void
538dd0b7 1042build_stack_save_restore (gcall **save, gcall **restore)
6de9cd9a 1043{
726a989a 1044 tree tmp_var;
6de9cd9a 1045
e79983f4 1046 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
6de9cd9a 1047 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
726a989a 1048 gimple_call_set_lhs (*save, tmp_var);
6de9cd9a 1049
ad19c4be 1050 *restore
e79983f4 1051 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
ad19c4be 1052 1, tmp_var);
6de9cd9a
DN
1053}
1054
1055/* Gimplify a BIND_EXPR. Just voidify and recurse. */
1056
1057static enum gimplify_status
726a989a 1058gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a
DN
1059{
1060 tree bind_expr = *expr_p;
6de9cd9a
DN
1061 bool old_save_stack = gimplify_ctxp->save_stack;
1062 tree t;
538dd0b7 1063 gbind *bind_stmt;
47598145 1064 gimple_seq body, cleanup;
538dd0b7 1065 gcall *stack_save;
a5852bea 1066 location_t start_locus = 0, end_locus = 0;
6de9cd9a 1067
c6c7698d 1068 tree temp = voidify_wrapper_expr (bind_expr, NULL);
325c3691 1069
6de9cd9a 1070 /* Mark variables seen in this bind expr. */
910ad8de 1071 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
e41d82f5 1072 {
820cc88f 1073 if (TREE_CODE (t) == VAR_DECL)
8cb86b65
JJ
1074 {
1075 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1076
1077 /* Mark variable as local. */
144f4153 1078 if (ctx && !DECL_EXTERNAL (t)
8cb86b65
JJ
1079 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1080 || splay_tree_lookup (ctx->variables,
1081 (splay_tree_key) t) == NULL))
c74559df
JJ
1082 {
1083 if (ctx->region_type == ORT_SIMD
1084 && TREE_ADDRESSABLE (t)
1085 && !TREE_STATIC (t))
1086 omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
1087 else
1088 omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
1089 }
8cb86b65
JJ
1090
1091 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
fc3103e7
JJ
1092
1093 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1094 cfun->has_local_explicit_reg_vars = true;
8cb86b65 1095 }
e41d82f5
RH
1096
1097 /* Preliminarily mark non-addressed complex variables as eligible
1098 for promotion to gimple registers. We'll transform their uses
bd2e63a1
RG
1099 as we find them. */
1100 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1101 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
e41d82f5
RH
1102 && !TREE_THIS_VOLATILE (t)
1103 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1104 && !needs_to_live_in_memory (t))
0890b981 1105 DECL_GIMPLE_REG_P (t) = 1;
e41d82f5 1106 }
6de9cd9a 1107
538dd0b7 1108 bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
726a989a 1109 BIND_EXPR_BLOCK (bind_expr));
538dd0b7 1110 gimple_push_bind_expr (bind_stmt);
726a989a 1111
6de9cd9a
DN
1112 gimplify_ctxp->save_stack = false;
1113
726a989a
RB
1114 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1115 body = NULL;
1116 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
538dd0b7 1117 gimple_bind_set_body (bind_stmt, body);
6de9cd9a 1118
a5852bea
OH
1119 /* Source location wise, the cleanup code (stack_restore and clobbers)
1120 belongs to the end of the block, so propagate what we have. The
1121 stack_save operation belongs to the beginning of block, which we can
1122 infer from the bind_expr directly if the block has no explicit
1123 assignment. */
1124 if (BIND_EXPR_BLOCK (bind_expr))
1125 {
1126 end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1127 start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1128 }
1129 if (start_locus == 0)
1130 start_locus = EXPR_LOCATION (bind_expr);
1131
47598145
MM
1132 cleanup = NULL;
1133 stack_save = NULL;
6de9cd9a
DN
1134 if (gimplify_ctxp->save_stack)
1135 {
538dd0b7 1136 gcall *stack_restore;
6de9cd9a
DN
1137
1138 /* Save stack on entry and restore it on exit. Add a try_finally
98906124 1139 block to achieve this. */
6de9cd9a
DN
1140 build_stack_save_restore (&stack_save, &stack_restore);
1141
a5852bea
OH
1142 gimple_set_location (stack_save, start_locus);
1143 gimple_set_location (stack_restore, end_locus);
1144
726a989a 1145 gimplify_seq_add_stmt (&cleanup, stack_restore);
47598145
MM
1146 }
1147
1148 /* Add clobbers for all variables that go out of scope. */
1149 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1150 {
1151 if (TREE_CODE (t) == VAR_DECL
1152 && !is_global_var (t)
1153 && DECL_CONTEXT (t) == current_function_decl
1154 && !DECL_HARD_REGISTER (t)
1155 && !TREE_THIS_VOLATILE (t)
1156 && !DECL_HAS_VALUE_EXPR_P (t)
1157 /* Only care for variables that have to be in memory. Others
1158 will be rewritten into SSA names, hence moved to the top-level. */
87e2a8fd
XDL
1159 && !is_gimple_reg (t)
1160 && flag_stack_reuse != SR_NONE)
47598145 1161 {
a5852bea
OH
1162 tree clobber = build_constructor (TREE_TYPE (t), NULL);
1163 gimple clobber_stmt;
47598145 1164 TREE_THIS_VOLATILE (clobber) = 1;
a5852bea
OH
1165 clobber_stmt = gimple_build_assign (t, clobber);
1166 gimple_set_location (clobber_stmt, end_locus);
1167 gimplify_seq_add_stmt (&cleanup, clobber_stmt);
47598145
MM
1168 }
1169 }
1170
1171 if (cleanup)
1172 {
538dd0b7 1173 gtry *gs;
47598145
MM
1174 gimple_seq new_body;
1175
1176 new_body = NULL;
538dd0b7 1177 gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
726a989a 1178 GIMPLE_TRY_FINALLY);
6de9cd9a 1179
47598145
MM
1180 if (stack_save)
1181 gimplify_seq_add_stmt (&new_body, stack_save);
726a989a 1182 gimplify_seq_add_stmt (&new_body, gs);
538dd0b7 1183 gimple_bind_set_body (bind_stmt, new_body);
6de9cd9a
DN
1184 }
1185
1186 gimplify_ctxp->save_stack = old_save_stack;
1187 gimple_pop_bind_expr ();
1188
538dd0b7 1189 gimplify_seq_add_stmt (pre_p, bind_stmt);
726a989a 1190
6de9cd9a
DN
1191 if (temp)
1192 {
1193 *expr_p = temp;
6de9cd9a
DN
1194 return GS_OK;
1195 }
726a989a
RB
1196
1197 *expr_p = NULL_TREE;
1198 return GS_ALL_DONE;
6de9cd9a
DN
1199}
1200
1201/* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1202 GIMPLE value, it is assigned to a new temporary and the statement is
1203 re-written to return the temporary.
1204
726a989a 1205 PRE_P points to the sequence where side effects that must happen before
6de9cd9a
DN
1206 STMT should be stored. */
1207
1208static enum gimplify_status
726a989a 1209gimplify_return_expr (tree stmt, gimple_seq *pre_p)
6de9cd9a 1210{
538dd0b7 1211 greturn *ret;
6de9cd9a 1212 tree ret_expr = TREE_OPERAND (stmt, 0);
71877985 1213 tree result_decl, result;
6de9cd9a 1214
726a989a
RB
1215 if (ret_expr == error_mark_node)
1216 return GS_ERROR;
1217
939b37da
BI
1218 /* Implicit _Cilk_sync must be inserted right before any return statement
1219 if there is a _Cilk_spawn in the function. If the user has provided a
1220 _Cilk_sync, the optimizer should remove this duplicate one. */
1221 if (fn_contains_cilk_spawn_p (cfun))
1222 {
1223 tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
1224 gimplify_and_add (impl_sync, pre_p);
1225 }
1226
726a989a
RB
1227 if (!ret_expr
1228 || TREE_CODE (ret_expr) == RESULT_DECL
55e99d52 1229 || ret_expr == error_mark_node)
726a989a 1230 {
538dd0b7 1231 greturn *ret = gimple_build_return (ret_expr);
726a989a
RB
1232 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1233 gimplify_seq_add_stmt (pre_p, ret);
1234 return GS_ALL_DONE;
1235 }
6de9cd9a 1236
6de9cd9a 1237 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
71877985 1238 result_decl = NULL_TREE;
6de9cd9a
DN
1239 else
1240 {
726a989a
RB
1241 result_decl = TREE_OPERAND (ret_expr, 0);
1242
1243 /* See through a return by reference. */
cc77ae10 1244 if (TREE_CODE (result_decl) == INDIRECT_REF)
cc77ae10 1245 result_decl = TREE_OPERAND (result_decl, 0);
282899df
NS
1246
1247 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1248 || TREE_CODE (ret_expr) == INIT_EXPR)
1249 && TREE_CODE (result_decl) == RESULT_DECL);
6de9cd9a
DN
1250 }
1251
71877985
RH
1252 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1253 Recall that aggregate_value_p is FALSE for any aggregate type that is
1254 returned in registers. If we're returning values in registers, then
1255 we don't want to extend the lifetime of the RESULT_DECL, particularly
d3147f64 1256 across another call. In addition, for those aggregates for which
535a42b1 1257 hard_function_value generates a PARALLEL, we'll die during normal
71877985
RH
1258 expansion of structure assignments; there's special code in expand_return
1259 to handle this case that does not exist in expand_expr. */
ca361dec
EB
1260 if (!result_decl)
1261 result = NULL_TREE;
1262 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1263 {
1264 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1265 {
1266 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1267 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1268 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1269 should be effectively allocated by the caller, i.e. all calls to
1270 this function must be subject to the Return Slot Optimization. */
1271 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1272 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1273 }
1274 result = result_decl;
1275 }
71877985
RH
1276 else if (gimplify_ctxp->return_temp)
1277 result = gimplify_ctxp->return_temp;
1278 else
1279 {
b731b390 1280 result = create_tmp_reg (TREE_TYPE (result_decl));
ff98621c
RH
1281
1282 /* ??? With complex control flow (usually involving abnormal edges),
1283 we can wind up warning about an uninitialized value for this. Due
1284 to how this variable is constructed and initialized, this is never
1285 true. Give up and never warn. */
1286 TREE_NO_WARNING (result) = 1;
1287
71877985
RH
1288 gimplify_ctxp->return_temp = result;
1289 }
1290
726a989a 1291 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
71877985
RH
1292 Then gimplify the whole thing. */
1293 if (result != result_decl)
726a989a 1294 TREE_OPERAND (ret_expr, 0) = result;
fff34d35
RK
1295
1296 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
6de9cd9a 1297
726a989a
RB
1298 ret = gimple_build_return (result);
1299 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1300 gimplify_seq_add_stmt (pre_p, ret);
6de9cd9a 1301
6de9cd9a
DN
1302 return GS_ALL_DONE;
1303}
1304
ad19c4be
EB
1305/* Gimplify a variable-length array DECL. */
1306
786025ea 1307static void
726a989a 1308gimplify_vla_decl (tree decl, gimple_seq *seq_p)
786025ea
JJ
1309{
1310 /* This is a variable-sized decl. Simplify its size and mark it
98906124 1311 for deferred expansion. */
786025ea
JJ
1312 tree t, addr, ptr_type;
1313
726a989a
RB
1314 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1315 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
786025ea 1316
0138d6b2
JM
1317 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1318 if (DECL_HAS_VALUE_EXPR_P (decl))
1319 return;
1320
786025ea
JJ
1321 /* All occurrences of this decl in final gimplified code will be
1322 replaced by indirection. Setting DECL_VALUE_EXPR does two
1323 things: First, it lets the rest of the gimplifier know what
1324 replacement to use. Second, it lets the debug info know
1325 where to find the value. */
1326 ptr_type = build_pointer_type (TREE_TYPE (decl));
1327 addr = create_tmp_var (ptr_type, get_name (decl));
1328 DECL_IGNORED_P (addr) = 0;
1329 t = build_fold_indirect_ref (addr);
31408f60 1330 TREE_THIS_NOTRAP (t) = 1;
786025ea
JJ
1331 SET_DECL_VALUE_EXPR (decl, t);
1332 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1333
e79983f4 1334 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
13e49da9
TV
1335 t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
1336 size_int (DECL_ALIGN (decl)));
d3c12306 1337 /* The call has been built for a variable-sized object. */
63d2a353 1338 CALL_ALLOCA_FOR_VAR_P (t) = 1;
786025ea 1339 t = fold_convert (ptr_type, t);
726a989a 1340 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
786025ea 1341
726a989a 1342 gimplify_and_add (t, seq_p);
786025ea
JJ
1343
1344 /* Indicate that we need to restore the stack level when the
1345 enclosing BIND_EXPR is exited. */
1346 gimplify_ctxp->save_stack = true;
1347}
1348
45b0be94
AM
1349/* A helper function to be called via walk_tree. Mark all labels under *TP
1350 as being forced. To be called for DECL_INITIAL of static variables. */
1351
1352static tree
1353force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1354{
1355 if (TYPE_P (*tp))
1356 *walk_subtrees = 0;
1357 if (TREE_CODE (*tp) == LABEL_DECL)
1358 FORCED_LABEL (*tp) = 1;
1359
1360 return NULL_TREE;
1361}
1362
ad19c4be 1363/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
350fae66
RK
1364 and initialization explicit. */
1365
1366static enum gimplify_status
726a989a 1367gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
350fae66
RK
1368{
1369 tree stmt = *stmt_p;
1370 tree decl = DECL_EXPR_DECL (stmt);
1371
1372 *stmt_p = NULL_TREE;
1373
1374 if (TREE_TYPE (decl) == error_mark_node)
1375 return GS_ERROR;
1376
8e0a600b
JJ
1377 if ((TREE_CODE (decl) == TYPE_DECL
1378 || TREE_CODE (decl) == VAR_DECL)
1379 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
726a989a 1380 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
350fae66 1381
d400d17e
EB
1382 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1383 in case its size expressions contain problematic nodes like CALL_EXPR. */
1384 if (TREE_CODE (decl) == TYPE_DECL
1385 && DECL_ORIGINAL_TYPE (decl)
1386 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1387 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1388
8e0a600b 1389 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
350fae66
RK
1390 {
1391 tree init = DECL_INITIAL (decl);
1392
b38f3813
EB
1393 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1394 || (!TREE_STATIC (decl)
1395 && flag_stack_check == GENERIC_STACK_CHECK
1396 && compare_tree_int (DECL_SIZE_UNIT (decl),
1397 STACK_CHECK_MAX_VAR_SIZE) > 0))
726a989a 1398 gimplify_vla_decl (decl, seq_p);
350fae66 1399
22192559
JM
1400 /* Some front ends do not explicitly declare all anonymous
1401 artificial variables. We compensate here by declaring the
1402 variables, though it would be better if the front ends would
1403 explicitly declare them. */
1404 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1405 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1406 gimple_add_tmp_var (decl);
1407
350fae66
RK
1408 if (init && init != error_mark_node)
1409 {
1410 if (!TREE_STATIC (decl))
1411 {
1412 DECL_INITIAL (decl) = NULL_TREE;
dae7ec87 1413 init = build2 (INIT_EXPR, void_type_node, decl, init);
726a989a
RB
1414 gimplify_and_add (init, seq_p);
1415 ggc_free (init);
350fae66
RK
1416 }
1417 else
1418 /* We must still examine initializers for static variables
1419 as they may contain a label address. */
1420 walk_tree (&init, force_labels_r, NULL, NULL);
1421 }
350fae66
RK
1422 }
1423
1424 return GS_ALL_DONE;
1425}
1426
6de9cd9a
DN
1427/* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1428 and replacing the LOOP_EXPR with goto, but if the loop contains an
1429 EXIT_EXPR, we need to append a label for it to jump to. */
1430
1431static enum gimplify_status
726a989a 1432gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a
DN
1433{
1434 tree saved_label = gimplify_ctxp->exit_label;
c2255bc4 1435 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
6de9cd9a 1436
726a989a 1437 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
6de9cd9a
DN
1438
1439 gimplify_ctxp->exit_label = NULL_TREE;
1440
fff34d35 1441 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
6de9cd9a 1442
726a989a
RB
1443 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1444
6de9cd9a 1445 if (gimplify_ctxp->exit_label)
ad19c4be
EB
1446 gimplify_seq_add_stmt (pre_p,
1447 gimple_build_label (gimplify_ctxp->exit_label));
726a989a
RB
1448
1449 gimplify_ctxp->exit_label = saved_label;
1450
1451 *expr_p = NULL;
1452 return GS_ALL_DONE;
1453}
1454
ad19c4be 1455/* Gimplify a statement list onto a sequence. These may be created either
726a989a
RB
1456 by an enlightened front-end, or by shortcut_cond_expr. */
1457
1458static enum gimplify_status
1459gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1460{
1461 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1462
1463 tree_stmt_iterator i = tsi_start (*expr_p);
1464
1465 while (!tsi_end_p (i))
6de9cd9a 1466 {
726a989a
RB
1467 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1468 tsi_delink (&i);
6de9cd9a 1469 }
6de9cd9a 1470
726a989a
RB
1471 if (temp)
1472 {
1473 *expr_p = temp;
1474 return GS_OK;
1475 }
6de9cd9a
DN
1476
1477 return GS_ALL_DONE;
1478}
0f1f6967 1479
68e72840
SB
1480\f
1481/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
6de9cd9a
DN
1482 branch to. */
1483
1484static enum gimplify_status
726a989a 1485gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a
DN
1486{
1487 tree switch_expr = *expr_p;
726a989a 1488 gimple_seq switch_body_seq = NULL;
6de9cd9a 1489 enum gimplify_status ret;
0cd2402d
SB
1490 tree index_type = TREE_TYPE (switch_expr);
1491 if (index_type == NULL_TREE)
1492 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
6de9cd9a 1493
726a989a
RB
1494 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
1495 fb_rvalue);
1496 if (ret == GS_ERROR || ret == GS_UNHANDLED)
1497 return ret;
6de9cd9a
DN
1498
1499 if (SWITCH_BODY (switch_expr))
1500 {
9771b263
DN
1501 vec<tree> labels;
1502 vec<tree> saved_labels;
726a989a 1503 tree default_case = NULL_TREE;
538dd0b7 1504 gswitch *switch_stmt;
b8698a0f 1505
6de9cd9a
DN
1506 /* If someone can be bothered to fill in the labels, they can
1507 be bothered to null out the body too. */
282899df 1508 gcc_assert (!SWITCH_LABELS (switch_expr));
6de9cd9a 1509
0cd2402d 1510 /* Save old labels, get new ones from body, then restore the old
726a989a 1511 labels. Save all the things from the switch body to append after. */
6de9cd9a 1512 saved_labels = gimplify_ctxp->case_labels;
9771b263 1513 gimplify_ctxp->case_labels.create (8);
6de9cd9a 1514
726a989a 1515 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
6de9cd9a
DN
1516 labels = gimplify_ctxp->case_labels;
1517 gimplify_ctxp->case_labels = saved_labels;
b8698a0f 1518
68e72840
SB
1519 preprocess_case_label_vec_for_gimple (labels, index_type,
1520 &default_case);
32f579f6 1521
726a989a 1522 if (!default_case)
6de9cd9a 1523 {
538dd0b7 1524 glabel *new_default;
6de9cd9a 1525
68e72840
SB
1526 default_case
1527 = build_case_label (NULL_TREE, NULL_TREE,
1528 create_artificial_label (UNKNOWN_LOCATION));
1529 new_default = gimple_build_label (CASE_LABEL (default_case));
1530 gimplify_seq_add_stmt (&switch_body_seq, new_default);
32f579f6 1531 }
f667741c 1532
538dd0b7 1533 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
fd8d363e 1534 default_case, labels);
538dd0b7 1535 gimplify_seq_add_stmt (pre_p, switch_stmt);
726a989a 1536 gimplify_seq_add_seq (pre_p, switch_body_seq);
9771b263 1537 labels.release ();
6de9cd9a 1538 }
282899df
NS
1539 else
1540 gcc_assert (SWITCH_LABELS (switch_expr));
6de9cd9a 1541
726a989a 1542 return GS_ALL_DONE;
6de9cd9a
DN
1543}
1544
ad19c4be 1545/* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
726a989a 1546
6de9cd9a 1547static enum gimplify_status
726a989a 1548gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a 1549{
953ff289 1550 struct gimplify_ctx *ctxp;
538dd0b7 1551 glabel *label_stmt;
953ff289
DN
1552
1553 /* Invalid OpenMP programs can play Duff's Device type games with
1554 #pragma omp parallel. At least in the C front end, we don't
1555 detect such invalid branches until after gimplification. */
1556 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
9771b263 1557 if (ctxp->case_labels.exists ())
953ff289 1558 break;
282899df 1559
538dd0b7 1560 label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
9771b263 1561 ctxp->case_labels.safe_push (*expr_p);
538dd0b7 1562 gimplify_seq_add_stmt (pre_p, label_stmt);
726a989a 1563
6de9cd9a
DN
1564 return GS_ALL_DONE;
1565}
1566
6de9cd9a
DN
1567/* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1568 if necessary. */
1569
1570tree
1571build_and_jump (tree *label_p)
1572{
1573 if (label_p == NULL)
1574 /* If there's nowhere to jump, just fall through. */
65355d53 1575 return NULL_TREE;
6de9cd9a
DN
1576
1577 if (*label_p == NULL_TREE)
1578 {
c2255bc4 1579 tree label = create_artificial_label (UNKNOWN_LOCATION);
6de9cd9a
DN
1580 *label_p = label;
1581 }
1582
1583 return build1 (GOTO_EXPR, void_type_node, *label_p);
1584}
1585
1586/* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1587 This also involves building a label to jump to and communicating it to
1588 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1589
1590static enum gimplify_status
1591gimplify_exit_expr (tree *expr_p)
1592{
1593 tree cond = TREE_OPERAND (*expr_p, 0);
1594 tree expr;
1595
1596 expr = build_and_jump (&gimplify_ctxp->exit_label);
b4257cfc 1597 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
6de9cd9a
DN
1598 *expr_p = expr;
1599
1600 return GS_OK;
1601}
1602
26d44ae2
RH
1603/* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1604 different from its canonical type, wrap the whole thing inside a
1605 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1606 type.
6de9cd9a 1607
26d44ae2
RH
1608 The canonical type of a COMPONENT_REF is the type of the field being
1609 referenced--unless the field is a bit-field which can be read directly
1610 in a smaller mode, in which case the canonical type is the
1611 sign-appropriate type corresponding to that mode. */
6de9cd9a 1612
26d44ae2
RH
1613static void
1614canonicalize_component_ref (tree *expr_p)
6de9cd9a 1615{
26d44ae2
RH
1616 tree expr = *expr_p;
1617 tree type;
6de9cd9a 1618
282899df 1619 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
6de9cd9a 1620
26d44ae2
RH
1621 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1622 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1623 else
1624 type = TREE_TYPE (TREE_OPERAND (expr, 1));
6de9cd9a 1625
b26c6d55
RG
1626 /* One could argue that all the stuff below is not necessary for
1627 the non-bitfield case and declare it a FE error if type
1628 adjustment would be needed. */
26d44ae2 1629 if (TREE_TYPE (expr) != type)
6de9cd9a 1630 {
b26c6d55 1631#ifdef ENABLE_TYPES_CHECKING
26d44ae2 1632 tree old_type = TREE_TYPE (expr);
b26c6d55
RG
1633#endif
1634 int type_quals;
1635
1636 /* We need to preserve qualifiers and propagate them from
1637 operand 0. */
1638 type_quals = TYPE_QUALS (type)
1639 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1640 if (TYPE_QUALS (type) != type_quals)
1641 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
6de9cd9a 1642
26d44ae2
RH
1643 /* Set the type of the COMPONENT_REF to the underlying type. */
1644 TREE_TYPE (expr) = type;
6de9cd9a 1645
b26c6d55
RG
1646#ifdef ENABLE_TYPES_CHECKING
1647 /* It is now a FE error, if the conversion from the canonical
1648 type to the original expression type is not useless. */
1649 gcc_assert (useless_type_conversion_p (old_type, type));
1650#endif
26d44ae2
RH
1651 }
1652}
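
/* For instance (illustrative, not asserting exact front-end behavior): for
   `s.b' where `b' is declared `unsigned b : 8', get_unwidened may report a
   narrower unsigned type matching the mode the field can be read in; the
   COMPONENT_REF is then retyped to that canonical type, with any qualifiers
   of `s' propagated onto it.  */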
6de9cd9a 1653
26d44ae2 1654/* If a NOP conversion is changing a pointer to array of foo to a pointer
d3147f64 1655 to foo, embed that change in the ADDR_EXPR by converting
26d44ae2
RH
1656 T array[U];
1657 (T *)&array
1658 ==>
1659 &array[L]
1660 where L is the lower bound. For simplicity, only do this for constant
04d86531
RG
1661 lower bound.
1662 The constraint is that the type of &array[L] is trivially convertible
1663 to T *. */
6de9cd9a 1664
26d44ae2
RH
1665static void
1666canonicalize_addr_expr (tree *expr_p)
1667{
1668 tree expr = *expr_p;
26d44ae2 1669 tree addr_expr = TREE_OPERAND (expr, 0);
04d86531 1670 tree datype, ddatype, pddatype;
6de9cd9a 1671
04d86531
RG
1672 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
1673 if (!POINTER_TYPE_P (TREE_TYPE (expr))
1674 || TREE_CODE (addr_expr) != ADDR_EXPR)
26d44ae2 1675 return;
6de9cd9a 1676
26d44ae2 1677 /* The addr_expr type should be a pointer to an array. */
04d86531 1678 datype = TREE_TYPE (TREE_TYPE (addr_expr));
26d44ae2
RH
1679 if (TREE_CODE (datype) != ARRAY_TYPE)
1680 return;
6de9cd9a 1681
04d86531
RG
1682 /* The pointer to element type shall be trivially convertible to
1683 the expression pointer type. */
26d44ae2 1684 ddatype = TREE_TYPE (datype);
04d86531 1685 pddatype = build_pointer_type (ddatype);
e5fdcd8c
RG
1686 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
1687 pddatype))
26d44ae2 1688 return;
6de9cd9a 1689
26d44ae2 1690 /* The lower bound and element sizes must be constant. */
04d86531
RG
1691 if (!TYPE_SIZE_UNIT (ddatype)
1692 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
26d44ae2
RH
1693 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1694 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1695 return;
6de9cd9a 1696
26d44ae2 1697 /* All checks succeeded. Build a new node to merge the cast. */
04d86531 1698 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
26d44ae2 1699 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
5852948c 1700 NULL_TREE, NULL_TREE);
04d86531 1701 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
e5fdcd8c
RG
1702
1703 /* We can have stripped a required restrict qualifier above. */
1704 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
1705 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
26d44ae2 1706}
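
/* Concrete instance (illustrative): given `int a[10];', the GENERIC tree for
   `(int *) &a' is an ADDR_EXPR of the array underneath a NOP_EXPR; the code
   above replaces the whole conversion by `&a[0]' (an ADDR_EXPR of an
   ARRAY_REF at the constant lower bound), so no cast remains.  */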
6de9cd9a 1707
26d44ae2
RH
1708/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1709 underneath as appropriate. */
6de9cd9a 1710
26d44ae2
RH
1711static enum gimplify_status
1712gimplify_conversion (tree *expr_p)
d3147f64 1713{
db3927fb 1714 location_t loc = EXPR_LOCATION (*expr_p);
1043771b 1715 gcc_assert (CONVERT_EXPR_P (*expr_p));
c2255bc4 1716
0710ccff
NS
1717 /* Then strip away all but the outermost conversion. */
1718 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1719
1720 /* And remove the outermost conversion if it's useless. */
1721 if (tree_ssa_useless_type_conversion (*expr_p))
1722 *expr_p = TREE_OPERAND (*expr_p, 0);
6de9cd9a 1723
26d44ae2
RH
1724 /* If we still have a conversion at the toplevel,
1725 then canonicalize some constructs. */
1043771b 1726 if (CONVERT_EXPR_P (*expr_p))
26d44ae2
RH
1727 {
1728 tree sub = TREE_OPERAND (*expr_p, 0);
6de9cd9a 1729
26d44ae2
RH
1730 /* If a NOP conversion is changing the type of a COMPONENT_REF
1731 expression, then canonicalize its type now in order to expose more
1732 redundant conversions. */
1733 if (TREE_CODE (sub) == COMPONENT_REF)
1734 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
6de9cd9a 1735
26d44ae2
RH
1736 /* If a NOP conversion is changing a pointer to array of foo
1737 to a pointer to foo, embed that change in the ADDR_EXPR. */
1738 else if (TREE_CODE (sub) == ADDR_EXPR)
1739 canonicalize_addr_expr (expr_p);
1740 }
6de9cd9a 1741
8b17cc05
RG
1742 /* If we have a conversion to a non-register type force the
1743 use of a VIEW_CONVERT_EXPR instead. */
4f934809 1744 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
db3927fb 1745 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
4f934809 1746 TREE_OPERAND (*expr_p, 0));
8b17cc05 1747
741233cf
RB
1748 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
1749 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
1750 TREE_SET_CODE (*expr_p, NOP_EXPR);
1751
6de9cd9a
DN
1752 return GS_OK;
1753}
1754
77f2a970 1755/* Nonlocal VLAs seen in the current function. */
6e2830c3 1756static hash_set<tree> *nonlocal_vlas;
77f2a970 1757
96ddb7ec
JJ
1758/* The VAR_DECLs created for nonlocal VLAs for debug info purposes. */
1759static tree nonlocal_vla_vars;
1760
ad19c4be 1761/* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
a9f7c570
RH
1762 DECL_VALUE_EXPR, and it's worth re-examining things. */
1763
1764static enum gimplify_status
1765gimplify_var_or_parm_decl (tree *expr_p)
1766{
1767 tree decl = *expr_p;
1768
1769 /* ??? If this is a local variable, and it has not been seen in any
1770 outer BIND_EXPR, then it's probably the result of a duplicate
1771 declaration, for which we've already issued an error. It would
1772 be really nice if the front end wouldn't leak these at all.
1773 Currently the only known culprit is C++ destructors, as seen
1774 in g++.old-deja/g++.jason/binding.C. */
1775 if (TREE_CODE (decl) == VAR_DECL
1776 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1777 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1778 && decl_function_context (decl) == current_function_decl)
1779 {
1da2ed5f 1780 gcc_assert (seen_error ());
a9f7c570
RH
1781 return GS_ERROR;
1782 }
1783
953ff289
DN
1784 /* When within an OpenMP context, notice uses of variables. */
1785 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1786 return GS_ALL_DONE;
1787
a9f7c570
RH
1788 /* If the decl is an alias for another expression, substitute it now. */
1789 if (DECL_HAS_VALUE_EXPR_P (decl))
1790 {
77f2a970
JJ
1791 tree value_expr = DECL_VALUE_EXPR (decl);
1792
1793 /* For referenced nonlocal VLAs add a decl for debugging purposes
1794 to the current function. */
1795 if (TREE_CODE (decl) == VAR_DECL
1796 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1797 && nonlocal_vlas != NULL
1798 && TREE_CODE (value_expr) == INDIRECT_REF
1799 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
1800 && decl_function_context (decl) != current_function_decl)
1801 {
1802 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
74bf76ed
JJ
1803 while (ctx
1804 && (ctx->region_type == ORT_WORKSHARE
1805 || ctx->region_type == ORT_SIMD))
77f2a970 1806 ctx = ctx->outer_context;
6e2830c3 1807 if (!ctx && !nonlocal_vlas->add (decl))
77f2a970 1808 {
96ddb7ec 1809 tree copy = copy_node (decl);
77f2a970
JJ
1810
1811 lang_hooks.dup_lang_specific_decl (copy);
2eb79bbb 1812 SET_DECL_RTL (copy, 0);
77f2a970 1813 TREE_USED (copy) = 1;
96ddb7ec
JJ
1814 DECL_CHAIN (copy) = nonlocal_vla_vars;
1815 nonlocal_vla_vars = copy;
77f2a970
JJ
1816 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
1817 DECL_HAS_VALUE_EXPR_P (copy) = 1;
1818 }
1819 }
1820
1821 *expr_p = unshare_expr (value_expr);
a9f7c570
RH
1822 return GS_OK;
1823 }
1824
1825 return GS_ALL_DONE;
1826}
1827
66c14933
EB
1828/* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
1829
1830static void
2fb9a547
AM
1831recalculate_side_effects (tree t)
1832{
1833 enum tree_code code = TREE_CODE (t);
1834 int len = TREE_OPERAND_LENGTH (t);
1835 int i;
1836
1837 switch (TREE_CODE_CLASS (code))
1838 {
1839 case tcc_expression:
1840 switch (code)
1841 {
1842 case INIT_EXPR:
1843 case MODIFY_EXPR:
1844 case VA_ARG_EXPR:
1845 case PREDECREMENT_EXPR:
1846 case PREINCREMENT_EXPR:
1847 case POSTDECREMENT_EXPR:
1848 case POSTINCREMENT_EXPR:
1849 /* All of these have side-effects, no matter what their
1850 operands are. */
1851 return;
1852
1853 default:
1854 break;
1855 }
1856 /* Fall through. */
1857
1858 case tcc_comparison: /* a comparison expression */
1859 case tcc_unary: /* a unary arithmetic expression */
1860 case tcc_binary: /* a binary arithmetic expression */
1861 case tcc_reference: /* a reference */
1862 case tcc_vl_exp: /* a function call */
1863 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
1864 for (i = 0; i < len; ++i)
1865 {
1866 tree op = TREE_OPERAND (t, i);
1867 if (op && TREE_SIDE_EFFECTS (op))
1868 TREE_SIDE_EFFECTS (t) = 1;
1869 }
1870 break;
1871
1872 case tcc_constant:
1873 /* No side-effects. */
1874 return;
1875
1876 default:
1877 gcc_unreachable ();
1878 }
1879}
1880
6de9cd9a 1881/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
726a989a 1882 node *EXPR_P.
6de9cd9a
DN
1883
1884 compound_lval
1885 : min_lval '[' val ']'
1886 | min_lval '.' ID
1887 | compound_lval '[' val ']'
1888 | compound_lval '.' ID
1889
1890 This is not part of the original SIMPLE definition, which separates
1891 array and member references, but it seems reasonable to handle them
1892 together. Also, this way we don't run into problems with union
1893 aliasing; gcc requires that for accesses through a union to alias, the
1894 union reference must be explicit, which was not always the case when we
1895 were splitting up array and member refs.
1896
726a989a 1897 PRE_P points to the sequence where side effects that must happen before
6de9cd9a
DN
1898 *EXPR_P should be stored.
1899
726a989a 1900 POST_P points to the sequence where side effects that must happen after
6de9cd9a
DN
1901 *EXPR_P should be stored. */
1902
1903static enum gimplify_status
726a989a
RB
1904gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
1905 fallback_t fallback)
6de9cd9a
DN
1906{
1907 tree *p;
941f78d1 1908 enum gimplify_status ret = GS_ALL_DONE, tret;
af72267c 1909 int i;
db3927fb 1910 location_t loc = EXPR_LOCATION (*expr_p);
941f78d1 1911 tree expr = *expr_p;
6de9cd9a 1912
6de9cd9a 1913 /* Create a stack of the subexpressions so later we can walk them in
ec234842 1914 order from inner to outer. */
00f96dc9 1915 auto_vec<tree, 10> expr_stack;
6de9cd9a 1916
afe84921 1917 /* We can handle anything that get_inner_reference can deal with. */
6a720599
JM
1918 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
1919 {
a9f7c570 1920 restart:
6a720599
JM
1921 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
1922 if (TREE_CODE (*p) == INDIRECT_REF)
db3927fb 1923 *p = fold_indirect_ref_loc (loc, *p);
a9f7c570
RH
1924
1925 if (handled_component_p (*p))
1926 ;
1927 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
1928 additional COMPONENT_REFs. */
1929 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
1930 && gimplify_var_or_parm_decl (p) == GS_OK)
1931 goto restart;
1932 else
6a720599 1933 break;
b8698a0f 1934
9771b263 1935 expr_stack.safe_push (*p);
6a720599 1936 }
6de9cd9a 1937
9771b263 1938 gcc_assert (expr_stack.length ());
9e51aaf5 1939
0823efed
DN
1940 /* Now EXPR_STACK is a stack of pointers to all the refs we've
1941 walked through and P points to the innermost expression.
6de9cd9a 1942
af72267c
RK
1943 Java requires that we elaborate nodes in source order. That
1944 means we must gimplify the inner expression followed by each of
1945 the indices, in order. But we can't gimplify the inner
1946 expression until we deal with any variable bounds, sizes, or
1947 positions in order to deal with PLACEHOLDER_EXPRs.
1948
1949 So we do this in three steps. First we deal with the annotations
1950 for any variables in the components, then we gimplify the base,
1951 then we gimplify any indices, from left to right. */
9771b263 1952 for (i = expr_stack.length () - 1; i >= 0; i--)
6de9cd9a 1953 {
9771b263 1954 tree t = expr_stack[i];
44de5aeb
RK
1955
1956 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6de9cd9a 1957 {
44de5aeb
RK
1958 /* Gimplify the low bound and element type size and put them into
1959 the ARRAY_REF. If these values are set, they have already been
1960 gimplified. */
726a989a 1961 if (TREE_OPERAND (t, 2) == NULL_TREE)
44de5aeb 1962 {
a7cc468a
RH
1963 tree low = unshare_expr (array_ref_low_bound (t));
1964 if (!is_gimple_min_invariant (low))
44de5aeb 1965 {
726a989a
RB
1966 TREE_OPERAND (t, 2) = low;
1967 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
ba4d8f9d 1968 post_p, is_gimple_reg,
726a989a 1969 fb_rvalue);
44de5aeb
RK
1970 ret = MIN (ret, tret);
1971 }
1972 }
19c44640
JJ
1973 else
1974 {
1975 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1976 is_gimple_reg, fb_rvalue);
1977 ret = MIN (ret, tret);
1978 }
44de5aeb 1979
19c44640 1980 if (TREE_OPERAND (t, 3) == NULL_TREE)
44de5aeb
RK
1981 {
1982 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
1983 tree elmt_size = unshare_expr (array_ref_element_size (t));
a4e9ffe5 1984 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
44de5aeb
RK
1985
1986 /* Divide the element size by the alignment of the element
1987 type (above). */
ad19c4be
EB
1988 elmt_size
1989 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
44de5aeb 1990
a7cc468a 1991 if (!is_gimple_min_invariant (elmt_size))
44de5aeb 1992 {
726a989a
RB
1993 TREE_OPERAND (t, 3) = elmt_size;
1994 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
ba4d8f9d 1995 post_p, is_gimple_reg,
726a989a 1996 fb_rvalue);
44de5aeb
RK
1997 ret = MIN (ret, tret);
1998 }
6de9cd9a 1999 }
19c44640
JJ
2000 else
2001 {
2002 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
2003 is_gimple_reg, fb_rvalue);
2004 ret = MIN (ret, tret);
2005 }
6de9cd9a 2006 }
44de5aeb
RK
2007 else if (TREE_CODE (t) == COMPONENT_REF)
2008 {
2009 /* Set the field offset into T and gimplify it. */
19c44640 2010 if (TREE_OPERAND (t, 2) == NULL_TREE)
44de5aeb
RK
2011 {
2012 tree offset = unshare_expr (component_ref_field_offset (t));
2013 tree field = TREE_OPERAND (t, 1);
2014 tree factor
2015 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2016
2017 /* Divide the offset by its alignment. */
db3927fb 2018 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
44de5aeb 2019
a7cc468a 2020 if (!is_gimple_min_invariant (offset))
44de5aeb 2021 {
726a989a
RB
2022 TREE_OPERAND (t, 2) = offset;
2023 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
ba4d8f9d 2024 post_p, is_gimple_reg,
726a989a 2025 fb_rvalue);
44de5aeb
RK
2026 ret = MIN (ret, tret);
2027 }
2028 }
19c44640
JJ
2029 else
2030 {
2031 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2032 is_gimple_reg, fb_rvalue);
2033 ret = MIN (ret, tret);
2034 }
44de5aeb 2035 }
af72267c
RK
2036 }
2037
a9f7c570
RH
2038 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2039 so as to match the min_lval predicate. Failure to do so may result
2040 in the creation of large aggregate temporaries. */
2041 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2042 fallback | fb_lvalue);
af72267c
RK
2043 ret = MIN (ret, tret);
2044
ea814c66 2045 /* And finally, the indices and operands of ARRAY_REF. During this
48eb4e53 2046 loop we also remove any useless conversions. */
9771b263 2047 for (; expr_stack.length () > 0; )
af72267c 2048 {
9771b263 2049 tree t = expr_stack.pop ();
af72267c
RK
2050
2051 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2052 {
ba4d8f9d 2053 /* Gimplify the dimension. */
af72267c
RK
2054 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2055 {
2056 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
ba4d8f9d 2057 is_gimple_val, fb_rvalue);
af72267c
RK
2058 ret = MIN (ret, tret);
2059 }
2060 }
48eb4e53
RK
2061
2062 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2063
726a989a
RB
2064 /* The innermost expression P may have originally had
2065 TREE_SIDE_EFFECTS set which would have caused all the outer
2066 expressions in *EXPR_P leading to P to also have had
2067 TREE_SIDE_EFFECTS set. */
6de9cd9a 2068 recalculate_side_effects (t);
6de9cd9a
DN
2069 }
2070
2071 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
90051e16 2072 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
6de9cd9a
DN
2073 {
2074 canonicalize_component_ref (expr_p);
6de9cd9a
DN
2075 }
2076
9771b263 2077 expr_stack.release ();
07724022 2078
941f78d1
JM
2079 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
2080
6de9cd9a
DN
2081 return ret;
2082}
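
/* Worked example (a sketch; names invented): for `x = s.a[i]' the outermost
   ARRAY_REF and the COMPONENT_REF below it are collected on EXPR_STACK; any
   non-constant low bound, element size or field offset is gimplified into
   operands 2/3 first, then the base `s' is gimplified against
   is_gimple_min_lval, and finally the index `i' is gimplified to an
   is_gimple_val (getting a temporary if it has side effects).  */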
2083
206048bd
VR
2084/* Gimplify the self modifying expression pointed to by EXPR_P
2085 (++, --, +=, -=).
6de9cd9a
DN
2086
2087 PRE_P points to the list where side effects that must happen before
2088 *EXPR_P should be stored.
2089
2090 POST_P points to the list where side effects that must happen after
2091 *EXPR_P should be stored.
2092
2093 WANT_VALUE is nonzero iff we want to use the value of this expression
cc3c4f62 2094 in another expression.
6de9cd9a 2095
cc3c4f62
RB
2096 ARITH_TYPE is the type the computation should be performed in. */
2097
2098enum gimplify_status
726a989a 2099gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
cc3c4f62 2100 bool want_value, tree arith_type)
6de9cd9a
DN
2101{
2102 enum tree_code code;
726a989a
RB
2103 tree lhs, lvalue, rhs, t1;
2104 gimple_seq post = NULL, *orig_post_p = post_p;
6de9cd9a
DN
2105 bool postfix;
2106 enum tree_code arith_code;
2107 enum gimplify_status ret;
db3927fb 2108 location_t loc = EXPR_LOCATION (*expr_p);
6de9cd9a
DN
2109
2110 code = TREE_CODE (*expr_p);
2111
282899df
NS
2112 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2113 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
6de9cd9a
DN
2114
2115 /* Prefix or postfix? */
2116 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2117 /* Faster to treat as prefix if result is not used. */
2118 postfix = want_value;
2119 else
2120 postfix = false;
2121
82181741
JJ
2122 /* For postfix, make sure the inner expression's post side effects
2123 are executed after side effects from this expression. */
2124 if (postfix)
2125 post_p = &post;
2126
6de9cd9a
DN
2127 /* Add or subtract? */
2128 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2129 arith_code = PLUS_EXPR;
2130 else
2131 arith_code = MINUS_EXPR;
2132
2133 /* Gimplify the LHS into a GIMPLE lvalue. */
2134 lvalue = TREE_OPERAND (*expr_p, 0);
2135 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2136 if (ret == GS_ERROR)
2137 return ret;
2138
2139 /* Extract the operands to the arithmetic operation. */
2140 lhs = lvalue;
2141 rhs = TREE_OPERAND (*expr_p, 1);
2142
2143 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
d97c9b22 2144 that both as the result value and in the post queue operation. */
6de9cd9a
DN
2145 if (postfix)
2146 {
2147 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2148 if (ret == GS_ERROR)
2149 return ret;
6de9cd9a 2150
d97c9b22
JJ
2151 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
2152 }
cc3c4f62 2153
5be014d5
AP
2154 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
2155 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2156 {
0d82a1c8 2157 rhs = convert_to_ptrofftype_loc (loc, rhs);
5be014d5 2158 if (arith_code == MINUS_EXPR)
db3927fb 2159 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
cc3c4f62 2160 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
5be014d5 2161 }
cc3c4f62
RB
2162 else
2163 t1 = fold_convert (TREE_TYPE (*expr_p),
2164 fold_build2 (arith_code, arith_type,
2165 fold_convert (arith_type, lhs),
2166 fold_convert (arith_type, rhs)));
5be014d5 2167
6de9cd9a
DN
2168 if (postfix)
2169 {
cf1867a0 2170 gimplify_assign (lvalue, t1, pre_p);
726a989a 2171 gimplify_seq_add_seq (orig_post_p, post);
cc3c4f62 2172 *expr_p = lhs;
6de9cd9a
DN
2173 return GS_ALL_DONE;
2174 }
2175 else
2176 {
726a989a 2177 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
6de9cd9a
DN
2178 return GS_OK;
2179 }
2180}
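
/* Illustrative sketch (temporary name invented): with `int *p', the rvalue
   use `q = p++;' gimplifies roughly to

       p.0 = p;
       p = p.0 + 4;      <-- POINTER_PLUS_EXPR; 4 == sizeof (int) here
       q = p.0;

   whereas a plain `++p;' whose value is unused needs no temporary and
   becomes just `p = p + 4;'.  */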
2181
d25cee4d
RH
2182/* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2183
2184static void
2185maybe_with_size_expr (tree *expr_p)
2186{
61025d1b
RK
2187 tree expr = *expr_p;
2188 tree type = TREE_TYPE (expr);
2189 tree size;
d25cee4d 2190
61025d1b
RK
2191 /* If we've already wrapped this or the type is error_mark_node, we can't do
2192 anything. */
2193 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2194 || type == error_mark_node)
d25cee4d
RH
2195 return;
2196
61025d1b 2197 /* If the size isn't known or is a constant, we have nothing to do. */
d25cee4d 2198 size = TYPE_SIZE_UNIT (type);
61025d1b
RK
2199 if (!size || TREE_CODE (size) == INTEGER_CST)
2200 return;
2201
2202 /* Otherwise, make a WITH_SIZE_EXPR. */
2203 size = unshare_expr (size);
2204 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2205 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
d25cee4d
RH
2206}
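
/* Illustrative: when a variable-sized object (e.g. an Ada or Fortran
   aggregate whose size depends on a run-time bound) is passed as a call
   argument or assigned, it is wrapped here as WITH_SIZE_EXPR <expr, size>,
   and gimplify_modify_expr_to_memcpy can later use that size as the length
   of the memcpy it emits.  */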
2207
726a989a 2208/* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P.
1282697f
AH
2209 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2210 the CALL_EXPR. */
e4f78bd4 2211
fe6ebcf1 2212enum gimplify_status
1282697f 2213gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
e4f78bd4
JM
2214{
2215 bool (*test) (tree);
2216 fallback_t fb;
2217
2218 /* In general, we allow lvalues for function arguments to avoid
2219 extra overhead of copying large aggregates out of even larger
2220 aggregates into temporaries only to copy the temporaries to
2221 the argument list. Make optimizers happy by pulling out to
2222 temporaries those types that fit in registers. */
726a989a 2223 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
e4f78bd4
JM
2224 test = is_gimple_val, fb = fb_rvalue;
2225 else
b4ef8aac
JM
2226 {
2227 test = is_gimple_lvalue, fb = fb_either;
2228 /* Also strip a TARGET_EXPR that would force an extra copy. */
2229 if (TREE_CODE (*arg_p) == TARGET_EXPR)
2230 {
2231 tree init = TARGET_EXPR_INITIAL (*arg_p);
2232 if (init
2233 && !VOID_TYPE_P (TREE_TYPE (init)))
2234 *arg_p = init;
2235 }
2236 }
e4f78bd4 2237
d25cee4d 2238 /* If this is a variable sized type, we must remember the size. */
726a989a 2239 maybe_with_size_expr (arg_p);
d25cee4d 2240
c2255bc4 2241 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
1282697f
AH
2242 /* Make sure arguments have the same location as the function call
2243 itself. */
2244 protected_set_expr_location (*arg_p, call_location);
2245
e4f78bd4
JM
2246 /* There is a sequence point before a function call. Side effects in
2247 the argument list must occur before the actual call. So, when
2248 gimplifying arguments, force gimplify_expr to use an internal
2249 post queue which is then appended to the end of PRE_P. */
726a989a 2250 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
e4f78bd4
JM
2251}
2252
88ac13da
TS
2253/* Don't fold STMT inside ORT_TARGET, because it can break code by adding decl
2254 references that weren't in the source. We'll do it during the omplower pass
2255 instead. */
2256
2257static bool
2258maybe_fold_stmt (gimple_stmt_iterator *gsi)
2259{
2260 struct gimplify_omp_ctx *ctx;
2261 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
2262 if (ctx->region_type == ORT_TARGET)
2263 return false;
2264 return fold_stmt (gsi);
2265}
2266
726a989a 2267/* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
90051e16 2268 WANT_VALUE is true if the result of the call is desired. */
6de9cd9a
DN
2269
2270static enum gimplify_status
726a989a 2271gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6de9cd9a 2272{
f20ca725 2273 tree fndecl, parms, p, fnptrtype;
6de9cd9a 2274 enum gimplify_status ret;
5039610b 2275 int i, nargs;
538dd0b7 2276 gcall *call;
ed9c79e1 2277 bool builtin_va_start_p = false;
db3927fb 2278 location_t loc = EXPR_LOCATION (*expr_p);
6de9cd9a 2279
282899df 2280 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
6de9cd9a 2281
d3147f64 2282 /* For reliable diagnostics during inlining, it is necessary that
6de9cd9a 2283 every call_expr be annotated with file and line. */
a281759f
PB
2284 if (! EXPR_HAS_LOCATION (*expr_p))
2285 SET_EXPR_LOCATION (*expr_p, input_location);
6de9cd9a 2286
0e37a2f3
MP
2287 /* Gimplify internal functions created in the FEs. */
2288 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
2289 {
1304953e
JJ
2290 if (want_value)
2291 return GS_ALL_DONE;
2292
0e37a2f3
MP
2293 nargs = call_expr_nargs (*expr_p);
2294 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
2295 auto_vec<tree> vargs (nargs);
2296
2297 for (i = 0; i < nargs; i++)
2298 {
2299 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2300 EXPR_LOCATION (*expr_p));
2301 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
2302 }
2303 gimple call = gimple_build_call_internal_vec (ifn, vargs);
2304 gimplify_seq_add_stmt (pre_p, call);
2305 return GS_ALL_DONE;
2306 }
2307
6de9cd9a
DN
2308 /* This may be a call to a builtin function.
2309
2310 Builtin function calls may be transformed into different
2311 (and more efficient) builtin function calls under certain
2312 circumstances. Unfortunately, gimplification can muck things
2313 up enough that the builtin expanders are not aware that certain
2314 transformations are still valid.
2315
2316 So we attempt transformation/gimplification of the call before
2317 we gimplify the CALL_EXPR. At this time we do not manage to
2318 transform all calls in the same manner as the expanders do, but
2319 we do transform most of them. */
726a989a 2320 fndecl = get_callee_fndecl (*expr_p);
3537a0cd
RG
2321 if (fndecl
2322 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2323 switch (DECL_FUNCTION_CODE (fndecl))
2324 {
2325 case BUILT_IN_VA_START:
2efcfa4e 2326 {
726a989a 2327 builtin_va_start_p = TRUE;
5039610b 2328 if (call_expr_nargs (*expr_p) < 2)
2efcfa4e
AP
2329 {
2330 error ("too few arguments to function %<va_start%>");
c2255bc4 2331 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2efcfa4e
AP
2332 return GS_OK;
2333 }
b8698a0f 2334
5039610b 2335 if (fold_builtin_next_arg (*expr_p, true))
2efcfa4e 2336 {
c2255bc4 2337 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2efcfa4e
AP
2338 return GS_OK;
2339 }
3537a0cd
RG
2340 break;
2341 }
2342 case BUILT_IN_LINE:
2343 {
bb42836b
MLI
2344 *expr_p = build_int_cst (TREE_TYPE (*expr_p),
2345 LOCATION_LINE (EXPR_LOCATION (*expr_p)));
3537a0cd
RG
2346 return GS_OK;
2347 }
2348 case BUILT_IN_FILE:
2349 {
bb42836b
MLI
2350 const char *locfile = LOCATION_FILE (EXPR_LOCATION (*expr_p));
2351 *expr_p = build_string_literal (strlen (locfile) + 1, locfile);
3537a0cd
RG
2352 return GS_OK;
2353 }
2354 case BUILT_IN_FUNCTION:
2355 {
2356 const char *function;
2357 function = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
2358 *expr_p = build_string_literal (strlen (function) + 1, function);
2359 return GS_OK;
2360 }
2361 default:
2362 ;
2363 }
2364 if (fndecl && DECL_BUILT_IN (fndecl))
2365 {
2366 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2367 if (new_tree && new_tree != *expr_p)
2368 {
2369 /* There was a transformation of this call which computes the
2370 same value, but in a more efficient way. Return and try
2371 again. */
2372 *expr_p = new_tree;
2373 return GS_OK;
2efcfa4e 2374 }
6de9cd9a
DN
2375 }
2376
f20ca725
RG
2377 /* Remember the original function pointer type. */
2378 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
2379
6de9cd9a
DN
2380 /* There is a sequence point before the call, so any side effects in
2381 the calling expression must occur before the actual call. Force
2382 gimplify_expr to use an internal post queue. */
5039610b 2383 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
0f59171d 2384 is_gimple_call_addr, fb_rvalue);
6de9cd9a 2385
5039610b
SL
2386 nargs = call_expr_nargs (*expr_p);
2387
e36711f3 2388 /* Get argument types for verification. */
726a989a 2389 fndecl = get_callee_fndecl (*expr_p);
e36711f3 2390 parms = NULL_TREE;
726a989a
RB
2391 if (fndecl)
2392 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
a96c6a62
RB
2393 else
2394 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
e36711f3 2395
726a989a 2396 if (fndecl && DECL_ARGUMENTS (fndecl))
f9487002 2397 p = DECL_ARGUMENTS (fndecl);
004e2fa7 2398 else if (parms)
f9487002 2399 p = parms;
6ef5231b 2400 else
498e51ca 2401 p = NULL_TREE;
f9487002
JJ
2402 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2403 ;
6ef5231b
JJ
2404
2405 /* If the last argument is __builtin_va_arg_pack () and it is not
2406 passed as a named argument, decrease the number of CALL_EXPR
2407 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
2408 if (!p
2409 && i < nargs
2410 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2411 {
2412 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2413 tree last_arg_fndecl = get_callee_fndecl (last_arg);
2414
2415 if (last_arg_fndecl
2416 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2417 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2418 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2419 {
2420 tree call = *expr_p;
2421
2422 --nargs;
db3927fb
AH
2423 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
2424 CALL_EXPR_FN (call),
2425 nargs, CALL_EXPR_ARGP (call));
726a989a
RB
2426
2427 /* Copy all CALL_EXPR flags, location and block, except
6ef5231b
JJ
2428 CALL_EXPR_VA_ARG_PACK flag. */
2429 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2430 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2431 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2432 = CALL_EXPR_RETURN_SLOT_OPT (call);
2433 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
5e278028 2434 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
726a989a 2435
6ef5231b
JJ
2436 /* Set CALL_EXPR_VA_ARG_PACK. */
2437 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2438 }
2439 }
e36711f3 2440
f2d3d07e 2441 /* Gimplify the function arguments. */
726a989a 2442 if (nargs > 0)
6de9cd9a 2443 {
726a989a
RB
2444 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2445 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2446 PUSH_ARGS_REVERSED ? i-- : i++)
2447 {
2448 enum gimplify_status t;
6de9cd9a 2449
726a989a
RB
2450 /* Avoid gimplifying the second argument to va_start, which needs to
2451 be the plain PARM_DECL. */
2452 if ((i != 1) || !builtin_va_start_p)
2453 {
1282697f
AH
2454 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2455 EXPR_LOCATION (*expr_p));
6de9cd9a 2456
726a989a
RB
2457 if (t == GS_ERROR)
2458 ret = GS_ERROR;
2459 }
2460 }
6de9cd9a 2461 }
6de9cd9a 2462
f2d3d07e
RH
2463 /* Gimplify the static chain. */
2464 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
2465 {
2466 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
2467 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
2468 else
2469 {
2470 enum gimplify_status t;
2471 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
2472 EXPR_LOCATION (*expr_p));
2473 if (t == GS_ERROR)
2474 ret = GS_ERROR;
2475 }
2476 }
2477
33922890
RG
2478 /* Verify the function result. */
2479 if (want_value && fndecl
f20ca725 2480 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
33922890
RG
2481 {
2482 error_at (loc, "using result of function returning %<void%>");
2483 ret = GS_ERROR;
2484 }
2485
6de9cd9a 2486 /* Try this again in case gimplification exposed something. */
6f538523 2487 if (ret != GS_ERROR)
6de9cd9a 2488 {
db3927fb 2489 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
6f538523 2490
82d6e6fc 2491 if (new_tree && new_tree != *expr_p)
5039610b
SL
2492 {
2493 /* There was a transformation of this call which computes the
2494 same value, but in a more efficient way. Return and try
2495 again. */
82d6e6fc 2496 *expr_p = new_tree;
5039610b 2497 return GS_OK;
6de9cd9a
DN
2498 }
2499 }
726a989a
RB
2500 else
2501 {
df8fa700 2502 *expr_p = error_mark_node;
726a989a
RB
2503 return GS_ERROR;
2504 }
6de9cd9a
DN
2505
2506 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
2507 decl. This allows us to eliminate redundant or useless
2508 calls to "const" functions. */
becfd6e5
KZ
2509 if (TREE_CODE (*expr_p) == CALL_EXPR)
2510 {
2511 int flags = call_expr_flags (*expr_p);
2512 if (flags & (ECF_CONST | ECF_PURE)
2513 /* An infinite loop is considered a side effect. */
2514 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2515 TREE_SIDE_EFFECTS (*expr_p) = 0;
2516 }
726a989a
RB
2517
2518 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2519 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
2520 form and delegate the creation of a GIMPLE_CALL to
2521 gimplify_modify_expr. This is always possible because when
2522 WANT_VALUE is true, the caller wants the result of this call into
2523 a temporary, which means that we will emit an INIT_EXPR in
2524 internal_get_tmp_var which will then be handled by
2525 gimplify_modify_expr. */
2526 if (!want_value)
2527 {
2528 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2529 have to do is replicate it as a GIMPLE_CALL tuple. */
64e0f5ff 2530 gimple_stmt_iterator gsi;
726a989a 2531 call = gimple_build_call_from_tree (*expr_p);
f20ca725 2532 gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
f6b64c35 2533 notice_special_calls (call);
726a989a 2534 gimplify_seq_add_stmt (pre_p, call);
64e0f5ff 2535 gsi = gsi_last (*pre_p);
88ac13da 2536 maybe_fold_stmt (&gsi);
726a989a
RB
2537 *expr_p = NULL_TREE;
2538 }
f20ca725
RG
2539 else
2540 /* Remember the original function type. */
2541 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
2542 CALL_EXPR_FN (*expr_p));
726a989a 2543
6de9cd9a
DN
2544 return ret;
2545}
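
/* Illustrative sketch (temporary name invented): for the statement
   `f (g ());' no value is wanted, so the inner call is gimplified to
   `tmp = g ();' in PRE_P and the outer call is emitted directly as the
   GIMPLE_CALL `f (tmp);', clearing *EXPR_P; for `y = f (x);' the CALL_EXPR
   is instead left in place so that gimplify_modify_expr can build a single
   GIMPLE_CALL with `y' as its LHS.  */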
2546
2547/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2548 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2549
2550 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2551 condition is true or false, respectively. If null, we should generate
2552 our own to skip over the evaluation of this specific expression.
2553
ca80e52b
EB
2554 LOCUS is the source location of the COND_EXPR.
2555
6de9cd9a
DN
2556 This function is the tree equivalent of do_jump.
2557
2558 shortcut_cond_r should only be called by shortcut_cond_expr. */
2559
2560static tree
ca80e52b
EB
2561shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
2562 location_t locus)
6de9cd9a
DN
2563{
2564 tree local_label = NULL_TREE;
2565 tree t, expr = NULL;
2566
2567 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2568 retain the shortcut semantics. Just insert the gotos here;
2569 shortcut_cond_expr will append the real blocks later. */
2570 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2571 {
ca80e52b
EB
2572 location_t new_locus;
2573
6de9cd9a
DN
2574 /* Turn if (a && b) into
2575
2576 if (a); else goto no;
2577 if (b) goto yes; else goto no;
2578 (no:) */
2579
2580 if (false_label_p == NULL)
2581 false_label_p = &local_label;
2582
ca80e52b
EB
2583 /* Keep the original source location on the first 'if'. */
2584 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
6de9cd9a
DN
2585 append_to_statement_list (t, &expr);
2586
ca80e52b
EB
2587 /* Set the source location of the && on the second 'if'. */
2588 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2589 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2590 new_locus);
6de9cd9a
DN
2591 append_to_statement_list (t, &expr);
2592 }
2593 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2594 {
ca80e52b
EB
2595 location_t new_locus;
2596
6de9cd9a
DN
2597 /* Turn if (a || b) into
2598
2599 if (a) goto yes;
2600 if (b) goto yes; else goto no;
2601 (yes:) */
2602
2603 if (true_label_p == NULL)
2604 true_label_p = &local_label;
2605
ca80e52b
EB
2606 /* Keep the original source location on the first 'if'. */
2607 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
6de9cd9a
DN
2608 append_to_statement_list (t, &expr);
2609
ca80e52b
EB
2610 /* Set the source location of the || on the second 'if'. */
2611 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2612 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2613 new_locus);
6de9cd9a
DN
2614 append_to_statement_list (t, &expr);
2615 }
1537737f
JJ
2616 else if (TREE_CODE (pred) == COND_EXPR
2617 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
2618 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
6de9cd9a 2619 {
ca80e52b
EB
2620 location_t new_locus;
2621
6de9cd9a
DN
2622 /* As long as we're messing with gotos, turn if (a ? b : c) into
2623 if (a)
2624 if (b) goto yes; else goto no;
2625 else
1537737f
JJ
2626 if (c) goto yes; else goto no;
2627
2628 Don't do this if one of the arms has void type, which can happen
2629 in C++ when the arm is throw. */
ca80e52b
EB
2630
2631 /* Keep the original source location on the first 'if'. Set the source
2632 location of the ? on the second 'if'. */
2633 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
b4257cfc
RG
2634 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2635 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
ca80e52b 2636 false_label_p, locus),
b4257cfc 2637 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
ca80e52b 2638 false_label_p, new_locus));
6de9cd9a
DN
2639 }
2640 else
2641 {
b4257cfc
RG
2642 expr = build3 (COND_EXPR, void_type_node, pred,
2643 build_and_jump (true_label_p),
2644 build_and_jump (false_label_p));
ca80e52b 2645 SET_EXPR_LOCATION (expr, locus);
6de9cd9a
DN
2646 }
2647
2648 if (local_label)
2649 {
2650 t = build1 (LABEL_EXPR, void_type_node, local_label);
2651 append_to_statement_list (t, &expr);
2652 }
2653
2654 return expr;
2655}
2656
726a989a
RB
2657/* Given a conditional expression EXPR with short-circuit boolean
2658 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
073a8998 2659 predicate apart into the equivalent sequence of conditionals. */
726a989a 2660
6de9cd9a
DN
2661static tree
2662shortcut_cond_expr (tree expr)
2663{
2664 tree pred = TREE_OPERAND (expr, 0);
2665 tree then_ = TREE_OPERAND (expr, 1);
2666 tree else_ = TREE_OPERAND (expr, 2);
2667 tree true_label, false_label, end_label, t;
2668 tree *true_label_p;
2669 tree *false_label_p;
089efaa4 2670 bool emit_end, emit_false, jump_over_else;
65355d53
RH
2671 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2672 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
6de9cd9a
DN
2673
2674 /* First do simple transformations. */
65355d53 2675 if (!else_se)
6de9cd9a 2676 {
ca80e52b
EB
2677 /* If there is no 'else', turn
2678 if (a && b) then c
2679 into
2680 if (a) if (b) then c. */
6de9cd9a
DN
2681 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2682 {
ca80e52b 2683 /* Keep the original source location on the first 'if'. */
8400e75e 2684 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
6de9cd9a 2685 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
ca80e52b
EB
2686 /* Set the source location of the && on the second 'if'. */
2687 if (EXPR_HAS_LOCATION (pred))
2688 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
6de9cd9a 2689 then_ = shortcut_cond_expr (expr);
4356a1bf 2690 then_se = then_ && TREE_SIDE_EFFECTS (then_);
6de9cd9a 2691 pred = TREE_OPERAND (pred, 0);
b4257cfc 2692 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
ca80e52b 2693 SET_EXPR_LOCATION (expr, locus);
6de9cd9a
DN
2694 }
2695 }
726a989a 2696
65355d53 2697 if (!then_se)
6de9cd9a
DN
2698 {
2699 /* If there is no 'then', turn
2700 if (a || b); else d
2701 into
2702 if (a); else if (b); else d. */
2703 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2704 {
ca80e52b 2705 /* Keep the original source location on the first 'if'. */
8400e75e 2706 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
6de9cd9a 2707 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
ca80e52b
EB
2708 /* Set the source location of the || on the second 'if'. */
2709 if (EXPR_HAS_LOCATION (pred))
2710 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
6de9cd9a 2711 else_ = shortcut_cond_expr (expr);
4356a1bf 2712 else_se = else_ && TREE_SIDE_EFFECTS (else_);
6de9cd9a 2713 pred = TREE_OPERAND (pred, 0);
b4257cfc 2714 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
ca80e52b 2715 SET_EXPR_LOCATION (expr, locus);
6de9cd9a
DN
2716 }
2717 }
2718
2719 /* If we're done, great. */
2720 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2721 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2722 return expr;
2723
2724 /* Otherwise we need to mess with gotos. Change
2725 if (a) c; else d;
2726 to
2727 if (a); else goto no;
2728 c; goto end;
2729 no: d; end:
2730 and recursively gimplify the condition. */
2731
2732 true_label = false_label = end_label = NULL_TREE;
2733
2734 /* If our arms just jump somewhere, hijack those labels so we don't
2735 generate jumps to jumps. */
2736
65355d53
RH
2737 if (then_
2738 && TREE_CODE (then_) == GOTO_EXPR
6de9cd9a
DN
2739 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2740 {
2741 true_label = GOTO_DESTINATION (then_);
65355d53
RH
2742 then_ = NULL;
2743 then_se = false;
6de9cd9a
DN
2744 }
2745
65355d53
RH
2746 if (else_
2747 && TREE_CODE (else_) == GOTO_EXPR
6de9cd9a
DN
2748 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2749 {
2750 false_label = GOTO_DESTINATION (else_);
65355d53
RH
2751 else_ = NULL;
2752 else_se = false;
6de9cd9a
DN
2753 }
2754
9cf737f8 2755 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
6de9cd9a
DN
2756 if (true_label)
2757 true_label_p = &true_label;
2758 else
2759 true_label_p = NULL;
2760
2761 /* The 'else' branch also needs a label if it contains interesting code. */
65355d53 2762 if (false_label || else_se)
6de9cd9a
DN
2763 false_label_p = &false_label;
2764 else
2765 false_label_p = NULL;
2766
2767 /* If there was nothing else in our arms, just forward the label(s). */
65355d53 2768 if (!then_se && !else_se)
ca80e52b 2769 return shortcut_cond_r (pred, true_label_p, false_label_p,
8400e75e 2770 EXPR_LOC_OR_LOC (expr, input_location));
6de9cd9a
DN
2771
2772 /* If our last subexpression already has a terminal label, reuse it. */
65355d53 2773 if (else_se)
ca80e52b 2774 t = expr_last (else_);
65355d53 2775 else if (then_se)
ca80e52b 2776 t = expr_last (then_);
65355d53 2777 else
ca80e52b
EB
2778 t = NULL;
2779 if (t && TREE_CODE (t) == LABEL_EXPR)
2780 end_label = LABEL_EXPR_LABEL (t);
6de9cd9a
DN
2781
2782 /* If we don't care about jumping to the 'else' branch, jump to the end
2783 if the condition is false. */
2784 if (!false_label_p)
2785 false_label_p = &end_label;
2786
2787 /* We only want to emit these labels if we aren't hijacking them. */
2788 emit_end = (end_label == NULL_TREE);
2789 emit_false = (false_label == NULL_TREE);
2790
089efaa4
ILT
2791 /* We only emit the jump over the else clause if we have to--if the
2792 then clause may fall through. Otherwise we can wind up with a
2793 useless jump and a useless label at the end of gimplified code,
2794 which will cause us to think that this conditional as a whole
2795 falls through even if it doesn't. If we then inline a function
2796 which ends with such a condition, that can cause us to issue an
2797 inappropriate warning about control reaching the end of a
2798 non-void function. */
2799 jump_over_else = block_may_fallthru (then_);
2800
ca80e52b 2801 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
8400e75e 2802 EXPR_LOC_OR_LOC (expr, input_location));
6de9cd9a
DN
2803
2804 expr = NULL;
2805 append_to_statement_list (pred, &expr);
2806
2807 append_to_statement_list (then_, &expr);
65355d53 2808 if (else_se)
6de9cd9a 2809 {
089efaa4
ILT
2810 if (jump_over_else)
2811 {
ca80e52b 2812 tree last = expr_last (expr);
089efaa4 2813 t = build_and_jump (&end_label);
ca80e52b
EB
2814 if (EXPR_HAS_LOCATION (last))
2815 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
089efaa4
ILT
2816 append_to_statement_list (t, &expr);
2817 }
6de9cd9a
DN
2818 if (emit_false)
2819 {
2820 t = build1 (LABEL_EXPR, void_type_node, false_label);
2821 append_to_statement_list (t, &expr);
2822 }
2823 append_to_statement_list (else_, &expr);
2824 }
2825 if (emit_end && end_label)
2826 {
2827 t = build1 (LABEL_EXPR, void_type_node, end_label);
2828 append_to_statement_list (t, &expr);
2829 }
2830
2831 return expr;
2832}
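
/* Worked example (a sketch; label names invented, label-reuse cases
   ignored): `if (a && b) x (); else y ();' is rewritten as

       if (a) ; else goto no;
       if (b) ; else goto no;
       x ();
       goto end;
     no:
       y ();
     end:

   after which each remaining simple `if' is gimplified normally.  */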
2833
2834/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
2835
50674e96 2836tree
6de9cd9a
DN
2837gimple_boolify (tree expr)
2838{
2839 tree type = TREE_TYPE (expr);
db3927fb 2840 location_t loc = EXPR_LOCATION (expr);
6de9cd9a 2841
554cf330
JJ
2842 if (TREE_CODE (expr) == NE_EXPR
2843 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
2844 && integer_zerop (TREE_OPERAND (expr, 1)))
2845 {
2846 tree call = TREE_OPERAND (expr, 0);
2847 tree fn = get_callee_fndecl (call);
2848
d53c73e0
JJ
2849 /* For __builtin_expect ((long) (x), y) recurse into x as well
2850 if x is truth_value_p. */
554cf330
JJ
2851 if (fn
2852 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
2853 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
2854 && call_expr_nargs (call) == 2)
2855 {
2856 tree arg = CALL_EXPR_ARG (call, 0);
2857 if (arg)
2858 {
2859 if (TREE_CODE (arg) == NOP_EXPR
2860 && TREE_TYPE (arg) == TREE_TYPE (call))
2861 arg = TREE_OPERAND (arg, 0);
d53c73e0
JJ
2862 if (truth_value_p (TREE_CODE (arg)))
2863 {
2864 arg = gimple_boolify (arg);
2865 CALL_EXPR_ARG (call, 0)
2866 = fold_convert_loc (loc, TREE_TYPE (call), arg);
2867 }
554cf330
JJ
2868 }
2869 }
2870 }
2871
6de9cd9a
DN
2872 switch (TREE_CODE (expr))
2873 {
2874 case TRUTH_AND_EXPR:
2875 case TRUTH_OR_EXPR:
2876 case TRUTH_XOR_EXPR:
2877 case TRUTH_ANDIF_EXPR:
2878 case TRUTH_ORIF_EXPR:
2879 /* Also boolify the arguments of truth exprs. */
2880 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2881 /* FALLTHRU */
2882
2883 case TRUTH_NOT_EXPR:
2884 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
6de9cd9a 2885
6de9cd9a 2886 /* These expressions always produce boolean results. */
7f3ff782
KT
2887 if (TREE_CODE (type) != BOOLEAN_TYPE)
2888 TREE_TYPE (expr) = boolean_type_node;
6de9cd9a 2889 return expr;
d3147f64 2890
8170608b 2891 case ANNOTATE_EXPR:
718c4601 2892 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
8170608b 2893 {
718c4601
EB
2894 case annot_expr_ivdep_kind:
2895 case annot_expr_no_vector_kind:
2896 case annot_expr_vector_kind:
8170608b
TB
2897 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2898 if (TREE_CODE (type) != BOOLEAN_TYPE)
2899 TREE_TYPE (expr) = boolean_type_node;
2900 return expr;
718c4601
EB
2901 default:
2902 gcc_unreachable ();
8170608b 2903 }
8170608b 2904
6de9cd9a 2905 default:
7f3ff782
KT
2906 if (COMPARISON_CLASS_P (expr))
2907 {
2908 /* These expressions always produce boolean results. */
2909 if (TREE_CODE (type) != BOOLEAN_TYPE)
2910 TREE_TYPE (expr) = boolean_type_node;
2911 return expr;
2912 }
6de9cd9a
DN
2913 /* Other expressions that get here must have boolean values, but
2914 might need to be converted to the appropriate mode. */
7f3ff782 2915 if (TREE_CODE (type) == BOOLEAN_TYPE)
1d15f620 2916 return expr;
db3927fb 2917 return fold_convert_loc (loc, boolean_type_node, expr);
6de9cd9a
DN
2918 }
2919}
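
/* For example (illustrative): a C condition such as `if (x & 4)' reaches
   here with the integer-typed predicate `x & 4', and the default case
   converts it to boolean_type_node, essentially `(_Bool) (x & 4)'; a
   comparison like `x < y' instead just has its TREE_TYPE retargeted to
   boolean, since its value is already a truth value.  */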
2920
aea74440
JJ
2921/* Given a conditional expression *EXPR_P without side effects, gimplify
2922 its operands. New statements are inserted into PRE_P. */
2923
2924static enum gimplify_status
726a989a 2925gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
aea74440
JJ
2926{
2927 tree expr = *expr_p, cond;
2928 enum gimplify_status ret, tret;
2929 enum tree_code code;
2930
2931 cond = gimple_boolify (COND_EXPR_COND (expr));
2932
2933 /* We need to handle && and || specially, as their gimplification
2934 creates a pure COND_EXPR, which would otherwise lead to an infinite cycle. */
2935 code = TREE_CODE (cond);
2936 if (code == TRUTH_ANDIF_EXPR)
2937 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
2938 else if (code == TRUTH_ORIF_EXPR)
2939 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
726a989a 2940 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
aea74440
JJ
2941 COND_EXPR_COND (*expr_p) = cond;
2942
2943 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
2944 is_gimple_val, fb_rvalue);
2945 ret = MIN (ret, tret);
2946 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
2947 is_gimple_val, fb_rvalue);
2948
2949 return MIN (ret, tret);
2950}
2951
ad19c4be 2952/* Return true if evaluating EXPR could trap.
aea74440
JJ
2953 EXPR is GENERIC, while tree_could_trap_p can be called
2954 only on GIMPLE. */
2955
2956static bool
2957generic_expr_could_trap_p (tree expr)
2958{
2959 unsigned i, n;
2960
2961 if (!expr || is_gimple_val (expr))
2962 return false;
2963
2964 if (!EXPR_P (expr) || tree_could_trap_p (expr))
2965 return true;
2966
2967 n = TREE_OPERAND_LENGTH (expr);
2968 for (i = 0; i < n; i++)
2969 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
2970 return true;
2971
2972 return false;
2973}
2974
206048bd 2975/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
6de9cd9a
DN
2976 into
2977
2978 if (p)                          if (p)
2979   t1 = a;                         a;
2980 else                  or        else
2981   t1 = b;                         b;
2982 t1;
2983
2984 The second form is used when *EXPR_P is of type void.
2985
2986 PRE_P points to the list where side effects that must happen before
dae7ec87 2987 *EXPR_P should be stored. */
6de9cd9a
DN
2988
2989static enum gimplify_status
726a989a 2990gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
6de9cd9a
DN
2991{
2992 tree expr = *expr_p;
06ec59e6
EB
2993 tree type = TREE_TYPE (expr);
2994 location_t loc = EXPR_LOCATION (expr);
2995 tree tmp, arm1, arm2;
6de9cd9a 2996 enum gimplify_status ret;
726a989a
RB
2997 tree label_true, label_false, label_cont;
2998 bool have_then_clause_p, have_else_clause_p;
538dd0b7 2999 gcond *cond_stmt;
726a989a
RB
3000 enum tree_code pred_code;
3001 gimple_seq seq = NULL;
26d44ae2
RH
3002
3003 /* If this COND_EXPR has a value, copy the values into a temporary within
3004 the arms. */
06ec59e6 3005 if (!VOID_TYPE_P (type))
26d44ae2 3006 {
06ec59e6 3007 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
aff98faf
AO
3008 tree result;
3009
06ec59e6
EB
3010 /* If either an rvalue is ok or we do not require an lvalue, create the
3011 temporary. But we cannot do that if the type is addressable. */
3012 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
c3e203cf 3013 && !TREE_ADDRESSABLE (type))
aff98faf 3014 {
aea74440
JJ
3015 if (gimplify_ctxp->allow_rhs_cond_expr
3016 /* If either branch has side effects or could trap, it can't be
3017 evaluated unconditionally. */
06ec59e6
EB
3018 && !TREE_SIDE_EFFECTS (then_)
3019 && !generic_expr_could_trap_p (then_)
3020 && !TREE_SIDE_EFFECTS (else_)
3021 && !generic_expr_could_trap_p (else_))
aea74440
JJ
3022 return gimplify_pure_cond_expr (expr_p, pre_p);
3023
06ec59e6
EB
3024 tmp = create_tmp_var (type, "iftmp");
3025 result = tmp;
aff98faf 3026 }
06ec59e6
EB
3027
3028 /* Otherwise, only create and copy references to the values. */
26d44ae2
RH
3029 else
3030 {
06ec59e6 3031 type = build_pointer_type (type);
aff98faf 3032
06ec59e6
EB
3033 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3034 then_ = build_fold_addr_expr_loc (loc, then_);
aff98faf 3035
06ec59e6
EB
3036 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3037 else_ = build_fold_addr_expr_loc (loc, else_);
3038
3039 expr
3040 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
aea74440 3041
726a989a 3042 tmp = create_tmp_var (type, "iftmp");
70f34814 3043 result = build_simple_mem_ref_loc (loc, tmp);
26d44ae2
RH
3044 }
3045
06ec59e6
EB
3046 /* Build the new then clause, `tmp = then_;'. But don't build the
3047 assignment if the value is void; in C++ it can be if it's a throw. */
3048 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3049 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
26d44ae2 3050
06ec59e6
EB
3051 /* Similarly, build the new else clause, `tmp = else_;'. */
3052 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3053 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
26d44ae2
RH
3054
3055 TREE_TYPE (expr) = void_type_node;
3056 recalculate_side_effects (expr);
3057
d91ba7b0 3058 /* Move the COND_EXPR to the prequeue. */
726a989a 3059 gimplify_stmt (&expr, pre_p);
26d44ae2 3060
aff98faf 3061 *expr_p = result;
726a989a 3062 return GS_ALL_DONE;
26d44ae2
RH
3063 }
3064
f2f81d57
EB
3065 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
3066 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
3067 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
3068 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
3069
26d44ae2
RH
3070 /* Make sure the condition has BOOLEAN_TYPE. */
3071 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3072
3073 /* Break apart && and || conditions. */
3074 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
3075 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
3076 {
3077 expr = shortcut_cond_expr (expr);
3078
3079 if (expr != *expr_p)
3080 {
3081 *expr_p = expr;
3082
3083 /* We can't rely on gimplify_expr to re-gimplify the expanded
3084 form properly, as cleanups might cause the target labels to be
3085 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
3086 set up a conditional context. */
3087 gimple_push_condition ();
726a989a 3088 gimplify_stmt (expr_p, &seq);
26d44ae2 3089 gimple_pop_condition (pre_p);
726a989a 3090 gimple_seq_add_seq (pre_p, seq);
26d44ae2
RH
3091
3092 return GS_ALL_DONE;
3093 }
3094 }
3095
3096 /* Now do the normal gimplification. */
26d44ae2 3097
726a989a
RB
3098 /* Gimplify condition. */
3099 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
3100 fb_rvalue);
26d44ae2 3101 if (ret == GS_ERROR)
726a989a
RB
3102 return GS_ERROR;
3103 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
3104
3105 gimple_push_condition ();
26d44ae2 3106
726a989a
RB
3107 have_then_clause_p = have_else_clause_p = false;
3108 if (TREE_OPERAND (expr, 1) != NULL
3109 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
3110 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
3111 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
3112 == current_function_decl)
3113 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3114 have different locations, otherwise we end up with incorrect
3115 location information on the branches. */
3116 && (optimize
3117 || !EXPR_HAS_LOCATION (expr)
3118 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3119 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
3120 {
3121 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
3122 have_then_clause_p = true;
26d44ae2
RH
3123 }
3124 else
c2255bc4 3125 label_true = create_artificial_label (UNKNOWN_LOCATION);
726a989a
RB
3126 if (TREE_OPERAND (expr, 2) != NULL
3127 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3128 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3129 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3130 == current_function_decl)
3131 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3132 have different locations, otherwise we end up with incorrect
3133 location information on the branches. */
3134 && (optimize
3135 || !EXPR_HAS_LOCATION (expr)
3136 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3137 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3138 {
3139 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3140 have_else_clause_p = true;
3141 }
3142 else
c2255bc4 3143 label_false = create_artificial_label (UNKNOWN_LOCATION);
26d44ae2 3144
726a989a
RB
3145 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3146 &arm2);
26d44ae2 3147
538dd0b7 3148 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
726a989a 3149 label_false);
26d44ae2 3150
538dd0b7 3151 gimplify_seq_add_stmt (&seq, cond_stmt);
726a989a
RB
3152 label_cont = NULL_TREE;
3153 if (!have_then_clause_p)
3154 {
3155 /* For if (...) {} else { code; } put label_true after
3156 the else block. */
3157 if (TREE_OPERAND (expr, 1) == NULL_TREE
3158 && !have_else_clause_p
3159 && TREE_OPERAND (expr, 2) != NULL_TREE)
3160 label_cont = label_true;
3161 else
3162 {
3163 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
3164 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
3165 /* For if (...) { code; } else {} or
3166 if (...) { code; } else goto label; or
3167 if (...) { code; return; } else { ... }
3168 label_cont isn't needed. */
3169 if (!have_else_clause_p
3170 && TREE_OPERAND (expr, 2) != NULL_TREE
3171 && gimple_seq_may_fallthru (seq))
3172 {
3173 gimple g;
c2255bc4 3174 label_cont = create_artificial_label (UNKNOWN_LOCATION);
726a989a
RB
3175
3176 g = gimple_build_goto (label_cont);
3177
3178	    /* GIMPLE_CONDs are very low level; they have embedded
3179 gotos. This particular embedded goto should not be marked
3180 with the location of the original COND_EXPR, as it would
3181 correspond to the COND_EXPR's condition, not the ELSE or the
3182 THEN arms. To avoid marking it with the wrong location, flag
3183 it as "no location". */
3184 gimple_set_do_not_emit_location (g);
3185
3186 gimplify_seq_add_stmt (&seq, g);
3187 }
3188 }
3189 }
3190 if (!have_else_clause_p)
3191 {
3192 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
3193 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
3194 }
3195 if (label_cont)
3196 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
3197
3198 gimple_pop_condition (pre_p);
3199 gimple_seq_add_seq (pre_p, seq);
3200
3201 if (ret == GS_ERROR)
3202 ; /* Do nothing. */
3203 else if (have_then_clause_p || have_else_clause_p)
3204 ret = GS_ALL_DONE;
3205 else
3206 {
3207 /* Both arms are empty; replace the COND_EXPR with its predicate. */
3208 expr = TREE_OPERAND (expr, 0);
3209 gimplify_stmt (&expr, pre_p);
3210 }
3211
3212 *expr_p = NULL;
3213 return ret;
3214}
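
/* Illustrative sketch for the COND_EXPR lowering above (the label names are
   made up for this example, not part of the sources): a GENERIC statement
   such as

     if (a > b) x = 1; else x = 2;

   is lowered into a GIMPLE_COND with explicit labels and gotos, roughly

     if (a > b) goto L_true; else goto L_false;
     L_true:
     x = 1;
     goto L_cont;
     L_false:
     x = 2;
     L_cont:

   When an arm is already a bare GOTO_EXPR to a local label, that label is
   reused directly and no artificial label is created for that arm.  */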
3215
f76d6e6f
EB
3216/* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3217 to be marked addressable.
3218
3219 We cannot rely on such an expression being directly markable if a temporary
3220 has been created by the gimplification. In this case, we create another
3221 temporary and initialize it with a copy, which will become a store after we
3222 mark it addressable. This can happen if the front-end passed us something
3223   that it could not mark addressable yet, such as a Fortran pass-by-reference
3224   actual argument of the form (int) floatvar. */
3225
3226static void
3227prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3228{
3229 while (handled_component_p (*expr_p))
3230 expr_p = &TREE_OPERAND (*expr_p, 0);
3231 if (is_gimple_reg (*expr_p))
947ca6a0
RB
3232 {
3233 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3234 DECL_GIMPLE_REG_P (var) = 0;
3235 *expr_p = var;
3236 }
f76d6e6f
EB
3237}
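
/* For illustration (variable names invented): if the RHS of a block copy was
   gimplified into a register temporary, e.g.

     D_tmp = (int) floatvar;

   then D_tmp cannot simply be marked addressable.  The function above copies
   it into a second, ordinary temporary whose DECL_GIMPLE_REG_P is cleared, so
   that taking its address afterwards is legitimate.  */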
3238
726a989a
RB
3239/* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3240 a call to __builtin_memcpy. */
3241
3242static enum gimplify_status
3243gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3244 gimple_seq *seq_p)
26d44ae2 3245{
5039610b 3246 tree t, to, to_ptr, from, from_ptr;
538dd0b7 3247 gcall *gs;
db3927fb 3248 location_t loc = EXPR_LOCATION (*expr_p);
26d44ae2 3249
726a989a
RB
3250 to = TREE_OPERAND (*expr_p, 0);
3251 from = TREE_OPERAND (*expr_p, 1);
26d44ae2 3252
f76d6e6f
EB
3253 /* Mark the RHS addressable. Beware that it may not be possible to do so
3254 directly if a temporary has been created by the gimplification. */
3255 prepare_gimple_addressable (&from, seq_p);
3256
628c189e 3257 mark_addressable (from);
db3927fb
AH
3258 from_ptr = build_fold_addr_expr_loc (loc, from);
3259 gimplify_arg (&from_ptr, seq_p, loc);
26d44ae2 3260
628c189e 3261 mark_addressable (to);
db3927fb
AH
3262 to_ptr = build_fold_addr_expr_loc (loc, to);
3263 gimplify_arg (&to_ptr, seq_p, loc);
726a989a 3264
e79983f4 3265 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
726a989a
RB
3266
3267 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
26d44ae2
RH
3268
3269 if (want_value)
3270 {
726a989a 3271 /* tmp = memcpy() */
b731b390 3272 t = create_tmp_var (TREE_TYPE (to_ptr));
726a989a
RB
3273 gimple_call_set_lhs (gs, t);
3274 gimplify_seq_add_stmt (seq_p, gs);
3275
70f34814 3276 *expr_p = build_simple_mem_ref (t);
726a989a 3277 return GS_ALL_DONE;
26d44ae2
RH
3278 }
3279
726a989a
RB
3280 gimplify_seq_add_stmt (seq_p, gs);
3281 *expr_p = NULL;
3282 return GS_ALL_DONE;
26d44ae2
RH
3283}
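
/* Rough sketch of the effect (names invented): a variable-sized assignment
   between two addressable objects,

     *dstp = *srcp;   -- size only known at run time

   is replaced by

     __builtin_memcpy (&*dstp, &*srcp, size);

   and, when the value of the assignment is wanted, the call's result is
   captured in a temporary T so that *T can be used as the value.  */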
3284
3285/* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3286 a call to __builtin_memset. In this case we know that the RHS is
3287 a CONSTRUCTOR with an empty element list. */
3288
3289static enum gimplify_status
726a989a
RB
3290gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3291 gimple_seq *seq_p)
26d44ae2 3292{
1a13360e 3293 tree t, from, to, to_ptr;
538dd0b7 3294 gcall *gs;
db3927fb 3295 location_t loc = EXPR_LOCATION (*expr_p);
26d44ae2 3296
1a13360e
OH
3297 /* Assert our assumptions, to abort instead of producing wrong code
3298 silently if they are not met. Beware that the RHS CONSTRUCTOR might
3299 not be immediately exposed. */
b8698a0f 3300 from = TREE_OPERAND (*expr_p, 1);
1a13360e
OH
3301 if (TREE_CODE (from) == WITH_SIZE_EXPR)
3302 from = TREE_OPERAND (from, 0);
3303
3304 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
9771b263 3305 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
1a13360e
OH
3306
3307 /* Now proceed. */
726a989a 3308 to = TREE_OPERAND (*expr_p, 0);
26d44ae2 3309
db3927fb
AH
3310 to_ptr = build_fold_addr_expr_loc (loc, to);
3311 gimplify_arg (&to_ptr, seq_p, loc);
e79983f4 3312 t = builtin_decl_implicit (BUILT_IN_MEMSET);
726a989a
RB
3313
3314 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
26d44ae2
RH
3315
3316 if (want_value)
3317 {
726a989a 3318 /* tmp = memset() */
b731b390 3319 t = create_tmp_var (TREE_TYPE (to_ptr));
726a989a
RB
3320 gimple_call_set_lhs (gs, t);
3321 gimplify_seq_add_stmt (seq_p, gs);
3322
3323 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3324 return GS_ALL_DONE;
26d44ae2
RH
3325 }
3326
726a989a
RB
3327 gimplify_seq_add_stmt (seq_p, gs);
3328 *expr_p = NULL;
3329 return GS_ALL_DONE;
26d44ae2
RH
3330}
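
/* Rough sketch (names invented): clearing a variable-sized object from a
   CONSTRUCTOR with an empty element list,

     *dstp = (struct S) {};

   becomes

     __builtin_memset (&*dstp, 0, size);

   with the same handling of the return value as in the memcpy case.  */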
3331
57d1dd87
RH
3332/* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
3333 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
ad19c4be 3334 assignment. Return non-null if we detect a potential overlap. */
57d1dd87
RH
3335
3336struct gimplify_init_ctor_preeval_data
3337{
3338 /* The base decl of the lhs object. May be NULL, in which case we
3339 have to assume the lhs is indirect. */
3340 tree lhs_base_decl;
3341
3342 /* The alias set of the lhs object. */
4862826d 3343 alias_set_type lhs_alias_set;
57d1dd87
RH
3344};
3345
3346static tree
3347gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3348{
3349 struct gimplify_init_ctor_preeval_data *data
3350 = (struct gimplify_init_ctor_preeval_data *) xdata;
3351 tree t = *tp;
3352
3353 /* If we find the base object, obviously we have overlap. */
3354 if (data->lhs_base_decl == t)
3355 return t;
3356
3357 /* If the constructor component is indirect, determine if we have a
3358 potential overlap with the lhs. The only bits of information we
3359 have to go on at this point are addressability and alias sets. */
70f34814
RG
3360 if ((INDIRECT_REF_P (t)
3361 || TREE_CODE (t) == MEM_REF)
57d1dd87
RH
3362 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3363 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3364 return t;
3365
df10ee2a 3366 /* If the constructor component is a call, determine if it can hide a
70f34814
RG
3367 potential overlap with the lhs through an INDIRECT_REF like above.
3368 ??? Ugh - this is completely broken. In fact this whole analysis
3369 doesn't look conservative. */
df10ee2a
EB
3370 if (TREE_CODE (t) == CALL_EXPR)
3371 {
3372 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3373
3374 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3375 if (POINTER_TYPE_P (TREE_VALUE (type))
3376 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3377 && alias_sets_conflict_p (data->lhs_alias_set,
3378 get_alias_set
3379 (TREE_TYPE (TREE_VALUE (type)))))
3380 return t;
3381 }
3382
6615c446 3383 if (IS_TYPE_OR_DECL_P (t))
57d1dd87
RH
3384 *walk_subtrees = 0;
3385 return NULL;
3386}
3387
726a989a 3388/* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
57d1dd87
RH
3389 force values that overlap with the lhs (as described by *DATA)
3390 into temporaries. */
3391
3392static void
726a989a 3393gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
57d1dd87
RH
3394 struct gimplify_init_ctor_preeval_data *data)
3395{
3396 enum gimplify_status one;
3397
51eed280
PB
3398 /* If the value is constant, then there's nothing to pre-evaluate. */
3399 if (TREE_CONSTANT (*expr_p))
3400 {
3401 /* Ensure it does not have side effects, it might contain a reference to
3402 the object we're initializing. */
3403 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
3404 return;
3405 }
57d1dd87
RH
3406
3407 /* If the type has non-trivial constructors, we can't pre-evaluate. */
3408 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
3409 return;
3410
3411 /* Recurse for nested constructors. */
3412 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
3413 {
4038c495
GB
3414 unsigned HOST_WIDE_INT ix;
3415 constructor_elt *ce;
9771b263 3416 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4038c495 3417
9771b263 3418 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4038c495 3419 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
726a989a 3420
57d1dd87
RH
3421 return;
3422 }
3423
0461b801
EB
3424 /* If this is a variable sized type, we must remember the size. */
3425 maybe_with_size_expr (expr_p);
57d1dd87
RH
3426
3427 /* Gimplify the constructor element to something appropriate for the rhs
726a989a 3428 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
d3147f64 3429 the gimplifier will consider this a store to memory. Doing this
57d1dd87
RH
3430 gimplification now means that we won't have to deal with complicated
3431 language-specific trees, nor trees like SAVE_EXPR that can induce
b01d837f 3432 exponential search behavior. */
57d1dd87
RH
3433 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
3434 if (one == GS_ERROR)
3435 {
3436 *expr_p = NULL;
3437 return;
3438 }
3439
3440 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
3441 with the lhs, since "a = { .x=a }" doesn't make sense. This will
3442 always be true for all scalars, since is_gimple_mem_rhs insists on a
3443 temporary variable for them. */
3444 if (DECL_P (*expr_p))
3445 return;
3446
3447 /* If this is of variable size, we have no choice but to assume it doesn't
3448 overlap since we can't make a temporary for it. */
4c923c28 3449 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
57d1dd87
RH
3450 return;
3451
3452 /* Otherwise, we must search for overlap ... */
3453 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
3454 return;
3455
3456 /* ... and if found, force the value into a temporary. */
3457 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
3458}
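
/* For illustration (names invented): when an assignment from a CONSTRUCTOR
   refers to the object being overwritten, e.g.

     a = (struct S) { .x = a.y, .y = 0 };

   the pre-evaluation above forces a.y into a temporary before the object is
   (possibly) cleared and the element stores are emitted:

     T = a.y;
     a.x = T;
     a.y = 0;

   so the old value of a.y is not lost.  */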
3459
6fa91b48
SB
3460/* A subroutine of gimplify_init_ctor_eval. Create a loop for
3461 a RANGE_EXPR in a CONSTRUCTOR for an array.
3462
3463 var = lower;
3464 loop_entry:
3465 object[var] = value;
3466 if (var == upper)
3467 goto loop_exit;
3468 var = var + 1;
3469 goto loop_entry;
3470 loop_exit:
3471
3472 We increment var _after_ the loop exit check because we might otherwise
3473 fail if upper == TYPE_MAX_VALUE (type for upper).
3474
3475 Note that we never have to deal with SAVE_EXPRs here, because this has
3476 already been taken care of for us, in gimplify_init_ctor_preeval(). */
3477
9771b263 3478static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
726a989a 3479 gimple_seq *, bool);
6fa91b48
SB
3480
3481static void
3482gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3483 tree value, tree array_elt_type,
726a989a 3484 gimple_seq *pre_p, bool cleared)
6fa91b48 3485{
726a989a 3486 tree loop_entry_label, loop_exit_label, fall_thru_label;
b56b9fe3 3487 tree var, var_type, cref, tmp;
6fa91b48 3488
c2255bc4
AH
3489 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
3490 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
3491 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
6fa91b48
SB
3492
3493 /* Create and initialize the index variable. */
3494 var_type = TREE_TYPE (upper);
b731b390 3495 var = create_tmp_var (var_type);
726a989a 3496 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
6fa91b48
SB
3497
3498 /* Add the loop entry label. */
726a989a 3499 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
6fa91b48
SB
3500
3501 /* Build the reference. */
3502 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3503 var, NULL_TREE, NULL_TREE);
3504
3505 /* If we are a constructor, just call gimplify_init_ctor_eval to do
3506 the store. Otherwise just assign value to the reference. */
3507
3508 if (TREE_CODE (value) == CONSTRUCTOR)
3509 /* NB we might have to call ourself recursively through
3510 gimplify_init_ctor_eval if the value is a constructor. */
3511 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3512 pre_p, cleared);
3513 else
726a989a 3514 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
6fa91b48
SB
3515
3516 /* We exit the loop when the index var is equal to the upper bound. */
726a989a
RB
3517 gimplify_seq_add_stmt (pre_p,
3518 gimple_build_cond (EQ_EXPR, var, upper,
3519 loop_exit_label, fall_thru_label));
3520
3521 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
6fa91b48
SB
3522
3523 /* Otherwise, increment the index var... */
b56b9fe3
RS
3524 tmp = build2 (PLUS_EXPR, var_type, var,
3525 fold_convert (var_type, integer_one_node));
726a989a 3526 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
6fa91b48
SB
3527
3528 /* ...and jump back to the loop entry. */
726a989a 3529 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
6fa91b48
SB
3530
3531 /* Add the loop exit label. */
726a989a 3532 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
6fa91b48
SB
3533}
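
/* For illustration: a GNU C designated-range initializer such as

     int a[100] = { [10 ... 89] = 5 };

   reaches this point as a RANGE_EXPR index and is expanded into the loop
   shown in the comment above, with lower == 10 and upper == 89.  */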
3534
292a398f 3535/* Return true if FDECL is accessing a field that is zero sized. */
b8698a0f 3536
292a398f 3537static bool
22ea9ec0 3538zero_sized_field_decl (const_tree fdecl)
292a398f 3539{
b8698a0f 3540 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
292a398f
DB
3541 && integer_zerop (DECL_SIZE (fdecl)))
3542 return true;
3543 return false;
3544}
3545
d06526b7 3546/* Return true if TYPE is zero sized. */
b8698a0f 3547
d06526b7 3548static bool
22ea9ec0 3549zero_sized_type (const_tree type)
d06526b7
AP
3550{
3551 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3552 && integer_zerop (TYPE_SIZE (type)))
3553 return true;
3554 return false;
3555}
3556
57d1dd87
RH
3557/* A subroutine of gimplify_init_constructor. Generate individual
3558 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4038c495 3559 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
57d1dd87
RH
3560 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
3561 zeroed first. */
3562
3563static void
9771b263 3564gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
726a989a 3565 gimple_seq *pre_p, bool cleared)
57d1dd87
RH
3566{
3567 tree array_elt_type = NULL;
4038c495
GB
3568 unsigned HOST_WIDE_INT ix;
3569 tree purpose, value;
57d1dd87
RH
3570
3571 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
3572 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
3573
4038c495 3574 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
57d1dd87 3575 {
726a989a 3576 tree cref;
57d1dd87
RH
3577
3578 /* NULL values are created above for gimplification errors. */
3579 if (value == NULL)
3580 continue;
3581
3582 if (cleared && initializer_zerop (value))
3583 continue;
3584
6fa91b48
SB
3585 /* ??? Here's to hoping the front end fills in all of the indices,
3586 so we don't have to figure out what's missing ourselves. */
3587 gcc_assert (purpose);
3588
816fa80a
OH
3589 /* Skip zero-sized fields, unless value has side-effects. This can
3590 happen with calls to functions returning a zero-sized type, which
3591 we shouldn't discard. As a number of downstream passes don't
3592 expect sets of zero-sized fields, we rely on the gimplification of
3593 the MODIFY_EXPR we make below to drop the assignment statement. */
3594 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
292a398f
DB
3595 continue;
3596
6fa91b48
SB
3597 /* If we have a RANGE_EXPR, we have to build a loop to assign the
3598 whole range. */
3599 if (TREE_CODE (purpose) == RANGE_EXPR)
57d1dd87 3600 {
6fa91b48
SB
3601 tree lower = TREE_OPERAND (purpose, 0);
3602 tree upper = TREE_OPERAND (purpose, 1);
3603
3604 /* If the lower bound is equal to upper, just treat it as if
3605 upper was the index. */
3606 if (simple_cst_equal (lower, upper))
3607 purpose = upper;
3608 else
3609 {
3610 gimplify_init_ctor_eval_range (object, lower, upper, value,
3611 array_elt_type, pre_p, cleared);
3612 continue;
3613 }
3614 }
57d1dd87 3615
6fa91b48
SB
3616 if (array_elt_type)
3617 {
1a1640db
RG
3618 /* Do not use bitsizetype for ARRAY_REF indices. */
3619 if (TYPE_DOMAIN (TREE_TYPE (object)))
ad19c4be
EB
3620 purpose
3621 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
3622 purpose);
b4257cfc
RG
3623 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3624 purpose, NULL_TREE, NULL_TREE);
57d1dd87
RH
3625 }
3626 else
cf0efa6a
ILT
3627 {
3628 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
b4257cfc
RG
3629 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
3630 unshare_expr (object), purpose, NULL_TREE);
cf0efa6a 3631 }
57d1dd87 3632
cf0efa6a
ILT
3633 if (TREE_CODE (value) == CONSTRUCTOR
3634 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
57d1dd87
RH
3635 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3636 pre_p, cleared);
3637 else
3638 {
726a989a 3639 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
57d1dd87 3640 gimplify_and_add (init, pre_p);
726a989a 3641 ggc_free (init);
57d1dd87
RH
3642 }
3643 }
3644}
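
/* Rough sketch: assuming the object was not block-cleared, an initializer
   like

     struct S { int a, b; } s = { 1, 2 };

   is broken up by the loop above into the individual stores

     s.a = 1;
     s.b = 2;

   while zero-valued elements are skipped whenever the whole object has
   already been cleared.  */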
3645
ad19c4be 3646/* Return the appropriate RHS predicate for this LHS. */
726a989a 3647
18f429e2 3648gimple_predicate
726a989a
RB
3649rhs_predicate_for (tree lhs)
3650{
ba4d8f9d
RG
3651 if (is_gimple_reg (lhs))
3652 return is_gimple_reg_rhs_or_call;
726a989a 3653 else
ba4d8f9d 3654 return is_gimple_mem_rhs_or_call;
726a989a
RB
3655}
3656
2ec5deb5
PB
3657/* Gimplify a C99 compound literal expression. This just means adding
3658 the DECL_EXPR before the current statement and using its anonymous
3659 decl instead. */
3660
3661static enum gimplify_status
a845a7f5 3662gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
4c53d183 3663 bool (*gimple_test_f) (tree),
a845a7f5 3664 fallback_t fallback)
2ec5deb5
PB
3665{
3666 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
3667 tree decl = DECL_EXPR_DECL (decl_s);
4c53d183 3668 tree init = DECL_INITIAL (decl);
2ec5deb5
PB
3669 /* Mark the decl as addressable if the compound literal
3670 expression is addressable now, otherwise it is marked too late
3671 after we gimplify the initialization expression. */
3672 if (TREE_ADDRESSABLE (*expr_p))
3673 TREE_ADDRESSABLE (decl) = 1;
4c53d183
MM
3674 /* Otherwise, if we don't need an lvalue and have a literal directly
3675 substitute it. Check if it matches the gimple predicate, as
3676 otherwise we'd generate a new temporary, and we can as well just
3677 use the decl we already have. */
3678 else if (!TREE_ADDRESSABLE (decl)
3679 && init
3680 && (fallback & fb_lvalue) == 0
3681 && gimple_test_f (init))
3682 {
3683 *expr_p = init;
3684 return GS_OK;
3685 }
2ec5deb5
PB
3686
3687 /* Preliminarily mark non-addressed complex variables as eligible
3688 for promotion to gimple registers. We'll transform their uses
3689 as we find them. */
3690 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
3691 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
3692 && !TREE_THIS_VOLATILE (decl)
3693 && !needs_to_live_in_memory (decl))
3694 DECL_GIMPLE_REG_P (decl) = 1;
3695
a845a7f5
ILT
3696 /* If the decl is not addressable, then it is being used in some
3697 expression or on the right hand side of a statement, and it can
3698 be put into a readonly data section. */
3699 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
3700 TREE_READONLY (decl) = 1;
3701
2ec5deb5
PB
3702 /* This decl isn't mentioned in the enclosing block, so add it to the
3703 list of temps. FIXME it seems a bit of a kludge to say that
3704 anonymous artificial vars aren't pushed, but everything else is. */
3705 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
3706 gimple_add_tmp_var (decl);
3707
3708 gimplify_and_add (decl_s, pre_p);
3709 *expr_p = decl;
3710 return GS_OK;
3711}
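
/* For illustration (the decl name D is invented): a C99 compound literal
   used as an rvalue,

     foo ((struct S) { 1, 2 });

   is rewritten so that the literal's anonymous decl is declared and
   initialized first and then used in its place, roughly

     struct S D = { 1, 2 };
     foo (D);

   unless the initializer can be substituted directly as described above.  */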
3712
3713/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3714 return a new CONSTRUCTOR if something changed. */
3715
3716static tree
3717optimize_compound_literals_in_ctor (tree orig_ctor)
3718{
3719 tree ctor = orig_ctor;
9771b263
DN
3720 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
3721 unsigned int idx, num = vec_safe_length (elts);
2ec5deb5
PB
3722
3723 for (idx = 0; idx < num; idx++)
3724 {
9771b263 3725 tree value = (*elts)[idx].value;
2ec5deb5
PB
3726 tree newval = value;
3727 if (TREE_CODE (value) == CONSTRUCTOR)
3728 newval = optimize_compound_literals_in_ctor (value);
3729 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3730 {
3731 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3732 tree decl = DECL_EXPR_DECL (decl_s);
3733 tree init = DECL_INITIAL (decl);
3734
3735 if (!TREE_ADDRESSABLE (value)
3736 && !TREE_ADDRESSABLE (decl)
6f8f67e9
JJ
3737 && init
3738 && TREE_CODE (init) == CONSTRUCTOR)
2ec5deb5
PB
3739 newval = optimize_compound_literals_in_ctor (init);
3740 }
3741 if (newval == value)
3742 continue;
3743
3744 if (ctor == orig_ctor)
3745 {
3746 ctor = copy_node (orig_ctor);
9771b263 3747 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
2ec5deb5
PB
3748 elts = CONSTRUCTOR_ELTS (ctor);
3749 }
9771b263 3750 (*elts)[idx].value = newval;
2ec5deb5
PB
3751 }
3752 return ctor;
3753}
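
/* Rough sketch: an initializer that embeds a compound literal whose address
   is never taken, e.g.

     struct T t = { .s = (struct S) { 1, 2 } };

   is flattened so that the literal's CONSTRUCTOR is used directly,

     struct T t = { .s = { 1, 2 } };

   which lets the whole initializer be treated as a single constant.  */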
3754
26d44ae2
RH
3755/* A subroutine of gimplify_modify_expr. Break out elements of a
3756 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3757
3758 Note that we still need to clear any elements that don't have explicit
3759 initializers, so if not all elements are initialized we keep the
ffed8a01
AH
3760   original MODIFY_EXPR; we just remove all of the constructor elements.
3761
3762 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3763 GS_ERROR if we would have to create a temporary when gimplifying
3764 this constructor. Otherwise, return GS_OK.
3765
3766 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
26d44ae2
RH
3767
3768static enum gimplify_status
726a989a
RB
3769gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3770 bool want_value, bool notify_temp_creation)
26d44ae2 3771{
f5a1f0d0 3772 tree object, ctor, type;
26d44ae2 3773 enum gimplify_status ret;
9771b263 3774 vec<constructor_elt, va_gc> *elts;
26d44ae2 3775
f5a1f0d0 3776 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
26d44ae2 3777
ffed8a01
AH
3778 if (!notify_temp_creation)
3779 {
726a989a 3780 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
ffed8a01
AH
3781 is_gimple_lvalue, fb_lvalue);
3782 if (ret == GS_ERROR)
3783 return ret;
3784 }
57d1dd87 3785
726a989a 3786 object = TREE_OPERAND (*expr_p, 0);
f5a1f0d0
PB
3787 ctor = TREE_OPERAND (*expr_p, 1) =
3788 optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
3789 type = TREE_TYPE (ctor);
3790 elts = CONSTRUCTOR_ELTS (ctor);
26d44ae2 3791 ret = GS_ALL_DONE;
726a989a 3792
26d44ae2
RH
3793 switch (TREE_CODE (type))
3794 {
3795 case RECORD_TYPE:
3796 case UNION_TYPE:
3797 case QUAL_UNION_TYPE:
3798 case ARRAY_TYPE:
3799 {
57d1dd87 3800 struct gimplify_init_ctor_preeval_data preeval_data;
953d0c90
RS
3801 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
3802 bool cleared, complete_p, valid_const_initializer;
26d44ae2
RH
3803
3804 /* Aggregate types must lower constructors to initialization of
3805 individual elements. The exception is that a CONSTRUCTOR node
3806 with no elements indicates zero-initialization of the whole. */
9771b263 3807 if (vec_safe_is_empty (elts))
ffed8a01
AH
3808 {
3809 if (notify_temp_creation)
3810 return GS_OK;
3811 break;
3812 }
b8698a0f 3813
fe24d485
OH
3814 /* Fetch information about the constructor to direct later processing.
3815 We might want to make static versions of it in various cases, and
3816	 can only do so if it is known to be a valid constant initializer. */
3817 valid_const_initializer
3818 = categorize_ctor_elements (ctor, &num_nonzero_elements,
953d0c90 3819 &num_ctor_elements, &complete_p);
26d44ae2
RH
3820
3821 /* If a const aggregate variable is being initialized, then it
3822	 should never be a loss to promote the variable to be static. */
fe24d485 3823 if (valid_const_initializer
6f642f98 3824 && num_nonzero_elements > 1
26d44ae2 3825 && TREE_READONLY (object)
d0ea0759
SE
3826 && TREE_CODE (object) == VAR_DECL
3827 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
26d44ae2 3828 {
ffed8a01
AH
3829 if (notify_temp_creation)
3830 return GS_ERROR;
26d44ae2
RH
3831 DECL_INITIAL (object) = ctor;
3832 TREE_STATIC (object) = 1;
3833 if (!DECL_NAME (object))
3834 DECL_NAME (object) = create_tmp_var_name ("C");
3835 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
3836
3837 /* ??? C++ doesn't automatically append a .<number> to the
6bdf3519 3838 assembler name, and even when it does, it looks at FE private
26d44ae2
RH
3839 data structures to figure out what that number should be,
3840 which are not set for this variable. I suppose this is
3841 important for local statics for inline functions, which aren't
3842 "local" in the object file sense. So in order to get a unique
3843 TU-local symbol, we must invoke the lhd version now. */
3844 lhd_set_decl_assembler_name (object);
3845
3846 *expr_p = NULL_TREE;
3847 break;
3848 }
3849
cce70747
JC
3850 /* If there are "lots" of initialized elements, even discounting
3851 those that are not address constants (and thus *must* be
3852 computed at runtime), then partition the constructor into
3853 constant and non-constant parts. Block copy the constant
3854 parts in, then generate code for the non-constant parts. */
3855 /* TODO. There's code in cp/typeck.c to do this. */
3856
953d0c90
RS
3857 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
3858 /* store_constructor will ignore the clearing of variable-sized
3859 objects. Initializers for such objects must explicitly set
3860 every field that needs to be set. */
3861 cleared = false;
d368135f 3862 else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
953d0c90 3863 /* If the constructor isn't complete, clear the whole object
d368135f 3864 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
953d0c90
RS
3865
3866 ??? This ought not to be needed. For any element not present
3867 in the initializer, we should simply set them to zero. Except
3868 we'd need to *find* the elements that are not present, and that
3869 requires trickery to avoid quadratic compile-time behavior in
3870 large cases or excessive memory use in small cases. */
73ed17ff 3871 cleared = true;
953d0c90 3872 else if (num_ctor_elements - num_nonzero_elements
e04ad03d 3873 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
953d0c90
RS
3874 && num_nonzero_elements < num_ctor_elements / 4)
3875 /* If there are "lots" of zeros, it's more efficient to clear
3876 the memory and then set the nonzero elements. */
cce70747 3877 cleared = true;
953d0c90
RS
3878 else
3879 cleared = false;
cce70747 3880
26d44ae2
RH
3881 /* If there are "lots" of initialized elements, and all of them
3882 are valid address constants, then the entire initializer can
cce70747
JC
3883 be dropped to memory, and then memcpy'd out. Don't do this
3884 for sparse arrays, though, as it's more efficient to follow
3885 the standard CONSTRUCTOR behavior of memset followed by
8afd015a
JM
3886 individual element initialization. Also don't do this for small
3887 all-zero initializers (which aren't big enough to merit
3888 clearing), and don't try to make bitwise copies of
d5e254e1
IE
3889 TREE_ADDRESSABLE types.
3890
3891 We cannot apply such transformation when compiling chkp static
3892 initializer because creation of initializer image in the memory
3893 will require static initialization of bounds for it. It should
3894 result in another gimplification of similar initializer and we
3895 may fall into infinite loop. */
8afd015a
JM
3896 if (valid_const_initializer
3897 && !(cleared || num_nonzero_elements == 0)
d5e254e1
IE
3898 && !TREE_ADDRESSABLE (type)
3899 && (!current_function_decl
3900 || !lookup_attribute ("chkp ctor",
3901 DECL_ATTRIBUTES (current_function_decl))))
26d44ae2
RH
3902 {
3903 HOST_WIDE_INT size = int_size_in_bytes (type);
3904 unsigned int align;
3905
3906 /* ??? We can still get unbounded array types, at least
3907 from the C++ front end. This seems wrong, but attempt
3908 to work around it for now. */
3909 if (size < 0)
3910 {
3911 size = int_size_in_bytes (TREE_TYPE (object));
3912 if (size >= 0)
3913 TREE_TYPE (ctor) = type = TREE_TYPE (object);
3914 }
3915
3916 /* Find the maximum alignment we can assume for the object. */
3917 /* ??? Make use of DECL_OFFSET_ALIGN. */
3918 if (DECL_P (object))
3919 align = DECL_ALIGN (object);
3920 else
3921 align = TYPE_ALIGN (type);
3922
f301837e
EB
3923 /* Do a block move either if the size is so small as to make
3924 each individual move a sub-unit move on average, or if it
3925 is so large as to make individual moves inefficient. */
329ad380
JJ
3926 if (size > 0
3927 && num_nonzero_elements > 1
f301837e
EB
3928 && (size < num_nonzero_elements
3929 || !can_move_by_pieces (size, align)))
26d44ae2 3930 {
ffed8a01
AH
3931 if (notify_temp_creation)
3932 return GS_ERROR;
3933
46314d3e
EB
3934 walk_tree (&ctor, force_labels_r, NULL, NULL);
3935 ctor = tree_output_constant_def (ctor);
3936 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
3937 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
3938 TREE_OPERAND (*expr_p, 1) = ctor;
57d1dd87
RH
3939
3940 /* This is no longer an assignment of a CONSTRUCTOR, but
3941 we still may have processing to do on the LHS. So
3942 pretend we didn't do anything here to let that happen. */
3943 return GS_UNHANDLED;
26d44ae2
RH
3944 }
3945 }
3946
558af7ca
EB
3947	/* If the target is volatile, there are non-zero elements, and there is
3948	   more than one field to assign, initialize the target from a temporary. */
61c7cbf8
RG
3949 if (TREE_THIS_VOLATILE (object)
3950 && !TREE_ADDRESSABLE (type)
558af7ca 3951 && num_nonzero_elements > 0
9771b263 3952 && vec_safe_length (elts) > 1)
61c7cbf8 3953 {
b731b390 3954 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
61c7cbf8
RG
3955 TREE_OPERAND (*expr_p, 0) = temp;
3956 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
3957 *expr_p,
3958 build2 (MODIFY_EXPR, void_type_node,
3959 object, temp));
3960 return GS_OK;
3961 }
3962
ffed8a01
AH
3963 if (notify_temp_creation)
3964 return GS_OK;
3965
675c873b
EB
3966 /* If there are nonzero elements and if needed, pre-evaluate to capture
3967 elements overlapping with the lhs into temporaries. We must do this
3968 before clearing to fetch the values before they are zeroed-out. */
3969 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
85d89e76
OH
3970 {
3971 preeval_data.lhs_base_decl = get_base_address (object);
3972 if (!DECL_P (preeval_data.lhs_base_decl))
3973 preeval_data.lhs_base_decl = NULL;
3974 preeval_data.lhs_alias_set = get_alias_set (object);
3975
726a989a 3976 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
85d89e76
OH
3977 pre_p, post_p, &preeval_data);
3978 }
3979
26d44ae2
RH
3980 if (cleared)
3981 {
3982 /* Zap the CONSTRUCTOR element list, which simplifies this case.
3983 Note that we still have to gimplify, in order to handle the
57d1dd87 3984 case of variable sized types. Avoid shared tree structures. */
4038c495 3985 CONSTRUCTOR_ELTS (ctor) = NULL;
726a989a 3986 TREE_SIDE_EFFECTS (ctor) = 0;
57d1dd87 3987 object = unshare_expr (object);
726a989a 3988 gimplify_stmt (expr_p, pre_p);
26d44ae2
RH
3989 }
3990
6fa91b48
SB
3991 /* If we have not block cleared the object, or if there are nonzero
3992 elements in the constructor, add assignments to the individual
3993 scalar fields of the object. */
3994 if (!cleared || num_nonzero_elements > 0)
85d89e76 3995 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
26d44ae2
RH
3996
3997 *expr_p = NULL_TREE;
3998 }
3999 break;
4000
4001 case COMPLEX_TYPE:
4002 {
4003 tree r, i;
4004
ffed8a01
AH
4005 if (notify_temp_creation)
4006 return GS_OK;
4007
26d44ae2 4008 /* Extract the real and imaginary parts out of the ctor. */
9771b263
DN
4009 gcc_assert (elts->length () == 2);
4010 r = (*elts)[0].value;
4011 i = (*elts)[1].value;
26d44ae2
RH
4012 if (r == NULL || i == NULL)
4013 {
e8160c9a 4014 tree zero = build_zero_cst (TREE_TYPE (type));
26d44ae2
RH
4015 if (r == NULL)
4016 r = zero;
4017 if (i == NULL)
4018 i = zero;
4019 }
4020
4021 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
4022 represent creation of a complex value. */
4023 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
4024 {
4025 ctor = build_complex (type, r, i);
4026 TREE_OPERAND (*expr_p, 1) = ctor;
4027 }
4028 else
4029 {
b4257cfc 4030 ctor = build2 (COMPLEX_EXPR, type, r, i);
26d44ae2 4031 TREE_OPERAND (*expr_p, 1) = ctor;
726a989a
RB
4032 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
4033 pre_p,
4034 post_p,
17ad5b5e
RH
4035 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
4036 fb_rvalue);
26d44ae2
RH
4037 }
4038 }
4039 break;
506e2710 4040
26d44ae2 4041 case VECTOR_TYPE:
4038c495
GB
4042 {
4043 unsigned HOST_WIDE_INT ix;
4044 constructor_elt *ce;
e89be13b 4045
ffed8a01
AH
4046 if (notify_temp_creation)
4047 return GS_OK;
4048
4038c495
GB
4049 /* Go ahead and simplify constant constructors to VECTOR_CST. */
4050 if (TREE_CONSTANT (ctor))
4051 {
4052 bool constant_p = true;
4053 tree value;
4054
4055 /* Even when ctor is constant, it might contain non-*_CST
9f1da821
RS
4056 elements, such as addresses or trapping values like
4057 1.0/0.0 - 1.0/0.0. Such expressions don't belong
4058 in VECTOR_CST nodes. */
4038c495
GB
4059 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
4060 if (!CONSTANT_CLASS_P (value))
4061 {
4062 constant_p = false;
4063 break;
4064 }
e89be13b 4065
4038c495
GB
4066 if (constant_p)
4067 {
4068 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
4069 break;
4070 }
84816907 4071
9f1da821 4072 TREE_CONSTANT (ctor) = 0;
4038c495 4073 }
e89be13b 4074
4038c495
GB
4075 /* Vector types use CONSTRUCTOR all the way through gimple
4076 compilation as a general initializer. */
9771b263 4077 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
4038c495
GB
4078 {
4079 enum gimplify_status tret;
726a989a
RB
4080 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
4081 fb_rvalue);
4038c495
GB
4082 if (tret == GS_ERROR)
4083 ret = GS_ERROR;
4084 }
726a989a
RB
4085 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
4086 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
4038c495 4087 }
26d44ae2 4088 break;
6de9cd9a 4089
26d44ae2
RH
4090 default:
4091 /* So how did we get a CONSTRUCTOR for a scalar type? */
282899df 4092 gcc_unreachable ();
26d44ae2 4093 }
6de9cd9a 4094
26d44ae2
RH
4095 if (ret == GS_ERROR)
4096 return GS_ERROR;
4097 else if (want_value)
4098 {
26d44ae2
RH
4099 *expr_p = object;
4100 return GS_OK;
6de9cd9a 4101 }
26d44ae2 4102 else
726a989a
RB
4103 {
4104 /* If we have gimplified both sides of the initializer but have
4105 not emitted an assignment, do so now. */
4106 if (*expr_p)
4107 {
4108 tree lhs = TREE_OPERAND (*expr_p, 0);
4109 tree rhs = TREE_OPERAND (*expr_p, 1);
538dd0b7 4110 gassign *init = gimple_build_assign (lhs, rhs);
726a989a
RB
4111 gimplify_seq_add_stmt (pre_p, init);
4112 *expr_p = NULL;
4113 }
4114
4115 return GS_ALL_DONE;
4116 }
26d44ae2 4117}
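
/* Rough sketch of the aggregate case (hypothetical input): a mostly-zero
   initializer such as

     int a[100] = { [0] = 1, [99] = 1 };

   is typically emitted as a block clear followed by the nonzero stores,

     a = {};
     a[0] = 1;
     a[99] = 1;

   whereas a large constant initializer may instead be promoted to a static
   object (or dropped to memory via a constant def) and block-copied, as the
   code above decides based on the element counts and CLEAR_RATIO.  */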
6de9cd9a 4118
de4af523
JJ
4119/* Given a pointer value OP0, return a simplified version of an
4120 indirection through OP0, or NULL_TREE if no simplification is
4121 possible. This may only be applied to a rhs of an expression.
4122 Note that the resulting type may be different from the type pointed
4123 to in the sense that it is still compatible from the langhooks
4124 point of view. */
4125
4126static tree
4127gimple_fold_indirect_ref_rhs (tree t)
4128{
4129 return gimple_fold_indirect_ref (t);
4130}
4131
4caa08da
AH
4132/* Subroutine of gimplify_modify_expr to do simplifications of
4133 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
4134 something changes. */
6de9cd9a 4135
26d44ae2 4136static enum gimplify_status
726a989a
RB
4137gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
4138 gimple_seq *pre_p, gimple_seq *post_p,
4139 bool want_value)
26d44ae2 4140{
6d729f28
JM
4141 enum gimplify_status ret = GS_UNHANDLED;
4142 bool changed;
6de9cd9a 4143
6d729f28
JM
4144 do
4145 {
4146 changed = false;
4147 switch (TREE_CODE (*from_p))
4148 {
4149 case VAR_DECL:
4150 /* If we're assigning from a read-only variable initialized with
4151 a constructor, do the direct assignment from the constructor,
4152 but only if neither source nor target are volatile since this
4153 latter assignment might end up being done on a per-field basis. */
4154 if (DECL_INITIAL (*from_p)
4155 && TREE_READONLY (*from_p)
4156 && !TREE_THIS_VOLATILE (*from_p)
4157 && !TREE_THIS_VOLATILE (*to_p)
4158 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
4159 {
4160 tree old_from = *from_p;
4161 enum gimplify_status subret;
4162
4163 /* Move the constructor into the RHS. */
4164 *from_p = unshare_expr (DECL_INITIAL (*from_p));
4165
4166 /* Let's see if gimplify_init_constructor will need to put
4167 it in memory. */
4168 subret = gimplify_init_constructor (expr_p, NULL, NULL,
4169 false, true);
4170 if (subret == GS_ERROR)
4171 {
4172 /* If so, revert the change. */
4173 *from_p = old_from;
4174 }
4175 else
4176 {
4177 ret = GS_OK;
4178 changed = true;
4179 }
4180 }
4181 break;
4182 case INDIRECT_REF:
4caa08da 4183 {
6d729f28 4184 /* If we have code like
ffed8a01 4185
6d729f28 4186 *(const A*)(A*)&x
ffed8a01 4187
6d729f28
JM
4188 where the type of "x" is a (possibly cv-qualified variant
4189 of "A"), treat the entire expression as identical to "x".
4190 This kind of code arises in C++ when an object is bound
4191 to a const reference, and if "x" is a TARGET_EXPR we want
4192 to take advantage of the optimization below. */
06baaba3 4193 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
6d729f28
JM
4194 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
4195 if (t)
ffed8a01 4196 {
06baaba3
RG
4197 if (TREE_THIS_VOLATILE (t) != volatile_p)
4198 {
4199 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
4200 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
4201 build_fold_addr_expr (t));
4202 if (REFERENCE_CLASS_P (t))
4203 TREE_THIS_VOLATILE (t) = volatile_p;
4204 }
6d729f28
JM
4205 *from_p = t;
4206 ret = GS_OK;
4207 changed = true;
ffed8a01 4208 }
6d729f28
JM
4209 break;
4210 }
4211
4212 case TARGET_EXPR:
4213 {
4214 /* If we are initializing something from a TARGET_EXPR, strip the
4215 TARGET_EXPR and initialize it directly, if possible. This can't
4216 be done if the initializer is void, since that implies that the
4217 temporary is set in some non-trivial way.
4218
4219 ??? What about code that pulls out the temp and uses it
4220 elsewhere? I think that such code never uses the TARGET_EXPR as
4221 an initializer. If I'm wrong, we'll die because the temp won't
4222 have any RTL. In that case, I guess we'll need to replace
4223 references somehow. */
4224 tree init = TARGET_EXPR_INITIAL (*from_p);
4225
4226 if (init
4227 && !VOID_TYPE_P (TREE_TYPE (init)))
ffed8a01 4228 {
6d729f28 4229 *from_p = init;
ffed8a01 4230 ret = GS_OK;
6d729f28 4231 changed = true;
ffed8a01 4232 }
4caa08da 4233 }
6d729f28 4234 break;
f98625f6 4235
6d729f28
JM
4236 case COMPOUND_EXPR:
4237 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
4238 caught. */
4239 gimplify_compound_expr (from_p, pre_p, true);
4240 ret = GS_OK;
4241 changed = true;
4242 break;
f98625f6 4243
6d729f28 4244 case CONSTRUCTOR:
ce3beba3
JM
4245 /* If we already made some changes, let the front end have a
4246 crack at this before we break it down. */
4247 if (ret != GS_UNHANDLED)
4248 break;
6d729f28
JM
4249 /* If we're initializing from a CONSTRUCTOR, break this into
4250 individual MODIFY_EXPRs. */
4251 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
4252 false);
4253
4254 case COND_EXPR:
4255 /* If we're assigning to a non-register type, push the assignment
4256 down into the branches. This is mandatory for ADDRESSABLE types,
4257 since we cannot generate temporaries for such, but it saves a
4258 copy in other cases as well. */
4259 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
f98625f6 4260 {
6d729f28
JM
4261 /* This code should mirror the code in gimplify_cond_expr. */
4262 enum tree_code code = TREE_CODE (*expr_p);
4263 tree cond = *from_p;
4264 tree result = *to_p;
4265
4266 ret = gimplify_expr (&result, pre_p, post_p,
4267 is_gimple_lvalue, fb_lvalue);
4268 if (ret != GS_ERROR)
4269 ret = GS_OK;
4270
4271 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
4272 TREE_OPERAND (cond, 1)
4273 = build2 (code, void_type_node, result,
4274 TREE_OPERAND (cond, 1));
4275 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
4276 TREE_OPERAND (cond, 2)
4277 = build2 (code, void_type_node, unshare_expr (result),
4278 TREE_OPERAND (cond, 2));
4279
4280 TREE_TYPE (cond) = void_type_node;
4281 recalculate_side_effects (cond);
4282
4283 if (want_value)
4284 {
4285 gimplify_and_add (cond, pre_p);
4286 *expr_p = unshare_expr (result);
4287 }
4288 else
4289 *expr_p = cond;
4290 return ret;
f98625f6 4291 }
f98625f6 4292 break;
f98625f6 4293
6d729f28
JM
4294 case CALL_EXPR:
4295 /* For calls that return in memory, give *to_p as the CALL_EXPR's
4296 return slot so that we don't generate a temporary. */
4297 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4298 && aggregate_value_p (*from_p, *from_p))
26d44ae2 4299 {
6d729f28
JM
4300 bool use_target;
4301
4302 if (!(rhs_predicate_for (*to_p))(*from_p))
4303 /* If we need a temporary, *to_p isn't accurate. */
4304 use_target = false;
ad19c4be 4305 /* It's OK to use the return slot directly unless it's an NRV. */
6d729f28
JM
4306 else if (TREE_CODE (*to_p) == RESULT_DECL
4307 && DECL_NAME (*to_p) == NULL_TREE
4308 && needs_to_live_in_memory (*to_p))
6d729f28
JM
4309 use_target = true;
4310 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4311 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4312 /* Don't force regs into memory. */
4313 use_target = false;
4314 else if (TREE_CODE (*expr_p) == INIT_EXPR)
4315 /* It's OK to use the target directly if it's being
4316 initialized. */
4317 use_target = true;
aabb90e5
RG
4318 else if (variably_modified_type_p (TREE_TYPE (*to_p), NULL_TREE))
4319 /* Always use the target and thus RSO for variable-sized types.
4320 GIMPLE cannot deal with a variable-sized assignment
4321 embedded in a call statement. */
4322 use_target = true;
345ae177
AH
4323 else if (TREE_CODE (*to_p) != SSA_NAME
4324 && (!is_gimple_variable (*to_p)
4325 || needs_to_live_in_memory (*to_p)))
6d729f28
JM
4326 /* Don't use the original target if it's already addressable;
4327 if its address escapes, and the called function uses the
4328 NRV optimization, a conforming program could see *to_p
4329 change before the called function returns; see c++/19317.
4330 When optimizing, the return_slot pass marks more functions
4331 as safe after we have escape info. */
4332 use_target = false;
4333 else
4334 use_target = true;
4335
4336 if (use_target)
4337 {
4338 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4339 mark_addressable (*to_p);
4340 }
26d44ae2 4341 }
6d729f28 4342 break;
6de9cd9a 4343
6d729f28
JM
4344 case WITH_SIZE_EXPR:
4345 /* Likewise for calls that return an aggregate of non-constant size,
4346 since we would not be able to generate a temporary at all. */
4347 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
4348 {
4349 *from_p = TREE_OPERAND (*from_p, 0);
ebad5233
JM
4350 /* We don't change ret in this case because the
4351 WITH_SIZE_EXPR might have been added in
4352 gimplify_modify_expr, so returning GS_OK would lead to an
4353 infinite loop. */
6d729f28
JM
4354 changed = true;
4355 }
4356 break;
6de9cd9a 4357
6d729f28
JM
4358 /* If we're initializing from a container, push the initialization
4359 inside it. */
4360 case CLEANUP_POINT_EXPR:
4361 case BIND_EXPR:
4362 case STATEMENT_LIST:
26d44ae2 4363 {
6d729f28
JM
4364 tree wrap = *from_p;
4365 tree t;
dae7ec87 4366
6d729f28
JM
4367 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4368 fb_lvalue);
dae7ec87
JM
4369 if (ret != GS_ERROR)
4370 ret = GS_OK;
4371
6d729f28
JM
4372 t = voidify_wrapper_expr (wrap, *expr_p);
4373 gcc_assert (t == *expr_p);
dae7ec87
JM
4374
4375 if (want_value)
4376 {
6d729f28
JM
4377 gimplify_and_add (wrap, pre_p);
4378 *expr_p = unshare_expr (*to_p);
dae7ec87
JM
4379 }
4380 else
6d729f28
JM
4381 *expr_p = wrap;
4382 return GS_OK;
26d44ae2 4383 }
6de9cd9a 4384
6d729f28 4385 case COMPOUND_LITERAL_EXPR:
fa47911c 4386 {
6d729f28
JM
4387 tree complit = TREE_OPERAND (*expr_p, 1);
4388 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
4389 tree decl = DECL_EXPR_DECL (decl_s);
4390 tree init = DECL_INITIAL (decl);
4391
4392 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
4393 into struct T x = { 0, 1, 2 } if the address of the
4394 compound literal has never been taken. */
4395 if (!TREE_ADDRESSABLE (complit)
4396 && !TREE_ADDRESSABLE (decl)
4397 && init)
fa47911c 4398 {
6d729f28
JM
4399 *expr_p = copy_node (*expr_p);
4400 TREE_OPERAND (*expr_p, 1) = init;
4401 return GS_OK;
fa47911c
JM
4402 }
4403 }
4404
6d729f28
JM
4405 default:
4406 break;
2ec5deb5 4407 }
6d729f28
JM
4408 }
4409 while (changed);
6de9cd9a 4410
6de9cd9a
DN
4411 return ret;
4412}
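
/* For illustration of the COND_EXPR case above (names invented): an
   assignment of an aggregate conditional,

     x = c ? f () : g ();   -- x has a non-register type

   has the store pushed down into both arms, becoming roughly

     if (c) x = f (); else x = g ();

   which avoids materializing a temporary for the aggregate value.  */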
4413
216820a4
RG
4414
4415/* Return true if T looks like a valid GIMPLE statement. */
4416
4417static bool
4418is_gimple_stmt (tree t)
4419{
4420 const enum tree_code code = TREE_CODE (t);
4421
4422 switch (code)
4423 {
4424 case NOP_EXPR:
4425 /* The only valid NOP_EXPR is the empty statement. */
4426 return IS_EMPTY_STMT (t);
4427
4428 case BIND_EXPR:
4429 case COND_EXPR:
4430 /* These are only valid if they're void. */
4431 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
4432
4433 case SWITCH_EXPR:
4434 case GOTO_EXPR:
4435 case RETURN_EXPR:
4436 case LABEL_EXPR:
4437 case CASE_LABEL_EXPR:
4438 case TRY_CATCH_EXPR:
4439 case TRY_FINALLY_EXPR:
4440 case EH_FILTER_EXPR:
4441 case CATCH_EXPR:
4442 case ASM_EXPR:
4443 case STATEMENT_LIST:
4444 case OMP_PARALLEL:
4445 case OMP_FOR:
74bf76ed 4446 case OMP_SIMD:
c02065fc 4447 case CILK_SIMD:
acf0174b 4448 case OMP_DISTRIBUTE:
216820a4
RG
4449 case OMP_SECTIONS:
4450 case OMP_SECTION:
4451 case OMP_SINGLE:
4452 case OMP_MASTER:
acf0174b 4453 case OMP_TASKGROUP:
216820a4
RG
4454 case OMP_ORDERED:
4455 case OMP_CRITICAL:
4456 case OMP_TASK:
4457 /* These are always void. */
4458 return true;
4459
4460 case CALL_EXPR:
4461 case MODIFY_EXPR:
4462 case PREDICT_EXPR:
4463 /* These are valid regardless of their type. */
4464 return true;
4465
4466 default:
4467 return false;
4468 }
4469}
4470
4471
d9c2d296
AP
4472/* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4473 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
7b7e6ecd
EB
4474 DECL_GIMPLE_REG_P set.
4475
4476 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4477 other, unmodified part of the complex object just before the total store.
4478 As a consequence, if the object is still uninitialized, an undefined value
4479 will be loaded into a register, which may result in a spurious exception
4480 if the register is floating-point and the value happens to be a signaling
4481 NaN for example. Then the fully-fledged complex operations lowering pass
4482 followed by a DCE pass are necessary in order to fix things up. */
d9c2d296
AP
4483
4484static enum gimplify_status
726a989a
RB
4485gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4486 bool want_value)
d9c2d296
AP
4487{
4488 enum tree_code code, ocode;
4489 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4490
726a989a
RB
4491 lhs = TREE_OPERAND (*expr_p, 0);
4492 rhs = TREE_OPERAND (*expr_p, 1);
d9c2d296
AP
4493 code = TREE_CODE (lhs);
4494 lhs = TREE_OPERAND (lhs, 0);
4495
4496 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4497 other = build1 (ocode, TREE_TYPE (rhs), lhs);
8d2b0410 4498 TREE_NO_WARNING (other) = 1;
d9c2d296
AP
4499 other = get_formal_tmp_var (other, pre_p);
4500
4501 realpart = code == REALPART_EXPR ? rhs : other;
4502 imagpart = code == REALPART_EXPR ? other : rhs;
4503
4504 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4505 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4506 else
4507 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4508
726a989a
RB
4509 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4510 *expr_p = (want_value) ? rhs : NULL_TREE;
d9c2d296
AP
4511
4512 return GS_ALL_DONE;
4513}
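
/* Rough sketch: a partial store to a complex variable that lives in a
   register,

     __real__ c = x;

   is promoted by the function above to a total store that reuses the
   unmodified imaginary part,

     T = __imag__ c;
     c = COMPLEX_EXPR <x, T>;

   so that c can keep being treated as a real (non-virtual) operand.  */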
4514
206048bd 4515/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
6de9cd9a
DN
4516
4517 modify_expr
4518 : varname '=' rhs
4519 | '*' ID '=' rhs
4520
4521 PRE_P points to the list where side effects that must happen before
4522 *EXPR_P should be stored.
4523
4524 POST_P points to the list where side effects that must happen after
4525 *EXPR_P should be stored.
4526
4527 WANT_VALUE is nonzero iff we want to use the value of this expression
4528 in another expression. */
4529
4530static enum gimplify_status
726a989a
RB
4531gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4532 bool want_value)
6de9cd9a 4533{
726a989a
RB
4534 tree *from_p = &TREE_OPERAND (*expr_p, 1);
4535 tree *to_p = &TREE_OPERAND (*expr_p, 0);
44de5aeb 4536 enum gimplify_status ret = GS_UNHANDLED;
726a989a 4537 gimple assign;
db3927fb 4538 location_t loc = EXPR_LOCATION (*expr_p);
6da8be89 4539 gimple_stmt_iterator gsi;
6de9cd9a 4540
282899df
NS
4541 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4542 || TREE_CODE (*expr_p) == INIT_EXPR);
6de9cd9a 4543
d0ad58f9
JM
4544 /* Trying to simplify a clobber using normal logic doesn't work,
4545 so handle it here. */
4546 if (TREE_CLOBBER_P (*from_p))
4547 {
5d751b0c
JJ
4548 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4549 if (ret == GS_ERROR)
4550 return ret;
4551 gcc_assert (!want_value
4552 && (TREE_CODE (*to_p) == VAR_DECL
4553 || TREE_CODE (*to_p) == MEM_REF));
d0ad58f9
JM
4554 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
4555 *expr_p = NULL;
4556 return GS_ALL_DONE;
4557 }
4558
1b24a790
RG
4559 /* Insert pointer conversions required by the middle-end that are not
4560 required by the frontend. This fixes middle-end type checking for
4561 for example gcc.dg/redecl-6.c. */
daad0278 4562 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
1b24a790
RG
4563 {
4564 STRIP_USELESS_TYPE_CONVERSION (*from_p);
4565 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
db3927fb 4566 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
1b24a790
RG
4567 }
4568
83d7e8f0
JM
4569 /* See if any simplifications can be done based on what the RHS is. */
4570 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4571 want_value);
4572 if (ret != GS_UNHANDLED)
4573 return ret;
4574
4575 /* For zero sized types only gimplify the left hand side and right hand
4576 side as statements and throw away the assignment. Do this after
4577 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4578 types properly. */
753b34d7 4579 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
412f8986 4580 {
726a989a
RB
4581 gimplify_stmt (from_p, pre_p);
4582 gimplify_stmt (to_p, pre_p);
412f8986
AP
4583 *expr_p = NULL_TREE;
4584 return GS_ALL_DONE;
4585 }
6de9cd9a 4586
d25cee4d
RH
4587 /* If the value being copied is of variable width, compute the length
4588 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
4589 before gimplifying any of the operands so that we can resolve any
4590 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
4591 the size of the expression to be copied, not of the destination, so
726a989a 4592 that is what we must do here. */
d25cee4d 4593 maybe_with_size_expr (from_p);
6de9cd9a 4594
44de5aeb
RK
4595 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4596 if (ret == GS_ERROR)
4597 return ret;
6de9cd9a 4598
726a989a
RB
4599 /* As a special case, we have to temporarily allow for assignments
4600 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
4601 a toplevel statement, when gimplifying the GENERIC expression
4602 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4603 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4604
4605 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
4606 prevent gimplify_expr from trying to create a new temporary for
4607 foo's LHS, we tell it that it should only gimplify until it
4608 reaches the CALL_EXPR. On return from gimplify_expr, the newly
4609 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4610 and all we need to do here is set 'a' to be its LHS. */
4611 ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
4612 fb_rvalue);
6de9cd9a
DN
4613 if (ret == GS_ERROR)
4614 return ret;
4615
44de5aeb
RK
4616 /* Now see if the above changed *from_p to something we handle specially. */
4617 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4618 want_value);
6de9cd9a
DN
4619 if (ret != GS_UNHANDLED)
4620 return ret;
4621
d25cee4d
RH
4622 /* If we've got a variable sized assignment between two lvalues (i.e. does
4623 not involve a call), then we can make things a bit more straightforward
4624 by converting the assignment to memcpy or memset. */
4625 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4626 {
4627 tree from = TREE_OPERAND (*from_p, 0);
4628 tree size = TREE_OPERAND (*from_p, 1);
4629
4630 if (TREE_CODE (from) == CONSTRUCTOR)
726a989a
RB
4631 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4632
e847cc68 4633 if (is_gimple_addressable (from))
d25cee4d
RH
4634 {
4635 *from_p = from;
726a989a
RB
4636 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4637 pre_p);
d25cee4d
RH
4638 }
4639 }
4640
e41d82f5
RH
4641 /* Transform partial stores to non-addressable complex variables into
4642 total stores. This allows us to use real instead of virtual operands
4643 for these variables, which improves optimization. */
4644 if ((TREE_CODE (*to_p) == REALPART_EXPR
4645 || TREE_CODE (*to_p) == IMAGPART_EXPR)
4646 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4647 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4648
f173837a
EB
4649 /* Try to alleviate the effects of the gimplification creating artificial
4650 temporaries (see for example is_gimple_reg_rhs) on the debug info. */
4651 if (!gimplify_ctxp->into_ssa
f2896bc9 4652 && TREE_CODE (*from_p) == VAR_DECL
726a989a
RB
4653 && DECL_IGNORED_P (*from_p)
4654 && DECL_P (*to_p)
4655 && !DECL_IGNORED_P (*to_p))
f173837a
EB
4656 {
4657 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4658 DECL_NAME (*from_p)
4659 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
839b422f 4660 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
f173837a 4661 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
726a989a
RB
4662 }
4663
8f0fe813
NS
4664 if (want_value && TREE_THIS_VOLATILE (*to_p))
4665 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
4666
726a989a
RB
4667 if (TREE_CODE (*from_p) == CALL_EXPR)
4668 {
4669 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4670 instead of a GIMPLE_ASSIGN. */
538dd0b7 4671 gcall *call_stmt;
1304953e
JJ
4672 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
4673 {
4674 /* Gimplify internal functions created in the FEs. */
4675 int nargs = call_expr_nargs (*from_p), i;
4676 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
4677 auto_vec<tree> vargs (nargs);
4678
4679 for (i = 0; i < nargs; i++)
4680 {
4681 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
4682 EXPR_LOCATION (*from_p));
4683 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
4684 }
538dd0b7
DM
4685 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
4686 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
1304953e 4687 }
ed9c79e1
JJ
4688 else
4689 {
1304953e
JJ
4690 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
4691 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
4692 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
4693 tree fndecl = get_callee_fndecl (*from_p);
4694 if (fndecl
4695 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4696 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
4697 && call_expr_nargs (*from_p) == 3)
4698 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
4699 CALL_EXPR_ARG (*from_p, 0),
4700 CALL_EXPR_ARG (*from_p, 1),
4701 CALL_EXPR_ARG (*from_p, 2));
4702 else
4703 {
4704 call_stmt = gimple_build_call_from_tree (*from_p);
4705 gimple_call_set_fntype (call_stmt, TREE_TYPE (fnptrtype));
4706 }
4707 }
4708 notice_special_calls (call_stmt);
4709 if (!gimple_call_noreturn_p (call_stmt))
4710 gimple_call_set_lhs (call_stmt, *to_p);
4711 assign = call_stmt;
4712 }
4713 else
4714 {
4715 assign = gimple_build_assign (*to_p, *from_p);
4716 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4717 }
4718
4719 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
4720 {
4721 /* We should have got an SSA name from the start. */
4722 gcc_assert (TREE_CODE (*to_p) == SSA_NAME);
4723 }
4724
4725 gimplify_seq_add_stmt (pre_p, assign);
4726 gsi = gsi_last (*pre_p);
4727 maybe_fold_stmt (&gsi);
4728
4729 if (want_value)
4730 {
4731 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
4732 return GS_OK;
4733 }
4734 else
4735 *expr_p = NULL;
4736
4737 return GS_ALL_DONE;
4738}
4739
4740/* Gimplify a comparison between two variable-sized objects. Do this
4741 with a call to BUILT_IN_MEMCMP. */
4742
4743static enum gimplify_status
4744gimplify_variable_sized_compare (tree *expr_p)
4745{
4746 location_t loc = EXPR_LOCATION (*expr_p);
4747 tree op0 = TREE_OPERAND (*expr_p, 0);
4748 tree op1 = TREE_OPERAND (*expr_p, 1);
4749 tree t, arg, dest, src, expr;
4750
4751 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4752 arg = unshare_expr (arg);
4753 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4754 src = build_fold_addr_expr_loc (loc, op1);
4755 dest = build_fold_addr_expr_loc (loc, op0);
4756 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
4757 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
4758
4759 expr
4760 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4761 SET_EXPR_LOCATION (expr, loc);
4762 *expr_p = expr;
4763
4764 return GS_OK;
4765}
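/* Illustrative note (added; approximate): an equality test between two
   variable-sized objects `x' and `y', e.g. of type `char[n]',

     if (x == y) ...

   is rewritten by the function above into

     if (__builtin_memcmp (&x, &y, n) == 0) ...

   keeping the original comparison code for the test against zero.  */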
4766
4767/* Gimplify a comparison between two aggregate objects of integral scalar
4768 mode as a comparison between the bitwise equivalent scalar values. */
4769
4770static enum gimplify_status
4771gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4772{
4773 location_t loc = EXPR_LOCATION (*expr_p);
4774 tree op0 = TREE_OPERAND (*expr_p, 0);
4775 tree op1 = TREE_OPERAND (*expr_p, 1);
4776
4777 tree type = TREE_TYPE (op0);
4778 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4779
4780 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4781 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
4782
4783 *expr_p
4784 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4785
4786 return GS_OK;
4787}
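/* Illustrative note (added; approximate): two aggregates whose TYPE_MODE is
   an integral scalar mode, say two 8-byte structs `a' and `b' in DImode,
   are compared as their bitwise-equivalent scalar values, roughly

     VIEW_CONVERT_EXPR<uint64_t>(a) == VIEW_CONVERT_EXPR<uint64_t>(b)

   where the exact integer type is whatever the front end's type_for_mode
   hook returns for that mode.  */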
4788
4789/* Gimplify an expression sequence. This function gimplifies each
4790 expression and rewrites the original expression with the last
4791 expression of the sequence in GIMPLE form.
4792
4793 PRE_P points to the list where the side effects for all the
4794 expressions in the sequence will be emitted.
4795
4796 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
4797
4798static enum gimplify_status
4799 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4800{
4801 tree t = *expr_p;
4802
4803 do
4804 {
4805 tree *sub_p = &TREE_OPERAND (t, 0);
4806
4807 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4808 gimplify_compound_expr (sub_p, pre_p, false);
4809 else
4810 gimplify_stmt (sub_p, pre_p);
4811
4812 t = TREE_OPERAND (t, 1);
4813 }
4814 while (TREE_CODE (t) == COMPOUND_EXPR);
4815
4816 *expr_p = t;
4817 if (want_value)
4818 return GS_OK;
4819 else
4820 {
4821 gimplify_stmt (expr_p, pre_p);
4822 return GS_ALL_DONE;
4823 }
4824}
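/* Illustrative note (added; approximate): a comma expression such as

     x = (a = 1, b = 2, a + b);

   is handled by emitting every operand but the last as a statement,

     a = 1;
     b = 2;

   and leaving only `a + b' in *EXPR_P for the enclosing assignment.  */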
4825
4826/* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
4827 gimplify. After gimplification, EXPR_P will point to a new temporary
4828 that holds the original value of the SAVE_EXPR node.
4829
4830 PRE_P points to the list where side effects that must happen before
4831 *EXPR_P should be stored. */
4832
4833static enum gimplify_status
4834 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4835{
4836 enum gimplify_status ret = GS_ALL_DONE;
4837 tree val;
4838
4839 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4840 val = TREE_OPERAND (*expr_p, 0);
4841
4842 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
4843 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
4844 {
4845 /* The operand may be a void-valued expression such as SAVE_EXPRs
4846 generated by the Java frontend for class initialization. It is
4847 being executed only for its side-effects. */
4848 if (TREE_TYPE (val) == void_type_node)
4849 {
4850 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4851 is_gimple_stmt, fb_none);
4852 val = NULL;
4853 }
4854 else
4855 val = get_initialized_tmp_var (val, pre_p, post_p);
4856
4857 TREE_OPERAND (*expr_p, 0) = val;
4858 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
4859 }
4860
4861 *expr_p = val;
4862
4863 return ret;
4864}
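/* Illustrative note (added; approximate, the temporary name is made up):
   the first gimplification of a node such as SAVE_EXPR <n * 4> emits

     D.1234 = n * 4;

   on PRE_P, stores D.1234 back into the SAVE_EXPR and marks it resolved,
   so every later occurrence of the same node reuses D.1234 instead of
   re-evaluating the operand.  */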
4865
4866/* Rewrite the ADDR_EXPR node pointed to by EXPR_P
4867
4868 unary_expr
4869 : ...
4870 | '&' varname
4871 ...
4872
4873 PRE_P points to the list where side effects that must happen before
4874 *EXPR_P should be stored.
4875
4876 POST_P points to the list where side effects that must happen after
4877 *EXPR_P should be stored. */
4878
4879static enum gimplify_status
4880 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4881{
4882 tree expr = *expr_p;
4883 tree op0 = TREE_OPERAND (expr, 0);
4884 enum gimplify_status ret;
4885 location_t loc = EXPR_LOCATION (*expr_p);
4886
4887 switch (TREE_CODE (op0))
4888 {
4889 case INDIRECT_REF:
4890 do_indirect_ref:
4891 /* Check if we are dealing with an expression of the form '&*ptr'.
4892 While the front end folds away '&*ptr' into 'ptr', these
4893 expressions may be generated internally by the compiler (e.g.,
4894 builtins like __builtin_va_end). */
4895 /* Caution: the silent array decomposition semantics we allow for
4896 ADDR_EXPR means we can't always discard the pair. */
4897 /* Gimplification of the ADDR_EXPR operand may drop
4898 cv-qualification conversions, so make sure we add them if
4899 needed. */
4900 {
4901 tree op00 = TREE_OPERAND (op0, 0);
4902 tree t_expr = TREE_TYPE (expr);
4903 tree t_op00 = TREE_TYPE (op00);
4904
4905 if (!useless_type_conversion_p (t_expr, t_op00))
4906 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
4907 *expr_p = op00;
4908 ret = GS_OK;
4909 }
4910 break;
4911
4912 case VIEW_CONVERT_EXPR:
4913 /* Take the address of our operand and then convert it to the type of
4914 this ADDR_EXPR.
4915
4916 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
4917 all clear. The impact of this transformation is even less clear. */
4918
4919 /* If the operand is a useless conversion, look through it. Doing so
4920 guarantees that the ADDR_EXPR and its operand will remain of the
4921 same type. */
4922 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
4923 op0 = TREE_OPERAND (op0, 0);
4924
4925 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
4926 build_fold_addr_expr_loc (loc,
4927 TREE_OPERAND (op0, 0)));
4928 ret = GS_OK;
4929 break;
4930
4931 default:
4932 /* We use fb_either here because the C frontend sometimes takes
4933 the address of a call that returns a struct; see
4934 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
4935 the implied temporary explicit. */
4936
4937 /* Make the operand addressable. */
4938 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
4939 is_gimple_addressable, fb_either);
4940 if (ret == GS_ERROR)
4941 break;
4942
4943 /* Then mark it. Beware that it may not be possible to do so directly
4944 if a temporary has been created by the gimplification. */
4945 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
4946
4947 op0 = TREE_OPERAND (expr, 0);
4948
4949 /* For various reasons, the gimplification of the expression
4950 may have made a new INDIRECT_REF. */
4951 if (TREE_CODE (op0) == INDIRECT_REF)
4952 goto do_indirect_ref;
4953
4954 mark_addressable (TREE_OPERAND (expr, 0));
4955
4956 /* The FEs may end up building ADDR_EXPRs early on a decl with
4957 an incomplete type. Re-build ADDR_EXPRs in canonical form
4958 here. */
4959 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
4960 *expr_p = build_fold_addr_expr (op0);
4961
4962 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
4963 recompute_tree_invariant_for_addr_expr (*expr_p);
4964
4965 /* If we re-built the ADDR_EXPR add a conversion to the original type
4966 if required. */
4967 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
4968 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
4969
4970 break;
4971 }
4972
4973 return ret;
4974}
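/* Illustrative note (added; approximate): '&*ptr' collapses back to 'ptr'
   (with a qualification conversion if the types differ), and
   '&VIEW_CONVERT_EXPR<T>(x)' becomes '(T *) &x'.  In the default case the
   operand is gimplified as an addressable lvalue, marked addressable so
   later passes keep it in memory, and the ADDR_EXPR is rebuilt in
   canonical form if its type no longer matches.  */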
4975
4976/* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
4977 value; output operands should be a gimple lvalue. */
4978
4979static enum gimplify_status
4980 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4981 {
4982 tree expr;
4983 int noutputs;
4984 const char **oconstraints;
4985 int i;
4986 tree link;
4987 const char *constraint;
4988 bool allows_mem, allows_reg, is_inout;
4989 enum gimplify_status ret, tret;
4990 gasm *stmt;
4991 vec<tree, va_gc> *inputs;
4992 vec<tree, va_gc> *outputs;
4993 vec<tree, va_gc> *clobbers;
4994 vec<tree, va_gc> *labels;
4995 tree link_next;
4996
4997 expr = *expr_p;
4998 noutputs = list_length (ASM_OUTPUTS (expr));
4999 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
5000
5001 inputs = NULL;
5002 outputs = NULL;
5003 clobbers = NULL;
5004 labels = NULL;
5005
5006 ret = GS_ALL_DONE;
5007 link_next = NULL_TREE;
5008 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6de9cd9a 5009 {
2c68ba8e 5010 bool ok;
5011 size_t constraint_len;
5012
5013 link_next = TREE_CHAIN (link);
5014
5015 oconstraints[i]
5016 = constraint
6de9cd9a 5017 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5018 constraint_len = strlen (constraint);
5019 if (constraint_len == 0)
5020 continue;
6de9cd9a 5021
5022 ok = parse_output_constraint (&constraint, i, 0, 0,
5023 &allows_mem, &allows_reg, &is_inout);
5024 if (!ok)
5025 {
5026 ret = GS_ERROR;
5027 is_inout = false;
5028 }
5029
5030 if (!allows_reg && allows_mem)
936d04b6 5031 mark_addressable (TREE_VALUE (link));
5032
5033 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5034 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
5035 fb_lvalue | fb_mayfail);
5036 if (tret == GS_ERROR)
5037 {
5038 error ("invalid lvalue in asm output %d", i);
5039 ret = tret;
5040 }
5041
9771b263 5042 vec_safe_push (outputs, link);
5043 TREE_CHAIN (link) = NULL_TREE;
5044
5045 if (is_inout)
5046 {
5047 /* An input/output operand. To give the optimizers more
5048 flexibility, split it into separate input and output
5049 operands. */
5050 tree input;
5051 char buf[10];
5052
5053 /* Turn the in/out constraint into an output constraint. */
5054 char *p = xstrdup (constraint);
5055 p[0] = '=';
5056 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
5057
5058 /* And add a matching input constraint. */
5059 if (allows_reg)
5060 {
5061 sprintf (buf, "%d", i);
5062
5063 /* If there are multiple alternatives in the constraint,
5064 handle each of them individually. Those that allow register
5065 will be replaced with operand number, the others will stay
5066 unchanged. */
5067 if (strchr (p, ',') != NULL)
5068 {
5069 size_t len = 0, buflen = strlen (buf);
5070 char *beg, *end, *str, *dst;
5071
5072 for (beg = p + 1;;)
5073 {
5074 end = strchr (beg, ',');
5075 if (end == NULL)
5076 end = strchr (beg, '\0');
5077 if ((size_t) (end - beg) < buflen)
5078 len += buflen + 1;
5079 else
5080 len += end - beg + 1;
5081 if (*end)
5082 beg = end + 1;
5083 else
5084 break;
5085 }
5086
858904db 5087 str = (char *) alloca (len);
5088 for (beg = p + 1, dst = str;;)
5089 {
5090 const char *tem;
5091 bool mem_p, reg_p, inout_p;
5092
5093 end = strchr (beg, ',');
5094 if (end)
5095 *end = '\0';
5096 beg[-1] = '=';
5097 tem = beg - 1;
5098 parse_output_constraint (&tem, i, 0, 0,
5099 &mem_p, &reg_p, &inout_p);
5100 if (dst != str)
5101 *dst++ = ',';
5102 if (reg_p)
5103 {
5104 memcpy (dst, buf, buflen);
5105 dst += buflen;
5106 }
5107 else
5108 {
5109 if (end)
5110 len = end - beg;
5111 else
5112 len = strlen (beg);
5113 memcpy (dst, beg, len);
5114 dst += len;
5115 }
5116 if (end)
5117 beg = end + 1;
5118 else
5119 break;
5120 }
5121 *dst = '\0';
5122 input = build_string (dst - str, str);
5123 }
5124 else
5125 input = build_string (strlen (buf), buf);
5126 }
5127 else
5128 input = build_string (constraint_len - 1, constraint + 1);
5129
5130 free (p);
5131
5132 input = build_tree_list (build_tree_list (NULL_TREE, input),
5133 unshare_expr (TREE_VALUE (link)));
5134 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
5135 }
5136 }
5137
5138 link_next = NULL_TREE;
5139 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6de9cd9a 5140 {
5141 link_next = TREE_CHAIN (link);
5142 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5143 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5144 oconstraints, &allows_mem, &allows_reg);
5145
5146 /* If we can't make copies, we can only accept memory. */
5147 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
5148 {
5149 if (allows_mem)
5150 allows_reg = 0;
5151 else
5152 {
5153 error ("impossible constraint in %<asm%>");
5154 error ("non-memory input %d must stay in memory", i);
5155 return GS_ERROR;
5156 }
5157 }
5158
5159 /* If the operand is a memory input, it should be an lvalue. */
5160 if (!allows_reg && allows_mem)
5161 {
5162 tree inputv = TREE_VALUE (link);
5163 STRIP_NOPS (inputv);
5164 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
5165 || TREE_CODE (inputv) == PREINCREMENT_EXPR
5166 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
5167 || TREE_CODE (inputv) == POSTINCREMENT_EXPR)
5168 TREE_VALUE (link) = error_mark_node;
5169 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5170 is_gimple_lvalue, fb_lvalue | fb_mayfail);
936d04b6 5171 mark_addressable (TREE_VALUE (link));
5172 if (tret == GS_ERROR)
5173 {
5174 if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
5175 input_location = EXPR_LOCATION (TREE_VALUE (link));
5176 error ("memory input %d is not directly addressable", i);
5177 ret = tret;
5178 }
5179 }
5180 else
5181 {
5182 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
e670d9e4 5183 is_gimple_asm_val, fb_rvalue);
5184 if (tret == GS_ERROR)
5185 ret = tret;
5186 }
5187
5188 TREE_CHAIN (link) = NULL_TREE;
9771b263 5189 vec_safe_push (inputs, link);
6de9cd9a 5190 }
b8698a0f 5191
5192 link_next = NULL_TREE;
5193 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
5194 {
5195 link_next = TREE_CHAIN (link);
5196 TREE_CHAIN (link) = NULL_TREE;
5197 vec_safe_push (clobbers, link);
5198 }
1c384bf1 5199
5200 link_next = NULL_TREE;
5201 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
5202 {
5203 link_next = TREE_CHAIN (link);
5204 TREE_CHAIN (link) = NULL_TREE;
5205 vec_safe_push (labels, link);
5206 }
726a989a 5207
5208 /* Do not add ASMs with errors to the gimple IL stream. */
5209 if (ret != GS_ERROR)
5210 {
5211 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
1c384bf1 5212 inputs, outputs, clobbers, labels);
726a989a 5213
5214 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
5215 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
5216
5217 gimplify_seq_add_stmt (pre_p, stmt);
5218 }
5219
5220 return ret;
5221}
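/* Illustrative note (added; approximate): the in/out splitting done in the
   output loop above turns

     asm ("incl %0" : "+r" (x));

   into roughly

     asm ("incl %0" : "=r" (x) : "0" (x));

   i.e. the '+' constraint becomes a plain output '=r' and a matching
   input constraint "0" referring to that output is appended.  */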
5222
5223/* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
726a989a 5224 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
5225 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
5226 return to this function.
5227
5228 FIXME should we complexify the prequeue handling instead? Or use flags
5229 for all the cleanups and let the optimizer tighten them up? The current
5230 code seems pretty fragile; it will break on a cleanup within any
5231 non-conditional nesting. But any such nesting would be broken, anyway;
5232 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
5233 and continues out of it. We can do that at the RTL level, though, so
5234 having an optimizer to tighten up try/finally regions would be a Good
5235 Thing. */
5236
5237static enum gimplify_status
726a989a 5238gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a 5239{
5240 gimple_stmt_iterator iter;
5241 gimple_seq body_sequence = NULL;
6de9cd9a 5242
325c3691 5243 tree temp = voidify_wrapper_expr (*expr_p, NULL);
5244
5245 /* We only care about the number of conditions between the innermost
5246 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
5247 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6de9cd9a 5248 int old_conds = gimplify_ctxp->conditions;
726a989a 5249 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
32be32af 5250 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6de9cd9a 5251 gimplify_ctxp->conditions = 0;
726a989a 5252 gimplify_ctxp->conditional_cleanups = NULL;
32be32af 5253 gimplify_ctxp->in_cleanup_point_expr = true;
6de9cd9a 5254
726a989a 5255 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
5256
5257 gimplify_ctxp->conditions = old_conds;
df77f454 5258 gimplify_ctxp->conditional_cleanups = old_cleanups;
32be32af 5259 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6de9cd9a 5260
726a989a 5261 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6de9cd9a 5262 {
726a989a 5263 gimple wce = gsi_stmt (iter);
6de9cd9a 5264
726a989a 5265 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6de9cd9a 5266 {
726a989a 5267 if (gsi_one_before_end_p (iter))
6de9cd9a 5268 {
5269 /* Note that gsi_insert_seq_before and gsi_remove do not
5270 scan operands, unlike some other sequence mutators. */
5271 if (!gimple_wce_cleanup_eh_only (wce))
5272 gsi_insert_seq_before_without_update (&iter,
5273 gimple_wce_cleanup (wce),
5274 GSI_SAME_STMT);
726a989a 5275 gsi_remove (&iter, true);
5276 break;
5277 }
5278 else
5279 {
538dd0b7 5280 gtry *gtry;
5281 gimple_seq seq;
5282 enum gimple_try_flags kind;
40aac948 5283
5284 if (gimple_wce_cleanup_eh_only (wce))
5285 kind = GIMPLE_TRY_CATCH;
40aac948 5286 else
5287 kind = GIMPLE_TRY_FINALLY;
5288 seq = gsi_split_seq_after (iter);
5289
82d6e6fc 5290 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
5291 /* Do not use gsi_replace here, as it may scan operands.
5292 We want to do a simple structural modification only. */
355a7673 5293 gsi_set_stmt (&iter, gtry);
daa6e488 5294 iter = gsi_start (gtry->eval);
5295 }
5296 }
5297 else
726a989a 5298 gsi_next (&iter);
5299 }
5300
726a989a 5301 gimplify_seq_add_seq (pre_p, body_sequence);
5302 if (temp)
5303 {
5304 *expr_p = temp;
5305 return GS_OK;
5306 }
5307 else
5308 {
726a989a 5309 *expr_p = NULL;
5310 return GS_ALL_DONE;
5311 }
5312}
5313
5314/* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
5315 is the cleanup action required. EH_ONLY is true if the cleanup should
5316 only be executed if an exception is thrown, not on normal exit. */
5317
5318static void
726a989a 5319gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
6de9cd9a 5320{
5321 gimple wce;
5322 gimple_seq cleanup_stmts = NULL;
5323
5324 /* Errors can result in improperly nested cleanups. Which results in
726a989a 5325 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
1da2ed5f 5326 if (seen_error ())
5327 return;
5328
5329 if (gimple_conditional_context ())
5330 {
5331 /* If we're in a conditional context, this is more complex. We only
5332 want to run the cleanup if we actually ran the initialization that
5333 necessitates it, but we want to run it after the end of the
5334 conditional context. So we wrap the try/finally around the
5335 condition and use a flag to determine whether or not to actually
5336 run the destructor. Thus
5337
5338 test ? f(A()) : 0
5339
5340 becomes (approximately)
5341
5342 flag = 0;
5343 try {
5344 if (test) { A::A(temp); flag = 1; val = f(temp); }
5345 else { val = 0; }
5346 } finally {
5347 if (flag) A::~A(temp);
5348 }
5349 val
5350 */
6de9cd9a 5351 tree flag = create_tmp_var (boolean_type_node, "cleanup");
5352 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
5353 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
726a989a 5354
b4257cfc 5355 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
5356 gimplify_stmt (&cleanup, &cleanup_stmts);
5357 wce = gimple_build_wce (cleanup_stmts);
5358
5359 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5360 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5361 gimplify_seq_add_stmt (pre_p, ftrue);
5362
5363 /* Because of this manipulation, and the EH edges that jump
5364 threading cannot redirect, the temporary (VAR) will appear
5365 to be used uninitialized. Don't warn. */
5366 TREE_NO_WARNING (var) = 1;
5367 }
5368 else
5369 {
5370 gimplify_stmt (&cleanup, &cleanup_stmts);
5371 wce = gimple_build_wce (cleanup_stmts);
5372 gimple_wce_set_cleanup_eh_only (wce, eh_only);
5373 gimplify_seq_add_stmt (pre_p, wce);
6de9cd9a 5374 }
5375}
5376
5377/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
5378
5379static enum gimplify_status
726a989a 5380gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5381{
5382 tree targ = *expr_p;
5383 tree temp = TARGET_EXPR_SLOT (targ);
5384 tree init = TARGET_EXPR_INITIAL (targ);
5385 enum gimplify_status ret;
5386
5387 if (init)
5388 {
5389 tree cleanup = NULL_TREE;
5390
3a5b9284 5391 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
5392 to the temps list. Handle also variable length TARGET_EXPRs. */
5393 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5394 {
5395 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5396 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
5397 gimplify_vla_decl (temp, pre_p);
5398 }
5399 else
5400 gimple_add_tmp_var (temp);
6de9cd9a 5401
5402 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5403 expression is supposed to initialize the slot. */
5404 if (VOID_TYPE_P (TREE_TYPE (init)))
5405 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5406 else
325c3691 5407 {
5408 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5409 init = init_expr;
5410 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5411 init = NULL;
5412 ggc_free (init_expr);
325c3691 5413 }
3a5b9284 5414 if (ret == GS_ERROR)
5415 {
5416 /* PR c++/28266 Make sure this is expanded only once. */
5417 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5418 return GS_ERROR;
5419 }
5420 if (init)
5421 gimplify_and_add (init, pre_p);
5422
5423 /* If needed, push the cleanup for the temp. */
5424 if (TARGET_EXPR_CLEANUP (targ))
5425 {
5426 if (CLEANUP_EH_ONLY (targ))
5427 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5428 CLEANUP_EH_ONLY (targ), pre_p);
5429 else
5430 cleanup = TARGET_EXPR_CLEANUP (targ);
5431 }
5432
5433 /* Add a clobber for the temporary going out of scope, like
5434 gimplify_bind_expr. */
32be32af 5435 if (gimplify_ctxp->in_cleanup_point_expr
5436 && needs_to_live_in_memory (temp)
5437 && flag_stack_reuse == SR_ALL)
d0ad58f9 5438 {
5439 tree clobber = build_constructor (TREE_TYPE (temp),
5440 NULL);
5441 TREE_THIS_VOLATILE (clobber) = true;
5442 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
5443 if (cleanup)
5444 cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
5445 clobber);
5446 else
5447 cleanup = clobber;
5448 }
5449
5450 if (cleanup)
5451 gimple_push_cleanup (temp, cleanup, false, pre_p);
5452
5453 /* Only expand this once. */
5454 TREE_OPERAND (targ, 3) = init;
5455 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5456 }
282899df 5457 else
6de9cd9a 5458 /* We should have expanded this before. */
282899df 5459 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
5460
5461 *expr_p = temp;
5462 return GS_OK;
5463}
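/* Illustrative note (added; approximate, the slot name is made up): a C++
   temporary such as TARGET_EXPR <D.2030, S::S (&D.2030)> is lowered to

     S::S (&D.2030);          // the initializer, emitted on PRE_P
     ... uses of D.2030 ...
     D.2030 = {CLOBBER};      // end-of-life marker, pushed as a cleanup
                              // when stack-slot reuse is enabled

   with any destructor for D.2030 pushed through gimple_push_cleanup.  */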
5464
5465/* Gimplification of expression trees. */
5466
5467/* Gimplify an expression which appears at statement context. The
5468 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5469 NULL, a new sequence is allocated.
6de9cd9a 5470
5471 Return true if we actually added a statement to the queue. */
5472
5473bool
5474gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6de9cd9a 5475{
726a989a 5476 gimple_seq_node last;
6de9cd9a 5477
5478 last = gimple_seq_last (*seq_p);
5479 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5480 return last != gimple_seq_last (*seq_p);
5481}
5482
5483/* Add FIRSTPRIVATE entries for DECL in the surrounding OpenMP parallels
5484 to CTX. If entries already exist, force them to be some flavor of private.
5485 If there is no enclosing parallel, do nothing. */
5486
5487void
5488omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5489{
5490 splay_tree_node n;
5491
5492 if (decl == NULL || !DECL_P (decl))
5493 return;
5494
5495 do
5496 {
5497 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5498 if (n != NULL)
5499 {
5500 if (n->value & GOVD_SHARED)
5501 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5502 else if (n->value & GOVD_MAP)
5503 n->value |= GOVD_MAP_TO_ONLY;
5504 else
5505 return;
5506 }
5507 else if (ctx->region_type == ORT_TARGET)
5508 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
74bf76ed 5509 else if (ctx->region_type != ORT_WORKSHARE
5510 && ctx->region_type != ORT_SIMD
5511 && ctx->region_type != ORT_TARGET_DATA)
5512 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5513
5514 ctx = ctx->outer_context;
5515 }
5516 while (ctx);
5517}
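/* Illustrative note (added; approximate): for a variable-length array used
   inside a parallel region,

     void f (int n)
     {
       int a[n];
     #pragma omp parallel shared(a)
       a[0] = 1;
     }

   the array's size expression (roughly `n * sizeof (int)', stashed in a
   compiler temporary) must be usable inside the region, so the routine
   above forces that temporary to FIRSTPRIVATE in every enclosing context
   that does not already give it a data-sharing class.  */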
5518
5519/* Similarly for each of the type sizes of TYPE. */
5520
5521static void
5522omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5523{
5524 if (type == NULL || type == error_mark_node)
5525 return;
5526 type = TYPE_MAIN_VARIANT (type);
5527
6e2830c3 5528 if (ctx->privatized_types->add (type))
5529 return;
5530
5531 switch (TREE_CODE (type))
5532 {
5533 case INTEGER_TYPE:
5534 case ENUMERAL_TYPE:
5535 case BOOLEAN_TYPE:
953ff289 5536 case REAL_TYPE:
325217ed 5537 case FIXED_POINT_TYPE:
5538 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5539 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5540 break;
5541
5542 case ARRAY_TYPE:
5543 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5544 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5545 break;
5546
5547 case RECORD_TYPE:
5548 case UNION_TYPE:
5549 case QUAL_UNION_TYPE:
5550 {
5551 tree field;
910ad8de 5552 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5553 if (TREE_CODE (field) == FIELD_DECL)
5554 {
5555 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5556 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5557 }
5558 }
5559 break;
5560
5561 case POINTER_TYPE:
5562 case REFERENCE_TYPE:
5563 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5564 break;
5565
5566 default:
5567 break;
5568 }
5569
5570 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5571 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5572 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5573}
5574
5575/* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
5576
5577static void
5578omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5579{
5580 splay_tree_node n;
5581 unsigned int nflags;
5582 tree t;
5583
b504a918 5584 if (error_operand_p (decl))
5585 return;
5586
5587 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
5588 there are constructors involved somewhere. */
5589 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5590 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5591 flags |= GOVD_SEEN;
5592
5593 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
acf0174b 5594 if (n != NULL && n->value != GOVD_ALIGNED)
5595 {
5596 /* We shouldn't be re-adding the decl with the same data
5597 sharing class. */
5598 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5599 /* The only combination of data sharing classes we should see is
5600 FIRSTPRIVATE and LASTPRIVATE. */
5601 nflags = n->value | flags;
5602 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
5603 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE)
5604 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
5605 n->value = nflags;
5606 return;
5607 }
5608
5609 /* When adding a variable-sized variable, we have to handle all sorts
b8698a0f 5610 of additional bits of data: the pointer replacement variable, and
953ff289 5611 the parameters of the type. */
4c923c28 5612 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5613 {
5614 /* Add the pointer replacement variable as PRIVATE if the variable
5615 replacement is private, else FIRSTPRIVATE since we'll need the
5616 address of the original variable either for SHARED, or for the
5617 copy into or out of the context. */
5618 if (!(flags & GOVD_LOCAL))
5619 {
5620 nflags = flags & GOVD_MAP
5621 ? GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT
5622 : flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
5623 nflags |= flags & GOVD_SEEN;
5624 t = DECL_VALUE_EXPR (decl);
5625 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5626 t = TREE_OPERAND (t, 0);
5627 gcc_assert (DECL_P (t));
5628 omp_add_variable (ctx, t, nflags);
5629 }
5630
5631 /* Add all of the variable and type parameters (which should have
5632 been gimplified to a formal temporary) as FIRSTPRIVATE. */
5633 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5634 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5635 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5636
5637 /* The variable-sized variable itself is never SHARED, only some form
5638 of PRIVATE. The sharing would take place via the pointer variable
5639 which we remapped above. */
5640 if (flags & GOVD_SHARED)
5641 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5642 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5643
b8698a0f 5644 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
5645 alloca statement we generate for the variable, so make sure it
5646 is available. This isn't automatically needed for the SHARED
5647 case, since we won't be allocating local storage then.
5648 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
5649 in this case omp_notice_variable will be called later
5650 on when it is gimplified. */
acf0174b 5651 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
423ed416 5652 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
5653 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5654 }
5655 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
5656 && lang_hooks.decls.omp_privatize_by_reference (decl))
DN
5658 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5659
5660 /* Similar to the direct variable sized case above, we'll need the
5661 size of references being privatized. */
5662 if ((flags & GOVD_SHARED) == 0)
5663 {
5664 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
4c923c28 5665 if (TREE_CODE (t) != INTEGER_CST)
5666 omp_notice_variable (ctx, t, true);
5667 }
5668 }
5669
5670 if (n != NULL)
5671 n->value |= flags;
5672 else
5673 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
5674}
5675
5676/* Notice a threadprivate variable DECL used in OpenMP context CTX.
5677 This just prints out diagnostics about threadprivate variable uses
5678 in untied tasks. If DECL2 is non-NULL, prevent this warning
5679 on that variable. */
5680
5681static bool
5682omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
5683 tree decl2)
5684{
5685 splay_tree_node n;
5686 struct gimplify_omp_ctx *octx;
5687
5688 for (octx = ctx; octx; octx = octx->outer_context)
5689 if (octx->region_type == ORT_TARGET)
5690 {
5691 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
5692 if (n == NULL)
5693 {
5694 error ("threadprivate variable %qE used in target region",
5695 DECL_NAME (decl));
5696 error_at (octx->location, "enclosing target region");
5697 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
5698 }
5699 if (decl2)
5700 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
5701 }
5702
5703 if (ctx->region_type != ORT_UNTIED_TASK)
5704 return false;
5705 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5706 if (n == NULL)
5707 {
ad19c4be
EB
5708 error ("threadprivate variable %qE used in untied task",
5709 DECL_NAME (decl));
5710 error_at (ctx->location, "enclosing task");
5711 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
5712 }
5713 if (decl2)
5714 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
5715 return false;
5716}
5717
5718/* Record the fact that DECL was used within the OpenMP context CTX.
5719 IN_CODE is true when real code uses DECL, and false when we should
5720 merely emit default(none) errors. Return true if DECL is going to
5721 be remapped and thus DECL shouldn't be gimplified into its
5722 DECL_VALUE_EXPR (if any). */
5723
5724static bool
5725omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
5726{
5727 splay_tree_node n;
5728 unsigned flags = in_code ? GOVD_SEEN : 0;
5729 bool ret = false, shared;
5730
b504a918 5731 if (error_operand_p (decl))
5732 return false;
5733
5734 /* Threadprivate variables are predetermined. */
5735 if (is_global_var (decl))
5736 {
5737 if (DECL_THREAD_LOCAL_P (decl))
f22f4340 5738 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
953ff289
DN
5739
5740 if (DECL_HAS_VALUE_EXPR_P (decl))
5741 {
5742 tree value = get_base_address (DECL_VALUE_EXPR (decl));
5743
5744 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
f22f4340 5745 return omp_notice_threadprivate_variable (ctx, decl, value);
5746 }
5747 }
5748
5749 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5750 if (ctx->region_type == ORT_TARGET)
5751 {
f014c653 5752 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
5753 if (n == NULL)
5754 {
5755 if (!lang_hooks.types.omp_mappable_type (TREE_TYPE (decl)))
5756 {
5757 error ("%qD referenced in target region does not have "
5758 "a mappable type", decl);
5759 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_EXPLICIT | flags);
5760 }
5761 else
5762 omp_add_variable (ctx, decl, GOVD_MAP | flags);
5763 }
5764 else
5765 {
5766 /* If nothing changed, there's nothing left to do. */
5767 if ((n->value & flags) == flags)
5768 return ret;
5769 n->value |= flags;
5770 }
5771 goto do_outer;
5772 }
5773
5774 if (n == NULL)
5775 {
5776 enum omp_clause_default_kind default_kind, kind;
a68ab351 5777 struct gimplify_omp_ctx *octx;
953ff289 5778
74bf76ed 5779 if (ctx->region_type == ORT_WORKSHARE
5780 || ctx->region_type == ORT_SIMD
5781 || ctx->region_type == ORT_TARGET_DATA)
5782 goto do_outer;
5783
5784 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
5785 remapped firstprivate instead of shared. To some extent this is
5786 addressed in omp_firstprivatize_type_sizes, but not effectively. */
5787 default_kind = ctx->default_kind;
5788 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
5789 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
5790 default_kind = kind;
5791
5792 switch (default_kind)
5793 {
5794 case OMP_CLAUSE_DEFAULT_NONE:
09af4b4c 5795 if ((ctx->region_type & ORT_PARALLEL) != 0)
a5a5434f
TS
5796 {
5797 error ("%qE not specified in enclosing parallel",
5798 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5799 error_at (ctx->location, "enclosing parallel");
5800 }
5801 else if ((ctx->region_type & ORT_TASK) != 0)
acf0174b
JJ
5802 {
5803 error ("%qE not specified in enclosing task",
5804 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5805 error_at (ctx->location, "enclosing task");
5806 }
5807 else if (ctx->region_type == ORT_TEAMS)
5808 {
5809 error ("%qE not specified in enclosing teams construct",
5810 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5811 error_at (ctx->location, "enclosing teams construct");
5812 }
f22f4340 5813 else
a5a5434f 5814 gcc_unreachable ();
953ff289
DN
5815 /* FALLTHRU */
5816 case OMP_CLAUSE_DEFAULT_SHARED:
5817 flags |= GOVD_SHARED;
5818 break;
5819 case OMP_CLAUSE_DEFAULT_PRIVATE:
5820 flags |= GOVD_PRIVATE;
5821 break;
5822 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
5823 flags |= GOVD_FIRSTPRIVATE;
5824 break;
5825 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
5826 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
f22f4340 5827 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
a68ab351
JJ
5828 if (ctx->outer_context)
5829 omp_notice_variable (ctx->outer_context, decl, in_code);
5830 for (octx = ctx->outer_context; octx; octx = octx->outer_context)
5831 {
5832 splay_tree_node n2;
5833
acf0174b
JJ
5834 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0)
5835 continue;
a68ab351
JJ
5836 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
5837 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
5838 {
5839 flags |= GOVD_FIRSTPRIVATE;
5840 break;
5841 }
acf0174b 5842 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
a68ab351
JJ
5843 break;
5844 }
5845 if (flags & GOVD_FIRSTPRIVATE)
5846 break;
5847 if (octx == NULL
5848 && (TREE_CODE (decl) == PARM_DECL
5849 || (!is_global_var (decl)
5850 && DECL_CONTEXT (decl) == current_function_decl)))
5851 {
5852 flags |= GOVD_FIRSTPRIVATE;
5853 break;
5854 }
5855 flags |= GOVD_SHARED;
5856 break;
953ff289
DN
5857 default:
5858 gcc_unreachable ();
5859 }
5860
a68ab351
JJ
5861 if ((flags & GOVD_PRIVATE)
5862 && lang_hooks.decls.omp_private_outer_ref (decl))
5863 flags |= GOVD_PRIVATE_OUTER_REF;
5864
953ff289
DN
5865 omp_add_variable (ctx, decl, flags);
5866
5867 shared = (flags & GOVD_SHARED) != 0;
5868 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5869 goto do_outer;
5870 }
5871
3ad6b266
JJ
5872 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
5873 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
5874 && DECL_SIZE (decl)
5875 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5876 {
5877 splay_tree_node n2;
5878 tree t = DECL_VALUE_EXPR (decl);
5879 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5880 t = TREE_OPERAND (t, 0);
5881 gcc_assert (DECL_P (t));
5882 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
5883 n2->value |= GOVD_SEEN;
5884 }
5885
953ff289
DN
5886 shared = ((flags | n->value) & GOVD_SHARED) != 0;
5887 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5888
5889 /* If nothing changed, there's nothing left to do. */
5890 if ((n->value & flags) == flags)
5891 return ret;
5892 flags |= n->value;
5893 n->value = flags;
5894
5895 do_outer:
5896 /* If the variable is private in the current context, then we don't
5897 need to propagate anything to an outer context. */
a68ab351 5898 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
953ff289
DN
5899 return ret;
5900 if (ctx->outer_context
5901 && omp_notice_variable (ctx->outer_context, decl, in_code))
5902 return true;
5903 return ret;
5904}
5905
5906/* Verify that DECL is private within CTX. If there's specific information
5907 to the contrary in the innermost scope, generate an error. */
5908
5909static bool
f7468577 5910omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
953ff289
DN
5911{
5912 splay_tree_node n;
5913
5914 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5915 if (n != NULL)
5916 {
5917 if (n->value & GOVD_SHARED)
5918 {
5919 if (ctx == gimplify_omp_ctxp)
f6a5ffbf 5920 {
74bf76ed
JJ
5921 if (simd)
5922 error ("iteration variable %qE is predetermined linear",
5923 DECL_NAME (decl));
5924 else
5925 error ("iteration variable %qE should be private",
5926 DECL_NAME (decl));
f6a5ffbf
JJ
5927 n->value = GOVD_PRIVATE;
5928 return true;
5929 }
5930 else
5931 return false;
953ff289 5932 }
761041be
JJ
5933 else if ((n->value & GOVD_EXPLICIT) != 0
5934 && (ctx == gimplify_omp_ctxp
a68ab351 5935 || (ctx->region_type == ORT_COMBINED_PARALLEL
761041be
JJ
5936 && gimplify_omp_ctxp->outer_context == ctx)))
5937 {
5938 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
4f1e4960
JM
5939 error ("iteration variable %qE should not be firstprivate",
5940 DECL_NAME (decl));
761041be 5941 else if ((n->value & GOVD_REDUCTION) != 0)
4f1e4960
JM
5942 error ("iteration variable %qE should not be reduction",
5943 DECL_NAME (decl));
f7468577 5944 else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
74bf76ed
JJ
5945 error ("iteration variable %qE should not be lastprivate",
5946 DECL_NAME (decl));
5947 else if (simd && (n->value & GOVD_PRIVATE) != 0)
5948 error ("iteration variable %qE should not be private",
5949 DECL_NAME (decl));
f7468577 5950 else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
74bf76ed
JJ
5951 error ("iteration variable %qE is predetermined linear",
5952 DECL_NAME (decl));
761041be 5953 }
ca2b1311
JJ
5954 return (ctx == gimplify_omp_ctxp
5955 || (ctx->region_type == ORT_COMBINED_PARALLEL
5956 && gimplify_omp_ctxp->outer_context == ctx));
953ff289
DN
5957 }
5958
74bf76ed
JJ
5959 if (ctx->region_type != ORT_WORKSHARE
5960 && ctx->region_type != ORT_SIMD)
953ff289 5961 return false;
f6a5ffbf 5962 else if (ctx->outer_context)
74bf76ed 5963 return omp_is_private (ctx->outer_context, decl, simd);
ca2b1311 5964 return false;
953ff289
DN
5965}
5966
07b7aade
JJ
5967/* Return true if DECL is private within a parallel region
5968 that binds to the current construct's context or in parallel
5969 region's REDUCTION clause. */
5970
5971static bool
cab37c89 5972omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
07b7aade
JJ
5973{
5974 splay_tree_node n;
5975
5976 do
5977 {
5978 ctx = ctx->outer_context;
5979 if (ctx == NULL)
5980 return !(is_global_var (decl)
cab37c89
JJ
5981 /* References might be private, but might be shared too,
5982 when checking for copyprivate, assume they might be
5983 private, otherwise assume they might be shared. */
5984 || (!copyprivate
5985 && lang_hooks.decls.omp_privatize_by_reference (decl)));
07b7aade 5986
acf0174b
JJ
5987 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
5988 continue;
5989
07b7aade
JJ
5990 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5991 if (n != NULL)
5992 return (n->value & GOVD_SHARED) == 0;
5993 }
74bf76ed
JJ
5994 while (ctx->region_type == ORT_WORKSHARE
5995 || ctx->region_type == ORT_SIMD);
07b7aade
JJ
5996 return false;
5997}
5998
953ff289
DN
5999/* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
6000 and previous omp contexts. */
6001
6002static void
726a989a 6003gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
a68ab351 6004 enum omp_region_type region_type)
953ff289
DN
6005{
6006 struct gimplify_omp_ctx *ctx, *outer_ctx;
6007 tree c;
6008
a68ab351 6009 ctx = new_omp_context (region_type);
953ff289
DN
6010 outer_ctx = ctx->outer_context;
6011
6012 while ((c = *list_p) != NULL)
6013 {
953ff289
DN
6014 bool remove = false;
6015 bool notice_outer = true;
07b7aade 6016 const char *check_non_private = NULL;
953ff289
DN
6017 unsigned int flags;
6018 tree decl;
6019
aaf46ef9 6020 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
6021 {
6022 case OMP_CLAUSE_PRIVATE:
6023 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
a68ab351
JJ
6024 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
6025 {
6026 flags |= GOVD_PRIVATE_OUTER_REF;
6027 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
6028 }
6029 else
6030 notice_outer = false;
953ff289
DN
6031 goto do_add;
6032 case OMP_CLAUSE_SHARED:
6033 flags = GOVD_SHARED | GOVD_EXPLICIT;
6034 goto do_add;
6035 case OMP_CLAUSE_FIRSTPRIVATE:
6036 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
07b7aade 6037 check_non_private = "firstprivate";
953ff289
DN
6038 goto do_add;
6039 case OMP_CLAUSE_LASTPRIVATE:
6040 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
07b7aade 6041 check_non_private = "lastprivate";
953ff289
DN
6042 goto do_add;
6043 case OMP_CLAUSE_REDUCTION:
6044 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
07b7aade 6045 check_non_private = "reduction";
953ff289 6046 goto do_add;
acf0174b
JJ
6047 case OMP_CLAUSE_LINEAR:
6048 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
6049 is_gimple_val, fb_rvalue) == GS_ERROR)
6050 {
6051 remove = true;
6052 break;
6053 }
6054 flags = GOVD_LINEAR | GOVD_EXPLICIT;
6055 goto do_add;
6056
6057 case OMP_CLAUSE_MAP:
b46ebd6c
JJ
6058 decl = OMP_CLAUSE_DECL (c);
6059 if (error_operand_p (decl))
6060 {
6061 remove = true;
6062 break;
6063 }
6064 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
6065 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
6066 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
6067 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
6068 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
acf0174b
JJ
6069 {
6070 remove = true;
6071 break;
6072 }
acf0174b
JJ
6073 if (!DECL_P (decl))
6074 {
6075 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
6076 NULL, is_gimple_lvalue, fb_lvalue)
6077 == GS_ERROR)
6078 {
6079 remove = true;
6080 break;
6081 }
6082 break;
6083 }
6084 flags = GOVD_MAP | GOVD_EXPLICIT;
6085 goto do_add;
6086
6087 case OMP_CLAUSE_DEPEND:
6088 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
6089 {
6090 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
6091 NULL, is_gimple_val, fb_rvalue);
6092 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
6093 }
6094 if (error_operand_p (OMP_CLAUSE_DECL (c)))
6095 {
6096 remove = true;
6097 break;
6098 }
6099 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
6100 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
6101 is_gimple_val, fb_rvalue) == GS_ERROR)
6102 {
6103 remove = true;
6104 break;
6105 }
6106 break;
6107
6108 case OMP_CLAUSE_TO:
6109 case OMP_CLAUSE_FROM:
b46ebd6c
JJ
6110 decl = OMP_CLAUSE_DECL (c);
6111 if (error_operand_p (decl))
acf0174b
JJ
6112 {
6113 remove = true;
6114 break;
6115 }
b46ebd6c
JJ
6116 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
6117 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
6118 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
6119 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
6120 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
acf0174b
JJ
6121 {
6122 remove = true;
6123 break;
6124 }
6125 if (!DECL_P (decl))
6126 {
6127 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
6128 NULL, is_gimple_lvalue, fb_lvalue)
6129 == GS_ERROR)
6130 {
6131 remove = true;
6132 break;
6133 }
6134 break;
6135 }
6136 goto do_notice;
953ff289
DN
6137
6138 do_add:
6139 decl = OMP_CLAUSE_DECL (c);
b504a918 6140 if (error_operand_p (decl))
953ff289
DN
6141 {
6142 remove = true;
6143 break;
6144 }
6145 omp_add_variable (ctx, decl, flags);
693d710f 6146 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
953ff289
DN
6147 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6148 {
6149 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
693d710f 6150 GOVD_LOCAL | GOVD_SEEN);
953ff289 6151 gimplify_omp_ctxp = ctx;
45852dcc 6152 push_gimplify_context ();
726a989a 6153
355a7673
MM
6154 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6155 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
726a989a
RB
6156
6157 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
6158 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
6159 pop_gimplify_context
6160 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
45852dcc 6161 push_gimplify_context ();
726a989a
RB
6162 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
6163 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
b8698a0f 6164 pop_gimplify_context
726a989a
RB
6165 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
6166 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
6167 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
6168
6169 gimplify_omp_ctxp = outer_ctx;
6170 }
a68ab351
JJ
6171 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6172 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
6173 {
6174 gimplify_omp_ctxp = ctx;
45852dcc 6175 push_gimplify_context ();
a68ab351
JJ
6176 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
6177 {
6178 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
6179 NULL, NULL);
6180 TREE_SIDE_EFFECTS (bind) = 1;
6181 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
6182 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
6183 }
726a989a
RB
6184 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
6185 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6186 pop_gimplify_context
6187 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
6188 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
6189
dd2fc525
JJ
6190 gimplify_omp_ctxp = outer_ctx;
6191 }
6192 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6193 && OMP_CLAUSE_LINEAR_STMT (c))
6194 {
6195 gimplify_omp_ctxp = ctx;
6196 push_gimplify_context ();
6197 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
6198 {
6199 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
6200 NULL, NULL);
6201 TREE_SIDE_EFFECTS (bind) = 1;
6202 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
6203 OMP_CLAUSE_LINEAR_STMT (c) = bind;
6204 }
6205 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
6206 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
6207 pop_gimplify_context
6208 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
6209 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
6210
a68ab351
JJ
6211 gimplify_omp_ctxp = outer_ctx;
6212 }
953ff289
DN
6213 if (notice_outer)
6214 goto do_notice;
6215 break;
6216
6217 case OMP_CLAUSE_COPYIN:
6218 case OMP_CLAUSE_COPYPRIVATE:
6219 decl = OMP_CLAUSE_DECL (c);
b504a918 6220 if (error_operand_p (decl))
953ff289
DN
6221 {
6222 remove = true;
6223 break;
6224 }
cab37c89
JJ
6225 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
6226 && !remove
6227 && !omp_check_private (ctx, decl, true))
6228 {
6229 remove = true;
6230 if (is_global_var (decl))
6231 {
6232 if (DECL_THREAD_LOCAL_P (decl))
6233 remove = false;
6234 else if (DECL_HAS_VALUE_EXPR_P (decl))
6235 {
6236 tree value = get_base_address (DECL_VALUE_EXPR (decl));
6237
6238 if (value
6239 && DECL_P (value)
6240 && DECL_THREAD_LOCAL_P (value))
6241 remove = false;
6242 }
6243 }
6244 if (remove)
6245 error_at (OMP_CLAUSE_LOCATION (c),
6246 "copyprivate variable %qE is not threadprivate"
6247 " or private in outer context", DECL_NAME (decl));
6248 }
953ff289
DN
6249 do_notice:
6250 if (outer_ctx)
6251 omp_notice_variable (outer_ctx, decl, true);
07b7aade 6252 if (check_non_private
a68ab351 6253 && region_type == ORT_WORKSHARE
cab37c89 6254 && omp_check_private (ctx, decl, false))
07b7aade 6255 {
4f1e4960
JM
6256 error ("%s variable %qE is private in outer context",
6257 check_non_private, DECL_NAME (decl));
07b7aade
JJ
6258 remove = true;
6259 }
953ff289
DN
6260 break;
6261
20906c66 6262 case OMP_CLAUSE_FINAL:
953ff289 6263 case OMP_CLAUSE_IF:
d568d1a8
RS
6264 OMP_CLAUSE_OPERAND (c, 0)
6265 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
6266 /* Fall through. */
6267
6268 case OMP_CLAUSE_SCHEDULE:
953ff289 6269 case OMP_CLAUSE_NUM_THREADS:
acf0174b
JJ
6270 case OMP_CLAUSE_NUM_TEAMS:
6271 case OMP_CLAUSE_THREAD_LIMIT:
6272 case OMP_CLAUSE_DIST_SCHEDULE:
6273 case OMP_CLAUSE_DEVICE:
9a771876 6274 case OMP_CLAUSE__CILK_FOR_COUNT_:
726a989a
RB
6275 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
6276 is_gimple_val, fb_rvalue) == GS_ERROR)
acf0174b 6277 remove = true;
953ff289
DN
6278 break;
6279
6280 case OMP_CLAUSE_NOWAIT:
6281 case OMP_CLAUSE_ORDERED:
a68ab351
JJ
6282 case OMP_CLAUSE_UNTIED:
6283 case OMP_CLAUSE_COLLAPSE:
20906c66 6284 case OMP_CLAUSE_MERGEABLE:
acf0174b 6285 case OMP_CLAUSE_PROC_BIND:
74bf76ed 6286 case OMP_CLAUSE_SAFELEN:
953ff289
DN
6287 break;
6288
acf0174b
JJ
6289 case OMP_CLAUSE_ALIGNED:
6290 decl = OMP_CLAUSE_DECL (c);
6291 if (error_operand_p (decl))
6292 {
6293 remove = true;
6294 break;
6295 }
b46ebd6c
JJ
6296 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
6297 is_gimple_val, fb_rvalue) == GS_ERROR)
6298 {
6299 remove = true;
6300 break;
6301 }
acf0174b
JJ
6302 if (!is_global_var (decl)
6303 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
6304 omp_add_variable (ctx, decl, GOVD_ALIGNED);
6305 break;
6306
953ff289
DN
6307 case OMP_CLAUSE_DEFAULT:
6308 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
6309 break;
6310
6311 default:
6312 gcc_unreachable ();
6313 }
6314
6315 if (remove)
6316 *list_p = OMP_CLAUSE_CHAIN (c);
6317 else
6318 list_p = &OMP_CLAUSE_CHAIN (c);
6319 }
6320
6321 gimplify_omp_ctxp = ctx;
6322}
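/* Illustrative note (added; approximate): scanning the clauses of

   #pragma omp parallel firstprivate(a) reduction(+:s)

   records GOVD_FIRSTPRIVATE | GOVD_EXPLICIT for `a' and
   GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT for `s' in the new context,
   and gimplifies the reduction's init/merge expressions into the
   OMP_CLAUSE_REDUCTION_GIMPLE_INIT/MERGE sequences; variables referenced
   only in the region body get their implicit clauses added afterwards by
   gimplify_adjust_omp_clauses.  */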
6323
6324struct gimplify_adjust_omp_clauses_data
6325{
6326 tree *list_p;
6327 gimple_seq *pre_p;
6328};
6329
6330/* For all variables that were not actually used within the context,
6331 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
6332
6333static int
6334gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
6335{
6336 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
6337 gimple_seq *pre_p
6338 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
6339 tree decl = (tree) n->key;
6340 unsigned flags = n->value;
aaf46ef9 6341 enum omp_clause_code code;
6342 tree clause;
6343 bool private_debug;
6344
6345 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
6346 return 0;
6347 if ((flags & GOVD_SEEN) == 0)
6348 return 0;
6349 if (flags & GOVD_DEBUG_PRIVATE)
6350 {
6351 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
6352 private_debug = true;
6353 }
6354 else if (flags & GOVD_MAP)
6355 private_debug = false;
6356 else
6357 private_debug
6358 = lang_hooks.decls.omp_private_debug_clause (decl,
6359 !!(flags & GOVD_SHARED));
6360 if (private_debug)
6361 code = OMP_CLAUSE_PRIVATE;
6362 else if (flags & GOVD_MAP)
6363 code = OMP_CLAUSE_MAP;
6364 else if (flags & GOVD_SHARED)
6365 {
6366 if (is_global_var (decl))
6367 {
6368 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
6369 while (ctx != NULL)
6370 {
6371 splay_tree_node on
6372 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6373 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
74bf76ed 6374 | GOVD_PRIVATE | GOVD_REDUCTION
7de20fbd 6375 | GOVD_LINEAR | GOVD_MAP)) != 0)
64964499
JJ
6376 break;
6377 ctx = ctx->outer_context;
6378 }
6379 if (ctx == NULL)
6380 return 0;
6381 }
953ff289
DN
6382 code = OMP_CLAUSE_SHARED;
6383 }
6384 else if (flags & GOVD_PRIVATE)
6385 code = OMP_CLAUSE_PRIVATE;
6386 else if (flags & GOVD_FIRSTPRIVATE)
6387 code = OMP_CLAUSE_FIRSTPRIVATE;
74bf76ed
JJ
6388 else if (flags & GOVD_LASTPRIVATE)
6389 code = OMP_CLAUSE_LASTPRIVATE;
acf0174b
JJ
6390 else if (flags & GOVD_ALIGNED)
6391 return 0;
953ff289
DN
6392 else
6393 gcc_unreachable ();
6394
c2255bc4 6395 clause = build_omp_clause (input_location, code);
aaf46ef9 6396 OMP_CLAUSE_DECL (clause) = decl;
953ff289
DN
6397 OMP_CLAUSE_CHAIN (clause) = *list_p;
6398 if (private_debug)
6399 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
a68ab351
JJ
6400 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
6401 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
acf0174b
JJ
6402 else if (code == OMP_CLAUSE_MAP)
6403 {
6404 OMP_CLAUSE_MAP_KIND (clause) = flags & GOVD_MAP_TO_ONLY
6405 ? OMP_CLAUSE_MAP_TO
6406 : OMP_CLAUSE_MAP_TOFROM;
6407 if (DECL_SIZE (decl)
6408 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6409 {
6410 tree decl2 = DECL_VALUE_EXPR (decl);
6411 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6412 decl2 = TREE_OPERAND (decl2, 0);
6413 gcc_assert (DECL_P (decl2));
6414 tree mem = build_simple_mem_ref (decl2);
6415 OMP_CLAUSE_DECL (clause) = mem;
6416 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6417 if (gimplify_omp_ctxp->outer_context)
6418 {
6419 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
6420 omp_notice_variable (ctx, decl2, true);
6421 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
6422 }
6423 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
6424 OMP_CLAUSE_MAP);
6425 OMP_CLAUSE_DECL (nc) = decl;
6426 OMP_CLAUSE_SIZE (nc) = size_zero_node;
6427 OMP_CLAUSE_MAP_KIND (nc) = OMP_CLAUSE_MAP_POINTER;
6428 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
6429 OMP_CLAUSE_CHAIN (clause) = nc;
6430 }
b46ebd6c
JJ
6431 else
6432 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
acf0174b 6433 }
95782571
JJ
6434 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
6435 {
6436 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
6437 OMP_CLAUSE_DECL (nc) = decl;
6438 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
6439 OMP_CLAUSE_CHAIN (nc) = *list_p;
6440 OMP_CLAUSE_CHAIN (clause) = nc;
f014c653
JJ
6441 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
6442 gimplify_omp_ctxp = ctx->outer_context;
6443 lang_hooks.decls.omp_finish_clause (nc, pre_p);
6444 gimplify_omp_ctxp = ctx;
95782571 6445 }
953ff289 6446 *list_p = clause;
f014c653
JJ
6447 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
6448 gimplify_omp_ctxp = ctx->outer_context;
6449 lang_hooks.decls.omp_finish_clause (clause, pre_p);
6450 gimplify_omp_ctxp = ctx;
953ff289
DN
6451 return 0;
6452}
6453
6454static void
f014c653 6455gimplify_adjust_omp_clauses (gimple_seq *pre_p, tree *list_p)
953ff289
DN
6456{
6457 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
6458 tree c, decl;
6459
6460 while ((c = *list_p) != NULL)
6461 {
6462 splay_tree_node n;
6463 bool remove = false;
6464
aaf46ef9 6465 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
6466 {
6467 case OMP_CLAUSE_PRIVATE:
6468 case OMP_CLAUSE_SHARED:
6469 case OMP_CLAUSE_FIRSTPRIVATE:
74bf76ed 6470 case OMP_CLAUSE_LINEAR:
953ff289
DN
6471 decl = OMP_CLAUSE_DECL (c);
6472 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6473 remove = !(n->value & GOVD_SEEN);
6474 if (! remove)
6475 {
aaf46ef9 6476 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
953ff289
DN
6477 if ((n->value & GOVD_DEBUG_PRIVATE)
6478 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
6479 {
6480 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
6481 || ((n->value & GOVD_DATA_SHARE_CLASS)
6482 == GOVD_PRIVATE));
aaf46ef9 6483 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
953ff289
DN
6484 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
6485 }
74bf76ed
JJ
6486 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6487 && ctx->outer_context
6488 && !(OMP_CLAUSE_LINEAR_NO_COPYIN (c)
95782571 6489 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
74bf76ed 6490 {
95782571
JJ
6491 if (ctx->outer_context->combined_loop
6492 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
74bf76ed
JJ
6493 {
6494 n = splay_tree_lookup (ctx->outer_context->variables,
6495 (splay_tree_key) decl);
6496 if (n == NULL
6497 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
6498 {
9cf32741
JJ
6499 int flags = GOVD_FIRSTPRIVATE;
6500 /* #pragma omp distribute does not allow
6501 lastprivate clause. */
6502 if (!ctx->outer_context->distribute)
6503 flags |= GOVD_LASTPRIVATE;
74bf76ed
JJ
6504 if (n == NULL)
6505 omp_add_variable (ctx->outer_context, decl,
6506 flags | GOVD_SEEN);
6507 else
6508 n->value |= flags | GOVD_SEEN;
6509 }
6510 }
95782571 6511 else if (!is_global_var (decl))
74bf76ed
JJ
6512 omp_notice_variable (ctx->outer_context, decl, true);
6513 }
953ff289
DN
6514 }
6515 break;
6516
6517 case OMP_CLAUSE_LASTPRIVATE:
6518 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
6519 accurately reflect the presence of a FIRSTPRIVATE clause. */
6520 decl = OMP_CLAUSE_DECL (c);
6521 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6522 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6523 = (n->value & GOVD_FIRSTPRIVATE) != 0;
6524 break;
b8698a0f 6525
acf0174b
JJ
6526 case OMP_CLAUSE_ALIGNED:
6527 decl = OMP_CLAUSE_DECL (c);
6528 if (!is_global_var (decl))
6529 {
6530 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6531 remove = n == NULL || !(n->value & GOVD_SEEN);
6532 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
6533 {
6534 struct gimplify_omp_ctx *octx;
6535 if (n != NULL
6536 && (n->value & (GOVD_DATA_SHARE_CLASS
6537 & ~GOVD_FIRSTPRIVATE)))
6538 remove = true;
6539 else
6540 for (octx = ctx->outer_context; octx;
6541 octx = octx->outer_context)
6542 {
6543 n = splay_tree_lookup (octx->variables,
6544 (splay_tree_key) decl);
6545 if (n == NULL)
6546 continue;
6547 if (n->value & GOVD_LOCAL)
6548 break;
6549 /* We have to avoid assigning a shared variable
6550 to itself when trying to add
6551 __builtin_assume_aligned. */
6552 if (n->value & GOVD_SHARED)
6553 {
6554 remove = true;
6555 break;
6556 }
6557 }
6558 }
6559 }
6560 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
6561 {
6562 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6563 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6564 remove = true;
6565 }
6566 break;
6567
6568 case OMP_CLAUSE_MAP:
6569 decl = OMP_CLAUSE_DECL (c);
6570 if (!DECL_P (decl))
6571 break;
6572 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6573 if (ctx->region_type == ORT_TARGET && !(n->value & GOVD_SEEN))
6574 remove = true;
6575 else if (DECL_SIZE (decl)
6576 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
6577 && OMP_CLAUSE_MAP_KIND (c) != OMP_CLAUSE_MAP_POINTER)
6578 {
6579 tree decl2 = DECL_VALUE_EXPR (decl);
6580 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6581 decl2 = TREE_OPERAND (decl2, 0);
6582 gcc_assert (DECL_P (decl2));
6583 tree mem = build_simple_mem_ref (decl2);
6584 OMP_CLAUSE_DECL (c) = mem;
6585 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6586 if (ctx->outer_context)
6587 {
6588 omp_notice_variable (ctx->outer_context, decl2, true);
6589 omp_notice_variable (ctx->outer_context,
6590 OMP_CLAUSE_SIZE (c), true);
6591 }
6592 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
6593 OMP_CLAUSE_MAP);
6594 OMP_CLAUSE_DECL (nc) = decl;
6595 OMP_CLAUSE_SIZE (nc) = size_zero_node;
6596 OMP_CLAUSE_MAP_KIND (nc) = OMP_CLAUSE_MAP_POINTER;
6597 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
6598 OMP_CLAUSE_CHAIN (c) = nc;
6599 c = nc;
6600 }
b46ebd6c
JJ
6601 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
6602 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
acf0174b
JJ
6603 break;
6604
6605 case OMP_CLAUSE_TO:
6606 case OMP_CLAUSE_FROM:
6607 decl = OMP_CLAUSE_DECL (c);
6608 if (!DECL_P (decl))
6609 break;
6610 if (DECL_SIZE (decl)
6611 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6612 {
6613 tree decl2 = DECL_VALUE_EXPR (decl);
6614 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6615 decl2 = TREE_OPERAND (decl2, 0);
6616 gcc_assert (DECL_P (decl2));
6617 tree mem = build_simple_mem_ref (decl2);
6618 OMP_CLAUSE_DECL (c) = mem;
6619 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6620 if (ctx->outer_context)
6621 {
6622 omp_notice_variable (ctx->outer_context, decl2, true);
6623 omp_notice_variable (ctx->outer_context,
6624 OMP_CLAUSE_SIZE (c), true);
6625 }
6626 }
b46ebd6c
JJ
6627 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
6628 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
acf0174b
JJ
6629 break;
6630
953ff289
DN
6631 case OMP_CLAUSE_REDUCTION:
6632 case OMP_CLAUSE_COPYIN:
6633 case OMP_CLAUSE_COPYPRIVATE:
6634 case OMP_CLAUSE_IF:
6635 case OMP_CLAUSE_NUM_THREADS:
acf0174b
JJ
6636 case OMP_CLAUSE_NUM_TEAMS:
6637 case OMP_CLAUSE_THREAD_LIMIT:
6638 case OMP_CLAUSE_DIST_SCHEDULE:
6639 case OMP_CLAUSE_DEVICE:
953ff289
DN
6640 case OMP_CLAUSE_SCHEDULE:
6641 case OMP_CLAUSE_NOWAIT:
6642 case OMP_CLAUSE_ORDERED:
6643 case OMP_CLAUSE_DEFAULT:
a68ab351
JJ
6644 case OMP_CLAUSE_UNTIED:
6645 case OMP_CLAUSE_COLLAPSE:
20906c66
JJ
6646 case OMP_CLAUSE_FINAL:
6647 case OMP_CLAUSE_MERGEABLE:
acf0174b 6648 case OMP_CLAUSE_PROC_BIND:
74bf76ed 6649 case OMP_CLAUSE_SAFELEN:
acf0174b 6650 case OMP_CLAUSE_DEPEND:
9a771876 6651 case OMP_CLAUSE__CILK_FOR_COUNT_:
953ff289
DN
6652 break;
6653
6654 default:
6655 gcc_unreachable ();
6656 }
6657
6658 if (remove)
6659 *list_p = OMP_CLAUSE_CHAIN (c);
6660 else
6661 list_p = &OMP_CLAUSE_CHAIN (c);
6662 }
6663
6664 /* Add in any implicit data sharing. */
f014c653
JJ
6665 struct gimplify_adjust_omp_clauses_data data;
6666 data.list_p = list_p;
6667 data.pre_p = pre_p;
6668 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
b8698a0f 6669
953ff289
DN
6670 gimplify_omp_ctxp = ctx->outer_context;
6671 delete_omp_context (ctx);
6672}

/* Gimplify the contents of an OMP_PARALLEL statement.  This involves
   gimplification of the body, as well as scanning the body for used
   variables.  We need to do this scan now, because variable-sized
   decls will be decomposed during gimplification.  */

static void
gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple g;
  gimple_seq body = NULL;

  gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
			     OMP_PARALLEL_COMBINED (expr)
			     ? ORT_COMBINED_PARALLEL
			     : ORT_PARALLEL);

  push_gimplify_context ();

  g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
  if (gimple_code (g) == GIMPLE_BIND)
    pop_gimplify_context (g);
  else
    pop_gimplify_context (NULL);

  gimplify_adjust_omp_clauses (pre_p, &OMP_PARALLEL_CLAUSES (expr));

  g = gimple_build_omp_parallel (body,
				 OMP_PARALLEL_CLAUSES (expr),
				 NULL_TREE, NULL_TREE);
  if (OMP_PARALLEL_COMBINED (expr))
    gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
  gimplify_seq_add_stmt (pre_p, g);
  *expr_p = NULL_TREE;
}
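
/* As a rough illustration of the above: a source-level construct such as

     #pragma omp parallel shared(x)
       x++;

   arrives here as an OMP_PARALLEL tree; its body is gimplified into a
   separate sequence and wrapped in a GIMPLE_OMP_PARALLEL statement, with
   the clause list rescanned and adjusted around that body.  A combined
   "#pragma omp parallel for" is scanned with ORT_COMBINED_PARALLEL and
   tagged GF_OMP_PARALLEL_COMBINED instead.  */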

/* Gimplify the contents of an OMP_TASK statement.  This involves
   gimplification of the body, as well as scanning the body for used
   variables.  We need to do this scan now, because variable-sized
   decls will be decomposed during gimplification.  */

static void
gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple g;
  gimple_seq body = NULL;

  gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
			     find_omp_clause (OMP_TASK_CLAUSES (expr),
					      OMP_CLAUSE_UNTIED)
			     ? ORT_UNTIED_TASK : ORT_TASK);

  push_gimplify_context ();

  g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
  if (gimple_code (g) == GIMPLE_BIND)
    pop_gimplify_context (g);
  else
    pop_gimplify_context (NULL);

  gimplify_adjust_omp_clauses (pre_p, &OMP_TASK_CLAUSES (expr));

  g = gimple_build_omp_task (body,
			     OMP_TASK_CLAUSES (expr),
			     NULL_TREE, NULL_TREE,
			     NULL_TREE, NULL_TREE, NULL_TREE);
  gimplify_seq_add_stmt (pre_p, g);
  *expr_p = NULL_TREE;
}
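
/* For illustration, a task construct such as

     #pragma omp task untied
       foo ();

   is scanned with ORT_UNTIED_TASK because of the "untied" clause and
   lowered to a GIMPLE_OMP_TASK statement; without "untied" the scan uses
   plain ORT_TASK.  */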

/* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
   with non-NULL OMP_FOR_INIT.  */

static tree
find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
{
  *walk_subtrees = 0;
  switch (TREE_CODE (*tp))
    {
    case OMP_FOR:
      *walk_subtrees = 1;
      /* FALLTHRU */
    case OMP_SIMD:
      if (OMP_FOR_INIT (*tp) != NULL_TREE)
	return *tp;
      break;
    case BIND_EXPR:
    case STATEMENT_LIST:
    case OMP_PARALLEL:
      *walk_subtrees = 1;
      break;
    default:
      break;
    }
  return NULL_TREE;
}
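
/* Sketch of why the walker above is needed: for a combined construct like

     #pragma omp parallel for simd
     for (i = 0; i < n; i++)
       ...

   only the innermost loop representation carries the real OMP_FOR_INIT;
   the enclosing loop nodes are left with a NULL init, and gimplify_omp_for
   uses this walker to dig down to the loop that actually describes the
   iteration before gimplifying its bounds and step.  */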
6771
a68ab351
JJ
6772/* Gimplify the gross structure of an OMP_FOR statement. */
6773
6774static enum gimplify_status
726a989a 6775gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
a68ab351 6776{
acf0174b 6777 tree for_stmt, orig_for_stmt, decl, var, t;
32e8bb8e
ILT
6778 enum gimplify_status ret = GS_ALL_DONE;
6779 enum gimplify_status tret;
538dd0b7 6780 gomp_for *gfor;
726a989a 6781 gimple_seq for_body, for_pre_body;
a68ab351 6782 int i;
74bf76ed
JJ
6783 bool simd;
6784 bitmap has_decl_expr = NULL;
a68ab351 6785
acf0174b 6786 orig_for_stmt = for_stmt = *expr_p;
a68ab351 6787
f7468577
JJ
6788 simd = (TREE_CODE (for_stmt) == OMP_SIMD
6789 || TREE_CODE (for_stmt) == CILK_SIMD);
a68ab351 6790 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
74bf76ed 6791 simd ? ORT_SIMD : ORT_WORKSHARE);
9cf32741
JJ
6792 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
6793 gimplify_omp_ctxp->distribute = true;
917948d3 6794
726a989a
RB
6795 /* Handle OMP_FOR_INIT. */
6796 for_pre_body = NULL;
74bf76ed
JJ
6797 if (simd && OMP_FOR_PRE_BODY (for_stmt))
6798 {
6799 has_decl_expr = BITMAP_ALLOC (NULL);
6800 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
6801 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
acf0174b 6802 == VAR_DECL)
74bf76ed
JJ
6803 {
6804 t = OMP_FOR_PRE_BODY (for_stmt);
6805 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
6806 }
6807 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
6808 {
6809 tree_stmt_iterator si;
6810 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
6811 tsi_next (&si))
6812 {
6813 t = tsi_stmt (si);
6814 if (TREE_CODE (t) == DECL_EXPR
6815 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
6816 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
6817 }
6818 }
6819 }
726a989a
RB
6820 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
6821 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
a68ab351 6822
acf0174b
JJ
6823 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
6824 {
6825 for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt), find_combined_omp_for,
6826 NULL, NULL);
6827 gcc_assert (for_stmt != NULL_TREE);
6828 gimplify_omp_ctxp->combined_loop = true;
6829 }
6830
355a7673 6831 for_body = NULL;
a68ab351
JJ
6832 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6833 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
6834 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6835 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
6836 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6837 {
6838 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
726a989a
RB
6839 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6840 decl = TREE_OPERAND (t, 0);
a68ab351
JJ
6841 gcc_assert (DECL_P (decl));
6842 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
6843 || POINTER_TYPE_P (TREE_TYPE (decl)));
6844
6845 /* Make sure the iteration variable is private. */
74bf76ed 6846 tree c = NULL_TREE;
f7468577 6847 tree c2 = NULL_TREE;
acf0174b
JJ
6848 if (orig_for_stmt != for_stmt)
6849 /* Do this only on innermost construct for combined ones. */;
6850 else if (simd)
74bf76ed
JJ
6851 {
6852 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
6853 (splay_tree_key)decl);
f7468577
JJ
6854 omp_is_private (gimplify_omp_ctxp, decl,
6855 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6856 != 1));
74bf76ed
JJ
6857 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6858 omp_notice_variable (gimplify_omp_ctxp, decl, true);
6859 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
6860 {
6861 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
6862 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
6863 if (has_decl_expr
6864 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
6865 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
6866 OMP_CLAUSE_DECL (c) = decl;
6867 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
6868 OMP_FOR_CLAUSES (for_stmt) = c;
6869 omp_add_variable (gimplify_omp_ctxp, decl,
6870 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
6871 }
6872 else
6873 {
6874 bool lastprivate
6875 = (!has_decl_expr
6876 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
56ad0e38
JJ
6877 if (lastprivate
6878 && gimplify_omp_ctxp->outer_context
6879 && gimplify_omp_ctxp->outer_context->region_type
6880 == ORT_WORKSHARE
6881 && gimplify_omp_ctxp->outer_context->combined_loop
6882 && !gimplify_omp_ctxp->outer_context->distribute)
6883 {
6884 struct gimplify_omp_ctx *outer
6885 = gimplify_omp_ctxp->outer_context;
6886 n = splay_tree_lookup (outer->variables,
6887 (splay_tree_key) decl);
6888 if (n != NULL
6889 && (n->value & GOVD_DATA_SHARE_CLASS) == GOVD_LOCAL)
6890 lastprivate = false;
6891 else if (omp_check_private (outer, decl, false))
6892 error ("lastprivate variable %qE is private in outer "
6893 "context", DECL_NAME (decl));
6894 else
6895 {
6896 omp_add_variable (outer, decl,
6897 GOVD_LASTPRIVATE | GOVD_SEEN);
6898 if (outer->outer_context)
6899 omp_notice_variable (outer->outer_context, decl, true);
6900 }
6901 }
74bf76ed
JJ
6902 c = build_omp_clause (input_location,
6903 lastprivate ? OMP_CLAUSE_LASTPRIVATE
6904 : OMP_CLAUSE_PRIVATE);
6905 OMP_CLAUSE_DECL (c) = decl;
6906 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
f7468577 6907 OMP_FOR_CLAUSES (for_stmt) = c;
74bf76ed
JJ
6908 omp_add_variable (gimplify_omp_ctxp, decl,
6909 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
f7468577 6910 | GOVD_EXPLICIT | GOVD_SEEN);
74bf76ed
JJ
6911 c = NULL_TREE;
6912 }
6913 }
f7468577 6914 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
a68ab351
JJ
6915 omp_notice_variable (gimplify_omp_ctxp, decl, true);
6916 else
6917 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
6918
6919 /* If DECL is not a gimple register, create a temporary variable to act
6920 as an iteration counter. This is valid, since DECL cannot be
56ad0e38
JJ
6921 modified in the body of the loop. Similarly for any iteration vars
6922 in simd with collapse > 1 where the iterator vars must be
6923 lastprivate. */
acf0174b
JJ
6924 if (orig_for_stmt != for_stmt)
6925 var = decl;
56ad0e38
JJ
6926 else if (!is_gimple_reg (decl)
6927 || (simd && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
a68ab351
JJ
6928 {
6929 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
726a989a 6930 TREE_OPERAND (t, 0) = var;
b8698a0f 6931
726a989a 6932 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
953ff289 6933
f7468577
JJ
6934 if (simd && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
6935 {
6936 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
6937 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
6938 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
6939 OMP_CLAUSE_DECL (c2) = var;
6940 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
6941 OMP_FOR_CLAUSES (for_stmt) = c2;
6942 omp_add_variable (gimplify_omp_ctxp, var,
6943 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
6944 if (c == NULL_TREE)
6945 {
6946 c = c2;
6947 c2 = NULL_TREE;
6948 }
6949 }
6950 else
6951 omp_add_variable (gimplify_omp_ctxp, var,
6952 GOVD_PRIVATE | GOVD_SEEN);
a68ab351
JJ
6953 }
6954 else
6955 var = decl;
07beea0d 6956
32e8bb8e 6957 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
726a989a 6958 is_gimple_val, fb_rvalue);
32e8bb8e 6959 ret = MIN (ret, tret);
726a989a
RB
6960 if (ret == GS_ERROR)
6961 return ret;
953ff289 6962
726a989a 6963 /* Handle OMP_FOR_COND. */
a68ab351
JJ
6964 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6965 gcc_assert (COMPARISON_CLASS_P (t));
726a989a 6966 gcc_assert (TREE_OPERAND (t, 0) == decl);
b56b9fe3 6967
32e8bb8e 6968 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
726a989a 6969 is_gimple_val, fb_rvalue);
32e8bb8e 6970 ret = MIN (ret, tret);
917948d3 6971
726a989a 6972 /* Handle OMP_FOR_INCR. */
a68ab351 6973 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
953ff289
DN
6974 switch (TREE_CODE (t))
6975 {
a68ab351
JJ
6976 case PREINCREMENT_EXPR:
6977 case POSTINCREMENT_EXPR:
c02065fc
AH
6978 {
6979 tree decl = TREE_OPERAND (t, 0);
da6f124d
JJ
6980 /* c_omp_for_incr_canonicalize_ptr() should have been
6981 called to massage things appropriately. */
c02065fc
AH
6982 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
6983
6984 if (orig_for_stmt != for_stmt)
6985 break;
6986 t = build_int_cst (TREE_TYPE (decl), 1);
6987 if (c)
6988 OMP_CLAUSE_LINEAR_STEP (c) = t;
6989 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6990 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6991 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
acf0174b 6992 break;
c02065fc 6993 }
a68ab351
JJ
6994
6995 case PREDECREMENT_EXPR:
6996 case POSTDECREMENT_EXPR:
da6f124d
JJ
6997 /* c_omp_for_incr_canonicalize_ptr() should have been
6998 called to massage things appropriately. */
6999 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
acf0174b
JJ
7000 if (orig_for_stmt != for_stmt)
7001 break;
a68ab351 7002 t = build_int_cst (TREE_TYPE (decl), -1);
74bf76ed
JJ
7003 if (c)
7004 OMP_CLAUSE_LINEAR_STEP (c) = t;
a68ab351 7005 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
726a989a 7006 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
a68ab351
JJ
7007 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
7008 break;
7009
726a989a
RB
7010 case MODIFY_EXPR:
7011 gcc_assert (TREE_OPERAND (t, 0) == decl);
7012 TREE_OPERAND (t, 0) = var;
a68ab351 7013
726a989a 7014 t = TREE_OPERAND (t, 1);
a68ab351 7015 switch (TREE_CODE (t))
953ff289 7016 {
a68ab351
JJ
7017 case PLUS_EXPR:
7018 if (TREE_OPERAND (t, 1) == decl)
7019 {
7020 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
7021 TREE_OPERAND (t, 0) = var;
7022 break;
7023 }
7024
7025 /* Fallthru. */
7026 case MINUS_EXPR:
7027 case POINTER_PLUS_EXPR:
7028 gcc_assert (TREE_OPERAND (t, 0) == decl);
917948d3 7029 TREE_OPERAND (t, 0) = var;
953ff289 7030 break;
a68ab351
JJ
7031 default:
7032 gcc_unreachable ();
953ff289 7033 }
917948d3 7034
32e8bb8e 7035 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
726a989a 7036 is_gimple_val, fb_rvalue);
32e8bb8e 7037 ret = MIN (ret, tret);
74bf76ed
JJ
7038 if (c)
7039 {
da6f124d
JJ
7040 tree step = TREE_OPERAND (t, 1);
7041 tree stept = TREE_TYPE (decl);
7042 if (POINTER_TYPE_P (stept))
7043 stept = sizetype;
7044 step = fold_convert (stept, step);
74bf76ed 7045 if (TREE_CODE (t) == MINUS_EXPR)
da6f124d
JJ
7046 step = fold_build1 (NEGATE_EXPR, stept, step);
7047 OMP_CLAUSE_LINEAR_STEP (c) = step;
7048 if (step != TREE_OPERAND (t, 1))
74bf76ed 7049 {
74bf76ed
JJ
7050 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
7051 &for_pre_body, NULL,
7052 is_gimple_val, fb_rvalue);
7053 ret = MIN (ret, tret);
7054 }
7055 }
953ff289 7056 break;
a68ab351 7057
953ff289
DN
7058 default:
7059 gcc_unreachable ();
7060 }
7061
f7468577
JJ
7062 if (c2)
7063 {
7064 gcc_assert (c);
7065 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
7066 }
7067
acf0174b
JJ
7068 if ((var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
7069 && orig_for_stmt == for_stmt)
a68ab351 7070 {
a68ab351 7071 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
f7468577
JJ
7072 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7073 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
7074 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7075 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
7076 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
7077 && OMP_CLAUSE_DECL (c) == decl)
726a989a
RB
7078 {
7079 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
7080 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
7081 gcc_assert (TREE_OPERAND (t, 0) == var);
7082 t = TREE_OPERAND (t, 1);
7083 gcc_assert (TREE_CODE (t) == PLUS_EXPR
7084 || TREE_CODE (t) == MINUS_EXPR
7085 || TREE_CODE (t) == POINTER_PLUS_EXPR);
7086 gcc_assert (TREE_OPERAND (t, 0) == var);
7087 t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
7088 TREE_OPERAND (t, 1));
f7468577
JJ
7089 gimple_seq *seq;
7090 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
7091 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
7092 else
7093 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
7094 gimplify_assign (decl, t, seq);
a68ab351
JJ
7095 }
7096 }
953ff289
DN
7097 }
7098
74bf76ed
JJ
7099 BITMAP_FREE (has_decl_expr);
7100
acf0174b 7101 gimplify_and_add (OMP_FOR_BODY (orig_for_stmt), &for_body);
726a989a 7102
acf0174b
JJ
7103 if (orig_for_stmt != for_stmt)
7104 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
7105 {
7106 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
7107 decl = TREE_OPERAND (t, 0);
7108 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
7109 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
7110 TREE_OPERAND (t, 0) = var;
7111 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
7112 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
7113 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
7114 }
7115
f014c653 7116 gimplify_adjust_omp_clauses (pre_p, &OMP_FOR_CLAUSES (orig_for_stmt));
953ff289 7117
74bf76ed 7118 int kind;
acf0174b 7119 switch (TREE_CODE (orig_for_stmt))
74bf76ed
JJ
7120 {
7121 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
7122 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
c02065fc 7123 case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
9a771876 7124 case CILK_FOR: kind = GF_OMP_FOR_KIND_CILKFOR; break;
acf0174b 7125 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
74bf76ed
JJ
7126 default:
7127 gcc_unreachable ();
7128 }
acf0174b 7129 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
726a989a
RB
7130 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
7131 for_pre_body);
acf0174b
JJ
7132 if (orig_for_stmt != for_stmt)
7133 gimple_omp_for_set_combined_p (gfor, true);
7134 if (gimplify_omp_ctxp
7135 && (gimplify_omp_ctxp->combined_loop
7136 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
7137 && gimplify_omp_ctxp->outer_context
7138 && gimplify_omp_ctxp->outer_context->combined_loop)))
7139 {
7140 gimple_omp_for_set_combined_into_p (gfor, true);
7141 if (gimplify_omp_ctxp->combined_loop)
7142 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
7143 else
7144 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
7145 }
726a989a
RB
7146
7147 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
7148 {
7149 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
7150 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
7151 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
7152 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
7153 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
7154 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
7155 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
7156 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
7157 }
7158
7159 gimplify_seq_add_stmt (pre_p, gfor);
74bf76ed
JJ
7160 if (ret != GS_ALL_DONE)
7161 return GS_ERROR;
7162 *expr_p = NULL_TREE;
7163 return GS_ALL_DONE;
953ff289
DN
7164}
7165
acf0174b
JJ
7166/* Gimplify the gross structure of other OpenMP constructs.
7167 In particular, OMP_SECTIONS, OMP_SINGLE, OMP_TARGET, OMP_TARGET_DATA
7168 and OMP_TEAMS. */
953ff289 7169
726a989a
RB
7170static void
7171gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
953ff289 7172{
726a989a
RB
7173 tree expr = *expr_p;
7174 gimple stmt;
7175 gimple_seq body = NULL;
acf0174b 7176 enum omp_region_type ort = ORT_WORKSHARE;
953ff289 7177
acf0174b
JJ
7178 switch (TREE_CODE (expr))
7179 {
7180 case OMP_SECTIONS:
7181 case OMP_SINGLE:
7182 break;
7183 case OMP_TARGET:
7184 ort = ORT_TARGET;
7185 break;
7186 case OMP_TARGET_DATA:
7187 ort = ORT_TARGET_DATA;
7188 break;
7189 case OMP_TEAMS:
7190 ort = ORT_TEAMS;
7191 break;
7192 default:
7193 gcc_unreachable ();
7194 }
7195 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort);
7196 if (ort == ORT_TARGET || ort == ORT_TARGET_DATA)
7197 {
45852dcc 7198 push_gimplify_context ();
acf0174b
JJ
7199 gimple g = gimplify_and_return_first (OMP_BODY (expr), &body);
7200 if (gimple_code (g) == GIMPLE_BIND)
7201 pop_gimplify_context (g);
7202 else
7203 pop_gimplify_context (NULL);
7204 if (ort == ORT_TARGET_DATA)
7205 {
7206 gimple_seq cleanup = NULL;
7207 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TARGET_END_DATA);
7208 g = gimple_build_call (fn, 0);
7209 gimple_seq_add_stmt (&cleanup, g);
7210 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
7211 body = NULL;
7212 gimple_seq_add_stmt (&body, g);
7213 }
7214 }
7215 else
7216 gimplify_and_add (OMP_BODY (expr), &body);
f014c653 7217 gimplify_adjust_omp_clauses (pre_p, &OMP_CLAUSES (expr));
953ff289 7218
acf0174b
JJ
7219 switch (TREE_CODE (expr))
7220 {
7221 case OMP_SECTIONS:
7222 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
7223 break;
7224 case OMP_SINGLE:
7225 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
7226 break;
7227 case OMP_TARGET:
7228 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
7229 OMP_CLAUSES (expr));
7230 break;
7231 case OMP_TARGET_DATA:
7232 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
7233 OMP_CLAUSES (expr));
7234 break;
7235 case OMP_TEAMS:
7236 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
7237 break;
7238 default:
7239 gcc_unreachable ();
7240 }
7241
7242 gimplify_seq_add_stmt (pre_p, stmt);
7243 *expr_p = NULL_TREE;
7244}

/* Gimplify the gross structure of OpenMP target update construct.  */

static void
gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gomp_target *stmt;

  gimplify_scan_omp_clauses (&OMP_TARGET_UPDATE_CLAUSES (expr), pre_p,
			     ORT_WORKSHARE);
  gimplify_adjust_omp_clauses (pre_p, &OMP_TARGET_UPDATE_CLAUSES (expr));
  stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_UPDATE,
				  OMP_TARGET_UPDATE_CLAUSES (expr));

  gimplify_seq_add_stmt (pre_p, stmt);
  *expr_p = NULL_TREE;
}
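
/* For example, a directive such as

     #pragma omp target update to(a) from(b)

   has no body at all: its clauses are scanned and adjusted as above and the
   result is a bodiless GIMPLE_OMP_TARGET statement carrying the
   GF_OMP_TARGET_KIND_UPDATE subcode.  */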

/* A subroutine of gimplify_omp_atomic.  The front end is supposed to have
   stabilized the lhs of the atomic operation as *ADDR.  Return true if
   EXPR is this stabilized form.  */

static bool
goa_lhs_expr_p (tree expr, tree addr)
{
  /* Also include casts to other type variants.  The C front end is fond
     of adding these for e.g. volatile variables.  This is like
     STRIP_TYPE_NOPS but includes the main variant lookup.  */
  STRIP_USELESS_TYPE_CONVERSION (expr);

  if (TREE_CODE (expr) == INDIRECT_REF)
    {
      expr = TREE_OPERAND (expr, 0);
      while (expr != addr
	     && (CONVERT_EXPR_P (expr)
		 || TREE_CODE (expr) == NON_LVALUE_EXPR)
	     && TREE_CODE (expr) == TREE_CODE (addr)
	     && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
	{
	  expr = TREE_OPERAND (expr, 0);
	  addr = TREE_OPERAND (addr, 0);
	}
      if (expr == addr)
	return true;
      return (TREE_CODE (addr) == ADDR_EXPR
	      && TREE_CODE (expr) == ADDR_EXPR
	      && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
    }
  if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
    return true;
  return false;
}
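
/* A concrete case the predicate above has to accept (assuming the usual
   C front-end behaviour for volatile operands): for

     volatile int x;
     #pragma omp atomic
     x += 1;

   the lhs is stabilized as *ADDR, and the operand may reach us as
   something like *(volatile int *) &x, i.e. with extra casts to type
   variants that the loop above peels off before comparing against ADDR.  */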

/* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR.  If an
   expression does not involve the lhs, evaluate it into a temporary.
   Return 1 if the lhs appeared as a subexpression, 0 if it did not,
   or -1 if an error was encountered.  */

static int
goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
		    tree lhs_var)
{
  tree expr = *expr_p;
  int saw_lhs;

  if (goa_lhs_expr_p (expr, lhs_addr))
    {
      *expr_p = lhs_var;
      return 1;
    }
  if (is_gimple_val (expr))
    return 0;

  saw_lhs = 0;
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_binary:
    case tcc_comparison:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
				     lhs_var);
    case tcc_unary:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
				     lhs_var);
      break;
    case tcc_expression:
      switch (TREE_CODE (expr))
	{
	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var);
	case TRUTH_NOT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var);
	  break;
	case COMPOUND_EXPR:
	  /* Break out any preevaluations from cp_build_modify_expr.  */
	  for (; TREE_CODE (expr) == COMPOUND_EXPR;
	       expr = TREE_OPERAND (expr, 1))
	    gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
	  *expr_p = expr;
	  return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
	default:
	  break;
	}
      break;
    default:
      break;
    }

  if (saw_lhs == 0)
    {
      enum gimplify_status gs;
      gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
      if (gs != GS_ALL_DONE)
	saw_lhs = -1;
    }

  return saw_lhs;
}

/* Gimplify an OMP_ATOMIC statement.  */

static enum gimplify_status
gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
{
  tree addr = TREE_OPERAND (*expr_p, 0);
  tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
	     ? NULL : TREE_OPERAND (*expr_p, 1);
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
  tree tmp_load;
  gomp_atomic_load *loadstmt;
  gomp_atomic_store *storestmt;

  tmp_load = create_tmp_reg (type);
  if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
    return GS_ERROR;

  if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
  gimplify_seq_add_stmt (pre_p, loadstmt);
  if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
    rhs = tmp_load;
  storestmt = gimple_build_omp_atomic_store (rhs);
  gimplify_seq_add_stmt (pre_p, storestmt);
  if (OMP_ATOMIC_SEQ_CST (*expr_p))
    {
      gimple_omp_atomic_set_seq_cst (loadstmt);
      gimple_omp_atomic_set_seq_cst (storestmt);
    }
  switch (TREE_CODE (*expr_p))
    {
    case OMP_ATOMIC_READ:
    case OMP_ATOMIC_CAPTURE_OLD:
      *expr_p = tmp_load;
      gimple_omp_atomic_set_need_value (loadstmt);
      break;
    case OMP_ATOMIC_CAPTURE_NEW:
      *expr_p = rhs;
      gimple_omp_atomic_set_need_value (storestmt);
      break;
    default:
      *expr_p = NULL;
      break;
    }

  return GS_ALL_DONE;
}

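/* Sketch of what the function above produces: for a capture form such as

     #pragma omp atomic capture
     { v = x; x = x + 1; }

   (OMP_ATOMIC_CAPTURE_OLD) the output is a GIMPLE_OMP_ATOMIC_LOAD of x into
   a temporary followed by a GIMPLE_OMP_ATOMIC_STORE of the stabilized rhs,
   and the value handed back in *EXPR_P is the loaded temporary (the old
   value); for the CAPTURE_NEW form it is the stored rhs instead, and for a
   plain atomic update nothing is returned at all.  */
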
/* Gimplify a TRANSACTION_EXPR.  This involves gimplification of the
   body, and adding some EH bits.  */

static enum gimplify_status
gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
  gimple body_stmt;
  gtransaction *trans_stmt;
  gimple_seq body = NULL;
  int subcode = 0;

  /* Wrap the transaction body in a BIND_EXPR so we have a context
     where to put decls for OpenMP.  */
  if (TREE_CODE (tbody) != BIND_EXPR)
    {
      tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
      TREE_SIDE_EFFECTS (bind) = 1;
      SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
      TRANSACTION_EXPR_BODY (expr) = bind;
    }

  push_gimplify_context ();
  temp = voidify_wrapper_expr (*expr_p, NULL);

  body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
  pop_gimplify_context (body_stmt);

  trans_stmt = gimple_build_transaction (body, NULL);
  if (TRANSACTION_EXPR_OUTER (expr))
    subcode = GTMA_IS_OUTER;
  else if (TRANSACTION_EXPR_RELAXED (expr))
    subcode = GTMA_IS_RELAXED;
  gimple_transaction_set_subcode (trans_stmt, subcode);

  gimplify_seq_add_stmt (pre_p, trans_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}

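/* For illustration, a transactional-memory construct such as

     __transaction_relaxed { a = b; }

   is wrapped in a BIND_EXPR if needed, its body gimplified, and the result
   emitted as a GIMPLE_TRANSACTION statement whose subcode records the
   relaxed (GTMA_IS_RELAXED) or outer (GTMA_IS_OUTER) flavour.  */
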
ad19c4be 7472/* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
726a989a
RB
7473 expression produces a value to be used as an operand inside a GIMPLE
7474 statement, the value will be stored back in *EXPR_P. This value will
7475 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
7476 an SSA_NAME. The corresponding sequence of GIMPLE statements is
7477 emitted in PRE_P and POST_P.
7478
7479 Additionally, this process may overwrite parts of the input
7480 expression during gimplification. Ideally, it should be
7481 possible to do non-destructive gimplification.
7482
7483 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
7484 the expression needs to evaluate to a value to be used as
7485 an operand in a GIMPLE statement, this value will be stored in
7486 *EXPR_P on exit. This happens when the caller specifies one
7487 of fb_lvalue or fb_rvalue fallback flags.
7488
7489 PRE_P will contain the sequence of GIMPLE statements corresponding
7490 to the evaluation of EXPR and all the side-effects that must
7491 be executed before the main expression. On exit, the last
7492 statement of PRE_P is the core statement being gimplified. For
7493 instance, when gimplifying 'if (++a)' the last statement in
7494 PRE_P will be 'if (t.1)' where t.1 is the result of
7495 pre-incrementing 'a'.
7496
7497 POST_P will contain the sequence of GIMPLE statements corresponding
7498 to the evaluation of all the side-effects that must be executed
7499 after the main expression. If this is NULL, the post
7500 side-effects are stored at the end of PRE_P.
7501
7502 The reason why the output is split in two is to handle post
7503 side-effects explicitly. In some cases, an expression may have
7504 inner and outer post side-effects which need to be emitted in
7505 an order different from the one given by the recursive
7506 traversal. For instance, for the expression (*p--)++ the post
7507 side-effects of '--' must actually occur *after* the post
7508 side-effects of '++'. However, gimplification will first visit
7509 the inner expression, so if a separate POST sequence was not
7510 used, the resulting sequence would be:
7511
7512 1 t.1 = *p
7513 2 p = p - 1
7514 3 t.2 = t.1 + 1
7515 4 *p = t.2
7516
7517 However, the post-decrement operation in line #2 must not be
7518 evaluated until after the store to *p at line #4, so the
7519 correct sequence should be:
7520
7521 1 t.1 = *p
7522 2 t.2 = t.1 + 1
7523 3 *p = t.2
7524 4 p = p - 1
7525
7526 So, by specifying a separate post queue, it is possible
7527 to emit the post side-effects in the correct order.
7528 If POST_P is NULL, an internal queue will be used. Before
7529 returning to the caller, the sequence POST_P is appended to
7530 the main output sequence PRE_P.
7531
7532 GIMPLE_TEST_F points to a function that takes a tree T and
7533 returns nonzero if T is in the GIMPLE form requested by the
12947319 7534 caller. The GIMPLE predicates are in gimple.c.
726a989a
RB
7535
7536 FALLBACK tells the function what sort of a temporary we want if
7537 gimplification cannot produce an expression that complies with
7538 GIMPLE_TEST_F.
7539
7540 fb_none means that no temporary should be generated
7541 fb_rvalue means that an rvalue is OK to generate
7542 fb_lvalue means that an lvalue is OK to generate
7543 fb_either means that either is OK, but an lvalue is preferable.
7544 fb_mayfail means that gimplification may fail (in which case
7545 GS_ERROR will be returned)
7546
7547 The return value is either GS_ERROR or GS_ALL_DONE, since this
7548 function iterates until EXPR is completely gimplified or an error
7549 occurs. */
6de9cd9a
DN
7550
7551enum gimplify_status
726a989a
RB
7552gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
7553 bool (*gimple_test_f) (tree), fallback_t fallback)
6de9cd9a
DN
7554{
7555 tree tmp;
726a989a
RB
7556 gimple_seq internal_pre = NULL;
7557 gimple_seq internal_post = NULL;
6de9cd9a 7558 tree save_expr;
726a989a 7559 bool is_statement;
6de9cd9a
DN
7560 location_t saved_location;
7561 enum gimplify_status ret;
726a989a 7562 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
6de9cd9a
DN
7563
7564 save_expr = *expr_p;
7565 if (save_expr == NULL_TREE)
7566 return GS_ALL_DONE;
7567
726a989a
RB
7568 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
7569 is_statement = gimple_test_f == is_gimple_stmt;
7570 if (is_statement)
7571 gcc_assert (pre_p);
7572
7573 /* Consistency checks. */
7574 if (gimple_test_f == is_gimple_reg)
7575 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
7576 else if (gimple_test_f == is_gimple_val
726a989a
RB
7577 || gimple_test_f == is_gimple_call_addr
7578 || gimple_test_f == is_gimple_condexpr
7579 || gimple_test_f == is_gimple_mem_rhs
ba4d8f9d 7580 || gimple_test_f == is_gimple_mem_rhs_or_call
726a989a 7581 || gimple_test_f == is_gimple_reg_rhs
ba4d8f9d 7582 || gimple_test_f == is_gimple_reg_rhs_or_call
70f34814
RG
7583 || gimple_test_f == is_gimple_asm_val
7584 || gimple_test_f == is_gimple_mem_ref_addr)
726a989a
RB
7585 gcc_assert (fallback & fb_rvalue);
7586 else if (gimple_test_f == is_gimple_min_lval
7587 || gimple_test_f == is_gimple_lvalue)
7588 gcc_assert (fallback & fb_lvalue);
7589 else if (gimple_test_f == is_gimple_addressable)
7590 gcc_assert (fallback & fb_either);
7591 else if (gimple_test_f == is_gimple_stmt)
7592 gcc_assert (fallback == fb_none);
7593 else
7594 {
7595 /* We should have recognized the GIMPLE_TEST_F predicate to
7596 know what kind of fallback to use in case a temporary is
7597 needed to hold the value or address of *EXPR_P. */
7598 gcc_unreachable ();
7599 }
7600
6de9cd9a
DN
7601 /* We used to check the predicate here and return immediately if it
7602 succeeds. This is wrong; the design is for gimplification to be
7603 idempotent, and for the predicates to only test for valid forms, not
7604 whether they are fully simplified. */
6de9cd9a
DN
7605 if (pre_p == NULL)
7606 pre_p = &internal_pre;
726a989a 7607
6de9cd9a
DN
7608 if (post_p == NULL)
7609 post_p = &internal_post;
7610
726a989a
RB
7611 /* Remember the last statements added to PRE_P and POST_P. Every
7612 new statement added by the gimplification helpers needs to be
7613 annotated with location information. To centralize the
7614 responsibility, we remember the last statement that had been
7615 added to both queues before gimplifying *EXPR_P. If
7616 gimplification produces new statements in PRE_P and POST_P, those
7617 statements will be annotated with the same location information
7618 as *EXPR_P. */
7619 pre_last_gsi = gsi_last (*pre_p);
7620 post_last_gsi = gsi_last (*post_p);
7621
6de9cd9a 7622 saved_location = input_location;
a281759f
PB
7623 if (save_expr != error_mark_node
7624 && EXPR_HAS_LOCATION (*expr_p))
7625 input_location = EXPR_LOCATION (*expr_p);
6de9cd9a
DN
7626
7627 /* Loop over the specific gimplifiers until the toplevel node
7628 remains the same. */
7629 do
7630 {
73d6ddef
RK
7631 /* Strip away as many useless type conversions as possible
7632 at the toplevel. */
7633 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
6de9cd9a
DN
7634
7635 /* Remember the expr. */
7636 save_expr = *expr_p;
7637
7638 /* Die, die, die, my darling. */
7639 if (save_expr == error_mark_node
726a989a 7640 || (TREE_TYPE (save_expr)
65355d53 7641 && TREE_TYPE (save_expr) == error_mark_node))
6de9cd9a
DN
7642 {
7643 ret = GS_ERROR;
7644 break;
7645 }
7646
7647 /* Do any language-specific gimplification. */
32e8bb8e
ILT
7648 ret = ((enum gimplify_status)
7649 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
6de9cd9a
DN
7650 if (ret == GS_OK)
7651 {
7652 if (*expr_p == NULL_TREE)
7653 break;
7654 if (*expr_p != save_expr)
7655 continue;
7656 }
7657 else if (ret != GS_UNHANDLED)
7658 break;
7659
941f78d1
JM
7660 /* Make sure that all the cases set 'ret' appropriately. */
7661 ret = GS_UNHANDLED;
6de9cd9a
DN
7662 switch (TREE_CODE (*expr_p))
7663 {
7664 /* First deal with the special cases. */
7665
7666 case POSTINCREMENT_EXPR:
7667 case POSTDECREMENT_EXPR:
7668 case PREINCREMENT_EXPR:
7669 case PREDECREMENT_EXPR:
7670 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
cc3c4f62
RB
7671 fallback != fb_none,
7672 TREE_TYPE (*expr_p));
6de9cd9a
DN
7673 break;
7674
0bd34ae4
RB
7675 case VIEW_CONVERT_EXPR:
7676 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
7677 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
7678 {
7679 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7680 post_p, is_gimple_val, fb_rvalue);
7681 recalculate_side_effects (*expr_p);
7682 break;
7683 }
7684 /* Fallthru. */
7685
6de9cd9a 7686 case ARRAY_REF:
44de5aeb
RK
7687 case ARRAY_RANGE_REF:
7688 case REALPART_EXPR:
7689 case IMAGPART_EXPR:
6de9cd9a
DN
7690 case COMPONENT_REF:
7691 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
90051e16 7692 fallback ? fallback : fb_rvalue);
6de9cd9a
DN
7693 break;
7694
7695 case COND_EXPR:
dae7ec87 7696 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
726a989a 7697
0223e4f5
JM
7698 /* C99 code may assign to an array in a structure value of a
7699 conditional expression, and this has undefined behavior
7700 only on execution, so create a temporary if an lvalue is
7701 required. */
7702 if (fallback == fb_lvalue)
7703 {
7704 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
936d04b6 7705 mark_addressable (*expr_p);
941f78d1 7706 ret = GS_OK;
0223e4f5 7707 }
6de9cd9a
DN
7708 break;
7709
7710 case CALL_EXPR:
90051e16 7711 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
726a989a 7712
0223e4f5
JM
7713 /* C99 code may assign to an array in a structure returned
7714 from a function, and this has undefined behavior only on
7715 execution, so create a temporary if an lvalue is
7716 required. */
7717 if (fallback == fb_lvalue)
7718 {
7719 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
936d04b6 7720 mark_addressable (*expr_p);
941f78d1 7721 ret = GS_OK;
0223e4f5 7722 }
6de9cd9a
DN
7723 break;
7724
7725 case TREE_LIST:
282899df 7726 gcc_unreachable ();
6de9cd9a
DN
7727
7728 case COMPOUND_EXPR:
7729 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
7730 break;
7731
2ec5deb5 7732 case COMPOUND_LITERAL_EXPR:
4c53d183
MM
7733 ret = gimplify_compound_literal_expr (expr_p, pre_p,
7734 gimple_test_f, fallback);
2ec5deb5
PB
7735 break;
7736
6de9cd9a
DN
7737 case MODIFY_EXPR:
7738 case INIT_EXPR:
ebad5233
JM
7739 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
7740 fallback != fb_none);
6de9cd9a
DN
7741 break;
7742
7743 case TRUTH_ANDIF_EXPR:
7744 case TRUTH_ORIF_EXPR:
1d15f620
KT
7745 {
7746 /* Preserve the original type of the expression and the
7747 source location of the outer expression. */
7748 tree org_type = TREE_TYPE (*expr_p);
7749 *expr_p = gimple_boolify (*expr_p);
4b4455e5 7750 *expr_p = build3_loc (input_location, COND_EXPR,
1d15f620
KT
7751 org_type, *expr_p,
7752 fold_convert_loc
4b4455e5 7753 (input_location,
1d15f620
KT
7754 org_type, boolean_true_node),
7755 fold_convert_loc
4b4455e5 7756 (input_location,
1d15f620
KT
7757 org_type, boolean_false_node));
7758 ret = GS_OK;
7759 break;
7760 }
6de9cd9a
DN
7761
7762 case TRUTH_NOT_EXPR:
3c6cbf7a 7763 {
53020648
RG
7764 tree type = TREE_TYPE (*expr_p);
7765 /* The parsers are careful to generate TRUTH_NOT_EXPR
7766 only with operands that are always zero or one.
7767 We do not fold here but handle the only interesting case
7768 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
3c6cbf7a 7769 *expr_p = gimple_boolify (*expr_p);
53020648
RG
7770 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
7771 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
7772 TREE_TYPE (*expr_p),
7773 TREE_OPERAND (*expr_p, 0));
7774 else
7775 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
7776 TREE_TYPE (*expr_p),
7777 TREE_OPERAND (*expr_p, 0),
7778 build_int_cst (TREE_TYPE (*expr_p), 1));
7779 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
7780 *expr_p = fold_convert_loc (input_location, type, *expr_p);
7781 ret = GS_OK;
bd5d002e 7782 break;
3c6cbf7a 7783 }
67339062 7784
6de9cd9a
DN
7785 case ADDR_EXPR:
7786 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
7787 break;
7788
8170608b
TB
7789 case ANNOTATE_EXPR:
7790 {
7791 tree cond = TREE_OPERAND (*expr_p, 0);
718c4601 7792 tree kind = TREE_OPERAND (*expr_p, 1);
664ceb1e
JJ
7793 tree type = TREE_TYPE (cond);
7794 if (!INTEGRAL_TYPE_P (type))
7795 {
7796 *expr_p = cond;
7797 ret = GS_OK;
7798 break;
7799 }
b731b390 7800 tree tmp = create_tmp_var (type);
8170608b 7801 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
538dd0b7 7802 gcall *call
718c4601 7803 = gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind);
8170608b
TB
7804 gimple_call_set_lhs (call, tmp);
7805 gimplify_seq_add_stmt (pre_p, call);
7806 *expr_p = tmp;
7807 ret = GS_ALL_DONE;
7808 break;
7809 }
7810
6de9cd9a 7811 case VA_ARG_EXPR:
cd3ce9b4 7812 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
6de9cd9a
DN
7813 break;
7814
1043771b 7815 CASE_CONVERT:
6de9cd9a
DN
7816 if (IS_EMPTY_STMT (*expr_p))
7817 {
7818 ret = GS_ALL_DONE;
7819 break;
7820 }
7821
7822 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
7823 || fallback == fb_none)
7824 {
7825 /* Just strip a conversion to void (or in void context) and
7826 try again. */
7827 *expr_p = TREE_OPERAND (*expr_p, 0);
941f78d1 7828 ret = GS_OK;
6de9cd9a
DN
7829 break;
7830 }
7831
7832 ret = gimplify_conversion (expr_p);
7833 if (ret == GS_ERROR)
7834 break;
7835 if (*expr_p != save_expr)
7836 break;
7837 /* FALLTHRU */
7838
7839 case FIX_TRUNC_EXPR:
6de9cd9a
DN
7840 /* unary_expr: ... | '(' cast ')' val | ... */
7841 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7842 is_gimple_val, fb_rvalue);
7843 recalculate_side_effects (*expr_p);
7844 break;
7845
6a720599 7846 case INDIRECT_REF:
70f34814
RG
7847 {
7848 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
3748f5c9 7849 bool notrap = TREE_THIS_NOTRAP (*expr_p);
70f34814
RG
7850 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
7851
7852 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
7853 if (*expr_p != save_expr)
7854 {
7855 ret = GS_OK;
7856 break;
7857 }
7858
7859 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7860 is_gimple_reg, fb_rvalue);
dca26746
RG
7861 if (ret == GS_ERROR)
7862 break;
70f34814 7863
dca26746 7864 recalculate_side_effects (*expr_p);
70f34814
RG
7865 *expr_p = fold_build2_loc (input_location, MEM_REF,
7866 TREE_TYPE (*expr_p),
7867 TREE_OPERAND (*expr_p, 0),
7868 build_int_cst (saved_ptr_type, 0));
7869 TREE_THIS_VOLATILE (*expr_p) = volatilep;
3748f5c9 7870 TREE_THIS_NOTRAP (*expr_p) = notrap;
70f34814
RG
7871 ret = GS_OK;
7872 break;
7873 }
7874
7875 /* We arrive here through the various re-gimplifcation paths. */
7876 case MEM_REF:
7877 /* First try re-folding the whole thing. */
7878 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
7879 TREE_OPERAND (*expr_p, 0),
7880 TREE_OPERAND (*expr_p, 1));
7881 if (tmp)
941f78d1 7882 {
70f34814
RG
7883 *expr_p = tmp;
7884 recalculate_side_effects (*expr_p);
941f78d1
JM
7885 ret = GS_OK;
7886 break;
7887 }
01718e96
RG
7888 /* Avoid re-gimplifying the address operand if it is already
7889 in suitable form. Re-gimplifying would mark the address
7890 operand addressable. Always gimplify when not in SSA form
7891 as we still may have to gimplify decls with value-exprs. */
7892 if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
7893 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
7894 {
7895 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7896 is_gimple_mem_ref_addr, fb_rvalue);
7897 if (ret == GS_ERROR)
7898 break;
7899 }
6de9cd9a 7900 recalculate_side_effects (*expr_p);
70f34814 7901 ret = GS_ALL_DONE;
6de9cd9a
DN
7902 break;
7903
01718e96 7904 /* Constants need not be gimplified. */
6de9cd9a
DN
7905 case INTEGER_CST:
7906 case REAL_CST:
325217ed 7907 case FIXED_CST:
6de9cd9a
DN
7908 case STRING_CST:
7909 case COMPLEX_CST:
7910 case VECTOR_CST:
3f5c390d
RB
7911 /* Drop the overflow flag on constants, we do not want
7912 that in the GIMPLE IL. */
7913 if (TREE_OVERFLOW_P (*expr_p))
7914 *expr_p = drop_tree_overflow (*expr_p);
6de9cd9a
DN
7915 ret = GS_ALL_DONE;
7916 break;
7917
7918 case CONST_DECL:
0534fa56 7919 /* If we require an lvalue, such as for ADDR_EXPR, retain the
2a7e31df 7920 CONST_DECL node. Otherwise the decl is replaceable by its
0534fa56
RH
7921 value. */
7922 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
7923 if (fallback & fb_lvalue)
7924 ret = GS_ALL_DONE;
7925 else
941f78d1
JM
7926 {
7927 *expr_p = DECL_INITIAL (*expr_p);
7928 ret = GS_OK;
7929 }
6de9cd9a
DN
7930 break;
7931
350fae66 7932 case DECL_EXPR:
726a989a 7933 ret = gimplify_decl_expr (expr_p, pre_p);
350fae66
RK
7934 break;
7935
6de9cd9a 7936 case BIND_EXPR:
c6c7698d 7937 ret = gimplify_bind_expr (expr_p, pre_p);
6de9cd9a
DN
7938 break;
7939
7940 case LOOP_EXPR:
7941 ret = gimplify_loop_expr (expr_p, pre_p);
7942 break;
7943
7944 case SWITCH_EXPR:
7945 ret = gimplify_switch_expr (expr_p, pre_p);
7946 break;
7947
6de9cd9a
DN
7948 case EXIT_EXPR:
7949 ret = gimplify_exit_expr (expr_p);
7950 break;
7951
7952 case GOTO_EXPR:
7953 /* If the target is not LABEL, then it is a computed jump
7954 and the target needs to be gimplified. */
7955 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
8c50b495
JJ
7956 {
7957 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
7958 NULL, is_gimple_val, fb_rvalue);
7959 if (ret == GS_ERROR)
7960 break;
7961 }
726a989a
RB
7962 gimplify_seq_add_stmt (pre_p,
7963 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
941f78d1 7964 ret = GS_ALL_DONE;
6de9cd9a
DN
7965 break;
7966
2e28e797 7967 case PREDICT_EXPR:
726a989a
RB
7968 gimplify_seq_add_stmt (pre_p,
7969 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
7970 PREDICT_EXPR_OUTCOME (*expr_p)));
7971 ret = GS_ALL_DONE;
7972 break;
2e28e797 7973
6de9cd9a
DN
7974 case LABEL_EXPR:
7975 ret = GS_ALL_DONE;
282899df
NS
7976 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
7977 == current_function_decl);
726a989a
RB
7978 gimplify_seq_add_stmt (pre_p,
7979 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
6de9cd9a
DN
7980 break;
7981
7982 case CASE_LABEL_EXPR:
726a989a 7983 ret = gimplify_case_label_expr (expr_p, pre_p);
6de9cd9a
DN
7984 break;
7985
7986 case RETURN_EXPR:
7987 ret = gimplify_return_expr (*expr_p, pre_p);
7988 break;
7989
7990 case CONSTRUCTOR:
48eb4e53
RK
7991 /* Don't reduce this in place; let gimplify_init_constructor work its
7992 magic. Buf if we're just elaborating this for side effects, just
7993 gimplify any element that has side-effects. */
7994 if (fallback == fb_none)
7995 {
4038c495 7996 unsigned HOST_WIDE_INT ix;
ac47786e 7997 tree val;
08330ec2 7998 tree temp = NULL_TREE;
ac47786e
NF
7999 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
8000 if (TREE_SIDE_EFFECTS (val))
8001 append_to_statement_list (val, &temp);
48eb4e53 8002
08330ec2 8003 *expr_p = temp;
941f78d1 8004 ret = temp ? GS_OK : GS_ALL_DONE;
48eb4e53 8005 }
ca0b7d18
AP
8006 /* C99 code may assign to an array in a constructed
8007 structure or union, and this has undefined behavior only
8008 on execution, so create a temporary if an lvalue is
8009 required. */
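 /* For illustration (schematic example, not from the original
 sources): an access such as ((struct S) { .a = { 0 } }).a[i]
 used where an lvalue is needed ends up here; the compound
 literal's CONSTRUCTOR is forced into an addressable temporary. */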
8010 else if (fallback == fb_lvalue)
8011 {
8012 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
936d04b6 8013 mark_addressable (*expr_p);
941f78d1 8014 ret = GS_OK;
ca0b7d18 8015 }
08330ec2
AP
8016 else
8017 ret = GS_ALL_DONE;
6de9cd9a
DN
8018 break;
8019
8020 /* The following are special cases that are not handled by the
8021 original GIMPLE grammar. */
8022
8023 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
8024 eliminated. */
8025 case SAVE_EXPR:
8026 ret = gimplify_save_expr (expr_p, pre_p, post_p);
8027 break;
8028
8029 case BIT_FIELD_REF:
ea814c66
EB
8030 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8031 post_p, is_gimple_lvalue, fb_either);
8032 recalculate_side_effects (*expr_p);
6de9cd9a
DN
8033 break;
8034
150e3929
RG
8035 case TARGET_MEM_REF:
8036 {
8037 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
8038
23a534a1 8039 if (TMR_BASE (*expr_p))
150e3929 8040 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
4d948885 8041 post_p, is_gimple_mem_ref_addr, fb_either);
150e3929
RG
8042 if (TMR_INDEX (*expr_p))
8043 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
8044 post_p, is_gimple_val, fb_rvalue);
4d948885
RG
8045 if (TMR_INDEX2 (*expr_p))
8046 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
8047 post_p, is_gimple_val, fb_rvalue);
150e3929
RG
8048 /* TMR_STEP and TMR_OFFSET are always integer constants. */
8049 ret = MIN (r0, r1);
8050 }
8051 break;
8052
6de9cd9a
DN
8053 case NON_LVALUE_EXPR:
8054 /* This should have been stripped above. */
282899df 8055 gcc_unreachable ();
6de9cd9a
DN
8056
8057 case ASM_EXPR:
8058 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
8059 break;
8060
8061 case TRY_FINALLY_EXPR:
8062 case TRY_CATCH_EXPR:
726a989a
RB
8063 {
8064 gimple_seq eval, cleanup;
538dd0b7 8065 gtry *try_;
726a989a 8066
820055a0
DC
8067 /* Calls to destructors are generated automatically in FINALLY/CATCH
8068 blocks. Their location should be UNKNOWN_LOCATION. However,
8069 gimplify_call_expr will reset these call stmts to input_location
8070 if it finds the stmt's location is unknown. To prevent this
8071 resetting for destructors, we set input_location to UNKNOWN_LOCATION.
8072 Note that this only affects the destructor calls in the FINALLY/CATCH
8073 block; input_location will automatically be reset to its original
8074 value by the end of gimplify_expr. */
8075 input_location = UNKNOWN_LOCATION;
726a989a
RB
8076 eval = cleanup = NULL;
8077 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
8078 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
cc8b343d
JJ
8079 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
8080 if (gimple_seq_empty_p (cleanup))
8081 {
8082 gimple_seq_add_seq (pre_p, eval);
8083 ret = GS_ALL_DONE;
8084 break;
8085 }
726a989a
RB
8086 try_ = gimple_build_try (eval, cleanup,
8087 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
8088 ? GIMPLE_TRY_FINALLY
8089 : GIMPLE_TRY_CATCH);
e368f44f
DC
8090 if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
8091 gimple_set_location (try_, saved_location);
8092 else
8093 gimple_set_location (try_, EXPR_LOCATION (save_expr));
726a989a
RB
8094 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
8095 gimple_try_set_catch_is_cleanup (try_,
8096 TRY_CATCH_IS_CLEANUP (*expr_p));
8097 gimplify_seq_add_stmt (pre_p, try_);
8098 ret = GS_ALL_DONE;
8099 break;
8100 }
6de9cd9a
DN
8101
8102 case CLEANUP_POINT_EXPR:
8103 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
8104 break;
8105
8106 case TARGET_EXPR:
8107 ret = gimplify_target_expr (expr_p, pre_p, post_p);
8108 break;
8109
8110 case CATCH_EXPR:
726a989a
RB
8111 {
8112 gimple c;
8113 gimple_seq handler = NULL;
8114 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
8115 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
8116 gimplify_seq_add_stmt (pre_p, c);
8117 ret = GS_ALL_DONE;
8118 break;
8119 }
6de9cd9a
DN
8120
8121 case EH_FILTER_EXPR:
726a989a
RB
8122 {
8123 gimple ehf;
8124 gimple_seq failure = NULL;
8125
8126 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
8127 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
d665b6e5 8128 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
726a989a
RB
8129 gimplify_seq_add_stmt (pre_p, ehf);
8130 ret = GS_ALL_DONE;
8131 break;
8132 }
6de9cd9a 8133
0f59171d
RH
8134 case OBJ_TYPE_REF:
8135 {
8136 enum gimplify_status r0, r1;
726a989a
RB
8137 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
8138 post_p, is_gimple_val, fb_rvalue);
8139 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
8140 post_p, is_gimple_val, fb_rvalue);
0f3a057a 8141 TREE_SIDE_EFFECTS (*expr_p) = 0;
0f59171d
RH
8142 ret = MIN (r0, r1);
8143 }
6de9cd9a
DN
8144 break;
8145
6de9cd9a
DN
8146 case LABEL_DECL:
8147 /* We get here when taking the address of a label. We mark
8148 the label as "forced", meaning it can never be removed and
8149 it is a potential target for any computed goto. */
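 /* For example, the GNU labels-as-values extension
 "void *p = &&lab;" takes the address of LAB and lands here. */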
8150 FORCED_LABEL (*expr_p) = 1;
8151 ret = GS_ALL_DONE;
8152 break;
8153
8154 case STATEMENT_LIST:
c6c7698d 8155 ret = gimplify_statement_list (expr_p, pre_p);
6de9cd9a
DN
8156 break;
8157
d25cee4d
RH
8158 case WITH_SIZE_EXPR:
8159 {
70e2829d
KH
8160 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8161 post_p == &internal_post ? NULL : post_p,
8162 gimple_test_f, fallback);
8163 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
8164 is_gimple_val, fb_rvalue);
941f78d1 8165 ret = GS_ALL_DONE;
d25cee4d
RH
8166 }
8167 break;
8168
6de9cd9a 8169 case VAR_DECL:
4744afba 8170 case PARM_DECL:
a9f7c570 8171 ret = gimplify_var_or_parm_decl (expr_p);
6de9cd9a
DN
8172 break;
8173
077b0dfb
JJ
8174 case RESULT_DECL:
8175 /* When within an OpenMP context, notice uses of variables. */
8176 if (gimplify_omp_ctxp)
8177 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
8178 ret = GS_ALL_DONE;
8179 break;
8180
71956db3
RH
8181 case SSA_NAME:
8182 /* Allow callbacks into the gimplifier during optimization. */
8183 ret = GS_ALL_DONE;
8184 break;
8185
953ff289 8186 case OMP_PARALLEL:
726a989a
RB
8187 gimplify_omp_parallel (expr_p, pre_p);
8188 ret = GS_ALL_DONE;
953ff289
DN
8189 break;
8190
a68ab351 8191 case OMP_TASK:
726a989a
RB
8192 gimplify_omp_task (expr_p, pre_p);
8193 ret = GS_ALL_DONE;
a68ab351
JJ
8194 break;
8195
953ff289 8196 case OMP_FOR:
74bf76ed 8197 case OMP_SIMD:
c02065fc 8198 case CILK_SIMD:
9a771876 8199 case CILK_FOR:
acf0174b 8200 case OMP_DISTRIBUTE:
953ff289
DN
8201 ret = gimplify_omp_for (expr_p, pre_p);
8202 break;
8203
8204 case OMP_SECTIONS:
8205 case OMP_SINGLE:
acf0174b
JJ
8206 case OMP_TARGET:
8207 case OMP_TARGET_DATA:
8208 case OMP_TEAMS:
726a989a
RB
8209 gimplify_omp_workshare (expr_p, pre_p);
8210 ret = GS_ALL_DONE;
953ff289
DN
8211 break;
8212
acf0174b
JJ
8213 case OMP_TARGET_UPDATE:
8214 gimplify_omp_target_update (expr_p, pre_p);
8215 ret = GS_ALL_DONE;
8216 break;
8217
953ff289
DN
8218 case OMP_SECTION:
8219 case OMP_MASTER:
acf0174b 8220 case OMP_TASKGROUP:
953ff289
DN
8221 case OMP_ORDERED:
8222 case OMP_CRITICAL:
726a989a
RB
8223 {
8224 gimple_seq body = NULL;
8225 gimple g;
8226
8227 gimplify_and_add (OMP_BODY (*expr_p), &body);
8228 switch (TREE_CODE (*expr_p))
8229 {
8230 case OMP_SECTION:
8231 g = gimple_build_omp_section (body);
8232 break;
8233 case OMP_MASTER:
8234 g = gimple_build_omp_master (body);
8235 break;
acf0174b
JJ
8236 case OMP_TASKGROUP:
8237 {
8238 gimple_seq cleanup = NULL;
8239 tree fn
8240 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
8241 g = gimple_build_call (fn, 0);
8242 gimple_seq_add_stmt (&cleanup, g);
8243 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
8244 body = NULL;
8245 gimple_seq_add_stmt (&body, g);
8246 g = gimple_build_omp_taskgroup (body);
8247 }
8248 break;
726a989a
RB
8249 case OMP_ORDERED:
8250 g = gimple_build_omp_ordered (body);
8251 break;
8252 case OMP_CRITICAL:
8253 g = gimple_build_omp_critical (body,
8254 OMP_CRITICAL_NAME (*expr_p));
8255 break;
8256 default:
8257 gcc_unreachable ();
8258 }
8259 gimplify_seq_add_stmt (pre_p, g);
8260 ret = GS_ALL_DONE;
8261 break;
8262 }
953ff289
DN
8263
8264 case OMP_ATOMIC:
20906c66
JJ
8265 case OMP_ATOMIC_READ:
8266 case OMP_ATOMIC_CAPTURE_OLD:
8267 case OMP_ATOMIC_CAPTURE_NEW:
953ff289
DN
8268 ret = gimplify_omp_atomic (expr_p, pre_p);
8269 break;
8270
0a35513e
AH
8271 case TRANSACTION_EXPR:
8272 ret = gimplify_transaction (expr_p, pre_p);
8273 break;
8274
16949072
RG
8275 case TRUTH_AND_EXPR:
8276 case TRUTH_OR_EXPR:
8277 case TRUTH_XOR_EXPR:
1d15f620 8278 {
bd5d002e 8279 tree orig_type = TREE_TYPE (*expr_p);
fc1f4caf 8280 tree new_type, xop0, xop1;
1d15f620 8281 *expr_p = gimple_boolify (*expr_p);
fc1f4caf
KT
8282 new_type = TREE_TYPE (*expr_p);
8283 if (!useless_type_conversion_p (orig_type, new_type))
1d15f620 8284 {
4b4455e5 8285 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
1d15f620
KT
8286 ret = GS_OK;
8287 break;
8288 }
da5fb469 8289
bd5d002e
RG
8290 /* Boolified binary truth expressions are semantically equivalent
8291 to bitwise binary expressions. Canonicalize them to the
8292 bitwise variant. */
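 /* For example, a TRUTH_AND_EXPR of two boolean operands is
 rewritten below as a BIT_AND_EXPR of the same operands; the
 result is unchanged because both operands are known to be 0 or 1. */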
8293 switch (TREE_CODE (*expr_p))
8294 {
8295 case TRUTH_AND_EXPR:
8296 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
8297 break;
8298 case TRUTH_OR_EXPR:
8299 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
8300 break;
8301 case TRUTH_XOR_EXPR:
8302 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
8303 break;
8304 default:
8305 break;
8306 }
fc1f4caf
KT
8307 /* Now make sure that operands have compatible type to
8308 expression's new_type. */
8309 xop0 = TREE_OPERAND (*expr_p, 0);
8310 xop1 = TREE_OPERAND (*expr_p, 1);
8311 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
8312 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
8313 new_type,
8314 xop0);
8315 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
8316 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
8317 new_type,
8318 xop1);
bd5d002e
RG
8319 /* Continue classified as tcc_binary. */
8320 goto expr_2;
da5fb469 8321 }
16949072
RG
8322
8323 case FMA_EXPR:
e6ed43b0 8324 case VEC_COND_EXPR:
2205ed25 8325 case VEC_PERM_EXPR:
16949072
RG
8326 /* Classified as tcc_expression. */
8327 goto expr_3;
8328
5be014d5 8329 case POINTER_PLUS_EXPR:
315f5f1b
RG
8330 {
8331 enum gimplify_status r0, r1;
8332 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8333 post_p, is_gimple_val, fb_rvalue);
8334 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8335 post_p, is_gimple_val, fb_rvalue);
8336 recalculate_side_effects (*expr_p);
8337 ret = MIN (r0, r1);
8338 /* Convert &X + CST to invariant &MEM[&X, CST]. Do this
8339 after gimplifying operands - this is similar to how
8340 it would be folding all gimplified stmts on creation
8341 to have them canonicalized, which is what we eventually
8342 should do anyway. */
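 /* Schematically (illustrative, not exact dump syntax): for a
 global array A, "&A p+ 4" becomes "&MEM[&A + 4]", which is
 again a gimple invariant. */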
8343 if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
8344 && is_gimple_min_invariant (TREE_OPERAND (*expr_p, 0)))
8345 {
8346 *expr_p = build_fold_addr_expr_with_type_loc
8347 (input_location,
8348 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (*expr_p)),
8349 TREE_OPERAND (*expr_p, 0),
8350 fold_convert (ptr_type_node,
8351 TREE_OPERAND (*expr_p, 1))),
8352 TREE_TYPE (*expr_p));
8353 ret = MIN (ret, GS_OK);
8354 }
8355 break;
8356 }
726a989a 8357
939b37da
BI
8358 case CILK_SYNC_STMT:
8359 {
8360 if (!fn_contains_cilk_spawn_p (cfun))
8361 {
8362 error_at (EXPR_LOCATION (*expr_p),
8363 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
8364 ret = GS_ERROR;
8365 }
8366 else
8367 {
8368 gimplify_cilk_sync (expr_p, pre_p);
8369 ret = GS_ALL_DONE;
8370 }
8371 break;
8372 }
8373
6de9cd9a 8374 default:
282899df 8375 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
6de9cd9a 8376 {
6615c446 8377 case tcc_comparison:
61c25908
OH
8378 /* Handle comparison of non-scalar-mode aggregate objects
8379 with a call to memcmp. It would be nice to only have to do
8380 this for variable-sized objects, but then we'd have to allow
8381 the same nest of reference nodes we allow for MODIFY_EXPR and
8382 that's too complex.
8383
8384 Compare scalar mode aggregates as scalar mode values. Using
8385 memcmp for them would be very inefficient at best, and is
8386 plain wrong if bitfields are involved. */
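 /* For illustration: an aggregate whose mode is, say, SImode is
 compared below by viewing both operands as scalar values
 (gimplify_scalar_mode_aggregate_compare), while a BLKmode
 aggregate is compared via a call to memcmp
 (gimplify_variable_sized_compare). */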
726a989a
RB
8387 {
8388 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
61c25908 8389
544d960a
AS
8390 /* Vector comparisons need no boolification. */
8391 if (TREE_CODE (type) == VECTOR_TYPE)
8392 goto expr_2;
8393 else if (!AGGREGATE_TYPE_P (type))
7f3ff782
KT
8394 {
8395 tree org_type = TREE_TYPE (*expr_p);
8396 *expr_p = gimple_boolify (*expr_p);
8397 if (!useless_type_conversion_p (org_type,
8398 TREE_TYPE (*expr_p)))
8399 {
8400 *expr_p = fold_convert_loc (input_location,
8401 org_type, *expr_p);
8402 ret = GS_OK;
8403 }
8404 else
8405 goto expr_2;
8406 }
726a989a
RB
8407 else if (TYPE_MODE (type) != BLKmode)
8408 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
8409 else
8410 ret = gimplify_variable_sized_compare (expr_p);
61c25908 8411
726a989a 8412 break;
61c25908 8413 }
d3147f64 8414
282899df
NS
8415 /* If *EXPR_P does not need to be special-cased, handle it
8416 according to its class. */
6615c446 8417 case tcc_unary:
282899df
NS
8418 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8419 post_p, is_gimple_val, fb_rvalue);
8420 break;
6de9cd9a 8421
6615c446 8422 case tcc_binary:
282899df
NS
8423 expr_2:
8424 {
8425 enum gimplify_status r0, r1;
d3147f64 8426
282899df 8427 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
726a989a 8428 post_p, is_gimple_val, fb_rvalue);
282899df
NS
8429 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8430 post_p, is_gimple_val, fb_rvalue);
d3147f64 8431
282899df
NS
8432 ret = MIN (r0, r1);
8433 break;
8434 }
d3147f64 8435
16949072
RG
8436 expr_3:
8437 {
8438 enum gimplify_status r0, r1, r2;
8439
8440 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8441 post_p, is_gimple_val, fb_rvalue);
8442 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8443 post_p, is_gimple_val, fb_rvalue);
8444 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
8445 post_p, is_gimple_val, fb_rvalue);
8446
8447 ret = MIN (MIN (r0, r1), r2);
8448 break;
8449 }
8450
6615c446
JO
8451 case tcc_declaration:
8452 case tcc_constant:
6de9cd9a 8453 ret = GS_ALL_DONE;
282899df 8454 goto dont_recalculate;
d3147f64 8455
282899df 8456 default:
16949072 8457 gcc_unreachable ();
6de9cd9a 8458 }
6de9cd9a
DN
8459
8460 recalculate_side_effects (*expr_p);
726a989a 8461
282899df 8462 dont_recalculate:
6de9cd9a
DN
8463 break;
8464 }
d3147f64 8465
941f78d1 8466 gcc_assert (*expr_p || ret != GS_OK);
6de9cd9a
DN
8467 }
8468 while (ret == GS_OK);
8469
8470 /* If we encountered an error_mark somewhere nested inside, either
8471 stub out the statement or propagate the error back out. */
8472 if (ret == GS_ERROR)
8473 {
8474 if (is_statement)
65355d53 8475 *expr_p = NULL;
6de9cd9a
DN
8476 goto out;
8477 }
8478
6de9cd9a
DN
8479 /* This was only valid as a return value from the langhook, which
8480 we handled. Make sure it doesn't escape from any other context. */
282899df 8481 gcc_assert (ret != GS_UNHANDLED);
6de9cd9a 8482
65355d53 8483 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
6de9cd9a
DN
8484 {
8485 /* We aren't looking for a value, and we don't have a valid
8486 statement. If it doesn't have side-effects, throw it away. */
8487 if (!TREE_SIDE_EFFECTS (*expr_p))
65355d53 8488 *expr_p = NULL;
6de9cd9a 8489 else if (!TREE_THIS_VOLATILE (*expr_p))
44de5aeb
RK
8490 {
8491 /* This is probably a _REF that contains something nested that
8492 has side effects. Recurse through the operands to find it. */
8493 enum tree_code code = TREE_CODE (*expr_p);
8494
282899df 8495 switch (code)
44de5aeb 8496 {
282899df 8497 case COMPONENT_REF:
02a5eac4
EB
8498 case REALPART_EXPR:
8499 case IMAGPART_EXPR:
8500 case VIEW_CONVERT_EXPR:
282899df
NS
8501 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8502 gimple_test_f, fallback);
8503 break;
8504
a9e64c63
EB
8505 case ARRAY_REF:
8506 case ARRAY_RANGE_REF:
44de5aeb
RK
8507 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8508 gimple_test_f, fallback);
8509 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
282899df
NS
8510 gimple_test_f, fallback);
8511 break;
8512
8513 default:
8514 /* Anything else with side-effects must be converted to
a9e64c63 8515 a valid statement before we get here. */
282899df 8516 gcc_unreachable ();
44de5aeb 8517 }
44de5aeb 8518
65355d53 8519 *expr_p = NULL;
44de5aeb 8520 }
a9e64c63
EB
8521 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
8522 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
6de9cd9a 8523 {
a9e64c63
EB
8524 /* Historically, the compiler has treated a bare reference
8525 to a non-BLKmode volatile lvalue as forcing a load. */
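 /* For example, a statement consisting only of "v;" for
 "volatile int v;" still emits a load: a temporary is created
 below and assigned from *EXPR_P so the read is not lost. */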
af62f6f9 8526 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
726a989a 8527
c22b1771 8528 /* Normally, we do not want to create a temporary for a
a38578e1
MM
8529 TREE_ADDRESSABLE type because such a type should not be
8530 copied by bitwise-assignment. However, we make an
8531 exception here, as all we are doing here is ensuring that
8532 we read the bytes that make up the type. We use
8533 create_tmp_var_raw because create_tmp_var will abort when
57b51d4d 8534 given a TREE_ADDRESSABLE type. */
a38578e1
MM
8535 tree tmp = create_tmp_var_raw (type, "vol");
8536 gimple_add_tmp_var (tmp);
726a989a
RB
8537 gimplify_assign (tmp, *expr_p, pre_p);
8538 *expr_p = NULL;
6de9cd9a
DN
8539 }
8540 else
8541 /* We can't do anything useful with a volatile reference to
a9e64c63
EB
8542 an incomplete type, so just throw it away. Likewise for
8543 a BLKmode type, since any implicit inner load should
8544 already have been turned into an explicit one by the
8545 gimplification process. */
65355d53 8546 *expr_p = NULL;
6de9cd9a
DN
8547 }
8548
8549 /* If we are gimplifying at the statement level, we're done. Tack
726a989a 8550 everything together and return. */
325c3691 8551 if (fallback == fb_none || is_statement)
6de9cd9a 8552 {
726a989a
RB
8553 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
8554 it out for GC to reclaim it. */
8555 *expr_p = NULL_TREE;
8556
8557 if (!gimple_seq_empty_p (internal_pre)
8558 || !gimple_seq_empty_p (internal_post))
be00f578 8559 {
726a989a
RB
8560 gimplify_seq_add_seq (&internal_pre, internal_post);
8561 gimplify_seq_add_seq (pre_p, internal_pre);
be00f578 8562 }
726a989a
RB
8563
8564 /* The result of gimplifying *EXPR_P is going to be the last few
8565 statements in *PRE_P and *POST_P. Add location information
8566 to all the statements that were added by the gimplification
8567 helpers. */
8568 if (!gimple_seq_empty_p (*pre_p))
8569 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
8570
8571 if (!gimple_seq_empty_p (*post_p))
8572 annotate_all_with_location_after (*post_p, post_last_gsi,
8573 input_location);
8574
6de9cd9a
DN
8575 goto out;
8576 }
8577
726a989a
RB
8578#ifdef ENABLE_GIMPLE_CHECKING
8579 if (*expr_p)
8580 {
8581 enum tree_code code = TREE_CODE (*expr_p);
8582 /* These expressions should already be in gimple IR form. */
8583 gcc_assert (code != MODIFY_EXPR
8584 && code != ASM_EXPR
8585 && code != BIND_EXPR
8586 && code != CATCH_EXPR
6fc4fb06 8587 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
726a989a
RB
8588 && code != EH_FILTER_EXPR
8589 && code != GOTO_EXPR
8590 && code != LABEL_EXPR
8591 && code != LOOP_EXPR
726a989a
RB
8592 && code != SWITCH_EXPR
8593 && code != TRY_FINALLY_EXPR
8594 && code != OMP_CRITICAL
8595 && code != OMP_FOR
8596 && code != OMP_MASTER
acf0174b 8597 && code != OMP_TASKGROUP
726a989a
RB
8598 && code != OMP_ORDERED
8599 && code != OMP_PARALLEL
8600 && code != OMP_SECTIONS
8601 && code != OMP_SECTION
8602 && code != OMP_SINGLE);
8603 }
8604#endif
6de9cd9a 8605
726a989a
RB
8606 /* Otherwise we're gimplifying a subexpression, so the resulting
8607 value is interesting. If it's a valid operand that matches
8608 GIMPLE_TEST_F, we're done. Unless we are handling some
8609 post-effects internally; if that's the case, we need to copy into
8610 a temporary before adding the post-effects to POST_P. */
8611 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
6de9cd9a
DN
8612 goto out;
8613
8614 /* Otherwise, we need to create a new temporary for the gimplified
8615 expression. */
8616
8617 /* We can't return an lvalue if we have an internal postqueue. The
8618 object the lvalue refers to would (probably) be modified by the
8619 postqueue; we need to copy the value out first, which means an
8620 rvalue. */
726a989a
RB
8621 if ((fallback & fb_lvalue)
8622 && gimple_seq_empty_p (internal_post)
e847cc68 8623 && is_gimple_addressable (*expr_p))
6de9cd9a
DN
8624 {
8625 /* An lvalue will do. Take the address of the expression, store it
8626 in a temporary, and replace the expression with an INDIRECT_REF of
8627 that temporary. */
db3927fb 8628 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
6de9cd9a 8629 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
7f5ad6d7 8630 *expr_p = build_simple_mem_ref (tmp);
6de9cd9a 8631 }
ba4d8f9d 8632 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
6de9cd9a 8633 {
726a989a
RB
8634 /* An rvalue will do. Assign the gimplified expression into a
8635 new temporary TMP and replace the original expression with
8636 TMP. First, make sure that the expression has a type so that
8637 it can be assigned into a temporary. */
282899df 8638 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
412a1d9e 8639 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
6de9cd9a 8640 }
282899df 8641 else
6de9cd9a 8642 {
726a989a 8643#ifdef ENABLE_GIMPLE_CHECKING
282899df
NS
8644 if (!(fallback & fb_mayfail))
8645 {
8646 fprintf (stderr, "gimplification failed:\n");
8647 print_generic_expr (stderr, *expr_p, 0);
8648 debug_tree (*expr_p);
8649 internal_error ("gimplification failed");
8650 }
8651#endif
8652 gcc_assert (fallback & fb_mayfail);
726a989a 8653
282899df 8654 /* If this is an asm statement, and the user asked for the
535a42b1 8655 impossible, don't die. Fail and let gimplify_asm_expr
282899df 8656 issue an error. */
6de9cd9a
DN
8657 ret = GS_ERROR;
8658 goto out;
8659 }
6de9cd9a 8660
6de9cd9a 8661 /* Make sure the temporary matches our predicate. */
282899df 8662 gcc_assert ((*gimple_test_f) (*expr_p));
6de9cd9a 8663
726a989a 8664 if (!gimple_seq_empty_p (internal_post))
6de9cd9a 8665 {
726a989a
RB
8666 annotate_all_with_location (internal_post, input_location);
8667 gimplify_seq_add_seq (pre_p, internal_post);
6de9cd9a
DN
8668 }
8669
8670 out:
8671 input_location = saved_location;
8672 return ret;
8673}
8674
44de5aeb 8675/* Look through TYPE for variable-sized objects and gimplify each such
65355d53 8676 size that we find. Add to LIST_P any statements generated. */
44de5aeb 8677
65355d53 8678void
726a989a 8679gimplify_type_sizes (tree type, gimple_seq *list_p)
44de5aeb 8680{
ad50bc8d
RH
8681 tree field, t;
8682
19dbbf36 8683 if (type == NULL || type == error_mark_node)
8e0a600b 8684 return;
ad50bc8d 8685
6c6cfbfd 8686 /* We first do the main variant, then copy into any other variants. */
ad50bc8d 8687 type = TYPE_MAIN_VARIANT (type);
44de5aeb 8688
8e0a600b 8689 /* Avoid infinite recursion. */
19dbbf36 8690 if (TYPE_SIZES_GIMPLIFIED (type))
8e0a600b
JJ
8691 return;
8692
8693 TYPE_SIZES_GIMPLIFIED (type) = 1;
8694
44de5aeb
RK
8695 switch (TREE_CODE (type))
8696 {
44de5aeb
RK
8697 case INTEGER_TYPE:
8698 case ENUMERAL_TYPE:
8699 case BOOLEAN_TYPE:
44de5aeb 8700 case REAL_TYPE:
325217ed 8701 case FIXED_POINT_TYPE:
65355d53
RH
8702 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
8703 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
ad50bc8d
RH
8704
8705 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
8706 {
8707 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
8708 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
ad50bc8d 8709 }
44de5aeb
RK
8710 break;
8711
8712 case ARRAY_TYPE:
ad50bc8d 8713 /* These types may not have declarations, so handle them here. */
8e0a600b
JJ
8714 gimplify_type_sizes (TREE_TYPE (type), list_p);
8715 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
2e957792
JJ
8716 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
8717 with assigned stack slots, for -O1+ -g they should be tracked
8718 by VTA. */
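 /* For illustration (assumed example): for "int a[n]" the upper
 bound is an artificial VAR_DECL holding the gimplified value of
 "n - 1"; clearing DECL_IGNORED_P below keeps it visible to the
 debugger. */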
08d78391
EB
8719 if (!(TYPE_NAME (type)
8720 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
8721 && DECL_IGNORED_P (TYPE_NAME (type)))
8722 && TYPE_DOMAIN (type)
802e9f8e
JJ
8723 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
8724 {
8725 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8726 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
8727 DECL_IGNORED_P (t) = 0;
8728 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8729 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
8730 DECL_IGNORED_P (t) = 0;
8731 }
44de5aeb
RK
8732 break;
8733
8734 case RECORD_TYPE:
8735 case UNION_TYPE:
8736 case QUAL_UNION_TYPE:
910ad8de 8737 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
44de5aeb 8738 if (TREE_CODE (field) == FIELD_DECL)
8e0a600b
JJ
8739 {
8740 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
9a9ba8d9
JJ
8741 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
8742 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
8e0a600b
JJ
8743 gimplify_type_sizes (TREE_TYPE (field), list_p);
8744 }
8745 break;
8746
8747 case POINTER_TYPE:
8748 case REFERENCE_TYPE:
706c4bb7
OH
8749 /* We used to recurse on the pointed-to type here, which turned out to
8750 be incorrect because its definition might refer to variables not
8751 yet initialized at this point if a forward declaration is involved.
8752
8753 It was actually useful for anonymous pointed-to types to ensure
8754 that the sizes evaluation dominates every possible later use of the
8755 values. Restricting to such types here would be safe since there
f63645be
KH
8756 is no possible forward declaration around, but would introduce an
8757 undesirable middle-end semantic to anonymity. We then defer to
8758 front-ends the responsibility of ensuring that the sizes are
8759 evaluated both early and late enough, e.g. by attaching artificial
706c4bb7 8760 type declarations to the tree. */
44de5aeb
RK
8761 break;
8762
8763 default:
8764 break;
8765 }
8766
65355d53
RH
8767 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
8768 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
44de5aeb 8769
ad50bc8d 8770 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
b4830636 8771 {
ad50bc8d
RH
8772 TYPE_SIZE (t) = TYPE_SIZE (type);
8773 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
8774 TYPE_SIZES_GIMPLIFIED (t) = 1;
b4830636 8775 }
b4830636
RH
8776}
8777
8778/* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
8779 a size or position, has had all of its SAVE_EXPRs evaluated.
726a989a 8780 We add any required statements to *STMT_P. */
44de5aeb
RK
8781
8782void
726a989a 8783gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
44de5aeb 8784{
3ac8781c 8785 tree expr = *expr_p;
a9c5ddf9 8786
44de5aeb 8787 /* We don't do anything if the value isn't there, is constant, or contains
1e748a2b 8788 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
aabcd309 8789 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
1e748a2b
RK
8790 will want to replace it with a new variable, but that will cause problems
8791 if this type is from outside the function. It's OK to have that here. */
848be094 8792 if (is_gimple_sizepos (expr))
44de5aeb
RK
8793 return;
8794
a9c5ddf9
RH
8795 *expr_p = unshare_expr (expr);
8796
ad50bc8d 8797 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
44de5aeb 8798}
6de9cd9a 8799
3ad065ef
EB
8800/* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
8801 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
8802 is true, also gimplify the parameters. */
726a989a 8803
538dd0b7 8804gbind *
3ad065ef 8805gimplify_body (tree fndecl, bool do_parms)
6de9cd9a
DN
8806{
8807 location_t saved_location = input_location;
726a989a 8808 gimple_seq parm_stmts, seq;
538dd0b7
DM
8809 gimple outer_stmt;
8810 gbind *outer_bind;
9f9ebcdf 8811 struct cgraph_node *cgn;
6de9cd9a
DN
8812
8813 timevar_push (TV_TREE_GIMPLIFY);
953ff289 8814
f66d6761
SB
8815 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
8816 gimplification. */
8817 default_rtl_profile ();
8818
953ff289 8819 gcc_assert (gimplify_ctxp == NULL);
45852dcc 8820 push_gimplify_context ();
6de9cd9a 8821
acf0174b
JJ
8822 if (flag_openmp)
8823 {
8824 gcc_assert (gimplify_omp_ctxp == NULL);
8825 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
8826 gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
8827 }
8828
44de5aeb
RK
8829 /* Unshare most shared trees in the body and in that of any nested functions.
8830 It would seem we don't have to do this for nested functions because
8831 they are supposed to be output and then the outer function gimplified
8832 first, but the g++ front end doesn't always do it that way. */
3ad065ef
EB
8833 unshare_body (fndecl);
8834 unvisit_body (fndecl);
6de9cd9a 8835
d52f5295 8836 cgn = cgraph_node::get (fndecl);
9f9ebcdf 8837 if (cgn && cgn->origin)
6e2830c3 8838 nonlocal_vlas = new hash_set<tree>;
77f2a970 8839
fa10beec 8840 /* Make sure input_location isn't set to something weird. */
6de9cd9a
DN
8841 input_location = DECL_SOURCE_LOCATION (fndecl);
8842
4744afba
RH
8843 /* Resolve callee-copies. This has to be done before processing
8844 the body so that DECL_VALUE_EXPR gets processed correctly. */
3ad065ef 8845 parm_stmts = do_parms ? gimplify_parameters () : NULL;
4744afba 8846
6de9cd9a 8847 /* Gimplify the function's body. */
726a989a 8848 seq = NULL;
3ad065ef 8849 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
538dd0b7
DM
8850 outer_stmt = gimple_seq_first_stmt (seq);
8851 if (!outer_stmt)
6de9cd9a 8852 {
538dd0b7
DM
8853 outer_stmt = gimple_build_nop ();
8854 gimplify_seq_add_stmt (&seq, outer_stmt);
6de9cd9a 8855 }
44de5aeb 8856
726a989a
RB
8857 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
8858 not the case, wrap everything in a GIMPLE_BIND to make it so. */
538dd0b7 8859 if (gimple_code (outer_stmt) == GIMPLE_BIND
726a989a 8860 && gimple_seq_first (seq) == gimple_seq_last (seq))
538dd0b7 8861 outer_bind = as_a <gbind *> (outer_stmt);
726a989a
RB
8862 else
8863 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
8864
3ad065ef 8865 DECL_SAVED_TREE (fndecl) = NULL_TREE;
4744afba
RH
8866
8867 /* If we had callee-copies statements, insert them at the beginning
f0c10f0f 8868 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
726a989a 8869 if (!gimple_seq_empty_p (parm_stmts))
4744afba 8870 {
f0c10f0f
RG
8871 tree parm;
8872
726a989a
RB
8873 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
8874 gimple_bind_set_body (outer_bind, parm_stmts);
f0c10f0f
RG
8875
8876 for (parm = DECL_ARGUMENTS (current_function_decl);
910ad8de 8877 parm; parm = DECL_CHAIN (parm))
f0c10f0f
RG
8878 if (DECL_HAS_VALUE_EXPR_P (parm))
8879 {
8880 DECL_HAS_VALUE_EXPR_P (parm) = 0;
8881 DECL_IGNORED_P (parm) = 0;
8882 }
4744afba
RH
8883 }
8884
77f2a970
JJ
8885 if (nonlocal_vlas)
8886 {
96ddb7ec
JJ
8887 if (nonlocal_vla_vars)
8888 {
8889 /* tree-nested.c may later on call declare_vars (..., true);
8890 which relies on BLOCK_VARS chain to be the tail of the
8891 gimple_bind_vars chain. Ensure we don't violate that
8892 assumption. */
8893 if (gimple_bind_block (outer_bind)
8894 == DECL_INITIAL (current_function_decl))
8895 declare_vars (nonlocal_vla_vars, outer_bind, true);
8896 else
8897 BLOCK_VARS (DECL_INITIAL (current_function_decl))
8898 = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
8899 nonlocal_vla_vars);
8900 nonlocal_vla_vars = NULL_TREE;
8901 }
6e2830c3 8902 delete nonlocal_vlas;
77f2a970
JJ
8903 nonlocal_vlas = NULL;
8904 }
8905
6d7f7e0a 8906 if ((flag_openmp || flag_openmp_simd) && gimplify_omp_ctxp)
acf0174b
JJ
8907 {
8908 delete_omp_context (gimplify_omp_ctxp);
8909 gimplify_omp_ctxp = NULL;
8910 }
8911
726a989a 8912 pop_gimplify_context (outer_bind);
953ff289 8913 gcc_assert (gimplify_ctxp == NULL);
6de9cd9a 8914
07c5a154 8915#ifdef ENABLE_CHECKING
1da2ed5f 8916 if (!seen_error ())
34019e28 8917 verify_gimple_in_seq (gimple_bind_body (outer_bind));
07c5a154 8918#endif
6de9cd9a
DN
8919
8920 timevar_pop (TV_TREE_GIMPLIFY);
8921 input_location = saved_location;
726a989a
RB
8922
8923 return outer_bind;
6de9cd9a
DN
8924}
8925
6a1f6c9c 8926typedef char *char_p; /* For DEF_VEC_P. */
6a1f6c9c
JM
8927
8928/* Return whether we should exclude FNDECL from instrumentation. */
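 /* The exclusion strings come from the
 -finstrument-functions-exclude-function-list= and
 -finstrument-functions-exclude-file-list= options; FNDECL is
 excluded when its printable name or its source file name contains
 any of them as a substring. */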
8929
8930static bool
8931flag_instrument_functions_exclude_p (tree fndecl)
8932{
9771b263 8933 vec<char_p> *v;
6a1f6c9c 8934
9771b263
DN
8935 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
8936 if (v && v->length () > 0)
6a1f6c9c
JM
8937 {
8938 const char *name;
8939 int i;
8940 char *s;
8941
8942 name = lang_hooks.decl_printable_name (fndecl, 0);
9771b263 8943 FOR_EACH_VEC_ELT (*v, i, s)
6a1f6c9c
JM
8944 if (strstr (name, s) != NULL)
8945 return true;
8946 }
8947
9771b263
DN
8948 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
8949 if (v && v->length () > 0)
6a1f6c9c
JM
8950 {
8951 const char *name;
8952 int i;
8953 char *s;
8954
8955 name = DECL_SOURCE_FILE (fndecl);
9771b263 8956 FOR_EACH_VEC_ELT (*v, i, s)
6a1f6c9c
JM
8957 if (strstr (name, s) != NULL)
8958 return true;
8959 }
8960
8961 return false;
8962}
8963
6de9cd9a 8964/* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
726a989a 8965 node for the function we want to gimplify.
b8698a0f 8966
ad19c4be 8967 Return the sequence of GIMPLE statements corresponding to the body
726a989a 8968 of FNDECL. */
6de9cd9a
DN
8969
8970void
8971gimplify_function_tree (tree fndecl)
8972{
af16bc76 8973 tree parm, ret;
726a989a 8974 gimple_seq seq;
538dd0b7 8975 gbind *bind;
6de9cd9a 8976
a406865a
RG
8977 gcc_assert (!gimple_body (fndecl));
8978
db2960f4
SL
8979 if (DECL_STRUCT_FUNCTION (fndecl))
8980 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
8981 else
8982 push_struct_function (fndecl);
6de9cd9a 8983
910ad8de 8984 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
e41d82f5
RH
8985 {
8986 /* Preliminarily mark non-addressed complex variables as eligible
8987 for promotion to gimple registers. We'll transform their uses
8988 as we find them. */
0890b981
AP
8989 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
8990 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
e41d82f5
RH
8991 && !TREE_THIS_VOLATILE (parm)
8992 && !needs_to_live_in_memory (parm))
0890b981 8993 DECL_GIMPLE_REG_P (parm) = 1;
e41d82f5
RH
8994 }
8995
8996 ret = DECL_RESULT (fndecl);
0890b981 8997 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
7b7e6ecd 8998 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
e41d82f5 8999 && !needs_to_live_in_memory (ret))
0890b981 9000 DECL_GIMPLE_REG_P (ret) = 1;
e41d82f5 9001
3ad065ef 9002 bind = gimplify_body (fndecl, true);
726a989a
RB
9003
9004 /* The tree body of the function is no longer needed, replace it
9005 with the new GIMPLE body. */
355a7673 9006 seq = NULL;
726a989a
RB
9007 gimple_seq_add_stmt (&seq, bind);
9008 gimple_set_body (fndecl, seq);
6de9cd9a
DN
9009
9010 /* If we're instrumenting function entry/exit, then prepend the call to
9011 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
9012 catch the exit hook. */
9013 /* ??? Add some way to ignore exceptions for this TFE. */
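 /* The instrumented body is roughly (illustrative sketch):
 t = __builtin_return_address (0);
 __cyg_profile_func_enter (&this_fn, t);
 try { <original body> }
 finally { t = __builtin_return_address (0);
 __cyg_profile_func_exit (&this_fn, t); } */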
9014 if (flag_instrument_function_entry_exit
8d5a7d1f
ILT
9015 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
9016 && !flag_instrument_functions_exclude_p (fndecl))
6de9cd9a 9017 {
726a989a 9018 tree x;
538dd0b7 9019 gbind *new_bind;
726a989a
RB
9020 gimple tf;
9021 gimple_seq cleanup = NULL, body = NULL;
b01890ff 9022 tree tmp_var;
538dd0b7 9023 gcall *call;
b01890ff 9024
e79983f4 9025 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
59527282 9026 call = gimple_build_call (x, 1, integer_zero_node);
b01890ff
JH
9027 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
9028 gimple_call_set_lhs (call, tmp_var);
9029 gimplify_seq_add_stmt (&cleanup, call);
e79983f4 9030 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
b01890ff
JH
9031 call = gimple_build_call (x, 2,
9032 build_fold_addr_expr (current_function_decl),
9033 tmp_var);
9034 gimplify_seq_add_stmt (&cleanup, call);
726a989a 9035 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
6de9cd9a 9036
e79983f4 9037 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
59527282 9038 call = gimple_build_call (x, 1, integer_zero_node);
b01890ff
JH
9039 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
9040 gimple_call_set_lhs (call, tmp_var);
9041 gimplify_seq_add_stmt (&body, call);
e79983f4 9042 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
b01890ff
JH
9043 call = gimple_build_call (x, 2,
9044 build_fold_addr_expr (current_function_decl),
9045 tmp_var);
9046 gimplify_seq_add_stmt (&body, call);
726a989a 9047 gimplify_seq_add_stmt (&body, tf);
32001f69 9048 new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
726a989a
RB
9049 /* Clear the block for BIND, since it is no longer directly inside
9050 the function, but within a try block. */
32001f69 9051 gimple_bind_set_block (bind, NULL);
6de9cd9a 9052
726a989a
RB
9053 /* Replace the current function body with the body
9054 wrapped in the try/finally TF. */
355a7673 9055 seq = NULL;
726a989a
RB
9056 gimple_seq_add_stmt (&seq, new_bind);
9057 gimple_set_body (fndecl, seq);
fca4adf2
JJ
9058 bind = new_bind;
9059 }
9060
9061 if (flag_sanitize & SANITIZE_THREAD)
9062 {
9063 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
9064 gimple tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
9065 gbind *new_bind = gimple_build_bind (NULL, tf, gimple_bind_block (bind));
9066 /* Clear the block for BIND, since it is no longer directly inside
9067 the function, but within a try block. */
9068 gimple_bind_set_block (bind, NULL);
9069 /* Replace the current function body with the body
9070 wrapped in the try/finally TF. */
9071 seq = NULL;
9072 gimple_seq_add_stmt (&seq, new_bind);
9073 gimple_set_body (fndecl, seq);
6de9cd9a
DN
9074 }
9075
726a989a 9076 DECL_SAVED_TREE (fndecl) = NULL_TREE;
a406865a 9077 cfun->curr_properties = PROP_gimple_any;
726a989a 9078
db2960f4 9079 pop_cfun ();
6de9cd9a 9080}
726a989a 9081
4a7cb16f
AM
9082/* Return a dummy expression of type TYPE in order to keep going after an
9083 error. */
b184c8f1 9084
4a7cb16f
AM
9085static tree
9086dummy_object (tree type)
b184c8f1 9087{
4a7cb16f
AM
9088 tree t = build_int_cst (build_pointer_type (type), 0);
9089 return build2 (MEM_REF, type, t, t);
b184c8f1
AM
9090}
9091
4a7cb16f
AM
9092/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
9093 builtin function, but a very special sort of operator. */
b184c8f1 9094
4a7cb16f
AM
9095enum gimplify_status
9096gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
9097{
9098 tree promoted_type, have_va_type;
9099 tree valist = TREE_OPERAND (*expr_p, 0);
9100 tree type = TREE_TYPE (*expr_p);
9101 tree t;
9102 location_t loc = EXPR_LOCATION (*expr_p);
b184c8f1 9103
4a7cb16f
AM
9104 /* Verify that valist is of the proper type. */
9105 have_va_type = TREE_TYPE (valist);
9106 if (have_va_type == error_mark_node)
9107 return GS_ERROR;
9108 have_va_type = targetm.canonical_va_list_type (have_va_type);
b184c8f1 9109
4a7cb16f
AM
9110 if (have_va_type == NULL_TREE)
9111 {
9112 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
9113 return GS_ERROR;
9114 }
b184c8f1 9115
4a7cb16f
AM
9116 /* Generate a diagnostic for requesting data of a type that cannot
9117 be passed through `...' due to type promotion at the call site. */
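 /* For example, "va_arg (ap, char)" is diagnosed here, because a
 char argument is promoted to int when passed through "...". */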
9118 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
9119 != type)
9120 {
9121 static bool gave_help;
9122 bool warned;
b184c8f1 9123
4a7cb16f
AM
9124 /* Unfortunately, this is merely undefined, rather than a constraint
9125 violation, so we cannot make this an error. If this call is never
9126 executed, the program is still strictly conforming. */
9127 warned = warning_at (loc, 0,
9128 "%qT is promoted to %qT when passed through %<...%>",
9129 type, promoted_type);
9130 if (!gave_help && warned)
9131 {
9132 gave_help = true;
9133 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
9134 promoted_type, type);
9135 }
b184c8f1 9136
4a7cb16f
AM
9137 /* We can, however, treat "undefined" any way we please.
9138 Call abort to encourage the user to fix the program. */
9139 if (warned)
9140 inform (loc, "if this code is reached, the program will abort");
9141 /* Before the abort, allow the evaluation of the va_list
9142 expression to exit or longjmp. */
9143 gimplify_and_add (valist, pre_p);
9144 t = build_call_expr_loc (loc,
9145 builtin_decl_implicit (BUILT_IN_TRAP), 0);
b184c8f1
AM
9146 gimplify_and_add (t, pre_p);
9147
4a7cb16f
AM
9148 /* This is dead code, but go ahead and finish so that the
9149 mode of the result comes out right. */
9150 *expr_p = dummy_object (type);
9151 return GS_ALL_DONE;
b184c8f1
AM
9152 }
9153 else
b184c8f1 9154 {
4a7cb16f
AM
9155 /* Make it easier for the backends by protecting the valist argument
9156 from multiple evaluations. */
9157 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
9158 {
9159 /* For this case, the backends will be expecting a pointer to
9160 TREE_TYPE (abi), but it's possible we've
9161 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
9162 So fix it. */
9163 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
9164 {
9165 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
9166 valist = fold_convert_loc (loc, p1,
9167 build_fold_addr_expr_loc (loc, valist));
9168 }
b184c8f1 9169
4a7cb16f
AM
9170 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
9171 }
9172 else
9173 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
b184c8f1 9174
4a7cb16f
AM
9175 if (!targetm.gimplify_va_arg_expr)
9176 /* FIXME: Once most targets are converted we should merely
9177 assert this is non-null. */
9178 return GS_ALL_DONE;
b184c8f1 9179
4a7cb16f
AM
9180 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
9181 return GS_OK;
b184c8f1 9182 }
b184c8f1 9183}
bcf71673 9184
45b0be94
AM
9185/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
9186
9187 DST/SRC are the destination and source respectively. You can pass
9188 ungimplified trees in DST or SRC, in which case they will be
9189 converted to a gimple operand if necessary.
9190
9191 This function returns the newly created GIMPLE_ASSIGN tuple. */
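 /* Typical use (illustrative): gimplify_assign (lhs, rhs, pre_p)
 appends the gimplified statement "lhs = rhs" to *PRE_P. */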
9192
9193gimple
9194gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
9195{
9196 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
9197 gimplify_and_add (t, seq_p);
9198 ggc_free (t);
9199 return gimple_seq_last_stmt (*seq_p);
9200}
9201
18f429e2
AM
9202inline hashval_t
9203gimplify_hasher::hash (const value_type *p)
9204{
9205 tree t = p->val;
9206 return iterative_hash_expr (t, 0);
9207}
9208
9209inline bool
9210gimplify_hasher::equal (const value_type *p1, const compare_type *p2)
9211{
9212 tree t1 = p1->val;
9213 tree t2 = p2->val;
9214 enum tree_code code = TREE_CODE (t1);
9215
9216 if (TREE_CODE (t2) != code
9217 || TREE_TYPE (t1) != TREE_TYPE (t2))
9218 return false;
9219
9220 if (!operand_equal_p (t1, t2, 0))
9221 return false;
9222
9223#ifdef ENABLE_CHECKING
9224 /* Only allow them to compare equal if they also hash equal; otherwise
9225 results are nondeterministic, and we fail the bootstrap comparison. */
9226 gcc_assert (hash (p1) == hash (p2));
9227#endif
9228
9229 return true;
9230}