/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2017 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "gimple-predict.h"
#include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "cilk.h"
#include "gomp-constants.h"
#include "splay-tree.h"
#include "gimple-walk.h"
#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
#include "builtins.h"
#include "asan.h"
#include "dbgcnt.h"

/* Hash set of poisoned variables in a bind expr.  */
static hash_set<tree> *asan_poisoned_variables = NULL;

enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 8192,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,

  GOVD_MAP_0LEN_ARRAY = 32768,

  /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 65536,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 131072,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 262144,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 524288,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
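
/* Illustrative note: these values are bit flags, and an entry in a
   gimplify_omp_ctx's splay tree of variables carries the OR of all flags
   that apply.  For example, gimple_add_tmp_var below registers a
   gimplifier temporary created inside a parallel with
     omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
   and gimplify_bind_expr registers an addressable block-local variable
   seen in a simd region with GOVD_PRIVATE | GOVD_SEEN.  */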

enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_SIMD = 0x01,

  ORT_PARALLEL = 0x02,
  ORT_COMBINED_PARALLEL = 0x03,

  ORT_TASK = 0x04,
  ORT_UNTIED_TASK = 0x05,

  ORT_TEAMS = 0x08,
  ORT_COMBINED_TEAMS = 0x09,

  /* Data region.  */
  ORT_TARGET_DATA = 0x10,

  /* Data region with offloading.  */
  ORT_TARGET = 0x20,
  ORT_COMBINED_TARGET = 0x21,

  /* OpenACC variants.  */
  ORT_ACC = 0x40,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 0x80,  /* Kernels construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 0x80,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x100
};

/* Gimplify hashtable helper.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gbind *> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
};

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  bool combined_loop;
  bool distribute;
  bool target_map_scalars_firstprivate;
  bool target_map_pointers_as_0len_arrays;
  bool target_firstprivatize_array_bases;
};

static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);

/* Shorter alias name for the above function for use in gimplify.c
   only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}


/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;

/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx * c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}

/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}

/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}


/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}

/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}

/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the first element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the first element of the stack of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}

/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;

  return c;
}

/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}

static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}

/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t)
	    || is_gimple_lvalue (t)
	    || TREE_CLOBBER_P (t)
	    || TREE_CODE (t) == CALL_EXPR);
}

/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (var) = 1;
  return var;
}

/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}
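
/* Note (illustrative, assuming gimplify_hasher, whose hash and equal
   methods are defined elsewhere in this file, compares the value
   expressions structurally): with the table above, two formal-temporary
   requests for structurally equal values in the same context can hand
   back the same temporary when optimizing, e.g. two occurrences of the
   value a + b may both be replaced by one D.1234 (name illustrative
   only).  With -O0, every request gets a fresh temporary.  */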

/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}

/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
   change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true);
}

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
			 bool allow_ssa)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
}

/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}

/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (VAR_P (var));

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    ctx = ctx->outer_context;
	  if (ctx)
	    omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}

/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only few specific cases of node sharing across functions, it is
   probably easier for a front-end to unshare the expressions manually.  On the
   contrary, if the expressions generated at the global level are as widespread
   as expressions generated within functions, deep unsharing is very likely the
   way to go.  */
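
/* As an illustration (hypothetical trees, not from a particular front end):
   if a front end used a single PLUS_EXPR <i, 1> node in two different
   statements, gimplifying the first occurrence in place could rewrite its
   operands into temporaries that are only defined on the first statement's
   path; the second occurrence would then reference those temporaries and
   yield invalid GIMPLE.  Unsharing gives each occurrence its own node.  */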

/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}

/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}

/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

static inline void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}

/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unshare_body (cgn->decl);
}

/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}

/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unvisit_body (cgn->decl);
}

/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}

/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}

/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
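
/* For example (a sketch, not lifted from a testcase): given a GNU
   statement expression used as an initializer,
     int x = ({ int i = f (); i + 1; });
   the wrapper around the statements has type int; voidify_wrapper_expr
   gives the wrapper void type and rewrites its last statement so that
   the value is assigned to a "retval" temporary, which is what the
   caller then uses in place of the wrapper's value.  */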

/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}
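
/* The pair built above corresponds to GIMPLE along the lines of
     saved_stack.N = __builtin_stack_save ();
     ...
     __builtin_stack_restore (saved_stack.N);
   (the ".N" suffix is just the usual temporary numbering in dumps).
   gimplify_bind_expr below emits the save at the start of a block that
   contains VLAs and the restore in the block's cleanup, so the stack
   space of the VLAs is reclaimed on exit.  */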

/* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable.  */

static tree
build_asan_poison_call_expr (tree decl)
{
  /* Do not poison variables that have size equal to zero.  */
  tree unit_size = DECL_SIZE_UNIT (decl);
  if (zerop (unit_size))
    return NULL_TREE;

  tree base = build_fold_addr_expr (decl);

  return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
				       void_type_node, 3,
				       build_int_cst (integer_type_node,
						      ASAN_MARK_POISON),
				       base, unit_size);
}

/* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
   on POISON flag, shadow memory of a DECL variable.  The call will be
   put on location identified by IT iterator, where BEFORE flag drives
   position where the stmt will be put.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
		      bool before)
{
  /* When within an OMP context, do not emit ASAN_MARK internal fns.  */
  if (gimplify_omp_ctxp)
    return;

  tree unit_size = DECL_SIZE_UNIT (decl);
  tree base = build_fold_addr_expr (decl);

  /* Do not poison variables that have size equal to zero.  */
  if (zerop (unit_size))
    return;

  /* It's necessary to have all stack variables aligned to ASAN granularity
     bytes.  */
  if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
    SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);

  HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;

  gimple *g
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node, flags),
				  base, unit_size);

  if (before)
    gsi_insert_before (it, g, GSI_NEW_STMT);
  else
    gsi_insert_after (it, g, GSI_NEW_STMT);
}

/* Generate IFN_ASAN_MARK internal call that depending on POISON flag
   either poisons or unpoisons a DECL.  Created statement is appended
   to SEQ_P gimple sequence.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
{
  gimple_stmt_iterator it = gsi_last (*seq_p);
  bool before = false;

  if (gsi_end_p (it))
    before = true;

  asan_poison_variable (decl, poison, &it, before);
}

/* Sort pair of VAR_DECLs A and B by DECL_UID.  */

static int
sort_by_decl_uid (const void *a, const void *b)
{
  const tree *t1 = (const tree *)a;
  const tree *t2 = (const tree *)b;

  int uid1 = DECL_UID (*t1);
  int uid2 = DECL_UID (*t2);

  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  else
    return 0;
}

/* Generate IFN_ASAN_MARK internal call for all VARIABLES
   depending on POISON flag.  Created statement is appended
   to SEQ_P gimple sequence.  */

static void
asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
{
  unsigned c = variables->elements ();
  if (c == 0)
    return;

  auto_vec<tree> sorted_variables (c);

  for (hash_set<tree>::iterator it = variables->begin ();
       it != variables->end (); ++it)
    sorted_variables.safe_push (*it);

  sorted_variables.qsort (sort_by_decl_uid);

  unsigned i;
  tree var;
  FOR_EACH_VEC_ELT (sorted_variables, i, var)
    {
      asan_poison_variable (var, poison, seq_p);

      /* Add use_after_scope_memory attribute for the variable in order
	 to prevent re-written into SSA.  */
      if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
			     DECL_ATTRIBUTES (var)))
	DECL_ATTRIBUTES (var)
	  = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
		       integer_one_node,
		       DECL_ATTRIBUTES (var));
    }
}

/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
	      && (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL))
	    {
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (t)
		  && !TREE_STATIC (t))
		omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
	      else
		omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}

      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (t)
	  && (VAR_P (t) && !DECL_HARD_REGISTER (t))
	  && !needs_to_live_in_memory (t))
	DECL_GIMPLE_REG_P (t) = 1;
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      tree clobber = build_constructor (TREE_TYPE (t), NULL);
	      gimple *clobber_stmt;
	      TREE_THIS_VOLATILE (clobber) = 1;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree *c = oacc_declare_returns->get (t);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = *c;

		  oacc_declare_returns->remove (t);

		  if (oacc_declare_returns->elements () == 0)
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}

/* Maybe add early return predict statement to PRE_P sequence.  */

static void
maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
{
  /* If we are not in a conditional context, add PREDICT statement.  */
  if (gimple_conditional_context ())
    {
      gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
					      NOT_TAKEN);
      gimplify_seq_add_stmt (pre_p, predict);
    }
}

/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  /* Implicit _Cilk_sync must be inserted right before any return statement
     if there is a _Cilk_spawn in the function.  If the user has provided a
     _Cilk_sync, the optimizer should remove this duplicate one.  */
  if (fn_contains_cilk_spawn_p (cfun))
    {
      tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
      gimplify_and_add (impl_sync, pre_p);
    }

  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL
      || ret_expr == error_mark_node)
    {
      maybe_add_early_return_predict_stmt (pre_p);
      greturn *ret = gimple_build_return (ret_expr);
      gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (TREE_CODE (result_decl) == INDIRECT_REF)
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl));

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      TREE_NO_WARNING (result) = 1;

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  maybe_add_early_return_predict_stmt (pre_p);
  ret = gimple_build_return (result);
  gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}
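
/* For a non-aggregate return value, the effect of the above is roughly
   (sketch of the resulting GIMPLE, temporary names illustrative only):
     return a + b;
   becomes
     D.1600 = a + b;
     return D.1600;
   with D.1600 recorded in gimplify_ctxp->return_temp so that every
   return statement in the function reuses the same temporary.  */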

/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
  t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
		       size_int (DECL_ALIGN (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);
}
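
/* Concretely, for a declaration such as
     int a[n];
   the code above emits (sketch, temporary names illustrative only)
     a.1 = __builtin_alloca_with_align (<size of a in bytes>, <alignment>);
   and records *a.1 as the DECL_VALUE_EXPR of "a", so later references
   to "a" are rewritten as indirections through the pointer.  */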
1585
45b0be94
AM
1586/* A helper function to be called via walk_tree. Mark all labels under *TP
1587 as being forced. To be called for DECL_INITIAL of static variables. */
1588
1589static tree
1590force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1591{
1592 if (TYPE_P (*tp))
1593 *walk_subtrees = 0;
1594 if (TREE_CODE (*tp) == LABEL_DECL)
aa43616c
RH
1595 {
1596 FORCED_LABEL (*tp) = 1;
1597 cfun->has_forced_label_in_static = 1;
1598 }
45b0be94
AM
1599
1600 return NULL_TREE;
1601}
1602
ad19c4be 1603/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
350fae66
RK
1604 and initialization explicit. */
1605
1606static enum gimplify_status
726a989a 1607gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
350fae66
RK
1608{
1609 tree stmt = *stmt_p;
1610 tree decl = DECL_EXPR_DECL (stmt);
1611
1612 *stmt_p = NULL_TREE;
1613
1614 if (TREE_TYPE (decl) == error_mark_node)
1615 return GS_ERROR;
1616
8e0a600b 1617 if ((TREE_CODE (decl) == TYPE_DECL
8813a647 1618 || VAR_P (decl))
8e0a600b 1619 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
da3d46cb
JJ
1620 {
1621 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1622 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1623 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
1624 }
350fae66 1625
d400d17e
EB
1626 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1627 in case its size expressions contain problematic nodes like CALL_EXPR. */
1628 if (TREE_CODE (decl) == TYPE_DECL
1629 && DECL_ORIGINAL_TYPE (decl)
1630 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
da3d46cb
JJ
1631 {
1632 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1633 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
1634 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
1635 }
d400d17e 1636
8813a647 1637 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
350fae66
RK
1638 {
1639 tree init = DECL_INITIAL (decl);
6dc4a604 1640 bool is_vla = false;
350fae66 1641
b38f3813
EB
1642 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1643 || (!TREE_STATIC (decl)
1644 && flag_stack_check == GENERIC_STACK_CHECK
1645 && compare_tree_int (DECL_SIZE_UNIT (decl),
1646 STACK_CHECK_MAX_VAR_SIZE) > 0))
6dc4a604
ML
1647 {
1648 gimplify_vla_decl (decl, seq_p);
1649 is_vla = true;
1650 }
1651
6ff92497 1652 if (asan_poisoned_variables
6dc4a604
ML
1653 && !is_vla
1654 && TREE_ADDRESSABLE (decl)
fcd1b8df
ML
1655 && !TREE_STATIC (decl)
1656 && !DECL_HAS_VALUE_EXPR_P (decl)
1657 && dbg_cnt (asan_use_after_scope))
6dc4a604
ML
1658 {
1659 asan_poisoned_variables->add (decl);
1660 asan_poison_variable (decl, false, seq_p);
fcd1b8df 1661 if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
6dc4a604
ML
1662 gimplify_ctxp->live_switch_vars->add (decl);
1663 }
350fae66 1664
22192559
JM
1665 /* Some front ends do not explicitly declare all anonymous
1666 artificial variables. We compensate here by declaring the
1667 variables, though it would be better if the front ends would
1668 explicitly declare them. */
1669 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1670 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1671 gimple_add_tmp_var (decl);
1672
350fae66
RK
1673 if (init && init != error_mark_node)
1674 {
1675 if (!TREE_STATIC (decl))
1676 {
1677 DECL_INITIAL (decl) = NULL_TREE;
dae7ec87 1678 init = build2 (INIT_EXPR, void_type_node, decl, init);
726a989a
RB
1679 gimplify_and_add (init, seq_p);
1680 ggc_free (init);
350fae66
RK
1681 }
1682 else
1683 /* We must still examine initializers for static variables
1684 as they may contain a label address. */
1685 walk_tree (&init, force_labels_r, NULL, NULL);
1686 }
350fae66
RK
1687 }
1688
1689 return GS_ALL_DONE;
1690}
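/* Illustrative sketch, not part of the original source: for an automatic
   variable with an initializer, e.g.

     int i = foo ();

   the function above clears DECL_INITIAL and emits the initialization as a
   separate INIT_EXPR into *SEQ_P, roughly "i = foo ();".  Static variables
   keep their DECL_INITIAL; their initializers are only walked by
   force_labels_r to catch label addresses.  */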
1691
6de9cd9a
DN
1692/* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1693 and replacing the LOOP_EXPR with goto, but if the loop contains an
1694 EXIT_EXPR, we need to append a label for it to jump to. */
1695
1696static enum gimplify_status
726a989a 1697gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a
DN
1698{
1699 tree saved_label = gimplify_ctxp->exit_label;
c2255bc4 1700 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
6de9cd9a 1701
726a989a 1702 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
6de9cd9a
DN
1703
1704 gimplify_ctxp->exit_label = NULL_TREE;
1705
fff34d35 1706 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
6de9cd9a 1707
726a989a
RB
1708 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1709
6de9cd9a 1710 if (gimplify_ctxp->exit_label)
ad19c4be
EB
1711 gimplify_seq_add_stmt (pre_p,
1712 gimple_build_label (gimplify_ctxp->exit_label));
726a989a
RB
1713
1714 gimplify_ctxp->exit_label = saved_label;
1715
1716 *expr_p = NULL;
1717 return GS_ALL_DONE;
1718}
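/* Illustrative sketch, not part of the original source: a LOOP_EXPR whose
   body contains an EXIT_EXPR, e.g. the GENERIC form of

     while (1) { if (cond) break; body (); }

   is lowered here (together with gimplify_exit_expr below) to roughly

     start:
       if (cond) goto exit; else goto next;
     next:
       body ();
       goto start;
     exit:

   where "start", "next" and "exit" stand for artificial labels, and the
   trailing label is emitted only if an EXIT_EXPR was actually seen.  */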
1719
ad19c4be 1720/* Gimplify a statement list onto a sequence. These may be created either
726a989a
RB
1721 by an enlightened front-end, or by shortcut_cond_expr. */
1722
1723static enum gimplify_status
1724gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1725{
1726 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1727
1728 tree_stmt_iterator i = tsi_start (*expr_p);
1729
1730 while (!tsi_end_p (i))
6de9cd9a 1731 {
726a989a
RB
1732 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1733 tsi_delink (&i);
6de9cd9a 1734 }
6de9cd9a 1735
726a989a
RB
1736 if (temp)
1737 {
1738 *expr_p = temp;
1739 return GS_OK;
1740 }
6de9cd9a
DN
1741
1742 return GS_ALL_DONE;
1743}
0f1f6967 1744
146c55da
MP
1745/* Callback for walk_gimple_seq. */
1746
1747static tree
1748warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
1749 struct walk_stmt_info *wi)
1750{
1751 gimple *stmt = gsi_stmt (*gsi_p);
1752
1753 *handled_ops_p = true;
1754 switch (gimple_code (stmt))
1755 {
1756 case GIMPLE_TRY:
1757 /* A compiler-generated cleanup or a user-written try block.
1758 If it's empty, don't dive into it--that would result in
1759 worse location info. */
1760 if (gimple_try_eval (stmt) == NULL)
1761 {
1762 wi->info = stmt;
1763 return integer_zero_node;
1764 }
1765 /* Fall through. */
1766 case GIMPLE_BIND:
1767 case GIMPLE_CATCH:
1768 case GIMPLE_EH_FILTER:
1769 case GIMPLE_TRANSACTION:
1770 /* Walk the sub-statements. */
1771 *handled_ops_p = false;
1772 break;
6dc4a604
ML
1773 case GIMPLE_CALL:
1774 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1775 {
1776 *handled_ops_p = false;
1777 break;
1778 }
1779 /* Fall through. */
146c55da
MP
1780 default:
1781 /* Save the first "real" statement (not a decl/lexical scope/...). */
1782 wi->info = stmt;
1783 return integer_zero_node;
1784 }
1785 return NULL_TREE;
1786}
1787
1788/* Possibly warn about unreachable statements between switch's controlling
1789 expression and the first case. SEQ is the body of a switch expression. */
1790
1791static void
1792maybe_warn_switch_unreachable (gimple_seq seq)
1793{
1794 if (!warn_switch_unreachable
1795 /* This warning doesn't play well with Fortran when optimizations
1796 are on. */
1797 || lang_GNU_Fortran ()
1798 || seq == NULL)
1799 return;
1800
1801 struct walk_stmt_info wi;
1802 memset (&wi, 0, sizeof (wi));
1803 walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1804 gimple *stmt = (gimple *) wi.info;
1805
1806 if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1807 {
1808 if (gimple_code (stmt) == GIMPLE_GOTO
1809 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1810 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1811 /* Don't warn for compiler-generated gotos. These occur
1812 in Duff's devices, for example. */;
1813 else
1814 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1815 "statement will never be executed");
1816 }
1817}
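/* Illustrative sketch, not part of the original source: the walk above
   diagnoses statements between the controlling expression and the first
   case label, e.g.

     switch (i)
       {
         foo ();      <-- "statement will never be executed"
       case 1:
         break;
       }

   Lexical scopes and cleanups are walked into, and compiler-generated
   gotos (e.g. in Duff's device) are not warned about.  */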
1818
81fea426
MP
1819
1820/* A label entry that pairs label and a location. */
1821struct label_entry
1822{
1823 tree label;
1824 location_t loc;
1825};
1826
1827/* Find LABEL in vector of label entries VEC. */
1828
1829static struct label_entry *
1830find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
1831{
1832 unsigned int i;
1833 struct label_entry *l;
1834
1835 FOR_EACH_VEC_ELT (*vec, i, l)
1836 if (l->label == label)
1837 return l;
1838 return NULL;
1839}
1840
1841/* Return true if LABEL, a LABEL_DECL, represents a case label
1842 in a vector of labels CASES. */
1843
1844static bool
1845case_label_p (const vec<tree> *cases, tree label)
1846{
1847 unsigned int i;
1848 tree l;
1849
1850 FOR_EACH_VEC_ELT (*cases, i, l)
1851 if (CASE_LABEL (l) == label)
1852 return true;
1853 return false;
1854}
1855
1856/* Find the last statement in a scope STMT. */
1857
1858static gimple *
1859last_stmt_in_scope (gimple *stmt)
1860{
1861 if (!stmt)
1862 return NULL;
1863
1864 switch (gimple_code (stmt))
1865 {
1866 case GIMPLE_BIND:
1867 {
1868 gbind *bind = as_a <gbind *> (stmt);
1869 stmt = gimple_seq_last_stmt (gimple_bind_body (bind));
1870 return last_stmt_in_scope (stmt);
1871 }
1872
1873 case GIMPLE_TRY:
1874 {
1875 gtry *try_stmt = as_a <gtry *> (stmt);
1876 stmt = gimple_seq_last_stmt (gimple_try_eval (try_stmt));
1877 gimple *last_eval = last_stmt_in_scope (stmt);
1878 if (gimple_stmt_may_fallthru (last_eval)
d4bd4646
MP
1879 && (last_eval == NULL
1880 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
81fea426
MP
1881 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
1882 {
1883 stmt = gimple_seq_last_stmt (gimple_try_cleanup (try_stmt));
1884 return last_stmt_in_scope (stmt);
1885 }
1886 else
1887 return last_eval;
1888 }
1889
1890 default:
1891 return stmt;
1892 }
1893}
1894
1895/* Collect interesting labels in LABELS and return the statement preceding
1896 another case label, or a user-defined label. */
1897
1898static gimple *
1899collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
1900 auto_vec <struct label_entry> *labels)
1901{
1902 gimple *prev = NULL;
1903
1904 do
1905 {
1906 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
1907 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
1908 {
1909 /* Nested scope. Only look at the last statement of
1910 the innermost scope. */
1911 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
1912 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
1913 if (last)
1914 {
1915 prev = last;
1916 /* It might be a label without a location. Use the
1917 location of the scope then. */
1918 if (!gimple_has_location (prev))
1919 gimple_set_location (prev, bind_loc);
1920 }
1921 gsi_next (gsi_p);
1922 continue;
1923 }
1924
1925 /* Ifs are tricky. */
1926 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
1927 {
1928 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
1929 tree false_lab = gimple_cond_false_label (cond_stmt);
1930 location_t if_loc = gimple_location (cond_stmt);
1931
1932 /* If we have e.g.
1933 if (i > 1) goto <D.2259>; else goto D;
1934 we can't do much with the else-branch. */
1935 if (!DECL_ARTIFICIAL (false_lab))
1936 break;
1937
1938 /* Go on until the false label, then one step back. */
1939 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
1940 {
1941 gimple *stmt = gsi_stmt (*gsi_p);
1942 if (gimple_code (stmt) == GIMPLE_LABEL
1943 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
1944 break;
1945 }
1946
1947 /* Not found? Oops. */
1948 if (gsi_end_p (*gsi_p))
1949 break;
1950
1951 struct label_entry l = { false_lab, if_loc };
1952 labels->safe_push (l);
1953
1954 /* Go to the last statement of the then branch. */
1955 gsi_prev (gsi_p);
1956
1957 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
1958 <D.1759>:
1959 <stmt>;
1960 goto <D.1761>;
1961 <D.1760>:
1962 */
1963 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
1964 && !gimple_has_location (gsi_stmt (*gsi_p)))
1965 {
 1966	 	     /* Look at the statement before; it might be
 1967	 		attribute fallthrough, in which case don't warn.  */
1968 gsi_prev (gsi_p);
1969 bool fallthru_before_dest
1970 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
1971 gsi_next (gsi_p);
1972 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
1973 if (!fallthru_before_dest)
1974 {
1975 struct label_entry l = { goto_dest, if_loc };
1976 labels->safe_push (l);
1977 }
1978 }
1979 /* And move back. */
1980 gsi_next (gsi_p);
1981 }
1982
1983 /* Remember the last statement. Skip labels that are of no interest
1984 to us. */
1985 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
1986 {
1987 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
1988 if (find_label_entry (labels, label))
1989 prev = gsi_stmt (*gsi_p);
1990 }
6dc4a604
ML
1991 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
1992 ;
81fea426
MP
1993 else
1994 prev = gsi_stmt (*gsi_p);
1995 gsi_next (gsi_p);
1996 }
1997 while (!gsi_end_p (*gsi_p)
1998 /* Stop if we find a case or a user-defined label. */
1999 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2000 || !gimple_has_location (gsi_stmt (*gsi_p))));
2001
2002 return prev;
2003}
2004
 2005/* Return true if the switch fallthrough warning should occur.  LABEL is
2006 the label statement that we're falling through to. */
2007
2008static bool
2009should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2010{
2011 gimple_stmt_iterator gsi = *gsi_p;
2012
a9172bf3
MP
2013 /* Don't warn if the label is marked with a "falls through" comment. */
2014 if (FALLTHROUGH_LABEL_P (label))
2015 return false;
2016
d2aadab1 2017 /* Don't warn for non-case labels followed by a statement:
81fea426
MP
2018 case 0:
2019 foo ();
2020 label:
2021 bar ();
2022 as these are likely intentional. */
2023 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2024 {
d2aadab1
MP
2025 tree l;
2026 while (!gsi_end_p (gsi)
2027 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2028 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2029 && !case_label_p (&gimplify_ctxp->case_labels, l))
2030 gsi_next (&gsi);
81fea426
MP
2031 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2032 return false;
2033 }
2034
2035 /* Don't warn for terminated branches, i.e. when the subsequent case labels
2036 immediately breaks. */
2037 gsi = *gsi_p;
2038
2039 /* Skip all immediately following labels. */
2040 while (!gsi_end_p (gsi) && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
2041 gsi_next (&gsi);
2042
2043 /* { ... something; default:; } */
2044 if (gsi_end_p (gsi)
2045 /* { ... something; default: break; } or
2046 { ... something; default: goto L; } */
2047 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2048 /* { ... something; default: return; } */
2049 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2050 return false;
2051
2052 return true;
2053}
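/* Illustrative sketch, not part of the original source: with
   -Wimplicit-fallthrough the machinery above warns for

     case 1:
       foo ();    <-- "this statement may fall through"
     case 2:      <-- "here"
       break;

   but stays quiet when the label is marked by a "falls through" comment or
   the fallthrough attribute, when a non-case label followed by a statement
   intervenes, or when the next case immediately breaks, returns or jumps
   elsewhere.  */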
2054
2055/* Callback for walk_gimple_seq. */
2056
2057static tree
2058warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2059 struct walk_stmt_info *)
2060{
2061 gimple *stmt = gsi_stmt (*gsi_p);
2062
2063 *handled_ops_p = true;
2064 switch (gimple_code (stmt))
2065 {
2066 case GIMPLE_TRY:
2067 case GIMPLE_BIND:
2068 case GIMPLE_CATCH:
2069 case GIMPLE_EH_FILTER:
2070 case GIMPLE_TRANSACTION:
2071 /* Walk the sub-statements. */
2072 *handled_ops_p = false;
2073 break;
2074
 2075	    /* Find a sequence of the form:
2076
2077 GIMPLE_LABEL
2078 [...]
2079 <may fallthru stmt>
2080 GIMPLE_LABEL
2081
2082 and possibly warn. */
2083 case GIMPLE_LABEL:
2084 {
2085 /* Found a label. Skip all immediately following labels. */
2086 while (!gsi_end_p (*gsi_p)
2087 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2088 gsi_next (gsi_p);
2089
2090 /* There might be no more statements. */
2091 if (gsi_end_p (*gsi_p))
2092 return integer_zero_node;
2093
2094 /* Vector of labels that fall through. */
2095 auto_vec <struct label_entry> labels;
2096 gimple *prev = collect_fallthrough_labels (gsi_p, &labels);
2097
2098 /* There might be no more statements. */
2099 if (gsi_end_p (*gsi_p))
2100 return integer_zero_node;
2101
2102 gimple *next = gsi_stmt (*gsi_p);
2103 tree label;
2104 /* If what follows is a label, then we may have a fallthrough. */
2105 if (gimple_code (next) == GIMPLE_LABEL
2106 && gimple_has_location (next)
2107 && (label = gimple_label_label (as_a <glabel *> (next)))
81fea426
MP
2108 && prev != NULL)
2109 {
2110 struct label_entry *l;
2111 bool warned_p = false;
2112 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2113 /* Quiet. */;
2114 else if (gimple_code (prev) == GIMPLE_LABEL
2115 && (label = gimple_label_label (as_a <glabel *> (prev)))
2116 && (l = find_label_entry (&labels, label)))
70f6d5e1 2117 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
81fea426
MP
2118 "this statement may fall through");
2119 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2120 /* Try to be clever and don't warn when the statement
2121 can't actually fall through. */
2122 && gimple_stmt_may_fallthru (prev)
2123 && gimple_has_location (prev))
2124 warned_p = warning_at (gimple_location (prev),
70f6d5e1 2125 OPT_Wimplicit_fallthrough_,
81fea426
MP
2126 "this statement may fall through");
2127 if (warned_p)
2128 inform (gimple_location (next), "here");
2129
2130 /* Mark this label as processed so as to prevent multiple
2131 warnings in nested switches. */
2132 FALLTHROUGH_LABEL_P (label) = true;
2133
2134 /* So that next warn_implicit_fallthrough_r will start looking for
2135 a new sequence starting with this label. */
2136 gsi_prev (gsi_p);
2137 }
2138 }
2139 break;
2140 default:
2141 break;
2142 }
2143 return NULL_TREE;
2144}
2145
2146/* Warn when a switch case falls through. */
2147
2148static void
2149maybe_warn_implicit_fallthrough (gimple_seq seq)
2150{
2151 if (!warn_implicit_fallthrough)
2152 return;
2153
2154 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2155 if (!(lang_GNU_C ()
2156 || lang_GNU_CXX ()
2157 || lang_GNU_OBJC ()))
2158 return;
2159
2160 struct walk_stmt_info wi;
2161 memset (&wi, 0, sizeof (wi));
2162 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2163}
2164
2165/* Callback for walk_gimple_seq. */
2166
2167static tree
2168expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2169 struct walk_stmt_info *)
2170{
2171 gimple *stmt = gsi_stmt (*gsi_p);
2172
2173 *handled_ops_p = true;
2174 switch (gimple_code (stmt))
2175 {
2176 case GIMPLE_TRY:
2177 case GIMPLE_BIND:
2178 case GIMPLE_CATCH:
2179 case GIMPLE_EH_FILTER:
2180 case GIMPLE_TRANSACTION:
2181 /* Walk the sub-statements. */
2182 *handled_ops_p = false;
2183 break;
2184 case GIMPLE_CALL:
2185 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2186 {
2187 gsi_remove (gsi_p, true);
2188 if (gsi_end_p (*gsi_p))
2189 return integer_zero_node;
2190
2191 bool found = false;
2192 location_t loc = gimple_location (stmt);
2193
2194 gimple_stmt_iterator gsi2 = *gsi_p;
2195 stmt = gsi_stmt (gsi2);
2196 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2197 {
2198 /* Go on until the artificial label. */
2199 tree goto_dest = gimple_goto_dest (stmt);
2200 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2201 {
2202 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2203 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2204 == goto_dest)
2205 break;
2206 }
2207
2208 /* Not found? Stop. */
2209 if (gsi_end_p (gsi2))
2210 break;
2211
2212 /* Look one past it. */
2213 gsi_next (&gsi2);
2214 }
2215
2216 /* We're looking for a case label or default label here. */
2217 while (!gsi_end_p (gsi2))
2218 {
2219 stmt = gsi_stmt (gsi2);
2220 if (gimple_code (stmt) == GIMPLE_LABEL)
2221 {
2222 tree label = gimple_label_label (as_a <glabel *> (stmt));
2223 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2224 {
2225 found = true;
2226 break;
2227 }
2228 }
2229 else
2230 /* Something other than a label. That's not expected. */
2231 break;
2232 gsi_next (&gsi2);
2233 }
2234 if (!found)
2235 warning_at (loc, 0, "attribute %<fallthrough%> not preceding "
2236 "a case label or default label");
2237 }
2238 break;
2239 default:
2240 break;
2241 }
2242 return NULL_TREE;
2243}
2244
2245/* Expand all FALLTHROUGH () calls in SEQ. */
2246
2247static void
2248expand_FALLTHROUGH (gimple_seq *seq_p)
2249{
2250 struct walk_stmt_info wi;
2251 memset (&wi, 0, sizeof (wi));
2252 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2253}
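/* Illustrative sketch, not part of the original source: the internal call
   removed above is what the front ends emit for

     case 1:
       foo ();
       __attribute__ ((fallthrough));
     case 2:
       bar ();
       break;

   If the IFN_FALLTHROUGH call is not followed (possibly via an artificial
   goto) by a case or default label, the diagnostic above is issued.  */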
2254
68e72840
SB
2255\f
2256/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
6de9cd9a
DN
2257 branch to. */
2258
2259static enum gimplify_status
726a989a 2260gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a
DN
2261{
2262 tree switch_expr = *expr_p;
726a989a 2263 gimple_seq switch_body_seq = NULL;
6de9cd9a 2264 enum gimplify_status ret;
0cd2402d
SB
2265 tree index_type = TREE_TYPE (switch_expr);
2266 if (index_type == NULL_TREE)
2267 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
6de9cd9a 2268
726a989a
RB
2269 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2270 fb_rvalue);
2271 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2272 return ret;
6de9cd9a
DN
2273
2274 if (SWITCH_BODY (switch_expr))
2275 {
9771b263
DN
2276 vec<tree> labels;
2277 vec<tree> saved_labels;
86bc8506 2278 hash_set<tree> *saved_live_switch_vars = NULL;
726a989a 2279 tree default_case = NULL_TREE;
538dd0b7 2280 gswitch *switch_stmt;
b8698a0f 2281
6de9cd9a
DN
2282 /* If someone can be bothered to fill in the labels, they can
2283 be bothered to null out the body too. */
282899df 2284 gcc_assert (!SWITCH_LABELS (switch_expr));
6de9cd9a 2285
0cd2402d 2286 /* Save old labels, get new ones from body, then restore the old
726a989a 2287 labels. Save all the things from the switch body to append after. */
6de9cd9a 2288 saved_labels = gimplify_ctxp->case_labels;
9771b263 2289 gimplify_ctxp->case_labels.create (8);
86bc8506
ML
2290
2291 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
080140bc 2292 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
a6e5212a
ML
2293 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2294 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
080140bc
ML
2295 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2296 else
2297 gimplify_ctxp->live_switch_vars = NULL;
86bc8506 2298
81fea426
MP
2299 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2300 gimplify_ctxp->in_switch_expr = true;
6de9cd9a 2301
726a989a 2302 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
a7dc5980 2303
81fea426 2304 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
146c55da 2305 maybe_warn_switch_unreachable (switch_body_seq);
81fea426
MP
2306 maybe_warn_implicit_fallthrough (switch_body_seq);
2307 /* Only do this for the outermost GIMPLE_SWITCH. */
2308 if (!gimplify_ctxp->in_switch_expr)
2309 expand_FALLTHROUGH (&switch_body_seq);
146c55da 2310
6de9cd9a
DN
2311 labels = gimplify_ctxp->case_labels;
2312 gimplify_ctxp->case_labels = saved_labels;
86bc8506
ML
2313
2314 if (gimplify_ctxp->live_switch_vars)
2315 {
2316 gcc_assert (gimplify_ctxp->live_switch_vars->elements () == 0);
2317 delete gimplify_ctxp->live_switch_vars;
2318 }
6dc4a604 2319 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
b8698a0f 2320
68e72840
SB
2321 preprocess_case_label_vec_for_gimple (labels, index_type,
2322 &default_case);
32f579f6 2323
726a989a 2324 if (!default_case)
6de9cd9a 2325 {
538dd0b7 2326 glabel *new_default;
6de9cd9a 2327
68e72840
SB
2328 default_case
2329 = build_case_label (NULL_TREE, NULL_TREE,
2330 create_artificial_label (UNKNOWN_LOCATION));
2331 new_default = gimple_build_label (CASE_LABEL (default_case));
2332 gimplify_seq_add_stmt (&switch_body_seq, new_default);
32f579f6 2333 }
f667741c 2334
538dd0b7 2335 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
fd8d363e 2336 default_case, labels);
538dd0b7 2337 gimplify_seq_add_stmt (pre_p, switch_stmt);
726a989a 2338 gimplify_seq_add_seq (pre_p, switch_body_seq);
9771b263 2339 labels.release ();
6de9cd9a 2340 }
282899df
NS
2341 else
2342 gcc_assert (SWITCH_LABELS (switch_expr));
6de9cd9a 2343
726a989a 2344 return GS_ALL_DONE;
6de9cd9a
DN
2345}
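/* Illustrative sketch, not part of the original source: a GENERIC
   SWITCH_EXPR such as

     switch (i) { case 1: foo (); break; }

   becomes a GIMPLE_SWITCH whose label vector was collected from the body
   by gimplify_case_label_expr below, roughly

     switch (i) <default: D.1, case 1: D.2>
     D.2:
       foo ();
       goto D.3;
     D.1:
     D.3:

   An artificial default label (D.1 here) is appended when the source has
   none; the D.n names are invented for the example.  */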
2346
81fea426
MP
2347/* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2348
2349static enum gimplify_status
2350gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2351{
2352 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2353 == current_function_decl);
2354
93c18375
ML
2355 tree label = LABEL_EXPR_LABEL (*expr_p);
2356 glabel *label_stmt = gimple_build_label (label);
81fea426
MP
2357 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2358 gimplify_seq_add_stmt (pre_p, label_stmt);
2359
93c18375
ML
2360 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2361 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2362 NOT_TAKEN));
2363 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2364 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2365 TAKEN));
2366
81fea426
MP
2367 return GS_ALL_DONE;
2368}
2369
ad19c4be 2370/* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
726a989a 2371
6de9cd9a 2372static enum gimplify_status
726a989a 2373gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a 2374{
953ff289 2375 struct gimplify_ctx *ctxp;
538dd0b7 2376 glabel *label_stmt;
953ff289 2377
41dbbb37 2378 /* Invalid programs can play Duff's Device type games with, for example,
953ff289 2379 #pragma omp parallel. At least in the C front end, we don't
41dbbb37
TS
2380 detect such invalid branches until after gimplification, in the
2381 diagnose_omp_blocks pass. */
953ff289 2382 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
9771b263 2383 if (ctxp->case_labels.exists ())
953ff289 2384 break;
282899df 2385
538dd0b7 2386 label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
81fea426 2387 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
9771b263 2388 ctxp->case_labels.safe_push (*expr_p);
538dd0b7 2389 gimplify_seq_add_stmt (pre_p, label_stmt);
726a989a 2390
6de9cd9a
DN
2391 return GS_ALL_DONE;
2392}
2393
6de9cd9a
DN
2394/* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2395 if necessary. */
2396
2397tree
2398build_and_jump (tree *label_p)
2399{
2400 if (label_p == NULL)
2401 /* If there's nowhere to jump, just fall through. */
65355d53 2402 return NULL_TREE;
6de9cd9a
DN
2403
2404 if (*label_p == NULL_TREE)
2405 {
c2255bc4 2406 tree label = create_artificial_label (UNKNOWN_LOCATION);
6de9cd9a
DN
2407 *label_p = label;
2408 }
2409
2410 return build1 (GOTO_EXPR, void_type_node, *label_p);
2411}
2412
2413/* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2414 This also involves building a label to jump to and communicating it to
2415 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2416
2417static enum gimplify_status
2418gimplify_exit_expr (tree *expr_p)
2419{
2420 tree cond = TREE_OPERAND (*expr_p, 0);
2421 tree expr;
2422
2423 expr = build_and_jump (&gimplify_ctxp->exit_label);
b4257cfc 2424 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
6de9cd9a
DN
2425 *expr_p = expr;
2426
2427 return GS_OK;
2428}
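/* Illustrative sketch, not part of the original source: an EXIT_EXPR with
   condition COND is rewritten here into the equivalent of

     if (COND) goto <exit_label>;

   i.e. a COND_EXPR whose then branch is the GOTO_EXPR built by
   build_and_jump and whose else branch is empty; <exit_label> is the label
   later emitted by gimplify_loop_expr above.  */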
2429
26d44ae2
RH
2430/* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2431 different from its canonical type, wrap the whole thing inside a
2432 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2433 type.
6de9cd9a 2434
26d44ae2
RH
2435 The canonical type of a COMPONENT_REF is the type of the field being
2436 referenced--unless the field is a bit-field which can be read directly
2437 in a smaller mode, in which case the canonical type is the
2438 sign-appropriate type corresponding to that mode. */
6de9cd9a 2439
26d44ae2
RH
2440static void
2441canonicalize_component_ref (tree *expr_p)
6de9cd9a 2442{
26d44ae2
RH
2443 tree expr = *expr_p;
2444 tree type;
6de9cd9a 2445
282899df 2446 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
6de9cd9a 2447
26d44ae2
RH
2448 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2449 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2450 else
2451 type = TREE_TYPE (TREE_OPERAND (expr, 1));
6de9cd9a 2452
b26c6d55
RG
2453 /* One could argue that all the stuff below is not necessary for
2454 the non-bitfield case and declare it a FE error if type
2455 adjustment would be needed. */
26d44ae2 2456 if (TREE_TYPE (expr) != type)
6de9cd9a 2457 {
b26c6d55 2458#ifdef ENABLE_TYPES_CHECKING
26d44ae2 2459 tree old_type = TREE_TYPE (expr);
b26c6d55
RG
2460#endif
2461 int type_quals;
2462
2463 /* We need to preserve qualifiers and propagate them from
2464 operand 0. */
2465 type_quals = TYPE_QUALS (type)
2466 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2467 if (TYPE_QUALS (type) != type_quals)
2468 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
6de9cd9a 2469
26d44ae2
RH
2470 /* Set the type of the COMPONENT_REF to the underlying type. */
2471 TREE_TYPE (expr) = type;
6de9cd9a 2472
b26c6d55
RG
2473#ifdef ENABLE_TYPES_CHECKING
2474 /* It is now a FE error, if the conversion from the canonical
2475 type to the original expression type is not useless. */
2476 gcc_assert (useless_type_conversion_p (old_type, type));
2477#endif
26d44ae2
RH
2478 }
2479}
6de9cd9a 2480
26d44ae2 2481/* If a NOP conversion is changing a pointer to array of foo to a pointer
d3147f64 2482 to foo, embed that change in the ADDR_EXPR by converting
26d44ae2
RH
2483 T array[U];
2484 (T *)&array
2485 ==>
2486 &array[L]
2487 where L is the lower bound. For simplicity, only do this for constant
04d86531
RG
2488 lower bound.
2489 The constraint is that the type of &array[L] is trivially convertible
2490 to T *. */
6de9cd9a 2491
26d44ae2
RH
2492static void
2493canonicalize_addr_expr (tree *expr_p)
2494{
2495 tree expr = *expr_p;
26d44ae2 2496 tree addr_expr = TREE_OPERAND (expr, 0);
04d86531 2497 tree datype, ddatype, pddatype;
6de9cd9a 2498
04d86531
RG
2499 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2500 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2501 || TREE_CODE (addr_expr) != ADDR_EXPR)
26d44ae2 2502 return;
6de9cd9a 2503
26d44ae2 2504 /* The addr_expr type should be a pointer to an array. */
04d86531 2505 datype = TREE_TYPE (TREE_TYPE (addr_expr));
26d44ae2
RH
2506 if (TREE_CODE (datype) != ARRAY_TYPE)
2507 return;
6de9cd9a 2508
04d86531
RG
2509 /* The pointer to element type shall be trivially convertible to
2510 the expression pointer type. */
26d44ae2 2511 ddatype = TREE_TYPE (datype);
04d86531 2512 pddatype = build_pointer_type (ddatype);
e5fdcd8c
RG
2513 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2514 pddatype))
26d44ae2 2515 return;
6de9cd9a 2516
26d44ae2 2517 /* The lower bound and element sizes must be constant. */
04d86531
RG
2518 if (!TYPE_SIZE_UNIT (ddatype)
2519 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
26d44ae2
RH
2520 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2521 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2522 return;
6de9cd9a 2523
26d44ae2 2524 /* All checks succeeded. Build a new node to merge the cast. */
04d86531 2525 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
26d44ae2 2526 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
5852948c 2527 NULL_TREE, NULL_TREE);
04d86531 2528 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
e5fdcd8c
RG
2529
2530 /* We can have stripped a required restrict qualifier above. */
2531 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2532 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
26d44ae2 2533}
6de9cd9a 2534
26d44ae2
RH
2535/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2536 underneath as appropriate. */
6de9cd9a 2537
26d44ae2
RH
2538static enum gimplify_status
2539gimplify_conversion (tree *expr_p)
d3147f64 2540{
db3927fb 2541 location_t loc = EXPR_LOCATION (*expr_p);
1043771b 2542 gcc_assert (CONVERT_EXPR_P (*expr_p));
c2255bc4 2543
0710ccff
NS
2544 /* Then strip away all but the outermost conversion. */
2545 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
2546
2547 /* And remove the outermost conversion if it's useless. */
2548 if (tree_ssa_useless_type_conversion (*expr_p))
2549 *expr_p = TREE_OPERAND (*expr_p, 0);
6de9cd9a 2550
26d44ae2
RH
2551 /* If we still have a conversion at the toplevel,
2552 then canonicalize some constructs. */
1043771b 2553 if (CONVERT_EXPR_P (*expr_p))
26d44ae2
RH
2554 {
2555 tree sub = TREE_OPERAND (*expr_p, 0);
6de9cd9a 2556
26d44ae2
RH
2557 /* If a NOP conversion is changing the type of a COMPONENT_REF
2558 expression, then canonicalize its type now in order to expose more
2559 redundant conversions. */
2560 if (TREE_CODE (sub) == COMPONENT_REF)
2561 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
6de9cd9a 2562
26d44ae2
RH
2563 /* If a NOP conversion is changing a pointer to array of foo
2564 to a pointer to foo, embed that change in the ADDR_EXPR. */
2565 else if (TREE_CODE (sub) == ADDR_EXPR)
2566 canonicalize_addr_expr (expr_p);
2567 }
6de9cd9a 2568
8b17cc05
RG
2569 /* If we have a conversion to a non-register type force the
2570 use of a VIEW_CONVERT_EXPR instead. */
4f934809 2571 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
db3927fb 2572 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
4f934809 2573 TREE_OPERAND (*expr_p, 0));
8b17cc05 2574
741233cf
RB
2575 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2576 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
2577 TREE_SET_CODE (*expr_p, NOP_EXPR);
2578
6de9cd9a
DN
2579 return GS_OK;
2580}
2581
77f2a970 2582/* Nonlocal VLAs seen in the current function. */
6e2830c3 2583static hash_set<tree> *nonlocal_vlas;
77f2a970 2584
96ddb7ec
JJ
2585/* The VAR_DECLs created for nonlocal VLAs for debug info purposes. */
2586static tree nonlocal_vla_vars;
2587
ad19c4be 2588/* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
a9f7c570
RH
2589 DECL_VALUE_EXPR, and it's worth re-examining things. */
2590
2591static enum gimplify_status
2592gimplify_var_or_parm_decl (tree *expr_p)
2593{
2594 tree decl = *expr_p;
2595
2596 /* ??? If this is a local variable, and it has not been seen in any
2597 outer BIND_EXPR, then it's probably the result of a duplicate
2598 declaration, for which we've already issued an error. It would
2599 be really nice if the front end wouldn't leak these at all.
2600 Currently the only known culprit is C++ destructors, as seen
2601 in g++.old-deja/g++.jason/binding.C. */
8813a647 2602 if (VAR_P (decl)
a9f7c570
RH
2603 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
2604 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2605 && decl_function_context (decl) == current_function_decl)
2606 {
1da2ed5f 2607 gcc_assert (seen_error ());
a9f7c570
RH
2608 return GS_ERROR;
2609 }
2610
41dbbb37 2611 /* When within an OMP context, notice uses of variables. */
953ff289
DN
2612 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
2613 return GS_ALL_DONE;
2614
a9f7c570
RH
2615 /* If the decl is an alias for another expression, substitute it now. */
2616 if (DECL_HAS_VALUE_EXPR_P (decl))
2617 {
77f2a970
JJ
2618 tree value_expr = DECL_VALUE_EXPR (decl);
2619
2620 /* For referenced nonlocal VLAs add a decl for debugging purposes
2621 to the current function. */
8813a647 2622 if (VAR_P (decl)
77f2a970
JJ
2623 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
2624 && nonlocal_vlas != NULL
2625 && TREE_CODE (value_expr) == INDIRECT_REF
2626 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
2627 && decl_function_context (decl) != current_function_decl)
2628 {
2629 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
74bf76ed
JJ
2630 while (ctx
2631 && (ctx->region_type == ORT_WORKSHARE
182190f2
NS
2632 || ctx->region_type == ORT_SIMD
2633 || ctx->region_type == ORT_ACC))
77f2a970 2634 ctx = ctx->outer_context;
6e2830c3 2635 if (!ctx && !nonlocal_vlas->add (decl))
77f2a970 2636 {
96ddb7ec 2637 tree copy = copy_node (decl);
77f2a970
JJ
2638
2639 lang_hooks.dup_lang_specific_decl (copy);
2eb79bbb 2640 SET_DECL_RTL (copy, 0);
77f2a970 2641 TREE_USED (copy) = 1;
96ddb7ec
JJ
2642 DECL_CHAIN (copy) = nonlocal_vla_vars;
2643 nonlocal_vla_vars = copy;
77f2a970
JJ
2644 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
2645 DECL_HAS_VALUE_EXPR_P (copy) = 1;
2646 }
2647 }
2648
2649 *expr_p = unshare_expr (value_expr);
a9f7c570
RH
2650 return GS_OK;
2651 }
2652
2653 return GS_ALL_DONE;
2654}
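/* Illustrative sketch, not part of the original source: once
   gimplify_vla_decl has processed

     char buf[n];

   every later use of "buf" that reaches this function is replaced by an
   unshared copy of its DECL_VALUE_EXPR, i.e. "*buf.addr" (the name is
   invented).  For a VLA of an enclosing function referenced here, a
   debug-only copy of the decl with the same value expression is also
   chained onto nonlocal_vla_vars.  */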
2655
66c14933
EB
2656/* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
2657
2658static void
2fb9a547
AM
2659recalculate_side_effects (tree t)
2660{
2661 enum tree_code code = TREE_CODE (t);
2662 int len = TREE_OPERAND_LENGTH (t);
2663 int i;
2664
2665 switch (TREE_CODE_CLASS (code))
2666 {
2667 case tcc_expression:
2668 switch (code)
2669 {
2670 case INIT_EXPR:
2671 case MODIFY_EXPR:
2672 case VA_ARG_EXPR:
2673 case PREDECREMENT_EXPR:
2674 case PREINCREMENT_EXPR:
2675 case POSTDECREMENT_EXPR:
2676 case POSTINCREMENT_EXPR:
2677 /* All of these have side-effects, no matter what their
2678 operands are. */
2679 return;
2680
2681 default:
2682 break;
2683 }
2684 /* Fall through. */
2685
2686 case tcc_comparison: /* a comparison expression */
2687 case tcc_unary: /* a unary arithmetic expression */
2688 case tcc_binary: /* a binary arithmetic expression */
2689 case tcc_reference: /* a reference */
2690 case tcc_vl_exp: /* a function call */
2691 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2692 for (i = 0; i < len; ++i)
2693 {
2694 tree op = TREE_OPERAND (t, i);
2695 if (op && TREE_SIDE_EFFECTS (op))
2696 TREE_SIDE_EFFECTS (t) = 1;
2697 }
2698 break;
2699
2700 case tcc_constant:
2701 /* No side-effects. */
2702 return;
2703
2704 default:
2705 gcc_unreachable ();
2706 }
2707}
2708
6de9cd9a 2709/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
726a989a 2710 node *EXPR_P.
6de9cd9a
DN
2711
2712 compound_lval
2713 : min_lval '[' val ']'
2714 | min_lval '.' ID
2715 | compound_lval '[' val ']'
2716 | compound_lval '.' ID
2717
2718 This is not part of the original SIMPLE definition, which separates
2719 array and member references, but it seems reasonable to handle them
2720 together. Also, this way we don't run into problems with union
2721 aliasing; gcc requires that for accesses through a union to alias, the
2722 union reference must be explicit, which was not always the case when we
2723 were splitting up array and member refs.
2724
726a989a 2725 PRE_P points to the sequence where side effects that must happen before
6de9cd9a
DN
2726 *EXPR_P should be stored.
2727
726a989a 2728 POST_P points to the sequence where side effects that must happen after
6de9cd9a
DN
2729 *EXPR_P should be stored. */
2730
2731static enum gimplify_status
726a989a
RB
2732gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2733 fallback_t fallback)
6de9cd9a
DN
2734{
2735 tree *p;
941f78d1 2736 enum gimplify_status ret = GS_ALL_DONE, tret;
af72267c 2737 int i;
db3927fb 2738 location_t loc = EXPR_LOCATION (*expr_p);
941f78d1 2739 tree expr = *expr_p;
6de9cd9a 2740
6de9cd9a 2741 /* Create a stack of the subexpressions so later we can walk them in
ec234842 2742 order from inner to outer. */
00f96dc9 2743 auto_vec<tree, 10> expr_stack;
6de9cd9a 2744
afe84921 2745 /* We can handle anything that get_inner_reference can deal with. */
6a720599
JM
2746 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2747 {
a9f7c570 2748 restart:
6a720599
JM
2749 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2750 if (TREE_CODE (*p) == INDIRECT_REF)
db3927fb 2751 *p = fold_indirect_ref_loc (loc, *p);
a9f7c570
RH
2752
2753 if (handled_component_p (*p))
2754 ;
2755 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2756 additional COMPONENT_REFs. */
8813a647 2757 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
a9f7c570
RH
2758 && gimplify_var_or_parm_decl (p) == GS_OK)
2759 goto restart;
2760 else
6a720599 2761 break;
b8698a0f 2762
9771b263 2763 expr_stack.safe_push (*p);
6a720599 2764 }
6de9cd9a 2765
9771b263 2766 gcc_assert (expr_stack.length ());
9e51aaf5 2767
0823efed
DN
2768 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2769 walked through and P points to the innermost expression.
6de9cd9a 2770
af72267c
RK
 2771     Java requires that we elaborate nodes in source order.  That
2772 means we must gimplify the inner expression followed by each of
2773 the indices, in order. But we can't gimplify the inner
2774 expression until we deal with any variable bounds, sizes, or
2775 positions in order to deal with PLACEHOLDER_EXPRs.
2776
2777 So we do this in three steps. First we deal with the annotations
2778 for any variables in the components, then we gimplify the base,
2779 then we gimplify any indices, from left to right. */
9771b263 2780 for (i = expr_stack.length () - 1; i >= 0; i--)
6de9cd9a 2781 {
9771b263 2782 tree t = expr_stack[i];
44de5aeb
RK
2783
2784 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6de9cd9a 2785 {
44de5aeb
RK
2786 /* Gimplify the low bound and element type size and put them into
2787 the ARRAY_REF. If these values are set, they have already been
2788 gimplified. */
726a989a 2789 if (TREE_OPERAND (t, 2) == NULL_TREE)
44de5aeb 2790 {
a7cc468a
RH
2791 tree low = unshare_expr (array_ref_low_bound (t));
2792 if (!is_gimple_min_invariant (low))
44de5aeb 2793 {
726a989a
RB
2794 TREE_OPERAND (t, 2) = low;
2795 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
ba4d8f9d 2796 post_p, is_gimple_reg,
726a989a 2797 fb_rvalue);
44de5aeb
RK
2798 ret = MIN (ret, tret);
2799 }
2800 }
19c44640
JJ
2801 else
2802 {
2803 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2804 is_gimple_reg, fb_rvalue);
2805 ret = MIN (ret, tret);
2806 }
44de5aeb 2807
19c44640 2808 if (TREE_OPERAND (t, 3) == NULL_TREE)
44de5aeb
RK
2809 {
2810 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2811 tree elmt_size = unshare_expr (array_ref_element_size (t));
a4e9ffe5 2812 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
44de5aeb
RK
2813
2814 /* Divide the element size by the alignment of the element
2815 type (above). */
ad19c4be
EB
2816 elmt_size
2817 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
44de5aeb 2818
a7cc468a 2819 if (!is_gimple_min_invariant (elmt_size))
44de5aeb 2820 {
726a989a
RB
2821 TREE_OPERAND (t, 3) = elmt_size;
2822 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
ba4d8f9d 2823 post_p, is_gimple_reg,
726a989a 2824 fb_rvalue);
44de5aeb
RK
2825 ret = MIN (ret, tret);
2826 }
6de9cd9a 2827 }
19c44640
JJ
2828 else
2829 {
2830 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
2831 is_gimple_reg, fb_rvalue);
2832 ret = MIN (ret, tret);
2833 }
6de9cd9a 2834 }
44de5aeb
RK
2835 else if (TREE_CODE (t) == COMPONENT_REF)
2836 {
2837 /* Set the field offset into T and gimplify it. */
19c44640 2838 if (TREE_OPERAND (t, 2) == NULL_TREE)
44de5aeb
RK
2839 {
2840 tree offset = unshare_expr (component_ref_field_offset (t));
2841 tree field = TREE_OPERAND (t, 1);
2842 tree factor
2843 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2844
2845 /* Divide the offset by its alignment. */
db3927fb 2846 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
44de5aeb 2847
a7cc468a 2848 if (!is_gimple_min_invariant (offset))
44de5aeb 2849 {
726a989a
RB
2850 TREE_OPERAND (t, 2) = offset;
2851 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
ba4d8f9d 2852 post_p, is_gimple_reg,
726a989a 2853 fb_rvalue);
44de5aeb
RK
2854 ret = MIN (ret, tret);
2855 }
2856 }
19c44640
JJ
2857 else
2858 {
2859 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2860 is_gimple_reg, fb_rvalue);
2861 ret = MIN (ret, tret);
2862 }
44de5aeb 2863 }
af72267c
RK
2864 }
2865
a9f7c570
RH
2866 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2867 so as to match the min_lval predicate. Failure to do so may result
2868 in the creation of large aggregate temporaries. */
2869 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2870 fallback | fb_lvalue);
af72267c
RK
2871 ret = MIN (ret, tret);
2872
ea814c66 2873 /* And finally, the indices and operands of ARRAY_REF. During this
48eb4e53 2874 loop we also remove any useless conversions. */
9771b263 2875 for (; expr_stack.length () > 0; )
af72267c 2876 {
9771b263 2877 tree t = expr_stack.pop ();
af72267c
RK
2878
2879 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2880 {
ba4d8f9d 2881 /* Gimplify the dimension. */
af72267c
RK
2882 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2883 {
2884 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
ba4d8f9d 2885 is_gimple_val, fb_rvalue);
af72267c
RK
2886 ret = MIN (ret, tret);
2887 }
2888 }
48eb4e53
RK
2889
2890 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2891
726a989a
RB
2892 /* The innermost expression P may have originally had
2893 TREE_SIDE_EFFECTS set which would have caused all the outer
2894 expressions in *EXPR_P leading to P to also have had
2895 TREE_SIDE_EFFECTS set. */
6de9cd9a 2896 recalculate_side_effects (t);
6de9cd9a
DN
2897 }
2898
2899 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
90051e16 2900 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
6de9cd9a
DN
2901 {
2902 canonicalize_component_ref (expr_p);
6de9cd9a
DN
2903 }
2904
9771b263 2905 expr_stack.release ();
07724022 2906
941f78d1
JM
2907 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
2908
6de9cd9a
DN
2909 return ret;
2910}
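/* Illustrative sketch, not part of the original source: for a reference
   such as

     a[i].f[j]

   the code above first gimplifies any variable array bounds, element sizes
   and field offsets (operands 2 and 3 of the ARRAY_REFs and the
   COMPONENT_REF), then the base "a", and finally the indices "i" and "j"
   from left to right, so that side effects are evaluated in source
   order.  */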
2911
206048bd
VR
2912/* Gimplify the self modifying expression pointed to by EXPR_P
2913 (++, --, +=, -=).
6de9cd9a
DN
2914
2915 PRE_P points to the list where side effects that must happen before
2916 *EXPR_P should be stored.
2917
2918 POST_P points to the list where side effects that must happen after
2919 *EXPR_P should be stored.
2920
2921 WANT_VALUE is nonzero iff we want to use the value of this expression
cc3c4f62 2922 in another expression.
6de9cd9a 2923
cc3c4f62
RB
2924 ARITH_TYPE is the type the computation should be performed in. */
2925
2926enum gimplify_status
726a989a 2927gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
cc3c4f62 2928 bool want_value, tree arith_type)
6de9cd9a
DN
2929{
2930 enum tree_code code;
726a989a
RB
2931 tree lhs, lvalue, rhs, t1;
2932 gimple_seq post = NULL, *orig_post_p = post_p;
6de9cd9a
DN
2933 bool postfix;
2934 enum tree_code arith_code;
2935 enum gimplify_status ret;
db3927fb 2936 location_t loc = EXPR_LOCATION (*expr_p);
6de9cd9a
DN
2937
2938 code = TREE_CODE (*expr_p);
2939
282899df
NS
2940 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2941 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
6de9cd9a
DN
2942
2943 /* Prefix or postfix? */
2944 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2945 /* Faster to treat as prefix if result is not used. */
2946 postfix = want_value;
2947 else
2948 postfix = false;
2949
82181741
JJ
2950 /* For postfix, make sure the inner expression's post side effects
2951 are executed after side effects from this expression. */
2952 if (postfix)
2953 post_p = &post;
2954
6de9cd9a
DN
2955 /* Add or subtract? */
2956 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2957 arith_code = PLUS_EXPR;
2958 else
2959 arith_code = MINUS_EXPR;
2960
2961 /* Gimplify the LHS into a GIMPLE lvalue. */
2962 lvalue = TREE_OPERAND (*expr_p, 0);
2963 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2964 if (ret == GS_ERROR)
2965 return ret;
2966
2967 /* Extract the operands to the arithmetic operation. */
2968 lhs = lvalue;
2969 rhs = TREE_OPERAND (*expr_p, 1);
2970
2971 /* For postfix operator, we evaluate the LHS to an rvalue and then use
d97c9b22 2972 that as the result value and in the postqueue operation. */
6de9cd9a
DN
2973 if (postfix)
2974 {
2975 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2976 if (ret == GS_ERROR)
2977 return ret;
6de9cd9a 2978
d97c9b22
JJ
2979 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
2980 }
cc3c4f62 2981
5be014d5
AP
2982 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
2983 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2984 {
0d82a1c8 2985 rhs = convert_to_ptrofftype_loc (loc, rhs);
5be014d5 2986 if (arith_code == MINUS_EXPR)
db3927fb 2987 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
cc3c4f62 2988 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
5be014d5 2989 }
cc3c4f62
RB
2990 else
2991 t1 = fold_convert (TREE_TYPE (*expr_p),
2992 fold_build2 (arith_code, arith_type,
2993 fold_convert (arith_type, lhs),
2994 fold_convert (arith_type, rhs)));
5be014d5 2995
6de9cd9a
DN
2996 if (postfix)
2997 {
cf1867a0 2998 gimplify_assign (lvalue, t1, pre_p);
726a989a 2999 gimplify_seq_add_seq (orig_post_p, post);
cc3c4f62 3000 *expr_p = lhs;
6de9cd9a
DN
3001 return GS_ALL_DONE;
3002 }
3003 else
3004 {
726a989a 3005 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
6de9cd9a
DN
3006 return GS_OK;
3007 }
3008}
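/* Illustrative sketch, not part of the original source: when the value is
   wanted, a postfix increment such as

     x = i++;

   is gimplified by the function above into approximately

     i.0 = i;
     i = i.0 + 1;
     x = i.0;

   whereas the prefix forms turn into a plain MODIFY_EXPR, e.g. "i = i + 1".
   The temporary name "i.0" is invented for the example.  */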
3009
d25cee4d
RH
3010/* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3011
3012static void
3013maybe_with_size_expr (tree *expr_p)
3014{
61025d1b
RK
3015 tree expr = *expr_p;
3016 tree type = TREE_TYPE (expr);
3017 tree size;
d25cee4d 3018
61025d1b
RK
3019 /* If we've already wrapped this or the type is error_mark_node, we can't do
3020 anything. */
3021 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3022 || type == error_mark_node)
d25cee4d
RH
3023 return;
3024
61025d1b 3025 /* If the size isn't known or is a constant, we have nothing to do. */
d25cee4d 3026 size = TYPE_SIZE_UNIT (type);
61025d1b
RK
3027 if (!size || TREE_CODE (size) == INTEGER_CST)
3028 return;
3029
3030 /* Otherwise, make a WITH_SIZE_EXPR. */
3031 size = unshare_expr (size);
3032 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3033 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
d25cee4d
RH
3034}
3035
726a989a 3036/* Helper for gimplify_call_expr.  Gimplify a single argument *ARG_P.
1282697f 3037 Store any side-effects in PRE_P. CALL_LOCATION is the location of
381cdae4
RB
3038 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3039 gimplified to an SSA name. */
e4f78bd4 3040
fe6ebcf1 3041enum gimplify_status
381cdae4
RB
3042gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3043 bool allow_ssa)
e4f78bd4
JM
3044{
3045 bool (*test) (tree);
3046 fallback_t fb;
3047
3048 /* In general, we allow lvalues for function arguments to avoid
3049 extra overhead of copying large aggregates out of even larger
3050 aggregates into temporaries only to copy the temporaries to
3051 the argument list. Make optimizers happy by pulling out to
3052 temporaries those types that fit in registers. */
726a989a 3053 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
e4f78bd4
JM
3054 test = is_gimple_val, fb = fb_rvalue;
3055 else
b4ef8aac
JM
3056 {
3057 test = is_gimple_lvalue, fb = fb_either;
3058 /* Also strip a TARGET_EXPR that would force an extra copy. */
3059 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3060 {
3061 tree init = TARGET_EXPR_INITIAL (*arg_p);
3062 if (init
3063 && !VOID_TYPE_P (TREE_TYPE (init)))
3064 *arg_p = init;
3065 }
3066 }
e4f78bd4 3067
d25cee4d 3068 /* If this is a variable sized type, we must remember the size. */
726a989a 3069 maybe_with_size_expr (arg_p);
d25cee4d 3070
c2255bc4 3071 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
1282697f
AH
3072 /* Make sure arguments have the same location as the function call
3073 itself. */
3074 protected_set_expr_location (*arg_p, call_location);
3075
e4f78bd4
JM
3076 /* There is a sequence point before a function call. Side effects in
3077 the argument list must occur before the actual call. So, when
3078 gimplifying arguments, force gimplify_expr to use an internal
3079 post queue which is then appended to the end of PRE_P. */
381cdae4 3080 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
e4f78bd4
JM
3081}
3082
d26fc979
JJ
3083/* Don't fold inside offloading or taskreg regions: it can break code by
3084 adding decl references that weren't in the source. We'll do it during
3085 omplower pass instead. */
88ac13da
TS
3086
3087static bool
3088maybe_fold_stmt (gimple_stmt_iterator *gsi)
3089{
3090 struct gimplify_omp_ctx *ctx;
3091 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
d9a6bd32 3092 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
88ac13da
TS
3093 return false;
3094 return fold_stmt (gsi);
3095}
3096
815d9cc6
XR
3097/* Add a gimple call to __builtin_cilk_detach to GIMPLE sequence PRE_P,
3098 with the pointer to the proper cilk frame. */
3099static void
3100gimplify_cilk_detach (gimple_seq *pre_p)
3101{
3102 tree frame = cfun->cilk_frame_decl;
3103 tree ptrf = build1 (ADDR_EXPR, cilk_frame_ptr_type_decl,
3104 frame);
3105 gcall *detach = gimple_build_call (cilk_detach_fndecl, 1,
3106 ptrf);
3107 gimplify_seq_add_stmt(pre_p, detach);
3108}
3109
726a989a 3110/* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
90051e16 3111 WANT_VALUE is true if the result of the call is desired. */
6de9cd9a
DN
3112
3113static enum gimplify_status
726a989a 3114gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6de9cd9a 3115{
f20ca725 3116 tree fndecl, parms, p, fnptrtype;
6de9cd9a 3117 enum gimplify_status ret;
5039610b 3118 int i, nargs;
538dd0b7 3119 gcall *call;
ed9c79e1 3120 bool builtin_va_start_p = false;
db3927fb 3121 location_t loc = EXPR_LOCATION (*expr_p);
6de9cd9a 3122
282899df 3123 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
6de9cd9a 3124
d3147f64 3125 /* For reliable diagnostics during inlining, it is necessary that
6de9cd9a 3126 every call_expr be annotated with file and line. */
a281759f
PB
3127 if (! EXPR_HAS_LOCATION (*expr_p))
3128 SET_EXPR_LOCATION (*expr_p, input_location);
6de9cd9a 3129
0e37a2f3
MP
3130 /* Gimplify internal functions created in the FEs. */
3131 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3132 {
1304953e
JJ
3133 if (want_value)
3134 return GS_ALL_DONE;
3135
0e37a2f3
MP
3136 nargs = call_expr_nargs (*expr_p);
3137 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3138 auto_vec<tree> vargs (nargs);
3139
3140 for (i = 0; i < nargs; i++)
3141 {
3142 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3143 EXPR_LOCATION (*expr_p));
3144 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3145 }
815d9cc6
XR
3146
3147 if (EXPR_CILK_SPAWN (*expr_p))
3148 gimplify_cilk_detach (pre_p);
355fe088 3149 gimple *call = gimple_build_call_internal_vec (ifn, vargs);
0e37a2f3
MP
3150 gimplify_seq_add_stmt (pre_p, call);
3151 return GS_ALL_DONE;
3152 }
3153
6de9cd9a
DN
3154 /* This may be a call to a builtin function.
3155
3156 Builtin function calls may be transformed into different
3157 (and more efficient) builtin function calls under certain
3158 circumstances. Unfortunately, gimplification can muck things
3159 up enough that the builtin expanders are not aware that certain
3160 transformations are still valid.
3161
3162 So we attempt transformation/gimplification of the call before
3163 we gimplify the CALL_EXPR. At this time we do not manage to
3164 transform all calls in the same manner as the expanders do, but
3165 we do transform most of them. */
726a989a 3166 fndecl = get_callee_fndecl (*expr_p);
3537a0cd
RG
3167 if (fndecl
3168 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3169 switch (DECL_FUNCTION_CODE (fndecl))
3170 {
03c00798
EB
3171 case BUILT_IN_ALLOCA:
3172 case BUILT_IN_ALLOCA_WITH_ALIGN:
3173 /* If the call has been built for a variable-sized object, then we
3174 want to restore the stack level when the enclosing BIND_EXPR is
3175 exited to reclaim the allocated space; otherwise, we precisely
3176 need to do the opposite and preserve the latest stack level. */
3177 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3178 gimplify_ctxp->save_stack = true;
3179 else
3180 gimplify_ctxp->keep_stack = true;
3181 break;
3182
3537a0cd 3183 case BUILT_IN_VA_START:
2efcfa4e 3184 {
726a989a 3185 builtin_va_start_p = TRUE;
5039610b 3186 if (call_expr_nargs (*expr_p) < 2)
2efcfa4e
AP
3187 {
3188 error ("too few arguments to function %<va_start%>");
c2255bc4 3189 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2efcfa4e
AP
3190 return GS_OK;
3191 }
b8698a0f 3192
5039610b 3193 if (fold_builtin_next_arg (*expr_p, true))
2efcfa4e 3194 {
c2255bc4 3195 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2efcfa4e
AP
3196 return GS_OK;
3197 }
3537a0cd
RG
3198 break;
3199 }
b25aad5f 3200
3537a0cd
RG
3201 default:
3202 ;
3203 }
3204 if (fndecl && DECL_BUILT_IN (fndecl))
3205 {
3206 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3207 if (new_tree && new_tree != *expr_p)
3208 {
3209 /* There was a transformation of this call which computes the
3210 same value, but in a more efficient way. Return and try
3211 again. */
3212 *expr_p = new_tree;
3213 return GS_OK;
2efcfa4e 3214 }
6de9cd9a
DN
3215 }
3216
f20ca725
RG
3217 /* Remember the original function pointer type. */
3218 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3219
6de9cd9a
DN
3220 /* There is a sequence point before the call, so any side effects in
3221 the calling expression must occur before the actual call. Force
3222 gimplify_expr to use an internal post queue. */
5039610b 3223 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
0f59171d 3224 is_gimple_call_addr, fb_rvalue);
6de9cd9a 3225
5039610b
SL
3226 nargs = call_expr_nargs (*expr_p);
3227
e36711f3 3228 /* Get argument types for verification. */
726a989a 3229 fndecl = get_callee_fndecl (*expr_p);
e36711f3 3230 parms = NULL_TREE;
726a989a
RB
3231 if (fndecl)
3232 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
a96c6a62
RB
3233 else
3234 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
e36711f3 3235
726a989a 3236 if (fndecl && DECL_ARGUMENTS (fndecl))
f9487002 3237 p = DECL_ARGUMENTS (fndecl);
004e2fa7 3238 else if (parms)
f9487002 3239 p = parms;
6ef5231b 3240 else
498e51ca 3241 p = NULL_TREE;
f9487002
JJ
3242 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3243 ;
6ef5231b
JJ
3244
3245 /* If the last argument is __builtin_va_arg_pack () and it is not
3246 passed as a named argument, decrease the number of CALL_EXPR
3247 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3248 if (!p
3249 && i < nargs
3250 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3251 {
3252 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3253 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3254
3255 if (last_arg_fndecl
3256 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
3257 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
3258 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
3259 {
3260 tree call = *expr_p;
3261
3262 --nargs;
db3927fb
AH
3263 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3264 CALL_EXPR_FN (call),
3265 nargs, CALL_EXPR_ARGP (call));
726a989a
RB
3266
3267 /* Copy all CALL_EXPR flags, location and block, except
6ef5231b
JJ
3268 CALL_EXPR_VA_ARG_PACK flag. */
3269 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3270 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3271 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3272 = CALL_EXPR_RETURN_SLOT_OPT (call);
3273 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
5e278028 3274 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
726a989a 3275
6ef5231b
JJ
3276 /* Set CALL_EXPR_VA_ARG_PACK. */
3277 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3278 }
3279 }
e36711f3 3280
381cdae4
RB
3281 /* If the call returns twice then after building the CFG the call
3282 argument computations will no longer dominate the call because
3283 we add an abnormal incoming edge to the call. So do not use SSA
3284 vars there. */
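  /* A typical example of such a call is setjmp.  */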
3285 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3286
f2d3d07e 3287 /* Gimplify the function arguments. */
726a989a 3288 if (nargs > 0)
6de9cd9a 3289 {
726a989a
RB
3290 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3291 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3292 PUSH_ARGS_REVERSED ? i-- : i++)
3293 {
3294 enum gimplify_status t;
6de9cd9a 3295
726a989a
RB
3296 /* Avoid gimplifying the second argument to va_start, which needs to
3297 be the plain PARM_DECL. */
3298 if ((i != 1) || !builtin_va_start_p)
3299 {
1282697f 3300 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
381cdae4 3301 EXPR_LOCATION (*expr_p), ! returns_twice);
6de9cd9a 3302
726a989a
RB
3303 if (t == GS_ERROR)
3304 ret = GS_ERROR;
3305 }
3306 }
6de9cd9a 3307 }
6de9cd9a 3308
f2d3d07e
RH
3309 /* Gimplify the static chain. */
3310 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3311 {
3312 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3313 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3314 else
3315 {
3316 enum gimplify_status t;
3317 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
381cdae4 3318 EXPR_LOCATION (*expr_p), ! returns_twice);
f2d3d07e
RH
3319 if (t == GS_ERROR)
3320 ret = GS_ERROR;
3321 }
3322 }
3323
33922890
RG
3324 /* Verify the function result. */
3325 if (want_value && fndecl
f20ca725 3326 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
33922890
RG
3327 {
3328 error_at (loc, "using result of function returning %<void%>");
3329 ret = GS_ERROR;
3330 }
3331
6de9cd9a 3332 /* Try this again in case gimplification exposed something. */
6f538523 3333 if (ret != GS_ERROR)
6de9cd9a 3334 {
db3927fb 3335 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
6f538523 3336
82d6e6fc 3337 if (new_tree && new_tree != *expr_p)
5039610b
SL
3338 {
3339 /* There was a transformation of this call which computes the
3340 same value, but in a more efficient way. Return and try
3341 again. */
82d6e6fc 3342 *expr_p = new_tree;
5039610b 3343 return GS_OK;
6de9cd9a
DN
3344 }
3345 }
726a989a
RB
3346 else
3347 {
df8fa700 3348 *expr_p = error_mark_node;
726a989a
RB
3349 return GS_ERROR;
3350 }
6de9cd9a
DN
3351
3352 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3353 decl. This allows us to eliminate redundant or useless
3354 calls to "const" functions. */
becfd6e5
KZ
3355 if (TREE_CODE (*expr_p) == CALL_EXPR)
3356 {
3357 int flags = call_expr_flags (*expr_p);
3358 if (flags & (ECF_CONST | ECF_PURE)
3359 /* An infinite loop is considered a side effect. */
3360 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3361 TREE_SIDE_EFFECTS (*expr_p) = 0;
3362 }
726a989a
RB
3363
3364 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3365 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3366 form and delegate the creation of a GIMPLE_CALL to
3367 gimplify_modify_expr. This is always possible because when
3368 WANT_VALUE is true, the caller wants the result of this call into
3369 a temporary, which means that we will emit an INIT_EXPR in
3370 internal_get_tmp_var which will then be handled by
3371 gimplify_modify_expr. */
3372 if (!want_value)
3373 {
3374 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3375 have to do is replicate it as a GIMPLE_CALL tuple. */
64e0f5ff 3376 gimple_stmt_iterator gsi;
726a989a 3377 call = gimple_build_call_from_tree (*expr_p);
f20ca725 3378 gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
f6b64c35 3379 notice_special_calls (call);
815d9cc6
XR
3380 if (EXPR_CILK_SPAWN (*expr_p))
3381 gimplify_cilk_detach (pre_p);
726a989a 3382 gimplify_seq_add_stmt (pre_p, call);
64e0f5ff 3383 gsi = gsi_last (*pre_p);
88ac13da 3384 maybe_fold_stmt (&gsi);
726a989a
RB
3385 *expr_p = NULL_TREE;
3386 }
f20ca725
RG
3387 else
3388 /* Remember the original function type. */
3389 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3390 CALL_EXPR_FN (*expr_p));
726a989a 3391
6de9cd9a
DN
3392 return ret;
3393}
3394
3395/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3396 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3397
3398 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3399 condition is true or false, respectively. If null, we should generate
3400 our own to skip over the evaluation of this specific expression.
3401
ca80e52b
EB
3402 LOCUS is the source location of the COND_EXPR.
3403
6de9cd9a
DN
3404 This function is the tree equivalent of do_jump.
3405
3406 shortcut_cond_r should only be called by shortcut_cond_expr. */
3407
3408static tree
ca80e52b
EB
3409shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3410 location_t locus)
6de9cd9a
DN
3411{
3412 tree local_label = NULL_TREE;
3413 tree t, expr = NULL;
3414
3415 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3416 retain the shortcut semantics. Just insert the gotos here;
3417 shortcut_cond_expr will append the real blocks later. */
3418 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3419 {
ca80e52b
EB
3420 location_t new_locus;
3421
6de9cd9a
DN
3422 /* Turn if (a && b) into
3423
3424 if (a); else goto no;
3425 if (b) goto yes; else goto no;
3426 (no:) */
3427
3428 if (false_label_p == NULL)
3429 false_label_p = &local_label;
3430
ca80e52b
EB
3431 /* Keep the original source location on the first 'if'. */
3432 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
6de9cd9a
DN
3433 append_to_statement_list (t, &expr);
3434
ca80e52b
EB
3435 /* Set the source location of the && on the second 'if'. */
3436 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3437 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3438 new_locus);
6de9cd9a
DN
3439 append_to_statement_list (t, &expr);
3440 }
3441 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3442 {
ca80e52b
EB
3443 location_t new_locus;
3444
6de9cd9a
DN
3445 /* Turn if (a || b) into
3446
3447 if (a) goto yes;
3448 if (b) goto yes; else goto no;
3449 (yes:) */
3450
3451 if (true_label_p == NULL)
3452 true_label_p = &local_label;
3453
ca80e52b
EB
3454 /* Keep the original source location on the first 'if'. */
3455 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
6de9cd9a
DN
3456 append_to_statement_list (t, &expr);
3457
ca80e52b
EB
3458 /* Set the source location of the || on the second 'if'. */
3459 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3460 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3461 new_locus);
6de9cd9a
DN
3462 append_to_statement_list (t, &expr);
3463 }
1537737f
JJ
3464 else if (TREE_CODE (pred) == COND_EXPR
3465 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3466 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
6de9cd9a 3467 {
ca80e52b
EB
3468 location_t new_locus;
3469
6de9cd9a
DN
3470 /* As long as we're messing with gotos, turn if (a ? b : c) into
3471 if (a)
3472 if (b) goto yes; else goto no;
3473 else
1537737f
JJ
3474 if (c) goto yes; else goto no;
3475
3476 Don't do this if one of the arms has void type, which can happen
3477 in C++ when the arm is throw. */
ca80e52b
EB
3478
3479 /* Keep the original source location on the first 'if'. Set the source
3480 location of the ? on the second 'if'. */
3481 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
b4257cfc
RG
3482 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3483 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
ca80e52b 3484 false_label_p, locus),
b4257cfc 3485 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
ca80e52b 3486 false_label_p, new_locus));
6de9cd9a
DN
3487 }
3488 else
3489 {
b4257cfc
RG
3490 expr = build3 (COND_EXPR, void_type_node, pred,
3491 build_and_jump (true_label_p),
3492 build_and_jump (false_label_p));
ca80e52b 3493 SET_EXPR_LOCATION (expr, locus);
6de9cd9a
DN
3494 }
3495
3496 if (local_label)
3497 {
3498 t = build1 (LABEL_EXPR, void_type_node, local_label);
3499 append_to_statement_list (t, &expr);
3500 }
3501
3502 return expr;
3503}
3504
726a989a
RB
3505/* Given a conditional expression EXPR with short-circuit boolean
3506 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
073a8998 3507 predicate apart into the equivalent sequence of conditionals. */
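/* For instance, a statement such as

     if (a && b) x = 1; else x = 2;

   is rewritten along the lines of

     if (a) ; else goto no;
     if (b) ; else goto no;
     x = 1;
     goto end;
   no:
     x = 2;
   end:
     ;

   with the labels shown being illustrative.  */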
726a989a 3508
6de9cd9a
DN
3509static tree
3510shortcut_cond_expr (tree expr)
3511{
3512 tree pred = TREE_OPERAND (expr, 0);
3513 tree then_ = TREE_OPERAND (expr, 1);
3514 tree else_ = TREE_OPERAND (expr, 2);
3515 tree true_label, false_label, end_label, t;
3516 tree *true_label_p;
3517 tree *false_label_p;
089efaa4 3518 bool emit_end, emit_false, jump_over_else;
65355d53
RH
3519 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
3520 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
6de9cd9a
DN
3521
3522 /* First do simple transformations. */
65355d53 3523 if (!else_se)
6de9cd9a 3524 {
ca80e52b
EB
3525 /* If there is no 'else', turn
3526 if (a && b) then c
3527 into
3528 if (a) if (b) then c. */
6de9cd9a
DN
3529 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3530 {
ca80e52b 3531 /* Keep the original source location on the first 'if'. */
8400e75e 3532 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
6de9cd9a 3533 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
ca80e52b
EB
3534 /* Set the source location of the && on the second 'if'. */
3535 if (EXPR_HAS_LOCATION (pred))
3536 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
6de9cd9a 3537 then_ = shortcut_cond_expr (expr);
4356a1bf 3538 then_se = then_ && TREE_SIDE_EFFECTS (then_);
6de9cd9a 3539 pred = TREE_OPERAND (pred, 0);
b4257cfc 3540 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
ca80e52b 3541 SET_EXPR_LOCATION (expr, locus);
6de9cd9a
DN
3542 }
3543 }
726a989a 3544
65355d53 3545 if (!then_se)
6de9cd9a
DN
3546 {
3547 /* If there is no 'then', turn
3548 if (a || b); else d
3549 into
3550 if (a); else if (b); else d. */
3551 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3552 {
ca80e52b 3553 /* Keep the original source location on the first 'if'. */
8400e75e 3554 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
6de9cd9a 3555 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
ca80e52b
EB
3556 /* Set the source location of the || on the second 'if'. */
3557 if (EXPR_HAS_LOCATION (pred))
3558 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
6de9cd9a 3559 else_ = shortcut_cond_expr (expr);
4356a1bf 3560 else_se = else_ && TREE_SIDE_EFFECTS (else_);
6de9cd9a 3561 pred = TREE_OPERAND (pred, 0);
b4257cfc 3562 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
ca80e52b 3563 SET_EXPR_LOCATION (expr, locus);
6de9cd9a
DN
3564 }
3565 }
3566
3567 /* If we're done, great. */
3568 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
3569 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
3570 return expr;
3571
3572 /* Otherwise we need to mess with gotos. Change
3573 if (a) c; else d;
3574 to
3575 if (a); else goto no;
3576 c; goto end;
3577 no: d; end:
3578 and recursively gimplify the condition. */
3579
3580 true_label = false_label = end_label = NULL_TREE;
3581
3582 /* If our arms just jump somewhere, hijack those labels so we don't
3583 generate jumps to jumps. */
3584
65355d53
RH
3585 if (then_
3586 && TREE_CODE (then_) == GOTO_EXPR
6de9cd9a
DN
3587 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
3588 {
3589 true_label = GOTO_DESTINATION (then_);
65355d53
RH
3590 then_ = NULL;
3591 then_se = false;
6de9cd9a
DN
3592 }
3593
65355d53
RH
3594 if (else_
3595 && TREE_CODE (else_) == GOTO_EXPR
6de9cd9a
DN
3596 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
3597 {
3598 false_label = GOTO_DESTINATION (else_);
65355d53
RH
3599 else_ = NULL;
3600 else_se = false;
6de9cd9a
DN
3601 }
3602
9cf737f8 3603 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
6de9cd9a
DN
3604 if (true_label)
3605 true_label_p = &true_label;
3606 else
3607 true_label_p = NULL;
3608
3609 /* The 'else' branch also needs a label if it contains interesting code. */
65355d53 3610 if (false_label || else_se)
6de9cd9a
DN
3611 false_label_p = &false_label;
3612 else
3613 false_label_p = NULL;
3614
3615 /* If there was nothing else in our arms, just forward the label(s). */
65355d53 3616 if (!then_se && !else_se)
ca80e52b 3617 return shortcut_cond_r (pred, true_label_p, false_label_p,
8400e75e 3618 EXPR_LOC_OR_LOC (expr, input_location));
6de9cd9a
DN
3619
3620 /* If our last subexpression already has a terminal label, reuse it. */
65355d53 3621 if (else_se)
ca80e52b 3622 t = expr_last (else_);
65355d53 3623 else if (then_se)
ca80e52b 3624 t = expr_last (then_);
65355d53 3625 else
ca80e52b
EB
3626 t = NULL;
3627 if (t && TREE_CODE (t) == LABEL_EXPR)
3628 end_label = LABEL_EXPR_LABEL (t);
6de9cd9a
DN
3629
3630 /* If we don't care about jumping to the 'else' branch, jump to the end
3631 if the condition is false. */
3632 if (!false_label_p)
3633 false_label_p = &end_label;
3634
3635 /* We only want to emit these labels if we aren't hijacking them. */
3636 emit_end = (end_label == NULL_TREE);
3637 emit_false = (false_label == NULL_TREE);
3638
089efaa4
ILT
3639 /* We only emit the jump over the else clause if we have to--if the
3640 then clause may fall through. Otherwise we can wind up with a
3641 useless jump and a useless label at the end of gimplified code,
3642 which will cause us to think that this conditional as a whole
3643 falls through even if it doesn't. If we then inline a function
3644 which ends with such a condition, that can cause us to issue an
3645 inappropriate warning about control reaching the end of a
3646 non-void function. */
3647 jump_over_else = block_may_fallthru (then_);
3648
ca80e52b 3649 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
8400e75e 3650 EXPR_LOC_OR_LOC (expr, input_location));
6de9cd9a
DN
3651
3652 expr = NULL;
3653 append_to_statement_list (pred, &expr);
3654
3655 append_to_statement_list (then_, &expr);
65355d53 3656 if (else_se)
6de9cd9a 3657 {
089efaa4
ILT
3658 if (jump_over_else)
3659 {
ca80e52b 3660 tree last = expr_last (expr);
089efaa4 3661 t = build_and_jump (&end_label);
ca80e52b
EB
3662 if (EXPR_HAS_LOCATION (last))
3663 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
089efaa4
ILT
3664 append_to_statement_list (t, &expr);
3665 }
6de9cd9a
DN
3666 if (emit_false)
3667 {
3668 t = build1 (LABEL_EXPR, void_type_node, false_label);
3669 append_to_statement_list (t, &expr);
3670 }
3671 append_to_statement_list (else_, &expr);
3672 }
3673 if (emit_end && end_label)
3674 {
3675 t = build1 (LABEL_EXPR, void_type_node, end_label);
3676 append_to_statement_list (t, &expr);
3677 }
3678
3679 return expr;
3680}
3681
3682/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
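/* For example, boolifying the condition of "if (n)" where N is an int
   yields a conversion of N to boolean_type_node, whereas a comparison
   such as "a < b" keeps its tree code and merely has its type set to
   BOOLEAN_TYPE.  */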
3683
50674e96 3684tree
6de9cd9a
DN
3685gimple_boolify (tree expr)
3686{
3687 tree type = TREE_TYPE (expr);
db3927fb 3688 location_t loc = EXPR_LOCATION (expr);
6de9cd9a 3689
554cf330
JJ
3690 if (TREE_CODE (expr) == NE_EXPR
3691 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
3692 && integer_zerop (TREE_OPERAND (expr, 1)))
3693 {
3694 tree call = TREE_OPERAND (expr, 0);
3695 tree fn = get_callee_fndecl (call);
3696
d53c73e0
JJ
3697 /* For __builtin_expect ((long) (x), y) recurse into x as well
3698 if x is truth_value_p. */
554cf330
JJ
3699 if (fn
3700 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
3701 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
3702 && call_expr_nargs (call) == 2)
3703 {
3704 tree arg = CALL_EXPR_ARG (call, 0);
3705 if (arg)
3706 {
3707 if (TREE_CODE (arg) == NOP_EXPR
3708 && TREE_TYPE (arg) == TREE_TYPE (call))
3709 arg = TREE_OPERAND (arg, 0);
d53c73e0
JJ
3710 if (truth_value_p (TREE_CODE (arg)))
3711 {
3712 arg = gimple_boolify (arg);
3713 CALL_EXPR_ARG (call, 0)
3714 = fold_convert_loc (loc, TREE_TYPE (call), arg);
3715 }
554cf330
JJ
3716 }
3717 }
3718 }
3719
6de9cd9a
DN
3720 switch (TREE_CODE (expr))
3721 {
3722 case TRUTH_AND_EXPR:
3723 case TRUTH_OR_EXPR:
3724 case TRUTH_XOR_EXPR:
3725 case TRUTH_ANDIF_EXPR:
3726 case TRUTH_ORIF_EXPR:
3727 /* Also boolify the arguments of truth exprs. */
3728 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
3729 /* FALLTHRU */
3730
3731 case TRUTH_NOT_EXPR:
3732 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
6de9cd9a 3733
6de9cd9a 3734 /* These expressions always produce boolean results. */
7f3ff782
KT
3735 if (TREE_CODE (type) != BOOLEAN_TYPE)
3736 TREE_TYPE (expr) = boolean_type_node;
6de9cd9a 3737 return expr;
d3147f64 3738
8170608b 3739 case ANNOTATE_EXPR:
718c4601 3740 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
8170608b 3741 {
718c4601
EB
3742 case annot_expr_ivdep_kind:
3743 case annot_expr_no_vector_kind:
3744 case annot_expr_vector_kind:
8170608b
TB
3745 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3746 if (TREE_CODE (type) != BOOLEAN_TYPE)
3747 TREE_TYPE (expr) = boolean_type_node;
3748 return expr;
718c4601
EB
3749 default:
3750 gcc_unreachable ();
8170608b 3751 }
8170608b 3752
6de9cd9a 3753 default:
7f3ff782
KT
3754 if (COMPARISON_CLASS_P (expr))
3755 {
3756 /* These expressions always produce boolean results. */
3757 if (TREE_CODE (type) != BOOLEAN_TYPE)
3758 TREE_TYPE (expr) = boolean_type_node;
3759 return expr;
3760 }
6de9cd9a
DN
3761 /* Other expressions that get here must have boolean values, but
3762 might need to be converted to the appropriate mode. */
7f3ff782 3763 if (TREE_CODE (type) == BOOLEAN_TYPE)
1d15f620 3764 return expr;
db3927fb 3765 return fold_convert_loc (loc, boolean_type_node, expr);
6de9cd9a
DN
3766 }
3767}
3768
aea74440
JJ
3769/* Given a conditional expression *EXPR_P without side effects, gimplify
3770 its operands. New statements are inserted to PRE_P. */
3771
3772static enum gimplify_status
726a989a 3773gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
aea74440
JJ
3774{
3775 tree expr = *expr_p, cond;
3776 enum gimplify_status ret, tret;
3777 enum tree_code code;
3778
3779 cond = gimple_boolify (COND_EXPR_COND (expr));
3780
3781 /* We need to handle && and || specially, as their gimplification
3782 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
3783 code = TREE_CODE (cond);
3784 if (code == TRUTH_ANDIF_EXPR)
3785 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3786 else if (code == TRUTH_ORIF_EXPR)
3787 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
726a989a 3788 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
aea74440
JJ
3789 COND_EXPR_COND (*expr_p) = cond;
3790
3791 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3792 is_gimple_val, fb_rvalue);
3793 ret = MIN (ret, tret);
3794 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3795 is_gimple_val, fb_rvalue);
3796
3797 return MIN (ret, tret);
3798}
3799
ad19c4be 3800/* Return true if evaluating EXPR could trap.
aea74440
JJ
3801 EXPR is GENERIC, while tree_could_trap_p can be called
3802 only on GIMPLE. */
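/* For instance, "*p" or "x / y" with non-constant operands could trap,
   while adding two unsigned values could not.  */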
3803
3804static bool
3805generic_expr_could_trap_p (tree expr)
3806{
3807 unsigned i, n;
3808
3809 if (!expr || is_gimple_val (expr))
3810 return false;
3811
3812 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3813 return true;
3814
3815 n = TREE_OPERAND_LENGTH (expr);
3816 for (i = 0; i < n; i++)
3817 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3818 return true;
3819
3820 return false;
3821}
3822
206048bd 3823/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
6de9cd9a
DN
3824 into
3825
3826 if (p) if (p)
3827 t1 = a; a;
3828 else or else
3829 t1 = b; b;
3830 t1;
3831
3832 The second form is used when *EXPR_P is of type void.
3833
3834 PRE_P points to the list where side effects that must happen before
dae7ec87 3835 *EXPR_P should be stored. */
6de9cd9a
DN
3836
3837static enum gimplify_status
726a989a 3838gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
6de9cd9a
DN
3839{
3840 tree expr = *expr_p;
06ec59e6
EB
3841 tree type = TREE_TYPE (expr);
3842 location_t loc = EXPR_LOCATION (expr);
3843 tree tmp, arm1, arm2;
6de9cd9a 3844 enum gimplify_status ret;
726a989a
RB
3845 tree label_true, label_false, label_cont;
3846 bool have_then_clause_p, have_else_clause_p;
538dd0b7 3847 gcond *cond_stmt;
726a989a
RB
3848 enum tree_code pred_code;
3849 gimple_seq seq = NULL;
26d44ae2
RH
3850
3851 /* If this COND_EXPR has a value, copy the values into a temporary within
3852 the arms. */
06ec59e6 3853 if (!VOID_TYPE_P (type))
26d44ae2 3854 {
06ec59e6 3855 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
aff98faf
AO
3856 tree result;
3857
06ec59e6
EB
3858 /* If either an rvalue is ok or we do not require an lvalue, create the
3859 temporary. But we cannot do that if the type is addressable. */
3860 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
c3e203cf 3861 && !TREE_ADDRESSABLE (type))
aff98faf 3862 {
aea74440
JJ
3863 if (gimplify_ctxp->allow_rhs_cond_expr
3864 /* If either branch has side effects or could trap, it can't be
3865 evaluated unconditionally. */
06ec59e6
EB
3866 && !TREE_SIDE_EFFECTS (then_)
3867 && !generic_expr_could_trap_p (then_)
3868 && !TREE_SIDE_EFFECTS (else_)
3869 && !generic_expr_could_trap_p (else_))
aea74440
JJ
3870 return gimplify_pure_cond_expr (expr_p, pre_p);
3871
06ec59e6
EB
3872 tmp = create_tmp_var (type, "iftmp");
3873 result = tmp;
aff98faf 3874 }
06ec59e6
EB
3875
3876 /* Otherwise, only create and copy references to the values. */
26d44ae2
RH
3877 else
3878 {
06ec59e6 3879 type = build_pointer_type (type);
aff98faf 3880
06ec59e6
EB
3881 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3882 then_ = build_fold_addr_expr_loc (loc, then_);
aff98faf 3883
06ec59e6
EB
3884 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3885 else_ = build_fold_addr_expr_loc (loc, else_);
3886
3887 expr
3888 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
aea74440 3889
726a989a 3890 tmp = create_tmp_var (type, "iftmp");
70f34814 3891 result = build_simple_mem_ref_loc (loc, tmp);
26d44ae2
RH
3892 }
3893
06ec59e6
EB
3894 /* Build the new then clause, `tmp = then_;'. But don't build the
3895 assignment if the value is void; in C++ it can be if it's a throw. */
3896 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3897 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
26d44ae2 3898
06ec59e6
EB
3899 /* Similarly, build the new else clause, `tmp = else_;'. */
3900 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3901 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
26d44ae2
RH
3902
3903 TREE_TYPE (expr) = void_type_node;
3904 recalculate_side_effects (expr);
3905
d91ba7b0 3906 /* Move the COND_EXPR to the prequeue. */
726a989a 3907 gimplify_stmt (&expr, pre_p);
26d44ae2 3908
aff98faf 3909 *expr_p = result;
726a989a 3910 return GS_ALL_DONE;
26d44ae2
RH
3911 }
3912
f2f81d57
EB
3913 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
3914 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
3915 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
3916 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
3917
26d44ae2
RH
3918 /* Make sure the condition has BOOLEAN_TYPE. */
3919 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3920
3921 /* Break apart && and || conditions. */
3922 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
3923 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
3924 {
3925 expr = shortcut_cond_expr (expr);
3926
3927 if (expr != *expr_p)
3928 {
3929 *expr_p = expr;
3930
3931 /* We can't rely on gimplify_expr to re-gimplify the expanded
3932 form properly, as cleanups might cause the target labels to be
3933 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
3934 set up a conditional context. */
3935 gimple_push_condition ();
726a989a 3936 gimplify_stmt (expr_p, &seq);
26d44ae2 3937 gimple_pop_condition (pre_p);
726a989a 3938 gimple_seq_add_seq (pre_p, seq);
26d44ae2
RH
3939
3940 return GS_ALL_DONE;
3941 }
3942 }
3943
3944 /* Now do the normal gimplification. */
26d44ae2 3945
726a989a
RB
3946 /* Gimplify condition. */
3947 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
3948 fb_rvalue);
26d44ae2 3949 if (ret == GS_ERROR)
726a989a
RB
3950 return GS_ERROR;
3951 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
3952
3953 gimple_push_condition ();
26d44ae2 3954
726a989a
RB
3955 have_then_clause_p = have_else_clause_p = false;
3956 if (TREE_OPERAND (expr, 1) != NULL
3957 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
3958 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
3959 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
3960 == current_function_decl)
3961 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3962 have different locations, otherwise we end up with incorrect
3963 location information on the branches. */
3964 && (optimize
3965 || !EXPR_HAS_LOCATION (expr)
3966 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3967 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
3968 {
3969 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
3970 have_then_clause_p = true;
26d44ae2
RH
3971 }
3972 else
c2255bc4 3973 label_true = create_artificial_label (UNKNOWN_LOCATION);
726a989a
RB
3974 if (TREE_OPERAND (expr, 2) != NULL
3975 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3976 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3977 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3978 == current_function_decl)
3979 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3980 have different locations, otherwise we end up with incorrect
3981 location information on the branches. */
3982 && (optimize
3983 || !EXPR_HAS_LOCATION (expr)
3984 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3985 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3986 {
3987 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3988 have_else_clause_p = true;
3989 }
3990 else
c2255bc4 3991 label_false = create_artificial_label (UNKNOWN_LOCATION);
26d44ae2 3992
726a989a
RB
3993 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3994 &arm2);
538dd0b7 3995 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
c3bea076 3996 label_false);
932c0da4 3997 gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
538dd0b7 3998 gimplify_seq_add_stmt (&seq, cond_stmt);
c3bea076
RB
3999 gimple_stmt_iterator gsi = gsi_last (seq);
4000 maybe_fold_stmt (&gsi);
4001
726a989a
RB
4002 label_cont = NULL_TREE;
4003 if (!have_then_clause_p)
4004 {
4005 /* For if (...) {} else { code; } put label_true after
4006 the else block. */
4007 if (TREE_OPERAND (expr, 1) == NULL_TREE
4008 && !have_else_clause_p
4009 && TREE_OPERAND (expr, 2) != NULL_TREE)
4010 label_cont = label_true;
4011 else
4012 {
4013 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4014 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4015 /* For if (...) { code; } else {} or
4016 if (...) { code; } else goto label; or
4017 if (...) { code; return; } else { ... }
4018 label_cont isn't needed. */
4019 if (!have_else_clause_p
4020 && TREE_OPERAND (expr, 2) != NULL_TREE
4021 && gimple_seq_may_fallthru (seq))
4022 {
355fe088 4023 gimple *g;
c2255bc4 4024 label_cont = create_artificial_label (UNKNOWN_LOCATION);
726a989a
RB
4025
4026 g = gimple_build_goto (label_cont);
4027
4028 /* GIMPLE_COND's are very low level; they have embedded
4029 gotos. This particular embedded goto should not be marked
4030 with the location of the original COND_EXPR, as it would
4031 correspond to the COND_EXPR's condition, not the ELSE or the
4032 THEN arms. To avoid marking it with the wrong location, flag
4033 it as "no location". */
4034 gimple_set_do_not_emit_location (g);
4035
4036 gimplify_seq_add_stmt (&seq, g);
4037 }
4038 }
4039 }
4040 if (!have_else_clause_p)
4041 {
4042 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4043 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4044 }
4045 if (label_cont)
4046 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4047
4048 gimple_pop_condition (pre_p);
4049 gimple_seq_add_seq (pre_p, seq);
4050
4051 if (ret == GS_ERROR)
4052 ; /* Do nothing. */
4053 else if (have_then_clause_p || have_else_clause_p)
4054 ret = GS_ALL_DONE;
4055 else
4056 {
4057 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4058 expr = TREE_OPERAND (expr, 0);
4059 gimplify_stmt (&expr, pre_p);
4060 }
4061
4062 *expr_p = NULL;
4063 return ret;
4064}
4065
f76d6e6f
EB
4066/* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4067 to be marked addressable.
4068
4069 We cannot rely on such an expression being directly markable if a temporary
4070 has been created by the gimplification. In this case, we create another
4071 temporary and initialize it with a copy, which will become a store after we
4072 mark it addressable. This can happen if the front-end passed us something
4073 that it could not mark addressable yet, like a Fortran pass-by-reference
4074 parameter (int) floatvar. */
4075
4076static void
4077prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4078{
4079 while (handled_component_p (*expr_p))
4080 expr_p = &TREE_OPERAND (*expr_p, 0);
4081 if (is_gimple_reg (*expr_p))
947ca6a0 4082 {
381cdae4
RB
4083 /* Do not allow an SSA name as the temporary. */
4084 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
947ca6a0
RB
4085 DECL_GIMPLE_REG_P (var) = 0;
4086 *expr_p = var;
4087 }
f76d6e6f
EB
4088}
4089
726a989a
RB
4090/* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4091 a call to __builtin_memcpy. */
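/* Roughly speaking, an aggregate assignment

     to = from;

   where both sides occupy SIZE bytes becomes

     __builtin_memcpy (&to, &from, SIZE);

   and, if the value of the assignment is wanted, *EXPR_P is replaced by
   a dereference of the pointer returned by the memcpy call.  */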
4092
4093static enum gimplify_status
4094gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4095 gimple_seq *seq_p)
26d44ae2 4096{
5039610b 4097 tree t, to, to_ptr, from, from_ptr;
538dd0b7 4098 gcall *gs;
db3927fb 4099 location_t loc = EXPR_LOCATION (*expr_p);
26d44ae2 4100
726a989a
RB
4101 to = TREE_OPERAND (*expr_p, 0);
4102 from = TREE_OPERAND (*expr_p, 1);
26d44ae2 4103
f76d6e6f
EB
4104 /* Mark the RHS addressable. Beware that it may not be possible to do so
4105 directly if a temporary has been created by the gimplification. */
4106 prepare_gimple_addressable (&from, seq_p);
4107
628c189e 4108 mark_addressable (from);
db3927fb
AH
4109 from_ptr = build_fold_addr_expr_loc (loc, from);
4110 gimplify_arg (&from_ptr, seq_p, loc);
26d44ae2 4111
628c189e 4112 mark_addressable (to);
db3927fb
AH
4113 to_ptr = build_fold_addr_expr_loc (loc, to);
4114 gimplify_arg (&to_ptr, seq_p, loc);
726a989a 4115
e79983f4 4116 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
726a989a
RB
4117
4118 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
26d44ae2
RH
4119
4120 if (want_value)
4121 {
726a989a 4122 /* tmp = memcpy() */
b731b390 4123 t = create_tmp_var (TREE_TYPE (to_ptr));
726a989a
RB
4124 gimple_call_set_lhs (gs, t);
4125 gimplify_seq_add_stmt (seq_p, gs);
4126
70f34814 4127 *expr_p = build_simple_mem_ref (t);
726a989a 4128 return GS_ALL_DONE;
26d44ae2
RH
4129 }
4130
726a989a
RB
4131 gimplify_seq_add_stmt (seq_p, gs);
4132 *expr_p = NULL;
4133 return GS_ALL_DONE;
26d44ae2
RH
4134}
4135
4136/* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4137 a call to __builtin_memset. In this case we know that the RHS is
4138 a CONSTRUCTOR with an empty element list. */
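/* Roughly speaking, an assignment whose RHS is an empty CONSTRUCTOR,
   e.g. the GNU C

     s = (struct S) {};

   for a sufficiently large S, becomes

     __builtin_memset (&s, 0, sizeof (struct S));  */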
4139
4140static enum gimplify_status
726a989a
RB
4141gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4142 gimple_seq *seq_p)
26d44ae2 4143{
1a13360e 4144 tree t, from, to, to_ptr;
538dd0b7 4145 gcall *gs;
db3927fb 4146 location_t loc = EXPR_LOCATION (*expr_p);
26d44ae2 4147
1a13360e
OH
4148 /* Assert our assumptions, to abort instead of producing wrong code
4149 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4150 not be immediately exposed. */
b8698a0f 4151 from = TREE_OPERAND (*expr_p, 1);
1a13360e
OH
4152 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4153 from = TREE_OPERAND (from, 0);
4154
4155 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
9771b263 4156 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
1a13360e
OH
4157
4158 /* Now proceed. */
726a989a 4159 to = TREE_OPERAND (*expr_p, 0);
26d44ae2 4160
db3927fb
AH
4161 to_ptr = build_fold_addr_expr_loc (loc, to);
4162 gimplify_arg (&to_ptr, seq_p, loc);
e79983f4 4163 t = builtin_decl_implicit (BUILT_IN_MEMSET);
726a989a
RB
4164
4165 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
26d44ae2
RH
4166
4167 if (want_value)
4168 {
726a989a 4169 /* tmp = memset() */
b731b390 4170 t = create_tmp_var (TREE_TYPE (to_ptr));
726a989a
RB
4171 gimple_call_set_lhs (gs, t);
4172 gimplify_seq_add_stmt (seq_p, gs);
4173
4174 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4175 return GS_ALL_DONE;
26d44ae2
RH
4176 }
4177
726a989a
RB
4178 gimplify_seq_add_stmt (seq_p, gs);
4179 *expr_p = NULL;
4180 return GS_ALL_DONE;
26d44ae2
RH
4181}
4182
57d1dd87
RH
4183/* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4184 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
ad19c4be 4185 assignment. Return non-null if we detect a potential overlap. */
57d1dd87
RH
4186
4187struct gimplify_init_ctor_preeval_data
4188{
4189 /* The base decl of the lhs object. May be NULL, in which case we
4190 have to assume the lhs is indirect. */
4191 tree lhs_base_decl;
4192
4193 /* The alias set of the lhs object. */
4862826d 4194 alias_set_type lhs_alias_set;
57d1dd87
RH
4195};
4196
4197static tree
4198gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4199{
4200 struct gimplify_init_ctor_preeval_data *data
4201 = (struct gimplify_init_ctor_preeval_data *) xdata;
4202 tree t = *tp;
4203
4204 /* If we find the base object, obviously we have overlap. */
4205 if (data->lhs_base_decl == t)
4206 return t;
4207
4208 /* If the constructor component is indirect, determine if we have a
4209 potential overlap with the lhs. The only bits of information we
4210 have to go on at this point are addressability and alias sets. */
70f34814
RG
4211 if ((INDIRECT_REF_P (t)
4212 || TREE_CODE (t) == MEM_REF)
57d1dd87
RH
4213 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4214 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4215 return t;
4216
df10ee2a 4217 /* If the constructor component is a call, determine if it can hide a
70f34814
RG
4218 potential overlap with the lhs through an INDIRECT_REF like above.
4219 ??? Ugh - this is completely broken. In fact this whole analysis
4220 doesn't look conservative. */
df10ee2a
EB
4221 if (TREE_CODE (t) == CALL_EXPR)
4222 {
4223 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4224
4225 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4226 if (POINTER_TYPE_P (TREE_VALUE (type))
4227 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4228 && alias_sets_conflict_p (data->lhs_alias_set,
4229 get_alias_set
4230 (TREE_TYPE (TREE_VALUE (type)))))
4231 return t;
4232 }
4233
6615c446 4234 if (IS_TYPE_OR_DECL_P (t))
57d1dd87
RH
4235 *walk_subtrees = 0;
4236 return NULL;
4237}
4238
726a989a 4239/* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
57d1dd87
RH
4240 force values that overlap with the lhs (as described by *DATA)
4241 into temporaries. */
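/* For example, in

     a = (struct S) { .x = a.y, .y = 0 };

   the value a.y overlaps the object being stored to, so it is forced
   into a temporary before the element-wise assignments are emitted.
   (The field names here are illustrative.)  */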
4242
4243static void
726a989a 4244gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
57d1dd87
RH
4245 struct gimplify_init_ctor_preeval_data *data)
4246{
4247 enum gimplify_status one;
4248
51eed280
PB
4249 /* If the value is constant, then there's nothing to pre-evaluate. */
4250 if (TREE_CONSTANT (*expr_p))
4251 {
4252 /* Ensure it does not have side effects, it might contain a reference to
4253 the object we're initializing. */
4254 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4255 return;
4256 }
57d1dd87
RH
4257
4258 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4259 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4260 return;
4261
4262 /* Recurse for nested constructors. */
4263 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4264 {
4038c495
GB
4265 unsigned HOST_WIDE_INT ix;
4266 constructor_elt *ce;
9771b263 4267 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4038c495 4268
9771b263 4269 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4038c495 4270 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
726a989a 4271
57d1dd87
RH
4272 return;
4273 }
4274
0461b801
EB
4275 /* If this is a variable sized type, we must remember the size. */
4276 maybe_with_size_expr (expr_p);
57d1dd87
RH
4277
4278 /* Gimplify the constructor element to something appropriate for the rhs
726a989a 4279 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
d3147f64 4280 the gimplifier will consider this a store to memory. Doing this
57d1dd87
RH
4281 gimplification now means that we won't have to deal with complicated
4282 language-specific trees, nor trees like SAVE_EXPR that can induce
b01d837f 4283 exponential search behavior. */
57d1dd87
RH
4284 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4285 if (one == GS_ERROR)
4286 {
4287 *expr_p = NULL;
4288 return;
4289 }
4290
4291 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4292 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4293 always be true for all scalars, since is_gimple_mem_rhs insists on a
4294 temporary variable for them. */
4295 if (DECL_P (*expr_p))
4296 return;
4297
4298 /* If this is of variable size, we have no choice but to assume it doesn't
4299 overlap since we can't make a temporary for it. */
4c923c28 4300 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
57d1dd87
RH
4301 return;
4302
4303 /* Otherwise, we must search for overlap ... */
4304 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4305 return;
4306
4307 /* ... and if found, force the value into a temporary. */
4308 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
4309}
4310
6fa91b48
SB
4311/* A subroutine of gimplify_init_ctor_eval. Create a loop for
4312 a RANGE_EXPR in a CONSTRUCTOR for an array.
4313
4314 var = lower;
4315 loop_entry:
4316 object[var] = value;
4317 if (var == upper)
4318 goto loop_exit;
4319 var = var + 1;
4320 goto loop_entry;
4321 loop_exit:
4322
4323 We increment var _after_ the loop exit check because we might otherwise
4324 fail if upper == TYPE_MAX_VALUE (type for upper).
4325
4326 Note that we never have to deal with SAVE_EXPRs here, because this has
4327 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4328
9771b263 4329static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
726a989a 4330 gimple_seq *, bool);
6fa91b48
SB
4331
4332static void
4333gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4334 tree value, tree array_elt_type,
726a989a 4335 gimple_seq *pre_p, bool cleared)
6fa91b48 4336{
726a989a 4337 tree loop_entry_label, loop_exit_label, fall_thru_label;
b56b9fe3 4338 tree var, var_type, cref, tmp;
6fa91b48 4339
c2255bc4
AH
4340 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4341 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4342 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
6fa91b48
SB
4343
4344 /* Create and initialize the index variable. */
4345 var_type = TREE_TYPE (upper);
b731b390 4346 var = create_tmp_var (var_type);
726a989a 4347 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
6fa91b48
SB
4348
4349 /* Add the loop entry label. */
726a989a 4350 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
6fa91b48
SB
4351
4352 /* Build the reference. */
4353 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4354 var, NULL_TREE, NULL_TREE);
4355
4356 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4357 the store. Otherwise just assign value to the reference. */
4358
4359 if (TREE_CODE (value) == CONSTRUCTOR)
4360 /* NB we might have to call ourself recursively through
4361 gimplify_init_ctor_eval if the value is a constructor. */
4362 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4363 pre_p, cleared);
4364 else
726a989a 4365 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
6fa91b48
SB
4366
4367 /* We exit the loop when the index var is equal to the upper bound. */
726a989a
RB
4368 gimplify_seq_add_stmt (pre_p,
4369 gimple_build_cond (EQ_EXPR, var, upper,
4370 loop_exit_label, fall_thru_label));
4371
4372 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
6fa91b48
SB
4373
4374 /* Otherwise, increment the index var... */
b56b9fe3
RS
4375 tmp = build2 (PLUS_EXPR, var_type, var,
4376 fold_convert (var_type, integer_one_node));
726a989a 4377 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
6fa91b48
SB
4378
4379 /* ...and jump back to the loop entry. */
726a989a 4380 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
6fa91b48
SB
4381
4382 /* Add the loop exit label. */
726a989a 4383 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
6fa91b48
SB
4384}
4385
292a398f 4386/* Return true if FDECL is accessing a field that is zero sized. */
b8698a0f 4387
292a398f 4388static bool
22ea9ec0 4389zero_sized_field_decl (const_tree fdecl)
292a398f 4390{
b8698a0f 4391 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
292a398f
DB
4392 && integer_zerop (DECL_SIZE (fdecl)))
4393 return true;
4394 return false;
4395}
4396
d06526b7 4397/* Return true if TYPE is zero sized. */
b8698a0f 4398
d06526b7 4399static bool
22ea9ec0 4400zero_sized_type (const_tree type)
d06526b7
AP
4401{
4402 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
4403 && integer_zerop (TYPE_SIZE (type)))
4404 return true;
4405 return false;
4406}
4407
57d1dd87
RH
4408/* A subroutine of gimplify_init_constructor. Generate individual
4409 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4038c495 4410 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
57d1dd87
RH
4411 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4412 zeroed first. */
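/* For example, with CLEARED true, an initializer such as

     struct S s = { 0, 0, 1 };

   only emits the assignment for the third field; the zero elements are
   already covered by the clearing of the whole object.  */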
4413
4414static void
9771b263 4415gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
726a989a 4416 gimple_seq *pre_p, bool cleared)
57d1dd87
RH
4417{
4418 tree array_elt_type = NULL;
4038c495
GB
4419 unsigned HOST_WIDE_INT ix;
4420 tree purpose, value;
57d1dd87
RH
4421
4422 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
4423 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
4424
4038c495 4425 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
57d1dd87 4426 {
726a989a 4427 tree cref;
57d1dd87
RH
4428
4429 /* NULL values are created above for gimplification errors. */
4430 if (value == NULL)
4431 continue;
4432
4433 if (cleared && initializer_zerop (value))
4434 continue;
4435
6fa91b48
SB
4436 /* ??? Here's to hoping the front end fills in all of the indices,
4437 so we don't have to figure out what's missing ourselves. */
4438 gcc_assert (purpose);
4439
816fa80a
OH
4440 /* Skip zero-sized fields, unless value has side-effects. This can
4441 happen with calls to functions returning a zero-sized type, which
4442 we shouldn't discard. As a number of downstream passes don't
4443 expect sets of zero-sized fields, we rely on the gimplification of
4444 the MODIFY_EXPR we make below to drop the assignment statement. */
4445 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
292a398f
DB
4446 continue;
4447
6fa91b48
SB
4448 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4449 whole range. */
4450 if (TREE_CODE (purpose) == RANGE_EXPR)
57d1dd87 4451 {
6fa91b48
SB
4452 tree lower = TREE_OPERAND (purpose, 0);
4453 tree upper = TREE_OPERAND (purpose, 1);
4454
4455 /* If the lower bound is equal to upper, just treat it as if
4456 upper was the index. */
4457 if (simple_cst_equal (lower, upper))
4458 purpose = upper;
4459 else
4460 {
4461 gimplify_init_ctor_eval_range (object, lower, upper, value,
4462 array_elt_type, pre_p, cleared);
4463 continue;
4464 }
4465 }
57d1dd87 4466
6fa91b48
SB
4467 if (array_elt_type)
4468 {
1a1640db
RG
4469 /* Do not use bitsizetype for ARRAY_REF indices. */
4470 if (TYPE_DOMAIN (TREE_TYPE (object)))
ad19c4be
EB
4471 purpose
4472 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
4473 purpose);
b4257cfc
RG
4474 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4475 purpose, NULL_TREE, NULL_TREE);
57d1dd87
RH
4476 }
4477 else
cf0efa6a
ILT
4478 {
4479 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
b4257cfc
RG
4480 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
4481 unshare_expr (object), purpose, NULL_TREE);
cf0efa6a 4482 }
57d1dd87 4483
cf0efa6a
ILT
4484 if (TREE_CODE (value) == CONSTRUCTOR
4485 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
57d1dd87
RH
4486 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4487 pre_p, cleared);
4488 else
4489 {
726a989a 4490 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
57d1dd87 4491 gimplify_and_add (init, pre_p);
726a989a 4492 ggc_free (init);
57d1dd87
RH
4493 }
4494 }
4495}
4496
ad19c4be 4497/* Return the appropriate RHS predicate for this LHS. */
726a989a 4498
18f429e2 4499gimple_predicate
726a989a
RB
4500rhs_predicate_for (tree lhs)
4501{
ba4d8f9d
RG
4502 if (is_gimple_reg (lhs))
4503 return is_gimple_reg_rhs_or_call;
726a989a 4504 else
ba4d8f9d 4505 return is_gimple_mem_rhs_or_call;
726a989a
RB
4506}
4507
8a1b7b7f
JM
4508/* Return the initial guess for an appropriate RHS predicate for this LHS,
4509 before the LHS has been gimplified. */
4510
4511static gimple_predicate
4512initial_rhs_predicate_for (tree lhs)
4513{
4514 if (is_gimple_reg_type (TREE_TYPE (lhs)))
4515 return is_gimple_reg_rhs_or_call;
4516 else
4517 return is_gimple_mem_rhs_or_call;
4518}
4519
2ec5deb5
PB
4520/* Gimplify a C99 compound literal expression. This just means adding
4521 the DECL_EXPR before the current statement and using its anonymous
4522 decl instead. */
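/* For example (C99),

     int *p = (int []) { 1, 2, 3 };

   gimplifies much like

     int D.1234[3] = { 1, 2, 3 };
     int *p = &D.1234[0];

   where D.1234 stands for the compound literal's anonymous decl.  */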
4523
4524static enum gimplify_status
a845a7f5 4525gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
4c53d183 4526 bool (*gimple_test_f) (tree),
a845a7f5 4527 fallback_t fallback)
2ec5deb5
PB
4528{
4529 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
4530 tree decl = DECL_EXPR_DECL (decl_s);
4c53d183 4531 tree init = DECL_INITIAL (decl);
2ec5deb5
PB
4532 /* Mark the decl as addressable if the compound literal
4533 expression is addressable now, otherwise it is marked too late
4534 after we gimplify the initialization expression. */
4535 if (TREE_ADDRESSABLE (*expr_p))
4536 TREE_ADDRESSABLE (decl) = 1;
4c53d183
MM
4537 /* Otherwise, if we don't need an lvalue and have a literal directly
4538 substitute it. Check if it matches the gimple predicate, as
4539 otherwise we'd generate a new temporary, and we can as well just
4540 use the decl we already have. */
4541 else if (!TREE_ADDRESSABLE (decl)
4542 && init
4543 && (fallback & fb_lvalue) == 0
4544 && gimple_test_f (init))
4545 {
4546 *expr_p = init;
4547 return GS_OK;
4548 }
2ec5deb5
PB
4549
4550 /* Preliminarily mark non-addressed complex variables as eligible
4551 for promotion to gimple registers. We'll transform their uses
4552 as we find them. */
4553 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
4554 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
4555 && !TREE_THIS_VOLATILE (decl)
4556 && !needs_to_live_in_memory (decl))
4557 DECL_GIMPLE_REG_P (decl) = 1;
4558
a845a7f5
ILT
4559 /* If the decl is not addressable, then it is being used in some
4560 expression or on the right hand side of a statement, and it can
4561 be put into a readonly data section. */
4562 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
4563 TREE_READONLY (decl) = 1;
4564
2ec5deb5
PB
4565 /* This decl isn't mentioned in the enclosing block, so add it to the
4566 list of temps. FIXME it seems a bit of a kludge to say that
4567 anonymous artificial vars aren't pushed, but everything else is. */
4568 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
4569 gimple_add_tmp_var (decl);
4570
4571 gimplify_and_add (decl_s, pre_p);
4572 *expr_p = decl;
4573 return GS_OK;
4574}
4575
4576/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4577 return a new CONSTRUCTOR if something changed. */
4578
4579static tree
4580optimize_compound_literals_in_ctor (tree orig_ctor)
4581{
4582 tree ctor = orig_ctor;
9771b263
DN
4583 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
4584 unsigned int idx, num = vec_safe_length (elts);
2ec5deb5
PB
4585
4586 for (idx = 0; idx < num; idx++)
4587 {
9771b263 4588 tree value = (*elts)[idx].value;
2ec5deb5
PB
4589 tree newval = value;
4590 if (TREE_CODE (value) == CONSTRUCTOR)
4591 newval = optimize_compound_literals_in_ctor (value);
4592 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
4593 {
4594 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
4595 tree decl = DECL_EXPR_DECL (decl_s);
4596 tree init = DECL_INITIAL (decl);
4597
4598 if (!TREE_ADDRESSABLE (value)
4599 && !TREE_ADDRESSABLE (decl)
6f8f67e9
JJ
4600 && init
4601 && TREE_CODE (init) == CONSTRUCTOR)
2ec5deb5
PB
4602 newval = optimize_compound_literals_in_ctor (init);
4603 }
4604 if (newval == value)
4605 continue;
4606
4607 if (ctor == orig_ctor)
4608 {
4609 ctor = copy_node (orig_ctor);
9771b263 4610 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
2ec5deb5
PB
4611 elts = CONSTRUCTOR_ELTS (ctor);
4612 }
9771b263 4613 (*elts)[idx].value = newval;
2ec5deb5
PB
4614 }
4615 return ctor;
4616}
4617
26d44ae2
RH
4618/* A subroutine of gimplify_modify_expr. Break out elements of a
4619 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4620
4621 Note that we still need to clear any elements that don't have explicit
4622 initializers, so if not all elements are initialized we keep the
ffed8a01
AH
4623 original MODIFY_EXPR, we just remove all of the constructor elements.
4624
4625 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4626 GS_ERROR if we would have to create a temporary when gimplifying
4627 this constructor. Otherwise, return GS_OK.
4628
4629 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
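/* For example, an initializer that leaves most fields unmentioned,

     struct S s = { .a = 1 };

   is gimplified by clearing the whole of s first and then emitting just
   the store to s.a.  */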
26d44ae2
RH
4630
4631static enum gimplify_status
726a989a
RB
4632gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4633 bool want_value, bool notify_temp_creation)
26d44ae2 4634{
f5a1f0d0 4635 tree object, ctor, type;
26d44ae2 4636 enum gimplify_status ret;
9771b263 4637 vec<constructor_elt, va_gc> *elts;
26d44ae2 4638
f5a1f0d0 4639 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
26d44ae2 4640
ffed8a01
AH
4641 if (!notify_temp_creation)
4642 {
726a989a 4643 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
ffed8a01
AH
4644 is_gimple_lvalue, fb_lvalue);
4645 if (ret == GS_ERROR)
4646 return ret;
4647 }
57d1dd87 4648
726a989a 4649 object = TREE_OPERAND (*expr_p, 0);
98e92fb2
JJ
4650 ctor = TREE_OPERAND (*expr_p, 1)
4651 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
f5a1f0d0
PB
4652 type = TREE_TYPE (ctor);
4653 elts = CONSTRUCTOR_ELTS (ctor);
26d44ae2 4654 ret = GS_ALL_DONE;
726a989a 4655
26d44ae2
RH
4656 switch (TREE_CODE (type))
4657 {
4658 case RECORD_TYPE:
4659 case UNION_TYPE:
4660 case QUAL_UNION_TYPE:
4661 case ARRAY_TYPE:
4662 {
57d1dd87 4663 struct gimplify_init_ctor_preeval_data preeval_data;
953d0c90
RS
4664 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
4665 bool cleared, complete_p, valid_const_initializer;
26d44ae2
RH
4666
4667 /* Aggregate types must lower constructors to initialization of
4668 individual elements. The exception is that a CONSTRUCTOR node
4669 with no elements indicates zero-initialization of the whole. */
9771b263 4670 if (vec_safe_is_empty (elts))
ffed8a01
AH
4671 {
4672 if (notify_temp_creation)
4673 return GS_OK;
4674 break;
4675 }
b8698a0f 4676
fe24d485
OH
4677 /* Fetch information about the constructor to direct later processing.
4678 We might want to make static versions of it in various cases, and
4679 can only do so if it is known to be a valid constant initializer. */
4680 valid_const_initializer
4681 = categorize_ctor_elements (ctor, &num_nonzero_elements,
953d0c90 4682 &num_ctor_elements, &complete_p);
26d44ae2
RH
4683
4684 /* If a const aggregate variable is being initialized, then it
4685 should never be a loss to promote the variable to be static. */
fe24d485 4686 if (valid_const_initializer
6f642f98 4687 && num_nonzero_elements > 1
26d44ae2 4688 && TREE_READONLY (object)
8813a647 4689 && VAR_P (object)
d0ea0759 4690 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
26d44ae2 4691 {
ffed8a01
AH
4692 if (notify_temp_creation)
4693 return GS_ERROR;
26d44ae2
RH
4694 DECL_INITIAL (object) = ctor;
4695 TREE_STATIC (object) = 1;
4696 if (!DECL_NAME (object))
4697 DECL_NAME (object) = create_tmp_var_name ("C");
4698 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
4699
4700 /* ??? C++ doesn't automatically append a .<number> to the
6bdf3519 4701 assembler name, and even when it does, it looks at FE private
26d44ae2
RH
4702 data structures to figure out what that number should be,
4703 which are not set for this variable. I suppose this is
4704 important for local statics for inline functions, which aren't
4705 "local" in the object file sense. So in order to get a unique
4706 TU-local symbol, we must invoke the lhd version now. */
4707 lhd_set_decl_assembler_name (object);
4708
4709 *expr_p = NULL_TREE;
4710 break;
4711 }
4712
cce70747
JC
4713 /* If there are "lots" of initialized elements, even discounting
4714 those that are not address constants (and thus *must* be
4715 computed at runtime), then partition the constructor into
4716 constant and non-constant parts. Block copy the constant
4717 parts in, then generate code for the non-constant parts. */
4718 /* TODO. There's code in cp/typeck.c to do this. */
4719
953d0c90
RS
4720 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
4721 /* store_constructor will ignore the clearing of variable-sized
4722 objects. Initializers for such objects must explicitly set
4723 every field that needs to be set. */
4724 cleared = false;
d368135f 4725 else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
953d0c90 4726 /* If the constructor isn't complete, clear the whole object
d368135f 4727 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
953d0c90
RS
4728
4729 ??? This ought not to be needed. For any element not present
4730 in the initializer, we should simply set them to zero. Except
4731 we'd need to *find* the elements that are not present, and that
4732 requires trickery to avoid quadratic compile-time behavior in
4733 large cases or excessive memory use in small cases. */
73ed17ff 4734 cleared = true;
953d0c90 4735 else if (num_ctor_elements - num_nonzero_elements
e04ad03d 4736 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
953d0c90
RS
4737 && num_nonzero_elements < num_ctor_elements / 4)
4738 /* If there are "lots" of zeros, it's more efficient to clear
4739 the memory and then set the nonzero elements. */
cce70747 4740 cleared = true;
953d0c90
RS
4741 else
4742 cleared = false;
cce70747 4743
26d44ae2
RH
4744 /* If there are "lots" of initialized elements, and all of them
4745 are valid address constants, then the entire initializer can
cce70747
JC
4746 be dropped to memory, and then memcpy'd out. Don't do this
4747 for sparse arrays, though, as it's more efficient to follow
4748 the standard CONSTRUCTOR behavior of memset followed by
8afd015a
JM
4749 individual element initialization. Also don't do this for small
4750 all-zero initializers (which aren't big enough to merit
4751 clearing), and don't try to make bitwise copies of
d5e254e1
IE
4752 TREE_ADDRESSABLE types.
4753
4754 We cannot apply such transformation when compiling chkp static
4755 initializer because creation of initializer image in the memory
4756 will require static initialization of bounds for it. It should
4757 result in another gimplification of similar initializer and we
4758 may fall into infinite loop. */
8afd015a
JM
4759 if (valid_const_initializer
4760 && !(cleared || num_nonzero_elements == 0)
d5e254e1
IE
4761 && !TREE_ADDRESSABLE (type)
4762 && (!current_function_decl
4763 || !lookup_attribute ("chkp ctor",
4764 DECL_ATTRIBUTES (current_function_decl))))
26d44ae2
RH
4765 {
4766 HOST_WIDE_INT size = int_size_in_bytes (type);
4767 unsigned int align;
4768
4769 /* ??? We can still get unbounded array types, at least
4770 from the C++ front end. This seems wrong, but attempt
4771 to work around it for now. */
4772 if (size < 0)
4773 {
4774 size = int_size_in_bytes (TREE_TYPE (object));
4775 if (size >= 0)
4776 TREE_TYPE (ctor) = type = TREE_TYPE (object);
4777 }
4778
4779 /* Find the maximum alignment we can assume for the object. */
4780 /* ??? Make use of DECL_OFFSET_ALIGN. */
4781 if (DECL_P (object))
4782 align = DECL_ALIGN (object);
4783 else
4784 align = TYPE_ALIGN (type);
4785
f301837e
EB
4786 /* Do a block move either if the size is so small as to make
4787 each individual move a sub-unit move on average, or if it
4788 is so large as to make individual moves inefficient. */
329ad380
JJ
4789 if (size > 0
4790 && num_nonzero_elements > 1
f301837e
EB
4791 && (size < num_nonzero_elements
4792 || !can_move_by_pieces (size, align)))
26d44ae2 4793 {
ffed8a01
AH
4794 if (notify_temp_creation)
4795 return GS_ERROR;
4796
46314d3e
EB
4797 walk_tree (&ctor, force_labels_r, NULL, NULL);
4798 ctor = tree_output_constant_def (ctor);
4799 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
4800 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
4801 TREE_OPERAND (*expr_p, 1) = ctor;
57d1dd87
RH
4802
4803 /* This is no longer an assignment of a CONSTRUCTOR, but
4804 we still may have processing to do on the LHS. So
4805 pretend we didn't do anything here to let that happen. */
4806 return GS_UNHANDLED;
26d44ae2
RH
4807 }
4808 }
4809
558af7ca
EB
 4810 /* If the target is volatile, we have nonzero elements, and there is
 4811 more than one field to assign, initialize the target from a temporary. */
61c7cbf8
RG
4812 if (TREE_THIS_VOLATILE (object)
4813 && !TREE_ADDRESSABLE (type)
558af7ca 4814 && num_nonzero_elements > 0
9771b263 4815 && vec_safe_length (elts) > 1)
61c7cbf8 4816 {
b731b390 4817 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
61c7cbf8
RG
4818 TREE_OPERAND (*expr_p, 0) = temp;
4819 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
4820 *expr_p,
4821 build2 (MODIFY_EXPR, void_type_node,
4822 object, temp));
4823 return GS_OK;
4824 }
4825
ffed8a01
AH
4826 if (notify_temp_creation)
4827 return GS_OK;
4828
675c873b
EB
4829 /* If there are nonzero elements and if needed, pre-evaluate to capture
4830 elements overlapping with the lhs into temporaries. We must do this
4831 before clearing to fetch the values before they are zeroed-out. */
4832 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
85d89e76
OH
4833 {
4834 preeval_data.lhs_base_decl = get_base_address (object);
4835 if (!DECL_P (preeval_data.lhs_base_decl))
4836 preeval_data.lhs_base_decl = NULL;
4837 preeval_data.lhs_alias_set = get_alias_set (object);
4838
726a989a 4839 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
85d89e76
OH
4840 pre_p, post_p, &preeval_data);
4841 }
4842
2234a9cb
PP
4843 bool ctor_has_side_effects_p
4844 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
4845
26d44ae2
RH
4846 if (cleared)
4847 {
4848 /* Zap the CONSTRUCTOR element list, which simplifies this case.
4849 Note that we still have to gimplify, in order to handle the
57d1dd87 4850 case of variable sized types. Avoid shared tree structures. */
4038c495 4851 CONSTRUCTOR_ELTS (ctor) = NULL;
726a989a 4852 TREE_SIDE_EFFECTS (ctor) = 0;
57d1dd87 4853 object = unshare_expr (object);
726a989a 4854 gimplify_stmt (expr_p, pre_p);
26d44ae2
RH
4855 }
4856
6fa91b48 4857 /* If we have not block cleared the object, or if there are nonzero
2234a9cb
PP
4858 elements in the constructor, or if the constructor has side effects,
4859 add assignments to the individual scalar fields of the object. */
4860 if (!cleared
4861 || num_nonzero_elements > 0
4862 || ctor_has_side_effects_p)
85d89e76 4863 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
26d44ae2
RH
4864
4865 *expr_p = NULL_TREE;
4866 }
4867 break;
4868
4869 case COMPLEX_TYPE:
4870 {
4871 tree r, i;
4872
ffed8a01
AH
4873 if (notify_temp_creation)
4874 return GS_OK;
4875
26d44ae2 4876 /* Extract the real and imaginary parts out of the ctor. */
9771b263
DN
4877 gcc_assert (elts->length () == 2);
4878 r = (*elts)[0].value;
4879 i = (*elts)[1].value;
26d44ae2
RH
4880 if (r == NULL || i == NULL)
4881 {
e8160c9a 4882 tree zero = build_zero_cst (TREE_TYPE (type));
26d44ae2
RH
4883 if (r == NULL)
4884 r = zero;
4885 if (i == NULL)
4886 i = zero;
4887 }
4888
4889 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
4890 represent creation of a complex value. */
4891 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
4892 {
4893 ctor = build_complex (type, r, i);
4894 TREE_OPERAND (*expr_p, 1) = ctor;
4895 }
4896 else
4897 {
b4257cfc 4898 ctor = build2 (COMPLEX_EXPR, type, r, i);
26d44ae2 4899 TREE_OPERAND (*expr_p, 1) = ctor;
726a989a
RB
4900 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
4901 pre_p,
4902 post_p,
17ad5b5e
RH
4903 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
4904 fb_rvalue);
26d44ae2
RH
4905 }
4906 }
4907 break;
506e2710 4908
26d44ae2 4909 case VECTOR_TYPE:
4038c495
GB
4910 {
4911 unsigned HOST_WIDE_INT ix;
4912 constructor_elt *ce;
e89be13b 4913
ffed8a01
AH
4914 if (notify_temp_creation)
4915 return GS_OK;
4916
4038c495
GB
4917 /* Go ahead and simplify constant constructors to VECTOR_CST. */
4918 if (TREE_CONSTANT (ctor))
4919 {
4920 bool constant_p = true;
4921 tree value;
4922
4923 /* Even when ctor is constant, it might contain non-*_CST
9f1da821
RS
4924 elements, such as addresses or trapping values like
4925 1.0/0.0 - 1.0/0.0. Such expressions don't belong
4926 in VECTOR_CST nodes. */
4038c495
GB
4927 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
4928 if (!CONSTANT_CLASS_P (value))
4929 {
4930 constant_p = false;
4931 break;
4932 }
e89be13b 4933
4038c495
GB
4934 if (constant_p)
4935 {
4936 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
4937 break;
4938 }
84816907 4939
9f1da821 4940 TREE_CONSTANT (ctor) = 0;
4038c495 4941 }
e89be13b 4942
4038c495 4943 /* Vector types use CONSTRUCTOR all the way through gimple
37947cd0 4944 compilation as a general initializer. */
9771b263 4945 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
4038c495
GB
4946 {
4947 enum gimplify_status tret;
726a989a
RB
4948 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
4949 fb_rvalue);
4038c495
GB
4950 if (tret == GS_ERROR)
4951 ret = GS_ERROR;
37947cd0
JJ
4952 else if (TREE_STATIC (ctor)
4953 && !initializer_constant_valid_p (ce->value,
4954 TREE_TYPE (ce->value)))
4955 TREE_STATIC (ctor) = 0;
4038c495 4956 }
726a989a
RB
4957 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
4958 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
4038c495 4959 }
26d44ae2 4960 break;
6de9cd9a 4961
26d44ae2
RH
4962 default:
4963 /* So how did we get a CONSTRUCTOR for a scalar type? */
282899df 4964 gcc_unreachable ();
26d44ae2 4965 }
6de9cd9a 4966
26d44ae2
RH
4967 if (ret == GS_ERROR)
4968 return GS_ERROR;
0faf9ab4
WS
4969 /* If we have gimplified both sides of the initializer but have
4970 not emitted an assignment, do so now. */
4971 if (*expr_p)
4972 {
4973 tree lhs = TREE_OPERAND (*expr_p, 0);
4974 tree rhs = TREE_OPERAND (*expr_p, 1);
98e92fb2
JJ
4975 if (want_value && object == lhs)
4976 lhs = unshare_expr (lhs);
0faf9ab4
WS
4977 gassign *init = gimple_build_assign (lhs, rhs);
4978 gimplify_seq_add_stmt (pre_p, init);
4979 }
4980 if (want_value)
26d44ae2 4981 {
26d44ae2
RH
4982 *expr_p = object;
4983 return GS_OK;
6de9cd9a 4984 }
26d44ae2 4985 else
726a989a 4986 {
0faf9ab4 4987 *expr_p = NULL;
726a989a
RB
4988 return GS_ALL_DONE;
4989 }
26d44ae2 4990}
6de9cd9a 4991
de4af523
JJ
4992/* Given a pointer value OP0, return a simplified version of an
4993 indirection through OP0, or NULL_TREE if no simplification is
4994 possible. This may only be applied to a rhs of an expression.
4995 Note that the resulting type may be different from the type pointed
4996 to in the sense that it is still compatible from the langhooks
4997 point of view. */
4998
4999static tree
5000gimple_fold_indirect_ref_rhs (tree t)
5001{
5002 return gimple_fold_indirect_ref (t);
5003}
5004
4caa08da
AH
5005/* Subroutine of gimplify_modify_expr to do simplifications of
5006 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5007 something changes. */
6de9cd9a 5008
26d44ae2 5009static enum gimplify_status
726a989a
RB
5010gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5011 gimple_seq *pre_p, gimple_seq *post_p,
5012 bool want_value)
26d44ae2 5013{
6d729f28
JM
5014 enum gimplify_status ret = GS_UNHANDLED;
5015 bool changed;
6de9cd9a 5016
6d729f28
JM
5017 do
5018 {
5019 changed = false;
5020 switch (TREE_CODE (*from_p))
5021 {
5022 case VAR_DECL:
5023 /* If we're assigning from a read-only variable initialized with
5024 a constructor, do the direct assignment from the constructor,
 5025 but only if neither source nor target is volatile, since this
5026 latter assignment might end up being done on a per-field basis. */
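 /* Illustrative sketch (editorial, not part of the original source): given

 static const struct S init = { 1, 2 };
 ... x = init; ...

 the RHS "init" is replaced by its CONSTRUCTOR { 1, 2 } so that
 gimplify_init_constructor (called just below) can decide how to expand
 the initialization; "S", "init" and "x" are made-up names. */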
5027 if (DECL_INITIAL (*from_p)
5028 && TREE_READONLY (*from_p)
5029 && !TREE_THIS_VOLATILE (*from_p)
5030 && !TREE_THIS_VOLATILE (*to_p)
5031 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
5032 {
5033 tree old_from = *from_p;
5034 enum gimplify_status subret;
5035
5036 /* Move the constructor into the RHS. */
5037 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5038
5039 /* Let's see if gimplify_init_constructor will need to put
5040 it in memory. */
5041 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5042 false, true);
5043 if (subret == GS_ERROR)
5044 {
5045 /* If so, revert the change. */
5046 *from_p = old_from;
5047 }
5048 else
5049 {
5050 ret = GS_OK;
5051 changed = true;
5052 }
5053 }
5054 break;
5055 case INDIRECT_REF:
4caa08da 5056 {
6d729f28 5057 /* If we have code like
ffed8a01 5058
6d729f28 5059 *(const A*)(A*)&x
ffed8a01 5060
6d729f28
JM
5061 where the type of "x" is a (possibly cv-qualified variant
5062 of "A"), treat the entire expression as identical to "x".
5063 This kind of code arises in C++ when an object is bound
5064 to a const reference, and if "x" is a TARGET_EXPR we want
5065 to take advantage of the optimization below. */
06baaba3 5066 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
6d729f28
JM
5067 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
5068 if (t)
ffed8a01 5069 {
06baaba3
RG
5070 if (TREE_THIS_VOLATILE (t) != volatile_p)
5071 {
3a65ee74 5072 if (DECL_P (t))
06baaba3
RG
5073 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5074 build_fold_addr_expr (t));
5075 if (REFERENCE_CLASS_P (t))
5076 TREE_THIS_VOLATILE (t) = volatile_p;
5077 }
6d729f28
JM
5078 *from_p = t;
5079 ret = GS_OK;
5080 changed = true;
ffed8a01 5081 }
6d729f28
JM
5082 break;
5083 }
5084
5085 case TARGET_EXPR:
5086 {
5087 /* If we are initializing something from a TARGET_EXPR, strip the
5088 TARGET_EXPR and initialize it directly, if possible. This can't
5089 be done if the initializer is void, since that implies that the
5090 temporary is set in some non-trivial way.
5091
5092 ??? What about code that pulls out the temp and uses it
5093 elsewhere? I think that such code never uses the TARGET_EXPR as
5094 an initializer. If I'm wrong, we'll die because the temp won't
5095 have any RTL. In that case, I guess we'll need to replace
5096 references somehow. */
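 /* Illustrative sketch (editorial, not part of the original source): for an
 initialization such as

 x = TARGET_EXPR <tmp, some_initializer>

 the TARGET_EXPR wrapper is dropped and "x" is initialized directly from
 "some_initializer", avoiding the extra temporary "tmp"; all the names
 here are placeholders. */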
5097 tree init = TARGET_EXPR_INITIAL (*from_p);
5098
5099 if (init
5100 && !VOID_TYPE_P (TREE_TYPE (init)))
ffed8a01 5101 {
6d729f28 5102 *from_p = init;
ffed8a01 5103 ret = GS_OK;
6d729f28 5104 changed = true;
ffed8a01 5105 }
4caa08da 5106 }
6d729f28 5107 break;
f98625f6 5108
6d729f28
JM
5109 case COMPOUND_EXPR:
5110 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5111 caught. */
5112 gimplify_compound_expr (from_p, pre_p, true);
5113 ret = GS_OK;
5114 changed = true;
5115 break;
f98625f6 5116
6d729f28 5117 case CONSTRUCTOR:
ce3beba3
JM
5118 /* If we already made some changes, let the front end have a
5119 crack at this before we break it down. */
5120 if (ret != GS_UNHANDLED)
5121 break;
6d729f28
JM
5122 /* If we're initializing from a CONSTRUCTOR, break this into
5123 individual MODIFY_EXPRs. */
5124 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5125 false);
5126
5127 case COND_EXPR:
5128 /* If we're assigning to a non-register type, push the assignment
5129 down into the branches. This is mandatory for ADDRESSABLE types,
5130 since we cannot generate temporaries for such, but it saves a
5131 copy in other cases as well. */
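 /* Illustrative sketch (editorial, not part of the original source): for a
 non-register (e.g. aggregate) type,

 x = cond ? a : b;

 is rewritten into a void COND_EXPR with the assignment pushed into each
 arm, roughly

 if (cond) x = a; else x = b;

 so no aggregate temporary is needed for the value of the COND_EXPR;
 "x", "cond", "a" and "b" are made-up names. */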
5132 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
f98625f6 5133 {
6d729f28
JM
5134 /* This code should mirror the code in gimplify_cond_expr. */
5135 enum tree_code code = TREE_CODE (*expr_p);
5136 tree cond = *from_p;
5137 tree result = *to_p;
5138
5139 ret = gimplify_expr (&result, pre_p, post_p,
5140 is_gimple_lvalue, fb_lvalue);
5141 if (ret != GS_ERROR)
5142 ret = GS_OK;
5143
68ed2ba0
JJ
5144 /* If we are going to write RESULT more than once, clear
5145 TREE_READONLY flag, otherwise we might incorrectly promote
5146 the variable to static const and initialize it at compile
5147 time in one of the branches. */
5148 if (VAR_P (result)
5149 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5150 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5151 TREE_READONLY (result) = 0;
6d729f28
JM
5152 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5153 TREE_OPERAND (cond, 1)
5154 = build2 (code, void_type_node, result,
5155 TREE_OPERAND (cond, 1));
5156 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5157 TREE_OPERAND (cond, 2)
5158 = build2 (code, void_type_node, unshare_expr (result),
5159 TREE_OPERAND (cond, 2));
5160
5161 TREE_TYPE (cond) = void_type_node;
5162 recalculate_side_effects (cond);
5163
5164 if (want_value)
5165 {
5166 gimplify_and_add (cond, pre_p);
5167 *expr_p = unshare_expr (result);
5168 }
5169 else
5170 *expr_p = cond;
5171 return ret;
f98625f6 5172 }
f98625f6 5173 break;
f98625f6 5174
6d729f28
JM
5175 case CALL_EXPR:
5176 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5177 return slot so that we don't generate a temporary. */
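 /* Illustrative sketch (editorial, not part of the original source): for

 struct big s = f ();

 setting CALL_EXPR_RETURN_SLOT_OPT lets the call construct its result
 directly into "s" (which is marked addressable below) instead of
 returning into a temporary that is then copied; "big", "s" and "f" are
 placeholder names for this example. */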
5178 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5179 && aggregate_value_p (*from_p, *from_p))
26d44ae2 5180 {
6d729f28
JM
5181 bool use_target;
5182
5183 if (!(rhs_predicate_for (*to_p))(*from_p))
5184 /* If we need a temporary, *to_p isn't accurate. */
5185 use_target = false;
ad19c4be 5186 /* It's OK to use the return slot directly unless it's an NRV. */
6d729f28
JM
5187 else if (TREE_CODE (*to_p) == RESULT_DECL
5188 && DECL_NAME (*to_p) == NULL_TREE
5189 && needs_to_live_in_memory (*to_p))
6d729f28
JM
5190 use_target = true;
5191 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5192 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5193 /* Don't force regs into memory. */
5194 use_target = false;
5195 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5196 /* It's OK to use the target directly if it's being
5197 initialized. */
5198 use_target = true;
e6a54b01
EB
5199 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5200 != INTEGER_CST)
aabb90e5
RG
5201 /* Always use the target and thus RSO for variable-sized types.
5202 GIMPLE cannot deal with a variable-sized assignment
5203 embedded in a call statement. */
5204 use_target = true;
345ae177
AH
5205 else if (TREE_CODE (*to_p) != SSA_NAME
5206 && (!is_gimple_variable (*to_p)
5207 || needs_to_live_in_memory (*to_p)))
6d729f28
JM
5208 /* Don't use the original target if it's already addressable;
5209 if its address escapes, and the called function uses the
5210 NRV optimization, a conforming program could see *to_p
5211 change before the called function returns; see c++/19317.
5212 When optimizing, the return_slot pass marks more functions
5213 as safe after we have escape info. */
5214 use_target = false;
5215 else
5216 use_target = true;
5217
5218 if (use_target)
5219 {
5220 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5221 mark_addressable (*to_p);
5222 }
26d44ae2 5223 }
6d729f28 5224 break;
6de9cd9a 5225
6d729f28
JM
5226 case WITH_SIZE_EXPR:
5227 /* Likewise for calls that return an aggregate of non-constant size,
5228 since we would not be able to generate a temporary at all. */
5229 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5230 {
5231 *from_p = TREE_OPERAND (*from_p, 0);
ebad5233
JM
5232 /* We don't change ret in this case because the
5233 WITH_SIZE_EXPR might have been added in
5234 gimplify_modify_expr, so returning GS_OK would lead to an
5235 infinite loop. */
6d729f28
JM
5236 changed = true;
5237 }
5238 break;
6de9cd9a 5239
6d729f28
JM
5240 /* If we're initializing from a container, push the initialization
5241 inside it. */
5242 case CLEANUP_POINT_EXPR:
5243 case BIND_EXPR:
5244 case STATEMENT_LIST:
26d44ae2 5245 {
6d729f28
JM
5246 tree wrap = *from_p;
5247 tree t;
dae7ec87 5248
6d729f28
JM
5249 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5250 fb_lvalue);
dae7ec87
JM
5251 if (ret != GS_ERROR)
5252 ret = GS_OK;
5253
6d729f28
JM
5254 t = voidify_wrapper_expr (wrap, *expr_p);
5255 gcc_assert (t == *expr_p);
dae7ec87
JM
5256
5257 if (want_value)
5258 {
6d729f28
JM
5259 gimplify_and_add (wrap, pre_p);
5260 *expr_p = unshare_expr (*to_p);
dae7ec87
JM
5261 }
5262 else
6d729f28
JM
5263 *expr_p = wrap;
5264 return GS_OK;
26d44ae2 5265 }
6de9cd9a 5266
6d729f28 5267 case COMPOUND_LITERAL_EXPR:
fa47911c 5268 {
6d729f28
JM
5269 tree complit = TREE_OPERAND (*expr_p, 1);
5270 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5271 tree decl = DECL_EXPR_DECL (decl_s);
5272 tree init = DECL_INITIAL (decl);
5273
5274 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5275 into struct T x = { 0, 1, 2 } if the address of the
5276 compound literal has never been taken. */
5277 if (!TREE_ADDRESSABLE (complit)
5278 && !TREE_ADDRESSABLE (decl)
5279 && init)
fa47911c 5280 {
6d729f28
JM
5281 *expr_p = copy_node (*expr_p);
5282 TREE_OPERAND (*expr_p, 1) = init;
5283 return GS_OK;
fa47911c
JM
5284 }
5285 }
5286
6d729f28
JM
5287 default:
5288 break;
2ec5deb5 5289 }
6d729f28
JM
5290 }
5291 while (changed);
6de9cd9a 5292
6de9cd9a
DN
5293 return ret;
5294}
5295
216820a4
RG
5296
5297/* Return true if T looks like a valid GIMPLE statement. */
5298
5299static bool
5300is_gimple_stmt (tree t)
5301{
5302 const enum tree_code code = TREE_CODE (t);
5303
5304 switch (code)
5305 {
5306 case NOP_EXPR:
5307 /* The only valid NOP_EXPR is the empty statement. */
5308 return IS_EMPTY_STMT (t);
5309
5310 case BIND_EXPR:
5311 case COND_EXPR:
5312 /* These are only valid if they're void. */
5313 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5314
5315 case SWITCH_EXPR:
5316 case GOTO_EXPR:
5317 case RETURN_EXPR:
5318 case LABEL_EXPR:
5319 case CASE_LABEL_EXPR:
5320 case TRY_CATCH_EXPR:
5321 case TRY_FINALLY_EXPR:
5322 case EH_FILTER_EXPR:
5323 case CATCH_EXPR:
5324 case ASM_EXPR:
5325 case STATEMENT_LIST:
41dbbb37
TS
5326 case OACC_PARALLEL:
5327 case OACC_KERNELS:
5328 case OACC_DATA:
5329 case OACC_HOST_DATA:
5330 case OACC_DECLARE:
5331 case OACC_UPDATE:
5332 case OACC_ENTER_DATA:
5333 case OACC_EXIT_DATA:
5334 case OACC_CACHE:
216820a4
RG
5335 case OMP_PARALLEL:
5336 case OMP_FOR:
74bf76ed 5337 case OMP_SIMD:
c02065fc 5338 case CILK_SIMD:
acf0174b 5339 case OMP_DISTRIBUTE:
41dbbb37 5340 case OACC_LOOP:
216820a4
RG
5341 case OMP_SECTIONS:
5342 case OMP_SECTION:
5343 case OMP_SINGLE:
5344 case OMP_MASTER:
acf0174b 5345 case OMP_TASKGROUP:
216820a4
RG
5346 case OMP_ORDERED:
5347 case OMP_CRITICAL:
5348 case OMP_TASK:
d9a6bd32
JJ
5349 case OMP_TARGET:
5350 case OMP_TARGET_DATA:
5351 case OMP_TARGET_UPDATE:
5352 case OMP_TARGET_ENTER_DATA:
5353 case OMP_TARGET_EXIT_DATA:
5354 case OMP_TASKLOOP:
5355 case OMP_TEAMS:
216820a4
RG
5356 /* These are always void. */
5357 return true;
5358
5359 case CALL_EXPR:
5360 case MODIFY_EXPR:
5361 case PREDICT_EXPR:
5362 /* These are valid regardless of their type. */
5363 return true;
5364
5365 default:
5366 return false;
5367 }
5368}
5369
5370
d9c2d296
AP
5371/* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5372 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
7b7e6ecd
EB
5373 DECL_GIMPLE_REG_P set.
5374
5375 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5376 other, unmodified part of the complex object just before the total store.
5377 As a consequence, if the object is still uninitialized, an undefined value
5378 will be loaded into a register, which may result in a spurious exception
5379 if the register is floating-point and the value happens to be a signaling
5380 NaN for example. Then the fully-fledged complex operations lowering pass
5381 followed by a DCE pass are necessary in order to fix things up. */
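/* Illustrative sketch (editorial, not part of the original source): a
 partial store such as

 __real__ c = x;

 is promoted to a total store, roughly

 other = __imag__ c;
 c = COMPLEX_EXPR <x, other>;

 As noted above, the load of the unmodified part may read an
 uninitialized value if "c" has not been written yet; "c" and "x" are
 made-up names. */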
d9c2d296
AP
5382
5383static enum gimplify_status
726a989a
RB
5384gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5385 bool want_value)
d9c2d296
AP
5386{
5387 enum tree_code code, ocode;
5388 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5389
726a989a
RB
5390 lhs = TREE_OPERAND (*expr_p, 0);
5391 rhs = TREE_OPERAND (*expr_p, 1);
d9c2d296
AP
5392 code = TREE_CODE (lhs);
5393 lhs = TREE_OPERAND (lhs, 0);
5394
5395 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5396 other = build1 (ocode, TREE_TYPE (rhs), lhs);
8d2b0410 5397 TREE_NO_WARNING (other) = 1;
d9c2d296
AP
5398 other = get_formal_tmp_var (other, pre_p);
5399
5400 realpart = code == REALPART_EXPR ? rhs : other;
5401 imagpart = code == REALPART_EXPR ? other : rhs;
5402
5403 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5404 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5405 else
5406 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5407
726a989a
RB
5408 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5409 *expr_p = (want_value) ? rhs : NULL_TREE;
d9c2d296
AP
5410
5411 return GS_ALL_DONE;
5412}
5413
206048bd 5414/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
6de9cd9a
DN
5415
5416 modify_expr
5417 : varname '=' rhs
5418 | '*' ID '=' rhs
5419
5420 PRE_P points to the list where side effects that must happen before
5421 *EXPR_P should be stored.
5422
5423 POST_P points to the list where side effects that must happen after
5424 *EXPR_P should be stored.
5425
5426 WANT_VALUE is nonzero iff we want to use the value of this expression
5427 in another expression. */
5428
5429static enum gimplify_status
726a989a
RB
5430gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5431 bool want_value)
6de9cd9a 5432{
726a989a
RB
5433 tree *from_p = &TREE_OPERAND (*expr_p, 1);
5434 tree *to_p = &TREE_OPERAND (*expr_p, 0);
44de5aeb 5435 enum gimplify_status ret = GS_UNHANDLED;
355fe088 5436 gimple *assign;
db3927fb 5437 location_t loc = EXPR_LOCATION (*expr_p);
6da8be89 5438 gimple_stmt_iterator gsi;
6de9cd9a 5439
282899df
NS
5440 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
5441 || TREE_CODE (*expr_p) == INIT_EXPR);
6de9cd9a 5442
d0ad58f9
JM
5443 /* Trying to simplify a clobber using normal logic doesn't work,
5444 so handle it here. */
5445 if (TREE_CLOBBER_P (*from_p))
5446 {
5d751b0c
JJ
5447 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5448 if (ret == GS_ERROR)
5449 return ret;
5450 gcc_assert (!want_value
8813a647 5451 && (VAR_P (*to_p) || TREE_CODE (*to_p) == MEM_REF));
d0ad58f9
JM
5452 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
5453 *expr_p = NULL;
5454 return GS_ALL_DONE;
5455 }
5456
1b24a790
RG
5457 /* Insert pointer conversions required by the middle-end that are not
 5458 required by the frontend. This fixes middle-end type checking for,
 5459 for example, gcc.dg/redecl-6.c. */
daad0278 5460 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
1b24a790
RG
5461 {
5462 STRIP_USELESS_TYPE_CONVERSION (*from_p);
5463 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
db3927fb 5464 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
1b24a790
RG
5465 }
5466
83d7e8f0
JM
5467 /* See if any simplifications can be done based on what the RHS is. */
5468 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5469 want_value);
5470 if (ret != GS_UNHANDLED)
5471 return ret;
5472
 5473 /* For zero-sized types, only gimplify the left-hand side and right-hand
 5474 side as statements and throw away the assignment. Do this after
5475 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5476 types properly. */
753b34d7 5477 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
412f8986 5478 {
726a989a
RB
5479 gimplify_stmt (from_p, pre_p);
5480 gimplify_stmt (to_p, pre_p);
412f8986
AP
5481 *expr_p = NULL_TREE;
5482 return GS_ALL_DONE;
5483 }
6de9cd9a 5484
d25cee4d
RH
5485 /* If the value being copied is of variable width, compute the length
5486 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5487 before gimplifying any of the operands so that we can resolve any
5488 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5489 the size of the expression to be copied, not of the destination, so
726a989a 5490 that is what we must do here. */
d25cee4d 5491 maybe_with_size_expr (from_p);
6de9cd9a 5492
726a989a
RB
5493 /* As a special case, we have to temporarily allow for assignments
5494 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5495 a toplevel statement, when gimplifying the GENERIC expression
5496 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5497 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5498
5499 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5500 prevent gimplify_expr from trying to create a new temporary for
5501 foo's LHS, we tell it that it should only gimplify until it
5502 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5503 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5504 and all we need to do here is set 'a' to be its LHS. */
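 /* Illustrative sketch (editorial, not part of the original source): for
 the GENERIC expression

 MODIFY_EXPR <a, CALL_EXPR <foo>>

 the gimplifier emits a single tuple, roughly

 a = foo ();   i.e. a GIMPLE_CALL with "a" as its LHS,

 rather than a GIMPLE_CALL into a temporary followed by a GIMPLE_ASSIGN;
 "a" and "foo" are placeholder names. */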
8a1b7b7f
JM
5505
5506 /* Gimplify the RHS first for C++17 and bug 71104. */
5507 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
5508 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
5509 if (ret == GS_ERROR)
5510 return ret;
5511
5512 /* Then gimplify the LHS. */
7f15b177
RB
 5513 /* If we gimplified the RHS to a CALL_EXPR and that call may return
 5514 twice, we have to make sure to gimplify into non-SSA form, as otherwise
5515 the abnormal edge added later will make those defs not dominate
5516 their uses.
5517 ??? Technically this applies only to the registers used in the
5518 resulting non-register *TO_P. */
5519 bool saved_into_ssa = gimplify_ctxp->into_ssa;
5520 if (saved_into_ssa
5521 && TREE_CODE (*from_p) == CALL_EXPR
5522 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
5523 gimplify_ctxp->into_ssa = false;
8a1b7b7f 5524 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
7f15b177 5525 gimplify_ctxp->into_ssa = saved_into_ssa;
6de9cd9a
DN
5526 if (ret == GS_ERROR)
5527 return ret;
5528
8a1b7b7f
JM
5529 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5530 guess for the predicate was wrong. */
5531 gimple_predicate final_pred = rhs_predicate_for (*to_p);
5532 if (final_pred != initial_pred)
5533 {
5534 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
5535 if (ret == GS_ERROR)
5536 return ret;
5537 }
5538
f8e89441 5539 /* In case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the type
026c3cfd 5540 size as an argument to the call. */
f8e89441
TV
5541 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5542 {
5543 tree call = TREE_OPERAND (*from_p, 0);
5544 tree vlasize = TREE_OPERAND (*from_p, 1);
5545
5546 if (TREE_CODE (call) == CALL_EXPR
5547 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
5548 {
2fe1d762 5549 int nargs = call_expr_nargs (call);
f8e89441
TV
5550 tree type = TREE_TYPE (call);
5551 tree ap = CALL_EXPR_ARG (call, 0);
5552 tree tag = CALL_EXPR_ARG (call, 1);
33f0852f 5553 tree aptag = CALL_EXPR_ARG (call, 2);
f8e89441 5554 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
2fe1d762
TV
5555 IFN_VA_ARG, type,
5556 nargs + 1, ap, tag,
33f0852f
JJ
5557 aptag, vlasize);
5558 TREE_OPERAND (*from_p, 0) = newcall;
f8e89441
TV
5559 }
5560 }
5561
44de5aeb
RK
5562 /* Now see if the above changed *from_p to something we handle specially. */
5563 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5564 want_value);
6de9cd9a
DN
5565 if (ret != GS_UNHANDLED)
5566 return ret;
5567
d25cee4d
RH
 5568 /* If we've got a variable-sized assignment between two lvalues (i.e. one
 5569 that does not involve a call), then we can make things a bit more straightforward
5570 by converting the assignment to memcpy or memset. */
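 /* Illustrative sketch (editorial, not part of the original source): with a
 variable-length type such as "char a[n], b[n]",

 a = b;   becomes roughly   memcpy (&a, &b, n);

 and an assignment whose RHS is an (empty) CONSTRUCTOR becomes roughly

 memset (&a, 0, n);

 "a", "b" and "n" are made-up names for this example. */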
5571 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5572 {
5573 tree from = TREE_OPERAND (*from_p, 0);
5574 tree size = TREE_OPERAND (*from_p, 1);
5575
5576 if (TREE_CODE (from) == CONSTRUCTOR)
726a989a
RB
5577 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
5578
e847cc68 5579 if (is_gimple_addressable (from))
d25cee4d
RH
5580 {
5581 *from_p = from;
726a989a
RB
5582 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
5583 pre_p);
d25cee4d
RH
5584 }
5585 }
5586
e41d82f5
RH
5587 /* Transform partial stores to non-addressable complex variables into
5588 total stores. This allows us to use real instead of virtual operands
5589 for these variables, which improves optimization. */
5590 if ((TREE_CODE (*to_p) == REALPART_EXPR
5591 || TREE_CODE (*to_p) == IMAGPART_EXPR)
5592 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
5593 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
5594
f173837a 5595 /* Try to alleviate the effects of the gimplification creating artificial
b4771722
EB
5596 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5597 make sure not to create DECL_DEBUG_EXPR links across functions. */
f173837a 5598 if (!gimplify_ctxp->into_ssa
8813a647 5599 && VAR_P (*from_p)
726a989a
RB
5600 && DECL_IGNORED_P (*from_p)
5601 && DECL_P (*to_p)
b4771722 5602 && !DECL_IGNORED_P (*to_p)
0a37d40c
PMR
5603 && decl_function_context (*to_p) == current_function_decl
5604 && decl_function_context (*from_p) == current_function_decl)
f173837a
EB
5605 {
5606 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
5607 DECL_NAME (*from_p)
5608 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
839b422f 5609 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
f173837a 5610 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
726a989a
RB
5611 }
5612
8f0fe813
NS
5613 if (want_value && TREE_THIS_VOLATILE (*to_p))
5614 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
5615
726a989a
RB
5616 if (TREE_CODE (*from_p) == CALL_EXPR)
5617 {
5618 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5619 instead of a GIMPLE_ASSIGN. */
538dd0b7 5620 gcall *call_stmt;
1304953e
JJ
5621 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
5622 {
5623 /* Gimplify internal functions created in the FEs. */
5624 int nargs = call_expr_nargs (*from_p), i;
5625 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
5626 auto_vec<tree> vargs (nargs);
5627
5628 for (i = 0; i < nargs; i++)
5629 {
5630 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
5631 EXPR_LOCATION (*from_p));
5632 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
5633 }
538dd0b7
DM
5634 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
5635 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
1304953e 5636 }
ed9c79e1
JJ
5637 else
5638 {
1304953e
JJ
5639 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
5640 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
5641 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
5642 tree fndecl = get_callee_fndecl (*from_p);
5643 if (fndecl
5644 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
5645 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
5646 && call_expr_nargs (*from_p) == 3)
538dd0b7
DM
5647 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
5648 CALL_EXPR_ARG (*from_p, 0),
5649 CALL_EXPR_ARG (*from_p, 1),
5650 CALL_EXPR_ARG (*from_p, 2));
1304953e
JJ
5651 else
5652 {
538dd0b7
DM
5653 call_stmt = gimple_build_call_from_tree (*from_p);
5654 gimple_call_set_fntype (call_stmt, TREE_TYPE (fnptrtype));
1304953e 5655 }
ed9c79e1 5656 }
538dd0b7 5657 notice_special_calls (call_stmt);
abd3a68c 5658 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
538dd0b7 5659 gimple_call_set_lhs (call_stmt, *to_p);
381cdae4
RB
5660 else if (TREE_CODE (*to_p) == SSA_NAME)
 5661 /* The above is somewhat premature; avoid ICEing later for an
 5662 SSA name w/o a definition. We may have uses in the GIMPLE IL.
5663 ??? This doesn't make it a default-def. */
5664 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
815d9cc6
XR
5665
5666 if (EXPR_CILK_SPAWN (*from_p))
5667 gimplify_cilk_detach (pre_p);
538dd0b7 5668 assign = call_stmt;
f173837a 5669 }
726a989a 5670 else
c2255bc4
AH
5671 {
5672 assign = gimple_build_assign (*to_p, *from_p);
5673 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4cde512c
JJ
5674 if (COMPARISON_CLASS_P (*from_p))
5675 gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
c2255bc4 5676 }
f173837a 5677
726a989a 5678 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
6de9cd9a 5679 {
2ad728d2 5680 /* We should have got an SSA name from the start. */
381cdae4
RB
5681 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
5682 || ! gimple_in_ssa_p (cfun));
726a989a 5683 }
07beea0d 5684
6da8be89
MM
5685 gimplify_seq_add_stmt (pre_p, assign);
5686 gsi = gsi_last (*pre_p);
88ac13da 5687 maybe_fold_stmt (&gsi);
6da8be89 5688
726a989a
RB
5689 if (want_value)
5690 {
8f0fe813 5691 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
17ad5b5e 5692 return GS_OK;
6de9cd9a 5693 }
726a989a
RB
5694 else
5695 *expr_p = NULL;
6de9cd9a 5696
17ad5b5e 5697 return GS_ALL_DONE;
6de9cd9a
DN
5698}
5699
ad19c4be
EB
5700/* Gimplify a comparison between two variable-sized objects. Do this
5701 with a call to BUILT_IN_MEMCMP. */
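/* Illustrative sketch (editorial, not part of the original source): for two
 objects of a variable-sized type, a comparison

 a == b

 is gimplified into roughly

 memcmp (&a, &b, size-expression) == 0

 where the size is the (possibly non-constant) TYPE_SIZE_UNIT of the
 operands with PLACEHOLDER_EXPRs substituted; "a" and "b" are placeholder
 names. */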
44de5aeb
RK
5702
5703static enum gimplify_status
5704gimplify_variable_sized_compare (tree *expr_p)
5705{
692ad9aa 5706 location_t loc = EXPR_LOCATION (*expr_p);
44de5aeb
RK
5707 tree op0 = TREE_OPERAND (*expr_p, 0);
5708 tree op1 = TREE_OPERAND (*expr_p, 1);
692ad9aa 5709 tree t, arg, dest, src, expr;
5039610b
SL
5710
5711 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
5712 arg = unshare_expr (arg);
5713 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
db3927fb
AH
5714 src = build_fold_addr_expr_loc (loc, op1);
5715 dest = build_fold_addr_expr_loc (loc, op0);
e79983f4 5716 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
db3927fb 5717 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
692ad9aa
EB
5718
5719 expr
b4257cfc 5720 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
692ad9aa
EB
5721 SET_EXPR_LOCATION (expr, loc);
5722 *expr_p = expr;
44de5aeb
RK
5723
5724 return GS_OK;
5725}
5726
ad19c4be
EB
5727/* Gimplify a comparison between two aggregate objects of integral scalar
5728 mode as a comparison between the bitwise equivalent scalar values. */
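/* Illustrative sketch (editorial, not part of the original source): for a
 small struct such as "struct S { short x, y; } a, b;" whose TYPE_MODE is
 an integral scalar mode, the comparison

 a == b

 becomes roughly

 VIEW_CONVERT_EXPR<unsigned int>(a) == VIEW_CONVERT_EXPR<unsigned int>(b)

 i.e. a single scalar comparison of the underlying bits; the struct layout
 and the 32-bit scalar type are assumptions for this example. */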
61c25908
OH
5729
5730static enum gimplify_status
5731gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5732{
db3927fb 5733 location_t loc = EXPR_LOCATION (*expr_p);
61c25908
OH
5734 tree op0 = TREE_OPERAND (*expr_p, 0);
5735 tree op1 = TREE_OPERAND (*expr_p, 1);
5736
5737 tree type = TREE_TYPE (op0);
5738 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
5739
db3927fb
AH
5740 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
5741 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
61c25908
OH
5742
5743 *expr_p
db3927fb 5744 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
61c25908
OH
5745
5746 return GS_OK;
5747}
5748
ad19c4be
EB
5749/* Gimplify an expression sequence. This function gimplifies each
5750 expression and rewrites the original expression with the last
6de9cd9a
DN
5751 expression of the sequence in GIMPLE form.
5752
5753 PRE_P points to the list where the side effects for all the
5754 expressions in the sequence will be emitted.
d3147f64 5755
6de9cd9a 5756 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
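/* Illustrative sketch (editorial, not part of the original source): for the
 sequence

 (f (), g (), x + 1)

 "f ()" and "g ()" are gimplified as statements into PRE_P and the whole
 expression is rewritten to just "x + 1"; the names are made up for this
 example. */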
6de9cd9a
DN
5757
5758static enum gimplify_status
726a989a 5759gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6de9cd9a
DN
5760{
5761 tree t = *expr_p;
5762
5763 do
5764 {
5765 tree *sub_p = &TREE_OPERAND (t, 0);
5766
5767 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
5768 gimplify_compound_expr (sub_p, pre_p, false);
5769 else
726a989a 5770 gimplify_stmt (sub_p, pre_p);
6de9cd9a
DN
5771
5772 t = TREE_OPERAND (t, 1);
5773 }
5774 while (TREE_CODE (t) == COMPOUND_EXPR);
5775
5776 *expr_p = t;
5777 if (want_value)
5778 return GS_OK;
5779 else
5780 {
726a989a 5781 gimplify_stmt (expr_p, pre_p);
6de9cd9a
DN
5782 return GS_ALL_DONE;
5783 }
5784}
5785
726a989a
RB
5786/* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
5787 gimplify. After gimplification, EXPR_P will point to a new temporary
5788 that holds the original value of the SAVE_EXPR node.
6de9cd9a 5789
726a989a 5790 PRE_P points to the list where side effects that must happen before
ad19c4be 5791 *EXPR_P should be stored. */
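/* Illustrative sketch (editorial, not part of the original source): the
 first time SAVE_EXPR <x + y> is gimplified it becomes roughly

 tmp = x + y;

 with the SAVE_EXPR marked resolved, and every later occurrence of the
 same SAVE_EXPR simply reads "tmp"; "x", "y" and "tmp" are placeholder
 names. */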
6de9cd9a
DN
5792
5793static enum gimplify_status
726a989a 5794gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6de9cd9a
DN
5795{
5796 enum gimplify_status ret = GS_ALL_DONE;
5797 tree val;
5798
282899df 5799 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
6de9cd9a
DN
5800 val = TREE_OPERAND (*expr_p, 0);
5801
7f5e6307
RH
5802 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
5803 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
17ad5b5e 5804 {
3dd93025 5805 /* The operand may be a void-valued expression. It is
519087cf
EB
5806 being executed only for its side-effects. */
5807 if (TREE_TYPE (val) == void_type_node)
5808 {
5809 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5810 is_gimple_stmt, fb_none);
5811 val = NULL;
5812 }
5813 else
5814 /* The temporary may not be an SSA name as later abnormal and EH
5815 control flow may invalidate use/def domination. */
5816 val = get_initialized_tmp_var (val, pre_p, post_p, false);
7f5e6307
RH
5817
5818 TREE_OPERAND (*expr_p, 0) = val;
5819 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
17ad5b5e 5820 }
6de9cd9a 5821
7f5e6307
RH
5822 *expr_p = val;
5823
6de9cd9a
DN
5824 return ret;
5825}
5826
ad19c4be 5827/* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6de9cd9a
DN
5828
5829 unary_expr
5830 : ...
5831 | '&' varname
5832 ...
5833
5834 PRE_P points to the list where side effects that must happen before
5835 *EXPR_P should be stored.
5836
5837 POST_P points to the list where side effects that must happen after
5838 *EXPR_P should be stored. */
5839
5840static enum gimplify_status
726a989a 5841gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6de9cd9a
DN
5842{
5843 tree expr = *expr_p;
5844 tree op0 = TREE_OPERAND (expr, 0);
5845 enum gimplify_status ret;
db3927fb 5846 location_t loc = EXPR_LOCATION (*expr_p);
6de9cd9a
DN
5847
5848 switch (TREE_CODE (op0))
5849 {
5850 case INDIRECT_REF:
67f23620 5851 do_indirect_ref:
6de9cd9a
DN
5852 /* Check if we are dealing with an expression of the form '&*ptr'.
5853 While the front end folds away '&*ptr' into 'ptr', these
5854 expressions may be generated internally by the compiler (e.g.,
5855 builtins like __builtin_va_end). */
67f23620
RH
5856 /* Caution: the silent array decomposition semantics we allow for
5857 ADDR_EXPR means we can't always discard the pair. */
c87ac7e8
AO
5858 /* Gimplification of the ADDR_EXPR operand may drop
5859 cv-qualification conversions, so make sure we add them if
5860 needed. */
67f23620
RH
5861 {
5862 tree op00 = TREE_OPERAND (op0, 0);
5863 tree t_expr = TREE_TYPE (expr);
5864 tree t_op00 = TREE_TYPE (op00);
5865
f4088621 5866 if (!useless_type_conversion_p (t_expr, t_op00))
db3927fb 5867 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
67f23620
RH
5868 *expr_p = op00;
5869 ret = GS_OK;
5870 }
6de9cd9a
DN
5871 break;
5872
44de5aeb
RK
5873 case VIEW_CONVERT_EXPR:
5874 /* Take the address of our operand and then convert it to the type of
af72267c
RK
5875 this ADDR_EXPR.
5876
 5877 ??? The interaction of VIEW_CONVERT_EXPR and aliasing is not at
5878 all clear. The impact of this transformation is even less clear. */
91804752
EB
5879
5880 /* If the operand is a useless conversion, look through it. Doing so
5881 guarantees that the ADDR_EXPR and its operand will remain of the
5882 same type. */
5883 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
317c0092 5884 op0 = TREE_OPERAND (op0, 0);
91804752 5885
db3927fb
AH
5886 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
5887 build_fold_addr_expr_loc (loc,
5888 TREE_OPERAND (op0, 0)));
44de5aeb 5889 ret = GS_OK;
6de9cd9a
DN
5890 break;
5891
903eccd1
EB
5892 case MEM_REF:
5893 if (integer_zerop (TREE_OPERAND (op0, 1)))
5894 goto do_indirect_ref;
5895
191816a3 5896 /* fall through */
903eccd1 5897
6de9cd9a 5898 default:
cbf5d0e7
RB
5899 /* If we see a call to a declared builtin or see its address
5900 being taken (we can unify those cases here) then we can mark
5901 the builtin for implicit generation by GCC. */
5902 if (TREE_CODE (op0) == FUNCTION_DECL
5903 && DECL_BUILT_IN_CLASS (op0) == BUILT_IN_NORMAL
5904 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
5905 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
5906
6de9cd9a 5907 /* We use fb_either here because the C frontend sometimes takes
5201931e
JM
5908 the address of a call that returns a struct; see
5909 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
5910 the implied temporary explicit. */
936d04b6 5911
f76d6e6f 5912 /* Make the operand addressable. */
6de9cd9a 5913 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
e847cc68 5914 is_gimple_addressable, fb_either);
8b17cc05
RG
5915 if (ret == GS_ERROR)
5916 break;
67f23620 5917
f76d6e6f
EB
5918 /* Then mark it. Beware that it may not be possible to do so directly
5919 if a temporary has been created by the gimplification. */
5920 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
9e51aaf5 5921
8b17cc05 5922 op0 = TREE_OPERAND (expr, 0);
6de9cd9a 5923
8b17cc05
RG
5924 /* For various reasons, the gimplification of the expression
5925 may have made a new INDIRECT_REF. */
5926 if (TREE_CODE (op0) == INDIRECT_REF)
5927 goto do_indirect_ref;
5928
6b8b9e42
RG
5929 mark_addressable (TREE_OPERAND (expr, 0));
5930
5931 /* The FEs may end up building ADDR_EXPRs early on a decl with
5932 an incomplete type. Re-build ADDR_EXPRs in canonical form
5933 here. */
5934 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
5935 *expr_p = build_fold_addr_expr (op0);
5936
8b17cc05 5937 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6b8b9e42
RG
5938 recompute_tree_invariant_for_addr_expr (*expr_p);
5939
 5940 /* If we re-built the ADDR_EXPR, add a conversion to the original type
5941 if required. */
5942 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
5943 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
8b17cc05 5944
6de9cd9a
DN
5945 break;
5946 }
5947
6de9cd9a
DN
5948 return ret;
5949}
5950
5951/* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
5952 value; output operands should be a gimple lvalue. */
5953
5954static enum gimplify_status
726a989a 5955gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6de9cd9a 5956{
726a989a
RB
5957 tree expr;
5958 int noutputs;
5959 const char **oconstraints;
6de9cd9a
DN
5960 int i;
5961 tree link;
5962 const char *constraint;
5963 bool allows_mem, allows_reg, is_inout;
5964 enum gimplify_status ret, tret;
538dd0b7 5965 gasm *stmt;
9771b263
DN
5966 vec<tree, va_gc> *inputs;
5967 vec<tree, va_gc> *outputs;
5968 vec<tree, va_gc> *clobbers;
5969 vec<tree, va_gc> *labels;
726a989a 5970 tree link_next;
b8698a0f 5971
726a989a
RB
5972 expr = *expr_p;
5973 noutputs = list_length (ASM_OUTPUTS (expr));
5974 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
5975
9771b263
DN
5976 inputs = NULL;
5977 outputs = NULL;
5978 clobbers = NULL;
5979 labels = NULL;
6de9cd9a 5980
6de9cd9a 5981 ret = GS_ALL_DONE;
726a989a
RB
5982 link_next = NULL_TREE;
5983 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6de9cd9a 5984 {
2c68ba8e 5985 bool ok;
726a989a
RB
5986 size_t constraint_len;
5987
5988 link_next = TREE_CHAIN (link);
5989
5990 oconstraints[i]
5991 = constraint
6de9cd9a 5992 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6db081f1
AP
5993 constraint_len = strlen (constraint);
5994 if (constraint_len == 0)
5995 continue;
6de9cd9a 5996
2c68ba8e
LB
5997 ok = parse_output_constraint (&constraint, i, 0, 0,
5998 &allows_mem, &allows_reg, &is_inout);
5999 if (!ok)
6000 {
6001 ret = GS_ERROR;
6002 is_inout = false;
6003 }
6de9cd9a
DN
6004
6005 if (!allows_reg && allows_mem)
936d04b6 6006 mark_addressable (TREE_VALUE (link));
6de9cd9a
DN
6007
6008 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6009 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6010 fb_lvalue | fb_mayfail);
6011 if (tret == GS_ERROR)
6012 {
6013 error ("invalid lvalue in asm output %d", i);
6014 ret = tret;
6015 }
6016
ed87de55
RB
 6017 /* If the constraint does not allow memory, make sure we gimplify
 6018 the operand to a register if it is not one already but its base is.
 6019 This happens for complex and vector components. */
6020 if (!allows_mem)
6021 {
6022 tree op = TREE_VALUE (link);
6023 if (! is_gimple_val (op)
6024 && is_gimple_reg_type (TREE_TYPE (op))
6025 && is_gimple_reg (get_base_address (op)))
6026 {
6027 tree tem = create_tmp_reg (TREE_TYPE (op));
6028 tree ass;
6029 if (is_inout)
6030 {
6031 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6032 tem, unshare_expr (op));
6033 gimplify_and_add (ass, pre_p);
6034 }
6035 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6036 gimplify_and_add (ass, post_p);
6037
6038 TREE_VALUE (link) = tem;
6039 tret = GS_OK;
6040 }
6041 }
6042
9771b263 6043 vec_safe_push (outputs, link);
726a989a
RB
6044 TREE_CHAIN (link) = NULL_TREE;
6045
6de9cd9a
DN
6046 if (is_inout)
6047 {
6048 /* An input/output operand. To give the optimizers more
6049 flexibility, split it into separate input and output
6050 operands. */
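 /* Illustrative sketch (editorial, not part of the original source): an
 in/out operand such as

 asm ("incl %0" : "+r" (x));

 is split into an output plus a matching input tied to it by operand
 number, roughly

 asm ("incl %0" : "=r" (x) : "0" (x));

 the "incl" template and "x" are made-up details for this example. */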
6051 tree input;
3d7b83b6
MS
6052 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6053 char buf[11];
6de9cd9a
DN
6054
6055 /* Turn the in/out constraint into an output constraint. */
6056 char *p = xstrdup (constraint);
6057 p[0] = '=';
6058 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6de9cd9a
DN
6059
6060 /* And add a matching input constraint. */
6061 if (allows_reg)
6062 {
3d7b83b6 6063 sprintf (buf, "%u", i);
372d72d9
JJ
6064
6065 /* If there are multiple alternatives in the constraint,
 6066 handle each of them individually. Those that allow a register
 6067 will be replaced with the operand number; the others will stay
 6068 unchanged. */
6069 if (strchr (p, ',') != NULL)
6070 {
6071 size_t len = 0, buflen = strlen (buf);
6072 char *beg, *end, *str, *dst;
6073
6074 for (beg = p + 1;;)
6075 {
6076 end = strchr (beg, ',');
6077 if (end == NULL)
6078 end = strchr (beg, '\0');
6079 if ((size_t) (end - beg) < buflen)
6080 len += buflen + 1;
6081 else
6082 len += end - beg + 1;
6083 if (*end)
6084 beg = end + 1;
6085 else
6086 break;
6087 }
6088
858904db 6089 str = (char *) alloca (len);
372d72d9
JJ
6090 for (beg = p + 1, dst = str;;)
6091 {
6092 const char *tem;
6093 bool mem_p, reg_p, inout_p;
6094
6095 end = strchr (beg, ',');
6096 if (end)
6097 *end = '\0';
6098 beg[-1] = '=';
6099 tem = beg - 1;
6100 parse_output_constraint (&tem, i, 0, 0,
6101 &mem_p, &reg_p, &inout_p);
6102 if (dst != str)
6103 *dst++ = ',';
6104 if (reg_p)
6105 {
6106 memcpy (dst, buf, buflen);
6107 dst += buflen;
6108 }
6109 else
6110 {
6111 if (end)
6112 len = end - beg;
6113 else
6114 len = strlen (beg);
6115 memcpy (dst, beg, len);
6116 dst += len;
6117 }
6118 if (end)
6119 beg = end + 1;
6120 else
6121 break;
6122 }
6123 *dst = '\0';
6124 input = build_string (dst - str, str);
6125 }
6126 else
6127 input = build_string (strlen (buf), buf);
6de9cd9a
DN
6128 }
6129 else
6130 input = build_string (constraint_len - 1, constraint + 1);
372d72d9
JJ
6131
6132 free (p);
6133
6de9cd9a
DN
6134 input = build_tree_list (build_tree_list (NULL_TREE, input),
6135 unshare_expr (TREE_VALUE (link)));
6136 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
6137 }
6138 }
6139
726a989a
RB
6140 link_next = NULL_TREE;
6141 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6de9cd9a 6142 {
726a989a
RB
6143 link_next = TREE_CHAIN (link);
6144 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6de9cd9a
DN
6145 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6146 oconstraints, &allows_mem, &allows_reg);
6147
f497c16c
JJ
6148 /* If we can't make copies, we can only accept memory. */
6149 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
6150 {
6151 if (allows_mem)
6152 allows_reg = 0;
6153 else
6154 {
6155 error ("impossible constraint in %<asm%>");
6156 error ("non-memory input %d must stay in memory", i);
6157 return GS_ERROR;
6158 }
6159 }
6160
6de9cd9a
DN
6161 /* If the operand is a memory input, it should be an lvalue. */
6162 if (!allows_reg && allows_mem)
6163 {
502c5084
JJ
6164 tree inputv = TREE_VALUE (link);
6165 STRIP_NOPS (inputv);
6166 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6167 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6168 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
8f1e28e0
MP
6169 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6170 || TREE_CODE (inputv) == MODIFY_EXPR)
502c5084 6171 TREE_VALUE (link) = error_mark_node;
6de9cd9a
DN
6172 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6173 is_gimple_lvalue, fb_lvalue | fb_mayfail);
bdd3aea6
JJ
6174 if (tret != GS_ERROR)
6175 {
 6176 /* Unlike output operands, memory inputs are not guaranteed
 6177 to be lvalues by the FE, and while the expressions are
 6178 marked addressable there, if the input is e.g. a statement
 6179 expression, temporaries in it might not end up being
 6180 addressable. They might already be used in the IL and thus
 6181 it is too late to make them addressable now. */
6182 tree x = TREE_VALUE (link);
6183 while (handled_component_p (x))
6184 x = TREE_OPERAND (x, 0);
6185 if (TREE_CODE (x) == MEM_REF
6186 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6187 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
8813a647 6188 if ((VAR_P (x)
bdd3aea6
JJ
6189 || TREE_CODE (x) == PARM_DECL
6190 || TREE_CODE (x) == RESULT_DECL)
6191 && !TREE_ADDRESSABLE (x)
6192 && is_gimple_reg (x))
6193 {
6194 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6195 input_location), 0,
6196 "memory input %d is not directly addressable",
6197 i);
6198 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6199 }
6200 }
936d04b6 6201 mark_addressable (TREE_VALUE (link));
6de9cd9a
DN
6202 if (tret == GS_ERROR)
6203 {
bdd3aea6
JJ
6204 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6205 "memory input %d is not directly addressable", i);
6de9cd9a
DN
6206 ret = tret;
6207 }
6208 }
6209 else
6210 {
6211 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
e670d9e4 6212 is_gimple_asm_val, fb_rvalue);
6de9cd9a
DN
6213 if (tret == GS_ERROR)
6214 ret = tret;
6215 }
726a989a
RB
6216
6217 TREE_CHAIN (link) = NULL_TREE;
9771b263 6218 vec_safe_push (inputs, link);
6de9cd9a 6219 }
b8698a0f 6220
ca081cc8
EB
6221 link_next = NULL_TREE;
6222 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6223 {
6224 link_next = TREE_CHAIN (link);
6225 TREE_CHAIN (link) = NULL_TREE;
6226 vec_safe_push (clobbers, link);
6227 }
1c384bf1 6228
ca081cc8
EB
6229 link_next = NULL_TREE;
6230 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6231 {
6232 link_next = TREE_CHAIN (link);
6233 TREE_CHAIN (link) = NULL_TREE;
6234 vec_safe_push (labels, link);
6235 }
726a989a 6236
a406865a
RG
6237 /* Do not add ASMs with errors to the gimple IL stream. */
6238 if (ret != GS_ERROR)
6239 {
6240 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
1c384bf1 6241 inputs, outputs, clobbers, labels);
726a989a 6242
15a85b05 6243 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
a406865a
RG
6244 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6245
6246 gimplify_seq_add_stmt (pre_p, stmt);
6247 }
6de9cd9a
DN
6248
6249 return ret;
6250}
6251
6252/* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
726a989a 6253 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6de9cd9a
DN
6254 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6255 return to this function.
6256
6257 FIXME should we complexify the prequeue handling instead? Or use flags
6258 for all the cleanups and let the optimizer tighten them up? The current
6259 code seems pretty fragile; it will break on a cleanup within any
6260 non-conditional nesting. But any such nesting would be broken, anyway;
6261 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6262 and continues out of it. We can do that at the RTL level, though, so
6263 having an optimizer to tighten up try/finally regions would be a Good
6264 Thing. */
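/* Illustrative sketch (editorial, not part of the original source): a
 cleanup recorded while gimplifying the body of a CLEANUP_POINT_EXPR,
 e.g. a destructor call D for a temporary, turns the remaining statements
 of the body into roughly

 try { ... rest of the body ... } finally { D; }

 (or a GIMPLE_TRY_CATCH region when the cleanup is EH-only); "D" is a
 placeholder for this example. */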
6265
6266static enum gimplify_status
726a989a 6267gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a 6268{
726a989a
RB
6269 gimple_stmt_iterator iter;
6270 gimple_seq body_sequence = NULL;
6de9cd9a 6271
325c3691 6272 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6de9cd9a
DN
6273
6274 /* We only care about the number of conditions between the innermost
df77f454
JM
6275 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6276 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6de9cd9a 6277 int old_conds = gimplify_ctxp->conditions;
726a989a 6278 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
32be32af 6279 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6de9cd9a 6280 gimplify_ctxp->conditions = 0;
726a989a 6281 gimplify_ctxp->conditional_cleanups = NULL;
32be32af 6282 gimplify_ctxp->in_cleanup_point_expr = true;
6de9cd9a 6283
726a989a 6284 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6de9cd9a
DN
6285
6286 gimplify_ctxp->conditions = old_conds;
df77f454 6287 gimplify_ctxp->conditional_cleanups = old_cleanups;
32be32af 6288 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6de9cd9a 6289
726a989a 6290 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6de9cd9a 6291 {
355fe088 6292 gimple *wce = gsi_stmt (iter);
6de9cd9a 6293
726a989a 6294 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6de9cd9a 6295 {
726a989a 6296 if (gsi_one_before_end_p (iter))
6de9cd9a 6297 {
726a989a
RB
6298 /* Note that gsi_insert_seq_before and gsi_remove do not
6299 scan operands, unlike some other sequence mutators. */
ae0595b0
RG
6300 if (!gimple_wce_cleanup_eh_only (wce))
6301 gsi_insert_seq_before_without_update (&iter,
6302 gimple_wce_cleanup (wce),
6303 GSI_SAME_STMT);
726a989a 6304 gsi_remove (&iter, true);
6de9cd9a
DN
6305 break;
6306 }
6307 else
6308 {
538dd0b7 6309 gtry *gtry;
726a989a
RB
6310 gimple_seq seq;
6311 enum gimple_try_flags kind;
40aac948 6312
726a989a
RB
6313 if (gimple_wce_cleanup_eh_only (wce))
6314 kind = GIMPLE_TRY_CATCH;
40aac948 6315 else
726a989a
RB
6316 kind = GIMPLE_TRY_FINALLY;
6317 seq = gsi_split_seq_after (iter);
6318
82d6e6fc 6319 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
726a989a
RB
6320 /* Do not use gsi_replace here, as it may scan operands.
6321 We want to do a simple structural modification only. */
355a7673 6322 gsi_set_stmt (&iter, gtry);
daa6e488 6323 iter = gsi_start (gtry->eval);
6de9cd9a
DN
6324 }
6325 }
6326 else
726a989a 6327 gsi_next (&iter);
6de9cd9a
DN
6328 }
6329
726a989a 6330 gimplify_seq_add_seq (pre_p, body_sequence);
6de9cd9a
DN
6331 if (temp)
6332 {
6333 *expr_p = temp;
6de9cd9a
DN
6334 return GS_OK;
6335 }
6336 else
6337 {
726a989a 6338 *expr_p = NULL;
6de9cd9a
DN
6339 return GS_ALL_DONE;
6340 }
6341}
6342
6343/* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
726a989a 6344 is the cleanup action required. EH_ONLY is true if the cleanup should
e650ea2a
RB
6345 only be executed if an exception is thrown, not on normal exit.
6346 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6347 only valid for clobbers. */
6de9cd9a
DN
6348
6349static void
e650ea2a
RB
6350gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
6351 bool force_uncond = false)
6de9cd9a 6352{
355fe088 6353 gimple *wce;
726a989a 6354 gimple_seq cleanup_stmts = NULL;
6de9cd9a
DN
6355
 6356 /* Errors can result in improperly nested cleanups, which results in
726a989a 6357 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
1da2ed5f 6358 if (seen_error ())
6de9cd9a
DN
6359 return;
6360
cda4d053 6361 if (gimple_conditional_context ())
6de9cd9a
DN
6362 {
6363 /* If we're in a conditional context, this is more complex. We only
6364 want to run the cleanup if we actually ran the initialization that
6365 necessitates it, but we want to run it after the end of the
6366 conditional context. So we wrap the try/finally around the
6367 condition and use a flag to determine whether or not to actually
6368 run the destructor. Thus
6369
6370 test ? f(A()) : 0
6371
6372 becomes (approximately)
6373
6374 flag = 0;
6375 try {
6376 if (test) { A::A(temp); flag = 1; val = f(temp); }
6377 else { val = 0; }
6378 } finally {
6379 if (flag) A::~A(temp);
6380 }
6381 val
6382 */
cda4d053
RB
6383 if (force_uncond)
6384 {
6385 gimplify_stmt (&cleanup, &cleanup_stmts);
6386 wce = gimple_build_wce (cleanup_stmts);
6387 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6388 }
6389 else
6390 {
6391 tree flag = create_tmp_var (boolean_type_node, "cleanup");
6392 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
6393 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
6394
6395 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
6396 gimplify_stmt (&cleanup, &cleanup_stmts);
6397 wce = gimple_build_wce (cleanup_stmts);
6398
6399 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
6400 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6401 gimplify_seq_add_stmt (pre_p, ftrue);
6402
6403 /* Because of this manipulation, and the EH edges that jump
6404 threading cannot redirect, the temporary (VAR) will appear
6405 to be used uninitialized. Don't warn. */
6406 TREE_NO_WARNING (var) = 1;
6407 }
6de9cd9a
DN
6408 }
6409 else
6410 {
726a989a
RB
6411 gimplify_stmt (&cleanup, &cleanup_stmts);
6412 wce = gimple_build_wce (cleanup_stmts);
6413 gimple_wce_set_cleanup_eh_only (wce, eh_only);
6414 gimplify_seq_add_stmt (pre_p, wce);
6de9cd9a 6415 }
6de9cd9a
DN
6416}
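/* A source-level sketch (not part of the pass itself) of the conditional
   cleanup shape handled above, assuming a C++ front end; the type and
   function names are purely illustrative:

       struct A { A (); ~A (); };
       int f (A);

       int g (bool test)
       {
         return test ? f (A ()) : 0;
       }

   The temporary's destructor must run only if the true arm actually
   constructed it, which is why the code above guards the cleanup with
   the boolean "cleanup" flag when in a conditional context. */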
6417
6418/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
6419
6420static enum gimplify_status
726a989a 6421gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6de9cd9a
DN
6422{
6423 tree targ = *expr_p;
6424 tree temp = TARGET_EXPR_SLOT (targ);
6425 tree init = TARGET_EXPR_INITIAL (targ);
6426 enum gimplify_status ret;
6427
6dc4a604
ML
6428 bool unpoison_empty_seq = false;
6429 gimple_stmt_iterator unpoison_it;
6430
6de9cd9a
DN
6431 if (init)
6432 {
d0ad58f9
JM
6433 tree cleanup = NULL_TREE;
6434
3a5b9284 6435 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
786025ea
JJ
6436 to the temps list. Also handle variable-length TARGET_EXPRs. */
6437 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
6438 {
6439 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
6440 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
6441 gimplify_vla_decl (temp, pre_p);
6442 }
6443 else
6dc4a604
ML
6444 {
6445 /* Save the location where we need to place the unpoisoning. It's
6446 possible that the variable will later need to live in memory. */
6447 unpoison_it = gsi_last (*pre_p);
6448 unpoison_empty_seq = gsi_end_p (unpoison_it);
6449
6450 gimple_add_tmp_var (temp);
6451 }
6de9cd9a 6452
3a5b9284
RH
6453 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6454 expression is supposed to initialize the slot. */
6455 if (VOID_TYPE_P (TREE_TYPE (init)))
6456 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6457 else
325c3691 6458 {
726a989a
RB
6459 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
6460 init = init_expr;
6461 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6462 init = NULL;
6463 ggc_free (init_expr);
325c3691 6464 }
3a5b9284 6465 if (ret == GS_ERROR)
abc67de1
SM
6466 {
6467 /* PR c++/28266 Make sure this is expanded only once. */
6468 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6469 return GS_ERROR;
6470 }
726a989a
RB
6471 if (init)
6472 gimplify_and_add (init, pre_p);
6de9cd9a
DN
6473
6474 /* If needed, push the cleanup for the temp. */
6475 if (TARGET_EXPR_CLEANUP (targ))
d0ad58f9
JM
6476 {
6477 if (CLEANUP_EH_ONLY (targ))
6478 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
6479 CLEANUP_EH_ONLY (targ), pre_p);
6480 else
6481 cleanup = TARGET_EXPR_CLEANUP (targ);
6482 }
6483
6484 /* Add a clobber for the temporary going out of scope, like
6485 gimplify_bind_expr. */
32be32af 6486 if (gimplify_ctxp->in_cleanup_point_expr
6dc4a604 6487 && needs_to_live_in_memory (temp))
d0ad58f9 6488 {
6dc4a604
ML
6489 if (flag_stack_reuse == SR_ALL)
6490 {
6491 tree clobber = build_constructor (TREE_TYPE (temp),
6492 NULL);
6493 TREE_THIS_VOLATILE (clobber) = true;
6494 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
e650ea2a 6495 gimple_push_cleanup (temp, clobber, false, pre_p, true);
6dc4a604 6496 }
6ff92497 6497 if (asan_poisoned_variables && dbg_cnt (asan_use_after_scope))
6dc4a604
ML
6498 {
6499 tree asan_cleanup = build_asan_poison_call_expr (temp);
6500 if (asan_cleanup)
6501 {
6502 if (unpoison_empty_seq)
6503 unpoison_it = gsi_start (*pre_p);
d0ad58f9 6504
6dc4a604
ML
6505 asan_poison_variable (temp, false, &unpoison_it,
6506 unpoison_empty_seq);
6507 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
6508 }
6509 }
6510 }
d0ad58f9
JM
6511 if (cleanup)
6512 gimple_push_cleanup (temp, cleanup, false, pre_p);
6de9cd9a
DN
6513
6514 /* Only expand this once. */
6515 TREE_OPERAND (targ, 3) = init;
6516 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6517 }
282899df 6518 else
6de9cd9a 6519 /* We should have expanded this before. */
282899df 6520 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
6de9cd9a
DN
6521
6522 *expr_p = temp;
6523 return GS_OK;
6524}
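/* Source-level sketch of what typically reaches gimplify_target_expr,
   assuming a C++ front end; names are illustrative only:

       struct S { S (); ~S (); int i; };
       int use (const S &);

       int h ()
       {
         return use (S ());   // the S () temporary is a TARGET_EXPR
       }

   The slot becomes a local temporary, the initialization is emitted into
   *PRE_P, and the destructor (plus a clobber under -fstack-reuse=all and,
   when use-after-scope poisoning is enabled, an ASan poison call) is
   pushed as a cleanup. */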
6525
6526/* Gimplification of expression trees. */
6527
726a989a
RB
6528/* Gimplify an expression which appears at statement context. The
6529 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6530 NULL, a new sequence is allocated.
6de9cd9a 6531
726a989a
RB
6532 Return true if we actually added a statement to the sequence. */
6533
6534bool
6535gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6de9cd9a 6536{
726a989a 6537 gimple_seq_node last;
6de9cd9a 6538
726a989a
RB
6539 last = gimple_seq_last (*seq_p);
6540 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6541 return last != gimple_seq_last (*seq_p);
6de9cd9a
DN
6542}
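/* Minimal usage sketch (hypothetical caller): lower one front-end tree
   statement into an existing sequence and note whether anything was
   actually emitted:

       gimple_seq seq = NULL;
       bool emitted = gimplify_stmt (&stmt, &seq);

   Helpers such as gimplify_and_add are thin wrappers over this pattern. */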
6543
953ff289
DN
6544/* Add FIRSTPRIVATE entries for DECL to CTX and to the surrounding OpenMP
6545 parallels. If entries already exist, force them to be some flavor of
6546 private. If there is no enclosing parallel, do nothing. */
6547
6548void
6549omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
6550{
6551 splay_tree_node n;
6552
d9a6bd32 6553 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
953ff289
DN
6554 return;
6555
6556 do
6557 {
6558 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6559 if (n != NULL)
6560 {
6561 if (n->value & GOVD_SHARED)
6562 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
acf0174b
JJ
6563 else if (n->value & GOVD_MAP)
6564 n->value |= GOVD_MAP_TO_ONLY;
953ff289
DN
6565 else
6566 return;
6567 }
d9a6bd32
JJ
6568 else if ((ctx->region_type & ORT_TARGET) != 0)
6569 {
6570 if (ctx->target_map_scalars_firstprivate)
6571 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6572 else
6573 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
6574 }
74bf76ed 6575 else if (ctx->region_type != ORT_WORKSHARE
acf0174b 6576 && ctx->region_type != ORT_SIMD
182190f2
NS
6577 && ctx->region_type != ORT_ACC
6578 && !(ctx->region_type & ORT_TARGET_DATA))
953ff289
DN
6579 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6580
6581 ctx = ctx->outer_context;
6582 }
6583 while (ctx);
6584}
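/* Source-level sketch of why this is needed, assuming C with OpenMP;
   the names are illustrative:

       void f (int n)
       {
         double a[n];
       #pragma omp parallel shared (a)
         a[0] = 1.0;
       }

   The size of A's variably modified type lives in compiler temporaries;
   those temporaries are made firstprivate on the enclosing parallel so
   the type can still be laid out inside the region. */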
6585
6586/* Similarly for each of the type sizes of TYPE. */
6587
6588static void
6589omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
6590{
6591 if (type == NULL || type == error_mark_node)
6592 return;
6593 type = TYPE_MAIN_VARIANT (type);
6594
6e2830c3 6595 if (ctx->privatized_types->add (type))
953ff289
DN
6596 return;
6597
6598 switch (TREE_CODE (type))
6599 {
6600 case INTEGER_TYPE:
6601 case ENUMERAL_TYPE:
6602 case BOOLEAN_TYPE:
953ff289 6603 case REAL_TYPE:
325217ed 6604 case FIXED_POINT_TYPE:
953ff289
DN
6605 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
6606 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
6607 break;
6608
6609 case ARRAY_TYPE:
6610 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6611 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
6612 break;
6613
6614 case RECORD_TYPE:
6615 case UNION_TYPE:
6616 case QUAL_UNION_TYPE:
6617 {
6618 tree field;
910ad8de 6619 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
953ff289
DN
6620 if (TREE_CODE (field) == FIELD_DECL)
6621 {
6622 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
6623 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
6624 }
6625 }
6626 break;
6627
6628 case POINTER_TYPE:
6629 case REFERENCE_TYPE:
6630 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6631 break;
6632
6633 default:
6634 break;
6635 }
6636
6637 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
6638 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
6639 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
6640}
6641
41dbbb37 6642/* Add an entry for DECL in the OMP context CTX with FLAGS. */
953ff289
DN
6643
6644static void
6645omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
6646{
6647 splay_tree_node n;
6648 unsigned int nflags;
6649 tree t;
6650
d9a6bd32 6651 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
953ff289
DN
6652 return;
6653
6654 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
d54d1fc3
JJ
6655 there are constructors involved somewhere. The exception is a shared
6656 clause: nothing is privatized in that case. */
6657 if ((flags & GOVD_SHARED) == 0
6658 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
6659 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
953ff289
DN
6660 flags |= GOVD_SEEN;
6661
6662 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
d9a6bd32 6663 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
953ff289
DN
6664 {
6665 /* We shouldn't be re-adding the decl with the same data
6666 sharing class. */
6667 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
953ff289 6668 nflags = n->value | flags;
182190f2
NS
6669 /* The only combination of data sharing classes we should see is
6670 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
6671 reduction variables to be used in data sharing clauses. */
6672 gcc_assert ((ctx->region_type & ORT_ACC) != 0
6673 || ((nflags & GOVD_DATA_SHARE_CLASS)
6674 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
74bf76ed 6675 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
953ff289
DN
6676 n->value = nflags;
6677 return;
6678 }
6679
6680 /* When adding a variable-sized variable, we have to handle all sorts
b8698a0f 6681 of additional bits of data: the pointer replacement variable, and
953ff289 6682 the parameters of the type. */
4c923c28 6683 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
953ff289
DN
6684 {
6685 /* Add the pointer replacement variable as PRIVATE if the variable
6686 replacement is private, else FIRSTPRIVATE since we'll need the
6687 address of the original variable either for SHARED, or for the
6688 copy into or out of the context. */
6689 if (!(flags & GOVD_LOCAL))
6690 {
41dbbb37
TS
6691 if (flags & GOVD_MAP)
6692 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
6693 else if (flags & GOVD_PRIVATE)
6694 nflags = GOVD_PRIVATE;
d9a6bd32
JJ
6695 else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6696 && (flags & GOVD_FIRSTPRIVATE))
6697 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
41dbbb37
TS
6698 else
6699 nflags = GOVD_FIRSTPRIVATE;
953ff289
DN
6700 nflags |= flags & GOVD_SEEN;
6701 t = DECL_VALUE_EXPR (decl);
6702 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6703 t = TREE_OPERAND (t, 0);
6704 gcc_assert (DECL_P (t));
6705 omp_add_variable (ctx, t, nflags);
6706 }
6707
6708 /* Add all of the variable and type parameters (which should have
6709 been gimplified to a formal temporary) as FIRSTPRIVATE. */
6710 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
6711 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
6712 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6713
6714 /* The variable-sized variable itself is never SHARED, only some form
6715 of PRIVATE. The sharing would take place via the pointer variable
6716 which we remapped above. */
6717 if (flags & GOVD_SHARED)
e9e2ef9f 6718 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
953ff289
DN
6719 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
6720
b8698a0f 6721 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
953ff289
DN
6722 alloca statement we generate for the variable, so make sure it
6723 is available. This isn't automatically needed for the SHARED
4288fea2
JJ
6724 case, since we won't be allocating local storage then.
6725 For local variables TYPE_SIZE_UNIT might not be gimplified yet;
6726 in that case omp_notice_variable will be called later,
6727 when it is gimplified. */
acf0174b 6728 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
423ed416 6729 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
953ff289
DN
6730 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
6731 }
acf0174b
JJ
6732 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
6733 && lang_hooks.decls.omp_privatize_by_reference (decl))
953ff289 6734 {
953ff289
DN
6735 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6736
6737 /* Similar to the direct variable sized case above, we'll need the
6738 size of references being privatized. */
6739 if ((flags & GOVD_SHARED) == 0)
6740 {
6741 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
d9a6bd32 6742 if (DECL_P (t))
953ff289
DN
6743 omp_notice_variable (ctx, t, true);
6744 }
6745 }
6746
74bf76ed
JJ
6747 if (n != NULL)
6748 n->value |= flags;
6749 else
6750 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
2c71d454
CLT
6751
6752 /* For reduction clauses in OpenACC loop directives, by default create a
6753 copy clause on the enclosing parallel construct for carrying back the
6754 results. */
6755 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
6756 {
6757 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
6758 while (outer_ctx)
6759 {
6760 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
6761 if (n != NULL)
6762 {
6763 /* Ignore local variables and explicitly declared clauses. */
6764 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
6765 break;
6766 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
6767 {
6768 /* According to the OpenACC spec, such a reduction variable
6769 should already have a copy map on a kernels construct;
6770 verify that here. */
6771 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
6772 && (n->value & GOVD_MAP));
6773 }
6774 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6775 {
6776 /* Remove firstprivate and make it a copy map. */
6777 n->value &= ~GOVD_FIRSTPRIVATE;
6778 n->value |= GOVD_MAP;
6779 }
6780 }
6781 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6782 {
6783 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
6784 GOVD_MAP | GOVD_SEEN);
6785 break;
6786 }
6787 outer_ctx = outer_ctx->outer_context;
6788 }
6789 }
953ff289
DN
6790}
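/* Source-level sketch of the OpenACC reduction case handled just above,
   assuming C with OpenACC; names are illustrative:

       float sum = 0.0f;
       #pragma acc parallel
       #pragma acc loop reduction (+:sum)
       for (int i = 0; i < n; i++)
         sum += x[i];

   With no explicit data clause for SUM on the parallel construct, a copy
   map is created there so the reduction result is carried back. */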
6791
41dbbb37 6792/* Notice a threadprivate variable DECL used in OMP context CTX.
f22f4340
JJ
6793 This just prints out diagnostics about threadprivate variable uses
6794 in untied tasks. If DECL2 is non-NULL, prevent this warning
6795 on that variable. */
6796
6797static bool
6798omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
6799 tree decl2)
6800{
6801 splay_tree_node n;
acf0174b
JJ
6802 struct gimplify_omp_ctx *octx;
6803
6804 for (octx = ctx; octx; octx = octx->outer_context)
d9a6bd32 6805 if ((octx->region_type & ORT_TARGET) != 0)
acf0174b
JJ
6806 {
6807 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
6808 if (n == NULL)
6809 {
6810 error ("threadprivate variable %qE used in target region",
6811 DECL_NAME (decl));
6812 error_at (octx->location, "enclosing target region");
6813 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
6814 }
6815 if (decl2)
6816 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
6817 }
f22f4340
JJ
6818
6819 if (ctx->region_type != ORT_UNTIED_TASK)
6820 return false;
6821 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6822 if (n == NULL)
6823 {
ad19c4be
EB
6824 error ("threadprivate variable %qE used in untied task",
6825 DECL_NAME (decl));
f22f4340
JJ
6826 error_at (ctx->location, "enclosing task");
6827 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
6828 }
6829 if (decl2)
6830 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
6831 return false;
6832}
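/* Source-level sketch of the second diagnostic above, assuming C with
   OpenMP:

       int counter;
       #pragma omp threadprivate (counter)

       void f (void)
       {
       #pragma omp task untied
         counter++;   // "threadprivate variable 'counter' used in untied task"
       }

   An untied task may resume on a different thread, so thread-local state
   cannot be used reliably inside it. */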
6833
6e232ba4
JN
6834/* Return true if global var DECL is device resident. */
6835
6836static bool
6837device_resident_p (tree decl)
6838{
6839 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
6840
6841 if (!attr)
6842 return false;
6843
6844 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
6845 {
6846 tree c = TREE_VALUE (t);
6847 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
6848 return true;
6849 }
6850
6851 return false;
6852}
6853
7ba8651e
CP
6854/* Return true if DECL has an ACC DECLARE attribute. */
6855
6856static bool
6857is_oacc_declared (tree decl)
6858{
6859 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
6860 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
6861 return declared != NULL_TREE;
6862}
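/* Source-level sketch for the two helpers above, assuming C with OpenACC;
   variable names are illustrative. An 'acc declare' directive is what
   attaches the "oacc declare target" attribute they look for:

       float lut[256];
       #pragma acc declare device_resident (lut)   // device_resident_p -> true

       float coef[16];
       #pragma acc declare copyin (coef)           // is_oacc_declared -> true
*/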
6863
72500605
NS
6864/* Determine outer default flags for DECL mentioned in an OMP region
6865 but not declared in an enclosing clause.
6866
6867 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
6868 remapped firstprivate instead of shared. To some extent this is
6869 addressed in omp_firstprivatize_type_sizes, but not
6870 effectively. */
6871
6872static unsigned
6873omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
6874 bool in_code, unsigned flags)
6875{
6876 enum omp_clause_default_kind default_kind = ctx->default_kind;
6877 enum omp_clause_default_kind kind;
6878
6879 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
6880 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
6881 default_kind = kind;
6882
6883 switch (default_kind)
6884 {
6885 case OMP_CLAUSE_DEFAULT_NONE:
6886 {
6887 const char *rtype;
6888
6889 if (ctx->region_type & ORT_PARALLEL)
6890 rtype = "parallel";
6891 else if (ctx->region_type & ORT_TASK)
6892 rtype = "task";
6893 else if (ctx->region_type & ORT_TEAMS)
6894 rtype = "teams";
6895 else
6896 gcc_unreachable ();
6897
724d25f3 6898 error ("%qE not specified in enclosing %qs",
72500605 6899 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
724d25f3 6900 error_at (ctx->location, "enclosing %qs", rtype);
72500605
NS
6901 }
6902 /* FALLTHRU */
6903 case OMP_CLAUSE_DEFAULT_SHARED:
6904 flags |= GOVD_SHARED;
6905 break;
6906 case OMP_CLAUSE_DEFAULT_PRIVATE:
6907 flags |= GOVD_PRIVATE;
6908 break;
6909 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
6910 flags |= GOVD_FIRSTPRIVATE;
6911 break;
6912 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
6913 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
6914 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
6915 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
6916 {
6917 omp_notice_variable (octx, decl, in_code);
6918 for (; octx; octx = octx->outer_context)
6919 {
6920 splay_tree_node n2;
6921
72500605 6922 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
e01d41e5
JJ
6923 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
6924 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
6925 continue;
72500605
NS
6926 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
6927 {
6928 flags |= GOVD_FIRSTPRIVATE;
6929 goto found_outer;
6930 }
6931 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
6932 {
6933 flags |= GOVD_SHARED;
6934 goto found_outer;
6935 }
6936 }
6937 }
6938
6939 if (TREE_CODE (decl) == PARM_DECL
6940 || (!is_global_var (decl)
6941 && DECL_CONTEXT (decl) == current_function_decl))
6942 flags |= GOVD_FIRSTPRIVATE;
6943 else
6944 flags |= GOVD_SHARED;
6945 found_outer:
6946 break;
6947
6948 default:
6949 gcc_unreachable ();
6950 }
6951
6952 return flags;
6953}
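/* Source-level sketch of the OMP_CLAUSE_DEFAULT_NONE diagnostic above,
   assuming C with OpenMP; names are illustrative:

       void f (int x)
       {
       #pragma omp parallel default (none)
         x++;   // error: 'x' not specified in enclosing 'parallel'
       }
*/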
6954
fffeedeb
NS
6955
6956/* Determine outer default flags for DECL mentioned in an OACC region
6957 but not declared in an enclosing clause. */
6958
6959static unsigned
6960oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
6961{
6962 const char *rkind;
6e232ba4 6963 bool on_device = false;
7ba8651e 6964 bool declared = is_oacc_declared (decl);
33a126a6
CP
6965 tree type = TREE_TYPE (decl);
6966
6967 if (lang_hooks.decls.omp_privatize_by_reference (decl))
6968 type = TREE_TYPE (type);
6e232ba4
JN
6969
6970 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
6971 && is_global_var (decl)
6972 && device_resident_p (decl))
6973 {
6974 on_device = true;
6975 flags |= GOVD_MAP_TO_ONLY;
6976 }
fffeedeb
NS
6977
6978 switch (ctx->region_type)
6979 {
fffeedeb 6980 case ORT_ACC_KERNELS:
fffeedeb 6981 rkind = "kernels";
0d0afa9f
TS
6982
6983 if (AGGREGATE_TYPE_P (type))
7fd549d2
TS
6984 {
6985 /* Aggregates default to 'present_or_copy', or 'present'. */
6986 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
6987 flags |= GOVD_MAP;
6988 else
6989 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
6990 }
0d0afa9f
TS
6991 else
6992 /* Scalars default to 'copy'. */
6993 flags |= GOVD_MAP | GOVD_MAP_FORCE;
6994
fffeedeb
NS
6995 break;
6996
6997 case ORT_ACC_PARALLEL:
0d0afa9f
TS
6998 rkind = "parallel";
6999
7000 if (on_device || declared)
7001 flags |= GOVD_MAP;
7002 else if (AGGREGATE_TYPE_P (type))
7fd549d2
TS
7003 {
7004 /* Aggregates default to 'present_or_copy', or 'present'. */
7005 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7006 flags |= GOVD_MAP;
7007 else
7008 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7009 }
0d0afa9f
TS
7010 else
7011 /* Scalars default to 'firstprivate'. */
7012 flags |= GOVD_FIRSTPRIVATE;
7013
fffeedeb 7014 break;
0d0afa9f
TS
7015
7016 default:
7017 gcc_unreachable ();
fffeedeb
NS
7018 }
7019
7020 if (DECL_ARTIFICIAL (decl))
7021 ; /* We can get compiler-generated decls, and should not complain
7022 about them. */
7023 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7024 {
7c602779 7025 error ("%qE not specified in enclosing OpenACC %qs construct",
fffeedeb 7026 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
20e8b68f 7027 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
fffeedeb 7028 }
7fd549d2
TS
7029 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7030 ; /* Handled above. */
fffeedeb
NS
7031 else
7032 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7033
7034 return flags;
7035}
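/* Source-level sketch of the defaults computed above, assuming C with
   OpenACC; names are illustrative:

       float a[1024];   // aggregate: defaults to a (present_or_)copy map
       float s = 2.0f;  // scalar: defaults to firstprivate on a parallel

       #pragma acc parallel loop
       for (int i = 0; i < 1024; i++)
         a[i] *= s;
*/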
7036
41dbbb37 7037/* Record the fact that DECL was used within the OMP context CTX.
953ff289
DN
7038 IN_CODE is true when real code uses DECL, and false when we should
7039 merely emit default(none) errors. Return true if DECL is going to
7040 be remapped and thus DECL shouldn't be gimplified into its
7041 DECL_VALUE_EXPR (if any). */
7042
7043static bool
7044omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7045{
7046 splay_tree_node n;
7047 unsigned flags = in_code ? GOVD_SEEN : 0;
7048 bool ret = false, shared;
7049
b504a918 7050 if (error_operand_p (decl))
953ff289
DN
7051 return false;
7052
d9a6bd32
JJ
7053 if (ctx->region_type == ORT_NONE)
7054 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7055
953ff289
DN
7056 if (is_global_var (decl))
7057 {
eb077516 7058 /* Threadprivate variables are predetermined. */
953ff289 7059 if (DECL_THREAD_LOCAL_P (decl))
f22f4340 7060 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
953ff289
DN
7061
7062 if (DECL_HAS_VALUE_EXPR_P (decl))
7063 {
7064 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7065
7066 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
f22f4340 7067 return omp_notice_threadprivate_variable (ctx, decl, value);
953ff289 7068 }
eb077516
JN
7069
7070 if (gimplify_omp_ctxp->outer_context == NULL
7071 && VAR_P (decl)
629b3d75 7072 && oacc_get_fn_attrib (current_function_decl))
eb077516
JN
7073 {
7074 location_t loc = DECL_SOURCE_LOCATION (decl);
7075
7076 if (lookup_attribute ("omp declare target link",
7077 DECL_ATTRIBUTES (decl)))
7078 {
7079 error_at (loc,
7080 "%qE with %<link%> clause used in %<routine%> function",
7081 DECL_NAME (decl));
7082 return false;
7083 }
7084 else if (!lookup_attribute ("omp declare target",
7085 DECL_ATTRIBUTES (decl)))
7086 {
7087 error_at (loc,
7088 "%qE requires a %<declare%> directive for use "
7089 "in a %<routine%> function", DECL_NAME (decl));
7090 return false;
7091 }
7092 }
953ff289
DN
7093 }
7094
7095 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
d9a6bd32 7096 if ((ctx->region_type & ORT_TARGET) != 0)
acf0174b 7097 {
f014c653 7098 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
acf0174b
JJ
7099 if (n == NULL)
7100 {
d9a6bd32
JJ
7101 unsigned nflags = flags;
7102 if (ctx->target_map_pointers_as_0len_arrays
7103 || ctx->target_map_scalars_firstprivate)
7104 {
7105 bool is_declare_target = false;
7106 bool is_scalar = false;
7107 if (is_global_var (decl)
7108 && varpool_node::get_create (decl)->offloadable)
7109 {
7110 struct gimplify_omp_ctx *octx;
7111 for (octx = ctx->outer_context;
7112 octx; octx = octx->outer_context)
7113 {
7114 n = splay_tree_lookup (octx->variables,
7115 (splay_tree_key)decl);
7116 if (n
7117 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7118 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7119 break;
7120 }
7121 is_declare_target = octx == NULL;
7122 }
7123 if (!is_declare_target && ctx->target_map_scalars_firstprivate)
b4c3a85b 7124 is_scalar = lang_hooks.decls.omp_scalar_p (decl);
d9a6bd32
JJ
7125 if (is_declare_target)
7126 ;
7127 else if (ctx->target_map_pointers_as_0len_arrays
7128 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7129 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7130 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7131 == POINTER_TYPE)))
7132 nflags |= GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
7133 else if (is_scalar)
7134 nflags |= GOVD_FIRSTPRIVATE;
7135 }
182190f2
NS
7136
7137 struct gimplify_omp_ctx *octx = ctx->outer_context;
7138 if ((ctx->region_type & ORT_ACC) && octx)
acf0174b 7139 {
182190f2
NS
7140 /* Look in outer OpenACC contexts, to see if there's a
7141 data attribute for this variable. */
7142 omp_notice_variable (octx, decl, in_code);
7143
7144 for (; octx; octx = octx->outer_context)
7145 {
7146 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7147 break;
7148 splay_tree_node n2
7149 = splay_tree_lookup (octx->variables,
7150 (splay_tree_key) decl);
7151 if (n2)
7152 {
37d5ad46
JB
7153 if (octx->region_type == ORT_ACC_HOST_DATA)
7154 error ("variable %qE declared in enclosing "
7155 "%<host_data%> region", DECL_NAME (decl));
182190f2 7156 nflags |= GOVD_MAP;
e46c7770
CP
7157 if (octx->region_type == ORT_ACC_DATA
7158 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7159 nflags |= GOVD_MAP_0LEN_ARRAY;
182190f2
NS
7160 goto found_outer;
7161 }
7162 }
acf0174b 7163 }
182190f2
NS
7164
7165 {
7166 tree type = TREE_TYPE (decl);
7167
7168 if (nflags == flags
7169 && gimplify_omp_ctxp->target_firstprivatize_array_bases
7170 && lang_hooks.decls.omp_privatize_by_reference (decl))
7171 type = TREE_TYPE (type);
7172 if (nflags == flags
7173 && !lang_hooks.types.omp_mappable_type (type))
7174 {
7175 error ("%qD referenced in target region does not have "
7176 "a mappable type", decl);
7177 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7178 }
7179 else if (nflags == flags)
fffeedeb
NS
7180 {
7181 if ((ctx->region_type & ORT_ACC) != 0)
7182 nflags = oacc_default_clause (ctx, decl, flags);
7183 else
7184 nflags |= GOVD_MAP;
7185 }
182190f2
NS
7186 }
7187 found_outer:
d9a6bd32 7188 omp_add_variable (ctx, decl, nflags);
acf0174b
JJ
7189 }
7190 else
f014c653
JJ
7191 {
7192 /* If nothing changed, there's nothing left to do. */
7193 if ((n->value & flags) == flags)
7194 return ret;
1a80d6b8
JJ
7195 flags |= n->value;
7196 n->value = flags;
f014c653 7197 }
acf0174b
JJ
7198 goto do_outer;
7199 }
7200
953ff289
DN
7201 if (n == NULL)
7202 {
74bf76ed 7203 if (ctx->region_type == ORT_WORKSHARE
acf0174b 7204 || ctx->region_type == ORT_SIMD
182190f2
NS
7205 || ctx->region_type == ORT_ACC
7206 || (ctx->region_type & ORT_TARGET_DATA) != 0)
953ff289
DN
7207 goto do_outer;
7208
72500605 7209 flags = omp_default_clause (ctx, decl, in_code, flags);
953ff289 7210
a68ab351
JJ
7211 if ((flags & GOVD_PRIVATE)
7212 && lang_hooks.decls.omp_private_outer_ref (decl))
7213 flags |= GOVD_PRIVATE_OUTER_REF;
7214
953ff289
DN
7215 omp_add_variable (ctx, decl, flags);
7216
7217 shared = (flags & GOVD_SHARED) != 0;
7218 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7219 goto do_outer;
7220 }
7221
3ad6b266
JJ
7222 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
7223 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
da3d46cb
JJ
7224 && DECL_SIZE (decl))
7225 {
7226 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7227 {
7228 splay_tree_node n2;
7229 tree t = DECL_VALUE_EXPR (decl);
7230 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7231 t = TREE_OPERAND (t, 0);
7232 gcc_assert (DECL_P (t));
7233 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7234 n2->value |= GOVD_SEEN;
7235 }
7236 else if (lang_hooks.decls.omp_privatize_by_reference (decl)
7237 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
7238 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
7239 != INTEGER_CST))
7240 {
7241 splay_tree_node n2;
7242 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7243 gcc_assert (DECL_P (t));
7244 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7245 if (n2)
ebff5c3f 7246 omp_notice_variable (ctx, t, true);
da3d46cb 7247 }
3ad6b266
JJ
7248 }
7249
953ff289
DN
7250 shared = ((flags | n->value) & GOVD_SHARED) != 0;
7251 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7252
7253 /* If nothing changed, there's nothing left to do. */
7254 if ((n->value & flags) == flags)
7255 return ret;
7256 flags |= n->value;
7257 n->value = flags;
7258
7259 do_outer:
7260 /* If the variable is private in the current context, then we don't
7261 need to propagate anything to an outer context. */
a68ab351 7262 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
953ff289 7263 return ret;
41b37d5e
JJ
7264 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7265 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7266 return ret;
7267 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7268 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7269 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7270 return ret;
953ff289
DN
7271 if (ctx->outer_context
7272 && omp_notice_variable (ctx->outer_context, decl, in_code))
7273 return true;
7274 return ret;
7275}
7276
7277/* Verify that DECL is private within CTX. If there's specific information
7278 to the contrary in the innermost scope, generate an error. */
7279
7280static bool
f7468577 7281omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
953ff289
DN
7282{
7283 splay_tree_node n;
7284
7285 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7286 if (n != NULL)
7287 {
7288 if (n->value & GOVD_SHARED)
7289 {
7290 if (ctx == gimplify_omp_ctxp)
f6a5ffbf 7291 {
74bf76ed
JJ
7292 if (simd)
7293 error ("iteration variable %qE is predetermined linear",
7294 DECL_NAME (decl));
7295 else
7296 error ("iteration variable %qE should be private",
7297 DECL_NAME (decl));
f6a5ffbf
JJ
7298 n->value = GOVD_PRIVATE;
7299 return true;
7300 }
7301 else
7302 return false;
953ff289 7303 }
761041be
JJ
7304 else if ((n->value & GOVD_EXPLICIT) != 0
7305 && (ctx == gimplify_omp_ctxp
a68ab351 7306 || (ctx->region_type == ORT_COMBINED_PARALLEL
761041be
JJ
7307 && gimplify_omp_ctxp->outer_context == ctx)))
7308 {
7309 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
4f1e4960
JM
7310 error ("iteration variable %qE should not be firstprivate",
7311 DECL_NAME (decl));
761041be 7312 else if ((n->value & GOVD_REDUCTION) != 0)
4f1e4960
JM
7313 error ("iteration variable %qE should not be reduction",
7314 DECL_NAME (decl));
e01d41e5
JJ
7315 else if (simd == 0 && (n->value & GOVD_LINEAR) != 0)
7316 error ("iteration variable %qE should not be linear",
7317 DECL_NAME (decl));
f7468577 7318 else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
74bf76ed
JJ
7319 error ("iteration variable %qE should not be lastprivate",
7320 DECL_NAME (decl));
7321 else if (simd && (n->value & GOVD_PRIVATE) != 0)
7322 error ("iteration variable %qE should not be private",
7323 DECL_NAME (decl));
f7468577 7324 else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
74bf76ed
JJ
7325 error ("iteration variable %qE is predetermined linear",
7326 DECL_NAME (decl));
761041be 7327 }
ca2b1311
JJ
7328 return (ctx == gimplify_omp_ctxp
7329 || (ctx->region_type == ORT_COMBINED_PARALLEL
7330 && gimplify_omp_ctxp->outer_context == ctx));
953ff289
DN
7331 }
7332
74bf76ed 7333 if (ctx->region_type != ORT_WORKSHARE
182190f2
NS
7334 && ctx->region_type != ORT_SIMD
7335 && ctx->region_type != ORT_ACC)
953ff289 7336 return false;
f6a5ffbf 7337 else if (ctx->outer_context)
74bf76ed 7338 return omp_is_private (ctx->outer_context, decl, simd);
ca2b1311 7339 return false;
953ff289
DN
7340}
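/* Source-level sketch of one of the diagnostics above, assuming C with
   OpenMP:

       int i;
       #pragma omp parallel for firstprivate (i)
       for (i = 0; i < 64; i++)
         ;   // error: iteration variable 'i' should not be firstprivate
*/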
7341
07b7aade
JJ
7342/* Return true if DECL is private within a parallel region
7343 that binds to the current construct's context, or appears in
7344 that parallel region's REDUCTION clause. */
7345
7346static bool
cab37c89 7347omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
07b7aade
JJ
7348{
7349 splay_tree_node n;
7350
7351 do
7352 {
7353 ctx = ctx->outer_context;
7354 if (ctx == NULL)
d9a6bd32
JJ
7355 {
7356 if (is_global_var (decl))
7357 return false;
7358
7359 /* References might be private, but they might be shared too.
7360 When checking for copyprivate, assume they might be
7361 private; otherwise assume they might be shared. */
7362 if (copyprivate)
7363 return true;
7364
7365 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7366 return false;
7367
7368 /* Treat C++ privatized non-static data members outside
7369 of the privatization the same. */
7370 if (omp_member_access_dummy_var (decl))
7371 return false;
7372
7373 return true;
7374 }
07b7aade 7375
e01d41e5
JJ
7376 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7377
7378 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7379 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
acf0174b
JJ
7380 continue;
7381
07b7aade 7382 if (n != NULL)
d9a6bd32
JJ
7383 {
7384 if ((n->value & GOVD_LOCAL) != 0
7385 && omp_member_access_dummy_var (decl))
7386 return false;
7387 return (n->value & GOVD_SHARED) == 0;
7388 }
07b7aade 7389 }
74bf76ed 7390 while (ctx->region_type == ORT_WORKSHARE
182190f2
NS
7391 || ctx->region_type == ORT_SIMD
7392 || ctx->region_type == ORT_ACC);
07b7aade
JJ
7393 return false;
7394}
7395
d9a6bd32
JJ
7396/* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7397
7398static tree
7399find_decl_expr (tree *tp, int *walk_subtrees, void *data)
7400{
7401 tree t = *tp;
7402
7403 /* Stop the walk and return the node if it is the DECL_EXPR for DATA. */
7404 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
7405 return t;
7406
7407 if (IS_TYPE_OR_DECL_P (t))
7408 *walk_subtrees = 0;
7409 return NULL_TREE;
7410}
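/* Hypothetical usage sketch: walk_tree is the generic walker this
   callback is written for, so a caller looking for a DECL_EXPR of DECL
   inside BODY would do something like:

       tree found = walk_tree (&body, find_decl_expr, decl, NULL);
*/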
7411
41dbbb37 7412/* Scan the OMP clauses in *LIST_P, installing mappings into a new
953ff289
DN
7413 omp context and the previous omp contexts. */
7414
7415static void
726a989a 7416gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
d9a6bd32
JJ
7417 enum omp_region_type region_type,
7418 enum tree_code code)
953ff289
DN
7419{
7420 struct gimplify_omp_ctx *ctx, *outer_ctx;
7421 tree c;
d9a6bd32 7422 hash_map<tree, tree> *struct_map_to_clause = NULL;
e01d41e5 7423 tree *prev_list_p = NULL;
953ff289 7424
a68ab351 7425 ctx = new_omp_context (region_type);
953ff289 7426 outer_ctx = ctx->outer_context;
b4c3a85b 7427 if (code == OMP_TARGET)
d9a6bd32 7428 {
b4c3a85b
JJ
7429 if (!lang_GNU_Fortran ())
7430 ctx->target_map_pointers_as_0len_arrays = true;
d9a6bd32
JJ
7431 ctx->target_map_scalars_firstprivate = true;
7432 }
7433 if (!lang_GNU_Fortran ())
7434 switch (code)
7435 {
7436 case OMP_TARGET:
7437 case OMP_TARGET_DATA:
7438 case OMP_TARGET_ENTER_DATA:
7439 case OMP_TARGET_EXIT_DATA:
7ba8651e 7440 case OACC_DECLARE:
37d5ad46 7441 case OACC_HOST_DATA:
d9a6bd32
JJ
7442 ctx->target_firstprivatize_array_bases = true;
7443 default:
7444 break;
7445 }
953ff289
DN
7446
7447 while ((c = *list_p) != NULL)
7448 {
953ff289
DN
7449 bool remove = false;
7450 bool notice_outer = true;
07b7aade 7451 const char *check_non_private = NULL;
953ff289
DN
7452 unsigned int flags;
7453 tree decl;
7454
aaf46ef9 7455 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
7456 {
7457 case OMP_CLAUSE_PRIVATE:
7458 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
a68ab351
JJ
7459 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
7460 {
7461 flags |= GOVD_PRIVATE_OUTER_REF;
7462 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
7463 }
7464 else
7465 notice_outer = false;
953ff289
DN
7466 goto do_add;
7467 case OMP_CLAUSE_SHARED:
7468 flags = GOVD_SHARED | GOVD_EXPLICIT;
7469 goto do_add;
7470 case OMP_CLAUSE_FIRSTPRIVATE:
7471 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
07b7aade 7472 check_non_private = "firstprivate";
953ff289
DN
7473 goto do_add;
7474 case OMP_CLAUSE_LASTPRIVATE:
7475 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
07b7aade 7476 check_non_private = "lastprivate";
41b37d5e 7477 decl = OMP_CLAUSE_DECL (c);
b4c3a85b 7478 if (error_operand_p (decl))
41b37d5e
JJ
7479 goto do_add;
7480 else if (outer_ctx
e01d41e5
JJ
7481 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
7482 || outer_ctx->region_type == ORT_COMBINED_TEAMS)
41b37d5e
JJ
7483 && splay_tree_lookup (outer_ctx->variables,
7484 (splay_tree_key) decl) == NULL)
e01d41e5
JJ
7485 {
7486 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
7487 if (outer_ctx->outer_context)
7488 omp_notice_variable (outer_ctx->outer_context, decl, true);
7489 }
d9a6bd32
JJ
7490 else if (outer_ctx
7491 && (outer_ctx->region_type & ORT_TASK) != 0
7492 && outer_ctx->combined_loop
7493 && splay_tree_lookup (outer_ctx->variables,
7494 (splay_tree_key) decl) == NULL)
e01d41e5
JJ
7495 {
7496 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
7497 if (outer_ctx->outer_context)
7498 omp_notice_variable (outer_ctx->outer_context, decl, true);
7499 }
41b37d5e 7500 else if (outer_ctx
182190f2
NS
7501 && (outer_ctx->region_type == ORT_WORKSHARE
7502 || outer_ctx->region_type == ORT_ACC)
41b37d5e
JJ
7503 && outer_ctx->combined_loop
7504 && splay_tree_lookup (outer_ctx->variables,
7505 (splay_tree_key) decl) == NULL
7506 && !omp_check_private (outer_ctx, decl, false))
7507 {
7508 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
7509 if (outer_ctx->outer_context
7510 && (outer_ctx->outer_context->region_type
7511 == ORT_COMBINED_PARALLEL)
7512 && splay_tree_lookup (outer_ctx->outer_context->variables,
7513 (splay_tree_key) decl) == NULL)
e01d41e5
JJ
7514 {
7515 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
7516 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
7517 if (octx->outer_context)
b4c3a85b
JJ
7518 {
7519 octx = octx->outer_context;
7520 if (octx->region_type == ORT_WORKSHARE
7521 && octx->combined_loop
7522 && splay_tree_lookup (octx->variables,
7523 (splay_tree_key) decl) == NULL
7524 && !omp_check_private (octx, decl, false))
7525 {
7526 omp_add_variable (octx, decl,
7527 GOVD_LASTPRIVATE | GOVD_SEEN);
7528 octx = octx->outer_context;
7529 if (octx
7530 && octx->region_type == ORT_COMBINED_TEAMS
7531 && (splay_tree_lookup (octx->variables,
7532 (splay_tree_key) decl)
7533 == NULL))
7534 {
7535 omp_add_variable (octx, decl,
7536 GOVD_SHARED | GOVD_SEEN);
7537 octx = octx->outer_context;
7538 }
7539 }
7540 if (octx)
7541 omp_notice_variable (octx, decl, true);
7542 }
e01d41e5
JJ
7543 }
7544 else if (outer_ctx->outer_context)
7545 omp_notice_variable (outer_ctx->outer_context, decl, true);
41b37d5e 7546 }
953ff289
DN
7547 goto do_add;
7548 case OMP_CLAUSE_REDUCTION:
7549 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
182190f2
NS
7550 /* OpenACC permits reductions on private variables. */
7551 if (!(region_type & ORT_ACC))
7552 check_non_private = "reduction";
d9a6bd32
JJ
7553 decl = OMP_CLAUSE_DECL (c);
7554 if (TREE_CODE (decl) == MEM_REF)
7555 {
7556 tree type = TREE_TYPE (decl);
7557 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
381cdae4
RB
7558 NULL, is_gimple_val, fb_rvalue, false)
7559 == GS_ERROR)
d9a6bd32
JJ
7560 {
7561 remove = true;
7562 break;
7563 }
7564 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7565 if (DECL_P (v))
7566 {
7567 omp_firstprivatize_variable (ctx, v);
7568 omp_notice_variable (ctx, v, true);
7569 }
7570 decl = TREE_OPERAND (decl, 0);
e01d41e5
JJ
7571 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
7572 {
7573 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
381cdae4 7574 NULL, is_gimple_val, fb_rvalue, false)
e01d41e5
JJ
7575 == GS_ERROR)
7576 {
7577 remove = true;
7578 break;
7579 }
7580 v = TREE_OPERAND (decl, 1);
7581 if (DECL_P (v))
7582 {
7583 omp_firstprivatize_variable (ctx, v);
7584 omp_notice_variable (ctx, v, true);
7585 }
7586 decl = TREE_OPERAND (decl, 0);
7587 }
d9a6bd32
JJ
7588 if (TREE_CODE (decl) == ADDR_EXPR
7589 || TREE_CODE (decl) == INDIRECT_REF)
7590 decl = TREE_OPERAND (decl, 0);
7591 }
7592 goto do_add_decl;
acf0174b
JJ
7593 case OMP_CLAUSE_LINEAR:
7594 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
7595 is_gimple_val, fb_rvalue) == GS_ERROR)
7596 {
7597 remove = true;
7598 break;
7599 }
41b37d5e
JJ
7600 else
7601 {
d9a6bd32
JJ
7602 if (code == OMP_SIMD
7603 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
7604 {
7605 struct gimplify_omp_ctx *octx = outer_ctx;
7606 if (octx
7607 && octx->region_type == ORT_WORKSHARE
7608 && octx->combined_loop
7609 && !octx->distribute)
7610 {
7611 if (octx->outer_context
7612 && (octx->outer_context->region_type
7613 == ORT_COMBINED_PARALLEL))
7614 octx = octx->outer_context->outer_context;
7615 else
7616 octx = octx->outer_context;
7617 }
7618 if (octx
7619 && octx->region_type == ORT_WORKSHARE
7620 && octx->combined_loop
b4c3a85b 7621 && octx->distribute)
d9a6bd32
JJ
7622 {
7623 error_at (OMP_CLAUSE_LOCATION (c),
7624 "%<linear%> clause for variable other than "
7625 "loop iterator specified on construct "
7626 "combined with %<distribute%>");
7627 remove = true;
7628 break;
7629 }
7630 }
41b37d5e
JJ
7631 /* For combined #pragma omp parallel for simd, need to put
7632 lastprivate and perhaps firstprivate too on the
7633 parallel. Similarly for #pragma omp for simd. */
7634 struct gimplify_omp_ctx *octx = outer_ctx;
7635 decl = NULL_TREE;
41b37d5e
JJ
7636 do
7637 {
7638 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7639 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7640 break;
7641 decl = OMP_CLAUSE_DECL (c);
7642 if (error_operand_p (decl))
7643 {
7644 decl = NULL_TREE;
7645 break;
7646 }
d9a6bd32
JJ
7647 flags = GOVD_SEEN;
7648 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
7649 flags |= GOVD_FIRSTPRIVATE;
7650 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7651 flags |= GOVD_LASTPRIVATE;
41b37d5e
JJ
7652 if (octx
7653 && octx->region_type == ORT_WORKSHARE
7654 && octx->combined_loop)
7655 {
7656 if (octx->outer_context
7657 && (octx->outer_context->region_type
e01d41e5 7658 == ORT_COMBINED_PARALLEL))
41b37d5e
JJ
7659 octx = octx->outer_context;
7660 else if (omp_check_private (octx, decl, false))
7661 break;
7662 }
d9a6bd32
JJ
7663 else if (octx
7664 && (octx->region_type & ORT_TASK) != 0
7665 && octx->combined_loop)
7666 ;
7667 else if (octx
7668 && octx->region_type == ORT_COMBINED_PARALLEL
7669 && ctx->region_type == ORT_WORKSHARE
7670 && octx == outer_ctx)
7671 flags = GOVD_SEEN | GOVD_SHARED;
e01d41e5
JJ
7672 else if (octx
7673 && octx->region_type == ORT_COMBINED_TEAMS)
7674 flags = GOVD_SEEN | GOVD_SHARED;
d9a6bd32
JJ
7675 else if (octx
7676 && octx->region_type == ORT_COMBINED_TARGET)
e01d41e5
JJ
7677 {
7678 flags &= ~GOVD_LASTPRIVATE;
7679 if (flags == GOVD_SEEN)
7680 break;
7681 }
41b37d5e
JJ
7682 else
7683 break;
d9a6bd32
JJ
7684 splay_tree_node on
7685 = splay_tree_lookup (octx->variables,
7686 (splay_tree_key) decl);
7687 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
2ee10f81
JJ
7688 {
7689 octx = NULL;
7690 break;
7691 }
41b37d5e
JJ
7692 omp_add_variable (octx, decl, flags);
7693 if (octx->outer_context == NULL)
7694 break;
7695 octx = octx->outer_context;
7696 }
7697 while (1);
7698 if (octx
7699 && decl
7700 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7701 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7702 omp_notice_variable (octx, decl, true);
7703 }
acf0174b 7704 flags = GOVD_LINEAR | GOVD_EXPLICIT;
41b37d5e
JJ
7705 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7706 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7707 {
7708 notice_outer = false;
7709 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
7710 }
acf0174b
JJ
7711 goto do_add;
7712
7713 case OMP_CLAUSE_MAP:
b46ebd6c
JJ
7714 decl = OMP_CLAUSE_DECL (c);
7715 if (error_operand_p (decl))
d9a6bd32
JJ
7716 remove = true;
7717 switch (code)
b46ebd6c 7718 {
d9a6bd32
JJ
7719 case OMP_TARGET:
7720 break;
e46c7770
CP
7721 case OACC_DATA:
7722 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
7723 break;
191816a3 7724 /* FALLTHRU */
d9a6bd32
JJ
7725 case OMP_TARGET_DATA:
7726 case OMP_TARGET_ENTER_DATA:
7727 case OMP_TARGET_EXIT_DATA:
e46c7770
CP
7728 case OACC_ENTER_DATA:
7729 case OACC_EXIT_DATA:
37d5ad46 7730 case OACC_HOST_DATA:
e01d41e5
JJ
7731 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7732 || (OMP_CLAUSE_MAP_KIND (c)
7733 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
d9a6bd32
JJ
7734 /* For target {,enter ,exit }data only the array slice is
7735 mapped, but not the pointer to it. */
7736 remove = true;
7737 break;
7738 default:
b46ebd6c
JJ
7739 break;
7740 }
d9a6bd32
JJ
7741 if (remove)
7742 break;
37d5ad46
JB
7743 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
7744 {
7745 struct gimplify_omp_ctx *octx;
7746 for (octx = outer_ctx; octx; octx = octx->outer_context)
7747 {
7748 if (octx->region_type != ORT_ACC_HOST_DATA)
7749 break;
7750 splay_tree_node n2
7751 = splay_tree_lookup (octx->variables,
7752 (splay_tree_key) decl);
7753 if (n2)
7754 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
7755 "declared in enclosing %<host_data%> region",
7756 DECL_NAME (decl));
7757 }
7758 }
b46ebd6c
JJ
7759 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
7760 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
7761 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
7762 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
7763 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
acf0174b
JJ
7764 {
7765 remove = true;
7766 break;
7767 }
e01d41e5
JJ
7768 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7769 || (OMP_CLAUSE_MAP_KIND (c)
7770 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
d9a6bd32
JJ
7771 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
7772 {
7773 OMP_CLAUSE_SIZE (c)
381cdae4
RB
7774 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
7775 false);
d9a6bd32
JJ
7776 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
7777 GOVD_FIRSTPRIVATE | GOVD_SEEN);
7778 }
acf0174b
JJ
7779 if (!DECL_P (decl))
7780 {
d9a6bd32
JJ
7781 tree d = decl, *pd;
7782 if (TREE_CODE (d) == ARRAY_REF)
7783 {
7784 while (TREE_CODE (d) == ARRAY_REF)
7785 d = TREE_OPERAND (d, 0);
7786 if (TREE_CODE (d) == COMPONENT_REF
7787 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
7788 decl = d;
7789 }
7790 pd = &OMP_CLAUSE_DECL (c);
7791 if (d == decl
7792 && TREE_CODE (decl) == INDIRECT_REF
7793 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
7794 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7795 == REFERENCE_TYPE))
7796 {
7797 pd = &TREE_OPERAND (decl, 0);
7798 decl = TREE_OPERAND (decl, 0);
7799 }
7800 if (TREE_CODE (decl) == COMPONENT_REF)
7801 {
7802 while (TREE_CODE (decl) == COMPONENT_REF)
7803 decl = TREE_OPERAND (decl, 0);
283635f9
JJ
7804 if (TREE_CODE (decl) == INDIRECT_REF
7805 && DECL_P (TREE_OPERAND (decl, 0))
7806 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7807 == REFERENCE_TYPE))
7808 decl = TREE_OPERAND (decl, 0);
d9a6bd32
JJ
7809 }
7810 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
acf0174b
JJ
7811 == GS_ERROR)
7812 {
7813 remove = true;
7814 break;
7815 }
d9a6bd32
JJ
7816 if (DECL_P (decl))
7817 {
7818 if (error_operand_p (decl))
7819 {
7820 remove = true;
7821 break;
7822 }
7823
283635f9
JJ
7824 tree stype = TREE_TYPE (decl);
7825 if (TREE_CODE (stype) == REFERENCE_TYPE)
7826 stype = TREE_TYPE (stype);
7827 if (TYPE_SIZE_UNIT (stype) == NULL
7828 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
d9a6bd32
JJ
7829 {
7830 error_at (OMP_CLAUSE_LOCATION (c),
7831 "mapping field %qE of variable length "
7832 "structure", OMP_CLAUSE_DECL (c));
7833 remove = true;
7834 break;
7835 }
7836
e01d41e5
JJ
7837 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
7838 {
7839 /* Error recovery. */
7840 if (prev_list_p == NULL)
7841 {
7842 remove = true;
7843 break;
7844 }
7845 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7846 {
7847 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
7848 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
7849 {
7850 remove = true;
7851 break;
7852 }
7853 }
7854 }
7855
d9a6bd32
JJ
7856 tree offset;
7857 HOST_WIDE_INT bitsize, bitpos;
7858 machine_mode mode;
ee45a32d 7859 int unsignedp, reversep, volatilep = 0;
d9a6bd32
JJ
7860 tree base = OMP_CLAUSE_DECL (c);
7861 while (TREE_CODE (base) == ARRAY_REF)
7862 base = TREE_OPERAND (base, 0);
7863 if (TREE_CODE (base) == INDIRECT_REF)
7864 base = TREE_OPERAND (base, 0);
7865 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
ee45a32d 7866 &mode, &unsignedp, &reversep,
25b75a48 7867 &volatilep);
283635f9
JJ
7868 tree orig_base = base;
7869 if ((TREE_CODE (base) == INDIRECT_REF
7870 || (TREE_CODE (base) == MEM_REF
7871 && integer_zerop (TREE_OPERAND (base, 1))))
7872 && DECL_P (TREE_OPERAND (base, 0))
7873 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
7874 == REFERENCE_TYPE))
7875 base = TREE_OPERAND (base, 0);
d9a6bd32
JJ
7876 gcc_assert (base == decl
7877 && (offset == NULL_TREE
7878 || TREE_CODE (offset) == INTEGER_CST));
7879
7880 splay_tree_node n
7881 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7882 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
e01d41e5
JJ
7883 == GOMP_MAP_ALWAYS_POINTER);
7884 if (n == NULL || (n->value & GOVD_MAP) == 0)
d9a6bd32 7885 {
e01d41e5
JJ
7886 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7887 OMP_CLAUSE_MAP);
7888 OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
283635f9
JJ
7889 if (orig_base != base)
7890 OMP_CLAUSE_DECL (l) = unshare_expr (orig_base);
7891 else
7892 OMP_CLAUSE_DECL (l) = decl;
e01d41e5
JJ
7893 OMP_CLAUSE_SIZE (l) = size_int (1);
7894 if (struct_map_to_clause == NULL)
7895 struct_map_to_clause = new hash_map<tree, tree>;
7896 struct_map_to_clause->put (decl, l);
d9a6bd32
JJ
7897 if (ptr)
7898 {
e01d41e5
JJ
7899 enum gomp_map_kind mkind
7900 = code == OMP_TARGET_EXIT_DATA
7901 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
d9a6bd32 7902 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
e01d41e5
JJ
7903 OMP_CLAUSE_MAP);
7904 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7905 OMP_CLAUSE_DECL (c2)
7906 = unshare_expr (OMP_CLAUSE_DECL (c));
7907 OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
7908 OMP_CLAUSE_SIZE (c2)
7909 = TYPE_SIZE_UNIT (ptr_type_node);
7910 OMP_CLAUSE_CHAIN (l) = c2;
7911 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7912 {
7913 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
7914 tree c3
7915 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7916 OMP_CLAUSE_MAP);
7917 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
7918 OMP_CLAUSE_DECL (c3)
7919 = unshare_expr (OMP_CLAUSE_DECL (c4));
7920 OMP_CLAUSE_SIZE (c3)
7921 = TYPE_SIZE_UNIT (ptr_type_node);
7922 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
7923 OMP_CLAUSE_CHAIN (c2) = c3;
7924 }
7925 *prev_list_p = l;
7926 prev_list_p = NULL;
7927 }
7928 else
7929 {
7930 OMP_CLAUSE_CHAIN (l) = c;
7931 *list_p = l;
7932 list_p = &OMP_CLAUSE_CHAIN (l);
d9a6bd32 7933 }
283635f9
JJ
7934 if (orig_base != base && code == OMP_TARGET)
7935 {
7936 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7937 OMP_CLAUSE_MAP);
7938 enum gomp_map_kind mkind
7939 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
7940 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7941 OMP_CLAUSE_DECL (c2) = decl;
7942 OMP_CLAUSE_SIZE (c2) = size_zero_node;
7943 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
7944 OMP_CLAUSE_CHAIN (l) = c2;
7945 }
d9a6bd32 7946 flags = GOVD_MAP | GOVD_EXPLICIT;
e01d41e5 7947 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
d9a6bd32
JJ
7948 flags |= GOVD_SEEN;
7949 goto do_add_decl;
7950 }
7951 else
7952 {
7953 tree *osc = struct_map_to_clause->get (decl);
e01d41e5
JJ
7954 tree *sc = NULL, *scp = NULL;
7955 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
d9a6bd32
JJ
7956 n->value |= GOVD_SEEN;
7957 offset_int o1, o2;
7958 if (offset)
7959 o1 = wi::to_offset (offset);
7960 else
7961 o1 = 0;
7962 if (bitpos)
7963 o1 = o1 + bitpos / BITS_PER_UNIT;
283635f9
JJ
7964 sc = &OMP_CLAUSE_CHAIN (*osc);
7965 if (*sc != c
7966 && (OMP_CLAUSE_MAP_KIND (*sc)
7967 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7968 sc = &OMP_CLAUSE_CHAIN (*sc);
7969 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
e01d41e5
JJ
7970 if (ptr && sc == prev_list_p)
7971 break;
7972 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7973 != COMPONENT_REF
7974 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7975 != INDIRECT_REF)
7976 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7977 != ARRAY_REF))
d9a6bd32
JJ
7978 break;
7979 else
7980 {
7981 tree offset2;
7982 HOST_WIDE_INT bitsize2, bitpos2;
7983 base = OMP_CLAUSE_DECL (*sc);
7984 if (TREE_CODE (base) == ARRAY_REF)
7985 {
7986 while (TREE_CODE (base) == ARRAY_REF)
7987 base = TREE_OPERAND (base, 0);
7988 if (TREE_CODE (base) != COMPONENT_REF
7989 || (TREE_CODE (TREE_TYPE (base))
7990 != ARRAY_TYPE))
7991 break;
7992 }
7993 else if (TREE_CODE (base) == INDIRECT_REF
7994 && (TREE_CODE (TREE_OPERAND (base, 0))
7995 == COMPONENT_REF)
7996 && (TREE_CODE (TREE_TYPE
7997 (TREE_OPERAND (base, 0)))
7998 == REFERENCE_TYPE))
7999 base = TREE_OPERAND (base, 0);
8000 base = get_inner_reference (base, &bitsize2,
8001 &bitpos2, &offset2,
8002 &mode, &unsignedp,
25b75a48 8003 &reversep, &volatilep);
283635f9
JJ
8004 if ((TREE_CODE (base) == INDIRECT_REF
8005 || (TREE_CODE (base) == MEM_REF
8006 && integer_zerop (TREE_OPERAND (base,
8007 1))))
8008 && DECL_P (TREE_OPERAND (base, 0))
8009 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base,
8010 0)))
8011 == REFERENCE_TYPE))
8012 base = TREE_OPERAND (base, 0);
d9a6bd32
JJ
8013 if (base != decl)
8014 break;
e01d41e5
JJ
8015 if (scp)
8016 continue;
d9a6bd32
JJ
8017 gcc_assert (offset == NULL_TREE
8018 || TREE_CODE (offset) == INTEGER_CST);
8019 tree d1 = OMP_CLAUSE_DECL (*sc);
8020 tree d2 = OMP_CLAUSE_DECL (c);
8021 while (TREE_CODE (d1) == ARRAY_REF)
8022 d1 = TREE_OPERAND (d1, 0);
8023 while (TREE_CODE (d2) == ARRAY_REF)
8024 d2 = TREE_OPERAND (d2, 0);
8025 if (TREE_CODE (d1) == INDIRECT_REF)
8026 d1 = TREE_OPERAND (d1, 0);
8027 if (TREE_CODE (d2) == INDIRECT_REF)
8028 d2 = TREE_OPERAND (d2, 0);
8029 while (TREE_CODE (d1) == COMPONENT_REF)
8030 if (TREE_CODE (d2) == COMPONENT_REF
8031 && TREE_OPERAND (d1, 1)
8032 == TREE_OPERAND (d2, 1))
8033 {
8034 d1 = TREE_OPERAND (d1, 0);
8035 d2 = TREE_OPERAND (d2, 0);
8036 }
8037 else
8038 break;
8039 if (d1 == d2)
8040 {
8041 error_at (OMP_CLAUSE_LOCATION (c),
8042 "%qE appears more than once in map "
8043 "clauses", OMP_CLAUSE_DECL (c));
8044 remove = true;
8045 break;
8046 }
8047 if (offset2)
8048 o2 = wi::to_offset (offset2);
8049 else
8050 o2 = 0;
8051 if (bitpos2)
8052 o2 = o2 + bitpos2 / BITS_PER_UNIT;
8053 if (wi::ltu_p (o1, o2)
8054 || (wi::eq_p (o1, o2) && bitpos < bitpos2))
e01d41e5
JJ
8055 {
8056 if (ptr)
8057 scp = sc;
8058 else
8059 break;
8060 }
d9a6bd32 8061 }
e01d41e5
JJ
8062 if (remove)
8063 break;
8064 OMP_CLAUSE_SIZE (*osc)
8065 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
8066 size_one_node);
d9a6bd32
JJ
8067 if (ptr)
8068 {
e01d41e5
JJ
8069 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8070 OMP_CLAUSE_MAP);
8071 tree cl = NULL_TREE;
8072 enum gomp_map_kind mkind
8073 = code == OMP_TARGET_EXIT_DATA
8074 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8075 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8076 OMP_CLAUSE_DECL (c2)
8077 = unshare_expr (OMP_CLAUSE_DECL (c));
8078 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
8079 OMP_CLAUSE_SIZE (c2)
8080 = TYPE_SIZE_UNIT (ptr_type_node);
8081 cl = scp ? *prev_list_p : c2;
8082 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8083 {
8084 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
8085 tree c3
8086 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8087 OMP_CLAUSE_MAP);
8088 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8089 OMP_CLAUSE_DECL (c3)
8090 = unshare_expr (OMP_CLAUSE_DECL (c4));
8091 OMP_CLAUSE_SIZE (c3)
8092 = TYPE_SIZE_UNIT (ptr_type_node);
8093 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
8094 if (!scp)
8095 OMP_CLAUSE_CHAIN (c2) = c3;
8096 else
8097 cl = c3;
8098 }
8099 if (scp)
8100 *scp = c2;
8101 if (sc == prev_list_p)
8102 {
8103 *sc = cl;
8104 prev_list_p = NULL;
8105 }
8106 else
8107 {
8108 *prev_list_p = OMP_CLAUSE_CHAIN (c);
8109 list_p = prev_list_p;
8110 prev_list_p = NULL;
8111 OMP_CLAUSE_CHAIN (c) = *sc;
8112 *sc = cl;
8113 continue;
8114 }
d9a6bd32 8115 }
e01d41e5 8116 else if (*sc != c)
d9a6bd32
JJ
8117 {
8118 *list_p = OMP_CLAUSE_CHAIN (c);
8119 OMP_CLAUSE_CHAIN (c) = *sc;
8120 *sc = c;
8121 continue;
8122 }
8123 }
8124 }
e01d41e5
JJ
8125 if (!remove
8126 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
8127 && OMP_CLAUSE_CHAIN (c)
8128 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
8129 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8130 == GOMP_MAP_ALWAYS_POINTER))
8131 prev_list_p = list_p;
acf0174b
JJ
8132 break;
8133 }
8134 flags = GOVD_MAP | GOVD_EXPLICIT;
e01d41e5
JJ
8135 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
8136 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
8137 flags |= GOVD_MAP_ALWAYS_TO;
acf0174b
JJ
8138 goto do_add;
8139
8140 case OMP_CLAUSE_DEPEND:
b4c3a85b 8141 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
d9a6bd32 8142 {
b4c3a85b
JJ
8143 tree deps = OMP_CLAUSE_DECL (c);
8144 while (deps && TREE_CODE (deps) == TREE_LIST)
8145 {
8146 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
8147 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
8148 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
8149 pre_p, NULL, is_gimple_val, fb_rvalue);
8150 deps = TREE_CHAIN (deps);
8151 }
d9a6bd32
JJ
8152 break;
8153 }
b4c3a85b
JJ
8154 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
8155 break;
acf0174b
JJ
8156 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8157 {
8158 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8159 NULL, is_gimple_val, fb_rvalue);
8160 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8161 }
8162 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8163 {
8164 remove = true;
8165 break;
8166 }
8167 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8168 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8169 is_gimple_val, fb_rvalue) == GS_ERROR)
8170 {
8171 remove = true;
8172 break;
8173 }
8174 break;
8175
8176 case OMP_CLAUSE_TO:
8177 case OMP_CLAUSE_FROM:
41dbbb37 8178 case OMP_CLAUSE__CACHE_:
b46ebd6c
JJ
8179 decl = OMP_CLAUSE_DECL (c);
8180 if (error_operand_p (decl))
acf0174b
JJ
8181 {
8182 remove = true;
8183 break;
8184 }
b46ebd6c
JJ
8185 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8186 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8187 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8188 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8189 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
acf0174b
JJ
8190 {
8191 remove = true;
8192 break;
8193 }
8194 if (!DECL_P (decl))
8195 {
8196 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
8197 NULL, is_gimple_lvalue, fb_lvalue)
8198 == GS_ERROR)
8199 {
8200 remove = true;
8201 break;
8202 }
8203 break;
8204 }
8205 goto do_notice;
953ff289 8206
d9a6bd32
JJ
8207 case OMP_CLAUSE_USE_DEVICE_PTR:
8208 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8209 goto do_add;
8210 case OMP_CLAUSE_IS_DEVICE_PTR:
8211 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8212 goto do_add;
8213
953ff289
DN
8214 do_add:
8215 decl = OMP_CLAUSE_DECL (c);
d9a6bd32 8216 do_add_decl:
b504a918 8217 if (error_operand_p (decl))
953ff289
DN
8218 {
8219 remove = true;
8220 break;
8221 }
d9a6bd32
JJ
8222 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
8223 {
8224 tree t = omp_member_access_dummy_var (decl);
8225 if (t)
8226 {
8227 tree v = DECL_VALUE_EXPR (decl);
8228 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
8229 if (outer_ctx)
8230 omp_notice_variable (outer_ctx, t, true);
8231 }
8232 }
e46c7770
CP
8233 if (code == OACC_DATA
8234 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8235 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8236 flags |= GOVD_MAP_0LEN_ARRAY;
953ff289 8237 omp_add_variable (ctx, decl, flags);
693d710f 8238 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
953ff289
DN
8239 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8240 {
8241 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
693d710f 8242 GOVD_LOCAL | GOVD_SEEN);
d9a6bd32
JJ
8243 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
8244 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
8245 find_decl_expr,
8246 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8247 NULL) == NULL_TREE)
8248 omp_add_variable (ctx,
8249 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8250 GOVD_LOCAL | GOVD_SEEN);
953ff289 8251 gimplify_omp_ctxp = ctx;
45852dcc 8252 push_gimplify_context ();
726a989a 8253
355a7673
MM
8254 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
8255 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
726a989a
RB
8256
8257 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
8258 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
8259 pop_gimplify_context
8260 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
45852dcc 8261 push_gimplify_context ();
726a989a
RB
8262 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
8263 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
b8698a0f 8264 pop_gimplify_context
726a989a
RB
8265 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
8266 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
8267 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
8268
953ff289
DN
8269 gimplify_omp_ctxp = outer_ctx;
8270 }
a68ab351
JJ
8271 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8272 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
8273 {
8274 gimplify_omp_ctxp = ctx;
45852dcc 8275 push_gimplify_context ();
a68ab351
JJ
8276 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
8277 {
8278 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8279 NULL, NULL);
8280 TREE_SIDE_EFFECTS (bind) = 1;
8281 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
8282 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
8283 }
726a989a
RB
8284 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
8285 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
8286 pop_gimplify_context
8287 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
8288 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
8289
dd2fc525
JJ
8290 gimplify_omp_ctxp = outer_ctx;
8291 }
8292 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8293 && OMP_CLAUSE_LINEAR_STMT (c))
8294 {
8295 gimplify_omp_ctxp = ctx;
8296 push_gimplify_context ();
8297 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
8298 {
8299 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8300 NULL, NULL);
8301 TREE_SIDE_EFFECTS (bind) = 1;
8302 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
8303 OMP_CLAUSE_LINEAR_STMT (c) = bind;
8304 }
8305 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
8306 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
8307 pop_gimplify_context
8308 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
8309 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
8310
a68ab351
JJ
8311 gimplify_omp_ctxp = outer_ctx;
8312 }
953ff289
DN
8313 if (notice_outer)
8314 goto do_notice;
8315 break;
8316
8317 case OMP_CLAUSE_COPYIN:
8318 case OMP_CLAUSE_COPYPRIVATE:
8319 decl = OMP_CLAUSE_DECL (c);
b504a918 8320 if (error_operand_p (decl))
953ff289
DN
8321 {
8322 remove = true;
8323 break;
8324 }
cab37c89
JJ
8325 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
8326 && !remove
8327 && !omp_check_private (ctx, decl, true))
8328 {
8329 remove = true;
8330 if (is_global_var (decl))
8331 {
8332 if (DECL_THREAD_LOCAL_P (decl))
8333 remove = false;
8334 else if (DECL_HAS_VALUE_EXPR_P (decl))
8335 {
8336 tree value = get_base_address (DECL_VALUE_EXPR (decl));
8337
8338 if (value
8339 && DECL_P (value)
8340 && DECL_THREAD_LOCAL_P (value))
8341 remove = false;
8342 }
8343 }
8344 if (remove)
8345 error_at (OMP_CLAUSE_LOCATION (c),
8346 "copyprivate variable %qE is not threadprivate"
8347 " or private in outer context", DECL_NAME (decl));
8348 }
953ff289
DN
8349 do_notice:
8350 if (outer_ctx)
8351 omp_notice_variable (outer_ctx, decl, true);
07b7aade 8352 if (check_non_private
a68ab351 8353 && region_type == ORT_WORKSHARE
d9a6bd32
JJ
8354 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
8355 || decl == OMP_CLAUSE_DECL (c)
8356 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
8357 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
e01d41e5
JJ
8358 == ADDR_EXPR
8359 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8360 == POINTER_PLUS_EXPR
8361 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
8362 (OMP_CLAUSE_DECL (c), 0), 0))
8363 == ADDR_EXPR)))))
cab37c89 8364 && omp_check_private (ctx, decl, false))
07b7aade 8365 {
4f1e4960
JM
8366 error ("%s variable %qE is private in outer context",
8367 check_non_private, DECL_NAME (decl));
07b7aade
JJ
8368 remove = true;
8369 }
953ff289
DN
8370 break;
8371
953ff289 8372 case OMP_CLAUSE_IF:
d9a6bd32
JJ
8373 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
8374 && OMP_CLAUSE_IF_MODIFIER (c) != code)
8375 {
8376 const char *p[2];
8377 for (int i = 0; i < 2; i++)
8378 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
8379 {
8380 case OMP_PARALLEL: p[i] = "parallel"; break;
8381 case OMP_TASK: p[i] = "task"; break;
8382 case OMP_TASKLOOP: p[i] = "taskloop"; break;
8383 case OMP_TARGET_DATA: p[i] = "target data"; break;
8384 case OMP_TARGET: p[i] = "target"; break;
8385 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
8386 case OMP_TARGET_ENTER_DATA:
8387 p[i] = "target enter data"; break;
8388 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
8389 default: gcc_unreachable ();
8390 }
8391 error_at (OMP_CLAUSE_LOCATION (c),
8392 "expected %qs %<if%> clause modifier rather than %qs",
8393 p[0], p[1]);
8394 remove = true;
8395 }
8396 /* Fall through. */
8397
8398 case OMP_CLAUSE_FINAL:
d568d1a8
RS
8399 OMP_CLAUSE_OPERAND (c, 0)
8400 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
8401 /* Fall through. */
8402
8403 case OMP_CLAUSE_SCHEDULE:
953ff289 8404 case OMP_CLAUSE_NUM_THREADS:
acf0174b
JJ
8405 case OMP_CLAUSE_NUM_TEAMS:
8406 case OMP_CLAUSE_THREAD_LIMIT:
8407 case OMP_CLAUSE_DIST_SCHEDULE:
8408 case OMP_CLAUSE_DEVICE:
d9a6bd32
JJ
8409 case OMP_CLAUSE_PRIORITY:
8410 case OMP_CLAUSE_GRAINSIZE:
8411 case OMP_CLAUSE_NUM_TASKS:
8412 case OMP_CLAUSE_HINT:
9a771876 8413 case OMP_CLAUSE__CILK_FOR_COUNT_:
41dbbb37
TS
8414 case OMP_CLAUSE_ASYNC:
8415 case OMP_CLAUSE_WAIT:
8416 case OMP_CLAUSE_NUM_GANGS:
8417 case OMP_CLAUSE_NUM_WORKERS:
8418 case OMP_CLAUSE_VECTOR_LENGTH:
41dbbb37
TS
8419 case OMP_CLAUSE_WORKER:
8420 case OMP_CLAUSE_VECTOR:
726a989a
RB
8421 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
8422 is_gimple_val, fb_rvalue) == GS_ERROR)
acf0174b 8423 remove = true;
d9a6bd32
JJ
8424 break;
8425
8426 case OMP_CLAUSE_GANG:
8427 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
8428 is_gimple_val, fb_rvalue) == GS_ERROR)
8429 remove = true;
8430 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
8431 is_gimple_val, fb_rvalue) == GS_ERROR)
41dbbb37
TS
8432 remove = true;
8433 break;
8434
953ff289
DN
8435 case OMP_CLAUSE_NOWAIT:
8436 case OMP_CLAUSE_ORDERED:
a68ab351
JJ
8437 case OMP_CLAUSE_UNTIED:
8438 case OMP_CLAUSE_COLLAPSE:
02889d23 8439 case OMP_CLAUSE_TILE:
41dbbb37
TS
8440 case OMP_CLAUSE_AUTO:
8441 case OMP_CLAUSE_SEQ:
7a5e4956 8442 case OMP_CLAUSE_INDEPENDENT:
20906c66 8443 case OMP_CLAUSE_MERGEABLE:
acf0174b 8444 case OMP_CLAUSE_PROC_BIND:
74bf76ed 8445 case OMP_CLAUSE_SAFELEN:
d9a6bd32
JJ
8446 case OMP_CLAUSE_SIMDLEN:
8447 case OMP_CLAUSE_NOGROUP:
8448 case OMP_CLAUSE_THREADS:
8449 case OMP_CLAUSE_SIMD:
8450 break;
8451
8452 case OMP_CLAUSE_DEFAULTMAP:
8453 ctx->target_map_scalars_firstprivate = false;
953ff289
DN
8454 break;
8455
acf0174b
JJ
8456 case OMP_CLAUSE_ALIGNED:
8457 decl = OMP_CLAUSE_DECL (c);
8458 if (error_operand_p (decl))
8459 {
8460 remove = true;
8461 break;
8462 }
b46ebd6c
JJ
8463 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
8464 is_gimple_val, fb_rvalue) == GS_ERROR)
8465 {
8466 remove = true;
8467 break;
8468 }
acf0174b
JJ
8469 if (!is_global_var (decl)
8470 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
8471 omp_add_variable (ctx, decl, GOVD_ALIGNED);
8472 break;
8473
953ff289
DN
8474 case OMP_CLAUSE_DEFAULT:
8475 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
8476 break;
8477
8478 default:
8479 gcc_unreachable ();
8480 }
8481
e46c7770
CP
8482 if (code == OACC_DATA
8483 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8484 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8485 remove = true;
953ff289
DN
8486 if (remove)
8487 *list_p = OMP_CLAUSE_CHAIN (c);
8488 else
8489 list_p = &OMP_CLAUSE_CHAIN (c);
8490 }
8491
8492 gimplify_omp_ctxp = ctx;
d9a6bd32
JJ
8493 if (struct_map_to_clause)
8494 delete struct_map_to_clause;
953ff289
DN
8495}
8496
1a80d6b8
JJ
8497/* Return true if DECL is a candidate for shared to firstprivate
8498 optimization. We only consider non-addressable scalars, not
8499 too big, and not references. */
8500
8501static bool
8502omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
8503{
8504 if (TREE_ADDRESSABLE (decl))
8505 return false;
8506 tree type = TREE_TYPE (decl);
8507 if (!is_gimple_reg_type (type)
8508 || TREE_CODE (type) == REFERENCE_TYPE
8509 || TREE_ADDRESSABLE (type))
8510 return false;
8511 /* Don't optimize too-large decls, as each thread/task will have
 8512 its own copy. */
8513 HOST_WIDE_INT len = int_size_in_bytes (type);
8514 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
8515 return false;
8516 if (lang_hooks.decls.omp_privatize_by_reference (decl))
8517 return false;
8518 return true;
8519}
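/* Illustrative sketch, added commentary rather than part of the original
   sources: a decl that passes the checks above is a small scalar such as

     int counter = 0;
   #pragma omp parallel shared (counter)
     { ... only reads of counter ... }

   COUNTER is of gimple register type, not a reference, not addressable and
   no larger than four pointers, so gimplify_adjust_omp_clauses_1 may later
   mark its shared clause OMP_CLAUSE_SHARED_READONLY when no store to it is
   discovered in the region body.  An aggregate, a reference, or anything
   whose address is taken does not qualify.  */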
8520
8521/* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
8522 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
8523 GOVD_WRITTEN in outer contexts. */
8524
8525static void
8526omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
8527{
8528 for (; ctx; ctx = ctx->outer_context)
8529 {
8530 splay_tree_node n = splay_tree_lookup (ctx->variables,
8531 (splay_tree_key) decl);
8532 if (n == NULL)
8533 continue;
8534 else if (n->value & GOVD_SHARED)
8535 {
8536 n->value |= GOVD_WRITTEN;
8537 return;
8538 }
8539 else if (n->value & GOVD_DATA_SHARE_CLASS)
8540 return;
8541 }
8542}
8543
8544/* Helper callback for walk_gimple_seq to discover possible stores
8545 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8546 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8547 for those. */
8548
8549static tree
8550omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
8551{
8552 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8553
8554 *walk_subtrees = 0;
8555 if (!wi->is_lhs)
8556 return NULL_TREE;
8557
8558 tree op = *tp;
8559 do
8560 {
8561 if (handled_component_p (op))
8562 op = TREE_OPERAND (op, 0);
8563 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
8564 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
8565 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
8566 else
8567 break;
8568 }
8569 while (1);
8570 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
8571 return NULL_TREE;
8572
8573 omp_mark_stores (gimplify_omp_ctxp, op);
8574 return NULL_TREE;
8575}
8576
8577/* Helper callback for walk_gimple_seq to discover possible stores
8578 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8579 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8580 for those. */
8581
8582static tree
8583omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
8584 bool *handled_ops_p,
8585 struct walk_stmt_info *wi)
8586{
8587 gimple *stmt = gsi_stmt (*gsi_p);
8588 switch (gimple_code (stmt))
8589 {
8590 /* Don't recurse on OpenMP constructs for which
8591 gimplify_adjust_omp_clauses already handled the bodies,
8592 except handle gimple_omp_for_pre_body. */
8593 case GIMPLE_OMP_FOR:
8594 *handled_ops_p = true;
8595 if (gimple_omp_for_pre_body (stmt))
8596 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
8597 omp_find_stores_stmt, omp_find_stores_op, wi);
8598 break;
8599 case GIMPLE_OMP_PARALLEL:
8600 case GIMPLE_OMP_TASK:
8601 case GIMPLE_OMP_SECTIONS:
8602 case GIMPLE_OMP_SINGLE:
8603 case GIMPLE_OMP_TARGET:
8604 case GIMPLE_OMP_TEAMS:
8605 case GIMPLE_OMP_CRITICAL:
8606 *handled_ops_p = true;
8607 break;
8608 default:
8609 break;
8610 }
8611 return NULL_TREE;
8612}
8613
f014c653
JJ
8614struct gimplify_adjust_omp_clauses_data
8615{
8616 tree *list_p;
8617 gimple_seq *pre_p;
8618};
8619
953ff289
DN
8620/* For all variables that were not actually used within the context,
8621 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
8622
8623static int
8624gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
8625{
f014c653
JJ
8626 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
8627 gimple_seq *pre_p
8628 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
953ff289
DN
8629 tree decl = (tree) n->key;
8630 unsigned flags = n->value;
aaf46ef9 8631 enum omp_clause_code code;
953ff289
DN
8632 tree clause;
8633 bool private_debug;
8634
8635 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
8636 return 0;
8637 if ((flags & GOVD_SEEN) == 0)
8638 return 0;
8639 if (flags & GOVD_DEBUG_PRIVATE)
8640 {
e9e2ef9f 8641 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
953ff289
DN
8642 private_debug = true;
8643 }
acf0174b
JJ
8644 else if (flags & GOVD_MAP)
8645 private_debug = false;
953ff289
DN
8646 else
8647 private_debug
8648 = lang_hooks.decls.omp_private_debug_clause (decl,
8649 !!(flags & GOVD_SHARED));
8650 if (private_debug)
8651 code = OMP_CLAUSE_PRIVATE;
acf0174b 8652 else if (flags & GOVD_MAP)
9dc5773f
JJ
8653 {
8654 code = OMP_CLAUSE_MAP;
8655 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
8656 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
8657 {
8658 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
8659 return 0;
8660 }
8661 }
953ff289
DN
8662 else if (flags & GOVD_SHARED)
8663 {
8664 if (is_global_var (decl))
64964499
JJ
8665 {
8666 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
8667 while (ctx != NULL)
8668 {
8669 splay_tree_node on
8670 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8671 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
74bf76ed 8672 | GOVD_PRIVATE | GOVD_REDUCTION
7de20fbd 8673 | GOVD_LINEAR | GOVD_MAP)) != 0)
64964499
JJ
8674 break;
8675 ctx = ctx->outer_context;
8676 }
8677 if (ctx == NULL)
8678 return 0;
8679 }
953ff289
DN
8680 code = OMP_CLAUSE_SHARED;
8681 }
8682 else if (flags & GOVD_PRIVATE)
8683 code = OMP_CLAUSE_PRIVATE;
8684 else if (flags & GOVD_FIRSTPRIVATE)
9dc5773f
JJ
8685 {
8686 code = OMP_CLAUSE_FIRSTPRIVATE;
8687 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
8688 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
8689 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
8690 {
8691 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
8692 "%<target%> construct", decl);
8693 return 0;
8694 }
8695 }
74bf76ed
JJ
8696 else if (flags & GOVD_LASTPRIVATE)
8697 code = OMP_CLAUSE_LASTPRIVATE;
acf0174b
JJ
8698 else if (flags & GOVD_ALIGNED)
8699 return 0;
953ff289
DN
8700 else
8701 gcc_unreachable ();
8702
1a80d6b8
JJ
8703 if (((flags & GOVD_LASTPRIVATE)
8704 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
8705 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8706 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8707
3693a620 8708 tree chain = *list_p;
c2255bc4 8709 clause = build_omp_clause (input_location, code);
aaf46ef9 8710 OMP_CLAUSE_DECL (clause) = decl;
3693a620 8711 OMP_CLAUSE_CHAIN (clause) = chain;
953ff289
DN
8712 if (private_debug)
8713 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
a68ab351
JJ
8714 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
8715 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
1a80d6b8
JJ
8716 else if (code == OMP_CLAUSE_SHARED
8717 && (flags & GOVD_WRITTEN) == 0
8718 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8719 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
ec35ea45
JJ
8720 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
8721 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
d9a6bd32
JJ
8722 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
8723 {
8724 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
8725 OMP_CLAUSE_DECL (nc) = decl;
8726 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
8727 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
8728 OMP_CLAUSE_DECL (clause)
8729 = build_simple_mem_ref_loc (input_location, decl);
8730 OMP_CLAUSE_DECL (clause)
8731 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
8732 build_int_cst (build_pointer_type (char_type_node), 0));
8733 OMP_CLAUSE_SIZE (clause) = size_zero_node;
8734 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8735 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
8736 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
8737 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
3693a620 8738 OMP_CLAUSE_CHAIN (nc) = chain;
d9a6bd32
JJ
8739 OMP_CLAUSE_CHAIN (clause) = nc;
8740 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8741 gimplify_omp_ctxp = ctx->outer_context;
8742 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
8743 pre_p, NULL, is_gimple_val, fb_rvalue);
8744 gimplify_omp_ctxp = ctx;
8745 }
acf0174b
JJ
8746 else if (code == OMP_CLAUSE_MAP)
8747 {
7fd549d2
TS
8748 int kind;
8749 /* Not all combinations of these GOVD_MAP flags are actually valid. */
8750 switch (flags & (GOVD_MAP_TO_ONLY
8751 | GOVD_MAP_FORCE
8752 | GOVD_MAP_FORCE_PRESENT))
8753 {
8754 case 0:
8755 kind = GOMP_MAP_TOFROM;
8756 break;
8757 case GOVD_MAP_FORCE:
8758 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
8759 break;
8760 case GOVD_MAP_TO_ONLY:
8761 kind = GOMP_MAP_TO;
8762 break;
8763 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
8764 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
8765 break;
8766 case GOVD_MAP_FORCE_PRESENT:
8767 kind = GOMP_MAP_FORCE_PRESENT;
8768 break;
8769 default:
8770 gcc_unreachable ();
8771 }
db0f1c7a 8772 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
acf0174b
JJ
8773 if (DECL_SIZE (decl)
8774 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
8775 {
8776 tree decl2 = DECL_VALUE_EXPR (decl);
8777 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
8778 decl2 = TREE_OPERAND (decl2, 0);
8779 gcc_assert (DECL_P (decl2));
8780 tree mem = build_simple_mem_ref (decl2);
8781 OMP_CLAUSE_DECL (clause) = mem;
8782 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8783 if (gimplify_omp_ctxp->outer_context)
8784 {
8785 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
8786 omp_notice_variable (ctx, decl2, true);
8787 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
8788 }
8789 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
8790 OMP_CLAUSE_MAP);
8791 OMP_CLAUSE_DECL (nc) = decl;
8792 OMP_CLAUSE_SIZE (nc) = size_zero_node;
d9a6bd32
JJ
8793 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
8794 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
8795 else
8796 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
acf0174b
JJ
8797 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
8798 OMP_CLAUSE_CHAIN (clause) = nc;
8799 }
e01d41e5
JJ
8800 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
8801 && lang_hooks.decls.omp_privatize_by_reference (decl))
8802 {
8803 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
8804 OMP_CLAUSE_SIZE (clause)
8805 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
8806 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8807 gimplify_omp_ctxp = ctx->outer_context;
8808 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
8809 pre_p, NULL, is_gimple_val, fb_rvalue);
8810 gimplify_omp_ctxp = ctx;
8811 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
8812 OMP_CLAUSE_MAP);
8813 OMP_CLAUSE_DECL (nc) = decl;
8814 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8815 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
8816 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
8817 OMP_CLAUSE_CHAIN (clause) = nc;
8818 }
b46ebd6c
JJ
8819 else
8820 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
acf0174b 8821 }
95782571
JJ
8822 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
8823 {
8824 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
8825 OMP_CLAUSE_DECL (nc) = decl;
8826 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
3693a620 8827 OMP_CLAUSE_CHAIN (nc) = chain;
95782571 8828 OMP_CLAUSE_CHAIN (clause) = nc;
f014c653
JJ
8829 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8830 gimplify_omp_ctxp = ctx->outer_context;
8831 lang_hooks.decls.omp_finish_clause (nc, pre_p);
8832 gimplify_omp_ctxp = ctx;
95782571 8833 }
953ff289 8834 *list_p = clause;
f014c653
JJ
8835 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8836 gimplify_omp_ctxp = ctx->outer_context;
8837 lang_hooks.decls.omp_finish_clause (clause, pre_p);
3693a620
JJ
8838 if (gimplify_omp_ctxp)
8839 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
8840 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
8841 && DECL_P (OMP_CLAUSE_SIZE (clause)))
8842 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
8843 true);
f014c653 8844 gimplify_omp_ctxp = ctx;
953ff289
DN
8845 return 0;
8846}
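/* Illustrative sketch, added commentary rather than part of the original
   sources: a decl the scan recorded as GOVD_FIRSTPRIVATE | GOVD_SEEN but
   not GOVD_EXPLICIT comes out of this callback as an implicit

     firstprivate (decl)

   clause with OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT set, while a plain
   GOVD_MAP | GOVD_SEEN decl becomes a GOMP_MAP_TOFROM map clause unless
   one of the GOVD_MAP_* modifier bits selects a different map kind.  */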
8847
8848static void
1a80d6b8 8849gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
d9a6bd32 8850 enum tree_code code)
953ff289
DN
8851{
8852 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8853 tree c, decl;
8854
1a80d6b8
JJ
8855 if (body)
8856 {
8857 struct gimplify_omp_ctx *octx;
8858 for (octx = ctx; octx; octx = octx->outer_context)
8859 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
8860 break;
8861 if (octx)
8862 {
8863 struct walk_stmt_info wi;
8864 memset (&wi, 0, sizeof (wi));
8865 walk_gimple_seq (body, omp_find_stores_stmt,
8866 omp_find_stores_op, &wi);
8867 }
8868 }
953ff289
DN
8869 while ((c = *list_p) != NULL)
8870 {
8871 splay_tree_node n;
8872 bool remove = false;
8873
aaf46ef9 8874 switch (OMP_CLAUSE_CODE (c))
953ff289 8875 {
9dc5773f
JJ
8876 case OMP_CLAUSE_FIRSTPRIVATE:
8877 if ((ctx->region_type & ORT_TARGET)
8878 && (ctx->region_type & ORT_ACC) == 0
8879 && TYPE_ATOMIC (strip_array_types
8880 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
8881 {
8882 error_at (OMP_CLAUSE_LOCATION (c),
8883 "%<_Atomic%> %qD in %<firstprivate%> clause on "
8884 "%<target%> construct", OMP_CLAUSE_DECL (c));
8885 remove = true;
8886 break;
8887 }
8888 /* FALLTHRU */
953ff289
DN
8889 case OMP_CLAUSE_PRIVATE:
8890 case OMP_CLAUSE_SHARED:
74bf76ed 8891 case OMP_CLAUSE_LINEAR:
953ff289
DN
8892 decl = OMP_CLAUSE_DECL (c);
8893 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8894 remove = !(n->value & GOVD_SEEN);
8895 if (! remove)
8896 {
aaf46ef9 8897 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
953ff289
DN
8898 if ((n->value & GOVD_DEBUG_PRIVATE)
8899 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
8900 {
8901 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
8902 || ((n->value & GOVD_DATA_SHARE_CLASS)
e9e2ef9f 8903 == GOVD_SHARED));
aaf46ef9 8904 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
953ff289
DN
8905 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
8906 }
1a80d6b8
JJ
8907 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8908 && (n->value & GOVD_WRITTEN) == 0
8909 && DECL_P (decl)
8910 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8911 OMP_CLAUSE_SHARED_READONLY (c) = 1;
8912 else if (DECL_P (decl)
8913 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8914 && (n->value & GOVD_WRITTEN) != 1)
8915 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8916 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
8917 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8918 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
953ff289
DN
8919 }
8920 break;
8921
8922 case OMP_CLAUSE_LASTPRIVATE:
8923 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
8924 accurately reflect the presence of a FIRSTPRIVATE clause. */
8925 decl = OMP_CLAUSE_DECL (c);
8926 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8927 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
8928 = (n->value & GOVD_FIRSTPRIVATE) != 0;
b4c3a85b
JJ
8929 if (code == OMP_DISTRIBUTE
8930 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
e01d41e5
JJ
8931 {
8932 remove = true;
8933 error_at (OMP_CLAUSE_LOCATION (c),
8934 "same variable used in %<firstprivate%> and "
8935 "%<lastprivate%> clauses on %<distribute%> "
8936 "construct");
8937 }
1a80d6b8
JJ
8938 if (!remove
8939 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8940 && DECL_P (decl)
8941 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8942 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
953ff289 8943 break;
b8698a0f 8944
acf0174b
JJ
8945 case OMP_CLAUSE_ALIGNED:
8946 decl = OMP_CLAUSE_DECL (c);
8947 if (!is_global_var (decl))
8948 {
8949 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8950 remove = n == NULL || !(n->value & GOVD_SEEN);
8951 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
8952 {
8953 struct gimplify_omp_ctx *octx;
8954 if (n != NULL
8955 && (n->value & (GOVD_DATA_SHARE_CLASS
8956 & ~GOVD_FIRSTPRIVATE)))
8957 remove = true;
8958 else
8959 for (octx = ctx->outer_context; octx;
8960 octx = octx->outer_context)
8961 {
8962 n = splay_tree_lookup (octx->variables,
8963 (splay_tree_key) decl);
8964 if (n == NULL)
8965 continue;
8966 if (n->value & GOVD_LOCAL)
8967 break;
8968 /* We have to avoid assigning a shared variable
8969 to itself when trying to add
8970 __builtin_assume_aligned. */
8971 if (n->value & GOVD_SHARED)
8972 {
8973 remove = true;
8974 break;
8975 }
8976 }
8977 }
8978 }
8979 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
8980 {
8981 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8982 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
8983 remove = true;
8984 }
8985 break;
8986
8987 case OMP_CLAUSE_MAP:
e01d41e5
JJ
8988 if (code == OMP_TARGET_EXIT_DATA
8989 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
8990 {
8991 remove = true;
8992 break;
8993 }
acf0174b 8994 decl = OMP_CLAUSE_DECL (c);
e46c7770 8995 /* Data clauses associated with acc parallel reductions must be
c42cfb5c
CP
8996 compatible with present_or_copy. Warn and adjust the clause
8997 if that is not the case. */
8998 if (ctx->region_type == ORT_ACC_PARALLEL)
8999 {
9000 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
9001 n = NULL;
9002
9003 if (DECL_P (t))
9004 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
9005
9006 if (n && (n->value & GOVD_REDUCTION))
9007 {
9008 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
9009
9010 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
9011 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
9012 && kind != GOMP_MAP_FORCE_PRESENT
9013 && kind != GOMP_MAP_POINTER)
9014 {
9015 warning_at (OMP_CLAUSE_LOCATION (c), 0,
9016 "incompatible data clause with reduction "
9017 "on %qE; promoting to present_or_copy",
9018 DECL_NAME (t));
9019 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
9020 }
9021 }
9022 }
acf0174b 9023 if (!DECL_P (decl))
d9a6bd32
JJ
9024 {
9025 if ((ctx->region_type & ORT_TARGET) != 0
9026 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
9027 {
9028 if (TREE_CODE (decl) == INDIRECT_REF
9029 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
9030 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9031 == REFERENCE_TYPE))
9032 decl = TREE_OPERAND (decl, 0);
9033 if (TREE_CODE (decl) == COMPONENT_REF)
9034 {
9035 while (TREE_CODE (decl) == COMPONENT_REF)
9036 decl = TREE_OPERAND (decl, 0);
9037 if (DECL_P (decl))
9038 {
9039 n = splay_tree_lookup (ctx->variables,
9040 (splay_tree_key) decl);
9041 if (!(n->value & GOVD_SEEN))
9042 remove = true;
9043 }
9044 }
9045 }
9046 break;
9047 }
acf0174b 9048 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
d9a6bd32
JJ
9049 if ((ctx->region_type & ORT_TARGET) != 0
9050 && !(n->value & GOVD_SEEN)
4a38b02b 9051 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
56f71478
JJ
9052 && (!is_global_var (decl)
9053 || !lookup_attribute ("omp declare target link",
9054 DECL_ATTRIBUTES (decl))))
d9a6bd32
JJ
9055 {
9056 remove = true;
9057 /* For struct element mapping, if the struct is never referenced
 9058 in the target block and none of the mappings has an always modifier,
9059 remove all the struct element mappings, which immediately
9060 follow the GOMP_MAP_STRUCT map clause. */
9061 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
9062 {
9063 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
9064 while (cnt--)
9065 OMP_CLAUSE_CHAIN (c)
9066 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
9067 }
9068 }
9069 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
9070 && code == OMP_TARGET_EXIT_DATA)
acf0174b
JJ
9071 remove = true;
9072 else if (DECL_SIZE (decl)
9073 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
d9a6bd32 9074 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
e01d41e5
JJ
9075 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
9076 && (OMP_CLAUSE_MAP_KIND (c)
9077 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
acf0174b 9078 {
41dbbb37
TS
9079 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
9080 for these, TREE_CODE (DECL_SIZE (decl)) will always be
9081 INTEGER_CST. */
9082 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
9083
acf0174b
JJ
9084 tree decl2 = DECL_VALUE_EXPR (decl);
9085 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9086 decl2 = TREE_OPERAND (decl2, 0);
9087 gcc_assert (DECL_P (decl2));
9088 tree mem = build_simple_mem_ref (decl2);
9089 OMP_CLAUSE_DECL (c) = mem;
9090 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9091 if (ctx->outer_context)
9092 {
9093 omp_notice_variable (ctx->outer_context, decl2, true);
9094 omp_notice_variable (ctx->outer_context,
9095 OMP_CLAUSE_SIZE (c), true);
9096 }
d9a6bd32
JJ
9097 if (((ctx->region_type & ORT_TARGET) != 0
9098 || !ctx->target_firstprivatize_array_bases)
9099 && ((n->value & GOVD_SEEN) == 0
9100 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
9101 {
9102 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9103 OMP_CLAUSE_MAP);
9104 OMP_CLAUSE_DECL (nc) = decl;
9105 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9106 if (ctx->target_firstprivatize_array_bases)
9107 OMP_CLAUSE_SET_MAP_KIND (nc,
9108 GOMP_MAP_FIRSTPRIVATE_POINTER);
9109 else
9110 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
9111 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
9112 OMP_CLAUSE_CHAIN (c) = nc;
9113 c = nc;
9114 }
9115 }
9116 else
9117 {
9118 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9119 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
e01d41e5
JJ
9120 gcc_assert ((n->value & GOVD_SEEN) == 0
9121 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
9122 == 0));
acf0174b
JJ
9123 }
9124 break;
9125
9126 case OMP_CLAUSE_TO:
9127 case OMP_CLAUSE_FROM:
41dbbb37 9128 case OMP_CLAUSE__CACHE_:
acf0174b
JJ
9129 decl = OMP_CLAUSE_DECL (c);
9130 if (!DECL_P (decl))
9131 break;
9132 if (DECL_SIZE (decl)
9133 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
9134 {
9135 tree decl2 = DECL_VALUE_EXPR (decl);
9136 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9137 decl2 = TREE_OPERAND (decl2, 0);
9138 gcc_assert (DECL_P (decl2));
9139 tree mem = build_simple_mem_ref (decl2);
9140 OMP_CLAUSE_DECL (c) = mem;
9141 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9142 if (ctx->outer_context)
9143 {
9144 omp_notice_variable (ctx->outer_context, decl2, true);
9145 omp_notice_variable (ctx->outer_context,
9146 OMP_CLAUSE_SIZE (c), true);
9147 }
9148 }
b46ebd6c
JJ
9149 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9150 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
acf0174b
JJ
9151 break;
9152
953ff289 9153 case OMP_CLAUSE_REDUCTION:
1a80d6b8 9154 decl = OMP_CLAUSE_DECL (c);
c42cfb5c
CP
9155 /* OpenACC reductions need a present_or_copy data clause.
9156 Add one if necessary. Error if the reduction is private. */
9157 if (ctx->region_type == ORT_ACC_PARALLEL)
9158 {
9159 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9160 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
9161 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
9162 "reduction on %qE", DECL_NAME (decl));
9163 else if ((n->value & GOVD_MAP) == 0)
9164 {
9165 tree next = OMP_CLAUSE_CHAIN (c);
9166 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
9167 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
9168 OMP_CLAUSE_DECL (nc) = decl;
9169 OMP_CLAUSE_CHAIN (c) = nc;
9170 lang_hooks.decls.omp_finish_clause (nc, pre_p);
9171 while (1)
9172 {
9173 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
9174 if (OMP_CLAUSE_CHAIN (nc) == NULL)
9175 break;
9176 nc = OMP_CLAUSE_CHAIN (nc);
9177 }
9178 OMP_CLAUSE_CHAIN (nc) = next;
9179 n->value |= GOVD_MAP;
9180 }
9181 }
1a80d6b8
JJ
9182 if (DECL_P (decl)
9183 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9184 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9185 break;
953ff289
DN
9186 case OMP_CLAUSE_COPYIN:
9187 case OMP_CLAUSE_COPYPRIVATE:
9188 case OMP_CLAUSE_IF:
9189 case OMP_CLAUSE_NUM_THREADS:
acf0174b
JJ
9190 case OMP_CLAUSE_NUM_TEAMS:
9191 case OMP_CLAUSE_THREAD_LIMIT:
9192 case OMP_CLAUSE_DIST_SCHEDULE:
9193 case OMP_CLAUSE_DEVICE:
953ff289
DN
9194 case OMP_CLAUSE_SCHEDULE:
9195 case OMP_CLAUSE_NOWAIT:
9196 case OMP_CLAUSE_ORDERED:
9197 case OMP_CLAUSE_DEFAULT:
a68ab351
JJ
9198 case OMP_CLAUSE_UNTIED:
9199 case OMP_CLAUSE_COLLAPSE:
20906c66
JJ
9200 case OMP_CLAUSE_FINAL:
9201 case OMP_CLAUSE_MERGEABLE:
acf0174b 9202 case OMP_CLAUSE_PROC_BIND:
74bf76ed 9203 case OMP_CLAUSE_SAFELEN:
d9a6bd32 9204 case OMP_CLAUSE_SIMDLEN:
acf0174b 9205 case OMP_CLAUSE_DEPEND:
d9a6bd32
JJ
9206 case OMP_CLAUSE_PRIORITY:
9207 case OMP_CLAUSE_GRAINSIZE:
9208 case OMP_CLAUSE_NUM_TASKS:
9209 case OMP_CLAUSE_NOGROUP:
9210 case OMP_CLAUSE_THREADS:
9211 case OMP_CLAUSE_SIMD:
9212 case OMP_CLAUSE_HINT:
9213 case OMP_CLAUSE_DEFAULTMAP:
9214 case OMP_CLAUSE_USE_DEVICE_PTR:
9215 case OMP_CLAUSE_IS_DEVICE_PTR:
9a771876 9216 case OMP_CLAUSE__CILK_FOR_COUNT_:
41dbbb37
TS
9217 case OMP_CLAUSE_ASYNC:
9218 case OMP_CLAUSE_WAIT:
41dbbb37
TS
9219 case OMP_CLAUSE_INDEPENDENT:
9220 case OMP_CLAUSE_NUM_GANGS:
9221 case OMP_CLAUSE_NUM_WORKERS:
9222 case OMP_CLAUSE_VECTOR_LENGTH:
9223 case OMP_CLAUSE_GANG:
9224 case OMP_CLAUSE_WORKER:
9225 case OMP_CLAUSE_VECTOR:
9226 case OMP_CLAUSE_AUTO:
9227 case OMP_CLAUSE_SEQ:
7a5e4956 9228 case OMP_CLAUSE_TILE:
953ff289
DN
9229 break;
9230
9231 default:
9232 gcc_unreachable ();
9233 }
9234
9235 if (remove)
9236 *list_p = OMP_CLAUSE_CHAIN (c);
9237 else
9238 list_p = &OMP_CLAUSE_CHAIN (c);
9239 }
9240
9241 /* Add in any implicit data sharing. */
f014c653
JJ
9242 struct gimplify_adjust_omp_clauses_data data;
9243 data.list_p = list_p;
9244 data.pre_p = pre_p;
9245 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
b8698a0f 9246
953ff289
DN
9247 gimplify_omp_ctxp = ctx->outer_context;
9248 delete_omp_context (ctx);
9249}
9250
41dbbb37
TS
9251/* Gimplify OACC_CACHE. */
9252
9253static void
9254gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
9255{
9256 tree expr = *expr_p;
9257
182190f2 9258 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
d9a6bd32 9259 OACC_CACHE);
1a80d6b8
JJ
9260 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
9261 OACC_CACHE);
41dbbb37
TS
9262
9263 /* TODO: Do something sensible with this information. */
9264
9265 *expr_p = NULL_TREE;
9266}
9267
6e232ba4
JN
9268/* Helper function of gimplify_oacc_declare. The helper's purpose is to,
9269 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
9270 kind. The entry kind will replace the one in CLAUSE, while the exit
9271 kind will be used in a new omp_clause and returned to the caller. */
9272
9273static tree
9274gimplify_oacc_declare_1 (tree clause)
9275{
9276 HOST_WIDE_INT kind, new_op;
9277 bool ret = false;
9278 tree c = NULL;
9279
9280 kind = OMP_CLAUSE_MAP_KIND (clause);
9281
9282 switch (kind)
9283 {
9284 case GOMP_MAP_ALLOC:
9285 case GOMP_MAP_FORCE_ALLOC:
9286 case GOMP_MAP_FORCE_TO:
91106e84 9287 new_op = GOMP_MAP_DELETE;
6e232ba4
JN
9288 ret = true;
9289 break;
9290
9291 case GOMP_MAP_FORCE_FROM:
9292 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
9293 new_op = GOMP_MAP_FORCE_FROM;
9294 ret = true;
9295 break;
9296
9297 case GOMP_MAP_FORCE_TOFROM:
9298 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_TO);
9299 new_op = GOMP_MAP_FORCE_FROM;
9300 ret = true;
9301 break;
9302
9303 case GOMP_MAP_FROM:
9304 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
9305 new_op = GOMP_MAP_FROM;
9306 ret = true;
9307 break;
9308
9309 case GOMP_MAP_TOFROM:
9310 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
9311 new_op = GOMP_MAP_FROM;
9312 ret = true;
9313 break;
9314
9315 case GOMP_MAP_DEVICE_RESIDENT:
9316 case GOMP_MAP_FORCE_DEVICEPTR:
9317 case GOMP_MAP_FORCE_PRESENT:
9318 case GOMP_MAP_LINK:
9319 case GOMP_MAP_POINTER:
9320 case GOMP_MAP_TO:
9321 break;
9322
9323 default:
9324 gcc_unreachable ();
9325 break;
9326 }
9327
9328 if (ret)
9329 {
9330 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
9331 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
9332 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
9333 }
9334
9335 return c;
9336}
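/* Illustrative sketch, added commentary rather than part of the original
   sources: a clause arriving here with kind GOMP_MAP_TOFROM is rewritten
   in place to GOMP_MAP_TO for the entry point, and the function returns a
   fresh clause on the same decl with kind GOMP_MAP_FROM for the exit
   sequence; kinds such as GOMP_MAP_TO or GOMP_MAP_DEVICE_RESIDENT need no
   exit counterpart, so NULL_TREE is returned for them.  */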
9337
9338/* Gimplify OACC_DECLARE. */
9339
9340static void
9341gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
9342{
9343 tree expr = *expr_p;
9344 gomp_target *stmt;
7ba8651e 9345 tree clauses, t, decl;
6e232ba4
JN
9346
9347 clauses = OACC_DECLARE_CLAUSES (expr);
9348
9349 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
7ba8651e 9350 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
6e232ba4
JN
9351
9352 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
9353 {
7ba8651e 9354 decl = OMP_CLAUSE_DECL (t);
6e232ba4
JN
9355
9356 if (TREE_CODE (decl) == MEM_REF)
7ba8651e
CP
9357 decl = TREE_OPERAND (decl, 0);
9358
9359 if (VAR_P (decl) && !is_oacc_declared (decl))
9360 {
9361 tree attr = get_identifier ("oacc declare target");
9362 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
9363 DECL_ATTRIBUTES (decl));
9364 }
6e232ba4 9365
8813a647 9366 if (VAR_P (decl)
6e232ba4
JN
9367 && !is_global_var (decl)
9368 && DECL_CONTEXT (decl) == current_function_decl)
9369 {
9370 tree c = gimplify_oacc_declare_1 (t);
9371 if (c)
9372 {
9373 if (oacc_declare_returns == NULL)
9374 oacc_declare_returns = new hash_map<tree, tree>;
9375
9376 oacc_declare_returns->put (decl, c);
9377 }
9378 }
9379
7ba8651e
CP
9380 if (gimplify_omp_ctxp)
9381 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
6e232ba4
JN
9382 }
9383
9384 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
9385 clauses);
9386
9387 gimplify_seq_add_stmt (pre_p, stmt);
9388
9389 *expr_p = NULL_TREE;
9390}
9391
953ff289
DN
9392/* Gimplify the contents of an OMP_PARALLEL statement. This involves
9393 gimplification of the body, as well as scanning the body for used
9394 variables. We need to do this scan now, because variable-sized
9395 decls will be decomposed during gimplification. */
9396
726a989a
RB
9397static void
9398gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
953ff289
DN
9399{
9400 tree expr = *expr_p;
355fe088 9401 gimple *g;
726a989a 9402 gimple_seq body = NULL;
953ff289 9403
a68ab351
JJ
9404 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
9405 OMP_PARALLEL_COMBINED (expr)
9406 ? ORT_COMBINED_PARALLEL
d9a6bd32 9407 : ORT_PARALLEL, OMP_PARALLEL);
953ff289 9408
45852dcc 9409 push_gimplify_context ();
953ff289 9410
726a989a
RB
9411 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
9412 if (gimple_code (g) == GIMPLE_BIND)
9413 pop_gimplify_context (g);
50674e96 9414 else
726a989a 9415 pop_gimplify_context (NULL);
953ff289 9416
1a80d6b8 9417 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
d9a6bd32 9418 OMP_PARALLEL);
953ff289 9419
726a989a
RB
9420 g = gimple_build_omp_parallel (body,
9421 OMP_PARALLEL_CLAUSES (expr),
9422 NULL_TREE, NULL_TREE);
9423 if (OMP_PARALLEL_COMBINED (expr))
9424 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
9425 gimplify_seq_add_stmt (pre_p, g);
9426 *expr_p = NULL_TREE;
953ff289
DN
9427}
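/* Illustrative sketch, added commentary rather than part of the original
   sources, of why the body must be scanned before gimplification:

     void f (int n)
     {
       int a[n];
     #pragma omp parallel
       a[0] = n;
     }

   A is variable-sized, so gimplification rewrites accesses to it through
   a DECL_VALUE_EXPR based on a pointer; scanning the clauses and body
   first lets the OMP context record the original decl (and its size)
   before that decomposition happens.  */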
9428
a68ab351
JJ
9429/* Gimplify the contents of an OMP_TASK statement. This involves
9430 gimplification of the body, as well as scanning the body for used
9431 variables. We need to do this scan now, because variable-sized
9432 decls will be decomposed during gimplification. */
953ff289 9433
726a989a
RB
9434static void
9435gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
953ff289 9436{
a68ab351 9437 tree expr = *expr_p;
355fe088 9438 gimple *g;
726a989a 9439 gimple_seq body = NULL;
953ff289 9440
f22f4340 9441 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
629b3d75 9442 omp_find_clause (OMP_TASK_CLAUSES (expr),
f22f4340 9443 OMP_CLAUSE_UNTIED)
d9a6bd32 9444 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
953ff289 9445
45852dcc 9446 push_gimplify_context ();
953ff289 9447
726a989a
RB
9448 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
9449 if (gimple_code (g) == GIMPLE_BIND)
9450 pop_gimplify_context (g);
953ff289 9451 else
726a989a 9452 pop_gimplify_context (NULL);
953ff289 9453
1a80d6b8
JJ
9454 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
9455 OMP_TASK);
917948d3 9456
726a989a
RB
9457 g = gimple_build_omp_task (body,
9458 OMP_TASK_CLAUSES (expr),
9459 NULL_TREE, NULL_TREE,
9460 NULL_TREE, NULL_TREE, NULL_TREE);
9461 gimplify_seq_add_stmt (pre_p, g);
9462 *expr_p = NULL_TREE;
a68ab351
JJ
9463}
9464
acf0174b
JJ
9465/* Helper function of gimplify_omp_for: find an OMP_FOR or OMP_SIMD
 9466 with non-NULL OMP_FOR_INIT. */
9467
9468static tree
9469find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
9470{
9471 *walk_subtrees = 0;
9472 switch (TREE_CODE (*tp))
9473 {
9474 case OMP_FOR:
9475 *walk_subtrees = 1;
9476 /* FALLTHRU */
9477 case OMP_SIMD:
9478 if (OMP_FOR_INIT (*tp) != NULL_TREE)
9479 return *tp;
9480 break;
9481 case BIND_EXPR:
9482 case STATEMENT_LIST:
9483 case OMP_PARALLEL:
9484 *walk_subtrees = 1;
9485 break;
9486 default:
9487 break;
9488 }
9489 return NULL_TREE;
9490}
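/* Illustrative sketch, added commentary rather than part of the original
   sources: for a combined construct such as

     #pragma omp distribute parallel for
     for (i = 0; i < n; i++)
       ...

   the outer OMP_DISTRIBUTE is built with a NULL OMP_FOR_INIT and the real
   loop header lives on the innermost loop tree; the walker above looks
   through BIND_EXPRs, STATEMENT_LISTs and OMP_PARALLEL to return that
   inner OMP_FOR (or OMP_SIMD).  */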
9491
a68ab351
JJ
9492/* Gimplify the gross structure of an OMP_FOR statement. */
9493
9494static enum gimplify_status
726a989a 9495gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
a68ab351 9496{
9ce1688b 9497 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
32e8bb8e
ILT
9498 enum gimplify_status ret = GS_ALL_DONE;
9499 enum gimplify_status tret;
538dd0b7 9500 gomp_for *gfor;
726a989a 9501 gimple_seq for_body, for_pre_body;
a68ab351 9502 int i;
74bf76ed 9503 bitmap has_decl_expr = NULL;
d9a6bd32 9504 enum omp_region_type ort = ORT_WORKSHARE;
a68ab351 9505
acf0174b 9506 orig_for_stmt = for_stmt = *expr_p;
a68ab351 9507
41dbbb37
TS
9508 switch (TREE_CODE (for_stmt))
9509 {
9510 case OMP_FOR:
9511 case CILK_FOR:
9512 case OMP_DISTRIBUTE:
182190f2 9513 break;
41dbbb37 9514 case OACC_LOOP:
182190f2 9515 ort = ORT_ACC;
d9a6bd32
JJ
9516 break;
9517 case OMP_TASKLOOP:
629b3d75 9518 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
d9a6bd32
JJ
9519 ort = ORT_UNTIED_TASK;
9520 else
9521 ort = ORT_TASK;
41dbbb37
TS
9522 break;
9523 case OMP_SIMD:
9524 case CILK_SIMD:
d9a6bd32 9525 ort = ORT_SIMD;
41dbbb37
TS
9526 break;
9527 default:
9528 gcc_unreachable ();
9529 }
9530
41b37d5e
JJ
9531 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
9532 clause for the IV. */
d9a6bd32 9533 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
41b37d5e
JJ
9534 {
9535 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
9536 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9537 decl = TREE_OPERAND (t, 0);
9538 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
9539 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9540 && OMP_CLAUSE_DECL (c) == decl)
9541 {
9542 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
9543 break;
9544 }
9545 }
9546
9ce1688b
JJ
9547 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
9548 {
9549 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
9550 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
9551 find_combined_omp_for, NULL, NULL);
9552 if (inner_for_stmt == NULL_TREE)
9553 {
9554 gcc_assert (seen_error ());
9555 *expr_p = NULL_TREE;
9556 return GS_ERROR;
9557 }
9558 }
9559
d9a6bd32
JJ
9560 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
9561 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
9562 TREE_CODE (for_stmt));
9563
9cf32741
JJ
9564 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
9565 gimplify_omp_ctxp->distribute = true;
917948d3 9566
726a989a
RB
9567 /* Handle OMP_FOR_INIT. */
9568 for_pre_body = NULL;
d9a6bd32 9569 if (ort == ORT_SIMD && OMP_FOR_PRE_BODY (for_stmt))
74bf76ed
JJ
9570 {
9571 has_decl_expr = BITMAP_ALLOC (NULL);
9572 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
9573 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
acf0174b 9574 == VAR_DECL)
74bf76ed
JJ
9575 {
9576 t = OMP_FOR_PRE_BODY (for_stmt);
9577 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
9578 }
9579 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
9580 {
9581 tree_stmt_iterator si;
9582 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
9583 tsi_next (&si))
9584 {
9585 t = tsi_stmt (si);
9586 if (TREE_CODE (t) == DECL_EXPR
9587 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
9588 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
9589 }
9590 }
9591 }
d9a6bd32
JJ
9592 if (OMP_FOR_PRE_BODY (for_stmt))
9593 {
9594 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
9595 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
9596 else
9597 {
9598 struct gimplify_omp_ctx ctx;
9599 memset (&ctx, 0, sizeof (ctx));
9600 ctx.region_type = ORT_NONE;
9601 gimplify_omp_ctxp = &ctx;
9602 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
9603 gimplify_omp_ctxp = NULL;
9604 }
9605 }
726a989a 9606 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
a68ab351 9607
acf0174b 9608 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
d9a6bd32
JJ
9609 for_stmt = inner_for_stmt;
9610
9611 /* For taskloop, need to gimplify the start, end and step before the
9612 taskloop, outside of the taskloop omp context. */
9613 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
acf0174b 9614 {
d9a6bd32
JJ
9615 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9616 {
9617 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9618 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
9619 {
9620 TREE_OPERAND (t, 1)
9621 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
381cdae4 9622 pre_p, NULL, false);
d9a6bd32
JJ
9623 tree c = build_omp_clause (input_location,
9624 OMP_CLAUSE_FIRSTPRIVATE);
9625 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
9626 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9627 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9628 }
9629
9630 /* Handle OMP_FOR_COND. */
9631 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
9632 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
9633 {
9634 TREE_OPERAND (t, 1)
9635 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
9636 gimple_seq_empty_p (for_pre_body)
381cdae4
RB
9637 ? pre_p : &for_pre_body, NULL,
9638 false);
d9a6bd32
JJ
9639 tree c = build_omp_clause (input_location,
9640 OMP_CLAUSE_FIRSTPRIVATE);
9641 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
9642 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9643 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9644 }
9645
9646 /* Handle OMP_FOR_INCR. */
9647 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9648 if (TREE_CODE (t) == MODIFY_EXPR)
9649 {
9650 decl = TREE_OPERAND (t, 0);
9651 t = TREE_OPERAND (t, 1);
9652 tree *tp = &TREE_OPERAND (t, 1);
9653 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
9654 tp = &TREE_OPERAND (t, 0);
9655
9656 if (!is_gimple_constant (*tp))
9657 {
9658 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
9659 ? pre_p : &for_pre_body;
381cdae4 9660 *tp = get_initialized_tmp_var (*tp, seq, NULL, false);
d9a6bd32
JJ
9661 tree c = build_omp_clause (input_location,
9662 OMP_CLAUSE_FIRSTPRIVATE);
9663 OMP_CLAUSE_DECL (c) = *tp;
9664 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9665 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9666 }
9667 }
9668 }
9669
9670 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
9671 OMP_TASKLOOP);
acf0174b
JJ
9672 }
9673
d9a6bd32
JJ
9674 if (orig_for_stmt != for_stmt)
9675 gimplify_omp_ctxp->combined_loop = true;
9676
355a7673 9677 for_body = NULL;
a68ab351
JJ
9678 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9679 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
9680 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9681 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
d9a6bd32 9682
629b3d75 9683 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
d9a6bd32
JJ
9684 bool is_doacross = false;
9685 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
9686 {
9687 is_doacross = true;
9688 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
9689 (OMP_FOR_INIT (for_stmt))
9690 * 2);
9691 }
02889d23 9692 int collapse = 1, tile = 0;
629b3d75 9693 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
d9a6bd32
JJ
9694 if (c)
9695 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
02889d23
CLT
9696 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
9697 if (c)
9698 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
a68ab351
JJ
9699 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9700 {
9701 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
726a989a
RB
9702 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9703 decl = TREE_OPERAND (t, 0);
a68ab351
JJ
9704 gcc_assert (DECL_P (decl));
9705 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
9706 || POINTER_TYPE_P (TREE_TYPE (decl)));
d9a6bd32
JJ
9707 if (is_doacross)
9708 {
9709 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
9710 gimplify_omp_ctxp->loop_iter_var.quick_push
9711 (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i));
9712 else
9713 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
9714 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
9715 }
a68ab351
JJ
9716
9717 /* Make sure the iteration variable is private. */
74bf76ed 9718 tree c = NULL_TREE;
f7468577 9719 tree c2 = NULL_TREE;
acf0174b
JJ
9720 if (orig_for_stmt != for_stmt)
9721 /* Do this only on innermost construct for combined ones. */;
d9a6bd32 9722 else if (ort == ORT_SIMD)
74bf76ed
JJ
9723 {
9724 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
d9a6bd32 9725 (splay_tree_key) decl);
f7468577
JJ
9726 omp_is_private (gimplify_omp_ctxp, decl,
9727 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9728 != 1));
74bf76ed
JJ
9729 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
9730 omp_notice_variable (gimplify_omp_ctxp, decl, true);
9731 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9732 {
9733 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
9734 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
41b37d5e 9735 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
b4c3a85b
JJ
9736 if (has_decl_expr
9737 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
41b37d5e
JJ
9738 {
9739 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9740 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9741 }
499c20bb
JJ
9742 struct gimplify_omp_ctx *outer
9743 = gimplify_omp_ctxp->outer_context;
9744 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9745 {
9746 if (outer->region_type == ORT_WORKSHARE
9747 && outer->combined_loop)
9748 {
9749 n = splay_tree_lookup (outer->variables,
9750 (splay_tree_key)decl);
9751 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9752 {
9753 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9754 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9755 }
e01d41e5
JJ
9756 else
9757 {
9758 struct gimplify_omp_ctx *octx = outer->outer_context;
9759 if (octx
9760 && octx->region_type == ORT_COMBINED_PARALLEL
9761 && octx->outer_context
9762 && (octx->outer_context->region_type
9763 == ORT_WORKSHARE)
9764 && octx->outer_context->combined_loop)
9765 {
9766 octx = octx->outer_context;
9767 n = splay_tree_lookup (octx->variables,
9768 (splay_tree_key)decl);
9769 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9770 {
9771 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9772 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9773 }
9774 }
9775 }
499c20bb
JJ
9776 }
9777 }
9778
74bf76ed
JJ
9779 OMP_CLAUSE_DECL (c) = decl;
9780 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
9781 OMP_FOR_CLAUSES (for_stmt) = c;
41b37d5e 9782 omp_add_variable (gimplify_omp_ctxp, decl, flags);
41b37d5e
JJ
9783 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9784 {
9785 if (outer->region_type == ORT_WORKSHARE
9786 && outer->combined_loop)
9787 {
9788 if (outer->outer_context
9789 && (outer->outer_context->region_type
9790 == ORT_COMBINED_PARALLEL))
9791 outer = outer->outer_context;
9792 else if (omp_check_private (outer, decl, false))
9793 outer = NULL;
9794 }
d9a6bd32
JJ
9795 else if (((outer->region_type & ORT_TASK) != 0)
9796 && outer->combined_loop
9797 && !omp_check_private (gimplify_omp_ctxp,
9798 decl, false))
9799 ;
41b37d5e 9800 else if (outer->region_type != ORT_COMBINED_PARALLEL)
84311083
JJ
9801 {
9802 omp_notice_variable (outer, decl, true);
9803 outer = NULL;
9804 }
41b37d5e
JJ
9805 if (outer)
9806 {
cbdfbde8
JJ
9807 n = splay_tree_lookup (outer->variables,
9808 (splay_tree_key)decl);
9809 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9810 {
9811 omp_add_variable (outer, decl,
9812 GOVD_LASTPRIVATE | GOVD_SEEN);
e01d41e5
JJ
9813 if (outer->region_type == ORT_COMBINED_PARALLEL
9814 && outer->outer_context
9815 && (outer->outer_context->region_type
9816 == ORT_WORKSHARE)
9817 && outer->outer_context->combined_loop)
9818 {
9819 outer = outer->outer_context;
9820 n = splay_tree_lookup (outer->variables,
9821 (splay_tree_key)decl);
9822 if (omp_check_private (outer, decl, false))
9823 outer = NULL;
9824 else if (n == NULL
9825 || ((n->value & GOVD_DATA_SHARE_CLASS)
9826 == 0))
9827 omp_add_variable (outer, decl,
9828 GOVD_LASTPRIVATE
9829 | GOVD_SEEN);
9830 else
9831 outer = NULL;
9832 }
9833 if (outer && outer->outer_context
9834 && (outer->outer_context->region_type
9835 == ORT_COMBINED_TEAMS))
9836 {
9837 outer = outer->outer_context;
9838 n = splay_tree_lookup (outer->variables,
9839 (splay_tree_key)decl);
9840 if (n == NULL
9841 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9842 omp_add_variable (outer, decl,
9843 GOVD_SHARED | GOVD_SEEN);
9844 else
9845 outer = NULL;
9846 }
9847 if (outer && outer->outer_context)
cbdfbde8
JJ
9848 omp_notice_variable (outer->outer_context, decl,
9849 true);
9850 }
41b37d5e
JJ
9851 }
9852 }
74bf76ed
JJ
9853 }
9854 else
9855 {
9856 bool lastprivate
9857 = (!has_decl_expr
b4c3a85b 9858 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
41b37d5e
JJ
9859 struct gimplify_omp_ctx *outer
9860 = gimplify_omp_ctxp->outer_context;
9861 if (outer && lastprivate)
56ad0e38 9862 {
41b37d5e
JJ
9863 if (outer->region_type == ORT_WORKSHARE
9864 && outer->combined_loop)
9865 {
499c20bb
JJ
9866 n = splay_tree_lookup (outer->variables,
9867 (splay_tree_key)decl);
9868 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9869 {
9870 lastprivate = false;
9871 outer = NULL;
9872 }
9873 else if (outer->outer_context
9874 && (outer->outer_context->region_type
9875 == ORT_COMBINED_PARALLEL))
41b37d5e
JJ
9876 outer = outer->outer_context;
9877 else if (omp_check_private (outer, decl, false))
9878 outer = NULL;
9879 }
d9a6bd32
JJ
9880 else if (((outer->region_type & ORT_TASK) != 0)
9881 && outer->combined_loop
9882 && !omp_check_private (gimplify_omp_ctxp,
9883 decl, false))
9884 ;
41b37d5e 9885 else if (outer->region_type != ORT_COMBINED_PARALLEL)
84311083
JJ
9886 {
9887 omp_notice_variable (outer, decl, true);
9888 outer = NULL;
9889 }
41b37d5e 9890 if (outer)
56ad0e38 9891 {
cbdfbde8
JJ
9892 n = splay_tree_lookup (outer->variables,
9893 (splay_tree_key)decl);
9894 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9895 {
9896 omp_add_variable (outer, decl,
9897 GOVD_LASTPRIVATE | GOVD_SEEN);
e01d41e5
JJ
9898 if (outer->region_type == ORT_COMBINED_PARALLEL
9899 && outer->outer_context
9900 && (outer->outer_context->region_type
9901 == ORT_WORKSHARE)
9902 && outer->outer_context->combined_loop)
9903 {
9904 outer = outer->outer_context;
9905 n = splay_tree_lookup (outer->variables,
9906 (splay_tree_key)decl);
9907 if (omp_check_private (outer, decl, false))
9908 outer = NULL;
9909 else if (n == NULL
9910 || ((n->value & GOVD_DATA_SHARE_CLASS)
9911 == 0))
9912 omp_add_variable (outer, decl,
9913 GOVD_LASTPRIVATE
9914 | GOVD_SEEN);
9915 else
9916 outer = NULL;
9917 }
9918 if (outer && outer->outer_context
9919 && (outer->outer_context->region_type
9920 == ORT_COMBINED_TEAMS))
9921 {
9922 outer = outer->outer_context;
9923 n = splay_tree_lookup (outer->variables,
9924 (splay_tree_key)decl);
9925 if (n == NULL
9926 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9927 omp_add_variable (outer, decl,
9928 GOVD_SHARED | GOVD_SEEN);
9929 else
9930 outer = NULL;
9931 }
9932 if (outer && outer->outer_context)
cbdfbde8
JJ
9933 omp_notice_variable (outer->outer_context, decl,
9934 true);
9935 }
56ad0e38
JJ
9936 }
9937 }
41b37d5e 9938
74bf76ed
JJ
9939 c = build_omp_clause (input_location,
9940 lastprivate ? OMP_CLAUSE_LASTPRIVATE
9941 : OMP_CLAUSE_PRIVATE);
9942 OMP_CLAUSE_DECL (c) = decl;
9943 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
f7468577 9944 OMP_FOR_CLAUSES (for_stmt) = c;
74bf76ed
JJ
9945 omp_add_variable (gimplify_omp_ctxp, decl,
9946 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
f7468577 9947 | GOVD_EXPLICIT | GOVD_SEEN);
74bf76ed
JJ
9948 c = NULL_TREE;
9949 }
9950 }
f7468577 9951 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
a68ab351
JJ
9952 omp_notice_variable (gimplify_omp_ctxp, decl, true);
9953 else
9954 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
9955
9956 /* If DECL is not a gimple register, create a temporary variable to act
9957 as an iteration counter. This is valid, since DECL cannot be
56ad0e38
JJ
9958 modified in the body of the loop. Similarly for any iteration vars
9959 in simd with collapse > 1 where the iterator vars must be
9960 lastprivate. */
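      /* Rough illustration (assumed example, names hypothetical): for
	   #pragma omp simd collapse(2)
	   for (i = 0; i < n; i++)
	     for (j = 0; j < m; j++) ...
	 the user-visible i and j stay lastprivate while fresh temporaries
	 i.0 and j.0 drive the generated loops; the decl = var assignments
	 emitted into FOR_BODY below copy the temporaries back into the
	 original iterators.  */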
acf0174b
JJ
9961 if (orig_for_stmt != for_stmt)
9962 var = decl;
56ad0e38 9963 else if (!is_gimple_reg (decl)
d9a6bd32
JJ
9964 || (ort == ORT_SIMD
9965 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
a68ab351 9966 {
ab62397a
JJ
9967 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9968 /* Make sure omp_add_variable is not called on it prematurely.
9969 We call it ourselves a few lines later. */
9970 gimplify_omp_ctxp = NULL;
a68ab351 9971 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
ab62397a 9972 gimplify_omp_ctxp = ctx;
726a989a 9973 TREE_OPERAND (t, 0) = var;
b8698a0f 9974
726a989a 9975 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
953ff289 9976
d9a6bd32
JJ
9977 if (ort == ORT_SIMD
9978 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
f7468577
JJ
9979 {
9980 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
9981 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
9982 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
9983 OMP_CLAUSE_DECL (c2) = var;
9984 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
9985 OMP_FOR_CLAUSES (for_stmt) = c2;
9986 omp_add_variable (gimplify_omp_ctxp, var,
9987 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
9988 if (c == NULL_TREE)
9989 {
9990 c = c2;
9991 c2 = NULL_TREE;
9992 }
9993 }
9994 else
9995 omp_add_variable (gimplify_omp_ctxp, var,
9996 GOVD_PRIVATE | GOVD_SEEN);
a68ab351
JJ
9997 }
9998 else
9999 var = decl;
07beea0d 10000
32e8bb8e 10001 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
381cdae4 10002 is_gimple_val, fb_rvalue, false);
32e8bb8e 10003 ret = MIN (ret, tret);
726a989a
RB
10004 if (ret == GS_ERROR)
10005 return ret;
953ff289 10006
726a989a 10007 /* Handle OMP_FOR_COND. */
a68ab351
JJ
10008 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10009 gcc_assert (COMPARISON_CLASS_P (t));
726a989a 10010 gcc_assert (TREE_OPERAND (t, 0) == decl);
b56b9fe3 10011
32e8bb8e 10012 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
381cdae4 10013 is_gimple_val, fb_rvalue, false);
32e8bb8e 10014 ret = MIN (ret, tret);
917948d3 10015
726a989a 10016 /* Handle OMP_FOR_INCR. */
a68ab351 10017 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
953ff289
DN
10018 switch (TREE_CODE (t))
10019 {
a68ab351
JJ
10020 case PREINCREMENT_EXPR:
10021 case POSTINCREMENT_EXPR:
c02065fc
AH
10022 {
10023 tree decl = TREE_OPERAND (t, 0);
da6f124d
JJ
10024 /* c_omp_for_incr_canonicalize_ptr() should have been
10025 called to massage things appropriately. */
c02065fc
AH
10026 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
10027
10028 if (orig_for_stmt != for_stmt)
10029 break;
10030 t = build_int_cst (TREE_TYPE (decl), 1);
10031 if (c)
10032 OMP_CLAUSE_LINEAR_STEP (c) = t;
10033 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
10034 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
10035 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
acf0174b 10036 break;
c02065fc 10037 }
a68ab351
JJ
10038
10039 case PREDECREMENT_EXPR:
10040 case POSTDECREMENT_EXPR:
da6f124d
JJ
10041 /* c_omp_for_incr_canonicalize_ptr() should have been
10042 called to massage things appropriately. */
10043 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
acf0174b
JJ
10044 if (orig_for_stmt != for_stmt)
10045 break;
a68ab351 10046 t = build_int_cst (TREE_TYPE (decl), -1);
74bf76ed
JJ
10047 if (c)
10048 OMP_CLAUSE_LINEAR_STEP (c) = t;
a68ab351 10049 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
726a989a 10050 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
a68ab351
JJ
10051 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
10052 break;
10053
726a989a
RB
10054 case MODIFY_EXPR:
10055 gcc_assert (TREE_OPERAND (t, 0) == decl);
10056 TREE_OPERAND (t, 0) = var;
a68ab351 10057
726a989a 10058 t = TREE_OPERAND (t, 1);
a68ab351 10059 switch (TREE_CODE (t))
953ff289 10060 {
a68ab351
JJ
10061 case PLUS_EXPR:
10062 if (TREE_OPERAND (t, 1) == decl)
10063 {
10064 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
10065 TREE_OPERAND (t, 0) = var;
10066 break;
10067 }
10068
10069 /* Fallthru. */
10070 case MINUS_EXPR:
10071 case POINTER_PLUS_EXPR:
10072 gcc_assert (TREE_OPERAND (t, 0) == decl);
917948d3 10073 TREE_OPERAND (t, 0) = var;
953ff289 10074 break;
a68ab351
JJ
10075 default:
10076 gcc_unreachable ();
953ff289 10077 }
917948d3 10078
32e8bb8e 10079 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
381cdae4 10080 is_gimple_val, fb_rvalue, false);
32e8bb8e 10081 ret = MIN (ret, tret);
74bf76ed
JJ
10082 if (c)
10083 {
da6f124d
JJ
10084 tree step = TREE_OPERAND (t, 1);
10085 tree stept = TREE_TYPE (decl);
10086 if (POINTER_TYPE_P (stept))
10087 stept = sizetype;
10088 step = fold_convert (stept, step);
74bf76ed 10089 if (TREE_CODE (t) == MINUS_EXPR)
da6f124d
JJ
10090 step = fold_build1 (NEGATE_EXPR, stept, step);
10091 OMP_CLAUSE_LINEAR_STEP (c) = step;
10092 if (step != TREE_OPERAND (t, 1))
74bf76ed 10093 {
74bf76ed
JJ
10094 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
10095 &for_pre_body, NULL,
381cdae4 10096 is_gimple_val, fb_rvalue, false);
74bf76ed
JJ
10097 ret = MIN (ret, tret);
10098 }
10099 }
953ff289 10100 break;
a68ab351 10101
953ff289
DN
10102 default:
10103 gcc_unreachable ();
10104 }
10105
f7468577
JJ
10106 if (c2)
10107 {
10108 gcc_assert (c);
10109 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
10110 }
10111
02889d23 10112 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
a68ab351 10113 {
a68ab351 10114 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
f7468577
JJ
10115 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10116 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
10117 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10118 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
10119 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
10120 && OMP_CLAUSE_DECL (c) == decl)
726a989a 10121 {
d9a6bd32
JJ
10122 if (is_doacross && (collapse == 1 || i >= collapse))
10123 t = var;
10124 else
10125 {
10126 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10127 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
10128 gcc_assert (TREE_OPERAND (t, 0) == var);
10129 t = TREE_OPERAND (t, 1);
10130 gcc_assert (TREE_CODE (t) == PLUS_EXPR
10131 || TREE_CODE (t) == MINUS_EXPR
10132 || TREE_CODE (t) == POINTER_PLUS_EXPR);
10133 gcc_assert (TREE_OPERAND (t, 0) == var);
10134 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
10135 is_doacross ? var : decl,
10136 TREE_OPERAND (t, 1));
10137 }
f7468577
JJ
10138 gimple_seq *seq;
10139 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
10140 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
10141 else
10142 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
10143 gimplify_assign (decl, t, seq);
a68ab351
JJ
10144 }
10145 }
953ff289
DN
10146 }
10147
74bf76ed
JJ
10148 BITMAP_FREE (has_decl_expr);
10149
d9a6bd32
JJ
10150 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10151 {
10152 push_gimplify_context ();
10153 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
10154 {
10155 OMP_FOR_BODY (orig_for_stmt)
10156 = build3 (BIND_EXPR, void_type_node, NULL,
10157 OMP_FOR_BODY (orig_for_stmt), NULL);
10158 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
10159 }
10160 }
10161
10162 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
10163 &for_body);
10164
10165 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10166 {
10167 if (gimple_code (g) == GIMPLE_BIND)
10168 pop_gimplify_context (g);
10169 else
10170 pop_gimplify_context (NULL);
10171 }
726a989a 10172
acf0174b
JJ
10173 if (orig_for_stmt != for_stmt)
10174 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10175 {
10176 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10177 decl = TREE_OPERAND (t, 0);
d9a6bd32
JJ
10178 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10179 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10180 gimplify_omp_ctxp = ctx->outer_context;
acf0174b 10181 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
d9a6bd32 10182 gimplify_omp_ctxp = ctx;
acf0174b
JJ
10183 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
10184 TREE_OPERAND (t, 0) = var;
10185 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10186 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
10187 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
10188 }
10189
1a80d6b8
JJ
10190 gimplify_adjust_omp_clauses (pre_p, for_body,
10191 &OMP_FOR_CLAUSES (orig_for_stmt),
d9a6bd32 10192 TREE_CODE (orig_for_stmt));
953ff289 10193
74bf76ed 10194 int kind;
acf0174b 10195 switch (TREE_CODE (orig_for_stmt))
74bf76ed
JJ
10196 {
10197 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
10198 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
c02065fc 10199 case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
9a771876 10200 case CILK_FOR: kind = GF_OMP_FOR_KIND_CILKFOR; break;
acf0174b 10201 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
d9a6bd32 10202 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
41dbbb37 10203 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
74bf76ed
JJ
10204 default:
10205 gcc_unreachable ();
10206 }
acf0174b 10207 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
726a989a
RB
10208 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
10209 for_pre_body);
acf0174b
JJ
10210 if (orig_for_stmt != for_stmt)
10211 gimple_omp_for_set_combined_p (gfor, true);
10212 if (gimplify_omp_ctxp
10213 && (gimplify_omp_ctxp->combined_loop
10214 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
10215 && gimplify_omp_ctxp->outer_context
10216 && gimplify_omp_ctxp->outer_context->combined_loop)))
10217 {
10218 gimple_omp_for_set_combined_into_p (gfor, true);
10219 if (gimplify_omp_ctxp->combined_loop)
10220 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
10221 else
10222 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
10223 }
726a989a
RB
10224
10225 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10226 {
10227 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10228 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
10229 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
10230 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10231 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
10232 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
10233 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10234 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
10235 }
10236
d9a6bd32
JJ
10237 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
10238 constructs with GIMPLE_OMP_TASK sandwiched in between them.
10239 The outer taskloop stands for computing the number of iterations,
10240 counts for collapsed loops and holding taskloop specific clauses.
10241 The task construct stands for the effect of data sharing on the
10242 explicit task it creates and the inner taskloop stands for expansion
10243 of the static loop inside of the explicit task construct. */
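  /* Illustrative sketch (not from the original sources): for
       #pragma omp taskloop grainsize(4) lastprivate(x)
       for (i = 0; i < n; i++) ...
     the nesting built here is roughly
       GIMPLE_OMP_FOR (taskloop; grainsize/num_tasks/nogroup, collapse)
	 GIMPLE_OMP_TASK (shared/firstprivate/if/final/... clauses)
	   GIMPLE_OMP_FOR (taskloop; lastprivate/private, collapse)
	     loop body
     with the clause splitting done by the switch below.  */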
10244 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10245 {
10246 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
10247 tree task_clauses = NULL_TREE;
10248 tree c = *gfor_clauses_ptr;
10249 tree *gtask_clauses_ptr = &task_clauses;
10250 tree outer_for_clauses = NULL_TREE;
10251 tree *gforo_clauses_ptr = &outer_for_clauses;
10252 for (; c; c = OMP_CLAUSE_CHAIN (c))
10253 switch (OMP_CLAUSE_CODE (c))
10254 {
10255 /* These clauses are allowed on task, move them there. */
10256 case OMP_CLAUSE_SHARED:
10257 case OMP_CLAUSE_FIRSTPRIVATE:
10258 case OMP_CLAUSE_DEFAULT:
10259 case OMP_CLAUSE_IF:
10260 case OMP_CLAUSE_UNTIED:
10261 case OMP_CLAUSE_FINAL:
10262 case OMP_CLAUSE_MERGEABLE:
10263 case OMP_CLAUSE_PRIORITY:
10264 *gtask_clauses_ptr = c;
10265 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10266 break;
10267 case OMP_CLAUSE_PRIVATE:
10268 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
10269 {
10270 /* We want private on outer for and firstprivate
10271 on task. */
10272 *gtask_clauses_ptr
10273 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10274 OMP_CLAUSE_FIRSTPRIVATE);
10275 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10276 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
10277 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10278 *gforo_clauses_ptr = c;
10279 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10280 }
10281 else
10282 {
10283 *gtask_clauses_ptr = c;
10284 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10285 }
10286 break;
10287 /* These clauses go into outer taskloop clauses. */
10288 case OMP_CLAUSE_GRAINSIZE:
10289 case OMP_CLAUSE_NUM_TASKS:
10290 case OMP_CLAUSE_NOGROUP:
10291 *gforo_clauses_ptr = c;
10292 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10293 break;
10294 /* Taskloop clause we duplicate on both taskloops. */
10295 case OMP_CLAUSE_COLLAPSE:
10296 *gfor_clauses_ptr = c;
10297 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10298 *gforo_clauses_ptr = copy_node (c);
10299 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
10300 break;
10301 /* For lastprivate, keep the clause on inner taskloop, and add
10302 a shared clause on task. If the same decl is also firstprivate,
10303 add also firstprivate clause on the inner taskloop. */
10304 case OMP_CLAUSE_LASTPRIVATE:
10305 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
10306 {
10307 /* For taskloop C++ lastprivate IVs, we want:
10308 1) private on outer taskloop
10309 2) firstprivate and shared on task
10310 3) lastprivate on inner taskloop */
10311 *gtask_clauses_ptr
10312 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10313 OMP_CLAUSE_FIRSTPRIVATE);
10314 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10315 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
10316 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10317 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
10318 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10319 OMP_CLAUSE_PRIVATE);
10320 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
10321 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
10322 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
10323 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
10324 }
10325 *gfor_clauses_ptr = c;
10326 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10327 *gtask_clauses_ptr
10328 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
10329 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10330 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
10331 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
10332 gtask_clauses_ptr
10333 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10334 break;
10335 default:
10336 gcc_unreachable ();
10337 }
10338 *gfor_clauses_ptr = NULL_TREE;
10339 *gtask_clauses_ptr = NULL_TREE;
10340 *gforo_clauses_ptr = NULL_TREE;
10341 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
10342 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
10343 NULL_TREE, NULL_TREE, NULL_TREE);
10344 gimple_omp_task_set_taskloop_p (g, true);
10345 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
10346 gomp_for *gforo
10347 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
10348 gimple_omp_for_collapse (gfor),
10349 gimple_omp_for_pre_body (gfor));
10350 gimple_omp_for_set_pre_body (gfor, NULL);
10351 gimple_omp_for_set_combined_p (gforo, true);
10352 gimple_omp_for_set_combined_into_p (gfor, true);
10353 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
10354 {
eebc5e2d
JJ
10355 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
10356 tree v = create_tmp_var (type);
10357 gimple_omp_for_set_index (gforo, i, v);
d9a6bd32
JJ
10358 t = unshare_expr (gimple_omp_for_initial (gfor, i));
10359 gimple_omp_for_set_initial (gforo, i, t);
10360 gimple_omp_for_set_cond (gforo, i,
10361 gimple_omp_for_cond (gfor, i));
10362 t = unshare_expr (gimple_omp_for_final (gfor, i));
10363 gimple_omp_for_set_final (gforo, i, t);
10364 t = unshare_expr (gimple_omp_for_incr (gfor, i));
eebc5e2d
JJ
10365 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
10366 TREE_OPERAND (t, 0) = v;
d9a6bd32 10367 gimple_omp_for_set_incr (gforo, i, t);
eebc5e2d
JJ
10368 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
10369 OMP_CLAUSE_DECL (t) = v;
10370 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
10371 gimple_omp_for_set_clauses (gforo, t);
d9a6bd32
JJ
10372 }
10373 gimplify_seq_add_stmt (pre_p, gforo);
10374 }
10375 else
10376 gimplify_seq_add_stmt (pre_p, gfor);
74bf76ed
JJ
10377 if (ret != GS_ALL_DONE)
10378 return GS_ERROR;
10379 *expr_p = NULL_TREE;
10380 return GS_ALL_DONE;
953ff289
DN
10381}
10382
e01d41e5
JJ
10383/* Helper function of optimize_target_teams, find OMP_TEAMS inside
10384 of OMP_TARGET's body. */
10385
10386static tree
10387find_omp_teams (tree *tp, int *walk_subtrees, void *)
10388{
10389 *walk_subtrees = 0;
10390 switch (TREE_CODE (*tp))
10391 {
10392 case OMP_TEAMS:
10393 return *tp;
10394 case BIND_EXPR:
10395 case STATEMENT_LIST:
10396 *walk_subtrees = 1;
10397 break;
10398 default:
10399 break;
10400 }
10401 return NULL_TREE;
10402}
10403
10404/* Helper function of optimize_target_teams, determine if the expression
10405 can be computed safely before the target construct on the host. */
10406
10407static tree
10408computable_teams_clause (tree *tp, int *walk_subtrees, void *)
10409{
10410 splay_tree_node n;
10411
10412 if (TYPE_P (*tp))
10413 {
10414 *walk_subtrees = 0;
10415 return NULL_TREE;
10416 }
10417 switch (TREE_CODE (*tp))
10418 {
10419 case VAR_DECL:
10420 case PARM_DECL:
10421 case RESULT_DECL:
10422 *walk_subtrees = 0;
10423 if (error_operand_p (*tp)
10424 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
10425 || DECL_HAS_VALUE_EXPR_P (*tp)
10426 || DECL_THREAD_LOCAL_P (*tp)
10427 || TREE_SIDE_EFFECTS (*tp)
10428 || TREE_THIS_VOLATILE (*tp))
10429 return *tp;
10430 if (is_global_var (*tp)
10431 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
10432 || lookup_attribute ("omp declare target link",
10433 DECL_ATTRIBUTES (*tp))))
10434 return *tp;
b4c3a85b
JJ
10435 if (VAR_P (*tp)
10436 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
10437 && !is_global_var (*tp)
10438 && decl_function_context (*tp) == current_function_decl)
10439 return *tp;
e01d41e5
JJ
10440 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
10441 (splay_tree_key) *tp);
10442 if (n == NULL)
10443 {
10444 if (gimplify_omp_ctxp->target_map_scalars_firstprivate)
10445 return NULL_TREE;
10446 return *tp;
10447 }
10448 else if (n->value & GOVD_LOCAL)
10449 return *tp;
10450 else if (n->value & GOVD_FIRSTPRIVATE)
10451 return NULL_TREE;
10452 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
10453 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
10454 return NULL_TREE;
10455 return *tp;
10456 case INTEGER_CST:
10457 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
10458 return *tp;
10459 return NULL_TREE;
10460 case TARGET_EXPR:
10461 if (TARGET_EXPR_INITIAL (*tp)
10462 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
10463 return *tp;
10464 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
10465 walk_subtrees, NULL);
 10466     /* Allow some reasonable subset of integral arithmetic.  */
10467 case PLUS_EXPR:
10468 case MINUS_EXPR:
10469 case MULT_EXPR:
10470 case TRUNC_DIV_EXPR:
10471 case CEIL_DIV_EXPR:
10472 case FLOOR_DIV_EXPR:
10473 case ROUND_DIV_EXPR:
10474 case TRUNC_MOD_EXPR:
10475 case CEIL_MOD_EXPR:
10476 case FLOOR_MOD_EXPR:
10477 case ROUND_MOD_EXPR:
10478 case RDIV_EXPR:
10479 case EXACT_DIV_EXPR:
10480 case MIN_EXPR:
10481 case MAX_EXPR:
10482 case LSHIFT_EXPR:
10483 case RSHIFT_EXPR:
10484 case BIT_IOR_EXPR:
10485 case BIT_XOR_EXPR:
10486 case BIT_AND_EXPR:
10487 case NEGATE_EXPR:
10488 case ABS_EXPR:
10489 case BIT_NOT_EXPR:
10490 case NON_LVALUE_EXPR:
10491 CASE_CONVERT:
10492 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
10493 return *tp;
10494 return NULL_TREE;
10495 /* And disallow anything else, except for comparisons. */
10496 default:
10497 if (COMPARISON_CLASS_P (*tp))
10498 return NULL_TREE;
10499 return *tp;
10500 }
10501}
10502
10503/* Try to determine if the num_teams and/or thread_limit expressions
10504 can have their values determined already before entering the
10505 target construct.
 10506   INTEGER_CSTs trivially can; so can integral decls that are
 10507   firstprivate (explicitly or implicitly) or that are explicitly
 10508   map(always, to:) or map(always, tofrom:) on the target region,
 10509   as well as expressions involving simple arithmetic on those.
 10510   Function calls are not OK, nor is dereferencing anything, etc.
10511 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
10512 EXPR based on what we find:
10513 0 stands for clause not specified at all, use implementation default
10514 -1 stands for value that can't be determined easily before entering
10515 the target construct.
 10516   If a teams construct is not present at all, use 1 for num_teams
 10517   and 0 for thread_limit (only one team is involved, and the thread
 10518   limit is implementation defined).  */
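/* A hypothetical example for orientation: given
     #pragma omp target
     #pragma omp teams num_teams(n + 1) thread_limit(foo ())
   with n an integral scalar that is (implicitly) firstprivate on the
   target region, the num_teams expression n + 1 can be evaluated on the
   host before entering the construct, while thread_limit calls a
   function and is therefore recorded as -1.  */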
10519
10520static void
10521optimize_target_teams (tree target, gimple_seq *pre_p)
10522{
10523 tree body = OMP_BODY (target);
10524 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
10525 tree num_teams = integer_zero_node;
10526 tree thread_limit = integer_zero_node;
10527 location_t num_teams_loc = EXPR_LOCATION (target);
10528 location_t thread_limit_loc = EXPR_LOCATION (target);
10529 tree c, *p, expr;
10530 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
10531
10532 if (teams == NULL_TREE)
10533 num_teams = integer_one_node;
10534 else
10535 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
10536 {
10537 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
10538 {
10539 p = &num_teams;
10540 num_teams_loc = OMP_CLAUSE_LOCATION (c);
10541 }
10542 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
10543 {
10544 p = &thread_limit;
10545 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
10546 }
10547 else
10548 continue;
10549 expr = OMP_CLAUSE_OPERAND (c, 0);
10550 if (TREE_CODE (expr) == INTEGER_CST)
10551 {
10552 *p = expr;
10553 continue;
10554 }
10555 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
10556 {
10557 *p = integer_minus_one_node;
10558 continue;
10559 }
10560 *p = expr;
10561 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
381cdae4 10562 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
e01d41e5
JJ
10563 == GS_ERROR)
10564 {
10565 gimplify_omp_ctxp = target_ctx;
10566 *p = integer_minus_one_node;
10567 continue;
10568 }
10569 gimplify_omp_ctxp = target_ctx;
10570 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
10571 OMP_CLAUSE_OPERAND (c, 0) = *p;
10572 }
10573 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
10574 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
10575 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
10576 OMP_TARGET_CLAUSES (target) = c;
10577 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
10578 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
10579 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
10580 OMP_TARGET_CLAUSES (target) = c;
10581}
10582
41dbbb37 10583/* Gimplify the gross structure of several OMP constructs. */
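/* (Editorial note: as the switch below shows, "several" means
   OMP_SECTIONS, OMP_SINGLE, OMP_TARGET, OMP_TARGET_DATA, OMP_TEAMS and
   the OpenACC parallel/kernels/data/host_data constructs; each gets the
   matching GIMPLE_OMP_* tuple wrapped around its gimplified body.)  */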
953ff289 10584
726a989a
RB
10585static void
10586gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
953ff289 10587{
726a989a 10588 tree expr = *expr_p;
355fe088 10589 gimple *stmt;
726a989a 10590 gimple_seq body = NULL;
41dbbb37 10591 enum omp_region_type ort;
953ff289 10592
acf0174b
JJ
10593 switch (TREE_CODE (expr))
10594 {
10595 case OMP_SECTIONS:
10596 case OMP_SINGLE:
41dbbb37 10597 ort = ORT_WORKSHARE;
acf0174b 10598 break;
d9a6bd32
JJ
10599 case OMP_TARGET:
10600 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
10601 break;
41dbbb37 10602 case OACC_KERNELS:
182190f2
NS
10603 ort = ORT_ACC_KERNELS;
10604 break;
41dbbb37 10605 case OACC_PARALLEL:
182190f2 10606 ort = ORT_ACC_PARALLEL;
acf0174b 10607 break;
41dbbb37 10608 case OACC_DATA:
182190f2
NS
10609 ort = ORT_ACC_DATA;
10610 break;
acf0174b
JJ
10611 case OMP_TARGET_DATA:
10612 ort = ORT_TARGET_DATA;
10613 break;
10614 case OMP_TEAMS:
41b37d5e 10615 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
acf0174b 10616 break;
37d5ad46
JB
10617 case OACC_HOST_DATA:
10618 ort = ORT_ACC_HOST_DATA;
10619 break;
acf0174b
JJ
10620 default:
10621 gcc_unreachable ();
10622 }
d9a6bd32
JJ
10623 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
10624 TREE_CODE (expr));
e01d41e5
JJ
10625 if (TREE_CODE (expr) == OMP_TARGET)
10626 optimize_target_teams (expr, pre_p);
d9a6bd32 10627 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
acf0174b 10628 {
45852dcc 10629 push_gimplify_context ();
355fe088 10630 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
acf0174b
JJ
10631 if (gimple_code (g) == GIMPLE_BIND)
10632 pop_gimplify_context (g);
10633 else
10634 pop_gimplify_context (NULL);
182190f2 10635 if ((ort & ORT_TARGET_DATA) != 0)
acf0174b 10636 {
41dbbb37
TS
10637 enum built_in_function end_ix;
10638 switch (TREE_CODE (expr))
10639 {
10640 case OACC_DATA:
37d5ad46 10641 case OACC_HOST_DATA:
41dbbb37
TS
10642 end_ix = BUILT_IN_GOACC_DATA_END;
10643 break;
10644 case OMP_TARGET_DATA:
10645 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
10646 break;
10647 default:
10648 gcc_unreachable ();
10649 }
10650 tree fn = builtin_decl_explicit (end_ix);
acf0174b 10651 g = gimple_build_call (fn, 0);
41dbbb37 10652 gimple_seq cleanup = NULL;
acf0174b
JJ
10653 gimple_seq_add_stmt (&cleanup, g);
10654 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
10655 body = NULL;
10656 gimple_seq_add_stmt (&body, g);
10657 }
10658 }
10659 else
10660 gimplify_and_add (OMP_BODY (expr), &body);
1a80d6b8
JJ
10661 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
10662 TREE_CODE (expr));
953ff289 10663
acf0174b
JJ
10664 switch (TREE_CODE (expr))
10665 {
41dbbb37
TS
10666 case OACC_DATA:
10667 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
10668 OMP_CLAUSES (expr));
10669 break;
10670 case OACC_KERNELS:
10671 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
10672 OMP_CLAUSES (expr));
10673 break;
37d5ad46
JB
10674 case OACC_HOST_DATA:
10675 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
10676 OMP_CLAUSES (expr));
10677 break;
41dbbb37
TS
10678 case OACC_PARALLEL:
10679 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
10680 OMP_CLAUSES (expr));
10681 break;
acf0174b
JJ
10682 case OMP_SECTIONS:
10683 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
10684 break;
10685 case OMP_SINGLE:
10686 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
10687 break;
10688 case OMP_TARGET:
10689 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
10690 OMP_CLAUSES (expr));
10691 break;
10692 case OMP_TARGET_DATA:
10693 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
10694 OMP_CLAUSES (expr));
10695 break;
10696 case OMP_TEAMS:
10697 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
10698 break;
10699 default:
10700 gcc_unreachable ();
10701 }
10702
10703 gimplify_seq_add_stmt (pre_p, stmt);
10704 *expr_p = NULL_TREE;
10705}
10706
41dbbb37
TS
 10707/* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
 10708   target update, target enter data and target exit data constructs.  */
acf0174b
JJ
10709
10710static void
10711gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
10712{
b811915d 10713 tree expr = *expr_p;
41dbbb37 10714 int kind;
538dd0b7 10715 gomp_target *stmt;
182190f2 10716 enum omp_region_type ort = ORT_WORKSHARE;
acf0174b 10717
41dbbb37
TS
10718 switch (TREE_CODE (expr))
10719 {
10720 case OACC_ENTER_DATA:
41dbbb37 10721 case OACC_EXIT_DATA:
41dbbb37 10722 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
182190f2 10723 ort = ORT_ACC;
41dbbb37
TS
10724 break;
10725 case OACC_UPDATE:
41dbbb37 10726 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
182190f2 10727 ort = ORT_ACC;
41dbbb37
TS
10728 break;
10729 case OMP_TARGET_UPDATE:
41dbbb37
TS
10730 kind = GF_OMP_TARGET_KIND_UPDATE;
10731 break;
d9a6bd32
JJ
10732 case OMP_TARGET_ENTER_DATA:
10733 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
10734 break;
10735 case OMP_TARGET_EXIT_DATA:
10736 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
10737 break;
41dbbb37
TS
10738 default:
10739 gcc_unreachable ();
10740 }
b811915d 10741 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
182190f2 10742 ort, TREE_CODE (expr));
1a80d6b8 10743 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
d9a6bd32 10744 TREE_CODE (expr));
b811915d 10745 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
726a989a
RB
10746
10747 gimplify_seq_add_stmt (pre_p, stmt);
acf0174b 10748 *expr_p = NULL_TREE;
953ff289
DN
10749}
10750
10751/* A subroutine of gimplify_omp_atomic. The front end is supposed to have
b8698a0f 10752 stabilized the lhs of the atomic operation as *ADDR. Return true if
953ff289
DN
10753 EXPR is this stabilized form. */
10754
10755static bool
a509ebb5 10756goa_lhs_expr_p (tree expr, tree addr)
953ff289
DN
10757{
10758 /* Also include casts to other type variants. The C front end is fond
b8698a0f 10759 of adding these for e.g. volatile variables. This is like
953ff289 10760 STRIP_TYPE_NOPS but includes the main variant lookup. */
9600efe1 10761 STRIP_USELESS_TYPE_CONVERSION (expr);
953ff289 10762
78e47463
JJ
10763 if (TREE_CODE (expr) == INDIRECT_REF)
10764 {
10765 expr = TREE_OPERAND (expr, 0);
10766 while (expr != addr
1043771b 10767 && (CONVERT_EXPR_P (expr)
78e47463
JJ
10768 || TREE_CODE (expr) == NON_LVALUE_EXPR)
10769 && TREE_CODE (expr) == TREE_CODE (addr)
9600efe1 10770 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
78e47463
JJ
10771 {
10772 expr = TREE_OPERAND (expr, 0);
10773 addr = TREE_OPERAND (addr, 0);
10774 }
251923f5
JJ
10775 if (expr == addr)
10776 return true;
71458b8a
JJ
10777 return (TREE_CODE (addr) == ADDR_EXPR
10778 && TREE_CODE (expr) == ADDR_EXPR
251923f5 10779 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
78e47463 10780 }
953ff289
DN
10781 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
10782 return true;
10783 return false;
10784}
10785
ad19c4be
EB
10786/* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
10787 expression does not involve the lhs, evaluate it into a temporary.
10788 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
10789 or -1 if an error was encountered. */
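/* For illustration (assumed example, not part of the original comment):
   when gimplifying
     #pragma omp atomic
     x = x + bar ();
   the rhs x + bar () is walked here; the occurrence of x is replaced by
   the temporary that GIMPLE_OMP_ATOMIC_LOAD will define, while bar ()
   is pre-evaluated into its own temporary appended to PRE_P.  */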
953ff289
DN
10790
10791static int
726a989a
RB
10792goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
10793 tree lhs_var)
953ff289
DN
10794{
10795 tree expr = *expr_p;
10796 int saw_lhs;
10797
10798 if (goa_lhs_expr_p (expr, lhs_addr))
10799 {
10800 *expr_p = lhs_var;
10801 return 1;
10802 }
10803 if (is_gimple_val (expr))
10804 return 0;
b8698a0f 10805
953ff289
DN
10806 saw_lhs = 0;
10807 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
10808 {
10809 case tcc_binary:
067dd3c9 10810 case tcc_comparison:
726a989a
RB
10811 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
10812 lhs_var);
191816a3 10813 /* FALLTHRU */
953ff289 10814 case tcc_unary:
726a989a
RB
10815 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
10816 lhs_var);
953ff289 10817 break;
067dd3c9
JJ
10818 case tcc_expression:
10819 switch (TREE_CODE (expr))
10820 {
10821 case TRUTH_ANDIF_EXPR:
10822 case TRUTH_ORIF_EXPR:
f2b11865
JJ
10823 case TRUTH_AND_EXPR:
10824 case TRUTH_OR_EXPR:
10825 case TRUTH_XOR_EXPR:
067dd3c9
JJ
10826 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
10827 lhs_addr, lhs_var);
191816a3 10828 /* FALLTHRU */
f2b11865 10829 case TRUTH_NOT_EXPR:
067dd3c9
JJ
10830 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
10831 lhs_addr, lhs_var);
10832 break;
4063e61b
JM
10833 case COMPOUND_EXPR:
10834 /* Break out any preevaluations from cp_build_modify_expr. */
10835 for (; TREE_CODE (expr) == COMPOUND_EXPR;
10836 expr = TREE_OPERAND (expr, 1))
10837 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
10838 *expr_p = expr;
10839 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
067dd3c9
JJ
10840 default:
10841 break;
10842 }
10843 break;
953ff289
DN
10844 default:
10845 break;
10846 }
10847
10848 if (saw_lhs == 0)
10849 {
10850 enum gimplify_status gs;
10851 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
10852 if (gs != GS_ALL_DONE)
10853 saw_lhs = -1;
10854 }
10855
10856 return saw_lhs;
10857}
10858
953ff289
DN
10859/* Gimplify an OMP_ATOMIC statement. */
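/* Sketch of the expected lowering (editorial, for orientation): a plain
     #pragma omp atomic
     x = x + 1;
   becomes roughly
     tmp = GIMPLE_OMP_ATOMIC_LOAD (&x);
     GIMPLE_OMP_ATOMIC_STORE (tmp + 1);
   and the capture forms additionally leave either the old value (tmp)
   or the new value (the stored rhs) in *EXPR_P, as the switch at the
   end of the function shows.  */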
10860
10861static enum gimplify_status
726a989a 10862gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
953ff289
DN
10863{
10864 tree addr = TREE_OPERAND (*expr_p, 0);
20906c66
JJ
10865 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
10866 ? NULL : TREE_OPERAND (*expr_p, 1);
953ff289 10867 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
726a989a 10868 tree tmp_load;
538dd0b7
DM
10869 gomp_atomic_load *loadstmt;
10870 gomp_atomic_store *storestmt;
953ff289 10871
b731b390 10872 tmp_load = create_tmp_reg (type);
20906c66
JJ
10873 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
10874 return GS_ERROR;
10875
10876 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
10877 != GS_ALL_DONE)
10878 return GS_ERROR;
953ff289 10879
20906c66
JJ
10880 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
10881 gimplify_seq_add_stmt (pre_p, loadstmt);
10882 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
10883 != GS_ALL_DONE)
10884 return GS_ERROR;
953ff289 10885
20906c66
JJ
10886 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
10887 rhs = tmp_load;
10888 storestmt = gimple_build_omp_atomic_store (rhs);
10889 gimplify_seq_add_stmt (pre_p, storestmt);
acf0174b
JJ
10890 if (OMP_ATOMIC_SEQ_CST (*expr_p))
10891 {
10892 gimple_omp_atomic_set_seq_cst (loadstmt);
10893 gimple_omp_atomic_set_seq_cst (storestmt);
10894 }
20906c66
JJ
10895 switch (TREE_CODE (*expr_p))
10896 {
10897 case OMP_ATOMIC_READ:
10898 case OMP_ATOMIC_CAPTURE_OLD:
10899 *expr_p = tmp_load;
10900 gimple_omp_atomic_set_need_value (loadstmt);
10901 break;
10902 case OMP_ATOMIC_CAPTURE_NEW:
10903 *expr_p = rhs;
10904 gimple_omp_atomic_set_need_value (storestmt);
10905 break;
10906 default:
10907 *expr_p = NULL;
10908 break;
10909 }
a509ebb5 10910
acf0174b 10911 return GS_ALL_DONE;
953ff289 10912}
6de9cd9a 10913
0a35513e
AH
10914/* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
10915 body, and adding some EH bits. */
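/* For orientation (editorial note): a __transaction_relaxed { body }
   ends up as a GIMPLE_TRANSACTION tuple wrapping the gimplified body
   with the GTMA_IS_RELAXED subcode, while __transaction_atomic [[outer]]
   sets GTMA_IS_OUTER instead; see the subcode handling below.  */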
10916
10917static enum gimplify_status
10918gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
10919{
10920 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
355fe088 10921 gimple *body_stmt;
538dd0b7 10922 gtransaction *trans_stmt;
0a35513e 10923 gimple_seq body = NULL;
0a35513e
AH
10924 int subcode = 0;
10925
10926 /* Wrap the transaction body in a BIND_EXPR so we have a context
41dbbb37 10927 where to put decls for OMP. */
0a35513e
AH
10928 if (TREE_CODE (tbody) != BIND_EXPR)
10929 {
10930 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
10931 TREE_SIDE_EFFECTS (bind) = 1;
10932 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
10933 TRANSACTION_EXPR_BODY (expr) = bind;
10934 }
10935
45852dcc 10936 push_gimplify_context ();
0a35513e
AH
10937 temp = voidify_wrapper_expr (*expr_p, NULL);
10938
538dd0b7
DM
10939 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
10940 pop_gimplify_context (body_stmt);
0a35513e 10941
7c11b0fe 10942 trans_stmt = gimple_build_transaction (body);
0a35513e
AH
10943 if (TRANSACTION_EXPR_OUTER (expr))
10944 subcode = GTMA_IS_OUTER;
10945 else if (TRANSACTION_EXPR_RELAXED (expr))
10946 subcode = GTMA_IS_RELAXED;
538dd0b7 10947 gimple_transaction_set_subcode (trans_stmt, subcode);
0a35513e 10948
538dd0b7 10949 gimplify_seq_add_stmt (pre_p, trans_stmt);
0a35513e
AH
10950
10951 if (temp)
10952 {
10953 *expr_p = temp;
10954 return GS_OK;
10955 }
10956
10957 *expr_p = NULL_TREE;
10958 return GS_ALL_DONE;
10959}
10960
d9a6bd32
JJ
10961/* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
10962 is the OMP_BODY of the original EXPR (which has already been
10963 gimplified so it's not present in the EXPR).
10964
10965 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
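/* Illustrative example (assumed, not from the original text): inside
     #pragma omp for ordered(2)
     for (i = ...) for (j = ...)
       {
	 #pragma omp ordered depend(sink: i - 1, j)
	 ...
	 #pragma omp ordered depend(source)
       }
   the checks below verify that each depend(sink:) vector names the
   ordered loop iterators in order and that at most one depend(source)
   clause appears on the construct.  */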
10966
10967static gimple *
10968gimplify_omp_ordered (tree expr, gimple_seq body)
10969{
10970 tree c, decls;
10971 int failures = 0;
10972 unsigned int i;
10973 tree source_c = NULL_TREE;
10974 tree sink_c = NULL_TREE;
10975
10976 if (gimplify_omp_ctxp)
6b37bdaf
PP
10977 {
10978 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
10979 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10980 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
10981 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
10982 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
10983 {
10984 error_at (OMP_CLAUSE_LOCATION (c),
10985 "%<ordered%> construct with %<depend%> clause must be "
10986 "closely nested inside a loop with %<ordered%> clause "
10987 "with a parameter");
10988 failures++;
10989 }
10990 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10991 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
10992 {
10993 bool fail = false;
10994 for (decls = OMP_CLAUSE_DECL (c), i = 0;
10995 decls && TREE_CODE (decls) == TREE_LIST;
10996 decls = TREE_CHAIN (decls), ++i)
10997 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
10998 continue;
10999 else if (TREE_VALUE (decls)
11000 != gimplify_omp_ctxp->loop_iter_var[2 * i])
11001 {
11002 error_at (OMP_CLAUSE_LOCATION (c),
11003 "variable %qE is not an iteration "
11004 "of outermost loop %d, expected %qE",
11005 TREE_VALUE (decls), i + 1,
11006 gimplify_omp_ctxp->loop_iter_var[2 * i]);
11007 fail = true;
11008 failures++;
11009 }
11010 else
11011 TREE_VALUE (decls)
11012 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
11013 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
11014 {
11015 error_at (OMP_CLAUSE_LOCATION (c),
11016 "number of variables in %<depend(sink)%> "
11017 "clause does not match number of "
11018 "iteration variables");
11019 failures++;
11020 }
11021 sink_c = c;
11022 }
11023 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
11024 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
11025 {
11026 if (source_c)
d9a6bd32
JJ
11027 {
11028 error_at (OMP_CLAUSE_LOCATION (c),
6b37bdaf
PP
11029 "more than one %<depend(source)%> clause on an "
11030 "%<ordered%> construct");
d9a6bd32
JJ
11031 failures++;
11032 }
11033 else
6b37bdaf
PP
11034 source_c = c;
11035 }
11036 }
d9a6bd32
JJ
11037 if (source_c && sink_c)
11038 {
11039 error_at (OMP_CLAUSE_LOCATION (source_c),
11040 "%<depend(source)%> clause specified together with "
11041 "%<depend(sink:)%> clauses on the same construct");
11042 failures++;
11043 }
11044
11045 if (failures)
11046 return gimple_build_nop ();
11047 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
11048}
11049
ad19c4be 11050/* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
726a989a
RB
11051 expression produces a value to be used as an operand inside a GIMPLE
11052 statement, the value will be stored back in *EXPR_P. This value will
11053 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
11054 an SSA_NAME. The corresponding sequence of GIMPLE statements is
11055 emitted in PRE_P and POST_P.
11056
11057 Additionally, this process may overwrite parts of the input
11058 expression during gimplification. Ideally, it should be
11059 possible to do non-destructive gimplification.
11060
11061 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
11062 the expression needs to evaluate to a value to be used as
11063 an operand in a GIMPLE statement, this value will be stored in
11064 *EXPR_P on exit. This happens when the caller specifies one
11065 of fb_lvalue or fb_rvalue fallback flags.
11066
11067 PRE_P will contain the sequence of GIMPLE statements corresponding
11068 to the evaluation of EXPR and all the side-effects that must
11069 be executed before the main expression. On exit, the last
11070 statement of PRE_P is the core statement being gimplified. For
11071 instance, when gimplifying 'if (++a)' the last statement in
11072 PRE_P will be 'if (t.1)' where t.1 is the result of
11073 pre-incrementing 'a'.
11074
11075 POST_P will contain the sequence of GIMPLE statements corresponding
11076 to the evaluation of all the side-effects that must be executed
11077 after the main expression. If this is NULL, the post
11078 side-effects are stored at the end of PRE_P.
11079
11080 The reason why the output is split in two is to handle post
11081 side-effects explicitly. In some cases, an expression may have
11082 inner and outer post side-effects which need to be emitted in
11083 an order different from the one given by the recursive
11084 traversal. For instance, for the expression (*p--)++ the post
11085 side-effects of '--' must actually occur *after* the post
11086 side-effects of '++'. However, gimplification will first visit
11087 the inner expression, so if a separate POST sequence was not
11088 used, the resulting sequence would be:
11089
11090 1 t.1 = *p
11091 2 p = p - 1
11092 3 t.2 = t.1 + 1
11093 4 *p = t.2
11094
11095 However, the post-decrement operation in line #2 must not be
11096 evaluated until after the store to *p at line #4, so the
11097 correct sequence should be:
11098
11099 1 t.1 = *p
11100 2 t.2 = t.1 + 1
11101 3 *p = t.2
11102 4 p = p - 1
11103
11104 So, by specifying a separate post queue, it is possible
11105 to emit the post side-effects in the correct order.
11106 If POST_P is NULL, an internal queue will be used. Before
11107 returning to the caller, the sequence POST_P is appended to
11108 the main output sequence PRE_P.
11109
11110 GIMPLE_TEST_F points to a function that takes a tree T and
11111 returns nonzero if T is in the GIMPLE form requested by the
12947319 11112 caller. The GIMPLE predicates are in gimple.c.
726a989a
RB
11113
11114 FALLBACK tells the function what sort of a temporary we want if
11115 gimplification cannot produce an expression that complies with
11116 GIMPLE_TEST_F.
11117
11118 fb_none means that no temporary should be generated
11119 fb_rvalue means that an rvalue is OK to generate
11120 fb_lvalue means that an lvalue is OK to generate
11121 fb_either means that either is OK, but an lvalue is preferable.
11122 fb_mayfail means that gimplification may fail (in which case
11123 GS_ERROR will be returned)
11124
11125 The return value is either GS_ERROR or GS_ALL_DONE, since this
11126 function iterates until EXPR is completely gimplified or an error
11127 occurs. */
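/* A minimal usage sketch (assumed, for illustration only):
     tree op = TREE_OPERAND (*expr_p, 0);
     enum gimplify_status gs
       = gimplify_expr (&op, pre_p, post_p, is_gimple_val, fb_rvalue);
   asks for OP to be reduced to a GIMPLE value (a constant, decl or
   SSA_NAME); any statements needed to compute it are appended to PRE_P
   and OP is overwritten with the value to use in its place.  */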
6de9cd9a
DN
11128
11129enum gimplify_status
726a989a
RB
11130gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
11131 bool (*gimple_test_f) (tree), fallback_t fallback)
6de9cd9a
DN
11132{
11133 tree tmp;
726a989a
RB
11134 gimple_seq internal_pre = NULL;
11135 gimple_seq internal_post = NULL;
6de9cd9a 11136 tree save_expr;
726a989a 11137 bool is_statement;
6de9cd9a
DN
11138 location_t saved_location;
11139 enum gimplify_status ret;
726a989a 11140 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
6dc4a604 11141 tree label;
6de9cd9a
DN
11142
11143 save_expr = *expr_p;
11144 if (save_expr == NULL_TREE)
11145 return GS_ALL_DONE;
11146
726a989a
RB
11147 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
11148 is_statement = gimple_test_f == is_gimple_stmt;
11149 if (is_statement)
11150 gcc_assert (pre_p);
11151
11152 /* Consistency checks. */
11153 if (gimple_test_f == is_gimple_reg)
11154 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
11155 else if (gimple_test_f == is_gimple_val
726a989a
RB
11156 || gimple_test_f == is_gimple_call_addr
11157 || gimple_test_f == is_gimple_condexpr
11158 || gimple_test_f == is_gimple_mem_rhs
ba4d8f9d 11159 || gimple_test_f == is_gimple_mem_rhs_or_call
726a989a 11160 || gimple_test_f == is_gimple_reg_rhs
ba4d8f9d 11161 || gimple_test_f == is_gimple_reg_rhs_or_call
70f34814
RG
11162 || gimple_test_f == is_gimple_asm_val
11163 || gimple_test_f == is_gimple_mem_ref_addr)
726a989a
RB
11164 gcc_assert (fallback & fb_rvalue);
11165 else if (gimple_test_f == is_gimple_min_lval
11166 || gimple_test_f == is_gimple_lvalue)
11167 gcc_assert (fallback & fb_lvalue);
11168 else if (gimple_test_f == is_gimple_addressable)
11169 gcc_assert (fallback & fb_either);
11170 else if (gimple_test_f == is_gimple_stmt)
11171 gcc_assert (fallback == fb_none);
11172 else
11173 {
11174 /* We should have recognized the GIMPLE_TEST_F predicate to
11175 know what kind of fallback to use in case a temporary is
11176 needed to hold the value or address of *EXPR_P. */
11177 gcc_unreachable ();
11178 }
11179
6de9cd9a
DN
11180 /* We used to check the predicate here and return immediately if it
11181 succeeds. This is wrong; the design is for gimplification to be
11182 idempotent, and for the predicates to only test for valid forms, not
11183 whether they are fully simplified. */
6de9cd9a
DN
11184 if (pre_p == NULL)
11185 pre_p = &internal_pre;
726a989a 11186
6de9cd9a
DN
11187 if (post_p == NULL)
11188 post_p = &internal_post;
11189
726a989a
RB
11190 /* Remember the last statements added to PRE_P and POST_P. Every
11191 new statement added by the gimplification helpers needs to be
11192 annotated with location information. To centralize the
11193 responsibility, we remember the last statement that had been
11194 added to both queues before gimplifying *EXPR_P. If
11195 gimplification produces new statements in PRE_P and POST_P, those
11196 statements will be annotated with the same location information
11197 as *EXPR_P. */
11198 pre_last_gsi = gsi_last (*pre_p);
11199 post_last_gsi = gsi_last (*post_p);
11200
6de9cd9a 11201 saved_location = input_location;
a281759f
PB
11202 if (save_expr != error_mark_node
11203 && EXPR_HAS_LOCATION (*expr_p))
11204 input_location = EXPR_LOCATION (*expr_p);
6de9cd9a
DN
11205
11206 /* Loop over the specific gimplifiers until the toplevel node
11207 remains the same. */
11208 do
11209 {
73d6ddef
RK
11210 /* Strip away as many useless type conversions as possible
11211 at the toplevel. */
11212 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
6de9cd9a
DN
11213
11214 /* Remember the expr. */
11215 save_expr = *expr_p;
11216
11217 /* Die, die, die, my darling. */
11218 if (save_expr == error_mark_node
726a989a 11219 || (TREE_TYPE (save_expr)
65355d53 11220 && TREE_TYPE (save_expr) == error_mark_node))
6de9cd9a
DN
11221 {
11222 ret = GS_ERROR;
11223 break;
11224 }
11225
11226 /* Do any language-specific gimplification. */
32e8bb8e
ILT
11227 ret = ((enum gimplify_status)
11228 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
6de9cd9a
DN
11229 if (ret == GS_OK)
11230 {
11231 if (*expr_p == NULL_TREE)
11232 break;
11233 if (*expr_p != save_expr)
11234 continue;
11235 }
11236 else if (ret != GS_UNHANDLED)
11237 break;
11238
941f78d1
JM
11239 /* Make sure that all the cases set 'ret' appropriately. */
11240 ret = GS_UNHANDLED;
6de9cd9a
DN
11241 switch (TREE_CODE (*expr_p))
11242 {
11243 /* First deal with the special cases. */
11244
11245 case POSTINCREMENT_EXPR:
11246 case POSTDECREMENT_EXPR:
11247 case PREINCREMENT_EXPR:
11248 case PREDECREMENT_EXPR:
11249 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
cc3c4f62
RB
11250 fallback != fb_none,
11251 TREE_TYPE (*expr_p));
6de9cd9a
DN
11252 break;
11253
0bd34ae4
RB
11254 case VIEW_CONVERT_EXPR:
11255 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
11256 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
11257 {
11258 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11259 post_p, is_gimple_val, fb_rvalue);
11260 recalculate_side_effects (*expr_p);
11261 break;
11262 }
11263 /* Fallthru. */
11264
6de9cd9a 11265 case ARRAY_REF:
44de5aeb
RK
11266 case ARRAY_RANGE_REF:
11267 case REALPART_EXPR:
11268 case IMAGPART_EXPR:
6de9cd9a
DN
11269 case COMPONENT_REF:
11270 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
90051e16 11271 fallback ? fallback : fb_rvalue);
6de9cd9a
DN
11272 break;
11273
11274 case COND_EXPR:
dae7ec87 11275 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
726a989a 11276
0223e4f5
JM
11277 /* C99 code may assign to an array in a structure value of a
11278 conditional expression, and this has undefined behavior
11279 only on execution, so create a temporary if an lvalue is
11280 required. */
11281 if (fallback == fb_lvalue)
11282 {
381cdae4 11283 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
936d04b6 11284 mark_addressable (*expr_p);
941f78d1 11285 ret = GS_OK;
0223e4f5 11286 }
6de9cd9a
DN
11287 break;
11288
11289 case CALL_EXPR:
90051e16 11290 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
726a989a 11291
0223e4f5
JM
11292 /* C99 code may assign to an array in a structure returned
11293 from a function, and this has undefined behavior only on
11294 execution, so create a temporary if an lvalue is
11295 required. */
11296 if (fallback == fb_lvalue)
11297 {
381cdae4 11298 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
936d04b6 11299 mark_addressable (*expr_p);
941f78d1 11300 ret = GS_OK;
0223e4f5 11301 }
6de9cd9a
DN
11302 break;
11303
11304 case TREE_LIST:
282899df 11305 gcc_unreachable ();
6de9cd9a
DN
11306
11307 case COMPOUND_EXPR:
11308 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
11309 break;
11310
2ec5deb5 11311 case COMPOUND_LITERAL_EXPR:
4c53d183
MM
11312 ret = gimplify_compound_literal_expr (expr_p, pre_p,
11313 gimple_test_f, fallback);
2ec5deb5
PB
11314 break;
11315
6de9cd9a
DN
11316 case MODIFY_EXPR:
11317 case INIT_EXPR:
ebad5233
JM
11318 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
11319 fallback != fb_none);
6de9cd9a
DN
11320 break;
11321
11322 case TRUTH_ANDIF_EXPR:
11323 case TRUTH_ORIF_EXPR:
1d15f620
KT
11324 {
11325 /* Preserve the original type of the expression and the
11326 source location of the outer expression. */
11327 tree org_type = TREE_TYPE (*expr_p);
11328 *expr_p = gimple_boolify (*expr_p);
4b4455e5 11329 *expr_p = build3_loc (input_location, COND_EXPR,
1d15f620
KT
11330 org_type, *expr_p,
11331 fold_convert_loc
4b4455e5 11332 (input_location,
1d15f620
KT
11333 org_type, boolean_true_node),
11334 fold_convert_loc
4b4455e5 11335 (input_location,
1d15f620
KT
11336 org_type, boolean_false_node));
11337 ret = GS_OK;
11338 break;
11339 }
6de9cd9a
DN
11340
11341 case TRUTH_NOT_EXPR:
3c6cbf7a 11342 {
53020648
RG
11343 tree type = TREE_TYPE (*expr_p);
11344 /* The parsers are careful to generate TRUTH_NOT_EXPR
11345 only with operands that are always zero or one.
11346 We do not fold here but handle the only interesting case
11347 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
3c6cbf7a 11348 *expr_p = gimple_boolify (*expr_p);
53020648
RG
11349 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
11350 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
11351 TREE_TYPE (*expr_p),
11352 TREE_OPERAND (*expr_p, 0));
11353 else
11354 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
11355 TREE_TYPE (*expr_p),
11356 TREE_OPERAND (*expr_p, 0),
11357 build_int_cst (TREE_TYPE (*expr_p), 1));
11358 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
11359 *expr_p = fold_convert_loc (input_location, type, *expr_p);
11360 ret = GS_OK;
bd5d002e 11361 break;
3c6cbf7a 11362 }
67339062 11363
6de9cd9a
DN
11364 case ADDR_EXPR:
11365 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
11366 break;
11367
8170608b
TB
11368 case ANNOTATE_EXPR:
11369 {
11370 tree cond = TREE_OPERAND (*expr_p, 0);
718c4601 11371 tree kind = TREE_OPERAND (*expr_p, 1);
664ceb1e
JJ
11372 tree type = TREE_TYPE (cond);
11373 if (!INTEGRAL_TYPE_P (type))
11374 {
11375 *expr_p = cond;
11376 ret = GS_OK;
11377 break;
11378 }
b731b390 11379 tree tmp = create_tmp_var (type);
8170608b 11380 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
538dd0b7 11381 gcall *call
718c4601 11382 = gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind);
8170608b
TB
11383 gimple_call_set_lhs (call, tmp);
11384 gimplify_seq_add_stmt (pre_p, call);
11385 *expr_p = tmp;
11386 ret = GS_ALL_DONE;
11387 break;
11388 }
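	  /* Sketch of the lowering above, assuming the annotation came from
	     something like "#pragma GCC ivdep" on a loop:

	       while (ANNOTATE_EXPR <i < n, ivdep>) ...

	     becomes, roughly,

	       tmp = ANNOTATE (i < n, ivdep);   <-- IFN_ANNOTATE internal call
	       while (tmp) ...

	     so later passes can still recover the annotation from the call.  */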
11389
6de9cd9a 11390 case VA_ARG_EXPR:
cd3ce9b4 11391 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
6de9cd9a
DN
11392 break;
11393
1043771b 11394 CASE_CONVERT:
6de9cd9a
DN
11395 if (IS_EMPTY_STMT (*expr_p))
11396 {
11397 ret = GS_ALL_DONE;
11398 break;
11399 }
11400
11401 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
11402 || fallback == fb_none)
11403 {
11404 /* Just strip a conversion to void (or in void context) and
11405 try again. */
11406 *expr_p = TREE_OPERAND (*expr_p, 0);
941f78d1 11407 ret = GS_OK;
6de9cd9a
DN
11408 break;
11409 }
11410
11411 ret = gimplify_conversion (expr_p);
11412 if (ret == GS_ERROR)
11413 break;
11414 if (*expr_p != save_expr)
11415 break;
11416 /* FALLTHRU */
11417
11418 case FIX_TRUNC_EXPR:
6de9cd9a
DN
11419 /* unary_expr: ... | '(' cast ')' val | ... */
11420 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11421 is_gimple_val, fb_rvalue);
11422 recalculate_side_effects (*expr_p);
11423 break;
11424
6a720599 11425 case INDIRECT_REF:
70f34814
RG
11426 {
11427 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
3748f5c9 11428 bool notrap = TREE_THIS_NOTRAP (*expr_p);
70f34814
RG
11429 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
11430
11431 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
11432 if (*expr_p != save_expr)
11433 {
11434 ret = GS_OK;
11435 break;
11436 }
11437
11438 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11439 is_gimple_reg, fb_rvalue);
dca26746
RG
11440 if (ret == GS_ERROR)
11441 break;
70f34814 11442
dca26746 11443 recalculate_side_effects (*expr_p);
70f34814
RG
11444 *expr_p = fold_build2_loc (input_location, MEM_REF,
11445 TREE_TYPE (*expr_p),
11446 TREE_OPERAND (*expr_p, 0),
11447 build_int_cst (saved_ptr_type, 0));
11448 TREE_THIS_VOLATILE (*expr_p) = volatilep;
3748f5c9 11449 TREE_THIS_NOTRAP (*expr_p) = notrap;
70f34814
RG
11450 ret = GS_OK;
11451 break;
11452 }
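	  /* Net effect: a surviving "*p" is rewritten as the equivalent
	     MEM_REF "MEM[(type *) p + 0]", carrying over the volatile and
	     notrap bits, since MEM_REF is the canonical dereference form in
	     GIMPLE.  */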
11453
 11454 /* We arrive here through the various re-gimplification paths. */
11455 case MEM_REF:
11456 /* First try re-folding the whole thing. */
11457 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
11458 TREE_OPERAND (*expr_p, 0),
11459 TREE_OPERAND (*expr_p, 1));
11460 if (tmp)
941f78d1 11461 {
ee45a32d
EB
11462 REF_REVERSE_STORAGE_ORDER (tmp)
11463 = REF_REVERSE_STORAGE_ORDER (*expr_p);
70f34814
RG
11464 *expr_p = tmp;
11465 recalculate_side_effects (*expr_p);
941f78d1
JM
11466 ret = GS_OK;
11467 break;
11468 }
01718e96
RG
11469 /* Avoid re-gimplifying the address operand if it is already
11470 in suitable form. Re-gimplifying would mark the address
11471 operand addressable. Always gimplify when not in SSA form
11472 as we still may have to gimplify decls with value-exprs. */
ebc1b29e 11473 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
01718e96
RG
11474 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
11475 {
11476 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11477 is_gimple_mem_ref_addr, fb_rvalue);
11478 if (ret == GS_ERROR)
11479 break;
11480 }
6de9cd9a 11481 recalculate_side_effects (*expr_p);
70f34814 11482 ret = GS_ALL_DONE;
6de9cd9a
DN
11483 break;
11484
01718e96 11485 /* Constants need not be gimplified. */
6de9cd9a
DN
11486 case INTEGER_CST:
11487 case REAL_CST:
325217ed 11488 case FIXED_CST:
6de9cd9a
DN
11489 case STRING_CST:
11490 case COMPLEX_CST:
11491 case VECTOR_CST:
3f5c390d
RB
11492 /* Drop the overflow flag on constants, we do not want
11493 that in the GIMPLE IL. */
11494 if (TREE_OVERFLOW_P (*expr_p))
11495 *expr_p = drop_tree_overflow (*expr_p);
6de9cd9a
DN
11496 ret = GS_ALL_DONE;
11497 break;
11498
11499 case CONST_DECL:
0534fa56 11500 /* If we require an lvalue, such as for ADDR_EXPR, retain the
2a7e31df 11501 CONST_DECL node. Otherwise the decl is replaceable by its
0534fa56
RH
11502 value. */
11503 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
11504 if (fallback & fb_lvalue)
11505 ret = GS_ALL_DONE;
11506 else
941f78d1
JM
11507 {
11508 *expr_p = DECL_INITIAL (*expr_p);
11509 ret = GS_OK;
11510 }
6de9cd9a
DN
11511 break;
11512
350fae66 11513 case DECL_EXPR:
726a989a 11514 ret = gimplify_decl_expr (expr_p, pre_p);
350fae66
RK
11515 break;
11516
6de9cd9a 11517 case BIND_EXPR:
c6c7698d 11518 ret = gimplify_bind_expr (expr_p, pre_p);
6de9cd9a
DN
11519 break;
11520
11521 case LOOP_EXPR:
11522 ret = gimplify_loop_expr (expr_p, pre_p);
11523 break;
11524
11525 case SWITCH_EXPR:
11526 ret = gimplify_switch_expr (expr_p, pre_p);
11527 break;
11528
6de9cd9a
DN
11529 case EXIT_EXPR:
11530 ret = gimplify_exit_expr (expr_p);
11531 break;
11532
11533 case GOTO_EXPR:
11534 /* If the target is not LABEL, then it is a computed jump
11535 and the target needs to be gimplified. */
11536 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
8c50b495
JJ
11537 {
11538 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
11539 NULL, is_gimple_val, fb_rvalue);
11540 if (ret == GS_ERROR)
11541 break;
11542 }
726a989a
RB
11543 gimplify_seq_add_stmt (pre_p,
11544 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
941f78d1 11545 ret = GS_ALL_DONE;
6de9cd9a
DN
11546 break;
11547
2e28e797 11548 case PREDICT_EXPR:
726a989a
RB
11549 gimplify_seq_add_stmt (pre_p,
11550 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
11551 PREDICT_EXPR_OUTCOME (*expr_p)));
11552 ret = GS_ALL_DONE;
11553 break;
2e28e797 11554
6de9cd9a 11555 case LABEL_EXPR:
81fea426 11556 ret = gimplify_label_expr (expr_p, pre_p);
6dc4a604
ML
11557 label = LABEL_EXPR_LABEL (*expr_p);
11558 gcc_assert (decl_function_context (label) == current_function_decl);
11559
 11560 /* If the label is used in a goto statement, or the address of the label
 11561 is taken, we need to unpoison all variables that were seen so far.
 11562 Doing so prevents us from reporting false positives. */
6ff92497 11563 if (asan_poisoned_variables
6dc4a604
ML
11564 && asan_used_labels != NULL
11565 && asan_used_labels->contains (label))
11566 asan_poison_variables (asan_poisoned_variables, false, pre_p);
6de9cd9a
DN
11567 break;
11568
11569 case CASE_LABEL_EXPR:
726a989a 11570 ret = gimplify_case_label_expr (expr_p, pre_p);
6dc4a604
ML
11571
11572 if (gimplify_ctxp->live_switch_vars)
11573 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
11574 pre_p);
6de9cd9a
DN
11575 break;
11576
11577 case RETURN_EXPR:
11578 ret = gimplify_return_expr (*expr_p, pre_p);
11579 break;
11580
11581 case CONSTRUCTOR:
48eb4e53
RK
11582 /* Don't reduce this in place; let gimplify_init_constructor work its
 11583 magic. But if we're just elaborating this for side effects, just
11584 gimplify any element that has side-effects. */
11585 if (fallback == fb_none)
11586 {
4038c495 11587 unsigned HOST_WIDE_INT ix;
ac47786e 11588 tree val;
08330ec2 11589 tree temp = NULL_TREE;
ac47786e
NF
11590 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
11591 if (TREE_SIDE_EFFECTS (val))
11592 append_to_statement_list (val, &temp);
48eb4e53 11593
08330ec2 11594 *expr_p = temp;
941f78d1 11595 ret = temp ? GS_OK : GS_ALL_DONE;
48eb4e53 11596 }
ca0b7d18
AP
11597 /* C99 code may assign to an array in a constructed
11598 structure or union, and this has undefined behavior only
11599 on execution, so create a temporary if an lvalue is
11600 required. */
11601 else if (fallback == fb_lvalue)
11602 {
381cdae4 11603 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
936d04b6 11604 mark_addressable (*expr_p);
941f78d1 11605 ret = GS_OK;
ca0b7d18 11606 }
08330ec2
AP
11607 else
11608 ret = GS_ALL_DONE;
6de9cd9a
DN
11609 break;
11610
11611 /* The following are special cases that are not handled by the
11612 original GIMPLE grammar. */
11613
11614 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
11615 eliminated. */
11616 case SAVE_EXPR:
11617 ret = gimplify_save_expr (expr_p, pre_p, post_p);
11618 break;
11619
11620 case BIT_FIELD_REF:
ea814c66
EB
11621 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11622 post_p, is_gimple_lvalue, fb_either);
11623 recalculate_side_effects (*expr_p);
6de9cd9a
DN
11624 break;
11625
150e3929
RG
11626 case TARGET_MEM_REF:
11627 {
11628 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
11629
23a534a1 11630 if (TMR_BASE (*expr_p))
150e3929 11631 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
4d948885 11632 post_p, is_gimple_mem_ref_addr, fb_either);
150e3929
RG
11633 if (TMR_INDEX (*expr_p))
11634 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
11635 post_p, is_gimple_val, fb_rvalue);
4d948885
RG
11636 if (TMR_INDEX2 (*expr_p))
11637 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
11638 post_p, is_gimple_val, fb_rvalue);
150e3929
RG
11639 /* TMR_STEP and TMR_OFFSET are always integer constants. */
11640 ret = MIN (r0, r1);
11641 }
11642 break;
11643
6de9cd9a
DN
11644 case NON_LVALUE_EXPR:
11645 /* This should have been stripped above. */
282899df 11646 gcc_unreachable ();
6de9cd9a
DN
11647
11648 case ASM_EXPR:
11649 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
11650 break;
11651
11652 case TRY_FINALLY_EXPR:
11653 case TRY_CATCH_EXPR:
726a989a
RB
11654 {
11655 gimple_seq eval, cleanup;
538dd0b7 11656 gtry *try_;
726a989a 11657
820055a0
DC
11658 /* Calls to destructors are generated automatically in FINALLY/CATCH
11659 block. They should have location as UNKNOWN_LOCATION. However,
11660 gimplify_call_expr will reset these call stmts to input_location
11661 if it finds stmt's location is unknown. To prevent resetting for
11662 destructors, we set the input_location to unknown.
11663 Note that this only affects the destructor calls in FINALLY/CATCH
11664 block, and will automatically reset to its original value by the
11665 end of gimplify_expr. */
11666 input_location = UNKNOWN_LOCATION;
726a989a
RB
11667 eval = cleanup = NULL;
11668 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
11669 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
cc8b343d
JJ
11670 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
11671 if (gimple_seq_empty_p (cleanup))
11672 {
11673 gimple_seq_add_seq (pre_p, eval);
11674 ret = GS_ALL_DONE;
11675 break;
11676 }
726a989a
RB
11677 try_ = gimple_build_try (eval, cleanup,
11678 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
11679 ? GIMPLE_TRY_FINALLY
11680 : GIMPLE_TRY_CATCH);
220d905f 11681 if (EXPR_HAS_LOCATION (save_expr))
e368f44f 11682 gimple_set_location (try_, EXPR_LOCATION (save_expr));
220d905f
AH
11683 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
11684 gimple_set_location (try_, saved_location);
726a989a
RB
11685 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
11686 gimple_try_set_catch_is_cleanup (try_,
11687 TRY_CATCH_IS_CLEANUP (*expr_p));
11688 gimplify_seq_add_stmt (pre_p, try_);
11689 ret = GS_ALL_DONE;
11690 break;
11691 }
6de9cd9a
DN
11692
11693 case CLEANUP_POINT_EXPR:
11694 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
11695 break;
11696
11697 case TARGET_EXPR:
11698 ret = gimplify_target_expr (expr_p, pre_p, post_p);
11699 break;
11700
11701 case CATCH_EXPR:
726a989a 11702 {
355fe088 11703 gimple *c;
726a989a
RB
11704 gimple_seq handler = NULL;
11705 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
11706 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
11707 gimplify_seq_add_stmt (pre_p, c);
11708 ret = GS_ALL_DONE;
11709 break;
11710 }
6de9cd9a
DN
11711
11712 case EH_FILTER_EXPR:
726a989a 11713 {
355fe088 11714 gimple *ehf;
726a989a
RB
11715 gimple_seq failure = NULL;
11716
11717 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
11718 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
d665b6e5 11719 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
726a989a
RB
11720 gimplify_seq_add_stmt (pre_p, ehf);
11721 ret = GS_ALL_DONE;
11722 break;
11723 }
6de9cd9a 11724
0f59171d
RH
11725 case OBJ_TYPE_REF:
11726 {
11727 enum gimplify_status r0, r1;
726a989a
RB
11728 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
11729 post_p, is_gimple_val, fb_rvalue);
11730 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
11731 post_p, is_gimple_val, fb_rvalue);
0f3a057a 11732 TREE_SIDE_EFFECTS (*expr_p) = 0;
0f59171d
RH
11733 ret = MIN (r0, r1);
11734 }
6de9cd9a
DN
11735 break;
11736
6de9cd9a
DN
11737 case LABEL_DECL:
11738 /* We get here when taking the address of a label. We mark
 11739 the label as "forced", meaning it can never be removed and
11740 it is a potential target for any computed goto. */
11741 FORCED_LABEL (*expr_p) = 1;
11742 ret = GS_ALL_DONE;
11743 break;
11744
11745 case STATEMENT_LIST:
c6c7698d 11746 ret = gimplify_statement_list (expr_p, pre_p);
6de9cd9a
DN
11747 break;
11748
d25cee4d
RH
11749 case WITH_SIZE_EXPR:
11750 {
70e2829d
KH
11751 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11752 post_p == &internal_post ? NULL : post_p,
11753 gimple_test_f, fallback);
11754 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
11755 is_gimple_val, fb_rvalue);
941f78d1 11756 ret = GS_ALL_DONE;
d25cee4d
RH
11757 }
11758 break;
11759
6de9cd9a 11760 case VAR_DECL:
4744afba 11761 case PARM_DECL:
a9f7c570 11762 ret = gimplify_var_or_parm_decl (expr_p);
6de9cd9a
DN
11763 break;
11764
077b0dfb 11765 case RESULT_DECL:
41dbbb37 11766 /* When within an OMP context, notice uses of variables. */
077b0dfb
JJ
11767 if (gimplify_omp_ctxp)
11768 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
11769 ret = GS_ALL_DONE;
11770 break;
11771
71956db3
RH
11772 case SSA_NAME:
11773 /* Allow callbacks into the gimplifier during optimization. */
11774 ret = GS_ALL_DONE;
11775 break;
11776
953ff289 11777 case OMP_PARALLEL:
726a989a
RB
11778 gimplify_omp_parallel (expr_p, pre_p);
11779 ret = GS_ALL_DONE;
953ff289
DN
11780 break;
11781
a68ab351 11782 case OMP_TASK:
726a989a
RB
11783 gimplify_omp_task (expr_p, pre_p);
11784 ret = GS_ALL_DONE;
a68ab351
JJ
11785 break;
11786
953ff289 11787 case OMP_FOR:
74bf76ed 11788 case OMP_SIMD:
c02065fc 11789 case CILK_SIMD:
9a771876 11790 case CILK_FOR:
acf0174b 11791 case OMP_DISTRIBUTE:
d9a6bd32 11792 case OMP_TASKLOOP:
41dbbb37 11793 case OACC_LOOP:
953ff289
DN
11794 ret = gimplify_omp_for (expr_p, pre_p);
11795 break;
11796
41dbbb37
TS
11797 case OACC_CACHE:
11798 gimplify_oacc_cache (expr_p, pre_p);
11799 ret = GS_ALL_DONE;
11800 break;
11801
6e232ba4
JN
11802 case OACC_DECLARE:
11803 gimplify_oacc_declare (expr_p, pre_p);
11804 ret = GS_ALL_DONE;
11805 break;
11806
37d5ad46 11807 case OACC_HOST_DATA:
88bae6f4 11808 case OACC_DATA:
41dbbb37 11809 case OACC_KERNELS:
41dbbb37 11810 case OACC_PARALLEL:
953ff289
DN
11811 case OMP_SECTIONS:
11812 case OMP_SINGLE:
acf0174b
JJ
11813 case OMP_TARGET:
11814 case OMP_TARGET_DATA:
11815 case OMP_TEAMS:
726a989a
RB
11816 gimplify_omp_workshare (expr_p, pre_p);
11817 ret = GS_ALL_DONE;
953ff289
DN
11818 break;
11819
41dbbb37
TS
11820 case OACC_ENTER_DATA:
11821 case OACC_EXIT_DATA:
11822 case OACC_UPDATE:
acf0174b 11823 case OMP_TARGET_UPDATE:
d9a6bd32
JJ
11824 case OMP_TARGET_ENTER_DATA:
11825 case OMP_TARGET_EXIT_DATA:
acf0174b
JJ
11826 gimplify_omp_target_update (expr_p, pre_p);
11827 ret = GS_ALL_DONE;
11828 break;
11829
953ff289
DN
11830 case OMP_SECTION:
11831 case OMP_MASTER:
acf0174b 11832 case OMP_TASKGROUP:
953ff289
DN
11833 case OMP_ORDERED:
11834 case OMP_CRITICAL:
726a989a
RB
11835 {
11836 gimple_seq body = NULL;
355fe088 11837 gimple *g;
726a989a
RB
11838
11839 gimplify_and_add (OMP_BODY (*expr_p), &body);
11840 switch (TREE_CODE (*expr_p))
11841 {
11842 case OMP_SECTION:
11843 g = gimple_build_omp_section (body);
11844 break;
11845 case OMP_MASTER:
11846 g = gimple_build_omp_master (body);
11847 break;
acf0174b
JJ
11848 case OMP_TASKGROUP:
11849 {
11850 gimple_seq cleanup = NULL;
11851 tree fn
11852 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
11853 g = gimple_build_call (fn, 0);
11854 gimple_seq_add_stmt (&cleanup, g);
11855 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
11856 body = NULL;
11857 gimple_seq_add_stmt (&body, g);
11858 g = gimple_build_omp_taskgroup (body);
11859 }
11860 break;
726a989a 11861 case OMP_ORDERED:
d9a6bd32 11862 g = gimplify_omp_ordered (*expr_p, body);
726a989a
RB
11863 break;
11864 case OMP_CRITICAL:
d9a6bd32
JJ
11865 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
11866 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
1a80d6b8 11867 gimplify_adjust_omp_clauses (pre_p, body,
d9a6bd32
JJ
11868 &OMP_CRITICAL_CLAUSES (*expr_p),
11869 OMP_CRITICAL);
726a989a 11870 g = gimple_build_omp_critical (body,
d9a6bd32
JJ
11871 OMP_CRITICAL_NAME (*expr_p),
11872 OMP_CRITICAL_CLAUSES (*expr_p));
726a989a
RB
11873 break;
11874 default:
11875 gcc_unreachable ();
11876 }
11877 gimplify_seq_add_stmt (pre_p, g);
11878 ret = GS_ALL_DONE;
11879 break;
11880 }
953ff289
DN
11881
11882 case OMP_ATOMIC:
20906c66
JJ
11883 case OMP_ATOMIC_READ:
11884 case OMP_ATOMIC_CAPTURE_OLD:
11885 case OMP_ATOMIC_CAPTURE_NEW:
953ff289
DN
11886 ret = gimplify_omp_atomic (expr_p, pre_p);
11887 break;
11888
0a35513e
AH
11889 case TRANSACTION_EXPR:
11890 ret = gimplify_transaction (expr_p, pre_p);
11891 break;
11892
16949072
RG
11893 case TRUTH_AND_EXPR:
11894 case TRUTH_OR_EXPR:
11895 case TRUTH_XOR_EXPR:
1d15f620 11896 {
bd5d002e 11897 tree orig_type = TREE_TYPE (*expr_p);
fc1f4caf 11898 tree new_type, xop0, xop1;
1d15f620 11899 *expr_p = gimple_boolify (*expr_p);
fc1f4caf
KT
11900 new_type = TREE_TYPE (*expr_p);
11901 if (!useless_type_conversion_p (orig_type, new_type))
1d15f620 11902 {
4b4455e5 11903 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
1d15f620
KT
11904 ret = GS_OK;
11905 break;
11906 }
da5fb469 11907
bd5d002e
RG
11908 /* Boolified binary truth expressions are semantically equivalent
11909 to bitwise binary expressions. Canonicalize them to the
11910 bitwise variant. */
11911 switch (TREE_CODE (*expr_p))
11912 {
11913 case TRUTH_AND_EXPR:
11914 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
11915 break;
11916 case TRUTH_OR_EXPR:
11917 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
11918 break;
11919 case TRUTH_XOR_EXPR:
11920 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
11921 break;
11922 default:
11923 break;
11924 }
fc1f4caf
KT
11925 /* Now make sure that operands have compatible type to
11926 expression's new_type. */
11927 xop0 = TREE_OPERAND (*expr_p, 0);
11928 xop1 = TREE_OPERAND (*expr_p, 1);
11929 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
11930 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
11931 new_type,
11932 xop0);
11933 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
11934 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
11935 new_type,
11936 xop1);
bd5d002e
RG
11937 /* Continue classified as tcc_binary. */
11938 goto expr_2;
da5fb469 11939 }
16949072 11940
e6ed43b0 11941 case VEC_COND_EXPR:
e93ed60e
RH
11942 {
11943 enum gimplify_status r0, r1, r2;
11944
11945 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11946 post_p, is_gimple_condexpr, fb_rvalue);
11947 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11948 post_p, is_gimple_val, fb_rvalue);
11949 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
11950 post_p, is_gimple_val, fb_rvalue);
11951
11952 ret = MIN (MIN (r0, r1), r2);
11953 recalculate_side_effects (*expr_p);
11954 }
11955 break;
11956
11957 case FMA_EXPR:
2205ed25 11958 case VEC_PERM_EXPR:
16949072
RG
11959 /* Classified as tcc_expression. */
11960 goto expr_3;
11961
483c6429
RG
11962 case BIT_INSERT_EXPR:
11963 /* Argument 3 is a constant. */
11964 goto expr_2;
11965
5be014d5 11966 case POINTER_PLUS_EXPR:
315f5f1b
RG
11967 {
11968 enum gimplify_status r0, r1;
11969 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11970 post_p, is_gimple_val, fb_rvalue);
11971 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11972 post_p, is_gimple_val, fb_rvalue);
11973 recalculate_side_effects (*expr_p);
11974 ret = MIN (r0, r1);
315f5f1b
RG
11975 break;
11976 }
726a989a 11977
939b37da
BI
11978 case CILK_SYNC_STMT:
11979 {
11980 if (!fn_contains_cilk_spawn_p (cfun))
11981 {
11982 error_at (EXPR_LOCATION (*expr_p),
11983 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
11984 ret = GS_ERROR;
11985 }
11986 else
11987 {
11988 gimplify_cilk_sync (expr_p, pre_p);
11989 ret = GS_ALL_DONE;
11990 }
11991 break;
11992 }
11993
6de9cd9a 11994 default:
282899df 11995 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
6de9cd9a 11996 {
6615c446 11997 case tcc_comparison:
61c25908
OH
 11998 /* Handle comparison of objects of non-scalar-mode aggregates
11999 with a call to memcmp. It would be nice to only have to do
12000 this for variable-sized objects, but then we'd have to allow
12001 the same nest of reference nodes we allow for MODIFY_EXPR and
12002 that's too complex.
12003
12004 Compare scalar mode aggregates as scalar mode values. Using
12005 memcmp for them would be very inefficient at best, and is
12006 plain wrong if bitfields are involved. */
726a989a
RB
12007 {
12008 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
61c25908 12009
544d960a
AS
12010 /* Vector comparisons need no boolification. */
12011 if (TREE_CODE (type) == VECTOR_TYPE)
12012 goto expr_2;
12013 else if (!AGGREGATE_TYPE_P (type))
7f3ff782
KT
12014 {
12015 tree org_type = TREE_TYPE (*expr_p);
12016 *expr_p = gimple_boolify (*expr_p);
12017 if (!useless_type_conversion_p (org_type,
12018 TREE_TYPE (*expr_p)))
12019 {
12020 *expr_p = fold_convert_loc (input_location,
12021 org_type, *expr_p);
12022 ret = GS_OK;
12023 }
12024 else
12025 goto expr_2;
12026 }
726a989a
RB
12027 else if (TYPE_MODE (type) != BLKmode)
12028 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
12029 else
12030 ret = gimplify_variable_sized_compare (expr_p);
61c25908 12031
726a989a 12032 break;
61c25908 12033 }
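	    /* Rough illustration (hypothetical operands): an equality test
	       between two variable-sized aggregates a and b is lowered by
	       gimplify_variable_sized_compare to something along the lines of

		 memcmp (&a, &b, sizeof (a)) == 0

	       whereas an aggregate small enough to live in a scalar mode is
	       viewed as an integer of that mode and compared directly.  */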
d3147f64 12034
282899df
NS
12035 /* If *EXPR_P does not need to be special-cased, handle it
12036 according to its class. */
6615c446 12037 case tcc_unary:
282899df
NS
12038 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12039 post_p, is_gimple_val, fb_rvalue);
12040 break;
6de9cd9a 12041
6615c446 12042 case tcc_binary:
282899df
NS
12043 expr_2:
12044 {
12045 enum gimplify_status r0, r1;
d3147f64 12046
282899df 12047 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
726a989a 12048 post_p, is_gimple_val, fb_rvalue);
282899df
NS
12049 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
12050 post_p, is_gimple_val, fb_rvalue);
d3147f64 12051
282899df
NS
12052 ret = MIN (r0, r1);
12053 break;
12054 }
d3147f64 12055
16949072
RG
12056 expr_3:
12057 {
12058 enum gimplify_status r0, r1, r2;
12059
12060 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12061 post_p, is_gimple_val, fb_rvalue);
12062 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
12063 post_p, is_gimple_val, fb_rvalue);
12064 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
12065 post_p, is_gimple_val, fb_rvalue);
12066
12067 ret = MIN (MIN (r0, r1), r2);
12068 break;
12069 }
12070
6615c446
JO
12071 case tcc_declaration:
12072 case tcc_constant:
6de9cd9a 12073 ret = GS_ALL_DONE;
282899df 12074 goto dont_recalculate;
d3147f64 12075
282899df 12076 default:
16949072 12077 gcc_unreachable ();
6de9cd9a 12078 }
6de9cd9a
DN
12079
12080 recalculate_side_effects (*expr_p);
726a989a 12081
282899df 12082 dont_recalculate:
6de9cd9a
DN
12083 break;
12084 }
d3147f64 12085
941f78d1 12086 gcc_assert (*expr_p || ret != GS_OK);
6de9cd9a
DN
12087 }
12088 while (ret == GS_OK);
12089
12090 /* If we encountered an error_mark somewhere nested inside, either
12091 stub out the statement or propagate the error back out. */
12092 if (ret == GS_ERROR)
12093 {
12094 if (is_statement)
65355d53 12095 *expr_p = NULL;
6de9cd9a
DN
12096 goto out;
12097 }
12098
6de9cd9a
DN
12099 /* This was only valid as a return value from the langhook, which
12100 we handled. Make sure it doesn't escape from any other context. */
282899df 12101 gcc_assert (ret != GS_UNHANDLED);
6de9cd9a 12102
65355d53 12103 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
6de9cd9a
DN
12104 {
12105 /* We aren't looking for a value, and we don't have a valid
80480cee
MP
12106 statement. If it doesn't have side-effects, throw it away.
12107 We can also get here with code such as "*&&L;", where L is
12108 a LABEL_DECL that is marked as FORCED_LABEL. */
12109 if (TREE_CODE (*expr_p) == LABEL_DECL
12110 || !TREE_SIDE_EFFECTS (*expr_p))
65355d53 12111 *expr_p = NULL;
6de9cd9a 12112 else if (!TREE_THIS_VOLATILE (*expr_p))
44de5aeb
RK
12113 {
12114 /* This is probably a _REF that contains something nested that
12115 has side effects. Recurse through the operands to find it. */
12116 enum tree_code code = TREE_CODE (*expr_p);
12117
282899df 12118 switch (code)
44de5aeb 12119 {
282899df 12120 case COMPONENT_REF:
02a5eac4
EB
12121 case REALPART_EXPR:
12122 case IMAGPART_EXPR:
12123 case VIEW_CONVERT_EXPR:
282899df
NS
12124 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12125 gimple_test_f, fallback);
12126 break;
12127
a9e64c63
EB
12128 case ARRAY_REF:
12129 case ARRAY_RANGE_REF:
44de5aeb
RK
12130 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12131 gimple_test_f, fallback);
12132 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
282899df
NS
12133 gimple_test_f, fallback);
12134 break;
12135
12136 default:
12137 /* Anything else with side-effects must be converted to
a9e64c63 12138 a valid statement before we get here. */
282899df 12139 gcc_unreachable ();
44de5aeb 12140 }
44de5aeb 12141
65355d53 12142 *expr_p = NULL;
44de5aeb 12143 }
a9e64c63
EB
12144 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
12145 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
6de9cd9a 12146 {
a9e64c63
EB
12147 /* Historically, the compiler has treated a bare reference
12148 to a non-BLKmode volatile lvalue as forcing a load. */
af62f6f9 12149 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
726a989a 12150
c22b1771 12151 /* Normally, we do not want to create a temporary for a
a38578e1
MM
12152 TREE_ADDRESSABLE type because such a type should not be
12153 copied by bitwise-assignment. However, we make an
12154 exception here, as all we are doing here is ensuring that
12155 we read the bytes that make up the type. We use
12156 create_tmp_var_raw because create_tmp_var will abort when
57b51d4d 12157 given a TREE_ADDRESSABLE type. */
a38578e1
MM
12158 tree tmp = create_tmp_var_raw (type, "vol");
12159 gimple_add_tmp_var (tmp);
726a989a
RB
12160 gimplify_assign (tmp, *expr_p, pre_p);
12161 *expr_p = NULL;
6de9cd9a
DN
12162 }
12163 else
12164 /* We can't do anything useful with a volatile reference to
a9e64c63
EB
12165 an incomplete type, so just throw it away. Likewise for
12166 a BLKmode type, since any implicit inner load should
12167 already have been turned into an explicit one by the
12168 gimplification process. */
65355d53 12169 *expr_p = NULL;
6de9cd9a
DN
12170 }
12171
12172 /* If we are gimplifying at the statement level, we're done. Tack
726a989a 12173 everything together and return. */
325c3691 12174 if (fallback == fb_none || is_statement)
6de9cd9a 12175 {
726a989a
RB
12176 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
12177 it out for GC to reclaim it. */
12178 *expr_p = NULL_TREE;
12179
12180 if (!gimple_seq_empty_p (internal_pre)
12181 || !gimple_seq_empty_p (internal_post))
be00f578 12182 {
726a989a
RB
12183 gimplify_seq_add_seq (&internal_pre, internal_post);
12184 gimplify_seq_add_seq (pre_p, internal_pre);
be00f578 12185 }
726a989a
RB
12186
12187 /* The result of gimplifying *EXPR_P is going to be the last few
12188 statements in *PRE_P and *POST_P. Add location information
12189 to all the statements that were added by the gimplification
12190 helpers. */
12191 if (!gimple_seq_empty_p (*pre_p))
12192 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
12193
12194 if (!gimple_seq_empty_p (*post_p))
12195 annotate_all_with_location_after (*post_p, post_last_gsi,
12196 input_location);
12197
6de9cd9a
DN
12198 goto out;
12199 }
12200
726a989a
RB
12201#ifdef ENABLE_GIMPLE_CHECKING
12202 if (*expr_p)
12203 {
12204 enum tree_code code = TREE_CODE (*expr_p);
12205 /* These expressions should already be in gimple IR form. */
12206 gcc_assert (code != MODIFY_EXPR
12207 && code != ASM_EXPR
12208 && code != BIND_EXPR
12209 && code != CATCH_EXPR
6fc4fb06 12210 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
726a989a
RB
12211 && code != EH_FILTER_EXPR
12212 && code != GOTO_EXPR
12213 && code != LABEL_EXPR
12214 && code != LOOP_EXPR
726a989a
RB
12215 && code != SWITCH_EXPR
12216 && code != TRY_FINALLY_EXPR
41dbbb37
TS
12217 && code != OACC_PARALLEL
12218 && code != OACC_KERNELS
12219 && code != OACC_DATA
12220 && code != OACC_HOST_DATA
12221 && code != OACC_DECLARE
12222 && code != OACC_UPDATE
12223 && code != OACC_ENTER_DATA
12224 && code != OACC_EXIT_DATA
12225 && code != OACC_CACHE
726a989a
RB
12226 && code != OMP_CRITICAL
12227 && code != OMP_FOR
41dbbb37 12228 && code != OACC_LOOP
726a989a 12229 && code != OMP_MASTER
acf0174b 12230 && code != OMP_TASKGROUP
726a989a
RB
12231 && code != OMP_ORDERED
12232 && code != OMP_PARALLEL
12233 && code != OMP_SECTIONS
12234 && code != OMP_SECTION
12235 && code != OMP_SINGLE);
12236 }
12237#endif
6de9cd9a 12238
726a989a
RB
12239 /* Otherwise we're gimplifying a subexpression, so the resulting
12240 value is interesting. If it's a valid operand that matches
12241 GIMPLE_TEST_F, we're done. Unless we are handling some
12242 post-effects internally; if that's the case, we need to copy into
12243 a temporary before adding the post-effects to POST_P. */
12244 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
6de9cd9a
DN
12245 goto out;
12246
12247 /* Otherwise, we need to create a new temporary for the gimplified
12248 expression. */
12249
12250 /* We can't return an lvalue if we have an internal postqueue. The
12251 object the lvalue refers to would (probably) be modified by the
12252 postqueue; we need to copy the value out first, which means an
12253 rvalue. */
726a989a
RB
12254 if ((fallback & fb_lvalue)
12255 && gimple_seq_empty_p (internal_post)
e847cc68 12256 && is_gimple_addressable (*expr_p))
6de9cd9a
DN
12257 {
12258 /* An lvalue will do. Take the address of the expression, store it
12259 in a temporary, and replace the expression with an INDIRECT_REF of
12260 that temporary. */
db3927fb 12261 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
6de9cd9a 12262 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
7f5ad6d7 12263 *expr_p = build_simple_mem_ref (tmp);
6de9cd9a 12264 }
ba4d8f9d 12265 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
6de9cd9a 12266 {
726a989a
RB
12267 /* An rvalue will do. Assign the gimplified expression into a
12268 new temporary TMP and replace the original expression with
12269 TMP. First, make sure that the expression has a type so that
12270 it can be assigned into a temporary. */
282899df 12271 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
412a1d9e 12272 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
6de9cd9a 12273 }
282899df 12274 else
6de9cd9a 12275 {
726a989a 12276#ifdef ENABLE_GIMPLE_CHECKING
282899df
NS
12277 if (!(fallback & fb_mayfail))
12278 {
12279 fprintf (stderr, "gimplification failed:\n");
ef6cb4c7 12280 print_generic_expr (stderr, *expr_p);
282899df
NS
12281 debug_tree (*expr_p);
12282 internal_error ("gimplification failed");
12283 }
12284#endif
12285 gcc_assert (fallback & fb_mayfail);
726a989a 12286
282899df 12287 /* If this is an asm statement, and the user asked for the
535a42b1 12288 impossible, don't die. Fail and let gimplify_asm_expr
282899df 12289 issue an error. */
6de9cd9a
DN
12290 ret = GS_ERROR;
12291 goto out;
12292 }
6de9cd9a 12293
6de9cd9a 12294 /* Make sure the temporary matches our predicate. */
282899df 12295 gcc_assert ((*gimple_test_f) (*expr_p));
6de9cd9a 12296
726a989a 12297 if (!gimple_seq_empty_p (internal_post))
6de9cd9a 12298 {
726a989a
RB
12299 annotate_all_with_location (internal_post, input_location);
12300 gimplify_seq_add_seq (pre_p, internal_post);
6de9cd9a
DN
12301 }
12302
12303 out:
12304 input_location = saved_location;
12305 return ret;
12306}
12307
381cdae4
RB
12308/* Like gimplify_expr but make sure the gimplified result is not itself
 12309 an SSA name (but a decl if it were). Temporaries required by
 12310 evaluating *EXPR_P may still be SSA names. */
12311
12312static enum gimplify_status
12313gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
12314 bool (*gimple_test_f) (tree), fallback_t fallback,
12315 bool allow_ssa)
12316{
12317 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
12318 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
12319 gimple_test_f, fallback);
12320 if (! allow_ssa
12321 && TREE_CODE (*expr_p) == SSA_NAME)
12322 {
12323 tree name = *expr_p;
12324 if (was_ssa_name_p)
12325 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
12326 else
12327 {
12328 /* Avoid the extra copy if possible. */
12329 *expr_p = create_tmp_reg (TREE_TYPE (name));
12330 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
12331 release_ssa_name (name);
12332 }
12333 }
12334 return ret;
12335}
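/* For example, gimplify_one_sizepos below calls this overload with
   ALLOW_SSA set to false, so a variable-length size expression stored in a
   type or decl field ends up in an ordinary temporary rather than in an
   SSA name that could be released behind the field's back.  */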
12336
44de5aeb 12337/* Look through TYPE for variable-sized objects and gimplify each such
65355d53 12338 size that we find. Add to LIST_P any statements generated. */
44de5aeb 12339
65355d53 12340void
726a989a 12341gimplify_type_sizes (tree type, gimple_seq *list_p)
44de5aeb 12342{
ad50bc8d
RH
12343 tree field, t;
12344
19dbbf36 12345 if (type == NULL || type == error_mark_node)
8e0a600b 12346 return;
ad50bc8d 12347
6c6cfbfd 12348 /* We first do the main variant, then copy into any other variants. */
ad50bc8d 12349 type = TYPE_MAIN_VARIANT (type);
44de5aeb 12350
8e0a600b 12351 /* Avoid infinite recursion. */
19dbbf36 12352 if (TYPE_SIZES_GIMPLIFIED (type))
8e0a600b
JJ
12353 return;
12354
12355 TYPE_SIZES_GIMPLIFIED (type) = 1;
12356
44de5aeb
RK
12357 switch (TREE_CODE (type))
12358 {
44de5aeb
RK
12359 case INTEGER_TYPE:
12360 case ENUMERAL_TYPE:
12361 case BOOLEAN_TYPE:
44de5aeb 12362 case REAL_TYPE:
325217ed 12363 case FIXED_POINT_TYPE:
65355d53
RH
12364 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
12365 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
ad50bc8d
RH
12366
12367 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
12368 {
12369 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
12370 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
ad50bc8d 12371 }
44de5aeb
RK
12372 break;
12373
12374 case ARRAY_TYPE:
ad50bc8d 12375 /* These types may not have declarations, so handle them here. */
8e0a600b
JJ
12376 gimplify_type_sizes (TREE_TYPE (type), list_p);
12377 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
2e957792
JJ
12378 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
12379 with assigned stack slots, for -O1+ -g they should be tracked
12380 by VTA. */
08d78391
EB
12381 if (!(TYPE_NAME (type)
12382 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12383 && DECL_IGNORED_P (TYPE_NAME (type)))
12384 && TYPE_DOMAIN (type)
802e9f8e
JJ
12385 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
12386 {
12387 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8813a647 12388 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
802e9f8e
JJ
12389 DECL_IGNORED_P (t) = 0;
12390 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8813a647 12391 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
802e9f8e
JJ
12392 DECL_IGNORED_P (t) = 0;
12393 }
44de5aeb
RK
12394 break;
12395
12396 case RECORD_TYPE:
12397 case UNION_TYPE:
12398 case QUAL_UNION_TYPE:
910ad8de 12399 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
44de5aeb 12400 if (TREE_CODE (field) == FIELD_DECL)
8e0a600b
JJ
12401 {
12402 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
9a9ba8d9
JJ
12403 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
12404 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
8e0a600b
JJ
12405 gimplify_type_sizes (TREE_TYPE (field), list_p);
12406 }
12407 break;
12408
12409 case POINTER_TYPE:
12410 case REFERENCE_TYPE:
706c4bb7
OH
12411 /* We used to recurse on the pointed-to type here, which turned out to
12412 be incorrect because its definition might refer to variables not
12413 yet initialized at this point if a forward declaration is involved.
12414
12415 It was actually useful for anonymous pointed-to types to ensure
12416 that the sizes evaluation dominates every possible later use of the
12417 values. Restricting to such types here would be safe since there
f63645be
KH
12418 is no possible forward declaration around, but would introduce an
12419 undesirable middle-end semantic to anonymity. We then defer to
12420 front-ends the responsibility of ensuring that the sizes are
12421 evaluated both early and late enough, e.g. by attaching artificial
706c4bb7 12422 type declarations to the tree. */
44de5aeb
RK
12423 break;
12424
12425 default:
12426 break;
12427 }
12428
65355d53
RH
12429 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
12430 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
44de5aeb 12431
ad50bc8d 12432 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
b4830636 12433 {
ad50bc8d
RH
12434 TYPE_SIZE (t) = TYPE_SIZE (type);
12435 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
12436 TYPE_SIZES_GIMPLIFIED (t) = 1;
b4830636 12437 }
b4830636
RH
12438}
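/* Illustrative example (hypothetical source): given

     void f (int n) { int a[n]; ... }

   this walks the ARRAY_TYPE of 'a', gimplifies the SAVE_EXPRs hidden in its
   TYPE_SIZE and TYPE_DOMAIN, and, per the ARRAY_TYPE case above, keeps the
   resulting artificial bound variables visible (DECL_IGNORED_P cleared) so
   that -O0 stack slots and -O1+ -g variable tracking can describe them.  */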
12439
12440/* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
12441 a size or position, has had all of its SAVE_EXPRs evaluated.
726a989a 12442 We add any required statements to *STMT_P. */
44de5aeb
RK
12443
12444void
726a989a 12445gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
44de5aeb 12446{
3ac8781c 12447 tree expr = *expr_p;
a9c5ddf9 12448
44de5aeb 12449 /* We don't do anything if the value isn't there, is constant, or contains
 1e748a2b 12450 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
aabcd309 12451 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
1e748a2b
RK
12452 will want to replace it with a new variable, but that will cause problems
12453 if this type is from outside the function. It's OK to have that here. */
848be094 12454 if (is_gimple_sizepos (expr))
44de5aeb
RK
12455 return;
12456
a9c5ddf9
RH
12457 *expr_p = unshare_expr (expr);
12458
381cdae4
RB
12459 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
12460 if the def vanishes. */
12461 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
44de5aeb 12462}
6de9cd9a 12463
3ad065ef
EB
12464/* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
12465 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
12466 is true, also gimplify the parameters. */
726a989a 12467
538dd0b7 12468gbind *
3ad065ef 12469gimplify_body (tree fndecl, bool do_parms)
6de9cd9a
DN
12470{
12471 location_t saved_location = input_location;
726a989a 12472 gimple_seq parm_stmts, seq;
355fe088 12473 gimple *outer_stmt;
538dd0b7 12474 gbind *outer_bind;
9f9ebcdf 12475 struct cgraph_node *cgn;
6de9cd9a
DN
12476
12477 timevar_push (TV_TREE_GIMPLIFY);
953ff289 12478
381cdae4
RB
12479 init_tree_ssa (cfun);
12480
f66d6761
SB
12481 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
12482 gimplification. */
12483 default_rtl_profile ();
12484
953ff289 12485 gcc_assert (gimplify_ctxp == NULL);
381cdae4 12486 push_gimplify_context (true);
6de9cd9a 12487
41dbbb37 12488 if (flag_openacc || flag_openmp)
acf0174b
JJ
12489 {
12490 gcc_assert (gimplify_omp_ctxp == NULL);
12491 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
12492 gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
12493 }
12494
44de5aeb
RK
12495 /* Unshare most shared trees in the body and in that of any nested functions.
12496 It would seem we don't have to do this for nested functions because
12497 they are supposed to be output and then the outer function gimplified
12498 first, but the g++ front end doesn't always do it that way. */
3ad065ef
EB
12499 unshare_body (fndecl);
12500 unvisit_body (fndecl);
6de9cd9a 12501
d52f5295 12502 cgn = cgraph_node::get (fndecl);
9f9ebcdf 12503 if (cgn && cgn->origin)
6e2830c3 12504 nonlocal_vlas = new hash_set<tree>;
77f2a970 12505
fa10beec 12506 /* Make sure input_location isn't set to something weird. */
6de9cd9a
DN
12507 input_location = DECL_SOURCE_LOCATION (fndecl);
12508
4744afba
RH
12509 /* Resolve callee-copies. This has to be done before processing
12510 the body so that DECL_VALUE_EXPR gets processed correctly. */
3ad065ef 12511 parm_stmts = do_parms ? gimplify_parameters () : NULL;
4744afba 12512
6de9cd9a 12513 /* Gimplify the function's body. */
726a989a 12514 seq = NULL;
3ad065ef 12515 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
538dd0b7
DM
12516 outer_stmt = gimple_seq_first_stmt (seq);
12517 if (!outer_stmt)
6de9cd9a 12518 {
538dd0b7
DM
12519 outer_stmt = gimple_build_nop ();
12520 gimplify_seq_add_stmt (&seq, outer_stmt);
6de9cd9a 12521 }
44de5aeb 12522
726a989a
RB
12523 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
12524 not the case, wrap everything in a GIMPLE_BIND to make it so. */
538dd0b7 12525 if (gimple_code (outer_stmt) == GIMPLE_BIND
726a989a 12526 && gimple_seq_first (seq) == gimple_seq_last (seq))
538dd0b7 12527 outer_bind = as_a <gbind *> (outer_stmt);
726a989a
RB
12528 else
12529 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
12530
3ad065ef 12531 DECL_SAVED_TREE (fndecl) = NULL_TREE;
4744afba
RH
12532
12533 /* If we had callee-copies statements, insert them at the beginning
f0c10f0f 12534 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
726a989a 12535 if (!gimple_seq_empty_p (parm_stmts))
4744afba 12536 {
f0c10f0f
RG
12537 tree parm;
12538
726a989a
RB
12539 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
12540 gimple_bind_set_body (outer_bind, parm_stmts);
f0c10f0f
RG
12541
12542 for (parm = DECL_ARGUMENTS (current_function_decl);
910ad8de 12543 parm; parm = DECL_CHAIN (parm))
f0c10f0f
RG
12544 if (DECL_HAS_VALUE_EXPR_P (parm))
12545 {
12546 DECL_HAS_VALUE_EXPR_P (parm) = 0;
12547 DECL_IGNORED_P (parm) = 0;
12548 }
4744afba
RH
12549 }
12550
77f2a970
JJ
12551 if (nonlocal_vlas)
12552 {
96ddb7ec
JJ
12553 if (nonlocal_vla_vars)
12554 {
12555 /* tree-nested.c may later on call declare_vars (..., true);
12556 which relies on BLOCK_VARS chain to be the tail of the
12557 gimple_bind_vars chain. Ensure we don't violate that
12558 assumption. */
12559 if (gimple_bind_block (outer_bind)
12560 == DECL_INITIAL (current_function_decl))
12561 declare_vars (nonlocal_vla_vars, outer_bind, true);
12562 else
12563 BLOCK_VARS (DECL_INITIAL (current_function_decl))
12564 = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
12565 nonlocal_vla_vars);
12566 nonlocal_vla_vars = NULL_TREE;
12567 }
6e2830c3 12568 delete nonlocal_vlas;
77f2a970
JJ
12569 nonlocal_vlas = NULL;
12570 }
12571
41dbbb37
TS
12572 if ((flag_openacc || flag_openmp || flag_openmp_simd)
12573 && gimplify_omp_ctxp)
acf0174b
JJ
12574 {
12575 delete_omp_context (gimplify_omp_ctxp);
12576 gimplify_omp_ctxp = NULL;
12577 }
12578
726a989a 12579 pop_gimplify_context (outer_bind);
953ff289 12580 gcc_assert (gimplify_ctxp == NULL);
6de9cd9a 12581
b2b29377 12582 if (flag_checking && !seen_error ())
34019e28 12583 verify_gimple_in_seq (gimple_bind_body (outer_bind));
6de9cd9a
DN
12584
12585 timevar_pop (TV_TREE_GIMPLIFY);
12586 input_location = saved_location;
726a989a
RB
12587
12588 return outer_bind;
6de9cd9a
DN
12589}
12590
6a1f6c9c 12591typedef char *char_p; /* For DEF_VEC_P. */
6a1f6c9c
JM
12592
12593/* Return whether we should exclude FNDECL from instrumentation. */
12594
12595static bool
12596flag_instrument_functions_exclude_p (tree fndecl)
12597{
9771b263 12598 vec<char_p> *v;
6a1f6c9c 12599
9771b263
DN
12600 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
12601 if (v && v->length () > 0)
6a1f6c9c
JM
12602 {
12603 const char *name;
12604 int i;
12605 char *s;
12606
12607 name = lang_hooks.decl_printable_name (fndecl, 0);
9771b263 12608 FOR_EACH_VEC_ELT (*v, i, s)
6a1f6c9c
JM
12609 if (strstr (name, s) != NULL)
12610 return true;
12611 }
12612
9771b263
DN
12613 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
12614 if (v && v->length () > 0)
6a1f6c9c
JM
12615 {
12616 const char *name;
12617 int i;
12618 char *s;
12619
12620 name = DECL_SOURCE_FILE (fndecl);
9771b263 12621 FOR_EACH_VEC_ELT (*v, i, s)
6a1f6c9c
JM
12622 if (strstr (name, s) != NULL)
12623 return true;
12624 }
12625
12626 return false;
12627}
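/* For reference: the two vectors consulted above are filled from the
   -finstrument-functions-exclude-function-list= and
   -finstrument-functions-exclude-file-list= options, and the match is a
   plain substring test, so for instance excluding "alloc" also excludes a
   function named "my_alloc_wrapper".  */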
12628
6de9cd9a 12629/* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
726a989a 12630 node for the function we want to gimplify.
b8698a0f 12631
ad19c4be 12632 Return the sequence of GIMPLE statements corresponding to the body
726a989a 12633 of FNDECL. */
6de9cd9a
DN
12634
12635void
12636gimplify_function_tree (tree fndecl)
12637{
af16bc76 12638 tree parm, ret;
726a989a 12639 gimple_seq seq;
538dd0b7 12640 gbind *bind;
6de9cd9a 12641
a406865a
RG
12642 gcc_assert (!gimple_body (fndecl));
12643
db2960f4
SL
12644 if (DECL_STRUCT_FUNCTION (fndecl))
12645 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
12646 else
12647 push_struct_function (fndecl);
6de9cd9a 12648
d67cb100
TV
12649 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
12650 if necessary. */
12651 cfun->curr_properties |= PROP_gimple_lva;
12652
910ad8de 12653 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
e41d82f5
RH
12654 {
12655 /* Preliminarily mark non-addressed complex variables as eligible
12656 for promotion to gimple registers. We'll transform their uses
12657 as we find them. */
0890b981
AP
12658 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
12659 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
e41d82f5
RH
12660 && !TREE_THIS_VOLATILE (parm)
12661 && !needs_to_live_in_memory (parm))
0890b981 12662 DECL_GIMPLE_REG_P (parm) = 1;
e41d82f5
RH
12663 }
12664
12665 ret = DECL_RESULT (fndecl);
0890b981 12666 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
7b7e6ecd 12667 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
e41d82f5 12668 && !needs_to_live_in_memory (ret))
0890b981 12669 DECL_GIMPLE_REG_P (ret) = 1;
e41d82f5 12670
45b2222a 12671 if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS))
6ff92497 12672 asan_poisoned_variables = new hash_set<tree> ();
3ad065ef 12673 bind = gimplify_body (fndecl, true);
6ff92497
ML
12674 if (asan_poisoned_variables)
12675 {
12676 delete asan_poisoned_variables;
12677 asan_poisoned_variables = NULL;
12678 }
726a989a
RB
12679
12680 /* The tree body of the function is no longer needed, replace it
12681 with the new GIMPLE body. */
355a7673 12682 seq = NULL;
726a989a
RB
12683 gimple_seq_add_stmt (&seq, bind);
12684 gimple_set_body (fndecl, seq);
6de9cd9a
DN
12685
12686 /* If we're instrumenting function entry/exit, then prepend the call to
12687 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
12688 catch the exit hook. */
12689 /* ??? Add some way to ignore exceptions for this TFE. */
12690 if (flag_instrument_function_entry_exit
8d5a7d1f 12691 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
d964ba07
RB
12692 /* Do not instrument extern inline functions. */
12693 && !(DECL_DECLARED_INLINE_P (fndecl)
12694 && DECL_EXTERNAL (fndecl)
12695 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
8d5a7d1f 12696 && !flag_instrument_functions_exclude_p (fndecl))
6de9cd9a 12697 {
726a989a 12698 tree x;
538dd0b7 12699 gbind *new_bind;
355fe088 12700 gimple *tf;
726a989a 12701 gimple_seq cleanup = NULL, body = NULL;
b01890ff 12702 tree tmp_var;
538dd0b7 12703 gcall *call;
b01890ff 12704
e79983f4 12705 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
59527282 12706 call = gimple_build_call (x, 1, integer_zero_node);
b01890ff
JH
12707 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
12708 gimple_call_set_lhs (call, tmp_var);
12709 gimplify_seq_add_stmt (&cleanup, call);
e79983f4 12710 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
b01890ff
JH
12711 call = gimple_build_call (x, 2,
12712 build_fold_addr_expr (current_function_decl),
12713 tmp_var);
12714 gimplify_seq_add_stmt (&cleanup, call);
726a989a 12715 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
6de9cd9a 12716
e79983f4 12717 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
59527282 12718 call = gimple_build_call (x, 1, integer_zero_node);
b01890ff
JH
12719 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
12720 gimple_call_set_lhs (call, tmp_var);
12721 gimplify_seq_add_stmt (&body, call);
e79983f4 12722 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
b01890ff
JH
12723 call = gimple_build_call (x, 2,
12724 build_fold_addr_expr (current_function_decl),
12725 tmp_var);
12726 gimplify_seq_add_stmt (&body, call);
726a989a 12727 gimplify_seq_add_stmt (&body, tf);
d6b1fea2 12728 new_bind = gimple_build_bind (NULL, body, NULL);
6de9cd9a 12729
726a989a
RB
12730 /* Replace the current function body with the body
12731 wrapped in the try/finally TF. */
355a7673 12732 seq = NULL;
726a989a
RB
12733 gimple_seq_add_stmt (&seq, new_bind);
12734 gimple_set_body (fndecl, seq);
fca4adf2
JJ
12735 bind = new_bind;
12736 }
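  /* Sketch of the result: with -finstrument-functions the gimplified body
     SEQ is wrapped roughly as

       return_addr = __builtin_return_address (0);
       __cyg_profile_func_enter (&this_fn, return_addr);
       try
	 {
	   <original body>
	 }
       finally
	 {
	   return_addr = __builtin_return_address (0);
	   __cyg_profile_func_exit (&this_fn, return_addr);
	 }

     using the BUILT_IN_PROFILE_FUNC_ENTER/EXIT hooks built above.  */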
12737
45b2222a 12738 if (sanitize_flags_p (SANITIZE_THREAD))
fca4adf2
JJ
12739 {
12740 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
355fe088 12741 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
d6b1fea2 12742 gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
fca4adf2
JJ
12743 /* Replace the current function body with the body
12744 wrapped in the try/finally TF. */
12745 seq = NULL;
12746 gimple_seq_add_stmt (&seq, new_bind);
12747 gimple_set_body (fndecl, seq);
6de9cd9a
DN
12748 }
12749
726a989a 12750 DECL_SAVED_TREE (fndecl) = NULL_TREE;
d67cb100 12751 cfun->curr_properties |= PROP_gimple_any;
726a989a 12752
db2960f4 12753 pop_cfun ();
88d91afd 12754
363dc72c 12755 dump_function (TDI_gimple, fndecl);
6de9cd9a 12756}
726a989a 12757
4a7cb16f
AM
12758/* Return a dummy expression of type TYPE in order to keep going after an
12759 error. */
b184c8f1 12760
4a7cb16f
AM
12761static tree
12762dummy_object (tree type)
b184c8f1 12763{
4a7cb16f
AM
12764 tree t = build_int_cst (build_pointer_type (type), 0);
12765 return build2 (MEM_REF, type, t, t);
b184c8f1
AM
12766}
12767
4a7cb16f
AM
12768/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
12769 builtin function, but a very special sort of operator. */
b184c8f1 12770
4a7cb16f 12771enum gimplify_status
f8e89441
TV
12772gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
12773 gimple_seq *post_p ATTRIBUTE_UNUSED)
4a7cb16f
AM
12774{
12775 tree promoted_type, have_va_type;
12776 tree valist = TREE_OPERAND (*expr_p, 0);
12777 tree type = TREE_TYPE (*expr_p);
33f0852f 12778 tree t, tag, aptag;
4a7cb16f 12779 location_t loc = EXPR_LOCATION (*expr_p);
b184c8f1 12780
4a7cb16f
AM
12781 /* Verify that valist is of the proper type. */
12782 have_va_type = TREE_TYPE (valist);
12783 if (have_va_type == error_mark_node)
12784 return GS_ERROR;
12785 have_va_type = targetm.canonical_va_list_type (have_va_type);
5b28efbb 12786 if (have_va_type == NULL_TREE
3071bfa9 12787 && POINTER_TYPE_P (TREE_TYPE (valist)))
5b28efbb
TV
12788 /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg. */
12789 have_va_type
12790 = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
ba9bbd6f 12791 gcc_assert (have_va_type != NULL_TREE);
b184c8f1 12792
4a7cb16f
AM
12793 /* Generate a diagnostic for requesting data of a type that cannot
12794 be passed through `...' due to type promotion at the call site. */
12795 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
12796 != type)
12797 {
12798 static bool gave_help;
12799 bool warned;
bd02f00f
MP
12800 /* Use the expansion point to handle cases such as passing bool (defined
12801 in a system header) through `...'. */
12802 source_location xloc
12803 = expansion_point_location_if_in_system_header (loc);
b184c8f1 12804
4a7cb16f
AM
12805 /* Unfortunately, this is merely undefined, rather than a constraint
12806 violation, so we cannot make this an error. If this call is never
12807 executed, the program is still strictly conforming. */
bd02f00f
MP
12808 warned = warning_at (xloc, 0,
12809 "%qT is promoted to %qT when passed through %<...%>",
4a7cb16f
AM
12810 type, promoted_type);
12811 if (!gave_help && warned)
12812 {
12813 gave_help = true;
bd02f00f 12814 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
4a7cb16f
AM
12815 promoted_type, type);
12816 }
b184c8f1 12817
4a7cb16f
AM
12818 /* We can, however, treat "undefined" any way we please.
12819 Call abort to encourage the user to fix the program. */
12820 if (warned)
bd02f00f 12821 inform (xloc, "if this code is reached, the program will abort");
4a7cb16f
AM
12822 /* Before the abort, allow the evaluation of the va_list
12823 expression to exit or longjmp. */
12824 gimplify_and_add (valist, pre_p);
12825 t = build_call_expr_loc (loc,
12826 builtin_decl_implicit (BUILT_IN_TRAP), 0);
b184c8f1
AM
12827 gimplify_and_add (t, pre_p);
12828
4a7cb16f
AM
12829 /* This is dead code, but go ahead and finish so that the
12830 mode of the result comes out right. */
12831 *expr_p = dummy_object (type);
12832 return GS_ALL_DONE;
b184c8f1 12833 }
b184c8f1 12834
f8e89441 12835 tag = build_int_cst (build_pointer_type (type), 0);
33f0852f
JJ
12836 aptag = build_int_cst (TREE_TYPE (valist), 0);
12837
12838 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
12839 valist, tag, aptag);
b184c8f1 12840
d67cb100
TV
12841 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
12842 needs to be expanded. */
12843 cfun->curr_properties &= ~PROP_gimple_lva;
12844
f8e89441 12845 return GS_OK;
b184c8f1 12846}
bcf71673 12847
45b0be94
AM
12848/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
12849
12850 DST/SRC are the destination and source respectively. You can pass
12851 ungimplified trees in DST or SRC, in which case they will be
12852 converted to a gimple operand if necessary.
12853
12854 This function returns the newly created GIMPLE_ASSIGN tuple. */
12855
355fe088 12856gimple *
45b0be94
AM
12857gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
12858{
12859 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12860 gimplify_and_add (t, seq_p);
12861 ggc_free (t);
12862 return gimple_seq_last_stmt (*seq_p);
12863}
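/* Typical use, as in the volatile-load handling earlier in this file:

     tree tmp = create_tmp_var_raw (type, "vol");
     gimple_add_tmp_var (tmp);
     gimplify_assign (tmp, *expr_p, pre_p);

   which appends "tmp = <gimplified *expr_p>" to *SEQ_P and returns the
   newly built GIMPLE_ASSIGN for any further adjustment.  */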
12864
18f429e2 12865inline hashval_t
67f58944 12866gimplify_hasher::hash (const elt_t *p)
18f429e2
AM
12867{
12868 tree t = p->val;
12869 return iterative_hash_expr (t, 0);
12870}
12871
12872inline bool
67f58944 12873gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
18f429e2
AM
12874{
12875 tree t1 = p1->val;
12876 tree t2 = p2->val;
12877 enum tree_code code = TREE_CODE (t1);
12878
12879 if (TREE_CODE (t2) != code
12880 || TREE_TYPE (t1) != TREE_TYPE (t2))
12881 return false;
12882
12883 if (!operand_equal_p (t1, t2, 0))
12884 return false;
12885
18f429e2
AM
12886 /* Only allow them to compare equal if they also hash equal; otherwise
 12887 results are nondeterministic, and we fail bootstrap comparison. */
b2b29377 12888 gcc_checking_assert (hash (p1) == hash (p2));
18f429e2
AM
12889
12890 return true;
12891}