4ee9c684 1/* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
fbd26352 3 Copyright (C) 2002-2019 Free Software Foundation, Inc.
4ee9c684 4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
6
7This file is part of GCC.
8
9GCC is free software; you can redistribute it and/or modify it under
10the terms of the GNU General Public License as published by the Free
8c4c00c1 11Software Foundation; either version 3, or (at your option) any later
4ee9c684 12version.
13
14GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15WARRANTY; without even the implied warranty of MERCHANTABILITY or
16FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17for more details.
18
19You should have received a copy of the GNU General Public License
8c4c00c1 20along with GCC; see the file COPYING3. If not see
21<http://www.gnu.org/licenses/>. */
4ee9c684 22
23#include "config.h"
24#include "system.h"
25#include "coretypes.h"
9ef16211 26#include "backend.h"
7c29e30e 27#include "target.h"
28#include "rtl.h"
4ee9c684 29#include "tree.h"
c3207897 30#include "memmodel.h"
d48f7e1f 31#include "tm_p.h"
9ef16211 32#include "gimple.h"
d040a5b0 33#include "gimple-predict.h"
7c29e30e 34#include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
9ef16211 35#include "ssa.h"
7c29e30e 36#include "cgraph.h"
37#include "tree-pretty-print.h"
38#include "diagnostic-core.h"
39#include "alias.h"
9ef16211 40#include "fold-const.h"
d53441c8 41#include "calls.h"
d53441c8 42#include "varasm.h"
43#include "stmt.h"
44#include "expr.h"
bc61cadb 45#include "gimple-fold.h"
46#include "tree-eh.h"
a8783bee 47#include "gimplify.h"
dcf1a1ec 48#include "gimple-iterator.h"
9ed99284 49#include "stor-layout.h"
9ed99284 50#include "print-tree.h"
75a70cf9 51#include "tree-iterator.h"
4ee9c684 52#include "tree-inline.h"
4ee9c684 53#include "langhooks.h"
073c1fd5 54#include "tree-cfg.h"
073c1fd5 55#include "tree-ssa.h"
4954efd4 56#include "omp-general.h"
7740abd8 57#include "omp-low.h"
424a4a92 58#include "gimple-low.h"
ca4c3545 59#include "gomp-constants.h"
930c75f4 60#include "splay-tree.h"
98588013 61#include "gimple-walk.h"
b9ed1410 62#include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
f7715905 63#include "builtins.h"
30a86690 64#include "stringpool.h"
65#include "attribs.h"
629b6abc 66#include "asan.h"
d71fcb4d 67#include "dbgcnt.h"
629b6abc 68
69/* Hash set of poisoned variables in a bind expr. */
70static hash_set<tree> *asan_poisoned_variables = NULL;
1e8e9920 71
72enum gimplify_omp_var_data
73{
74 GOVD_SEEN = 1,
75 GOVD_EXPLICIT = 2,
76 GOVD_SHARED = 4,
77 GOVD_PRIVATE = 8,
78 GOVD_FIRSTPRIVATE = 16,
79 GOVD_LASTPRIVATE = 32,
80 GOVD_REDUCTION = 64,
81 GOVD_LOCAL = 128,
bc7bff74 82 GOVD_MAP = 256,
83 GOVD_DEBUG_PRIVATE = 512,
84 GOVD_PRIVATE_OUTER_REF = 1024,
3d483a94 85 GOVD_LINEAR = 2048,
bc7bff74 86 GOVD_ALIGNED = 4096,
ca4c3545 87
88 /* Flag for GOVD_MAP: don't copy back. */
bc7bff74 89 GOVD_MAP_TO_ONLY = 8192,
ca4c3545 90
d7729e26 91 /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference. */
92 GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,
93
43895be5 94 GOVD_MAP_0LEN_ARRAY = 32768,
95
9561765e 96 /* Flag for GOVD_MAP, if it is an `always, to' or `always, tofrom' mapping. */
97 GOVD_MAP_ALWAYS_TO = 65536,
98
98588013 99 /* Flag for shared vars that are or might be stored to in the region. */
100 GOVD_WRITTEN = 131072,
101
96eefdee 102 /* Flag for GOVD_MAP, if it is a forced mapping. */
103 GOVD_MAP_FORCE = 262144,
104
6acf639f 105 /* Flag for GOVD_MAP: must be present already. */
106 GOVD_MAP_FORCE_PRESENT = 524288,
107
7e5a76c8 108 /* Flag for GOVD_MAP: only allocate. */
109 GOVD_MAP_ALLOC_ONLY = 1048576,
110
111 /* Flag for GOVD_MAP: only copy back. */
112 GOVD_MAP_FROM_ONLY = 2097152,
113
114 GOVD_NONTEMPORAL = 4194304,
115
1e8e9920 116 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
3d483a94 117 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
118 | GOVD_LOCAL)
1e8e9920 119};
120
75a70cf9 121
fd6481cf 122enum omp_region_type
123{
b656be3a 124 ORT_WORKSHARE = 0x00,
7e5a76c8 125 ORT_TASKGROUP = 0x01,
126 ORT_SIMD = 0x04,
b656be3a 127
7e5a76c8 128 ORT_PARALLEL = 0x08,
129 ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,
b656be3a 130
7e5a76c8 131 ORT_TASK = 0x10,
132 ORT_UNTIED_TASK = ORT_TASK | 1,
133 ORT_TASKLOOP = ORT_TASK | 2,
134 ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,
b656be3a 135
7e5a76c8 136 ORT_TEAMS = 0x20,
137 ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
138 ORT_HOST_TEAMS = ORT_TEAMS | 2,
139 ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,
b656be3a 140
ca4c3545 141 /* Data region. */
7e5a76c8 142 ORT_TARGET_DATA = 0x40,
b656be3a 143
ca4c3545 144 /* Data region with offloading. */
7e5a76c8 145 ORT_TARGET = 0x80,
146 ORT_COMBINED_TARGET = ORT_TARGET | 1,
b656be3a 147
148 /* OpenACC variants. */
7e5a76c8 149 ORT_ACC = 0x100, /* A generic OpenACC region. */
b656be3a 150 ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA, /* Data construct. */
151 ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET, /* Parallel construct */
7e5a76c8 152 ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2, /* Kernels construct. */
153 ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2, /* Host data. */
b656be3a 154
43895be5 155 /* Dummy OpenMP region, used to disable expansion of
156 DECL_VALUE_EXPRs in taskloop pre body. */
7e5a76c8 157 ORT_NONE = 0x200
fd6481cf 158};
159
8a4a28a8 160/* Gimplify hashtable helper. */
161
298e7f9a 162struct gimplify_hasher : free_ptr_hash <elt_t>
8a4a28a8 163{
9969c043 164 static inline hashval_t hash (const elt_t *);
165 static inline bool equal (const elt_t *, const elt_t *);
8a4a28a8 166};
167
168struct gimplify_ctx
169{
170 struct gimplify_ctx *prev_context;
171
1a91d914 172 vec<gbind *> bind_expr_stack;
8a4a28a8 173 tree temps;
174 gimple_seq conditional_cleanups;
175 tree exit_label;
176 tree return_temp;
177
178 vec<tree> case_labels;
629b6abc 179 hash_set<tree> *live_switch_vars;
8a4a28a8 180 /* The formal temporary table. Should this be persistent? */
c1f445d2 181 hash_table<gimplify_hasher> *temp_htab;
8a4a28a8 182
183 int conditions;
6ad0a0bc 184 unsigned into_ssa : 1;
185 unsigned allow_rhs_cond_expr : 1;
186 unsigned in_cleanup_point_expr : 1;
187 unsigned keep_stack : 1;
188 unsigned save_stack : 1;
3c77f69c 189 unsigned in_switch_expr : 1;
8a4a28a8 190};
191
7e5a76c8 192enum gimplify_defaultmap_kind
193{
194 GDMK_SCALAR,
195 GDMK_AGGREGATE,
196 GDMK_ALLOCATABLE,
197 GDMK_POINTER
198};
199
1e8e9920 200struct gimplify_omp_ctx
4ee9c684 201{
1e8e9920 202 struct gimplify_omp_ctx *outer_context;
203 splay_tree variables;
431205b7 204 hash_set<tree> *privatized_types;
43895be5 205 /* Iteration variables in an OMP_FOR. */
206 vec<tree> loop_iter_var;
1e8e9920 207 location_t location;
208 enum omp_clause_default_kind default_kind;
fd6481cf 209 enum omp_region_type region_type;
bc7bff74 210 bool combined_loop;
5fddcf34 211 bool distribute;
43895be5 212 bool target_firstprivatize_array_bases;
7e5a76c8 213 int defaultmap[4];
1e8e9920 214};
215
8a4a28a8 216static struct gimplify_ctx *gimplify_ctxp;
1e8e9920 217static struct gimplify_omp_ctx *gimplify_omp_ctxp;
218
57859735 219/* Forward declaration. */
75a70cf9 220static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
2fc5e987 221static hash_map<tree, tree> *oacc_declare_returns;
9ae1b28a 222static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
223 bool (*) (tree), fallback_t, bool);
4c636aad 224
e0d98d5f 225/* Shorter alias name for the above function for use in gimplify.c
226 only. */
227
228static inline void
42acab1c 229gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
e0d98d5f 230{
231 gimple_seq_add_stmt_without_update (seq_p, gs);
232}
233
75a70cf9 234/* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
235 NULL, a new sequence is allocated. This function is
236 similar to gimple_seq_add_seq, but does not scan the operands.
237 During gimplification, we need to manipulate statement sequences
238 before the def/use vectors have been constructed. */
239
240static void
241gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
242{
243 gimple_stmt_iterator si;
244
245 if (src == NULL)
246 return;
247
75a70cf9 248 si = gsi_last (*dst_p);
249 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
250}
251
8a4a28a8 252
253/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
254 and popping gimplify contexts. */
255
256static struct gimplify_ctx *ctx_pool = NULL;
257
258/* Return a gimplify context struct from the pool. */
259
260static inline struct gimplify_ctx *
261ctx_alloc (void)
262{
263 struct gimplify_ctx * c = ctx_pool;
264
265 if (c)
266 ctx_pool = c->prev_context;
267 else
268 c = XNEW (struct gimplify_ctx);
269
270 memset (c, '\0', sizeof (*c));
271 return c;
272}
273
274/* Put gimplify context C back into the pool. */
275
276static inline void
277ctx_free (struct gimplify_ctx *c)
278{
279 c->prev_context = ctx_pool;
280 ctx_pool = c;
281}
282
283/* Free allocated ctx stack memory. */
284
285void
286free_gimplify_stack (void)
287{
288 struct gimplify_ctx *c;
289
290 while ((c = ctx_pool))
291 {
292 ctx_pool = c->prev_context;
293 free (c);
294 }
295}
296
297
4ee9c684 298/* Set up a context for the gimplifier. */
299
300void
8a4a28a8 301push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
4ee9c684 302{
8a4a28a8 303 struct gimplify_ctx *c = ctx_alloc ();
304
1e8e9920 305 c->prev_context = gimplify_ctxp;
1e8e9920 306 gimplify_ctxp = c;
8a4a28a8 307 gimplify_ctxp->into_ssa = in_ssa;
308 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
4ee9c684 309}
310
311/* Tear down a context for the gimplifier. If BODY is non-null, then
312 put the temporaries into the outer BIND_EXPR. Otherwise, put them
75a70cf9 313 in the local_decls.
314
315 BODY is not a sequence, but the first tuple in a sequence. */
4ee9c684 316
317void
42acab1c 318pop_gimplify_context (gimple *body)
4ee9c684 319{
1e8e9920 320 struct gimplify_ctx *c = gimplify_ctxp;
d471893d 321
f1f41a6c 322 gcc_assert (c
323 && (!c->bind_expr_stack.exists ()
324 || c->bind_expr_stack.is_empty ()));
325 c->bind_expr_stack.release ();
1e8e9920 326 gimplify_ctxp = c->prev_context;
4ee9c684 327
328 if (body)
0c2f12e9 329 declare_vars (c->temps, body, false);
4ee9c684 330 else
1e8e9920 331 record_vars (c->temps);
4ee9c684 332
c1f445d2 333 delete c->temp_htab;
334 c->temp_htab = NULL;
8a4a28a8 335 ctx_free (c);
4ee9c684 336}
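
/* Schematically, a caller elsewhere in the compiler brackets its
   gimplification work with the two functions above, while the context
   structs themselves are recycled through the ctx_alloc/ctx_free pool
   rather than freed each time (a sketch, not copied from any actual
   caller):

       push_gimplify_context (false, false);
       ... gimplify_stmt (&body, &seq) or gimplify_expr (...) ...
       pop_gimplify_context (outer_bind);    (or NULL to use record_vars)
*/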
337
57859735 338/* Push a GIMPLE_BIND tuple onto the stack of bindings. */
339
77caf6c2 340static void
1a91d914 341gimple_push_bind_expr (gbind *bind_stmt)
4ee9c684 342{
f1f41a6c 343 gimplify_ctxp->bind_expr_stack.reserve (8);
1a91d914 344 gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
4ee9c684 345}
346
57859735 347/* Pop the first element off the stack of bindings. */
348
77caf6c2 349static void
4ee9c684 350gimple_pop_bind_expr (void)
351{
f1f41a6c 352 gimplify_ctxp->bind_expr_stack.pop ();
4ee9c684 353}
354
57859735 355/* Return the first element of the stack of bindings. */
356
1a91d914 357gbind *
4ee9c684 358gimple_current_bind_expr (void)
359{
f1f41a6c 360 return gimplify_ctxp->bind_expr_stack.last ();
75a70cf9 361}
362
57859735 363/* Return the stack of bindings created during gimplification. */
75a70cf9 364
1a91d914 365vec<gbind *>
75a70cf9 366gimple_bind_expr_stack (void)
367{
368 return gimplify_ctxp->bind_expr_stack;
4ee9c684 369}
370
57859735 371/* Return true iff there is a COND_EXPR between us and the innermost
4ee9c684 372 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
373
374static bool
375gimple_conditional_context (void)
376{
377 return gimplify_ctxp->conditions > 0;
378}
379
380/* Note that we've entered a COND_EXPR. */
381
382static void
383gimple_push_condition (void)
384{
75a70cf9 385#ifdef ENABLE_GIMPLE_CHECKING
01194b5f 386 if (gimplify_ctxp->conditions == 0)
75a70cf9 387 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
01194b5f 388#endif
4ee9c684 389 ++(gimplify_ctxp->conditions);
390}
391
392/* Note that we've left a COND_EXPR. If we're back at unconditional scope
393 now, add any conditional cleanups we've seen to the prequeue. */
394
395static void
75a70cf9 396gimple_pop_condition (gimple_seq *pre_p)
4ee9c684 397{
398 int conds = --(gimplify_ctxp->conditions);
5ff0afa2 399
0d59b19d 400 gcc_assert (conds >= 0);
4ee9c684 401 if (conds == 0)
402 {
75a70cf9 403 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
404 gimplify_ctxp->conditional_cleanups = NULL;
4ee9c684 405 }
4ee9c684 406}
407
1e8e9920 408/* A stable comparison routine for use with splay trees and DECLs. */
409
410static int
411splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
412{
413 tree a = (tree) xa;
414 tree b = (tree) xb;
415
416 return DECL_UID (a) - DECL_UID (b);
417}
418
419/* Create a new omp construct that deals with variable remapping. */
420
421static struct gimplify_omp_ctx *
fd6481cf 422new_omp_context (enum omp_region_type region_type)
1e8e9920 423{
424 struct gimplify_omp_ctx *c;
425
426 c = XCNEW (struct gimplify_omp_ctx);
427 c->outer_context = gimplify_omp_ctxp;
428 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
431205b7 429 c->privatized_types = new hash_set<tree>;
1e8e9920 430 c->location = input_location;
fd6481cf 431 c->region_type = region_type;
b0b48c1d 432 if ((region_type & ORT_TASK) == 0)
fd6481cf 433 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
434 else
435 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
7e5a76c8 436 c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
437 c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
438 c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
439 c->defaultmap[GDMK_POINTER] = GOVD_MAP;
1e8e9920 440
441 return c;
442}
443
444/* Destroy an omp construct that deals with variable remapping. */
445
446static void
447delete_omp_context (struct gimplify_omp_ctx *c)
448{
449 splay_tree_delete (c->variables);
431205b7 450 delete c->privatized_types;
43895be5 451 c->loop_iter_var.release ();
1e8e9920 452 XDELETE (c);
453}
454
455static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
456static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
457
75a70cf9 458/* Both gimplify the statement T and append it to *SEQ_P. This function
459 behaves exactly as gimplify_stmt, but you don't have to pass T as a
460 reference. */
fcdd3ab3 461
462void
75a70cf9 463gimplify_and_add (tree t, gimple_seq *seq_p)
464{
465 gimplify_stmt (&t, seq_p);
466}
467
468/* Gimplify statement T into sequence *SEQ_P, and return the first
469 tuple in the sequence of generated tuples for this statement.
470 Return NULL if gimplifying T produced no tuples. */
471
42acab1c 472static gimple *
75a70cf9 473gimplify_and_return_first (tree t, gimple_seq *seq_p)
fcdd3ab3 474{
75a70cf9 475 gimple_stmt_iterator last = gsi_last (*seq_p);
476
477 gimplify_and_add (t, seq_p);
478
479 if (!gsi_end_p (last))
480 {
481 gsi_next (&last);
482 return gsi_stmt (last);
483 }
484 else
485 return gimple_seq_first_stmt (*seq_p);
fcdd3ab3 486}
487
183e96b6 488/* Returns true iff T is a valid RHS for an assignment to an un-renamed
489 LHS, or for a call argument. */
490
491static bool
492is_gimple_mem_rhs (tree t)
493{
494 /* If we're dealing with a renamable type, either source or dest must be
495 a renamed variable. */
496 if (is_gimple_reg_type (TREE_TYPE (t)))
497 return is_gimple_val (t);
498 else
499 return is_gimple_val (t) || is_gimple_lvalue (t);
500}
501
75a70cf9 502/* Return true if T is a CALL_EXPR or an expression that can be
81943faa 503 assigned to a temporary. Note that this predicate should only be
75a70cf9 504 used during gimplification. See the rationale for this in
505 gimplify_modify_expr. */
506
507static bool
47f11e84 508is_gimple_reg_rhs_or_call (tree t)
75a70cf9 509{
47f11e84 510 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
511 || TREE_CODE (t) == CALL_EXPR);
75a70cf9 512}
513
514/* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
515 this predicate should only be used during gimplification. See the
516 rationale for this in gimplify_modify_expr. */
517
518static bool
47f11e84 519is_gimple_mem_rhs_or_call (tree t)
75a70cf9 520{
521 /* If we're dealing with a renamable type, either source or dest must be
89dd1b93 522 a renamed variable. */
523 if (is_gimple_reg_type (TREE_TYPE (t)))
75a70cf9 524 return is_gimple_val (t);
525 else
01e67f2d 526 return (is_gimple_val (t)
527 || is_gimple_lvalue (t)
528 || TREE_CLOBBER_P (t)
47f11e84 529 || TREE_CODE (t) == CALL_EXPR);
75a70cf9 530}
531
d3a27ad5 532/* Create a temporary with a name derived from VAL. Subroutine of
533 lookup_tmp_var; nobody else should call this function. */
534
535static inline tree
3ed3b9c9 536create_tmp_from_val (tree val)
d3a27ad5 537{
538 /* Drop all qualifiers and address-space information from the value type. */
539 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
540 tree var = create_tmp_var (type, get_name (val));
3ed3b9c9 541 if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
542 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
d3a27ad5 543 DECL_GIMPLE_REG_P (var) = 1;
544 return var;
545}
546
547/* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
548 an existing expression temporary. */
549
550static tree
551lookup_tmp_var (tree val, bool is_formal)
552{
553 tree ret;
554
555 /* If not optimizing, never really reuse a temporary. local-alloc
556 won't allocate any variable that is used in more than one basic
557 block, which means it will go into memory, causing much extra
558 work in reload and final and poorer code generation, outweighing
559 the extra memory allocation here. */
560 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
3ed3b9c9 561 ret = create_tmp_from_val (val);
d3a27ad5 562 else
563 {
564 elt_t elt, *elt_p;
d9dd21a8 565 elt_t **slot;
d3a27ad5 566
567 elt.val = val;
c1f445d2 568 if (!gimplify_ctxp->temp_htab)
569 gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
570 slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
d3a27ad5 571 if (*slot == NULL)
572 {
573 elt_p = XNEW (elt_t);
574 elt_p->val = val;
3ed3b9c9 575 elt_p->temp = ret = create_tmp_from_val (val);
d9dd21a8 576 *slot = elt_p;
d3a27ad5 577 }
578 else
579 {
d9dd21a8 580 elt_p = *slot;
d3a27ad5 581 ret = elt_p->temp;
582 }
583 }
584
585 return ret;
586}
587
47f11e84 588/* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
4ee9c684 589
590static tree
75a70cf9 591internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
9ae1b28a 592 bool is_formal, bool allow_ssa)
4ee9c684 593{
594 tree t, mod;
4ee9c684 595
75a70cf9 596 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
597 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
47f11e84 598 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
75a70cf9 599 fb_rvalue);
4ee9c684 600
9ae1b28a 601 if (allow_ssa
602 && gimplify_ctxp->into_ssa
d3a27ad5 603 && is_gimple_reg_type (TREE_TYPE (val)))
9ae1b28a 604 {
605 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
606 if (! gimple_in_ssa_p (cfun))
607 {
608 const char *name = get_name (val);
609 if (name)
610 SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
611 }
612 }
d3a27ad5 613 else
614 t = lookup_tmp_var (val, is_formal);
50c96bdc 615
a280136a 616 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
4ee9c684 617
3df42822 618 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
4ee9c684 619
6d105d50 620 /* gimplify_modify_expr might want to reduce this further. */
621 gimplify_and_add (mod, pre_p);
75a70cf9 622 ggc_free (mod);
dec41e98 623
4ee9c684 624 return t;
625}
626
57859735 627/* Return a formal temporary variable initialized with VAL. PRE_P is as
47f11e84 628 in gimplify_expr. Only use this function if:
629
630 1) The value of the unfactored expression represented by VAL will not
631 change between the initialization and use of the temporary, and
632 2) The temporary will not be otherwise modified.
633
634 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
635 and #2 means it is inappropriate for && temps.
636
637 For other cases, use get_initialized_tmp_var instead. */
773c5ba7 638
4ee9c684 639tree
75a70cf9 640get_formal_tmp_var (tree val, gimple_seq *pre_p)
4ee9c684 641{
9ae1b28a 642 return internal_get_tmp_var (val, pre_p, NULL, true, true);
4ee9c684 643}
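
/* For example, when a subexpression B + C must be reduced to a GIMPLE
   value, get_formal_tmp_var emits something like

       D.1234 = B + C;

   into *PRE_P and returns D.1234 (an SSA name when gimplifying directly
   into SSA form), so the enclosing expression can be rebuilt on top of
   that value.  When optimizing, a later lookup of the very same VAL tree
   reuses the already-created temporary via lookup_tmp_var.  */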
644
57859735 645/* Return a temporary variable initialized with VAL. PRE_P and POST_P
4ee9c684 646 are as in gimplify_expr. */
647
648tree
9ae1b28a 649get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
650 bool allow_ssa)
4ee9c684 651{
9ae1b28a 652 return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
4ee9c684 653}
654
57859735 655/* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
656 generate debug info for them; otherwise don't. */
4ee9c684 657
658void
42acab1c 659declare_vars (tree vars, gimple *gs, bool debug_info)
4ee9c684 660{
661 tree last = vars;
662 if (last)
663 {
0c2f12e9 664 tree temps, block;
4ee9c684 665
1a91d914 666 gbind *scope = as_a <gbind *> (gs);
4ee9c684 667
668 temps = nreverse (last);
0c2f12e9 669
7551cac6 670 block = gimple_bind_block (scope);
75a70cf9 671 gcc_assert (!block || TREE_CODE (block) == BLOCK);
0c2f12e9 672 if (!block || !debug_info)
673 {
1767a056 674 DECL_CHAIN (last) = gimple_bind_vars (scope);
75a70cf9 675 gimple_bind_set_vars (scope, temps);
0c2f12e9 676 }
677 else
678 {
679 /* We need to attach the nodes both to the BIND_EXPR and to its
680 associated BLOCK for debugging purposes. The key point here
681 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
682 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
683 if (BLOCK_VARS (block))
684 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
685 else
686 {
75a70cf9 687 gimple_bind_set_vars (scope,
688 chainon (gimple_bind_vars (scope), temps));
0c2f12e9 689 BLOCK_VARS (block) = temps;
690 }
691 }
4ee9c684 692 }
693}
694
150edb07 695/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
696 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
697 no such upper bound can be obtained. */
698
699static void
700force_constant_size (tree var)
701{
702 /* The only attempt we make is by querying the maximum size of objects
703 of the variable's type. */
704
705 HOST_WIDE_INT max_size;
706
53e9c5c4 707 gcc_assert (VAR_P (var));
150edb07 708
709 max_size = max_int_size_in_bytes (TREE_TYPE (var));
710
711 gcc_assert (max_size >= 0);
712
713 DECL_SIZE_UNIT (var)
714 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
715 DECL_SIZE (var)
716 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
717}
718
57859735 719/* Push the temporary variable TMP into the current binding. */
720
98107def 721void
722gimple_add_tmp_var_fn (struct function *fn, tree tmp)
723{
724 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
725
726 /* Later processing assumes that the object size is constant, which might
727 not be true at this point. Force the use of a constant upper bound in
728 this case. */
597e0685 729 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
98107def 730 force_constant_size (tmp);
731
732 DECL_CONTEXT (tmp) = fn->decl;
733 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
734
735 record_vars_into (tmp, fn->decl);
736}
737
738/* Push the temporary variable TMP into the current binding. */
739
4ee9c684 740void
741gimple_add_tmp_var (tree tmp)
742{
1767a056 743 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
4ee9c684 744
150edb07 745 /* Later processing assumes that the object size is constant, which might
746 not be true at this point. Force the use of a constant upper bound in
747 this case. */
597e0685 748 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
150edb07 749 force_constant_size (tmp);
750
4ee9c684 751 DECL_CONTEXT (tmp) = current_function_decl;
60f65a0a 752 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
4ee9c684 753
754 if (gimplify_ctxp)
755 {
1767a056 756 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
4ee9c684 757 gimplify_ctxp->temps = tmp;
1e8e9920 758
759 /* Mark temporaries local within the nearest enclosing parallel. */
760 if (gimplify_omp_ctxp)
761 {
762 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
3d483a94 763 while (ctx
764 && (ctx->region_type == ORT_WORKSHARE
7e5a76c8 765 || ctx->region_type == ORT_TASKGROUP
b656be3a 766 || ctx->region_type == ORT_SIMD
767 || ctx->region_type == ORT_ACC))
1e8e9920 768 ctx = ctx->outer_context;
769 if (ctx)
770 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
771 }
4ee9c684 772 }
773 else if (cfun)
774 record_vars (tmp);
775 else
75a70cf9 776 {
777 gimple_seq body_seq;
778
779 /* This case is for nested functions. We need to expose the locals
780 they create. */
781 body_seq = gimple_body (current_function_decl);
782 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
783 }
784}
785
75a70cf9 786
f96dd706 787\f
788/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
789 nodes that are referenced more than once in GENERIC functions. This is
790 necessary because gimplification (translation into GIMPLE) is performed
 791 by modifying tree nodes in-place, so gimplification of a shared node in a
792 first context could generate an invalid GIMPLE form in a second context.
793
794 This is achieved with a simple mark/copy/unmark algorithm that walks the
795 GENERIC representation top-down, marks nodes with TREE_VISITED the first
796 time it encounters them, duplicates them if they already have TREE_VISITED
797 set, and finally removes the TREE_VISITED marks it has set.
798
799 The algorithm works only at the function level, i.e. it generates a GENERIC
800 representation of a function with no nodes shared within the function when
801 passed a GENERIC function (except for nodes that are allowed to be shared).
802
803 At the global level, it is also necessary to unshare tree nodes that are
804 referenced in more than one function, for the same aforementioned reason.
805 This requires some cooperation from the front-end. There are 2 strategies:
806
807 1. Manual unsharing. The front-end needs to call unshare_expr on every
808 expression that might end up being shared across functions.
809
810 2. Deep unsharing. This is an extension of regular unsharing. Instead
811 of calling unshare_expr on expressions that might be shared across
812 functions, the front-end pre-marks them with TREE_VISITED. This will
813 ensure that they are unshared on the first reference within functions
814 when the regular unsharing algorithm runs. The counterpart is that
815 this algorithm must look deeper than for manual unsharing, which is
816 specified by LANG_HOOKS_DEEP_UNSHARING.
817
818 If there are only few specific cases of node sharing across functions, it is
819 probably easier for a front-end to unshare the expressions manually. On the
820 contrary, if the expressions generated at the global level are as widespread
821 as expressions generated within functions, deep unsharing is very likely the
822 way to go. */
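
/* Schematically: if the same PLUS_EXPR node is reachable twice from a
   function body, the first walk over it only sets TREE_VISITED; when the
   walk reaches it through the second reference, the mark is found and
   that reference is replaced with a fresh copy by mostly_copy_tree_r, so
   each occurrence can later be gimplified in place independently.  */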
823
824/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
7e3aae05 825 These nodes model computations that must be done once. If we were to
826 unshare something like SAVE_EXPR(i++), the gimplification process would
827 create wrong code. However, if DATA is non-null, it must hold a pointer
828 set that is used to unshare the subtrees of these nodes. */
4ee9c684 829
830static tree
831mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
832{
f96dd706 833 tree t = *tp;
834 enum tree_code code = TREE_CODE (t);
835
e660f7ee 836 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
837 copy their subtrees if we can make sure to do it only once. */
838 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
f96dd706 839 {
431205b7 840 if (data && !((hash_set<tree> *)data)->add (t))
f96dd706 841 ;
842 else
843 *walk_subtrees = 0;
844 }
845
846 /* Stop at types, decls, constants like copy_tree_r. */
847 else if (TREE_CODE_CLASS (code) == tcc_type
848 || TREE_CODE_CLASS (code) == tcc_declaration
91b9d66d 849 || TREE_CODE_CLASS (code) == tcc_constant)
4ee9c684 850 *walk_subtrees = 0;
f96dd706 851
852 /* Cope with the statement expression extension. */
853 else if (code == STATEMENT_LIST)
854 ;
855
856 /* Leave the bulk of the work to copy_tree_r itself. */
4ee9c684 857 else
e660f7ee 858 copy_tree_r (tp, walk_subtrees, NULL);
4ee9c684 859
860 return NULL_TREE;
861}
862
7e3aae05 863/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
864 If *TP has been visited already, then *TP is deeply copied by calling
865 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
4ee9c684 866
867static tree
f96dd706 868copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
4ee9c684 869{
fa44b632 870 tree t = *tp;
871 enum tree_code code = TREE_CODE (t);
872
6374121b 873 /* Skip types, decls, and constants. But we do want to look at their
874 types and the bounds of types. Mark them as visited so we properly
875 unmark their subtrees on the unmark pass. If we've already seen them,
876 don't look down further. */
ce45a448 877 if (TREE_CODE_CLASS (code) == tcc_type
878 || TREE_CODE_CLASS (code) == tcc_declaration
879 || TREE_CODE_CLASS (code) == tcc_constant)
6374121b 880 {
881 if (TREE_VISITED (t))
882 *walk_subtrees = 0;
883 else
884 TREE_VISITED (t) = 1;
885 }
fa44b632 886
4ee9c684 887 /* If this node has been visited already, unshare it and don't look
888 any deeper. */
fa44b632 889 else if (TREE_VISITED (t))
4ee9c684 890 {
f96dd706 891 walk_tree (tp, mostly_copy_tree_r, data, NULL);
4ee9c684 892 *walk_subtrees = 0;
893 }
fa44b632 894
f96dd706 895 /* Otherwise, mark the node as visited and keep looking. */
4ee9c684 896 else
2799a2b7 897 TREE_VISITED (t) = 1;
fa44b632 898
4ee9c684 899 return NULL_TREE;
900}
901
7e3aae05 902/* Unshare most of the shared trees rooted at *TP. DATA is passed to the
903 copy_if_shared_r callback unmodified. */
4ee9c684 904
f96dd706 905static inline void
7e3aae05 906copy_if_shared (tree *tp, void *data)
f96dd706 907{
7e3aae05 908 walk_tree (tp, copy_if_shared_r, data, NULL);
4ee9c684 909}
910
7e3aae05 911/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
912 any nested functions. */
6374121b 913
914static void
7e3aae05 915unshare_body (tree fndecl)
6374121b 916{
415d1b9a 917 struct cgraph_node *cgn = cgraph_node::get (fndecl);
7e3aae05 918 /* If the language requires deep unsharing, we need a pointer set to make
919 sure we don't repeatedly unshare subtrees of unshareable nodes. */
431205b7 920 hash_set<tree> *visited
921 = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
6374121b 922
7e3aae05 923 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
924 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
925 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
926
431205b7 927 delete visited;
f96dd706 928
7e3aae05 929 if (cgn)
60f65a0a 930 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
02774f2d 931 unshare_body (cgn->decl);
6374121b 932}
933
f96dd706 934/* Callback for walk_tree to unmark the visited trees rooted at *TP.
935 Subtrees are walked until the first unvisited node is encountered. */
936
937static tree
938unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
939{
940 tree t = *tp;
941
942 /* If this node has been visited, unmark it and keep looking. */
943 if (TREE_VISITED (t))
944 TREE_VISITED (t) = 0;
945
946 /* Otherwise, don't look any deeper. */
947 else
948 *walk_subtrees = 0;
949
950 return NULL_TREE;
951}
952
953/* Unmark the visited trees rooted at *TP. */
954
955static inline void
956unmark_visited (tree *tp)
957{
958 walk_tree (tp, unmark_visited_r, NULL, NULL);
959}
960
6374121b 961/* Likewise, but mark all trees as not visited. */
962
963static void
7e3aae05 964unvisit_body (tree fndecl)
6374121b 965{
415d1b9a 966 struct cgraph_node *cgn = cgraph_node::get (fndecl);
6374121b 967
7e3aae05 968 unmark_visited (&DECL_SAVED_TREE (fndecl));
969 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
970 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
f96dd706 971
7e3aae05 972 if (cgn)
60f65a0a 973 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
02774f2d 974 unvisit_body (cgn->decl);
6374121b 975}
976
4ee9c684 977/* Unconditionally make an unshared copy of EXPR. This is used when using
978 stored expressions which span multiple functions, such as BINFO_VTABLE,
979 as the normal unsharing process can't tell that they're shared. */
980
981tree
982unshare_expr (tree expr)
983{
984 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
985 return expr;
986}
827e392b 987
988/* Worker for unshare_expr_without_location. */
989
990static tree
991prune_expr_location (tree *tp, int *walk_subtrees, void *)
992{
993 if (EXPR_P (*tp))
994 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
995 else
996 *walk_subtrees = 0;
997 return NULL_TREE;
998}
999
1000/* Similar to unshare_expr but also prune all expression locations
1001 from EXPR. */
1002
1003tree
1004unshare_expr_without_location (tree expr)
1005{
1006 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1007 if (EXPR_P (expr))
1008 walk_tree (&expr, prune_expr_location, NULL, NULL);
1009 return expr;
1010}
90567983 1011
1012/* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
 1013 one, OR_ELSE otherwise. The location of a STATEMENT_LIST
1014 comprising at least one DEBUG_BEGIN_STMT followed by exactly one
1015 EXPR is the location of the EXPR. */
1016
1017static location_t
1018rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
1019{
1020 if (!expr)
1021 return or_else;
1022
1023 if (EXPR_HAS_LOCATION (expr))
1024 return EXPR_LOCATION (expr);
1025
1026 if (TREE_CODE (expr) != STATEMENT_LIST)
1027 return or_else;
1028
1029 tree_stmt_iterator i = tsi_start (expr);
1030
1031 bool found = false;
1032 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
1033 {
1034 found = true;
1035 tsi_next (&i);
1036 }
1037
1038 if (!found || !tsi_one_before_end_p (i))
1039 return or_else;
1040
1041 return rexpr_location (tsi_stmt (i), or_else);
1042}
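
/* For example, with statement frontiers enabled a front end may hand us

       STATEMENT_LIST { DEBUG_BEGIN_STMT; x = y; }

   and rexpr_location of that list is the location of the "x = y"
   expression (looked up recursively if it is itself a list).  */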
1043
1044/* Return TRUE iff EXPR (maybe recursively) has a location; see
1045 rexpr_location for the potential recursion. */
1046
1047static inline bool
1048rexpr_has_location (tree expr)
1049{
1050 return rexpr_location (expr) != UNKNOWN_LOCATION;
1051}
1052
4ee9c684 1053\f
1054/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
1055 contain statements and have a value. Assign its value to a temporary
57859735 1056 and give it void_type_node. Return the temporary, or NULL_TREE if
4ee9c684 1057 WRAPPER was already void. */
1058
1059tree
2363ef00 1060voidify_wrapper_expr (tree wrapper, tree temp)
4ee9c684 1061{
09607d08 1062 tree type = TREE_TYPE (wrapper);
1063 if (type && !VOID_TYPE_P (type))
4ee9c684 1064 {
c3d09d4d 1065 tree *p;
4ee9c684 1066
c3d09d4d 1067 /* Set p to point to the body of the wrapper. Loop until we find
1068 something that isn't a wrapper. */
1069 for (p = &wrapper; p && *p; )
7c2f0500 1070 {
c3d09d4d 1071 switch (TREE_CODE (*p))
4ee9c684 1072 {
c3d09d4d 1073 case BIND_EXPR:
1074 TREE_SIDE_EFFECTS (*p) = 1;
1075 TREE_TYPE (*p) = void_type_node;
1076 /* For a BIND_EXPR, the body is operand 1. */
1077 p = &BIND_EXPR_BODY (*p);
1078 break;
1079
1080 case CLEANUP_POINT_EXPR:
1081 case TRY_FINALLY_EXPR:
1082 case TRY_CATCH_EXPR:
4ee9c684 1083 TREE_SIDE_EFFECTS (*p) = 1;
1084 TREE_TYPE (*p) = void_type_node;
c3d09d4d 1085 p = &TREE_OPERAND (*p, 0);
1086 break;
1087
1088 case STATEMENT_LIST:
1089 {
1090 tree_stmt_iterator i = tsi_last (*p);
1091 TREE_SIDE_EFFECTS (*p) = 1;
1092 TREE_TYPE (*p) = void_type_node;
1093 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1094 }
1095 break;
1096
1097 case COMPOUND_EXPR:
57859735 1098 /* Advance to the last statement. Set all container types to
1099 void. */
c3d09d4d 1100 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1101 {
1102 TREE_SIDE_EFFECTS (*p) = 1;
1103 TREE_TYPE (*p) = void_type_node;
1104 }
1105 break;
1106
4c0315d0 1107 case TRANSACTION_EXPR:
1108 TREE_SIDE_EFFECTS (*p) = 1;
1109 TREE_TYPE (*p) = void_type_node;
1110 p = &TRANSACTION_EXPR_BODY (*p);
1111 break;
1112
c3d09d4d 1113 default:
273d05b3 1114 /* Assume that any tree upon which voidify_wrapper_expr is
1115 directly called is a wrapper, and that its body is op0. */
1116 if (p == &wrapper)
1117 {
1118 TREE_SIDE_EFFECTS (*p) = 1;
1119 TREE_TYPE (*p) = void_type_node;
1120 p = &TREE_OPERAND (*p, 0);
1121 break;
1122 }
c3d09d4d 1123 goto out;
4ee9c684 1124 }
1125 }
1126
c3d09d4d 1127 out:
2363ef00 1128 if (p == NULL || IS_EMPTY_STMT (*p))
c3d09d4d 1129 temp = NULL_TREE;
1130 else if (temp)
4ee9c684 1131 {
c3d09d4d 1132 /* The wrapper is on the RHS of an assignment that we're pushing
1133 down. */
1134 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1135 || TREE_CODE (temp) == MODIFY_EXPR);
75a70cf9 1136 TREE_OPERAND (temp, 1) = *p;
c3d09d4d 1137 *p = temp;
4ee9c684 1138 }
1139 else
1140 {
c3d09d4d 1141 temp = create_tmp_var (type, "retval");
1142 *p = build2 (INIT_EXPR, type, temp, *p);
4ee9c684 1143 }
1144
4ee9c684 1145 return temp;
1146 }
1147
1148 return NULL_TREE;
1149}
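
/* As a rough example, voidifying the statement-expression wrapper in

       x = ({ int i = f (); i + 1; });

   turns the final value-producing expression into an assignment to a
   fresh temporary, retval = i + 1, gives the wrapper void type, and
   returns that "retval" so the caller can use it in place of the
   wrapper's value.  */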
1150
1151/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
365db11e 1152 a temporary through which they communicate. */
4ee9c684 1153
1154static void
1a91d914 1155build_stack_save_restore (gcall **save, gcall **restore)
4ee9c684 1156{
75a70cf9 1157 tree tmp_var;
4ee9c684 1158
b9a16870 1159 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
4ee9c684 1160 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
75a70cf9 1161 gimple_call_set_lhs (*save, tmp_var);
4ee9c684 1162
57859735 1163 *restore
b9a16870 1164 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
57859735 1165 1, tmp_var);
4ee9c684 1166}
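
/* In the eventual GIMPLE the two calls built above come out roughly as

       saved_stack.1 = __builtin_stack_save ();
       ...
       __builtin_stack_restore (saved_stack.1);

   with the restore typically placed in the cleanup part of a
   GIMPLE_TRY_FINALLY by gimplify_bind_expr below.  */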
1167
629b6abc 1168/* Generate an IFN_ASAN_MARK call that poisons the shadow memory of DECL. */
1169
1170static tree
1171build_asan_poison_call_expr (tree decl)
1172{
1173 /* Do not poison variables that have size equal to zero. */
1174 tree unit_size = DECL_SIZE_UNIT (decl);
1175 if (zerop (unit_size))
1176 return NULL_TREE;
1177
1178 tree base = build_fold_addr_expr (decl);
1179
1180 return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
1181 void_type_node, 3,
1182 build_int_cst (integer_type_node,
a30589d5 1183 ASAN_MARK_POISON),
629b6abc 1184 base, unit_size);
1185}
1186
 1187/* Generate an IFN_ASAN_MARK call that poisons or unpoisons, depending on
 1188 the POISON flag, the shadow memory of variable DECL. The call is
 1189 inserted at the position identified by iterator IT; the BEFORE flag
 1190 selects whether it is placed before or after that position. */
1191
1192static void
1193asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
1194 bool before)
1195{
629b6abc 1196 tree unit_size = DECL_SIZE_UNIT (decl);
1197 tree base = build_fold_addr_expr (decl);
1198
1199 /* Do not poison variables that have size equal to zero. */
1200 if (zerop (unit_size))
1201 return;
1202
1203 /* It's necessary to have all stack variables aligned to ASAN granularity
1204 bytes. */
1205 if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
1206 SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);
1207
a30589d5 1208 HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;
629b6abc 1209
1210 gimple *g
1211 = gimple_build_call_internal (IFN_ASAN_MARK, 3,
1212 build_int_cst (integer_type_node, flags),
1213 base, unit_size);
1214
1215 if (before)
1216 gsi_insert_before (it, g, GSI_NEW_STMT);
1217 else
1218 gsi_insert_after (it, g, GSI_NEW_STMT);
1219}
1220
 1221/* Generate an IFN_ASAN_MARK internal call that, depending on the POISON
 1222 flag, either poisons or unpoisons DECL. The created statement is
 1223 appended to the SEQ_P gimple sequence. */
1224
1225static void
1226asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
1227{
1228 gimple_stmt_iterator it = gsi_last (*seq_p);
1229 bool before = false;
1230
1231 if (gsi_end_p (it))
1232 before = true;
1233
1234 asan_poison_variable (decl, poison, &it, before);
1235}
1236
1237/* Sort pair of VAR_DECLs A and B by DECL_UID. */
1238
1239static int
1240sort_by_decl_uid (const void *a, const void *b)
1241{
1242 const tree *t1 = (const tree *)a;
1243 const tree *t2 = (const tree *)b;
1244
1245 int uid1 = DECL_UID (*t1);
1246 int uid2 = DECL_UID (*t2);
1247
1248 if (uid1 < uid2)
1249 return -1;
1250 else if (uid1 > uid2)
1251 return 1;
1252 else
1253 return 0;
1254}
1255
 1256/* Generate an IFN_ASAN_MARK internal call for each variable in VARIABLES,
 1257 poisoning or unpoisoning it depending on the POISON flag. The created
 1258 statements are appended to the SEQ_P gimple sequence. */
1259
1260static void
1261asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
1262{
1263 unsigned c = variables->elements ();
1264 if (c == 0)
1265 return;
1266
1267 auto_vec<tree> sorted_variables (c);
1268
1269 for (hash_set<tree>::iterator it = variables->begin ();
1270 it != variables->end (); ++it)
1271 sorted_variables.safe_push (*it);
1272
1273 sorted_variables.qsort (sort_by_decl_uid);
1274
ba39c1d4 1275 unsigned i;
1276 tree var;
1277 FOR_EACH_VEC_ELT (sorted_variables, i, var)
1278 {
1279 asan_poison_variable (var, poison, seq_p);
1280
 1281 /* Add the use_after_scope_memory attribute to the variable in order
 1282 to prevent it from being rewritten into SSA. */
1283 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
1284 DECL_ATTRIBUTES (var)))
1285 DECL_ATTRIBUTES (var)
1286 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
1287 integer_one_node,
1288 DECL_ATTRIBUTES (var));
1289 }
629b6abc 1290}
1291
4ee9c684 1292/* Gimplify a BIND_EXPR. Just voidify and recurse. */
1293
1294static enum gimplify_status
75a70cf9 1295gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
4ee9c684 1296{
1297 tree bind_expr = *expr_p;
6ad0a0bc 1298 bool old_keep_stack = gimplify_ctxp->keep_stack;
4ee9c684 1299 bool old_save_stack = gimplify_ctxp->save_stack;
1300 tree t;
1a91d914 1301 gbind *bind_stmt;
3c25489e 1302 gimple_seq body, cleanup;
1a91d914 1303 gcall *stack_save;
60d578d4 1304 location_t start_locus = 0, end_locus = 0;
2fc5e987 1305 tree ret_clauses = NULL;
4ee9c684 1306
c3d09d4d 1307 tree temp = voidify_wrapper_expr (bind_expr, NULL);
2363ef00 1308
4ee9c684 1309 /* Mark variables seen in this bind expr. */
1767a056 1310 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
50c96bdc 1311 {
53e9c5c4 1312 if (VAR_P (t))
97492514 1313 {
1314 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1315
1316 /* Mark variable as local. */
43895be5 1317 if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
97492514 1318 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1319 || splay_tree_lookup (ctx->variables,
1320 (splay_tree_key) t) == NULL))
6d5a8d89 1321 {
1322 if (ctx->region_type == ORT_SIMD
1323 && TREE_ADDRESSABLE (t)
1324 && !TREE_STATIC (t))
1325 omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
1326 else
1327 omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
1328 }
97492514 1329
1330 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
e15deb4b 1331
1332 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1333 cfun->has_local_explicit_reg_vars = true;
97492514 1334 }
50c96bdc 1335
1336 /* Preliminarily mark non-addressed complex variables as eligible
1337 for promotion to gimple registers. We'll transform their uses
6b411909 1338 as we find them. */
1339 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1340 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
50c96bdc 1341 && !TREE_THIS_VOLATILE (t)
53e9c5c4 1342 && (VAR_P (t) && !DECL_HARD_REGISTER (t))
50c96bdc 1343 && !needs_to_live_in_memory (t))
8ea8de24 1344 DECL_GIMPLE_REG_P (t) = 1;
50c96bdc 1345 }
4ee9c684 1346
1a91d914 1347 bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
6ad0a0bc 1348 BIND_EXPR_BLOCK (bind_expr));
1a91d914 1349 gimple_push_bind_expr (bind_stmt);
75a70cf9 1350
6ad0a0bc 1351 gimplify_ctxp->keep_stack = false;
4ee9c684 1352 gimplify_ctxp->save_stack = false;
1353
75a70cf9 1354 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1355 body = NULL;
1356 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1a91d914 1357 gimple_bind_set_body (bind_stmt, body);
4ee9c684 1358
60d578d4 1359 /* Source location wise, the cleanup code (stack_restore and clobbers)
1360 belongs to the end of the block, so propagate what we have. The
 1361 stack_save operation belongs to the beginning of the block, which we can
1362 infer from the bind_expr directly if the block has no explicit
1363 assignment. */
1364 if (BIND_EXPR_BLOCK (bind_expr))
1365 {
1366 end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1367 start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1368 }
1369 if (start_locus == 0)
1370 start_locus = EXPR_LOCATION (bind_expr);
1371
3c25489e 1372 cleanup = NULL;
1373 stack_save = NULL;
6ad0a0bc 1374
1375 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1376 the stack space allocated to the VLAs. */
1377 if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
4ee9c684 1378 {
1a91d914 1379 gcall *stack_restore;
4ee9c684 1380
1381 /* Save stack on entry and restore it on exit. Add a try_finally
b1c06ff9 1382 block to achieve this. */
4ee9c684 1383 build_stack_save_restore (&stack_save, &stack_restore);
1384
60d578d4 1385 gimple_set_location (stack_save, start_locus);
1386 gimple_set_location (stack_restore, end_locus);
1387
75a70cf9 1388 gimplify_seq_add_stmt (&cleanup, stack_restore);
3c25489e 1389 }
1390
1391 /* Add clobbers for all variables that go out of scope. */
1392 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1393 {
53e9c5c4 1394 if (VAR_P (t)
3c25489e 1395 && !is_global_var (t)
5405a917 1396 && DECL_CONTEXT (t) == current_function_decl)
3c25489e 1397 {
5405a917 1398 if (!DECL_HARD_REGISTER (t)
1399 && !TREE_THIS_VOLATILE (t)
1400 && !DECL_HAS_VALUE_EXPR_P (t)
1401 /* Only care for variables that have to be in memory. Others
1402 will be rewritten into SSA names, hence moved to the
1403 top-level. */
1404 && !is_gimple_reg (t)
1405 && flag_stack_reuse != SR_NONE)
1406 {
ebf0a6c6 1407 tree clobber = build_clobber (TREE_TYPE (t));
5405a917 1408 gimple *clobber_stmt;
5405a917 1409 clobber_stmt = gimple_build_assign (t, clobber);
1410 gimple_set_location (clobber_stmt, end_locus);
1411 gimplify_seq_add_stmt (&cleanup, clobber_stmt);
1412 }
2fc5e987 1413
1414 if (flag_openacc && oacc_declare_returns != NULL)
1415 {
1416 tree *c = oacc_declare_returns->get (t);
1417 if (c != NULL)
1418 {
1419 if (ret_clauses)
1420 OMP_CLAUSE_CHAIN (*c) = ret_clauses;
1421
1422 ret_clauses = *c;
1423
1424 oacc_declare_returns->remove (t);
1425
9a78b979 1426 if (oacc_declare_returns->is_empty ())
2fc5e987 1427 {
1428 delete oacc_declare_returns;
1429 oacc_declare_returns = NULL;
1430 }
1431 }
1432 }
3c25489e 1433 }
629b6abc 1434
1435 if (asan_poisoned_variables != NULL
1436 && asan_poisoned_variables->contains (t))
1437 {
1438 asan_poisoned_variables->remove (t);
1439 asan_poison_variable (t, true, &cleanup);
1440 }
1441
1442 if (gimplify_ctxp->live_switch_vars != NULL
1443 && gimplify_ctxp->live_switch_vars->contains (t))
1444 gimplify_ctxp->live_switch_vars->remove (t);
3c25489e 1445 }
1446
2fc5e987 1447 if (ret_clauses)
1448 {
1449 gomp_target *stmt;
1450 gimple_stmt_iterator si = gsi_start (cleanup);
1451
1452 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
1453 ret_clauses);
1454 gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
1455 }
1456
3c25489e 1457 if (cleanup)
1458 {
1a91d914 1459 gtry *gs;
3c25489e 1460 gimple_seq new_body;
1461
1462 new_body = NULL;
1a91d914 1463 gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
75a70cf9 1464 GIMPLE_TRY_FINALLY);
4ee9c684 1465
3c25489e 1466 if (stack_save)
1467 gimplify_seq_add_stmt (&new_body, stack_save);
75a70cf9 1468 gimplify_seq_add_stmt (&new_body, gs);
1a91d914 1469 gimple_bind_set_body (bind_stmt, new_body);
4ee9c684 1470 }
1471
6ad0a0bc 1472 /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1473 if (!gimplify_ctxp->keep_stack)
1474 gimplify_ctxp->keep_stack = old_keep_stack;
4ee9c684 1475 gimplify_ctxp->save_stack = old_save_stack;
6ad0a0bc 1476
4ee9c684 1477 gimple_pop_bind_expr ();
1478
1a91d914 1479 gimplify_seq_add_stmt (pre_p, bind_stmt);
75a70cf9 1480
4ee9c684 1481 if (temp)
1482 {
1483 *expr_p = temp;
4ee9c684 1484 return GS_OK;
1485 }
75a70cf9 1486
1487 *expr_p = NULL_TREE;
1488 return GS_ALL_DONE;
4ee9c684 1489}
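
/* Putting the pieces together, a GENERIC block such as

       { int n = f (); char buf[n]; use (buf); }

   gimplifies (schematically) into a single GIMPLE_BIND whose body is

       saved_stack.1 = __builtin_stack_save ();
       try
         {
           n = f ();
           ... alloca-style allocation of buf ...
           use (buf);
         }
       finally
         {
           __builtin_stack_restore (saved_stack.1);
         }

   and addressable non-VLA locals of the block would additionally get
   CLOBBER assignments in the cleanup, subject to -fstack-reuse.  */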
1490
c65f167e 1491/* Maybe add an early-return PREDICT statement to the PRE_P sequence. */
1492
1493static void
1494maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
1495{
1496 /* If we are not in a conditional context, add PREDICT statement. */
1497 if (gimple_conditional_context ())
1498 {
1499 gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
1500 NOT_TAKEN);
1501 gimplify_seq_add_stmt (pre_p, predict);
1502 }
1503}
1504
4ee9c684 1505/* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1506 GIMPLE value, it is assigned to a new temporary and the statement is
1507 re-written to return the temporary.
1508
75a70cf9 1509 PRE_P points to the sequence where side effects that must happen before
4ee9c684 1510 STMT should be stored. */
1511
1512static enum gimplify_status
75a70cf9 1513gimplify_return_expr (tree stmt, gimple_seq *pre_p)
4ee9c684 1514{
1a91d914 1515 greturn *ret;
4ee9c684 1516 tree ret_expr = TREE_OPERAND (stmt, 0);
6c6a0f2f 1517 tree result_decl, result;
4ee9c684 1518
75a70cf9 1519 if (ret_expr == error_mark_node)
1520 return GS_ERROR;
1521
1522 if (!ret_expr
04cd502b 1523 || TREE_CODE (ret_expr) == RESULT_DECL)
75a70cf9 1524 {
c65f167e 1525 maybe_add_early_return_predict_stmt (pre_p);
1a91d914 1526 greturn *ret = gimple_build_return (ret_expr);
75a70cf9 1527 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1528 gimplify_seq_add_stmt (pre_p, ret);
1529 return GS_ALL_DONE;
1530 }
4ee9c684 1531
4ee9c684 1532 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
6c6a0f2f 1533 result_decl = NULL_TREE;
4ee9c684 1534 else
1535 {
75a70cf9 1536 result_decl = TREE_OPERAND (ret_expr, 0);
1537
1538 /* See through a return by reference. */
806e4c12 1539 if (TREE_CODE (result_decl) == INDIRECT_REF)
806e4c12 1540 result_decl = TREE_OPERAND (result_decl, 0);
0d59b19d 1541
1542 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1543 || TREE_CODE (ret_expr) == INIT_EXPR)
1544 && TREE_CODE (result_decl) == RESULT_DECL);
4ee9c684 1545 }
1546
6c6a0f2f 1547 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1548 Recall that aggregate_value_p is FALSE for any aggregate type that is
1549 returned in registers. If we're returning values in registers, then
1550 we don't want to extend the lifetime of the RESULT_DECL, particularly
7c2f0500 1551 across another call. In addition, for those aggregates for which
89f18f73 1552 hard_function_value generates a PARALLEL, we'll die during normal
6c6a0f2f 1553 expansion of structure assignments; there's special code in expand_return
1554 to handle this case that does not exist in expand_expr. */
646150d6 1555 if (!result_decl)
1556 result = NULL_TREE;
1557 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1558 {
1559 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1560 {
1561 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1562 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1563 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1564 should be effectively allocated by the caller, i.e. all calls to
1565 this function must be subject to the Return Slot Optimization. */
1566 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1567 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1568 }
1569 result = result_decl;
1570 }
6c6a0f2f 1571 else if (gimplify_ctxp->return_temp)
1572 result = gimplify_ctxp->return_temp;
1573 else
1574 {
f9e245b2 1575 result = create_tmp_reg (TREE_TYPE (result_decl));
751ddc2b 1576
1577 /* ??? With complex control flow (usually involving abnormal edges),
1578 we can wind up warning about an uninitialized value for this. Due
1579 to how this variable is constructed and initialized, this is never
1580 true. Give up and never warn. */
1581 TREE_NO_WARNING (result) = 1;
1582
6c6a0f2f 1583 gimplify_ctxp->return_temp = result;
1584 }
1585
75a70cf9 1586 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
6c6a0f2f 1587 Then gimplify the whole thing. */
1588 if (result != result_decl)
75a70cf9 1589 TREE_OPERAND (ret_expr, 0) = result;
6d105d50 1590
1591 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
4ee9c684 1592
c65f167e 1593 maybe_add_early_return_predict_stmt (pre_p);
75a70cf9 1594 ret = gimple_build_return (result);
1595 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1596 gimplify_seq_add_stmt (pre_p, ret);
4ee9c684 1597
4ee9c684 1598 return GS_ALL_DONE;
1599}
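
/* For instance, in a function returning int, "return a + b;" becomes

       D.1234 = a + b;
       return D.1234;

   where D.1234 is the temporary cached in gimplify_ctxp->return_temp,
   so every return statement in the function reuses the same slot
   (an early-return PREDICT statement may precede the return).  */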
1600
57859735 1601/* Gimplify a variable-length array DECL. */
1602
550ec1e0 1603static void
75a70cf9 1604gimplify_vla_decl (tree decl, gimple_seq *seq_p)
550ec1e0 1605{
1606 /* This is a variable-sized decl. Simplify its size and mark it
b1c06ff9 1607 for deferred expansion. */
550ec1e0 1608 tree t, addr, ptr_type;
1609
75a70cf9 1610 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1611 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
550ec1e0 1612
b749017f 1613 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1614 if (DECL_HAS_VALUE_EXPR_P (decl))
1615 return;
1616
550ec1e0 1617 /* All occurrences of this decl in final gimplified code will be
1618 replaced by indirection. Setting DECL_VALUE_EXPR does two
1619 things: First, it lets the rest of the gimplifier know what
1620 replacement to use. Second, it lets the debug info know
1621 where to find the value. */
1622 ptr_type = build_pointer_type (TREE_TYPE (decl));
1623 addr = create_tmp_var (ptr_type, get_name (decl));
1624 DECL_IGNORED_P (addr) = 0;
1625 t = build_fold_indirect_ref (addr);
8d683dc1 1626 TREE_THIS_NOTRAP (t) = 1;
550ec1e0 1627 SET_DECL_VALUE_EXPR (decl, t);
1628 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1629
2b34677f 1630 t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
1631 max_int_size_in_bytes (TREE_TYPE (decl)));
990495a7 1632 /* The call has been built for a variable-sized object. */
a882d754 1633 CALL_ALLOCA_FOR_VAR_P (t) = 1;
550ec1e0 1634 t = fold_convert (ptr_type, t);
75a70cf9 1635 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
550ec1e0 1636
75a70cf9 1637 gimplify_and_add (t, seq_p);
550ec1e0 1638}
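
/* For example, for "char buf[n];" this produces, roughly,

       char *buf.1;
       buf.1 = __builtin_alloca_with_align (n, align);
                 (or the ..._with_align_and_max variant)

   with DECL_VALUE_EXPR (buf) set to *buf.1, so later uses of "buf" are
   rewritten as indirections through the temporary pointer, and
   CALL_ALLOCA_FOR_VAR_P marks the allocation as coming from a VLA
   rather than from an explicit call to alloca.  */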
1639
a8783bee 1640/* A helper function to be called via walk_tree. Mark all labels under *TP
1641 as being forced. To be called for DECL_INITIAL of static variables. */
1642
1643static tree
1644force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1645{
1646 if (TYPE_P (*tp))
1647 *walk_subtrees = 0;
1648 if (TREE_CODE (*tp) == LABEL_DECL)
37af779a 1649 {
1650 FORCED_LABEL (*tp) = 1;
1651 cfun->has_forced_label_in_static = 1;
1652 }
a8783bee 1653
1654 return NULL_TREE;
1655}
1656
57859735 1657/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
7dd37241 1658 and initialization explicit. */
1659
1660static enum gimplify_status
75a70cf9 1661gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
7dd37241 1662{
1663 tree stmt = *stmt_p;
1664 tree decl = DECL_EXPR_DECL (stmt);
1665
1666 *stmt_p = NULL_TREE;
1667
1668 if (TREE_TYPE (decl) == error_mark_node)
1669 return GS_ERROR;
1670
017775ce 1671 if ((TREE_CODE (decl) == TYPE_DECL
53e9c5c4 1672 || VAR_P (decl))
017775ce 1673 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
00e3f095 1674 {
1675 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1676 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1677 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
1678 }
7dd37241 1679
72e1426b 1680 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1681 in case its size expressions contain problematic nodes like CALL_EXPR. */
1682 if (TREE_CODE (decl) == TYPE_DECL
1683 && DECL_ORIGINAL_TYPE (decl)
1684 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
00e3f095 1685 {
1686 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1687 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
1688 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
1689 }
72e1426b 1690
53e9c5c4 1691 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
7dd37241 1692 {
1693 tree init = DECL_INITIAL (decl);
629b6abc 1694 bool is_vla = false;
7dd37241 1695
4852b829 1696 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1697 || (!TREE_STATIC (decl)
1698 && flag_stack_check == GENERIC_STACK_CHECK
1699 && compare_tree_int (DECL_SIZE_UNIT (decl),
1700 STACK_CHECK_MAX_VAR_SIZE) > 0))
629b6abc 1701 {
1702 gimplify_vla_decl (decl, seq_p);
1703 is_vla = true;
1704 }
1705
1e35a093 1706 if (asan_poisoned_variables
629b6abc 1707 && !is_vla
1708 && TREE_ADDRESSABLE (decl)
d71fcb4d 1709 && !TREE_STATIC (decl)
1710 && !DECL_HAS_VALUE_EXPR_P (decl)
b02d9b50 1711 && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
eaf3f6ed 1712 && dbg_cnt (asan_use_after_scope)
1713 && !gimplify_omp_ctxp)
629b6abc 1714 {
1715 asan_poisoned_variables->add (decl);
1716 asan_poison_variable (decl, false, seq_p);
d71fcb4d 1717 if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
629b6abc 1718 gimplify_ctxp->live_switch_vars->add (decl);
1719 }
7dd37241 1720
6ad0b38a 1721 /* Some front ends do not explicitly declare all anonymous
1722 artificial variables. We compensate here by declaring the
1723 variables, though it would be better if the front ends would
1724 explicitly declare them. */
1725 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1726 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1727 gimple_add_tmp_var (decl);
1728
7dd37241 1729 if (init && init != error_mark_node)
1730 {
1731 if (!TREE_STATIC (decl))
1732 {
1733 DECL_INITIAL (decl) = NULL_TREE;
cf6b103e 1734 init = build2 (INIT_EXPR, void_type_node, decl, init);
75a70cf9 1735 gimplify_and_add (init, seq_p);
1736 ggc_free (init);
7dd37241 1737 }
1738 else
1739 /* We must still examine initializers for static variables
1740 as they may contain a label address. */
1741 walk_tree (&init, force_labels_r, NULL, NULL);
1742 }
7dd37241 1743 }
1744
1745 return GS_ALL_DONE;
1746}
1747
4ee9c684 1748/* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
 1749 and replacing the LOOP_EXPR with a goto, but if the loop contains an
1750 EXIT_EXPR, we need to append a label for it to jump to. */
1751
1752static enum gimplify_status
75a70cf9 1753gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
4ee9c684 1754{
1755 tree saved_label = gimplify_ctxp->exit_label;
e60a6f7b 1756 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
4ee9c684 1757
75a70cf9 1758 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
4ee9c684 1759
1760 gimplify_ctxp->exit_label = NULL_TREE;
1761
6d105d50 1762 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
4ee9c684 1763
75a70cf9 1764 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1765
4ee9c684 1766 if (gimplify_ctxp->exit_label)
57859735 1767 gimplify_seq_add_stmt (pre_p,
1768 gimple_build_label (gimplify_ctxp->exit_label));
75a70cf9 1769
1770 gimplify_ctxp->exit_label = saved_label;
1771
1772 *expr_p = NULL;
1773 return GS_ALL_DONE;
1774}
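/* Roughly, LOOP_EXPR <body> is lowered to

       start:
         <body>
         goto start;
       exit:

   where the final label is emitted only if the body contained an
   EXIT_EXPR (sketch; label names are illustrative).  */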
1775
57859735 1776/* Gimplify a statement list onto a sequence. These may be created either
75a70cf9 1777 by an enlightened front-end, or by shortcut_cond_expr. */
1778
1779static enum gimplify_status
1780gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1781{
1782 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1783
1784 tree_stmt_iterator i = tsi_start (*expr_p);
1785
1786 while (!tsi_end_p (i))
4ee9c684 1787 {
75a70cf9 1788 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1789 tsi_delink (&i);
4ee9c684 1790 }
4ee9c684 1791
75a70cf9 1792 if (temp)
1793 {
1794 *expr_p = temp;
1795 return GS_OK;
1796 }
4ee9c684 1797
1798 return GS_ALL_DONE;
1799}
da41aa8e 1800
1a54d3be 1801/* Callback for walk_gimple_seq. */
1802
1803static tree
1804warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
1805 struct walk_stmt_info *wi)
1806{
1807 gimple *stmt = gsi_stmt (*gsi_p);
1808
1809 *handled_ops_p = true;
1810 switch (gimple_code (stmt))
1811 {
1812 case GIMPLE_TRY:
1813 /* A compiler-generated cleanup or a user-written try block.
1814 If it's empty, don't dive into it--that would result in
1815 worse location info. */
1816 if (gimple_try_eval (stmt) == NULL)
1817 {
1818 wi->info = stmt;
1819 return integer_zero_node;
1820 }
1821 /* Fall through. */
1822 case GIMPLE_BIND:
1823 case GIMPLE_CATCH:
1824 case GIMPLE_EH_FILTER:
1825 case GIMPLE_TRANSACTION:
1826 /* Walk the sub-statements. */
1827 *handled_ops_p = false;
1828 break;
90567983 1829
1830 case GIMPLE_DEBUG:
1831 /* Ignore these. We may generate them before declarations that
1832 are never executed. If there's something to warn about,
1833 there will be non-debug stmts too, and we'll catch those. */
1834 break;
1835
629b6abc 1836 case GIMPLE_CALL:
1837 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1838 {
1839 *handled_ops_p = false;
1840 break;
1841 }
1842 /* Fall through. */
1a54d3be 1843 default:
1844 /* Save the first "real" statement (not a decl/lexical scope/...). */
1845 wi->info = stmt;
1846 return integer_zero_node;
1847 }
1848 return NULL_TREE;
1849}
1850
 1851/* Possibly warn about unreachable statements between the switch's controlling
1852 expression and the first case. SEQ is the body of a switch expression. */
1853
1854static void
1855maybe_warn_switch_unreachable (gimple_seq seq)
1856{
1857 if (!warn_switch_unreachable
1858 /* This warning doesn't play well with Fortran when optimizations
1859 are on. */
1860 || lang_GNU_Fortran ()
1861 || seq == NULL)
1862 return;
1863
1864 struct walk_stmt_info wi;
1865 memset (&wi, 0, sizeof (wi));
1866 walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1867 gimple *stmt = (gimple *) wi.info;
1868
1869 if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1870 {
1871 if (gimple_code (stmt) == GIMPLE_GOTO
1872 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1873 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1874 /* Don't warn for compiler-generated gotos. These occur
1875 in Duff's devices, for example. */;
1876 else
1877 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1878 "statement will never be executed");
1879 }
1880}
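/* An illustrative case diagnosed by -Wswitch-unreachable:

       switch (i)
         {
           int j = f ();   <-- never executed
         case 0:
           ...
         }

   The initialization of "j" is skipped because control transfers
   directly to a case label (example only).  */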
1881
3c77f69c 1882
1883/* A label entry that pairs label and a location. */
1884struct label_entry
1885{
1886 tree label;
1887 location_t loc;
1888};
1889
1890/* Find LABEL in vector of label entries VEC. */
1891
1892static struct label_entry *
1893find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
1894{
1895 unsigned int i;
1896 struct label_entry *l;
1897
1898 FOR_EACH_VEC_ELT (*vec, i, l)
1899 if (l->label == label)
1900 return l;
1901 return NULL;
1902}
1903
1904/* Return true if LABEL, a LABEL_DECL, represents a case label
1905 in a vector of labels CASES. */
1906
1907static bool
1908case_label_p (const vec<tree> *cases, tree label)
1909{
1910 unsigned int i;
1911 tree l;
1912
1913 FOR_EACH_VEC_ELT (*cases, i, l)
1914 if (CASE_LABEL (l) == label)
1915 return true;
1916 return false;
1917}
1918
bce107d7 1919/* Find the last nondebug statement in a scope STMT. */
3c77f69c 1920
1921static gimple *
1922last_stmt_in_scope (gimple *stmt)
1923{
1924 if (!stmt)
1925 return NULL;
1926
1927 switch (gimple_code (stmt))
1928 {
1929 case GIMPLE_BIND:
1930 {
1931 gbind *bind = as_a <gbind *> (stmt);
bce107d7 1932 stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
3c77f69c 1933 return last_stmt_in_scope (stmt);
1934 }
1935
1936 case GIMPLE_TRY:
1937 {
1938 gtry *try_stmt = as_a <gtry *> (stmt);
bce107d7 1939 stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
3c77f69c 1940 gimple *last_eval = last_stmt_in_scope (stmt);
1941 if (gimple_stmt_may_fallthru (last_eval)
95c0ad19 1942 && (last_eval == NULL
1943 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
3c77f69c 1944 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
1945 {
bce107d7 1946 stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
3c77f69c 1947 return last_stmt_in_scope (stmt);
1948 }
1949 else
1950 return last_eval;
1951 }
1952
bce107d7 1953 case GIMPLE_DEBUG:
1954 gcc_unreachable ();
1955
3c77f69c 1956 default:
1957 return stmt;
1958 }
1959}
1960
1961/* Collect interesting labels in LABELS and return the statement preceding
5381ec5e 1962   another case label, or a user-defined label.  Store at *PREVLOC a
 1963   location useful for warnings (usually the location of the returned
1964 statement or of its surrounding scope). */
3c77f69c 1965
1966static gimple *
1967collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
d6246b9a 1968 auto_vec <struct label_entry> *labels,
1969 location_t *prevloc)
3c77f69c 1970{
1971 gimple *prev = NULL;
1972
d6246b9a 1973 *prevloc = UNKNOWN_LOCATION;
3c77f69c 1974 do
1975 {
17cf92d6 1976 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
1977 {
1978 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
1979 which starts on a GIMPLE_SWITCH and ends with a break label.
1980 Handle that as a single statement that can fall through. */
1981 gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
1982 gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
1983 gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
1984 if (last
1985 && gimple_code (first) == GIMPLE_SWITCH
1986 && gimple_code (last) == GIMPLE_LABEL)
1987 {
1988 tree label = gimple_label_label (as_a <glabel *> (last));
1989 if (SWITCH_BREAK_LABEL_P (label))
1990 {
1991 prev = bind;
1992 gsi_next (gsi_p);
1993 continue;
1994 }
1995 }
1996 }
3c77f69c 1997 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
1998 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
1999 {
2000 /* Nested scope. Only look at the last statement of
2001 the innermost scope. */
2002 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
2003 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
2004 if (last)
2005 {
2006 prev = last;
2007 /* It might be a label without a location. Use the
2008 location of the scope then. */
2009 if (!gimple_has_location (prev))
d6246b9a 2010 *prevloc = bind_loc;
3c77f69c 2011 }
2012 gsi_next (gsi_p);
2013 continue;
2014 }
2015
2016 /* Ifs are tricky. */
2017 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
2018 {
2019 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
2020 tree false_lab = gimple_cond_false_label (cond_stmt);
2021 location_t if_loc = gimple_location (cond_stmt);
2022
2023 /* If we have e.g.
2024 if (i > 1) goto <D.2259>; else goto D;
2025 we can't do much with the else-branch. */
2026 if (!DECL_ARTIFICIAL (false_lab))
2027 break;
2028
2029 /* Go on until the false label, then one step back. */
2030 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
2031 {
2032 gimple *stmt = gsi_stmt (*gsi_p);
2033 if (gimple_code (stmt) == GIMPLE_LABEL
2034 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
2035 break;
2036 }
2037
2038 /* Not found? Oops. */
2039 if (gsi_end_p (*gsi_p))
2040 break;
2041
2042 struct label_entry l = { false_lab, if_loc };
2043 labels->safe_push (l);
2044
2045 /* Go to the last statement of the then branch. */
2046 gsi_prev (gsi_p);
2047
2048 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2049 <D.1759>:
2050 <stmt>;
2051 goto <D.1761>;
2052 <D.1760>:
2053 */
2054 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
2055 && !gimple_has_location (gsi_stmt (*gsi_p)))
2056 {
2057 /* Look at the statement before, it might be
2058 attribute fallthrough, in which case don't warn. */
2059 gsi_prev (gsi_p);
2060 bool fallthru_before_dest
2061 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
2062 gsi_next (gsi_p);
2063 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
2064 if (!fallthru_before_dest)
2065 {
2066 struct label_entry l = { goto_dest, if_loc };
2067 labels->safe_push (l);
2068 }
2069 }
2070 /* And move back. */
2071 gsi_next (gsi_p);
2072 }
2073
2074 /* Remember the last statement. Skip labels that are of no interest
2075 to us. */
2076 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2077 {
2078 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
2079 if (find_label_entry (labels, label))
2080 prev = gsi_stmt (*gsi_p);
2081 }
629b6abc 2082 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
2083 ;
bce107d7 2084 else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
3c77f69c 2085 prev = gsi_stmt (*gsi_p);
2086 gsi_next (gsi_p);
2087 }
2088 while (!gsi_end_p (*gsi_p)
2089 /* Stop if we find a case or a user-defined label. */
2090 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2091 || !gimple_has_location (gsi_stmt (*gsi_p))));
2092
d6246b9a 2093 if (prev && gimple_has_location (prev))
2094 *prevloc = gimple_location (prev);
3c77f69c 2095 return prev;
2096}
2097
 2098/* Return true if the switch fallthrough warning should occur.  LABEL is
2099 the label statement that we're falling through to. */
2100
2101static bool
2102should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2103{
2104 gimple_stmt_iterator gsi = *gsi_p;
2105
dc5b5d21 2106 /* Don't warn if the label is marked with a "falls through" comment. */
2107 if (FALLTHROUGH_LABEL_P (label))
2108 return false;
2109
b57fb759 2110 /* Don't warn for non-case labels followed by a statement:
3c77f69c 2111 case 0:
2112 foo ();
2113 label:
2114 bar ();
2115 as these are likely intentional. */
2116 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2117 {
b57fb759 2118 tree l;
2119 while (!gsi_end_p (gsi)
2120 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2121 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2122 && !case_label_p (&gimplify_ctxp->case_labels, l))
bce107d7 2123 gsi_next_nondebug (&gsi);
3c77f69c 2124 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2125 return false;
2126 }
2127
2128 /* Don't warn for terminated branches, i.e. when the subsequent case labels
2129 immediately breaks. */
2130 gsi = *gsi_p;
2131
2132 /* Skip all immediately following labels. */
38ef3642 2133 while (!gsi_end_p (gsi)
2134 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2135 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
bce107d7 2136 gsi_next_nondebug (&gsi);
3c77f69c 2137
2138 /* { ... something; default:; } */
2139 if (gsi_end_p (gsi)
2140 /* { ... something; default: break; } or
2141 { ... something; default: goto L; } */
2142 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2143 /* { ... something; default: return; } */
2144 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2145 return false;
2146
2147 return true;
2148}
2149
2150/* Callback for walk_gimple_seq. */
2151
2152static tree
2153warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2154 struct walk_stmt_info *)
2155{
2156 gimple *stmt = gsi_stmt (*gsi_p);
2157
2158 *handled_ops_p = true;
2159 switch (gimple_code (stmt))
2160 {
2161 case GIMPLE_TRY:
2162 case GIMPLE_BIND:
2163 case GIMPLE_CATCH:
2164 case GIMPLE_EH_FILTER:
2165 case GIMPLE_TRANSACTION:
2166 /* Walk the sub-statements. */
2167 *handled_ops_p = false;
2168 break;
2169
2170 /* Find a sequence of form:
2171
2172 GIMPLE_LABEL
2173 [...]
2174 <may fallthru stmt>
2175 GIMPLE_LABEL
2176
2177 and possibly warn. */
2178 case GIMPLE_LABEL:
2179 {
2180 /* Found a label. Skip all immediately following labels. */
2181 while (!gsi_end_p (*gsi_p)
2182 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
bce107d7 2183 gsi_next_nondebug (gsi_p);
3c77f69c 2184
2185 /* There might be no more statements. */
2186 if (gsi_end_p (*gsi_p))
2187 return integer_zero_node;
2188
2189 /* Vector of labels that fall through. */
2190 auto_vec <struct label_entry> labels;
d6246b9a 2191 location_t prevloc;
2192 gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);
3c77f69c 2193
2194 /* There might be no more statements. */
2195 if (gsi_end_p (*gsi_p))
2196 return integer_zero_node;
2197
2198 gimple *next = gsi_stmt (*gsi_p);
2199 tree label;
2200 /* If what follows is a label, then we may have a fallthrough. */
2201 if (gimple_code (next) == GIMPLE_LABEL
2202 && gimple_has_location (next)
2203 && (label = gimple_label_label (as_a <glabel *> (next)))
3c77f69c 2204 && prev != NULL)
2205 {
2206 struct label_entry *l;
2207 bool warned_p = false;
bc35ef65 2208 auto_diagnostic_group d;
3c77f69c 2209 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2210 /* Quiet. */;
2211 else if (gimple_code (prev) == GIMPLE_LABEL
2212 && (label = gimple_label_label (as_a <glabel *> (prev)))
2213 && (l = find_label_entry (&labels, label)))
12663602 2214 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
3c77f69c 2215 "this statement may fall through");
2216 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2217 /* Try to be clever and don't warn when the statement
2218 can't actually fall through. */
2219 && gimple_stmt_may_fallthru (prev)
d6246b9a 2220 && prevloc != UNKNOWN_LOCATION)
2221 warned_p = warning_at (prevloc,
12663602 2222 OPT_Wimplicit_fallthrough_,
3c77f69c 2223 "this statement may fall through");
2224 if (warned_p)
2225 inform (gimple_location (next), "here");
2226
2227 /* Mark this label as processed so as to prevent multiple
2228 warnings in nested switches. */
2229 FALLTHROUGH_LABEL_P (label) = true;
2230
2231 /* So that next warn_implicit_fallthrough_r will start looking for
2232 a new sequence starting with this label. */
2233 gsi_prev (gsi_p);
2234 }
2235 }
2236 break;
2237 default:
2238 break;
2239 }
2240 return NULL_TREE;
2241}
2242
2243/* Warn when a switch case falls through. */
2244
2245static void
2246maybe_warn_implicit_fallthrough (gimple_seq seq)
2247{
2248 if (!warn_implicit_fallthrough)
2249 return;
2250
2251 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2252 if (!(lang_GNU_C ()
2253 || lang_GNU_CXX ()
2254 || lang_GNU_OBJC ()))
2255 return;
2256
2257 struct walk_stmt_info wi;
2258 memset (&wi, 0, sizeof (wi));
2259 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2260}
2261
2262/* Callback for walk_gimple_seq. */
2263
2264static tree
2265expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
dff6bd38 2266 struct walk_stmt_info *wi)
3c77f69c 2267{
2268 gimple *stmt = gsi_stmt (*gsi_p);
2269
2270 *handled_ops_p = true;
2271 switch (gimple_code (stmt))
2272 {
2273 case GIMPLE_TRY:
2274 case GIMPLE_BIND:
2275 case GIMPLE_CATCH:
2276 case GIMPLE_EH_FILTER:
2277 case GIMPLE_TRANSACTION:
2278 /* Walk the sub-statements. */
2279 *handled_ops_p = false;
2280 break;
2281 case GIMPLE_CALL:
2282 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2283 {
2284 gsi_remove (gsi_p, true);
2285 if (gsi_end_p (*gsi_p))
dff6bd38 2286 {
2287 *static_cast<location_t *>(wi->info) = gimple_location (stmt);
2288 return integer_zero_node;
2289 }
3c77f69c 2290
2291 bool found = false;
2292 location_t loc = gimple_location (stmt);
2293
2294 gimple_stmt_iterator gsi2 = *gsi_p;
2295 stmt = gsi_stmt (gsi2);
2296 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2297 {
2298 /* Go on until the artificial label. */
2299 tree goto_dest = gimple_goto_dest (stmt);
2300 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2301 {
2302 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2303 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2304 == goto_dest)
2305 break;
2306 }
2307
2308 /* Not found? Stop. */
2309 if (gsi_end_p (gsi2))
2310 break;
2311
2312 /* Look one past it. */
2313 gsi_next (&gsi2);
2314 }
2315
2316 /* We're looking for a case label or default label here. */
2317 while (!gsi_end_p (gsi2))
2318 {
2319 stmt = gsi_stmt (gsi2);
116deb8c 2320 if (gimple_code (stmt) == GIMPLE_LABEL)
3c77f69c 2321 {
2322 tree label = gimple_label_label (as_a <glabel *> (stmt));
2323 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2324 {
2325 found = true;
2326 break;
2327 }
2328 }
116deb8c 2329 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
20082e09 2330 ;
bce107d7 2331 else if (!is_gimple_debug (stmt))
2332 /* Anything else is not expected. */
3c77f69c 2333 break;
2334 gsi_next (&gsi2);
2335 }
2336 if (!found)
2337 warning_at (loc, 0, "attribute %<fallthrough%> not preceding "
2338 "a case label or default label");
2339 }
2340 break;
2341 default:
2342 break;
2343 }
2344 return NULL_TREE;
2345}
2346
2347/* Expand all FALLTHROUGH () calls in SEQ. */
2348
2349static void
2350expand_FALLTHROUGH (gimple_seq *seq_p)
2351{
2352 struct walk_stmt_info wi;
dff6bd38 2353 location_t loc;
3c77f69c 2354 memset (&wi, 0, sizeof (wi));
dff6bd38 2355 wi.info = (void *) &loc;
3c77f69c 2356 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
dff6bd38 2357 if (wi.callback_result == integer_zero_node)
2358 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2359 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2360 warning_at (loc, 0, "attribute %<fallthrough%> not preceding "
2361 "a case label or default label");
3c77f69c 2362}
2363
b59e1c90 2364\f
2365/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
4ee9c684 2366 branch to. */
2367
2368static enum gimplify_status
75a70cf9 2369gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
4ee9c684 2370{
2371 tree switch_expr = *expr_p;
75a70cf9 2372 gimple_seq switch_body_seq = NULL;
4ee9c684 2373 enum gimplify_status ret;
bfb10994 2374 tree index_type = TREE_TYPE (switch_expr);
2375 if (index_type == NULL_TREE)
2376 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
4ee9c684 2377
75a70cf9 2378 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2379 fb_rvalue);
2380 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2381 return ret;
4ee9c684 2382
2383 if (SWITCH_BODY (switch_expr))
2384 {
f1f41a6c 2385 vec<tree> labels;
2386 vec<tree> saved_labels;
65dc2516 2387 hash_set<tree> *saved_live_switch_vars = NULL;
75a70cf9 2388 tree default_case = NULL_TREE;
1a91d914 2389 gswitch *switch_stmt;
48e1416a 2390
bfb10994 2391 /* Save old labels, get new ones from body, then restore the old
75a70cf9 2392 labels. Save all the things from the switch body to append after. */
4ee9c684 2393 saved_labels = gimplify_ctxp->case_labels;
f1f41a6c 2394 gimplify_ctxp->case_labels.create (8);
65dc2516 2395
2396 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
904e262b 2397 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
dfbe8090 2398 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2399 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
904e262b 2400 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2401 else
2402 gimplify_ctxp->live_switch_vars = NULL;
65dc2516 2403
3c77f69c 2404 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2405 gimplify_ctxp->in_switch_expr = true;
4ee9c684 2406
75a70cf9 2407 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
1398da0f 2408
3c77f69c 2409 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
1a54d3be 2410 maybe_warn_switch_unreachable (switch_body_seq);
3c77f69c 2411 maybe_warn_implicit_fallthrough (switch_body_seq);
2412 /* Only do this for the outermost GIMPLE_SWITCH. */
2413 if (!gimplify_ctxp->in_switch_expr)
2414 expand_FALLTHROUGH (&switch_body_seq);
1a54d3be 2415
4ee9c684 2416 labels = gimplify_ctxp->case_labels;
2417 gimplify_ctxp->case_labels = saved_labels;
65dc2516 2418
2419 if (gimplify_ctxp->live_switch_vars)
2420 {
9a78b979 2421 gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
65dc2516 2422 delete gimplify_ctxp->live_switch_vars;
2423 }
629b6abc 2424 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
48e1416a 2425
b59e1c90 2426 preprocess_case_label_vec_for_gimple (labels, index_type,
2427 &default_case);
72211fd1 2428
17cf92d6 2429 bool add_bind = false;
75a70cf9 2430 if (!default_case)
4ee9c684 2431 {
1a91d914 2432 glabel *new_default;
4ee9c684 2433
b59e1c90 2434 default_case
2435 = build_case_label (NULL_TREE, NULL_TREE,
2436 create_artificial_label (UNKNOWN_LOCATION));
17cf92d6 2437 if (old_in_switch_expr)
2438 {
2439 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
2440 add_bind = true;
2441 }
b59e1c90 2442 new_default = gimple_build_label (CASE_LABEL (default_case));
2443 gimplify_seq_add_stmt (&switch_body_seq, new_default);
72211fd1 2444 }
17cf92d6 2445 else if (old_in_switch_expr)
2446 {
2447 gimple *last = gimple_seq_last_stmt (switch_body_seq);
2448 if (last && gimple_code (last) == GIMPLE_LABEL)
2449 {
2450 tree label = gimple_label_label (as_a <glabel *> (last));
2451 if (SWITCH_BREAK_LABEL_P (label))
2452 add_bind = true;
2453 }
2454 }
b624a250 2455
1a91d914 2456 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
17cf92d6 2457 default_case, labels);
2458 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
 2459     ends with a GIMPLE_LABEL holding a SWITCH_BREAK_LABEL_P LABEL_DECL,
2460 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2461 so that we can easily find the start and end of the switch
2462 statement. */
2463 if (add_bind)
2464 {
2465 gimple_seq bind_body = NULL;
2466 gimplify_seq_add_stmt (&bind_body, switch_stmt);
2467 gimple_seq_add_seq (&bind_body, switch_body_seq);
2468 gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
2469 gimple_set_location (bind, EXPR_LOCATION (switch_expr));
2470 gimplify_seq_add_stmt (pre_p, bind);
2471 }
2472 else
2473 {
2474 gimplify_seq_add_stmt (pre_p, switch_stmt);
2475 gimplify_seq_add_seq (pre_p, switch_body_seq);
2476 }
f1f41a6c 2477 labels.release ();
4ee9c684 2478 }
0d59b19d 2479 else
bd37ce3e 2480 gcc_unreachable ();
4ee9c684 2481
75a70cf9 2482 return GS_ALL_DONE;
4ee9c684 2483}
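/* For example, a switch without a user-written default such as

       switch (x) { case 1: f (); break; }

   has an artificial default label appended to its body, so the
   GIMPLE_SWITCH built here always has a default edge (sketch; the
   SWITCH_BREAK_LABEL_P handling is described above).  */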
2484
3c77f69c 2485/* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2486
2487static enum gimplify_status
2488gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2489{
2490 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2491 == current_function_decl);
2492
8e31f1f1 2493 tree label = LABEL_EXPR_LABEL (*expr_p);
2494 glabel *label_stmt = gimple_build_label (label);
3c77f69c 2495 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2496 gimplify_seq_add_stmt (pre_p, label_stmt);
2497
8e31f1f1 2498 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2499 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2500 NOT_TAKEN));
2501 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2502 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2503 TAKEN));
2504
3c77f69c 2505 return GS_ALL_DONE;
2506}
2507
57859735 2508/* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
75a70cf9 2509
4ee9c684 2510static enum gimplify_status
75a70cf9 2511gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
4ee9c684 2512{
1e8e9920 2513 struct gimplify_ctx *ctxp;
1a91d914 2514 glabel *label_stmt;
1e8e9920 2515
ca4c3545 2516 /* Invalid programs can play Duff's Device type games with, for example,
1e8e9920 2517 #pragma omp parallel. At least in the C front end, we don't
ca4c3545 2518 detect such invalid branches until after gimplification, in the
2519 diagnose_omp_blocks pass. */
1e8e9920 2520 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
f1f41a6c 2521 if (ctxp->case_labels.exists ())
1e8e9920 2522 break;
0d59b19d 2523
863c62e0 2524 tree label = CASE_LABEL (*expr_p);
2525 label_stmt = gimple_build_label (label);
3c77f69c 2526 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
f1f41a6c 2527 ctxp->case_labels.safe_push (*expr_p);
1a91d914 2528 gimplify_seq_add_stmt (pre_p, label_stmt);
75a70cf9 2529
863c62e0 2530 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2531 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2532 NOT_TAKEN));
2533 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2534 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2535 TAKEN));
2536
4ee9c684 2537 return GS_ALL_DONE;
2538}
2539
4ee9c684 2540/* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2541 if necessary. */
2542
2543tree
2544build_and_jump (tree *label_p)
2545{
2546 if (label_p == NULL)
2547 /* If there's nowhere to jump, just fall through. */
b6431126 2548 return NULL_TREE;
4ee9c684 2549
2550 if (*label_p == NULL_TREE)
2551 {
e60a6f7b 2552 tree label = create_artificial_label (UNKNOWN_LOCATION);
4ee9c684 2553 *label_p = label;
2554 }
2555
2556 return build1 (GOTO_EXPR, void_type_node, *label_p);
2557}
2558
2559/* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2560 This also involves building a label to jump to and communicating it to
2561 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2562
2563static enum gimplify_status
2564gimplify_exit_expr (tree *expr_p)
2565{
2566 tree cond = TREE_OPERAND (*expr_p, 0);
2567 tree expr;
2568
2569 expr = build_and_jump (&gimplify_ctxp->exit_label);
40b19772 2570 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
4ee9c684 2571 *expr_p = expr;
2572
2573 return GS_OK;
2574}
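/* E.g. EXIT_EXPR <cond> becomes, roughly,

       if (cond) goto <exit_label>;

   where <exit_label> is created lazily by build_and_jump and later
   emitted by gimplify_loop_expr (sketch; the label name is
   illustrative).  */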
2575
c0a843e0 2576/* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2577 different from its canonical type, wrap the whole thing inside a
2578 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2579 type.
4ee9c684 2580
c0a843e0 2581 The canonical type of a COMPONENT_REF is the type of the field being
2582 referenced--unless the field is a bit-field which can be read directly
2583 in a smaller mode, in which case the canonical type is the
2584 sign-appropriate type corresponding to that mode. */
4ee9c684 2585
c0a843e0 2586static void
2587canonicalize_component_ref (tree *expr_p)
4ee9c684 2588{
c0a843e0 2589 tree expr = *expr_p;
2590 tree type;
4ee9c684 2591
0d59b19d 2592 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
4ee9c684 2593
c0a843e0 2594 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2595 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2596 else
2597 type = TREE_TYPE (TREE_OPERAND (expr, 1));
4ee9c684 2598
038b4fdb 2599 /* One could argue that all the stuff below is not necessary for
2600 the non-bitfield case and declare it a FE error if type
2601 adjustment would be needed. */
c0a843e0 2602 if (TREE_TYPE (expr) != type)
4ee9c684 2603 {
038b4fdb 2604#ifdef ENABLE_TYPES_CHECKING
c0a843e0 2605 tree old_type = TREE_TYPE (expr);
038b4fdb 2606#endif
2607 int type_quals;
2608
2609 /* We need to preserve qualifiers and propagate them from
2610 operand 0. */
2611 type_quals = TYPE_QUALS (type)
2612 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2613 if (TYPE_QUALS (type) != type_quals)
2614 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
4ee9c684 2615
c0a843e0 2616 /* Set the type of the COMPONENT_REF to the underlying type. */
2617 TREE_TYPE (expr) = type;
4ee9c684 2618
038b4fdb 2619#ifdef ENABLE_TYPES_CHECKING
2620 /* It is now a FE error, if the conversion from the canonical
2621 type to the original expression type is not useless. */
2622 gcc_assert (useless_type_conversion_p (old_type, type));
2623#endif
c0a843e0 2624 }
2625}
4ee9c684 2626
c0a843e0 2627/* If a NOP conversion is changing a pointer to array of foo to a pointer
7c2f0500 2628 to foo, embed that change in the ADDR_EXPR by converting
c0a843e0 2629 T array[U];
2630 (T *)&array
2631 ==>
2632 &array[L]
2633 where L is the lower bound. For simplicity, only do this for constant
d3828421 2634 lower bound.
2635 The constraint is that the type of &array[L] is trivially convertible
2636 to T *. */
4ee9c684 2637
c0a843e0 2638static void
2639canonicalize_addr_expr (tree *expr_p)
2640{
2641 tree expr = *expr_p;
c0a843e0 2642 tree addr_expr = TREE_OPERAND (expr, 0);
d3828421 2643 tree datype, ddatype, pddatype;
4ee9c684 2644
d3828421 2645 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2646 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2647 || TREE_CODE (addr_expr) != ADDR_EXPR)
c0a843e0 2648 return;
4ee9c684 2649
c0a843e0 2650 /* The addr_expr type should be a pointer to an array. */
d3828421 2651 datype = TREE_TYPE (TREE_TYPE (addr_expr));
c0a843e0 2652 if (TREE_CODE (datype) != ARRAY_TYPE)
2653 return;
4ee9c684 2654
d3828421 2655 /* The pointer to element type shall be trivially convertible to
2656 the expression pointer type. */
c0a843e0 2657 ddatype = TREE_TYPE (datype);
d3828421 2658 pddatype = build_pointer_type (ddatype);
4c0d4e21 2659 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2660 pddatype))
c0a843e0 2661 return;
4ee9c684 2662
c0a843e0 2663 /* The lower bound and element sizes must be constant. */
d3828421 2664 if (!TYPE_SIZE_UNIT (ddatype)
2665 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
c0a843e0 2666 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2667 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2668 return;
4ee9c684 2669
c0a843e0 2670 /* All checks succeeded. Build a new node to merge the cast. */
d3828421 2671 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
c0a843e0 2672 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
a2501610 2673 NULL_TREE, NULL_TREE);
d3828421 2674 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
4c0d4e21 2675
2676 /* We can have stripped a required restrict qualifier above. */
2677 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2678 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
c0a843e0 2679}
4ee9c684 2680
c0a843e0 2681/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2682 underneath as appropriate. */
4ee9c684 2683
c0a843e0 2684static enum gimplify_status
2685gimplify_conversion (tree *expr_p)
7c2f0500 2686{
389dd41b 2687 location_t loc = EXPR_LOCATION (*expr_p);
72dd6141 2688 gcc_assert (CONVERT_EXPR_P (*expr_p));
e60a6f7b 2689
d2bca315 2690  /* Strip away all but the outermost conversion.  */
2691 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
2692
2693 /* And remove the outermost conversion if it's useless. */
2694 if (tree_ssa_useless_type_conversion (*expr_p))
2695 *expr_p = TREE_OPERAND (*expr_p, 0);
4ee9c684 2696
c0a843e0 2697 /* If we still have a conversion at the toplevel,
2698 then canonicalize some constructs. */
72dd6141 2699 if (CONVERT_EXPR_P (*expr_p))
c0a843e0 2700 {
2701 tree sub = TREE_OPERAND (*expr_p, 0);
4ee9c684 2702
c0a843e0 2703 /* If a NOP conversion is changing the type of a COMPONENT_REF
2704 expression, then canonicalize its type now in order to expose more
2705 redundant conversions. */
2706 if (TREE_CODE (sub) == COMPONENT_REF)
2707 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
4ee9c684 2708
c0a843e0 2709 /* If a NOP conversion is changing a pointer to array of foo
2710 to a pointer to foo, embed that change in the ADDR_EXPR. */
2711 else if (TREE_CODE (sub) == ADDR_EXPR)
2712 canonicalize_addr_expr (expr_p);
2713 }
4ee9c684 2714
90cf240d 2715 /* If we have a conversion to a non-register type force the
2716 use of a VIEW_CONVERT_EXPR instead. */
39a1041d 2717 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
389dd41b 2718 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
39a1041d 2719 TREE_OPERAND (*expr_p, 0));
90cf240d 2720
c4ff8d50 2721 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2722 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
2723 TREE_SET_CODE (*expr_p, NOP_EXPR);
2724
4ee9c684 2725 return GS_OK;
2726}
2727
57859735 2728/* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
22041d3f 2729 DECL_VALUE_EXPR, and it's worth re-examining things. */
2730
2731static enum gimplify_status
2732gimplify_var_or_parm_decl (tree *expr_p)
2733{
2734 tree decl = *expr_p;
2735
2736 /* ??? If this is a local variable, and it has not been seen in any
2737 outer BIND_EXPR, then it's probably the result of a duplicate
2738 declaration, for which we've already issued an error. It would
2739 be really nice if the front end wouldn't leak these at all.
2740 Currently the only known culprit is C++ destructors, as seen
2741 in g++.old-deja/g++.jason/binding.C. */
53e9c5c4 2742 if (VAR_P (decl)
22041d3f 2743 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
2744 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2745 && decl_function_context (decl) == current_function_decl)
2746 {
852f689e 2747 gcc_assert (seen_error ());
22041d3f 2748 return GS_ERROR;
2749 }
2750
ca4c3545 2751 /* When within an OMP context, notice uses of variables. */
1e8e9920 2752 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
2753 return GS_ALL_DONE;
2754
22041d3f 2755 /* If the decl is an alias for another expression, substitute it now. */
2756 if (DECL_HAS_VALUE_EXPR_P (decl))
2757 {
1bc67c9d 2758 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
22041d3f 2759 return GS_OK;
2760 }
2761
2762 return GS_ALL_DONE;
2763}
2764
fb4256c6 2765/* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
2766
2767static void
bc61cadb 2768recalculate_side_effects (tree t)
2769{
2770 enum tree_code code = TREE_CODE (t);
2771 int len = TREE_OPERAND_LENGTH (t);
2772 int i;
2773
2774 switch (TREE_CODE_CLASS (code))
2775 {
2776 case tcc_expression:
2777 switch (code)
2778 {
2779 case INIT_EXPR:
2780 case MODIFY_EXPR:
2781 case VA_ARG_EXPR:
2782 case PREDECREMENT_EXPR:
2783 case PREINCREMENT_EXPR:
2784 case POSTDECREMENT_EXPR:
2785 case POSTINCREMENT_EXPR:
2786 /* All of these have side-effects, no matter what their
2787 operands are. */
2788 return;
2789
2790 default:
2791 break;
2792 }
2793 /* Fall through. */
2794
2795 case tcc_comparison: /* a comparison expression */
2796 case tcc_unary: /* a unary arithmetic expression */
2797 case tcc_binary: /* a binary arithmetic expression */
2798 case tcc_reference: /* a reference */
2799 case tcc_vl_exp: /* a function call */
2800 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2801 for (i = 0; i < len; ++i)
2802 {
2803 tree op = TREE_OPERAND (t, i);
2804 if (op && TREE_SIDE_EFFECTS (op))
2805 TREE_SIDE_EFFECTS (t) = 1;
2806 }
2807 break;
2808
2809 case tcc_constant:
2810 /* No side-effects. */
2811 return;
2812
2813 default:
2814 gcc_unreachable ();
2815 }
2816}
2817
4ee9c684 2818/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
75a70cf9 2819 node *EXPR_P.
4ee9c684 2820
2821 compound_lval
2822 : min_lval '[' val ']'
2823 | min_lval '.' ID
2824 | compound_lval '[' val ']'
2825 | compound_lval '.' ID
2826
2827 This is not part of the original SIMPLE definition, which separates
2828 array and member references, but it seems reasonable to handle them
2829 together. Also, this way we don't run into problems with union
2830 aliasing; gcc requires that for accesses through a union to alias, the
2831 union reference must be explicit, which was not always the case when we
2832 were splitting up array and member refs.
2833
75a70cf9 2834 PRE_P points to the sequence where side effects that must happen before
4ee9c684 2835 *EXPR_P should be stored.
2836
75a70cf9 2837 POST_P points to the sequence where side effects that must happen after
4ee9c684 2838 *EXPR_P should be stored. */
2839
2840static enum gimplify_status
75a70cf9 2841gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2842 fallback_t fallback)
4ee9c684 2843{
2844 tree *p;
5f52d2e2 2845 enum gimplify_status ret = GS_ALL_DONE, tret;
7b7695eb 2846 int i;
389dd41b 2847 location_t loc = EXPR_LOCATION (*expr_p);
5f52d2e2 2848 tree expr = *expr_p;
4ee9c684 2849
4ee9c684 2850 /* Create a stack of the subexpressions so later we can walk them in
758b4a11 2851 order from inner to outer. */
4997014d 2852 auto_vec<tree, 10> expr_stack;
4ee9c684 2853
1f9b622b 2854 /* We can handle anything that get_inner_reference can deal with. */
1928904f 2855 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2856 {
22041d3f 2857 restart:
1928904f 2858 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2859 if (TREE_CODE (*p) == INDIRECT_REF)
389dd41b 2860 *p = fold_indirect_ref_loc (loc, *p);
22041d3f 2861
2862 if (handled_component_p (*p))
2863 ;
2864 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2865 additional COMPONENT_REFs. */
53e9c5c4 2866 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
22041d3f 2867 && gimplify_var_or_parm_decl (p) == GS_OK)
2868 goto restart;
2869 else
1928904f 2870 break;
48e1416a 2871
f1f41a6c 2872 expr_stack.safe_push (*p);
1928904f 2873 }
4ee9c684 2874
f1f41a6c 2875 gcc_assert (expr_stack.length ());
73b2fde8 2876
2b15d2ba 2877 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2878 walked through and P points to the innermost expression.
4ee9c684 2879
7b7695eb 2880     Java requires that we elaborate nodes in source order.  That
2881 means we must gimplify the inner expression followed by each of
2882 the indices, in order. But we can't gimplify the inner
2883 expression until we deal with any variable bounds, sizes, or
2884 positions in order to deal with PLACEHOLDER_EXPRs.
2885
2886 So we do this in three steps. First we deal with the annotations
2887 for any variables in the components, then we gimplify the base,
2888 then we gimplify any indices, from left to right. */
f1f41a6c 2889 for (i = expr_stack.length () - 1; i >= 0; i--)
4ee9c684 2890 {
f1f41a6c 2891 tree t = expr_stack[i];
6374121b 2892
2893 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4ee9c684 2894 {
6374121b 2895 /* Gimplify the low bound and element type size and put them into
2896 the ARRAY_REF. If these values are set, they have already been
2897 gimplified. */
75a70cf9 2898 if (TREE_OPERAND (t, 2) == NULL_TREE)
6374121b 2899 {
6780737f 2900 tree low = unshare_expr (array_ref_low_bound (t));
2901 if (!is_gimple_min_invariant (low))
6374121b 2902 {
75a70cf9 2903 TREE_OPERAND (t, 2) = low;
2904 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
47f11e84 2905 post_p, is_gimple_reg,
75a70cf9 2906 fb_rvalue);
6374121b 2907 ret = MIN (ret, tret);
2908 }
2909 }
85b059d7 2910 else
2911 {
2912 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2913 is_gimple_reg, fb_rvalue);
2914 ret = MIN (ret, tret);
2915 }
6374121b 2916
85b059d7 2917 if (TREE_OPERAND (t, 3) == NULL_TREE)
6374121b 2918 {
2919 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2920 tree elmt_size = unshare_expr (array_ref_element_size (t));
d37625c0 2921 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
6374121b 2922
2923 /* Divide the element size by the alignment of the element
2924 type (above). */
57859735 2925 elmt_size
2926 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
6374121b 2927
6780737f 2928 if (!is_gimple_min_invariant (elmt_size))
6374121b 2929 {
75a70cf9 2930 TREE_OPERAND (t, 3) = elmt_size;
2931 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
47f11e84 2932 post_p, is_gimple_reg,
75a70cf9 2933 fb_rvalue);
6374121b 2934 ret = MIN (ret, tret);
2935 }
4ee9c684 2936 }
85b059d7 2937 else
2938 {
2939 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
2940 is_gimple_reg, fb_rvalue);
2941 ret = MIN (ret, tret);
2942 }
4ee9c684 2943 }
6374121b 2944 else if (TREE_CODE (t) == COMPONENT_REF)
2945 {
2946 /* Set the field offset into T and gimplify it. */
85b059d7 2947 if (TREE_OPERAND (t, 2) == NULL_TREE)
6374121b 2948 {
2949 tree offset = unshare_expr (component_ref_field_offset (t));
2950 tree field = TREE_OPERAND (t, 1);
2951 tree factor
2952 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2953
2954 /* Divide the offset by its alignment. */
389dd41b 2955 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
6374121b 2956
6780737f 2957 if (!is_gimple_min_invariant (offset))
6374121b 2958 {
75a70cf9 2959 TREE_OPERAND (t, 2) = offset;
2960 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
47f11e84 2961 post_p, is_gimple_reg,
75a70cf9 2962 fb_rvalue);
6374121b 2963 ret = MIN (ret, tret);
2964 }
2965 }
85b059d7 2966 else
2967 {
2968 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2969 is_gimple_reg, fb_rvalue);
2970 ret = MIN (ret, tret);
2971 }
6374121b 2972 }
7b7695eb 2973 }
2974
22041d3f 2975 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2976 so as to match the min_lval predicate. Failure to do so may result
2977 in the creation of large aggregate temporaries. */
2978 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2979 fallback | fb_lvalue);
7b7695eb 2980 ret = MIN (ret, tret);
2981
2330f9c5 2982 /* And finally, the indices and operands of ARRAY_REF. During this
60f65a0a 2983 loop we also remove any useless conversions. */
f1f41a6c 2984 for (; expr_stack.length () > 0; )
7b7695eb 2985 {
f1f41a6c 2986 tree t = expr_stack.pop ();
7b7695eb 2987
2988 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2989 {
47f11e84 2990 /* Gimplify the dimension. */
7b7695eb 2991 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2992 {
2993 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
47f11e84 2994 is_gimple_val, fb_rvalue);
7b7695eb 2995 ret = MIN (ret, tret);
2996 }
2997 }
60f65a0a 2998
2999 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
3000
75a70cf9 3001 /* The innermost expression P may have originally had
3002 TREE_SIDE_EFFECTS set which would have caused all the outer
3003 expressions in *EXPR_P leading to P to also have had
3004 TREE_SIDE_EFFECTS set. */
4ee9c684 3005 recalculate_side_effects (t);
4ee9c684 3006 }
3007
3008 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
f0ac919b 3009 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
4ee9c684 3010 {
3011 canonicalize_component_ref (expr_p);
4ee9c684 3012 }
3013
f1f41a6c 3014 expr_stack.release ();
0ca9a7b6 3015
5f52d2e2 3016 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
3017
4ee9c684 3018 return ret;
3019}
3020
5206b159 3021/* Gimplify the self-modifying expression pointed to by EXPR_P
3022 (++, --, +=, -=).
4ee9c684 3023
3024 PRE_P points to the list where side effects that must happen before
3025 *EXPR_P should be stored.
3026
3027 POST_P points to the list where side effects that must happen after
3028 *EXPR_P should be stored.
3029
3030 WANT_VALUE is nonzero iff we want to use the value of this expression
b4c4a429 3031 in another expression.
4ee9c684 3032
b4c4a429 3033 ARITH_TYPE is the type the computation should be performed in. */
3034
3035enum gimplify_status
75a70cf9 3036gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
b4c4a429 3037 bool want_value, tree arith_type)
4ee9c684 3038{
3039 enum tree_code code;
75a70cf9 3040 tree lhs, lvalue, rhs, t1;
3041 gimple_seq post = NULL, *orig_post_p = post_p;
4ee9c684 3042 bool postfix;
3043 enum tree_code arith_code;
3044 enum gimplify_status ret;
389dd41b 3045 location_t loc = EXPR_LOCATION (*expr_p);
4ee9c684 3046
3047 code = TREE_CODE (*expr_p);
3048
0d59b19d 3049 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
3050 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
4ee9c684 3051
3052 /* Prefix or postfix? */
3053 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
3054 /* Faster to treat as prefix if result is not used. */
3055 postfix = want_value;
3056 else
3057 postfix = false;
3058
0aa655a0 3059 /* For postfix, make sure the inner expression's post side effects
3060 are executed after side effects from this expression. */
3061 if (postfix)
3062 post_p = &post;
3063
4ee9c684 3064 /* Add or subtract? */
3065 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
3066 arith_code = PLUS_EXPR;
3067 else
3068 arith_code = MINUS_EXPR;
3069
3070 /* Gimplify the LHS into a GIMPLE lvalue. */
3071 lvalue = TREE_OPERAND (*expr_p, 0);
3072 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3073 if (ret == GS_ERROR)
3074 return ret;
3075
3076 /* Extract the operands to the arithmetic operation. */
3077 lhs = lvalue;
3078 rhs = TREE_OPERAND (*expr_p, 1);
3079
 3080  /* For a postfix operator, we evaluate the LHS to an rvalue first, then use
43788075 3081     that saved value both as the result and in the update assignment.  */
4ee9c684 3082 if (postfix)
3083 {
3084 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
3085 if (ret == GS_ERROR)
3086 return ret;
4ee9c684 3087
43788075 3088 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
3089 }
b4c4a429 3090
0de36bdb 3091 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
3092 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
3093 {
a0553bff 3094 rhs = convert_to_ptrofftype_loc (loc, rhs);
0de36bdb 3095 if (arith_code == MINUS_EXPR)
389dd41b 3096 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
b4c4a429 3097 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
0de36bdb 3098 }
b4c4a429 3099 else
3100 t1 = fold_convert (TREE_TYPE (*expr_p),
3101 fold_build2 (arith_code, arith_type,
3102 fold_convert (arith_type, lhs),
3103 fold_convert (arith_type, rhs)));
0de36bdb 3104
4ee9c684 3105 if (postfix)
3106 {
32b227eb 3107 gimplify_assign (lvalue, t1, pre_p);
75a70cf9 3108 gimplify_seq_add_seq (orig_post_p, post);
b4c4a429 3109 *expr_p = lhs;
4ee9c684 3110 return GS_ALL_DONE;
3111 }
3112 else
3113 {
75a70cf9 3114 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
4ee9c684 3115 return GS_OK;
3116 }
3117}
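/* Illustrative result (temporary names are made up): with the value
   wanted, "x++" gimplifies to roughly

       x.0 = x;
       x = x.0 + 1;

   and x.0 is used as the value of the expression; prefix forms and
   postfix forms whose value is unused simply become "x = x + 1;".  */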
3118
80f06481 3119/* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3120
3121static void
3122maybe_with_size_expr (tree *expr_p)
3123{
dfab9d64 3124 tree expr = *expr_p;
3125 tree type = TREE_TYPE (expr);
3126 tree size;
80f06481 3127
dfab9d64 3128 /* If we've already wrapped this or the type is error_mark_node, we can't do
3129 anything. */
3130 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3131 || type == error_mark_node)
80f06481 3132 return;
3133
dfab9d64 3134 /* If the size isn't known or is a constant, we have nothing to do. */
80f06481 3135 size = TYPE_SIZE_UNIT (type);
8672ee56 3136 if (!size || poly_int_tree_p (size))
dfab9d64 3137 return;
3138
3139 /* Otherwise, make a WITH_SIZE_EXPR. */
3140 size = unshare_expr (size);
3141 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3142 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
80f06481 3143}
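/* For instance, passing an object of a variable-sized type whose
   TYPE_SIZE_UNIT is "n" yields WITH_SIZE_EXPR <obj, n>, so the size
   is still available where the argument is consumed (sketch; any
   PLACEHOLDER_EXPRs in the size are substituted as shown above).  */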
3144
75a70cf9 3145/* Helper for gimplify_call_expr.  Gimplify a single argument *ARG_P.
489c4088 3146 Store any side-effects in PRE_P. CALL_LOCATION is the location of
9ae1b28a 3147 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3148 gimplified to an SSA name. */
cc6a30ae 3149
10621300 3150enum gimplify_status
9ae1b28a 3151gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3152 bool allow_ssa)
cc6a30ae 3153{
3154 bool (*test) (tree);
3155 fallback_t fb;
3156
3157 /* In general, we allow lvalues for function arguments to avoid
3158 extra overhead of copying large aggregates out of even larger
3159 aggregates into temporaries only to copy the temporaries to
3160 the argument list. Make optimizers happy by pulling out to
3161 temporaries those types that fit in registers. */
75a70cf9 3162 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
cc6a30ae 3163 test = is_gimple_val, fb = fb_rvalue;
3164 else
0a362bb5 3165 {
3166 test = is_gimple_lvalue, fb = fb_either;
3167 /* Also strip a TARGET_EXPR that would force an extra copy. */
3168 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3169 {
3170 tree init = TARGET_EXPR_INITIAL (*arg_p);
3171 if (init
3172 && !VOID_TYPE_P (TREE_TYPE (init)))
3173 *arg_p = init;
3174 }
3175 }
cc6a30ae 3176
80f06481 3177 /* If this is a variable sized type, we must remember the size. */
75a70cf9 3178 maybe_with_size_expr (arg_p);
80f06481 3179
e60a6f7b 3180 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
489c4088 3181 /* Make sure arguments have the same location as the function call
3182 itself. */
3183 protected_set_expr_location (*arg_p, call_location);
3184
cc6a30ae 3185 /* There is a sequence point before a function call. Side effects in
3186 the argument list must occur before the actual call. So, when
3187 gimplifying arguments, force gimplify_expr to use an internal
3188 post queue which is then appended to the end of PRE_P. */
9ae1b28a 3189 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
cc6a30ae 3190}
3191
c3a81971 3192/* Don't fold inside offloading or taskreg regions: it can break code by
3193 adding decl references that weren't in the source. We'll do it during
 3194   the omplower pass instead.  */
c09f306d 3195
3196static bool
3197maybe_fold_stmt (gimple_stmt_iterator *gsi)
3198{
3199 struct gimplify_omp_ctx *ctx;
3200 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
43895be5 3201 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
c09f306d 3202 return false;
7e5a76c8 3203 else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
3204 return false;
85df98d7 3205 /* Delay folding of builtins until the IL is in consistent state
3206 so the diagnostic machinery can do a better job. */
3207 if (gimple_call_builtin_p (gsi_stmt (*gsi)))
3208 return false;
c09f306d 3209 return fold_stmt (gsi);
3210}
3211
75a70cf9 3212/* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
f0ac919b 3213 WANT_VALUE is true if the result of the call is desired. */
4ee9c684 3214
3215static enum gimplify_status
75a70cf9 3216gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4ee9c684 3217{
39f59e65 3218 tree fndecl, parms, p, fnptrtype;
4ee9c684 3219 enum gimplify_status ret;
c2f47e15 3220 int i, nargs;
1a91d914 3221 gcall *call;
c83059be 3222 bool builtin_va_start_p = false;
389dd41b 3223 location_t loc = EXPR_LOCATION (*expr_p);
4ee9c684 3224
0d59b19d 3225 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
4ee9c684 3226
7c2f0500 3227 /* For reliable diagnostics during inlining, it is necessary that
4ee9c684 3228 every call_expr be annotated with file and line. */
2ed8b5d0 3229 if (! EXPR_HAS_LOCATION (*expr_p))
3230 SET_EXPR_LOCATION (*expr_p, input_location);
4ee9c684 3231
5ef6b660 3232 /* Gimplify internal functions created in the FEs. */
3233 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3234 {
0c93c8a9 3235 if (want_value)
3236 return GS_ALL_DONE;
3237
5ef6b660 3238 nargs = call_expr_nargs (*expr_p);
3239 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3240 auto_vec<tree> vargs (nargs);
3241
3242 for (i = 0; i < nargs; i++)
3243 {
3244 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3245 EXPR_LOCATION (*expr_p));
3246 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3247 }
92bc38e7 3248
989f02dc 3249 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3250 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
5ef6b660 3251 gimplify_seq_add_stmt (pre_p, call);
3252 return GS_ALL_DONE;
3253 }
3254
4ee9c684 3255 /* This may be a call to a builtin function.
3256
3257 Builtin function calls may be transformed into different
3258 (and more efficient) builtin function calls under certain
3259 circumstances. Unfortunately, gimplification can muck things
3260 up enough that the builtin expanders are not aware that certain
3261 transformations are still valid.
3262
3263 So we attempt transformation/gimplification of the call before
3264 we gimplify the CALL_EXPR. At this time we do not manage to
3265 transform all calls in the same manner as the expanders do, but
3266 we do transform most of them. */
75a70cf9 3267 fndecl = get_callee_fndecl (*expr_p);
a0e9bfbb 3268 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8b76fbe9 3269 switch (DECL_FUNCTION_CODE (fndecl))
3270 {
2b34677f 3271 CASE_BUILT_IN_ALLOCA:
6ad0a0bc 3272 /* If the call has been built for a variable-sized object, then we
3273 want to restore the stack level when the enclosing BIND_EXPR is
3274 exited to reclaim the allocated space; otherwise, we precisely
3275 need to do the opposite and preserve the latest stack level. */
3276 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3277 gimplify_ctxp->save_stack = true;
3278 else
3279 gimplify_ctxp->keep_stack = true;
3280 break;
3281
8b76fbe9 3282 case BUILT_IN_VA_START:
743b0c6a 3283 {
75a70cf9 3284 builtin_va_start_p = TRUE;
c2f47e15 3285 if (call_expr_nargs (*expr_p) < 2)
743b0c6a 3286 {
3287 error ("too few arguments to function %<va_start%>");
e60a6f7b 3288 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
743b0c6a 3289 return GS_OK;
3290 }
48e1416a 3291
c2f47e15 3292 if (fold_builtin_next_arg (*expr_p, true))
743b0c6a 3293 {
e60a6f7b 3294 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
743b0c6a 3295 return GS_OK;
3296 }
8b76fbe9 3297 break;
3298 }
c388a0cf 3299
a9b358ce 3300 case BUILT_IN_EH_RETURN:
3301 cfun->calls_eh_return = true;
3302 break;
3303
8b76fbe9 3304 default:
3305 ;
3306 }
a0e9bfbb 3307 if (fndecl && fndecl_built_in_p (fndecl))
8b76fbe9 3308 {
3309 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3310 if (new_tree && new_tree != *expr_p)
3311 {
3312 /* There was a transformation of this call which computes the
3313 same value, but in a more efficient way. Return and try
3314 again. */
3315 *expr_p = new_tree;
3316 return GS_OK;
743b0c6a 3317 }
4ee9c684 3318 }
3319
39f59e65 3320 /* Remember the original function pointer type. */
3321 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3322
4ee9c684 3323 /* There is a sequence point before the call, so any side effects in
3324 the calling expression must occur before the actual call. Force
3325 gimplify_expr to use an internal post queue. */
c2f47e15 3326 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
215e2f1d 3327 is_gimple_call_addr, fb_rvalue);
4ee9c684 3328
c2f47e15 3329 nargs = call_expr_nargs (*expr_p);
3330
28d5335f 3331 /* Get argument types for verification. */
75a70cf9 3332 fndecl = get_callee_fndecl (*expr_p);
28d5335f 3333 parms = NULL_TREE;
75a70cf9 3334 if (fndecl)
3335 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
61ac9677 3336 else
3337 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
28d5335f 3338
75a70cf9 3339 if (fndecl && DECL_ARGUMENTS (fndecl))
fdcb802d 3340 p = DECL_ARGUMENTS (fndecl);
11582f4e 3341 else if (parms)
fdcb802d 3342 p = parms;
48dc2227 3343 else
cf219ecd 3344 p = NULL_TREE;
fdcb802d 3345 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3346 ;
48dc2227 3347
3348 /* If the last argument is __builtin_va_arg_pack () and it is not
3349 passed as a named argument, decrease the number of CALL_EXPR
3350 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3351 if (!p
3352 && i < nargs
3353 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3354 {
3355 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3356 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3357
3358 if (last_arg_fndecl
a0e9bfbb 3359 && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
48dc2227 3360 {
3361 tree call = *expr_p;
3362
3363 --nargs;
389dd41b 3364 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3365 CALL_EXPR_FN (call),
3366 nargs, CALL_EXPR_ARGP (call));
75a70cf9 3367
3368 /* Copy all CALL_EXPR flags, location and block, except
48dc2227 3369 CALL_EXPR_VA_ARG_PACK flag. */
3370 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3371 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3372 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3373 = CALL_EXPR_RETURN_SLOT_OPT (call);
3374 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
1cf1742e 3375 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
75a70cf9 3376
48dc2227 3377 /* Set CALL_EXPR_VA_ARG_PACK. */
3378 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3379 }
3380 }
28d5335f 3381
9ae1b28a 3382 /* If the call returns twice then after building the CFG the call
3383 argument computations will no longer dominate the call because
3384 we add an abnormal incoming edge to the call. So do not use SSA
3385 vars there. */
3386 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3387
156cc902 3388 /* Gimplify the function arguments. */
75a70cf9 3389 if (nargs > 0)
4ee9c684 3390 {
75a70cf9 3391 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3392 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3393 PUSH_ARGS_REVERSED ? i-- : i++)
3394 {
3395 enum gimplify_status t;
4ee9c684 3396
75a70cf9 3397 /* Avoid gimplifying the second argument to va_start, which needs to
3398 be the plain PARM_DECL. */
3399 if ((i != 1) || !builtin_va_start_p)
3400 {
489c4088 3401 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
9ae1b28a 3402 EXPR_LOCATION (*expr_p), ! returns_twice);
4ee9c684 3403
75a70cf9 3404 if (t == GS_ERROR)
3405 ret = GS_ERROR;
3406 }
3407 }
4ee9c684 3408 }
4ee9c684 3409
156cc902 3410 /* Gimplify the static chain. */
3411 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3412 {
3413 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3414 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3415 else
3416 {
3417 enum gimplify_status t;
3418 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
9ae1b28a 3419 EXPR_LOCATION (*expr_p), ! returns_twice);
156cc902 3420 if (t == GS_ERROR)
3421 ret = GS_ERROR;
3422 }
3423 }
3424
a09ce826 3425 /* Verify the function result. */
3426 if (want_value && fndecl
39f59e65 3427 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
a09ce826 3428 {
3429 error_at (loc, "using result of function returning %<void%>");
3430 ret = GS_ERROR;
3431 }
3432
4ee9c684 3433 /* Try this again in case gimplification exposed something. */
ac88cb27 3434 if (ret != GS_ERROR)
4ee9c684 3435 {
389dd41b 3436 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
ac88cb27 3437
f4e36c33 3438 if (new_tree && new_tree != *expr_p)
c2f47e15 3439 {
3440 /* There was a transformation of this call which computes the
3441 same value, but in a more efficient way. Return and try
3442 again. */
f4e36c33 3443 *expr_p = new_tree;
c2f47e15 3444 return GS_OK;
4ee9c684 3445 }
3446 }
75a70cf9 3447 else
3448 {
f7ea7934 3449 *expr_p = error_mark_node;
75a70cf9 3450 return GS_ERROR;
3451 }
4ee9c684 3452
3453 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3454 decl. This allows us to eliminate redundant or useless
3455 calls to "const" functions. */
9c2a0c05 3456 if (TREE_CODE (*expr_p) == CALL_EXPR)
3457 {
3458 int flags = call_expr_flags (*expr_p);
3459 if (flags & (ECF_CONST | ECF_PURE)
3460 /* An infinite loop is considered a side effect. */
3461 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3462 TREE_SIDE_EFFECTS (*expr_p) = 0;
3463 }
75a70cf9 3464
3465 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3466 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3467 form and delegate the creation of a GIMPLE_CALL to
3468 gimplify_modify_expr. This is always possible because when
3469 WANT_VALUE is true, the caller wants the result of this call into
3470 a temporary, which means that we will emit an INIT_EXPR in
3471 internal_get_tmp_var which will then be handled by
3472 gimplify_modify_expr. */
3473 if (!want_value)
3474 {
3475 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3476 have to do is replicate it as a GIMPLE_CALL tuple. */
7ae8b539 3477 gimple_stmt_iterator gsi;
3c0f15b4 3478 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
a5ef9e4d 3479 notice_special_calls (call);
75a70cf9 3480 gimplify_seq_add_stmt (pre_p, call);
7ae8b539 3481 gsi = gsi_last (*pre_p);
c09f306d 3482 maybe_fold_stmt (&gsi);
75a70cf9 3483 *expr_p = NULL_TREE;
3484 }
39f59e65 3485 else
3486 /* Remember the original function type. */
3487 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3488 CALL_EXPR_FN (*expr_p));
75a70cf9 3489
4ee9c684 3490 return ret;
3491}
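/* Illustrative note, not part of the original source: as a rough sketch of
   the effect of gimplify_call_expr above, a call whose operands themselves
   have side effects, say

       use (f (), g ());

   has those operands gimplified into the pre-queue first, roughly

       t1 = f ();
       t2 = g ();
       use (t1, t2);

   and, when the caller does not need the value, the call itself is emitted
   as a GIMPLE_CALL into PRE_P and *EXPR_P is cleared.  The names t1/t2 only
   stand for the temporaries the gimplifier creates.  */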
3492
3493/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3494 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3495
3496 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3497 condition is true or false, respectively. If null, we should generate
3498 our own to skip over the evaluation of this specific expression.
3499
43158006 3500 LOCUS is the source location of the COND_EXPR.
3501
4ee9c684 3502 This function is the tree equivalent of do_jump.
3503
3504 shortcut_cond_r should only be called by shortcut_cond_expr. */
3505
3506static tree
43158006 3507shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3508 location_t locus)
4ee9c684 3509{
3510 tree local_label = NULL_TREE;
3511 tree t, expr = NULL;
3512
3513 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3514 retain the shortcut semantics. Just insert the gotos here;
3515 shortcut_cond_expr will append the real blocks later. */
3516 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3517 {
43158006 3518 location_t new_locus;
3519
4ee9c684 3520 /* Turn if (a && b) into
3521
3522 if (a); else goto no;
3523 if (b) goto yes; else goto no;
3524 (no:) */
3525
3526 if (false_label_p == NULL)
3527 false_label_p = &local_label;
3528
43158006 3529 /* Keep the original source location on the first 'if'. */
3530 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
4ee9c684 3531 append_to_statement_list (t, &expr);
3532
43158006 3533 /* Set the source location of the && on the second 'if'. */
90567983 3534 new_locus = rexpr_location (pred, locus);
43158006 3535 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3536 new_locus);
4ee9c684 3537 append_to_statement_list (t, &expr);
3538 }
3539 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3540 {
43158006 3541 location_t new_locus;
3542
4ee9c684 3543 /* Turn if (a || b) into
3544
3545 if (a) goto yes;
3546 if (b) goto yes; else goto no;
3547 (yes:) */
3548
3549 if (true_label_p == NULL)
3550 true_label_p = &local_label;
3551
43158006 3552 /* Keep the original source location on the first 'if'. */
3553 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
4ee9c684 3554 append_to_statement_list (t, &expr);
3555
43158006 3556 /* Set the source location of the || on the second 'if'. */
90567983 3557 new_locus = rexpr_location (pred, locus);
43158006 3558 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3559 new_locus);
4ee9c684 3560 append_to_statement_list (t, &expr);
3561 }
ae22319c 3562 else if (TREE_CODE (pred) == COND_EXPR
3563 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3564 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
4ee9c684 3565 {
43158006 3566 location_t new_locus;
3567
4ee9c684 3568 /* As long as we're messing with gotos, turn if (a ? b : c) into
3569 if (a)
3570 if (b) goto yes; else goto no;
3571 else
ae22319c 3572 if (c) goto yes; else goto no;
3573
3574 Don't do this if one of the arms has void type, which can happen
3575 in C++ when the arm is throw. */
43158006 3576
3577 /* Keep the original source location on the first 'if'. Set the source
3578 location of the ? on the second 'if'. */
90567983 3579 new_locus = rexpr_location (pred, locus);
40b19772 3580 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3581 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
43158006 3582 false_label_p, locus),
40b19772 3583 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
43158006 3584 false_label_p, new_locus));
4ee9c684 3585 }
3586 else
3587 {
40b19772 3588 expr = build3 (COND_EXPR, void_type_node, pred,
3589 build_and_jump (true_label_p),
3590 build_and_jump (false_label_p));
43158006 3591 SET_EXPR_LOCATION (expr, locus);
4ee9c684 3592 }
3593
3594 if (local_label)
3595 {
3596 t = build1 (LABEL_EXPR, void_type_node, local_label);
3597 append_to_statement_list (t, &expr);
3598 }
3599
3600 return expr;
3601}
3602
90567983 3603/* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
3604 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
3605 statement, if it is the last one. Otherwise, return NULL. */
3606
3607static tree
3608find_goto (tree expr)
3609{
3610 if (!expr)
3611 return NULL_TREE;
3612
3613 if (TREE_CODE (expr) == GOTO_EXPR)
3614 return expr;
3615
3616 if (TREE_CODE (expr) != STATEMENT_LIST)
3617 return NULL_TREE;
3618
3619 tree_stmt_iterator i = tsi_start (expr);
3620
3621 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
3622 tsi_next (&i);
3623
3624 if (!tsi_one_before_end_p (i))
3625 return NULL_TREE;
3626
3627 return find_goto (tsi_stmt (i));
3628}
3629
3630/* Same as find_goto, except that it returns NULL if the destination
3631 is not a LABEL_DECL. */
3632
3633static inline tree
3634find_goto_label (tree expr)
3635{
3636 tree dest = find_goto (expr);
3637 if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
3638 return dest;
3639 return NULL_TREE;
3640}
3641
75a70cf9 3642/* Given a conditional expression EXPR with short-circuit boolean
3643 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
9d75589a 3644 predicate apart into the equivalent sequence of conditionals. */
75a70cf9 3645
4ee9c684 3646static tree
3647shortcut_cond_expr (tree expr)
3648{
3649 tree pred = TREE_OPERAND (expr, 0);
3650 tree then_ = TREE_OPERAND (expr, 1);
3651 tree else_ = TREE_OPERAND (expr, 2);
3652 tree true_label, false_label, end_label, t;
3653 tree *true_label_p;
3654 tree *false_label_p;
7f0f308d 3655 bool emit_end, emit_false, jump_over_else;
b6431126 3656 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
3657 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
4ee9c684 3658
3659 /* First do simple transformations. */
b6431126 3660 if (!else_se)
4ee9c684 3661 {
43158006 3662 /* If there is no 'else', turn
3663 if (a && b) then c
3664 into
3665 if (a) if (b) then c. */
4ee9c684 3666 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3667 {
43158006 3668 /* Keep the original source location on the first 'if'. */
3df42822 3669 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
4ee9c684 3670 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
43158006 3671 /* Set the source location of the && on the second 'if'. */
90567983 3672 if (rexpr_has_location (pred))
3673 SET_EXPR_LOCATION (expr, rexpr_location (pred));
4ee9c684 3674 then_ = shortcut_cond_expr (expr);
9ef37fb8 3675 then_se = then_ && TREE_SIDE_EFFECTS (then_);
4ee9c684 3676 pred = TREE_OPERAND (pred, 0);
40b19772 3677 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
43158006 3678 SET_EXPR_LOCATION (expr, locus);
4ee9c684 3679 }
3680 }
75a70cf9 3681
b6431126 3682 if (!then_se)
4ee9c684 3683 {
3684 /* If there is no 'then', turn
3685 if (a || b); else d
3686 into
3687 if (a); else if (b); else d. */
3688 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3689 {
43158006 3690 /* Keep the original source location on the first 'if'. */
3df42822 3691 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
4ee9c684 3692 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
43158006 3693 /* Set the source location of the || on the second 'if'. */
90567983 3694 if (rexpr_has_location (pred))
3695 SET_EXPR_LOCATION (expr, rexpr_location (pred));
4ee9c684 3696 else_ = shortcut_cond_expr (expr);
9ef37fb8 3697 else_se = else_ && TREE_SIDE_EFFECTS (else_);
4ee9c684 3698 pred = TREE_OPERAND (pred, 0);
40b19772 3699 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
43158006 3700 SET_EXPR_LOCATION (expr, locus);
4ee9c684 3701 }
3702 }
3703
3704 /* If we're done, great. */
3705 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
3706 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
3707 return expr;
3708
3709 /* Otherwise we need to mess with gotos. Change
3710 if (a) c; else d;
3711 to
3712 if (a); else goto no;
3713 c; goto end;
3714 no: d; end:
3715 and recursively gimplify the condition. */
3716
3717 true_label = false_label = end_label = NULL_TREE;
3718
3719 /* If our arms just jump somewhere, hijack those labels so we don't
3720 generate jumps to jumps. */
3721
90567983 3722 if (tree then_goto = find_goto_label (then_))
4ee9c684 3723 {
90567983 3724 true_label = GOTO_DESTINATION (then_goto);
b6431126 3725 then_ = NULL;
3726 then_se = false;
4ee9c684 3727 }
3728
90567983 3729 if (tree else_goto = find_goto_label (else_))
4ee9c684 3730 {
90567983 3731 false_label = GOTO_DESTINATION (else_goto);
b6431126 3732 else_ = NULL;
3733 else_se = false;
4ee9c684 3734 }
3735
0bed3869 3736 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
4ee9c684 3737 if (true_label)
3738 true_label_p = &true_label;
3739 else
3740 true_label_p = NULL;
3741
3742 /* The 'else' branch also needs a label if it contains interesting code. */
b6431126 3743 if (false_label || else_se)
4ee9c684 3744 false_label_p = &false_label;
3745 else
3746 false_label_p = NULL;
3747
3748 /* If there was nothing else in our arms, just forward the label(s). */
b6431126 3749 if (!then_se && !else_se)
43158006 3750 return shortcut_cond_r (pred, true_label_p, false_label_p,
3df42822 3751 EXPR_LOC_OR_LOC (expr, input_location));
4ee9c684 3752
3753 /* If our last subexpression already has a terminal label, reuse it. */
b6431126 3754 if (else_se)
43158006 3755 t = expr_last (else_);
b6431126 3756 else if (then_se)
43158006 3757 t = expr_last (then_);
b6431126 3758 else
43158006 3759 t = NULL;
3760 if (t && TREE_CODE (t) == LABEL_EXPR)
3761 end_label = LABEL_EXPR_LABEL (t);
4ee9c684 3762
3763 /* If we don't care about jumping to the 'else' branch, jump to the end
3764 if the condition is false. */
3765 if (!false_label_p)
3766 false_label_p = &end_label;
3767
3768 /* We only want to emit these labels if we aren't hijacking them. */
3769 emit_end = (end_label == NULL_TREE);
3770 emit_false = (false_label == NULL_TREE);
3771
7f0f308d 3772 /* We only emit the jump over the else clause if we have to--if the
3773 then clause may fall through. Otherwise we can wind up with a
3774 useless jump and a useless label at the end of gimplified code,
3775 which will cause us to think that this conditional as a whole
3776 falls through even if it doesn't. If we then inline a function
3777 which ends with such a condition, that can cause us to issue an
3778 inappropriate warning about control reaching the end of a
3779 non-void function. */
3780 jump_over_else = block_may_fallthru (then_);
3781
43158006 3782 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
3df42822 3783 EXPR_LOC_OR_LOC (expr, input_location));
4ee9c684 3784
3785 expr = NULL;
3786 append_to_statement_list (pred, &expr);
3787
3788 append_to_statement_list (then_, &expr);
b6431126 3789 if (else_se)
4ee9c684 3790 {
7f0f308d 3791 if (jump_over_else)
3792 {
43158006 3793 tree last = expr_last (expr);
7f0f308d 3794 t = build_and_jump (&end_label);
90567983 3795 if (rexpr_has_location (last))
3796 SET_EXPR_LOCATION (t, rexpr_location (last));
7f0f308d 3797 append_to_statement_list (t, &expr);
3798 }
4ee9c684 3799 if (emit_false)
3800 {
3801 t = build1 (LABEL_EXPR, void_type_node, false_label);
3802 append_to_statement_list (t, &expr);
3803 }
3804 append_to_statement_list (else_, &expr);
3805 }
3806 if (emit_end && end_label)
3807 {
3808 t = build1 (LABEL_EXPR, void_type_node, end_label);
3809 append_to_statement_list (t, &expr);
3810 }
3811
3812 return expr;
3813}
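/* Illustrative note, not part of the original source: as a sketch of what
   shortcut_cond_expr produces, assuming both arms have side effects,

       if (a && b) c (); else d ();

   is rewritten into roughly

       if (a) ; else goto no;
       if (b) ; else goto no;
       c ();
       goto end;
       no:  d ();
       end: ;

   where "no" and "end" stand for the artificial labels built here.  */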
3814
3815/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
3816
773c5ba7 3817tree
4ee9c684 3818gimple_boolify (tree expr)
3819{
3820 tree type = TREE_TYPE (expr);
389dd41b 3821 location_t loc = EXPR_LOCATION (expr);
4ee9c684 3822
2527d57b 3823 if (TREE_CODE (expr) == NE_EXPR
3824 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
3825 && integer_zerop (TREE_OPERAND (expr, 1)))
3826 {
3827 tree call = TREE_OPERAND (expr, 0);
3828 tree fn = get_callee_fndecl (call);
3829
26043330 3830 /* For __builtin_expect ((long) (x), y) recurse into x as well
3831 if x is truth_value_p. */
2527d57b 3832 if (fn
a0e9bfbb 3833 && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
2527d57b 3834 && call_expr_nargs (call) == 2)
3835 {
3836 tree arg = CALL_EXPR_ARG (call, 0);
3837 if (arg)
3838 {
3839 if (TREE_CODE (arg) == NOP_EXPR
3840 && TREE_TYPE (arg) == TREE_TYPE (call))
3841 arg = TREE_OPERAND (arg, 0);
26043330 3842 if (truth_value_p (TREE_CODE (arg)))
3843 {
3844 arg = gimple_boolify (arg);
3845 CALL_EXPR_ARG (call, 0)
3846 = fold_convert_loc (loc, TREE_TYPE (call), arg);
3847 }
2527d57b 3848 }
3849 }
3850 }
3851
4ee9c684 3852 switch (TREE_CODE (expr))
3853 {
3854 case TRUTH_AND_EXPR:
3855 case TRUTH_OR_EXPR:
3856 case TRUTH_XOR_EXPR:
3857 case TRUTH_ANDIF_EXPR:
3858 case TRUTH_ORIF_EXPR:
3859 /* Also boolify the arguments of truth exprs. */
3860 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
3861 /* FALLTHRU */
3862
3863 case TRUTH_NOT_EXPR:
3864 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4ee9c684 3865
4ee9c684 3866 /* These expressions always produce boolean results. */
4b5f1658 3867 if (TREE_CODE (type) != BOOLEAN_TYPE)
3868 TREE_TYPE (expr) = boolean_type_node;
4ee9c684 3869 return expr;
7c2f0500 3870
4644b593 3871 case ANNOTATE_EXPR:
eb71996d 3872 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
4644b593 3873 {
eb71996d 3874 case annot_expr_ivdep_kind:
2a09b28c 3875 case annot_expr_unroll_kind:
eb71996d 3876 case annot_expr_no_vector_kind:
3877 case annot_expr_vector_kind:
56f2814d 3878 case annot_expr_parallel_kind:
4644b593 3879 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3880 if (TREE_CODE (type) != BOOLEAN_TYPE)
3881 TREE_TYPE (expr) = boolean_type_node;
3882 return expr;
eb71996d 3883 default:
3884 gcc_unreachable ();
4644b593 3885 }
4644b593 3886
4ee9c684 3887 default:
4b5f1658 3888 if (COMPARISON_CLASS_P (expr))
3889 {
3890	      /* These expressions always produce boolean results.  */
3891 if (TREE_CODE (type) != BOOLEAN_TYPE)
3892 TREE_TYPE (expr) = boolean_type_node;
3893 return expr;
3894 }
4ee9c684 3895 /* Other expressions that get here must have boolean values, but
3896 might need to be converted to the appropriate mode. */
4b5f1658 3897 if (TREE_CODE (type) == BOOLEAN_TYPE)
4c7817e5 3898 return expr;
389dd41b 3899 return fold_convert_loc (loc, boolean_type_node, expr);
4ee9c684 3900 }
3901}
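/* Illustrative note, not part of the original source: as a small example of
   gimple_boolify, in a condition such as

       if ((a < b) && f ())

   the comparison and the TRUTH_ANDIF_EXPR simply get their type rewritten
   to boolean_type_node, while the integer-valued call is wrapped in a
   conversion to boolean_type_node via fold_convert.  */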
3902
367f2e2a 3903/* Given a conditional expression *EXPR_P without side effects, gimplify
3904 its operands. New statements are inserted to PRE_P. */
3905
3906static enum gimplify_status
75a70cf9 3907gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
367f2e2a 3908{
3909 tree expr = *expr_p, cond;
3910 enum gimplify_status ret, tret;
3911 enum tree_code code;
3912
3913 cond = gimple_boolify (COND_EXPR_COND (expr));
3914
3915 /* We need to handle && and || specially, as their gimplification
3916 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
3917 code = TREE_CODE (cond);
3918 if (code == TRUTH_ANDIF_EXPR)
3919 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3920 else if (code == TRUTH_ORIF_EXPR)
3921 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
75a70cf9 3922 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
367f2e2a 3923 COND_EXPR_COND (*expr_p) = cond;
3924
3925 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3926 is_gimple_val, fb_rvalue);
3927 ret = MIN (ret, tret);
3928 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3929 is_gimple_val, fb_rvalue);
3930
3931 return MIN (ret, tret);
3932}
3933
57859735 3934/* Return true if evaluating EXPR could trap.
367f2e2a 3935 EXPR is GENERIC, while tree_could_trap_p can be called
3936 only on GIMPLE. */
3937
b88c2569 3938bool
367f2e2a 3939generic_expr_could_trap_p (tree expr)
3940{
3941 unsigned i, n;
3942
3943 if (!expr || is_gimple_val (expr))
3944 return false;
3945
3946 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3947 return true;
3948
3949 n = TREE_OPERAND_LENGTH (expr);
3950 for (i = 0; i < n; i++)
3951 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3952 return true;
3953
3954 return false;
3955}
3956
5206b159 3957/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
4ee9c684 3958 into
3959
3960 if (p) if (p)
3961 t1 = a; a;
3962 else or else
3963 t1 = b; b;
3964 t1;
3965
3966 The second form is used when *EXPR_P is of type void.
3967
3968 PRE_P points to the list where side effects that must happen before
cf6b103e 3969 *EXPR_P should be stored. */
4ee9c684 3970
3971static enum gimplify_status
75a70cf9 3972gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
4ee9c684 3973{
3974 tree expr = *expr_p;
71302876 3975 tree type = TREE_TYPE (expr);
3976 location_t loc = EXPR_LOCATION (expr);
3977 tree tmp, arm1, arm2;
4ee9c684 3978 enum gimplify_status ret;
75a70cf9 3979 tree label_true, label_false, label_cont;
3980 bool have_then_clause_p, have_else_clause_p;
1a91d914 3981 gcond *cond_stmt;
75a70cf9 3982 enum tree_code pred_code;
3983 gimple_seq seq = NULL;
c0a843e0 3984
3985 /* If this COND_EXPR has a value, copy the values into a temporary within
3986 the arms. */
71302876 3987 if (!VOID_TYPE_P (type))
c0a843e0 3988 {
71302876 3989 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
03623d96 3990 tree result;
3991
71302876 3992 /* If either an rvalue is ok or we do not require an lvalue, create the
3993 temporary. But we cannot do that if the type is addressable. */
3994 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
e8b8bad7 3995 && !TREE_ADDRESSABLE (type))
03623d96 3996 {
367f2e2a 3997 if (gimplify_ctxp->allow_rhs_cond_expr
3998 /* If either branch has side effects or could trap, it can't be
3999 evaluated unconditionally. */
71302876 4000 && !TREE_SIDE_EFFECTS (then_)
4001 && !generic_expr_could_trap_p (then_)
4002 && !TREE_SIDE_EFFECTS (else_)
4003 && !generic_expr_could_trap_p (else_))
367f2e2a 4004 return gimplify_pure_cond_expr (expr_p, pre_p);
4005
71302876 4006 tmp = create_tmp_var (type, "iftmp");
4007 result = tmp;
03623d96 4008 }
71302876 4009
4010 /* Otherwise, only create and copy references to the values. */
c0a843e0 4011 else
4012 {
71302876 4013 type = build_pointer_type (type);
03623d96 4014
71302876 4015 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4016 then_ = build_fold_addr_expr_loc (loc, then_);
03623d96 4017
71302876 4018 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4019 else_ = build_fold_addr_expr_loc (loc, else_);
4020
4021 expr
4022 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
367f2e2a 4023
75a70cf9 4024 tmp = create_tmp_var (type, "iftmp");
182cf5a9 4025 result = build_simple_mem_ref_loc (loc, tmp);
c0a843e0 4026 }
4027
71302876 4028 /* Build the new then clause, `tmp = then_;'. But don't build the
4029 assignment if the value is void; in C++ it can be if it's a throw. */
4030 if (!VOID_TYPE_P (TREE_TYPE (then_)))
152b1d2a 4031 TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);
c0a843e0 4032
71302876 4033 /* Similarly, build the new else clause, `tmp = else_;'. */
4034 if (!VOID_TYPE_P (TREE_TYPE (else_)))
152b1d2a 4035 TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);
c0a843e0 4036
4037 TREE_TYPE (expr) = void_type_node;
4038 recalculate_side_effects (expr);
4039
11f36fa6 4040 /* Move the COND_EXPR to the prequeue. */
75a70cf9 4041 gimplify_stmt (&expr, pre_p);
c0a843e0 4042
03623d96 4043 *expr_p = result;
75a70cf9 4044 return GS_ALL_DONE;
c0a843e0 4045 }
4046
00044e94 4047 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4048 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
4049 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
4050 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
4051
c0a843e0 4052 /* Make sure the condition has BOOLEAN_TYPE. */
4053 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4054
4055 /* Break apart && and || conditions. */
4056 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
4057 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
4058 {
4059 expr = shortcut_cond_expr (expr);
4060
4061 if (expr != *expr_p)
4062 {
4063 *expr_p = expr;
4064
4065 /* We can't rely on gimplify_expr to re-gimplify the expanded
4066 form properly, as cleanups might cause the target labels to be
4067 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4068 set up a conditional context. */
4069 gimple_push_condition ();
75a70cf9 4070 gimplify_stmt (expr_p, &seq);
c0a843e0 4071 gimple_pop_condition (pre_p);
75a70cf9 4072 gimple_seq_add_seq (pre_p, seq);
c0a843e0 4073
4074 return GS_ALL_DONE;
4075 }
4076 }
4077
4078 /* Now do the normal gimplification. */
c0a843e0 4079
75a70cf9 4080 /* Gimplify condition. */
4081 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
4082 fb_rvalue);
c0a843e0 4083 if (ret == GS_ERROR)
75a70cf9 4084 return GS_ERROR;
4085 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
4086
4087 gimple_push_condition ();
c0a843e0 4088
75a70cf9 4089 have_then_clause_p = have_else_clause_p = false;
90567983 4090 label_true = find_goto_label (TREE_OPERAND (expr, 1));
4091 if (label_true
4092 && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
75a70cf9 4093 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4094 have different locations, otherwise we end up with incorrect
4095 location information on the branches. */
4096 && (optimize
4097 || !EXPR_HAS_LOCATION (expr)
90567983 4098 || !rexpr_has_location (label_true)
4099 || EXPR_LOCATION (expr) == rexpr_location (label_true)))
75a70cf9 4100 {
75a70cf9 4101 have_then_clause_p = true;
90567983 4102 label_true = GOTO_DESTINATION (label_true);
c0a843e0 4103 }
4104 else
e60a6f7b 4105 label_true = create_artificial_label (UNKNOWN_LOCATION);
90567983 4106 label_false = find_goto_label (TREE_OPERAND (expr, 2));
4107 if (label_false
4108 && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
75a70cf9 4109 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4110 have different locations, otherwise we end up with incorrect
4111 location information on the branches. */
4112 && (optimize
4113 || !EXPR_HAS_LOCATION (expr)
90567983 4114 || !rexpr_has_location (label_false)
4115 || EXPR_LOCATION (expr) == rexpr_location (label_false)))
75a70cf9 4116 {
75a70cf9 4117 have_else_clause_p = true;
90567983 4118 label_false = GOTO_DESTINATION (label_false);
75a70cf9 4119 }
4120 else
e60a6f7b 4121 label_false = create_artificial_label (UNKNOWN_LOCATION);
c0a843e0 4122
75a70cf9 4123 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
4124 &arm2);
1a91d914 4125 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
04a37b12 4126 label_false);
fc3527e3 4127 gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
1a91d914 4128 gimplify_seq_add_stmt (&seq, cond_stmt);
04a37b12 4129 gimple_stmt_iterator gsi = gsi_last (seq);
4130 maybe_fold_stmt (&gsi);
4131
75a70cf9 4132 label_cont = NULL_TREE;
4133 if (!have_then_clause_p)
4134 {
4135 /* For if (...) {} else { code; } put label_true after
4136 the else block. */
4137 if (TREE_OPERAND (expr, 1) == NULL_TREE
4138 && !have_else_clause_p
4139 && TREE_OPERAND (expr, 2) != NULL_TREE)
4140 label_cont = label_true;
4141 else
4142 {
4143 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4144 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4145 /* For if (...) { code; } else {} or
4146 if (...) { code; } else goto label; or
4147 if (...) { code; return; } else { ... }
4148 label_cont isn't needed. */
4149 if (!have_else_clause_p
4150 && TREE_OPERAND (expr, 2) != NULL_TREE
4151 && gimple_seq_may_fallthru (seq))
4152 {
42acab1c 4153 gimple *g;
e60a6f7b 4154 label_cont = create_artificial_label (UNKNOWN_LOCATION);
75a70cf9 4155
4156 g = gimple_build_goto (label_cont);
4157
4158 /* GIMPLE_COND's are very low level; they have embedded
4159 gotos. This particular embedded goto should not be marked
4160 with the location of the original COND_EXPR, as it would
4161 correspond to the COND_EXPR's condition, not the ELSE or the
4162 THEN arms. To avoid marking it with the wrong location, flag
4163 it as "no location". */
4164 gimple_set_do_not_emit_location (g);
4165
4166 gimplify_seq_add_stmt (&seq, g);
4167 }
4168 }
4169 }
4170 if (!have_else_clause_p)
4171 {
4172 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4173 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4174 }
4175 if (label_cont)
4176 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4177
4178 gimple_pop_condition (pre_p);
4179 gimple_seq_add_seq (pre_p, seq);
4180
4181 if (ret == GS_ERROR)
4182 ; /* Do nothing. */
4183 else if (have_then_clause_p || have_else_clause_p)
4184 ret = GS_ALL_DONE;
4185 else
4186 {
4187 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4188 expr = TREE_OPERAND (expr, 0);
4189 gimplify_stmt (&expr, pre_p);
4190 }
4191
4192 *expr_p = NULL;
4193 return ret;
4194}
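/* Illustrative note, not part of the original source: a rough sketch of the
   GIMPLE emitted by gimplify_cond_expr for a statement such as

       if (p) x = 1; else x = 2;

   is, with label names shown symbolically,

       if (p != 0) goto then; else goto else;
       then: x = 1; goto cont;
       else: x = 2;
       cont: ;

   i.e. a GIMPLE_COND followed by the two gimplified arms and the artificial
   labels created above.  */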
4195
1e5ec9ee 4196/* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4197 to be marked addressable.
4198
4199 We cannot rely on such an expression being directly markable if a temporary
4200 has been created by the gimplification. In this case, we create another
4201 temporary and initialize it with a copy, which will become a store after we
4202 mark it addressable. This can happen if the front-end passed us something
4203 that it could not mark addressable yet, like a Fortran pass-by-reference
4204 parameter (int) floatvar. */
4205
4206static void
4207prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4208{
4209 while (handled_component_p (*expr_p))
4210 expr_p = &TREE_OPERAND (*expr_p, 0);
4211 if (is_gimple_reg (*expr_p))
3ed3b9c9 4212 {
9ae1b28a 4213 /* Do not allow an SSA name as the temporary. */
4214 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
3ed3b9c9 4215 DECL_GIMPLE_REG_P (var) = 0;
4216 *expr_p = var;
4217 }
1e5ec9ee 4218}
4219
75a70cf9 4220/* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4221 a call to __builtin_memcpy. */
4222
4223static enum gimplify_status
4224gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4225 gimple_seq *seq_p)
c0a843e0 4226{
c2f47e15 4227 tree t, to, to_ptr, from, from_ptr;
1a91d914 4228 gcall *gs;
389dd41b 4229 location_t loc = EXPR_LOCATION (*expr_p);
c0a843e0 4230
75a70cf9 4231 to = TREE_OPERAND (*expr_p, 0);
4232 from = TREE_OPERAND (*expr_p, 1);
c0a843e0 4233
1e5ec9ee 4234 /* Mark the RHS addressable. Beware that it may not be possible to do so
4235 directly if a temporary has been created by the gimplification. */
4236 prepare_gimple_addressable (&from, seq_p);
4237
86f2ad37 4238 mark_addressable (from);
389dd41b 4239 from_ptr = build_fold_addr_expr_loc (loc, from);
4240 gimplify_arg (&from_ptr, seq_p, loc);
c0a843e0 4241
86f2ad37 4242 mark_addressable (to);
389dd41b 4243 to_ptr = build_fold_addr_expr_loc (loc, to);
4244 gimplify_arg (&to_ptr, seq_p, loc);
75a70cf9 4245
b9a16870 4246 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
75a70cf9 4247
4248 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
c0a843e0 4249
4250 if (want_value)
4251 {
75a70cf9 4252 /* tmp = memcpy() */
f9e245b2 4253 t = create_tmp_var (TREE_TYPE (to_ptr));
75a70cf9 4254 gimple_call_set_lhs (gs, t);
4255 gimplify_seq_add_stmt (seq_p, gs);
4256
182cf5a9 4257 *expr_p = build_simple_mem_ref (t);
75a70cf9 4258 return GS_ALL_DONE;
c0a843e0 4259 }
4260
75a70cf9 4261 gimplify_seq_add_stmt (seq_p, gs);
4262 *expr_p = NULL;
4263 return GS_ALL_DONE;
c0a843e0 4264}
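/* Illustrative note, not part of the original source: a sketch of the memcpy
   lowering above.  For an aggregate assignment whose size is only known at
   run time (the WITH_SIZE_EXPR case handled by the caller), the MODIFY_EXPR

       to = from;

   is replaced by a call of the form

       __builtin_memcpy (&to, &from, size);

   after both operands have been made addressable.  */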
4265
4266/* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4267 a call to __builtin_memset. In this case we know that the RHS is
4268 a CONSTRUCTOR with an empty element list. */
4269
4270static enum gimplify_status
75a70cf9 4271gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4272 gimple_seq *seq_p)
c0a843e0 4273{
83402e8d 4274 tree t, from, to, to_ptr;
1a91d914 4275 gcall *gs;
389dd41b 4276 location_t loc = EXPR_LOCATION (*expr_p);
c0a843e0 4277
83402e8d 4278 /* Assert our assumptions, to abort instead of producing wrong code
4279 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4280 not be immediately exposed. */
48e1416a 4281 from = TREE_OPERAND (*expr_p, 1);
83402e8d 4282 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4283 from = TREE_OPERAND (from, 0);
4284
4285 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
f1f41a6c 4286 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
83402e8d 4287
4288 /* Now proceed. */
75a70cf9 4289 to = TREE_OPERAND (*expr_p, 0);
c0a843e0 4290
389dd41b 4291 to_ptr = build_fold_addr_expr_loc (loc, to);
4292 gimplify_arg (&to_ptr, seq_p, loc);
b9a16870 4293 t = builtin_decl_implicit (BUILT_IN_MEMSET);
75a70cf9 4294
4295 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
c0a843e0 4296
4297 if (want_value)
4298 {
75a70cf9 4299 /* tmp = memset() */
f9e245b2 4300 t = create_tmp_var (TREE_TYPE (to_ptr));
75a70cf9 4301 gimple_call_set_lhs (gs, t);
4302 gimplify_seq_add_stmt (seq_p, gs);
4303
4304 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4305 return GS_ALL_DONE;
c0a843e0 4306 }
4307
75a70cf9 4308 gimplify_seq_add_stmt (seq_p, gs);
4309 *expr_p = NULL;
4310 return GS_ALL_DONE;
c0a843e0 4311}
4312
d38cff30 4313/* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4314 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
57859735 4315 assignment. Return non-null if we detect a potential overlap. */
d38cff30 4316
4317struct gimplify_init_ctor_preeval_data
4318{
4319 /* The base decl of the lhs object. May be NULL, in which case we
4320 have to assume the lhs is indirect. */
4321 tree lhs_base_decl;
4322
4323 /* The alias set of the lhs object. */
32c2fdea 4324 alias_set_type lhs_alias_set;
d38cff30 4325};
4326
4327static tree
4328gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4329{
4330 struct gimplify_init_ctor_preeval_data *data
4331 = (struct gimplify_init_ctor_preeval_data *) xdata;
4332 tree t = *tp;
4333
4334 /* If we find the base object, obviously we have overlap. */
4335 if (data->lhs_base_decl == t)
4336 return t;
4337
4338 /* If the constructor component is indirect, determine if we have a
4339 potential overlap with the lhs. The only bits of information we
4340 have to go on at this point are addressability and alias sets. */
182cf5a9 4341 if ((INDIRECT_REF_P (t)
4342 || TREE_CODE (t) == MEM_REF)
d38cff30 4343 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4344 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4345 return t;
4346
f62a072c 4347 /* If the constructor component is a call, determine if it can hide a
182cf5a9 4348 potential overlap with the lhs through an INDIRECT_REF like above.
4349 ??? Ugh - this is completely broken. In fact this whole analysis
4350 doesn't look conservative. */
f62a072c 4351 if (TREE_CODE (t) == CALL_EXPR)
4352 {
4353 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4354
4355 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4356 if (POINTER_TYPE_P (TREE_VALUE (type))
4357 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4358 && alias_sets_conflict_p (data->lhs_alias_set,
4359 get_alias_set
4360 (TREE_TYPE (TREE_VALUE (type)))))
4361 return t;
4362 }
4363
ce45a448 4364 if (IS_TYPE_OR_DECL_P (t))
d38cff30 4365 *walk_subtrees = 0;
4366 return NULL;
4367}
4368
75a70cf9 4369/* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
d38cff30 4370 force values that overlap with the lhs (as described by *DATA)
4371 into temporaries. */
4372
4373static void
75a70cf9 4374gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
d38cff30 4375 struct gimplify_init_ctor_preeval_data *data)
4376{
4377 enum gimplify_status one;
4378
c7d4e749 4379 /* If the value is constant, then there's nothing to pre-evaluate. */
4380 if (TREE_CONSTANT (*expr_p))
4381 {
4382 /* Ensure it does not have side effects, it might contain a reference to
4383 the object we're initializing. */
4384 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4385 return;
4386 }
d38cff30 4387
4388 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4389 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4390 return;
4391
4392 /* Recurse for nested constructors. */
4393 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4394 {
c75b4594 4395 unsigned HOST_WIDE_INT ix;
4396 constructor_elt *ce;
f1f41a6c 4397 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
c75b4594 4398
f1f41a6c 4399 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
c75b4594 4400 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
75a70cf9 4401
d38cff30 4402 return;
4403 }
4404
18272fb7 4405 /* If this is a variable sized type, we must remember the size. */
4406 maybe_with_size_expr (expr_p);
d38cff30 4407
4408 /* Gimplify the constructor element to something appropriate for the rhs
75a70cf9 4409 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
7c2f0500 4410 the gimplifier will consider this a store to memory. Doing this
d38cff30 4411 gimplification now means that we won't have to deal with complicated
4412 language-specific trees, nor trees like SAVE_EXPR that can induce
c26a6416 4413 exponential search behavior. */
d38cff30 4414 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4415 if (one == GS_ERROR)
4416 {
4417 *expr_p = NULL;
4418 return;
4419 }
4420
4421 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4422 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4423 always be true for all scalars, since is_gimple_mem_rhs insists on a
4424 temporary variable for them. */
4425 if (DECL_P (*expr_p))
4426 return;
4427
4428 /* If this is of variable size, we have no choice but to assume it doesn't
4429 overlap since we can't make a temporary for it. */
00fde275 4430 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
d38cff30 4431 return;
4432
4433 /* Otherwise, we must search for overlap ... */
4434 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4435 return;
4436
4437 /* ... and if found, force the value into a temporary. */
4438 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
4439}
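/* Illustrative note, not part of the original source: a sketch of what the
   pre-evaluation above guards against.  In an assignment such as

       a = (struct S) { .x = a.y, .y = 0 };

   the read of a.y overlaps the object being stored to, so it is forced into
   a temporary before the member stores are emitted, roughly

       t = a.y;
       a.x = t;
       a.y = 0;

   where t stands for the formal temporary created here.  */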
4440
42b74698 4441/* A subroutine of gimplify_init_ctor_eval. Create a loop for
4442 a RANGE_EXPR in a CONSTRUCTOR for an array.
4443
4444 var = lower;
4445 loop_entry:
4446 object[var] = value;
4447 if (var == upper)
4448 goto loop_exit;
4449 var = var + 1;
4450 goto loop_entry;
4451 loop_exit:
4452
4453 We increment var _after_ the loop exit check because we might otherwise
4454 fail if upper == TYPE_MAX_VALUE (type for upper).
4455
4456 Note that we never have to deal with SAVE_EXPRs here, because this has
4457 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4458
f1f41a6c 4459static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
75a70cf9 4460 gimple_seq *, bool);
42b74698 4461
4462static void
4463gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4464 tree value, tree array_elt_type,
75a70cf9 4465 gimple_seq *pre_p, bool cleared)
42b74698 4466{
75a70cf9 4467 tree loop_entry_label, loop_exit_label, fall_thru_label;
a0147880 4468 tree var, var_type, cref, tmp;
42b74698 4469
e60a6f7b 4470 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4471 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4472 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
42b74698 4473
4474 /* Create and initialize the index variable. */
4475 var_type = TREE_TYPE (upper);
f9e245b2 4476 var = create_tmp_var (var_type);
75a70cf9 4477 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
42b74698 4478
4479 /* Add the loop entry label. */
75a70cf9 4480 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
42b74698 4481
4482 /* Build the reference. */
4483 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4484 var, NULL_TREE, NULL_TREE);
4485
4486 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4487 the store. Otherwise just assign value to the reference. */
4488
4489 if (TREE_CODE (value) == CONSTRUCTOR)
4490 /* NB we might have to call ourself recursively through
4491 gimplify_init_ctor_eval if the value is a constructor. */
4492 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4493 pre_p, cleared);
4494 else
75a70cf9 4495 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
42b74698 4496
4497 /* We exit the loop when the index var is equal to the upper bound. */
75a70cf9 4498 gimplify_seq_add_stmt (pre_p,
4499 gimple_build_cond (EQ_EXPR, var, upper,
4500 loop_exit_label, fall_thru_label));
4501
4502 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
42b74698 4503
4504 /* Otherwise, increment the index var... */
a0147880 4505 tmp = build2 (PLUS_EXPR, var_type, var,
4506 fold_convert (var_type, integer_one_node));
75a70cf9 4507 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
42b74698 4508
4509 /* ...and jump back to the loop entry. */
75a70cf9 4510 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
42b74698 4511
4512 /* Add the loop exit label. */
75a70cf9 4513 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
42b74698 4514}
4515
5d7bbf40 4516/* Return true if FDECL is accessing a field that is zero sized. */
48e1416a 4517
5d7bbf40 4518static bool
1f1872fd 4519zero_sized_field_decl (const_tree fdecl)
5d7bbf40 4520{
48e1416a 4521 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
5d7bbf40 4522 && integer_zerop (DECL_SIZE (fdecl)))
4523 return true;
4524 return false;
4525}
4526
37c33a59 4527/* Return true if TYPE is zero sized. */
48e1416a 4528
37c33a59 4529static bool
1f1872fd 4530zero_sized_type (const_tree type)
37c33a59 4531{
4532 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
4533 && integer_zerop (TYPE_SIZE (type)))
4534 return true;
4535 return false;
4536}
4537
d38cff30 4538/* A subroutine of gimplify_init_constructor. Generate individual
4539 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
c75b4594 4540 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
d38cff30 4541 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4542 zeroed first. */
4543
4544static void
f1f41a6c 4545gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
75a70cf9 4546 gimple_seq *pre_p, bool cleared)
d38cff30 4547{
4548 tree array_elt_type = NULL;
c75b4594 4549 unsigned HOST_WIDE_INT ix;
4550 tree purpose, value;
d38cff30 4551
4552 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
4553 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
4554
c75b4594 4555 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
d38cff30 4556 {
75a70cf9 4557 tree cref;
d38cff30 4558
4559 /* NULL values are created above for gimplification errors. */
4560 if (value == NULL)
4561 continue;
4562
4563 if (cleared && initializer_zerop (value))
4564 continue;
4565
42b74698 4566 /* ??? Here's to hoping the front end fills in all of the indices,
4567 so we don't have to figure out what's missing ourselves. */
4568 gcc_assert (purpose);
4569
a454baad 4570 /* Skip zero-sized fields, unless value has side-effects. This can
4571 happen with calls to functions returning a zero-sized type, which
4572 we shouldn't discard. As a number of downstream passes don't
4573 expect sets of zero-sized fields, we rely on the gimplification of
4574 the MODIFY_EXPR we make below to drop the assignment statement. */
4575 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
5d7bbf40 4576 continue;
4577
42b74698 4578 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4579 whole range. */
4580 if (TREE_CODE (purpose) == RANGE_EXPR)
d38cff30 4581 {
42b74698 4582 tree lower = TREE_OPERAND (purpose, 0);
4583 tree upper = TREE_OPERAND (purpose, 1);
4584
4585 /* If the lower bound is equal to upper, just treat it as if
4586 upper was the index. */
4587 if (simple_cst_equal (lower, upper))
4588 purpose = upper;
4589 else
4590 {
4591 gimplify_init_ctor_eval_range (object, lower, upper, value,
4592 array_elt_type, pre_p, cleared);
4593 continue;
4594 }
4595 }
d38cff30 4596
42b74698 4597 if (array_elt_type)
4598 {
5bad702f 4599 /* Do not use bitsizetype for ARRAY_REF indices. */
4600 if (TYPE_DOMAIN (TREE_TYPE (object)))
57859735 4601 purpose
4602 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
4603 purpose);
40b19772 4604 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4605 purpose, NULL_TREE, NULL_TREE);
d38cff30 4606 }
4607 else
fda23030 4608 {
4609 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
40b19772 4610 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
4611 unshare_expr (object), purpose, NULL_TREE);
fda23030 4612 }
d38cff30 4613
fda23030 4614 if (TREE_CODE (value) == CONSTRUCTOR
4615 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
d38cff30 4616 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4617 pre_p, cleared);
4618 else
4619 {
75a70cf9 4620 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
d38cff30 4621 gimplify_and_add (init, pre_p);
75a70cf9 4622 ggc_free (init);
d38cff30 4623 }
4624 }
4625}
4626
57859735 4627/* Return the appropriate RHS predicate for this LHS. */
75a70cf9 4628
e795d6e1 4629gimple_predicate
75a70cf9 4630rhs_predicate_for (tree lhs)
4631{
47f11e84 4632 if (is_gimple_reg (lhs))
4633 return is_gimple_reg_rhs_or_call;
75a70cf9 4634 else
47f11e84 4635 return is_gimple_mem_rhs_or_call;
75a70cf9 4636}
4637
866b9fd5 4638/* Return the initial guess for an appropriate RHS predicate for this LHS,
4639 before the LHS has been gimplified. */
4640
4641static gimple_predicate
4642initial_rhs_predicate_for (tree lhs)
4643{
4644 if (is_gimple_reg_type (TREE_TYPE (lhs)))
4645 return is_gimple_reg_rhs_or_call;
4646 else
4647 return is_gimple_mem_rhs_or_call;
4648}
4649
862f468c 4650/* Gimplify a C99 compound literal expression. This just means adding
4651 the DECL_EXPR before the current statement and using its anonymous
4652 decl instead. */
4653
4654static enum gimplify_status
73242672 4655gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
930802aa 4656 bool (*gimple_test_f) (tree),
73242672 4657 fallback_t fallback)
862f468c 4658{
4659 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
4660 tree decl = DECL_EXPR_DECL (decl_s);
930802aa 4661 tree init = DECL_INITIAL (decl);
862f468c 4662 /* Mark the decl as addressable if the compound literal
4663 expression is addressable now, otherwise it is marked too late
4664 after we gimplify the initialization expression. */
4665 if (TREE_ADDRESSABLE (*expr_p))
4666 TREE_ADDRESSABLE (decl) = 1;
930802aa 4667	  /* Otherwise, if we don't need an lvalue and have a literal, directly
4668 substitute it. Check if it matches the gimple predicate, as
4669 otherwise we'd generate a new temporary, and we can as well just
4670 use the decl we already have. */
4671 else if (!TREE_ADDRESSABLE (decl)
7e6aef80 4672 && !TREE_THIS_VOLATILE (decl)
930802aa 4673 && init
4674 && (fallback & fb_lvalue) == 0
4675 && gimple_test_f (init))
4676 {
4677 *expr_p = init;
4678 return GS_OK;
4679 }
862f468c 4680
4681 /* Preliminarily mark non-addressed complex variables as eligible
4682 for promotion to gimple registers. We'll transform their uses
4683 as we find them. */
4684 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
4685 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
4686 && !TREE_THIS_VOLATILE (decl)
4687 && !needs_to_live_in_memory (decl))
4688 DECL_GIMPLE_REG_P (decl) = 1;
4689
73242672 4690 /* If the decl is not addressable, then it is being used in some
4691 expression or on the right hand side of a statement, and it can
4692 be put into a readonly data section. */
4693 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
4694 TREE_READONLY (decl) = 1;
4695
862f468c 4696 /* This decl isn't mentioned in the enclosing block, so add it to the
4697 list of temps. FIXME it seems a bit of a kludge to say that
4698 anonymous artificial vars aren't pushed, but everything else is. */
4699 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
4700 gimple_add_tmp_var (decl);
4701
4702 gimplify_and_add (decl_s, pre_p);
4703 *expr_p = decl;
4704 return GS_OK;
4705}
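/* Illustrative note, not part of the original source: a sketch of the
   compound literal handling above.  For

       p = &(int) { 42 };

   the literal's anonymous decl (shown here as D.1) is declared via its
   DECL_EXPR ahead of the statement and then used in place of the literal,
   roughly

       int D.1 = 42;
       p = &D.1;  */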
4706
4707/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4708 return a new CONSTRUCTOR if something changed. */
4709
4710static tree
4711optimize_compound_literals_in_ctor (tree orig_ctor)
4712{
4713 tree ctor = orig_ctor;
f1f41a6c 4714 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
4715 unsigned int idx, num = vec_safe_length (elts);
862f468c 4716
4717 for (idx = 0; idx < num; idx++)
4718 {
f1f41a6c 4719 tree value = (*elts)[idx].value;
862f468c 4720 tree newval = value;
4721 if (TREE_CODE (value) == CONSTRUCTOR)
4722 newval = optimize_compound_literals_in_ctor (value);
4723 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
4724 {
4725 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
4726 tree decl = DECL_EXPR_DECL (decl_s);
4727 tree init = DECL_INITIAL (decl);
4728
4729 if (!TREE_ADDRESSABLE (value)
4730 && !TREE_ADDRESSABLE (decl)
8d1e0281 4731 && init
4732 && TREE_CODE (init) == CONSTRUCTOR)
862f468c 4733 newval = optimize_compound_literals_in_ctor (init);
4734 }
4735 if (newval == value)
4736 continue;
4737
4738 if (ctor == orig_ctor)
4739 {
4740 ctor = copy_node (orig_ctor);
f1f41a6c 4741 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
862f468c 4742 elts = CONSTRUCTOR_ELTS (ctor);
4743 }
f1f41a6c 4744 (*elts)[idx].value = newval;
862f468c 4745 }
4746 return ctor;
4747}
4748
c0a843e0 4749/* A subroutine of gimplify_modify_expr. Break out elements of a
4750 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4751
4752 Note that we still need to clear any elements that don't have explicit
4753 initializers, so if not all elements are initialized we keep the
5b56c905 4754	   original MODIFY_EXPR; we just remove all of the constructor elements.
4755
4756 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4757 GS_ERROR if we would have to create a temporary when gimplifying
4758 this constructor. Otherwise, return GS_OK.
4759
4760 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
c0a843e0 4761
4762static enum gimplify_status
75a70cf9 4763gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4764 bool want_value, bool notify_temp_creation)
c0a843e0 4765{
a39fe414 4766 tree object, ctor, type;
c0a843e0 4767 enum gimplify_status ret;
f1f41a6c 4768 vec<constructor_elt, va_gc> *elts;
c0a843e0 4769
a39fe414 4770 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
c0a843e0 4771
5b56c905 4772 if (!notify_temp_creation)
4773 {
75a70cf9 4774 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5b56c905 4775 is_gimple_lvalue, fb_lvalue);
4776 if (ret == GS_ERROR)
4777 return ret;
4778 }
d38cff30 4779
75a70cf9 4780 object = TREE_OPERAND (*expr_p, 0);
51361f65 4781 ctor = TREE_OPERAND (*expr_p, 1)
4782 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
a39fe414 4783 type = TREE_TYPE (ctor);
4784 elts = CONSTRUCTOR_ELTS (ctor);
c0a843e0 4785 ret = GS_ALL_DONE;
75a70cf9 4786
c0a843e0 4787 switch (TREE_CODE (type))
4788 {
4789 case RECORD_TYPE:
4790 case UNION_TYPE:
4791 case QUAL_UNION_TYPE:
4792 case ARRAY_TYPE:
4793 {
d38cff30 4794 struct gimplify_init_ctor_preeval_data preeval_data;
927b65fb 4795 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
c90b96f5 4796 HOST_WIDE_INT num_unique_nonzero_elements;
927b65fb 4797 bool cleared, complete_p, valid_const_initializer;
c90b96f5 4798 /* Use readonly data for initializers of this or smaller size
4799 regardless of the num_nonzero_elements / num_unique_nonzero_elements
4800 ratio. */
4801 const HOST_WIDE_INT min_unique_size = 64;
4802 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
4803 is smaller than this, use readonly data. */
4804 const int unique_nonzero_ratio = 8;
c0a843e0 4805
4806 /* Aggregate types must lower constructors to initialization of
4807 individual elements. The exception is that a CONSTRUCTOR node
4808 with no elements indicates zero-initialization of the whole. */
f1f41a6c 4809 if (vec_safe_is_empty (elts))
5b56c905 4810 {
4811 if (notify_temp_creation)
4812 return GS_OK;
4813 break;
4814 }
48e1416a 4815
20169a64 4816 /* Fetch information about the constructor to direct later processing.
4817 We might want to make static versions of it in various cases, and
4818 can only do so if it known to be a valid constant initializer. */
4819 valid_const_initializer
4820 = categorize_ctor_elements (ctor, &num_nonzero_elements,
c90b96f5 4821 &num_unique_nonzero_elements,
927b65fb 4822 &num_ctor_elements, &complete_p);
c0a843e0 4823
4824 /* If a const aggregate variable is being initialized, then it
4825	 should never be a loss to promote the variable to be static.  */
20169a64 4826 if (valid_const_initializer
7cb4a4d0 4827 && num_nonzero_elements > 1
c0a843e0 4828 && TREE_READONLY (object)
53e9c5c4 4829 && VAR_P (object)
c90b96f5 4830 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object))
4831 /* For ctors that have many repeated nonzero elements
4832 represented through RANGE_EXPRs, prefer initializing
4833 those through runtime loops over copies of large amounts
4834 of data from readonly data section. */
4835 && (num_unique_nonzero_elements
4836 > num_nonzero_elements / unique_nonzero_ratio
4837 || ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
4838 <= (unsigned HOST_WIDE_INT) min_unique_size)))
c0a843e0 4839 {
5b56c905 4840 if (notify_temp_creation)
4841 return GS_ERROR;
c0a843e0 4842 DECL_INITIAL (object) = ctor;
4843 TREE_STATIC (object) = 1;
4844 if (!DECL_NAME (object))
4845 DECL_NAME (object) = create_tmp_var_name ("C");
4846 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
4847
4848 /* ??? C++ doesn't automatically append a .<number> to the
88c5a1d1 4849 assembler name, and even when it does, it looks at FE private
c0a843e0 4850 data structures to figure out what that number should be,
4851 which are not set for this variable. I suppose this is
4852 important for local statics for inline functions, which aren't
4853 "local" in the object file sense. So in order to get a unique
4854 TU-local symbol, we must invoke the lhd version now. */
4855 lhd_set_decl_assembler_name (object);
4856
4857 *expr_p = NULL_TREE;
4858 break;
4859 }
4860
67580517 4861 /* If there are "lots" of initialized elements, even discounting
4862 those that are not address constants (and thus *must* be
4863 computed at runtime), then partition the constructor into
4864 constant and non-constant parts. Block copy the constant
4865 parts in, then generate code for the non-constant parts. */
4866 /* TODO. There's code in cp/typeck.c to do this. */
4867
927b65fb 4868 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
4869 /* store_constructor will ignore the clearing of variable-sized
4870 objects. Initializers for such objects must explicitly set
4871 every field that needs to be set. */
4872 cleared = false;
40fa18d6 4873 else if (!complete_p)
927b65fb 4874 /* If the constructor isn't complete, clear the whole object
86669a07 4875 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
927b65fb 4876
4877 ??? This ought not to be needed. For any element not present
 4878 in the initializer, we should simply set it to zero. Except
4879 we'd need to *find* the elements that are not present, and that
4880 requires trickery to avoid quadratic compile-time behavior in
4881 large cases or excessive memory use in small cases. */
40fa18d6 4882 cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
927b65fb 4883 else if (num_ctor_elements - num_nonzero_elements
f5733e7c 4884 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
927b65fb 4885 && num_nonzero_elements < num_ctor_elements / 4)
4886 /* If there are "lots" of zeros, it's more efficient to clear
4887 the memory and then set the nonzero elements. */
67580517 4888 cleared = true;
927b65fb 4889 else
4890 cleared = false;
67580517 4891
c0a843e0 4892 /* If there are "lots" of initialized elements, and all of them
4893 are valid address constants, then the entire initializer can
67580517 4894 be dropped to memory, and then memcpy'd out. Don't do this
4895 for sparse arrays, though, as it's more efficient to follow
4896 the standard CONSTRUCTOR behavior of memset followed by
90926089 4897 individual element initialization. Also don't do this for small
4898 all-zero initializers (which aren't big enough to merit
4899 clearing), and don't try to make bitwise copies of
1e42d5c6 4900 TREE_ADDRESSABLE types. */
058a1b7a 4901
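/* As a rough sketch, a large, mostly nonzero initializer such as
     int a[8] = { 1, 2, 3, 4, 5, 6, 7, 8 };
   may be dropped into an anonymous readonly constant and the assignment
   turned into a block copy from it.  */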
90926089 4902 if (valid_const_initializer
4903 && !(cleared || num_nonzero_elements == 0)
1e42d5c6 4904 && !TREE_ADDRESSABLE (type))
c0a843e0 4905 {
4906 HOST_WIDE_INT size = int_size_in_bytes (type);
4907 unsigned int align;
4908
4909 /* ??? We can still get unbounded array types, at least
4910 from the C++ front end. This seems wrong, but attempt
4911 to work around it for now. */
4912 if (size < 0)
4913 {
4914 size = int_size_in_bytes (TREE_TYPE (object));
4915 if (size >= 0)
4916 TREE_TYPE (ctor) = type = TREE_TYPE (object);
4917 }
4918
4919 /* Find the maximum alignment we can assume for the object. */
4920 /* ??? Make use of DECL_OFFSET_ALIGN. */
4921 if (DECL_P (object))
4922 align = DECL_ALIGN (object);
4923 else
4924 align = TYPE_ALIGN (type);
4925
94a6770c 4926 /* Do a block move either if the size is so small as to make
4927 each individual move a sub-unit move on average, or if it
4928 is so large as to make individual moves inefficient. */
e3a32435 4929 if (size > 0
4930 && num_nonzero_elements > 1
c90b96f5 4931 /* For ctors that have many repeated nonzero elements
4932 represented through RANGE_EXPRs, prefer initializing
4933 those through runtime loops over copies of large amounts
4934 of data from readonly data section. */
4935 && (num_unique_nonzero_elements
4936 > num_nonzero_elements / unique_nonzero_ratio
4937 || size <= min_unique_size)
94a6770c 4938 && (size < num_nonzero_elements
4939 || !can_move_by_pieces (size, align)))
c0a843e0 4940 {
5b56c905 4941 if (notify_temp_creation)
4942 return GS_ERROR;
4943
126740ee 4944 walk_tree (&ctor, force_labels_r, NULL, NULL);
4945 ctor = tree_output_constant_def (ctor);
4946 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
4947 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
4948 TREE_OPERAND (*expr_p, 1) = ctor;
d38cff30 4949
4950 /* This is no longer an assignment of a CONSTRUCTOR, but
4951 we still may have processing to do on the LHS. So
4952 pretend we didn't do anything here to let that happen. */
4953 return GS_UNHANDLED;
c0a843e0 4954 }
4955 }
4956
9ec012cb 4957 /* If the target is volatile, and we have nonzero elements and more
 4958 than one field to assign, initialize the target from a temporary. */
ff5d4405 4959 if (TREE_THIS_VOLATILE (object)
4960 && !TREE_ADDRESSABLE (type)
9ec012cb 4961 && num_nonzero_elements > 0
f1f41a6c 4962 && vec_safe_length (elts) > 1)
ff5d4405 4963 {
f9e245b2 4964 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
ff5d4405 4965 TREE_OPERAND (*expr_p, 0) = temp;
4966 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
4967 *expr_p,
4968 build2 (MODIFY_EXPR, void_type_node,
4969 object, temp));
4970 return GS_OK;
4971 }
4972
5b56c905 4973 if (notify_temp_creation)
4974 return GS_OK;
4975
30d12889 4976 /* If there are nonzero elements and if needed, pre-evaluate to capture
4977 elements overlapping with the lhs into temporaries. We must do this
4978 before clearing to fetch the values before they are zeroed-out. */
4979 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
e1685698 4980 {
4981 preeval_data.lhs_base_decl = get_base_address (object);
4982 if (!DECL_P (preeval_data.lhs_base_decl))
4983 preeval_data.lhs_base_decl = NULL;
4984 preeval_data.lhs_alias_set = get_alias_set (object);
4985
75a70cf9 4986 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
e1685698 4987 pre_p, post_p, &preeval_data);
4988 }
4989
7e8f098c 4990 bool ctor_has_side_effects_p
4991 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
4992
c0a843e0 4993 if (cleared)
4994 {
4995 /* Zap the CONSTRUCTOR element list, which simplifies this case.
4996 Note that we still have to gimplify, in order to handle the
d38cff30 4997 case of variable sized types. Avoid shared tree structures. */
c75b4594 4998 CONSTRUCTOR_ELTS (ctor) = NULL;
75a70cf9 4999 TREE_SIDE_EFFECTS (ctor) = 0;
d38cff30 5000 object = unshare_expr (object);
75a70cf9 5001 gimplify_stmt (expr_p, pre_p);
c0a843e0 5002 }
5003
42b74698 5004 /* If we have not block cleared the object, or if there are nonzero
7e8f098c 5005 elements in the constructor, or if the constructor has side effects,
5006 add assignments to the individual scalar fields of the object. */
5007 if (!cleared
5008 || num_nonzero_elements > 0
5009 || ctor_has_side_effects_p)
e1685698 5010 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
c0a843e0 5011
5012 *expr_p = NULL_TREE;
5013 }
5014 break;
5015
5016 case COMPLEX_TYPE:
5017 {
5018 tree r, i;
5019
5b56c905 5020 if (notify_temp_creation)
5021 return GS_OK;
5022
c0a843e0 5023 /* Extract the real and imaginary parts out of the ctor. */
f1f41a6c 5024 gcc_assert (elts->length () == 2);
5025 r = (*elts)[0].value;
5026 i = (*elts)[1].value;
c0a843e0 5027 if (r == NULL || i == NULL)
5028 {
385f3f36 5029 tree zero = build_zero_cst (TREE_TYPE (type));
c0a843e0 5030 if (r == NULL)
5031 r = zero;
5032 if (i == NULL)
5033 i = zero;
5034 }
5035
5036 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5037 represent creation of a complex value. */
5038 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
5039 {
5040 ctor = build_complex (type, r, i);
5041 TREE_OPERAND (*expr_p, 1) = ctor;
5042 }
5043 else
5044 {
40b19772 5045 ctor = build2 (COMPLEX_EXPR, type, r, i);
c0a843e0 5046 TREE_OPERAND (*expr_p, 1) = ctor;
75a70cf9 5047 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
5048 pre_p,
5049 post_p,
d471893d 5050 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
5051 fb_rvalue);
c0a843e0 5052 }
5053 }
5054 break;
0375a275 5055
c0a843e0 5056 case VECTOR_TYPE:
c75b4594 5057 {
5058 unsigned HOST_WIDE_INT ix;
5059 constructor_elt *ce;
3a3c175c 5060
5b56c905 5061 if (notify_temp_creation)
5062 return GS_OK;
5063
c75b4594 5064 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5065 if (TREE_CONSTANT (ctor))
5066 {
5067 bool constant_p = true;
5068 tree value;
5069
5070 /* Even when ctor is constant, it might contain non-*_CST
e80ea974 5071 elements, such as addresses or trapping values like
5072 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5073 in VECTOR_CST nodes. */
c75b4594 5074 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
5075 if (!CONSTANT_CLASS_P (value))
5076 {
5077 constant_p = false;
5078 break;
5079 }
3a3c175c 5080
c75b4594 5081 if (constant_p)
5082 {
5083 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
5084 break;
5085 }
96d5c2e2 5086
e80ea974 5087 TREE_CONSTANT (ctor) = 0;
c75b4594 5088 }
3a3c175c 5089
c75b4594 5090 /* Vector types use CONSTRUCTOR all the way through gimple
d08aa032 5091 compilation as a general initializer. */
f1f41a6c 5092 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
c75b4594 5093 {
5094 enum gimplify_status tret;
75a70cf9 5095 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
5096 fb_rvalue);
c75b4594 5097 if (tret == GS_ERROR)
5098 ret = GS_ERROR;
d08aa032 5099 else if (TREE_STATIC (ctor)
5100 && !initializer_constant_valid_p (ce->value,
5101 TREE_TYPE (ce->value)))
5102 TREE_STATIC (ctor) = 0;
c75b4594 5103 }
75a70cf9 5104 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
5105 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
c75b4594 5106 }
c0a843e0 5107 break;
4ee9c684 5108
c0a843e0 5109 default:
5110 /* So how did we get a CONSTRUCTOR for a scalar type? */
0d59b19d 5111 gcc_unreachable ();
c0a843e0 5112 }
4ee9c684 5113
c0a843e0 5114 if (ret == GS_ERROR)
5115 return GS_ERROR;
7361bb02 5116 /* If we have gimplified both sides of the initializer but have
5117 not emitted an assignment, do so now. */
5118 if (*expr_p)
5119 {
5120 tree lhs = TREE_OPERAND (*expr_p, 0);
5121 tree rhs = TREE_OPERAND (*expr_p, 1);
51361f65 5122 if (want_value && object == lhs)
5123 lhs = unshare_expr (lhs);
7361bb02 5124 gassign *init = gimple_build_assign (lhs, rhs);
5125 gimplify_seq_add_stmt (pre_p, init);
5126 }
5127 if (want_value)
c0a843e0 5128 {
c0a843e0 5129 *expr_p = object;
5130 return GS_OK;
4ee9c684 5131 }
c0a843e0 5132 else
75a70cf9 5133 {
7361bb02 5134 *expr_p = NULL;
75a70cf9 5135 return GS_ALL_DONE;
5136 }
c0a843e0 5137}
4ee9c684 5138
db812c94 5139/* Given a pointer value OP0, return a simplified version of an
5140 indirection through OP0, or NULL_TREE if no simplification is
5141 possible. This may only be applied to a rhs of an expression.
 5142 Note that the resulting type may differ from the pointed-to type,
 5143 but the two remain compatible from the langhooks point of
 5144 view. */
5145
5146static tree
5147gimple_fold_indirect_ref_rhs (tree t)
5148{
5149 return gimple_fold_indirect_ref (t);
5150}
5151
82972775 5152/* Subroutine of gimplify_modify_expr to do simplifications of
5153 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5154 something changes. */
4ee9c684 5155
c0a843e0 5156static enum gimplify_status
75a70cf9 5157gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5158 gimple_seq *pre_p, gimple_seq *post_p,
5159 bool want_value)
c0a843e0 5160{
eda37335 5161 enum gimplify_status ret = GS_UNHANDLED;
5162 bool changed;
4ee9c684 5163
eda37335 5164 do
5165 {
5166 changed = false;
5167 switch (TREE_CODE (*from_p))
5168 {
5169 case VAR_DECL:
5170 /* If we're assigning from a read-only variable initialized with
5171 a constructor, do the direct assignment from the constructor,
 5172 but only if neither source nor target is volatile, since this
5173 latter assignment might end up being done on a per-field basis. */
5174 if (DECL_INITIAL (*from_p)
5175 && TREE_READONLY (*from_p)
5176 && !TREE_THIS_VOLATILE (*from_p)
5177 && !TREE_THIS_VOLATILE (*to_p)
5178 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
5179 {
5180 tree old_from = *from_p;
5181 enum gimplify_status subret;
5182
5183 /* Move the constructor into the RHS. */
5184 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5185
5186 /* Let's see if gimplify_init_constructor will need to put
5187 it in memory. */
5188 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5189 false, true);
5190 if (subret == GS_ERROR)
5191 {
5192 /* If so, revert the change. */
5193 *from_p = old_from;
5194 }
5195 else
5196 {
5197 ret = GS_OK;
5198 changed = true;
5199 }
5200 }
5201 break;
5202 case INDIRECT_REF:
82972775 5203 {
eda37335 5204 /* If we have code like
5b56c905 5205
eda37335 5206 *(const A*)(A*)&x
5b56c905 5207
eda37335 5208 where the type of "x" is a (possibly cv-qualified variant
5209 of "A"), treat the entire expression as identical to "x".
5210 This kind of code arises in C++ when an object is bound
5211 to a const reference, and if "x" is a TARGET_EXPR we want
5212 to take advantage of the optimization below. */
cedf4b90 5213 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
eda37335 5214 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
5215 if (t)
5b56c905 5216 {
cedf4b90 5217 if (TREE_THIS_VOLATILE (t) != volatile_p)
5218 {
74efe522 5219 if (DECL_P (t))
cedf4b90 5220 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5221 build_fold_addr_expr (t));
5222 if (REFERENCE_CLASS_P (t))
5223 TREE_THIS_VOLATILE (t) = volatile_p;
5224 }
eda37335 5225 *from_p = t;
5226 ret = GS_OK;
5227 changed = true;
5b56c905 5228 }
eda37335 5229 break;
5230 }
5231
5232 case TARGET_EXPR:
5233 {
5234 /* If we are initializing something from a TARGET_EXPR, strip the
5235 TARGET_EXPR and initialize it directly, if possible. This can't
5236 be done if the initializer is void, since that implies that the
5237 temporary is set in some non-trivial way.
5238
5239 ??? What about code that pulls out the temp and uses it
5240 elsewhere? I think that such code never uses the TARGET_EXPR as
5241 an initializer. If I'm wrong, we'll die because the temp won't
5242 have any RTL. In that case, I guess we'll need to replace
5243 references somehow. */
5244 tree init = TARGET_EXPR_INITIAL (*from_p);
5245
5246 if (init
7604a798 5247 && (TREE_CODE (*expr_p) != MODIFY_EXPR
5248 || !TARGET_EXPR_NO_ELIDE (*from_p))
eda37335 5249 && !VOID_TYPE_P (TREE_TYPE (init)))
5b56c905 5250 {
eda37335 5251 *from_p = init;
5b56c905 5252 ret = GS_OK;
eda37335 5253 changed = true;
5b56c905 5254 }
82972775 5255 }
eda37335 5256 break;
ce43c806 5257
eda37335 5258 case COMPOUND_EXPR:
5259 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5260 caught. */
5261 gimplify_compound_expr (from_p, pre_p, true);
5262 ret = GS_OK;
5263 changed = true;
5264 break;
ce43c806 5265
eda37335 5266 case CONSTRUCTOR:
a187db2a 5267 /* If we already made some changes, let the front end have a
5268 crack at this before we break it down. */
5269 if (ret != GS_UNHANDLED)
5270 break;
eda37335 5271 /* If we're initializing from a CONSTRUCTOR, break this into
5272 individual MODIFY_EXPRs. */
5273 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5274 false);
5275
5276 case COND_EXPR:
5277 /* If we're assigning to a non-register type, push the assignment
5278 down into the branches. This is mandatory for ADDRESSABLE types,
5279 since we cannot generate temporaries for such, but it saves a
5280 copy in other cases as well. */
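/* For instance (a sketch), with T a non-register aggregate type:
     t = cond ? x : y;
   is rewritten roughly as
     if (cond) t = x; else t = y;
   so that no temporary of type T is required.  */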
5281 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
ce43c806 5282 {
eda37335 5283 /* This code should mirror the code in gimplify_cond_expr. */
5284 enum tree_code code = TREE_CODE (*expr_p);
5285 tree cond = *from_p;
5286 tree result = *to_p;
5287
5288 ret = gimplify_expr (&result, pre_p, post_p,
5289 is_gimple_lvalue, fb_lvalue);
5290 if (ret != GS_ERROR)
5291 ret = GS_OK;
5292
aa251adb 5293 /* If we are going to write RESULT more than once, clear
5294 TREE_READONLY flag, otherwise we might incorrectly promote
5295 the variable to static const and initialize it at compile
5296 time in one of the branches. */
5297 if (VAR_P (result)
5298 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5299 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5300 TREE_READONLY (result) = 0;
eda37335 5301 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5302 TREE_OPERAND (cond, 1)
5303 = build2 (code, void_type_node, result,
5304 TREE_OPERAND (cond, 1));
5305 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5306 TREE_OPERAND (cond, 2)
5307 = build2 (code, void_type_node, unshare_expr (result),
5308 TREE_OPERAND (cond, 2));
5309
5310 TREE_TYPE (cond) = void_type_node;
5311 recalculate_side_effects (cond);
5312
5313 if (want_value)
5314 {
5315 gimplify_and_add (cond, pre_p);
5316 *expr_p = unshare_expr (result);
5317 }
5318 else
5319 *expr_p = cond;
5320 return ret;
ce43c806 5321 }
ce43c806 5322 break;
ce43c806 5323
eda37335 5324 case CALL_EXPR:
5325 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5326 return slot so that we don't generate a temporary. */
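/* E.g. (a sketch), for a call returning a large aggregate,
     s = foo ();
   the LHS can be used directly as foo's return slot instead of going
   through a separate temporary, when the checks below show that this
   is safe.  */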
5327 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5328 && aggregate_value_p (*from_p, *from_p))
c0a843e0 5329 {
eda37335 5330 bool use_target;
5331
5332 if (!(rhs_predicate_for (*to_p))(*from_p))
5333 /* If we need a temporary, *to_p isn't accurate. */
5334 use_target = false;
57859735 5335 /* It's OK to use the return slot directly unless it's an NRV. */
eda37335 5336 else if (TREE_CODE (*to_p) == RESULT_DECL
5337 && DECL_NAME (*to_p) == NULL_TREE
5338 && needs_to_live_in_memory (*to_p))
eda37335 5339 use_target = true;
5340 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5341 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5342 /* Don't force regs into memory. */
5343 use_target = false;
5344 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5345 /* It's OK to use the target directly if it's being
5346 initialized. */
5347 use_target = true;
33b2642e 5348 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5349 != INTEGER_CST)
fc0c4d51 5350 /* Always use the target, and thus the return slot optimization
 5351 (RSO), for variable-sized types. GIMPLE cannot deal with a
 5352 variable-sized assignment embedded in a call statement. */
5353 use_target = true;
28098b5b 5354 else if (TREE_CODE (*to_p) != SSA_NAME
5355 && (!is_gimple_variable (*to_p)
5356 || needs_to_live_in_memory (*to_p)))
eda37335 5357 /* Don't use the original target if it's already addressable;
5358 if its address escapes, and the called function uses the
5359 NRV optimization, a conforming program could see *to_p
5360 change before the called function returns; see c++/19317.
5361 When optimizing, the return_slot pass marks more functions
5362 as safe after we have escape info. */
5363 use_target = false;
5364 else
5365 use_target = true;
5366
5367 if (use_target)
5368 {
5369 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5370 mark_addressable (*to_p);
5371 }
c0a843e0 5372 }
eda37335 5373 break;
4ee9c684 5374
eda37335 5375 case WITH_SIZE_EXPR:
5376 /* Likewise for calls that return an aggregate of non-constant size,
5377 since we would not be able to generate a temporary at all. */
5378 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5379 {
5380 *from_p = TREE_OPERAND (*from_p, 0);
e8f78e99 5381 /* We don't change ret in this case because the
5382 WITH_SIZE_EXPR might have been added in
5383 gimplify_modify_expr, so returning GS_OK would lead to an
5384 infinite loop. */
eda37335 5385 changed = true;
5386 }
5387 break;
4ee9c684 5388
eda37335 5389 /* If we're initializing from a container, push the initialization
5390 inside it. */
5391 case CLEANUP_POINT_EXPR:
5392 case BIND_EXPR:
5393 case STATEMENT_LIST:
c0a843e0 5394 {
eda37335 5395 tree wrap = *from_p;
5396 tree t;
cf6b103e 5397
eda37335 5398 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5399 fb_lvalue);
cf6b103e 5400 if (ret != GS_ERROR)
5401 ret = GS_OK;
5402
eda37335 5403 t = voidify_wrapper_expr (wrap, *expr_p);
5404 gcc_assert (t == *expr_p);
cf6b103e 5405
5406 if (want_value)
5407 {
eda37335 5408 gimplify_and_add (wrap, pre_p);
5409 *expr_p = unshare_expr (*to_p);
cf6b103e 5410 }
5411 else
eda37335 5412 *expr_p = wrap;
5413 return GS_OK;
c0a843e0 5414 }
4ee9c684 5415
eda37335 5416 case COMPOUND_LITERAL_EXPR:
ea523851 5417 {
eda37335 5418 tree complit = TREE_OPERAND (*expr_p, 1);
5419 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5420 tree decl = DECL_EXPR_DECL (decl_s);
5421 tree init = DECL_INITIAL (decl);
5422
5423 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5424 into struct T x = { 0, 1, 2 } if the address of the
5425 compound literal has never been taken. */
5426 if (!TREE_ADDRESSABLE (complit)
5427 && !TREE_ADDRESSABLE (decl)
5428 && init)
ea523851 5429 {
eda37335 5430 *expr_p = copy_node (*expr_p);
5431 TREE_OPERAND (*expr_p, 1) = init;
5432 return GS_OK;
ea523851 5433 }
5434 }
5435
eda37335 5436 default:
5437 break;
862f468c 5438 }
eda37335 5439 }
5440 while (changed);
4ee9c684 5441
4ee9c684 5442 return ret;
5443}
5444
183e96b6 5445
5446/* Return true if T looks like a valid GIMPLE statement. */
5447
5448static bool
5449is_gimple_stmt (tree t)
5450{
5451 const enum tree_code code = TREE_CODE (t);
5452
5453 switch (code)
5454 {
5455 case NOP_EXPR:
5456 /* The only valid NOP_EXPR is the empty statement. */
5457 return IS_EMPTY_STMT (t);
5458
5459 case BIND_EXPR:
5460 case COND_EXPR:
5461 /* These are only valid if they're void. */
5462 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5463
5464 case SWITCH_EXPR:
5465 case GOTO_EXPR:
5466 case RETURN_EXPR:
5467 case LABEL_EXPR:
5468 case CASE_LABEL_EXPR:
5469 case TRY_CATCH_EXPR:
5470 case TRY_FINALLY_EXPR:
5471 case EH_FILTER_EXPR:
5472 case CATCH_EXPR:
5473 case ASM_EXPR:
5474 case STATEMENT_LIST:
ca4c3545 5475 case OACC_PARALLEL:
5476 case OACC_KERNELS:
5477 case OACC_DATA:
5478 case OACC_HOST_DATA:
5479 case OACC_DECLARE:
5480 case OACC_UPDATE:
5481 case OACC_ENTER_DATA:
5482 case OACC_EXIT_DATA:
5483 case OACC_CACHE:
183e96b6 5484 case OMP_PARALLEL:
5485 case OMP_FOR:
3d483a94 5486 case OMP_SIMD:
bc7bff74 5487 case OMP_DISTRIBUTE:
ca4c3545 5488 case OACC_LOOP:
183e96b6 5489 case OMP_SECTIONS:
5490 case OMP_SECTION:
5491 case OMP_SINGLE:
5492 case OMP_MASTER:
bc7bff74 5493 case OMP_TASKGROUP:
183e96b6 5494 case OMP_ORDERED:
5495 case OMP_CRITICAL:
5496 case OMP_TASK:
43895be5 5497 case OMP_TARGET:
5498 case OMP_TARGET_DATA:
5499 case OMP_TARGET_UPDATE:
5500 case OMP_TARGET_ENTER_DATA:
5501 case OMP_TARGET_EXIT_DATA:
5502 case OMP_TASKLOOP:
5503 case OMP_TEAMS:
183e96b6 5504 /* These are always void. */
5505 return true;
5506
5507 case CALL_EXPR:
5508 case MODIFY_EXPR:
5509 case PREDICT_EXPR:
5510 /* These are valid regardless of their type. */
5511 return true;
5512
5513 default:
5514 return false;
5515 }
5516}
5517
5518
1dfbdb0f 5519/* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5520 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
a70770d2 5521 DECL_GIMPLE_REG_P set.
5522
5523 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5524 other, unmodified part of the complex object just before the total store.
5525 As a consequence, if the object is still uninitialized, an undefined value
5526 will be loaded into a register, which may result in a spurious exception
5527 if the register is floating-point and the value happens to be a signaling
5528 NaN for example. Then the fully-fledged complex operations lowering pass
5529 followed by a DCE pass are necessary in order to fix things up. */
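/* As a sketch of the transformation, a partial store such as
     __real c = x;
   is rewritten roughly as
     tmp = __imag c;
     c = COMPLEX_EXPR <x, tmp>;
   so that the variable is always stored as a whole and can thus use
   real operands.  */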
1dfbdb0f 5530
5531static enum gimplify_status
75a70cf9 5532gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5533 bool want_value)
1dfbdb0f 5534{
5535 enum tree_code code, ocode;
5536 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5537
75a70cf9 5538 lhs = TREE_OPERAND (*expr_p, 0);
5539 rhs = TREE_OPERAND (*expr_p, 1);
1dfbdb0f 5540 code = TREE_CODE (lhs);
5541 lhs = TREE_OPERAND (lhs, 0);
5542
5543 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5544 other = build1 (ocode, TREE_TYPE (rhs), lhs);
33da34ea 5545 TREE_NO_WARNING (other) = 1;
1dfbdb0f 5546 other = get_formal_tmp_var (other, pre_p);
5547
5548 realpart = code == REALPART_EXPR ? rhs : other;
5549 imagpart = code == REALPART_EXPR ? other : rhs;
5550
5551 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5552 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5553 else
5554 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5555
75a70cf9 5556 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5557 *expr_p = (want_value) ? rhs : NULL_TREE;
1dfbdb0f 5558
5559 return GS_ALL_DONE;
5560}
5561
5206b159 5562/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
4ee9c684 5563
5564 modify_expr
5565 : varname '=' rhs
5566 | '*' ID '=' rhs
5567
5568 PRE_P points to the list where side effects that must happen before
5569 *EXPR_P should be stored.
5570
5571 POST_P points to the list where side effects that must happen after
5572 *EXPR_P should be stored.
5573
5574 WANT_VALUE is nonzero iff we want to use the value of this expression
5575 in another expression. */
5576
5577static enum gimplify_status
75a70cf9 5578gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5579 bool want_value)
4ee9c684 5580{
75a70cf9 5581 tree *from_p = &TREE_OPERAND (*expr_p, 1);
5582 tree *to_p = &TREE_OPERAND (*expr_p, 0);
6374121b 5583 enum gimplify_status ret = GS_UNHANDLED;
42acab1c 5584 gimple *assign;
389dd41b 5585 location_t loc = EXPR_LOCATION (*expr_p);
8f1c7d19 5586 gimple_stmt_iterator gsi;
4ee9c684 5587
0d59b19d 5588 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
5589 || TREE_CODE (*expr_p) == INIT_EXPR);
4ee9c684 5590
6458ce4f 5591 /* Trying to simplify a clobber using normal logic doesn't work,
5592 so handle it here. */
5593 if (TREE_CLOBBER_P (*from_p))
5594 {
9f559b20 5595 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5596 if (ret == GS_ERROR)
5597 return ret;
6ae0d78c 5598 gcc_assert (!want_value);
5599 if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
5600 {
5601 tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
5602 pre_p, post_p);
5603 *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
5604 }
6458ce4f 5605 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
5606 *expr_p = NULL;
5607 return GS_ALL_DONE;
5608 }
5609
80296012 5610 /* Insert pointer conversions required by the middle-end that are not
 5611 required by the frontend. This fixes middle-end type checking,
 5612 e.g. for gcc.dg/redecl-6.c. */
dfe8e806 5613 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
80296012 5614 {
5615 STRIP_USELESS_TYPE_CONVERSION (*from_p);
5616 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
389dd41b 5617 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
80296012 5618 }
5619
cf39ec50 5620 /* See if any simplifications can be done based on what the RHS is. */
5621 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5622 want_value);
5623 if (ret != GS_UNHANDLED)
5624 return ret;
5625
5626 /* For zero sized types only gimplify the left hand side and right hand
5627 side as statements and throw away the assignment. Do this after
5628 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5629 types properly. */
7a509355 5630 if (zero_sized_type (TREE_TYPE (*from_p))
5631 && !want_value
5632 /* Don't do this for calls that return addressable types, expand_call
5633 relies on those having a lhs. */
5634 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
5635 && TREE_CODE (*from_p) == CALL_EXPR))
d11316a3 5636 {
75a70cf9 5637 gimplify_stmt (from_p, pre_p);
5638 gimplify_stmt (to_p, pre_p);
d11316a3 5639 *expr_p = NULL_TREE;
5640 return GS_ALL_DONE;
5641 }
4ee9c684 5642
80f06481 5643 /* If the value being copied is of variable width, compute the length
5644 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5645 before gimplifying any of the operands so that we can resolve any
5646 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5647 the size of the expression to be copied, not of the destination, so
75a70cf9 5648 that is what we must do here. */
80f06481 5649 maybe_with_size_expr (from_p);
4ee9c684 5650
75a70cf9 5651 /* As a special case, we have to temporarily allow for assignments
5652 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5653 a toplevel statement, when gimplifying the GENERIC expression
5654 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5655 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5656
5657 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5658 prevent gimplify_expr from trying to create a new temporary for
5659 foo's LHS, we tell it that it should only gimplify until it
5660 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5661 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5662 and all we need to do here is set 'a' to be its LHS. */
866b9fd5 5663
5664 /* Gimplify the RHS first for C++17 and bug 71104. */
5665 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
5666 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
5667 if (ret == GS_ERROR)
5668 return ret;
5669
5670 /* Then gimplify the LHS. */
68a5143e 5671 /* If we gimplified the RHS to a CALL_EXPR and that call may return
 5672 twice, we have to make sure to gimplify into non-SSA as otherwise
5673 the abnormal edge added later will make those defs not dominate
5674 their uses.
5675 ??? Technically this applies only to the registers used in the
5676 resulting non-register *TO_P. */
5677 bool saved_into_ssa = gimplify_ctxp->into_ssa;
5678 if (saved_into_ssa
5679 && TREE_CODE (*from_p) == CALL_EXPR
5680 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
5681 gimplify_ctxp->into_ssa = false;
866b9fd5 5682 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
68a5143e 5683 gimplify_ctxp->into_ssa = saved_into_ssa;
4ee9c684 5684 if (ret == GS_ERROR)
5685 return ret;
5686
866b9fd5 5687 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5688 guess for the predicate was wrong. */
5689 gimple_predicate final_pred = rhs_predicate_for (*to_p);
5690 if (final_pred != initial_pred)
5691 {
5692 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
5693 if (ret == GS_ERROR)
5694 return ret;
5695 }
5696
82fc0e0a 5697 /* In the case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add
47ae02b7 5698 the type size as an argument to the call. */
82fc0e0a 5699 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5700 {
5701 tree call = TREE_OPERAND (*from_p, 0);
5702 tree vlasize = TREE_OPERAND (*from_p, 1);
5703
5704 if (TREE_CODE (call) == CALL_EXPR
5705 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
5706 {
2e474820 5707 int nargs = call_expr_nargs (call);
82fc0e0a 5708 tree type = TREE_TYPE (call);
5709 tree ap = CALL_EXPR_ARG (call, 0);
5710 tree tag = CALL_EXPR_ARG (call, 1);
c9595c51 5711 tree aptag = CALL_EXPR_ARG (call, 2);
82fc0e0a 5712 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
2e474820 5713 IFN_VA_ARG, type,
5714 nargs + 1, ap, tag,
c9595c51 5715 aptag, vlasize);
5716 TREE_OPERAND (*from_p, 0) = newcall;
82fc0e0a 5717 }
5718 }
5719
6374121b 5720 /* Now see if the above changed *from_p to something we handle specially. */
5721 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5722 want_value);
4ee9c684 5723 if (ret != GS_UNHANDLED)
5724 return ret;
5725
80f06481 5726 /* If we've got a variable-sized assignment between two lvalues (i.e. one
 5727 that does not involve a call), then we can make things a bit more
 5728 straightforward by converting the assignment to memcpy or memset. */
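/* A sketch: for an assignment between two variable-sized objects, with
   the size captured by the WITH_SIZE_EXPR, the copy can be emitted as
     __builtin_memcpy (&dst, &src, size);
   and, when the source is a CONSTRUCTOR, as a memset of the
   destination instead.  */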
5729 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5730 {
5731 tree from = TREE_OPERAND (*from_p, 0);
5732 tree size = TREE_OPERAND (*from_p, 1);
5733
5734 if (TREE_CODE (from) == CONSTRUCTOR)
75a70cf9 5735 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
5736
c2514472 5737 if (is_gimple_addressable (from))
80f06481 5738 {
5739 *from_p = from;
75a70cf9 5740 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
5741 pre_p);
80f06481 5742 }
5743 }
5744
50c96bdc 5745 /* Transform partial stores to non-addressable complex variables into
5746 total stores. This allows us to use real instead of virtual operands
5747 for these variables, which improves optimization. */
5748 if ((TREE_CODE (*to_p) == REALPART_EXPR
5749 || TREE_CODE (*to_p) == IMAGPART_EXPR)
5750 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
5751 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
5752
4bef6a37 5753 /* Try to alleviate the effects of the gimplification creating artificial
708cee3f 5754 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5755 make sure not to create DECL_DEBUG_EXPR links across functions. */
4bef6a37 5756 if (!gimplify_ctxp->into_ssa
53e9c5c4 5757 && VAR_P (*from_p)
75a70cf9 5758 && DECL_IGNORED_P (*from_p)
5759 && DECL_P (*to_p)
708cee3f 5760 && !DECL_IGNORED_P (*to_p)
1b144f13 5761 && decl_function_context (*to_p) == current_function_decl
5762 && decl_function_context (*from_p) == current_function_decl)
4bef6a37 5763 {
5764 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
5765 DECL_NAME (*from_p)
5766 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
8e966116 5767 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
4bef6a37 5768 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
75a70cf9 5769 }
5770
e83c4d0f 5771 if (want_value && TREE_THIS_VOLATILE (*to_p))
5772 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
5773
75a70cf9 5774 if (TREE_CODE (*from_p) == CALL_EXPR)
5775 {
5776 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5777 instead of a GIMPLE_ASSIGN. */
1a91d914 5778 gcall *call_stmt;
0c93c8a9 5779 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
5780 {
5781 /* Gimplify internal functions created in the FEs. */
5782 int nargs = call_expr_nargs (*from_p), i;
5783 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
5784 auto_vec<tree> vargs (nargs);
5785
5786 for (i = 0; i < nargs; i++)
5787 {
5788 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
5789 EXPR_LOCATION (*from_p));
5790 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
5791 }
1a91d914 5792 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
989f02dc 5793 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
1a91d914 5794 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
0c93c8a9 5795 }
c83059be 5796 else
5797 {
0c93c8a9 5798 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
5799 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
5800 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
5801 tree fndecl = get_callee_fndecl (*from_p);
5802 if (fndecl
a0e9bfbb 5803 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
0c93c8a9 5804 && call_expr_nargs (*from_p) == 3)
1a91d914 5805 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
5806 CALL_EXPR_ARG (*from_p, 0),
5807 CALL_EXPR_ARG (*from_p, 1),
5808 CALL_EXPR_ARG (*from_p, 2));
0c93c8a9 5809 else
5810 {
3c0f15b4 5811 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
0c93c8a9 5812 }
c83059be 5813 }
1a91d914 5814 notice_special_calls (call_stmt);
0a6b484c 5815 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
1a91d914 5816 gimple_call_set_lhs (call_stmt, *to_p);
9ae1b28a 5817 else if (TREE_CODE (*to_p) == SSA_NAME)
 5818 /* The above is somewhat premature; avoid ICEing later for an
5819 SSA name w/o a definition. We may have uses in the GIMPLE IL.
5820 ??? This doesn't make it a default-def. */
5821 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
92bc38e7 5822
1a91d914 5823 assign = call_stmt;
4bef6a37 5824 }
75a70cf9 5825 else
e60a6f7b 5826 {
5827 assign = gimple_build_assign (*to_p, *from_p);
5828 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
3d78ea44 5829 if (COMPARISON_CLASS_P (*from_p))
5830 gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
e60a6f7b 5831 }
4bef6a37 5832
75a70cf9 5833 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
4ee9c684 5834 {
d3a27ad5 5835 /* We should have got an SSA name from the start. */
9ae1b28a 5836 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
5837 || ! gimple_in_ssa_p (cfun));
75a70cf9 5838 }
35cc02b5 5839
8f1c7d19 5840 gimplify_seq_add_stmt (pre_p, assign);
5841 gsi = gsi_last (*pre_p);
c09f306d 5842 maybe_fold_stmt (&gsi);
8f1c7d19 5843
75a70cf9 5844 if (want_value)
5845 {
e83c4d0f 5846 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
d471893d 5847 return GS_OK;
4ee9c684 5848 }
75a70cf9 5849 else
5850 *expr_p = NULL;
4ee9c684 5851
d471893d 5852 return GS_ALL_DONE;
4ee9c684 5853}
5854
57859735 5855/* Gimplify a comparison between two variable-sized objects. Do this
5856 with a call to BUILT_IN_MEMCMP. */
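/* For example (roughly), with two variable-sized objects A and B whose
   common size in bytes is SIZE,
     A == B
   is rewritten as
     memcmp (&A, &B, SIZE) == 0
   (and correspondingly for != 0).  */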
6374121b 5857
5858static enum gimplify_status
5859gimplify_variable_sized_compare (tree *expr_p)
5860{
ae7e0e56 5861 location_t loc = EXPR_LOCATION (*expr_p);
6374121b 5862 tree op0 = TREE_OPERAND (*expr_p, 0);
5863 tree op1 = TREE_OPERAND (*expr_p, 1);
ae7e0e56 5864 tree t, arg, dest, src, expr;
c2f47e15 5865
5866 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
5867 arg = unshare_expr (arg);
5868 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
389dd41b 5869 src = build_fold_addr_expr_loc (loc, op1);
5870 dest = build_fold_addr_expr_loc (loc, op0);
b9a16870 5871 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
389dd41b 5872 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
ae7e0e56 5873
5874 expr
40b19772 5875 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
ae7e0e56 5876 SET_EXPR_LOCATION (expr, loc);
5877 *expr_p = expr;
6374121b 5878
5879 return GS_OK;
5880}
5881
57859735 5882/* Gimplify a comparison between two aggregate objects of integral scalar
5883 mode as a comparison between the bitwise equivalent scalar values. */
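/* For instance (a sketch), comparing two four-byte structures whose
   mode is SImode,
     x == y
   becomes roughly
     VIEW_CONVERT_EXPR<unsigned int>(x) == VIEW_CONVERT_EXPR<unsigned int>(y)
   i.e. an ordinary scalar comparison of the underlying bits.  */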
9c530f25 5884
5885static enum gimplify_status
5886gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5887{
389dd41b 5888 location_t loc = EXPR_LOCATION (*expr_p);
9c530f25 5889 tree op0 = TREE_OPERAND (*expr_p, 0);
5890 tree op1 = TREE_OPERAND (*expr_p, 1);
5891
5892 tree type = TREE_TYPE (op0);
5893 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
5894
389dd41b 5895 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
5896 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
9c530f25 5897
5898 *expr_p
389dd41b 5899 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
9c530f25 5900
5901 return GS_OK;
5902}
5903
57859735 5904/* Gimplify an expression sequence. This function gimplifies each
5905 expression and rewrites the original expression with the last
4ee9c684 5906 expression of the sequence in GIMPLE form.
5907
5908 PRE_P points to the list where the side effects for all the
5909 expressions in the sequence will be emitted.
7c2f0500 5910
4ee9c684 5911 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
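/* For instance, for the sequence
     (a = 1, b = 2, c)
   the side effects "a = 1;" and "b = 2;" are emitted into PRE_P and the
   expression is rewritten to just "c".  */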
4ee9c684 5912
5913static enum gimplify_status
75a70cf9 5914gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4ee9c684 5915{
5916 tree t = *expr_p;
5917
5918 do
5919 {
5920 tree *sub_p = &TREE_OPERAND (t, 0);
5921
5922 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
5923 gimplify_compound_expr (sub_p, pre_p, false);
5924 else
75a70cf9 5925 gimplify_stmt (sub_p, pre_p);
4ee9c684 5926
5927 t = TREE_OPERAND (t, 1);
5928 }
5929 while (TREE_CODE (t) == COMPOUND_EXPR);
5930
5931 *expr_p = t;
5932 if (want_value)
5933 return GS_OK;
5934 else
5935 {
75a70cf9 5936 gimplify_stmt (expr_p, pre_p);
4ee9c684 5937 return GS_ALL_DONE;
5938 }
5939}
5940
75a70cf9 5941/* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
5942 gimplify. After gimplification, EXPR_P will point to a new temporary
5943 that holds the original value of the SAVE_EXPR node.
4ee9c684 5944
75a70cf9 5945 PRE_P points to the list where side effects that must happen before
57859735 5946 *EXPR_P should be stored. */
4ee9c684 5947
5948static enum gimplify_status
75a70cf9 5949gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4ee9c684 5950{
5951 enum gimplify_status ret = GS_ALL_DONE;
5952 tree val;
5953
0d59b19d 5954 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4ee9c684 5955 val = TREE_OPERAND (*expr_p, 0);
5956
0c8fe10f 5957 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
5958 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
d471893d 5959 {
91b9d66d 5960 /* The operand may be a void-valued expression. It is
02e9c224 5961 being executed only for its side-effects. */
5962 if (TREE_TYPE (val) == void_type_node)
5963 {
5964 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5965 is_gimple_stmt, fb_none);
5966 val = NULL;
5967 }
5968 else
5969 /* The temporary may not be an SSA name as later abnormal and EH
01b4fae9 5970 control flow may invalidate use/def domination. When in SSA
 5971 form, assume there are no such issues and SAVE_EXPRs only
5972 appear via GENERIC foldings. */
5973 val = get_initialized_tmp_var (val, pre_p, post_p,
5974 gimple_in_ssa_p (cfun));
0c8fe10f 5975
5976 TREE_OPERAND (*expr_p, 0) = val;
5977 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
d471893d 5978 }
4ee9c684 5979
0c8fe10f 5980 *expr_p = val;
5981
4ee9c684 5982 return ret;
5983}
5984
57859735 5985/* Rewrite the ADDR_EXPR node pointed to by EXPR_P
4ee9c684 5986
5987 unary_expr
5988 : ...
5989 | '&' varname
5990 ...
5991
5992 PRE_P points to the list where side effects that must happen before
5993 *EXPR_P should be stored.
5994
5995 POST_P points to the list where side effects that must happen after
5996 *EXPR_P should be stored. */
5997
5998static enum gimplify_status
75a70cf9 5999gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4ee9c684 6000{
6001 tree expr = *expr_p;
6002 tree op0 = TREE_OPERAND (expr, 0);
6003 enum gimplify_status ret;
389dd41b 6004 location_t loc = EXPR_LOCATION (*expr_p);
4ee9c684 6005
6006 switch (TREE_CODE (op0))
6007 {
6008 case INDIRECT_REF:
c3d4fc14 6009 do_indirect_ref:
4ee9c684 6010 /* Check if we are dealing with an expression of the form '&*ptr'.
6011 While the front end folds away '&*ptr' into 'ptr', these
6012 expressions may be generated internally by the compiler (e.g.,
6013 builtins like __builtin_va_end). */
c3d4fc14 6014 /* Caution: the silent array decomposition semantics we allow for
 6015 ADDR_EXPR mean we can't always discard the pair. */
4e768ca7 6016 /* Gimplification of the ADDR_EXPR operand may drop
6017 cv-qualification conversions, so make sure we add them if
6018 needed. */
c3d4fc14 6019 {
6020 tree op00 = TREE_OPERAND (op0, 0);
6021 tree t_expr = TREE_TYPE (expr);
6022 tree t_op00 = TREE_TYPE (op00);
6023
c8ca3ee7 6024 if (!useless_type_conversion_p (t_expr, t_op00))
389dd41b 6025 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
c3d4fc14 6026 *expr_p = op00;
6027 ret = GS_OK;
6028 }
4ee9c684 6029 break;
6030
6374121b 6031 case VIEW_CONVERT_EXPR:
6032 /* Take the address of our operand and then convert it to the type of
7b7695eb 6033 this ADDR_EXPR.
6034
 6035 ??? The interactions of VIEW_CONVERT_EXPR and aliasing are not at
6036 all clear. The impact of this transformation is even less clear. */
5c98ac90 6037
6038 /* If the operand is a useless conversion, look through it. Doing so
6039 guarantees that the ADDR_EXPR and its operand will remain of the
6040 same type. */
6041 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
aaebd318 6042 op0 = TREE_OPERAND (op0, 0);
5c98ac90 6043
389dd41b 6044 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
6045 build_fold_addr_expr_loc (loc,
6046 TREE_OPERAND (op0, 0)));
6374121b 6047 ret = GS_OK;
4ee9c684 6048 break;
6049
5834e0cd 6050 case MEM_REF:
6051 if (integer_zerop (TREE_OPERAND (op0, 1)))
6052 goto do_indirect_ref;
6053
e3533433 6054 /* fall through */
5834e0cd 6055
4ee9c684 6056 default:
cffdfb3d 6057 /* If we see a call to a declared builtin or see its address
6058 being taken (we can unify those cases here) then we can mark
6059 the builtin for implicit generation by GCC. */
6060 if (TREE_CODE (op0) == FUNCTION_DECL
a0e9bfbb 6061 && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
cffdfb3d 6062 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
6063 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
6064
4ee9c684 6065 /* We use fb_either here because the C frontend sometimes takes
0e364d12 6066 the address of a call that returns a struct; see
6067 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6068 the implied temporary explicit. */
f5224fc9 6069
1e5ec9ee 6070 /* Make the operand addressable. */
4ee9c684 6071 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
c2514472 6072 is_gimple_addressable, fb_either);
90cf240d 6073 if (ret == GS_ERROR)
6074 break;
c3d4fc14 6075
1e5ec9ee 6076 /* Then mark it. Beware that it may not be possible to do so directly
6077 if a temporary has been created by the gimplification. */
6078 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
73b2fde8 6079
90cf240d 6080 op0 = TREE_OPERAND (expr, 0);
4ee9c684 6081
90cf240d 6082 /* For various reasons, the gimplification of the expression
6083 may have made a new INDIRECT_REF. */
6084 if (TREE_CODE (op0) == INDIRECT_REF)
6085 goto do_indirect_ref;
6086
a8cefe90 6087 mark_addressable (TREE_OPERAND (expr, 0));
6088
6089 /* The FEs may end up building ADDR_EXPRs early on a decl with
6090 an incomplete type. Re-build ADDR_EXPRs in canonical form
6091 here. */
6092 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
6093 *expr_p = build_fold_addr_expr (op0);
6094
90cf240d 6095 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
a8cefe90 6096 recompute_tree_invariant_for_addr_expr (*expr_p);
6097
6098 /* If we re-built the ADDR_EXPR add a conversion to the original type
6099 if required. */
6100 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
6101 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
90cf240d 6102
4ee9c684 6103 break;
6104 }
6105
4ee9c684 6106 return ret;
6107}
6108
6109/* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
6110 value; output operands should be a gimple lvalue. */
6111
6112static enum gimplify_status
75a70cf9 6113gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4ee9c684 6114{
75a70cf9 6115 tree expr;
6116 int noutputs;
6117 const char **oconstraints;
4ee9c684 6118 int i;
6119 tree link;
6120 const char *constraint;
6121 bool allows_mem, allows_reg, is_inout;
6122 enum gimplify_status ret, tret;
1a91d914 6123 gasm *stmt;
f1f41a6c 6124 vec<tree, va_gc> *inputs;
6125 vec<tree, va_gc> *outputs;
6126 vec<tree, va_gc> *clobbers;
6127 vec<tree, va_gc> *labels;
75a70cf9 6128 tree link_next;
48e1416a 6129
75a70cf9 6130 expr = *expr_p;
6131 noutputs = list_length (ASM_OUTPUTS (expr));
6132 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
6133
f1f41a6c 6134 inputs = NULL;
6135 outputs = NULL;
6136 clobbers = NULL;
6137 labels = NULL;
4ee9c684 6138
4ee9c684 6139 ret = GS_ALL_DONE;
75a70cf9 6140 link_next = NULL_TREE;
6141 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
4ee9c684 6142 {
1e2bc67f 6143 bool ok;
75a70cf9 6144 size_t constraint_len;
6145
6146 link_next = TREE_CHAIN (link);
6147
6148 oconstraints[i]
6149 = constraint
4ee9c684 6150 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
d9714f26 6151 constraint_len = strlen (constraint);
6152 if (constraint_len == 0)
6153 continue;
4ee9c684 6154
1e2bc67f 6155 ok = parse_output_constraint (&constraint, i, 0, 0,
6156 &allows_mem, &allows_reg, &is_inout);
6157 if (!ok)
6158 {
6159 ret = GS_ERROR;
6160 is_inout = false;
6161 }
4ee9c684 6162
14b34371 6163 /* If we can't make copies, we can only accept memory. */
6164 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
6165 {
6166 if (allows_mem)
6167 allows_reg = 0;
6168 else
6169 {
6170 error ("impossible constraint in %<asm%>");
6171 error ("non-memory output %d must stay in memory", i);
6172 return GS_ERROR;
6173 }
6174 }
6175
4ee9c684 6176 if (!allows_reg && allows_mem)
f5224fc9 6177 mark_addressable (TREE_VALUE (link));
4ee9c684 6178
6179 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6180 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6181 fb_lvalue | fb_mayfail);
6182 if (tret == GS_ERROR)
6183 {
85b9be9b 6184 error ("invalid lvalue in %<asm%> output %d", i);
4ee9c684 6185 ret = tret;
6186 }
6187
f365d297 6188 /* If the constraint does not allow memory make sure we gimplify
6189 it to a register if it is not already but its base is. This
6190 happens for complex and vector components. */
6191 if (!allows_mem)
6192 {
6193 tree op = TREE_VALUE (link);
6194 if (! is_gimple_val (op)
6195 && is_gimple_reg_type (TREE_TYPE (op))
6196 && is_gimple_reg (get_base_address (op)))
6197 {
6198 tree tem = create_tmp_reg (TREE_TYPE (op));
6199 tree ass;
6200 if (is_inout)
6201 {
6202 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6203 tem, unshare_expr (op));
6204 gimplify_and_add (ass, pre_p);
6205 }
6206 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6207 gimplify_and_add (ass, post_p);
6208
6209 TREE_VALUE (link) = tem;
6210 tret = GS_OK;
6211 }
6212 }
6213
f1f41a6c 6214 vec_safe_push (outputs, link);
75a70cf9 6215 TREE_CHAIN (link) = NULL_TREE;
6216
4ee9c684 6217 if (is_inout)
6218 {
6219 /* An input/output operand. To give the optimizers more
6220 flexibility, split it into separate input and output
6221 operands. */
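/* E.g. (a sketch), an operand written as "+r" (x) is split into the
   output "=r" (x) plus a matching input "0" (x), where "0" refers back
   to the output by its operand number.  */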
6222 tree input;
b18dea91 6223 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6224 char buf[11];
4ee9c684 6225
6226 /* Turn the in/out constraint into an output constraint. */
6227 char *p = xstrdup (constraint);
6228 p[0] = '=';
6229 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
4ee9c684 6230
6231 /* And add a matching input constraint. */
6232 if (allows_reg)
6233 {
b18dea91 6234 sprintf (buf, "%u", i);
d72a7307 6235
6236 /* If there are multiple alternatives in the constraint,
 6237 handle each of them individually. Those that allow a register
 6238 will be replaced with the operand number; the others will stay
 6239 unchanged. */
6240 if (strchr (p, ',') != NULL)
6241 {
6242 size_t len = 0, buflen = strlen (buf);
6243 char *beg, *end, *str, *dst;
6244
6245 for (beg = p + 1;;)
6246 {
6247 end = strchr (beg, ',');
6248 if (end == NULL)
6249 end = strchr (beg, '\0');
6250 if ((size_t) (end - beg) < buflen)
6251 len += buflen + 1;
6252 else
6253 len += end - beg + 1;
6254 if (*end)
6255 beg = end + 1;
6256 else
6257 break;
6258 }
6259
680a19b9 6260 str = (char *) alloca (len);
d72a7307 6261 for (beg = p + 1, dst = str;;)
6262 {
6263 const char *tem;
6264 bool mem_p, reg_p, inout_p;
6265
6266 end = strchr (beg, ',');
6267 if (end)
6268 *end = '\0';
6269 beg[-1] = '=';
6270 tem = beg - 1;
6271 parse_output_constraint (&tem, i, 0, 0,
6272 &mem_p, &reg_p, &inout_p);
6273 if (dst != str)
6274 *dst++ = ',';
6275 if (reg_p)
6276 {
6277 memcpy (dst, buf, buflen);
6278 dst += buflen;
6279 }
6280 else
6281 {
6282 if (end)
6283 len = end - beg;
6284 else
6285 len = strlen (beg);
6286 memcpy (dst, beg, len);
6287 dst += len;
6288 }
6289 if (end)
6290 beg = end + 1;
6291 else
6292 break;
6293 }
6294 *dst = '\0';
6295 input = build_string (dst - str, str);
6296 }
6297 else
6298 input = build_string (strlen (buf), buf);
4ee9c684 6299 }
6300 else
6301 input = build_string (constraint_len - 1, constraint + 1);
d72a7307 6302
6303 free (p);
6304
4ee9c684 6305 input = build_tree_list (build_tree_list (NULL_TREE, input),
6306 unshare_expr (TREE_VALUE (link)));
6307 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
6308 }
6309 }
6310
75a70cf9 6311 link_next = NULL_TREE;
6312 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
4ee9c684 6313 {
75a70cf9 6314 link_next = TREE_CHAIN (link);
6315 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4ee9c684 6316 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6317 oconstraints, &allows_mem, &allows_reg);
6318
a36f02c9 6319 /* If we can't make copies, we can only accept memory. */
6320 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
6321 {
6322 if (allows_mem)
6323 allows_reg = 0;
6324 else
6325 {
6326 error ("impossible constraint in %<asm%>");
6327 error ("non-memory input %d must stay in memory", i);
6328 return GS_ERROR;
6329 }
6330 }
6331
4ee9c684 6332 /* If the operand is a memory input, it should be an lvalue. */
6333 if (!allows_reg && allows_mem)
6334 {
c246fb4f 6335 tree inputv = TREE_VALUE (link);
6336 STRIP_NOPS (inputv);
6337 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6338 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6339 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
c641a07a 6340 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6341 || TREE_CODE (inputv) == MODIFY_EXPR)
c246fb4f 6342 TREE_VALUE (link) = error_mark_node;
4ee9c684 6343 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6344 is_gimple_lvalue, fb_lvalue | fb_mayfail);
f55f9344 6345 if (tret != GS_ERROR)
6346 {
 6347 /* Unlike output operands, memory inputs are not guaranteed
 6348 to be lvalues by the FE, and while the expressions are
 6349 marked addressable there, if the operand is e.g. a statement
 6350 expression, temporaries in it might not end up being
 6351 addressable. They might already be used in the IL, so it
 6352 is too late to make them addressable now. */
6353 tree x = TREE_VALUE (link);
6354 while (handled_component_p (x))
6355 x = TREE_OPERAND (x, 0);
6356 if (TREE_CODE (x) == MEM_REF
6357 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6358 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
53e9c5c4 6359 if ((VAR_P (x)
f55f9344 6360 || TREE_CODE (x) == PARM_DECL
6361 || TREE_CODE (x) == RESULT_DECL)
6362 && !TREE_ADDRESSABLE (x)
6363 && is_gimple_reg (x))
6364 {
6365 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6366 input_location), 0,
6367 "memory input %d is not directly addressable",
6368 i);
6369 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6370 }
6371 }
f5224fc9 6372 mark_addressable (TREE_VALUE (link));
4ee9c684 6373 if (tret == GS_ERROR)
6374 {
f55f9344 6375 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6376 "memory input %d is not directly addressable", i);
4ee9c684 6377 ret = tret;
6378 }
6379 }
6380 else
6381 {
6382 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
fbc51f36 6383 is_gimple_asm_val, fb_rvalue);
4ee9c684 6384 if (tret == GS_ERROR)
6385 ret = tret;
6386 }
75a70cf9 6387
6388 TREE_CHAIN (link) = NULL_TREE;
f1f41a6c 6389 vec_safe_push (inputs, link);
4ee9c684 6390 }
48e1416a 6391
4251e916 6392 link_next = NULL_TREE;
6393 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6394 {
6395 link_next = TREE_CHAIN (link);
6396 TREE_CHAIN (link) = NULL_TREE;
6397 vec_safe_push (clobbers, link);
6398 }
78f55ca8 6399
4251e916 6400 link_next = NULL_TREE;
6401 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6402 {
6403 link_next = TREE_CHAIN (link);
6404 TREE_CHAIN (link) = NULL_TREE;
6405 vec_safe_push (labels, link);
6406 }
75a70cf9 6407
bfec3452 6408 /* Do not add ASMs with errors to the gimple IL stream. */
6409 if (ret != GS_ERROR)
6410 {
6411 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
78f55ca8 6412 inputs, outputs, clobbers, labels);
75a70cf9 6413
6753c3c6 6414 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
bfec3452 6415 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6de46ad5 6416 gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));
bfec3452 6417
6418 gimplify_seq_add_stmt (pre_p, stmt);
6419 }
4ee9c684 6420
6421 return ret;
6422}
6423
6424/* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
75a70cf9 6425 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
4ee9c684 6426 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6427 return to this function.
6428
6429 FIXME should we complexify the prequeue handling instead? Or use flags
6430 for all the cleanups and let the optimizer tighten them up? The current
6431 code seems pretty fragile; it will break on a cleanup within any
6432 non-conditional nesting. But any such nesting would be broken, anyway;
6433 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6434 and continues out of it. We can do that at the RTL level, though, so
6435 having an optimizer to tighten up try/finally regions would be a Good
6436 Thing. */
6437
6438static enum gimplify_status
75a70cf9 6439gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
4ee9c684 6440{
75a70cf9 6441 gimple_stmt_iterator iter;
6442 gimple_seq body_sequence = NULL;
4ee9c684 6443
2363ef00 6444 tree temp = voidify_wrapper_expr (*expr_p, NULL);
4ee9c684 6445
6446 /* We only care about the number of conditions between the innermost
aa74adca 6447 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6448 any cleanups collected outside the CLEANUP_POINT_EXPR. */
4ee9c684 6449 int old_conds = gimplify_ctxp->conditions;
75a70cf9 6450 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
cbabc8eb 6451 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
4ee9c684 6452 gimplify_ctxp->conditions = 0;
75a70cf9 6453 gimplify_ctxp->conditional_cleanups = NULL;
cbabc8eb 6454 gimplify_ctxp->in_cleanup_point_expr = true;
4ee9c684 6455
75a70cf9 6456 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
4ee9c684 6457
6458 gimplify_ctxp->conditions = old_conds;
aa74adca 6459 gimplify_ctxp->conditional_cleanups = old_cleanups;
cbabc8eb 6460 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
4ee9c684 6461
75a70cf9 6462 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
4ee9c684 6463 {
42acab1c 6464 gimple *wce = gsi_stmt (iter);
4ee9c684 6465
75a70cf9 6466 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
4ee9c684 6467 {
75a70cf9 6468 if (gsi_one_before_end_p (iter))
4ee9c684 6469 {
75a70cf9 6470 /* Note that gsi_insert_seq_before and gsi_remove do not
6471 scan operands, unlike some other sequence mutators. */
eb1b7c50 6472 if (!gimple_wce_cleanup_eh_only (wce))
6473 gsi_insert_seq_before_without_update (&iter,
6474 gimple_wce_cleanup (wce),
6475 GSI_SAME_STMT);
75a70cf9 6476 gsi_remove (&iter, true);
4ee9c684 6477 break;
6478 }
6479 else
6480 {
1a91d914 6481 gtry *gtry;
75a70cf9 6482 gimple_seq seq;
6483 enum gimple_try_flags kind;
4813f5af 6484
75a70cf9 6485 if (gimple_wce_cleanup_eh_only (wce))
6486 kind = GIMPLE_TRY_CATCH;
4813f5af 6487 else
75a70cf9 6488 kind = GIMPLE_TRY_FINALLY;
6489 seq = gsi_split_seq_after (iter);
6490
f4e36c33 6491 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
75a70cf9 6492 /* Do not use gsi_replace here, as it may scan operands.
6493 We want to do a simple structural modification only. */
e3a19533 6494 gsi_set_stmt (&iter, gtry);
de6bd75e 6495 iter = gsi_start (gtry->eval);
4ee9c684 6496 }
6497 }
6498 else
75a70cf9 6499 gsi_next (&iter);
4ee9c684 6500 }
6501
75a70cf9 6502 gimplify_seq_add_seq (pre_p, body_sequence);
4ee9c684 6503 if (temp)
6504 {
6505 *expr_p = temp;
4ee9c684 6506 return GS_OK;
6507 }
6508 else
6509 {
75a70cf9 6510 *expr_p = NULL;
4ee9c684 6511 return GS_ALL_DONE;
6512 }
6513}
6514
6515/* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
75a70cf9 6516 is the cleanup action required. EH_ONLY is true if the cleanup should
bcae17d7 6517 only be executed if an exception is thrown, not on normal exit.
6518 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6519 only valid for clobbers. */
4ee9c684 6520
6521static void
bcae17d7 6522gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
6523 bool force_uncond = false)
4ee9c684 6524{
42acab1c 6525 gimple *wce;
75a70cf9 6526 gimple_seq cleanup_stmts = NULL;
4ee9c684 6527
 6528	  /* Errors can result in improperly nested cleanups, which in turn cause
75a70cf9 6529	     confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
852f689e 6530 if (seen_error ())
4ee9c684 6531 return;
6532
6926124f 6533 if (gimple_conditional_context ())
4ee9c684 6534 {
6535 /* If we're in a conditional context, this is more complex. We only
6536 want to run the cleanup if we actually ran the initialization that
6537 necessitates it, but we want to run it after the end of the
6538 conditional context. So we wrap the try/finally around the
6539 condition and use a flag to determine whether or not to actually
6540 run the destructor. Thus
6541
6542 test ? f(A()) : 0
6543
6544 becomes (approximately)
6545
6546 flag = 0;
6547 try {
6548 if (test) { A::A(temp); flag = 1; val = f(temp); }
6549 else { val = 0; }
6550 } finally {
6551 if (flag) A::~A(temp);
6552 }
6553 val
6554 */
6926124f 6555 if (force_uncond)
6556 {
6557 gimplify_stmt (&cleanup, &cleanup_stmts);
6558 wce = gimple_build_wce (cleanup_stmts);
6559 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6560 }
6561 else
6562 {
6563 tree flag = create_tmp_var (boolean_type_node, "cleanup");
6564 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
6565 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
6566
6567 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
6568 gimplify_stmt (&cleanup, &cleanup_stmts);
6569 wce = gimple_build_wce (cleanup_stmts);
6570
6571 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
6572 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6573 gimplify_seq_add_stmt (pre_p, ftrue);
6574
6575 /* Because of this manipulation, and the EH edges that jump
6576 threading cannot redirect, the temporary (VAR) will appear
6577 to be used uninitialized. Don't warn. */
6578 TREE_NO_WARNING (var) = 1;
6579 }
4ee9c684 6580 }
6581 else
6582 {
75a70cf9 6583 gimplify_stmt (&cleanup, &cleanup_stmts);
6584 wce = gimple_build_wce (cleanup_stmts);
6585 gimple_wce_set_cleanup_eh_only (wce, eh_only);
6586 gimplify_seq_add_stmt (pre_p, wce);
4ee9c684 6587 }
4ee9c684 6588}
6589
6590/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
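/* Informally (a sketch only; g and make_string are made-up names), a call
   argument such as

       g (make_string ());

   where make_string returns an object by value arrives here as a
   TARGET_EXPR with a temporary slot.  The code below adds that slot as a
   local temporary, gimplifies the initializer into *PRE_P, pushes the
   destructor (if any) as a cleanup and, where applicable, a clobber for
   the slot going out of scope, and finally replaces the whole expression
   with the slot.  */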
6591
6592static enum gimplify_status
75a70cf9 6593gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4ee9c684 6594{
6595 tree targ = *expr_p;
6596 tree temp = TARGET_EXPR_SLOT (targ);
6597 tree init = TARGET_EXPR_INITIAL (targ);
6598 enum gimplify_status ret;
6599
629b6abc 6600 bool unpoison_empty_seq = false;
6601 gimple_stmt_iterator unpoison_it;
6602
4ee9c684 6603 if (init)
6604 {
6458ce4f 6605 tree cleanup = NULL_TREE;
6606
2569a1be 6607      /* TARGET_EXPR temps aren't part of the enclosing block, so add them
550ec1e0 6608	 to the temps list.  Also handle variable-length TARGET_EXPRs.  */
6609 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
6610 {
6611 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
6612 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
6613 gimplify_vla_decl (temp, pre_p);
6614 }
6615 else
629b6abc 6616 {
 6617	      /* Save the location where unpoisoning needs to be placed.  It's
 6618		 possible the variable will later satisfy needs_to_live_in_memory.  */
6619 unpoison_it = gsi_last (*pre_p);
6620 unpoison_empty_seq = gsi_end_p (unpoison_it);
6621
6622 gimple_add_tmp_var (temp);
6623 }
4ee9c684 6624
2569a1be 6625 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6626 expression is supposed to initialize the slot. */
6627 if (VOID_TYPE_P (TREE_TYPE (init)))
6628 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6629 else
2363ef00 6630 {
75a70cf9 6631 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
6632 init = init_expr;
6633 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6634 init = NULL;
6635 ggc_free (init_expr);
2363ef00 6636 }
2569a1be 6637 if (ret == GS_ERROR)
e1f6aa48 6638 {
6639 /* PR c++/28266 Make sure this is expanded only once. */
6640 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6641 return GS_ERROR;
6642 }
75a70cf9 6643 if (init)
6644 gimplify_and_add (init, pre_p);
4ee9c684 6645
6646 /* If needed, push the cleanup for the temp. */
6647 if (TARGET_EXPR_CLEANUP (targ))
6458ce4f 6648 {
6649 if (CLEANUP_EH_ONLY (targ))
6650 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
6651 CLEANUP_EH_ONLY (targ), pre_p);
6652 else
6653 cleanup = TARGET_EXPR_CLEANUP (targ);
6654 }
6655
6656 /* Add a clobber for the temporary going out of scope, like
6657 gimplify_bind_expr. */
cbabc8eb 6658 if (gimplify_ctxp->in_cleanup_point_expr
629b6abc 6659 && needs_to_live_in_memory (temp))
6458ce4f 6660 {
629b6abc 6661 if (flag_stack_reuse == SR_ALL)
6662 {
ebf0a6c6 6663 tree clobber = build_clobber (TREE_TYPE (temp));
629b6abc 6664 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
bcae17d7 6665 gimple_push_cleanup (temp, clobber, false, pre_p, true);
629b6abc 6666 }
b02d9b50 6667 if (asan_poisoned_variables
6668 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
eaf3f6ed 6669 && dbg_cnt (asan_use_after_scope)
6670 && !gimplify_omp_ctxp)
629b6abc 6671 {
6672 tree asan_cleanup = build_asan_poison_call_expr (temp);
6673 if (asan_cleanup)
6674 {
6675 if (unpoison_empty_seq)
6676 unpoison_it = gsi_start (*pre_p);
6458ce4f 6677
629b6abc 6678 asan_poison_variable (temp, false, &unpoison_it,
6679 unpoison_empty_seq);
6680 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
6681 }
6682 }
6683 }
6458ce4f 6684 if (cleanup)
6685 gimple_push_cleanup (temp, cleanup, false, pre_p);
4ee9c684 6686
6687 /* Only expand this once. */
6688 TREE_OPERAND (targ, 3) = init;
6689 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6690 }
0d59b19d 6691 else
4ee9c684 6692 /* We should have expanded this before. */
0d59b19d 6693 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
4ee9c684 6694
6695 *expr_p = temp;
6696 return GS_OK;
6697}
6698
6699/* Gimplification of expression trees. */
6700
75a70cf9 6701/* Gimplify an expression which appears in statement context.  The
6702 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6703 NULL, a new sequence is allocated.
4ee9c684 6704
75a70cf9 6705 Return true if we actually added a statement to the queue. */
6706
6707bool
6708gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
4ee9c684 6709{
75a70cf9 6710 gimple_seq_node last;
4ee9c684 6711
75a70cf9 6712 last = gimple_seq_last (*seq_p);
6713 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6714 return last != gimple_seq_last (*seq_p);
4ee9c684 6715}
6716
1e8e9920 6717/* Add FIRSTPRIVATE entries for DECL to CTX and the surrounding OpenMP
 6718   parallel contexts.  If entries already exist, force them to be some flavor
 6719   of private.  If there is no enclosing parallel, do nothing.  */
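/* For instance (an illustrative sketch; foo is a made-up name), given

       void foo (int n)
       {
	 int a[n];
       #pragma omp parallel shared (a)
	 { ... }
       }

   the temporary holding the gimplified size of A is not named in any
   clause, yet every thread needs its value to compute sizeof (a); this
   routine walks outward and records such a decl as firstprivate in the
   enclosing parallel (or forces an existing entry to a private flavor).  */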
6720
6721void
6722omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
6723{
6724 splay_tree_node n;
6725
43895be5 6726 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
1e8e9920 6727 return;
6728
6729 do
6730 {
6731 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6732 if (n != NULL)
6733 {
6734 if (n->value & GOVD_SHARED)
6735 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
bc7bff74 6736 else if (n->value & GOVD_MAP)
6737 n->value |= GOVD_MAP_TO_ONLY;
1e8e9920 6738 else
6739 return;
6740 }
43895be5 6741 else if ((ctx->region_type & ORT_TARGET) != 0)
6742 {
7e5a76c8 6743 if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
43895be5 6744 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6745 else
6746 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
6747 }
3d483a94 6748 else if (ctx->region_type != ORT_WORKSHARE
7e5a76c8 6749 && ctx->region_type != ORT_TASKGROUP
bc7bff74 6750 && ctx->region_type != ORT_SIMD
b656be3a 6751 && ctx->region_type != ORT_ACC
6752 && !(ctx->region_type & ORT_TARGET_DATA))
1e8e9920 6753 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6754
6755 ctx = ctx->outer_context;
6756 }
6757 while (ctx);
6758}
6759
6760/* Similarly for each of the type sizes of TYPE. */
6761
6762static void
6763omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
6764{
6765 if (type == NULL || type == error_mark_node)
6766 return;
6767 type = TYPE_MAIN_VARIANT (type);
6768
431205b7 6769 if (ctx->privatized_types->add (type))
1e8e9920 6770 return;
6771
6772 switch (TREE_CODE (type))
6773 {
6774 case INTEGER_TYPE:
6775 case ENUMERAL_TYPE:
6776 case BOOLEAN_TYPE:
1e8e9920 6777 case REAL_TYPE:
06f0b99c 6778 case FIXED_POINT_TYPE:
1e8e9920 6779 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
6780 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
6781 break;
6782
6783 case ARRAY_TYPE:
6784 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6785 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
6786 break;
6787
6788 case RECORD_TYPE:
6789 case UNION_TYPE:
6790 case QUAL_UNION_TYPE:
6791 {
6792 tree field;
1767a056 6793 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1e8e9920 6794 if (TREE_CODE (field) == FIELD_DECL)
6795 {
6796 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
6797 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
6798 }
6799 }
6800 break;
6801
6802 case POINTER_TYPE:
6803 case REFERENCE_TYPE:
6804 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6805 break;
6806
6807 default:
6808 break;
6809 }
6810
6811 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
6812 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
6813 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
6814}
6815
ca4c3545 6816/* Add an entry for DECL in the OMP context CTX with FLAGS. */
1e8e9920 6817
6818static void
6819omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
6820{
6821 splay_tree_node n;
6822 unsigned int nflags;
6823 tree t;
6824
43895be5 6825 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
1e8e9920 6826 return;
6827
6828 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
23a7b91b 6829     there are constructors involved somewhere.  The exception is a shared
 6830     clause, in which case nothing is privatized.  */
6831 if ((flags & GOVD_SHARED) == 0
6832 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
6833 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
1e8e9920 6834 flags |= GOVD_SEEN;
6835
6836 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
43895be5 6837 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
1e8e9920 6838 {
6839 /* We shouldn't be re-adding the decl with the same data
6840 sharing class. */
6841 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
1e8e9920 6842 nflags = n->value | flags;
b656be3a 6843 /* The only combination of data sharing classes we should see is
6844 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
6845 reduction variables to be used in data sharing clauses. */
6846 gcc_assert ((ctx->region_type & ORT_ACC) != 0
6847 || ((nflags & GOVD_DATA_SHARE_CLASS)
6848 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
3d483a94 6849 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
1e8e9920 6850 n->value = nflags;
6851 return;
6852 }
6853
6854 /* When adding a variable-sized variable, we have to handle all sorts
48e1416a 6855 of additional bits of data: the pointer replacement variable, and
1e8e9920 6856 the parameters of the type. */
00fde275 6857 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1e8e9920 6858 {
6859 /* Add the pointer replacement variable as PRIVATE if the variable
6860 replacement is private, else FIRSTPRIVATE since we'll need the
6861 address of the original variable either for SHARED, or for the
6862 copy into or out of the context. */
7e5a76c8 6863 if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
1e8e9920 6864 {
ca4c3545 6865 if (flags & GOVD_MAP)
6866 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
6867 else if (flags & GOVD_PRIVATE)
6868 nflags = GOVD_PRIVATE;
43895be5 6869 else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6870 && (flags & GOVD_FIRSTPRIVATE))
6871 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
ca4c3545 6872 else
6873 nflags = GOVD_FIRSTPRIVATE;
1e8e9920 6874 nflags |= flags & GOVD_SEEN;
6875 t = DECL_VALUE_EXPR (decl);
6876 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6877 t = TREE_OPERAND (t, 0);
6878 gcc_assert (DECL_P (t));
6879 omp_add_variable (ctx, t, nflags);
6880 }
6881
6882 /* Add all of the variable and type parameters (which should have
6883 been gimplified to a formal temporary) as FIRSTPRIVATE. */
6884 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
6885 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
6886 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6887
6888 /* The variable-sized variable itself is never SHARED, only some form
6889 of PRIVATE. The sharing would take place via the pointer variable
6890 which we remapped above. */
6891 if (flags & GOVD_SHARED)
2be704a8 6892 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
1e8e9920 6893 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
6894
48e1416a 6895 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
1e8e9920 6896 alloca statement we generate for the variable, so make sure it
6897 is available. This isn't automatically needed for the SHARED
fc6ebbc0 6898 case, since we won't be allocating local storage then.
 6899	 For local variables TYPE_SIZE_UNIT might not be gimplified yet;
 6900	 in that case omp_notice_variable will be called later on,
 6901	 when it is gimplified.  */
bc7bff74 6902 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
9b695a53 6903 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
1e8e9920 6904 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
6905 }
bc7bff74 6906 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
6907 && lang_hooks.decls.omp_privatize_by_reference (decl))
1e8e9920 6908 {
1e8e9920 6909 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6910
6911 /* Similar to the direct variable sized case above, we'll need the
6912 size of references being privatized. */
6913 if ((flags & GOVD_SHARED) == 0)
6914 {
6915 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
43895be5 6916 if (DECL_P (t))
1e8e9920 6917 omp_notice_variable (ctx, t, true);
6918 }
6919 }
6920
3d483a94 6921 if (n != NULL)
6922 n->value |= flags;
6923 else
6924 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
6de0546d 6925
 6926  /* For reduction clauses in OpenACC loop directives, by default create a
6927 copy clause on the enclosing parallel construct for carrying back the
6928 results. */
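  /* E.g. (a sketch):

	 #pragma acc parallel
	 #pragma acc loop reduction (+:sum)
	 for (i = 0; i < n; i++)
	   sum += a[i];

     with no explicit data clause for SUM, the loop below records SUM as
     mapped (GOVD_MAP) on the enclosing parallel so the reduced value is
     copied back, while on a kernels construct it only asserts that a copy
     mapping is already present.  */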
6929 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
6930 {
6931 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
6932 while (outer_ctx)
6933 {
6934 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
6935 if (n != NULL)
6936 {
6937 /* Ignore local variables and explicitly declared clauses. */
6938 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
6939 break;
6940 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
6941 {
6942 /* According to the OpenACC spec, such a reduction variable
 6943		     should already have a copy map on a kernels construct;
 6944		     verify that here.  */
6945 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
6946 && (n->value & GOVD_MAP));
6947 }
6948 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6949 {
6950 /* Remove firstprivate and make it a copy map. */
6951 n->value &= ~GOVD_FIRSTPRIVATE;
6952 n->value |= GOVD_MAP;
6953 }
6954 }
6955 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6956 {
6957 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
6958 GOVD_MAP | GOVD_SEEN);
6959 break;
6960 }
6961 outer_ctx = outer_ctx->outer_context;
6962 }
6963 }
1e8e9920 6964}
6965
ca4c3545 6966/* Notice a threadprivate variable DECL used in OMP context CTX.
b0b48c1d 6967   This just prints out diagnostics about threadprivate variable uses
 6968   in target regions and in untied tasks.  If DECL2 is non-NULL, prevent
 6969   this warning on that variable.  */
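/* E.g. (a sketch):

       static int counter;
     #pragma omp threadprivate (counter)
       ...
     #pragma omp task untied
       counter++;

   is diagnosed below, since an untied task may resume on a different
   thread and would then update a different threadprivate copy.  */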
6970
6971static bool
6972omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
6973 tree decl2)
6974{
6975 splay_tree_node n;
bc7bff74 6976 struct gimplify_omp_ctx *octx;
6977
6978 for (octx = ctx; octx; octx = octx->outer_context)
43895be5 6979 if ((octx->region_type & ORT_TARGET) != 0)
bc7bff74 6980 {
6981 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
6982 if (n == NULL)
6983 {
6984 error ("threadprivate variable %qE used in target region",
6985 DECL_NAME (decl));
6986 error_at (octx->location, "enclosing target region");
6987 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
6988 }
6989 if (decl2)
6990 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
6991 }
b0b48c1d 6992
6993 if (ctx->region_type != ORT_UNTIED_TASK)
6994 return false;
6995 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6996 if (n == NULL)
6997 {
57859735 6998 error ("threadprivate variable %qE used in untied task",
6999 DECL_NAME (decl));
b0b48c1d 7000 error_at (ctx->location, "enclosing task");
7001 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
7002 }
7003 if (decl2)
7004 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
7005 return false;
7006}
7007
2fc5e987 7008/* Return true if global var DECL is device resident. */
7009
7010static bool
7011device_resident_p (tree decl)
7012{
7013 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
7014
7015 if (!attr)
7016 return false;
7017
7018 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
7019 {
7020 tree c = TREE_VALUE (t);
7021 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
7022 return true;
7023 }
7024
7025 return false;
7026}
7027
c4c30562 7028/* Return true if DECL has an ACC DECLARE attribute. */
7029
7030static bool
7031is_oacc_declared (tree decl)
7032{
7033 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
7034 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
7035 return declared != NULL_TREE;
7036}
7037
38f3f9a7 7038/* Determine outer default flags for DECL mentioned in an OMP region
7039 but not declared in an enclosing clause.
7040
7041 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7042 remapped firstprivate instead of shared. To some extent this is
7043 addressed in omp_firstprivatize_type_sizes, but not
7044 effectively. */
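/* For example (a sketch), with

       int v = 0;
     #pragma omp parallel default (none)
       v++;

   V is neither predetermined nor listed in any clause, so the
   OMP_CLAUSE_DEFAULT_NONE case below reports that V is not specified in
   the enclosing parallel and then falls through to treat it as shared so
   compilation can continue.  */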
7045
7046static unsigned
7047omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
7048 bool in_code, unsigned flags)
7049{
7050 enum omp_clause_default_kind default_kind = ctx->default_kind;
7051 enum omp_clause_default_kind kind;
7052
7053 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
7054 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
7055 default_kind = kind;
7056
7057 switch (default_kind)
7058 {
7059 case OMP_CLAUSE_DEFAULT_NONE:
7060 {
7061 const char *rtype;
7062
7063 if (ctx->region_type & ORT_PARALLEL)
7064 rtype = "parallel";
7e5a76c8 7065 else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
7066 rtype = "taskloop";
38f3f9a7 7067 else if (ctx->region_type & ORT_TASK)
7068 rtype = "task";
7069 else if (ctx->region_type & ORT_TEAMS)
7070 rtype = "teams";
7071 else
7072 gcc_unreachable ();
7073
fd7ef47a 7074 error ("%qE not specified in enclosing %qs",
38f3f9a7 7075 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
fd7ef47a 7076 error_at (ctx->location, "enclosing %qs", rtype);
38f3f9a7 7077 }
7078 /* FALLTHRU */
7079 case OMP_CLAUSE_DEFAULT_SHARED:
7080 flags |= GOVD_SHARED;
7081 break;
7082 case OMP_CLAUSE_DEFAULT_PRIVATE:
7083 flags |= GOVD_PRIVATE;
7084 break;
7085 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
7086 flags |= GOVD_FIRSTPRIVATE;
7087 break;
7088 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
7089 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7090 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
7091 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
7092 {
7093 omp_notice_variable (octx, decl, in_code);
7094 for (; octx; octx = octx->outer_context)
7095 {
7096 splay_tree_node n2;
7097
38f3f9a7 7098 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
9561765e 7099 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
7100 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
7101 continue;
38f3f9a7 7102 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
7103 {
7104 flags |= GOVD_FIRSTPRIVATE;
7105 goto found_outer;
7106 }
7107 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
7108 {
7109 flags |= GOVD_SHARED;
7110 goto found_outer;
7111 }
7112 }
7113 }
7114
7115 if (TREE_CODE (decl) == PARM_DECL
7116 || (!is_global_var (decl)
7117 && DECL_CONTEXT (decl) == current_function_decl))
7118 flags |= GOVD_FIRSTPRIVATE;
7119 else
7120 flags |= GOVD_SHARED;
7121 found_outer:
7122 break;
7123
7124 default:
7125 gcc_unreachable ();
7126 }
7127
7128 return flags;
7129}
7130
3cf4960a 7131
7132/* Determine outer default flags for DECL mentioned in an OACC region
7133 but not declared in an enclosing clause. */
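/* Informally (a sketch): inside

       double x, a[100];
     #pragma acc parallel
       { ... uses x and a ... }

   with no explicit data clauses, the aggregate A defaults to an implicit
   copy (or to a present check under default(present)), whereas the scalar
   X defaults to firstprivate on a parallel construct and to copy on a
   kernels construct, as implemented in the switch below.  */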
7134
7135static unsigned
7136oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
7137{
7138 const char *rkind;
2fc5e987 7139 bool on_device = false;
c4c30562 7140 bool declared = is_oacc_declared (decl);
1ea78d03 7141 tree type = TREE_TYPE (decl);
7142
7143 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7144 type = TREE_TYPE (type);
2fc5e987 7145
7146 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
7147 && is_global_var (decl)
7148 && device_resident_p (decl))
7149 {
7150 on_device = true;
7151 flags |= GOVD_MAP_TO_ONLY;
7152 }
3cf4960a 7153
7154 switch (ctx->region_type)
7155 {
3cf4960a 7156 case ORT_ACC_KERNELS:
3cf4960a 7157 rkind = "kernels";
f7c10d53 7158
7159 if (AGGREGATE_TYPE_P (type))
6acf639f 7160 {
7161 /* Aggregates default to 'present_or_copy', or 'present'. */
7162 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7163 flags |= GOVD_MAP;
7164 else
7165 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7166 }
f7c10d53 7167 else
7168 /* Scalars default to 'copy'. */
7169 flags |= GOVD_MAP | GOVD_MAP_FORCE;
7170
3cf4960a 7171 break;
7172
7173 case ORT_ACC_PARALLEL:
f7c10d53 7174 rkind = "parallel";
7175
7176 if (on_device || declared)
7177 flags |= GOVD_MAP;
7178 else if (AGGREGATE_TYPE_P (type))
6acf639f 7179 {
7180 /* Aggregates default to 'present_or_copy', or 'present'. */
7181 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7182 flags |= GOVD_MAP;
7183 else
7184 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7185 }
f7c10d53 7186 else
7187 /* Scalars default to 'firstprivate'. */
7188 flags |= GOVD_FIRSTPRIVATE;
7189
3cf4960a 7190 break;
f7c10d53 7191
7192 default:
7193 gcc_unreachable ();
3cf4960a 7194 }
7195
7196 if (DECL_ARTIFICIAL (decl))
7197 ; /* We can get compiler-generated decls, and should not complain
7198 about them. */
7199 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7200 {
296008a9 7201 error ("%qE not specified in enclosing OpenACC %qs construct",
3cf4960a 7202 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
a9798d3d 7203 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
3cf4960a 7204 }
6acf639f 7205 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7206 ; /* Handled above. */
3cf4960a 7207 else
7208 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7209
7210 return flags;
7211}
7212
ca4c3545 7213/* Record the fact that DECL was used within the OMP context CTX.
1e8e9920 7214 IN_CODE is true when real code uses DECL, and false when we should
7215 merely emit default(none) errors. Return true if DECL is going to
7216 be remapped and thus DECL shouldn't be gimplified into its
7217 DECL_VALUE_EXPR (if any). */
7218
7219static bool
7220omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7221{
7222 splay_tree_node n;
7223 unsigned flags = in_code ? GOVD_SEEN : 0;
7224 bool ret = false, shared;
7225
da579dbb 7226 if (error_operand_p (decl))
1e8e9920 7227 return false;
7228
43895be5 7229 if (ctx->region_type == ORT_NONE)
7230 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7231
1e8e9920 7232 if (is_global_var (decl))
7233 {
665fbc2f 7234 /* Threadprivate variables are predetermined. */
1e8e9920 7235 if (DECL_THREAD_LOCAL_P (decl))
b0b48c1d 7236 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
1e8e9920 7237
7238 if (DECL_HAS_VALUE_EXPR_P (decl))
7239 {
7240 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7241
7242 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
b0b48c1d 7243 return omp_notice_threadprivate_variable (ctx, decl, value);
1e8e9920 7244 }
665fbc2f 7245
7246 if (gimplify_omp_ctxp->outer_context == NULL
7247 && VAR_P (decl)
4954efd4 7248 && oacc_get_fn_attrib (current_function_decl))
665fbc2f 7249 {
7250 location_t loc = DECL_SOURCE_LOCATION (decl);
7251
7252 if (lookup_attribute ("omp declare target link",
7253 DECL_ATTRIBUTES (decl)))
7254 {
7255 error_at (loc,
7256 "%qE with %<link%> clause used in %<routine%> function",
7257 DECL_NAME (decl));
7258 return false;
7259 }
7260 else if (!lookup_attribute ("omp declare target",
7261 DECL_ATTRIBUTES (decl)))
7262 {
7263 error_at (loc,
7264 "%qE requires a %<declare%> directive for use "
7265 "in a %<routine%> function", DECL_NAME (decl));
7266 return false;
7267 }
7268 }
1e8e9920 7269 }
7270
7271 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
43895be5 7272 if ((ctx->region_type & ORT_TARGET) != 0)
bc7bff74 7273 {
691447ab 7274 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
bc7bff74 7275 if (n == NULL)
7276 {
43895be5 7277 unsigned nflags = flags;
7e5a76c8 7278 if ((ctx->region_type & ORT_ACC) == 0)
43895be5 7279 {
7280 bool is_declare_target = false;
43895be5 7281 if (is_global_var (decl)
7282 && varpool_node::get_create (decl)->offloadable)
7283 {
7284 struct gimplify_omp_ctx *octx;
7285 for (octx = ctx->outer_context;
7286 octx; octx = octx->outer_context)
7287 {
7288 n = splay_tree_lookup (octx->variables,
7289 (splay_tree_key)decl);
7290 if (n
7291 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7292 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7293 break;
7294 }
7295 is_declare_target = octx == NULL;
7296 }
7e5a76c8 7297 if (!is_declare_target)
7298 {
7299 int gdmk;
7300 if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7301 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7302 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7303 == POINTER_TYPE)))
7304 gdmk = GDMK_POINTER;
7305 else if (lang_hooks.decls.omp_scalar_p (decl))
7306 gdmk = GDMK_SCALAR;
7307 else
7308 gdmk = GDMK_AGGREGATE;
7309 if (ctx->defaultmap[gdmk] == 0)
7310 {
7311 tree d = lang_hooks.decls.omp_report_decl (decl);
7312 error ("%qE not specified in enclosing %<target%>",
7313 DECL_NAME (d));
7314 error_at (ctx->location, "enclosing %<target%>");
7315 }
7316 else if (ctx->defaultmap[gdmk]
7317 & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
7318 nflags |= ctx->defaultmap[gdmk];
7319 else
7320 {
7321 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
7322 nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
7323 }
7324 }
43895be5 7325 }
b656be3a 7326
7327 struct gimplify_omp_ctx *octx = ctx->outer_context;
7328 if ((ctx->region_type & ORT_ACC) && octx)
bc7bff74 7329 {
b656be3a 7330 /* Look in outer OpenACC contexts, to see if there's a
7331 data attribute for this variable. */
7332 omp_notice_variable (octx, decl, in_code);
7333
7334 for (; octx; octx = octx->outer_context)
7335 {
7336 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7337 break;
7338 splay_tree_node n2
7339 = splay_tree_lookup (octx->variables,
7340 (splay_tree_key) decl);
7341 if (n2)
7342 {
571b3486 7343 if (octx->region_type == ORT_ACC_HOST_DATA)
7344 error ("variable %qE declared in enclosing "
7345 "%<host_data%> region", DECL_NAME (decl));
b656be3a 7346 nflags |= GOVD_MAP;
6d6a3fc3 7347 if (octx->region_type == ORT_ACC_DATA
7348 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7349 nflags |= GOVD_MAP_0LEN_ARRAY;
b656be3a 7350 goto found_outer;
7351 }
7352 }
bc7bff74 7353 }
b656be3a 7354
7e5a76c8 7355 if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
7356 | GOVD_MAP_ALLOC_ONLY)) == flags)
7357 {
7358 tree type = TREE_TYPE (decl);
7359
7360 if (gimplify_omp_ctxp->target_firstprivatize_array_bases
7361 && lang_hooks.decls.omp_privatize_by_reference (decl))
7362 type = TREE_TYPE (type);
7363 if (!lang_hooks.types.omp_mappable_type (type))
7364 {
7365 error ("%qD referenced in target region does not have "
7366 "a mappable type", decl);
7367 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7368 }
7369 else
7370 {
7371 if ((ctx->region_type & ORT_ACC) != 0)
7372 nflags = oacc_default_clause (ctx, decl, flags);
7373 else
7374 nflags |= GOVD_MAP;
7375 }
7376 }
b656be3a 7377 found_outer:
43895be5 7378 omp_add_variable (ctx, decl, nflags);
bc7bff74 7379 }
7380 else
691447ab 7381 {
7382 /* If nothing changed, there's nothing left to do. */
7383 if ((n->value & flags) == flags)
7384 return ret;
98588013 7385 flags |= n->value;
7386 n->value = flags;
691447ab 7387 }
bc7bff74 7388 goto do_outer;
7389 }
7390
1e8e9920 7391 if (n == NULL)
7392 {
3d483a94 7393 if (ctx->region_type == ORT_WORKSHARE
7e5a76c8 7394 || ctx->region_type == ORT_TASKGROUP
bc7bff74 7395 || ctx->region_type == ORT_SIMD
b656be3a 7396 || ctx->region_type == ORT_ACC
7397 || (ctx->region_type & ORT_TARGET_DATA) != 0)
1e8e9920 7398 goto do_outer;
7399
38f3f9a7 7400 flags = omp_default_clause (ctx, decl, in_code, flags);
1e8e9920 7401
fd6481cf 7402 if ((flags & GOVD_PRIVATE)
7403 && lang_hooks.decls.omp_private_outer_ref (decl))
7404 flags |= GOVD_PRIVATE_OUTER_REF;
7405
1e8e9920 7406 omp_add_variable (ctx, decl, flags);
7407
7408 shared = (flags & GOVD_SHARED) != 0;
7409 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7410 goto do_outer;
7411 }
7412
2a92826d 7413 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
7414 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
00e3f095 7415 && DECL_SIZE (decl))
7416 {
7417 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7418 {
7419 splay_tree_node n2;
7420 tree t = DECL_VALUE_EXPR (decl);
7421 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7422 t = TREE_OPERAND (t, 0);
7423 gcc_assert (DECL_P (t));
7424 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7425 n2->value |= GOVD_SEEN;
7426 }
7427 else if (lang_hooks.decls.omp_privatize_by_reference (decl)
7428 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
7429 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
7430 != INTEGER_CST))
7431 {
7432 splay_tree_node n2;
7433 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7434 gcc_assert (DECL_P (t));
7435 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7436 if (n2)
826d93b2 7437 omp_notice_variable (ctx, t, true);
00e3f095 7438 }
2a92826d 7439 }
7440
1e8e9920 7441 shared = ((flags | n->value) & GOVD_SHARED) != 0;
7442 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7443
7444 /* If nothing changed, there's nothing left to do. */
7445 if ((n->value & flags) == flags)
7446 return ret;
7447 flags |= n->value;
7448 n->value = flags;
7449
7450 do_outer:
7451 /* If the variable is private in the current context, then we don't
7452 need to propagate anything to an outer context. */
fd6481cf 7453 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
1e8e9920 7454 return ret;
d7729e26 7455 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7456 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7457 return ret;
7458 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7459 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7460 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7461 return ret;
1e8e9920 7462 if (ctx->outer_context
7463 && omp_notice_variable (ctx->outer_context, decl, in_code))
7464 return true;
7465 return ret;
7466}
7467
7468/* Verify that DECL is private within CTX. If there's specific information
7469 to the contrary in the innermost scope, generate an error. */
7470
7471static bool
2b536a17 7472omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
1e8e9920 7473{
7474 splay_tree_node n;
7475
7476 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7477 if (n != NULL)
7478 {
7479 if (n->value & GOVD_SHARED)
7480 {
7481 if (ctx == gimplify_omp_ctxp)
013a7f55 7482 {
3d483a94 7483 if (simd)
7484 error ("iteration variable %qE is predetermined linear",
7485 DECL_NAME (decl));
7486 else
7487 error ("iteration variable %qE should be private",
7488 DECL_NAME (decl));
013a7f55 7489 n->value = GOVD_PRIVATE;
7490 return true;
7491 }
7492 else
7493 return false;
1e8e9920 7494 }
87f7c31e 7495 else if ((n->value & GOVD_EXPLICIT) != 0
7496 && (ctx == gimplify_omp_ctxp
fd6481cf 7497 || (ctx->region_type == ORT_COMBINED_PARALLEL
87f7c31e 7498 && gimplify_omp_ctxp->outer_context == ctx)))
7499 {
7500 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
abd3e6b5 7501 error ("iteration variable %qE should not be firstprivate",
7502 DECL_NAME (decl));
87f7c31e 7503 else if ((n->value & GOVD_REDUCTION) != 0)
abd3e6b5 7504 error ("iteration variable %qE should not be reduction",
7505 DECL_NAME (decl));
7e5a76c8 7506 else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
9561765e 7507 error ("iteration variable %qE should not be linear",
7508 DECL_NAME (decl));
87f7c31e 7509 }
919041c7 7510 return (ctx == gimplify_omp_ctxp
7511 || (ctx->region_type == ORT_COMBINED_PARALLEL
7512 && gimplify_omp_ctxp->outer_context == ctx));
1e8e9920 7513 }
7514
3d483a94 7515 if (ctx->region_type != ORT_WORKSHARE
7e5a76c8 7516 && ctx->region_type != ORT_TASKGROUP
b656be3a 7517 && ctx->region_type != ORT_SIMD
7518 && ctx->region_type != ORT_ACC)
1e8e9920 7519 return false;
013a7f55 7520 else if (ctx->outer_context)
3d483a94 7521 return omp_is_private (ctx->outer_context, decl, simd);
919041c7 7522 return false;
1e8e9920 7523}
7524
f9b59f0c 7525/* Return true if DECL is private within a parallel region
 7526   that binds to the current construct's context, or appears in that
 7527   parallel region's REDUCTION clause.  */
7528
7529static bool
b45e536e 7530omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
f9b59f0c 7531{
7532 splay_tree_node n;
7533
7534 do
7535 {
7536 ctx = ctx->outer_context;
7537 if (ctx == NULL)
43895be5 7538 {
7539 if (is_global_var (decl))
7540 return false;
7541
 7542	    /* References might be private, but might be shared too.
 7543	       When checking for copyprivate, assume they might be
 7544	       private; otherwise assume they might be shared.  */
7545 if (copyprivate)
7546 return true;
7547
7548 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7549 return false;
7550
7551 /* Treat C++ privatized non-static data members outside
7552 of the privatization the same. */
7553 if (omp_member_access_dummy_var (decl))
7554 return false;
7555
7556 return true;
7557 }
f9b59f0c 7558
9561765e 7559 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7560
7561 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7562 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
bc7bff74 7563 continue;
7564
f9b59f0c 7565 if (n != NULL)
43895be5 7566 {
7567 if ((n->value & GOVD_LOCAL) != 0
7568 && omp_member_access_dummy_var (decl))
7569 return false;
7570 return (n->value & GOVD_SHARED) == 0;
7571 }
f9b59f0c 7572 }
3d483a94 7573 while (ctx->region_type == ORT_WORKSHARE
7e5a76c8 7574 || ctx->region_type == ORT_TASKGROUP
b656be3a 7575 || ctx->region_type == ORT_SIMD
7576 || ctx->region_type == ORT_ACC);
f9b59f0c 7577 return false;
7578}
7579
43895be5 7580/* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7581
7582static tree
7583find_decl_expr (tree *tp, int *walk_subtrees, void *data)
7584{
7585 tree t = *tp;
7586
 7587  /* Return the DECL_EXPR node if it declares the DECL we are looking for.  */
7588 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
7589 return t;
7590
7591 if (IS_TYPE_OR_DECL_P (t))
7592 *walk_subtrees = 0;
7593 return NULL_TREE;
7594}
7595
7e5a76c8 7596/* If *LIST_P contains any OpenMP depend clauses with iterators,
 7597   lower all the depend clauses by populating the corresponding depend
 7598   array.  Returns 0 if there are no such depend clauses, or
7599 2 if all depend clauses should be removed, 1 otherwise. */
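/* As an illustration only, a clause such as

       #pragma omp task depend (iterator (i = 0 : n), in : a[i])

   cannot list its addresses statically, so the code below computes the
   total number of addresses, builds a depend array, emits loops that store
   each &a[i] into it, and finally replaces the depend clauses with a
   single clause pointing at that array.  */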
7600
7601static int
7602gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
7603{
7604 tree c;
7605 gimple *g;
7606 size_t n[4] = { 0, 0, 0, 0 };
7607 bool unused[4];
7608 tree counts[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
7609 tree last_iter = NULL_TREE, last_count = NULL_TREE;
7610 size_t i, j;
7611 location_t first_loc = UNKNOWN_LOCATION;
7612
7613 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
7614 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7615 {
7616 switch (OMP_CLAUSE_DEPEND_KIND (c))
7617 {
7618 case OMP_CLAUSE_DEPEND_IN:
7619 i = 2;
7620 break;
7621 case OMP_CLAUSE_DEPEND_OUT:
7622 case OMP_CLAUSE_DEPEND_INOUT:
7623 i = 0;
7624 break;
7625 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
7626 i = 1;
7627 break;
7628 case OMP_CLAUSE_DEPEND_DEPOBJ:
7629 i = 3;
7630 break;
7631 case OMP_CLAUSE_DEPEND_SOURCE:
7632 case OMP_CLAUSE_DEPEND_SINK:
7633 continue;
7634 default:
7635 gcc_unreachable ();
7636 }
7637 tree t = OMP_CLAUSE_DECL (c);
7638 if (first_loc == UNKNOWN_LOCATION)
7639 first_loc = OMP_CLAUSE_LOCATION (c);
7640 if (TREE_CODE (t) == TREE_LIST
7641 && TREE_PURPOSE (t)
7642 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
7643 {
7644 if (TREE_PURPOSE (t) != last_iter)
7645 {
7646 tree tcnt = size_one_node;
7647 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
7648 {
7649 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
7650 is_gimple_val, fb_rvalue) == GS_ERROR
7651 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
7652 is_gimple_val, fb_rvalue) == GS_ERROR
7653 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
7654 is_gimple_val, fb_rvalue) == GS_ERROR
7655 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
7656 is_gimple_val, fb_rvalue)
7657 == GS_ERROR))
7658 return 2;
7659 tree var = TREE_VEC_ELT (it, 0);
7660 tree begin = TREE_VEC_ELT (it, 1);
7661 tree end = TREE_VEC_ELT (it, 2);
7662 tree step = TREE_VEC_ELT (it, 3);
7663 tree orig_step = TREE_VEC_ELT (it, 4);
7664 tree type = TREE_TYPE (var);
7665 tree stype = TREE_TYPE (step);
7666 location_t loc = DECL_SOURCE_LOCATION (var);
7667 tree endmbegin;
7668 /* Compute count for this iterator as
7669 orig_step > 0
7670 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
7671 : (begin > end ? (end - begin + (step + 1)) / step : 0)
7672 and compute product of those for the entire depend
7673 clause. */
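		      /* A quick numeric check of the formula above: for
			 begin = 0, end = 10, step = 3, orig_step = 3 we get
			 (10 - 0 + (3 - 1)) / 3 = 4, matching the four
			 iterations i = 0, 3, 6, 9.  */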
7674 if (POINTER_TYPE_P (type))
7675 endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
7676 stype, end, begin);
7677 else
7678 endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
7679 end, begin);
7680 tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
7681 step,
7682 build_int_cst (stype, 1));
7683 tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
7684 build_int_cst (stype, 1));
7685 tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
7686 unshare_expr (endmbegin),
7687 stepm1);
7688 pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
7689 pos, step);
7690 tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
7691 endmbegin, stepp1);
7692 if (TYPE_UNSIGNED (stype))
7693 {
7694 neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
7695 step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
7696 }
7697 neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
7698 neg, step);
7699 step = NULL_TREE;
7700 tree cond = fold_build2_loc (loc, LT_EXPR,
7701 boolean_type_node,
7702 begin, end);
7703 pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
7704 build_int_cst (stype, 0));
7705 cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
7706 end, begin);
7707 neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
7708 build_int_cst (stype, 0));
7709 tree osteptype = TREE_TYPE (orig_step);
7710 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
7711 orig_step,
7712 build_int_cst (osteptype, 0));
7713 tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
7714 cond, pos, neg);
7715 cnt = fold_convert_loc (loc, sizetype, cnt);
7716 if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
7717 fb_rvalue) == GS_ERROR)
7718 return 2;
7719 tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
7720 }
7721 if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
7722 fb_rvalue) == GS_ERROR)
7723 return 2;
7724 last_iter = TREE_PURPOSE (t);
7725 last_count = tcnt;
7726 }
7727 if (counts[i] == NULL_TREE)
7728 counts[i] = last_count;
7729 else
7730 counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
7731 PLUS_EXPR, counts[i], last_count);
7732 }
7733 else
7734 n[i]++;
7735 }
7736 for (i = 0; i < 4; i++)
7737 if (counts[i])
7738 break;
7739 if (i == 4)
7740 return 0;
7741
7742 tree total = size_zero_node;
7743 for (i = 0; i < 4; i++)
7744 {
7745 unused[i] = counts[i] == NULL_TREE && n[i] == 0;
7746 if (counts[i] == NULL_TREE)
7747 counts[i] = size_zero_node;
7748 if (n[i])
7749 counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
7750 if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
7751 fb_rvalue) == GS_ERROR)
7752 return 2;
7753 total = size_binop (PLUS_EXPR, total, counts[i]);
7754 }
7755
7756 if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
7757 == GS_ERROR)
7758 return 2;
7759 bool is_old = unused[1] && unused[3];
7760 tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
7761 size_int (is_old ? 1 : 4));
7762 tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
7763 tree array = create_tmp_var_raw (type);
7764 TREE_ADDRESSABLE (array) = 1;
7765 if (TREE_CODE (totalpx) != INTEGER_CST)
7766 {
7767 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
7768 gimplify_type_sizes (TREE_TYPE (array), pre_p);
7769 if (gimplify_omp_ctxp)
7770 {
7771 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7772 while (ctx
7773 && (ctx->region_type == ORT_WORKSHARE
7774 || ctx->region_type == ORT_TASKGROUP
7775 || ctx->region_type == ORT_SIMD
7776 || ctx->region_type == ORT_ACC))
7777 ctx = ctx->outer_context;
7778 if (ctx)
7779 omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
7780 }
7781 gimplify_vla_decl (array, pre_p);
7782 }
7783 else
7784 gimple_add_tmp_var (array);
7785 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7786 NULL_TREE);
7787 tree tem;
7788 if (!is_old)
7789 {
7790 tem = build2 (MODIFY_EXPR, void_type_node, r,
7791 build_int_cst (ptr_type_node, 0));
7792 gimplify_and_add (tem, pre_p);
7793 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7794 NULL_TREE);
7795 }
7796 tem = build2 (MODIFY_EXPR, void_type_node, r,
7797 fold_convert (ptr_type_node, total));
7798 gimplify_and_add (tem, pre_p);
7799 for (i = 1; i < (is_old ? 2 : 4); i++)
7800 {
7801 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
7802 NULL_TREE, NULL_TREE);
7803 tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
7804 gimplify_and_add (tem, pre_p);
7805 }
7806
7807 tree cnts[4];
7808 for (j = 4; j; j--)
7809 if (!unused[j - 1])
7810 break;
7811 for (i = 0; i < 4; i++)
7812 {
7813 if (i && (i >= j || unused[i - 1]))
7814 {
7815 cnts[i] = cnts[i - 1];
7816 continue;
7817 }
7818 cnts[i] = create_tmp_var (sizetype);
7819 if (i == 0)
7820 g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
7821 else
7822 {
7823 tree t;
7824 if (is_old)
7825 t = size_binop (PLUS_EXPR, counts[0], size_int (2));
7826 else
7827 t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
7828 if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
7829 == GS_ERROR)
7830 return 2;
7831 g = gimple_build_assign (cnts[i], t);
7832 }
7833 gimple_seq_add_stmt (pre_p, g);
7834 }
7835
7836 last_iter = NULL_TREE;
7837 tree last_bind = NULL_TREE;
7838 tree *last_body = NULL;
7839 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
7840 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7841 {
7842 switch (OMP_CLAUSE_DEPEND_KIND (c))
7843 {
7844 case OMP_CLAUSE_DEPEND_IN:
7845 i = 2;
7846 break;
7847 case OMP_CLAUSE_DEPEND_OUT:
7848 case OMP_CLAUSE_DEPEND_INOUT:
7849 i = 0;
7850 break;
7851 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
7852 i = 1;
7853 break;
7854 case OMP_CLAUSE_DEPEND_DEPOBJ:
7855 i = 3;
7856 break;
7857 case OMP_CLAUSE_DEPEND_SOURCE:
7858 case OMP_CLAUSE_DEPEND_SINK:
7859 continue;
7860 default:
7861 gcc_unreachable ();
7862 }
7863 tree t = OMP_CLAUSE_DECL (c);
7864 if (TREE_CODE (t) == TREE_LIST
7865 && TREE_PURPOSE (t)
7866 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
7867 {
7868 if (TREE_PURPOSE (t) != last_iter)
7869 {
7870 if (last_bind)
7871 gimplify_and_add (last_bind, pre_p);
7872 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
7873 last_bind = build3 (BIND_EXPR, void_type_node,
7874 BLOCK_VARS (block), NULL, block);
7875 TREE_SIDE_EFFECTS (last_bind) = 1;
7876 SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
7877 tree *p = &BIND_EXPR_BODY (last_bind);
7878 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
7879 {
7880 tree var = TREE_VEC_ELT (it, 0);
7881 tree begin = TREE_VEC_ELT (it, 1);
7882 tree end = TREE_VEC_ELT (it, 2);
7883 tree step = TREE_VEC_ELT (it, 3);
7884 tree orig_step = TREE_VEC_ELT (it, 4);
7885 tree type = TREE_TYPE (var);
7886 location_t loc = DECL_SOURCE_LOCATION (var);
7887 /* Emit:
7888 var = begin;
7889 goto cond_label;
7890 beg_label:
7891 ...
7892 var = var + step;
7893 cond_label:
7894 if (orig_step > 0) {
7895 if (var < end) goto beg_label;
7896 } else {
7897 if (var > end) goto beg_label;
7898 }
7899 for each iterator, with inner iterators added to
7900 the ... above. */
7901 tree beg_label = create_artificial_label (loc);
7902 tree cond_label = NULL_TREE;
7903 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
7904 var, begin);
7905 append_to_statement_list_force (tem, p);
7906 tem = build_and_jump (&cond_label);
7907 append_to_statement_list_force (tem, p);
7908 tem = build1 (LABEL_EXPR, void_type_node, beg_label);
7909 append_to_statement_list (tem, p);
7910 tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
7911 NULL_TREE, NULL_TREE);
7912 TREE_SIDE_EFFECTS (bind) = 1;
7913 SET_EXPR_LOCATION (bind, loc);
7914 append_to_statement_list_force (bind, p);
7915 if (POINTER_TYPE_P (type))
7916 tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
7917 var, fold_convert_loc (loc, sizetype,
7918 step));
7919 else
7920 tem = build2_loc (loc, PLUS_EXPR, type, var, step);
7921 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
7922 var, tem);
7923 append_to_statement_list_force (tem, p);
7924 tem = build1 (LABEL_EXPR, void_type_node, cond_label);
7925 append_to_statement_list (tem, p);
7926 tree cond = fold_build2_loc (loc, LT_EXPR,
7927 boolean_type_node,
7928 var, end);
7929 tree pos
7930 = fold_build3_loc (loc, COND_EXPR, void_type_node,
7931 cond, build_and_jump (&beg_label),
7932 void_node);
7933 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
7934 var, end);
7935 tree neg
7936 = fold_build3_loc (loc, COND_EXPR, void_type_node,
7937 cond, build_and_jump (&beg_label),
7938 void_node);
7939 tree osteptype = TREE_TYPE (orig_step);
7940 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
7941 orig_step,
7942 build_int_cst (osteptype, 0));
7943 tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
7944 cond, pos, neg);
7945 append_to_statement_list_force (tem, p);
7946 p = &BIND_EXPR_BODY (bind);
7947 }
7948 last_body = p;
7949 }
7950 last_iter = TREE_PURPOSE (t);
7951 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
7952 {
7953 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
7954 0), last_body);
7955 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
7956 }
7957 if (error_operand_p (TREE_VALUE (t)))
7958 return 2;
7959 TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
7960 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
7961 NULL_TREE, NULL_TREE);
7962 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
7963 void_type_node, r, TREE_VALUE (t));
7964 append_to_statement_list_force (tem, last_body);
7965 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
7966 void_type_node, cnts[i],
7967 size_binop (PLUS_EXPR, cnts[i], size_int (1)));
7968 append_to_statement_list_force (tem, last_body);
7969 TREE_VALUE (t) = null_pointer_node;
7970 }
7971 else
7972 {
7973 if (last_bind)
7974 {
7975 gimplify_and_add (last_bind, pre_p);
7976 last_bind = NULL_TREE;
7977 }
7978 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
7979 {
7980 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
7981 NULL, is_gimple_val, fb_rvalue);
7982 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
7983 }
7984 if (error_operand_p (OMP_CLAUSE_DECL (c)))
7985 return 2;
7986 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
7987 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
7988 is_gimple_val, fb_rvalue) == GS_ERROR)
7989 return 2;
7990 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
7991 NULL_TREE, NULL_TREE);
7992 tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
7993 gimplify_and_add (tem, pre_p);
7994 g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR, cnts[i],
7995 size_int (1)));
7996 gimple_seq_add_stmt (pre_p, g);
7997 }
7998 }
7999 if (last_bind)
8000 gimplify_and_add (last_bind, pre_p);
8001 tree cond = boolean_false_node;
8002 if (is_old)
8003 {
8004 if (!unused[0])
8005 cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
8006 size_binop_loc (first_loc, PLUS_EXPR, counts[0],
8007 size_int (2)));
8008 if (!unused[2])
8009 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8010 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8011 cnts[2],
8012 size_binop_loc (first_loc, PLUS_EXPR,
8013 totalpx,
8014 size_int (1))));
8015 }
8016 else
8017 {
8018 tree prev = size_int (5);
8019 for (i = 0; i < 4; i++)
8020 {
8021 if (unused[i])
8022 continue;
8023 prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
8024 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8025 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8026 cnts[i], unshare_expr (prev)));
8027 }
8028 }
8029 tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
8030 build_call_expr_loc (first_loc,
8031 builtin_decl_explicit (BUILT_IN_TRAP),
8032 0), void_node);
8033 gimplify_and_add (tem, pre_p);
8034 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
8035 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
8036 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
8037 OMP_CLAUSE_CHAIN (c) = *list_p;
8038 *list_p = c;
8039 return 1;
8040}
8041
ca4c3545 8042/* Scan the OMP clauses in *LIST_P, installing mappings into a new
1e8e9920 8043   OMP context and, where needed, into the surrounding contexts.  */
8044
8045static void
75a70cf9 8046gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
43895be5 8047 enum omp_region_type region_type,
8048 enum tree_code code)
1e8e9920 8049{
8050 struct gimplify_omp_ctx *ctx, *outer_ctx;
8051 tree c;
43895be5 8052 hash_map<tree, tree> *struct_map_to_clause = NULL;
9561765e 8053 tree *prev_list_p = NULL;
7e5a76c8 8054 int handled_depend_iterators = -1;
8055 int nowait = -1;
1e8e9920 8056
fd6481cf 8057 ctx = new_omp_context (region_type);
1e8e9920 8058 outer_ctx = ctx->outer_context;
44b49e6b 8059 if (code == OMP_TARGET)
43895be5 8060 {
44b49e6b 8061 if (!lang_GNU_Fortran ())
7e5a76c8 8062 ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
8063 ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
43895be5 8064 }
8065 if (!lang_GNU_Fortran ())
8066 switch (code)
8067 {
8068 case OMP_TARGET:
8069 case OMP_TARGET_DATA:
8070 case OMP_TARGET_ENTER_DATA:
8071 case OMP_TARGET_EXIT_DATA:
c4c30562 8072 case OACC_DECLARE:
571b3486 8073 case OACC_HOST_DATA:
93251441 8074 case OACC_PARALLEL:
8075 case OACC_KERNELS:
43895be5 8076 ctx->target_firstprivatize_array_bases = true;
8077 default:
8078 break;
8079 }
1e8e9920 8080
8081 while ((c = *list_p) != NULL)
8082 {
1e8e9920 8083 bool remove = false;
8084 bool notice_outer = true;
f9b59f0c 8085 const char *check_non_private = NULL;
1e8e9920 8086 unsigned int flags;
8087 tree decl;
8088
55d6e7cd 8089 switch (OMP_CLAUSE_CODE (c))
1e8e9920 8090 {
8091 case OMP_CLAUSE_PRIVATE:
8092 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
fd6481cf 8093 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
8094 {
8095 flags |= GOVD_PRIVATE_OUTER_REF;
8096 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
8097 }
8098 else
8099 notice_outer = false;
1e8e9920 8100 goto do_add;
8101 case OMP_CLAUSE_SHARED:
8102 flags = GOVD_SHARED | GOVD_EXPLICIT;
8103 goto do_add;
8104 case OMP_CLAUSE_FIRSTPRIVATE:
8105 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
f9b59f0c 8106 check_non_private = "firstprivate";
1e8e9920 8107 goto do_add;
8108 case OMP_CLAUSE_LASTPRIVATE:
7e5a76c8 8109 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8110 switch (code)
8111 {
8112 case OMP_DISTRIBUTE:
8113 error_at (OMP_CLAUSE_LOCATION (c),
8114 "conditional %<lastprivate%> clause on "
8115 "%<distribute%> construct");
8116 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8117 break;
8118 case OMP_TASKLOOP:
8119 error_at (OMP_CLAUSE_LOCATION (c),
8120 "conditional %<lastprivate%> clause on "
8121 "%<taskloop%> construct");
8122 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8123 break;
8124 default:
8125 break;
8126 }
1e8e9920 8127 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
f9b59f0c 8128 check_non_private = "lastprivate";
d7729e26 8129 decl = OMP_CLAUSE_DECL (c);
44b49e6b 8130 if (error_operand_p (decl))
d7729e26 8131 goto do_add;
7e5a76c8 8132 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
8133 && !lang_hooks.decls.omp_scalar_p (decl))
8134 {
8135 error_at (OMP_CLAUSE_LOCATION (c),
8136 "non-scalar variable %qD in conditional "
8137 "%<lastprivate%> clause", decl);
8138 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8139 }
a25ce283 8140 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8141 sorry_at (OMP_CLAUSE_LOCATION (c),
8142 "%<conditional%> modifier on %<lastprivate%> clause "
8143 "not supported yet");
7e5a76c8 8144 if (outer_ctx
8145 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
8146 || ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
8147 == ORT_COMBINED_TEAMS))
8148 && splay_tree_lookup (outer_ctx->variables,
8149 (splay_tree_key) decl) == NULL)
9561765e 8150 {
8151 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
8152 if (outer_ctx->outer_context)
8153 omp_notice_variable (outer_ctx->outer_context, decl, true);
8154 }
43895be5 8155 else if (outer_ctx
8156 && (outer_ctx->region_type & ORT_TASK) != 0
8157 && outer_ctx->combined_loop
8158 && splay_tree_lookup (outer_ctx->variables,
8159 (splay_tree_key) decl) == NULL)
9561765e 8160 {
8161 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8162 if (outer_ctx->outer_context)
8163 omp_notice_variable (outer_ctx->outer_context, decl, true);
8164 }
d7729e26 8165 else if (outer_ctx
b656be3a 8166 && (outer_ctx->region_type == ORT_WORKSHARE
8167 || outer_ctx->region_type == ORT_ACC)
d7729e26 8168 && outer_ctx->combined_loop
8169 && splay_tree_lookup (outer_ctx->variables,
8170 (splay_tree_key) decl) == NULL
8171 && !omp_check_private (outer_ctx, decl, false))
8172 {
8173 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8174 if (outer_ctx->outer_context
8175 && (outer_ctx->outer_context->region_type
8176 == ORT_COMBINED_PARALLEL)
8177 && splay_tree_lookup (outer_ctx->outer_context->variables,
8178 (splay_tree_key) decl) == NULL)
9561765e 8179 {
8180 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
8181 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
8182 if (octx->outer_context)
44b49e6b 8183 {
8184 octx = octx->outer_context;
8185 if (octx->region_type == ORT_WORKSHARE
8186 && octx->combined_loop
8187 && splay_tree_lookup (octx->variables,
8188 (splay_tree_key) decl) == NULL
8189 && !omp_check_private (octx, decl, false))
8190 {
8191 omp_add_variable (octx, decl,
8192 GOVD_LASTPRIVATE | GOVD_SEEN);
8193 octx = octx->outer_context;
8194 if (octx
7e5a76c8 8195 && ((octx->region_type & ORT_COMBINED_TEAMS)
8196 == ORT_COMBINED_TEAMS)
44b49e6b 8197 && (splay_tree_lookup (octx->variables,
8198 (splay_tree_key) decl)
8199 == NULL))
8200 {
8201 omp_add_variable (octx, decl,
8202 GOVD_SHARED | GOVD_SEEN);
8203 octx = octx->outer_context;
8204 }
8205 }
8206 if (octx)
8207 omp_notice_variable (octx, decl, true);
8208 }
9561765e 8209 }
8210 else if (outer_ctx->outer_context)
8211 omp_notice_variable (outer_ctx->outer_context, decl, true);
d7729e26 8212 }
1e8e9920 8213 goto do_add;
8214 case OMP_CLAUSE_REDUCTION:
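	  /* Another hedged example (hypothetical user code): the OpenMP 5.0
	     task reduction modifier in
	       #pragma omp for reduction (task, +: s) nowait
	     is not allowed together with nowait on a plain worksharing loop;
	     the checks below diagnose that combination and clear
	     OMP_CLAUSE_REDUCTION_TASK before continuing.  */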
7e5a76c8 8215 if (OMP_CLAUSE_REDUCTION_TASK (c))
8216 {
8217 if (region_type == ORT_WORKSHARE)
8218 {
8219 if (nowait == -1)
8220 nowait = omp_find_clause (*list_p,
8221 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8222 if (nowait
8223 && (outer_ctx == NULL
8224 || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
8225 {
8226 error_at (OMP_CLAUSE_LOCATION (c),
8227 "%<task%> reduction modifier on a construct "
8228 "with a %<nowait%> clause");
8229 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8230 }
8231 }
8232 else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
8233 {
8234 error_at (OMP_CLAUSE_LOCATION (c),
8235 "invalid %<task%> reduction modifier on construct "
8236 "other than %<parallel%>, %<for%> or %<sections%>");
8237 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8238 }
8239 }
8240 /* FALLTHRU */
8241 case OMP_CLAUSE_IN_REDUCTION:
8242 case OMP_CLAUSE_TASK_REDUCTION:
1e8e9920 8243 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
b656be3a 8244 /* OpenACC permits reductions on private variables. */
7e5a76c8 8245 if (!(region_type & ORT_ACC)
8246 /* taskgroup is actually not a worksharing region. */
8247 && code != OMP_TASKGROUP)
8248 check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
43895be5 8249 decl = OMP_CLAUSE_DECL (c);
8250 if (TREE_CODE (decl) == MEM_REF)
8251 {
8252 tree type = TREE_TYPE (decl);
8253 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
9ae1b28a 8254 NULL, is_gimple_val, fb_rvalue, false)
8255 == GS_ERROR)
43895be5 8256 {
8257 remove = true;
8258 break;
8259 }
8260 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8261 if (DECL_P (v))
8262 {
8263 omp_firstprivatize_variable (ctx, v);
8264 omp_notice_variable (ctx, v, true);
8265 }
8266 decl = TREE_OPERAND (decl, 0);
9561765e 8267 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
8268 {
8269 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
9ae1b28a 8270 NULL, is_gimple_val, fb_rvalue, false)
9561765e 8271 == GS_ERROR)
8272 {
8273 remove = true;
8274 break;
8275 }
8276 v = TREE_OPERAND (decl, 1);
8277 if (DECL_P (v))
8278 {
8279 omp_firstprivatize_variable (ctx, v);
8280 omp_notice_variable (ctx, v, true);
8281 }
8282 decl = TREE_OPERAND (decl, 0);
8283 }
43895be5 8284 if (TREE_CODE (decl) == ADDR_EXPR
8285 || TREE_CODE (decl) == INDIRECT_REF)
8286 decl = TREE_OPERAND (decl, 0);
8287 }
8288 goto do_add_decl;
bc7bff74 8289 case OMP_CLAUSE_LINEAR:
8290 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
8291 is_gimple_val, fb_rvalue) == GS_ERROR)
8292 {
8293 remove = true;
8294 break;
8295 }
d7729e26 8296 else
8297 {
43895be5 8298 if (code == OMP_SIMD
8299 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8300 {
8301 struct gimplify_omp_ctx *octx = outer_ctx;
8302 if (octx
8303 && octx->region_type == ORT_WORKSHARE
8304 && octx->combined_loop
8305 && !octx->distribute)
8306 {
8307 if (octx->outer_context
8308 && (octx->outer_context->region_type
8309 == ORT_COMBINED_PARALLEL))
8310 octx = octx->outer_context->outer_context;
8311 else
8312 octx = octx->outer_context;
8313 }
8314 if (octx
8315 && octx->region_type == ORT_WORKSHARE
8316 && octx->combined_loop
44b49e6b 8317 && octx->distribute)
43895be5 8318 {
8319 error_at (OMP_CLAUSE_LOCATION (c),
8320 "%<linear%> clause for variable other than "
8321 "loop iterator specified on construct "
8322 "combined with %<distribute%>");
8323 remove = true;
8324 break;
8325 }
8326 }
d7729e26 8327 /* For combined #pragma omp parallel for simd, need to put
8328 lastprivate and perhaps firstprivate too on the
8329 parallel. Similarly for #pragma omp for simd. */
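	      /* A minimal sketch (hypothetical user code): for

		   int i, j = 0;
		   #pragma omp parallel for simd linear (j:1)
		   for (i = 0; i < n; i++)
		     a[j++] = i;

		 the loop below walks the enclosing contexts and adds J with
		 GOVD_LASTPRIVATE (plus GOVD_FIRSTPRIVATE when copy-in is
		 needed) on the enclosing parallel, so J's final value is
		 written back after the combined construct.  */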
8330 struct gimplify_omp_ctx *octx = outer_ctx;
8331 decl = NULL_TREE;
d7729e26 8332 do
8333 {
8334 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8335 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8336 break;
8337 decl = OMP_CLAUSE_DECL (c);
8338 if (error_operand_p (decl))
8339 {
8340 decl = NULL_TREE;
8341 break;
8342 }
43895be5 8343 flags = GOVD_SEEN;
8344 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8345 flags |= GOVD_FIRSTPRIVATE;
8346 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8347 flags |= GOVD_LASTPRIVATE;
d7729e26 8348 if (octx
8349 && octx->region_type == ORT_WORKSHARE
8350 && octx->combined_loop)
8351 {
8352 if (octx->outer_context
8353 && (octx->outer_context->region_type
9561765e 8354 == ORT_COMBINED_PARALLEL))
d7729e26 8355 octx = octx->outer_context;
8356 else if (omp_check_private (octx, decl, false))
8357 break;
8358 }
43895be5 8359 else if (octx
8360 && (octx->region_type & ORT_TASK) != 0
8361 && octx->combined_loop)
8362 ;
8363 else if (octx
8364 && octx->region_type == ORT_COMBINED_PARALLEL
8365 && ctx->region_type == ORT_WORKSHARE
8366 && octx == outer_ctx)
8367 flags = GOVD_SEEN | GOVD_SHARED;
9561765e 8368 else if (octx
7e5a76c8 8369 && ((octx->region_type & ORT_COMBINED_TEAMS)
8370 == ORT_COMBINED_TEAMS))
9561765e 8371 flags = GOVD_SEEN | GOVD_SHARED;
43895be5 8372 else if (octx
8373 && octx->region_type == ORT_COMBINED_TARGET)
9561765e 8374 {
8375 flags &= ~GOVD_LASTPRIVATE;
8376 if (flags == GOVD_SEEN)
8377 break;
8378 }
d7729e26 8379 else
8380 break;
43895be5 8381 splay_tree_node on
8382 = splay_tree_lookup (octx->variables,
8383 (splay_tree_key) decl);
8384 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
e56218b3 8385 {
8386 octx = NULL;
8387 break;
8388 }
d7729e26 8389 omp_add_variable (octx, decl, flags);
8390 if (octx->outer_context == NULL)
8391 break;
8392 octx = octx->outer_context;
8393 }
8394 while (1);
8395 if (octx
8396 && decl
8397 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8398 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
8399 omp_notice_variable (octx, decl, true);
8400 }
bc7bff74 8401 flags = GOVD_LINEAR | GOVD_EXPLICIT;
d7729e26 8402 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8403 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8404 {
8405 notice_outer = false;
8406 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8407 }
bc7bff74 8408 goto do_add;
8409
8410 case OMP_CLAUSE_MAP:
c3f3b68d 8411 decl = OMP_CLAUSE_DECL (c);
8412 if (error_operand_p (decl))
43895be5 8413 remove = true;
8414 switch (code)
c3f3b68d 8415 {
43895be5 8416 case OMP_TARGET:
8417 break;
6d6a3fc3 8418 case OACC_DATA:
8419 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
8420 break;
e3533433 8421 /* FALLTHRU */
43895be5 8422 case OMP_TARGET_DATA:
8423 case OMP_TARGET_ENTER_DATA:
8424 case OMP_TARGET_EXIT_DATA:
6d6a3fc3 8425 case OACC_ENTER_DATA:
8426 case OACC_EXIT_DATA:
571b3486 8427 case OACC_HOST_DATA:
9561765e 8428 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8429 || (OMP_CLAUSE_MAP_KIND (c)
8430 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
43895be5 8431 /* For target {,enter ,exit }data only the array slice is
8432 mapped, but not the pointer to it. */
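	      /* E.g. (hypothetical user code) for
		   #pragma omp target enter data map(to: p[0:n])
		 the front ends can emit a map for the array section together
		 with a GOMP_MAP_FIRSTPRIVATE_POINTER clause for the base
		 pointer P; with no target region to privatize P into, that
		 pointer clause is dropped here.  */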
8433 remove = true;
8434 break;
8435 default:
c3f3b68d 8436 break;
8437 }
43895be5 8438 if (remove)
8439 break;
571b3486 8440 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
8441 {
8442 struct gimplify_omp_ctx *octx;
8443 for (octx = outer_ctx; octx; octx = octx->outer_context)
8444 {
8445 if (octx->region_type != ORT_ACC_HOST_DATA)
8446 break;
8447 splay_tree_node n2
8448 = splay_tree_lookup (octx->variables,
8449 (splay_tree_key) decl);
8450 if (n2)
8451 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
8452 "declared in enclosing %<host_data%> region",
8453 DECL_NAME (decl));
8454 }
8455 }
c3f3b68d 8456 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8457 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8458 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8459 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8460 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
bc7bff74 8461 {
8462 remove = true;
8463 break;
8464 }
9561765e 8465 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8466 || (OMP_CLAUSE_MAP_KIND (c)
8467 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
43895be5 8468 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
8469 {
8470 OMP_CLAUSE_SIZE (c)
9ae1b28a 8471 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
8472 false);
43895be5 8473 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
8474 GOVD_FIRSTPRIVATE | GOVD_SEEN);
8475 }
bc7bff74 8476 if (!DECL_P (decl))
8477 {
43895be5 8478 tree d = decl, *pd;
8479 if (TREE_CODE (d) == ARRAY_REF)
8480 {
8481 while (TREE_CODE (d) == ARRAY_REF)
8482 d = TREE_OPERAND (d, 0);
8483 if (TREE_CODE (d) == COMPONENT_REF
8484 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
8485 decl = d;
8486 }
8487 pd = &OMP_CLAUSE_DECL (c);
8488 if (d == decl
8489 && TREE_CODE (decl) == INDIRECT_REF
8490 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
8491 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8492 == REFERENCE_TYPE))
8493 {
8494 pd = &TREE_OPERAND (decl, 0);
8495 decl = TREE_OPERAND (decl, 0);
8496 }
8497 if (TREE_CODE (decl) == COMPONENT_REF)
8498 {
8499 while (TREE_CODE (decl) == COMPONENT_REF)
8500 decl = TREE_OPERAND (decl, 0);
d8a3bc93 8501 if (TREE_CODE (decl) == INDIRECT_REF
8502 && DECL_P (TREE_OPERAND (decl, 0))
8503 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8504 == REFERENCE_TYPE))
8505 decl = TREE_OPERAND (decl, 0);
43895be5 8506 }
8507 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
bc7bff74 8508 == GS_ERROR)
8509 {
8510 remove = true;
8511 break;
8512 }
43895be5 8513 if (DECL_P (decl))
8514 {
8515 if (error_operand_p (decl))
8516 {
8517 remove = true;
8518 break;
8519 }
8520
d8a3bc93 8521 tree stype = TREE_TYPE (decl);
8522 if (TREE_CODE (stype) == REFERENCE_TYPE)
8523 stype = TREE_TYPE (stype);
8524 if (TYPE_SIZE_UNIT (stype) == NULL
8525 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
43895be5 8526 {
8527 error_at (OMP_CLAUSE_LOCATION (c),
8528 "mapping field %qE of variable length "
8529 "structure", OMP_CLAUSE_DECL (c));
8530 remove = true;
8531 break;
8532 }
8533
9561765e 8534 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
8535 {
8536 /* Error recovery. */
8537 if (prev_list_p == NULL)
8538 {
8539 remove = true;
8540 break;
8541 }
8542 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8543 {
8544 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
8545 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
8546 {
8547 remove = true;
8548 break;
8549 }
8550 }
8551 }
8552
43895be5 8553 tree offset;
81bc0f0f 8554 poly_int64 bitsize, bitpos;
43895be5 8555 machine_mode mode;
292237f3 8556 int unsignedp, reversep, volatilep = 0;
43895be5 8557 tree base = OMP_CLAUSE_DECL (c);
8558 while (TREE_CODE (base) == ARRAY_REF)
8559 base = TREE_OPERAND (base, 0);
8560 if (TREE_CODE (base) == INDIRECT_REF)
8561 base = TREE_OPERAND (base, 0);
8562 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
292237f3 8563 &mode, &unsignedp, &reversep,
b3b6e4b5 8564 &volatilep);
d8a3bc93 8565 tree orig_base = base;
8566 if ((TREE_CODE (base) == INDIRECT_REF
8567 || (TREE_CODE (base) == MEM_REF
8568 && integer_zerop (TREE_OPERAND (base, 1))))
8569 && DECL_P (TREE_OPERAND (base, 0))
8570 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
8571 == REFERENCE_TYPE))
8572 base = TREE_OPERAND (base, 0);
43895be5 8573 gcc_assert (base == decl
8574 && (offset == NULL_TREE
81bc0f0f 8575 || poly_int_tree_p (offset)));
43895be5 8576
8577 splay_tree_node n
8578 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
8579 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
9561765e 8580 == GOMP_MAP_ALWAYS_POINTER);
8581 if (n == NULL || (n->value & GOVD_MAP) == 0)
43895be5 8582 {
9561765e 8583 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8584 OMP_CLAUSE_MAP);
8585 OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
d8a3bc93 8586 if (orig_base != base)
8587 OMP_CLAUSE_DECL (l) = unshare_expr (orig_base);
8588 else
8589 OMP_CLAUSE_DECL (l) = decl;
9561765e 8590 OMP_CLAUSE_SIZE (l) = size_int (1);
8591 if (struct_map_to_clause == NULL)
8592 struct_map_to_clause = new hash_map<tree, tree>;
8593 struct_map_to_clause->put (decl, l);
43895be5 8594 if (ptr)
8595 {
9561765e 8596 enum gomp_map_kind mkind
8597 = code == OMP_TARGET_EXIT_DATA
8598 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
43895be5 8599 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9561765e 8600 OMP_CLAUSE_MAP);
8601 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8602 OMP_CLAUSE_DECL (c2)
8603 = unshare_expr (OMP_CLAUSE_DECL (c));
8604 OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
8605 OMP_CLAUSE_SIZE (c2)
8606 = TYPE_SIZE_UNIT (ptr_type_node);
8607 OMP_CLAUSE_CHAIN (l) = c2;
8608 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8609 {
8610 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
8611 tree c3
8612 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8613 OMP_CLAUSE_MAP);
8614 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8615 OMP_CLAUSE_DECL (c3)
8616 = unshare_expr (OMP_CLAUSE_DECL (c4));
8617 OMP_CLAUSE_SIZE (c3)
8618 = TYPE_SIZE_UNIT (ptr_type_node);
8619 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
8620 OMP_CLAUSE_CHAIN (c2) = c3;
8621 }
8622 *prev_list_p = l;
8623 prev_list_p = NULL;
8624 }
8625 else
8626 {
8627 OMP_CLAUSE_CHAIN (l) = c;
8628 *list_p = l;
8629 list_p = &OMP_CLAUSE_CHAIN (l);
43895be5 8630 }
d8a3bc93 8631 if (orig_base != base && code == OMP_TARGET)
8632 {
8633 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8634 OMP_CLAUSE_MAP);
8635 enum gomp_map_kind mkind
8636 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
8637 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8638 OMP_CLAUSE_DECL (c2) = decl;
8639 OMP_CLAUSE_SIZE (c2) = size_zero_node;
8640 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
8641 OMP_CLAUSE_CHAIN (l) = c2;
8642 }
43895be5 8643 flags = GOVD_MAP | GOVD_EXPLICIT;
9561765e 8644 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
43895be5 8645 flags |= GOVD_SEEN;
8646 goto do_add_decl;
8647 }
8648 else
8649 {
8650 tree *osc = struct_map_to_clause->get (decl);
9561765e 8651 tree *sc = NULL, *scp = NULL;
8652 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
43895be5 8653 n->value |= GOVD_SEEN;
81bc0f0f 8654 poly_offset_int o1, o2;
43895be5 8655 if (offset)
81bc0f0f 8656 o1 = wi::to_poly_offset (offset);
43895be5 8657 else
8658 o1 = 0;
81bc0f0f 8659 if (maybe_ne (bitpos, 0))
8660 o1 += bits_to_bytes_round_down (bitpos);
d8a3bc93 8661 sc = &OMP_CLAUSE_CHAIN (*osc);
8662 if (*sc != c
8663 && (OMP_CLAUSE_MAP_KIND (*sc)
8664 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8665 sc = &OMP_CLAUSE_CHAIN (*sc);
8666 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
9561765e 8667 if (ptr && sc == prev_list_p)
8668 break;
8669 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
8670 != COMPONENT_REF
8671 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
8672 != INDIRECT_REF)
8673 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
8674 != ARRAY_REF))
43895be5 8675 break;
8676 else
8677 {
8678 tree offset2;
81bc0f0f 8679 poly_int64 bitsize2, bitpos2;
43895be5 8680 base = OMP_CLAUSE_DECL (*sc);
8681 if (TREE_CODE (base) == ARRAY_REF)
8682 {
8683 while (TREE_CODE (base) == ARRAY_REF)
8684 base = TREE_OPERAND (base, 0);
8685 if (TREE_CODE (base) != COMPONENT_REF
8686 || (TREE_CODE (TREE_TYPE (base))
8687 != ARRAY_TYPE))
8688 break;
8689 }
8690 else if (TREE_CODE (base) == INDIRECT_REF
8691 && (TREE_CODE (TREE_OPERAND (base, 0))
8692 == COMPONENT_REF)
8693 && (TREE_CODE (TREE_TYPE
8694 (TREE_OPERAND (base, 0)))
8695 == REFERENCE_TYPE))
8696 base = TREE_OPERAND (base, 0);
8697 base = get_inner_reference (base, &bitsize2,
8698 &bitpos2, &offset2,
8699 &mode, &unsignedp,
b3b6e4b5 8700 &reversep, &volatilep);
d8a3bc93 8701 if ((TREE_CODE (base) == INDIRECT_REF
8702 || (TREE_CODE (base) == MEM_REF
8703 && integer_zerop (TREE_OPERAND (base,
8704 1))))
8705 && DECL_P (TREE_OPERAND (base, 0))
8706 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base,
8707 0)))
8708 == REFERENCE_TYPE))
8709 base = TREE_OPERAND (base, 0);
43895be5 8710 if (base != decl)
8711 break;
9561765e 8712 if (scp)
8713 continue;
43895be5 8714 gcc_assert (offset == NULL_TREE
81bc0f0f 8715 || poly_int_tree_p (offset));
43895be5 8716 tree d1 = OMP_CLAUSE_DECL (*sc);
8717 tree d2 = OMP_CLAUSE_DECL (c);
8718 while (TREE_CODE (d1) == ARRAY_REF)
8719 d1 = TREE_OPERAND (d1, 0);
8720 while (TREE_CODE (d2) == ARRAY_REF)
8721 d2 = TREE_OPERAND (d2, 0);
8722 if (TREE_CODE (d1) == INDIRECT_REF)
8723 d1 = TREE_OPERAND (d1, 0);
8724 if (TREE_CODE (d2) == INDIRECT_REF)
8725 d2 = TREE_OPERAND (d2, 0);
8726 while (TREE_CODE (d1) == COMPONENT_REF)
8727 if (TREE_CODE (d2) == COMPONENT_REF
8728 && TREE_OPERAND (d1, 1)
8729 == TREE_OPERAND (d2, 1))
8730 {
8731 d1 = TREE_OPERAND (d1, 0);
8732 d2 = TREE_OPERAND (d2, 0);
8733 }
8734 else
8735 break;
8736 if (d1 == d2)
8737 {
8738 error_at (OMP_CLAUSE_LOCATION (c),
8739 "%qE appears more than once in map "
8740 "clauses", OMP_CLAUSE_DECL (c));
8741 remove = true;
8742 break;
8743 }
8744 if (offset2)
81bc0f0f 8745 o2 = wi::to_poly_offset (offset2);
43895be5 8746 else
8747 o2 = 0;
81bc0f0f 8748 o2 += bits_to_bytes_round_down (bitpos2);
8749 if (maybe_lt (o1, o2)
a9191765 8750 || (known_eq (o1, o2)
81bc0f0f 8751 && maybe_lt (bitpos, bitpos2)))
9561765e 8752 {
8753 if (ptr)
8754 scp = sc;
8755 else
8756 break;
8757 }
43895be5 8758 }
9561765e 8759 if (remove)
8760 break;
8761 OMP_CLAUSE_SIZE (*osc)
8762 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
8763 size_one_node);
43895be5 8764 if (ptr)
8765 {
9561765e 8766 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8767 OMP_CLAUSE_MAP);
8768 tree cl = NULL_TREE;
8769 enum gomp_map_kind mkind
8770 = code == OMP_TARGET_EXIT_DATA
8771 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8772 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8773 OMP_CLAUSE_DECL (c2)
8774 = unshare_expr (OMP_CLAUSE_DECL (c));
8775 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
8776 OMP_CLAUSE_SIZE (c2)
8777 = TYPE_SIZE_UNIT (ptr_type_node);
8778 cl = scp ? *prev_list_p : c2;
8779 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8780 {
8781 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
8782 tree c3
8783 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8784 OMP_CLAUSE_MAP);
8785 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8786 OMP_CLAUSE_DECL (c3)
8787 = unshare_expr (OMP_CLAUSE_DECL (c4));
8788 OMP_CLAUSE_SIZE (c3)
8789 = TYPE_SIZE_UNIT (ptr_type_node);
8790 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
8791 if (!scp)
8792 OMP_CLAUSE_CHAIN (c2) = c3;
8793 else
8794 cl = c3;
8795 }
8796 if (scp)
8797 *scp = c2;
8798 if (sc == prev_list_p)
8799 {
8800 *sc = cl;
8801 prev_list_p = NULL;
8802 }
8803 else
8804 {
8805 *prev_list_p = OMP_CLAUSE_CHAIN (c);
8806 list_p = prev_list_p;
8807 prev_list_p = NULL;
8808 OMP_CLAUSE_CHAIN (c) = *sc;
8809 *sc = cl;
8810 continue;
8811 }
43895be5 8812 }
9561765e 8813 else if (*sc != c)
43895be5 8814 {
8815 *list_p = OMP_CLAUSE_CHAIN (c);
8816 OMP_CLAUSE_CHAIN (c) = *sc;
8817 *sc = c;
8818 continue;
8819 }
8820 }
8821 }
9561765e 8822 if (!remove
8823 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
8824 && OMP_CLAUSE_CHAIN (c)
8825 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
8826 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8827 == GOMP_MAP_ALWAYS_POINTER))
8828 prev_list_p = list_p;
bc7bff74 8829 break;
8830 }
8831 flags = GOVD_MAP | GOVD_EXPLICIT;
9561765e 8832 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
8833 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
8834 flags |= GOVD_MAP_ALWAYS_TO;
bc7bff74 8835 goto do_add;
8836
8837 case OMP_CLAUSE_DEPEND:
44b49e6b 8838 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
43895be5 8839 {
44b49e6b 8840 tree deps = OMP_CLAUSE_DECL (c);
8841 while (deps && TREE_CODE (deps) == TREE_LIST)
8842 {
8843 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
8844 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
8845 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
8846 pre_p, NULL, is_gimple_val, fb_rvalue);
8847 deps = TREE_CHAIN (deps);
8848 }
43895be5 8849 break;
8850 }
44b49e6b 8851 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
8852 break;
7e5a76c8 8853 if (handled_depend_iterators == -1)
8854 handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
8855 if (handled_depend_iterators)
8856 {
8857 if (handled_depend_iterators == 2)
8858 remove = true;
8859 break;
8860 }
bc7bff74 8861 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8862 {
8863 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8864 NULL, is_gimple_val, fb_rvalue);
8865 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8866 }
8867 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8868 {
8869 remove = true;
8870 break;
8871 }
8872 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8873 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8874 is_gimple_val, fb_rvalue) == GS_ERROR)
8875 {
8876 remove = true;
8877 break;
8878 }
8879 break;
8880
8881 case OMP_CLAUSE_TO:
8882 case OMP_CLAUSE_FROM:
ca4c3545 8883 case OMP_CLAUSE__CACHE_:
c3f3b68d 8884 decl = OMP_CLAUSE_DECL (c);
8885 if (error_operand_p (decl))
bc7bff74 8886 {
8887 remove = true;
8888 break;
8889 }
c3f3b68d 8890 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8891 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8892 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8893 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8894 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
bc7bff74 8895 {
8896 remove = true;
8897 break;
8898 }
8899 if (!DECL_P (decl))
8900 {
8901 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
8902 NULL, is_gimple_lvalue, fb_lvalue)
8903 == GS_ERROR)
8904 {
8905 remove = true;
8906 break;
8907 }
8908 break;
8909 }
8910 goto do_notice;
1e8e9920 8911
43895be5 8912 case OMP_CLAUSE_USE_DEVICE_PTR:
8913 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8914 goto do_add;
8915 case OMP_CLAUSE_IS_DEVICE_PTR:
8916 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8917 goto do_add;
8918
1e8e9920 8919 do_add:
8920 decl = OMP_CLAUSE_DECL (c);
43895be5 8921 do_add_decl:
da579dbb 8922 if (error_operand_p (decl))
1e8e9920 8923 {
8924 remove = true;
8925 break;
8926 }
43895be5 8927 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
8928 {
8929 tree t = omp_member_access_dummy_var (decl);
8930 if (t)
8931 {
8932 tree v = DECL_VALUE_EXPR (decl);
8933 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
8934 if (outer_ctx)
8935 omp_notice_variable (outer_ctx, t, true);
8936 }
8937 }
6d6a3fc3 8938 if (code == OACC_DATA
8939 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8940 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8941 flags |= GOVD_MAP_0LEN_ARRAY;
1e8e9920 8942 omp_add_variable (ctx, decl, flags);
7e5a76c8 8943 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8944 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
8945 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
1e8e9920 8946 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8947 {
8948 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
0f9d10d4 8949 GOVD_LOCAL | GOVD_SEEN);
43895be5 8950 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
8951 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
8952 find_decl_expr,
8953 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8954 NULL) == NULL_TREE)
8955 omp_add_variable (ctx,
8956 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8957 GOVD_LOCAL | GOVD_SEEN);
1e8e9920 8958 gimplify_omp_ctxp = ctx;
8a4a28a8 8959 push_gimplify_context ();
75a70cf9 8960
e3a19533 8961 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
8962 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
75a70cf9 8963
8964 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
8965 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
8966 pop_gimplify_context
8967 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
8a4a28a8 8968 push_gimplify_context ();
75a70cf9 8969 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
8970 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
48e1416a 8971 pop_gimplify_context
75a70cf9 8972 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
8973 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
8974 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
8975
1e8e9920 8976 gimplify_omp_ctxp = outer_ctx;
8977 }
fd6481cf 8978 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8979 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
8980 {
8981 gimplify_omp_ctxp = ctx;
8a4a28a8 8982 push_gimplify_context ();
fd6481cf 8983 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
8984 {
8985 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8986 NULL, NULL);
8987 TREE_SIDE_EFFECTS (bind) = 1;
8988 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
8989 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
8990 }
75a70cf9 8991 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
8992 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
8993 pop_gimplify_context
8994 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
8995 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
8996
15b28553 8997 gimplify_omp_ctxp = outer_ctx;
8998 }
8999 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9000 && OMP_CLAUSE_LINEAR_STMT (c))
9001 {
9002 gimplify_omp_ctxp = ctx;
9003 push_gimplify_context ();
9004 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
9005 {
9006 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
9007 NULL, NULL);
9008 TREE_SIDE_EFFECTS (bind) = 1;
9009 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
9010 OMP_CLAUSE_LINEAR_STMT (c) = bind;
9011 }
9012 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
9013 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
9014 pop_gimplify_context
9015 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
9016 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
9017
fd6481cf 9018 gimplify_omp_ctxp = outer_ctx;
9019 }
1e8e9920 9020 if (notice_outer)
9021 goto do_notice;
9022 break;
9023
9024 case OMP_CLAUSE_COPYIN:
9025 case OMP_CLAUSE_COPYPRIVATE:
9026 decl = OMP_CLAUSE_DECL (c);
da579dbb 9027 if (error_operand_p (decl))
1e8e9920 9028 {
9029 remove = true;
9030 break;
9031 }
b45e536e 9032 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
9033 && !remove
9034 && !omp_check_private (ctx, decl, true))
9035 {
9036 remove = true;
9037 if (is_global_var (decl))
9038 {
9039 if (DECL_THREAD_LOCAL_P (decl))
9040 remove = false;
9041 else if (DECL_HAS_VALUE_EXPR_P (decl))
9042 {
9043 tree value = get_base_address (DECL_VALUE_EXPR (decl));
9044
9045 if (value
9046 && DECL_P (value)
9047 && DECL_THREAD_LOCAL_P (value))
9048 remove = false;
9049 }
9050 }
9051 if (remove)
9052 error_at (OMP_CLAUSE_LOCATION (c),
9053 "copyprivate variable %qE is not threadprivate"
9054 " or private in outer context", DECL_NAME (decl));
9055 }
1e8e9920 9056 do_notice:
7e5a76c8 9057 if ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
9058 && outer_ctx
9059 && outer_ctx->region_type == ORT_COMBINED_PARALLEL
9060 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9061 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
9062 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE))
9063 {
9064 splay_tree_node on
9065 = splay_tree_lookup (outer_ctx->variables,
9066 (splay_tree_key)decl);
9067 if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
9068 {
9069 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9070 && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
9071 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
9072 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
9073 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
9074 == POINTER_TYPE))))
9075 omp_firstprivatize_variable (outer_ctx, decl);
9076 else
9077 omp_add_variable (outer_ctx, decl,
9078 GOVD_SEEN | GOVD_SHARED);
9079 omp_notice_variable (outer_ctx, decl, true);
9080 }
9081 }
1e8e9920 9082 if (outer_ctx)
9083 omp_notice_variable (outer_ctx, decl, true);
f9b59f0c 9084 if (check_non_private
fd6481cf 9085 && region_type == ORT_WORKSHARE
43895be5 9086 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
9087 || decl == OMP_CLAUSE_DECL (c)
9088 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
9089 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9561765e 9090 == ADDR_EXPR
9091 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9092 == POINTER_PLUS_EXPR
9093 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
9094 (OMP_CLAUSE_DECL (c), 0), 0))
9095 == ADDR_EXPR)))))
b45e536e 9096 && omp_check_private (ctx, decl, false))
f9b59f0c 9097 {
abd3e6b5 9098 error ("%s variable %qE is private in outer context",
9099 check_non_private, DECL_NAME (decl));
f9b59f0c 9100 remove = true;
9101 }
1e8e9920 9102 break;
9103
1e8e9920 9104 case OMP_CLAUSE_IF:
43895be5 9105 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
9106 && OMP_CLAUSE_IF_MODIFIER (c) != code)
9107 {
9108 const char *p[2];
9109 for (int i = 0; i < 2; i++)
9110 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
9111 {
7e5a76c8 9112 case VOID_CST: p[i] = "cancel"; break;
43895be5 9113 case OMP_PARALLEL: p[i] = "parallel"; break;
7e5a76c8 9114 case OMP_SIMD: p[i] = "simd"; break;
43895be5 9115 case OMP_TASK: p[i] = "task"; break;
9116 case OMP_TASKLOOP: p[i] = "taskloop"; break;
9117 case OMP_TARGET_DATA: p[i] = "target data"; break;
9118 case OMP_TARGET: p[i] = "target"; break;
9119 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
9120 case OMP_TARGET_ENTER_DATA:
9121 p[i] = "target enter data"; break;
9122 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
9123 default: gcc_unreachable ();
9124 }
9125 error_at (OMP_CLAUSE_LOCATION (c),
9126 "expected %qs %<if%> clause modifier rather than %qs",
9127 p[0], p[1]);
9128 remove = true;
9129 }
9130 /* Fall through. */
9131
9132 case OMP_CLAUSE_FINAL:
9315094f 9133 OMP_CLAUSE_OPERAND (c, 0)
9134 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
9135 /* Fall through. */
9136
9137 case OMP_CLAUSE_SCHEDULE:
1e8e9920 9138 case OMP_CLAUSE_NUM_THREADS:
bc7bff74 9139 case OMP_CLAUSE_NUM_TEAMS:
9140 case OMP_CLAUSE_THREAD_LIMIT:
9141 case OMP_CLAUSE_DIST_SCHEDULE:
9142 case OMP_CLAUSE_DEVICE:
43895be5 9143 case OMP_CLAUSE_PRIORITY:
9144 case OMP_CLAUSE_GRAINSIZE:
9145 case OMP_CLAUSE_NUM_TASKS:
9146 case OMP_CLAUSE_HINT:
ca4c3545 9147 case OMP_CLAUSE_ASYNC:
9148 case OMP_CLAUSE_WAIT:
9149 case OMP_CLAUSE_NUM_GANGS:
9150 case OMP_CLAUSE_NUM_WORKERS:
9151 case OMP_CLAUSE_VECTOR_LENGTH:
ca4c3545 9152 case OMP_CLAUSE_WORKER:
9153 case OMP_CLAUSE_VECTOR:
75a70cf9 9154 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
9155 is_gimple_val, fb_rvalue) == GS_ERROR)
bc7bff74 9156 remove = true;
43895be5 9157 break;
9158
9159 case OMP_CLAUSE_GANG:
9160 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
9161 is_gimple_val, fb_rvalue) == GS_ERROR)
9162 remove = true;
9163 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
9164 is_gimple_val, fb_rvalue) == GS_ERROR)
ca4c3545 9165 remove = true;
9166 break;
9167
1e8e9920 9168 case OMP_CLAUSE_NOWAIT:
7e5a76c8 9169 nowait = 1;
9170 break;
9171
1e8e9920 9172 case OMP_CLAUSE_ORDERED:
fd6481cf 9173 case OMP_CLAUSE_UNTIED:
9174 case OMP_CLAUSE_COLLAPSE:
719a7570 9175 case OMP_CLAUSE_TILE:
ca4c3545 9176 case OMP_CLAUSE_AUTO:
9177 case OMP_CLAUSE_SEQ:
ef014f95 9178 case OMP_CLAUSE_INDEPENDENT:
2169f33b 9179 case OMP_CLAUSE_MERGEABLE:
bc7bff74 9180 case OMP_CLAUSE_PROC_BIND:
3d483a94 9181 case OMP_CLAUSE_SAFELEN:
43895be5 9182 case OMP_CLAUSE_SIMDLEN:
9183 case OMP_CLAUSE_NOGROUP:
9184 case OMP_CLAUSE_THREADS:
9185 case OMP_CLAUSE_SIMD:
737cc978 9186 case OMP_CLAUSE_IF_PRESENT:
9187 case OMP_CLAUSE_FINALIZE:
43895be5 9188 break;
9189
9190 case OMP_CLAUSE_DEFAULTMAP:
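	  /* A hedged illustration (hypothetical user code): with
	       #pragma omp target defaultmap(tofrom:scalar)
	     implicitly referenced scalars are mapped tofrom instead of
	     getting the default firstprivate treatment; the two switches
	     below record that per-category choice in ctx->defaultmap.  */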
7e5a76c8 9191 enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
9192 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
9193 {
9194 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
9195 gdmkmin = GDMK_SCALAR;
9196 gdmkmax = GDMK_POINTER;
9197 break;
9198 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
9199 gdmkmin = gdmkmax = GDMK_SCALAR;
9200 break;
9201 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
9202 gdmkmin = gdmkmax = GDMK_AGGREGATE;
9203 break;
9204 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
9205 gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
9206 break;
9207 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
9208 gdmkmin = gdmkmax = GDMK_POINTER;
9209 break;
9210 default:
9211 gcc_unreachable ();
9212 }
9213 for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
9214 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
9215 {
9216 case OMP_CLAUSE_DEFAULTMAP_ALLOC:
9217 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
9218 break;
9219 case OMP_CLAUSE_DEFAULTMAP_TO:
9220 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
9221 break;
9222 case OMP_CLAUSE_DEFAULTMAP_FROM:
9223 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
9224 break;
9225 case OMP_CLAUSE_DEFAULTMAP_TOFROM:
9226 ctx->defaultmap[gdmk] = GOVD_MAP;
9227 break;
9228 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
9229 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
9230 break;
9231 case OMP_CLAUSE_DEFAULTMAP_NONE:
9232 ctx->defaultmap[gdmk] = 0;
9233 break;
9234 case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
9235 switch (gdmk)
9236 {
9237 case GDMK_SCALAR:
9238 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
9239 break;
9240 case GDMK_AGGREGATE:
9241 case GDMK_ALLOCATABLE:
9242 ctx->defaultmap[gdmk] = GOVD_MAP;
9243 break;
9244 case GDMK_POINTER:
9245 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
9246 break;
9247 default:
9248 gcc_unreachable ();
9249 }
9250 break;
9251 default:
9252 gcc_unreachable ();
9253 }
1e8e9920 9254 break;
9255
bc7bff74 9256 case OMP_CLAUSE_ALIGNED:
9257 decl = OMP_CLAUSE_DECL (c);
9258 if (error_operand_p (decl))
9259 {
9260 remove = true;
9261 break;
9262 }
c3f3b68d 9263 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
9264 is_gimple_val, fb_rvalue) == GS_ERROR)
9265 {
9266 remove = true;
9267 break;
9268 }
bc7bff74 9269 if (!is_global_var (decl)
9270 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
9271 omp_add_variable (ctx, decl, GOVD_ALIGNED);
9272 break;
9273
7e5a76c8 9274 case OMP_CLAUSE_NONTEMPORAL:
9275 decl = OMP_CLAUSE_DECL (c);
9276 if (error_operand_p (decl))
9277 {
9278 remove = true;
9279 break;
9280 }
9281 omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
9282 break;
9283
1e8e9920 9284 case OMP_CLAUSE_DEFAULT:
9285 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
9286 break;
9287
9288 default:
9289 gcc_unreachable ();
9290 }
9291
6d6a3fc3 9292 if (code == OACC_DATA
9293 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
93251441 9294 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9295 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
6d6a3fc3 9296 remove = true;
1e8e9920 9297 if (remove)
9298 *list_p = OMP_CLAUSE_CHAIN (c);
9299 else
9300 list_p = &OMP_CLAUSE_CHAIN (c);
9301 }
9302
9303 gimplify_omp_ctxp = ctx;
43895be5 9304 if (struct_map_to_clause)
9305 delete struct_map_to_clause;
1e8e9920 9306}
9307
98588013 9308/* Return true if DECL is a candidate for shared to firstprivate
 9309   optimization.  We only consider non-addressable scalars that are
 9310   not too big and are not references.  */
9311
9312static bool
9313omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
9314{
9315 if (TREE_ADDRESSABLE (decl))
9316 return false;
9317 tree type = TREE_TYPE (decl);
9318 if (!is_gimple_reg_type (type)
9319 || TREE_CODE (type) == REFERENCE_TYPE
9320 || TREE_ADDRESSABLE (type))
9321 return false;
9322 /* Don't optimize too large decls, as each thread/task will have
9323 its own. */
9324 HOST_WIDE_INT len = int_size_in_bytes (type);
9325 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
9326 return false;
9327 if (lang_hooks.decls.omp_privatize_by_reference (decl))
9328 return false;
9329 return true;
9330}
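
/* As a minimal sketch (hypothetical user code), in

     int x = 42;
   #pragma omp parallel shared(x)
     use (x);

   X is a small, non-addressable, non-reference scalar, so the predicate
   above returns true; if no store to X is then found in the region, the
   SHARED clause can be marked OMP_CLAUSE_SHARED_READONLY so that later
   lowering may treat X like a firstprivate copy.  */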
9331
9332/* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
9333 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
9334 GOVD_WRITTEN in outer contexts. */
9335
9336static void
9337omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
9338{
9339 for (; ctx; ctx = ctx->outer_context)
9340 {
9341 splay_tree_node n = splay_tree_lookup (ctx->variables,
9342 (splay_tree_key) decl);
9343 if (n == NULL)
9344 continue;
9345 else if (n->value & GOVD_SHARED)
9346 {
9347 n->value |= GOVD_WRITTEN;
9348 return;
9349 }
9350 else if (n->value & GOVD_DATA_SHARE_CLASS)
9351 return;
9352 }
9353}
9354
9355/* Helper callback for walk_gimple_seq to discover possible stores
9356 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
 9357   GOVD_WRITTEN for those that are GOVD_SHARED in some outer
 9358   context.  */
9359
9360static tree
9361omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
9362{
9363 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
9364
9365 *walk_subtrees = 0;
9366 if (!wi->is_lhs)
9367 return NULL_TREE;
9368
9369 tree op = *tp;
9370 do
9371 {
9372 if (handled_component_p (op))
9373 op = TREE_OPERAND (op, 0);
9374 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
9375 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
9376 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
9377 else
9378 break;
9379 }
9380 while (1);
9381 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
9382 return NULL_TREE;
9383
9384 omp_mark_stores (gimplify_omp_ctxp, op);
9385 return NULL_TREE;
9386}
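
/* For example (a hypothetical fragment), in

     int x = 0;
   #pragma omp parallel shared(x)
     x = 1;

   the store to X reaches this callback as an LHS; the loop above strips
   any wrapping handled components or MEM_REFs down to the base decl, and
   omp_mark_stores then records GOVD_WRITTEN on the GOVD_SHARED entry for
   X, which prevents it from being marked OMP_CLAUSE_SHARED_READONLY.  */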
9387
9388/* Helper callback for walk_gimple_seq to discover possible stores
9389 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
 9390   GOVD_WRITTEN for those that are GOVD_SHARED in some outer
 9391   context.  */
9392
9393static tree
9394omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
9395 bool *handled_ops_p,
9396 struct walk_stmt_info *wi)
9397{
9398 gimple *stmt = gsi_stmt (*gsi_p);
9399 switch (gimple_code (stmt))
9400 {
9401 /* Don't recurse on OpenMP constructs for which
9402 gimplify_adjust_omp_clauses already handled the bodies,
9403 except handle gimple_omp_for_pre_body. */
9404 case GIMPLE_OMP_FOR:
9405 *handled_ops_p = true;
9406 if (gimple_omp_for_pre_body (stmt))
9407 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9408 omp_find_stores_stmt, omp_find_stores_op, wi);
9409 break;
9410 case GIMPLE_OMP_PARALLEL:
9411 case GIMPLE_OMP_TASK:
9412 case GIMPLE_OMP_SECTIONS:
9413 case GIMPLE_OMP_SINGLE:
9414 case GIMPLE_OMP_TARGET:
9415 case GIMPLE_OMP_TEAMS:
9416 case GIMPLE_OMP_CRITICAL:
9417 *handled_ops_p = true;
9418 break;
9419 default:
9420 break;
9421 }
9422 return NULL_TREE;
9423}
9424
691447ab 9425struct gimplify_adjust_omp_clauses_data
9426{
9427 tree *list_p;
9428 gimple_seq *pre_p;
9429};
9430
1e8e9920 9431/* For all variables that were not actually used within the context,
 9432   remove PRIVATE, SHARED, and FIRSTPRIVATE clauses.  For variables that
   were seen but carry no explicit clause, this callback instead builds the
   corresponding implicit clause on *LIST_P.  */
9433
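/* A rough example (hypothetical user code) of the implicit clauses this
   callback synthesizes: in

     int x = 0;
   #pragma omp target
     x++;

   X has no explicit clause; with the default defaultmap it was recorded
   as GOVD_FIRSTPRIVATE, so an implicit OMP_CLAUSE_FIRSTPRIVATE for X is
   materialized here (a MAP clause would be built instead for aggregates
   or under defaultmap(tofrom:scalar)).  */
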
9434static int
9435gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
9436{
691447ab 9437 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
9438 gimple_seq *pre_p
9439 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
1e8e9920 9440 tree decl = (tree) n->key;
9441 unsigned flags = n->value;
55d6e7cd 9442 enum omp_clause_code code;
1e8e9920 9443 tree clause;
9444 bool private_debug;
9445
9446 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
9447 return 0;
9448 if ((flags & GOVD_SEEN) == 0)
9449 return 0;
9450 if (flags & GOVD_DEBUG_PRIVATE)
9451 {
2be704a8 9452 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
1e8e9920 9453 private_debug = true;
9454 }
bc7bff74 9455 else if (flags & GOVD_MAP)
9456 private_debug = false;
1e8e9920 9457 else
9458 private_debug
9459 = lang_hooks.decls.omp_private_debug_clause (decl,
9460 !!(flags & GOVD_SHARED));
9461 if (private_debug)
9462 code = OMP_CLAUSE_PRIVATE;
bc7bff74 9463 else if (flags & GOVD_MAP)
0b80c4b2 9464 {
9465 code = OMP_CLAUSE_MAP;
9466 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
9467 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
9468 {
9469 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
9470 return 0;
9471 }
9472 }
1e8e9920 9473 else if (flags & GOVD_SHARED)
9474 {
9475 if (is_global_var (decl))
c37594c7 9476 {
9477 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
9478 while (ctx != NULL)
9479 {
9480 splay_tree_node on
9481 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9482 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
3d483a94 9483 | GOVD_PRIVATE | GOVD_REDUCTION
9f9cc47d 9484 | GOVD_LINEAR | GOVD_MAP)) != 0)
c37594c7 9485 break;
9486 ctx = ctx->outer_context;
9487 }
9488 if (ctx == NULL)
9489 return 0;
9490 }
1e8e9920 9491 code = OMP_CLAUSE_SHARED;
9492 }
9493 else if (flags & GOVD_PRIVATE)
9494 code = OMP_CLAUSE_PRIVATE;
9495 else if (flags & GOVD_FIRSTPRIVATE)
0b80c4b2 9496 {
9497 code = OMP_CLAUSE_FIRSTPRIVATE;
9498 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
9499 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
9500 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
9501 {
9502 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
9503 "%<target%> construct", decl);
9504 return 0;
9505 }
9506 }
3d483a94 9507 else if (flags & GOVD_LASTPRIVATE)
9508 code = OMP_CLAUSE_LASTPRIVATE;
7e5a76c8 9509 else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
bc7bff74 9510 return 0;
1e8e9920 9511 else
9512 gcc_unreachable ();
9513
98588013 9514 if (((flags & GOVD_LASTPRIVATE)
9515 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
9516 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9517 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9518
816a8c6d 9519 tree chain = *list_p;
e60a6f7b 9520 clause = build_omp_clause (input_location, code);
55d6e7cd 9521 OMP_CLAUSE_DECL (clause) = decl;
816a8c6d 9522 OMP_CLAUSE_CHAIN (clause) = chain;
1e8e9920 9523 if (private_debug)
9524 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
fd6481cf 9525 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
9526 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
98588013 9527 else if (code == OMP_CLAUSE_SHARED
9528 && (flags & GOVD_WRITTEN) == 0
9529 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9530 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
585aefbb 9531 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
9532 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
43895be5 9533 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
9534 {
9535 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
9536 OMP_CLAUSE_DECL (nc) = decl;
9537 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
9538 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
9539 OMP_CLAUSE_DECL (clause)
9540 = build_simple_mem_ref_loc (input_location, decl);
9541 OMP_CLAUSE_DECL (clause)
9542 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
9543 build_int_cst (build_pointer_type (char_type_node), 0));
9544 OMP_CLAUSE_SIZE (clause) = size_zero_node;
9545 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9546 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
9547 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
9548 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
816a8c6d 9549 OMP_CLAUSE_CHAIN (nc) = chain;
43895be5 9550 OMP_CLAUSE_CHAIN (clause) = nc;
9551 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9552 gimplify_omp_ctxp = ctx->outer_context;
9553 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
9554 pre_p, NULL, is_gimple_val, fb_rvalue);
9555 gimplify_omp_ctxp = ctx;
9556 }
bc7bff74 9557 else if (code == OMP_CLAUSE_MAP)
9558 {
6acf639f 9559 int kind;
9560 /* Not all combinations of these GOVD_MAP flags are actually valid. */
9561 switch (flags & (GOVD_MAP_TO_ONLY
9562 | GOVD_MAP_FORCE
7e5a76c8 9563 | GOVD_MAP_FORCE_PRESENT
9564 | GOVD_MAP_ALLOC_ONLY
9565 | GOVD_MAP_FROM_ONLY))
6acf639f 9566 {
9567 case 0:
9568 kind = GOMP_MAP_TOFROM;
9569 break;
9570 case GOVD_MAP_FORCE:
9571 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
9572 break;
9573 case GOVD_MAP_TO_ONLY:
9574 kind = GOMP_MAP_TO;
9575 break;
7e5a76c8 9576 case GOVD_MAP_FROM_ONLY:
9577 kind = GOMP_MAP_FROM;
9578 break;
9579 case GOVD_MAP_ALLOC_ONLY:
9580 kind = GOMP_MAP_ALLOC;
9581 break;
6acf639f 9582 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
9583 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
9584 break;
9585 case GOVD_MAP_FORCE_PRESENT:
9586 kind = GOMP_MAP_FORCE_PRESENT;
9587 break;
9588 default:
9589 gcc_unreachable ();
9590 }
96eefdee 9591 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
bc7bff74 9592 if (DECL_SIZE (decl)
9593 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
9594 {
9595 tree decl2 = DECL_VALUE_EXPR (decl);
9596 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9597 decl2 = TREE_OPERAND (decl2, 0);
9598 gcc_assert (DECL_P (decl2));
9599 tree mem = build_simple_mem_ref (decl2);
9600 OMP_CLAUSE_DECL (clause) = mem;
9601 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9602 if (gimplify_omp_ctxp->outer_context)
9603 {
9604 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
9605 omp_notice_variable (ctx, decl2, true);
9606 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
9607 }
9608 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
9609 OMP_CLAUSE_MAP);
9610 OMP_CLAUSE_DECL (nc) = decl;
9611 OMP_CLAUSE_SIZE (nc) = size_zero_node;
43895be5 9612 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
9613 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
9614 else
9615 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
bc7bff74 9616 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
9617 OMP_CLAUSE_CHAIN (clause) = nc;
9618 }
9561765e 9619 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
9620 && lang_hooks.decls.omp_privatize_by_reference (decl))
9621 {
9622 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
9623 OMP_CLAUSE_SIZE (clause)
9624 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
9625 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9626 gimplify_omp_ctxp = ctx->outer_context;
9627 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
9628 pre_p, NULL, is_gimple_val, fb_rvalue);
9629 gimplify_omp_ctxp = ctx;
9630 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
9631 OMP_CLAUSE_MAP);
9632 OMP_CLAUSE_DECL (nc) = decl;
9633 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9634 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
9635 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
9636 OMP_CLAUSE_CHAIN (clause) = nc;
9637 }
c3f3b68d 9638 else
9639 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
bc7bff74 9640 }
9d172013 9641 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
9642 {
9643 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
9644 OMP_CLAUSE_DECL (nc) = decl;
9645 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
816a8c6d 9646 OMP_CLAUSE_CHAIN (nc) = chain;
9d172013 9647 OMP_CLAUSE_CHAIN (clause) = nc;
691447ab 9648 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9649 gimplify_omp_ctxp = ctx->outer_context;
9650 lang_hooks.decls.omp_finish_clause (nc, pre_p);
9651 gimplify_omp_ctxp = ctx;
9d172013 9652 }
1e8e9920 9653 *list_p = clause;
691447ab 9654 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9655 gimplify_omp_ctxp = ctx->outer_context;
9656 lang_hooks.decls.omp_finish_clause (clause, pre_p);
816a8c6d 9657 if (gimplify_omp_ctxp)
9658 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
9659 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
9660 && DECL_P (OMP_CLAUSE_SIZE (clause)))
9661 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
9662 true);
691447ab 9663 gimplify_omp_ctxp = ctx;
1e8e9920 9664 return 0;
9665}
9666
9667static void
98588013 9668gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
43895be5 9669 enum tree_code code)
1e8e9920 9670{
9671 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9672 tree c, decl;
9673
98588013 9674 if (body)
9675 {
9676 struct gimplify_omp_ctx *octx;
9677 for (octx = ctx; octx; octx = octx->outer_context)
9678 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
9679 break;
9680 if (octx)
9681 {
9682 struct walk_stmt_info wi;
9683 memset (&wi, 0, sizeof (wi));
9684 walk_gimple_seq (body, omp_find_stores_stmt,
9685 omp_find_stores_op, &wi);
9686 }
9687 }
1e8e9920 9688 while ((c = *list_p) != NULL)
9689 {
9690 splay_tree_node n;
9691 bool remove = false;
9692
55d6e7cd 9693 switch (OMP_CLAUSE_CODE (c))
1e8e9920 9694 {
0b80c4b2 9695 case OMP_CLAUSE_FIRSTPRIVATE:
9696 if ((ctx->region_type & ORT_TARGET)
9697 && (ctx->region_type & ORT_ACC) == 0
9698 && TYPE_ATOMIC (strip_array_types
9699 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
9700 {
9701 error_at (OMP_CLAUSE_LOCATION (c),
9702 "%<_Atomic%> %qD in %<firstprivate%> clause on "
9703 "%<target%> construct", OMP_CLAUSE_DECL (c));
9704 remove = true;
9705 break;
9706 }
9707 /* FALLTHRU */
1e8e9920 9708 case OMP_CLAUSE_PRIVATE:
9709 case OMP_CLAUSE_SHARED:
3d483a94 9710 case OMP_CLAUSE_LINEAR:
1e8e9920 9711 decl = OMP_CLAUSE_DECL (c);
9712 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9713 remove = !(n->value & GOVD_SEEN);
9714 if (! remove)
9715 {
55d6e7cd 9716 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
1e8e9920 9717 if ((n->value & GOVD_DEBUG_PRIVATE)
9718 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
9719 {
9720 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
9721 || ((n->value & GOVD_DATA_SHARE_CLASS)
2be704a8 9722 == GOVD_SHARED));
55d6e7cd 9723 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
1e8e9920 9724 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
9725 }
98588013 9726 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
9727 && (n->value & GOVD_WRITTEN) == 0
9728 && DECL_P (decl)
9729 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9730 OMP_CLAUSE_SHARED_READONLY (c) = 1;
9731 else if (DECL_P (decl)
9732 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
159350cd 9733 && (n->value & GOVD_WRITTEN) != 0)
98588013 9734 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9735 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
9736 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9737 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
1e8e9920 9738 }
9739 break;
9740
9741 case OMP_CLAUSE_LASTPRIVATE:
9742 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
9743 accurately reflect the presence of a FIRSTPRIVATE clause. */
9744 decl = OMP_CLAUSE_DECL (c);
9745 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9746 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
9747 = (n->value & GOVD_FIRSTPRIVATE) != 0;
44b49e6b 9748 if (code == OMP_DISTRIBUTE
9749 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
9561765e 9750 {
9751 remove = true;
9752 error_at (OMP_CLAUSE_LOCATION (c),
9753 "same variable used in %<firstprivate%> and "
9754 "%<lastprivate%> clauses on %<distribute%> "
9755 "construct");
9756 }
98588013 9757 if (!remove
9758 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
9759 && DECL_P (decl)
9760 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9761 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
1e8e9920 9762 break;
48e1416a 9763
bc7bff74 9764 case OMP_CLAUSE_ALIGNED:
9765 decl = OMP_CLAUSE_DECL (c);
9766 if (!is_global_var (decl))
9767 {
9768 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9769 remove = n == NULL || !(n->value & GOVD_SEEN);
9770 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
9771 {
9772 struct gimplify_omp_ctx *octx;
9773 if (n != NULL
9774 && (n->value & (GOVD_DATA_SHARE_CLASS
9775 & ~GOVD_FIRSTPRIVATE)))
9776 remove = true;
9777 else
9778 for (octx = ctx->outer_context; octx;
9779 octx = octx->outer_context)
9780 {
9781 n = splay_tree_lookup (octx->variables,
9782 (splay_tree_key) decl);
9783 if (n == NULL)
9784 continue;
9785 if (n->value & GOVD_LOCAL)
9786 break;
9787 /* We have to avoid assigning a shared variable
9788 to itself when trying to add
9789 __builtin_assume_aligned. */
9790 if (n->value & GOVD_SHARED)
9791 {
9792 remove = true;
9793 break;
9794 }
9795 }
9796 }
9797 }
9798 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
9799 {
9800 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9801 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
9802 remove = true;
9803 }
9804 break;
9805
7e5a76c8 9806 case OMP_CLAUSE_NONTEMPORAL:
9807 decl = OMP_CLAUSE_DECL (c);
9808 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9809 remove = n == NULL || !(n->value & GOVD_SEEN);
9810 break;
9811
bc7bff74 9812 case OMP_CLAUSE_MAP:
9561765e 9813 if (code == OMP_TARGET_EXIT_DATA
9814 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
9815 {
9816 remove = true;
9817 break;
9818 }
bc7bff74 9819 decl = OMP_CLAUSE_DECL (c);
6d6a3fc3 9820 /* Data clauses associated with acc parallel reductions must be
0c302595 9821 compatible with present_or_copy. Warn and adjust the clause
9822 if that is not the case. */
9823 if (ctx->region_type == ORT_ACC_PARALLEL)
9824 {
9825 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
9826 n = NULL;
9827
9828 if (DECL_P (t))
9829 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
9830
9831 if (n && (n->value & GOVD_REDUCTION))
9832 {
9833 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
9834
9835 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
9836 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
9837 && kind != GOMP_MAP_FORCE_PRESENT
9838 && kind != GOMP_MAP_POINTER)
9839 {
9840 warning_at (OMP_CLAUSE_LOCATION (c), 0,
9841 "incompatible data clause with reduction "
85b9be9b 9842 "on %qE; promoting to %<present_or_copy%>",
0c302595 9843 DECL_NAME (t));
9844 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
9845 }
9846 }
9847 }
bc7bff74 9848 if (!DECL_P (decl))
43895be5 9849 {
9850 if ((ctx->region_type & ORT_TARGET) != 0
9851 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
9852 {
9853 if (TREE_CODE (decl) == INDIRECT_REF
9854 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
9855 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9856 == REFERENCE_TYPE))
9857 decl = TREE_OPERAND (decl, 0);
9858 if (TREE_CODE (decl) == COMPONENT_REF)
9859 {
9860 while (TREE_CODE (decl) == COMPONENT_REF)
9861 decl = TREE_OPERAND (decl, 0);
9862 if (DECL_P (decl))
9863 {
9864 n = splay_tree_lookup (ctx->variables,
9865 (splay_tree_key) decl);
9866 if (!(n->value & GOVD_SEEN))
9867 remove = true;
9868 }
9869 }
9870 }
9871 break;
9872 }
bc7bff74 9873 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
43895be5 9874 if ((ctx->region_type & ORT_TARGET) != 0
9875 && !(n->value & GOVD_SEEN)
c0998828 9876 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
b5c83b44 9877 && (!is_global_var (decl)
9878 || !lookup_attribute ("omp declare target link",
9879 DECL_ATTRIBUTES (decl))))
43895be5 9880 {
9881 remove = true;
9882 /* For struct element mapping, if struct is never referenced
9883 in target block and none of the mapping has always modifier,
9884 remove all the struct element mappings, which immediately
9885 follow the GOMP_MAP_STRUCT map clause. */
9886 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
9887 {
9888 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
9889 while (cnt--)
9890 OMP_CLAUSE_CHAIN (c)
9891 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
9892 }
9893 }
9894 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
9895 && code == OMP_TARGET_EXIT_DATA)
bc7bff74 9896 remove = true;
9897 else if (DECL_SIZE (decl)
9898 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
43895be5 9899 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
9561765e 9900 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
9901 && (OMP_CLAUSE_MAP_KIND (c)
9902 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
bc7bff74 9903 {
ca4c3545 9904 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
9905 for these, TREE_CODE (DECL_SIZE (decl)) will always be
9906 INTEGER_CST. */
9907 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
9908
bc7bff74 9909 tree decl2 = DECL_VALUE_EXPR (decl);
9910 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9911 decl2 = TREE_OPERAND (decl2, 0);
9912 gcc_assert (DECL_P (decl2));
9913 tree mem = build_simple_mem_ref (decl2);
9914 OMP_CLAUSE_DECL (c) = mem;
9915 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9916 if (ctx->outer_context)
9917 {
9918 omp_notice_variable (ctx->outer_context, decl2, true);
9919 omp_notice_variable (ctx->outer_context,
9920 OMP_CLAUSE_SIZE (c), true);
9921 }
43895be5 9922 if (((ctx->region_type & ORT_TARGET) != 0
9923 || !ctx->target_firstprivatize_array_bases)
9924 && ((n->value & GOVD_SEEN) == 0
9925 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
9926 {
9927 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9928 OMP_CLAUSE_MAP);
9929 OMP_CLAUSE_DECL (nc) = decl;
9930 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9931 if (ctx->target_firstprivatize_array_bases)
9932 OMP_CLAUSE_SET_MAP_KIND (nc,
9933 GOMP_MAP_FIRSTPRIVATE_POINTER);
9934 else
9935 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
9936 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
9937 OMP_CLAUSE_CHAIN (c) = nc;
9938 c = nc;
9939 }
9940 }
9941 else
9942 {
9943 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9944 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
9561765e 9945 gcc_assert ((n->value & GOVD_SEEN) == 0
9946 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
9947 == 0));
bc7bff74 9948 }
9949 break;
9950
9951 case OMP_CLAUSE_TO:
9952 case OMP_CLAUSE_FROM:
ca4c3545 9953 case OMP_CLAUSE__CACHE_:
bc7bff74 9954 decl = OMP_CLAUSE_DECL (c);
9955 if (!DECL_P (decl))
9956 break;
9957 if (DECL_SIZE (decl)
9958 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
9959 {
9960 tree decl2 = DECL_VALUE_EXPR (decl);
9961 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9962 decl2 = TREE_OPERAND (decl2, 0);
9963 gcc_assert (DECL_P (decl2));
9964 tree mem = build_simple_mem_ref (decl2);
9965 OMP_CLAUSE_DECL (c) = mem;
9966 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9967 if (ctx->outer_context)
9968 {
9969 omp_notice_variable (ctx->outer_context, decl2, true);
9970 omp_notice_variable (ctx->outer_context,
9971 OMP_CLAUSE_SIZE (c), true);
9972 }
9973 }
c3f3b68d 9974 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9975 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
bc7bff74 9976 break;
9977
1e8e9920 9978 case OMP_CLAUSE_REDUCTION:
7e5a76c8 9979 case OMP_CLAUSE_IN_REDUCTION:
9980 case OMP_CLAUSE_TASK_REDUCTION:
98588013 9981 decl = OMP_CLAUSE_DECL (c);
0c302595 9982 /* OpenACC reductions need a present_or_copy data clause.
a209c8cc 9983	 Add one if necessary.  Emit an error when the reduction is private. */
0c302595 9984 if (ctx->region_type == ORT_ACC_PARALLEL)
9985 {
9986 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9987 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
a209c8cc 9988 {
9989 remove = true;
9990 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
9991 "reduction on %qE", DECL_NAME (decl));
9992 }
0c302595 9993 else if ((n->value & GOVD_MAP) == 0)
9994 {
9995 tree next = OMP_CLAUSE_CHAIN (c);
9996 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
9997 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
9998 OMP_CLAUSE_DECL (nc) = decl;
9999 OMP_CLAUSE_CHAIN (c) = nc;
10000 lang_hooks.decls.omp_finish_clause (nc, pre_p);
10001 while (1)
10002 {
10003 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
10004 if (OMP_CLAUSE_CHAIN (nc) == NULL)
10005 break;
10006 nc = OMP_CLAUSE_CHAIN (nc);
10007 }
10008 OMP_CLAUSE_CHAIN (nc) = next;
10009 n->value |= GOVD_MAP;
10010 }
10011 }
98588013 10012 if (DECL_P (decl)
10013 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10014 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10015 break;
1e8e9920 10016 case OMP_CLAUSE_COPYIN:
10017 case OMP_CLAUSE_COPYPRIVATE:
10018 case OMP_CLAUSE_IF:
10019 case OMP_CLAUSE_NUM_THREADS:
bc7bff74 10020 case OMP_CLAUSE_NUM_TEAMS:
10021 case OMP_CLAUSE_THREAD_LIMIT:
10022 case OMP_CLAUSE_DIST_SCHEDULE:
10023 case OMP_CLAUSE_DEVICE:
1e8e9920 10024 case OMP_CLAUSE_SCHEDULE:
10025 case OMP_CLAUSE_NOWAIT:
10026 case OMP_CLAUSE_ORDERED:
10027 case OMP_CLAUSE_DEFAULT:
fd6481cf 10028 case OMP_CLAUSE_UNTIED:
10029 case OMP_CLAUSE_COLLAPSE:
2169f33b 10030 case OMP_CLAUSE_FINAL:
10031 case OMP_CLAUSE_MERGEABLE:
bc7bff74 10032 case OMP_CLAUSE_PROC_BIND:
3d483a94 10033 case OMP_CLAUSE_SAFELEN:
43895be5 10034 case OMP_CLAUSE_SIMDLEN:
bc7bff74 10035 case OMP_CLAUSE_DEPEND:
43895be5 10036 case OMP_CLAUSE_PRIORITY:
10037 case OMP_CLAUSE_GRAINSIZE:
10038 case OMP_CLAUSE_NUM_TASKS:
10039 case OMP_CLAUSE_NOGROUP:
10040 case OMP_CLAUSE_THREADS:
10041 case OMP_CLAUSE_SIMD:
10042 case OMP_CLAUSE_HINT:
10043 case OMP_CLAUSE_DEFAULTMAP:
10044 case OMP_CLAUSE_USE_DEVICE_PTR:
10045 case OMP_CLAUSE_IS_DEVICE_PTR:
ca4c3545 10046 case OMP_CLAUSE_ASYNC:
10047 case OMP_CLAUSE_WAIT:
ca4c3545 10048 case OMP_CLAUSE_INDEPENDENT:
10049 case OMP_CLAUSE_NUM_GANGS:
10050 case OMP_CLAUSE_NUM_WORKERS:
10051 case OMP_CLAUSE_VECTOR_LENGTH:
10052 case OMP_CLAUSE_GANG:
10053 case OMP_CLAUSE_WORKER:
10054 case OMP_CLAUSE_VECTOR:
10055 case OMP_CLAUSE_AUTO:
10056 case OMP_CLAUSE_SEQ:
ef014f95 10057 case OMP_CLAUSE_TILE:
737cc978 10058 case OMP_CLAUSE_IF_PRESENT:
10059 case OMP_CLAUSE_FINALIZE:
1e8e9920 10060 break;
10061
10062 default:
10063 gcc_unreachable ();
10064 }
10065
10066 if (remove)
10067 *list_p = OMP_CLAUSE_CHAIN (c);
10068 else
10069 list_p = &OMP_CLAUSE_CHAIN (c);
10070 }
10071
10072 /* Add in any implicit data sharing. */
691447ab 10073 struct gimplify_adjust_omp_clauses_data data;
10074 data.list_p = list_p;
10075 data.pre_p = pre_p;
10076 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
48e1416a 10077
1e8e9920 10078 gimplify_omp_ctxp = ctx->outer_context;
10079 delete_omp_context (ctx);
10080}
10081
ca4c3545 10082/* Gimplify OACC_CACHE. */
10083
10084static void
10085gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
10086{
10087 tree expr = *expr_p;
10088
b656be3a 10089 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
43895be5 10090 OACC_CACHE);
98588013 10091 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
10092 OACC_CACHE);
ca4c3545 10093
10094 /* TODO: Do something sensible with this information. */
10095
10096 *expr_p = NULL_TREE;
10097}
10098
2fc5e987 10099/* Helper function of gimplify_oacc_declare. The helper's purpose is to,
10100 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
10101 kind. The entry kind will replace the one in CLAUSE, while the exit
10102 kind will be used in a new omp_clause and returned to the caller. */
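/* For example, a clause whose map kind is GOMP_MAP_TOFROM is split by the
   switch below: the entry kind becomes GOMP_MAP_TO and a new clause with
   the exit kind GOMP_MAP_FROM is built and returned.  */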
10103
10104static tree
10105gimplify_oacc_declare_1 (tree clause)
10106{
10107 HOST_WIDE_INT kind, new_op;
10108 bool ret = false;
10109 tree c = NULL;
10110
10111 kind = OMP_CLAUSE_MAP_KIND (clause);
10112
10113 switch (kind)
10114 {
10115 case GOMP_MAP_ALLOC:
737cc978 10116 new_op = GOMP_MAP_RELEASE;
2fc5e987 10117 ret = true;
10118 break;
10119
10120 case GOMP_MAP_FROM:
10121 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
10122 new_op = GOMP_MAP_FROM;
10123 ret = true;
10124 break;
10125
10126 case GOMP_MAP_TOFROM:
10127 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
10128 new_op = GOMP_MAP_FROM;
10129 ret = true;
10130 break;
10131
10132 case GOMP_MAP_DEVICE_RESIDENT:
10133 case GOMP_MAP_FORCE_DEVICEPTR:
10134 case GOMP_MAP_FORCE_PRESENT:
10135 case GOMP_MAP_LINK:
10136 case GOMP_MAP_POINTER:
10137 case GOMP_MAP_TO:
10138 break;
10139
10140 default:
10141 gcc_unreachable ();
10142 break;
10143 }
10144
10145 if (ret)
10146 {
10147 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
10148 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
10149 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
10150 }
10151
10152 return c;
10153}
10154
10155/* Gimplify OACC_DECLARE. */
10156
10157static void
10158gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
10159{
10160 tree expr = *expr_p;
10161 gomp_target *stmt;
c4c30562 10162 tree clauses, t, decl;
2fc5e987 10163
10164 clauses = OACC_DECLARE_CLAUSES (expr);
10165
10166 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
c4c30562 10167 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
2fc5e987 10168
10169 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
10170 {
c4c30562 10171 decl = OMP_CLAUSE_DECL (t);
2fc5e987 10172
10173 if (TREE_CODE (decl) == MEM_REF)
c4c30562 10174 decl = TREE_OPERAND (decl, 0);
10175
10176 if (VAR_P (decl) && !is_oacc_declared (decl))
10177 {
10178 tree attr = get_identifier ("oacc declare target");
10179 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
10180 DECL_ATTRIBUTES (decl));
10181 }
2fc5e987 10182
53e9c5c4 10183 if (VAR_P (decl)
2fc5e987 10184 && !is_global_var (decl)
10185 && DECL_CONTEXT (decl) == current_function_decl)
10186 {
10187 tree c = gimplify_oacc_declare_1 (t);
10188 if (c)
10189 {
10190 if (oacc_declare_returns == NULL)
10191 oacc_declare_returns = new hash_map<tree, tree>;
10192
10193 oacc_declare_returns->put (decl, c);
10194 }
10195 }
10196
c4c30562 10197 if (gimplify_omp_ctxp)
10198 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
2fc5e987 10199 }
10200
10201 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
10202 clauses);
10203
10204 gimplify_seq_add_stmt (pre_p, stmt);
10205
10206 *expr_p = NULL_TREE;
10207}
10208
1e8e9920 10209/* Gimplify the contents of an OMP_PARALLEL statement. This involves
10210 gimplification of the body, as well as scanning the body for used
10211 variables. We need to do this scan now, because variable-sized
10212 decls will be decomposed during gimplification. */
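/* For instance, a combined construct such as
     #pragma omp parallel for
   arrives here with OMP_PARALLEL_COMBINED set; its clauses are then scanned
   with ORT_COMBINED_PARALLEL and the generated GIMPLE_OMP_PARALLEL is
   marked GF_OMP_PARALLEL_COMBINED.  */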
10213
75a70cf9 10214static void
10215gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
1e8e9920 10216{
10217 tree expr = *expr_p;
42acab1c 10218 gimple *g;
75a70cf9 10219 gimple_seq body = NULL;
1e8e9920 10220
fd6481cf 10221 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
10222 OMP_PARALLEL_COMBINED (expr)
10223 ? ORT_COMBINED_PARALLEL
43895be5 10224 : ORT_PARALLEL, OMP_PARALLEL);
1e8e9920 10225
8a4a28a8 10226 push_gimplify_context ();
1e8e9920 10227
75a70cf9 10228 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
10229 if (gimple_code (g) == GIMPLE_BIND)
10230 pop_gimplify_context (g);
773c5ba7 10231 else
75a70cf9 10232 pop_gimplify_context (NULL);
1e8e9920 10233
98588013 10234 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
43895be5 10235 OMP_PARALLEL);
1e8e9920 10236
75a70cf9 10237 g = gimple_build_omp_parallel (body,
10238 OMP_PARALLEL_CLAUSES (expr),
10239 NULL_TREE, NULL_TREE);
10240 if (OMP_PARALLEL_COMBINED (expr))
10241 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
10242 gimplify_seq_add_stmt (pre_p, g);
10243 *expr_p = NULL_TREE;
1e8e9920 10244}
10245
fd6481cf 10246/* Gimplify the contents of an OMP_TASK statement. This involves
10247 gimplification of the body, as well as scanning the body for used
10248 variables. We need to do this scan now, because variable-sized
10249 decls will be decomposed during gimplification. */
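/* Note that an OMP_TASK with a NULL body represents a '#pragma omp taskwait'
   with depend clauses; in that case no body is gimplified and the generated
   GIMPLE_OMP_TASK is marked with taskwait_p below.  */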
1e8e9920 10250
75a70cf9 10251static void
10252gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
1e8e9920 10253{
fd6481cf 10254 tree expr = *expr_p;
42acab1c 10255 gimple *g;
75a70cf9 10256 gimple_seq body = NULL;
1e8e9920 10257
7e5a76c8 10258 if (OMP_TASK_BODY (expr) == NULL_TREE)
10259 for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
10260 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10261 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
10262 {
10263 error_at (OMP_CLAUSE_LOCATION (c),
10264 "%<mutexinoutset%> kind in %<depend%> clause on a "
10265 "%<taskwait%> construct");
10266 break;
10267 }
10268
b0b48c1d 10269 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
4954efd4 10270 omp_find_clause (OMP_TASK_CLAUSES (expr),
b0b48c1d 10271 OMP_CLAUSE_UNTIED)
43895be5 10272 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
1e8e9920 10273
7e5a76c8 10274 if (OMP_TASK_BODY (expr))
10275 {
10276 push_gimplify_context ();
1e8e9920 10277
7e5a76c8 10278 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
10279 if (gimple_code (g) == GIMPLE_BIND)
10280 pop_gimplify_context (g);
10281 else
10282 pop_gimplify_context (NULL);
10283 }
1e8e9920 10284
98588013 10285 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
10286 OMP_TASK);
79acaae1 10287
75a70cf9 10288 g = gimple_build_omp_task (body,
10289 OMP_TASK_CLAUSES (expr),
10290 NULL_TREE, NULL_TREE,
10291 NULL_TREE, NULL_TREE, NULL_TREE);
7e5a76c8 10292 if (OMP_TASK_BODY (expr) == NULL_TREE)
10293 gimple_omp_task_set_taskwait_p (g, true);
75a70cf9 10294 gimplify_seq_add_stmt (pre_p, g);
10295 *expr_p = NULL_TREE;
fd6481cf 10296}
10297
bc7bff74 10298/* Helper function of gimplify_omp_for: find the OMP_FOR or OMP_SIMD
ed321d14 10299 with non-NULL OMP_FOR_INIT.  Also fill in the pdata array:
 10300 pdata[0] is non-NULL if there is anything non-trivial in between,
 10301 pdata[1] is the address of the OMP_PARALLEL in between if any,
 10302 pdata[2] is the address of the OMP_FOR in between if any, and
 10303 pdata[3] is the address of the inner OMP_FOR/OMP_SIMD. */
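/* For example, for a combined
     #pragma omp distribute parallel for simd
   the outermost OMP_DISTRIBUTE has NULL OMP_FOR_INIT; walking its body
   records the OMP_PARALLEL in pdata[1] and the middle OMP_FOR (also with
   NULL init) in pdata[2], and returns the innermost OMP_SIMD, whose address
   is stored in pdata[3].  pdata[0] is set when statements or variable
   declarations appear in between the directives.  */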
bc7bff74 10304
10305static tree
ed321d14 10306find_combined_omp_for (tree *tp, int *walk_subtrees, void *data)
bc7bff74 10307{
ed321d14 10308 tree **pdata = (tree **) data;
bc7bff74 10309 *walk_subtrees = 0;
10310 switch (TREE_CODE (*tp))
10311 {
10312 case OMP_FOR:
ed321d14 10313 if (OMP_FOR_INIT (*tp) != NULL_TREE)
10314 {
10315 pdata[3] = tp;
10316 return *tp;
10317 }
10318 pdata[2] = tp;
bc7bff74 10319 *walk_subtrees = 1;
ed321d14 10320 break;
bc7bff74 10321 case OMP_SIMD:
10322 if (OMP_FOR_INIT (*tp) != NULL_TREE)
ed321d14 10323 {
10324 pdata[3] = tp;
10325 return *tp;
10326 }
bc7bff74 10327 break;
10328 case BIND_EXPR:
ed321d14 10329 if (BIND_EXPR_VARS (*tp)
10330 || (BIND_EXPR_BLOCK (*tp)
10331 && BLOCK_VARS (BIND_EXPR_BLOCK (*tp))))
10332 pdata[0] = tp;
10333 *walk_subtrees = 1;
10334 break;
bc7bff74 10335 case STATEMENT_LIST:
ed321d14 10336 if (!tsi_one_before_end_p (tsi_start (*tp)))
10337 pdata[0] = tp;
10338 *walk_subtrees = 1;
10339 break;
10340 case TRY_FINALLY_EXPR:
10341 pdata[0] = tp;
10342 *walk_subtrees = 1;
10343 break;
bc7bff74 10344 case OMP_PARALLEL:
ed321d14 10345 pdata[1] = tp;
bc7bff74 10346 *walk_subtrees = 1;
10347 break;
10348 default:
10349 break;
10350 }
10351 return NULL_TREE;
10352}
10353
fd6481cf 10354/* Gimplify the gross structure of an OMP_FOR statement. */
10355
10356static enum gimplify_status
75a70cf9 10357gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
fd6481cf 10358{
441f9074 10359 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
8458f4ca 10360 enum gimplify_status ret = GS_ALL_DONE;
10361 enum gimplify_status tret;
1a91d914 10362 gomp_for *gfor;
75a70cf9 10363 gimple_seq for_body, for_pre_body;
fd6481cf 10364 int i;
3d483a94 10365 bitmap has_decl_expr = NULL;
43895be5 10366 enum omp_region_type ort = ORT_WORKSHARE;
fd6481cf 10367
bc7bff74 10368 orig_for_stmt = for_stmt = *expr_p;
fd6481cf 10369
ed321d14 10370 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
10371 {
10372 tree *data[4] = { NULL, NULL, NULL, NULL };
10373 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
10374 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
10375 find_combined_omp_for, data, NULL);
10376 if (inner_for_stmt == NULL_TREE)
10377 {
10378 gcc_assert (seen_error ());
10379 *expr_p = NULL_TREE;
10380 return GS_ERROR;
10381 }
10382 if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
10383 {
10384 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
10385 &OMP_FOR_PRE_BODY (for_stmt));
10386 OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
10387 }
10388 if (OMP_FOR_PRE_BODY (inner_for_stmt))
10389 {
10390 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
10391 &OMP_FOR_PRE_BODY (for_stmt));
10392 OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
10393 }
10394
10395 if (data[0])
10396 {
10397 /* We have some statements or variable declarations in between
10398 the composite construct directives. Move them around the
10399 inner_for_stmt. */
10400 data[0] = expr_p;
10401 for (i = 0; i < 3; i++)
10402 if (data[i])
10403 {
10404 tree t = *data[i];
10405 if (i < 2 && data[i + 1] == &OMP_BODY (t))
10406 data[i + 1] = data[i];
10407 *data[i] = OMP_BODY (t);
10408 tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
10409 NULL_TREE, make_node (BLOCK));
10410 OMP_BODY (t) = body;
10411 append_to_statement_list_force (inner_for_stmt,
10412 &BIND_EXPR_BODY (body));
10413 *data[3] = t;
10414 data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
10415 gcc_assert (*data[3] == inner_for_stmt);
10416 }
10417 return GS_OK;
10418 }
10419
10420 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
10421 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
10422 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
7e5a76c8 10423 i)) == TREE_LIST
10424 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
10425 i)))
ed321d14 10426 {
10427 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
10428 /* Class iterators aren't allowed on OMP_SIMD, so the only
10429 case we need to solve is distribute parallel for. */
10430 gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
10431 && TREE_CODE (for_stmt) == OMP_DISTRIBUTE
10432 && data[1]);
10433 tree orig_decl = TREE_PURPOSE (orig);
10434 tree last = TREE_VALUE (orig);
10435 tree *pc;
10436 for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
10437 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
10438 if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
10439 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
10440 && OMP_CLAUSE_DECL (*pc) == orig_decl)
10441 break;
10442 if (*pc == NULL_TREE)
10443 ;
10444 else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
10445 {
10446 /* private clause will appear only on inner_for_stmt.
10447 Change it into firstprivate, and add private clause
10448 on for_stmt. */
10449 tree c = copy_node (*pc);
10450 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
10451 OMP_FOR_CLAUSES (for_stmt) = c;
10452 OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
10453 lang_hooks.decls.omp_finish_clause (*pc, pre_p);
10454 }
10455 else
10456 {
10457 /* lastprivate clause will appear on both inner_for_stmt
10458 and for_stmt. Add firstprivate clause to
10459 inner_for_stmt. */
10460 tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
10461 OMP_CLAUSE_FIRSTPRIVATE);
10462 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
10463 OMP_CLAUSE_CHAIN (c) = *pc;
10464 *pc = c;
10465 lang_hooks.decls.omp_finish_clause (*pc, pre_p);
10466 }
10467 tree c = build_omp_clause (UNKNOWN_LOCATION,
10468 OMP_CLAUSE_FIRSTPRIVATE);
10469 OMP_CLAUSE_DECL (c) = last;
10470 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
10471 OMP_PARALLEL_CLAUSES (*data[1]) = c;
10472 c = build_omp_clause (UNKNOWN_LOCATION,
10473 *pc ? OMP_CLAUSE_SHARED
10474 : OMP_CLAUSE_FIRSTPRIVATE);
10475 OMP_CLAUSE_DECL (c) = orig_decl;
10476 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
10477 OMP_PARALLEL_CLAUSES (*data[1]) = c;
10478 }
7e5a76c8 10479	  /* Similarly, take care of C++ range-for temporaries; those should
 10480	     be firstprivate on the OMP_PARALLEL, if any. */
10481 if (data[1])
10482 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
10483 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
10484 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
10485 i)) == TREE_LIST
10486 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
10487 i)))
10488 {
10489 tree orig
10490 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
10491 tree v = TREE_CHAIN (orig);
10492 tree c = build_omp_clause (UNKNOWN_LOCATION,
10493 OMP_CLAUSE_FIRSTPRIVATE);
10494 /* First add firstprivate clause for the __for_end artificial
10495 decl. */
10496 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
10497 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
10498 == REFERENCE_TYPE)
10499 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
10500 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
10501 OMP_PARALLEL_CLAUSES (*data[1]) = c;
10502 if (TREE_VEC_ELT (v, 0))
10503 {
10504 /* And now the same for __for_range artificial decl if it
10505 exists. */
10506 c = build_omp_clause (UNKNOWN_LOCATION,
10507 OMP_CLAUSE_FIRSTPRIVATE);
10508 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
10509 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
10510 == REFERENCE_TYPE)
10511 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
10512 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
10513 OMP_PARALLEL_CLAUSES (*data[1]) = c;
10514 }
10515 }
ed321d14 10516 }
10517
ca4c3545 10518 switch (TREE_CODE (for_stmt))
10519 {
10520 case OMP_FOR:
ca4c3545 10521 case OMP_DISTRIBUTE:
b656be3a 10522 break;
ca4c3545 10523 case OACC_LOOP:
b656be3a 10524 ort = ORT_ACC;
43895be5 10525 break;
10526 case OMP_TASKLOOP:
4954efd4 10527 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
7e5a76c8 10528 ort = ORT_UNTIED_TASKLOOP;
43895be5 10529 else
7e5a76c8 10530 ort = ORT_TASKLOOP;
ca4c3545 10531 break;
10532 case OMP_SIMD:
43895be5 10533 ort = ORT_SIMD;
ca4c3545 10534 break;
10535 default:
10536 gcc_unreachable ();
10537 }
10538
d7729e26 10539 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
10540 clause for the IV. */
43895be5 10541 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
d7729e26 10542 {
10543 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
10544 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
10545 decl = TREE_OPERAND (t, 0);
10546 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10547 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10548 && OMP_CLAUSE_DECL (c) == decl)
10549 {
10550 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
10551 break;
10552 }
10553 }
10554
43895be5 10555 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
10556 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
10557 TREE_CODE (for_stmt));
10558
5fddcf34 10559 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
10560 gimplify_omp_ctxp->distribute = true;
79acaae1 10561
75a70cf9 10562 /* Handle OMP_FOR_INIT. */
10563 for_pre_body = NULL;
ed321d14 10564 if ((ort == ORT_SIMD
10565 || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
10566 && OMP_FOR_PRE_BODY (for_stmt))
3d483a94 10567 {
10568 has_decl_expr = BITMAP_ALLOC (NULL);
10569 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
10570 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
bc7bff74 10571 == VAR_DECL)
3d483a94 10572 {
10573 t = OMP_FOR_PRE_BODY (for_stmt);
10574 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
10575 }
10576 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
10577 {
10578 tree_stmt_iterator si;
10579 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
10580 tsi_next (&si))
10581 {
10582 t = tsi_stmt (si);
10583 if (TREE_CODE (t) == DECL_EXPR
10584 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
10585 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
10586 }
10587 }
10588 }
43895be5 10589 if (OMP_FOR_PRE_BODY (for_stmt))
10590 {
10591 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
10592 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
10593 else
10594 {
10595 struct gimplify_omp_ctx ctx;
10596 memset (&ctx, 0, sizeof (ctx));
10597 ctx.region_type = ORT_NONE;
10598 gimplify_omp_ctxp = &ctx;
10599 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
10600 gimplify_omp_ctxp = NULL;
10601 }
10602 }
75a70cf9 10603 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
fd6481cf 10604
bc7bff74 10605 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
43895be5 10606 for_stmt = inner_for_stmt;
10607
10608 /* For taskloop, need to gimplify the start, end and step before the
10609 taskloop, outside of the taskloop omp context. */
10610 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
bc7bff74 10611 {
43895be5 10612 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10613 {
10614 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10615 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
10616 {
987a432c 10617 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
43895be5 10618 TREE_OPERAND (t, 1)
10619 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
987a432c 10620 gimple_seq_empty_p (for_pre_body)
10621 ? pre_p : &for_pre_body, NULL,
10622 false);
10623 /* Reference to pointer conversion is considered useless,
10624 but is significant for firstprivate clause. Force it
10625 here. */
10626 if (TREE_CODE (type) == POINTER_TYPE
10627 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1)))
10628 == REFERENCE_TYPE))
10629 {
10630 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
10631 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v,
10632 TREE_OPERAND (t, 1));
10633 gimplify_and_add (m, gimple_seq_empty_p (for_pre_body)
10634 ? pre_p : &for_pre_body);
10635 TREE_OPERAND (t, 1) = v;
10636 }
43895be5 10637 tree c = build_omp_clause (input_location,
10638 OMP_CLAUSE_FIRSTPRIVATE);
10639 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
10640 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
10641 OMP_FOR_CLAUSES (orig_for_stmt) = c;
10642 }
10643
10644 /* Handle OMP_FOR_COND. */
10645 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10646 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
10647 {
987a432c 10648 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
43895be5 10649 TREE_OPERAND (t, 1)
10650 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
10651 gimple_seq_empty_p (for_pre_body)
9ae1b28a 10652 ? pre_p : &for_pre_body, NULL,
10653 false);
987a432c 10654 /* Reference to pointer conversion is considered useless,
10655 but is significant for firstprivate clause. Force it
10656 here. */
10657 if (TREE_CODE (type) == POINTER_TYPE
10658 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1)))
10659 == REFERENCE_TYPE))
10660 {
10661 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
10662 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v,
10663 TREE_OPERAND (t, 1));
10664 gimplify_and_add (m, gimple_seq_empty_p (for_pre_body)
10665 ? pre_p : &for_pre_body);
10666 TREE_OPERAND (t, 1) = v;
10667 }
43895be5 10668 tree c = build_omp_clause (input_location,
10669 OMP_CLAUSE_FIRSTPRIVATE);
10670 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
10671 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
10672 OMP_FOR_CLAUSES (orig_for_stmt) = c;
10673 }
10674
10675 /* Handle OMP_FOR_INCR. */
10676 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10677 if (TREE_CODE (t) == MODIFY_EXPR)
10678 {
10679 decl = TREE_OPERAND (t, 0);
10680 t = TREE_OPERAND (t, 1);
10681 tree *tp = &TREE_OPERAND (t, 1);
10682 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
10683 tp = &TREE_OPERAND (t, 0);
10684
10685 if (!is_gimple_constant (*tp))
10686 {
10687 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
10688 ? pre_p : &for_pre_body;
9ae1b28a 10689 *tp = get_initialized_tmp_var (*tp, seq, NULL, false);
43895be5 10690 tree c = build_omp_clause (input_location,
10691 OMP_CLAUSE_FIRSTPRIVATE);
10692 OMP_CLAUSE_DECL (c) = *tp;
10693 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
10694 OMP_FOR_CLAUSES (orig_for_stmt) = c;
10695 }
10696 }
10697 }
10698
10699 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
10700 OMP_TASKLOOP);
bc7bff74 10701 }
10702
43895be5 10703 if (orig_for_stmt != for_stmt)
10704 gimplify_omp_ctxp->combined_loop = true;
10705
e3a19533 10706 for_body = NULL;
fd6481cf 10707 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
10708 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
10709 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
10710 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
43895be5 10711
4954efd4 10712 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
43895be5 10713 bool is_doacross = false;
10714 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
10715 {
10716 is_doacross = true;
10717 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
10718 (OMP_FOR_INIT (for_stmt))
10719 * 2);
10720 }
719a7570 10721 int collapse = 1, tile = 0;
4954efd4 10722 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
43895be5 10723 if (c)
10724 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
719a7570 10725 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
10726 if (c)
10727 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
fd6481cf 10728 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10729 {
10730 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
75a70cf9 10731 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
10732 decl = TREE_OPERAND (t, 0);
fd6481cf 10733 gcc_assert (DECL_P (decl));
10734 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
10735 || POINTER_TYPE_P (TREE_TYPE (decl)));
43895be5 10736 if (is_doacross)
10737 {
10738 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
ed321d14 10739 {
10740 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
10741 if (TREE_CODE (orig_decl) == TREE_LIST)
7e5a76c8 10742 {
10743 orig_decl = TREE_PURPOSE (orig_decl);
10744 if (!orig_decl)
10745 orig_decl = decl;
10746 }
ed321d14 10747 gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
10748 }
43895be5 10749 else
10750 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
10751 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
10752 }
fd6481cf 10753
10754 /* Make sure the iteration variable is private. */
3d483a94 10755 tree c = NULL_TREE;
2b536a17 10756 tree c2 = NULL_TREE;
bc7bff74 10757 if (orig_for_stmt != for_stmt)
ed321d14 10758 {
10759 /* Preserve this information until we gimplify the inner simd. */
10760 if (has_decl_expr
10761 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
10762 TREE_PRIVATE (t) = 1;
10763 }
43895be5 10764 else if (ort == ORT_SIMD)
3d483a94 10765 {
10766 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
43895be5 10767 (splay_tree_key) decl);
2b536a17 10768 omp_is_private (gimplify_omp_ctxp, decl,
10769 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
10770 != 1));
3d483a94 10771 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
10772 omp_notice_variable (gimplify_omp_ctxp, decl, true);
10773 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
10774 {
10775 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
10776 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
d7729e26 10777 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
ed321d14 10778 if ((has_decl_expr
10779 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
10780 || TREE_PRIVATE (t))
d7729e26 10781 {
10782 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
10783 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
10784 }
36c158de 10785 struct gimplify_omp_ctx *outer
10786 = gimplify_omp_ctxp->outer_context;
10787 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
10788 {
10789 if (outer->region_type == ORT_WORKSHARE
10790 && outer->combined_loop)
10791 {
10792 n = splay_tree_lookup (outer->variables,
10793 (splay_tree_key)decl);
10794 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
10795 {
10796 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
10797 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
10798 }
9561765e 10799 else
10800 {
10801 struct gimplify_omp_ctx *octx = outer->outer_context;
10802 if (octx
10803 && octx->region_type == ORT_COMBINED_PARALLEL
10804 && octx->outer_context
10805 && (octx->outer_context->region_type
10806 == ORT_WORKSHARE)
10807 && octx->outer_context->combined_loop)
10808 {
10809 octx = octx->outer_context;
10810 n = splay_tree_lookup (octx->variables,
10811 (splay_tree_key)decl);
10812 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
10813 {
10814 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
10815 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
10816 }
10817 }
10818 }
36c158de 10819 }
10820 }
10821
3d483a94 10822 OMP_CLAUSE_DECL (c) = decl;
10823 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
10824 OMP_FOR_CLAUSES (for_stmt) = c;
d7729e26 10825 omp_add_variable (gimplify_omp_ctxp, decl, flags);
d7729e26 10826 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
10827 {
10828 if (outer->region_type == ORT_WORKSHARE
10829 && outer->combined_loop)
10830 {
10831 if (outer->outer_context
10832 && (outer->outer_context->region_type
10833 == ORT_COMBINED_PARALLEL))
10834 outer = outer->outer_context;
10835 else if (omp_check_private (outer, decl, false))
10836 outer = NULL;
10837 }
7e5a76c8 10838 else if (((outer->region_type & ORT_TASKLOOP)
10839 == ORT_TASKLOOP)
43895be5 10840 && outer->combined_loop
10841 && !omp_check_private (gimplify_omp_ctxp,
10842 decl, false))
10843 ;
d7729e26 10844 else if (outer->region_type != ORT_COMBINED_PARALLEL)
6ca29134 10845 {
10846 omp_notice_variable (outer, decl, true);
10847 outer = NULL;
10848 }
d7729e26 10849 if (outer)
10850 {
dc72899b 10851 n = splay_tree_lookup (outer->variables,
10852 (splay_tree_key)decl);
10853 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
10854 {
10855 omp_add_variable (outer, decl,
10856 GOVD_LASTPRIVATE | GOVD_SEEN);
9561765e 10857 if (outer->region_type == ORT_COMBINED_PARALLEL
10858 && outer->outer_context
10859 && (outer->outer_context->region_type
10860 == ORT_WORKSHARE)
10861 && outer->outer_context->combined_loop)
10862 {
10863 outer = outer->outer_context;
10864 n = splay_tree_lookup (outer->variables,
10865 (splay_tree_key)decl);
10866 if (omp_check_private (outer, decl, false))
10867 outer = NULL;
10868 else if (n == NULL
10869 || ((n->value & GOVD_DATA_SHARE_CLASS)
10870 == 0))
10871 omp_add_variable (outer, decl,
10872 GOVD_LASTPRIVATE
10873 | GOVD_SEEN);
10874 else
10875 outer = NULL;
10876 }
10877 if (outer && outer->outer_context
7e5a76c8 10878 && ((outer->outer_context->region_type
10879 & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS
10880 || (((outer->region_type & ORT_TASKLOOP)
10881 == ORT_TASKLOOP)
10882 && (outer->outer_context->region_type
10883 == ORT_COMBINED_PARALLEL))))
9561765e 10884 {
10885 outer = outer->outer_context;
10886 n = splay_tree_lookup (outer->variables,
10887 (splay_tree_key)decl);
10888 if (n == NULL
10889 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
10890 omp_add_variable (outer, decl,
10891 GOVD_SHARED | GOVD_SEEN);
10892 else
10893 outer = NULL;
10894 }
10895 if (outer && outer->outer_context)
dc72899b 10896 omp_notice_variable (outer->outer_context, decl,
10897 true);
10898 }
d7729e26 10899 }
10900 }
3d483a94 10901 }
10902 else
10903 {
10904 bool lastprivate
10905 = (!has_decl_expr
44b49e6b 10906 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
ed321d14 10907 if (TREE_PRIVATE (t))
10908 lastprivate = false;
d7729e26 10909 struct gimplify_omp_ctx *outer
10910 = gimplify_omp_ctxp->outer_context;
10911 if (outer && lastprivate)
e471cc6f 10912 {
d7729e26 10913 if (outer->region_type == ORT_WORKSHARE
10914 && outer->combined_loop)
10915 {
36c158de 10916 n = splay_tree_lookup (outer->variables,
10917 (splay_tree_key)decl);
10918 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
10919 {
10920 lastprivate = false;
10921 outer = NULL;
10922 }
10923 else if (outer->outer_context
10924 && (outer->outer_context->region_type
10925 == ORT_COMBINED_PARALLEL))
d7729e26 10926 outer = outer->outer_context;
10927 else if (omp_check_private (outer, decl, false))
10928 outer = NULL;
10929 }
7e5a76c8 10930 else if (((outer->region_type & ORT_TASKLOOP)
10931 == ORT_TASKLOOP)
43895be5 10932 && outer->combined_loop
10933 && !omp_check_private (gimplify_omp_ctxp,
10934 decl, false))
10935 ;
d7729e26 10936 else if (outer->region_type != ORT_COMBINED_PARALLEL)
6ca29134 10937 {
10938 omp_notice_variable (outer, decl, true);
10939 outer = NULL;
10940 }
d7729e26 10941 if (outer)
e471cc6f 10942 {
dc72899b 10943 n = splay_tree_lookup (outer->variables,
10944 (splay_tree_key)decl);
10945 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
10946 {
10947 omp_add_variable (outer, decl,
10948 GOVD_LASTPRIVATE | GOVD_SEEN);
9561765e 10949 if (outer->region_type == ORT_COMBINED_PARALLEL
10950 && outer->outer_context
10951 && (outer->outer_context->region_type
10952 == ORT_WORKSHARE)
10953 && outer->outer_context->combined_loop)
10954 {
10955 outer = outer->outer_context;
10956 n = splay_tree_lookup (outer->variables,
10957 (splay_tree_key)decl);
10958 if (omp_check_private (outer, decl, false))
10959 outer = NULL;
10960 else if (n == NULL
10961 || ((n->value & GOVD_DATA_SHARE_CLASS)
10962 == 0))
10963 omp_add_variable (outer, decl,
10964 GOVD_LASTPRIVATE
10965 | GOVD_SEEN);
10966 else
10967 outer = NULL;
10968 }
10969 if (outer && outer->outer_context
7e5a76c8 10970 && ((outer->outer_context->region_type
10971 & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS
10972 || (((outer->region_type & ORT_TASKLOOP)
10973 == ORT_TASKLOOP)
10974 && (outer->outer_context->region_type
10975 == ORT_COMBINED_PARALLEL))))
9561765e 10976 {
10977 outer = outer->outer_context;
10978 n = splay_tree_lookup (outer->variables,
10979 (splay_tree_key)decl);
10980 if (n == NULL
10981 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
10982 omp_add_variable (outer, decl,
10983 GOVD_SHARED | GOVD_SEEN);
10984 else
10985 outer = NULL;
10986 }
10987 if (outer && outer->outer_context)
dc72899b 10988 omp_notice_variable (outer->outer_context, decl,
10989 true);
10990 }
e471cc6f 10991 }
10992 }
d7729e26 10993
3d483a94 10994 c = build_omp_clause (input_location,
10995 lastprivate ? OMP_CLAUSE_LASTPRIVATE
10996 : OMP_CLAUSE_PRIVATE);
10997 OMP_CLAUSE_DECL (c) = decl;
10998 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
2b536a17 10999 OMP_FOR_CLAUSES (for_stmt) = c;
3d483a94 11000 omp_add_variable (gimplify_omp_ctxp, decl,
11001 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
2b536a17 11002 | GOVD_EXPLICIT | GOVD_SEEN);
3d483a94 11003 c = NULL_TREE;
11004 }
11005 }
2b536a17 11006 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
fd6481cf 11007 omp_notice_variable (gimplify_omp_ctxp, decl, true);
11008 else
11009 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
11010
11011 /* If DECL is not a gimple register, create a temporary variable to act
11012 as an iteration counter. This is valid, since DECL cannot be
e471cc6f 11013 modified in the body of the loop. Similarly for any iteration vars
11014 in simd with collapse > 1 where the iterator vars must be
11015 lastprivate. */
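	  /* E.g. when the user's iteration variable is addressable and thus
	     not a gimple register, a fresh temporary VAR drives the generated
	     loop and an assignment 'decl = var' is emitted at the top of the
	     loop body below.  */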
bc7bff74 11016 if (orig_for_stmt != for_stmt)
11017 var = decl;
e471cc6f 11018 else if (!is_gimple_reg (decl)
43895be5 11019 || (ort == ORT_SIMD
11020 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
fd6481cf 11021 {
6ab9cde4 11022 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11023 /* Make sure omp_add_variable is not called on it prematurely.
11024 We call it ourselves a few lines later. */
11025 gimplify_omp_ctxp = NULL;
fd6481cf 11026 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
6ab9cde4 11027 gimplify_omp_ctxp = ctx;
75a70cf9 11028 TREE_OPERAND (t, 0) = var;
48e1416a 11029
75a70cf9 11030 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
1e8e9920 11031
43895be5 11032 if (ort == ORT_SIMD
11033 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
2b536a17 11034 {
11035 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
11036 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
11037 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
11038 OMP_CLAUSE_DECL (c2) = var;
11039 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
11040 OMP_FOR_CLAUSES (for_stmt) = c2;
11041 omp_add_variable (gimplify_omp_ctxp, var,
11042 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
11043 if (c == NULL_TREE)
11044 {
11045 c = c2;
11046 c2 = NULL_TREE;
11047 }
11048 }
11049 else
11050 omp_add_variable (gimplify_omp_ctxp, var,
11051 GOVD_PRIVATE | GOVD_SEEN);
fd6481cf 11052 }
11053 else
11054 var = decl;
35cc02b5 11055
8458f4ca 11056 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
9ae1b28a 11057 is_gimple_val, fb_rvalue, false);
8458f4ca 11058 ret = MIN (ret, tret);
75a70cf9 11059 if (ret == GS_ERROR)
11060 return ret;
1e8e9920 11061
75a70cf9 11062 /* Handle OMP_FOR_COND. */
fd6481cf 11063 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
11064 gcc_assert (COMPARISON_CLASS_P (t));
75a70cf9 11065 gcc_assert (TREE_OPERAND (t, 0) == decl);
a0147880 11066
8458f4ca 11067 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
9ae1b28a 11068 is_gimple_val, fb_rvalue, false);
8458f4ca 11069 ret = MIN (ret, tret);
79acaae1 11070
75a70cf9 11071 /* Handle OMP_FOR_INCR. */
fd6481cf 11072 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
1e8e9920 11073 switch (TREE_CODE (t))
11074 {
fd6481cf 11075 case PREINCREMENT_EXPR:
11076 case POSTINCREMENT_EXPR:
f2697631 11077 {
11078 tree decl = TREE_OPERAND (t, 0);
9580cb79 11079 /* c_omp_for_incr_canonicalize_ptr() should have been
11080 called to massage things appropriately. */
f2697631 11081 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
11082
11083 if (orig_for_stmt != for_stmt)
11084 break;
11085 t = build_int_cst (TREE_TYPE (decl), 1);
11086 if (c)
11087 OMP_CLAUSE_LINEAR_STEP (c) = t;
11088 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
11089 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
11090 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
bc7bff74 11091 break;
f2697631 11092 }
fd6481cf 11093
11094 case PREDECREMENT_EXPR:
11095 case POSTDECREMENT_EXPR:
9580cb79 11096 /* c_omp_for_incr_canonicalize_ptr() should have been
11097 called to massage things appropriately. */
11098 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
bc7bff74 11099 if (orig_for_stmt != for_stmt)
11100 break;
fd6481cf 11101 t = build_int_cst (TREE_TYPE (decl), -1);
3d483a94 11102 if (c)
11103 OMP_CLAUSE_LINEAR_STEP (c) = t;
fd6481cf 11104 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
75a70cf9 11105 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
fd6481cf 11106 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
11107 break;
11108
75a70cf9 11109 case MODIFY_EXPR:
11110 gcc_assert (TREE_OPERAND (t, 0) == decl);
11111 TREE_OPERAND (t, 0) = var;
fd6481cf 11112
75a70cf9 11113 t = TREE_OPERAND (t, 1);
fd6481cf 11114 switch (TREE_CODE (t))
1e8e9920 11115 {
fd6481cf 11116 case PLUS_EXPR:
11117 if (TREE_OPERAND (t, 1) == decl)
11118 {
11119 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
11120 TREE_OPERAND (t, 0) = var;
11121 break;
11122 }
11123
11124 /* Fallthru. */
11125 case MINUS_EXPR:
11126 case POINTER_PLUS_EXPR:
11127 gcc_assert (TREE_OPERAND (t, 0) == decl);
79acaae1 11128 TREE_OPERAND (t, 0) = var;
1e8e9920 11129 break;
fd6481cf 11130 default:
11131 gcc_unreachable ();
1e8e9920 11132 }
79acaae1 11133
8458f4ca 11134 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
9ae1b28a 11135 is_gimple_val, fb_rvalue, false);
8458f4ca 11136 ret = MIN (ret, tret);
3d483a94 11137 if (c)
11138 {
9580cb79 11139 tree step = TREE_OPERAND (t, 1);
11140 tree stept = TREE_TYPE (decl);
11141 if (POINTER_TYPE_P (stept))
11142 stept = sizetype;
11143 step = fold_convert (stept, step);
3d483a94 11144 if (TREE_CODE (t) == MINUS_EXPR)
9580cb79 11145 step = fold_build1 (NEGATE_EXPR, stept, step);
11146 OMP_CLAUSE_LINEAR_STEP (c) = step;
11147 if (step != TREE_OPERAND (t, 1))
3d483a94 11148 {
3d483a94 11149 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
11150 &for_pre_body, NULL,
9ae1b28a 11151 is_gimple_val, fb_rvalue, false);
3d483a94 11152 ret = MIN (ret, tret);
11153 }
11154 }
1e8e9920 11155 break;
fd6481cf 11156
1e8e9920 11157 default:
11158 gcc_unreachable ();
11159 }
11160
2b536a17 11161 if (c2)
11162 {
11163 gcc_assert (c);
11164 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
11165 }
11166
719a7570 11167 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
fd6481cf 11168 {
fd6481cf 11169 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
2b536a17 11170 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
11171 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
11172 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11173 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
11174 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
11175 && OMP_CLAUSE_DECL (c) == decl)
75a70cf9 11176 {
43895be5 11177 if (is_doacross && (collapse == 1 || i >= collapse))
11178 t = var;
11179 else
11180 {
11181 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11182 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11183 gcc_assert (TREE_OPERAND (t, 0) == var);
11184 t = TREE_OPERAND (t, 1);
11185 gcc_assert (TREE_CODE (t) == PLUS_EXPR
11186 || TREE_CODE (t) == MINUS_EXPR
11187 || TREE_CODE (t) == POINTER_PLUS_EXPR);
11188 gcc_assert (TREE_OPERAND (t, 0) == var);
11189 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
11190 is_doacross ? var : decl,
11191 TREE_OPERAND (t, 1));
11192 }
2b536a17 11193 gimple_seq *seq;
11194 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
11195 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
11196 else
11197 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
a24d5a80 11198 push_gimplify_context ();
2b536a17 11199 gimplify_assign (decl, t, seq);
a24d5a80 11200 gimple *bind = NULL;
11201 if (gimplify_ctxp->temps)
11202 {
11203 bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
11204 *seq = NULL;
11205 gimplify_seq_add_stmt (seq, bind);
11206 }
11207 pop_gimplify_context (bind);
11208 }
fd6481cf 11209 }
1e8e9920 11210 }
11211
3d483a94 11212 BITMAP_FREE (has_decl_expr);
11213
43895be5 11214 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11215 {
11216 push_gimplify_context ();
11217 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
11218 {
11219 OMP_FOR_BODY (orig_for_stmt)
11220 = build3 (BIND_EXPR, void_type_node, NULL,
11221 OMP_FOR_BODY (orig_for_stmt), NULL);
11222 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
11223 }
11224 }
11225
11226 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
11227 &for_body);
11228
11229 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11230 {
11231 if (gimple_code (g) == GIMPLE_BIND)
11232 pop_gimplify_context (g);
11233 else
11234 pop_gimplify_context (NULL);
11235 }
75a70cf9 11236
bc7bff74 11237 if (orig_for_stmt != for_stmt)
11238 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11239 {
11240 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11241 decl = TREE_OPERAND (t, 0);
43895be5 11242 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11243 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11244 gimplify_omp_ctxp = ctx->outer_context;
bc7bff74 11245 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
43895be5 11246 gimplify_omp_ctxp = ctx;
bc7bff74 11247 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
11248 TREE_OPERAND (t, 0) = var;
11249 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11250 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
11251 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
11252 }
11253
98588013 11254 gimplify_adjust_omp_clauses (pre_p, for_body,
11255 &OMP_FOR_CLAUSES (orig_for_stmt),
43895be5 11256 TREE_CODE (orig_for_stmt));
1e8e9920 11257
3d483a94 11258 int kind;
bc7bff74 11259 switch (TREE_CODE (orig_for_stmt))
3d483a94 11260 {
11261 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
11262 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
bc7bff74 11263 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
43895be5 11264 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
ca4c3545 11265 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
3d483a94 11266 default:
11267 gcc_unreachable ();
11268 }
bc7bff74 11269 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
75a70cf9 11270 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
11271 for_pre_body);
bc7bff74 11272 if (orig_for_stmt != for_stmt)
11273 gimple_omp_for_set_combined_p (gfor, true);
11274 if (gimplify_omp_ctxp
11275 && (gimplify_omp_ctxp->combined_loop
11276 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
11277 && gimplify_omp_ctxp->outer_context
11278 && gimplify_omp_ctxp->outer_context->combined_loop)))
11279 {
11280 gimple_omp_for_set_combined_into_p (gfor, true);
11281 if (gimplify_omp_ctxp->combined_loop)
11282 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
11283 else
11284 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
11285 }
75a70cf9 11286
11287 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11288 {
11289 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11290 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
11291 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
11292 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
11293 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
11294 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
11295 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11296 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
11297 }
11298
43895be5 11299 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
11300 constructs with GIMPLE_OMP_TASK sandwiched in between them.
 11301	   The outer taskloop computes the number of iterations and the
 11302	   counts for collapsed loops, and holds the taskloop-specific clauses.
11303 The task construct stands for the effect of data sharing on the
11304 explicit task it creates and the inner taskloop stands for expansion
11305 of the static loop inside of the explicit task construct. */
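  /* Illustrative sketch of the generated nest:
       GIMPLE_OMP_FOR (kind taskloop, outer clauses)
	 GIMPLE_BIND
	   GIMPLE_OMP_TASK (taskloop_p set, task clauses)
	     GIMPLE_BIND
	       GIMPLE_OMP_FOR (kind taskloop, inner clauses)
		 loop body  */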
11306 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11307 {
11308 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
11309 tree task_clauses = NULL_TREE;
11310 tree c = *gfor_clauses_ptr;
11311 tree *gtask_clauses_ptr = &task_clauses;
11312 tree outer_for_clauses = NULL_TREE;
11313 tree *gforo_clauses_ptr = &outer_for_clauses;
11314 for (; c; c = OMP_CLAUSE_CHAIN (c))
11315 switch (OMP_CLAUSE_CODE (c))
11316 {
11317 /* These clauses are allowed on task, move them there. */
11318 case OMP_CLAUSE_SHARED:
11319 case OMP_CLAUSE_FIRSTPRIVATE:
11320 case OMP_CLAUSE_DEFAULT:
11321 case OMP_CLAUSE_IF:
11322 case OMP_CLAUSE_UNTIED:
11323 case OMP_CLAUSE_FINAL:
11324 case OMP_CLAUSE_MERGEABLE:
11325 case OMP_CLAUSE_PRIORITY:
7e5a76c8 11326 case OMP_CLAUSE_REDUCTION:
11327 case OMP_CLAUSE_IN_REDUCTION:
43895be5 11328 *gtask_clauses_ptr = c;
11329 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11330 break;
11331 case OMP_CLAUSE_PRIVATE:
11332 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
11333 {
11334 /* We want private on outer for and firstprivate
11335 on task. */
11336 *gtask_clauses_ptr
11337 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
11338 OMP_CLAUSE_FIRSTPRIVATE);
11339 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
11340 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
11341 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
11342 *gforo_clauses_ptr = c;
11343 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11344 }
11345 else
11346 {
11347 *gtask_clauses_ptr = c;
11348 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11349 }
11350 break;
11351 /* These clauses go into outer taskloop clauses. */
11352 case OMP_CLAUSE_GRAINSIZE:
11353 case OMP_CLAUSE_NUM_TASKS:
11354 case OMP_CLAUSE_NOGROUP:
11355 *gforo_clauses_ptr = c;
11356 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11357 break;
11358 /* Taskloop clause we duplicate on both taskloops. */
11359 case OMP_CLAUSE_COLLAPSE:
11360 *gfor_clauses_ptr = c;
11361 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11362 *gforo_clauses_ptr = copy_node (c);
11363 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
11364 break;
 11365	  /* For lastprivate, keep the clause on the inner taskloop and add
 11366	     a shared clause on the task.  If the same decl is also firstprivate,
 11367	     also add a firstprivate clause on the inner taskloop. */
11368 case OMP_CLAUSE_LASTPRIVATE:
11369 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
11370 {
11371 /* For taskloop C++ lastprivate IVs, we want:
11372 1) private on outer taskloop
11373 2) firstprivate and shared on task
11374 3) lastprivate on inner taskloop */
11375 *gtask_clauses_ptr
11376 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
11377 OMP_CLAUSE_FIRSTPRIVATE);
11378 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
11379 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
11380 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
11381 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
11382 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
11383 OMP_CLAUSE_PRIVATE);
11384 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
11385 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
11386 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
11387 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
11388 }
11389 *gfor_clauses_ptr = c;
11390 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11391 *gtask_clauses_ptr
11392 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
11393 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
11394 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
11395 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
11396 gtask_clauses_ptr
11397 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
11398 break;
11399 default:
11400 gcc_unreachable ();
11401 }
11402 *gfor_clauses_ptr = NULL_TREE;
11403 *gtask_clauses_ptr = NULL_TREE;
11404 *gforo_clauses_ptr = NULL_TREE;
11405 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
11406 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
11407 NULL_TREE, NULL_TREE, NULL_TREE);
11408 gimple_omp_task_set_taskloop_p (g, true);
11409 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
11410 gomp_for *gforo
11411 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
11412 gimple_omp_for_collapse (gfor),
11413 gimple_omp_for_pre_body (gfor));
11414 gimple_omp_for_set_pre_body (gfor, NULL);
11415 gimple_omp_for_set_combined_p (gforo, true);
11416 gimple_omp_for_set_combined_into_p (gfor, true);
11417 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
11418 {
111af714 11419 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
11420 tree v = create_tmp_var (type);
11421 gimple_omp_for_set_index (gforo, i, v);
43895be5 11422 t = unshare_expr (gimple_omp_for_initial (gfor, i));
11423 gimple_omp_for_set_initial (gforo, i, t);
11424 gimple_omp_for_set_cond (gforo, i,
11425 gimple_omp_for_cond (gfor, i));
11426 t = unshare_expr (gimple_omp_for_final (gfor, i));
11427 gimple_omp_for_set_final (gforo, i, t);
11428 t = unshare_expr (gimple_omp_for_incr (gfor, i));
111af714 11429 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
11430 TREE_OPERAND (t, 0) = v;
43895be5 11431 gimple_omp_for_set_incr (gforo, i, t);
111af714 11432 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
11433 OMP_CLAUSE_DECL (t) = v;
11434 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
11435 gimple_omp_for_set_clauses (gforo, t);
43895be5 11436 }
11437 gimplify_seq_add_stmt (pre_p, gforo);
11438 }
11439 else
11440 gimplify_seq_add_stmt (pre_p, gfor);
3d483a94 11441 if (ret != GS_ALL_DONE)
11442 return GS_ERROR;
11443 *expr_p = NULL_TREE;
11444 return GS_ALL_DONE;
1e8e9920 11445}
11446
9561765e 11447/* Helper function of optimize_target_teams, find OMP_TEAMS inside
11448 of OMP_TARGET's body. */
11449
11450static tree
11451find_omp_teams (tree *tp, int *walk_subtrees, void *)
11452{
11453 *walk_subtrees = 0;
11454 switch (TREE_CODE (*tp))
11455 {
11456 case OMP_TEAMS:
11457 return *tp;
11458 case BIND_EXPR:
11459 case STATEMENT_LIST:
11460 *walk_subtrees = 1;
11461 break;
11462 default:
11463 break;
11464 }
11465 return NULL_TREE;
11466}
11467
11468/* Helper function of optimize_target_teams, determine if the expression
11469 can be computed safely before the target construct on the host. */
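/* For example, 'n + 1' where 'n' is a firstprivate int can be computed on
   the host, whereas a function call such as 'omp_get_num_procs ()' or a
   dereference like '*p' cannot, and makes the whole clause expression
   non-computable here.  */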
11470
11471static tree
11472computable_teams_clause (tree *tp, int *walk_subtrees, void *)
11473{
11474 splay_tree_node n;
11475
11476 if (TYPE_P (*tp))
11477 {
11478 *walk_subtrees = 0;
11479 return NULL_TREE;
11480 }
11481 switch (TREE_CODE (*tp))
11482 {
11483 case VAR_DECL:
11484 case PARM_DECL:
11485 case RESULT_DECL:
11486 *walk_subtrees = 0;
11487 if (error_operand_p (*tp)
11488 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
11489 || DECL_HAS_VALUE_EXPR_P (*tp)
11490 || DECL_THREAD_LOCAL_P (*tp)
11491 || TREE_SIDE_EFFECTS (*tp)
11492 || TREE_THIS_VOLATILE (*tp))
11493 return *tp;
11494 if (is_global_var (*tp)
11495 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
11496 || lookup_attribute ("omp declare target link",
11497 DECL_ATTRIBUTES (*tp))))
11498 return *tp;
44b49e6b 11499 if (VAR_P (*tp)
11500 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
11501 && !is_global_var (*tp)
11502 && decl_function_context (*tp) == current_function_decl)
11503 return *tp;
9561765e 11504 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
11505 (splay_tree_key) *tp);
11506 if (n == NULL)
11507 {
7e5a76c8 11508 if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
9561765e 11509 return NULL_TREE;
11510 return *tp;
11511 }
11512 else if (n->value & GOVD_LOCAL)
11513 return *tp;
11514 else if (n->value & GOVD_FIRSTPRIVATE)
11515 return NULL_TREE;
11516 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
11517 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
11518 return NULL_TREE;
11519 return *tp;
11520 case INTEGER_CST:
11521 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
11522 return *tp;
11523 return NULL_TREE;
11524 case TARGET_EXPR:
11525 if (TARGET_EXPR_INITIAL (*tp)
11526 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
11527 return *tp;
11528 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
11529 walk_subtrees, NULL);
 11530	    /* Allow some reasonable subset of integer arithmetic.  */
11531 case PLUS_EXPR:
11532 case MINUS_EXPR:
11533 case MULT_EXPR:
11534 case TRUNC_DIV_EXPR:
11535 case CEIL_DIV_EXPR:
11536 case FLOOR_DIV_EXPR:
11537 case ROUND_DIV_EXPR:
11538 case TRUNC_MOD_EXPR:
11539 case CEIL_MOD_EXPR:
11540 case FLOOR_MOD_EXPR:
11541 case ROUND_MOD_EXPR:
11542 case RDIV_EXPR:
11543 case EXACT_DIV_EXPR:
11544 case MIN_EXPR:
11545 case MAX_EXPR:
11546 case LSHIFT_EXPR:
11547 case RSHIFT_EXPR:
11548 case BIT_IOR_EXPR:
11549 case BIT_XOR_EXPR:
11550 case BIT_AND_EXPR:
11551 case NEGATE_EXPR:
11552 case ABS_EXPR:
11553 case BIT_NOT_EXPR:
11554 case NON_LVALUE_EXPR:
11555 CASE_CONVERT:
11556 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
11557 return *tp;
11558 return NULL_TREE;
11559 /* And disallow anything else, except for comparisons. */
11560 default:
11561 if (COMPARISON_CLASS_P (*tp))
11562 return NULL_TREE;
11563 return *tp;
11564 }
11565}
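
/* For example (identifiers illustrative), given

     int n, m;
     #pragma omp target map(always, to: m) firstprivate (n)
     #pragma omp teams num_teams (n + 4 * m)

   the expression n + 4 * m passes the test above and can be evaluated on
   the host before entering the target region, whereas num_teams (foo ())
   or num_teams (*p) would not, since calls and dereferences are
   rejected.  */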
11566
11567/* Try to determine if the num_teams and/or thread_limit expressions
11568 can have their values determined already before entering the
11569 target construct.
 11570   INTEGER_CSTs trivially are;
 11571   so are integral decls that are firstprivate (explicitly or implicitly)
 11572   or explicitly map(always, to:) or map(always, tofrom:) on the target
 11573   region, and expressions involving simple arithmetic on those;
 11574   function calls, dereferences and the like are not.
11575 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
11576 EXPR based on what we find:
11577 0 stands for clause not specified at all, use implementation default
11578 -1 stands for value that can't be determined easily before entering
11579 the target construct.
11580 If teams construct is not present at all, use 1 for num_teams
11581 and 0 for thread_limit (only one team is involved, and the thread
 11582   limit is implementation defined).  */
11583
11584static void
11585optimize_target_teams (tree target, gimple_seq *pre_p)
11586{
11587 tree body = OMP_BODY (target);
11588 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
11589 tree num_teams = integer_zero_node;
11590 tree thread_limit = integer_zero_node;
11591 location_t num_teams_loc = EXPR_LOCATION (target);
11592 location_t thread_limit_loc = EXPR_LOCATION (target);
11593 tree c, *p, expr;
11594 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
11595
11596 if (teams == NULL_TREE)
11597 num_teams = integer_one_node;
11598 else
11599 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
11600 {
11601 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
11602 {
11603 p = &num_teams;
11604 num_teams_loc = OMP_CLAUSE_LOCATION (c);
11605 }
11606 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
11607 {
11608 p = &thread_limit;
11609 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
11610 }
11611 else
11612 continue;
11613 expr = OMP_CLAUSE_OPERAND (c, 0);
11614 if (TREE_CODE (expr) == INTEGER_CST)
11615 {
11616 *p = expr;
11617 continue;
11618 }
11619 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
11620 {
11621 *p = integer_minus_one_node;
11622 continue;
11623 }
11624 *p = expr;
11625 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
9ae1b28a 11626 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
9561765e 11627 == GS_ERROR)
11628 {
11629 gimplify_omp_ctxp = target_ctx;
11630 *p = integer_minus_one_node;
11631 continue;
11632 }
11633 gimplify_omp_ctxp = target_ctx;
11634 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
11635 OMP_CLAUSE_OPERAND (c, 0) = *p;
11636 }
11637 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
11638 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
11639 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
11640 OMP_TARGET_CLAUSES (target) = c;
11641 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
11642 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
11643 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
11644 OMP_TARGET_CLAUSES (target) = c;
11645}
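
/* Concretely (identifiers illustrative), for

     #pragma omp target
     #pragma omp teams num_teams (n) thread_limit (64)

   with n firstprivate on the target, this adds num_teams (n) and
   thread_limit (64) clauses to the OMP_TARGET itself so the launch can be
   sized on the host; a value that cannot be precomputed is recorded as -1,
   an unspecified clause as 0, and a missing teams construct as
   num_teams (1) thread_limit (0).  */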
11646
ca4c3545 11647/* Gimplify the gross structure of several OpenMP and OpenACC constructs.  */
1e8e9920 11648
75a70cf9 11649static void
11650gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
1e8e9920 11651{
75a70cf9 11652 tree expr = *expr_p;
42acab1c 11653 gimple *stmt;
75a70cf9 11654 gimple_seq body = NULL;
ca4c3545 11655 enum omp_region_type ort;
1e8e9920 11656
bc7bff74 11657 switch (TREE_CODE (expr))
11658 {
11659 case OMP_SECTIONS:
11660 case OMP_SINGLE:
ca4c3545 11661 ort = ORT_WORKSHARE;
bc7bff74 11662 break;
43895be5 11663 case OMP_TARGET:
11664 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
11665 break;
ca4c3545 11666 case OACC_KERNELS:
b656be3a 11667 ort = ORT_ACC_KERNELS;
11668 break;
ca4c3545 11669 case OACC_PARALLEL:
b656be3a 11670 ort = ORT_ACC_PARALLEL;
bc7bff74 11671 break;
ca4c3545 11672 case OACC_DATA:
b656be3a 11673 ort = ORT_ACC_DATA;
11674 break;
bc7bff74 11675 case OMP_TARGET_DATA:
11676 ort = ORT_TARGET_DATA;
11677 break;
11678 case OMP_TEAMS:
d7729e26 11679 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
7e5a76c8 11680 if (gimplify_omp_ctxp == NULL
11681 || (gimplify_omp_ctxp->region_type == ORT_TARGET
11682 && gimplify_omp_ctxp->outer_context == NULL
11683 && lookup_attribute ("omp declare target",
11684 DECL_ATTRIBUTES (current_function_decl))))
11685 ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
bc7bff74 11686 break;
571b3486 11687 case OACC_HOST_DATA:
11688 ort = ORT_ACC_HOST_DATA;
11689 break;
bc7bff74 11690 default:
11691 gcc_unreachable ();
11692 }
43895be5 11693 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
11694 TREE_CODE (expr));
9561765e 11695 if (TREE_CODE (expr) == OMP_TARGET)
11696 optimize_target_teams (expr, pre_p);
7e5a76c8 11697 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
11698 || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
bc7bff74 11699 {
8a4a28a8 11700 push_gimplify_context ();
42acab1c 11701 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
bc7bff74 11702 if (gimple_code (g) == GIMPLE_BIND)
11703 pop_gimplify_context (g);
11704 else
11705 pop_gimplify_context (NULL);
b656be3a 11706 if ((ort & ORT_TARGET_DATA) != 0)
bc7bff74 11707 {
ca4c3545 11708 enum built_in_function end_ix;
11709 switch (TREE_CODE (expr))
11710 {
11711 case OACC_DATA:
571b3486 11712 case OACC_HOST_DATA:
ca4c3545 11713 end_ix = BUILT_IN_GOACC_DATA_END;
11714 break;
11715 case OMP_TARGET_DATA:
11716 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
11717 break;
11718 default:
11719 gcc_unreachable ();
11720 }
11721 tree fn = builtin_decl_explicit (end_ix);
bc7bff74 11722 g = gimple_build_call (fn, 0);
ca4c3545 11723 gimple_seq cleanup = NULL;
bc7bff74 11724 gimple_seq_add_stmt (&cleanup, g);
11725 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
11726 body = NULL;
11727 gimple_seq_add_stmt (&body, g);
11728 }
11729 }
11730 else
11731 gimplify_and_add (OMP_BODY (expr), &body);
98588013 11732 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
11733 TREE_CODE (expr));
1e8e9920 11734
bc7bff74 11735 switch (TREE_CODE (expr))
11736 {
ca4c3545 11737 case OACC_DATA:
11738 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
11739 OMP_CLAUSES (expr));
11740 break;
11741 case OACC_KERNELS:
11742 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
11743 OMP_CLAUSES (expr));
11744 break;
571b3486 11745 case OACC_HOST_DATA:
11746 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
11747 OMP_CLAUSES (expr));
11748 break;
ca4c3545 11749 case OACC_PARALLEL:
11750 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
11751 OMP_CLAUSES (expr));
11752 break;
bc7bff74 11753 case OMP_SECTIONS:
11754 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
11755 break;
11756 case OMP_SINGLE:
11757 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
11758 break;
11759 case OMP_TARGET:
11760 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
11761 OMP_CLAUSES (expr));
11762 break;
11763 case OMP_TARGET_DATA:
11764 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
11765 OMP_CLAUSES (expr));
11766 break;
11767 case OMP_TEAMS:
11768 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
7e5a76c8 11769 if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
11770 gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
bc7bff74 11771 break;
11772 default:
11773 gcc_unreachable ();
11774 }
11775
11776 gimplify_seq_add_stmt (pre_p, stmt);
11777 *expr_p = NULL_TREE;
11778}
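
/* For the structured data regions handled above, e.g.

     #pragma omp target data map(tofrom: a)
     { ... }

   the gimplified body ends up wrapped roughly as

     try { <body> } finally { GOMP_target_end_data (); }

   (GOACC_data_end for the OpenACC data constructs), so the mapping is
   released however the body is left.  */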
11779
ca4c3545 11780/* Gimplify the gross structure of OpenACC enter/exit data and update, and of
 11781   OpenMP target update and target enter/exit data constructs.  */
bc7bff74 11782
11783static void
11784gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
11785{
95cfd391 11786 tree expr = *expr_p;
ca4c3545 11787 int kind;
1a91d914 11788 gomp_target *stmt;
b656be3a 11789 enum omp_region_type ort = ORT_WORKSHARE;
bc7bff74 11790
ca4c3545 11791 switch (TREE_CODE (expr))
11792 {
11793 case OACC_ENTER_DATA:
ca4c3545 11794 case OACC_EXIT_DATA:
ca4c3545 11795 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
b656be3a 11796 ort = ORT_ACC;
ca4c3545 11797 break;
11798 case OACC_UPDATE:
ca4c3545 11799 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
b656be3a 11800 ort = ORT_ACC;
ca4c3545 11801 break;
11802 case OMP_TARGET_UPDATE:
ca4c3545 11803 kind = GF_OMP_TARGET_KIND_UPDATE;
11804 break;
43895be5 11805 case OMP_TARGET_ENTER_DATA:
11806 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
11807 break;
11808 case OMP_TARGET_EXIT_DATA:
11809 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
11810 break;
ca4c3545 11811 default:
11812 gcc_unreachable ();
11813 }
95cfd391 11814 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
b656be3a 11815 ort, TREE_CODE (expr));
98588013 11816 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
43895be5 11817 TREE_CODE (expr));
737cc978 11818 if (TREE_CODE (expr) == OACC_UPDATE
11819 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
11820 OMP_CLAUSE_IF_PRESENT))
11821 {
11822 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
11823 clause. */
11824 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
11825 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
11826 switch (OMP_CLAUSE_MAP_KIND (c))
11827 {
11828 case GOMP_MAP_FORCE_TO:
11829 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
11830 break;
11831 case GOMP_MAP_FORCE_FROM:
11832 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
11833 break;
11834 default:
11835 break;
11836 }
11837 }
11838 else if (TREE_CODE (expr) == OACC_EXIT_DATA
11839 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
11840 OMP_CLAUSE_FINALIZE))
11841 {
11842 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote that "finalize"
11843 semantics apply to all mappings of this OpenACC directive. */
11844 bool finalize_marked = false;
11845 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
11846 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
11847 switch (OMP_CLAUSE_MAP_KIND (c))
11848 {
11849 case GOMP_MAP_FROM:
11850 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
11851 finalize_marked = true;
11852 break;
11853 case GOMP_MAP_RELEASE:
11854 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
11855 finalize_marked = true;
11856 break;
11857 default:
11858 /* Check consistency: libgomp relies on the very first data
11859 mapping clause being marked, so make sure we did that before
11860 any other mapping clauses. */
11861 gcc_assert (finalize_marked);
11862 break;
11863 }
11864 }
95cfd391 11865 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
75a70cf9 11866
11867 gimplify_seq_add_stmt (pre_p, stmt);
bc7bff74 11868 *expr_p = NULL_TREE;
1e8e9920 11869}
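
/* E.g. a standalone

     #pragma acc exit data copyout (a) finalize

   arrives here with GOMP_MAP_FROM/GOMP_MAP_RELEASE mappings, which the
   loop above rewrites to GOMP_MAP_FORCE_FROM/GOMP_MAP_DELETE so that
   libgomp sees the finalize semantics on the first mapping clause; the
   whole directive then becomes a bodyless GIMPLE_OMP_TARGET carrying only
   its clauses.  */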
11870
11871/* A subroutine of gimplify_omp_atomic. The front end is supposed to have
48e1416a 11872 stabilized the lhs of the atomic operation as *ADDR. Return true if
1e8e9920 11873 EXPR is this stabilized form. */
11874
11875static bool
cb7f680b 11876goa_lhs_expr_p (tree expr, tree addr)
1e8e9920 11877{
11878 /* Also include casts to other type variants. The C front end is fond
48e1416a 11879 of adding these for e.g. volatile variables. This is like
1e8e9920 11880 STRIP_TYPE_NOPS but includes the main variant lookup. */
1ea6a73c 11881 STRIP_USELESS_TYPE_CONVERSION (expr);
1e8e9920 11882
dcaa067e 11883 if (TREE_CODE (expr) == INDIRECT_REF)
11884 {
11885 expr = TREE_OPERAND (expr, 0);
11886 while (expr != addr
72dd6141 11887 && (CONVERT_EXPR_P (expr)
dcaa067e 11888 || TREE_CODE (expr) == NON_LVALUE_EXPR)
11889 && TREE_CODE (expr) == TREE_CODE (addr)
1ea6a73c 11890 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
dcaa067e 11891 {
11892 expr = TREE_OPERAND (expr, 0);
11893 addr = TREE_OPERAND (addr, 0);
11894 }
83c5f690 11895 if (expr == addr)
11896 return true;
b2ae9563 11897 return (TREE_CODE (addr) == ADDR_EXPR
11898 && TREE_CODE (expr) == ADDR_EXPR
83c5f690 11899 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
dcaa067e 11900 }
1e8e9920 11901 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
11902 return true;
11903 return false;
11904}
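
/* So with ADDR being &x as stabilized by the front end, a plain x is
   recognized as the atomic lhs, and so is a dereference of that same
   address, even when both sides carry matching useless pointer casts
   (which the C front end adds for e.g. volatile accesses).  */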
11905
57859735 11906/* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
11907 expression does not involve the lhs, evaluate it into a temporary.
11908 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
11909 or -1 if an error was encountered. */
1e8e9920 11910
11911static int
75a70cf9 11912goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
11913 tree lhs_var)
1e8e9920 11914{
11915 tree expr = *expr_p;
11916 int saw_lhs;
11917
11918 if (goa_lhs_expr_p (expr, lhs_addr))
11919 {
11920 *expr_p = lhs_var;
11921 return 1;
11922 }
11923 if (is_gimple_val (expr))
11924 return 0;
48e1416a 11925
1e8e9920 11926 saw_lhs = 0;
11927 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
11928 {
11929 case tcc_binary:
072866b6 11930 case tcc_comparison:
75a70cf9 11931 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
11932 lhs_var);
e3533433 11933 /* FALLTHRU */
1e8e9920 11934 case tcc_unary:
75a70cf9 11935 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
11936 lhs_var);
1e8e9920 11937 break;
072866b6 11938 case tcc_expression:
11939 switch (TREE_CODE (expr))
11940 {
11941 case TRUTH_ANDIF_EXPR:
11942 case TRUTH_ORIF_EXPR:
db28a91b 11943 case TRUTH_AND_EXPR:
11944 case TRUTH_OR_EXPR:
11945 case TRUTH_XOR_EXPR:
b4ae034c 11946 case BIT_INSERT_EXPR:
072866b6 11947 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
11948 lhs_addr, lhs_var);
e3533433 11949 /* FALLTHRU */
db28a91b 11950 case TRUTH_NOT_EXPR:
072866b6 11951 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
11952 lhs_addr, lhs_var);
11953 break;
3115bda0 11954 case COMPOUND_EXPR:
11955 /* Break out any preevaluations from cp_build_modify_expr. */
11956 for (; TREE_CODE (expr) == COMPOUND_EXPR;
11957 expr = TREE_OPERAND (expr, 1))
11958 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
11959 *expr_p = expr;
11960 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
072866b6 11961 default:
11962 break;
11963 }
11964 break;
b4ae034c 11965 case tcc_reference:
11966 if (TREE_CODE (expr) == BIT_FIELD_REF)
11967 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
11968 lhs_addr, lhs_var);
11969 break;
1e8e9920 11970 default:
11971 break;
11972 }
11973
11974 if (saw_lhs == 0)
11975 {
11976 enum gimplify_status gs;
11977 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
11978 if (gs != GS_ALL_DONE)
11979 saw_lhs = -1;
11980 }
11981
11982 return saw_lhs;
11983}
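
/* For instance, gimplifying "#pragma omp atomic" with "x = x + foo ()",
   where LHS_ADDR is &x and LHS_VAR the temporary that will hold the
   atomically loaded value, rewrites the rhs to "LHS_VAR + t" with the
   call pre-evaluated into a temporary t in *PRE_P, and returns 1 because
   the lhs appeared in the expression.  */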
11984
1e8e9920 11985/* Gimplify an OMP_ATOMIC statement. */
11986
11987static enum gimplify_status
75a70cf9 11988gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
1e8e9920 11989{
11990 tree addr = TREE_OPERAND (*expr_p, 0);
2169f33b 11991 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
11992 ? NULL : TREE_OPERAND (*expr_p, 1);
1e8e9920 11993 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
75a70cf9 11994 tree tmp_load;
1a91d914 11995 gomp_atomic_load *loadstmt;
11996 gomp_atomic_store *storestmt;
1e8e9920 11997
f9e245b2 11998 tmp_load = create_tmp_reg (type);
2169f33b 11999 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
12000 return GS_ERROR;
12001
12002 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
12003 != GS_ALL_DONE)
12004 return GS_ERROR;
1e8e9920 12005
7e5a76c8 12006 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
12007 OMP_ATOMIC_MEMORY_ORDER (*expr_p));
2169f33b 12008 gimplify_seq_add_stmt (pre_p, loadstmt);
f840847c 12009 if (rhs)
12010 {
12011 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
12012 representatives. Use BIT_FIELD_REF on the lhs instead. */
12013 if (TREE_CODE (rhs) == BIT_INSERT_EXPR
12014 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
12015 {
12016 tree bitpos = TREE_OPERAND (rhs, 2);
12017 tree op1 = TREE_OPERAND (rhs, 1);
12018 tree bitsize;
12019 tree tmp_store = tmp_load;
12020 if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
12021 tmp_store = get_initialized_tmp_var (tmp_load, pre_p, NULL);
12022 if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
12023 bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
12024 else
12025 bitsize = TYPE_SIZE (TREE_TYPE (op1));
12026 gcc_assert (TREE_OPERAND (rhs, 0) == tmp_load);
12027 tree t = build2_loc (EXPR_LOCATION (rhs),
12028 MODIFY_EXPR, void_type_node,
12029 build3_loc (EXPR_LOCATION (rhs), BIT_FIELD_REF,
12030 TREE_TYPE (op1), tmp_store, bitsize,
12031 bitpos), op1);
12032 gimplify_and_add (t, pre_p);
12033 rhs = tmp_store;
12034 }
12035 if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
12036 != GS_ALL_DONE)
12037 return GS_ERROR;
12038 }
1e8e9920 12039
2169f33b 12040 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
12041 rhs = tmp_load;
7e5a76c8 12042 storestmt
12043 = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
2169f33b 12044 gimplify_seq_add_stmt (pre_p, storestmt);
12045 switch (TREE_CODE (*expr_p))
12046 {
12047 case OMP_ATOMIC_READ:
12048 case OMP_ATOMIC_CAPTURE_OLD:
12049 *expr_p = tmp_load;
12050 gimple_omp_atomic_set_need_value (loadstmt);
12051 break;
12052 case OMP_ATOMIC_CAPTURE_NEW:
12053 *expr_p = rhs;
12054 gimple_omp_atomic_set_need_value (storestmt);
12055 break;
12056 default:
12057 *expr_p = NULL;
12058 break;
12059 }
cb7f680b 12060
bc7bff74 12061 return GS_ALL_DONE;
1e8e9920 12062}
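
/* Putting it together (temporaries illustrative), a capture such as

     #pragma omp atomic capture
     v = x += 1;

   gimplifies roughly to

     GIMPLE_OMP_ATOMIC_LOAD <tmp, &x>
     tmp2 = tmp + 1;
     GIMPLE_OMP_ATOMIC_STORE <tmp2>   (need_value set)
     v = tmp2;

   with the directive's memory order carried on the load and store, and
   *EXPR_P replaced by the captured value (the loaded value instead for
   OMP_ATOMIC_READ and OMP_ATOMIC_CAPTURE_OLD).  */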
4ee9c684 12063
4c0315d0 12064/* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
12065 body, and adding some EH bits. */
12066
12067static enum gimplify_status
12068gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
12069{
12070 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
42acab1c 12071 gimple *body_stmt;
1a91d914 12072 gtransaction *trans_stmt;
4c0315d0 12073 gimple_seq body = NULL;
4c0315d0 12074 int subcode = 0;
12075
12076 /* Wrap the transaction body in a BIND_EXPR so we have a context
ca4c3545 12077     in which to put decls for OMP.  */
4c0315d0 12078 if (TREE_CODE (tbody) != BIND_EXPR)
12079 {
12080 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
12081 TREE_SIDE_EFFECTS (bind) = 1;
12082 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
12083 TRANSACTION_EXPR_BODY (expr) = bind;
12084 }
12085
8a4a28a8 12086 push_gimplify_context ();
4c0315d0 12087 temp = voidify_wrapper_expr (*expr_p, NULL);
12088
1a91d914 12089 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
12090 pop_gimplify_context (body_stmt);
4c0315d0 12091
a08574d7 12092 trans_stmt = gimple_build_transaction (body);
4c0315d0 12093 if (TRANSACTION_EXPR_OUTER (expr))
12094 subcode = GTMA_IS_OUTER;
12095 else if (TRANSACTION_EXPR_RELAXED (expr))
12096 subcode = GTMA_IS_RELAXED;
1a91d914 12097 gimple_transaction_set_subcode (trans_stmt, subcode);
4c0315d0 12098
1a91d914 12099 gimplify_seq_add_stmt (pre_p, trans_stmt);
4c0315d0 12100
12101 if (temp)
12102 {
12103 *expr_p = temp;
12104 return GS_OK;
12105 }
12106
12107 *expr_p = NULL_TREE;
12108 return GS_ALL_DONE;
12109}
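
/* So a transaction such as

     __transaction_relaxed { x++; }

   becomes a GIMPLE_TRANSACTION statement whose body is the gimplified
   x++ and whose subcode has GTMA_IS_RELAXED set; an outer transaction
   gets GTMA_IS_OUTER, and a plain atomic one a subcode of 0.  */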
12110
43895be5 12111/* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
12112 is the OMP_BODY of the original EXPR (which has already been
12113 gimplified so it's not present in the EXPR).
12114
12115 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
12116
12117static gimple *
12118gimplify_omp_ordered (tree expr, gimple_seq body)
12119{
12120 tree c, decls;
12121 int failures = 0;
12122 unsigned int i;
12123 tree source_c = NULL_TREE;
12124 tree sink_c = NULL_TREE;
12125
12126 if (gimplify_omp_ctxp)
d3831f71 12127 {
12128 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
12129 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
12130 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
12131 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
12132 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
12133 {
12134 error_at (OMP_CLAUSE_LOCATION (c),
12135 "%<ordered%> construct with %<depend%> clause must be "
12136 "closely nested inside a loop with %<ordered%> clause "
12137 "with a parameter");
12138 failures++;
12139 }
12140 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
12141 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
12142 {
12143 bool fail = false;
12144 for (decls = OMP_CLAUSE_DECL (c), i = 0;
12145 decls && TREE_CODE (decls) == TREE_LIST;
12146 decls = TREE_CHAIN (decls), ++i)
12147 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
12148 continue;
12149 else if (TREE_VALUE (decls)
12150 != gimplify_omp_ctxp->loop_iter_var[2 * i])
12151 {
12152 error_at (OMP_CLAUSE_LOCATION (c),
12153 "variable %qE is not an iteration "
12154 "of outermost loop %d, expected %qE",
12155 TREE_VALUE (decls), i + 1,
12156 gimplify_omp_ctxp->loop_iter_var[2 * i]);
12157 fail = true;
12158 failures++;
12159 }
12160 else
12161 TREE_VALUE (decls)
12162 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
12163 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
12164 {
12165 error_at (OMP_CLAUSE_LOCATION (c),
71c5a0bc 12166 "number of variables in %<depend%> clause with "
12167 "%<sink%> modifier does not match number of "
d3831f71 12168 "iteration variables");
12169 failures++;
12170 }
12171 sink_c = c;
12172 }
12173 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
12174 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
12175 {
12176 if (source_c)
43895be5 12177 {
12178 error_at (OMP_CLAUSE_LOCATION (c),
71c5a0bc 12179 "more than one %<depend%> clause with %<source%> "
12180 "modifier on an %<ordered%> construct");
43895be5 12181 failures++;
12182 }
12183 else
d3831f71 12184 source_c = c;
12185 }
12186 }
43895be5 12187 if (source_c && sink_c)
12188 {
12189 error_at (OMP_CLAUSE_LOCATION (source_c),
71c5a0bc 12190 "%<depend%> clause with %<source%> modifier specified "
12191 "together with %<depend%> clauses with %<sink%> modifier "
12192 "on the same construct");
43895be5 12193 failures++;
12194 }
12195
12196 if (failures)
12197 return gimple_build_nop ();
12198 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
12199}
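
/* The checks above diagnose e.g. a depend (sink: ...) whose variables do
   not name the iteration variables of the enclosing ordered (n) loop nest
   in order, a mismatch between the number of sink operands and n, or
   mixing source and sink clauses on one construct; valid sink decls are
   rewritten to the recorded loop iteration variables.  */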
12200
57859735 12201/* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
75a70cf9 12202 expression produces a value to be used as an operand inside a GIMPLE
12203 statement, the value will be stored back in *EXPR_P. This value will
12204 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
12205 an SSA_NAME. The corresponding sequence of GIMPLE statements is
12206 emitted in PRE_P and POST_P.
12207
12208 Additionally, this process may overwrite parts of the input
12209 expression during gimplification. Ideally, it should be
12210 possible to do non-destructive gimplification.
12211
12212 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
12213 the expression needs to evaluate to a value to be used as
12214 an operand in a GIMPLE statement, this value will be stored in
12215 *EXPR_P on exit. This happens when the caller specifies one
12216 of fb_lvalue or fb_rvalue fallback flags.
12217
12218 PRE_P will contain the sequence of GIMPLE statements corresponding
12219 to the evaluation of EXPR and all the side-effects that must
12220 be executed before the main expression. On exit, the last
12221 statement of PRE_P is the core statement being gimplified. For
12222 instance, when gimplifying 'if (++a)' the last statement in
12223 PRE_P will be 'if (t.1)' where t.1 is the result of
12224 pre-incrementing 'a'.
12225
12226 POST_P will contain the sequence of GIMPLE statements corresponding
12227 to the evaluation of all the side-effects that must be executed
12228 after the main expression. If this is NULL, the post
12229 side-effects are stored at the end of PRE_P.
12230
12231 The reason why the output is split in two is to handle post
12232 side-effects explicitly. In some cases, an expression may have
12233 inner and outer post side-effects which need to be emitted in
12234 an order different from the one given by the recursive
12235 traversal. For instance, for the expression (*p--)++ the post
12236 side-effects of '--' must actually occur *after* the post
12237 side-effects of '++'. However, gimplification will first visit
12238 the inner expression, so if a separate POST sequence was not
12239 used, the resulting sequence would be:
12240
12241 1 t.1 = *p
12242 2 p = p - 1
12243 3 t.2 = t.1 + 1
12244 4 *p = t.2
12245
12246 However, the post-decrement operation in line #2 must not be
12247 evaluated until after the store to *p at line #4, so the
12248 correct sequence should be:
12249
12250 1 t.1 = *p
12251 2 t.2 = t.1 + 1
12252 3 *p = t.2
12253 4 p = p - 1
12254
12255 So, by specifying a separate post queue, it is possible
12256 to emit the post side-effects in the correct order.
12257 If POST_P is NULL, an internal queue will be used. Before
12258 returning to the caller, the sequence POST_P is appended to
12259 the main output sequence PRE_P.
12260
12261 GIMPLE_TEST_F points to a function that takes a tree T and
12262 returns nonzero if T is in the GIMPLE form requested by the
81943faa 12263 caller. The GIMPLE predicates are in gimple.c.
75a70cf9 12264
12265 FALLBACK tells the function what sort of a temporary we want if
12266 gimplification cannot produce an expression that complies with
12267 GIMPLE_TEST_F.
12268
12269 fb_none means that no temporary should be generated
12270 fb_rvalue means that an rvalue is OK to generate
12271 fb_lvalue means that an lvalue is OK to generate
12272 fb_either means that either is OK, but an lvalue is preferable.
12273 fb_mayfail means that gimplification may fail (in which case
12274 GS_ERROR will be returned)
12275
12276 The return value is either GS_ERROR or GS_ALL_DONE, since this
12277 function iterates until EXPR is completely gimplified or an error
12278 occurs. */
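
/* A typical internal use gimplifies an operand in place, for example
   (a sketch, using the 5-argument form):

     if (gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			is_gimple_val, fb_rvalue) == GS_ERROR)
       return GS_ERROR;

   i.e. force operand 0 into a GIMPLE value, emit any statements it needs
   into PRE_P, and propagate failure.  */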
4ee9c684 12279
12280enum gimplify_status
75a70cf9 12281gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
12282 bool (*gimple_test_f) (tree), fallback_t fallback)
4ee9c684 12283{
12284 tree tmp;
75a70cf9 12285 gimple_seq internal_pre = NULL;
12286 gimple_seq internal_post = NULL;
4ee9c684 12287 tree save_expr;
75a70cf9 12288 bool is_statement;
4ee9c684 12289 location_t saved_location;
12290 enum gimplify_status ret;
75a70cf9 12291 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
629b6abc 12292 tree label;
4ee9c684 12293
12294 save_expr = *expr_p;
12295 if (save_expr == NULL_TREE)
12296 return GS_ALL_DONE;
12297
75a70cf9 12298 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
12299 is_statement = gimple_test_f == is_gimple_stmt;
12300 if (is_statement)
12301 gcc_assert (pre_p);
12302
12303 /* Consistency checks. */
12304 if (gimple_test_f == is_gimple_reg)
12305 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
12306 else if (gimple_test_f == is_gimple_val
75a70cf9 12307 || gimple_test_f == is_gimple_call_addr
12308 || gimple_test_f == is_gimple_condexpr
12309 || gimple_test_f == is_gimple_mem_rhs
47f11e84 12310 || gimple_test_f == is_gimple_mem_rhs_or_call
75a70cf9 12311 || gimple_test_f == is_gimple_reg_rhs
47f11e84 12312 || gimple_test_f == is_gimple_reg_rhs_or_call
182cf5a9 12313 || gimple_test_f == is_gimple_asm_val
12314 || gimple_test_f == is_gimple_mem_ref_addr)
75a70cf9 12315 gcc_assert (fallback & fb_rvalue);
12316 else if (gimple_test_f == is_gimple_min_lval
12317 || gimple_test_f == is_gimple_lvalue)
12318 gcc_assert (fallback & fb_lvalue);
12319 else if (gimple_test_f == is_gimple_addressable)
12320 gcc_assert (fallback & fb_either);
12321 else if (gimple_test_f == is_gimple_stmt)
12322 gcc_assert (fallback == fb_none);
12323 else
12324 {
12325 /* We should have recognized the GIMPLE_TEST_F predicate to
12326 know what kind of fallback to use in case a temporary is
12327 needed to hold the value or address of *EXPR_P. */
12328 gcc_unreachable ();
12329 }
12330
4ee9c684 12331 /* We used to check the predicate here and return immediately if it
12332 succeeds. This is wrong; the design is for gimplification to be
12333 idempotent, and for the predicates to only test for valid forms, not
12334 whether they are fully simplified. */
4ee9c684 12335 if (pre_p == NULL)
12336 pre_p = &internal_pre;
75a70cf9 12337
4ee9c684 12338 if (post_p == NULL)
12339 post_p = &internal_post;
12340
75a70cf9 12341 /* Remember the last statements added to PRE_P and POST_P. Every
12342 new statement added by the gimplification helpers needs to be
12343 annotated with location information. To centralize the
12344 responsibility, we remember the last statement that had been
12345 added to both queues before gimplifying *EXPR_P. If
12346 gimplification produces new statements in PRE_P and POST_P, those
12347 statements will be annotated with the same location information
12348 as *EXPR_P. */
12349 pre_last_gsi = gsi_last (*pre_p);
12350 post_last_gsi = gsi_last (*post_p);
12351
4ee9c684 12352 saved_location = input_location;
2ed8b5d0 12353 if (save_expr != error_mark_node
12354 && EXPR_HAS_LOCATION (*expr_p))
12355 input_location = EXPR_LOCATION (*expr_p);
4ee9c684 12356
12357 /* Loop over the specific gimplifiers until the toplevel node
12358 remains the same. */
12359 do
12360 {
d62a6bf2 12361 /* Strip away as many useless type conversions as possible
12362 at the toplevel. */
12363 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
4ee9c684 12364
12365 /* Remember the expr. */
12366 save_expr = *expr_p;
12367
12368 /* Die, die, die, my darling. */
3888c819 12369 if (error_operand_p (save_expr))
4ee9c684 12370 {
12371 ret = GS_ERROR;
12372 break;
12373 }
12374
12375 /* Do any language-specific gimplification. */
8458f4ca 12376 ret = ((enum gimplify_status)
12377 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
4ee9c684 12378 if (ret == GS_OK)
12379 {
12380 if (*expr_p == NULL_TREE)
12381 break;
12382 if (*expr_p != save_expr)
12383 continue;
12384 }
12385 else if (ret != GS_UNHANDLED)
12386 break;
12387
5f52d2e2 12388 /* Make sure that all the cases set 'ret' appropriately. */
12389 ret = GS_UNHANDLED;
4ee9c684 12390 switch (TREE_CODE (*expr_p))
12391 {
12392 /* First deal with the special cases. */
12393
12394 case POSTINCREMENT_EXPR:
12395 case POSTDECREMENT_EXPR:
12396 case PREINCREMENT_EXPR:
12397 case PREDECREMENT_EXPR:
12398 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
b4c4a429 12399 fallback != fb_none,
12400 TREE_TYPE (*expr_p));
4ee9c684 12401 break;
12402
3370c0ec 12403 case VIEW_CONVERT_EXPR:
6977c762 12404 if ((fallback & fb_rvalue)
12405 && is_gimple_reg_type (TREE_TYPE (*expr_p))
3370c0ec 12406 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
12407 {
12408 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12409 post_p, is_gimple_val, fb_rvalue);
12410 recalculate_side_effects (*expr_p);
12411 break;
12412 }
12413 /* Fallthru. */
12414
4ee9c684 12415 case ARRAY_REF:
6374121b 12416 case ARRAY_RANGE_REF:
12417 case REALPART_EXPR:
12418 case IMAGPART_EXPR:
4ee9c684 12419 case COMPONENT_REF:
12420 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
f0ac919b 12421 fallback ? fallback : fb_rvalue);
4ee9c684 12422 break;
12423
12424 case COND_EXPR:
cf6b103e 12425 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
75a70cf9 12426
05fb348d 12427 /* C99 code may assign to an array in a structure value of a
12428 conditional expression, and this has undefined behavior
12429 only on execution, so create a temporary if an lvalue is
12430 required. */
12431 if (fallback == fb_lvalue)
12432 {
9ae1b28a 12433 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
f5224fc9 12434 mark_addressable (*expr_p);
5f52d2e2 12435 ret = GS_OK;
05fb348d 12436 }
4ee9c684 12437 break;
12438
12439 case CALL_EXPR:
f0ac919b 12440 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
75a70cf9 12441
05fb348d 12442 /* C99 code may assign to an array in a structure returned
12443 from a function, and this has undefined behavior only on
12444 execution, so create a temporary if an lvalue is
12445 required. */
12446 if (fallback == fb_lvalue)
12447 {
9ae1b28a 12448 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
f5224fc9 12449 mark_addressable (*expr_p);
5f52d2e2 12450 ret = GS_OK;
05fb348d 12451 }
4ee9c684 12452 break;
12453
12454 case TREE_LIST:
0d59b19d 12455 gcc_unreachable ();
4ee9c684 12456
12457 case COMPOUND_EXPR:
12458 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
12459 break;
12460
862f468c 12461 case COMPOUND_LITERAL_EXPR:
930802aa 12462 ret = gimplify_compound_literal_expr (expr_p, pre_p,
12463 gimple_test_f, fallback);
862f468c 12464 break;
12465
4ee9c684 12466 case MODIFY_EXPR:
12467 case INIT_EXPR:
e8f78e99 12468 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
12469 fallback != fb_none);
4ee9c684 12470 break;
12471
12472 case TRUTH_ANDIF_EXPR:
12473 case TRUTH_ORIF_EXPR:
4c7817e5 12474 {
12475 /* Preserve the original type of the expression and the
12476 source location of the outer expression. */
12477 tree org_type = TREE_TYPE (*expr_p);
12478 *expr_p = gimple_boolify (*expr_p);
eab3f3b8 12479 *expr_p = build3_loc (input_location, COND_EXPR,
4c7817e5 12480 org_type, *expr_p,
12481 fold_convert_loc
eab3f3b8 12482 (input_location,
4c7817e5 12483 org_type, boolean_true_node),
12484 fold_convert_loc
eab3f3b8 12485 (input_location,
4c7817e5 12486 org_type, boolean_false_node));
12487 ret = GS_OK;
12488 break;
12489 }
4ee9c684 12490
12491 case TRUTH_NOT_EXPR:
d994b25f 12492 {
569b4ac8 12493 tree type = TREE_TYPE (*expr_p);
12494 /* The parsers are careful to generate TRUTH_NOT_EXPR
12495 only with operands that are always zero or one.
12496 We do not fold here but handle the only interesting case
12497 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
d994b25f 12498 *expr_p = gimple_boolify (*expr_p);
569b4ac8 12499 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
12500 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
12501 TREE_TYPE (*expr_p),
12502 TREE_OPERAND (*expr_p, 0));
12503 else
12504 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
12505 TREE_TYPE (*expr_p),
12506 TREE_OPERAND (*expr_p, 0),
12507 build_int_cst (TREE_TYPE (*expr_p), 1));
12508 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
12509 *expr_p = fold_convert_loc (input_location, type, *expr_p);
12510 ret = GS_OK;
b19dcf9e 12511 break;
d994b25f 12512 }
538f00d1 12513
4ee9c684 12514 case ADDR_EXPR:
12515 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
12516 break;
12517
4644b593 12518 case ANNOTATE_EXPR:
12519 {
12520 tree cond = TREE_OPERAND (*expr_p, 0);
eb71996d 12521 tree kind = TREE_OPERAND (*expr_p, 1);
2a09b28c 12522 tree data = TREE_OPERAND (*expr_p, 2);
c291d34a 12523 tree type = TREE_TYPE (cond);
12524 if (!INTEGRAL_TYPE_P (type))
12525 {
12526 *expr_p = cond;
12527 ret = GS_OK;
12528 break;
12529 }
f9e245b2 12530 tree tmp = create_tmp_var (type);
4644b593 12531 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
1a91d914 12532 gcall *call
2a09b28c 12533 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
4644b593 12534 gimple_call_set_lhs (call, tmp);
12535 gimplify_seq_add_stmt (pre_p, call);
12536 *expr_p = tmp;
12537 ret = GS_ALL_DONE;
12538 break;
12539 }
12540
4ee9c684 12541 case VA_ARG_EXPR:
fcdd3ab3 12542 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
4ee9c684 12543 break;
12544
72dd6141 12545 CASE_CONVERT:
4ee9c684 12546 if (IS_EMPTY_STMT (*expr_p))
12547 {
12548 ret = GS_ALL_DONE;
12549 break;
12550 }
12551
12552 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
12553 || fallback == fb_none)
12554 {
12555 /* Just strip a conversion to void (or in void context) and
12556 try again. */
12557 *expr_p = TREE_OPERAND (*expr_p, 0);
5f52d2e2 12558 ret = GS_OK;
4ee9c684 12559 break;
12560 }
12561
12562 ret = gimplify_conversion (expr_p);
12563 if (ret == GS_ERROR)
12564 break;
12565 if (*expr_p != save_expr)
12566 break;
12567 /* FALLTHRU */
12568
12569 case FIX_TRUNC_EXPR:
4ee9c684 12570 /* unary_expr: ... | '(' cast ')' val | ... */
12571 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12572 is_gimple_val, fb_rvalue);
12573 recalculate_side_effects (*expr_p);
12574 break;
12575
1928904f 12576 case INDIRECT_REF:
182cf5a9 12577 {
12578 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
b93508c0 12579 bool notrap = TREE_THIS_NOTRAP (*expr_p);
182cf5a9 12580 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
12581
12582 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
12583 if (*expr_p != save_expr)
12584 {
12585 ret = GS_OK;
12586 break;
12587 }
12588
12589 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12590 is_gimple_reg, fb_rvalue);
bd992fe5 12591 if (ret == GS_ERROR)
12592 break;
182cf5a9 12593
bd992fe5 12594 recalculate_side_effects (*expr_p);
182cf5a9 12595 *expr_p = fold_build2_loc (input_location, MEM_REF,
12596 TREE_TYPE (*expr_p),
12597 TREE_OPERAND (*expr_p, 0),
12598 build_int_cst (saved_ptr_type, 0));
12599 TREE_THIS_VOLATILE (*expr_p) = volatilep;
b93508c0 12600 TREE_THIS_NOTRAP (*expr_p) = notrap;
182cf5a9 12601 ret = GS_OK;
12602 break;
12603 }
12604
 12605	  /* We arrive here through the various re-gimplification paths.  */
12606 case MEM_REF:
12607 /* First try re-folding the whole thing. */
12608 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
12609 TREE_OPERAND (*expr_p, 0),
12610 TREE_OPERAND (*expr_p, 1));
12611 if (tmp)
5f52d2e2 12612 {
292237f3 12613 REF_REVERSE_STORAGE_ORDER (tmp)
12614 = REF_REVERSE_STORAGE_ORDER (*expr_p);
182cf5a9 12615 *expr_p = tmp;
12616 recalculate_side_effects (*expr_p);
5f52d2e2 12617 ret = GS_OK;
12618 break;
12619 }
6b55f6d9 12620 /* Avoid re-gimplifying the address operand if it is already
12621 in suitable form. Re-gimplifying would mark the address
12622 operand addressable. Always gimplify when not in SSA form
12623 as we still may have to gimplify decls with value-exprs. */
6fcaaf9b 12624 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
6b55f6d9 12625 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
12626 {
12627 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12628 is_gimple_mem_ref_addr, fb_rvalue);
12629 if (ret == GS_ERROR)
12630 break;
12631 }
4ee9c684 12632 recalculate_side_effects (*expr_p);
182cf5a9 12633 ret = GS_ALL_DONE;
4ee9c684 12634 break;
12635
6b55f6d9 12636 /* Constants need not be gimplified. */
4ee9c684 12637 case INTEGER_CST:
12638 case REAL_CST:
06f0b99c 12639 case FIXED_CST:
4ee9c684 12640 case STRING_CST:
12641 case COMPLEX_CST:
12642 case VECTOR_CST:
4a8f88ff 12643 /* Drop the overflow flag on constants, we do not want
12644 that in the GIMPLE IL. */
12645 if (TREE_OVERFLOW_P (*expr_p))
12646 *expr_p = drop_tree_overflow (*expr_p);
4ee9c684 12647 ret = GS_ALL_DONE;
12648 break;
12649
12650 case CONST_DECL:
e67e5e1f 12651 /* If we require an lvalue, such as for ADDR_EXPR, retain the
91275768 12652 CONST_DECL node. Otherwise the decl is replaceable by its
e67e5e1f 12653 value. */
12654 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
12655 if (fallback & fb_lvalue)
12656 ret = GS_ALL_DONE;
12657 else
5f52d2e2 12658 {
12659 *expr_p = DECL_INITIAL (*expr_p);
12660 ret = GS_OK;
12661 }
4ee9c684 12662 break;
12663
7dd37241 12664 case DECL_EXPR:
75a70cf9 12665 ret = gimplify_decl_expr (expr_p, pre_p);
7dd37241 12666 break;
12667
4ee9c684 12668 case BIND_EXPR:
c3d09d4d 12669 ret = gimplify_bind_expr (expr_p, pre_p);
4ee9c684 12670 break;
12671
12672 case LOOP_EXPR:
12673 ret = gimplify_loop_expr (expr_p, pre_p);
12674 break;
12675
12676 case SWITCH_EXPR:
12677 ret = gimplify_switch_expr (expr_p, pre_p);
12678 break;
12679
4ee9c684 12680 case EXIT_EXPR:
12681 ret = gimplify_exit_expr (expr_p);
12682 break;
12683
12684 case GOTO_EXPR:
 12685	  /* If the target is not a LABEL_DECL, then it is a computed jump
12686 and the target needs to be gimplified. */
12687 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
6e721d50 12688 {
12689 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
12690 NULL, is_gimple_val, fb_rvalue);
12691 if (ret == GS_ERROR)
12692 break;
12693 }
75a70cf9 12694 gimplify_seq_add_stmt (pre_p,
12695 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
5f52d2e2 12696 ret = GS_ALL_DONE;
4ee9c684 12697 break;
12698
4a1849e3 12699 case PREDICT_EXPR:
75a70cf9 12700 gimplify_seq_add_stmt (pre_p,
12701 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
12702 PREDICT_EXPR_OUTCOME (*expr_p)));
12703 ret = GS_ALL_DONE;
12704 break;
4a1849e3 12705
4ee9c684 12706 case LABEL_EXPR:
3c77f69c 12707 ret = gimplify_label_expr (expr_p, pre_p);
629b6abc 12708 label = LABEL_EXPR_LABEL (*expr_p);
12709 gcc_assert (decl_function_context (label) == current_function_decl);
12710
 12711	  /* If the label is used in a goto statement, or the address of the
 12712	     label is taken, we need to unpoison all variables that were seen
 12713	     so far.  Doing so prevents us from reporting false positives.  */
1e35a093 12714 if (asan_poisoned_variables
629b6abc 12715 && asan_used_labels != NULL
12716 && asan_used_labels->contains (label))
12717 asan_poison_variables (asan_poisoned_variables, false, pre_p);
4ee9c684 12718 break;
12719
12720 case CASE_LABEL_EXPR:
75a70cf9 12721 ret = gimplify_case_label_expr (expr_p, pre_p);
629b6abc 12722
12723 if (gimplify_ctxp->live_switch_vars)
12724 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
12725 pre_p);
4ee9c684 12726 break;
12727
12728 case RETURN_EXPR:
12729 ret = gimplify_return_expr (*expr_p, pre_p);
12730 break;
12731
12732 case CONSTRUCTOR:
60f65a0a 12733 /* Don't reduce this in place; let gimplify_init_constructor work its
12734 magic. Buf if we're just elaborating this for side effects, just
12735 gimplify any element that has side-effects. */
12736 if (fallback == fb_none)
12737 {
c75b4594 12738 unsigned HOST_WIDE_INT ix;
48148244 12739 tree val;
9cb3832d 12740 tree temp = NULL_TREE;
48148244 12741 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
12742 if (TREE_SIDE_EFFECTS (val))
12743 append_to_statement_list (val, &temp);
60f65a0a 12744
9cb3832d 12745 *expr_p = temp;
5f52d2e2 12746 ret = temp ? GS_OK : GS_ALL_DONE;
60f65a0a 12747 }
49299ed6 12748 /* C99 code may assign to an array in a constructed
12749 structure or union, and this has undefined behavior only
12750 on execution, so create a temporary if an lvalue is
12751 required. */
12752 else if (fallback == fb_lvalue)
12753 {
9ae1b28a 12754 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
f5224fc9 12755 mark_addressable (*expr_p);
5f52d2e2 12756 ret = GS_OK;
49299ed6 12757 }
9cb3832d 12758 else
12759 ret = GS_ALL_DONE;
4ee9c684 12760 break;
12761
12762 /* The following are special cases that are not handled by the
12763 original GIMPLE grammar. */
12764
12765 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
12766 eliminated. */
12767 case SAVE_EXPR:
12768 ret = gimplify_save_expr (expr_p, pre_p, post_p);
12769 break;
12770
12771 case BIT_FIELD_REF:
2330f9c5 12772 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12773 post_p, is_gimple_lvalue, fb_either);
12774 recalculate_side_effects (*expr_p);
4ee9c684 12775 break;
12776
869bac23 12777 case TARGET_MEM_REF:
12778 {
12779 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
12780
e077c79b 12781 if (TMR_BASE (*expr_p))
869bac23 12782 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
28daba6f 12783 post_p, is_gimple_mem_ref_addr, fb_either);
869bac23 12784 if (TMR_INDEX (*expr_p))
12785 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
12786 post_p, is_gimple_val, fb_rvalue);
28daba6f 12787 if (TMR_INDEX2 (*expr_p))
12788 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
12789 post_p, is_gimple_val, fb_rvalue);
869bac23 12790 /* TMR_STEP and TMR_OFFSET are always integer constants. */
12791 ret = MIN (r0, r1);
12792 }
12793 break;
12794
4ee9c684 12795 case NON_LVALUE_EXPR:
12796 /* This should have been stripped above. */
0d59b19d 12797 gcc_unreachable ();
4ee9c684 12798
12799 case ASM_EXPR:
12800 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
12801 break;
12802
12803 case TRY_FINALLY_EXPR:
12804 case TRY_CATCH_EXPR:
75a70cf9 12805 {
12806 gimple_seq eval, cleanup;
1a91d914 12807 gtry *try_;
75a70cf9 12808
d7ebacec 12809	    /* Calls to destructors are generated automatically in FINALLY/CATCH
 12810	       blocks.  They should have location UNKNOWN_LOCATION.  However,
 12811	       gimplify_call_expr resets the location of such call stmts to
 12812	       input_location if it finds the stmt's location is unknown.  To
 12813	       prevent that for destructors, we set input_location to unknown here.
 12814	       Note that this only affects the destructor calls in FINALLY/CATCH
 12815	       blocks, and input_location is automatically restored to its original
 12816	       value by the end of gimplify_expr.  */
12817 input_location = UNKNOWN_LOCATION;
75a70cf9 12818 eval = cleanup = NULL;
12819 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
12820 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
d38ef55b 12821 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
12822 if (gimple_seq_empty_p (cleanup))
12823 {
12824 gimple_seq_add_seq (pre_p, eval);
12825 ret = GS_ALL_DONE;
12826 break;
12827 }
75a70cf9 12828 try_ = gimple_build_try (eval, cleanup,
12829 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
12830 ? GIMPLE_TRY_FINALLY
12831 : GIMPLE_TRY_CATCH);
54d28c30 12832 if (EXPR_HAS_LOCATION (save_expr))
ae117ec5 12833 gimple_set_location (try_, EXPR_LOCATION (save_expr));
54d28c30 12834 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
12835 gimple_set_location (try_, saved_location);
75a70cf9 12836 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
12837 gimple_try_set_catch_is_cleanup (try_,
12838 TRY_CATCH_IS_CLEANUP (*expr_p));
12839 gimplify_seq_add_stmt (pre_p, try_);
12840 ret = GS_ALL_DONE;
12841 break;
12842 }
4ee9c684 12843
12844 case CLEANUP_POINT_EXPR:
12845 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
12846 break;
12847
12848 case TARGET_EXPR:
12849 ret = gimplify_target_expr (expr_p, pre_p, post_p);
12850 break;
12851
12852 case CATCH_EXPR:
75a70cf9 12853 {
42acab1c 12854 gimple *c;
75a70cf9 12855 gimple_seq handler = NULL;
12856 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
12857 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
12858 gimplify_seq_add_stmt (pre_p, c);
12859 ret = GS_ALL_DONE;
12860 break;
12861 }
4ee9c684 12862
12863 case EH_FILTER_EXPR:
75a70cf9 12864 {
42acab1c 12865 gimple *ehf;
75a70cf9 12866 gimple_seq failure = NULL;
12867
12868 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
12869 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
e627cda1 12870 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
75a70cf9 12871 gimplify_seq_add_stmt (pre_p, ehf);
12872 ret = GS_ALL_DONE;
12873 break;
12874 }
4ee9c684 12875
215e2f1d 12876 case OBJ_TYPE_REF:
12877 {
12878 enum gimplify_status r0, r1;
75a70cf9 12879 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
12880 post_p, is_gimple_val, fb_rvalue);
12881 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
12882 post_p, is_gimple_val, fb_rvalue);
1eb667ae 12883 TREE_SIDE_EFFECTS (*expr_p) = 0;
215e2f1d 12884 ret = MIN (r0, r1);
12885 }
4ee9c684 12886 break;
12887
4ee9c684 12888 case LABEL_DECL:
12889 /* We get here when taking the address of a label. We mark
 12890	   the label as "forced", meaning it can never be removed and
12891 it is a potential target for any computed goto. */
12892 FORCED_LABEL (*expr_p) = 1;
12893 ret = GS_ALL_DONE;
12894 break;
12895
12896 case STATEMENT_LIST:
c3d09d4d 12897 ret = gimplify_statement_list (expr_p, pre_p);
4ee9c684 12898 break;
12899
80f06481 12900 case WITH_SIZE_EXPR:
12901 {
98c5a6a3 12902 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12903 post_p == &internal_post ? NULL : post_p,
12904 gimple_test_f, fallback);
12905 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
12906 is_gimple_val, fb_rvalue);
5f52d2e2 12907 ret = GS_ALL_DONE;
80f06481 12908 }
12909 break;
12910
4ee9c684 12911 case VAR_DECL:
6b275368 12912 case PARM_DECL:
22041d3f 12913 ret = gimplify_var_or_parm_decl (expr_p);
4ee9c684 12914 break;
12915
df2c34fc 12916 case RESULT_DECL:
ca4c3545 12917 /* When within an OMP context, notice uses of variables. */
df2c34fc 12918 if (gimplify_omp_ctxp)
12919 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
12920 ret = GS_ALL_DONE;
12921 break;
12922
90567983 12923 case DEBUG_EXPR_DECL:
12924 gcc_unreachable ();
12925
12926 case DEBUG_BEGIN_STMT:
12927 gimplify_seq_add_stmt (pre_p,
12928 gimple_build_debug_begin_stmt
12929 (TREE_BLOCK (*expr_p),
12930 EXPR_LOCATION (*expr_p)));
12931 ret = GS_ALL_DONE;
12932 *expr_p = NULL;
12933 break;
12934
eece3694 12935 case SSA_NAME:
12936 /* Allow callbacks into the gimplifier during optimization. */
12937 ret = GS_ALL_DONE;
12938 break;
12939
1e8e9920 12940 case OMP_PARALLEL:
75a70cf9 12941 gimplify_omp_parallel (expr_p, pre_p);
12942 ret = GS_ALL_DONE;
1e8e9920 12943 break;
12944
fd6481cf 12945 case OMP_TASK:
75a70cf9 12946 gimplify_omp_task (expr_p, pre_p);
12947 ret = GS_ALL_DONE;
fd6481cf 12948 break;
12949
1e8e9920 12950 case OMP_FOR:
3d483a94 12951 case OMP_SIMD:
bc7bff74 12952 case OMP_DISTRIBUTE:
43895be5 12953 case OMP_TASKLOOP:
ca4c3545 12954 case OACC_LOOP:
1e8e9920 12955 ret = gimplify_omp_for (expr_p, pre_p);
12956 break;
12957
ca4c3545 12958 case OACC_CACHE:
12959 gimplify_oacc_cache (expr_p, pre_p);
12960 ret = GS_ALL_DONE;
12961 break;
12962
2fc5e987 12963 case OACC_DECLARE:
12964 gimplify_oacc_declare (expr_p, pre_p);
12965 ret = GS_ALL_DONE;
12966 break;
12967
571b3486 12968 case OACC_HOST_DATA:
2c4c8725 12969 case OACC_DATA:
ca4c3545 12970 case OACC_KERNELS:
ca4c3545 12971 case OACC_PARALLEL:
1e8e9920 12972 case OMP_SECTIONS:
12973 case OMP_SINGLE:
bc7bff74 12974 case OMP_TARGET:
12975 case OMP_TARGET_DATA:
12976 case OMP_TEAMS:
75a70cf9 12977 gimplify_omp_workshare (expr_p, pre_p);
12978 ret = GS_ALL_DONE;
1e8e9920 12979 break;
12980
ca4c3545 12981 case OACC_ENTER_DATA:
12982 case OACC_EXIT_DATA:
12983 case OACC_UPDATE:
bc7bff74 12984 case OMP_TARGET_UPDATE:
43895be5 12985 case OMP_TARGET_ENTER_DATA:
12986 case OMP_TARGET_EXIT_DATA:
bc7bff74 12987 gimplify_omp_target_update (expr_p, pre_p);
12988 ret = GS_ALL_DONE;
12989 break;
12990
1e8e9920 12991 case OMP_SECTION:
12992 case OMP_MASTER:
12993 case OMP_ORDERED:
12994 case OMP_CRITICAL:
75a70cf9 12995 {
12996 gimple_seq body = NULL;
42acab1c 12997 gimple *g;
75a70cf9 12998
12999 gimplify_and_add (OMP_BODY (*expr_p), &body);
13000 switch (TREE_CODE (*expr_p))
13001 {
13002 case OMP_SECTION:
13003 g = gimple_build_omp_section (body);
13004 break;
13005 case OMP_MASTER:
13006 g = gimple_build_omp_master (body);
13007 break;
13008 case OMP_ORDERED:
43895be5 13009 g = gimplify_omp_ordered (*expr_p, body);
75a70cf9 13010 break;
13011 case OMP_CRITICAL:
43895be5 13012 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
13013 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
98588013 13014 gimplify_adjust_omp_clauses (pre_p, body,
43895be5 13015 &OMP_CRITICAL_CLAUSES (*expr_p),
13016 OMP_CRITICAL);
75a70cf9 13017 g = gimple_build_omp_critical (body,
43895be5 13018 OMP_CRITICAL_NAME (*expr_p),
13019 OMP_CRITICAL_CLAUSES (*expr_p));
75a70cf9 13020 break;
13021 default:
13022 gcc_unreachable ();
13023 }
13024 gimplify_seq_add_stmt (pre_p, g);
13025 ret = GS_ALL_DONE;
13026 break;
13027 }
1e8e9920 13028
7e5a76c8 13029 case OMP_TASKGROUP:
13030 {
13031 gimple_seq body = NULL;
13032
13033 tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
13034 gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
13035 OMP_TASKGROUP);
13036 gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);
13037 gimplify_and_add (OMP_BODY (*expr_p), &body);
13038 gimple_seq cleanup = NULL;
13039 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
13040 gimple *g = gimple_build_call (fn, 0);
13041 gimple_seq_add_stmt (&cleanup, g);
13042 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
13043 body = NULL;
13044 gimple_seq_add_stmt (&body, g);
13045 g = gimple_build_omp_taskgroup (body, *pclauses);
13046 gimplify_seq_add_stmt (pre_p, g);
13047 ret = GS_ALL_DONE;
13048 break;
13049 }
13050
1e8e9920 13051 case OMP_ATOMIC:
2169f33b 13052 case OMP_ATOMIC_READ:
13053 case OMP_ATOMIC_CAPTURE_OLD:
13054 case OMP_ATOMIC_CAPTURE_NEW:
1e8e9920 13055 ret = gimplify_omp_atomic (expr_p, pre_p);
13056 break;
13057
4c0315d0 13058 case TRANSACTION_EXPR:
13059 ret = gimplify_transaction (expr_p, pre_p);
13060 break;
13061
b9be572e 13062 case TRUTH_AND_EXPR:
13063 case TRUTH_OR_EXPR:
13064 case TRUTH_XOR_EXPR:
4c7817e5 13065 {
b19dcf9e 13066 tree orig_type = TREE_TYPE (*expr_p);
08fc122d 13067 tree new_type, xop0, xop1;
4c7817e5 13068 *expr_p = gimple_boolify (*expr_p);
08fc122d 13069 new_type = TREE_TYPE (*expr_p);
13070 if (!useless_type_conversion_p (orig_type, new_type))
4c7817e5 13071 {
eab3f3b8 13072 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
4c7817e5 13073 ret = GS_OK;
13074 break;
13075 }
d515ee79 13076
b19dcf9e 13077 /* Boolified binary truth expressions are semantically equivalent
13078 to bitwise binary expressions. Canonicalize them to the
13079 bitwise variant. */
13080 switch (TREE_CODE (*expr_p))
13081 {
13082 case TRUTH_AND_EXPR:
13083 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
13084 break;
13085 case TRUTH_OR_EXPR:
13086 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
13087 break;
13088 case TRUTH_XOR_EXPR:
13089 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
13090 break;
13091 default:
13092 break;
13093 }
08fc122d 13094 /* Now make sure that operands have compatible type to
13095 expression's new_type. */
13096 xop0 = TREE_OPERAND (*expr_p, 0);
13097 xop1 = TREE_OPERAND (*expr_p, 1);
13098 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
13099 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
13100 new_type,
13101 xop0);
13102 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
13103 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
13104 new_type,
13105 xop1);
b19dcf9e 13106 /* Continue classified as tcc_binary. */
13107 goto expr_2;
d515ee79 13108 }
b9be572e 13109
187663cb 13110 case VEC_COND_EXPR:
1642bc45 13111 {
13112 enum gimplify_status r0, r1, r2;
13113
13114 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13115 post_p, is_gimple_condexpr, fb_rvalue);
13116 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
13117 post_p, is_gimple_val, fb_rvalue);
13118 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
13119 post_p, is_gimple_val, fb_rvalue);
13120
13121 ret = MIN (MIN (r0, r1), r2);
13122 recalculate_side_effects (*expr_p);
13123 }
13124 break;
13125
f4803722 13126 case VEC_PERM_EXPR:
b9be572e 13127 /* Classified as tcc_expression. */
13128 goto expr_3;
13129
2506d97a 13130 case BIT_INSERT_EXPR:
13131 /* Argument 3 is a constant. */
13132 goto expr_2;
13133
0de36bdb 13134 case POINTER_PLUS_EXPR:
704d7315 13135 {
13136 enum gimplify_status r0, r1;
13137 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13138 post_p, is_gimple_val, fb_rvalue);
13139 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
13140 post_p, is_gimple_val, fb_rvalue);
13141 recalculate_side_effects (*expr_p);
13142 ret = MIN (r0, r1);
704d7315 13143 break;
13144 }
75a70cf9 13145
4ee9c684 13146 default:
0d59b19d 13147 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
4ee9c684 13148 {
ce45a448 13149 case tcc_comparison:
9c530f25 13150	      /* Handle comparisons of aggregate objects whose mode is not scalar
 13151		 with a call to memcmp.  It would be nice to only have to do
13152 this for variable-sized objects, but then we'd have to allow
13153 the same nest of reference nodes we allow for MODIFY_EXPR and
13154 that's too complex.
13155
13156 Compare scalar mode aggregates as scalar mode values. Using
13157 memcmp for them would be very inefficient at best, and is
13158 plain wrong if bitfields are involved. */
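	      /* E.g. an equality test of two variable-sized structs is
		 lowered below (via gimplify_variable_sized_compare) to
		 roughly

		   memcmp (&a, &b, sizeof (a)) == 0

		 while a struct small enough to have an integer mode is
		 compared as the underlying scalar value.  */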
75a70cf9 13159 {
13160 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
9c530f25 13161
3a82f2b4 13162 /* Vector comparisons need no boolification. */
13163 if (TREE_CODE (type) == VECTOR_TYPE)
13164 goto expr_2;
13165 else if (!AGGREGATE_TYPE_P (type))
4b5f1658 13166 {
13167 tree org_type = TREE_TYPE (*expr_p);
13168 *expr_p = gimple_boolify (*expr_p);
13169 if (!useless_type_conversion_p (org_type,
13170 TREE_TYPE (*expr_p)))
13171 {
13172 *expr_p = fold_convert_loc (input_location,
13173 org_type, *expr_p);
13174 ret = GS_OK;
13175 }
13176 else
13177 goto expr_2;
13178 }
75a70cf9 13179 else if (TYPE_MODE (type) != BLKmode)
13180 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
13181 else
13182 ret = gimplify_variable_sized_compare (expr_p);
9c530f25 13183
75a70cf9 13184 break;
9c530f25 13185 }
7c2f0500 13186
0d59b19d 13187 /* If *EXPR_P does not need to be special-cased, handle it
13188 according to its class. */
ce45a448 13189 case tcc_unary:
0d59b19d 13190 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13191 post_p, is_gimple_val, fb_rvalue);
13192 break;
4ee9c684 13193
ce45a448 13194 case tcc_binary:
0d59b19d 13195 expr_2:
13196 {
13197 enum gimplify_status r0, r1;
7c2f0500 13198
0d59b19d 13199 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
75a70cf9 13200 post_p, is_gimple_val, fb_rvalue);
0d59b19d 13201 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
13202 post_p, is_gimple_val, fb_rvalue);
7c2f0500 13203
0d59b19d 13204 ret = MIN (r0, r1);
13205 break;
13206 }
7c2f0500 13207
b9be572e 13208 expr_3:
13209 {
13210 enum gimplify_status r0, r1, r2;
13211
13212 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13213 post_p, is_gimple_val, fb_rvalue);
13214 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
13215 post_p, is_gimple_val, fb_rvalue);
13216 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
13217 post_p, is_gimple_val, fb_rvalue);
13218
13219 ret = MIN (MIN (r0, r1), r2);
13220 break;
13221 }
13222
ce45a448 13223 case tcc_declaration:
13224 case tcc_constant:
4ee9c684 13225 ret = GS_ALL_DONE;
0d59b19d 13226 goto dont_recalculate;
7c2f0500 13227
0d59b19d 13228 default:
b9be572e 13229 gcc_unreachable ();
4ee9c684 13230 }
4ee9c684 13231
13232 recalculate_side_effects (*expr_p);
75a70cf9 13233
0d59b19d 13234 dont_recalculate:
4ee9c684 13235 break;
13236 }
7c2f0500 13237
5f52d2e2 13238 gcc_assert (*expr_p || ret != GS_OK);
4ee9c684 13239 }
13240 while (ret == GS_OK);
13241
13242 /* If we encountered an error_mark somewhere nested inside, either
13243 stub out the statement or propagate the error back out. */
13244 if (ret == GS_ERROR)
13245 {
13246 if (is_statement)
b6431126 13247 *expr_p = NULL;
4ee9c684 13248 goto out;
13249 }
13250
4ee9c684 13251 /* This was only valid as a return value from the langhook, which
13252 we handled. Make sure it doesn't escape from any other context. */
0d59b19d 13253 gcc_assert (ret != GS_UNHANDLED);
4ee9c684 13254
b6431126 13255 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
4ee9c684 13256 {
13257 /* We aren't looking for a value, and we don't have a valid
3126ef49 13258 statement. If it doesn't have side-effects, throw it away.
13259 We can also get here with code such as "*&&L;", where L is
13260 a LABEL_DECL that is marked as FORCED_LABEL. */
13261 if (TREE_CODE (*expr_p) == LABEL_DECL
13262 || !TREE_SIDE_EFFECTS (*expr_p))
b6431126 13263 *expr_p = NULL;
4ee9c684 13264 else if (!TREE_THIS_VOLATILE (*expr_p))
6374121b 13265 {
13266 /* This is probably a _REF that contains something nested that
13267 has side effects. Recurse through the operands to find it. */
13268 enum tree_code code = TREE_CODE (*expr_p);
13269
0d59b19d 13270 switch (code)
6374121b 13271 {
0d59b19d 13272 case COMPONENT_REF:
e1f5655d 13273 case REALPART_EXPR:
13274 case IMAGPART_EXPR:
13275 case VIEW_CONVERT_EXPR:
0d59b19d 13276 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13277 gimple_test_f, fallback);
13278 break;
13279
feb12e1f 13280 case ARRAY_REF:
13281 case ARRAY_RANGE_REF:
6374121b 13282 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13283 gimple_test_f, fallback);
13284 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
0d59b19d 13285 gimple_test_f, fallback);
13286 break;
13287
13288 default:
13289 /* Anything else with side-effects must be converted to
feb12e1f 13290 a valid statement before we get here. */
0d59b19d 13291 gcc_unreachable ();
6374121b 13292 }
6374121b 13293
b6431126 13294 *expr_p = NULL;
6374121b 13295 }
feb12e1f 13296 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
13297 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
4ee9c684 13298 {
feb12e1f 13299 /* Historically, the compiler has treated a bare reference
13300 to a non-BLKmode volatile lvalue as forcing a load. */
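	      /* E.g. given "volatile int v;", a statement consisting of
		 just "v;" still reads v, through the temporary created
		 below.  */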
a3986bc0 13301 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
75a70cf9 13302
d06e70e5 13303 /* Normally, we do not want to create a temporary for a
39da0446 13304 TREE_ADDRESSABLE type because such a type should not be
13305 copied by bitwise-assignment. However, we make an
13306 exception here, as all we are doing here is ensuring that
13307 we read the bytes that make up the type. We use
13308 create_tmp_var_raw because create_tmp_var will abort when
c7756fad 13309 given a TREE_ADDRESSABLE type. */
39da0446 13310 tree tmp = create_tmp_var_raw (type, "vol");
13311 gimple_add_tmp_var (tmp);
75a70cf9 13312 gimplify_assign (tmp, *expr_p, pre_p);
13313 *expr_p = NULL;
4ee9c684 13314 }
13315 else
13316 /* We can't do anything useful with a volatile reference to
feb12e1f 13317 an incomplete type, so just throw it away. Likewise for
13318 a BLKmode type, since any implicit inner load should
13319 already have been turned into an explicit one by the
13320 gimplification process. */
b6431126 13321 *expr_p = NULL;
4ee9c684 13322 }
13323
13324 /* If we are gimplifying at the statement level, we're done. Tack
75a70cf9 13325 everything together and return. */
2363ef00 13326 if (fallback == fb_none || is_statement)
4ee9c684 13327 {
75a70cf9 13328 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
13329 it out for GC to reclaim it. */
13330 *expr_p = NULL_TREE;
13331
13332 if (!gimple_seq_empty_p (internal_pre)
13333 || !gimple_seq_empty_p (internal_post))
20d111f2 13334 {
75a70cf9 13335 gimplify_seq_add_seq (&internal_pre, internal_post);
13336 gimplify_seq_add_seq (pre_p, internal_pre);
20d111f2 13337 }
75a70cf9 13338
13339 /* The result of gimplifying *EXPR_P is going to be the last few
13340 statements in *PRE_P and *POST_P. Add location information
13341 to all the statements that were added by the gimplification
13342 helpers. */
13343 if (!gimple_seq_empty_p (*pre_p))
13344 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
13345
13346 if (!gimple_seq_empty_p (*post_p))
13347 annotate_all_with_location_after (*post_p, post_last_gsi,
13348 input_location);
13349
4ee9c684 13350 goto out;
13351 }
13352
75a70cf9 13353#ifdef ENABLE_GIMPLE_CHECKING
13354 if (*expr_p)
13355 {
13356 enum tree_code code = TREE_CODE (*expr_p);
13357 /* These expressions should already be in gimple IR form. */
13358 gcc_assert (code != MODIFY_EXPR
13359 && code != ASM_EXPR
13360 && code != BIND_EXPR
13361 && code != CATCH_EXPR
a368a029 13362 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
75a70cf9 13363 && code != EH_FILTER_EXPR
13364 && code != GOTO_EXPR
13365 && code != LABEL_EXPR
13366 && code != LOOP_EXPR
75a70cf9 13367 && code != SWITCH_EXPR
13368 && code != TRY_FINALLY_EXPR
ca4c3545 13369 && code != OACC_PARALLEL
13370 && code != OACC_KERNELS
13371 && code != OACC_DATA
13372 && code != OACC_HOST_DATA
13373 && code != OACC_DECLARE
13374 && code != OACC_UPDATE
13375 && code != OACC_ENTER_DATA
13376 && code != OACC_EXIT_DATA
13377 && code != OACC_CACHE
75a70cf9 13378 && code != OMP_CRITICAL
13379 && code != OMP_FOR
ca4c3545 13380 && code != OACC_LOOP
75a70cf9 13381 && code != OMP_MASTER
bc7bff74 13382 && code != OMP_TASKGROUP
75a70cf9 13383 && code != OMP_ORDERED
13384 && code != OMP_PARALLEL
13385 && code != OMP_SECTIONS
13386 && code != OMP_SECTION
13387 && code != OMP_SINGLE);
13388 }
13389#endif
4ee9c684 13390
75a70cf9 13391 /* Otherwise we're gimplifying a subexpression, so the resulting
13392 value is interesting. If it's a valid operand that matches
13393 GIMPLE_TEST_F, we're done. Unless we are handling some
13394 post-effects internally; if that's the case, we need to copy into
13395 a temporary before adding the post-effects to POST_P. */
13396 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
4ee9c684 13397 goto out;
13398
13399 /* Otherwise, we need to create a new temporary for the gimplified
13400 expression. */
13401
13402 /* We can't return an lvalue if we have an internal postqueue. The
13403 object the lvalue refers to would (probably) be modified by the
13404 postqueue; we need to copy the value out first, which means an
13405 rvalue. */
75a70cf9 13406 if ((fallback & fb_lvalue)
13407 && gimple_seq_empty_p (internal_post)
c2514472 13408 && is_gimple_addressable (*expr_p))
4ee9c684 13409 {
13410 /* An lvalue will do. Take the address of the expression, store it
 13411	 in a temporary, and replace the expression with a MEM_REF of
 13412	 that temporary.  */
f2160a0e 13413 tree ref_alias_type = reference_alias_ptr_type (*expr_p);
13414 unsigned int ref_align = get_object_alignment (*expr_p);
13415 tree ref_type = TREE_TYPE (*expr_p);
389dd41b 13416 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
4ee9c684 13417 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
f2160a0e 13418 if (TYPE_ALIGN (ref_type) != ref_align)
13419 ref_type = build_aligned_type (ref_type, ref_align);
13420 *expr_p = build2 (MEM_REF, ref_type,
13421 tmp, build_zero_cst (ref_alias_type));
4ee9c684 13422 }
47f11e84 13423 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
4ee9c684 13424 {
75a70cf9 13425 /* An rvalue will do. Assign the gimplified expression into a
13426 new temporary TMP and replace the original expression with
13427 TMP. First, make sure that the expression has a type so that
13428 it can be assigned into a temporary. */
0d59b19d 13429 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
a31fefa3 13430 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
4ee9c684 13431 }
0d59b19d 13432 else
4ee9c684 13433 {
75a70cf9 13434#ifdef ENABLE_GIMPLE_CHECKING
0d59b19d 13435 if (!(fallback & fb_mayfail))
13436 {
13437 fprintf (stderr, "gimplification failed:\n");
1ffa4346 13438 print_generic_expr (stderr, *expr_p);
0d59b19d 13439 debug_tree (*expr_p);
13440 internal_error ("gimplification failed");
13441 }
13442#endif
13443 gcc_assert (fallback & fb_mayfail);
75a70cf9 13444
0d59b19d 13445 /* If this is an asm statement, and the user asked for the
89f18f73 13446 impossible, don't die. Fail and let gimplify_asm_expr
0d59b19d 13447 issue an error. */
4ee9c684 13448 ret = GS_ERROR;
13449 goto out;
13450 }
4ee9c684 13451
4ee9c684 13452 /* Make sure the temporary matches our predicate. */
0d59b19d 13453 gcc_assert ((*gimple_test_f) (*expr_p));
4ee9c684 13454
75a70cf9 13455 if (!gimple_seq_empty_p (internal_post))
4ee9c684 13456 {
75a70cf9 13457 annotate_all_with_location (internal_post, input_location);
13458 gimplify_seq_add_seq (pre_p, internal_post);
4ee9c684 13459 }
13460
13461 out:
13462 input_location = saved_location;
13463 return ret;
13464}
13465
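/* A typical use of gimplify_expr in this file is to force one operand of
   an expression into a given shape in place, e.g.

     enum gimplify_status r
       = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			is_gimple_val, fb_rvalue);

   which leaves a simple rvalue in the operand slot and appends any
   statements needed to compute it to *PRE_P.  */
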
9ae1b28a 13466/* Like gimplify_expr but make sure the gimplified result is not itself
 13467   an SSA name (a decl is used instead where one would have been produced).
 13468   Temporaries required to evaluate *EXPR_P may still be SSA names.  */
13469
13470static enum gimplify_status
13471gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
13472 bool (*gimple_test_f) (tree), fallback_t fallback,
13473 bool allow_ssa)
13474{
13475 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
13476 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
13477 gimple_test_f, fallback);
13478 if (! allow_ssa
13479 && TREE_CODE (*expr_p) == SSA_NAME)
13480 {
13481 tree name = *expr_p;
13482 if (was_ssa_name_p)
13483 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
13484 else
13485 {
13486 /* Avoid the extra copy if possible. */
13487 *expr_p = create_tmp_reg (TREE_TYPE (name));
13488 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
13489 release_ssa_name (name);
13490 }
13491 }
13492 return ret;
13493}
13494
6374121b 13495/* Look through TYPE for variable-sized objects and gimplify each such
b6431126 13496 size that we find. Add to LIST_P any statements generated. */
6374121b 13497
b6431126 13498void
75a70cf9 13499gimplify_type_sizes (tree type, gimple_seq *list_p)
6374121b 13500{
bc97b18f 13501 tree field, t;
13502
7008e67a 13503 if (type == NULL || type == error_mark_node)
017775ce 13504 return;
bc97b18f 13505
f7f07c95 13506 /* We first do the main variant, then copy into any other variants. */
bc97b18f 13507 type = TYPE_MAIN_VARIANT (type);
6374121b 13508
017775ce 13509 /* Avoid infinite recursion. */
7008e67a 13510 if (TYPE_SIZES_GIMPLIFIED (type))
017775ce 13511 return;
13512
13513 TYPE_SIZES_GIMPLIFIED (type) = 1;
13514
6374121b 13515 switch (TREE_CODE (type))
13516 {
6374121b 13517 case INTEGER_TYPE:
13518 case ENUMERAL_TYPE:
13519 case BOOLEAN_TYPE:
6374121b 13520 case REAL_TYPE:
06f0b99c 13521 case FIXED_POINT_TYPE:
b6431126 13522 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
13523 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
bc97b18f 13524
13525 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
13526 {
13527 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
13528 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
bc97b18f 13529 }
6374121b 13530 break;
13531
13532 case ARRAY_TYPE:
bc97b18f 13533 /* These types may not have declarations, so handle them here. */
017775ce 13534 gimplify_type_sizes (TREE_TYPE (type), list_p);
13535 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
60d11eb6 13536      /* Ensure VLA bounds aren't removed: for -O0 they should be variables
 13537	 with assigned stack slots, and for -O1+ -g they should be tracked
 13538	 by VTA.  */
28fa603b 13539 if (!(TYPE_NAME (type)
13540 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
13541 && DECL_IGNORED_P (TYPE_NAME (type)))
13542 && TYPE_DOMAIN (type)
78fa9ba7 13543 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
13544 {
13545 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
53e9c5c4 13546 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
78fa9ba7 13547 DECL_IGNORED_P (t) = 0;
13548 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
53e9c5c4 13549 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
78fa9ba7 13550 DECL_IGNORED_P (t) = 0;
13551 }
6374121b 13552 break;
13553
13554 case RECORD_TYPE:
13555 case UNION_TYPE:
13556 case QUAL_UNION_TYPE:
1767a056 13557 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
6374121b 13558 if (TREE_CODE (field) == FIELD_DECL)
017775ce 13559 {
13560 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
48855189 13561 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
13562 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
017775ce 13563 gimplify_type_sizes (TREE_TYPE (field), list_p);
13564 }
13565 break;
13566
13567 case POINTER_TYPE:
13568 case REFERENCE_TYPE:
6e8435a1 13569 /* We used to recurse on the pointed-to type here, which turned out to
13570 be incorrect because its definition might refer to variables not
13571 yet initialized at this point if a forward declaration is involved.
13572
13573 It was actually useful for anonymous pointed-to types to ensure
13574 that the sizes evaluation dominates every possible later use of the
13575 values. Restricting to such types here would be safe since there
eed50407 13576 is no possible forward declaration around, but would introduce an
13577 undesirable middle-end semantic to anonymity. We then defer to
13578 front-ends the responsibility of ensuring that the sizes are
13579 evaluated both early and late enough, e.g. by attaching artificial
6e8435a1 13580 type declarations to the tree. */
6374121b 13581 break;
13582
13583 default:
13584 break;
13585 }
13586
b6431126 13587 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
13588 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
6374121b 13589
bc97b18f 13590 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
676a6eea 13591 {
bc97b18f 13592 TYPE_SIZE (t) = TYPE_SIZE (type);
13593 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
13594 TYPE_SIZES_GIMPLIFIED (t) = 1;
676a6eea 13595 }
676a6eea 13596}
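
/* For example, for the variable-length array in

     void f (int n) { int a[n]; ... }

   the TYPE_SIZE and the domain bounds of the type of 'a' are expressions
   involving 'n'; gimplify_type_sizes above gimplifies each of them and
   appends the statements that compute them to LIST_P.  */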
13597
13598/* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
13599 a size or position, has had all of its SAVE_EXPRs evaluated.
75a70cf9 13600 We add any required statements to *STMT_P. */
6374121b 13601
13602void
75a70cf9 13603gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
6374121b 13604{
5a1fe2db 13605 tree expr = *expr_p;
78317330 13606
6374121b 13607 /* We don't do anything if the value isn't there, is constant, or contains
98c21655 13608     a PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
06b27565 13609 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
98c21655 13610 will want to replace it with a new variable, but that will cause problems
13611 if this type is from outside the function. It's OK to have that here. */
a5dd5c0e 13612 if (expr == NULL_TREE
13613 || is_gimple_constant (expr)
13614 || TREE_CODE (expr) == VAR_DECL
13615 || CONTAINS_PLACEHOLDER_P (expr))
6374121b 13616 return;
13617
78317330 13618 *expr_p = unshare_expr (expr);
13619
9ae1b28a 13620 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
13621 if the def vanishes. */
13622 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
a5dd5c0e 13623
 13624  /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
 13625     FE, ensure that it is a VAR_DECL; otherwise we might handle some decls
 13626     through gimplify_vla_decl even when all of their sizes would be
	    INTEGER_CSTs.  */
13627 if (is_gimple_constant (*expr_p))
13628 *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
6374121b 13629}
4ee9c684 13630
7e3aae05 13631/* Gimplify the statements in the body of FNDECL and return a GIMPLE_BIND node
13632 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
13633 is true, also gimplify the parameters. */
75a70cf9 13634
1a91d914 13635gbind *
7e3aae05 13636gimplify_body (tree fndecl, bool do_parms)
4ee9c684 13637{
13638 location_t saved_location = input_location;
9311b397 13639 gimple_seq parm_stmts, parm_cleanup = NULL, seq;
42acab1c 13640 gimple *outer_stmt;
1a91d914 13641 gbind *outer_bind;
4ee9c684 13642
13643 timevar_push (TV_TREE_GIMPLIFY);
1e8e9920 13644
9ae1b28a 13645 init_tree_ssa (cfun);
13646
c6871992 13647 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
13648 gimplification. */
13649 default_rtl_profile ();
13650
1e8e9920 13651 gcc_assert (gimplify_ctxp == NULL);
9ae1b28a 13652 push_gimplify_context (true);
4ee9c684 13653
ca4c3545 13654 if (flag_openacc || flag_openmp)
bc7bff74 13655 {
13656 gcc_assert (gimplify_omp_ctxp == NULL);
13657 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
13658 gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
13659 }
13660
6374121b 13661 /* Unshare most shared trees in the body and in that of any nested functions.
13662 It would seem we don't have to do this for nested functions because
13663 they are supposed to be output and then the outer function gimplified
13664 first, but the g++ front end doesn't always do it that way. */
7e3aae05 13665 unshare_body (fndecl);
13666 unvisit_body (fndecl);
4ee9c684 13667
f0b5f617 13668 /* Make sure input_location isn't set to something weird. */
4ee9c684 13669 input_location = DECL_SOURCE_LOCATION (fndecl);
13670
6b275368 13671 /* Resolve callee-copies. This has to be done before processing
13672 the body so that DECL_VALUE_EXPR gets processed correctly. */
9311b397 13673 parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;
6b275368 13674
4ee9c684 13675 /* Gimplify the function's body. */
75a70cf9 13676 seq = NULL;
7e3aae05 13677 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
1a91d914 13678 outer_stmt = gimple_seq_first_stmt (seq);
13679 if (!outer_stmt)
4ee9c684 13680 {
1a91d914 13681 outer_stmt = gimple_build_nop ();
13682 gimplify_seq_add_stmt (&seq, outer_stmt);
4ee9c684 13683 }
6374121b 13684
75a70cf9 13685 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
13686 not the case, wrap everything in a GIMPLE_BIND to make it so. */
1a91d914 13687 if (gimple_code (outer_stmt) == GIMPLE_BIND
75a70cf9 13688 && gimple_seq_first (seq) == gimple_seq_last (seq))
1a91d914 13689 outer_bind = as_a <gbind *> (outer_stmt);
75a70cf9 13690 else
13691 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
13692
7e3aae05 13693 DECL_SAVED_TREE (fndecl) = NULL_TREE;
6b275368 13694
13695 /* If we had callee-copies statements, insert them at the beginning
4847708a 13696     of the function and clear DECL_HAS_VALUE_EXPR_P on the parameters.  */
75a70cf9 13697 if (!gimple_seq_empty_p (parm_stmts))
6b275368 13698 {
4847708a 13699 tree parm;
13700
75a70cf9 13701 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
9311b397 13702 if (parm_cleanup)
13703 {
13704 gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
13705 GIMPLE_TRY_FINALLY);
13706 parm_stmts = NULL;
13707 gimple_seq_add_stmt (&parm_stmts, g);
13708 }
75a70cf9 13709 gimple_bind_set_body (outer_bind, parm_stmts);
4847708a 13710
13711 for (parm = DECL_ARGUMENTS (current_function_decl);
1767a056 13712 parm; parm = DECL_CHAIN (parm))
4847708a 13713 if (DECL_HAS_VALUE_EXPR_P (parm))
13714 {
13715 DECL_HAS_VALUE_EXPR_P (parm) = 0;
13716 DECL_IGNORED_P (parm) = 0;
13717 }
6b275368 13718 }
13719
ca4c3545 13720 if ((flag_openacc || flag_openmp || flag_openmp_simd)
13721 && gimplify_omp_ctxp)
bc7bff74 13722 {
13723 delete_omp_context (gimplify_omp_ctxp);
13724 gimplify_omp_ctxp = NULL;
13725 }
13726
75a70cf9 13727 pop_gimplify_context (outer_bind);
1e8e9920 13728 gcc_assert (gimplify_ctxp == NULL);
4ee9c684 13729
382ecba7 13730 if (flag_checking && !seen_error ())
fd7ad53b 13731 verify_gimple_in_seq (gimple_bind_body (outer_bind));
4ee9c684 13732
13733 timevar_pop (TV_TREE_GIMPLIFY);
13734 input_location = saved_location;
75a70cf9 13735
13736 return outer_bind;
4ee9c684 13737}
13738
470a0ecd 13739typedef char *char_p; /* For DEF_VEC_P. */
470a0ecd 13740
13741/* Return whether we should exclude FNDECL from instrumentation. */
13742
13743static bool
13744flag_instrument_functions_exclude_p (tree fndecl)
13745{
f1f41a6c 13746 vec<char_p> *v;
470a0ecd 13747
f1f41a6c 13748 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
13749 if (v && v->length () > 0)
470a0ecd 13750 {
13751 const char *name;
13752 int i;
13753 char *s;
13754
13755 name = lang_hooks.decl_printable_name (fndecl, 0);
f1f41a6c 13756 FOR_EACH_VEC_ELT (*v, i, s)
470a0ecd 13757 if (strstr (name, s) != NULL)
13758 return true;
13759 }
13760
f1f41a6c 13761 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
13762 if (v && v->length () > 0)
470a0ecd 13763 {
13764 const char *name;
13765 int i;
13766 char *s;
13767
13768 name = DECL_SOURCE_FILE (fndecl);
f1f41a6c 13769 FOR_EACH_VEC_ELT (*v, i, s)
470a0ecd 13770 if (strstr (name, s) != NULL)
13771 return true;
13772 }
13773
13774 return false;
13775}
13776
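/* The exclusion lists walked above are populated from the command-line
   options -finstrument-functions-exclude-function-list=sym,sym,... and
   -finstrument-functions-exclude-file-list=file,file,...; note that the
   match is a plain strstr substring test on the printable name or on the
   source file name.  */
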
4ee9c684 13777/* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
75a70cf9 13778 node for the function we want to gimplify.
48e1416a 13779
57859735 13780   The resulting sequence of GIMPLE statements is stored as the GIMPLE body
75a70cf9 13781   of FNDECL (via gimple_set_body); nothing is returned.  */
4ee9c684 13782
13783void
13784gimplify_function_tree (tree fndecl)
13785{
9078126c 13786 tree parm, ret;
75a70cf9 13787 gimple_seq seq;
1a91d914 13788 gbind *bind;
4ee9c684 13789
bfec3452 13790 gcc_assert (!gimple_body (fndecl));
13791
87d4aa85 13792 if (DECL_STRUCT_FUNCTION (fndecl))
13793 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
13794 else
13795 push_struct_function (fndecl);
4ee9c684 13796
8e21ebb1 13797 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
13798 if necessary. */
13799 cfun->curr_properties |= PROP_gimple_lva;
13800
1767a056 13801 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
50c96bdc 13802 {
13803 /* Preliminarily mark non-addressed complex variables as eligible
13804 for promotion to gimple registers. We'll transform their uses
13805 as we find them. */
8ea8de24 13806 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
13807 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
50c96bdc 13808 && !TREE_THIS_VOLATILE (parm)
13809 && !needs_to_live_in_memory (parm))
8ea8de24 13810 DECL_GIMPLE_REG_P (parm) = 1;
50c96bdc 13811 }
13812
13813 ret = DECL_RESULT (fndecl);
8ea8de24 13814 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
a70770d2 13815 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
50c96bdc 13816 && !needs_to_live_in_memory (ret))
8ea8de24 13817 DECL_GIMPLE_REG_P (ret) = 1;
50c96bdc 13818
9917317a 13819 if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS))
1e35a093 13820 asan_poisoned_variables = new hash_set<tree> ();
7e3aae05 13821 bind = gimplify_body (fndecl, true);
1e35a093 13822 if (asan_poisoned_variables)
13823 {
13824 delete asan_poisoned_variables;
13825 asan_poisoned_variables = NULL;
13826 }
75a70cf9 13827
 13828  /* The tree body of the function is no longer needed; replace it
13829 with the new GIMPLE body. */
e3a19533 13830 seq = NULL;
75a70cf9 13831 gimple_seq_add_stmt (&seq, bind);
13832 gimple_set_body (fndecl, seq);
4ee9c684 13833
13834 /* If we're instrumenting function entry/exit, then prepend the call to
13835 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
13836 catch the exit hook. */
13837 /* ??? Add some way to ignore exceptions for this TFE. */
13838 if (flag_instrument_function_entry_exit
a95c0776 13839 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
ac924c09 13840 /* Do not instrument extern inline functions. */
13841 && !(DECL_DECLARED_INLINE_P (fndecl)
13842 && DECL_EXTERNAL (fndecl)
13843 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
a95c0776 13844 && !flag_instrument_functions_exclude_p (fndecl))
4ee9c684 13845 {
75a70cf9 13846 tree x;
1a91d914 13847 gbind *new_bind;
42acab1c 13848 gimple *tf;
75a70cf9 13849 gimple_seq cleanup = NULL, body = NULL;
a35ffc7c 13850 tree tmp_var, this_fn_addr;
1a91d914 13851 gcall *call;
ce9ec841 13852
a35ffc7c 13853 /* The instrumentation hooks aren't going to call the instrumented
13854 function and the address they receive is expected to be matchable
13855 against symbol addresses. Make sure we don't create a trampoline,
13856 in case the current function is nested. */
13857 this_fn_addr = build_fold_addr_expr (current_function_decl);
13858 TREE_NO_TRAMPOLINE (this_fn_addr) = 1;
13859
b9a16870 13860 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
6b37ba94 13861 call = gimple_build_call (x, 1, integer_zero_node);
ce9ec841 13862 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
13863 gimple_call_set_lhs (call, tmp_var);
13864 gimplify_seq_add_stmt (&cleanup, call);
b9a16870 13865 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
a35ffc7c 13866 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
ce9ec841 13867 gimplify_seq_add_stmt (&cleanup, call);
75a70cf9 13868 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
4ee9c684 13869
b9a16870 13870 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
6b37ba94 13871 call = gimple_build_call (x, 1, integer_zero_node);
ce9ec841 13872 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
13873 gimple_call_set_lhs (call, tmp_var);
13874 gimplify_seq_add_stmt (&body, call);
b9a16870 13875 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
a35ffc7c 13876 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
ce9ec841 13877 gimplify_seq_add_stmt (&body, call);
75a70cf9 13878 gimplify_seq_add_stmt (&body, tf);
08d3e973 13879 new_bind = gimple_build_bind (NULL, body, NULL);
4ee9c684 13880
75a70cf9 13881 /* Replace the current function body with the body
13882 wrapped in the try/finally TF. */
e3a19533 13883 seq = NULL;
75a70cf9 13884 gimple_seq_add_stmt (&seq, new_bind);
13885 gimple_set_body (fndecl, seq);
ed53cebe 13886 bind = new_bind;
13887 }
13888
9917317a 13889 if (sanitize_flags_p (SANITIZE_THREAD))
ed53cebe 13890 {
13891 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
42acab1c 13892 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
08d3e973 13893 gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
ed53cebe 13894 /* Replace the current function body with the body
13895 wrapped in the try/finally TF. */
13896 seq = NULL;
13897 gimple_seq_add_stmt (&seq, new_bind);
13898 gimple_set_body (fndecl, seq);
4ee9c684 13899 }
13900
75a70cf9 13901 DECL_SAVED_TREE (fndecl) = NULL_TREE;
8e21ebb1 13902 cfun->curr_properties |= PROP_gimple_any;
75a70cf9 13903
87d4aa85 13904 pop_cfun ();
6167e851 13905
73714718 13906 dump_function (TDI_gimple, fndecl);
4ee9c684 13907}
75a70cf9 13908
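/* With -finstrument-functions, the wrapping built above makes the function
   behave roughly like the following C sketch (illustration only; the real
   construct is a GIMPLE_TRY_FINALLY, and C itself has no try/finally):

     void foo (void)
     {
       void *this_fn = (void *) &foo;
       __cyg_profile_func_enter (this_fn, __builtin_return_address (0));
       ... original body ...
       __cyg_profile_func_exit (this_fn, __builtin_return_address (0));
     }

   where the exit hook runs on every path out of the body.  */
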
b94f16f4 13909/* Return a dummy expression of type TYPE in order to keep going after an
13910 error. */
09c41ee4 13911
b94f16f4 13912static tree
13913dummy_object (tree type)
09c41ee4 13914{
b94f16f4 13915 tree t = build_int_cst (build_pointer_type (type), 0);
13916 return build2 (MEM_REF, type, t, t);
09c41ee4 13917}
13918
b94f16f4 13919/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
13920 builtin function, but a very special sort of operator. */
09c41ee4 13921
b94f16f4 13922enum gimplify_status
82fc0e0a 13923gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
13924 gimple_seq *post_p ATTRIBUTE_UNUSED)
b94f16f4 13925{
13926 tree promoted_type, have_va_type;
13927 tree valist = TREE_OPERAND (*expr_p, 0);
13928 tree type = TREE_TYPE (*expr_p);
c9595c51 13929 tree t, tag, aptag;
b94f16f4 13930 location_t loc = EXPR_LOCATION (*expr_p);
09c41ee4 13931
b94f16f4 13932 /* Verify that valist is of the proper type. */
13933 have_va_type = TREE_TYPE (valist);
13934 if (have_va_type == error_mark_node)
13935 return GS_ERROR;
13936 have_va_type = targetm.canonical_va_list_type (have_va_type);
3bb45f76 13937 if (have_va_type == NULL_TREE
0e96068d 13938 && POINTER_TYPE_P (TREE_TYPE (valist)))
3bb45f76 13939 /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg. */
13940 have_va_type
13941 = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
ea2ec8f6 13942 gcc_assert (have_va_type != NULL_TREE);
09c41ee4 13943
b94f16f4 13944 /* Generate a diagnostic for requesting data of a type that cannot
13945 be passed through `...' due to type promotion at the call site. */
13946 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
13947 != type)
13948 {
13949 static bool gave_help;
13950 bool warned;
60773515 13951 /* Use the expansion point to handle cases such as passing bool (defined
13952 in a system header) through `...'. */
be1e7283 13953 location_t xloc
60773515 13954 = expansion_point_location_if_in_system_header (loc);
09c41ee4 13955
b94f16f4 13956 /* Unfortunately, this is merely undefined, rather than a constraint
13957 violation, so we cannot make this an error. If this call is never
13958 executed, the program is still strictly conforming. */
bc35ef65 13959 auto_diagnostic_group d;
60773515 13960 warned = warning_at (xloc, 0,
13961 "%qT is promoted to %qT when passed through %<...%>",
b94f16f4 13962 type, promoted_type);
13963 if (!gave_help && warned)
13964 {
13965 gave_help = true;
60773515 13966 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
b94f16f4 13967 promoted_type, type);
13968 }
09c41ee4 13969
b94f16f4 13970 /* We can, however, treat "undefined" any way we please.
13971 Call abort to encourage the user to fix the program. */
13972 if (warned)
60773515 13973 inform (xloc, "if this code is reached, the program will abort");
b94f16f4 13974 /* Before the abort, allow the evaluation of the va_list
13975 expression to exit or longjmp. */
13976 gimplify_and_add (valist, pre_p);
13977 t = build_call_expr_loc (loc,
13978 builtin_decl_implicit (BUILT_IN_TRAP), 0);
09c41ee4 13979 gimplify_and_add (t, pre_p);
13980
b94f16f4 13981 /* This is dead code, but go ahead and finish so that the
13982 mode of the result comes out right. */
13983 *expr_p = dummy_object (type);
13984 return GS_ALL_DONE;
09c41ee4 13985 }
09c41ee4 13986
82fc0e0a 13987 tag = build_int_cst (build_pointer_type (type), 0);
c9595c51 13988 aptag = build_int_cst (TREE_TYPE (valist), 0);
13989
13990 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
13991 valist, tag, aptag);
09c41ee4 13992
8e21ebb1 13993 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
13994 needs to be expanded. */
13995 cfun->curr_properties &= ~PROP_gimple_lva;
13996
82fc0e0a 13997 return GS_OK;
09c41ee4 13998}
c2acfe90 13999
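/* As an illustration of the promotion diagnostic above (an example, not
   code from this file): a call such as

     char c = va_arg (ap, char);

   is warned about, because 'char' is promoted to 'int' when passed
   through '...'; the expression is then replaced by a trap plus a dummy
   object of the requested type.  */
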
a8783bee 14000/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
14001
14002 DST/SRC are the destination and source respectively. You can pass
14003 ungimplified trees in DST or SRC, in which case they will be
14004 converted to a gimple operand if necessary.
14005
14006 This function returns the newly created GIMPLE_ASSIGN tuple. */
14007
42acab1c 14008gimple *
a8783bee 14009gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
14010{
14011 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
14012 gimplify_and_add (t, seq_p);
14013 ggc_free (t);
14014 return gimple_seq_last_stmt (*seq_p);
14015}
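
/* A typical use (a sketch, not code from this file): to store a GENERIC
   expression EXPR into a fresh temporary while gimplifying, a caller can do

     tree tmp = create_tmp_var (TREE_TYPE (expr), "tmp");
     gimplify_assign (tmp, expr, pre_p);

   EXPR is gimplified as a side effect of building the assignment.  */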
14016
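/* Hash an entry P of the gimplify temporary table on its saved
   expression P->val.  */
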
e795d6e1 14017inline hashval_t
9969c043 14018gimplify_hasher::hash (const elt_t *p)
e795d6e1 14019{
14020 tree t = p->val;
14021 return iterative_hash_expr (t, 0);
14022}
14023
14024inline bool
9969c043 14025gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
e795d6e1 14026{
14027 tree t1 = p1->val;
14028 tree t2 = p2->val;
14029 enum tree_code code = TREE_CODE (t1);
14030
14031 if (TREE_CODE (t2) != code
14032 || TREE_TYPE (t1) != TREE_TYPE (t2))
14033 return false;
14034
14035 if (!operand_equal_p (t1, t2, 0))
14036 return false;
14037
e795d6e1 14038 /* Only allow them to compare equal if they also hash equal; otherwise
 14039     results are nondeterministic, and we fail bootstrap comparison.  */
382ecba7 14040 gcc_checking_assert (hash (p1) == hash (p2));
e795d6e1 14041
14042 return true;
14043}