[thirdparty/gcc.git] / gcc / gimplify.c
1 /* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2020 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3.  If not see
21 <http://www.gnu.org/licenses/>.  */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "gimple.h"
33 #include "gimple-predict.h"
34 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
35 #include "ssa.h"
36 #include "cgraph.h"
37 #include "tree-pretty-print.h"
38 #include "diagnostic-core.h"
39 #include "alias.h"
40 #include "fold-const.h"
41 #include "calls.h"
42 #include "varasm.h"
43 #include "stmt.h"
44 #include "expr.h"
45 #include "gimple-fold.h"
46 #include "tree-eh.h"
47 #include "gimplify.h"
48 #include "gimple-iterator.h"
49 #include "stor-layout.h"
50 #include "print-tree.h"
51 #include "tree-iterator.h"
52 #include "tree-inline.h"
53 #include "langhooks.h"
54 #include "tree-cfg.h"
55 #include "tree-ssa.h"
56 #include "omp-general.h"
57 #include "omp-low.h"
58 #include "gimple-low.h"
59 #include "gomp-constants.h"
60 #include "splay-tree.h"
61 #include "gimple-walk.h"
62 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
63 #include "builtins.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "asan.h"
67 #include "dbgcnt.h"
68 #include "omp-offload.h"
69 #include "context.h"
70
71 /* Hash set of poisoned variables in a bind expr.  */
72 static hash_set<tree> *asan_poisoned_variables = NULL;
73
74 enum gimplify_omp_var_data
75 {
76 GOVD_SEEN = 0x000001,
77 GOVD_EXPLICIT = 0x000002,
78 GOVD_SHARED = 0x000004,
79 GOVD_PRIVATE = 0x000008,
80 GOVD_FIRSTPRIVATE = 0x000010,
81 GOVD_LASTPRIVATE = 0x000020,
82 GOVD_REDUCTION = 0x000040,
83 GOVD_LOCAL = 0x00080,
84 GOVD_MAP = 0x000100,
85 GOVD_DEBUG_PRIVATE = 0x000200,
86 GOVD_PRIVATE_OUTER_REF = 0x000400,
87 GOVD_LINEAR = 0x000800,
88 GOVD_ALIGNED = 0x001000,
89
90 /* Flag for GOVD_MAP: don't copy back.  */
91 GOVD_MAP_TO_ONLY = 0x002000,
92
93 /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
94 GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,
95
96 GOVD_MAP_0LEN_ARRAY = 0x008000,
97
98 /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping.  */
99 GOVD_MAP_ALWAYS_TO = 0x010000,
100
101 /* Flag for shared vars that are or might be stored to in the region.  */
102 GOVD_WRITTEN = 0x020000,
103
104 /* Flag for GOVD_MAP, if it is a forced mapping.  */
105 GOVD_MAP_FORCE = 0x040000,
106
107 /* Flag for GOVD_MAP: must be present already.  */
108 GOVD_MAP_FORCE_PRESENT = 0x080000,
109
110 /* Flag for GOVD_MAP: only allocate.  */
111 GOVD_MAP_ALLOC_ONLY = 0x100000,
112
113 /* Flag for GOVD_MAP: only copy back.  */
114 GOVD_MAP_FROM_ONLY = 0x200000,
115
116 GOVD_NONTEMPORAL = 0x400000,
117
118 /* Flag for GOVD_LASTPRIVATE: conditional modifier.  */
119 GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,
120
121 GOVD_CONDTEMP = 0x1000000,
122
123 /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause.  */
124 GOVD_REDUCTION_INSCAN = 0x2000000,
125
126 /* Flag for GOVD_MAP: (struct) vars that have pointer attachments for
127 fields.  */
128 GOVD_MAP_HAS_ATTACHMENTS = 8388608,
129
130 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
131 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
132 | GOVD_LOCAL)
133 };
134
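
/* Illustration of how these flags are used: each gimplify_omp_ctx below keeps
   a splay tree mapping a DECL to an unsigned value that ORs one data-sharing
   kind with modifier bits, e.g. GOVD_FIRSTPRIVATE | GOVD_EXPLICIT | GOVD_SEEN,
   and GOVD_DATA_SHARE_CLASS serves as the mask that extracts the data-sharing
   kind from such a value.  */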
135
136 enum omp_region_type
137 {
138 ORT_WORKSHARE = 0x00,
139 ORT_TASKGROUP = 0x01,
140 ORT_SIMD = 0x04,
141
142 ORT_PARALLEL = 0x08,
143 ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,
144
145 ORT_TASK = 0x10,
146 ORT_UNTIED_TASK = ORT_TASK | 1,
147 ORT_TASKLOOP = ORT_TASK | 2,
148 ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,
149
150 ORT_TEAMS = 0x20,
151 ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
152 ORT_HOST_TEAMS = ORT_TEAMS | 2,
153 ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,
154
155 /* Data region.  */
156 ORT_TARGET_DATA = 0x40,
157
158 /* Data region with offloading.  */
159 ORT_TARGET = 0x80,
160 ORT_COMBINED_TARGET = ORT_TARGET | 1,
161 ORT_IMPLICIT_TARGET = ORT_TARGET | 2,
162
163 /* OpenACC variants.  */
164 ORT_ACC = 0x100, /* A generic OpenACC region.  */
165 ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA, /* Data construct.  */
166 ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET, /* Parallel construct */
167 ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2, /* Kernels construct.  */
168 ORT_ACC_SERIAL = ORT_ACC | ORT_TARGET | 4, /* Serial construct.  */
169 ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2, /* Host data.  */
170
171 /* Dummy OpenMP region, used to disable expansion of
172 DECL_VALUE_EXPRs in taskloop pre body.  */
173 ORT_NONE = 0x200
174 };
175
176 /* Gimplify hashtable helper.  */
177
178 struct gimplify_hasher : free_ptr_hash <elt_t>
179 {
180 static inline hashval_t hash (const elt_t *);
181 static inline bool equal (const elt_t *, const elt_t *);
182 };
183
184 struct gimplify_ctx
185 {
186 struct gimplify_ctx *prev_context;
187
188 vec<gbind *> bind_expr_stack;
189 tree temps;
190 gimple_seq conditional_cleanups;
191 tree exit_label;
192 tree return_temp;
193
194 vec<tree> case_labels;
195 hash_set<tree> *live_switch_vars;
196 /* The formal temporary table.  Should this be persistent?  */
197 hash_table<gimplify_hasher> *temp_htab;
198
199 int conditions;
200 unsigned into_ssa : 1;
201 unsigned allow_rhs_cond_expr : 1;
202 unsigned in_cleanup_point_expr : 1;
203 unsigned keep_stack : 1;
204 unsigned save_stack : 1;
205 unsigned in_switch_expr : 1;
206 };
207
208 enum gimplify_defaultmap_kind
209 {
210 GDMK_SCALAR,
211 GDMK_AGGREGATE,
212 GDMK_ALLOCATABLE,
213 GDMK_POINTER
214 };
215
216 struct gimplify_omp_ctx
217 {
218 struct gimplify_omp_ctx *outer_context;
219 splay_tree variables;
220 hash_set<tree> *privatized_types;
221 tree clauses;
222 /* Iteration variables in an OMP_FOR.  */
223 vec<tree> loop_iter_var;
224 location_t location;
225 enum omp_clause_default_kind default_kind;
226 enum omp_region_type region_type;
227 enum tree_code code;
228 bool combined_loop;
229 bool distribute;
230 bool target_firstprivatize_array_bases;
231 bool add_safelen1;
232 bool order_concurrent;
233 int defaultmap[4];
234 };
235
236 static struct gimplify_ctx *gimplify_ctxp;
237 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
238 static bool in_omp_construct;
239
240 /* Forward declaration.  */
241 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
242 static hash_map<tree, tree> *oacc_declare_returns;
243 static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
244 bool (*) (tree), fallback_t, bool);
245
246 /* Shorter alias name for the above function for use in gimplify.c
247 only.  */
248
249 static inline void
250 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
251 {
252 gimple_seq_add_stmt_without_update (seq_p, gs);
253 }
254
255/* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
256 NULL, a new sequence is allocated. This function is
257 similar to gimple_seq_add_seq, but does not scan the operands.
258 During gimplification, we need to manipulate statement sequences
259 before the def/use vectors have been constructed. */
260
261static void
262gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
263{
264 gimple_stmt_iterator si;
265
266 if (src == NULL)
267 return;
268
269 si = gsi_last (*dst_p);
270 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
271}
272
273
274/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
275 and popping gimplify contexts. */
276
277static struct gimplify_ctx *ctx_pool = NULL;
278
279/* Return a gimplify context struct from the pool. */
280
281static inline struct gimplify_ctx *
282ctx_alloc (void)
283{
284 struct gimplify_ctx * c = ctx_pool;
285
286 if (c)
287 ctx_pool = c->prev_context;
288 else
289 c = XNEW (struct gimplify_ctx);
290
291 memset (c, '\0', sizeof (*c));
292 return c;
293}
294
295/* Put gimplify context C back into the pool. */
296
297static inline void
298ctx_free (struct gimplify_ctx *c)
299{
300 c->prev_context = ctx_pool;
301 ctx_pool = c;
302}
303
304/* Free allocated ctx stack memory. */
305
306void
307free_gimplify_stack (void)
308{
309 struct gimplify_ctx *c;
310
311 while ((c = ctx_pool))
312 {
313 ctx_pool = c->prev_context;
314 free (c);
315 }
316}
317
318
319/* Set up a context for the gimplifier. */
320
321void
322 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
323 {
324 struct gimplify_ctx *c = ctx_alloc ();
325
326 c->prev_context = gimplify_ctxp;
327 gimplify_ctxp = c;
328 gimplify_ctxp->into_ssa = in_ssa;
329 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
330 }
331
332/* Tear down a context for the gimplifier. If BODY is non-null, then
333 put the temporaries into the outer BIND_EXPR. Otherwise, put them
334 in the local_decls.
335
336 BODY is not a sequence, but the first tuple in a sequence.  */
337
338 void
339 pop_gimplify_context (gimple *body)
340 {
341 struct gimplify_ctx *c = gimplify_ctxp;
342
343 gcc_assert (c
344 && (!c->bind_expr_stack.exists ()
345 || c->bind_expr_stack.is_empty ()));
346 c->bind_expr_stack.release ();
347 gimplify_ctxp = c->prev_context;
348
349 if (body)
350 declare_vars (c->temps, body, false);
351 else
352 record_vars (c->temps);
353
354 delete c->temp_htab;
355 c->temp_htab = NULL;
356 ctx_free (c);
357 }
358
359 /* Push a GIMPLE_BIND tuple onto the stack of bindings.  */
360
361 static void
362 gimple_push_bind_expr (gbind *bind_stmt)
363 {
364 gimplify_ctxp->bind_expr_stack.reserve (8);
365 gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
366 }
367
368 /* Pop the first element off the stack of bindings.  */
369
370 static void
371 gimple_pop_bind_expr (void)
372 {
373 gimplify_ctxp->bind_expr_stack.pop ();
374 }
375
376 /* Return the first element of the stack of bindings.  */
377
378 gbind *
379 gimple_current_bind_expr (void)
380 {
381 return gimplify_ctxp->bind_expr_stack.last ();
382 }
383
384 /* Return the stack of bindings created during gimplification.  */
385
386 vec<gbind *>
387 gimple_bind_expr_stack (void)
388 {
389 return gimplify_ctxp->bind_expr_stack;
390 }
391
392 /* Return true iff there is a COND_EXPR between us and the innermost
393 CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */
394
395static bool
396gimple_conditional_context (void)
397{
398 return gimplify_ctxp->conditions > 0;
399}
400
401/* Note that we've entered a COND_EXPR. */
402
403static void
404gimple_push_condition (void)
405{
406 #ifdef ENABLE_GIMPLE_CHECKING
407 if (gimplify_ctxp->conditions == 0)
408 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
409 #endif
410 ++(gimplify_ctxp->conditions);
411}
412
413/* Note that we've left a COND_EXPR. If we're back at unconditional scope
414 now, add any conditional cleanups we've seen to the prequeue. */
415
416static void
417 gimple_pop_condition (gimple_seq *pre_p)
418 {
419 int conds = --(gimplify_ctxp->conditions);
420
421 gcc_assert (conds >= 0);
422 if (conds == 0)
423 {
424 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
425 gimplify_ctxp->conditional_cleanups = NULL;
426 }
427 }
428
429/* A stable comparison routine for use with splay trees and DECLs. */
430
431static int
432splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
433{
434 tree a = (tree) xa;
435 tree b = (tree) xb;
436
437 return DECL_UID (a) - DECL_UID (b);
438}
439
440/* Create a new omp construct that deals with variable remapping. */
441
442static struct gimplify_omp_ctx *
443 new_omp_context (enum omp_region_type region_type)
444 {
445 struct gimplify_omp_ctx *c;
446
447 c = XCNEW (struct gimplify_omp_ctx);
448 c->outer_context = gimplify_omp_ctxp;
449 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
450 c->privatized_types = new hash_set<tree>;
451 c->location = input_location;
452 c->region_type = region_type;
453 if ((region_type & ORT_TASK) == 0)
454 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
455 else
456 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
457 c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
458 c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
459 c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
460 c->defaultmap[GDMK_POINTER] = GOVD_MAP;
461
462 return c;
463}
464
465/* Destroy an omp construct that deals with variable remapping. */
466
467static void
468delete_omp_context (struct gimplify_omp_ctx *c)
469{
470 splay_tree_delete (c->variables);
471 delete c->privatized_types;
472 c->loop_iter_var.release ();
473 XDELETE (c);
474}
475
476static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
477static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
478
479 /* Both gimplify the statement T and append it to *SEQ_P.  This function
480 behaves exactly as gimplify_stmt, but you don't have to pass T as a
481 reference.  */
482
483 void
484 gimplify_and_add (tree t, gimple_seq *seq_p)
485 {
486 gimplify_stmt (&t, seq_p);
487 }
488
489/* Gimplify statement T into sequence *SEQ_P, and return the first
490 tuple in the sequence of generated tuples for this statement.
491 Return NULL if gimplifying T produced no tuples. */
492
493 static gimple *
494 gimplify_and_return_first (tree t, gimple_seq *seq_p)
495 {
496 gimple_stmt_iterator last = gsi_last (*seq_p);
497
498 gimplify_and_add (t, seq_p);
499
500 if (!gsi_end_p (last))
501 {
502 gsi_next (&last);
503 return gsi_stmt (last);
504 }
505 else
506 return gimple_seq_first_stmt (*seq_p);
507}
508
509/* Returns true iff T is a valid RHS for an assignment to an un-renamed
510 LHS, or for a call argument. */
511
512static bool
513is_gimple_mem_rhs (tree t)
514{
515 /* If we're dealing with a renamable type, either source or dest must be
516 a renamed variable. */
517 if (is_gimple_reg_type (TREE_TYPE (t)))
518 return is_gimple_val (t);
519 else
520 return is_gimple_val (t) || is_gimple_lvalue (t);
521}
522
523 /* Return true if T is a CALL_EXPR or an expression that can be
524 assigned to a temporary.  Note that this predicate should only be
525 used during gimplification.  See the rationale for this in
526 gimplify_modify_expr.  */
527
528 static bool
529 is_gimple_reg_rhs_or_call (tree t)
530 {
531 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
532 || TREE_CODE (t) == CALL_EXPR);
533 }
534
535/* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
536 this predicate should only be used during gimplification. See the
537 rationale for this in gimplify_modify_expr. */
538
539static bool
540 is_gimple_mem_rhs_or_call (tree t)
541 {
542 /* If we're dealing with a renamable type, either source or dest must be
543 a renamed variable.  */
544 if (is_gimple_reg_type (TREE_TYPE (t)))
545 return is_gimple_val (t);
546 else
547 return (is_gimple_val (t)
548 || is_gimple_lvalue (t)
549 || TREE_CLOBBER_P (t)
550 || TREE_CODE (t) == CALL_EXPR);
551 }
552
553 /* Create a temporary with a name derived from VAL.  Subroutine of
554 lookup_tmp_var; nobody else should call this function.  */
555
556 static inline tree
557 create_tmp_from_val (tree val)
558 {
559 /* Drop all qualifiers and address-space information from the value type.  */
560 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
561 tree var = create_tmp_var (type, get_name (val));
562 return var;
563 }
564
565/* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
566 an existing expression temporary. */
567
568static tree
569lookup_tmp_var (tree val, bool is_formal)
570{
571 tree ret;
572
573 /* If not optimizing, never really reuse a temporary. local-alloc
574 won't allocate any variable that is used in more than one basic
575 block, which means it will go into memory, causing much extra
576 work in reload and final and poorer code generation, outweighing
577 the extra memory allocation here. */
578 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
579 ret = create_tmp_from_val (val);
580 else
581 {
582 elt_t elt, *elt_p;
583 elt_t **slot;
584
585 elt.val = val;
586 if (!gimplify_ctxp->temp_htab)
587 gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
588 slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
589 if (*slot == NULL)
590 {
591 elt_p = XNEW (elt_t);
592 elt_p->val = val;
593 elt_p->temp = ret = create_tmp_from_val (val);
594 *slot = elt_p;
595 }
596 else
597 {
598 elt_p = *slot;
599 ret = elt_p->temp;
600 }
601 }
602
603 return ret;
604 }
605
606 /* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */
607
608 static tree
609 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
610 bool is_formal, bool allow_ssa)
611 {
612 tree t, mod;
613
614 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
615 can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
616 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
617 fb_rvalue);
618
619 if (allow_ssa
620 && gimplify_ctxp->into_ssa
621 && is_gimple_reg_type (TREE_TYPE (val)))
622 {
623 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
624 if (! gimple_in_ssa_p (cfun))
625 {
626 const char *name = get_name (val);
627 if (name)
628 SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
629 }
630 }
631 else
632 t = lookup_tmp_var (val, is_formal);
633
634 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
635
636 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
637
638 /* gimplify_modify_expr might want to reduce this further.  */
639 gimplify_and_add (mod, pre_p);
640 ggc_free (mod);
641
642 return t;
643 }
644
645 /* Return a formal temporary variable initialized with VAL.  PRE_P is as
646 in gimplify_expr.  Only use this function if:
647
648 1) The value of the unfactored expression represented by VAL will not
649 change between the initialization and use of the temporary, and
650 2) The temporary will not be otherwise modified.
651
652 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
653 and #2 means it is inappropriate for && temps.
654
655 For other cases, use get_initialized_tmp_var instead. */
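
/* For illustration: when optimizing, formal temporaries are remembered in
   gimplify_ctxp->temp_htab, so gimplifying two structurally identical value
   expressions (say, two rvalue uses of "a + b") in one context can hand back
   the same temporary instead of creating two; lookup_tmp_var above implements
   that reuse and disables it whenever the value has side effects.  */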
656
657 tree
658 get_formal_tmp_var (tree val, gimple_seq *pre_p)
659 {
660 return internal_get_tmp_var (val, pre_p, NULL, true, true);
661 }
662
663 /* Return a temporary variable initialized with VAL.  PRE_P and POST_P
664 are as in gimplify_expr.  */
665
666 tree
667 get_initialized_tmp_var (tree val, gimple_seq *pre_p,
668 gimple_seq *post_p /* = NULL */,
669 bool allow_ssa /* = true */)
670 {
671 return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
672 }
673
674 /* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
675 generate debug info for them; otherwise don't.  */
676
677 void
678 declare_vars (tree vars, gimple *gs, bool debug_info)
679 {
680 tree last = vars;
681 if (last)
682 {
683 tree temps, block;
684
685 gbind *scope = as_a <gbind *> (gs);
686
687 temps = nreverse (last);
688
689 block = gimple_bind_block (scope);
690 gcc_assert (!block || TREE_CODE (block) == BLOCK);
691 if (!block || !debug_info)
692 {
693 DECL_CHAIN (last) = gimple_bind_vars (scope);
694 gimple_bind_set_vars (scope, temps);
695 }
696 else
697 {
698 /* We need to attach the nodes both to the BIND_EXPR and to its
699 associated BLOCK for debugging purposes.  The key point here
700 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
701 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
702 if (BLOCK_VARS (block))
703 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
704 else
705 {
706 gimple_bind_set_vars (scope,
707 chainon (gimple_bind_vars (scope), temps));
708 BLOCK_VARS (block) = temps;
709 }
710 }
711 }
712 }
713
714/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
715 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
716 no such upper bound can be obtained. */
717
718static void
719force_constant_size (tree var)
720{
721 /* The only attempt we make is by querying the maximum size of objects
722 of the variable's type. */
723
724 HOST_WIDE_INT max_size;
725
726 gcc_assert (VAR_P (var));
727
728 max_size = max_int_size_in_bytes (TREE_TYPE (var));
729
730 gcc_assert (max_size >= 0);
731
732 DECL_SIZE_UNIT (var)
733 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
734 DECL_SIZE (var)
735 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
736}
737
738/* Push the temporary variable TMP into the current binding. */
739
740void
741gimple_add_tmp_var_fn (struct function *fn, tree tmp)
742{
743 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
744
745 /* Later processing assumes that the object size is constant, which might
746 not be true at this point. Force the use of a constant upper bound in
747 this case. */
748 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
749 force_constant_size (tmp);
750
751 DECL_CONTEXT (tmp) = fn->decl;
752 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
753
754 record_vars_into (tmp, fn->decl);
755}
756
757/* Push the temporary variable TMP into the current binding. */
758
759void
760gimple_add_tmp_var (tree tmp)
761{
762 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
763
764 /* Later processing assumes that the object size is constant, which might
765 not be true at this point.  Force the use of a constant upper bound in
766 this case.  */
767 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
768 force_constant_size (tmp);
769
770 DECL_CONTEXT (tmp) = current_function_decl;
771 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
772
773 if (gimplify_ctxp)
774 {
775 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
776 gimplify_ctxp->temps = tmp;
777
778 /* Mark temporaries local within the nearest enclosing parallel.  */
779 if (gimplify_omp_ctxp)
780 {
781 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
782 int flag = GOVD_LOCAL;
783 while (ctx
784 && (ctx->region_type == ORT_WORKSHARE
785 || ctx->region_type == ORT_TASKGROUP
786 || ctx->region_type == ORT_SIMD
787 || ctx->region_type == ORT_ACC))
788 {
789 if (ctx->region_type == ORT_SIMD
790 && TREE_ADDRESSABLE (tmp)
791 && !TREE_STATIC (tmp))
792 {
793 if (TREE_CODE (DECL_SIZE_UNIT (tmp)) != INTEGER_CST)
794 ctx->add_safelen1 = true;
795 else
796 flag = GOVD_PRIVATE;
797 break;
798 }
799 ctx = ctx->outer_context;
800 }
801 if (ctx)
802 omp_add_variable (ctx, tmp, flag | GOVD_SEEN);
803 }
804 }
805 else if (cfun)
806 record_vars (tmp);
807 else
808 {
809 gimple_seq body_seq;
810
811 /* This case is for nested functions.  We need to expose the locals
812 they create.  */
813 body_seq = gimple_body (current_function_decl);
814 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
815 }
816 }
817
818
819\f
820/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
821 nodes that are referenced more than once in GENERIC functions. This is
822 necessary because gimplification (translation into GIMPLE) is performed
823 by modifying tree nodes in-place, so gimplication of a shared node in a
824 first context could generate an invalid GIMPLE form in a second context.
825
826 This is achieved with a simple mark/copy/unmark algorithm that walks the
827 GENERIC representation top-down, marks nodes with TREE_VISITED the first
828 time it encounters them, duplicates them if they already have TREE_VISITED
829 set, and finally removes the TREE_VISITED marks it has set.
830
831 The algorithm works only at the function level, i.e. it generates a GENERIC
832 representation of a function with no nodes shared within the function when
833 passed a GENERIC function (except for nodes that are allowed to be shared).
834
835 At the global level, it is also necessary to unshare tree nodes that are
836 referenced in more than one function, for the same aforementioned reason.
837 This requires some cooperation from the front-end. There are 2 strategies:
838
839 1. Manual unsharing. The front-end needs to call unshare_expr on every
840 expression that might end up being shared across functions.
841
842 2. Deep unsharing. This is an extension of regular unsharing. Instead
843 of calling unshare_expr on expressions that might be shared across
844 functions, the front-end pre-marks them with TREE_VISITED. This will
845 ensure that they are unshared on the first reference within functions
846 when the regular unsharing algorithm runs. The counterpart is that
847 this algorithm must look deeper than for manual unsharing, which is
848 specified by LANG_HOOKS_DEEP_UNSHARING.
849
850 If there are only few specific cases of node sharing across functions, it is
851 probably easier for a front-end to unshare the expressions manually. On the
852 contrary, if the expressions generated at the global level are as widespread
853 as expressions generated within functions, deep unsharing is very likely the
854 way to go. */
855
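
/* As a concrete illustration of the problem: if a front end reuses a single
   tree node for an expression such as "a.b" as an operand of two different
   statements, gimplifying the first statement rewrites that node in place;
   without the copy-on-second-visit performed below, gimplifying the second
   statement would then see an operand that is no longer valid GENERIC.  The
   walkers that follow implement the mark/copy/unmark scheme described
   above.  */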
856 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
857 These nodes model computations that must be done once.  If we were to
858 unshare something like SAVE_EXPR(i++), the gimplification process would
859 create wrong code.  However, if DATA is non-null, it must hold a pointer
860 set that is used to unshare the subtrees of these nodes.  */
861
862 static tree
863 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
864 {
865 tree t = *tp;
866 enum tree_code code = TREE_CODE (t);
867
868 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
869 copy their subtrees if we can make sure to do it only once.  */
870 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
871 {
872 if (data && !((hash_set<tree> *)data)->add (t))
873 ;
874 else
875 *walk_subtrees = 0;
876 }
877
878 /* Stop at types, decls, constants like copy_tree_r.  */
879 else if (TREE_CODE_CLASS (code) == tcc_type
880 || TREE_CODE_CLASS (code) == tcc_declaration
881 || TREE_CODE_CLASS (code) == tcc_constant)
882 *walk_subtrees = 0;
883
884 /* Cope with the statement expression extension.  */
885 else if (code == STATEMENT_LIST)
886 ;
887
888 /* Leave the bulk of the work to copy_tree_r itself.  */
889 else
890 copy_tree_r (tp, walk_subtrees, NULL);
891
892 return NULL_TREE;
893 }
894
895 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
896 If *TP has been visited already, then *TP is deeply copied by calling
897 mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */
898
899 static tree
900 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
901 {
902 tree t = *tp;
903 enum tree_code code = TREE_CODE (t);
904
905 /* Skip types, decls, and constants.  But we do want to look at their
906 types and the bounds of types.  Mark them as visited so we properly
907 unmark their subtrees on the unmark pass.  If we've already seen them,
908 don't look down further.  */
909 if (TREE_CODE_CLASS (code) == tcc_type
910 || TREE_CODE_CLASS (code) == tcc_declaration
911 || TREE_CODE_CLASS (code) == tcc_constant)
912 {
913 if (TREE_VISITED (t))
914 *walk_subtrees = 0;
915 else
916 TREE_VISITED (t) = 1;
917 }
918
919 /* If this node has been visited already, unshare it and don't look
920 any deeper.  */
921 else if (TREE_VISITED (t))
922 {
923 walk_tree (tp, mostly_copy_tree_r, data, NULL);
924 *walk_subtrees = 0;
925 }
926
927 /* Otherwise, mark the node as visited and keep looking.  */
928 else
929 TREE_VISITED (t) = 1;
930
931 return NULL_TREE;
932 }
933
934 /* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
935 copy_if_shared_r callback unmodified.  */
936
937 void
938 copy_if_shared (tree *tp, void *data)
939 {
940 walk_tree (tp, copy_if_shared_r, data, NULL);
941 }
942
943 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
944 any nested functions.  */
945
946 static void
947 unshare_body (tree fndecl)
948 {
949 struct cgraph_node *cgn = cgraph_node::get (fndecl);
950 /* If the language requires deep unsharing, we need a pointer set to make
951 sure we don't repeatedly unshare subtrees of unshareable nodes.  */
952 hash_set<tree> *visited
953 = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
954
955 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
956 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
957 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
958
959 delete visited;
960
961 if (cgn)
962 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
963 unshare_body (cgn->decl);
964 }
965
966/* Callback for walk_tree to unmark the visited trees rooted at *TP.
967 Subtrees are walked until the first unvisited node is encountered. */
968
969static tree
970unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
971{
972 tree t = *tp;
973
974 /* If this node has been visited, unmark it and keep looking. */
975 if (TREE_VISITED (t))
976 TREE_VISITED (t) = 0;
977
978 /* Otherwise, don't look any deeper. */
979 else
980 *walk_subtrees = 0;
981
982 return NULL_TREE;
983}
984
985/* Unmark the visited trees rooted at *TP. */
986
987static inline void
988unmark_visited (tree *tp)
989{
990 walk_tree (tp, unmark_visited_r, NULL, NULL);
991}
992
993/* Likewise, but mark all trees as not visited. */
994
995static void
996 unvisit_body (tree fndecl)
997 {
998 struct cgraph_node *cgn = cgraph_node::get (fndecl);
999
1000 unmark_visited (&DECL_SAVED_TREE (fndecl));
1001 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
1002 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
1003
1004 if (cgn)
1005 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
1006 unvisit_body (cgn->decl);
1007 }
1008
1009/* Unconditionally make an unshared copy of EXPR. This is used when using
1010 stored expressions which span multiple functions, such as BINFO_VTABLE,
1011 as the normal unsharing process can't tell that they're shared. */
1012
1013tree
1014unshare_expr (tree expr)
1015{
1016 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1017 return expr;
1018}
1019
1020/* Worker for unshare_expr_without_location. */
1021
1022static tree
1023prune_expr_location (tree *tp, int *walk_subtrees, void *)
1024{
1025 if (EXPR_P (*tp))
1026 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
1027 else
1028 *walk_subtrees = 0;
1029 return NULL_TREE;
1030}
1031
1032/* Similar to unshare_expr but also prune all expression locations
1033 from EXPR. */
1034
1035tree
1036unshare_expr_without_location (tree expr)
1037{
1038 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1039 if (EXPR_P (expr))
1040 walk_tree (&expr, prune_expr_location, NULL, NULL);
1041 return expr;
1042}
1043
1044/* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
1045 one, OR_ELSE otherwise. The location of a STATEMENT_LISTs
1046 comprising at least one DEBUG_BEGIN_STMT followed by exactly one
1047 EXPR is the location of the EXPR. */
1048
1049static location_t
1050rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
1051{
1052 if (!expr)
1053 return or_else;
1054
1055 if (EXPR_HAS_LOCATION (expr))
1056 return EXPR_LOCATION (expr);
1057
1058 if (TREE_CODE (expr) != STATEMENT_LIST)
1059 return or_else;
1060
1061 tree_stmt_iterator i = tsi_start (expr);
1062
1063 bool found = false;
1064 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
1065 {
1066 found = true;
1067 tsi_next (&i);
1068 }
1069
1070 if (!found || !tsi_one_before_end_p (i))
1071 return or_else;
1072
1073 return rexpr_location (tsi_stmt (i), or_else);
1074}
1075
1076/* Return TRUE iff EXPR (maybe recursively) has a location; see
1077 rexpr_location for the potential recursion. */
1078
1079static inline bool
1080rexpr_has_location (tree expr)
1081{
1082 return rexpr_location (expr) != UNKNOWN_LOCATION;
1083}
1084
1085\f
1086/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
1087 contain statements and have a value. Assign its value to a temporary
1088 and give it void_type_node.  Return the temporary, or NULL_TREE if
1089 WRAPPER was already void. */
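
/* As an illustration, for a GENERIC assignment whose right-hand side is a
   statement expression, e.g.

     x = ({ int i = f (); i + 1; });

   the BIND_EXPR wrapper is given void type and, when TEMP carries the pending
   "x = ..." assignment, that assignment is pushed down onto the wrapper's
   last value expression ("i + 1"); when TEMP is null, a fresh "retval"
   temporary is created to receive the value instead.  */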
1090
1091tree
1092 voidify_wrapper_expr (tree wrapper, tree temp)
1093 {
1094 tree type = TREE_TYPE (wrapper);
1095 if (type && !VOID_TYPE_P (type))
1096 {
1097 tree *p;
1098
1099 /* Set p to point to the body of the wrapper.  Loop until we find
1100 something that isn't a wrapper.  */
1101 for (p = &wrapper; p && *p; )
1102 {
1103 switch (TREE_CODE (*p))
1104 {
1105 case BIND_EXPR:
1106 TREE_SIDE_EFFECTS (*p) = 1;
1107 TREE_TYPE (*p) = void_type_node;
1108 /* For a BIND_EXPR, the body is operand 1.  */
1109 p = &BIND_EXPR_BODY (*p);
1110 break;
1111
1112 case CLEANUP_POINT_EXPR:
1113 case TRY_FINALLY_EXPR:
1114 case TRY_CATCH_EXPR:
1115 TREE_SIDE_EFFECTS (*p) = 1;
1116 TREE_TYPE (*p) = void_type_node;
1117 p = &TREE_OPERAND (*p, 0);
1118 break;
1119
1120 case STATEMENT_LIST:
1121 {
1122 tree_stmt_iterator i = tsi_last (*p);
1123 TREE_SIDE_EFFECTS (*p) = 1;
1124 TREE_TYPE (*p) = void_type_node;
1125 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1126 }
1127 break;
1128
1129 case COMPOUND_EXPR:
1130 /* Advance to the last statement.  Set all container types to
1131 void.  */
1132 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1133 {
1134 TREE_SIDE_EFFECTS (*p) = 1;
1135 TREE_TYPE (*p) = void_type_node;
1136 }
1137 break;
1138
1139 case TRANSACTION_EXPR:
1140 TREE_SIDE_EFFECTS (*p) = 1;
1141 TREE_TYPE (*p) = void_type_node;
1142 p = &TRANSACTION_EXPR_BODY (*p);
1143 break;
1144
1145 default:
1146 /* Assume that any tree upon which voidify_wrapper_expr is
1147 directly called is a wrapper, and that its body is op0.  */
1148 if (p == &wrapper)
1149 {
1150 TREE_SIDE_EFFECTS (*p) = 1;
1151 TREE_TYPE (*p) = void_type_node;
1152 p = &TREE_OPERAND (*p, 0);
1153 break;
1154 }
1155 goto out;
1156 }
1157 }
1158
1159 out:
1160 if (p == NULL || IS_EMPTY_STMT (*p))
1161 temp = NULL_TREE;
1162 else if (temp)
1163 {
1164 /* The wrapper is on the RHS of an assignment that we're pushing
1165 down.  */
1166 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1167 || TREE_CODE (temp) == MODIFY_EXPR);
1168 TREE_OPERAND (temp, 1) = *p;
1169 *p = temp;
1170 }
1171 else
1172 {
1173 temp = create_tmp_var (type, "retval");
1174 *p = build2 (INIT_EXPR, type, temp, *p);
1175 }
1176
1177 return temp;
1178 }
1179
1180 return NULL_TREE;
1181 }
1182
1183/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1184 a temporary through which they communicate.  */
1185
1186 static void
1187 build_stack_save_restore (gcall **save, gcall **restore)
1188 {
1189 tree tmp_var;
1190
1191 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1192 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1193 gimple_call_set_lhs (*save, tmp_var);
1194
1195 *restore
1196 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1197 1, tmp_var);
1198 }
1199
1200/* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable. */
1201
1202static tree
1203build_asan_poison_call_expr (tree decl)
1204{
1205 /* Do not poison variables that have size equal to zero. */
1206 tree unit_size = DECL_SIZE_UNIT (decl);
1207 if (zerop (unit_size))
1208 return NULL_TREE;
1209
1210 tree base = build_fold_addr_expr (decl);
1211
1212 return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
1213 void_type_node, 3,
1214 build_int_cst (integer_type_node,
1215 ASAN_MARK_POISON),
1216 base, unit_size);
1217}
1218
1219/* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
1220 on POISON flag, shadow memory of a DECL variable. The call will be
1221 put on location identified by IT iterator, where BEFORE flag drives
1222 position where the stmt will be put. */
1223
1224static void
1225asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
1226 bool before)
1227{
1228 tree unit_size = DECL_SIZE_UNIT (decl);
1229 tree base = build_fold_addr_expr (decl);
1230
1231 /* Do not poison variables that have size equal to zero. */
1232 if (zerop (unit_size))
1233 return;
1234
1235 /* It's necessary to have all stack variables aligned to ASAN granularity
1236 bytes. */
1237 if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
1238 SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);
1239
1240 HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;
1241
1242 gimple *g
1243 = gimple_build_call_internal (IFN_ASAN_MARK, 3,
1244 build_int_cst (integer_type_node, flags),
1245 base, unit_size);
1246
1247 if (before)
1248 gsi_insert_before (it, g, GSI_NEW_STMT);
1249 else
1250 gsi_insert_after (it, g, GSI_NEW_STMT);
1251}
1252
1253/* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1254 either poisons or unpoisons a DECL. Created statement is appended
1255 to SEQ_P gimple sequence. */
1256
1257static void
1258asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
1259{
1260 gimple_stmt_iterator it = gsi_last (*seq_p);
1261 bool before = false;
1262
1263 if (gsi_end_p (it))
1264 before = true;
1265
1266 asan_poison_variable (decl, poison, &it, before);
1267}
1268
1269/* Sort pair of VAR_DECLs A and B by DECL_UID. */
1270
1271static int
1272sort_by_decl_uid (const void *a, const void *b)
1273{
1274 const tree *t1 = (const tree *)a;
1275 const tree *t2 = (const tree *)b;
1276
1277 int uid1 = DECL_UID (*t1);
1278 int uid2 = DECL_UID (*t2);
1279
1280 if (uid1 < uid2)
1281 return -1;
1282 else if (uid1 > uid2)
1283 return 1;
1284 else
1285 return 0;
1286}
1287
1288/* Generate IFN_ASAN_MARK internal call for all VARIABLES
1289 depending on POISON flag. Created statement is appended
1290 to SEQ_P gimple sequence. */
1291
1292static void
1293asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
1294{
1295 unsigned c = variables->elements ();
1296 if (c == 0)
1297 return;
1298
1299 auto_vec<tree> sorted_variables (c);
1300
1301 for (hash_set<tree>::iterator it = variables->begin ();
1302 it != variables->end (); ++it)
1303 sorted_variables.safe_push (*it);
1304
1305 sorted_variables.qsort (sort_by_decl_uid);
1306
1307 unsigned i;
1308 tree var;
1309 FOR_EACH_VEC_ELT (sorted_variables, i, var)
1310 {
1311 asan_poison_variable (var, poison, seq_p);
1312
1313 /* Add use_after_scope_memory attribute for the variable in order
1314 to prevent re-written into SSA. */
1315 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
1316 DECL_ATTRIBUTES (var)))
1317 DECL_ATTRIBUTES (var)
1318 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
1319 integer_one_node,
1320 DECL_ATTRIBUTES (var));
1321 }
1322}
1323
1324/* Gimplify a BIND_EXPR. Just voidify and recurse. */
1325
1326static enum gimplify_status
1327 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1328{
1329 tree bind_expr = *expr_p;
03c00798 1330 bool old_keep_stack = gimplify_ctxp->keep_stack;
6de9cd9a
DN
1331 bool old_save_stack = gimplify_ctxp->save_stack;
1332 tree t;
538dd0b7 1333 gbind *bind_stmt;
47598145 1334 gimple_seq body, cleanup;
538dd0b7 1335 gcall *stack_save;
a5852bea 1336 location_t start_locus = 0, end_locus = 0;
6e232ba4 1337 tree ret_clauses = NULL;
6de9cd9a 1338
c6c7698d 1339 tree temp = voidify_wrapper_expr (bind_expr, NULL);
325c3691 1340
6de9cd9a 1341 /* Mark variables seen in this bind expr. */
910ad8de 1342 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
e41d82f5 1343 {
8813a647 1344 if (VAR_P (t))
8cb86b65
JJ
1345 {
1346 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1347
1348 /* Mark variable as local. */
211b7533 1349 if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t))
c74559df 1350 {
211b7533
JJ
1351 if (! DECL_SEEN_IN_BIND_EXPR_P (t)
1352 || splay_tree_lookup (ctx->variables,
1353 (splay_tree_key) t) == NULL)
1354 {
ec03bc90 1355 int flag = GOVD_LOCAL;
211b7533
JJ
1356 if (ctx->region_type == ORT_SIMD
1357 && TREE_ADDRESSABLE (t)
1358 && !TREE_STATIC (t))
ec03bc90
JJ
1359 {
1360 if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
1361 ctx->add_safelen1 = true;
1362 else
1363 flag = GOVD_PRIVATE;
1364 }
1365 omp_add_variable (ctx, t, flag | GOVD_SEEN);
211b7533
JJ
1366 }
1367 /* Static locals inside of target construct or offloaded
1368 routines need to be "omp declare target". */
1369 if (TREE_STATIC (t))
1370 for (; ctx; ctx = ctx->outer_context)
1371 if ((ctx->region_type & ORT_TARGET) != 0)
1372 {
1373 if (!lookup_attribute ("omp declare target",
1374 DECL_ATTRIBUTES (t)))
1375 {
1376 tree id = get_identifier ("omp declare target");
1377 DECL_ATTRIBUTES (t)
1378 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (t));
1379 varpool_node *node = varpool_node::get (t);
1380 if (node)
1381 {
1382 node->offloadable = 1;
1383 if (ENABLE_OFFLOADING && !DECL_EXTERNAL (t))
1384 {
1385 g->have_offload = true;
1386 if (!in_lto_p)
1387 vec_safe_push (offload_vars, t);
1388 }
1389 }
1390 }
1391 break;
1392 }
c74559df 1393 }
8cb86b65
JJ
1394
1395 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
fc3103e7
JJ
1396
1397 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1398 cfun->has_local_explicit_reg_vars = true;
8cb86b65 1399 }
e41d82f5 1400 }
6de9cd9a 1401
538dd0b7 1402 bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
03c00798 1403 BIND_EXPR_BLOCK (bind_expr));
538dd0b7 1404 gimple_push_bind_expr (bind_stmt);
726a989a 1405
03c00798 1406 gimplify_ctxp->keep_stack = false;
6de9cd9a
DN
1407 gimplify_ctxp->save_stack = false;
1408
726a989a
RB
1409 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1410 body = NULL;
1411 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
538dd0b7 1412 gimple_bind_set_body (bind_stmt, body);
6de9cd9a 1413
a5852bea
OH
1414 /* Source location wise, the cleanup code (stack_restore and clobbers)
1415 belongs to the end of the block, so propagate what we have. The
1416 stack_save operation belongs to the beginning of block, which we can
1417 infer from the bind_expr directly if the block has no explicit
1418 assignment. */
1419 if (BIND_EXPR_BLOCK (bind_expr))
1420 {
1421 end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1422 start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1423 }
1424 if (start_locus == 0)
1425 start_locus = EXPR_LOCATION (bind_expr);
1426
47598145
MM
1427 cleanup = NULL;
1428 stack_save = NULL;
03c00798
EB
1429
1430 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1431 the stack space allocated to the VLAs. */
1432 if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
6de9cd9a 1433 {
538dd0b7 1434 gcall *stack_restore;
6de9cd9a
DN
1435
1436 /* Save stack on entry and restore it on exit. Add a try_finally
98906124 1437 block to achieve this. */
6de9cd9a
DN
1438 build_stack_save_restore (&stack_save, &stack_restore);
1439
a5852bea
OH
1440 gimple_set_location (stack_save, start_locus);
1441 gimple_set_location (stack_restore, end_locus);
1442
726a989a 1443 gimplify_seq_add_stmt (&cleanup, stack_restore);
47598145
MM
1444 }
1445
1446 /* Add clobbers for all variables that go out of scope. */
1447 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1448 {
8813a647 1449 if (VAR_P (t)
47598145 1450 && !is_global_var (t)
f9faea37 1451 && DECL_CONTEXT (t) == current_function_decl)
47598145 1452 {
f9faea37
JJ
1453 if (!DECL_HARD_REGISTER (t)
1454 && !TREE_THIS_VOLATILE (t)
1455 && !DECL_HAS_VALUE_EXPR_P (t)
1456 /* Only care for variables that have to be in memory. Others
1457 will be rewritten into SSA names, hence moved to the
1458 top-level. */
1459 && !is_gimple_reg (t)
1460 && flag_stack_reuse != SR_NONE)
1461 {
94b2a1e5 1462 tree clobber = build_clobber (TREE_TYPE (t));
f9faea37 1463 gimple *clobber_stmt;
f9faea37
JJ
1464 clobber_stmt = gimple_build_assign (t, clobber);
1465 gimple_set_location (clobber_stmt, end_locus);
1466 gimplify_seq_add_stmt (&cleanup, clobber_stmt);
1467 }
6e232ba4
JN
1468
1469 if (flag_openacc && oacc_declare_returns != NULL)
1470 {
1471 tree *c = oacc_declare_returns->get (t);
1472 if (c != NULL)
1473 {
1474 if (ret_clauses)
1475 OMP_CLAUSE_CHAIN (*c) = ret_clauses;
1476
1477 ret_clauses = *c;
1478
1479 oacc_declare_returns->remove (t);
1480
b119c055 1481 if (oacc_declare_returns->is_empty ())
6e232ba4
JN
1482 {
1483 delete oacc_declare_returns;
1484 oacc_declare_returns = NULL;
1485 }
1486 }
1487 }
47598145 1488 }
6dc4a604
ML
1489
1490 if (asan_poisoned_variables != NULL
1491 && asan_poisoned_variables->contains (t))
1492 {
1493 asan_poisoned_variables->remove (t);
1494 asan_poison_variable (t, true, &cleanup);
1495 }
1496
1497 if (gimplify_ctxp->live_switch_vars != NULL
1498 && gimplify_ctxp->live_switch_vars->contains (t))
1499 gimplify_ctxp->live_switch_vars->remove (t);
47598145
MM
1500 }
1501
6e232ba4
JN
1502 if (ret_clauses)
1503 {
1504 gomp_target *stmt;
1505 gimple_stmt_iterator si = gsi_start (cleanup);
1506
1507 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
1508 ret_clauses);
1509 gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
1510 }
1511
47598145
MM
1512 if (cleanup)
1513 {
538dd0b7 1514 gtry *gs;
47598145
MM
1515 gimple_seq new_body;
1516
1517 new_body = NULL;
538dd0b7 1518 gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
726a989a 1519 GIMPLE_TRY_FINALLY);
6de9cd9a 1520
47598145
MM
1521 if (stack_save)
1522 gimplify_seq_add_stmt (&new_body, stack_save);
726a989a 1523 gimplify_seq_add_stmt (&new_body, gs);
538dd0b7 1524 gimple_bind_set_body (bind_stmt, new_body);
6de9cd9a
DN
1525 }
1526
03c00798
EB
1527 /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1528 if (!gimplify_ctxp->keep_stack)
1529 gimplify_ctxp->keep_stack = old_keep_stack;
6de9cd9a 1530 gimplify_ctxp->save_stack = old_save_stack;
03c00798 1531
6de9cd9a
DN
1532 gimple_pop_bind_expr ();
1533
538dd0b7 1534 gimplify_seq_add_stmt (pre_p, bind_stmt);
726a989a 1535
6de9cd9a
DN
1536 if (temp)
1537 {
1538 *expr_p = temp;
6de9cd9a
DN
1539 return GS_OK;
1540 }
726a989a
RB
1541
1542 *expr_p = NULL_TREE;
1543 return GS_ALL_DONE;
6de9cd9a
DN
1544}
1545
e59a1c22
ML
1546/* Maybe add early return predict statement to PRE_P sequence. */
1547
1548static void
1549maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
1550{
1551 /* If we are not in a conditional context, add PREDICT statement. */
1552 if (gimple_conditional_context ())
1553 {
1554 gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
1555 NOT_TAKEN);
1556 gimplify_seq_add_stmt (pre_p, predict);
1557 }
1558}
1559
6de9cd9a
DN
1560/* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1561 GIMPLE value, it is assigned to a new temporary and the statement is
1562 re-written to return the temporary.
1563
726a989a 1564 PRE_P points to the sequence where side effects that must happen before
6de9cd9a
DN
1565 STMT should be stored. */
1566
1567static enum gimplify_status
726a989a 1568gimplify_return_expr (tree stmt, gimple_seq *pre_p)
6de9cd9a 1569{
538dd0b7 1570 greturn *ret;
6de9cd9a 1571 tree ret_expr = TREE_OPERAND (stmt, 0);
71877985 1572 tree result_decl, result;
6de9cd9a 1573
726a989a
RB
1574 if (ret_expr == error_mark_node)
1575 return GS_ERROR;
1576
1577 if (!ret_expr
76a2bcdc 1578 || TREE_CODE (ret_expr) == RESULT_DECL)
726a989a 1579 {
e59a1c22 1580 maybe_add_early_return_predict_stmt (pre_p);
538dd0b7 1581 greturn *ret = gimple_build_return (ret_expr);
726a989a
RB
1582 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1583 gimplify_seq_add_stmt (pre_p, ret);
1584 return GS_ALL_DONE;
1585 }
6de9cd9a 1586
6de9cd9a 1587 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
71877985 1588 result_decl = NULL_TREE;
7c82dd6c
JM
1589 else if (TREE_CODE (ret_expr) == COMPOUND_EXPR)
1590 {
1591 /* Used in C++ for handling EH cleanup of the return value if a local
1592 cleanup throws. Assume the front-end knows what it's doing. */
1593 result_decl = DECL_RESULT (current_function_decl);
1594 /* But crash if we end up trying to modify ret_expr below. */
1595 ret_expr = NULL_TREE;
1596 }
6de9cd9a
DN
1597 else
1598 {
726a989a
RB
1599 result_decl = TREE_OPERAND (ret_expr, 0);
1600
1601 /* See through a return by reference. */
cc77ae10 1602 if (TREE_CODE (result_decl) == INDIRECT_REF)
cc77ae10 1603 result_decl = TREE_OPERAND (result_decl, 0);
282899df
NS
1604
1605 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1606 || TREE_CODE (ret_expr) == INIT_EXPR)
1607 && TREE_CODE (result_decl) == RESULT_DECL);
6de9cd9a
DN
1608 }
1609
71877985
RH
1610 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1611 Recall that aggregate_value_p is FALSE for any aggregate type that is
1612 returned in registers. If we're returning values in registers, then
1613 we don't want to extend the lifetime of the RESULT_DECL, particularly
d3147f64 1614 across another call. In addition, for those aggregates for which
535a42b1 1615 hard_function_value generates a PARALLEL, we'll die during normal
71877985
RH
1616 expansion of structure assignments; there's special code in expand_return
1617 to handle this case that does not exist in expand_expr. */
ca361dec
EB
1618 if (!result_decl)
1619 result = NULL_TREE;
1620 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1621 {
507de5ee 1622 if (!poly_int_tree_p (DECL_SIZE (result_decl)))
ca361dec
EB
1623 {
1624 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1625 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1626 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1627 should be effectively allocated by the caller, i.e. all calls to
1628 this function must be subject to the Return Slot Optimization. */
1629 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1630 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1631 }
1632 result = result_decl;
1633 }
71877985
RH
1634 else if (gimplify_ctxp->return_temp)
1635 result = gimplify_ctxp->return_temp;
1636 else
1637 {
b731b390 1638 result = create_tmp_reg (TREE_TYPE (result_decl));
ff98621c
RH
1639
1640 /* ??? With complex control flow (usually involving abnormal edges),
1641 we can wind up warning about an uninitialized value for this. Due
1642 to how this variable is constructed and initialized, this is never
1643 true. Give up and never warn. */
1644 TREE_NO_WARNING (result) = 1;
1645
71877985
RH
1646 gimplify_ctxp->return_temp = result;
1647 }
1648
726a989a 1649 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
71877985
RH
1650 Then gimplify the whole thing. */
1651 if (result != result_decl)
726a989a 1652 TREE_OPERAND (ret_expr, 0) = result;
fff34d35
RK
1653
1654 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
6de9cd9a 1655
e59a1c22 1656 maybe_add_early_return_predict_stmt (pre_p);
726a989a
RB
1657 ret = gimple_build_return (result);
1658 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1659 gimplify_seq_add_stmt (pre_p, ret);
6de9cd9a 1660
6de9cd9a
DN
1661 return GS_ALL_DONE;
1662}
1663
ad19c4be
EB
1664/* Gimplify a variable-length array DECL. */
1665
786025ea 1666static void
726a989a 1667gimplify_vla_decl (tree decl, gimple_seq *seq_p)
786025ea
JJ
1668{
1669 /* This is a variable-sized decl. Simplify its size and mark it
98906124 1670 for deferred expansion. */
786025ea
JJ
1671 tree t, addr, ptr_type;
1672
726a989a
RB
1673 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1674 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
786025ea 1675
0138d6b2
JM
1676 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1677 if (DECL_HAS_VALUE_EXPR_P (decl))
1678 return;
1679
1680 /* All occurrences of this decl in final gimplified code will be
1681 replaced by indirection. Setting DECL_VALUE_EXPR does two
1682 things: First, it lets the rest of the gimplifier know what
1683 replacement to use. Second, it lets the debug info know
1684 where to find the value. */
1685 ptr_type = build_pointer_type (TREE_TYPE (decl));
1686 addr = create_tmp_var (ptr_type, get_name (decl));
1687 DECL_IGNORED_P (addr) = 0;
1688 t = build_fold_indirect_ref (addr);
31408f60 1689 TREE_THIS_NOTRAP (t) = 1;
1690 SET_DECL_VALUE_EXPR (decl, t);
1691 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1692
1693 t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
1694 max_int_size_in_bytes (TREE_TYPE (decl)));
d3c12306 1695 /* The call has been built for a variable-sized object. */
63d2a353 1696 CALL_ALLOCA_FOR_VAR_P (t) = 1;
786025ea 1697 t = fold_convert (ptr_type, t);
726a989a 1698 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
786025ea 1699
726a989a 1700 gimplify_and_add (t, seq_p);
1701
1702 /* Record the dynamic allocation associated with DECL if requested. */
1703 if (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC)
1704 record_dynamic_alloc (decl);
1705}
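
/* Illustrative example (added for exposition; the temporary name and the
   exact alloca builtin are simplified and can differ by target and
   options).  A variable-length array such as

     void f (int n)
     {
       char buf[n];
       use (buf);
     }

   is conceptually lowered by gimplify_vla_decl so that BUF is accessed
   through a pointer temporary fed by an alloca-style call:

     char *buf.ptr;
     buf.ptr = __builtin_alloca_with_align (n, align);
     use (*buf.ptr);

   with SET_DECL_VALUE_EXPR (buf, *buf.ptr), so both later passes and the
   debug info know where the object lives.  */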
1706
1707/* A helper function to be called via walk_tree. Mark all labels under *TP
1708 as being forced. To be called for DECL_INITIAL of static variables. */
1709
1710static tree
1711force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1712{
1713 if (TYPE_P (*tp))
1714 *walk_subtrees = 0;
1715 if (TREE_CODE (*tp) == LABEL_DECL)
1716 {
1717 FORCED_LABEL (*tp) = 1;
1718 cfun->has_forced_label_in_static = 1;
1719 }
1720
1721 return NULL_TREE;
1722}
1723
ad19c4be 1724/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1725 and initialization explicit. */
1726
1727static enum gimplify_status
726a989a 1728gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1729{
1730 tree stmt = *stmt_p;
1731 tree decl = DECL_EXPR_DECL (stmt);
1732
1733 *stmt_p = NULL_TREE;
1734
1735 if (TREE_TYPE (decl) == error_mark_node)
1736 return GS_ERROR;
1737
8e0a600b 1738 if ((TREE_CODE (decl) == TYPE_DECL
8813a647 1739 || VAR_P (decl))
8e0a600b 1740 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1741 {
1742 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1743 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1744 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
1745 }
350fae66 1746
1747 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1748 in case its size expressions contain problematic nodes like CALL_EXPR. */
1749 if (TREE_CODE (decl) == TYPE_DECL
1750 && DECL_ORIGINAL_TYPE (decl)
1751 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1752 {
1753 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1754 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
1755 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
1756 }
d400d17e 1757
8813a647 1758 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
1759 {
1760 tree init = DECL_INITIAL (decl);
6dc4a604 1761 bool is_vla = false;
350fae66 1762
1763 poly_uint64 size;
1764 if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &size)
1765 || (!TREE_STATIC (decl)
1766 && flag_stack_check == GENERIC_STACK_CHECK
1767 && maybe_gt (size,
1768 (unsigned HOST_WIDE_INT) STACK_CHECK_MAX_VAR_SIZE)))
1769 {
1770 gimplify_vla_decl (decl, seq_p);
1771 is_vla = true;
1772 }
1773
6ff92497 1774 if (asan_poisoned_variables
1775 && !is_vla
1776 && TREE_ADDRESSABLE (decl)
1777 && !TREE_STATIC (decl)
1778 && !DECL_HAS_VALUE_EXPR_P (decl)
36208e60 1779 && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
1780 && dbg_cnt (asan_use_after_scope)
1781 && !gimplify_omp_ctxp)
1782 {
1783 asan_poisoned_variables->add (decl);
1784 asan_poison_variable (decl, false, seq_p);
fcd1b8df 1785 if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
1786 gimplify_ctxp->live_switch_vars->add (decl);
1787 }
350fae66 1788
1789 /* Some front ends do not explicitly declare all anonymous
1790 artificial variables. We compensate here by declaring the
1791 variables, though it would be better if the front ends would
1792 explicitly declare them. */
1793 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1794 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1795 gimple_add_tmp_var (decl);
1796
1797 if (init && init != error_mark_node)
1798 {
1799 if (!TREE_STATIC (decl))
1800 {
1801 DECL_INITIAL (decl) = NULL_TREE;
dae7ec87 1802 init = build2 (INIT_EXPR, void_type_node, decl, init);
1803 gimplify_and_add (init, seq_p);
1804 ggc_free (init);
1805 }
1806 else
1807 /* We must still examine initializers for static variables
1808 as they may contain a label address. */
1809 walk_tree (&init, force_labels_r, NULL, NULL);
1810 }
1811 }
1812
1813 return GS_ALL_DONE;
1814}
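
/* Illustrative example (added for exposition, simplified).  For a
   block-scope declaration with a dynamic initializer such as

     int i = foo ();

   the DECL_EXPR is dropped, DECL_INITIAL is cleared, and the
   initialization is appended to *SEQ_P as an INIT_EXPR, effectively

     i = foo ();

   A static local like

     static void *p = &&lab;

   keeps its DECL_INITIAL; its initializer is only walked with
   force_labels_r so that LAB is marked as a forced label.  */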
1815
1816/* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1817 and replacing the LOOP_EXPR with goto, but if the loop contains an
1818 EXIT_EXPR, we need to append a label for it to jump to. */
1819
1820static enum gimplify_status
726a989a 1821gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1822{
1823 tree saved_label = gimplify_ctxp->exit_label;
c2255bc4 1824 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
6de9cd9a 1825
726a989a 1826 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1827
1828 gimplify_ctxp->exit_label = NULL_TREE;
1829
fff34d35 1830 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
6de9cd9a 1831
1832 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1833
6de9cd9a 1834 if (gimplify_ctxp->exit_label)
1835 gimplify_seq_add_stmt (pre_p,
1836 gimple_build_label (gimplify_ctxp->exit_label));
1837
1838 gimplify_ctxp->exit_label = saved_label;
1839
1840 *expr_p = NULL;
1841 return GS_ALL_DONE;
1842}
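
/* Illustrative example (added for exposition; label names are invented,
   and only applies to front ends that represent loops as LOOP_EXPR with
   an embedded EXIT_EXPR).  A loop equivalent to

     while (more ())
       work ();

   gimplifies into the label/goto skeleton

     start:
     if (!more ()) goto out;   <- produced when the EXIT_EXPR is gimplified
     work ();
     goto start;
     out:

   where OUT is only emitted because the body actually used
   gimplify_ctxp->exit_label.  */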
1843
ad19c4be 1844/* Gimplify a statement list onto a sequence. These may be created either
1845 by an enlightened front-end, or by shortcut_cond_expr. */
1846
1847static enum gimplify_status
1848gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1849{
1850 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1851
1852 tree_stmt_iterator i = tsi_start (*expr_p);
1853
1854 while (!tsi_end_p (i))
6de9cd9a 1855 {
1856 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1857 tsi_delink (&i);
6de9cd9a 1858 }
6de9cd9a 1859
1860 if (temp)
1861 {
1862 *expr_p = temp;
1863 return GS_OK;
1864 }
1865
1866 return GS_ALL_DONE;
1867}
0f1f6967 1868
1869/* Callback for walk_gimple_seq. */
1870
1871static tree
1872warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
1873 struct walk_stmt_info *wi)
1874{
1875 gimple *stmt = gsi_stmt (*gsi_p);
1876
1877 *handled_ops_p = true;
1878 switch (gimple_code (stmt))
1879 {
1880 case GIMPLE_TRY:
1881 /* A compiler-generated cleanup or a user-written try block.
1882 If it's empty, don't dive into it--that would result in
1883 worse location info. */
1884 if (gimple_try_eval (stmt) == NULL)
1885 {
1886 wi->info = stmt;
1887 return integer_zero_node;
1888 }
1889 /* Fall through. */
1890 case GIMPLE_BIND:
1891 case GIMPLE_CATCH:
1892 case GIMPLE_EH_FILTER:
1893 case GIMPLE_TRANSACTION:
1894 /* Walk the sub-statements. */
1895 *handled_ops_p = false;
1896 break;
1897
1898 case GIMPLE_DEBUG:
1899 /* Ignore these. We may generate them before declarations that
1900 are never executed. If there's something to warn about,
1901 there will be non-debug stmts too, and we'll catch those. */
1902 break;
1903
1904 case GIMPLE_CALL:
1905 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1906 {
1907 *handled_ops_p = false;
1908 break;
1909 }
1910 /* Fall through. */
1911 default:
1912 /* Save the first "real" statement (not a decl/lexical scope/...). */
1913 wi->info = stmt;
1914 return integer_zero_node;
1915 }
1916 return NULL_TREE;
1917}
1918
1919/* Possibly warn about unreachable statements between switch's controlling
1920 expression and the first case. SEQ is the body of a switch expression. */
1921
1922static void
1923maybe_warn_switch_unreachable (gimple_seq seq)
1924{
1925 if (!warn_switch_unreachable
1926 /* This warning doesn't play well with Fortran when optimizations
1927 are on. */
1928 || lang_GNU_Fortran ()
1929 || seq == NULL)
1930 return;
1931
1932 struct walk_stmt_info wi;
1933 memset (&wi, 0, sizeof (wi));
1934 walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1935 gimple *stmt = (gimple *) wi.info;
1936
1937 if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1938 {
1939 if (gimple_code (stmt) == GIMPLE_GOTO
1940 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1941 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1942 /* Don't warn for compiler-generated gotos. These occur
1943 in Duff's devices, for example. */;
1944 else
1945 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1946 "statement will never be executed");
1947 }
1948}
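
/* Illustrative example (added for exposition).  With
   -Wswitch-unreachable the code below is diagnosed, because control
   jumps straight from the controlling expression to a case label and
   the initialization can never run:

     switch (n)
       {
         int i = compute ();   <- "statement will never be executed"
       case 0:
         return i + n;
       default:
         return n;
       }

   The declaration itself is harmless; it is the first "real" statement
   found by warn_switch_unreachable_r that triggers the warning.  */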
1949
1950
1951/* A label entry that pairs label and a location. */
1952struct label_entry
1953{
1954 tree label;
1955 location_t loc;
1956};
1957
1958/* Find LABEL in vector of label entries VEC. */
1959
1960static struct label_entry *
1961find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
1962{
1963 unsigned int i;
1964 struct label_entry *l;
1965
1966 FOR_EACH_VEC_ELT (*vec, i, l)
1967 if (l->label == label)
1968 return l;
1969 return NULL;
1970}
1971
1972/* Return true if LABEL, a LABEL_DECL, represents a case label
1973 in a vector of labels CASES. */
1974
1975static bool
1976case_label_p (const vec<tree> *cases, tree label)
1977{
1978 unsigned int i;
1979 tree l;
1980
1981 FOR_EACH_VEC_ELT (*cases, i, l)
1982 if (CASE_LABEL (l) == label)
1983 return true;
1984 return false;
1985}
1986
65f4b875 1987/* Find the last nondebug statement in a scope STMT. */
1988
1989static gimple *
1990last_stmt_in_scope (gimple *stmt)
1991{
1992 if (!stmt)
1993 return NULL;
1994
1995 switch (gimple_code (stmt))
1996 {
1997 case GIMPLE_BIND:
1998 {
1999 gbind *bind = as_a <gbind *> (stmt);
65f4b875 2000 stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
2001 return last_stmt_in_scope (stmt);
2002 }
2003
2004 case GIMPLE_TRY:
2005 {
2006 gtry *try_stmt = as_a <gtry *> (stmt);
65f4b875 2007 stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
2008 gimple *last_eval = last_stmt_in_scope (stmt);
2009 if (gimple_stmt_may_fallthru (last_eval)
2010 && (last_eval == NULL
2011 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
2012 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
2013 {
65f4b875 2014 stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
2015 return last_stmt_in_scope (stmt);
2016 }
2017 else
2018 return last_eval;
2019 }
2020
2021 case GIMPLE_DEBUG:
2022 gcc_unreachable ();
2023
2024 default:
2025 return stmt;
2026 }
2027}
2028
2029/* Collect interesting labels in LABELS and return the statement preceding
2030 another case label, or a user-defined label. Store a location useful
2031 to give warnings at *PREVLOC (usually the location of the returned
2032 statement or of its surrounding scope). */
2033
2034static gimple *
2035collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
2036 auto_vec <struct label_entry> *labels,
2037 location_t *prevloc)
2038{
2039 gimple *prev = NULL;
2040
725891a0 2041 *prevloc = UNKNOWN_LOCATION;
2042 do
2043 {
2044 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
2045 {
2046 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2047 which starts on a GIMPLE_SWITCH and ends with a break label.
2048 Handle that as a single statement that can fall through. */
2049 gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
2050 gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
2051 gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
2052 if (last
2053 && gimple_code (first) == GIMPLE_SWITCH
2054 && gimple_code (last) == GIMPLE_LABEL)
2055 {
2056 tree label = gimple_label_label (as_a <glabel *> (last));
2057 if (SWITCH_BREAK_LABEL_P (label))
2058 {
2059 prev = bind;
2060 gsi_next (gsi_p);
2061 continue;
2062 }
2063 }
2064 }
2065 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
2066 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
2067 {
2068 /* Nested scope. Only look at the last statement of
2069 the innermost scope. */
2070 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
2071 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
2072 if (last)
2073 {
2074 prev = last;
2075 /* It might be a label without a location. Use the
2076 location of the scope then. */
2077 if (!gimple_has_location (prev))
725891a0 2078 *prevloc = bind_loc;
2079 }
2080 gsi_next (gsi_p);
2081 continue;
2082 }
2083
2084 /* Ifs are tricky. */
2085 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
2086 {
2087 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
2088 tree false_lab = gimple_cond_false_label (cond_stmt);
2089 location_t if_loc = gimple_location (cond_stmt);
2090
2091 /* If we have e.g.
2092 if (i > 1) goto <D.2259>; else goto D;
2093 we can't do much with the else-branch. */
2094 if (!DECL_ARTIFICIAL (false_lab))
2095 break;
2096
2097 /* Go on until the false label, then one step back. */
2098 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
2099 {
2100 gimple *stmt = gsi_stmt (*gsi_p);
2101 if (gimple_code (stmt) == GIMPLE_LABEL
2102 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
2103 break;
2104 }
2105
2106 /* Not found? Oops. */
2107 if (gsi_end_p (*gsi_p))
2108 break;
2109
2110 struct label_entry l = { false_lab, if_loc };
2111 labels->safe_push (l);
2112
2113 /* Go to the last statement of the then branch. */
2114 gsi_prev (gsi_p);
2115
2116 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2117 <D.1759>:
2118 <stmt>;
2119 goto <D.1761>;
2120 <D.1760>:
2121 */
2122 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
2123 && !gimple_has_location (gsi_stmt (*gsi_p)))
2124 {
2125 /* Look at the statement before, it might be
2126 attribute fallthrough, in which case don't warn. */
2127 gsi_prev (gsi_p);
2128 bool fallthru_before_dest
2129 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
2130 gsi_next (gsi_p);
2131 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
2132 if (!fallthru_before_dest)
2133 {
2134 struct label_entry l = { goto_dest, if_loc };
2135 labels->safe_push (l);
2136 }
2137 }
2138 /* And move back. */
2139 gsi_next (gsi_p);
2140 }
2141
2142 /* Remember the last statement. Skip labels that are of no interest
2143 to us. */
2144 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2145 {
2146 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
2147 if (find_label_entry (labels, label))
2148 prev = gsi_stmt (*gsi_p);
2149 }
2150 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
2151 ;
2152 else if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_PREDICT)
2153 ;
65f4b875 2154 else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
2155 prev = gsi_stmt (*gsi_p);
2156 gsi_next (gsi_p);
2157 }
2158 while (!gsi_end_p (*gsi_p)
2159 /* Stop if we find a case or a user-defined label. */
2160 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2161 || !gimple_has_location (gsi_stmt (*gsi_p))));
2162
2163 if (prev && gimple_has_location (prev))
2164 *prevloc = gimple_location (prev);
2165 return prev;
2166}
2167
 2168/* Return true if the switch fallthrough warning should occur. LABEL is
2169 the label statement that we're falling through to. */
2170
2171static bool
2172should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2173{
2174 gimple_stmt_iterator gsi = *gsi_p;
2175
2176 /* Don't warn if the label is marked with a "falls through" comment. */
2177 if (FALLTHROUGH_LABEL_P (label))
2178 return false;
2179
d2aadab1 2180 /* Don't warn for non-case labels followed by a statement:
2181 case 0:
2182 foo ();
2183 label:
2184 bar ();
2185 as these are likely intentional. */
2186 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2187 {
2188 tree l;
2189 while (!gsi_end_p (gsi)
2190 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2191 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2192 && !case_label_p (&gimplify_ctxp->case_labels, l))
65f4b875 2193 gsi_next_nondebug (&gsi);
2194 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2195 return false;
2196 }
2197
2198 /* Don't warn for terminated branches, i.e. when the subsequent case labels
2199 immediately breaks. */
2200 gsi = *gsi_p;
2201
2202 /* Skip all immediately following labels. */
2203 while (!gsi_end_p (gsi)
2204 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2205 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
65f4b875 2206 gsi_next_nondebug (&gsi);
2207
2208 /* { ... something; default:; } */
2209 if (gsi_end_p (gsi)
2210 /* { ... something; default: break; } or
2211 { ... something; default: goto L; } */
2212 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2213 /* { ... something; default: return; } */
2214 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2215 return false;
2216
2217 return true;
2218}
2219
2220/* Callback for walk_gimple_seq. */
2221
2222static tree
2223warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2224 struct walk_stmt_info *)
2225{
2226 gimple *stmt = gsi_stmt (*gsi_p);
2227
2228 *handled_ops_p = true;
2229 switch (gimple_code (stmt))
2230 {
2231 case GIMPLE_TRY:
2232 case GIMPLE_BIND:
2233 case GIMPLE_CATCH:
2234 case GIMPLE_EH_FILTER:
2235 case GIMPLE_TRANSACTION:
2236 /* Walk the sub-statements. */
2237 *handled_ops_p = false;
2238 break;
2239
2240 /* Find a sequence of form:
2241
2242 GIMPLE_LABEL
2243 [...]
2244 <may fallthru stmt>
2245 GIMPLE_LABEL
2246
2247 and possibly warn. */
2248 case GIMPLE_LABEL:
2249 {
2250 /* Found a label. Skip all immediately following labels. */
2251 while (!gsi_end_p (*gsi_p)
2252 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
65f4b875 2253 gsi_next_nondebug (gsi_p);
2254
2255 /* There might be no more statements. */
2256 if (gsi_end_p (*gsi_p))
2257 return integer_zero_node;
2258
2259 /* Vector of labels that fall through. */
2260 auto_vec <struct label_entry> labels;
2261 location_t prevloc;
2262 gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);
2263
2264 /* There might be no more statements. */
2265 if (gsi_end_p (*gsi_p))
2266 return integer_zero_node;
2267
2268 gimple *next = gsi_stmt (*gsi_p);
2269 tree label;
2270 /* If what follows is a label, then we may have a fallthrough. */
2271 if (gimple_code (next) == GIMPLE_LABEL
2272 && gimple_has_location (next)
2273 && (label = gimple_label_label (as_a <glabel *> (next)))
2274 && prev != NULL)
2275 {
2276 struct label_entry *l;
2277 bool warned_p = false;
097f82ec 2278 auto_diagnostic_group d;
2279 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2280 /* Quiet. */;
2281 else if (gimple_code (prev) == GIMPLE_LABEL
2282 && (label = gimple_label_label (as_a <glabel *> (prev)))
2283 && (l = find_label_entry (&labels, label)))
70f6d5e1 2284 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2285 "this statement may fall through");
2286 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2287 /* Try to be clever and don't warn when the statement
2288 can't actually fall through. */
2289 && gimple_stmt_may_fallthru (prev)
2290 && prevloc != UNKNOWN_LOCATION)
2291 warned_p = warning_at (prevloc,
70f6d5e1 2292 OPT_Wimplicit_fallthrough_,
2293 "this statement may fall through");
2294 if (warned_p)
2295 inform (gimple_location (next), "here");
2296
2297 /* Mark this label as processed so as to prevent multiple
2298 warnings in nested switches. */
2299 FALLTHROUGH_LABEL_P (label) = true;
2300
2301 /* So that next warn_implicit_fallthrough_r will start looking for
2302 a new sequence starting with this label. */
2303 gsi_prev (gsi_p);
2304 }
2305 }
2306 break;
2307 default:
2308 break;
2309 }
2310 return NULL_TREE;
2311}
2312
2313/* Warn when a switch case falls through. */
2314
2315static void
2316maybe_warn_implicit_fallthrough (gimple_seq seq)
2317{
2318 if (!warn_implicit_fallthrough)
2319 return;
2320
2321 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2322 if (!(lang_GNU_C ()
2323 || lang_GNU_CXX ()
2324 || lang_GNU_OBJC ()))
2325 return;
2326
2327 struct walk_stmt_info wi;
2328 memset (&wi, 0, sizeof (wi));
2329 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2330}
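
/* Illustrative example (added for exposition).  -Wimplicit-fallthrough
   warns for

     switch (n)
       {
       case 0:
         foo ();     <- "this statement may fall through"
       case 1:       <- "here"
         bar ();
         break;
       }

   but stays silent when the previous statement is the IFN_FALLTHROUGH
   marker emitted for __attribute__ ((fallthrough)) / [[fallthrough]],
   when the label is FALLTHROUGH_LABEL_P (e.g. a recognized fall-through
   comment), or when the preceding case cannot actually fall through
   (break, return, goto, ...).  */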
2331
2332/* Callback for walk_gimple_seq. */
2333
2334static tree
2335expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
cc284d9c 2336 struct walk_stmt_info *wi)
81fea426
MP
2337{
2338 gimple *stmt = gsi_stmt (*gsi_p);
2339
2340 *handled_ops_p = true;
2341 switch (gimple_code (stmt))
2342 {
2343 case GIMPLE_TRY:
2344 case GIMPLE_BIND:
2345 case GIMPLE_CATCH:
2346 case GIMPLE_EH_FILTER:
2347 case GIMPLE_TRANSACTION:
2348 /* Walk the sub-statements. */
2349 *handled_ops_p = false;
2350 break;
2351 case GIMPLE_CALL:
2352 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2353 {
2354 gsi_remove (gsi_p, true);
2355 if (gsi_end_p (*gsi_p))
cc284d9c
MP
2356 {
2357 *static_cast<location_t *>(wi->info) = gimple_location (stmt);
2358 return integer_zero_node;
2359 }
81fea426
MP
2360
2361 bool found = false;
2362 location_t loc = gimple_location (stmt);
2363
2364 gimple_stmt_iterator gsi2 = *gsi_p;
2365 stmt = gsi_stmt (gsi2);
2366 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2367 {
2368 /* Go on until the artificial label. */
2369 tree goto_dest = gimple_goto_dest (stmt);
2370 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2371 {
2372 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2373 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2374 == goto_dest)
2375 break;
2376 }
2377
2378 /* Not found? Stop. */
2379 if (gsi_end_p (gsi2))
2380 break;
2381
2382 /* Look one past it. */
2383 gsi_next (&gsi2);
2384 }
2385
2386 /* We're looking for a case label or default label here. */
2387 while (!gsi_end_p (gsi2))
2388 {
2389 stmt = gsi_stmt (gsi2);
9f6fbcd3 2390 if (gimple_code (stmt) == GIMPLE_LABEL)
81fea426
MP
2391 {
2392 tree label = gimple_label_label (as_a <glabel *> (stmt));
2393 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2394 {
2395 found = true;
2396 break;
2397 }
2398 }
9f6fbcd3 2399 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
053ee6a7 2400 ;
65f4b875
AO
2401 else if (!is_gimple_debug (stmt))
2402 /* Anything else is not expected. */
81fea426
MP
2403 break;
2404 gsi_next (&gsi2);
2405 }
2406 if (!found)
6c80b1b5
JM
2407 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2408 "a case label or default label");
81fea426
MP
2409 }
2410 break;
2411 default:
2412 break;
2413 }
2414 return NULL_TREE;
2415}
2416
2417/* Expand all FALLTHROUGH () calls in SEQ. */
2418
2419static void
2420expand_FALLTHROUGH (gimple_seq *seq_p)
2421{
2422 struct walk_stmt_info wi;
cc284d9c 2423 location_t loc;
81fea426 2424 memset (&wi, 0, sizeof (wi));
cc284d9c 2425 wi.info = (void *) &loc;
81fea426 2426 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2427 if (wi.callback_result == integer_zero_node)
2428 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2429 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2430 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2431 "a case label or default label");
2432}
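
/* Illustrative example (added for exposition).  The attribute must
   immediately precede a case or default label:

     case 0:
       foo ();
       __attribute__ ((fallthrough));
     case 1:                      <- fine, the IFN_FALLTHROUGH is consumed
       bar ();
       break;
     case 2:
       __attribute__ ((fallthrough));
       baz ();                    <- pedwarn: not preceding a case label
       break;

   A [[fallthrough]]; that ends the switch body entirely is diagnosed via
   the location stashed in WI.INFO above.  */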
2433
2434\f
2435/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
6de9cd9a
DN
2436 branch to. */
2437
2438static enum gimplify_status
726a989a 2439gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a
DN
2440{
2441 tree switch_expr = *expr_p;
726a989a 2442 gimple_seq switch_body_seq = NULL;
6de9cd9a 2443 enum gimplify_status ret;
0cd2402d
SB
2444 tree index_type = TREE_TYPE (switch_expr);
2445 if (index_type == NULL_TREE)
2446 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
6de9cd9a 2447
726a989a
RB
2448 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2449 fb_rvalue);
2450 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2451 return ret;
6de9cd9a
DN
2452
2453 if (SWITCH_BODY (switch_expr))
2454 {
9771b263
DN
2455 vec<tree> labels;
2456 vec<tree> saved_labels;
86bc8506 2457 hash_set<tree> *saved_live_switch_vars = NULL;
726a989a 2458 tree default_case = NULL_TREE;
538dd0b7 2459 gswitch *switch_stmt;
b8698a0f 2460
0cd2402d 2461 /* Save old labels, get new ones from body, then restore the old
726a989a 2462 labels. Save all the things from the switch body to append after. */
6de9cd9a 2463 saved_labels = gimplify_ctxp->case_labels;
9771b263 2464 gimplify_ctxp->case_labels.create (8);
86bc8506
ML
2465
2466 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
080140bc 2467 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
a6e5212a
ML
2468 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2469 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
080140bc
ML
2470 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2471 else
2472 gimplify_ctxp->live_switch_vars = NULL;
86bc8506 2473
81fea426
MP
2474 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2475 gimplify_ctxp->in_switch_expr = true;
6de9cd9a 2476
726a989a 2477 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
a7dc5980 2478
81fea426 2479 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
146c55da 2480 maybe_warn_switch_unreachable (switch_body_seq);
81fea426
MP
2481 maybe_warn_implicit_fallthrough (switch_body_seq);
2482 /* Only do this for the outermost GIMPLE_SWITCH. */
2483 if (!gimplify_ctxp->in_switch_expr)
2484 expand_FALLTHROUGH (&switch_body_seq);
146c55da 2485
6de9cd9a
DN
2486 labels = gimplify_ctxp->case_labels;
2487 gimplify_ctxp->case_labels = saved_labels;
86bc8506
ML
2488
2489 if (gimplify_ctxp->live_switch_vars)
2490 {
b119c055 2491 gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
86bc8506
ML
2492 delete gimplify_ctxp->live_switch_vars;
2493 }
6dc4a604 2494 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
b8698a0f 2495
68e72840
SB
2496 preprocess_case_label_vec_for_gimple (labels, index_type,
2497 &default_case);
32f579f6 2498
65791f42 2499 bool add_bind = false;
726a989a 2500 if (!default_case)
6de9cd9a 2501 {
538dd0b7 2502 glabel *new_default;
6de9cd9a 2503
68e72840
SB
2504 default_case
2505 = build_case_label (NULL_TREE, NULL_TREE,
2506 create_artificial_label (UNKNOWN_LOCATION));
65791f42
JJ
2507 if (old_in_switch_expr)
2508 {
2509 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
2510 add_bind = true;
2511 }
68e72840
SB
2512 new_default = gimple_build_label (CASE_LABEL (default_case));
2513 gimplify_seq_add_stmt (&switch_body_seq, new_default);
32f579f6 2514 }
65791f42
JJ
2515 else if (old_in_switch_expr)
2516 {
2517 gimple *last = gimple_seq_last_stmt (switch_body_seq);
2518 if (last && gimple_code (last) == GIMPLE_LABEL)
2519 {
2520 tree label = gimple_label_label (as_a <glabel *> (last));
2521 if (SWITCH_BREAK_LABEL_P (label))
2522 add_bind = true;
2523 }
2524 }
f667741c 2525
538dd0b7 2526 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
65791f42
JJ
2527 default_case, labels);
2528 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2529 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2530 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2531 so that we can easily find the start and end of the switch
2532 statement. */
2533 if (add_bind)
2534 {
2535 gimple_seq bind_body = NULL;
2536 gimplify_seq_add_stmt (&bind_body, switch_stmt);
2537 gimple_seq_add_seq (&bind_body, switch_body_seq);
2538 gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
2539 gimple_set_location (bind, EXPR_LOCATION (switch_expr));
2540 gimplify_seq_add_stmt (pre_p, bind);
2541 }
2542 else
2543 {
2544 gimplify_seq_add_stmt (pre_p, switch_stmt);
2545 gimplify_seq_add_seq (pre_p, switch_body_seq);
2546 }
9771b263 2547 labels.release ();
6de9cd9a 2548 }
282899df 2549 else
9e851845 2550 gcc_unreachable ();
6de9cd9a 2551
726a989a 2552 return GS_ALL_DONE;
2553}
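
/* Illustrative example (added for exposition; the <D.N> label names
   follow the usual dump syntax but are invented here).  A SWITCH_EXPR
   such as

     switch (x) { case 1: f (); break; default: g (); }

   is gimplified into a GIMPLE_SWITCH followed by its flattened body:

     switch (x) <default: <D.3>, case 1: <D.1>>
     <D.1>:
     f ();
     goto <D.2>;        <- the break
     <D.3>:
     g ();
     <D.2>:

   The label vector is collected by gimplify_case_label_expr through
   gimplify_ctxp->case_labels while the body is gimplified, and a default
   label is synthesized above if the source had none.  */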
2554
2555/* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2556
2557static enum gimplify_status
2558gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2559{
2560 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2561 == current_function_decl);
2562
93c18375
ML
2563 tree label = LABEL_EXPR_LABEL (*expr_p);
2564 glabel *label_stmt = gimple_build_label (label);
81fea426
MP
2565 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2566 gimplify_seq_add_stmt (pre_p, label_stmt);
2567
93c18375
ML
2568 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2569 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2570 NOT_TAKEN));
2571 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2572 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2573 TAKEN));
2574
81fea426
MP
2575 return GS_ALL_DONE;
2576}
2577
ad19c4be 2578/* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
726a989a 2579
6de9cd9a 2580static enum gimplify_status
726a989a 2581gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a 2582{
953ff289 2583 struct gimplify_ctx *ctxp;
538dd0b7 2584 glabel *label_stmt;
953ff289 2585
41dbbb37 2586 /* Invalid programs can play Duff's Device type games with, for example,
953ff289 2587 #pragma omp parallel. At least in the C front end, we don't
41dbbb37
TS
2588 detect such invalid branches until after gimplification, in the
2589 diagnose_omp_blocks pass. */
953ff289 2590 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
9771b263 2591 if (ctxp->case_labels.exists ())
953ff289 2592 break;
282899df 2593
2674fa47
JM
2594 tree label = CASE_LABEL (*expr_p);
2595 label_stmt = gimple_build_label (label);
81fea426 2596 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
9771b263 2597 ctxp->case_labels.safe_push (*expr_p);
538dd0b7 2598 gimplify_seq_add_stmt (pre_p, label_stmt);
726a989a 2599
2674fa47
JM
2600 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2601 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2602 NOT_TAKEN));
2603 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2604 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2605 TAKEN));
2606
6de9cd9a
DN
2607 return GS_ALL_DONE;
2608}
2609
6de9cd9a
DN
2610/* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2611 if necessary. */
2612
2613tree
2614build_and_jump (tree *label_p)
2615{
2616 if (label_p == NULL)
2617 /* If there's nowhere to jump, just fall through. */
65355d53 2618 return NULL_TREE;
6de9cd9a
DN
2619
2620 if (*label_p == NULL_TREE)
2621 {
c2255bc4 2622 tree label = create_artificial_label (UNKNOWN_LOCATION);
6de9cd9a
DN
2623 *label_p = label;
2624 }
2625
2626 return build1 (GOTO_EXPR, void_type_node, *label_p);
2627}
2628
2629/* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2630 This also involves building a label to jump to and communicating it to
2631 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2632
2633static enum gimplify_status
2634gimplify_exit_expr (tree *expr_p)
2635{
2636 tree cond = TREE_OPERAND (*expr_p, 0);
2637 tree expr;
2638
2639 expr = build_and_jump (&gimplify_ctxp->exit_label);
b4257cfc 2640 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
6de9cd9a
DN
2641 *expr_p = expr;
2642
2643 return GS_OK;
2644}
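
/* Illustrative example (added for exposition).  EXIT_EXPR <cond> simply
   becomes the GENERIC equivalent of

     if (cond) goto <exit_label>;

   where <exit_label> is created lazily here and emitted after the
   back-edge goto by gimplify_loop_expr.  */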
2645
2646/* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2647 different from its canonical type, wrap the whole thing inside a
2648 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2649 type.
6de9cd9a 2650
26d44ae2
RH
2651 The canonical type of a COMPONENT_REF is the type of the field being
2652 referenced--unless the field is a bit-field which can be read directly
2653 in a smaller mode, in which case the canonical type is the
2654 sign-appropriate type corresponding to that mode. */
6de9cd9a 2655
26d44ae2
RH
2656static void
2657canonicalize_component_ref (tree *expr_p)
6de9cd9a 2658{
26d44ae2
RH
2659 tree expr = *expr_p;
2660 tree type;
6de9cd9a 2661
282899df 2662 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
6de9cd9a 2663
26d44ae2
RH
2664 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2665 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2666 else
2667 type = TREE_TYPE (TREE_OPERAND (expr, 1));
6de9cd9a 2668
b26c6d55
RG
2669 /* One could argue that all the stuff below is not necessary for
2670 the non-bitfield case and declare it a FE error if type
2671 adjustment would be needed. */
26d44ae2 2672 if (TREE_TYPE (expr) != type)
6de9cd9a 2673 {
b26c6d55 2674#ifdef ENABLE_TYPES_CHECKING
26d44ae2 2675 tree old_type = TREE_TYPE (expr);
b26c6d55
RG
2676#endif
2677 int type_quals;
2678
2679 /* We need to preserve qualifiers and propagate them from
2680 operand 0. */
2681 type_quals = TYPE_QUALS (type)
2682 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2683 if (TYPE_QUALS (type) != type_quals)
2684 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
6de9cd9a 2685
26d44ae2
RH
2686 /* Set the type of the COMPONENT_REF to the underlying type. */
2687 TREE_TYPE (expr) = type;
6de9cd9a 2688
b26c6d55
RG
2689#ifdef ENABLE_TYPES_CHECKING
2690 /* It is now a FE error, if the conversion from the canonical
2691 type to the original expression type is not useless. */
2692 gcc_assert (useless_type_conversion_p (old_type, type));
2693#endif
26d44ae2
RH
2694 }
2695}
6de9cd9a 2696
26d44ae2 2697/* If a NOP conversion is changing a pointer to array of foo to a pointer
d3147f64 2698 to foo, embed that change in the ADDR_EXPR by converting
26d44ae2
RH
2699 T array[U];
2700 (T *)&array
2701 ==>
2702 &array[L]
2703 where L is the lower bound. For simplicity, only do this for constant
04d86531
RG
2704 lower bound.
2705 The constraint is that the type of &array[L] is trivially convertible
2706 to T *. */
6de9cd9a 2707
26d44ae2
RH
2708static void
2709canonicalize_addr_expr (tree *expr_p)
2710{
2711 tree expr = *expr_p;
26d44ae2 2712 tree addr_expr = TREE_OPERAND (expr, 0);
04d86531 2713 tree datype, ddatype, pddatype;
6de9cd9a 2714
04d86531
RG
2715 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2716 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2717 || TREE_CODE (addr_expr) != ADDR_EXPR)
26d44ae2 2718 return;
6de9cd9a 2719
26d44ae2 2720 /* The addr_expr type should be a pointer to an array. */
04d86531 2721 datype = TREE_TYPE (TREE_TYPE (addr_expr));
26d44ae2
RH
2722 if (TREE_CODE (datype) != ARRAY_TYPE)
2723 return;
6de9cd9a 2724
04d86531
RG
2725 /* The pointer to element type shall be trivially convertible to
2726 the expression pointer type. */
26d44ae2 2727 ddatype = TREE_TYPE (datype);
04d86531 2728 pddatype = build_pointer_type (ddatype);
e5fdcd8c
RG
2729 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2730 pddatype))
26d44ae2 2731 return;
6de9cd9a 2732
26d44ae2 2733 /* The lower bound and element sizes must be constant. */
04d86531
RG
2734 if (!TYPE_SIZE_UNIT (ddatype)
2735 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
26d44ae2
RH
2736 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2737 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2738 return;
6de9cd9a 2739
26d44ae2 2740 /* All checks succeeded. Build a new node to merge the cast. */
04d86531 2741 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
26d44ae2 2742 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
5852948c 2743 NULL_TREE, NULL_TREE);
04d86531 2744 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
e5fdcd8c
RG
2745
2746 /* We can have stripped a required restrict qualifier above. */
2747 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2748 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
26d44ae2 2749}
6de9cd9a 2750
26d44ae2
RH
2751/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2752 underneath as appropriate. */
6de9cd9a 2753
26d44ae2
RH
2754static enum gimplify_status
2755gimplify_conversion (tree *expr_p)
d3147f64 2756{
db3927fb 2757 location_t loc = EXPR_LOCATION (*expr_p);
1043771b 2758 gcc_assert (CONVERT_EXPR_P (*expr_p));
c2255bc4 2759
0710ccff
NS
2760 /* Then strip away all but the outermost conversion. */
2761 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
2762
2763 /* And remove the outermost conversion if it's useless. */
2764 if (tree_ssa_useless_type_conversion (*expr_p))
2765 *expr_p = TREE_OPERAND (*expr_p, 0);
6de9cd9a 2766
26d44ae2
RH
2767 /* If we still have a conversion at the toplevel,
2768 then canonicalize some constructs. */
1043771b 2769 if (CONVERT_EXPR_P (*expr_p))
26d44ae2
RH
2770 {
2771 tree sub = TREE_OPERAND (*expr_p, 0);
6de9cd9a 2772
26d44ae2
RH
2773 /* If a NOP conversion is changing the type of a COMPONENT_REF
2774 expression, then canonicalize its type now in order to expose more
2775 redundant conversions. */
2776 if (TREE_CODE (sub) == COMPONENT_REF)
2777 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
6de9cd9a 2778
26d44ae2
RH
2779 /* If a NOP conversion is changing a pointer to array of foo
2780 to a pointer to foo, embed that change in the ADDR_EXPR. */
2781 else if (TREE_CODE (sub) == ADDR_EXPR)
2782 canonicalize_addr_expr (expr_p);
2783 }
6de9cd9a 2784
8b17cc05
RG
2785 /* If we have a conversion to a non-register type force the
2786 use of a VIEW_CONVERT_EXPR instead. */
4f934809 2787 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
db3927fb 2788 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
4f934809 2789 TREE_OPERAND (*expr_p, 0));
8b17cc05 2790
741233cf
RB
2791 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2792 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
2793 TREE_SET_CODE (*expr_p, NOP_EXPR);
2794
6de9cd9a
DN
2795 return GS_OK;
2796}
2797
ad19c4be 2798/* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
a9f7c570
RH
2799 DECL_VALUE_EXPR, and it's worth re-examining things. */
2800
2801static enum gimplify_status
2802gimplify_var_or_parm_decl (tree *expr_p)
2803{
2804 tree decl = *expr_p;
2805
2806 /* ??? If this is a local variable, and it has not been seen in any
2807 outer BIND_EXPR, then it's probably the result of a duplicate
2808 declaration, for which we've already issued an error. It would
2809 be really nice if the front end wouldn't leak these at all.
2810 Currently the only known culprit is C++ destructors, as seen
2811 in g++.old-deja/g++.jason/binding.C. */
8813a647 2812 if (VAR_P (decl)
a9f7c570
RH
2813 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
2814 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2815 && decl_function_context (decl) == current_function_decl)
2816 {
1da2ed5f 2817 gcc_assert (seen_error ());
a9f7c570
RH
2818 return GS_ERROR;
2819 }
2820
41dbbb37 2821 /* When within an OMP context, notice uses of variables. */
953ff289
DN
2822 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
2823 return GS_ALL_DONE;
2824
a9f7c570
RH
2825 /* If the decl is an alias for another expression, substitute it now. */
2826 if (DECL_HAS_VALUE_EXPR_P (decl))
2827 {
d70ba0c1 2828 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
a9f7c570
RH
2829 return GS_OK;
2830 }
2831
2832 return GS_ALL_DONE;
2833}
2834
66c14933
EB
2835/* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
2836
2837static void
2fb9a547
AM
2838recalculate_side_effects (tree t)
2839{
2840 enum tree_code code = TREE_CODE (t);
2841 int len = TREE_OPERAND_LENGTH (t);
2842 int i;
2843
2844 switch (TREE_CODE_CLASS (code))
2845 {
2846 case tcc_expression:
2847 switch (code)
2848 {
2849 case INIT_EXPR:
2850 case MODIFY_EXPR:
2851 case VA_ARG_EXPR:
2852 case PREDECREMENT_EXPR:
2853 case PREINCREMENT_EXPR:
2854 case POSTDECREMENT_EXPR:
2855 case POSTINCREMENT_EXPR:
2856 /* All of these have side-effects, no matter what their
2857 operands are. */
2858 return;
2859
2860 default:
2861 break;
2862 }
2863 /* Fall through. */
2864
2865 case tcc_comparison: /* a comparison expression */
2866 case tcc_unary: /* a unary arithmetic expression */
2867 case tcc_binary: /* a binary arithmetic expression */
2868 case tcc_reference: /* a reference */
2869 case tcc_vl_exp: /* a function call */
2870 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2871 for (i = 0; i < len; ++i)
2872 {
2873 tree op = TREE_OPERAND (t, i);
2874 if (op && TREE_SIDE_EFFECTS (op))
2875 TREE_SIDE_EFFECTS (t) = 1;
2876 }
2877 break;
2878
2879 case tcc_constant:
2880 /* No side-effects. */
2881 return;
2882
2883 default:
2884 gcc_unreachable ();
2885 }
2886}
2887
6de9cd9a 2888/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
726a989a 2889 node *EXPR_P.
6de9cd9a
DN
2890
2891 compound_lval
2892 : min_lval '[' val ']'
2893 | min_lval '.' ID
2894 | compound_lval '[' val ']'
2895 | compound_lval '.' ID
2896
2897 This is not part of the original SIMPLE definition, which separates
2898 array and member references, but it seems reasonable to handle them
2899 together. Also, this way we don't run into problems with union
2900 aliasing; gcc requires that for accesses through a union to alias, the
2901 union reference must be explicit, which was not always the case when we
2902 were splitting up array and member refs.
2903
726a989a 2904 PRE_P points to the sequence where side effects that must happen before
6de9cd9a
DN
2905 *EXPR_P should be stored.
2906
726a989a 2907 POST_P points to the sequence where side effects that must happen after
6de9cd9a
DN
2908 *EXPR_P should be stored. */
2909
2910static enum gimplify_status
726a989a
RB
2911gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2912 fallback_t fallback)
6de9cd9a
DN
2913{
2914 tree *p;
941f78d1 2915 enum gimplify_status ret = GS_ALL_DONE, tret;
af72267c 2916 int i;
db3927fb 2917 location_t loc = EXPR_LOCATION (*expr_p);
941f78d1 2918 tree expr = *expr_p;
6de9cd9a 2919
6de9cd9a 2920 /* Create a stack of the subexpressions so later we can walk them in
ec234842 2921 order from inner to outer. */
00f96dc9 2922 auto_vec<tree, 10> expr_stack;
6de9cd9a 2923
afe84921 2924 /* We can handle anything that get_inner_reference can deal with. */
6a720599
JM
2925 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2926 {
a9f7c570 2927 restart:
6a720599
JM
2928 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2929 if (TREE_CODE (*p) == INDIRECT_REF)
db3927fb 2930 *p = fold_indirect_ref_loc (loc, *p);
a9f7c570
RH
2931
2932 if (handled_component_p (*p))
2933 ;
2934 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2935 additional COMPONENT_REFs. */
8813a647 2936 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
a9f7c570
RH
2937 && gimplify_var_or_parm_decl (p) == GS_OK)
2938 goto restart;
2939 else
6a720599 2940 break;
b8698a0f 2941
9771b263 2942 expr_stack.safe_push (*p);
6a720599 2943 }
6de9cd9a 2944
9771b263 2945 gcc_assert (expr_stack.length ());
9e51aaf5 2946
0823efed
DN
2947 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2948 walked through and P points to the innermost expression.
6de9cd9a 2949
 2950 Java requires that we elaborate nodes in source order. That
2951 means we must gimplify the inner expression followed by each of
2952 the indices, in order. But we can't gimplify the inner
2953 expression until we deal with any variable bounds, sizes, or
2954 positions in order to deal with PLACEHOLDER_EXPRs.
2955
2956 So we do this in three steps. First we deal with the annotations
2957 for any variables in the components, then we gimplify the base,
2958 then we gimplify any indices, from left to right. */
9771b263 2959 for (i = expr_stack.length () - 1; i >= 0; i--)
6de9cd9a 2960 {
9771b263 2961 tree t = expr_stack[i];
44de5aeb
RK
2962
2963 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6de9cd9a 2964 {
44de5aeb
RK
2965 /* Gimplify the low bound and element type size and put them into
2966 the ARRAY_REF. If these values are set, they have already been
2967 gimplified. */
726a989a 2968 if (TREE_OPERAND (t, 2) == NULL_TREE)
44de5aeb 2969 {
a7cc468a
RH
2970 tree low = unshare_expr (array_ref_low_bound (t));
2971 if (!is_gimple_min_invariant (low))
44de5aeb 2972 {
726a989a
RB
2973 TREE_OPERAND (t, 2) = low;
2974 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
ba4d8f9d 2975 post_p, is_gimple_reg,
726a989a 2976 fb_rvalue);
44de5aeb
RK
2977 ret = MIN (ret, tret);
2978 }
2979 }
19c44640
JJ
2980 else
2981 {
2982 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2983 is_gimple_reg, fb_rvalue);
2984 ret = MIN (ret, tret);
2985 }
44de5aeb 2986
19c44640 2987 if (TREE_OPERAND (t, 3) == NULL_TREE)
44de5aeb 2988 {
bd8a2482 2989 tree elmt_size = array_ref_element_size (t);
a7cc468a 2990 if (!is_gimple_min_invariant (elmt_size))
44de5aeb 2991 {
bd8a2482
RS
2992 elmt_size = unshare_expr (elmt_size);
2993 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2994 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2995
2996 /* Divide the element size by the alignment of the element
2997 type (above). */
2998 elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR,
2999 elmt_size, factor);
3000
726a989a
RB
3001 TREE_OPERAND (t, 3) = elmt_size;
3002 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
ba4d8f9d 3003 post_p, is_gimple_reg,
726a989a 3004 fb_rvalue);
44de5aeb
RK
3005 ret = MIN (ret, tret);
3006 }
6de9cd9a 3007 }
19c44640
JJ
3008 else
3009 {
3010 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
3011 is_gimple_reg, fb_rvalue);
3012 ret = MIN (ret, tret);
3013 }
6de9cd9a 3014 }
44de5aeb
RK
3015 else if (TREE_CODE (t) == COMPONENT_REF)
3016 {
3017 /* Set the field offset into T and gimplify it. */
19c44640 3018 if (TREE_OPERAND (t, 2) == NULL_TREE)
44de5aeb 3019 {
bd8a2482 3020 tree offset = component_ref_field_offset (t);
a7cc468a 3021 if (!is_gimple_min_invariant (offset))
44de5aeb 3022 {
bd8a2482
RS
3023 offset = unshare_expr (offset);
3024 tree field = TREE_OPERAND (t, 1);
3025 tree factor
3026 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
3027
3028 /* Divide the offset by its alignment. */
3029 offset = size_binop_loc (loc, EXACT_DIV_EXPR,
3030 offset, factor);
3031
726a989a
RB
3032 TREE_OPERAND (t, 2) = offset;
3033 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
ba4d8f9d 3034 post_p, is_gimple_reg,
726a989a 3035 fb_rvalue);
44de5aeb
RK
3036 ret = MIN (ret, tret);
3037 }
3038 }
19c44640
JJ
3039 else
3040 {
3041 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3042 is_gimple_reg, fb_rvalue);
3043 ret = MIN (ret, tret);
3044 }
44de5aeb 3045 }
af72267c
RK
3046 }
3047
a9f7c570
RH
3048 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
3049 so as to match the min_lval predicate. Failure to do so may result
3050 in the creation of large aggregate temporaries. */
3051 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
3052 fallback | fb_lvalue);
af72267c
RK
3053 ret = MIN (ret, tret);
3054
ea814c66 3055 /* And finally, the indices and operands of ARRAY_REF. During this
48eb4e53 3056 loop we also remove any useless conversions. */
9771b263 3057 for (; expr_stack.length () > 0; )
af72267c 3058 {
9771b263 3059 tree t = expr_stack.pop ();
af72267c
RK
3060
3061 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3062 {
ba4d8f9d 3063 /* Gimplify the dimension. */
af72267c
RK
3064 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
3065 {
3066 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
ba4d8f9d 3067 is_gimple_val, fb_rvalue);
af72267c
RK
3068 ret = MIN (ret, tret);
3069 }
3070 }
48eb4e53
RK
3071
3072 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
3073
726a989a
RB
3074 /* The innermost expression P may have originally had
3075 TREE_SIDE_EFFECTS set which would have caused all the outer
3076 expressions in *EXPR_P leading to P to also have had
3077 TREE_SIDE_EFFECTS set. */
6de9cd9a 3078 recalculate_side_effects (t);
6de9cd9a
DN
3079 }
3080
3081 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
90051e16 3082 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
6de9cd9a
DN
3083 {
3084 canonicalize_component_ref (expr_p);
6de9cd9a
DN
3085 }
3086
9771b263 3087 expr_stack.release ();
07724022 3088
941f78d1
JM
3089 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
3090
3091 return ret;
3092}
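
/* Illustrative example (added for exposition; the temporary name is made
   up).  Gimplifying the compound lvalue

     a[i + 1].f[j] = 0;

   handles the reference from the inside out: variable bounds, sizes and
   offsets first, then the base A, then the indices left to right, so
   that something like

     tmp = i + 1;
     a[tmp].f[j] = 0;

   comes out, with all side effects of the indices emitted into PRE_P
   before the final reference is used.  */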
3093
3094/* Gimplify the self modifying expression pointed to by EXPR_P
3095 (++, --, +=, -=).
6de9cd9a
DN
3096
3097 PRE_P points to the list where side effects that must happen before
3098 *EXPR_P should be stored.
3099
3100 POST_P points to the list where side effects that must happen after
3101 *EXPR_P should be stored.
3102
3103 WANT_VALUE is nonzero iff we want to use the value of this expression
cc3c4f62 3104 in another expression.
6de9cd9a 3105
cc3c4f62
RB
3106 ARITH_TYPE is the type the computation should be performed in. */
3107
3108enum gimplify_status
726a989a 3109gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
cc3c4f62 3110 bool want_value, tree arith_type)
6de9cd9a
DN
3111{
3112 enum tree_code code;
726a989a
RB
3113 tree lhs, lvalue, rhs, t1;
3114 gimple_seq post = NULL, *orig_post_p = post_p;
6de9cd9a
DN
3115 bool postfix;
3116 enum tree_code arith_code;
3117 enum gimplify_status ret;
db3927fb 3118 location_t loc = EXPR_LOCATION (*expr_p);
6de9cd9a
DN
3119
3120 code = TREE_CODE (*expr_p);
3121
282899df
NS
3122 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
3123 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
6de9cd9a
DN
3124
3125 /* Prefix or postfix? */
3126 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
3127 /* Faster to treat as prefix if result is not used. */
3128 postfix = want_value;
3129 else
3130 postfix = false;
3131
82181741
JJ
3132 /* For postfix, make sure the inner expression's post side effects
3133 are executed after side effects from this expression. */
3134 if (postfix)
3135 post_p = &post;
3136
6de9cd9a
DN
3137 /* Add or subtract? */
3138 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
3139 arith_code = PLUS_EXPR;
3140 else
3141 arith_code = MINUS_EXPR;
3142
3143 /* Gimplify the LHS into a GIMPLE lvalue. */
3144 lvalue = TREE_OPERAND (*expr_p, 0);
3145 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3146 if (ret == GS_ERROR)
3147 return ret;
3148
3149 /* Extract the operands to the arithmetic operation. */
3150 lhs = lvalue;
3151 rhs = TREE_OPERAND (*expr_p, 1);
3152
 3153 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
d97c9b22 3154 that as the result value and in the postqueue operation. */
6de9cd9a
DN
3155 if (postfix)
3156 {
3157 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
3158 if (ret == GS_ERROR)
3159 return ret;
6de9cd9a 3160
8e5993e2 3161 lhs = get_initialized_tmp_var (lhs, pre_p);
d97c9b22 3162 }
cc3c4f62 3163
5be014d5
AP
3164 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
3165 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
3166 {
0d82a1c8 3167 rhs = convert_to_ptrofftype_loc (loc, rhs);
5be014d5 3168 if (arith_code == MINUS_EXPR)
db3927fb 3169 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
cc3c4f62 3170 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
5be014d5 3171 }
cc3c4f62
RB
3172 else
3173 t1 = fold_convert (TREE_TYPE (*expr_p),
3174 fold_build2 (arith_code, arith_type,
3175 fold_convert (arith_type, lhs),
3176 fold_convert (arith_type, rhs)));
5be014d5 3177
6de9cd9a
DN
3178 if (postfix)
3179 {
cf1867a0 3180 gimplify_assign (lvalue, t1, pre_p);
726a989a 3181 gimplify_seq_add_seq (orig_post_p, post);
cc3c4f62 3182 *expr_p = lhs;
6de9cd9a
DN
3183 return GS_ALL_DONE;
3184 }
3185 else
3186 {
726a989a 3187 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3188 return GS_OK;
3189 }
3190}
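
/* Illustrative example (added for exposition; the temporary name is made
   up).  In the postfix case with WANT_VALUE set,

     y = x++;

   is gimplified roughly as

     x.0 = x;        <- LHS evaluated to an rvalue and saved
     x = x.0 + 1;
     y = x.0;        <- *EXPR_P is replaced by the saved value

   while prefix ++x (or a postfix whose value is unused) is simply
   rewritten as the MODIFY_EXPR x = x + 1.  For pointers the addition is
   done with POINTER_PLUS_EXPR on an offset converted by
   convert_to_ptrofftype.  */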
3191
3192/* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3193
3194static void
3195maybe_with_size_expr (tree *expr_p)
3196{
61025d1b
RK
3197 tree expr = *expr_p;
3198 tree type = TREE_TYPE (expr);
3199 tree size;
d25cee4d 3200
61025d1b
RK
3201 /* If we've already wrapped this or the type is error_mark_node, we can't do
3202 anything. */
3203 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3204 || type == error_mark_node)
d25cee4d
RH
3205 return;
3206
61025d1b 3207 /* If the size isn't known or is a constant, we have nothing to do. */
d25cee4d 3208 size = TYPE_SIZE_UNIT (type);
36fd6408 3209 if (!size || poly_int_tree_p (size))
61025d1b
RK
3210 return;
3211
3212 /* Otherwise, make a WITH_SIZE_EXPR. */
3213 size = unshare_expr (size);
3214 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3215 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3216}
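
/* Illustrative example (added for exposition).  When an expression of
   variable-sized type is used where its size still matters later, e.g.
   an argument handled by gimplify_arg or the source of an assignment,
   it is wrapped as

     WITH_SIZE_EXPR <expr, n * sizeof (elt)>

   so the number of bytes involved survives even after the type's size
   expression has been gimplified into temporaries.  */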
3217
726a989a 3218/* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
1282697f 3219 Store any side-effects in PRE_P. CALL_LOCATION is the location of
381cdae4
RB
3220 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3221 gimplified to an SSA name. */
e4f78bd4 3222
fe6ebcf1 3223enum gimplify_status
381cdae4
RB
3224gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3225 bool allow_ssa)
e4f78bd4
JM
3226{
3227 bool (*test) (tree);
3228 fallback_t fb;
3229
3230 /* In general, we allow lvalues for function arguments to avoid
3231 extra overhead of copying large aggregates out of even larger
3232 aggregates into temporaries only to copy the temporaries to
3233 the argument list. Make optimizers happy by pulling out to
3234 temporaries those types that fit in registers. */
726a989a 3235 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
e4f78bd4
JM
3236 test = is_gimple_val, fb = fb_rvalue;
3237 else
b4ef8aac
JM
3238 {
3239 test = is_gimple_lvalue, fb = fb_either;
3240 /* Also strip a TARGET_EXPR that would force an extra copy. */
3241 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3242 {
3243 tree init = TARGET_EXPR_INITIAL (*arg_p);
3244 if (init
3245 && !VOID_TYPE_P (TREE_TYPE (init)))
3246 *arg_p = init;
3247 }
3248 }
e4f78bd4 3249
d25cee4d 3250 /* If this is a variable sized type, we must remember the size. */
726a989a 3251 maybe_with_size_expr (arg_p);
d25cee4d 3252
c2255bc4 3253 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
1282697f
AH
3254 /* Make sure arguments have the same location as the function call
3255 itself. */
3256 protected_set_expr_location (*arg_p, call_location);
3257
e4f78bd4
JM
3258 /* There is a sequence point before a function call. Side effects in
3259 the argument list must occur before the actual call. So, when
3260 gimplifying arguments, force gimplify_expr to use an internal
3261 post queue which is then appended to the end of PRE_P. */
381cdae4 3262 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
e4f78bd4
JM
3263}
3264
d26fc979
JJ
3265/* Don't fold inside offloading or taskreg regions: it can break code by
3266 adding decl references that weren't in the source. We'll do it during
3267 omplower pass instead. */
88ac13da
TS
3268
3269static bool
3270maybe_fold_stmt (gimple_stmt_iterator *gsi)
3271{
3272 struct gimplify_omp_ctx *ctx;
3273 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
d9a6bd32 3274 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
88ac13da 3275 return false;
28567c40
JJ
3276 else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
3277 return false;
665db3ae
JL
3278 /* Delay folding of builtins until the IL is in consistent state
3279 so the diagnostic machinery can do a better job. */
3280 if (gimple_call_builtin_p (gsi_stmt (*gsi)))
3281 return false;
88ac13da
TS
3282 return fold_stmt (gsi);
3283}
3284
726a989a 3285/* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
90051e16 3286 WANT_VALUE is true if the result of the call is desired. */
6de9cd9a
DN
3287
3288static enum gimplify_status
726a989a 3289gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6de9cd9a 3290{
f20ca725 3291 tree fndecl, parms, p, fnptrtype;
6de9cd9a 3292 enum gimplify_status ret;
5039610b 3293 int i, nargs;
538dd0b7 3294 gcall *call;
ed9c79e1 3295 bool builtin_va_start_p = false;
db3927fb 3296 location_t loc = EXPR_LOCATION (*expr_p);
6de9cd9a 3297
282899df 3298 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
6de9cd9a 3299
d3147f64 3300 /* For reliable diagnostics during inlining, it is necessary that
6de9cd9a 3301 every call_expr be annotated with file and line. */
a281759f
PB
3302 if (! EXPR_HAS_LOCATION (*expr_p))
3303 SET_EXPR_LOCATION (*expr_p, input_location);
6de9cd9a 3304
0e37a2f3
MP
3305 /* Gimplify internal functions created in the FEs. */
3306 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3307 {
1304953e
JJ
3308 if (want_value)
3309 return GS_ALL_DONE;
3310
0e37a2f3
MP
3311 nargs = call_expr_nargs (*expr_p);
3312 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3313 auto_vec<tree> vargs (nargs);
3314
3315 for (i = 0; i < nargs; i++)
3316 {
3317 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3318 EXPR_LOCATION (*expr_p));
3319 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3320 }
815d9cc6 3321
a844293d
RS
3322 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3323 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
0e37a2f3
MP
3324 gimplify_seq_add_stmt (pre_p, call);
3325 return GS_ALL_DONE;
3326 }
3327
6de9cd9a
DN
3328 /* This may be a call to a builtin function.
3329
3330 Builtin function calls may be transformed into different
3331 (and more efficient) builtin function calls under certain
3332 circumstances. Unfortunately, gimplification can muck things
3333 up enough that the builtin expanders are not aware that certain
3334 transformations are still valid.
3335
3336 So we attempt transformation/gimplification of the call before
3337 we gimplify the CALL_EXPR. At this time we do not manage to
3338 transform all calls in the same manner as the expanders do, but
3339 we do transform most of them. */
726a989a 3340 fndecl = get_callee_fndecl (*expr_p);
3d78e008 3341 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3537a0cd
RG
3342 switch (DECL_FUNCTION_CODE (fndecl))
3343 {
9e878cf1 3344 CASE_BUILT_IN_ALLOCA:
03c00798
EB
3345 /* If the call has been built for a variable-sized object, then we
3346 want to restore the stack level when the enclosing BIND_EXPR is
3347 exited to reclaim the allocated space; otherwise, we precisely
3348 need to do the opposite and preserve the latest stack level. */
3349 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3350 gimplify_ctxp->save_stack = true;
3351 else
3352 gimplify_ctxp->keep_stack = true;
3353 break;
3354
3537a0cd 3355 case BUILT_IN_VA_START:
2efcfa4e 3356 {
726a989a 3357 builtin_va_start_p = TRUE;
5039610b 3358 if (call_expr_nargs (*expr_p) < 2)
2efcfa4e
AP
3359 {
3360 error ("too few arguments to function %<va_start%>");
c2255bc4 3361 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2efcfa4e
AP
3362 return GS_OK;
3363 }
b8698a0f 3364
5039610b 3365 if (fold_builtin_next_arg (*expr_p, true))
2efcfa4e 3366 {
c2255bc4 3367 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2efcfa4e
AP
3368 return GS_OK;
3369 }
3537a0cd
RG
3370 break;
3371 }
b25aad5f 3372
500e4868
JJ
3373 case BUILT_IN_EH_RETURN:
3374 cfun->calls_eh_return = true;
3375 break;
3376
3537a0cd
RG
3377 default:
3378 ;
3379 }
3d78e008 3380 if (fndecl && fndecl_built_in_p (fndecl))
3537a0cd
RG
3381 {
3382 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3383 if (new_tree && new_tree != *expr_p)
3384 {
3385 /* There was a transformation of this call which computes the
3386 same value, but in a more efficient way. Return and try
3387 again. */
3388 *expr_p = new_tree;
3389 return GS_OK;
2efcfa4e 3390 }
6de9cd9a
DN
3391 }
3392
f20ca725
RG
3393 /* Remember the original function pointer type. */
3394 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3395
0227ffa9
JJ
3396 if (flag_openmp
3397 && fndecl
3398 && cfun
3399 && (cfun->curr_properties & PROP_gimple_any) == 0)
135df52c
JJ
3400 {
3401 tree variant = omp_resolve_declare_variant (fndecl);
3402 if (variant != fndecl)
3403 CALL_EXPR_FN (*expr_p) = build1 (ADDR_EXPR, fnptrtype, variant);
3404 }
3405
6de9cd9a
DN
3406 /* There is a sequence point before the call, so any side effects in
3407 the calling expression must occur before the actual call. Force
3408 gimplify_expr to use an internal post queue. */
5039610b 3409 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
0f59171d 3410 is_gimple_call_addr, fb_rvalue);
6de9cd9a 3411
5039610b
SL
3412 nargs = call_expr_nargs (*expr_p);
3413
e36711f3 3414 /* Get argument types for verification. */
726a989a 3415 fndecl = get_callee_fndecl (*expr_p);
e36711f3 3416 parms = NULL_TREE;
726a989a
RB
3417 if (fndecl)
3418 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
a96c6a62
RB
3419 else
3420 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
e36711f3 3421
726a989a 3422 if (fndecl && DECL_ARGUMENTS (fndecl))
f9487002 3423 p = DECL_ARGUMENTS (fndecl);
004e2fa7 3424 else if (parms)
f9487002 3425 p = parms;
6ef5231b 3426 else
498e51ca 3427 p = NULL_TREE;
f9487002
JJ
3428 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3429 ;
6ef5231b
JJ
3430
3431 /* If the last argument is __builtin_va_arg_pack () and it is not
3432 passed as a named argument, decrease the number of CALL_EXPR
3433 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3434 if (!p
3435 && i < nargs
3436 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3437 {
3438 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3439 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3440
3441 if (last_arg_fndecl
3d78e008 3442 && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
6ef5231b
JJ
3443 {
3444 tree call = *expr_p;
3445
3446 --nargs;
db3927fb
AH
3447 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3448 CALL_EXPR_FN (call),
3449 nargs, CALL_EXPR_ARGP (call));
726a989a
RB
3450
3451 /* Copy all CALL_EXPR flags, location and block, except
6ef5231b
JJ
3452 CALL_EXPR_VA_ARG_PACK flag. */
3453 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3454 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3455 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3456 = CALL_EXPR_RETURN_SLOT_OPT (call);
3457 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
5e278028 3458 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
726a989a 3459
6ef5231b
JJ
3460 /* Set CALL_EXPR_VA_ARG_PACK. */
3461 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3462 }
3463 }
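  /* Illustrative example, not part of the original source (the wrapper and
     its names are hypothetical):

       extern int logf_ (const char *fmt, ...);
       extern inline __attribute__ ((gnu_inline, always_inline))
       int logwrap (const char *fmt, ...)
       {
         return logf_ (fmt, __builtin_va_arg_pack ());
       }

     the trailing __builtin_va_arg_pack () argument is dropped here and
     CALL_EXPR_VA_ARG_PACK is set instead, so that when logwrap is inlined
     the caller's remaining arguments are substituted for the pack.  */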
e36711f3 3464
381cdae4
RB
3465 /* If the call returns twice then after building the CFG the call
3466 argument computations will no longer dominate the call because
3467 we add an abnormal incoming edge to the call. So do not use SSA
3468 vars there. */
3469 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3470
f2d3d07e 3471 /* Gimplify the function arguments. */
726a989a 3472 if (nargs > 0)
6de9cd9a 3473 {
726a989a
RB
3474 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3475 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3476 PUSH_ARGS_REVERSED ? i-- : i++)
3477 {
3478 enum gimplify_status t;
6de9cd9a 3479
726a989a
RB
3480 /* Avoid gimplifying the second argument to va_start, which needs to
3481 be the plain PARM_DECL. */
3482 if ((i != 1) || !builtin_va_start_p)
3483 {
1282697f 3484 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
381cdae4 3485 EXPR_LOCATION (*expr_p), ! returns_twice);
6de9cd9a 3486
726a989a
RB
3487 if (t == GS_ERROR)
3488 ret = GS_ERROR;
3489 }
3490 }
6de9cd9a 3491 }
6de9cd9a 3492
f2d3d07e
RH
3493 /* Gimplify the static chain. */
3494 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3495 {
3496 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3497 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3498 else
3499 {
3500 enum gimplify_status t;
3501 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
381cdae4 3502 EXPR_LOCATION (*expr_p), ! returns_twice);
f2d3d07e
RH
3503 if (t == GS_ERROR)
3504 ret = GS_ERROR;
3505 }
3506 }
3507
33922890
RG
3508 /* Verify the function result. */
3509 if (want_value && fndecl
f20ca725 3510 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
33922890
RG
3511 {
3512 error_at (loc, "using result of function returning %<void%>");
3513 ret = GS_ERROR;
3514 }
3515
6de9cd9a 3516 /* Try this again in case gimplification exposed something. */
6f538523 3517 if (ret != GS_ERROR)
6de9cd9a 3518 {
db3927fb 3519 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
6f538523 3520
82d6e6fc 3521 if (new_tree && new_tree != *expr_p)
5039610b
SL
3522 {
3523 /* There was a transformation of this call which computes the
3524 same value, but in a more efficient way. Return and try
3525 again. */
82d6e6fc 3526 *expr_p = new_tree;
5039610b 3527 return GS_OK;
6de9cd9a
DN
3528 }
3529 }
726a989a
RB
3530 else
3531 {
df8fa700 3532 *expr_p = error_mark_node;
726a989a
RB
3533 return GS_ERROR;
3534 }
6de9cd9a
DN
3535
3536 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3537 decl. This allows us to eliminate redundant or useless
3538 calls to "const" functions. */
becfd6e5
KZ
3539 if (TREE_CODE (*expr_p) == CALL_EXPR)
3540 {
3541 int flags = call_expr_flags (*expr_p);
3542 if (flags & (ECF_CONST | ECF_PURE)
3543 /* An infinite loop is considered a side effect. */
3544 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3545 TREE_SIDE_EFFECTS (*expr_p) = 0;
3546 }
726a989a
RB
3547
3548 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3549 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3550 form and delegate the creation of a GIMPLE_CALL to
3551 gimplify_modify_expr. This is always possible because when
3552 WANT_VALUE is true, the caller wants the result of this call into
3553 a temporary, which means that we will emit an INIT_EXPR in
3554 internal_get_tmp_var which will then be handled by
3555 gimplify_modify_expr. */
3556 if (!want_value)
3557 {
3558 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3559 have to do is replicate it as a GIMPLE_CALL tuple. */
64e0f5ff 3560 gimple_stmt_iterator gsi;
5c5f0b65 3561 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
f6b64c35 3562 notice_special_calls (call);
726a989a 3563 gimplify_seq_add_stmt (pre_p, call);
64e0f5ff 3564 gsi = gsi_last (*pre_p);
88ac13da 3565 maybe_fold_stmt (&gsi);
726a989a
RB
3566 *expr_p = NULL_TREE;
3567 }
f20ca725
RG
3568 else
3569 /* Remember the original function type. */
3570 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3571 CALL_EXPR_FN (*expr_p));
726a989a 3572
6de9cd9a
DN
3573 return ret;
3574}
3575
3576/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3577 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3578
3579 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3580 condition is true or false, respectively. If null, we should generate
3581 our own to skip over the evaluation of this specific expression.
3582
ca80e52b
EB
3583 LOCUS is the source location of the COND_EXPR.
3584
6de9cd9a
DN
3585 This function is the tree equivalent of do_jump.
3586
3587 shortcut_cond_r should only be called by shortcut_cond_expr. */
3588
3589static tree
ca80e52b
EB
3590shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3591 location_t locus)
6de9cd9a
DN
3592{
3593 tree local_label = NULL_TREE;
3594 tree t, expr = NULL;
3595
3596 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3597 retain the shortcut semantics. Just insert the gotos here;
3598 shortcut_cond_expr will append the real blocks later. */
3599 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3600 {
ca80e52b
EB
3601 location_t new_locus;
3602
6de9cd9a
DN
3603 /* Turn if (a && b) into
3604
3605 if (a); else goto no;
3606 if (b) goto yes; else goto no;
3607 (no:) */
3608
3609 if (false_label_p == NULL)
3610 false_label_p = &local_label;
3611
ca80e52b
EB
3612 /* Keep the original source location on the first 'if'. */
3613 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
6de9cd9a
DN
3614 append_to_statement_list (t, &expr);
3615
ca80e52b 3616 /* Set the source location of the && on the second 'if'. */
96a95ac1 3617 new_locus = rexpr_location (pred, locus);
ca80e52b
EB
3618 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3619 new_locus);
6de9cd9a
DN
3620 append_to_statement_list (t, &expr);
3621 }
3622 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3623 {
ca80e52b
EB
3624 location_t new_locus;
3625
6de9cd9a
DN
3626 /* Turn if (a || b) into
3627
3628 if (a) goto yes;
3629 if (b) goto yes; else goto no;
3630 (yes:) */
3631
3632 if (true_label_p == NULL)
3633 true_label_p = &local_label;
3634
ca80e52b
EB
3635 /* Keep the original source location on the first 'if'. */
3636 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
6de9cd9a
DN
3637 append_to_statement_list (t, &expr);
3638
ca80e52b 3639 /* Set the source location of the || on the second 'if'. */
96a95ac1 3640 new_locus = rexpr_location (pred, locus);
ca80e52b
EB
3641 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3642 new_locus);
6de9cd9a
DN
3643 append_to_statement_list (t, &expr);
3644 }
1537737f
JJ
3645 else if (TREE_CODE (pred) == COND_EXPR
3646 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3647 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
6de9cd9a 3648 {
ca80e52b
EB
3649 location_t new_locus;
3650
6de9cd9a
DN
3651 /* As long as we're messing with gotos, turn if (a ? b : c) into
3652 if (a)
3653 if (b) goto yes; else goto no;
3654 else
1537737f
JJ
3655 if (c) goto yes; else goto no;
3656
3657 Don't do this if one of the arms has void type, which can happen
3658 in C++ when the arm is throw. */
ca80e52b
EB
3659
3660 /* Keep the original source location on the first 'if'. Set the source
3661 location of the ? on the second 'if'. */
96a95ac1 3662 new_locus = rexpr_location (pred, locus);
b4257cfc
RG
3663 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3664 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
ca80e52b 3665 false_label_p, locus),
b4257cfc 3666 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
ca80e52b 3667 false_label_p, new_locus));
6de9cd9a
DN
3668 }
3669 else
3670 {
b4257cfc
RG
3671 expr = build3 (COND_EXPR, void_type_node, pred,
3672 build_and_jump (true_label_p),
3673 build_and_jump (false_label_p));
ca80e52b 3674 SET_EXPR_LOCATION (expr, locus);
6de9cd9a
DN
3675 }
3676
3677 if (local_label)
3678 {
3679 t = build1 (LABEL_EXPR, void_type_node, local_label);
3680 append_to_statement_list (t, &expr);
3681 }
3682
3683 return expr;
3684}
3685
96a95ac1
AO
3686/* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
3687 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
3688 statement, if it is the last one. Otherwise, return NULL. */
3689
3690static tree
3691find_goto (tree expr)
3692{
3693 if (!expr)
3694 return NULL_TREE;
3695
3696 if (TREE_CODE (expr) == GOTO_EXPR)
3697 return expr;
3698
3699 if (TREE_CODE (expr) != STATEMENT_LIST)
3700 return NULL_TREE;
3701
3702 tree_stmt_iterator i = tsi_start (expr);
3703
3704 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
3705 tsi_next (&i);
3706
3707 if (!tsi_one_before_end_p (i))
3708 return NULL_TREE;
3709
3710 return find_goto (tsi_stmt (i));
3711}
3712
3713/* Same as find_goto, except that it returns NULL if the destination
3714 is not a LABEL_DECL. */
3715
3716static inline tree
3717find_goto_label (tree expr)
3718{
3719 tree dest = find_goto (expr);
3720 if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
3721 return dest;
3722 return NULL_TREE;
3723}
3724
726a989a
RB
3725/* Given a conditional expression EXPR with short-circuit boolean
3726 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
073a8998 3727 predicate apart into the equivalent sequence of conditionals. */
726a989a 3728
6de9cd9a
DN
3729static tree
3730shortcut_cond_expr (tree expr)
3731{
3732 tree pred = TREE_OPERAND (expr, 0);
3733 tree then_ = TREE_OPERAND (expr, 1);
3734 tree else_ = TREE_OPERAND (expr, 2);
3735 tree true_label, false_label, end_label, t;
3736 tree *true_label_p;
3737 tree *false_label_p;
089efaa4 3738 bool emit_end, emit_false, jump_over_else;
65355d53
RH
3739 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
3740 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
6de9cd9a
DN
3741
3742 /* First do simple transformations. */
65355d53 3743 if (!else_se)
6de9cd9a 3744 {
ca80e52b
EB
3745 /* If there is no 'else', turn
3746 if (a && b) then c
3747 into
3748 if (a) if (b) then c. */
6de9cd9a
DN
3749 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3750 {
ca80e52b 3751 /* Keep the original source location on the first 'if'. */
8400e75e 3752 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
6de9cd9a 3753 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
ca80e52b 3754 /* Set the source location of the && on the second 'if'. */
96a95ac1
AO
3755 if (rexpr_has_location (pred))
3756 SET_EXPR_LOCATION (expr, rexpr_location (pred));
6de9cd9a 3757 then_ = shortcut_cond_expr (expr);
4356a1bf 3758 then_se = then_ && TREE_SIDE_EFFECTS (then_);
6de9cd9a 3759 pred = TREE_OPERAND (pred, 0);
b4257cfc 3760 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
ca80e52b 3761 SET_EXPR_LOCATION (expr, locus);
6de9cd9a
DN
3762 }
3763 }
726a989a 3764
65355d53 3765 if (!then_se)
6de9cd9a
DN
3766 {
3767 /* If there is no 'then', turn
3768 if (a || b); else d
3769 into
3770 if (a); else if (b); else d. */
3771 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3772 {
ca80e52b 3773 /* Keep the original source location on the first 'if'. */
8400e75e 3774 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
6de9cd9a 3775 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
ca80e52b 3776 /* Set the source location of the || on the second 'if'. */
96a95ac1
AO
3777 if (rexpr_has_location (pred))
3778 SET_EXPR_LOCATION (expr, rexpr_location (pred));
6de9cd9a 3779 else_ = shortcut_cond_expr (expr);
4356a1bf 3780 else_se = else_ && TREE_SIDE_EFFECTS (else_);
6de9cd9a 3781 pred = TREE_OPERAND (pred, 0);
b4257cfc 3782 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
ca80e52b 3783 SET_EXPR_LOCATION (expr, locus);
6de9cd9a
DN
3784 }
3785 }
3786
3787 /* If we're done, great. */
3788 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
3789 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
3790 return expr;
3791
3792 /* Otherwise we need to mess with gotos. Change
3793 if (a) c; else d;
3794 to
3795 if (a); else goto no;
3796 c; goto end;
3797 no: d; end:
3798 and recursively gimplify the condition. */
3799
3800 true_label = false_label = end_label = NULL_TREE;
3801
3802 /* If our arms just jump somewhere, hijack those labels so we don't
3803 generate jumps to jumps. */
3804
96a95ac1 3805 if (tree then_goto = find_goto_label (then_))
6de9cd9a 3806 {
96a95ac1 3807 true_label = GOTO_DESTINATION (then_goto);
65355d53
RH
3808 then_ = NULL;
3809 then_se = false;
6de9cd9a
DN
3810 }
3811
96a95ac1 3812 if (tree else_goto = find_goto_label (else_))
6de9cd9a 3813 {
96a95ac1 3814 false_label = GOTO_DESTINATION (else_goto);
65355d53
RH
3815 else_ = NULL;
3816 else_se = false;
6de9cd9a
DN
3817 }
3818
9cf737f8 3819 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
6de9cd9a
DN
3820 if (true_label)
3821 true_label_p = &true_label;
3822 else
3823 true_label_p = NULL;
3824
3825 /* The 'else' branch also needs a label if it contains interesting code. */
65355d53 3826 if (false_label || else_se)
6de9cd9a
DN
3827 false_label_p = &false_label;
3828 else
3829 false_label_p = NULL;
3830
3831 /* If there was nothing else in our arms, just forward the label(s). */
65355d53 3832 if (!then_se && !else_se)
ca80e52b 3833 return shortcut_cond_r (pred, true_label_p, false_label_p,
8400e75e 3834 EXPR_LOC_OR_LOC (expr, input_location));
6de9cd9a
DN
3835
3836 /* If our last subexpression already has a terminal label, reuse it. */
65355d53 3837 if (else_se)
ca80e52b 3838 t = expr_last (else_);
65355d53 3839 else if (then_se)
ca80e52b 3840 t = expr_last (then_);
65355d53 3841 else
ca80e52b
EB
3842 t = NULL;
3843 if (t && TREE_CODE (t) == LABEL_EXPR)
3844 end_label = LABEL_EXPR_LABEL (t);
6de9cd9a
DN
3845
3846 /* If we don't care about jumping to the 'else' branch, jump to the end
3847 if the condition is false. */
3848 if (!false_label_p)
3849 false_label_p = &end_label;
3850
3851 /* We only want to emit these labels if we aren't hijacking them. */
3852 emit_end = (end_label == NULL_TREE);
3853 emit_false = (false_label == NULL_TREE);
3854
089efaa4
ILT
3855 /* We only emit the jump over the else clause if we have to--if the
3856 then clause may fall through. Otherwise we can wind up with a
3857 useless jump and a useless label at the end of gimplified code,
3858 which will cause us to think that this conditional as a whole
3859 falls through even if it doesn't. If we then inline a function
3860 which ends with such a condition, that can cause us to issue an
3861 inappropriate warning about control reaching the end of a
3862 non-void function. */
3863 jump_over_else = block_may_fallthru (then_);
3864
ca80e52b 3865 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
8400e75e 3866 EXPR_LOC_OR_LOC (expr, input_location));
6de9cd9a
DN
3867
3868 expr = NULL;
3869 append_to_statement_list (pred, &expr);
3870
3871 append_to_statement_list (then_, &expr);
65355d53 3872 if (else_se)
6de9cd9a 3873 {
089efaa4
ILT
3874 if (jump_over_else)
3875 {
ca80e52b 3876 tree last = expr_last (expr);
089efaa4 3877 t = build_and_jump (&end_label);
96a95ac1
AO
3878 if (rexpr_has_location (last))
3879 SET_EXPR_LOCATION (t, rexpr_location (last));
089efaa4
ILT
3880 append_to_statement_list (t, &expr);
3881 }
6de9cd9a
DN
3882 if (emit_false)
3883 {
3884 t = build1 (LABEL_EXPR, void_type_node, false_label);
3885 append_to_statement_list (t, &expr);
3886 }
3887 append_to_statement_list (else_, &expr);
3888 }
3889 if (emit_end && end_label)
3890 {
3891 t = build1 (LABEL_EXPR, void_type_node, end_label);
3892 append_to_statement_list (t, &expr);
3893 }
3894
3895 return expr;
3896}
3897
3898/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
3899
50674e96 3900tree
6de9cd9a
DN
3901gimple_boolify (tree expr)
3902{
3903 tree type = TREE_TYPE (expr);
db3927fb 3904 location_t loc = EXPR_LOCATION (expr);
6de9cd9a 3905
554cf330
JJ
3906 if (TREE_CODE (expr) == NE_EXPR
3907 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
3908 && integer_zerop (TREE_OPERAND (expr, 1)))
3909 {
3910 tree call = TREE_OPERAND (expr, 0);
3911 tree fn = get_callee_fndecl (call);
3912
d53c73e0
JJ
3913 /* For __builtin_expect ((long) (x), y) recurse into x as well
3914 if x is truth_value_p. */
554cf330 3915 if (fn
3d78e008 3916 && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
554cf330
JJ
3917 && call_expr_nargs (call) == 2)
3918 {
3919 tree arg = CALL_EXPR_ARG (call, 0);
3920 if (arg)
3921 {
3922 if (TREE_CODE (arg) == NOP_EXPR
3923 && TREE_TYPE (arg) == TREE_TYPE (call))
3924 arg = TREE_OPERAND (arg, 0);
d53c73e0
JJ
3925 if (truth_value_p (TREE_CODE (arg)))
3926 {
3927 arg = gimple_boolify (arg);
3928 CALL_EXPR_ARG (call, 0)
3929 = fold_convert_loc (loc, TREE_TYPE (call), arg);
3930 }
554cf330
JJ
3931 }
3932 }
3933 }
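  /* Illustrative example, not part of the original source: for

       if (__builtin_expect ((long) (a == b), 1)) ...

     the comparison wrapped in the integral conversion is boolified by the
     recursion above, so the branch-prediction hint still applies to a
     proper boolean value.  */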
3934
6de9cd9a
DN
3935 switch (TREE_CODE (expr))
3936 {
3937 case TRUTH_AND_EXPR:
3938 case TRUTH_OR_EXPR:
3939 case TRUTH_XOR_EXPR:
3940 case TRUTH_ANDIF_EXPR:
3941 case TRUTH_ORIF_EXPR:
3942 /* Also boolify the arguments of truth exprs. */
3943 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
3944 /* FALLTHRU */
3945
3946 case TRUTH_NOT_EXPR:
3947 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
6de9cd9a 3948
6de9cd9a 3949 /* These expressions always produce boolean results. */
7f3ff782
KT
3950 if (TREE_CODE (type) != BOOLEAN_TYPE)
3951 TREE_TYPE (expr) = boolean_type_node;
6de9cd9a 3952 return expr;
d3147f64 3953
8170608b 3954 case ANNOTATE_EXPR:
718c4601 3955 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
8170608b 3956 {
718c4601 3957 case annot_expr_ivdep_kind:
ac9effed 3958 case annot_expr_unroll_kind:
718c4601
EB
3959 case annot_expr_no_vector_kind:
3960 case annot_expr_vector_kind:
34705fdc 3961 case annot_expr_parallel_kind:
8170608b
TB
3962 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3963 if (TREE_CODE (type) != BOOLEAN_TYPE)
3964 TREE_TYPE (expr) = boolean_type_node;
3965 return expr;
718c4601
EB
3966 default:
3967 gcc_unreachable ();
8170608b 3968 }
8170608b 3969
6de9cd9a 3970 default:
7f3ff782
KT
3971 if (COMPARISON_CLASS_P (expr))
3972 {
3973 /* These expressions always produce boolean results. */
3974 if (TREE_CODE (type) != BOOLEAN_TYPE)
3975 TREE_TYPE (expr) = boolean_type_node;
3976 return expr;
3977 }
6de9cd9a
DN
3978 /* Other expressions that get here must have boolean values, but
3979 might need to be converted to the appropriate mode. */
7f3ff782 3980 if (TREE_CODE (type) == BOOLEAN_TYPE)
1d15f620 3981 return expr;
db3927fb 3982 return fold_convert_loc (loc, boolean_type_node, expr);
6de9cd9a
DN
3983 }
3984}
3985
aea74440
JJ
3986/* Given a conditional expression *EXPR_P without side effects, gimplify
3987 its operands. New statements are inserted into PRE_P. */
3988
3989static enum gimplify_status
726a989a 3990gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
aea74440
JJ
3991{
3992 tree expr = *expr_p, cond;
3993 enum gimplify_status ret, tret;
3994 enum tree_code code;
3995
3996 cond = gimple_boolify (COND_EXPR_COND (expr));
3997
3998 /* We need to handle && and || specially, as gimplifying them produces
3999 a pure COND_EXPR, which would otherwise lead to an infinite cycle. */
4000 code = TREE_CODE (cond);
4001 if (code == TRUTH_ANDIF_EXPR)
4002 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
4003 else if (code == TRUTH_ORIF_EXPR)
4004 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
726a989a 4005 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
aea74440
JJ
4006 COND_EXPR_COND (*expr_p) = cond;
4007
4008 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
4009 is_gimple_val, fb_rvalue);
4010 ret = MIN (ret, tret);
4011 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
4012 is_gimple_val, fb_rvalue);
4013
4014 return MIN (ret, tret);
4015}
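/* Illustrative example, not part of the original source: for an rvalue use
   such as

     x = (p && q) ? a : b;

   where both arms are safe to evaluate, the TRUTH_ANDIF_EXPR condition is
   gimplified as TRUTH_AND_EXPR above; gimplifying the short-circuit form
   would itself produce another pure COND_EXPR and cycle back here.  */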
4016
ad19c4be 4017/* Return true if evaluating EXPR could trap.
aea74440
JJ
4018 EXPR is GENERIC, while tree_could_trap_p can be called
4019 only on GIMPLE. */
4020
3f6f3319 4021bool
aea74440
JJ
4022generic_expr_could_trap_p (tree expr)
4023{
4024 unsigned i, n;
4025
4026 if (!expr || is_gimple_val (expr))
4027 return false;
4028
4029 if (!EXPR_P (expr) || tree_could_trap_p (expr))
4030 return true;
4031
4032 n = TREE_OPERAND_LENGTH (expr);
4033 for (i = 0; i < n; i++)
4034 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
4035 return true;
4036
4037 return false;
4038}
4039
206048bd 4040/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
6de9cd9a
DN
4041 into
4042
4043 if (p) if (p)
4044 t1 = a; a;
4045 else or else
4046 t1 = b; b;
4047 t1;
4048
4049 The second form is used when *EXPR_P is of type void.
4050
4051 PRE_P points to the list where side effects that must happen before
dae7ec87 4052 *EXPR_P should be stored. */
6de9cd9a
DN
4053
4054static enum gimplify_status
726a989a 4055gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
6de9cd9a
DN
4056{
4057 tree expr = *expr_p;
06ec59e6
EB
4058 tree type = TREE_TYPE (expr);
4059 location_t loc = EXPR_LOCATION (expr);
4060 tree tmp, arm1, arm2;
6de9cd9a 4061 enum gimplify_status ret;
726a989a
RB
4062 tree label_true, label_false, label_cont;
4063 bool have_then_clause_p, have_else_clause_p;
538dd0b7 4064 gcond *cond_stmt;
726a989a
RB
4065 enum tree_code pred_code;
4066 gimple_seq seq = NULL;
26d44ae2
RH
4067
4068 /* If this COND_EXPR has a value, copy the values into a temporary within
4069 the arms. */
06ec59e6 4070 if (!VOID_TYPE_P (type))
26d44ae2 4071 {
06ec59e6 4072 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
aff98faf
AO
4073 tree result;
4074
06ec59e6 4075 /* If either an rvalue is ok or we do not require an lvalue, create the
c2465dae 4076 temporary. But we cannot do that if the type is addressable. */
06ec59e6 4077 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
c2465dae 4078 && !TREE_ADDRESSABLE (type))
aff98faf 4079 {
aea74440
JJ
4080 if (gimplify_ctxp->allow_rhs_cond_expr
4081 /* If either branch has side effects or could trap, it can't be
4082 evaluated unconditionally. */
06ec59e6
EB
4083 && !TREE_SIDE_EFFECTS (then_)
4084 && !generic_expr_could_trap_p (then_)
4085 && !TREE_SIDE_EFFECTS (else_)
4086 && !generic_expr_could_trap_p (else_))
aea74440
JJ
4087 return gimplify_pure_cond_expr (expr_p, pre_p);
4088
06ec59e6
EB
4089 tmp = create_tmp_var (type, "iftmp");
4090 result = tmp;
aff98faf 4091 }
06ec59e6
EB
4092
4093 /* Otherwise, only create and copy references to the values. */
26d44ae2
RH
4094 else
4095 {
06ec59e6 4096 type = build_pointer_type (type);
aff98faf 4097
06ec59e6
EB
4098 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4099 then_ = build_fold_addr_expr_loc (loc, then_);
aff98faf 4100
06ec59e6
EB
4101 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4102 else_ = build_fold_addr_expr_loc (loc, else_);
4103
4104 expr
4105 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
aea74440 4106
726a989a 4107 tmp = create_tmp_var (type, "iftmp");
70f34814 4108 result = build_simple_mem_ref_loc (loc, tmp);
26d44ae2
RH
4109 }
4110
06ec59e6
EB
4111 /* Build the new then clause, `tmp = then_;'. But don't build the
4112 assignment if the value is void; in C++ it can be if it's a throw. */
4113 if (!VOID_TYPE_P (TREE_TYPE (then_)))
507318f1 4114 TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);
26d44ae2 4115
06ec59e6
EB
4116 /* Similarly, build the new else clause, `tmp = else_;'. */
4117 if (!VOID_TYPE_P (TREE_TYPE (else_)))
507318f1 4118 TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);
26d44ae2
RH
4119
4120 TREE_TYPE (expr) = void_type_node;
4121 recalculate_side_effects (expr);
4122
d91ba7b0 4123 /* Move the COND_EXPR to the prequeue. */
726a989a 4124 gimplify_stmt (&expr, pre_p);
26d44ae2 4125
aff98faf 4126 *expr_p = result;
726a989a 4127 return GS_ALL_DONE;
26d44ae2
RH
4128 }
4129
f2f81d57
EB
4130 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4131 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
4132 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
4133 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
4134
26d44ae2
RH
4135 /* Make sure the condition has BOOLEAN_TYPE. */
4136 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4137
4138 /* Break apart && and || conditions. */
4139 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
4140 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
4141 {
4142 expr = shortcut_cond_expr (expr);
4143
4144 if (expr != *expr_p)
4145 {
4146 *expr_p = expr;
4147
4148 /* We can't rely on gimplify_expr to re-gimplify the expanded
4149 form properly, as cleanups might cause the target labels to be
4150 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4151 set up a conditional context. */
4152 gimple_push_condition ();
726a989a 4153 gimplify_stmt (expr_p, &seq);
26d44ae2 4154 gimple_pop_condition (pre_p);
726a989a 4155 gimple_seq_add_seq (pre_p, seq);
26d44ae2
RH
4156
4157 return GS_ALL_DONE;
4158 }
4159 }
4160
4161 /* Now do the normal gimplification. */
26d44ae2 4162
726a989a 4163 /* Gimplify condition. */
70e2a30a
IL
4164 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
4165 is_gimple_condexpr_for_cond, fb_rvalue);
26d44ae2 4166 if (ret == GS_ERROR)
726a989a
RB
4167 return GS_ERROR;
4168 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
4169
4170 gimple_push_condition ();
26d44ae2 4171
726a989a 4172 have_then_clause_p = have_else_clause_p = false;
96a95ac1
AO
4173 label_true = find_goto_label (TREE_OPERAND (expr, 1));
4174 if (label_true
4175 && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
726a989a
RB
4176 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4177 have different locations, otherwise we end up with incorrect
4178 location information on the branches. */
4179 && (optimize
4180 || !EXPR_HAS_LOCATION (expr)
96a95ac1
AO
4181 || !rexpr_has_location (label_true)
4182 || EXPR_LOCATION (expr) == rexpr_location (label_true)))
726a989a 4183 {
726a989a 4184 have_then_clause_p = true;
96a95ac1 4185 label_true = GOTO_DESTINATION (label_true);
26d44ae2
RH
4186 }
4187 else
c2255bc4 4188 label_true = create_artificial_label (UNKNOWN_LOCATION);
96a95ac1
AO
4189 label_false = find_goto_label (TREE_OPERAND (expr, 2));
4190 if (label_false
4191 && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
726a989a
RB
4192 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4193 have different locations, otherwise we end up with incorrect
4194 location information on the branches. */
4195 && (optimize
4196 || !EXPR_HAS_LOCATION (expr)
96a95ac1
AO
4197 || !rexpr_has_location (label_false)
4198 || EXPR_LOCATION (expr) == rexpr_location (label_false)))
726a989a 4199 {
726a989a 4200 have_else_clause_p = true;
96a95ac1 4201 label_false = GOTO_DESTINATION (label_false);
726a989a
RB
4202 }
4203 else
c2255bc4 4204 label_false = create_artificial_label (UNKNOWN_LOCATION);
26d44ae2 4205
726a989a
RB
4206 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
4207 &arm2);
538dd0b7 4208 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
c3bea076 4209 label_false);
932c0da4 4210 gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
538dd0b7 4211 gimplify_seq_add_stmt (&seq, cond_stmt);
c3bea076
RB
4212 gimple_stmt_iterator gsi = gsi_last (seq);
4213 maybe_fold_stmt (&gsi);
4214
726a989a
RB
4215 label_cont = NULL_TREE;
4216 if (!have_then_clause_p)
4217 {
4218 /* For if (...) {} else { code; } put label_true after
4219 the else block. */
4220 if (TREE_OPERAND (expr, 1) == NULL_TREE
4221 && !have_else_clause_p
4222 && TREE_OPERAND (expr, 2) != NULL_TREE)
4223 label_cont = label_true;
4224 else
4225 {
4226 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4227 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4228 /* For if (...) { code; } else {} or
4229 if (...) { code; } else goto label; or
4230 if (...) { code; return; } else { ... }
4231 label_cont isn't needed. */
4232 if (!have_else_clause_p
4233 && TREE_OPERAND (expr, 2) != NULL_TREE
4234 && gimple_seq_may_fallthru (seq))
4235 {
355fe088 4236 gimple *g;
c2255bc4 4237 label_cont = create_artificial_label (UNKNOWN_LOCATION);
726a989a
RB
4238
4239 g = gimple_build_goto (label_cont);
4240
4241 /* GIMPLE_COND's are very low level; they have embedded
4242 gotos. This particular embedded goto should not be marked
4243 with the location of the original COND_EXPR, as it would
4244 correspond to the COND_EXPR's condition, not the ELSE or the
4245 THEN arms. To avoid marking it with the wrong location, flag
4246 it as "no location". */
4247 gimple_set_do_not_emit_location (g);
4248
4249 gimplify_seq_add_stmt (&seq, g);
4250 }
4251 }
4252 }
4253 if (!have_else_clause_p)
4254 {
4255 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4256 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4257 }
4258 if (label_cont)
4259 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4260
4261 gimple_pop_condition (pre_p);
4262 gimple_seq_add_seq (pre_p, seq);
4263
4264 if (ret == GS_ERROR)
4265 ; /* Do nothing. */
4266 else if (have_then_clause_p || have_else_clause_p)
4267 ret = GS_ALL_DONE;
4268 else
4269 {
4270 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4271 expr = TREE_OPERAND (expr, 0);
4272 gimplify_stmt (&expr, pre_p);
4273 }
4274
4275 *expr_p = NULL;
4276 return ret;
4277}
4278
f76d6e6f
EB
4279/* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4280 to be marked addressable.
4281
4282 We cannot rely on such an expression being directly markable if a temporary
4283 has been created by the gimplification. In this case, we create another
4284 temporary and initialize it with a copy, which will become a store after we
4285 mark it addressable. This can happen if the front-end passed us something
4286 that it could not mark addressable yet, like a Fortran pass-by-reference
4287 parameter (int) floatvar. */
4288
4289static void
4290prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4291{
4292 while (handled_component_p (*expr_p))
4293 expr_p = &TREE_OPERAND (*expr_p, 0);
4294 if (is_gimple_reg (*expr_p))
947ca6a0 4295 {
381cdae4
RB
4296 /* Do not allow an SSA name as the temporary. */
4297 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
eb72dc66 4298 DECL_NOT_GIMPLE_REG_P (var) = 1;
947ca6a0
RB
4299 *expr_p = var;
4300 }
f76d6e6f
EB
4301}
4302
726a989a
RB
4303/* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4304 a call to __builtin_memcpy. */
4305
4306static enum gimplify_status
4307gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4308 gimple_seq *seq_p)
26d44ae2 4309{
5039610b 4310 tree t, to, to_ptr, from, from_ptr;
538dd0b7 4311 gcall *gs;
db3927fb 4312 location_t loc = EXPR_LOCATION (*expr_p);
26d44ae2 4313
726a989a
RB
4314 to = TREE_OPERAND (*expr_p, 0);
4315 from = TREE_OPERAND (*expr_p, 1);
26d44ae2 4316
f76d6e6f
EB
4317 /* Mark the RHS addressable. Beware that it may not be possible to do so
4318 directly if a temporary has been created by the gimplification. */
4319 prepare_gimple_addressable (&from, seq_p);
4320
628c189e 4321 mark_addressable (from);
db3927fb
AH
4322 from_ptr = build_fold_addr_expr_loc (loc, from);
4323 gimplify_arg (&from_ptr, seq_p, loc);
26d44ae2 4324
628c189e 4325 mark_addressable (to);
db3927fb
AH
4326 to_ptr = build_fold_addr_expr_loc (loc, to);
4327 gimplify_arg (&to_ptr, seq_p, loc);
726a989a 4328
e79983f4 4329 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
726a989a
RB
4330
4331 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
26d44ae2
RH
4332
4333 if (want_value)
4334 {
726a989a 4335 /* tmp = memcpy() */
b731b390 4336 t = create_tmp_var (TREE_TYPE (to_ptr));
726a989a
RB
4337 gimple_call_set_lhs (gs, t);
4338 gimplify_seq_add_stmt (seq_p, gs);
4339
70f34814 4340 *expr_p = build_simple_mem_ref (t);
726a989a 4341 return GS_ALL_DONE;
26d44ae2
RH
4342 }
4343
726a989a
RB
4344 gimplify_seq_add_stmt (seq_p, gs);
4345 *expr_p = NULL;
4346 return GS_ALL_DONE;
26d44ae2
RH
4347}
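/* Illustrative sketch, not part of the original source: an aggregate
   assignment whose size is only known at run time, i.e. one whose RHS is a
   WITH_SIZE_EXPR,

     to = WITH_SIZE_EXPR <from, size>;

   is emitted by the function above roughly as

     __builtin_memcpy (&to, &from, size);

   with a temporary for memcpy's return value created only when the caller
   wants the value of the assignment.  */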
4348
4349/* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4350 a call to __builtin_memset. In this case we know that the RHS is
4351 a CONSTRUCTOR with an empty element list. */
4352
4353static enum gimplify_status
726a989a
RB
4354gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4355 gimple_seq *seq_p)
26d44ae2 4356{
1a13360e 4357 tree t, from, to, to_ptr;
538dd0b7 4358 gcall *gs;
db3927fb 4359 location_t loc = EXPR_LOCATION (*expr_p);
26d44ae2 4360
1a13360e
OH
4361 /* Assert our assumptions, to abort instead of producing wrong code
4362 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4363 not be immediately exposed. */
b8698a0f 4364 from = TREE_OPERAND (*expr_p, 1);
1a13360e
OH
4365 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4366 from = TREE_OPERAND (from, 0);
4367
4368 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
9771b263 4369 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
1a13360e
OH
4370
4371 /* Now proceed. */
726a989a 4372 to = TREE_OPERAND (*expr_p, 0);
26d44ae2 4373
db3927fb
AH
4374 to_ptr = build_fold_addr_expr_loc (loc, to);
4375 gimplify_arg (&to_ptr, seq_p, loc);
e79983f4 4376 t = builtin_decl_implicit (BUILT_IN_MEMSET);
726a989a
RB
4377
4378 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
26d44ae2
RH
4379
4380 if (want_value)
4381 {
726a989a 4382 /* tmp = memset() */
b731b390 4383 t = create_tmp_var (TREE_TYPE (to_ptr));
726a989a
RB
4384 gimple_call_set_lhs (gs, t);
4385 gimplify_seq_add_stmt (seq_p, gs);
4386
4387 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4388 return GS_ALL_DONE;
26d44ae2
RH
4389 }
4390
726a989a
RB
4391 gimplify_seq_add_stmt (seq_p, gs);
4392 *expr_p = NULL;
4393 return GS_ALL_DONE;
26d44ae2
RH
4394}
4395
57d1dd87
RH
4396/* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4397 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
ad19c4be 4398 assignment. Return non-null if we detect a potential overlap. */
57d1dd87
RH
4399
4400struct gimplify_init_ctor_preeval_data
4401{
4402 /* The base decl of the lhs object. May be NULL, in which case we
4403 have to assume the lhs is indirect. */
4404 tree lhs_base_decl;
4405
4406 /* The alias set of the lhs object. */
4862826d 4407 alias_set_type lhs_alias_set;
57d1dd87
RH
4408};
4409
4410static tree
4411gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4412{
4413 struct gimplify_init_ctor_preeval_data *data
4414 = (struct gimplify_init_ctor_preeval_data *) xdata;
4415 tree t = *tp;
4416
4417 /* If we find the base object, obviously we have overlap. */
4418 if (data->lhs_base_decl == t)
4419 return t;
4420
4421 /* If the constructor component is indirect, determine if we have a
4422 potential overlap with the lhs. The only bits of information we
4423 have to go on at this point are addressability and alias sets. */
70f34814
RG
4424 if ((INDIRECT_REF_P (t)
4425 || TREE_CODE (t) == MEM_REF)
57d1dd87
RH
4426 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4427 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4428 return t;
4429
df10ee2a 4430 /* If the constructor component is a call, determine if it can hide a
70f34814
RG
4431 potential overlap with the lhs through an INDIRECT_REF like above.
4432 ??? Ugh - this is completely broken. In fact this whole analysis
4433 doesn't look conservative. */
df10ee2a
EB
4434 if (TREE_CODE (t) == CALL_EXPR)
4435 {
4436 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4437
4438 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4439 if (POINTER_TYPE_P (TREE_VALUE (type))
4440 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4441 && alias_sets_conflict_p (data->lhs_alias_set,
4442 get_alias_set
4443 (TREE_TYPE (TREE_VALUE (type)))))
4444 return t;
4445 }
4446
6615c446 4447 if (IS_TYPE_OR_DECL_P (t))
57d1dd87
RH
4448 *walk_subtrees = 0;
4449 return NULL;
4450}
4451
726a989a 4452/* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
57d1dd87
RH
4453 force values that overlap with the lhs (as described by *DATA)
4454 into temporaries. */
4455
4456static void
726a989a 4457gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
57d1dd87
RH
4458 struct gimplify_init_ctor_preeval_data *data)
4459{
4460 enum gimplify_status one;
4461
51eed280
PB
4462 /* If the value is constant, then there's nothing to pre-evaluate. */
4463 if (TREE_CONSTANT (*expr_p))
4464 {
4465 /* Ensure it does not have side effects, it might contain a reference to
4466 the object we're initializing. */
4467 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4468 return;
4469 }
57d1dd87
RH
4470
4471 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4472 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4473 return;
4474
4475 /* Recurse for nested constructors. */
4476 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4477 {
4038c495
GB
4478 unsigned HOST_WIDE_INT ix;
4479 constructor_elt *ce;
9771b263 4480 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4038c495 4481
9771b263 4482 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4038c495 4483 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
726a989a 4484
57d1dd87
RH
4485 return;
4486 }
4487
0461b801
EB
4488 /* If this is a variable sized type, we must remember the size. */
4489 maybe_with_size_expr (expr_p);
57d1dd87
RH
4490
4491 /* Gimplify the constructor element to something appropriate for the rhs
726a989a 4492 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
d3147f64 4493 the gimplifier will consider this a store to memory. Doing this
57d1dd87
RH
4494 gimplification now means that we won't have to deal with complicated
4495 language-specific trees, nor trees like SAVE_EXPR that can induce
b01d837f 4496 exponential search behavior. */
57d1dd87
RH
4497 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4498 if (one == GS_ERROR)
4499 {
4500 *expr_p = NULL;
4501 return;
4502 }
4503
4504 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4505 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4506 always be true for all scalars, since is_gimple_mem_rhs insists on a
4507 temporary variable for them. */
4508 if (DECL_P (*expr_p))
4509 return;
4510
4511 /* If this is of variable size, we have no choice but to assume it doesn't
4512 overlap since we can't make a temporary for it. */
4c923c28 4513 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
57d1dd87
RH
4514 return;
4515
4516 /* Otherwise, we must search for overlap ... */
4517 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4518 return;
4519
4520 /* ... and if found, force the value into a temporary. */
4521 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
4522}
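/* Illustrative example, not part of the original source: for an assignment
   such as

     s = (struct S) { .a = s.b, .b = s.a };

   both constructor values read the object being stored to, so the walk
   above detects the overlap and forces them into temporaries before the
   element-wise stores are generated.  */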
4523
6fa91b48
SB
4524/* A subroutine of gimplify_init_ctor_eval. Create a loop for
4525 a RANGE_EXPR in a CONSTRUCTOR for an array.
4526
4527 var = lower;
4528 loop_entry:
4529 object[var] = value;
4530 if (var == upper)
4531 goto loop_exit;
4532 var = var + 1;
4533 goto loop_entry;
4534 loop_exit:
4535
4536 We increment var _after_ the loop exit check because we might otherwise
4537 fail if upper == TYPE_MAX_VALUE (type for upper).
4538
4539 Note that we never have to deal with SAVE_EXPRs here, because this has
4540 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4541
9771b263 4542static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
726a989a 4543 gimple_seq *, bool);
6fa91b48
SB
4544
4545static void
4546gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4547 tree value, tree array_elt_type,
726a989a 4548 gimple_seq *pre_p, bool cleared)
6fa91b48 4549{
726a989a 4550 tree loop_entry_label, loop_exit_label, fall_thru_label;
b56b9fe3 4551 tree var, var_type, cref, tmp;
6fa91b48 4552
c2255bc4
AH
4553 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4554 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4555 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
6fa91b48
SB
4556
4557 /* Create and initialize the index variable. */
4558 var_type = TREE_TYPE (upper);
b731b390 4559 var = create_tmp_var (var_type);
726a989a 4560 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
6fa91b48
SB
4561
4562 /* Add the loop entry label. */
726a989a 4563 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
6fa91b48
SB
4564
4565 /* Build the reference. */
4566 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4567 var, NULL_TREE, NULL_TREE);
4568
4569 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4570 the store. Otherwise just assign value to the reference. */
4571
4572 if (TREE_CODE (value) == CONSTRUCTOR)
4573 /* NB we might have to call ourself recursively through
4574 gimplify_init_ctor_eval if the value is a constructor. */
4575 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4576 pre_p, cleared);
4577 else
726a989a 4578 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
6fa91b48
SB
4579
4580 /* We exit the loop when the index var is equal to the upper bound. */
726a989a
RB
4581 gimplify_seq_add_stmt (pre_p,
4582 gimple_build_cond (EQ_EXPR, var, upper,
4583 loop_exit_label, fall_thru_label));
4584
4585 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
6fa91b48
SB
4586
4587 /* Otherwise, increment the index var... */
b56b9fe3
RS
4588 tmp = build2 (PLUS_EXPR, var_type, var,
4589 fold_convert (var_type, integer_one_node));
726a989a 4590 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
6fa91b48
SB
4591
4592 /* ...and jump back to the loop entry. */
726a989a 4593 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
6fa91b48
SB
4594
4595 /* Add the loop exit label. */
726a989a 4596 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
6fa91b48
SB
4597}
4598
292a398f 4599/* Return true if FDECL is a FIELD_DECL of zero size. */
b8698a0f 4600
292a398f 4601static bool
22ea9ec0 4602zero_sized_field_decl (const_tree fdecl)
292a398f 4603{
b8698a0f 4604 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
292a398f
DB
4605 && integer_zerop (DECL_SIZE (fdecl)))
4606 return true;
4607 return false;
4608}
4609
d06526b7 4610/* Return true if TYPE is zero sized. */
b8698a0f 4611
d06526b7 4612static bool
22ea9ec0 4613zero_sized_type (const_tree type)
d06526b7
AP
4614{
4615 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
4616 && integer_zerop (TYPE_SIZE (type)))
4617 return true;
4618 return false;
4619}
4620
57d1dd87
RH
4621/* A subroutine of gimplify_init_constructor. Generate individual
4622 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4038c495 4623 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
57d1dd87
RH
4624 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4625 zeroed first. */
4626
4627static void
9771b263 4628gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
726a989a 4629 gimple_seq *pre_p, bool cleared)
57d1dd87
RH
4630{
4631 tree array_elt_type = NULL;
4038c495
GB
4632 unsigned HOST_WIDE_INT ix;
4633 tree purpose, value;
57d1dd87
RH
4634
4635 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
4636 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
4637
4038c495 4638 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
57d1dd87 4639 {
726a989a 4640 tree cref;
57d1dd87
RH
4641
4642 /* NULL values are created above for gimplification errors. */
4643 if (value == NULL)
4644 continue;
4645
4646 if (cleared && initializer_zerop (value))
4647 continue;
4648
6fa91b48
SB
4649 /* ??? Here's to hoping the front end fills in all of the indices,
4650 so we don't have to figure out what's missing ourselves. */
4651 gcc_assert (purpose);
4652
816fa80a
OH
4653 /* Skip zero-sized fields, unless value has side-effects. This can
4654 happen with calls to functions returning a zero-sized type, which
4655 we shouldn't discard. As a number of downstream passes don't
4656 expect sets of zero-sized fields, we rely on the gimplification of
4657 the MODIFY_EXPR we make below to drop the assignment statement. */
4658 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
292a398f
DB
4659 continue;
4660
6fa91b48
SB
4661 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4662 whole range. */
4663 if (TREE_CODE (purpose) == RANGE_EXPR)
57d1dd87 4664 {
6fa91b48
SB
4665 tree lower = TREE_OPERAND (purpose, 0);
4666 tree upper = TREE_OPERAND (purpose, 1);
4667
4668 /* If the lower bound is equal to upper, just treat it as if
4669 upper was the index. */
4670 if (simple_cst_equal (lower, upper))
4671 purpose = upper;
4672 else
4673 {
4674 gimplify_init_ctor_eval_range (object, lower, upper, value,
4675 array_elt_type, pre_p, cleared);
4676 continue;
4677 }
4678 }
57d1dd87 4679
6fa91b48
SB
4680 if (array_elt_type)
4681 {
1a1640db
RG
4682 /* Do not use bitsizetype for ARRAY_REF indices. */
4683 if (TYPE_DOMAIN (TREE_TYPE (object)))
ad19c4be
EB
4684 purpose
4685 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
4686 purpose);
b4257cfc
RG
4687 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4688 purpose, NULL_TREE, NULL_TREE);
57d1dd87
RH
4689 }
4690 else
cf0efa6a
ILT
4691 {
4692 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
b4257cfc
RG
4693 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
4694 unshare_expr (object), purpose, NULL_TREE);
cf0efa6a 4695 }
57d1dd87 4696
cf0efa6a
ILT
4697 if (TREE_CODE (value) == CONSTRUCTOR
4698 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
57d1dd87
RH
4699 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4700 pre_p, cleared);
4701 else
4702 {
726a989a 4703 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
57d1dd87 4704 gimplify_and_add (init, pre_p);
726a989a 4705 ggc_free (init);
57d1dd87
RH
4706 }
4707 }
4708}
4709
ad19c4be 4710/* Return the appropriate RHS predicate for this LHS. */
726a989a 4711
18f429e2 4712gimple_predicate
726a989a
RB
4713rhs_predicate_for (tree lhs)
4714{
ba4d8f9d
RG
4715 if (is_gimple_reg (lhs))
4716 return is_gimple_reg_rhs_or_call;
726a989a 4717 else
ba4d8f9d 4718 return is_gimple_mem_rhs_or_call;
726a989a
RB
4719}
4720
8a1b7b7f
JM
4721/* Return the initial guess for an appropriate RHS predicate for this LHS,
4722 before the LHS has been gimplified. */
4723
4724static gimple_predicate
4725initial_rhs_predicate_for (tree lhs)
4726{
4727 if (is_gimple_reg_type (TREE_TYPE (lhs)))
4728 return is_gimple_reg_rhs_or_call;
4729 else
4730 return is_gimple_mem_rhs_or_call;
4731}
4732
2ec5deb5
PB
4733/* Gimplify a C99 compound literal expression. This just means adding
4734 the DECL_EXPR before the current statement and using its anonymous
4735 decl instead. */
4736
4737static enum gimplify_status
a845a7f5 4738gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
4c53d183 4739 bool (*gimple_test_f) (tree),
a845a7f5 4740 fallback_t fallback)
2ec5deb5
PB
4741{
4742 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
4743 tree decl = DECL_EXPR_DECL (decl_s);
4c53d183 4744 tree init = DECL_INITIAL (decl);
2ec5deb5
PB
4745 /* Mark the decl as addressable if the compound literal
4746 expression is addressable now, otherwise it is marked too late
4747 after we gimplify the initialization expression. */
4748 if (TREE_ADDRESSABLE (*expr_p))
4749 TREE_ADDRESSABLE (decl) = 1;
4c53d183
MM
4750 /* Otherwise, if we don't need an lvalue and have a literal, directly
4751 substitute it. Check if it matches the gimple predicate, as
4752 otherwise we'd generate a new temporary, and we can as well just
4753 use the decl we already have. */
4754 else if (!TREE_ADDRESSABLE (decl)
77527d8a 4755 && !TREE_THIS_VOLATILE (decl)
4c53d183
MM
4756 && init
4757 && (fallback & fb_lvalue) == 0
4758 && gimple_test_f (init))
4759 {
4760 *expr_p = init;
4761 return GS_OK;
4762 }
2ec5deb5 4763
a845a7f5
ILT
4764 /* If the decl is not addressable, then it is being used in some
4765 expression or on the right hand side of a statement, and it can
4766 be put into a readonly data section. */
4767 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
4768 TREE_READONLY (decl) = 1;
4769
2ec5deb5
PB
4770 /* This decl isn't mentioned in the enclosing block, so add it to the
4771 list of temps. FIXME it seems a bit of a kludge to say that
4772 anonymous artificial vars aren't pushed, but everything else is. */
4773 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
4774 gimple_add_tmp_var (decl);
4775
4776 gimplify_and_add (decl_s, pre_p);
4777 *expr_p = decl;
4778 return GS_OK;
4779}
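/* Illustrative example, not part of the original source: for

     int *p = (int[]) { 1, 2, 3 };

   the compound literal is turned into an anonymous VAR_DECL whose DECL_EXPR
   is prepended to the current statement, and the literal expression is
   replaced by that variable, so p ends up pointing at it.  */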
4780
4781/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4782 return a new CONSTRUCTOR if something changed. */
4783
4784static tree
4785optimize_compound_literals_in_ctor (tree orig_ctor)
4786{
4787 tree ctor = orig_ctor;
9771b263
DN
4788 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
4789 unsigned int idx, num = vec_safe_length (elts);
2ec5deb5
PB
4790
4791 for (idx = 0; idx < num; idx++)
4792 {
9771b263 4793 tree value = (*elts)[idx].value;
2ec5deb5
PB
4794 tree newval = value;
4795 if (TREE_CODE (value) == CONSTRUCTOR)
4796 newval = optimize_compound_literals_in_ctor (value);
4797 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
4798 {
4799 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
4800 tree decl = DECL_EXPR_DECL (decl_s);
4801 tree init = DECL_INITIAL (decl);
4802
4803 if (!TREE_ADDRESSABLE (value)
4804 && !TREE_ADDRESSABLE (decl)
6f8f67e9
JJ
4805 && init
4806 && TREE_CODE (init) == CONSTRUCTOR)
2ec5deb5
PB
4807 newval = optimize_compound_literals_in_ctor (init);
4808 }
4809 if (newval == value)
4810 continue;
4811
4812 if (ctor == orig_ctor)
4813 {
4814 ctor = copy_node (orig_ctor);
9771b263 4815 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
2ec5deb5
PB
4816 elts = CONSTRUCTOR_ELTS (ctor);
4817 }
9771b263 4818 (*elts)[idx].value = newval;
2ec5deb5
PB
4819 }
4820 return ctor;
4821}
4822
26d44ae2
RH
4823/* A subroutine of gimplify_modify_expr. Break out elements of a
4824 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4825
4826 Note that we still need to clear any elements that don't have explicit
4827 initializers, so if not all elements are initialized we keep the
ffed8a01
AH
 4828 original MODIFY_EXPR; we just remove all of the constructor elements.
4829
4830 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4831 GS_ERROR if we would have to create a temporary when gimplifying
4832 this constructor. Otherwise, return GS_OK.
4833
4834 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
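
/* For instance, "const int tbl[4] = { 1, 2, 3, 4 };" may be promoted to a
   static readonly object (or block-copied from one), whereas
   "int buf[32] = { [3] = 7 };" is typically gimplified as a clearing of
   BUF followed by a single store of the nonzero element; the heuristics
   in the function below choose between these strategies.  */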
26d44ae2
RH
4835
4836static enum gimplify_status
726a989a
RB
4837gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4838 bool want_value, bool notify_temp_creation)
26d44ae2 4839{
f5a1f0d0 4840 tree object, ctor, type;
26d44ae2 4841 enum gimplify_status ret;
9771b263 4842 vec<constructor_elt, va_gc> *elts;
26d44ae2 4843
f5a1f0d0 4844 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
26d44ae2 4845
ffed8a01
AH
4846 if (!notify_temp_creation)
4847 {
726a989a 4848 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
ffed8a01
AH
4849 is_gimple_lvalue, fb_lvalue);
4850 if (ret == GS_ERROR)
4851 return ret;
4852 }
57d1dd87 4853
726a989a 4854 object = TREE_OPERAND (*expr_p, 0);
98e92fb2
JJ
4855 ctor = TREE_OPERAND (*expr_p, 1)
4856 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
f5a1f0d0
PB
4857 type = TREE_TYPE (ctor);
4858 elts = CONSTRUCTOR_ELTS (ctor);
26d44ae2 4859 ret = GS_ALL_DONE;
726a989a 4860
26d44ae2
RH
4861 switch (TREE_CODE (type))
4862 {
4863 case RECORD_TYPE:
4864 case UNION_TYPE:
4865 case QUAL_UNION_TYPE:
4866 case ARRAY_TYPE:
4867 {
57d1dd87 4868 struct gimplify_init_ctor_preeval_data preeval_data;
953d0c90 4869 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
0fca07e3 4870 HOST_WIDE_INT num_unique_nonzero_elements;
953d0c90 4871 bool cleared, complete_p, valid_const_initializer;
0fca07e3
JJ
4872 /* Use readonly data for initializers of this or smaller size
4873 regardless of the num_nonzero_elements / num_unique_nonzero_elements
4874 ratio. */
4875 const HOST_WIDE_INT min_unique_size = 64;
4876 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
4877 is smaller than this, use readonly data. */
4878 const int unique_nonzero_ratio = 8;
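	/* For example, with these defaults an initializer that has 80
	   nonzero elements of which only 5 are distinct fails the ratio
	   test (5 is not greater than 80 / 8), so the readonly-data paths
	   below are only taken if its total size is at most 64 bytes.  */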
26d44ae2
RH
4879
4880 /* Aggregate types must lower constructors to initialization of
4881 individual elements. The exception is that a CONSTRUCTOR node
4882 with no elements indicates zero-initialization of the whole. */
9771b263 4883 if (vec_safe_is_empty (elts))
ffed8a01
AH
4884 {
4885 if (notify_temp_creation)
4886 return GS_OK;
4887 break;
4888 }
b8698a0f 4889
fe24d485
OH
4890 /* Fetch information about the constructor to direct later processing.
4891 We might want to make static versions of it in various cases, and
 4892 can only do so if it is known to be a valid constant initializer. */
4893 valid_const_initializer
4894 = categorize_ctor_elements (ctor, &num_nonzero_elements,
0fca07e3 4895 &num_unique_nonzero_elements,
953d0c90 4896 &num_ctor_elements, &complete_p);
26d44ae2
RH
4897
4898 /* If a const aggregate variable is being initialized, then it
 4899 should never be a loss to promote the variable to be static. */
fe24d485 4900 if (valid_const_initializer
6f642f98 4901 && num_nonzero_elements > 1
26d44ae2 4902 && TREE_READONLY (object)
8813a647 4903 && VAR_P (object)
1956773c 4904 && !DECL_REGISTER (object)
0fca07e3
JJ
4905 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object))
4906 /* For ctors that have many repeated nonzero elements
4907 represented through RANGE_EXPRs, prefer initializing
4908 those through runtime loops over copies of large amounts
4909 of data from readonly data section. */
4910 && (num_unique_nonzero_elements
4911 > num_nonzero_elements / unique_nonzero_ratio
4912 || ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
4913 <= (unsigned HOST_WIDE_INT) min_unique_size)))
26d44ae2 4914 {
ffed8a01
AH
4915 if (notify_temp_creation)
4916 return GS_ERROR;
26d44ae2
RH
4917 DECL_INITIAL (object) = ctor;
4918 TREE_STATIC (object) = 1;
4919 if (!DECL_NAME (object))
4920 DECL_NAME (object) = create_tmp_var_name ("C");
4921 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
4922
4923 /* ??? C++ doesn't automatically append a .<number> to the
6bdf3519 4924 assembler name, and even when it does, it looks at FE private
26d44ae2
RH
4925 data structures to figure out what that number should be,
4926 which are not set for this variable. I suppose this is
4927 important for local statics for inline functions, which aren't
4928 "local" in the object file sense. So in order to get a unique
4929 TU-local symbol, we must invoke the lhd version now. */
4930 lhd_set_decl_assembler_name (object);
4931
4932 *expr_p = NULL_TREE;
4933 break;
4934 }
4935
cce70747
JC
4936 /* If there are "lots" of initialized elements, even discounting
4937 those that are not address constants (and thus *must* be
4938 computed at runtime), then partition the constructor into
4939 constant and non-constant parts. Block copy the constant
4940 parts in, then generate code for the non-constant parts. */
4941 /* TODO. There's code in cp/typeck.c to do this. */
4942
953d0c90
RS
4943 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
4944 /* store_constructor will ignore the clearing of variable-sized
4945 objects. Initializers for such objects must explicitly set
4946 every field that needs to be set. */
4947 cleared = false;
7f1387e0 4948 else if (!complete_p)
953d0c90 4949 /* If the constructor isn't complete, clear the whole object
d368135f 4950 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
953d0c90
RS
4951
4952 ??? This ought not to be needed. For any element not present
4953 in the initializer, we should simply set them to zero. Except
4954 we'd need to *find* the elements that are not present, and that
4955 requires trickery to avoid quadratic compile-time behavior in
4956 large cases or excessive memory use in small cases. */
7f1387e0 4957 cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
953d0c90 4958 else if (num_ctor_elements - num_nonzero_elements
e04ad03d 4959 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
953d0c90
RS
4960 && num_nonzero_elements < num_ctor_elements / 4)
4961 /* If there are "lots" of zeros, it's more efficient to clear
4962 the memory and then set the nonzero elements. */
cce70747 4963 cleared = true;
953d0c90
RS
4964 else
4965 cleared = false;
cce70747 4966
26d44ae2
RH
4967 /* If there are "lots" of initialized elements, and all of them
4968 are valid address constants, then the entire initializer can
cce70747
JC
4969 be dropped to memory, and then memcpy'd out. Don't do this
4970 for sparse arrays, though, as it's more efficient to follow
4971 the standard CONSTRUCTOR behavior of memset followed by
8afd015a
JM
4972 individual element initialization. Also don't do this for small
4973 all-zero initializers (which aren't big enough to merit
4974 clearing), and don't try to make bitwise copies of
31db0fe0 4975 TREE_ADDRESSABLE types. */
8afd015a 4976 if (valid_const_initializer
2badc988 4977 && complete_p
8afd015a 4978 && !(cleared || num_nonzero_elements == 0)
31db0fe0 4979 && !TREE_ADDRESSABLE (type))
26d44ae2
RH
4980 {
4981 HOST_WIDE_INT size = int_size_in_bytes (type);
4982 unsigned int align;
4983
4984 /* ??? We can still get unbounded array types, at least
4985 from the C++ front end. This seems wrong, but attempt
4986 to work around it for now. */
4987 if (size < 0)
4988 {
4989 size = int_size_in_bytes (TREE_TYPE (object));
4990 if (size >= 0)
4991 TREE_TYPE (ctor) = type = TREE_TYPE (object);
4992 }
4993
4994 /* Find the maximum alignment we can assume for the object. */
4995 /* ??? Make use of DECL_OFFSET_ALIGN. */
4996 if (DECL_P (object))
4997 align = DECL_ALIGN (object);
4998 else
4999 align = TYPE_ALIGN (type);
5000
f301837e
EB
5001 /* Do a block move either if the size is so small as to make
5002 each individual move a sub-unit move on average, or if it
5003 is so large as to make individual moves inefficient. */
329ad380
JJ
5004 if (size > 0
5005 && num_nonzero_elements > 1
0fca07e3
JJ
5006 /* For ctors that have many repeated nonzero elements
5007 represented through RANGE_EXPRs, prefer initializing
5008 those through runtime loops over copies of large amounts
5009 of data from readonly data section. */
5010 && (num_unique_nonzero_elements
5011 > num_nonzero_elements / unique_nonzero_ratio
5012 || size <= min_unique_size)
f301837e
EB
5013 && (size < num_nonzero_elements
5014 || !can_move_by_pieces (size, align)))
26d44ae2 5015 {
ffed8a01
AH
5016 if (notify_temp_creation)
5017 return GS_ERROR;
5018
46314d3e
EB
5019 walk_tree (&ctor, force_labels_r, NULL, NULL);
5020 ctor = tree_output_constant_def (ctor);
5021 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
5022 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
5023 TREE_OPERAND (*expr_p, 1) = ctor;
57d1dd87
RH
5024
5025 /* This is no longer an assignment of a CONSTRUCTOR, but
5026 we still may have processing to do on the LHS. So
5027 pretend we didn't do anything here to let that happen. */
5028 return GS_UNHANDLED;
26d44ae2
RH
5029 }
5030 }
5031
558af7ca
EB
 5032 /* If the target is volatile and we have non-zero elements and more than
 5033 one field to assign, initialize the target from a temporary. */
61c7cbf8
RG
5034 if (TREE_THIS_VOLATILE (object)
5035 && !TREE_ADDRESSABLE (type)
3e66e7b0 5036 && (num_nonzero_elements > 0 || !cleared)
9771b263 5037 && vec_safe_length (elts) > 1)
61c7cbf8 5038 {
b731b390 5039 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
61c7cbf8
RG
5040 TREE_OPERAND (*expr_p, 0) = temp;
5041 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
5042 *expr_p,
5043 build2 (MODIFY_EXPR, void_type_node,
5044 object, temp));
5045 return GS_OK;
5046 }
5047
ffed8a01
AH
5048 if (notify_temp_creation)
5049 return GS_OK;
5050
675c873b
EB
5051 /* If there are nonzero elements and if needed, pre-evaluate to capture
5052 elements overlapping with the lhs into temporaries. We must do this
5053 before clearing to fetch the values before they are zeroed-out. */
5054 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
85d89e76
OH
5055 {
5056 preeval_data.lhs_base_decl = get_base_address (object);
5057 if (!DECL_P (preeval_data.lhs_base_decl))
5058 preeval_data.lhs_base_decl = NULL;
5059 preeval_data.lhs_alias_set = get_alias_set (object);
5060
726a989a 5061 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
85d89e76
OH
5062 pre_p, post_p, &preeval_data);
5063 }
5064
2234a9cb
PP
5065 bool ctor_has_side_effects_p
5066 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
5067
26d44ae2
RH
5068 if (cleared)
5069 {
5070 /* Zap the CONSTRUCTOR element list, which simplifies this case.
5071 Note that we still have to gimplify, in order to handle the
57d1dd87 5072 case of variable sized types. Avoid shared tree structures. */
4038c495 5073 CONSTRUCTOR_ELTS (ctor) = NULL;
726a989a 5074 TREE_SIDE_EFFECTS (ctor) = 0;
57d1dd87 5075 object = unshare_expr (object);
726a989a 5076 gimplify_stmt (expr_p, pre_p);
26d44ae2
RH
5077 }
5078
6fa91b48 5079 /* If we have not block cleared the object, or if there are nonzero
2234a9cb
PP
5080 elements in the constructor, or if the constructor has side effects,
5081 add assignments to the individual scalar fields of the object. */
5082 if (!cleared
5083 || num_nonzero_elements > 0
5084 || ctor_has_side_effects_p)
85d89e76 5085 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
26d44ae2
RH
5086
5087 *expr_p = NULL_TREE;
5088 }
5089 break;
5090
5091 case COMPLEX_TYPE:
5092 {
5093 tree r, i;
5094
ffed8a01
AH
5095 if (notify_temp_creation)
5096 return GS_OK;
5097
26d44ae2 5098 /* Extract the real and imaginary parts out of the ctor. */
9771b263
DN
5099 gcc_assert (elts->length () == 2);
5100 r = (*elts)[0].value;
5101 i = (*elts)[1].value;
26d44ae2
RH
5102 if (r == NULL || i == NULL)
5103 {
e8160c9a 5104 tree zero = build_zero_cst (TREE_TYPE (type));
26d44ae2
RH
5105 if (r == NULL)
5106 r = zero;
5107 if (i == NULL)
5108 i = zero;
5109 }
5110
5111 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5112 represent creation of a complex value. */
5113 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
5114 {
5115 ctor = build_complex (type, r, i);
5116 TREE_OPERAND (*expr_p, 1) = ctor;
5117 }
5118 else
5119 {
b4257cfc 5120 ctor = build2 (COMPLEX_EXPR, type, r, i);
26d44ae2 5121 TREE_OPERAND (*expr_p, 1) = ctor;
726a989a
RB
5122 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
5123 pre_p,
5124 post_p,
17ad5b5e
RH
5125 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
5126 fb_rvalue);
26d44ae2
RH
5127 }
5128 }
5129 break;
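
      /* As an illustration, a complex CONSTRUCTOR whose two elements are
	 the constants 1.0 and 2.0 is folded above into a single
	 COMPLEX_CST store, while one built from variables x and y is
	 rebuilt as COMPLEX_EXPR <x, y> and gimplified as an rvalue.  */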
506e2710 5130
26d44ae2 5131 case VECTOR_TYPE:
4038c495
GB
5132 {
5133 unsigned HOST_WIDE_INT ix;
5134 constructor_elt *ce;
e89be13b 5135
ffed8a01
AH
5136 if (notify_temp_creation)
5137 return GS_OK;
5138
4038c495
GB
5139 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5140 if (TREE_CONSTANT (ctor))
5141 {
5142 bool constant_p = true;
5143 tree value;
5144
5145 /* Even when ctor is constant, it might contain non-*_CST
9f1da821
RS
5146 elements, such as addresses or trapping values like
5147 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5148 in VECTOR_CST nodes. */
4038c495
GB
5149 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
5150 if (!CONSTANT_CLASS_P (value))
5151 {
5152 constant_p = false;
5153 break;
5154 }
e89be13b 5155
4038c495
GB
5156 if (constant_p)
5157 {
5158 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
5159 break;
5160 }
84816907 5161
9f1da821 5162 TREE_CONSTANT (ctor) = 0;
4038c495 5163 }
e89be13b 5164
4038c495 5165 /* Vector types use CONSTRUCTOR all the way through gimple
37947cd0 5166 compilation as a general initializer. */
9771b263 5167 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
4038c495
GB
5168 {
5169 enum gimplify_status tret;
726a989a
RB
5170 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
5171 fb_rvalue);
4038c495
GB
5172 if (tret == GS_ERROR)
5173 ret = GS_ERROR;
37947cd0
JJ
5174 else if (TREE_STATIC (ctor)
5175 && !initializer_constant_valid_p (ce->value,
5176 TREE_TYPE (ce->value)))
5177 TREE_STATIC (ctor) = 0;
4038c495 5178 }
726a989a
RB
5179 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
5180 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
4038c495 5181 }
26d44ae2 5182 break;
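
      /* For example, a vector CONSTRUCTOR such as { 1, 2, 3, 4 } with only
	 *_CST elements is folded above to a VECTOR_CST, whereas
	 { a, b, c, d } keeps the CONSTRUCTOR form, has each element
	 gimplified to a gimple value, and is forced into a formal
	 temporary when the LHS is not a gimple register.  */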
6de9cd9a 5183
26d44ae2
RH
5184 default:
5185 /* So how did we get a CONSTRUCTOR for a scalar type? */
282899df 5186 gcc_unreachable ();
26d44ae2 5187 }
6de9cd9a 5188
26d44ae2
RH
5189 if (ret == GS_ERROR)
5190 return GS_ERROR;
0faf9ab4
WS
5191 /* If we have gimplified both sides of the initializer but have
5192 not emitted an assignment, do so now. */
5193 if (*expr_p)
5194 {
5195 tree lhs = TREE_OPERAND (*expr_p, 0);
5196 tree rhs = TREE_OPERAND (*expr_p, 1);
98e92fb2
JJ
5197 if (want_value && object == lhs)
5198 lhs = unshare_expr (lhs);
0faf9ab4
WS
5199 gassign *init = gimple_build_assign (lhs, rhs);
5200 gimplify_seq_add_stmt (pre_p, init);
5201 }
5202 if (want_value)
26d44ae2 5203 {
26d44ae2
RH
5204 *expr_p = object;
5205 return GS_OK;
6de9cd9a 5206 }
26d44ae2 5207 else
726a989a 5208 {
0faf9ab4 5209 *expr_p = NULL;
726a989a
RB
5210 return GS_ALL_DONE;
5211 }
26d44ae2 5212}
6de9cd9a 5213
de4af523
JJ
5214/* Given a pointer value OP0, return a simplified version of an
5215 indirection through OP0, or NULL_TREE if no simplification is
5216 possible. This may only be applied to a rhs of an expression.
5217 Note that the resulting type may be different from the type pointed
5218 to in the sense that it is still compatible from the langhooks
5219 point of view. */
5220
5221static tree
5222gimple_fold_indirect_ref_rhs (tree t)
5223{
5224 return gimple_fold_indirect_ref (t);
5225}
5226
4caa08da
AH
5227/* Subroutine of gimplify_modify_expr to do simplifications of
5228 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5229 something changes. */
6de9cd9a 5230
26d44ae2 5231static enum gimplify_status
726a989a
RB
5232gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5233 gimple_seq *pre_p, gimple_seq *post_p,
5234 bool want_value)
26d44ae2 5235{
6d729f28
JM
5236 enum gimplify_status ret = GS_UNHANDLED;
5237 bool changed;
6de9cd9a 5238
6d729f28
JM
5239 do
5240 {
5241 changed = false;
5242 switch (TREE_CODE (*from_p))
5243 {
5244 case VAR_DECL:
5245 /* If we're assigning from a read-only variable initialized with
5246 a constructor, do the direct assignment from the constructor,
5247 but only if neither source nor target are volatile since this
5248 latter assignment might end up being done on a per-field basis. */
5249 if (DECL_INITIAL (*from_p)
5250 && TREE_READONLY (*from_p)
5251 && !TREE_THIS_VOLATILE (*from_p)
5252 && !TREE_THIS_VOLATILE (*to_p)
5253 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
5254 {
5255 tree old_from = *from_p;
5256 enum gimplify_status subret;
5257
5258 /* Move the constructor into the RHS. */
5259 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5260
5261 /* Let's see if gimplify_init_constructor will need to put
5262 it in memory. */
5263 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5264 false, true);
5265 if (subret == GS_ERROR)
5266 {
5267 /* If so, revert the change. */
5268 *from_p = old_from;
5269 }
5270 else
5271 {
5272 ret = GS_OK;
5273 changed = true;
5274 }
5275 }
5276 break;
5277 case INDIRECT_REF:
4caa08da 5278 {
6d729f28 5279 /* If we have code like
ffed8a01 5280
6d729f28 5281 *(const A*)(A*)&x
ffed8a01 5282
6d729f28
JM
5283 where the type of "x" is a (possibly cv-qualified variant
5284 of "A"), treat the entire expression as identical to "x".
5285 This kind of code arises in C++ when an object is bound
5286 to a const reference, and if "x" is a TARGET_EXPR we want
5287 to take advantage of the optimization below. */
06baaba3 5288 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
6d729f28
JM
5289 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
5290 if (t)
ffed8a01 5291 {
06baaba3
RG
5292 if (TREE_THIS_VOLATILE (t) != volatile_p)
5293 {
3a65ee74 5294 if (DECL_P (t))
06baaba3
RG
5295 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5296 build_fold_addr_expr (t));
5297 if (REFERENCE_CLASS_P (t))
5298 TREE_THIS_VOLATILE (t) = volatile_p;
5299 }
6d729f28
JM
5300 *from_p = t;
5301 ret = GS_OK;
5302 changed = true;
ffed8a01 5303 }
6d729f28
JM
5304 break;
5305 }
5306
5307 case TARGET_EXPR:
5308 {
5309 /* If we are initializing something from a TARGET_EXPR, strip the
5310 TARGET_EXPR and initialize it directly, if possible. This can't
5311 be done if the initializer is void, since that implies that the
5312 temporary is set in some non-trivial way.
5313
5314 ??? What about code that pulls out the temp and uses it
5315 elsewhere? I think that such code never uses the TARGET_EXPR as
5316 an initializer. If I'm wrong, we'll die because the temp won't
5317 have any RTL. In that case, I guess we'll need to replace
5318 references somehow. */
5319 tree init = TARGET_EXPR_INITIAL (*from_p);
5320
5321 if (init
570f86f9
JJ
5322 && (TREE_CODE (*expr_p) != MODIFY_EXPR
5323 || !TARGET_EXPR_NO_ELIDE (*from_p))
6d729f28 5324 && !VOID_TYPE_P (TREE_TYPE (init)))
ffed8a01 5325 {
6d729f28 5326 *from_p = init;
ffed8a01 5327 ret = GS_OK;
6d729f28 5328 changed = true;
ffed8a01 5329 }
4caa08da 5330 }
6d729f28 5331 break;
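
	  /* For example (names illustrative, D.2345 being the FE-created
	     temporary), an initialization gimplified from

	       s = TARGET_EXPR <D.2345, make_s ()>

	     becomes roughly "s = make_s ()", so the call can later store
	     straight into S via the return-slot handling below.  */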
f98625f6 5332
6d729f28
JM
5333 case COMPOUND_EXPR:
5334 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5335 caught. */
5336 gimplify_compound_expr (from_p, pre_p, true);
5337 ret = GS_OK;
5338 changed = true;
5339 break;
f98625f6 5340
6d729f28 5341 case CONSTRUCTOR:
ce3beba3
JM
5342 /* If we already made some changes, let the front end have a
5343 crack at this before we break it down. */
5344 if (ret != GS_UNHANDLED)
5345 break;
6d729f28
JM
5346 /* If we're initializing from a CONSTRUCTOR, break this into
5347 individual MODIFY_EXPRs. */
5348 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5349 false);
5350
5351 case COND_EXPR:
5352 /* If we're assigning to a non-register type, push the assignment
5353 down into the branches. This is mandatory for ADDRESSABLE types,
5354 since we cannot generate temporaries for such, but it saves a
5355 copy in other cases as well. */
5356 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
f98625f6 5357 {
6d729f28
JM
5358 /* This code should mirror the code in gimplify_cond_expr. */
5359 enum tree_code code = TREE_CODE (*expr_p);
5360 tree cond = *from_p;
5361 tree result = *to_p;
5362
5363 ret = gimplify_expr (&result, pre_p, post_p,
5364 is_gimple_lvalue, fb_lvalue);
5365 if (ret != GS_ERROR)
5366 ret = GS_OK;
5367
68ed2ba0
JJ
5368 /* If we are going to write RESULT more than once, clear
5369 TREE_READONLY flag, otherwise we might incorrectly promote
5370 the variable to static const and initialize it at compile
5371 time in one of the branches. */
5372 if (VAR_P (result)
5373 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5374 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5375 TREE_READONLY (result) = 0;
6d729f28
JM
5376 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5377 TREE_OPERAND (cond, 1)
5378 = build2 (code, void_type_node, result,
5379 TREE_OPERAND (cond, 1));
5380 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5381 TREE_OPERAND (cond, 2)
5382 = build2 (code, void_type_node, unshare_expr (result),
5383 TREE_OPERAND (cond, 2));
5384
5385 TREE_TYPE (cond) = void_type_node;
5386 recalculate_side_effects (cond);
5387
5388 if (want_value)
5389 {
5390 gimplify_and_add (cond, pre_p);
5391 *expr_p = unshare_expr (result);
5392 }
5393 else
5394 *expr_p = cond;
5395 return ret;
f98625f6 5396 }
f98625f6 5397 break;
f98625f6 5398
6d729f28
JM
5399 case CALL_EXPR:
5400 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5401 return slot so that we don't generate a temporary. */
5402 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5403 && aggregate_value_p (*from_p, *from_p))
26d44ae2 5404 {
6d729f28
JM
5405 bool use_target;
5406
5407 if (!(rhs_predicate_for (*to_p))(*from_p))
5408 /* If we need a temporary, *to_p isn't accurate. */
5409 use_target = false;
ad19c4be 5410 /* It's OK to use the return slot directly unless it's an NRV. */
6d729f28
JM
5411 else if (TREE_CODE (*to_p) == RESULT_DECL
5412 && DECL_NAME (*to_p) == NULL_TREE
5413 && needs_to_live_in_memory (*to_p))
6d729f28
JM
5414 use_target = true;
5415 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5416 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5417 /* Don't force regs into memory. */
5418 use_target = false;
5419 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5420 /* It's OK to use the target directly if it's being
5421 initialized. */
5422 use_target = true;
e6a54b01
EB
5423 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5424 != INTEGER_CST)
aabb90e5
RG
5425 /* Always use the target and thus RSO for variable-sized types.
5426 GIMPLE cannot deal with a variable-sized assignment
5427 embedded in a call statement. */
5428 use_target = true;
345ae177
AH
5429 else if (TREE_CODE (*to_p) != SSA_NAME
5430 && (!is_gimple_variable (*to_p)
5431 || needs_to_live_in_memory (*to_p)))
6d729f28
JM
5432 /* Don't use the original target if it's already addressable;
5433 if its address escapes, and the called function uses the
5434 NRV optimization, a conforming program could see *to_p
5435 change before the called function returns; see c++/19317.
5436 When optimizing, the return_slot pass marks more functions
5437 as safe after we have escape info. */
5438 use_target = false;
5439 else
5440 use_target = true;
5441
5442 if (use_target)
5443 {
5444 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5445 mark_addressable (*to_p);
5446 }
26d44ae2 5447 }
6d729f28 5448 break;
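
	  /* Concretely, for "s = make_big ()" (names illustrative) where
	     the callee returns its aggregate result in memory, setting
	     CALL_EXPR_RETURN_SLOT_OPT lets the call write directly into S
	     instead of into a temporary that would then be copied over.  */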
6de9cd9a 5449
6d729f28
JM
5450 case WITH_SIZE_EXPR:
5451 /* Likewise for calls that return an aggregate of non-constant size,
5452 since we would not be able to generate a temporary at all. */
5453 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5454 {
5455 *from_p = TREE_OPERAND (*from_p, 0);
ebad5233
JM
5456 /* We don't change ret in this case because the
5457 WITH_SIZE_EXPR might have been added in
5458 gimplify_modify_expr, so returning GS_OK would lead to an
5459 infinite loop. */
6d729f28
JM
5460 changed = true;
5461 }
5462 break;
6de9cd9a 5463
6d729f28
JM
5464 /* If we're initializing from a container, push the initialization
5465 inside it. */
5466 case CLEANUP_POINT_EXPR:
5467 case BIND_EXPR:
5468 case STATEMENT_LIST:
26d44ae2 5469 {
6d729f28
JM
5470 tree wrap = *from_p;
5471 tree t;
dae7ec87 5472
6d729f28
JM
5473 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5474 fb_lvalue);
dae7ec87
JM
5475 if (ret != GS_ERROR)
5476 ret = GS_OK;
5477
6d729f28
JM
5478 t = voidify_wrapper_expr (wrap, *expr_p);
5479 gcc_assert (t == *expr_p);
dae7ec87
JM
5480
5481 if (want_value)
5482 {
6d729f28
JM
5483 gimplify_and_add (wrap, pre_p);
5484 *expr_p = unshare_expr (*to_p);
dae7ec87
JM
5485 }
5486 else
6d729f28
JM
5487 *expr_p = wrap;
5488 return GS_OK;
26d44ae2 5489 }
6de9cd9a 5490
6d729f28 5491 case COMPOUND_LITERAL_EXPR:
fa47911c 5492 {
6d729f28
JM
5493 tree complit = TREE_OPERAND (*expr_p, 1);
5494 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5495 tree decl = DECL_EXPR_DECL (decl_s);
5496 tree init = DECL_INITIAL (decl);
5497
5498 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5499 into struct T x = { 0, 1, 2 } if the address of the
5500 compound literal has never been taken. */
5501 if (!TREE_ADDRESSABLE (complit)
5502 && !TREE_ADDRESSABLE (decl)
5503 && init)
fa47911c 5504 {
6d729f28
JM
5505 *expr_p = copy_node (*expr_p);
5506 TREE_OPERAND (*expr_p, 1) = init;
5507 return GS_OK;
fa47911c
JM
5508 }
5509 }
5510
6d729f28
JM
5511 default:
5512 break;
2ec5deb5 5513 }
6d729f28
JM
5514 }
5515 while (changed);
6de9cd9a 5516
6de9cd9a
DN
5517 return ret;
5518}
5519
216820a4
RG
5520
5521/* Return true if T looks like a valid GIMPLE statement. */
5522
5523static bool
5524is_gimple_stmt (tree t)
5525{
5526 const enum tree_code code = TREE_CODE (t);
5527
5528 switch (code)
5529 {
5530 case NOP_EXPR:
5531 /* The only valid NOP_EXPR is the empty statement. */
5532 return IS_EMPTY_STMT (t);
5533
5534 case BIND_EXPR:
5535 case COND_EXPR:
5536 /* These are only valid if they're void. */
5537 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5538
5539 case SWITCH_EXPR:
5540 case GOTO_EXPR:
5541 case RETURN_EXPR:
5542 case LABEL_EXPR:
5543 case CASE_LABEL_EXPR:
5544 case TRY_CATCH_EXPR:
5545 case TRY_FINALLY_EXPR:
5546 case EH_FILTER_EXPR:
5547 case CATCH_EXPR:
5548 case ASM_EXPR:
5549 case STATEMENT_LIST:
41dbbb37
TS
5550 case OACC_PARALLEL:
5551 case OACC_KERNELS:
62aee289 5552 case OACC_SERIAL:
41dbbb37
TS
5553 case OACC_DATA:
5554 case OACC_HOST_DATA:
5555 case OACC_DECLARE:
5556 case OACC_UPDATE:
5557 case OACC_ENTER_DATA:
5558 case OACC_EXIT_DATA:
5559 case OACC_CACHE:
216820a4
RG
5560 case OMP_PARALLEL:
5561 case OMP_FOR:
74bf76ed 5562 case OMP_SIMD:
acf0174b 5563 case OMP_DISTRIBUTE:
554a530f 5564 case OMP_LOOP:
41dbbb37 5565 case OACC_LOOP:
bf38f7e9 5566 case OMP_SCAN:
216820a4
RG
5567 case OMP_SECTIONS:
5568 case OMP_SECTION:
5569 case OMP_SINGLE:
5570 case OMP_MASTER:
acf0174b 5571 case OMP_TASKGROUP:
216820a4
RG
5572 case OMP_ORDERED:
5573 case OMP_CRITICAL:
5574 case OMP_TASK:
d9a6bd32
JJ
5575 case OMP_TARGET:
5576 case OMP_TARGET_DATA:
5577 case OMP_TARGET_UPDATE:
5578 case OMP_TARGET_ENTER_DATA:
5579 case OMP_TARGET_EXIT_DATA:
5580 case OMP_TASKLOOP:
5581 case OMP_TEAMS:
216820a4
RG
5582 /* These are always void. */
5583 return true;
5584
5585 case CALL_EXPR:
5586 case MODIFY_EXPR:
5587 case PREDICT_EXPR:
5588 /* These are valid regardless of their type. */
5589 return true;
5590
5591 default:
5592 return false;
5593 }
5594}
5595
5596
d9c2d296 5597/* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
eb72dc66 5598 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a gimple register.
7b7e6ecd
EB
5599
5600 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5601 other, unmodified part of the complex object just before the total store.
5602 As a consequence, if the object is still uninitialized, an undefined value
5603 will be loaded into a register, which may result in a spurious exception
5604 if the register is floating-point and the value happens to be a signaling
5605 NaN for example. Then the fully-fledged complex operations lowering pass
5606 followed by a DCE pass are necessary in order to fix things up. */
d9c2d296
AP
5607
5608static enum gimplify_status
726a989a
RB
5609gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5610 bool want_value)
d9c2d296
AP
5611{
5612 enum tree_code code, ocode;
5613 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5614
726a989a
RB
5615 lhs = TREE_OPERAND (*expr_p, 0);
5616 rhs = TREE_OPERAND (*expr_p, 1);
d9c2d296
AP
5617 code = TREE_CODE (lhs);
5618 lhs = TREE_OPERAND (lhs, 0);
5619
5620 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5621 other = build1 (ocode, TREE_TYPE (rhs), lhs);
8d2b0410 5622 TREE_NO_WARNING (other) = 1;
d9c2d296
AP
5623 other = get_formal_tmp_var (other, pre_p);
5624
5625 realpart = code == REALPART_EXPR ? rhs : other;
5626 imagpart = code == REALPART_EXPR ? other : rhs;
5627
5628 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5629 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5630 else
5631 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5632
726a989a
RB
5633 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5634 *expr_p = (want_value) ? rhs : NULL_TREE;
d9c2d296
AP
5635
5636 return GS_ALL_DONE;
5637}
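
/* For example, with Z a non-addressable _Complex variable and D.tmp the
   formal temporary created above, the partial store "__real z = r;" is
   promoted to the total store

     D.tmp = IMAGPART_EXPR <z>;
     z = COMPLEX_EXPR <r, D.tmp>;

   which lets Z be treated as a gimple register.  */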
5638
206048bd 5639/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
6de9cd9a
DN
5640
5641 modify_expr
5642 : varname '=' rhs
5643 | '*' ID '=' rhs
5644
5645 PRE_P points to the list where side effects that must happen before
5646 *EXPR_P should be stored.
5647
5648 POST_P points to the list where side effects that must happen after
5649 *EXPR_P should be stored.
5650
5651 WANT_VALUE is nonzero iff we want to use the value of this expression
5652 in another expression. */
5653
5654static enum gimplify_status
726a989a
RB
5655gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5656 bool want_value)
6de9cd9a 5657{
726a989a
RB
5658 tree *from_p = &TREE_OPERAND (*expr_p, 1);
5659 tree *to_p = &TREE_OPERAND (*expr_p, 0);
44de5aeb 5660 enum gimplify_status ret = GS_UNHANDLED;
355fe088 5661 gimple *assign;
db3927fb 5662 location_t loc = EXPR_LOCATION (*expr_p);
6da8be89 5663 gimple_stmt_iterator gsi;
6de9cd9a 5664
282899df
NS
5665 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
5666 || TREE_CODE (*expr_p) == INIT_EXPR);
6de9cd9a 5667
d0ad58f9
JM
5668 /* Trying to simplify a clobber using normal logic doesn't work,
5669 so handle it here. */
5670 if (TREE_CLOBBER_P (*from_p))
5671 {
5d751b0c
JJ
5672 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5673 if (ret == GS_ERROR)
5674 return ret;
cdc18417
JM
5675 gcc_assert (!want_value);
5676 if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
5677 {
5678 tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
5679 pre_p, post_p);
5680 *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
5681 }
d0ad58f9
JM
5682 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
5683 *expr_p = NULL;
5684 return GS_ALL_DONE;
5685 }
5686
1b24a790
RG
5687 /* Insert pointer conversions required by the middle-end that are not
 5688 required by the frontend. This fixes middle-end type checking for,
 5689 for example, gcc.dg/redecl-6.c. */
daad0278 5690 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
1b24a790
RG
5691 {
5692 STRIP_USELESS_TYPE_CONVERSION (*from_p);
5693 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
db3927fb 5694 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
1b24a790
RG
5695 }
5696
83d7e8f0
JM
5697 /* See if any simplifications can be done based on what the RHS is. */
5698 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5699 want_value);
5700 if (ret != GS_UNHANDLED)
5701 return ret;
5702
5703 /* For zero sized types only gimplify the left hand side and right hand
5704 side as statements and throw away the assignment. Do this after
5705 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5706 types properly. */
b55c7343
JJ
5707 if (zero_sized_type (TREE_TYPE (*from_p))
5708 && !want_value
5709 /* Don't do this for calls that return addressable types, expand_call
5710 relies on those having a lhs. */
5711 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
5712 && TREE_CODE (*from_p) == CALL_EXPR))
412f8986 5713 {
726a989a
RB
5714 gimplify_stmt (from_p, pre_p);
5715 gimplify_stmt (to_p, pre_p);
412f8986
AP
5716 *expr_p = NULL_TREE;
5717 return GS_ALL_DONE;
5718 }
6de9cd9a 5719
d25cee4d
RH
5720 /* If the value being copied is of variable width, compute the length
5721 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5722 before gimplifying any of the operands so that we can resolve any
5723 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5724 the size of the expression to be copied, not of the destination, so
726a989a 5725 that is what we must do here. */
d25cee4d 5726 maybe_with_size_expr (from_p);
6de9cd9a 5727
726a989a
RB
5728 /* As a special case, we have to temporarily allow for assignments
5729 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5730 a toplevel statement, when gimplifying the GENERIC expression
5731 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5732 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5733
5734 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5735 prevent gimplify_expr from trying to create a new temporary for
5736 foo's LHS, we tell it that it should only gimplify until it
5737 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5738 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5739 and all we need to do here is set 'a' to be its LHS. */
8a1b7b7f
JM
5740
5741 /* Gimplify the RHS first for C++17 and bug 71104. */
5742 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
5743 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
5744 if (ret == GS_ERROR)
5745 return ret;
5746
5747 /* Then gimplify the LHS. */
7f15b177
RB
5748 /* If we gimplified the RHS to a CALL_EXPR and that call may return
5749 twice we have to make sure to gimplify into non-SSA as otherwise
5750 the abnormal edge added later will make those defs not dominate
5751 their uses.
5752 ??? Technically this applies only to the registers used in the
5753 resulting non-register *TO_P. */
5754 bool saved_into_ssa = gimplify_ctxp->into_ssa;
5755 if (saved_into_ssa
5756 && TREE_CODE (*from_p) == CALL_EXPR
5757 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
5758 gimplify_ctxp->into_ssa = false;
8a1b7b7f 5759 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
7f15b177 5760 gimplify_ctxp->into_ssa = saved_into_ssa;
6de9cd9a
DN
5761 if (ret == GS_ERROR)
5762 return ret;
5763
8a1b7b7f
JM
5764 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5765 guess for the predicate was wrong. */
5766 gimple_predicate final_pred = rhs_predicate_for (*to_p);
5767 if (final_pred != initial_pred)
5768 {
5769 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
5770 if (ret == GS_ERROR)
5771 return ret;
5772 }
5773
f8e89441 5774 /* In case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the type
026c3cfd 5775 size as an argument to the call. */
f8e89441
TV
5776 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5777 {
5778 tree call = TREE_OPERAND (*from_p, 0);
5779 tree vlasize = TREE_OPERAND (*from_p, 1);
5780
5781 if (TREE_CODE (call) == CALL_EXPR
5782 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
5783 {
2fe1d762 5784 int nargs = call_expr_nargs (call);
f8e89441
TV
5785 tree type = TREE_TYPE (call);
5786 tree ap = CALL_EXPR_ARG (call, 0);
5787 tree tag = CALL_EXPR_ARG (call, 1);
33f0852f 5788 tree aptag = CALL_EXPR_ARG (call, 2);
f8e89441 5789 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
2fe1d762
TV
5790 IFN_VA_ARG, type,
5791 nargs + 1, ap, tag,
33f0852f
JJ
5792 aptag, vlasize);
5793 TREE_OPERAND (*from_p, 0) = newcall;
f8e89441
TV
5794 }
5795 }
5796
44de5aeb
RK
5797 /* Now see if the above changed *from_p to something we handle specially. */
5798 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5799 want_value);
6de9cd9a
DN
5800 if (ret != GS_UNHANDLED)
5801 return ret;
5802
d25cee4d
RH
5803 /* If we've got a variable sized assignment between two lvalues (i.e. does
5804 not involve a call), then we can make things a bit more straightforward
5805 by converting the assignment to memcpy or memset. */
5806 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5807 {
5808 tree from = TREE_OPERAND (*from_p, 0);
5809 tree size = TREE_OPERAND (*from_p, 1);
5810
5811 if (TREE_CODE (from) == CONSTRUCTOR)
726a989a
RB
5812 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
5813
e847cc68 5814 if (is_gimple_addressable (from))
d25cee4d
RH
5815 {
5816 *from_p = from;
726a989a
RB
5817 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
5818 pre_p);
d25cee4d
RH
5819 }
5820 }
5821
e41d82f5
RH
5822 /* Transform partial stores to non-addressable complex variables into
5823 total stores. This allows us to use real instead of virtual operands
5824 for these variables, which improves optimization. */
5825 if ((TREE_CODE (*to_p) == REALPART_EXPR
5826 || TREE_CODE (*to_p) == IMAGPART_EXPR)
5827 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
5828 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
5829
f173837a 5830 /* Try to alleviate the effects of the gimplification creating artificial
b4771722
EB
5831 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5832 make sure not to create DECL_DEBUG_EXPR links across functions. */
f173837a 5833 if (!gimplify_ctxp->into_ssa
8813a647 5834 && VAR_P (*from_p)
726a989a
RB
5835 && DECL_IGNORED_P (*from_p)
5836 && DECL_P (*to_p)
b4771722 5837 && !DECL_IGNORED_P (*to_p)
0a37d40c
PMR
5838 && decl_function_context (*to_p) == current_function_decl
5839 && decl_function_context (*from_p) == current_function_decl)
f173837a
EB
5840 {
5841 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
5842 DECL_NAME (*from_p)
5843 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
839b422f 5844 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
f173837a 5845 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
726a989a
RB
5846 }
5847
8f0fe813
NS
5848 if (want_value && TREE_THIS_VOLATILE (*to_p))
5849 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
5850
726a989a
RB
5851 if (TREE_CODE (*from_p) == CALL_EXPR)
5852 {
5853 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5854 instead of a GIMPLE_ASSIGN. */
538dd0b7 5855 gcall *call_stmt;
1304953e
JJ
5856 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
5857 {
5858 /* Gimplify internal functions created in the FEs. */
5859 int nargs = call_expr_nargs (*from_p), i;
5860 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
5861 auto_vec<tree> vargs (nargs);
5862
5863 for (i = 0; i < nargs; i++)
5864 {
5865 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
5866 EXPR_LOCATION (*from_p));
5867 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
5868 }
538dd0b7 5869 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
a844293d 5870 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
538dd0b7 5871 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
1304953e 5872 }
ed9c79e1
JJ
5873 else
5874 {
1304953e
JJ
5875 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
5876 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
5877 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
5878 tree fndecl = get_callee_fndecl (*from_p);
5879 if (fndecl
3d78e008 5880 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
1304953e 5881 && call_expr_nargs (*from_p) == 3)
538dd0b7
DM
5882 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
5883 CALL_EXPR_ARG (*from_p, 0),
5884 CALL_EXPR_ARG (*from_p, 1),
5885 CALL_EXPR_ARG (*from_p, 2));
1304953e
JJ
5886 else
5887 {
5c5f0b65 5888 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
1304953e 5889 }
ed9c79e1 5890 }
538dd0b7 5891 notice_special_calls (call_stmt);
abd3a68c 5892 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
538dd0b7 5893 gimple_call_set_lhs (call_stmt, *to_p);
381cdae4
RB
5894 else if (TREE_CODE (*to_p) == SSA_NAME)
5895 /* The above is somewhat premature, avoid ICEing later for a
5896 SSA name w/o a definition. We may have uses in the GIMPLE IL.
5897 ??? This doesn't make it a default-def. */
5898 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
815d9cc6 5899
538dd0b7 5900 assign = call_stmt;
f173837a 5901 }
726a989a 5902 else
c2255bc4
AH
5903 {
5904 assign = gimple_build_assign (*to_p, *from_p);
5905 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4cde512c
JJ
5906 if (COMPARISON_CLASS_P (*from_p))
5907 gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
c2255bc4 5908 }
f173837a 5909
726a989a 5910 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
6de9cd9a 5911 {
2ad728d2 5912 /* We should have got an SSA name from the start. */
381cdae4
RB
5913 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
5914 || ! gimple_in_ssa_p (cfun));
726a989a 5915 }
07beea0d 5916
6da8be89
MM
5917 gimplify_seq_add_stmt (pre_p, assign);
5918 gsi = gsi_last (*pre_p);
88ac13da 5919 maybe_fold_stmt (&gsi);
6da8be89 5920
726a989a
RB
5921 if (want_value)
5922 {
8f0fe813 5923 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
17ad5b5e 5924 return GS_OK;
6de9cd9a 5925 }
726a989a
RB
5926 else
5927 *expr_p = NULL;
6de9cd9a 5928
17ad5b5e 5929 return GS_ALL_DONE;
6de9cd9a
DN
5930}
5931
ad19c4be
EB
5932/* Gimplify a comparison between two variable-sized objects. Do this
5933 with a call to BUILT_IN_MEMCMP. */
44de5aeb
RK
5934
5935static enum gimplify_status
5936gimplify_variable_sized_compare (tree *expr_p)
5937{
692ad9aa 5938 location_t loc = EXPR_LOCATION (*expr_p);
44de5aeb
RK
5939 tree op0 = TREE_OPERAND (*expr_p, 0);
5940 tree op1 = TREE_OPERAND (*expr_p, 1);
692ad9aa 5941 tree t, arg, dest, src, expr;
5039610b
SL
5942
5943 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
5944 arg = unshare_expr (arg);
5945 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
db3927fb
AH
5946 src = build_fold_addr_expr_loc (loc, op1);
5947 dest = build_fold_addr_expr_loc (loc, op0);
e79983f4 5948 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
db3927fb 5949 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
692ad9aa
EB
5950
5951 expr
b4257cfc 5952 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
692ad9aa
EB
5953 SET_EXPR_LOCATION (expr, loc);
5954 *expr_p = expr;
44de5aeb
RK
5955
5956 return GS_OK;
5957}
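
/* Thus an equality test between two variable-sized aggregates A and B, as
   some front ends emit, becomes roughly

     memcmp (&A, &B, <size of A's type>) == 0

   with the size operand taken from TYPE_SIZE_UNIT after substituting any
   PLACEHOLDER_EXPRs with A.  */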
5958
ad19c4be
EB
5959/* Gimplify a comparison between two aggregate objects of integral scalar
5960 mode as a comparison between the bitwise equivalent scalar values. */
61c25908
OH
5961
5962static enum gimplify_status
5963gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5964{
db3927fb 5965 location_t loc = EXPR_LOCATION (*expr_p);
61c25908
OH
5966 tree op0 = TREE_OPERAND (*expr_p, 0);
5967 tree op1 = TREE_OPERAND (*expr_p, 1);
5968
5969 tree type = TREE_TYPE (op0);
5970 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
5971
db3927fb
AH
5972 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
5973 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
61c25908
OH
5974
5975 *expr_p
db3927fb 5976 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
61c25908
OH
5977
5978 return GS_OK;
5979}
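
/* For instance, comparing two 8-byte structures whose TYPE_MODE is an
   integer mode (DImode on many targets) is reduced to comparing the two
   underlying 64-bit scalars through VIEW_CONVERT_EXPR, avoiding any
   memcmp-style comparison.  */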
5980
ad19c4be
EB
5981/* Gimplify an expression sequence. This function gimplifies each
5982 expression and rewrites the original expression with the last
6de9cd9a
DN
5983 expression of the sequence in GIMPLE form.
5984
5985 PRE_P points to the list where the side effects for all the
5986 expressions in the sequence will be emitted.
d3147f64 5987
6de9cd9a 5988 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
6de9cd9a
DN
5989
5990static enum gimplify_status
726a989a 5991gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6de9cd9a
DN
5992{
5993 tree t = *expr_p;
5994
5995 do
5996 {
5997 tree *sub_p = &TREE_OPERAND (t, 0);
5998
5999 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
6000 gimplify_compound_expr (sub_p, pre_p, false);
6001 else
726a989a 6002 gimplify_stmt (sub_p, pre_p);
6de9cd9a
DN
6003
6004 t = TREE_OPERAND (t, 1);
6005 }
6006 while (TREE_CODE (t) == COMPOUND_EXPR);
6007
6008 *expr_p = t;
6009 if (want_value)
6010 return GS_OK;
6011 else
6012 {
726a989a 6013 gimplify_stmt (expr_p, pre_p);
6de9cd9a
DN
6014 return GS_ALL_DONE;
6015 }
6016}
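
/* For example, gimplifying "x = (a++, b++, c)" emits the side effects of
   "a++" and "b++" as statements on PRE_P and leaves "c" as the value of
   the whole COMPOUND_EXPR.  */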
6017
726a989a
RB
6018/* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
6019 gimplify. After gimplification, EXPR_P will point to a new temporary
6020 that holds the original value of the SAVE_EXPR node.
6de9cd9a 6021
726a989a 6022 PRE_P points to the list where side effects that must happen before
ad19c4be 6023 *EXPR_P should be stored. */
6de9cd9a
DN
6024
6025static enum gimplify_status
726a989a 6026gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6de9cd9a
DN
6027{
6028 enum gimplify_status ret = GS_ALL_DONE;
6029 tree val;
6030
282899df 6031 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
6de9cd9a
DN
6032 val = TREE_OPERAND (*expr_p, 0);
6033
7f5e6307
RH
6034 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
6035 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
17ad5b5e 6036 {
3dd93025 6037 /* The operand may be a void-valued expression. It is
519087cf
EB
6038 being executed only for its side-effects. */
6039 if (TREE_TYPE (val) == void_type_node)
6040 {
6041 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6042 is_gimple_stmt, fb_none);
6043 val = NULL;
6044 }
6045 else
6046 /* The temporary may not be an SSA name as later abnormal and EH
b18f161d
RB
6047 control flow may invalidate use/def domination. When in SSA
6048 form then assume there are no such issues and SAVE_EXPRs only
6049 appear via GENERIC foldings. */
6050 val = get_initialized_tmp_var (val, pre_p, post_p,
6051 gimple_in_ssa_p (cfun));
7f5e6307
RH
6052
6053 TREE_OPERAND (*expr_p, 0) = val;
6054 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
17ad5b5e 6055 }
6de9cd9a 6056
7f5e6307
RH
6057 *expr_p = val;
6058
6de9cd9a
DN
6059 return ret;
6060}
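
/* For example, if SAVE_EXPR <n * 4> occurs several times within a
   variable-length array type's size, the first gimplification evaluates
   "n * 4" once into a temporary and every later occurrence reuses that
   temporary, because the SAVE_EXPR is marked resolved above.  */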
6061
ad19c4be 6062/* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6de9cd9a
DN
6063
6064 unary_expr
6065 : ...
6066 | '&' varname
6067 ...
6068
6069 PRE_P points to the list where side effects that must happen before
6070 *EXPR_P should be stored.
6071
6072 POST_P points to the list where side effects that must happen after
6073 *EXPR_P should be stored. */
6074
6075static enum gimplify_status
726a989a 6076gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6de9cd9a
DN
6077{
6078 tree expr = *expr_p;
6079 tree op0 = TREE_OPERAND (expr, 0);
6080 enum gimplify_status ret;
db3927fb 6081 location_t loc = EXPR_LOCATION (*expr_p);
6de9cd9a
DN
6082
6083 switch (TREE_CODE (op0))
6084 {
6085 case INDIRECT_REF:
67f23620 6086 do_indirect_ref:
6de9cd9a
DN
6087 /* Check if we are dealing with an expression of the form '&*ptr'.
6088 While the front end folds away '&*ptr' into 'ptr', these
6089 expressions may be generated internally by the compiler (e.g.,
6090 builtins like __builtin_va_end). */
67f23620
RH
6091 /* Caution: the silent array decomposition semantics we allow for
6092 ADDR_EXPR means we can't always discard the pair. */
c87ac7e8
AO
6093 /* Gimplification of the ADDR_EXPR operand may drop
6094 cv-qualification conversions, so make sure we add them if
6095 needed. */
67f23620
RH
6096 {
6097 tree op00 = TREE_OPERAND (op0, 0);
6098 tree t_expr = TREE_TYPE (expr);
6099 tree t_op00 = TREE_TYPE (op00);
6100
f4088621 6101 if (!useless_type_conversion_p (t_expr, t_op00))
db3927fb 6102 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
67f23620
RH
6103 *expr_p = op00;
6104 ret = GS_OK;
6105 }
6de9cd9a
DN
6106 break;
6107
44de5aeb
RK
6108 case VIEW_CONVERT_EXPR:
6109 /* Take the address of our operand and then convert it to the type of
af72267c
RK
6110 this ADDR_EXPR.
6111
6112 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
6113 all clear. The impact of this transformation is even less clear. */
91804752
EB
6114
6115 /* If the operand is a useless conversion, look through it. Doing so
6116 guarantees that the ADDR_EXPR and its operand will remain of the
6117 same type. */
6118 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
317c0092 6119 op0 = TREE_OPERAND (op0, 0);
91804752 6120
db3927fb
AH
6121 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
6122 build_fold_addr_expr_loc (loc,
6123 TREE_OPERAND (op0, 0)));
44de5aeb 6124 ret = GS_OK;
6de9cd9a
DN
6125 break;
6126
903eccd1
EB
6127 case MEM_REF:
6128 if (integer_zerop (TREE_OPERAND (op0, 1)))
6129 goto do_indirect_ref;
6130
191816a3 6131 /* fall through */
903eccd1 6132
6de9cd9a 6133 default:
cbf5d0e7
RB
6134 /* If we see a call to a declared builtin or see its address
6135 being taken (we can unify those cases here) then we can mark
6136 the builtin for implicit generation by GCC. */
6137 if (TREE_CODE (op0) == FUNCTION_DECL
3d78e008 6138 && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
cbf5d0e7
RB
6139 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
6140 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
6141
6de9cd9a 6142 /* We use fb_either here because the C frontend sometimes takes
5201931e
JM
6143 the address of a call that returns a struct; see
6144 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6145 the implied temporary explicit. */
936d04b6 6146
f76d6e6f 6147 /* Make the operand addressable. */
6de9cd9a 6148 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
e847cc68 6149 is_gimple_addressable, fb_either);
8b17cc05
RG
6150 if (ret == GS_ERROR)
6151 break;
67f23620 6152
f76d6e6f
EB
6153 /* Then mark it. Beware that it may not be possible to do so directly
6154 if a temporary has been created by the gimplification. */
6155 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
9e51aaf5 6156
8b17cc05 6157 op0 = TREE_OPERAND (expr, 0);
6de9cd9a 6158
8b17cc05
RG
6159 /* For various reasons, the gimplification of the expression
6160 may have made a new INDIRECT_REF. */
3d947f1f
RB
6161 if (TREE_CODE (op0) == INDIRECT_REF
6162 || (TREE_CODE (op0) == MEM_REF
6163 && integer_zerop (TREE_OPERAND (op0, 1))))
8b17cc05
RG
6164 goto do_indirect_ref;
6165
6b8b9e42
RG
6166 mark_addressable (TREE_OPERAND (expr, 0));
6167
6168 /* The FEs may end up building ADDR_EXPRs early on a decl with
6169 an incomplete type. Re-build ADDR_EXPRs in canonical form
6170 here. */
6171 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
6172 *expr_p = build_fold_addr_expr (op0);
6173
8b17cc05 6174 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6b8b9e42
RG
6175 recompute_tree_invariant_for_addr_expr (*expr_p);
6176
6177 /* If we re-built the ADDR_EXPR add a conversion to the original type
6178 if required. */
6179 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
6180 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
8b17cc05 6181
6de9cd9a
DN
6182 break;
6183 }
6184
6de9cd9a
DN
6185 return ret;
6186}
6187
6188/* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
6189 value; output operands should be a gimple lvalue. */
6190
6191static enum gimplify_status
726a989a 6192gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6de9cd9a 6193{
726a989a
RB
6194 tree expr;
6195 int noutputs;
6196 const char **oconstraints;
6de9cd9a
DN
6197 int i;
6198 tree link;
6199 const char *constraint;
6200 bool allows_mem, allows_reg, is_inout;
6201 enum gimplify_status ret, tret;
538dd0b7 6202 gasm *stmt;
9771b263
DN
6203 vec<tree, va_gc> *inputs;
6204 vec<tree, va_gc> *outputs;
6205 vec<tree, va_gc> *clobbers;
6206 vec<tree, va_gc> *labels;
726a989a 6207 tree link_next;
b8698a0f 6208
726a989a
RB
6209 expr = *expr_p;
6210 noutputs = list_length (ASM_OUTPUTS (expr));
6211 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
6212
9771b263
DN
6213 inputs = NULL;
6214 outputs = NULL;
6215 clobbers = NULL;
6216 labels = NULL;
6de9cd9a 6217
6de9cd9a 6218 ret = GS_ALL_DONE;
726a989a
RB
6219 link_next = NULL_TREE;
6220 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6de9cd9a 6221 {
2c68ba8e 6222 bool ok;
726a989a
RB
6223 size_t constraint_len;
6224
6225 link_next = TREE_CHAIN (link);
6226
6227 oconstraints[i]
6228 = constraint
6de9cd9a 6229 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6db081f1
AP
6230 constraint_len = strlen (constraint);
6231 if (constraint_len == 0)
6232 continue;
6de9cd9a 6233
2c68ba8e
LB
6234 ok = parse_output_constraint (&constraint, i, 0, 0,
6235 &allows_mem, &allows_reg, &is_inout);
6236 if (!ok)
6237 {
6238 ret = GS_ERROR;
6239 is_inout = false;
6240 }
6de9cd9a 6241
5f6705b7
JJ
6242 /* If we can't make copies, we can only accept memory.
6243 Similarly for VLAs. */
6244 tree outtype = TREE_TYPE (TREE_VALUE (link));
6245 if (outtype != error_mark_node
6246 && (TREE_ADDRESSABLE (outtype)
6247 || !COMPLETE_TYPE_P (outtype)
6248 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype))))
edff2a05
JJ
6249 {
6250 if (allows_mem)
6251 allows_reg = 0;
6252 else
6253 {
6254 error ("impossible constraint in %<asm%>");
6255 error ("non-memory output %d must stay in memory", i);
6256 return GS_ERROR;
6257 }
6258 }
6259
6de9cd9a 6260 if (!allows_reg && allows_mem)
936d04b6 6261 mark_addressable (TREE_VALUE (link));
6de9cd9a
DN
6262
6263 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6264 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6265 fb_lvalue | fb_mayfail);
6266 if (tret == GS_ERROR)
6267 {
a9c697b8 6268 error ("invalid lvalue in %<asm%> output %d", i);
6de9cd9a
DN
6269 ret = tret;
6270 }
6271
ed87de55
RB
6272 /* If the constraint does not allow memory make sure we gimplify
6273 it to a register if it is not already but its base is. This
6274 happens for complex and vector components. */
6275 if (!allows_mem)
6276 {
6277 tree op = TREE_VALUE (link);
6278 if (! is_gimple_val (op)
6279 && is_gimple_reg_type (TREE_TYPE (op))
6280 && is_gimple_reg (get_base_address (op)))
6281 {
6282 tree tem = create_tmp_reg (TREE_TYPE (op));
6283 tree ass;
6284 if (is_inout)
6285 {
6286 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6287 tem, unshare_expr (op));
6288 gimplify_and_add (ass, pre_p);
6289 }
6290 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6291 gimplify_and_add (ass, post_p);
6292
6293 TREE_VALUE (link) = tem;
6294 tret = GS_OK;
6295 }
6296 }
6297
9771b263 6298 vec_safe_push (outputs, link);
726a989a
RB
6299 TREE_CHAIN (link) = NULL_TREE;
6300
6de9cd9a
DN
6301 if (is_inout)
6302 {
6303 /* An input/output operand. To give the optimizers more
6304 flexibility, split it into separate input and output
6305 operands. */
6306 tree input;
3d7b83b6
MS
6307 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6308 char buf[11];
6de9cd9a
DN
6309
6310 /* Turn the in/out constraint into an output constraint. */
6311 char *p = xstrdup (constraint);
6312 p[0] = '=';
6313 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6de9cd9a
DN
6314
6315 /* And add a matching input constraint. */
6316 if (allows_reg)
6317 {
3d7b83b6 6318 sprintf (buf, "%u", i);
372d72d9
JJ
6319
6320 /* If there are multiple alternatives in the constraint,
6321 handle each of them individually. Those that allow register
6322 will be replaced with operand number, the others will stay
6323 unchanged. */
6324 if (strchr (p, ',') != NULL)
6325 {
6326 size_t len = 0, buflen = strlen (buf);
6327 char *beg, *end, *str, *dst;
6328
6329 for (beg = p + 1;;)
6330 {
6331 end = strchr (beg, ',');
6332 if (end == NULL)
6333 end = strchr (beg, '\0');
6334 if ((size_t) (end - beg) < buflen)
6335 len += buflen + 1;
6336 else
6337 len += end - beg + 1;
6338 if (*end)
6339 beg = end + 1;
6340 else
6341 break;
6342 }
6343
858904db 6344 str = (char *) alloca (len);
372d72d9
JJ
6345 for (beg = p + 1, dst = str;;)
6346 {
6347 const char *tem;
6348 bool mem_p, reg_p, inout_p;
6349
6350 end = strchr (beg, ',');
6351 if (end)
6352 *end = '\0';
6353 beg[-1] = '=';
6354 tem = beg - 1;
6355 parse_output_constraint (&tem, i, 0, 0,
6356 &mem_p, &reg_p, &inout_p);
6357 if (dst != str)
6358 *dst++ = ',';
6359 if (reg_p)
6360 {
6361 memcpy (dst, buf, buflen);
6362 dst += buflen;
6363 }
6364 else
6365 {
6366 if (end)
6367 len = end - beg;
6368 else
6369 len = strlen (beg);
6370 memcpy (dst, beg, len);
6371 dst += len;
6372 }
6373 if (end)
6374 beg = end + 1;
6375 else
6376 break;
6377 }
6378 *dst = '\0';
6379 input = build_string (dst - str, str);
6380 }
6381 else
6382 input = build_string (strlen (buf), buf);
6de9cd9a
DN
6383 }
6384 else
6385 input = build_string (constraint_len - 1, constraint + 1);
372d72d9
JJ
6386
6387 free (p);
6388
6de9cd9a
DN
6389 input = build_tree_list (build_tree_list (NULL_TREE, input),
6390 unshare_expr (TREE_VALUE (link)));
6391 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
6392 }
6393 }
6394
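/* For example (illustrative only), an in/out operand such as

     __asm__ ("addl %1, %0" : "+r" (x) : "r" (y));

   is rewritten above so that the "+r" constraint becomes the output
   constraint "=r" and a matching input referring to operand 0 is
   appended, roughly as if the user had written

     __asm__ ("addl %1, %0" : "=r" (x) : "r" (y), "0" (x));

   With a multi-alternative constraint such as "+r,m", only the
   alternatives that allow a register are replaced by the operand
   number, giving the input constraint "0,m".  */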
726a989a
RB
6395 link_next = NULL_TREE;
6396 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6de9cd9a 6397 {
726a989a
RB
6398 link_next = TREE_CHAIN (link);
6399 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6de9cd9a
DN
6400 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6401 oconstraints, &allows_mem, &allows_reg);
6402
f497c16c 6403 /* If we can't make copies, we can only accept memory. */
5f6705b7
JJ
6404 tree intype = TREE_TYPE (TREE_VALUE (link));
6405 if (intype != error_mark_node
6406 && (TREE_ADDRESSABLE (intype)
6407 || !COMPLETE_TYPE_P (intype)
6408 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype))))
f497c16c
JJ
6409 {
6410 if (allows_mem)
6411 allows_reg = 0;
6412 else
6413 {
6414 error ("impossible constraint in %<asm%>");
6415 error ("non-memory input %d must stay in memory", i);
6416 return GS_ERROR;
6417 }
6418 }
6419
6de9cd9a
DN
6420 /* If the operand is a memory input, it should be an lvalue. */
6421 if (!allows_reg && allows_mem)
6422 {
502c5084
JJ
6423 tree inputv = TREE_VALUE (link);
6424 STRIP_NOPS (inputv);
6425 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6426 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6427 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
8f1e28e0
MP
6428 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6429 || TREE_CODE (inputv) == MODIFY_EXPR)
502c5084 6430 TREE_VALUE (link) = error_mark_node;
6de9cd9a
DN
6431 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6432 is_gimple_lvalue, fb_lvalue | fb_mayfail);
bdd3aea6
JJ
6433 if (tret != GS_ERROR)
6434 {
6435 /* Unlike output operands, memory inputs are not guaranteed
6436 to be lvalues by the FE, and while the expressions are
6437 marked addressable there, if it is e.g. a statement
6438 expression, temporaries in it might not end up being
6439 addressable. They might be already used in the IL and thus
6440 it is too late to make them addressable now though. */
6441 tree x = TREE_VALUE (link);
6442 while (handled_component_p (x))
6443 x = TREE_OPERAND (x, 0);
6444 if (TREE_CODE (x) == MEM_REF
6445 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6446 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
8813a647 6447 if ((VAR_P (x)
bdd3aea6
JJ
6448 || TREE_CODE (x) == PARM_DECL
6449 || TREE_CODE (x) == RESULT_DECL)
6450 && !TREE_ADDRESSABLE (x)
6451 && is_gimple_reg (x))
6452 {
6453 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6454 input_location), 0,
6455 "memory input %d is not directly addressable",
6456 i);
6457 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6458 }
6459 }
936d04b6 6460 mark_addressable (TREE_VALUE (link));
6de9cd9a
DN
6461 if (tret == GS_ERROR)
6462 {
bdd3aea6
JJ
6463 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6464 "memory input %d is not directly addressable", i);
6de9cd9a
DN
6465 ret = tret;
6466 }
6467 }
6468 else
6469 {
6470 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
e670d9e4 6471 is_gimple_asm_val, fb_rvalue);
6de9cd9a
DN
6472 if (tret == GS_ERROR)
6473 ret = tret;
6474 }
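/* For example (illustrative only), a memory input taken from a statement
   expression such as

     __asm__ ("" : : "m" (({ int t = foo (); t; })));

   may leave the temporary 't' non-addressable by the time it reaches
   this point; the code above then warns that the memory input is not
   directly addressable and copies it into an addressable temporary via
   prepare_gimple_addressable.  */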
726a989a
RB
6475
6476 TREE_CHAIN (link) = NULL_TREE;
9771b263 6477 vec_safe_push (inputs, link);
6de9cd9a 6478 }
b8698a0f 6479
ca081cc8
EB
6480 link_next = NULL_TREE;
6481 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6482 {
6483 link_next = TREE_CHAIN (link);
6484 TREE_CHAIN (link) = NULL_TREE;
6485 vec_safe_push (clobbers, link);
6486 }
1c384bf1 6487
ca081cc8
EB
6488 link_next = NULL_TREE;
6489 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6490 {
6491 link_next = TREE_CHAIN (link);
6492 TREE_CHAIN (link) = NULL_TREE;
6493 vec_safe_push (labels, link);
6494 }
726a989a 6495
a406865a
RG
6496 /* Do not add ASMs with errors to the gimple IL stream. */
6497 if (ret != GS_ERROR)
6498 {
6499 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
1c384bf1 6500 inputs, outputs, clobbers, labels);
726a989a 6501
15a85b05 6502 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
a406865a 6503 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
5b76e75f 6504 gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));
a406865a
RG
6505
6506 gimplify_seq_add_stmt (pre_p, stmt);
6507 }
6de9cd9a
DN
6508
6509 return ret;
6510}
6511
6512/* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
726a989a 6513 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6de9cd9a
DN
6514 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6515 return to this function.
6516
6517 FIXME should we complexify the prequeue handling instead? Or use flags
6518 for all the cleanups and let the optimizer tighten them up? The current
6519 code seems pretty fragile; it will break on a cleanup within any
6520 non-conditional nesting. But any such nesting would be broken, anyway;
6521 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6522 and continues out of it. We can do that at the RTL level, though, so
6523 having an optimizer to tighten up try/finally regions would be a Good
6524 Thing. */
6525
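/* For example (a rough sketch, not an exact dump), for a C++ full
   expression

     f (S ());

   the temporary returned by S::S needs its destructor run at the end of
   the full expression.  Gimplifying the body yields a
   GIMPLE_WITH_CLEANUP_EXPR for S::~S, and the loop below turns the tail
   of the sequence into approximately

     try { S::S (&D.1); f (D.1); } finally { S::~S (&D.1); }

   where D.1 stands for the compiler-generated temporary.  */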
6526static enum gimplify_status
726a989a 6527gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a 6528{
726a989a
RB
6529 gimple_stmt_iterator iter;
6530 gimple_seq body_sequence = NULL;
6de9cd9a 6531
325c3691 6532 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6de9cd9a
DN
6533
6534 /* We only care about the number of conditions between the innermost
df77f454
JM
6535 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6536 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6de9cd9a 6537 int old_conds = gimplify_ctxp->conditions;
726a989a 6538 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
32be32af 6539 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6de9cd9a 6540 gimplify_ctxp->conditions = 0;
726a989a 6541 gimplify_ctxp->conditional_cleanups = NULL;
32be32af 6542 gimplify_ctxp->in_cleanup_point_expr = true;
6de9cd9a 6543
726a989a 6544 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6de9cd9a
DN
6545
6546 gimplify_ctxp->conditions = old_conds;
df77f454 6547 gimplify_ctxp->conditional_cleanups = old_cleanups;
32be32af 6548 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6de9cd9a 6549
726a989a 6550 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6de9cd9a 6551 {
355fe088 6552 gimple *wce = gsi_stmt (iter);
6de9cd9a 6553
726a989a 6554 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6de9cd9a 6555 {
726a989a 6556 if (gsi_one_before_end_p (iter))
6de9cd9a 6557 {
726a989a
RB
6558 /* Note that gsi_insert_seq_before and gsi_remove do not
6559 scan operands, unlike some other sequence mutators. */
ae0595b0
RG
6560 if (!gimple_wce_cleanup_eh_only (wce))
6561 gsi_insert_seq_before_without_update (&iter,
6562 gimple_wce_cleanup (wce),
6563 GSI_SAME_STMT);
726a989a 6564 gsi_remove (&iter, true);
6de9cd9a
DN
6565 break;
6566 }
6567 else
6568 {
538dd0b7 6569 gtry *gtry;
726a989a
RB
6570 gimple_seq seq;
6571 enum gimple_try_flags kind;
40aac948 6572
726a989a
RB
6573 if (gimple_wce_cleanup_eh_only (wce))
6574 kind = GIMPLE_TRY_CATCH;
40aac948 6575 else
726a989a
RB
6576 kind = GIMPLE_TRY_FINALLY;
6577 seq = gsi_split_seq_after (iter);
6578
82d6e6fc 6579 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
726a989a
RB
6580 /* Do not use gsi_replace here, as it may scan operands.
6581 We want to do a simple structural modification only. */
355a7673 6582 gsi_set_stmt (&iter, gtry);
daa6e488 6583 iter = gsi_start (gtry->eval);
6de9cd9a
DN
6584 }
6585 }
6586 else
726a989a 6587 gsi_next (&iter);
6de9cd9a
DN
6588 }
6589
726a989a 6590 gimplify_seq_add_seq (pre_p, body_sequence);
6de9cd9a
DN
6591 if (temp)
6592 {
6593 *expr_p = temp;
6de9cd9a
DN
6594 return GS_OK;
6595 }
6596 else
6597 {
726a989a 6598 *expr_p = NULL;
6de9cd9a
DN
6599 return GS_ALL_DONE;
6600 }
6601}
6602
6603/* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
726a989a 6604 is the cleanup action required. EH_ONLY is true if the cleanup should
e650ea2a
RB
6605 only be executed if an exception is thrown, not on normal exit.
6606 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6607 only valid for clobbers. */
6de9cd9a
DN
6608
6609static void
e650ea2a
RB
6610gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
6611 bool force_uncond = false)
6de9cd9a 6612{
355fe088 6613 gimple *wce;
726a989a 6614 gimple_seq cleanup_stmts = NULL;
6de9cd9a
DN
6615
6616 /* Errors can result in improperly nested cleanups. Which results in
726a989a 6617 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
1da2ed5f 6618 if (seen_error ())
6de9cd9a
DN
6619 return;
6620
cda4d053 6621 if (gimple_conditional_context ())
6de9cd9a
DN
6622 {
6623 /* If we're in a conditional context, this is more complex. We only
6624 want to run the cleanup if we actually ran the initialization that
6625 necessitates it, but we want to run it after the end of the
6626 conditional context. So we wrap the try/finally around the
6627 condition and use a flag to determine whether or not to actually
6628 run the destructor. Thus
6629
6630 test ? f(A()) : 0
6631
6632 becomes (approximately)
6633
6634 flag = 0;
6635 try {
6636 if (test) { A::A(temp); flag = 1; val = f(temp); }
6637 else { val = 0; }
6638 } finally {
6639 if (flag) A::~A(temp);
6640 }
6641 val
6642 */
cda4d053
RB
6643 if (force_uncond)
6644 {
6645 gimplify_stmt (&cleanup, &cleanup_stmts);
6646 wce = gimple_build_wce (cleanup_stmts);
6647 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6648 }
6649 else
6650 {
6651 tree flag = create_tmp_var (boolean_type_node, "cleanup");
6652 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
6653 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
6654
6655 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
6656 gimplify_stmt (&cleanup, &cleanup_stmts);
6657 wce = gimple_build_wce (cleanup_stmts);
6658
6659 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
6660 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6661 gimplify_seq_add_stmt (pre_p, ftrue);
6662
6663 /* Because of this manipulation, and the EH edges that jump
6664 threading cannot redirect, the temporary (VAR) will appear
6665 to be used uninitialized. Don't warn. */
6666 TREE_NO_WARNING (var) = 1;
6667 }
6de9cd9a
DN
6668 }
6669 else
6670 {
726a989a
RB
6671 gimplify_stmt (&cleanup, &cleanup_stmts);
6672 wce = gimple_build_wce (cleanup_stmts);
6673 gimple_wce_set_cleanup_eh_only (wce, eh_only);
6674 gimplify_seq_add_stmt (pre_p, wce);
6de9cd9a 6675 }
6de9cd9a
DN
6676}
6677
6678/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
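/* For example (a rough sketch), a TARGET_EXPR with slot D.1 and
   initializer S::S () is gimplified below into approximately

     D.1 = S::S ();
     ... uses of D.1 ...
     D.1 = {CLOBBER};   /-/ cleanup, when the slot lives in memory

   with the TARGET_EXPR's own cleanup (if any) pushed via
   gimple_push_cleanup, and the clobber added when the slot is inside a
   CLEANUP_POINT_EXPR and needs to live in memory.  */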
6679
6680static enum gimplify_status
726a989a 6681gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6de9cd9a
DN
6682{
6683 tree targ = *expr_p;
6684 tree temp = TARGET_EXPR_SLOT (targ);
6685 tree init = TARGET_EXPR_INITIAL (targ);
6686 enum gimplify_status ret;
6687
6dc4a604
ML
6688 bool unpoison_empty_seq = false;
6689 gimple_stmt_iterator unpoison_it;
6690
6de9cd9a
DN
6691 if (init)
6692 {
d0ad58f9
JM
6693 tree cleanup = NULL_TREE;
6694
3a5b9284 6695 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
786025ea 6696 to the temps list. Handle also variable length TARGET_EXPRs. */
507de5ee 6697 if (!poly_int_tree_p (DECL_SIZE (temp)))
786025ea
JJ
6698 {
6699 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
6700 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
6701 gimplify_vla_decl (temp, pre_p);
6702 }
6703 else
6dc4a604
ML
6704 {
6705 /* Save location where we need to place unpoisoning. It's possible
6706 that a variable will be converted to needs_to_live_in_memory. */
6707 unpoison_it = gsi_last (*pre_p);
6708 unpoison_empty_seq = gsi_end_p (unpoison_it);
6709
6710 gimple_add_tmp_var (temp);
6711 }
6de9cd9a 6712
3a5b9284
RH
6713 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6714 expression is supposed to initialize the slot. */
6715 if (VOID_TYPE_P (TREE_TYPE (init)))
6716 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6717 else
325c3691 6718 {
726a989a
RB
6719 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
6720 init = init_expr;
6721 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6722 init = NULL;
6723 ggc_free (init_expr);
325c3691 6724 }
3a5b9284 6725 if (ret == GS_ERROR)
abc67de1
SM
6726 {
6727 /* PR c++/28266 Make sure this is expanded only once. */
6728 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6729 return GS_ERROR;
6730 }
726a989a
RB
6731 if (init)
6732 gimplify_and_add (init, pre_p);
6de9cd9a
DN
6733
6734 /* If needed, push the cleanup for the temp. */
6735 if (TARGET_EXPR_CLEANUP (targ))
d0ad58f9
JM
6736 {
6737 if (CLEANUP_EH_ONLY (targ))
6738 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
6739 CLEANUP_EH_ONLY (targ), pre_p);
6740 else
6741 cleanup = TARGET_EXPR_CLEANUP (targ);
6742 }
6743
6744 /* Add a clobber for the temporary going out of scope, like
6745 gimplify_bind_expr. */
32be32af 6746 if (gimplify_ctxp->in_cleanup_point_expr
6dc4a604 6747 && needs_to_live_in_memory (temp))
d0ad58f9 6748 {
6dc4a604
ML
6749 if (flag_stack_reuse == SR_ALL)
6750 {
94b2a1e5 6751 tree clobber = build_clobber (TREE_TYPE (temp));
6dc4a604 6752 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
e650ea2a 6753 gimple_push_cleanup (temp, clobber, false, pre_p, true);
6dc4a604 6754 }
36208e60
ML
6755 if (asan_poisoned_variables
6756 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
832ece86 6757 && !TREE_STATIC (temp)
d838c2d5
JJ
6758 && dbg_cnt (asan_use_after_scope)
6759 && !gimplify_omp_ctxp)
6dc4a604
ML
6760 {
6761 tree asan_cleanup = build_asan_poison_call_expr (temp);
6762 if (asan_cleanup)
6763 {
6764 if (unpoison_empty_seq)
6765 unpoison_it = gsi_start (*pre_p);
d0ad58f9 6766
6dc4a604
ML
6767 asan_poison_variable (temp, false, &unpoison_it,
6768 unpoison_empty_seq);
6769 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
6770 }
6771 }
6772 }
d0ad58f9
JM
6773 if (cleanup)
6774 gimple_push_cleanup (temp, cleanup, false, pre_p);
6de9cd9a
DN
6775
6776 /* Only expand this once. */
6777 TREE_OPERAND (targ, 3) = init;
6778 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6779 }
282899df 6780 else
6de9cd9a 6781 /* We should have expanded this before. */
282899df 6782 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
6de9cd9a
DN
6783
6784 *expr_p = temp;
6785 return GS_OK;
6786}
6787
6788/* Gimplification of expression trees. */
6789
726a989a
RB
6790/* Gimplify an expression which appears at statement context. The
6791 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6792 NULL, a new sequence is allocated.
6de9cd9a 6793
726a989a
RB
6794 Return true if we actually added a statement to the queue. */
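   A typical use (illustrative only):

     gimple_seq seq = NULL;
     if (gimplify_stmt (&stmt, &seq))
       ... at least one statement was emitted into SEQ ...  */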
6795
6796bool
6797gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6de9cd9a 6798{
726a989a 6799 gimple_seq_node last;
6de9cd9a 6800
726a989a
RB
6801 last = gimple_seq_last (*seq_p);
6802 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6803 return last != gimple_seq_last (*seq_p);
6de9cd9a
DN
6804}
6805
953ff289
DN
6806/* Add FIRSTPRIVATE entries for DECL in the surrounding OpenMP parallels
6807 to CTX. If entries already exist, force them to be some flavor of private.
6808 If there is no enclosing parallel, do nothing. */
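/* For example (illustrative only), given

     int n = ...;
     char buf[n];
   #pragma omp parallel private (buf)

   the size expression of buf's variably modified type must be made
   firstprivate on the enclosing parallel so that the private copy of
   buf can be laid out there; omp_firstprivatize_type_sizes below calls
   this function for each such size.  */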
6809
6810void
6811omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
6812{
6813 splay_tree_node n;
6814
d9a6bd32 6815 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
953ff289
DN
6816 return;
6817
6818 do
6819 {
6820 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6821 if (n != NULL)
6822 {
6823 if (n->value & GOVD_SHARED)
6824 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
acf0174b
JJ
6825 else if (n->value & GOVD_MAP)
6826 n->value |= GOVD_MAP_TO_ONLY;
953ff289
DN
6827 else
6828 return;
6829 }
d9a6bd32
JJ
6830 else if ((ctx->region_type & ORT_TARGET) != 0)
6831 {
28567c40 6832 if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
d9a6bd32
JJ
6833 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6834 else
6835 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
6836 }
74bf76ed 6837 else if (ctx->region_type != ORT_WORKSHARE
28567c40 6838 && ctx->region_type != ORT_TASKGROUP
acf0174b 6839 && ctx->region_type != ORT_SIMD
182190f2
NS
6840 && ctx->region_type != ORT_ACC
6841 && !(ctx->region_type & ORT_TARGET_DATA))
953ff289
DN
6842 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6843
6844 ctx = ctx->outer_context;
6845 }
6846 while (ctx);
6847}
6848
6849/* Similarly for each of the type sizes of TYPE. */
6850
6851static void
6852omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
6853{
6854 if (type == NULL || type == error_mark_node)
6855 return;
6856 type = TYPE_MAIN_VARIANT (type);
6857
6e2830c3 6858 if (ctx->privatized_types->add (type))
953ff289
DN
6859 return;
6860
6861 switch (TREE_CODE (type))
6862 {
6863 case INTEGER_TYPE:
6864 case ENUMERAL_TYPE:
6865 case BOOLEAN_TYPE:
953ff289 6866 case REAL_TYPE:
325217ed 6867 case FIXED_POINT_TYPE:
953ff289
DN
6868 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
6869 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
6870 break;
6871
6872 case ARRAY_TYPE:
6873 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6874 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
6875 break;
6876
6877 case RECORD_TYPE:
6878 case UNION_TYPE:
6879 case QUAL_UNION_TYPE:
6880 {
6881 tree field;
910ad8de 6882 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
953ff289
DN
6883 if (TREE_CODE (field) == FIELD_DECL)
6884 {
6885 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
6886 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
6887 }
6888 }
6889 break;
6890
6891 case POINTER_TYPE:
6892 case REFERENCE_TYPE:
6893 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6894 break;
6895
6896 default:
6897 break;
6898 }
6899
6900 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
6901 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
6902 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
6903}
6904
41dbbb37 6905/* Add an entry for DECL in the OMP context CTX with FLAGS. */
953ff289
DN
6906
6907static void
6908omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
6909{
6910 splay_tree_node n;
6911 unsigned int nflags;
6912 tree t;
6913
d9a6bd32 6914 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
953ff289
DN
6915 return;
6916
6917 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
d54d1fc3
JJ
6918 there are constructors involved somewhere. An exception is a shared clause:
6919 there is nothing privatized in that case. */
6920 if ((flags & GOVD_SHARED) == 0
6921 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
6922 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
953ff289
DN
6923 flags |= GOVD_SEEN;
6924
6925 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
d9a6bd32 6926 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
953ff289
DN
6927 {
6928 /* We shouldn't be re-adding the decl with the same data
6929 sharing class. */
6930 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
953ff289 6931 nflags = n->value | flags;
182190f2
NS
6932 /* The only combination of data sharing classes we should see is
6933 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
6934 reduction variables to be used in data sharing clauses. */
6935 gcc_assert ((ctx->region_type & ORT_ACC) != 0
6936 || ((nflags & GOVD_DATA_SHARE_CLASS)
6937 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
74bf76ed 6938 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
953ff289
DN
6939 n->value = nflags;
6940 return;
6941 }
6942
6943 /* When adding a variable-sized variable, we have to handle all sorts
b8698a0f 6944 of additional bits of data: the pointer replacement variable, and
953ff289 6945 the parameters of the type. */
4c923c28 6946 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
953ff289
DN
6947 {
6948 /* Add the pointer replacement variable as PRIVATE if the variable
6949 replacement is private, else FIRSTPRIVATE since we'll need the
6950 address of the original variable either for SHARED, or for the
6951 copy into or out of the context. */
28567c40 6952 if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
953ff289 6953 {
41dbbb37
TS
6954 if (flags & GOVD_MAP)
6955 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
6956 else if (flags & GOVD_PRIVATE)
6957 nflags = GOVD_PRIVATE;
8860d270
JJ
6958 else if (((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6959 && (flags & GOVD_FIRSTPRIVATE))
6960 || (ctx->region_type == ORT_TARGET_DATA
6961 && (flags & GOVD_DATA_SHARE_CLASS) == 0))
d9a6bd32 6962 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
41dbbb37
TS
6963 else
6964 nflags = GOVD_FIRSTPRIVATE;
953ff289
DN
6965 nflags |= flags & GOVD_SEEN;
6966 t = DECL_VALUE_EXPR (decl);
6967 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6968 t = TREE_OPERAND (t, 0);
6969 gcc_assert (DECL_P (t));
6970 omp_add_variable (ctx, t, nflags);
6971 }
6972
6973 /* Add all of the variable and type parameters (which should have
6974 been gimplified to a formal temporary) as FIRSTPRIVATE. */
6975 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
6976 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
6977 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6978
6979 /* The variable-sized variable itself is never SHARED, only some form
6980 of PRIVATE. The sharing would take place via the pointer variable
6981 which we remapped above. */
6982 if (flags & GOVD_SHARED)
e9e2ef9f 6983 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
953ff289
DN
6984 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
6985
b8698a0f 6986 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
953ff289
DN
6987 alloca statement we generate for the variable, so make sure it
6988 is available. This isn't automatically needed for the SHARED
4288fea2
JJ
6989 case, since we won't be allocating local storage then.
6990 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
6991 in this case omp_notice_variable will be called later
6992 on when it is gimplified. */
acf0174b 6993 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
423ed416 6994 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
953ff289
DN
6995 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
6996 }
acf0174b
JJ
6997 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
6998 && lang_hooks.decls.omp_privatize_by_reference (decl))
953ff289 6999 {
953ff289
DN
7000 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7001
7002 /* Similar to the direct variable sized case above, we'll need the
7003 size of references being privatized. */
7004 if ((flags & GOVD_SHARED) == 0)
7005 {
7006 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
d9a6bd32 7007 if (DECL_P (t))
953ff289
DN
7008 omp_notice_variable (ctx, t, true);
7009 }
7010 }
7011
74bf76ed
JJ
7012 if (n != NULL)
7013 n->value |= flags;
7014 else
7015 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
2c71d454
CLT
7016
7017 /* For reduction clauses in OpenACC loop directives, by default create a
7018 copy clause on the enclosing parallel construct for carrying back the
7019 results. */
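/* For example (illustrative only):

     #pragma acc parallel
     #pragma acc loop reduction (+:sum)
       ...

   gets an implicit 'copy (sum)' (GOVD_MAP) on the enclosing parallel, so
   the reduced value is carried back when the region ends.  */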
7020 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
7021 {
7022 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
7023 while (outer_ctx)
7024 {
7025 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
7026 if (n != NULL)
7027 {
7028 /* Ignore local variables and explicitly declared clauses. */
7029 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
7030 break;
7031 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
7032 {
7033 /* According to the OpenACC spec, such a reduction variable
7034 should already have a copy map on a kernels construct,
7035 verify that here. */
7036 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
7037 && (n->value & GOVD_MAP));
7038 }
7039 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7040 {
7041 /* Remove firstprivate and make it a copy map. */
7042 n->value &= ~GOVD_FIRSTPRIVATE;
7043 n->value |= GOVD_MAP;
7044 }
7045 }
7046 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7047 {
7048 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
7049 GOVD_MAP | GOVD_SEEN);
7050 break;
7051 }
7052 outer_ctx = outer_ctx->outer_context;
7053 }
7054 }
953ff289
DN
7055}
7056
41dbbb37 7057/* Notice a threadprivate variable DECL used in OMP context CTX.
f22f4340
JJ
7058 This just prints out diagnostics about threadprivate variable uses
7059 in untied tasks. If DECL2 is non-NULL, prevent this warning
7060 on that variable. */
7061
7062static bool
7063omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
7064 tree decl2)
7065{
7066 splay_tree_node n;
acf0174b
JJ
7067 struct gimplify_omp_ctx *octx;
7068
7069 for (octx = ctx; octx; octx = octx->outer_context)
70468604
JJ
7070 if ((octx->region_type & ORT_TARGET) != 0
7071 || octx->order_concurrent)
acf0174b
JJ
7072 {
7073 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
7074 if (n == NULL)
7075 {
70468604
JJ
7076 if (octx->order_concurrent)
7077 {
7078 error ("threadprivate variable %qE used in a region with"
7079 " %<order(concurrent)%> clause", DECL_NAME (decl));
7080 error_at (octx->location, "enclosing region");
7081 }
7082 else
7083 {
7084 error ("threadprivate variable %qE used in target region",
7085 DECL_NAME (decl));
7086 error_at (octx->location, "enclosing target region");
7087 }
acf0174b
JJ
7088 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
7089 }
7090 if (decl2)
7091 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
7092 }
f22f4340
JJ
7093
7094 if (ctx->region_type != ORT_UNTIED_TASK)
7095 return false;
7096 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7097 if (n == NULL)
7098 {
ad19c4be
EB
7099 error ("threadprivate variable %qE used in untied task",
7100 DECL_NAME (decl));
f22f4340
JJ
7101 error_at (ctx->location, "enclosing task");
7102 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
7103 }
7104 if (decl2)
7105 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
7106 return false;
7107}
7108
6e232ba4
JN
7109/* Return true if global var DECL is device resident. */
7110
7111static bool
7112device_resident_p (tree decl)
7113{
7114 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
7115
7116 if (!attr)
7117 return false;
7118
7119 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
7120 {
7121 tree c = TREE_VALUE (t);
7122 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
7123 return true;
7124 }
7125
7126 return false;
7127}
7128
7ba8651e
CP
7129/* Return true if DECL has an ACC DECLARE attribute. */
7130
7131static bool
7132is_oacc_declared (tree decl)
7133{
7134 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
7135 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
7136 return declared != NULL_TREE;
7137}
7138
72500605
NS
7139/* Determine outer default flags for DECL mentioned in an OMP region
7140 but not declared in an enclosing clause.
7141
7142 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7143 remapped firstprivate instead of shared. To some extent this is
7144 addressed in omp_firstprivatize_type_sizes, but not
7145 effectively. */
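/* For example (illustrative only):

     int x = 0;
   #pragma omp parallel default(none)
     x++;

   is rejected with "'x' not specified in enclosing 'parallel'", whereas
   with default(shared) x would simply get GOVD_SHARED here.  */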
7146
7147static unsigned
7148omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
7149 bool in_code, unsigned flags)
7150{
7151 enum omp_clause_default_kind default_kind = ctx->default_kind;
7152 enum omp_clause_default_kind kind;
7153
7154 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
7155 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
7156 default_kind = kind;
00798c58
JJ
7157 else if (VAR_P (decl) && TREE_STATIC (decl) && DECL_IN_CONSTANT_POOL (decl))
7158 default_kind = OMP_CLAUSE_DEFAULT_SHARED;
72500605
NS
7159
7160 switch (default_kind)
7161 {
7162 case OMP_CLAUSE_DEFAULT_NONE:
7163 {
7164 const char *rtype;
7165
7166 if (ctx->region_type & ORT_PARALLEL)
7167 rtype = "parallel";
28567c40
JJ
7168 else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
7169 rtype = "taskloop";
72500605
NS
7170 else if (ctx->region_type & ORT_TASK)
7171 rtype = "task";
7172 else if (ctx->region_type & ORT_TEAMS)
7173 rtype = "teams";
7174 else
7175 gcc_unreachable ();
7176
724d25f3 7177 error ("%qE not specified in enclosing %qs",
72500605 7178 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
724d25f3 7179 error_at (ctx->location, "enclosing %qs", rtype);
72500605
NS
7180 }
7181 /* FALLTHRU */
7182 case OMP_CLAUSE_DEFAULT_SHARED:
7183 flags |= GOVD_SHARED;
7184 break;
7185 case OMP_CLAUSE_DEFAULT_PRIVATE:
7186 flags |= GOVD_PRIVATE;
7187 break;
7188 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
7189 flags |= GOVD_FIRSTPRIVATE;
7190 break;
7191 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
7192 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7193 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
7194 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
7195 {
7196 omp_notice_variable (octx, decl, in_code);
7197 for (; octx; octx = octx->outer_context)
7198 {
7199 splay_tree_node n2;
7200
72500605 7201 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
e01d41e5
JJ
7202 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
7203 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
7204 continue;
72500605
NS
7205 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
7206 {
7207 flags |= GOVD_FIRSTPRIVATE;
7208 goto found_outer;
7209 }
7210 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
7211 {
7212 flags |= GOVD_SHARED;
7213 goto found_outer;
7214 }
7215 }
7216 }
7217
7218 if (TREE_CODE (decl) == PARM_DECL
7219 || (!is_global_var (decl)
7220 && DECL_CONTEXT (decl) == current_function_decl))
7221 flags |= GOVD_FIRSTPRIVATE;
7222 else
7223 flags |= GOVD_SHARED;
7224 found_outer:
7225 break;
7226
7227 default:
7228 gcc_unreachable ();
7229 }
7230
7231 return flags;
7232}
7233
fffeedeb
NS
7234
7235/* Determine outer default flags for DECL mentioned in an OACC region
7236 but not declared in an enclosing clause. */
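/* For example (illustrative only), in

     int n;  double a[100];
   #pragma acc parallel
     ... n ... a[i] ...

   the scalar n defaults to 'firstprivate' while the aggregate a defaults
   to 'present_or_copy' (GOVD_MAP), or to 'present' when the construct
   carries default(present).  */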
7237
7238static unsigned
7239oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
7240{
7241 const char *rkind;
6e232ba4 7242 bool on_device = false;
ec084613 7243 bool is_private = false;
7ba8651e 7244 bool declared = is_oacc_declared (decl);
33a126a6
CP
7245 tree type = TREE_TYPE (decl);
7246
7247 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7248 type = TREE_TYPE (type);
6e232ba4 7249
ec084613
CP
7250 /* For Fortran COMMON blocks, only the variables used in those blocks are
7251 transferred and remapped. The block itself will have a private clause to
7252 avoid transferring the data twice.
7253 The hook evaluates to false by default. For a variable in Fortran's COMMON
7254 or EQUIVALENCE block, it returns 'true' (as we have shared=false) - as only
7255 the variables in such a COMMON/EQUIVALENCE block shall be privatized, not
7256 the whole block. For C++ and Fortran, it can also be true under certain
7257 other conditions, if DECL_HAS_VALUE_EXPR. */
7258 if (RECORD_OR_UNION_TYPE_P (type))
7259 is_private = lang_hooks.decls.omp_disregard_value_expr (decl, false);
7260
6e232ba4
JN
7261 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
7262 && is_global_var (decl)
ec084613
CP
7263 && device_resident_p (decl)
7264 && !is_private)
6e232ba4
JN
7265 {
7266 on_device = true;
7267 flags |= GOVD_MAP_TO_ONLY;
7268 }
fffeedeb
NS
7269
7270 switch (ctx->region_type)
7271 {
fffeedeb 7272 case ORT_ACC_KERNELS:
fffeedeb 7273 rkind = "kernels";
0d0afa9f 7274
ec084613
CP
7275 if (is_private)
7276 flags |= GOVD_FIRSTPRIVATE;
7277 else if (AGGREGATE_TYPE_P (type))
7fd549d2
TS
7278 {
7279 /* Aggregates default to 'present_or_copy', or 'present'. */
7280 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7281 flags |= GOVD_MAP;
7282 else
7283 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7284 }
0d0afa9f
TS
7285 else
7286 /* Scalars default to 'copy'. */
7287 flags |= GOVD_MAP | GOVD_MAP_FORCE;
7288
fffeedeb
NS
7289 break;
7290
7291 case ORT_ACC_PARALLEL:
62aee289
MR
7292 case ORT_ACC_SERIAL:
7293 rkind = ctx->region_type == ORT_ACC_PARALLEL ? "parallel" : "serial";
0d0afa9f 7294
ec084613
CP
7295 if (is_private)
7296 flags |= GOVD_FIRSTPRIVATE;
7297 else if (on_device || declared)
0d0afa9f
TS
7298 flags |= GOVD_MAP;
7299 else if (AGGREGATE_TYPE_P (type))
7fd549d2
TS
7300 {
7301 /* Aggregates default to 'present_or_copy', or 'present'. */
7302 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7303 flags |= GOVD_MAP;
7304 else
7305 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7306 }
0d0afa9f
TS
7307 else
7308 /* Scalars default to 'firstprivate'. */
7309 flags |= GOVD_FIRSTPRIVATE;
7310
fffeedeb 7311 break;
0d0afa9f
TS
7312
7313 default:
7314 gcc_unreachable ();
fffeedeb
NS
7315 }
7316
7317 if (DECL_ARTIFICIAL (decl))
7318 ; /* We can get compiler-generated decls, and should not complain
7319 about them. */
7320 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7321 {
7c602779 7322 error ("%qE not specified in enclosing OpenACC %qs construct",
fffeedeb 7323 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
20e8b68f 7324 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
fffeedeb 7325 }
7fd549d2
TS
7326 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7327 ; /* Handled above. */
fffeedeb
NS
7328 else
7329 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7330
7331 return flags;
7332}
7333
41dbbb37 7334/* Record the fact that DECL was used within the OMP context CTX.
953ff289
DN
7335 IN_CODE is true when real code uses DECL, and false when we should
7336 merely emit default(none) errors. Return true if DECL is going to
7337 be remapped and thus DECL shouldn't be gimplified into its
7338 DECL_VALUE_EXPR (if any). */
7339
7340static bool
7341omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7342{
7343 splay_tree_node n;
7344 unsigned flags = in_code ? GOVD_SEEN : 0;
7345 bool ret = false, shared;
7346
b504a918 7347 if (error_operand_p (decl))
953ff289
DN
7348 return false;
7349
d9a6bd32
JJ
7350 if (ctx->region_type == ORT_NONE)
7351 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7352
953ff289
DN
7353 if (is_global_var (decl))
7354 {
eb077516 7355 /* Threadprivate variables are predetermined. */
953ff289 7356 if (DECL_THREAD_LOCAL_P (decl))
f22f4340 7357 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
953ff289
DN
7358
7359 if (DECL_HAS_VALUE_EXPR_P (decl))
7360 {
ec084613
CP
7361 if (ctx->region_type & ORT_ACC)
7362 /* For OpenACC, defer expansion of value to avoid transferring
7363 privatized common block data instead of the implicitly or explicitly
7364 transferred variables which are in common blocks. */
7365 ;
7366 else
7367 {
7368 tree value = get_base_address (DECL_VALUE_EXPR (decl));
953ff289 7369
ec084613
CP
7370 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7371 return omp_notice_threadprivate_variable (ctx, decl, value);
7372 }
953ff289 7373 }
eb077516
JN
7374
7375 if (gimplify_omp_ctxp->outer_context == NULL
7376 && VAR_P (decl)
629b3d75 7377 && oacc_get_fn_attrib (current_function_decl))
eb077516
JN
7378 {
7379 location_t loc = DECL_SOURCE_LOCATION (decl);
7380
7381 if (lookup_attribute ("omp declare target link",
7382 DECL_ATTRIBUTES (decl)))
7383 {
7384 error_at (loc,
7385 "%qE with %<link%> clause used in %<routine%> function",
7386 DECL_NAME (decl));
7387 return false;
7388 }
7389 else if (!lookup_attribute ("omp declare target",
7390 DECL_ATTRIBUTES (decl)))
7391 {
7392 error_at (loc,
7393 "%qE requires a %<declare%> directive for use "
7394 "in a %<routine%> function", DECL_NAME (decl));
7395 return false;
7396 }
7397 }
953ff289
DN
7398 }
7399
7400 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
d9a6bd32 7401 if ((ctx->region_type & ORT_TARGET) != 0)
acf0174b 7402 {
ec084613
CP
7403 if (ctx->region_type & ORT_ACC)
7404 /* For OpenACC, as remarked above, defer expansion. */
7405 shared = false;
7406 else
7407 shared = true;
7408
7409 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
acf0174b
JJ
7410 if (n == NULL)
7411 {
d9a6bd32 7412 unsigned nflags = flags;
28567c40 7413 if ((ctx->region_type & ORT_ACC) == 0)
d9a6bd32
JJ
7414 {
7415 bool is_declare_target = false;
d9a6bd32
JJ
7416 if (is_global_var (decl)
7417 && varpool_node::get_create (decl)->offloadable)
7418 {
7419 struct gimplify_omp_ctx *octx;
7420 for (octx = ctx->outer_context;
7421 octx; octx = octx->outer_context)
7422 {
7423 n = splay_tree_lookup (octx->variables,
7424 (splay_tree_key)decl);
7425 if (n
7426 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7427 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7428 break;
7429 }
7430 is_declare_target = octx == NULL;
7431 }
28567c40
JJ
7432 if (!is_declare_target)
7433 {
7434 int gdmk;
7435 if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7436 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7437 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7438 == POINTER_TYPE)))
7439 gdmk = GDMK_POINTER;
7440 else if (lang_hooks.decls.omp_scalar_p (decl))
7441 gdmk = GDMK_SCALAR;
7442 else
7443 gdmk = GDMK_AGGREGATE;
7444 if (ctx->defaultmap[gdmk] == 0)
7445 {
7446 tree d = lang_hooks.decls.omp_report_decl (decl);
7447 error ("%qE not specified in enclosing %<target%>",
7448 DECL_NAME (d));
7449 error_at (ctx->location, "enclosing %<target%>");
7450 }
7451 else if (ctx->defaultmap[gdmk]
7452 & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
7453 nflags |= ctx->defaultmap[gdmk];
7454 else
7455 {
7456 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
7457 nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
7458 }
7459 }
d9a6bd32 7460 }
182190f2
NS
7461
7462 struct gimplify_omp_ctx *octx = ctx->outer_context;
7463 if ((ctx->region_type & ORT_ACC) && octx)
acf0174b 7464 {
182190f2
NS
7465 /* Look in outer OpenACC contexts, to see if there's a
7466 data attribute for this variable. */
7467 omp_notice_variable (octx, decl, in_code);
7468
7469 for (; octx; octx = octx->outer_context)
7470 {
7471 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7472 break;
7473 splay_tree_node n2
7474 = splay_tree_lookup (octx->variables,
7475 (splay_tree_key) decl);
7476 if (n2)
7477 {
37d5ad46
JB
7478 if (octx->region_type == ORT_ACC_HOST_DATA)
7479 error ("variable %qE declared in enclosing "
7480 "%<host_data%> region", DECL_NAME (decl));
182190f2 7481 nflags |= GOVD_MAP;
e46c7770
CP
7482 if (octx->region_type == ORT_ACC_DATA
7483 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7484 nflags |= GOVD_MAP_0LEN_ARRAY;
182190f2
NS
7485 goto found_outer;
7486 }
7487 }
acf0174b 7488 }
182190f2 7489
28567c40
JJ
7490 if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
7491 | GOVD_MAP_ALLOC_ONLY)) == flags)
7492 {
7493 tree type = TREE_TYPE (decl);
7494
7495 if (gimplify_omp_ctxp->target_firstprivatize_array_bases
7496 && lang_hooks.decls.omp_privatize_by_reference (decl))
7497 type = TREE_TYPE (type);
7498 if (!lang_hooks.types.omp_mappable_type (type))
7499 {
7500 error ("%qD referenced in target region does not have "
7501 "a mappable type", decl);
7502 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7503 }
7504 else
7505 {
7506 if ((ctx->region_type & ORT_ACC) != 0)
7507 nflags = oacc_default_clause (ctx, decl, flags);
7508 else
7509 nflags |= GOVD_MAP;
7510 }
7511 }
182190f2 7512 found_outer:
d9a6bd32 7513 omp_add_variable (ctx, decl, nflags);
acf0174b
JJ
7514 }
7515 else
f014c653
JJ
7516 {
7517 /* If nothing changed, there's nothing left to do. */
7518 if ((n->value & flags) == flags)
7519 return ret;
1a80d6b8
JJ
7520 flags |= n->value;
7521 n->value = flags;
f014c653 7522 }
acf0174b
JJ
7523 goto do_outer;
7524 }
7525
953ff289
DN
7526 if (n == NULL)
7527 {
74bf76ed 7528 if (ctx->region_type == ORT_WORKSHARE
28567c40 7529 || ctx->region_type == ORT_TASKGROUP
acf0174b 7530 || ctx->region_type == ORT_SIMD
182190f2
NS
7531 || ctx->region_type == ORT_ACC
7532 || (ctx->region_type & ORT_TARGET_DATA) != 0)
953ff289
DN
7533 goto do_outer;
7534
72500605 7535 flags = omp_default_clause (ctx, decl, in_code, flags);
953ff289 7536
a68ab351
JJ
7537 if ((flags & GOVD_PRIVATE)
7538 && lang_hooks.decls.omp_private_outer_ref (decl))
7539 flags |= GOVD_PRIVATE_OUTER_REF;
7540
953ff289
DN
7541 omp_add_variable (ctx, decl, flags);
7542
7543 shared = (flags & GOVD_SHARED) != 0;
7544 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7545 goto do_outer;
7546 }
7547
3ad6b266
JJ
7548 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
7549 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
da3d46cb
JJ
7550 && DECL_SIZE (decl))
7551 {
7552 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7553 {
7554 splay_tree_node n2;
7555 tree t = DECL_VALUE_EXPR (decl);
7556 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7557 t = TREE_OPERAND (t, 0);
7558 gcc_assert (DECL_P (t));
7559 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7560 n2->value |= GOVD_SEEN;
7561 }
7562 else if (lang_hooks.decls.omp_privatize_by_reference (decl)
7563 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
7564 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
7565 != INTEGER_CST))
7566 {
7567 splay_tree_node n2;
7568 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7569 gcc_assert (DECL_P (t));
7570 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7571 if (n2)
ebff5c3f 7572 omp_notice_variable (ctx, t, true);
da3d46cb 7573 }
3ad6b266
JJ
7574 }
7575
ec084613
CP
7576 if (ctx->region_type & ORT_ACC)
7577 /* For OpenACC, as remarked above, defer expansion. */
7578 shared = false;
7579 else
7580 shared = ((flags | n->value) & GOVD_SHARED) != 0;
953ff289
DN
7581 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7582
7583 /* If nothing changed, there's nothing left to do. */
7584 if ((n->value & flags) == flags)
7585 return ret;
7586 flags |= n->value;
7587 n->value = flags;
7588
7589 do_outer:
7590 /* If the variable is private in the current context, then we don't
7591 need to propagate anything to an outer context. */
a68ab351 7592 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
953ff289 7593 return ret;
41b37d5e
JJ
7594 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7595 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7596 return ret;
7597 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7598 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7599 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7600 return ret;
953ff289
DN
7601 if (ctx->outer_context
7602 && omp_notice_variable (ctx->outer_context, decl, in_code))
7603 return true;
7604 return ret;
7605}
7606
7607/* Verify that DECL is private within CTX. If there's specific information
7608 to the contrary in the innermost scope, generate an error. */
7609
7610static bool
f7468577 7611omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
953ff289
DN
7612{
7613 splay_tree_node n;
7614
7615 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7616 if (n != NULL)
7617 {
7618 if (n->value & GOVD_SHARED)
7619 {
7620 if (ctx == gimplify_omp_ctxp)
f6a5ffbf 7621 {
74bf76ed
JJ
7622 if (simd)
7623 error ("iteration variable %qE is predetermined linear",
7624 DECL_NAME (decl));
7625 else
7626 error ("iteration variable %qE should be private",
7627 DECL_NAME (decl));
f6a5ffbf
JJ
7628 n->value = GOVD_PRIVATE;
7629 return true;
7630 }
7631 else
7632 return false;
953ff289 7633 }
761041be
JJ
7634 else if ((n->value & GOVD_EXPLICIT) != 0
7635 && (ctx == gimplify_omp_ctxp
a68ab351 7636 || (ctx->region_type == ORT_COMBINED_PARALLEL
761041be
JJ
7637 && gimplify_omp_ctxp->outer_context == ctx)))
7638 {
7639 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
4f1e4960
JM
7640 error ("iteration variable %qE should not be firstprivate",
7641 DECL_NAME (decl));
761041be 7642 else if ((n->value & GOVD_REDUCTION) != 0)
4f1e4960
JM
7643 error ("iteration variable %qE should not be reduction",
7644 DECL_NAME (decl));
28567c40 7645 else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
e01d41e5
JJ
7646 error ("iteration variable %qE should not be linear",
7647 DECL_NAME (decl));
761041be 7648 }
ca2b1311
JJ
7649 return (ctx == gimplify_omp_ctxp
7650 || (ctx->region_type == ORT_COMBINED_PARALLEL
7651 && gimplify_omp_ctxp->outer_context == ctx));
953ff289
DN
7652 }
7653
74bf76ed 7654 if (ctx->region_type != ORT_WORKSHARE
28567c40 7655 && ctx->region_type != ORT_TASKGROUP
182190f2
NS
7656 && ctx->region_type != ORT_SIMD
7657 && ctx->region_type != ORT_ACC)
953ff289 7658 return false;
f6a5ffbf 7659 else if (ctx->outer_context)
74bf76ed 7660 return omp_is_private (ctx->outer_context, decl, simd);
ca2b1311 7661 return false;
953ff289
DN
7662}
7663
07b7aade
JJ
7664/* Return true if DECL is private within a parallel region
7665 that binds to the current construct's context or in parallel
7666 region's REDUCTION clause. */
7667
7668static bool
cab37c89 7669omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
07b7aade
JJ
7670{
7671 splay_tree_node n;
7672
7673 do
7674 {
7675 ctx = ctx->outer_context;
7676 if (ctx == NULL)
d9a6bd32
JJ
7677 {
7678 if (is_global_var (decl))
7679 return false;
7680
7681 /* References might be private, but might be shared too,
7682 when checking for copyprivate, assume they might be
7683 private, otherwise assume they might be shared. */
7684 if (copyprivate)
7685 return true;
7686
7687 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7688 return false;
7689
7690 /* Treat C++ privatized non-static data members outside
7691 of the privatization the same. */
7692 if (omp_member_access_dummy_var (decl))
7693 return false;
7694
7695 return true;
7696 }
07b7aade 7697
e01d41e5
JJ
7698 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7699
7700 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7701 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
acf0174b
JJ
7702 continue;
7703
07b7aade 7704 if (n != NULL)
d9a6bd32
JJ
7705 {
7706 if ((n->value & GOVD_LOCAL) != 0
7707 && omp_member_access_dummy_var (decl))
7708 return false;
7709 return (n->value & GOVD_SHARED) == 0;
7710 }
07b7aade 7711 }
74bf76ed 7712 while (ctx->region_type == ORT_WORKSHARE
28567c40 7713 || ctx->region_type == ORT_TASKGROUP
182190f2
NS
7714 || ctx->region_type == ORT_SIMD
7715 || ctx->region_type == ORT_ACC);
07b7aade
JJ
7716 return false;
7717}
7718
d9a6bd32
JJ
7719/* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7720
7721static tree
7722find_decl_expr (tree *tp, int *walk_subtrees, void *data)
7723{
7724 tree t = *tp;
7725
7726 /* If this is the DECL_EXPR for the DECL we are looking for, return it. */
7727 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
7728 return t;
7729
7730 if (IS_TYPE_OR_DECL_P (t))
7731 *walk_subtrees = 0;
7732 return NULL_TREE;
7733}
7734
28567c40
JJ
7735/* If *LIST_P contains any OpenMP depend clauses with iterators,
7736 lower all the depend clauses by populating corresponding depend
7737 array. Returns 0 if there are no such depend clauses, or
7738 2 if all depend clauses should be removed, 1 otherwise. */
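/* For example (illustrative only), a clause such as

     #pragma omp task depend (iterator (i = 0 : n), in : a[i])

   expands to n separate 'in' dependences.  The first pass below counts
   them into counts[2] (and the non-iterator clauses into n[]), the array
   of dependence addresses is then allocated, and the second pass stores
   &a[i] for each iterator value into that array.  */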
7739
7740static int
7741gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
7742{
7743 tree c;
7744 gimple *g;
7745 size_t n[4] = { 0, 0, 0, 0 };
7746 bool unused[4];
7747 tree counts[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
7748 tree last_iter = NULL_TREE, last_count = NULL_TREE;
7749 size_t i, j;
7750 location_t first_loc = UNKNOWN_LOCATION;
7751
7752 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
7753 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7754 {
7755 switch (OMP_CLAUSE_DEPEND_KIND (c))
7756 {
7757 case OMP_CLAUSE_DEPEND_IN:
7758 i = 2;
7759 break;
7760 case OMP_CLAUSE_DEPEND_OUT:
7761 case OMP_CLAUSE_DEPEND_INOUT:
7762 i = 0;
7763 break;
7764 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
7765 i = 1;
7766 break;
7767 case OMP_CLAUSE_DEPEND_DEPOBJ:
7768 i = 3;
7769 break;
7770 case OMP_CLAUSE_DEPEND_SOURCE:
7771 case OMP_CLAUSE_DEPEND_SINK:
7772 continue;
7773 default:
7774 gcc_unreachable ();
7775 }
7776 tree t = OMP_CLAUSE_DECL (c);
7777 if (first_loc == UNKNOWN_LOCATION)
7778 first_loc = OMP_CLAUSE_LOCATION (c);
7779 if (TREE_CODE (t) == TREE_LIST
7780 && TREE_PURPOSE (t)
7781 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
7782 {
7783 if (TREE_PURPOSE (t) != last_iter)
7784 {
7785 tree tcnt = size_one_node;
7786 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
7787 {
7788 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
7789 is_gimple_val, fb_rvalue) == GS_ERROR
7790 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
7791 is_gimple_val, fb_rvalue) == GS_ERROR
7792 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
7793 is_gimple_val, fb_rvalue) == GS_ERROR
7794 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
7795 is_gimple_val, fb_rvalue)
7796 == GS_ERROR))
7797 return 2;
7798 tree var = TREE_VEC_ELT (it, 0);
7799 tree begin = TREE_VEC_ELT (it, 1);
7800 tree end = TREE_VEC_ELT (it, 2);
7801 tree step = TREE_VEC_ELT (it, 3);
7802 tree orig_step = TREE_VEC_ELT (it, 4);
7803 tree type = TREE_TYPE (var);
7804 tree stype = TREE_TYPE (step);
7805 location_t loc = DECL_SOURCE_LOCATION (var);
7806 tree endmbegin;
7807 /* Compute count for this iterator as
7808 orig_step > 0
7809 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
7810 : (begin > end ? (end - begin + (step + 1)) / step : 0)
7811 and compute product of those for the entire depend
7812 clause. */
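/* E.g. for iterator (i = 0 : 10 : 3) with orig_step > 0 this gives
   (10 - 0 + (3 - 1)) / 3 = 4 iterations (i = 0, 3, 6, 9).  */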
7813 if (POINTER_TYPE_P (type))
7814 endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
7815 stype, end, begin);
7816 else
7817 endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
7818 end, begin);
7819 tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
7820 step,
7821 build_int_cst (stype, 1));
7822 tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
7823 build_int_cst (stype, 1));
7824 tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
7825 unshare_expr (endmbegin),
7826 stepm1);
7827 pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
7828 pos, step);
7829 tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
7830 endmbegin, stepp1);
7831 if (TYPE_UNSIGNED (stype))
7832 {
7833 neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
7834 step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
7835 }
7836 neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
7837 neg, step);
7838 step = NULL_TREE;
7839 tree cond = fold_build2_loc (loc, LT_EXPR,
7840 boolean_type_node,
7841 begin, end);
7842 pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
7843 build_int_cst (stype, 0));
7844 cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
7845 end, begin);
7846 neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
7847 build_int_cst (stype, 0));
7848 tree osteptype = TREE_TYPE (orig_step);
7849 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
7850 orig_step,
7851 build_int_cst (osteptype, 0));
7852 tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
7853 cond, pos, neg);
7854 cnt = fold_convert_loc (loc, sizetype, cnt);
7855 if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
7856 fb_rvalue) == GS_ERROR)
7857 return 2;
7858 tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
7859 }
7860 if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
7861 fb_rvalue) == GS_ERROR)
7862 return 2;
7863 last_iter = TREE_PURPOSE (t);
7864 last_count = tcnt;
7865 }
7866 if (counts[i] == NULL_TREE)
7867 counts[i] = last_count;
7868 else
7869 counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
7870 PLUS_EXPR, counts[i], last_count);
7871 }
7872 else
7873 n[i]++;
7874 }
7875 for (i = 0; i < 4; i++)
7876 if (counts[i])
7877 break;
7878 if (i == 4)
7879 return 0;
7880
7881 tree total = size_zero_node;
7882 for (i = 0; i < 4; i++)
7883 {
7884 unused[i] = counts[i] == NULL_TREE && n[i] == 0;
7885 if (counts[i] == NULL_TREE)
7886 counts[i] = size_zero_node;
7887 if (n[i])
7888 counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
7889 if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
7890 fb_rvalue) == GS_ERROR)
7891 return 2;
7892 total = size_binop (PLUS_EXPR, total, counts[i]);
7893 }
7894
7895 if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
7896 == GS_ERROR)
7897 return 2;
7898 bool is_old = unused[1] && unused[3];
7899 tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
7900 size_int (is_old ? 1 : 4));
7901 tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
7902 tree array = create_tmp_var_raw (type);
7903 TREE_ADDRESSABLE (array) = 1;
507de5ee 7904 if (!poly_int_tree_p (totalpx))
28567c40
JJ
7905 {
7906 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
7907 gimplify_type_sizes (TREE_TYPE (array), pre_p);
7908 if (gimplify_omp_ctxp)
7909 {
7910 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7911 while (ctx
7912 && (ctx->region_type == ORT_WORKSHARE
7913 || ctx->region_type == ORT_TASKGROUP
7914 || ctx->region_type == ORT_SIMD
7915 || ctx->region_type == ORT_ACC))
7916 ctx = ctx->outer_context;
7917 if (ctx)
7918 omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
7919 }
7920 gimplify_vla_decl (array, pre_p);
7921 }
7922 else
7923 gimple_add_tmp_var (array);
7924 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7925 NULL_TREE);
7926 tree tem;
7927 if (!is_old)
7928 {
7929 tem = build2 (MODIFY_EXPR, void_type_node, r,
7930 build_int_cst (ptr_type_node, 0));
7931 gimplify_and_add (tem, pre_p);
7932 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7933 NULL_TREE);
7934 }
7935 tem = build2 (MODIFY_EXPR, void_type_node, r,
7936 fold_convert (ptr_type_node, total));
7937 gimplify_and_add (tem, pre_p);
7938 for (i = 1; i < (is_old ? 2 : 4); i++)
7939 {
7940 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
7941 NULL_TREE, NULL_TREE);
7942 tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
7943 gimplify_and_add (tem, pre_p);
7944 }
7945
7946 tree cnts[4];
7947 for (j = 4; j; j--)
7948 if (!unused[j - 1])
7949 break;
7950 for (i = 0; i < 4; i++)
7951 {
7952 if (i && (i >= j || unused[i - 1]))
7953 {
7954 cnts[i] = cnts[i - 1];
7955 continue;
7956 }
7957 cnts[i] = create_tmp_var (sizetype);
7958 if (i == 0)
7959 g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
7960 else
7961 {
7962 tree t;
7963 if (is_old)
7964 t = size_binop (PLUS_EXPR, counts[0], size_int (2));
7965 else
7966 t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
7967 if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
7968 == GS_ERROR)
7969 return 2;
7970 g = gimple_build_assign (cnts[i], t);
7971 }
7972 gimple_seq_add_stmt (pre_p, g);
7973 }
7974
7975 last_iter = NULL_TREE;
7976 tree last_bind = NULL_TREE;
7977 tree *last_body = NULL;
7978 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
7979 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7980 {
7981 switch (OMP_CLAUSE_DEPEND_KIND (c))
7982 {
7983 case OMP_CLAUSE_DEPEND_IN:
7984 i = 2;
7985 break;
7986 case OMP_CLAUSE_DEPEND_OUT:
7987 case OMP_CLAUSE_DEPEND_INOUT:
7988 i = 0;
7989 break;
7990 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
7991 i = 1;
7992 break;
7993 case OMP_CLAUSE_DEPEND_DEPOBJ:
7994 i = 3;
7995 break;
7996 case OMP_CLAUSE_DEPEND_SOURCE:
7997 case OMP_CLAUSE_DEPEND_SINK:
7998 continue;
7999 default:
8000 gcc_unreachable ();
8001 }
8002 tree t = OMP_CLAUSE_DECL (c);
8003 if (TREE_CODE (t) == TREE_LIST
8004 && TREE_PURPOSE (t)
8005 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8006 {
8007 if (TREE_PURPOSE (t) != last_iter)
8008 {
8009 if (last_bind)
8010 gimplify_and_add (last_bind, pre_p);
8011 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8012 last_bind = build3 (BIND_EXPR, void_type_node,
8013 BLOCK_VARS (block), NULL, block);
8014 TREE_SIDE_EFFECTS (last_bind) = 1;
8015 SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
8016 tree *p = &BIND_EXPR_BODY (last_bind);
8017 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8018 {
8019 tree var = TREE_VEC_ELT (it, 0);
8020 tree begin = TREE_VEC_ELT (it, 1);
8021 tree end = TREE_VEC_ELT (it, 2);
8022 tree step = TREE_VEC_ELT (it, 3);
8023 tree orig_step = TREE_VEC_ELT (it, 4);
8024 tree type = TREE_TYPE (var);
8025 location_t loc = DECL_SOURCE_LOCATION (var);
8026 /* Emit:
8027 var = begin;
8028 goto cond_label;
8029 beg_label:
8030 ...
8031 var = var + step;
8032 cond_label:
8033 if (orig_step > 0) {
8034 if (var < end) goto beg_label;
8035 } else {
8036 if (var > end) goto beg_label;
8037 }
8038 for each iterator, with inner iterators added to
8039 the ... above. */
8040 tree beg_label = create_artificial_label (loc);
8041 tree cond_label = NULL_TREE;
8042 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8043 var, begin);
8044 append_to_statement_list_force (tem, p);
8045 tem = build_and_jump (&cond_label);
8046 append_to_statement_list_force (tem, p);
8047 tem = build1 (LABEL_EXPR, void_type_node, beg_label);
8048 append_to_statement_list (tem, p);
8049 tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
8050 NULL_TREE, NULL_TREE);
8051 TREE_SIDE_EFFECTS (bind) = 1;
8052 SET_EXPR_LOCATION (bind, loc);
8053 append_to_statement_list_force (bind, p);
8054 if (POINTER_TYPE_P (type))
8055 tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
8056 var, fold_convert_loc (loc, sizetype,
8057 step));
8058 else
8059 tem = build2_loc (loc, PLUS_EXPR, type, var, step);
8060 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8061 var, tem);
8062 append_to_statement_list_force (tem, p);
8063 tem = build1 (LABEL_EXPR, void_type_node, cond_label);
8064 append_to_statement_list (tem, p);
8065 tree cond = fold_build2_loc (loc, LT_EXPR,
8066 boolean_type_node,
8067 var, end);
8068 tree pos
8069 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8070 cond, build_and_jump (&beg_label),
8071 void_node);
8072 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8073 var, end);
8074 tree neg
8075 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8076 cond, build_and_jump (&beg_label),
8077 void_node);
8078 tree osteptype = TREE_TYPE (orig_step);
8079 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8080 orig_step,
8081 build_int_cst (osteptype, 0));
8082 tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
8083 cond, pos, neg);
8084 append_to_statement_list_force (tem, p);
8085 p = &BIND_EXPR_BODY (bind);
8086 }
8087 last_body = p;
8088 }
8089 last_iter = TREE_PURPOSE (t);
8090 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8091 {
8092 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
8093 0), last_body);
8094 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8095 }
8096 if (error_operand_p (TREE_VALUE (t)))
8097 return 2;
8098 TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
8099 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8100 NULL_TREE, NULL_TREE);
8101 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8102 void_type_node, r, TREE_VALUE (t));
8103 append_to_statement_list_force (tem, last_body);
8104 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8105 void_type_node, cnts[i],
8106 size_binop (PLUS_EXPR, cnts[i], size_int (1)));
8107 append_to_statement_list_force (tem, last_body);
8108 TREE_VALUE (t) = null_pointer_node;
8109 }
8110 else
8111 {
8112 if (last_bind)
8113 {
8114 gimplify_and_add (last_bind, pre_p);
8115 last_bind = NULL_TREE;
8116 }
8117 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8118 {
8119 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8120 NULL, is_gimple_val, fb_rvalue);
8121 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8122 }
8123 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8124 return 2;
8125 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8126 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8127 is_gimple_val, fb_rvalue) == GS_ERROR)
8128 return 2;
8129 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8130 NULL_TREE, NULL_TREE);
8131 tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
8132 gimplify_and_add (tem, pre_p);
8133 g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR, cnts[i],
8134 size_int (1)));
8135 gimple_seq_add_stmt (pre_p, g);
8136 }
8137 }
8138 if (last_bind)
8139 gimplify_and_add (last_bind, pre_p);
8140 tree cond = boolean_false_node;
8141 if (is_old)
8142 {
8143 if (!unused[0])
8144 cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
8145 size_binop_loc (first_loc, PLUS_EXPR, counts[0],
8146 size_int (2)));
8147 if (!unused[2])
8148 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8149 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8150 cnts[2],
8151 size_binop_loc (first_loc, PLUS_EXPR,
8152 totalpx,
8153 size_int (1))));
8154 }
8155 else
8156 {
8157 tree prev = size_int (5);
8158 for (i = 0; i < 4; i++)
8159 {
8160 if (unused[i])
8161 continue;
8162 prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
8163 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8164 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8165 cnts[i], unshare_expr (prev)));
8166 }
8167 }
8168 tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
8169 build_call_expr_loc (first_loc,
8170 builtin_decl_explicit (BUILT_IN_TRAP),
8171 0), void_node);
8172 gimplify_and_add (tem, pre_p);
8173 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
8174 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
8175 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
8176 OMP_CLAUSE_CHAIN (c) = *list_p;
8177 *list_p = c;
8178 return 1;
8179}
8180
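As a hedged illustration of what the lowering above handles (the function and variable names below are invented, not taken from GCC), an OpenMP 5.0 'depend' clause with an 'iterator' modifier is flattened into the runtime array of addresses and per-kind counts built in the code above:

void
use_depend_iterator (int *a, int n)
{
  /* One 'in' dependence is recorded per a[i], 0 <= i < n; the
     gimplified code counts the iterations, allocates the address
     array, fills it in a generated loop, and checks the counts
     against the totals before handing the array to the runtime.  */
  #pragma omp task depend (iterator (i = 0 : n), in : a[i])
  {
    a[0] = n;
  }
}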
4d83edf7
JB
8181/* Insert a GOMP_MAP_ALLOC or GOMP_MAP_RELEASE node following a
8182 GOMP_MAP_STRUCT mapping. C is an always_pointer mapping. STRUCT_NODE is
8183 the struct node to insert the new mapping after (when the struct node is
8184 initially created). PREV_NODE is the first of two or three mappings for a
8185 pointer, and is either:
8186 - the node before C, when a pair of mappings is used, e.g. for a C/C++
8187 array section.
8188 - not the node before C. This is true when we have a reference-to-pointer
8189 type (with a mapping for the reference and for the pointer), or for
8190 Fortran derived-type mappings with a GOMP_MAP_TO_PSET.
8191 If SCP is non-null, the new node is inserted before *SCP.
 8192 If SCP is null, the new node is inserted before PREV_NODE.
 8193 The return value is:
8194 - PREV_NODE, if SCP is non-null.
8195 - The newly-created ALLOC or RELEASE node, if SCP is null.
8196 - The second newly-created ALLOC or RELEASE node, if we are mapping a
8197 reference to a pointer. */
8198
8199static tree
8200insert_struct_comp_map (enum tree_code code, tree c, tree struct_node,
8201 tree prev_node, tree *scp)
8202{
8203 enum gomp_map_kind mkind
4fd872bc
JB
8204 = (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
8205 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
4d83edf7
JB
8206
8207 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
8208 tree cl = scp ? prev_node : c2;
8209 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8210 OMP_CLAUSE_DECL (c2) = unshare_expr (OMP_CLAUSE_DECL (c));
8211 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : prev_node;
4fd872bc
JB
8212 if (OMP_CLAUSE_CHAIN (prev_node) != c
8213 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node)) == OMP_CLAUSE_MAP
8214 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8215 == GOMP_MAP_TO_PSET))
8216 OMP_CLAUSE_SIZE (c2) = OMP_CLAUSE_SIZE (OMP_CLAUSE_CHAIN (prev_node));
8217 else
8218 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (ptr_type_node);
4d83edf7
JB
8219 if (struct_node)
8220 OMP_CLAUSE_CHAIN (struct_node) = c2;
8221
8222 /* We might need to create an additional mapping if we have a reference to a
4fd872bc
JB
8223 pointer (in C++). Don't do this if we have something other than a
8224 GOMP_MAP_ALWAYS_POINTER though, i.e. a GOMP_MAP_TO_PSET. */
8225 if (OMP_CLAUSE_CHAIN (prev_node) != c
8226 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node)) == OMP_CLAUSE_MAP
8227 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8228 == GOMP_MAP_ALWAYS_POINTER)
8229 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8230 == GOMP_MAP_ATTACH_DETACH)))
4d83edf7
JB
8231 {
8232 tree c4 = OMP_CLAUSE_CHAIN (prev_node);
8233 tree c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
8234 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8235 OMP_CLAUSE_DECL (c3) = unshare_expr (OMP_CLAUSE_DECL (c4));
8236 OMP_CLAUSE_SIZE (c3) = TYPE_SIZE_UNIT (ptr_type_node);
8237 OMP_CLAUSE_CHAIN (c3) = prev_node;
8238 if (!scp)
8239 OMP_CLAUSE_CHAIN (c2) = c3;
8240 else
8241 cl = c3;
8242 }
8243
8244 if (scp)
8245 *scp = c2;
8246
8247 return cl;
8248}
8249
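A hedged sketch of user-level code that exercises this helper (the struct and function names are invented): mapping an array section through a pointer member of a struct on an OpenMP 'target' construct groups the member under a GOMP_MAP_STRUCT node, and the helper above supplies the extra ALLOC (or RELEASE, on exit data) node for the pointer itself.

struct pt_buf { int *data; int len; };

void
map_struct_member (struct pt_buf s, int n)
{
  /* Roughly: s.data[0:n] is mapped as a struct-member section; the
     pointer member s.data gets an additional alloc/release mapping
     so it can be rebound on the device.  Illustrative only.  */
  #pragma omp target map(tofrom: s.data[0:n])
  for (int i = 0; i < n; i++)
    s.data[i] += 1;
}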
8250/* Strip ARRAY_REFS or an indirect ref off BASE, find the containing object,
 8251 and set *BITPOSP to the bit offset and *POFFSETP to the byte offset of the access.
8252 If BASE_REF is non-NULL and the containing object is a reference, set
8253 *BASE_REF to that reference before dereferencing the object.
 8254 If BASE_REF is NULL, check that the containing object is a COMPONENT_REF
 8255 with array type, else return NULL. 
8256
8257static tree
8258extract_base_bit_offset (tree base, tree *base_ref, poly_int64 *bitposp,
8259 poly_offset_int *poffsetp)
8260{
8261 tree offset;
8262 poly_int64 bitsize, bitpos;
8263 machine_mode mode;
8264 int unsignedp, reversep, volatilep = 0;
8265 poly_offset_int poffset;
8266
8267 if (base_ref)
8268 {
8269 *base_ref = NULL_TREE;
8270
8271 while (TREE_CODE (base) == ARRAY_REF)
8272 base = TREE_OPERAND (base, 0);
8273
8274 if (TREE_CODE (base) == INDIRECT_REF)
8275 base = TREE_OPERAND (base, 0);
8276 }
8277 else
8278 {
8279 if (TREE_CODE (base) == ARRAY_REF)
8280 {
8281 while (TREE_CODE (base) == ARRAY_REF)
8282 base = TREE_OPERAND (base, 0);
8283 if (TREE_CODE (base) != COMPONENT_REF
8284 || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE)
8285 return NULL_TREE;
8286 }
8287 else if (TREE_CODE (base) == INDIRECT_REF
8288 && TREE_CODE (TREE_OPERAND (base, 0)) == COMPONENT_REF
8289 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
8290 == REFERENCE_TYPE))
8291 base = TREE_OPERAND (base, 0);
8292 }
8293
8294 base = get_inner_reference (base, &bitsize, &bitpos, &offset, &mode,
8295 &unsignedp, &reversep, &volatilep);
8296
8297 tree orig_base = base;
8298
8299 if ((TREE_CODE (base) == INDIRECT_REF
8300 || (TREE_CODE (base) == MEM_REF
8301 && integer_zerop (TREE_OPERAND (base, 1))))
8302 && DECL_P (TREE_OPERAND (base, 0))
8303 && TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0))) == REFERENCE_TYPE)
8304 base = TREE_OPERAND (base, 0);
8305
8306 gcc_assert (offset == NULL_TREE || poly_int_tree_p (offset));
8307
8308 if (offset)
8309 poffset = wi::to_poly_offset (offset);
8310 else
8311 poffset = 0;
8312
8313 if (maybe_ne (bitpos, 0))
8314 poffset += bits_to_bytes_round_down (bitpos);
8315
8316 *bitposp = bitpos;
8317 *poffsetp = poffset;
8318
8319 /* Set *BASE_REF if BASE was a dereferenced reference variable. */
8320 if (base_ref && orig_base != base)
8321 *base_ref = orig_base;
8322
8323 return base;
8324}
8325
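As a rough, invented example of the decomposition performed above: for a clause decl written in source as o.in.x[2], the helper strips the ARRAY_REF and COMPONENT_REFs, returns the base object 'o', and reports the location of in.x[2] inside it as a byte offset plus a bit position.

struct inner_t { int x[4]; };
struct outer_t { double d; struct inner_t in; };

/* With 'struct outer_t o', the access o.in.x[2] would typically
   decompose to base 'o' at byte offset sizeof (double) + 2 * sizeof (int)
   -- an illustration of the intent, not output produced by this file.  */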
41dbbb37 8326/* Scan the OMP clauses in *LIST_P, installing mappings into a new
953ff289
DN
 8327 omp context and, where needed, into enclosing contexts. */
8328
8329static void
726a989a 8330gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
d9a6bd32
JJ
8331 enum omp_region_type region_type,
8332 enum tree_code code)
953ff289
DN
8333{
8334 struct gimplify_omp_ctx *ctx, *outer_ctx;
8335 tree c;
d9a6bd32 8336 hash_map<tree, tree> *struct_map_to_clause = NULL;
4fd872bc 8337 hash_set<tree> *struct_deref_set = NULL;
7e47198b 8338 tree *prev_list_p = NULL, *orig_list_p = list_p;
28567c40
JJ
8339 int handled_depend_iterators = -1;
8340 int nowait = -1;
953ff289 8341
a68ab351 8342 ctx = new_omp_context (region_type);
135df52c 8343 ctx->code = code;
953ff289 8344 outer_ctx = ctx->outer_context;
b4c3a85b 8345 if (code == OMP_TARGET)
d9a6bd32 8346 {
b4c3a85b 8347 if (!lang_GNU_Fortran ())
28567c40
JJ
8348 ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
8349 ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
d9a6bd32
JJ
8350 }
8351 if (!lang_GNU_Fortran ())
8352 switch (code)
8353 {
8354 case OMP_TARGET:
8355 case OMP_TARGET_DATA:
8356 case OMP_TARGET_ENTER_DATA:
8357 case OMP_TARGET_EXIT_DATA:
7ba8651e 8358 case OACC_DECLARE:
37d5ad46 8359 case OACC_HOST_DATA:
8e36332c
CP
8360 case OACC_PARALLEL:
8361 case OACC_KERNELS:
d9a6bd32
JJ
8362 ctx->target_firstprivatize_array_bases = true;
8363 default:
8364 break;
8365 }
953ff289
DN
8366
8367 while ((c = *list_p) != NULL)
8368 {
953ff289
DN
8369 bool remove = false;
8370 bool notice_outer = true;
07b7aade 8371 const char *check_non_private = NULL;
953ff289
DN
8372 unsigned int flags;
8373 tree decl;
8374
aaf46ef9 8375 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
8376 {
8377 case OMP_CLAUSE_PRIVATE:
8378 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
a68ab351
JJ
8379 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
8380 {
8381 flags |= GOVD_PRIVATE_OUTER_REF;
8382 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
8383 }
8384 else
8385 notice_outer = false;
953ff289
DN
8386 goto do_add;
8387 case OMP_CLAUSE_SHARED:
8388 flags = GOVD_SHARED | GOVD_EXPLICIT;
8389 goto do_add;
8390 case OMP_CLAUSE_FIRSTPRIVATE:
8391 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
07b7aade 8392 check_non_private = "firstprivate";
953ff289
DN
8393 goto do_add;
8394 case OMP_CLAUSE_LASTPRIVATE:
28567c40
JJ
8395 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8396 switch (code)
8397 {
8398 case OMP_DISTRIBUTE:
8399 error_at (OMP_CLAUSE_LOCATION (c),
8400 "conditional %<lastprivate%> clause on "
bf38f7e9 8401 "%qs construct", "distribute");
28567c40
JJ
8402 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8403 break;
8404 case OMP_TASKLOOP:
8405 error_at (OMP_CLAUSE_LOCATION (c),
8406 "conditional %<lastprivate%> clause on "
bf38f7e9 8407 "%qs construct", "taskloop");
28567c40
JJ
8408 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8409 break;
8410 default:
8411 break;
8412 }
953ff289 8413 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
554a530f
JJ
8414 if (code != OMP_LOOP)
8415 check_non_private = "lastprivate";
41b37d5e 8416 decl = OMP_CLAUSE_DECL (c);
b4c3a85b 8417 if (error_operand_p (decl))
41b37d5e 8418 goto do_add;
28567c40
JJ
8419 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
8420 && !lang_hooks.decls.omp_scalar_p (decl))
8421 {
8422 error_at (OMP_CLAUSE_LOCATION (c),
8423 "non-scalar variable %qD in conditional "
8424 "%<lastprivate%> clause", decl);
8425 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8426 }
4302dd3d 8427 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
7855700e 8428 flags |= GOVD_LASTPRIVATE_CONDITIONAL;
28567c40
JJ
8429 if (outer_ctx
8430 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
8431 || ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
8432 == ORT_COMBINED_TEAMS))
8433 && splay_tree_lookup (outer_ctx->variables,
8434 (splay_tree_key) decl) == NULL)
e01d41e5
JJ
8435 {
8436 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
8437 if (outer_ctx->outer_context)
8438 omp_notice_variable (outer_ctx->outer_context, decl, true);
8439 }
d9a6bd32
JJ
8440 else if (outer_ctx
8441 && (outer_ctx->region_type & ORT_TASK) != 0
8442 && outer_ctx->combined_loop
8443 && splay_tree_lookup (outer_ctx->variables,
8444 (splay_tree_key) decl) == NULL)
e01d41e5
JJ
8445 {
8446 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8447 if (outer_ctx->outer_context)
8448 omp_notice_variable (outer_ctx->outer_context, decl, true);
8449 }
41b37d5e 8450 else if (outer_ctx
182190f2
NS
8451 && (outer_ctx->region_type == ORT_WORKSHARE
8452 || outer_ctx->region_type == ORT_ACC)
41b37d5e
JJ
8453 && outer_ctx->combined_loop
8454 && splay_tree_lookup (outer_ctx->variables,
8455 (splay_tree_key) decl) == NULL
8456 && !omp_check_private (outer_ctx, decl, false))
8457 {
8458 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8459 if (outer_ctx->outer_context
8460 && (outer_ctx->outer_context->region_type
8461 == ORT_COMBINED_PARALLEL)
8462 && splay_tree_lookup (outer_ctx->outer_context->variables,
8463 (splay_tree_key) decl) == NULL)
e01d41e5
JJ
8464 {
8465 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
8466 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
8467 if (octx->outer_context)
b4c3a85b
JJ
8468 {
8469 octx = octx->outer_context;
8470 if (octx->region_type == ORT_WORKSHARE
8471 && octx->combined_loop
8472 && splay_tree_lookup (octx->variables,
8473 (splay_tree_key) decl) == NULL
8474 && !omp_check_private (octx, decl, false))
8475 {
8476 omp_add_variable (octx, decl,
8477 GOVD_LASTPRIVATE | GOVD_SEEN);
8478 octx = octx->outer_context;
8479 if (octx
28567c40
JJ
8480 && ((octx->region_type & ORT_COMBINED_TEAMS)
8481 == ORT_COMBINED_TEAMS)
b4c3a85b
JJ
8482 && (splay_tree_lookup (octx->variables,
8483 (splay_tree_key) decl)
8484 == NULL))
8485 {
8486 omp_add_variable (octx, decl,
8487 GOVD_SHARED | GOVD_SEEN);
8488 octx = octx->outer_context;
8489 }
8490 }
8491 if (octx)
8492 omp_notice_variable (octx, decl, true);
8493 }
e01d41e5
JJ
8494 }
8495 else if (outer_ctx->outer_context)
8496 omp_notice_variable (outer_ctx->outer_context, decl, true);
41b37d5e 8497 }
953ff289
DN
8498 goto do_add;
8499 case OMP_CLAUSE_REDUCTION:
28567c40
JJ
8500 if (OMP_CLAUSE_REDUCTION_TASK (c))
8501 {
8502 if (region_type == ORT_WORKSHARE)
8503 {
8504 if (nowait == -1)
8505 nowait = omp_find_clause (*list_p,
8506 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8507 if (nowait
8508 && (outer_ctx == NULL
8509 || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
8510 {
8511 error_at (OMP_CLAUSE_LOCATION (c),
8512 "%<task%> reduction modifier on a construct "
8513 "with a %<nowait%> clause");
8514 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8515 }
8516 }
8517 else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
8518 {
8519 error_at (OMP_CLAUSE_LOCATION (c),
8520 "invalid %<task%> reduction modifier on construct "
8521 "other than %<parallel%>, %<for%> or %<sections%>");
8522 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8523 }
8524 }
bf38f7e9
JJ
8525 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
8526 switch (code)
8527 {
8528 case OMP_SECTIONS:
8529 error_at (OMP_CLAUSE_LOCATION (c),
8530 "%<inscan%> %<reduction%> clause on "
8531 "%qs construct", "sections");
8532 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8533 break;
8534 case OMP_PARALLEL:
8535 error_at (OMP_CLAUSE_LOCATION (c),
8536 "%<inscan%> %<reduction%> clause on "
8537 "%qs construct", "parallel");
8538 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8539 break;
8540 case OMP_TEAMS:
8541 error_at (OMP_CLAUSE_LOCATION (c),
8542 "%<inscan%> %<reduction%> clause on "
8543 "%qs construct", "teams");
8544 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8545 break;
8546 case OMP_TASKLOOP:
8547 error_at (OMP_CLAUSE_LOCATION (c),
8548 "%<inscan%> %<reduction%> clause on "
8549 "%qs construct", "taskloop");
8550 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8551 break;
8552 default:
8553 break;
8554 }
28567c40
JJ
8555 /* FALLTHRU */
8556 case OMP_CLAUSE_IN_REDUCTION:
8557 case OMP_CLAUSE_TASK_REDUCTION:
953ff289 8558 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
182190f2 8559 /* OpenACC permits reductions on private variables. */
28567c40
JJ
8560 if (!(region_type & ORT_ACC)
8561 /* taskgroup is actually not a worksharing region. */
8562 && code != OMP_TASKGROUP)
8563 check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
d9a6bd32
JJ
8564 decl = OMP_CLAUSE_DECL (c);
8565 if (TREE_CODE (decl) == MEM_REF)
8566 {
8567 tree type = TREE_TYPE (decl);
8568 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
381cdae4
RB
8569 NULL, is_gimple_val, fb_rvalue, false)
8570 == GS_ERROR)
d9a6bd32
JJ
8571 {
8572 remove = true;
8573 break;
8574 }
8575 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8576 if (DECL_P (v))
8577 {
8578 omp_firstprivatize_variable (ctx, v);
8579 omp_notice_variable (ctx, v, true);
8580 }
8581 decl = TREE_OPERAND (decl, 0);
e01d41e5
JJ
8582 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
8583 {
8584 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
381cdae4 8585 NULL, is_gimple_val, fb_rvalue, false)
e01d41e5
JJ
8586 == GS_ERROR)
8587 {
8588 remove = true;
8589 break;
8590 }
8591 v = TREE_OPERAND (decl, 1);
8592 if (DECL_P (v))
8593 {
8594 omp_firstprivatize_variable (ctx, v);
8595 omp_notice_variable (ctx, v, true);
8596 }
8597 decl = TREE_OPERAND (decl, 0);
8598 }
d9a6bd32
JJ
8599 if (TREE_CODE (decl) == ADDR_EXPR
8600 || TREE_CODE (decl) == INDIRECT_REF)
8601 decl = TREE_OPERAND (decl, 0);
8602 }
8603 goto do_add_decl;
acf0174b
JJ
8604 case OMP_CLAUSE_LINEAR:
8605 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
8606 is_gimple_val, fb_rvalue) == GS_ERROR)
8607 {
8608 remove = true;
8609 break;
8610 }
41b37d5e
JJ
8611 else
8612 {
d9a6bd32
JJ
8613 if (code == OMP_SIMD
8614 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8615 {
8616 struct gimplify_omp_ctx *octx = outer_ctx;
8617 if (octx
8618 && octx->region_type == ORT_WORKSHARE
8619 && octx->combined_loop
8620 && !octx->distribute)
8621 {
8622 if (octx->outer_context
8623 && (octx->outer_context->region_type
8624 == ORT_COMBINED_PARALLEL))
8625 octx = octx->outer_context->outer_context;
8626 else
8627 octx = octx->outer_context;
8628 }
8629 if (octx
8630 && octx->region_type == ORT_WORKSHARE
8631 && octx->combined_loop
b4c3a85b 8632 && octx->distribute)
d9a6bd32
JJ
8633 {
8634 error_at (OMP_CLAUSE_LOCATION (c),
8635 "%<linear%> clause for variable other than "
8636 "loop iterator specified on construct "
8637 "combined with %<distribute%>");
8638 remove = true;
8639 break;
8640 }
8641 }
41b37d5e
JJ
8642 /* For combined #pragma omp parallel for simd, need to put
8643 lastprivate and perhaps firstprivate too on the
8644 parallel. Similarly for #pragma omp for simd. */
8645 struct gimplify_omp_ctx *octx = outer_ctx;
8646 decl = NULL_TREE;
41b37d5e
JJ
8647 do
8648 {
8649 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8650 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8651 break;
8652 decl = OMP_CLAUSE_DECL (c);
8653 if (error_operand_p (decl))
8654 {
8655 decl = NULL_TREE;
8656 break;
8657 }
d9a6bd32
JJ
8658 flags = GOVD_SEEN;
8659 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8660 flags |= GOVD_FIRSTPRIVATE;
8661 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8662 flags |= GOVD_LASTPRIVATE;
41b37d5e
JJ
8663 if (octx
8664 && octx->region_type == ORT_WORKSHARE
8665 && octx->combined_loop)
8666 {
8667 if (octx->outer_context
8668 && (octx->outer_context->region_type
e01d41e5 8669 == ORT_COMBINED_PARALLEL))
41b37d5e
JJ
8670 octx = octx->outer_context;
8671 else if (omp_check_private (octx, decl, false))
8672 break;
8673 }
d9a6bd32
JJ
8674 else if (octx
8675 && (octx->region_type & ORT_TASK) != 0
8676 && octx->combined_loop)
8677 ;
8678 else if (octx
8679 && octx->region_type == ORT_COMBINED_PARALLEL
8680 && ctx->region_type == ORT_WORKSHARE
8681 && octx == outer_ctx)
8682 flags = GOVD_SEEN | GOVD_SHARED;
e01d41e5 8683 else if (octx
28567c40
JJ
8684 && ((octx->region_type & ORT_COMBINED_TEAMS)
8685 == ORT_COMBINED_TEAMS))
e01d41e5 8686 flags = GOVD_SEEN | GOVD_SHARED;
d9a6bd32
JJ
8687 else if (octx
8688 && octx->region_type == ORT_COMBINED_TARGET)
e01d41e5
JJ
8689 {
8690 flags &= ~GOVD_LASTPRIVATE;
8691 if (flags == GOVD_SEEN)
8692 break;
8693 }
41b37d5e
JJ
8694 else
8695 break;
d9a6bd32
JJ
8696 splay_tree_node on
8697 = splay_tree_lookup (octx->variables,
8698 (splay_tree_key) decl);
8699 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
2ee10f81
JJ
8700 {
8701 octx = NULL;
8702 break;
8703 }
41b37d5e
JJ
8704 omp_add_variable (octx, decl, flags);
8705 if (octx->outer_context == NULL)
8706 break;
8707 octx = octx->outer_context;
8708 }
8709 while (1);
8710 if (octx
8711 && decl
8712 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8713 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
8714 omp_notice_variable (octx, decl, true);
8715 }
acf0174b 8716 flags = GOVD_LINEAR | GOVD_EXPLICIT;
41b37d5e
JJ
8717 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8718 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8719 {
8720 notice_outer = false;
8721 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8722 }
acf0174b
JJ
8723 goto do_add;
8724
8725 case OMP_CLAUSE_MAP:
b46ebd6c
JJ
8726 decl = OMP_CLAUSE_DECL (c);
8727 if (error_operand_p (decl))
d9a6bd32
JJ
8728 remove = true;
8729 switch (code)
b46ebd6c 8730 {
d9a6bd32
JJ
8731 case OMP_TARGET:
8732 break;
e46c7770
CP
8733 case OACC_DATA:
8734 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
8735 break;
191816a3 8736 /* FALLTHRU */
d9a6bd32
JJ
8737 case OMP_TARGET_DATA:
8738 case OMP_TARGET_ENTER_DATA:
8739 case OMP_TARGET_EXIT_DATA:
37d5ad46 8740 case OACC_HOST_DATA:
e01d41e5
JJ
8741 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8742 || (OMP_CLAUSE_MAP_KIND (c)
8743 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
d9a6bd32
JJ
8744 /* For target {,enter ,exit }data only the array slice is
8745 mapped, but not the pointer to it. */
8746 remove = true;
8747 break;
4fd872bc
JB
8748 case OACC_ENTER_DATA:
8749 case OACC_EXIT_DATA:
8750 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
8751 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET
8752 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8753 || (OMP_CLAUSE_MAP_KIND (c)
8754 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8755 remove = true;
8756 break;
d9a6bd32 8757 default:
b46ebd6c
JJ
8758 break;
8759 }
91ae0a46
TB
 8760 /* For Fortran, not only is the pointer to the data mapped but also
8761 the address of the pointer, the array descriptor etc.; for
8762 'exit data' - and in particular for 'delete:' - having an 'alloc:'
8763 does not make sense. Likewise, for 'update' only transferring the
8764 data itself is needed as the rest has been handled in previous
af557050
TB
8765 directives. However, for 'exit data', the array descriptor needs
 8766 to be deleted; hence, we turn the MAP_TO_PSET into a MAP_DELETE. */
8767 if (code == OMP_TARGET_EXIT_DATA
8768 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET)
9f0f7da9
TB
8769 OMP_CLAUSE_SET_MAP_KIND (c, OMP_CLAUSE_MAP_KIND (*prev_list_p)
8770 == GOMP_MAP_DELETE
8771 ? GOMP_MAP_DELETE : GOMP_MAP_RELEASE);
af557050
TB
8772 else if ((code == OMP_TARGET_EXIT_DATA || code == OMP_TARGET_UPDATE)
8773 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
8774 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET))
91ae0a46
TB
8775 remove = true;
8776
d9a6bd32
JJ
8777 if (remove)
8778 break;
37d5ad46
JB
8779 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
8780 {
8781 struct gimplify_omp_ctx *octx;
8782 for (octx = outer_ctx; octx; octx = octx->outer_context)
8783 {
8784 if (octx->region_type != ORT_ACC_HOST_DATA)
8785 break;
8786 splay_tree_node n2
8787 = splay_tree_lookup (octx->variables,
8788 (splay_tree_key) decl);
8789 if (n2)
8790 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
8791 "declared in enclosing %<host_data%> region",
8792 DECL_NAME (decl));
8793 }
8794 }
b46ebd6c
JJ
8795 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8796 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8797 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8798 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8799 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
acf0174b
JJ
8800 {
8801 remove = true;
8802 break;
8803 }
e01d41e5
JJ
8804 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8805 || (OMP_CLAUSE_MAP_KIND (c)
8806 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
d9a6bd32
JJ
8807 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
8808 {
8809 OMP_CLAUSE_SIZE (c)
381cdae4
RB
8810 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
8811 false);
d9a6bd32
JJ
8812 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
8813 GOVD_FIRSTPRIVATE | GOVD_SEEN);
8814 }
acf0174b
JJ
8815 if (!DECL_P (decl))
8816 {
d9a6bd32
JJ
8817 tree d = decl, *pd;
8818 if (TREE_CODE (d) == ARRAY_REF)
8819 {
8820 while (TREE_CODE (d) == ARRAY_REF)
8821 d = TREE_OPERAND (d, 0);
8822 if (TREE_CODE (d) == COMPONENT_REF
8823 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
8824 decl = d;
8825 }
8826 pd = &OMP_CLAUSE_DECL (c);
8827 if (d == decl
8828 && TREE_CODE (decl) == INDIRECT_REF
8829 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
8830 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8831 == REFERENCE_TYPE))
8832 {
8833 pd = &TREE_OPERAND (decl, 0);
8834 decl = TREE_OPERAND (decl, 0);
8835 }
4fd872bc
JB
8836 bool indir_p = false;
8837 tree orig_decl = decl;
8838 tree decl_ref = NULL_TREE;
8839 if ((region_type & ORT_ACC) != 0
8840 && TREE_CODE (*pd) == COMPONENT_REF
8841 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH
8842 && code != OACC_UPDATE)
8843 {
8844 while (TREE_CODE (decl) == COMPONENT_REF)
8845 {
8846 decl = TREE_OPERAND (decl, 0);
8847 if ((TREE_CODE (decl) == MEM_REF
8848 && integer_zerop (TREE_OPERAND (decl, 1)))
8849 || INDIRECT_REF_P (decl))
8850 {
8851 indir_p = true;
8852 decl = TREE_OPERAND (decl, 0);
8853 }
8854 if (TREE_CODE (decl) == INDIRECT_REF
8855 && DECL_P (TREE_OPERAND (decl, 0))
8856 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8857 == REFERENCE_TYPE))
8858 {
8859 decl_ref = decl;
8860 decl = TREE_OPERAND (decl, 0);
8861 }
8862 }
8863 }
8864 else if (TREE_CODE (decl) == COMPONENT_REF)
d9a6bd32
JJ
8865 {
8866 while (TREE_CODE (decl) == COMPONENT_REF)
8867 decl = TREE_OPERAND (decl, 0);
283635f9
JJ
8868 if (TREE_CODE (decl) == INDIRECT_REF
8869 && DECL_P (TREE_OPERAND (decl, 0))
8870 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8871 == REFERENCE_TYPE))
8872 decl = TREE_OPERAND (decl, 0);
d9a6bd32 8873 }
4fd872bc
JB
8874 if (decl != orig_decl && DECL_P (decl) && indir_p)
8875 {
8876 gomp_map_kind k = (code == OACC_EXIT_DATA) ? GOMP_MAP_DETACH
8877 : GOMP_MAP_ATTACH;
8878 /* We have a dereference of a struct member. Make this an
8879 attach/detach operation, and ensure the base pointer is
8880 mapped as a FIRSTPRIVATE_POINTER. */
8881 OMP_CLAUSE_SET_MAP_KIND (c, k);
8882 flags = GOVD_MAP | GOVD_SEEN | GOVD_EXPLICIT;
8883 tree next_clause = OMP_CLAUSE_CHAIN (c);
8884 if (k == GOMP_MAP_ATTACH
8885 && code != OACC_ENTER_DATA
8886 && (!next_clause
8887 || (OMP_CLAUSE_CODE (next_clause) != OMP_CLAUSE_MAP)
8888 || (OMP_CLAUSE_MAP_KIND (next_clause)
8889 != GOMP_MAP_POINTER)
8890 || OMP_CLAUSE_DECL (next_clause) != decl)
8891 && (!struct_deref_set
8892 || !struct_deref_set->contains (decl)))
8893 {
8894 if (!struct_deref_set)
8895 struct_deref_set = new hash_set<tree> ();
8896 /* As well as the attach, we also need a
8897 FIRSTPRIVATE_POINTER clause to properly map the
8898 pointer to the struct base. */
8899 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8900 OMP_CLAUSE_MAP);
8901 OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ALLOC);
8902 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c2)
8903 = 1;
8904 tree charptr_zero
8905 = build_int_cst (build_pointer_type (char_type_node),
8906 0);
8907 OMP_CLAUSE_DECL (c2)
8908 = build2 (MEM_REF, char_type_node,
8909 decl_ref ? decl_ref : decl, charptr_zero);
8910 OMP_CLAUSE_SIZE (c2) = size_zero_node;
8911 tree c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8912 OMP_CLAUSE_MAP);
8913 OMP_CLAUSE_SET_MAP_KIND (c3,
8914 GOMP_MAP_FIRSTPRIVATE_POINTER);
8915 OMP_CLAUSE_DECL (c3) = decl;
8916 OMP_CLAUSE_SIZE (c3) = size_zero_node;
8917 tree mapgrp = *prev_list_p;
8918 *prev_list_p = c2;
8919 OMP_CLAUSE_CHAIN (c3) = mapgrp;
8920 OMP_CLAUSE_CHAIN (c2) = c3;
8921
8922 struct_deref_set->add (decl);
8923 }
8924 goto do_add_decl;
8925 }
8926 /* An "attach/detach" operation on an update directive should
8927 behave as a GOMP_MAP_ALWAYS_POINTER. Beware that
8928 unlike attach or detach map kinds, GOMP_MAP_ALWAYS_POINTER
8929 depends on the previous mapping. */
8930 if (code == OACC_UPDATE
8931 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
8932 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_POINTER);
d9a6bd32 8933 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
acf0174b
JJ
8934 == GS_ERROR)
8935 {
8936 remove = true;
8937 break;
8938 }
4fd872bc
JB
8939 if (DECL_P (decl)
8940 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
8941 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
8942 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
8943 && code != OACC_UPDATE)
d9a6bd32
JJ
8944 {
8945 if (error_operand_p (decl))
8946 {
8947 remove = true;
8948 break;
8949 }
8950
283635f9
JJ
8951 tree stype = TREE_TYPE (decl);
8952 if (TREE_CODE (stype) == REFERENCE_TYPE)
8953 stype = TREE_TYPE (stype);
8954 if (TYPE_SIZE_UNIT (stype) == NULL
8955 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
d9a6bd32
JJ
8956 {
8957 error_at (OMP_CLAUSE_LOCATION (c),
8958 "mapping field %qE of variable length "
8959 "structure", OMP_CLAUSE_DECL (c));
8960 remove = true;
8961 break;
8962 }
8963
4fd872bc
JB
8964 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER
8965 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
e01d41e5
JJ
8966 {
8967 /* Error recovery. */
8968 if (prev_list_p == NULL)
8969 {
8970 remove = true;
8971 break;
8972 }
8973 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8974 {
8975 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
8976 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
8977 {
8978 remove = true;
8979 break;
8980 }
8981 }
8982 }
8983
4d83edf7
JB
8984 poly_offset_int offset1;
8985 poly_int64 bitpos1;
8986 tree base_ref;
8987
8988 tree base
8989 = extract_base_bit_offset (OMP_CLAUSE_DECL (c), &base_ref,
8990 &bitpos1, &offset1);
8991
8992 gcc_assert (base == decl);
d9a6bd32
JJ
8993
8994 splay_tree_node n
8995 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
8996 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
e01d41e5 8997 == GOMP_MAP_ALWAYS_POINTER);
4fd872bc
JB
8998 bool attach_detach = (OMP_CLAUSE_MAP_KIND (c)
8999 == GOMP_MAP_ATTACH_DETACH);
9000 bool attach = OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
9001 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH;
9002 bool has_attachments = false;
9003 /* For OpenACC, pointers in structs should trigger an
9004 attach action. */
9005 if (attach_detach && (region_type & ORT_ACC) != 0)
9006 {
9007 /* Turn a GOMP_MAP_ATTACH_DETACH clause into a
9008 GOMP_MAP_ATTACH or GOMP_MAP_DETACH clause after we
9009 have detected a case that needs a GOMP_MAP_STRUCT
9010 mapping added. */
9011 gomp_map_kind k
9012 = (code == OACC_EXIT_DATA) ? GOMP_MAP_DETACH
9013 : GOMP_MAP_ATTACH;
9014 OMP_CLAUSE_SET_MAP_KIND (c, k);
9015 has_attachments = true;
9016 }
e01d41e5 9017 if (n == NULL || (n->value & GOVD_MAP) == 0)
d9a6bd32 9018 {
e01d41e5
JJ
9019 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9020 OMP_CLAUSE_MAP);
4fd872bc
JB
9021 gomp_map_kind k = attach ? GOMP_MAP_FORCE_PRESENT
9022 : GOMP_MAP_STRUCT;
9023
9024 OMP_CLAUSE_SET_MAP_KIND (l, k);
4d83edf7
JB
9025 if (base_ref)
9026 OMP_CLAUSE_DECL (l) = unshare_expr (base_ref);
283635f9
JJ
9027 else
9028 OMP_CLAUSE_DECL (l) = decl;
4fd872bc
JB
9029 OMP_CLAUSE_SIZE (l)
9030 = (!attach
9031 ? size_int (1)
9032 : DECL_P (OMP_CLAUSE_DECL (l))
9033 ? DECL_SIZE_UNIT (OMP_CLAUSE_DECL (l))
9034 : TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (l))));
e01d41e5
JJ
9035 if (struct_map_to_clause == NULL)
9036 struct_map_to_clause = new hash_map<tree, tree>;
9037 struct_map_to_clause->put (decl, l);
4fd872bc 9038 if (ptr || attach_detach)
d9a6bd32 9039 {
4d83edf7
JB
9040 insert_struct_comp_map (code, c, l, *prev_list_p,
9041 NULL);
e01d41e5
JJ
9042 *prev_list_p = l;
9043 prev_list_p = NULL;
9044 }
9045 else
9046 {
9047 OMP_CLAUSE_CHAIN (l) = c;
9048 *list_p = l;
9049 list_p = &OMP_CLAUSE_CHAIN (l);
d9a6bd32 9050 }
4d83edf7 9051 if (base_ref && code == OMP_TARGET)
283635f9
JJ
9052 {
9053 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9054 OMP_CLAUSE_MAP);
9055 enum gomp_map_kind mkind
9056 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
9057 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
9058 OMP_CLAUSE_DECL (c2) = decl;
9059 OMP_CLAUSE_SIZE (c2) = size_zero_node;
9060 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
9061 OMP_CLAUSE_CHAIN (l) = c2;
9062 }
d9a6bd32 9063 flags = GOVD_MAP | GOVD_EXPLICIT;
4fd872bc
JB
9064 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
9065 || ptr
9066 || attach_detach)
d9a6bd32 9067 flags |= GOVD_SEEN;
4fd872bc
JB
9068 if (has_attachments)
9069 flags |= GOVD_MAP_HAS_ATTACHMENTS;
d9a6bd32
JJ
9070 goto do_add_decl;
9071 }
4fd872bc 9072 else if (struct_map_to_clause)
d9a6bd32
JJ
9073 {
9074 tree *osc = struct_map_to_clause->get (decl);
e01d41e5 9075 tree *sc = NULL, *scp = NULL;
4fd872bc
JB
9076 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
9077 || ptr
9078 || attach_detach)
d9a6bd32 9079 n->value |= GOVD_SEEN;
283635f9
JJ
9080 sc = &OMP_CLAUSE_CHAIN (*osc);
9081 if (*sc != c
9082 && (OMP_CLAUSE_MAP_KIND (*sc)
4fd872bc 9083 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
283635f9 9084 sc = &OMP_CLAUSE_CHAIN (*sc);
4fd872bc
JB
9085 /* Here "prev_list_p" is the end of the inserted
9086 alloc/release nodes after the struct node, OSC. */
283635f9 9087 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
4fd872bc 9088 if ((ptr || attach_detach) && sc == prev_list_p)
e01d41e5
JJ
9089 break;
9090 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9091 != COMPONENT_REF
9092 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9093 != INDIRECT_REF)
9094 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9095 != ARRAY_REF))
d9a6bd32
JJ
9096 break;
9097 else
9098 {
4d83edf7
JB
9099 tree sc_decl = OMP_CLAUSE_DECL (*sc);
9100 poly_offset_int offsetn;
9101 poly_int64 bitposn;
9102 tree base
9103 = extract_base_bit_offset (sc_decl, NULL,
9104 &bitposn, &offsetn);
d9a6bd32
JJ
9105 if (base != decl)
9106 break;
e01d41e5
JJ
9107 if (scp)
9108 continue;
d9a6bd32
JJ
9109 tree d1 = OMP_CLAUSE_DECL (*sc);
9110 tree d2 = OMP_CLAUSE_DECL (c);
9111 while (TREE_CODE (d1) == ARRAY_REF)
9112 d1 = TREE_OPERAND (d1, 0);
9113 while (TREE_CODE (d2) == ARRAY_REF)
9114 d2 = TREE_OPERAND (d2, 0);
9115 if (TREE_CODE (d1) == INDIRECT_REF)
9116 d1 = TREE_OPERAND (d1, 0);
9117 if (TREE_CODE (d2) == INDIRECT_REF)
9118 d2 = TREE_OPERAND (d2, 0);
9119 while (TREE_CODE (d1) == COMPONENT_REF)
9120 if (TREE_CODE (d2) == COMPONENT_REF
9121 && TREE_OPERAND (d1, 1)
9122 == TREE_OPERAND (d2, 1))
9123 {
9124 d1 = TREE_OPERAND (d1, 0);
9125 d2 = TREE_OPERAND (d2, 0);
9126 }
9127 else
9128 break;
9129 if (d1 == d2)
9130 {
9131 error_at (OMP_CLAUSE_LOCATION (c),
9132 "%qE appears more than once in map "
9133 "clauses", OMP_CLAUSE_DECL (c));
9134 remove = true;
9135 break;
9136 }
4d83edf7
JB
9137 if (maybe_lt (offset1, offsetn)
9138 || (known_eq (offset1, offsetn)
9139 && maybe_lt (bitpos1, bitposn)))
e01d41e5 9140 {
4fd872bc 9141 if (ptr || attach_detach)
e01d41e5
JJ
9142 scp = sc;
9143 else
9144 break;
9145 }
d9a6bd32 9146 }
e01d41e5
JJ
9147 if (remove)
9148 break;
4fd872bc
JB
9149 if (!attach)
9150 OMP_CLAUSE_SIZE (*osc)
9151 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
9152 size_one_node);
9153 if (ptr || attach_detach)
d9a6bd32 9154 {
4d83edf7
JB
9155 tree cl = insert_struct_comp_map (code, c, NULL,
9156 *prev_list_p, scp);
e01d41e5
JJ
9157 if (sc == prev_list_p)
9158 {
9159 *sc = cl;
9160 prev_list_p = NULL;
9161 }
9162 else
9163 {
9164 *prev_list_p = OMP_CLAUSE_CHAIN (c);
9165 list_p = prev_list_p;
9166 prev_list_p = NULL;
9167 OMP_CLAUSE_CHAIN (c) = *sc;
9168 *sc = cl;
9169 continue;
9170 }
d9a6bd32 9171 }
e01d41e5 9172 else if (*sc != c)
d9a6bd32
JJ
9173 {
9174 *list_p = OMP_CLAUSE_CHAIN (c);
9175 OMP_CLAUSE_CHAIN (c) = *sc;
9176 *sc = c;
9177 continue;
9178 }
9179 }
9180 }
e01d41e5
JJ
9181 if (!remove
9182 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
4fd872bc
JB
9183 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH_DETACH
9184 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
e01d41e5
JJ
9185 && OMP_CLAUSE_CHAIN (c)
9186 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
4fd872bc
JB
9187 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
9188 == GOMP_MAP_ALWAYS_POINTER)
9189 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
9190 == GOMP_MAP_ATTACH_DETACH)
9191 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
9192 == GOMP_MAP_TO_PSET)))
e01d41e5 9193 prev_list_p = list_p;
4fd872bc 9194
acf0174b
JJ
9195 break;
9196 }
9197 flags = GOVD_MAP | GOVD_EXPLICIT;
e01d41e5
JJ
9198 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
9199 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
9200 flags |= GOVD_MAP_ALWAYS_TO;
acf0174b
JJ
9201 goto do_add;
9202
9203 case OMP_CLAUSE_DEPEND:
b4c3a85b 9204 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
d9a6bd32 9205 {
b4c3a85b
JJ
9206 tree deps = OMP_CLAUSE_DECL (c);
9207 while (deps && TREE_CODE (deps) == TREE_LIST)
9208 {
9209 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
9210 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
9211 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
9212 pre_p, NULL, is_gimple_val, fb_rvalue);
9213 deps = TREE_CHAIN (deps);
9214 }
d9a6bd32
JJ
9215 break;
9216 }
b4c3a85b
JJ
9217 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
9218 break;
28567c40
JJ
9219 if (handled_depend_iterators == -1)
9220 handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
9221 if (handled_depend_iterators)
9222 {
9223 if (handled_depend_iterators == 2)
9224 remove = true;
9225 break;
9226 }
acf0174b
JJ
9227 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
9228 {
9229 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
9230 NULL, is_gimple_val, fb_rvalue);
9231 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
9232 }
9233 if (error_operand_p (OMP_CLAUSE_DECL (c)))
9234 {
9235 remove = true;
9236 break;
9237 }
9238 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
9239 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
9240 is_gimple_val, fb_rvalue) == GS_ERROR)
9241 {
9242 remove = true;
9243 break;
9244 }
9245 break;
9246
9247 case OMP_CLAUSE_TO:
9248 case OMP_CLAUSE_FROM:
41dbbb37 9249 case OMP_CLAUSE__CACHE_:
b46ebd6c
JJ
9250 decl = OMP_CLAUSE_DECL (c);
9251 if (error_operand_p (decl))
acf0174b
JJ
9252 {
9253 remove = true;
9254 break;
9255 }
b46ebd6c
JJ
9256 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9257 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
9258 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
9259 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
9260 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
acf0174b
JJ
9261 {
9262 remove = true;
9263 break;
9264 }
9265 if (!DECL_P (decl))
9266 {
9267 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
9268 NULL, is_gimple_lvalue, fb_lvalue)
9269 == GS_ERROR)
9270 {
9271 remove = true;
9272 break;
9273 }
9274 break;
9275 }
9276 goto do_notice;
953ff289 9277
d9a6bd32 9278 case OMP_CLAUSE_USE_DEVICE_PTR:
398e3feb 9279 case OMP_CLAUSE_USE_DEVICE_ADDR:
8860d270
JJ
9280 flags = GOVD_EXPLICIT;
9281 goto do_add;
9282
d9a6bd32
JJ
9283 case OMP_CLAUSE_IS_DEVICE_PTR:
9284 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
9285 goto do_add;
9286
953ff289
DN
9287 do_add:
9288 decl = OMP_CLAUSE_DECL (c);
d9a6bd32 9289 do_add_decl:
b504a918 9290 if (error_operand_p (decl))
953ff289
DN
9291 {
9292 remove = true;
9293 break;
9294 }
d9a6bd32
JJ
9295 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
9296 {
9297 tree t = omp_member_access_dummy_var (decl);
9298 if (t)
9299 {
9300 tree v = DECL_VALUE_EXPR (decl);
9301 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
9302 if (outer_ctx)
9303 omp_notice_variable (outer_ctx, t, true);
9304 }
9305 }
e46c7770
CP
9306 if (code == OACC_DATA
9307 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9308 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
9309 flags |= GOVD_MAP_0LEN_ARRAY;
953ff289 9310 omp_add_variable (ctx, decl, flags);
28567c40
JJ
9311 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9312 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
9313 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
953ff289
DN
9314 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9315 {
9316 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
693d710f 9317 GOVD_LOCAL | GOVD_SEEN);
d9a6bd32
JJ
9318 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
9319 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
9320 find_decl_expr,
9321 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
9322 NULL) == NULL_TREE)
9323 omp_add_variable (ctx,
9324 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
9325 GOVD_LOCAL | GOVD_SEEN);
953ff289 9326 gimplify_omp_ctxp = ctx;
45852dcc 9327 push_gimplify_context ();
726a989a 9328
355a7673
MM
9329 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9330 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
726a989a
RB
9331
9332 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
9333 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
9334 pop_gimplify_context
9335 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
45852dcc 9336 push_gimplify_context ();
726a989a
RB
9337 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
9338 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
b8698a0f 9339 pop_gimplify_context
726a989a
RB
9340 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
9341 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
9342 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
9343
953ff289
DN
9344 gimplify_omp_ctxp = outer_ctx;
9345 }
a68ab351
JJ
9346 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
9347 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
9348 {
9349 gimplify_omp_ctxp = ctx;
45852dcc 9350 push_gimplify_context ();
a68ab351
JJ
9351 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
9352 {
9353 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
9354 NULL, NULL);
9355 TREE_SIDE_EFFECTS (bind) = 1;
9356 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
9357 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
9358 }
726a989a
RB
9359 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
9360 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
9361 pop_gimplify_context
9362 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
9363 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
9364
dd2fc525
JJ
9365 gimplify_omp_ctxp = outer_ctx;
9366 }
9367 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9368 && OMP_CLAUSE_LINEAR_STMT (c))
9369 {
9370 gimplify_omp_ctxp = ctx;
9371 push_gimplify_context ();
9372 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
9373 {
9374 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
9375 NULL, NULL);
9376 TREE_SIDE_EFFECTS (bind) = 1;
9377 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
9378 OMP_CLAUSE_LINEAR_STMT (c) = bind;
9379 }
9380 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
9381 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
9382 pop_gimplify_context
9383 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
9384 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
9385
a68ab351
JJ
9386 gimplify_omp_ctxp = outer_ctx;
9387 }
953ff289
DN
9388 if (notice_outer)
9389 goto do_notice;
9390 break;
9391
9392 case OMP_CLAUSE_COPYIN:
9393 case OMP_CLAUSE_COPYPRIVATE:
9394 decl = OMP_CLAUSE_DECL (c);
b504a918 9395 if (error_operand_p (decl))
953ff289
DN
9396 {
9397 remove = true;
9398 break;
9399 }
cab37c89
JJ
9400 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
9401 && !remove
9402 && !omp_check_private (ctx, decl, true))
9403 {
9404 remove = true;
9405 if (is_global_var (decl))
9406 {
9407 if (DECL_THREAD_LOCAL_P (decl))
9408 remove = false;
9409 else if (DECL_HAS_VALUE_EXPR_P (decl))
9410 {
9411 tree value = get_base_address (DECL_VALUE_EXPR (decl));
9412
9413 if (value
9414 && DECL_P (value)
9415 && DECL_THREAD_LOCAL_P (value))
9416 remove = false;
9417 }
9418 }
9419 if (remove)
9420 error_at (OMP_CLAUSE_LOCATION (c),
9421 "copyprivate variable %qE is not threadprivate"
9422 " or private in outer context", DECL_NAME (decl));
9423 }
953ff289 9424 do_notice:
554a530f
JJ
9425 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9426 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
9427 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
28567c40 9428 && outer_ctx
554a530f
JJ
9429 && ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
9430 || (region_type == ORT_WORKSHARE
9431 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9432 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
9433 || code == OMP_LOOP)))
9434 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
9435 || (code == OMP_LOOP
9436 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9437 && ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
9438 == ORT_COMBINED_TEAMS))))
28567c40
JJ
9439 {
9440 splay_tree_node on
9441 = splay_tree_lookup (outer_ctx->variables,
9442 (splay_tree_key)decl);
9443 if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
9444 {
9445 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9446 && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
9447 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
9448 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
9449 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
9450 == POINTER_TYPE))))
9451 omp_firstprivatize_variable (outer_ctx, decl);
9452 else
cf785618
JJ
9453 {
9454 omp_add_variable (outer_ctx, decl,
9455 GOVD_SEEN | GOVD_SHARED);
9456 if (outer_ctx->outer_context)
9457 omp_notice_variable (outer_ctx->outer_context, decl,
9458 true);
9459 }
28567c40
JJ
9460 }
9461 }
953ff289
DN
9462 if (outer_ctx)
9463 omp_notice_variable (outer_ctx, decl, true);
07b7aade 9464 if (check_non_private
a68ab351 9465 && region_type == ORT_WORKSHARE
d9a6bd32
JJ
9466 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
9467 || decl == OMP_CLAUSE_DECL (c)
9468 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
9469 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
e01d41e5
JJ
9470 == ADDR_EXPR
9471 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9472 == POINTER_PLUS_EXPR
9473 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
9474 (OMP_CLAUSE_DECL (c), 0), 0))
9475 == ADDR_EXPR)))))
cab37c89 9476 && omp_check_private (ctx, decl, false))
07b7aade 9477 {
4f1e4960
JM
9478 error ("%s variable %qE is private in outer context",
9479 check_non_private, DECL_NAME (decl));
07b7aade
JJ
9480 remove = true;
9481 }
953ff289
DN
9482 break;
9483
953ff289 9484 case OMP_CLAUSE_IF:
d9a6bd32
JJ
9485 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
9486 && OMP_CLAUSE_IF_MODIFIER (c) != code)
9487 {
9488 const char *p[2];
9489 for (int i = 0; i < 2; i++)
9490 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
9491 {
28567c40 9492 case VOID_CST: p[i] = "cancel"; break;
d9a6bd32 9493 case OMP_PARALLEL: p[i] = "parallel"; break;
28567c40 9494 case OMP_SIMD: p[i] = "simd"; break;
d9a6bd32
JJ
9495 case OMP_TASK: p[i] = "task"; break;
9496 case OMP_TASKLOOP: p[i] = "taskloop"; break;
9497 case OMP_TARGET_DATA: p[i] = "target data"; break;
9498 case OMP_TARGET: p[i] = "target"; break;
9499 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
9500 case OMP_TARGET_ENTER_DATA:
9501 p[i] = "target enter data"; break;
9502 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
9503 default: gcc_unreachable ();
9504 }
9505 error_at (OMP_CLAUSE_LOCATION (c),
9506 "expected %qs %<if%> clause modifier rather than %qs",
9507 p[0], p[1]);
9508 remove = true;
9509 }
9510 /* Fall through. */
9511
9512 case OMP_CLAUSE_FINAL:
d568d1a8
RS
9513 OMP_CLAUSE_OPERAND (c, 0)
9514 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
9515 /* Fall through. */
9516
9517 case OMP_CLAUSE_SCHEDULE:
953ff289 9518 case OMP_CLAUSE_NUM_THREADS:
acf0174b
JJ
9519 case OMP_CLAUSE_NUM_TEAMS:
9520 case OMP_CLAUSE_THREAD_LIMIT:
9521 case OMP_CLAUSE_DIST_SCHEDULE:
9522 case OMP_CLAUSE_DEVICE:
d9a6bd32
JJ
9523 case OMP_CLAUSE_PRIORITY:
9524 case OMP_CLAUSE_GRAINSIZE:
9525 case OMP_CLAUSE_NUM_TASKS:
9526 case OMP_CLAUSE_HINT:
41dbbb37
TS
9527 case OMP_CLAUSE_ASYNC:
9528 case OMP_CLAUSE_WAIT:
9529 case OMP_CLAUSE_NUM_GANGS:
9530 case OMP_CLAUSE_NUM_WORKERS:
9531 case OMP_CLAUSE_VECTOR_LENGTH:
41dbbb37
TS
9532 case OMP_CLAUSE_WORKER:
9533 case OMP_CLAUSE_VECTOR:
726a989a
RB
9534 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
9535 is_gimple_val, fb_rvalue) == GS_ERROR)
acf0174b 9536 remove = true;
d9a6bd32
JJ
9537 break;
9538
9539 case OMP_CLAUSE_GANG:
9540 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
9541 is_gimple_val, fb_rvalue) == GS_ERROR)
9542 remove = true;
9543 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
9544 is_gimple_val, fb_rvalue) == GS_ERROR)
41dbbb37
TS
9545 remove = true;
9546 break;
9547
953ff289 9548 case OMP_CLAUSE_NOWAIT:
28567c40
JJ
9549 nowait = 1;
9550 break;
9551
953ff289 9552 case OMP_CLAUSE_ORDERED:
a68ab351
JJ
9553 case OMP_CLAUSE_UNTIED:
9554 case OMP_CLAUSE_COLLAPSE:
02889d23 9555 case OMP_CLAUSE_TILE:
41dbbb37
TS
9556 case OMP_CLAUSE_AUTO:
9557 case OMP_CLAUSE_SEQ:
7a5e4956 9558 case OMP_CLAUSE_INDEPENDENT:
20906c66 9559 case OMP_CLAUSE_MERGEABLE:
acf0174b 9560 case OMP_CLAUSE_PROC_BIND:
74bf76ed 9561 case OMP_CLAUSE_SAFELEN:
d9a6bd32
JJ
9562 case OMP_CLAUSE_SIMDLEN:
9563 case OMP_CLAUSE_NOGROUP:
9564 case OMP_CLAUSE_THREADS:
9565 case OMP_CLAUSE_SIMD:
554a530f 9566 case OMP_CLAUSE_BIND:
829c6349
CLT
9567 case OMP_CLAUSE_IF_PRESENT:
9568 case OMP_CLAUSE_FINALIZE:
d9a6bd32
JJ
9569 break;
9570
70468604
JJ
9571 case OMP_CLAUSE_ORDER:
9572 ctx->order_concurrent = true;
9573 break;
9574
d9a6bd32 9575 case OMP_CLAUSE_DEFAULTMAP:
28567c40
JJ
9576 enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
9577 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
9578 {
9579 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
9580 gdmkmin = GDMK_SCALAR;
9581 gdmkmax = GDMK_POINTER;
9582 break;
9583 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
9584 gdmkmin = gdmkmax = GDMK_SCALAR;
9585 break;
9586 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
9587 gdmkmin = gdmkmax = GDMK_AGGREGATE;
9588 break;
9589 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
9590 gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
9591 break;
9592 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
9593 gdmkmin = gdmkmax = GDMK_POINTER;
9594 break;
9595 default:
9596 gcc_unreachable ();
9597 }
9598 for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
9599 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
9600 {
9601 case OMP_CLAUSE_DEFAULTMAP_ALLOC:
9602 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
9603 break;
9604 case OMP_CLAUSE_DEFAULTMAP_TO:
9605 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
9606 break;
9607 case OMP_CLAUSE_DEFAULTMAP_FROM:
9608 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
9609 break;
9610 case OMP_CLAUSE_DEFAULTMAP_TOFROM:
9611 ctx->defaultmap[gdmk] = GOVD_MAP;
9612 break;
9613 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
9614 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
9615 break;
9616 case OMP_CLAUSE_DEFAULTMAP_NONE:
9617 ctx->defaultmap[gdmk] = 0;
9618 break;
9619 case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
9620 switch (gdmk)
9621 {
9622 case GDMK_SCALAR:
9623 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
9624 break;
9625 case GDMK_AGGREGATE:
9626 case GDMK_ALLOCATABLE:
9627 ctx->defaultmap[gdmk] = GOVD_MAP;
9628 break;
9629 case GDMK_POINTER:
9630 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
9631 break;
9632 default:
9633 gcc_unreachable ();
9634 }
9635 break;
9636 default:
9637 gcc_unreachable ();
9638 }
953ff289
DN
9639 break;
9640
acf0174b
JJ
9641 case OMP_CLAUSE_ALIGNED:
9642 decl = OMP_CLAUSE_DECL (c);
9643 if (error_operand_p (decl))
9644 {
9645 remove = true;
9646 break;
9647 }
b46ebd6c
JJ
9648 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
9649 is_gimple_val, fb_rvalue) == GS_ERROR)
9650 {
9651 remove = true;
9652 break;
9653 }
acf0174b
JJ
9654 if (!is_global_var (decl)
9655 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
9656 omp_add_variable (ctx, decl, GOVD_ALIGNED);
9657 break;
9658
28567c40
JJ
9659 case OMP_CLAUSE_NONTEMPORAL:
9660 decl = OMP_CLAUSE_DECL (c);
9661 if (error_operand_p (decl))
9662 {
9663 remove = true;
9664 break;
9665 }
9666 omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
9667 break;
9668
953ff289
DN
9669 case OMP_CLAUSE_DEFAULT:
9670 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
9671 break;
9672
bf38f7e9
JJ
9673 case OMP_CLAUSE_INCLUSIVE:
9674 case OMP_CLAUSE_EXCLUSIVE:
9675 decl = OMP_CLAUSE_DECL (c);
9676 {
9677 splay_tree_node n = splay_tree_lookup (outer_ctx->variables,
9678 (splay_tree_key) decl);
9679 if (n == NULL || (n->value & GOVD_REDUCTION) == 0)
9680 {
9681 error_at (OMP_CLAUSE_LOCATION (c),
9682 "%qD specified in %qs clause but not in %<inscan%> "
9683 "%<reduction%> clause on the containing construct",
9684 decl, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
9685 remove = true;
9686 }
9687 else
9688 {
9689 n->value |= GOVD_REDUCTION_INSCAN;
9690 if (outer_ctx->region_type == ORT_SIMD
9691 && outer_ctx->outer_context
9692 && outer_ctx->outer_context->region_type == ORT_WORKSHARE)
9693 {
9694 n = splay_tree_lookup (outer_ctx->outer_context->variables,
9695 (splay_tree_key) decl);
9696 if (n && (n->value & GOVD_REDUCTION) != 0)
9697 n->value |= GOVD_REDUCTION_INSCAN;
9698 }
9699 }
9700 }
9701 break;
9702
953ff289
DN
9703 default:
9704 gcc_unreachable ();
9705 }
9706
e46c7770
CP
9707 if (code == OACC_DATA
9708 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8e36332c
CP
9709 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9710 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
e46c7770 9711 remove = true;
953ff289
DN
9712 if (remove)
9713 *list_p = OMP_CLAUSE_CHAIN (c);
9714 else
9715 list_p = &OMP_CLAUSE_CHAIN (c);
9716 }
9717
7e47198b 9718 ctx->clauses = *orig_list_p;
953ff289 9719 gimplify_omp_ctxp = ctx;
d9a6bd32
JJ
9720 if (struct_map_to_clause)
9721 delete struct_map_to_clause;
4fd872bc
JB
9722 if (struct_deref_set)
9723 delete struct_deref_set;
953ff289
DN
9724}
9725
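For orientation, a hedged example (names invented) of the clause mix this scanner walks; per the handling above, 'reduction' installs GOVD_REDUCTION, 'firstprivate' installs GOVD_FIRSTPRIVATE and 'shared' installs GOVD_SHARED, each together with GOVD_EXPLICIT, in the freshly created context:

void
scan_clauses_example (int n, int *a)
{
  int sum = 0;
  #pragma omp parallel for reduction(+: sum) firstprivate(n) shared(a)
  for (int i = 0; i < n; i++)
    sum += a[i];
}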
1a80d6b8
JJ
9726/* Return true if DECL is a candidate for shared to firstprivate
 9727 optimization. We only consider non-addressable scalars that are not
 9728 too big and are not references. */
9729
9730static bool
9731omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
9732{
9733 if (TREE_ADDRESSABLE (decl))
9734 return false;
9735 tree type = TREE_TYPE (decl);
9736 if (!is_gimple_reg_type (type)
9737 || TREE_CODE (type) == REFERENCE_TYPE
9738 || TREE_ADDRESSABLE (type))
9739 return false;
9740 /* Don't optimize overly large decls, as each thread/task will have
 9741 its own copy. */
9742 HOST_WIDE_INT len = int_size_in_bytes (type);
9743 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
9744 return false;
9745 if (lang_hooks.decls.omp_privatize_by_reference (decl))
9746 return false;
9747 return true;
9748}
9749
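/* Illustrative sketch, not part of gimplify.c: on a typical LP64 target the
   predicate above would treat the following declarations as shown (the
   names are hypothetical, for illustration only):

     int i;                                     // candidate: small, non-addressable scalar
     int *p = &i;                               // taking the address makes 'i' TREE_ADDRESSABLE -> rejected
     int v __attribute__ ((vector_size (64)));  // 64 bytes > 4 * POINTER_SIZE / BITS_PER_UNIT (32) -> rejected
     int &r = i;                                // C++ reference type -> rejected  */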
9750/* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
9751 For a DECL satisfying omp_shared_to_firstprivate_optimizable_decl_p,
 9752 mark it as GOVD_WRITTEN in outer contexts. */
9753
9754static void
9755omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
9756{
9757 for (; ctx; ctx = ctx->outer_context)
9758 {
9759 splay_tree_node n = splay_tree_lookup (ctx->variables,
9760 (splay_tree_key) decl);
9761 if (n == NULL)
9762 continue;
9763 else if (n->value & GOVD_SHARED)
9764 {
9765 n->value |= GOVD_WRITTEN;
9766 return;
9767 }
9768 else if (n->value & GOVD_DATA_SHARE_CLASS)
9769 return;
9770 }
9771}
9772
9773/* Helper callback for walk_gimple_seq to discover possible stores
9774 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
9775 GOVD_WRITTEN for those that are GOVD_SHARED in some outer
 9776 context. */
9777
9778static tree
9779omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
9780{
9781 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
9782
9783 *walk_subtrees = 0;
9784 if (!wi->is_lhs)
9785 return NULL_TREE;
9786
9787 tree op = *tp;
9788 do
9789 {
9790 if (handled_component_p (op))
9791 op = TREE_OPERAND (op, 0);
9792 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
9793 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
9794 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
9795 else
9796 break;
9797 }
9798 while (1);
9799 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
9800 return NULL_TREE;
9801
9802 omp_mark_stores (gimplify_omp_ctxp, op);
9803 return NULL_TREE;
9804}
9805
9806/* Helper callback for walk_gimple_seq to discover possible stores
9807 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
9808 GOVD_WRITTEN for those that are GOVD_SHARED in some outer
 9809 context. */
9810
9811static tree
9812omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
9813 bool *handled_ops_p,
9814 struct walk_stmt_info *wi)
9815{
9816 gimple *stmt = gsi_stmt (*gsi_p);
9817 switch (gimple_code (stmt))
9818 {
9819 /* Don't recurse on OpenMP constructs for which
9820 gimplify_adjust_omp_clauses already handled the bodies,
9821 except handle gimple_omp_for_pre_body. */
9822 case GIMPLE_OMP_FOR:
9823 *handled_ops_p = true;
9824 if (gimple_omp_for_pre_body (stmt))
9825 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9826 omp_find_stores_stmt, omp_find_stores_op, wi);
9827 break;
9828 case GIMPLE_OMP_PARALLEL:
9829 case GIMPLE_OMP_TASK:
9830 case GIMPLE_OMP_SECTIONS:
9831 case GIMPLE_OMP_SINGLE:
9832 case GIMPLE_OMP_TARGET:
9833 case GIMPLE_OMP_TEAMS:
9834 case GIMPLE_OMP_CRITICAL:
9835 *handled_ops_p = true;
9836 break;
9837 default:
9838 break;
9839 }
9840 return NULL_TREE;
9841}
9842
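/* Illustrative sketch, not part of gimplify.c: the two callbacks above only
   record stores.  For user code like

     extern void use (int);        // hypothetical helper, illustration only
     int x = 42;
     #pragma omp parallel shared(x)
     use (x);                      // 'x' is only read, never stored to

   no store to 'x' is discovered, GOVD_WRITTEN stays clear, and
   gimplify_adjust_omp_clauses_1 can later set OMP_CLAUSE_SHARED_READONLY
   on the shared(x) clause.  */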
f014c653
JJ
9843struct gimplify_adjust_omp_clauses_data
9844{
9845 tree *list_p;
9846 gimple_seq *pre_p;
9847};
9848
953ff289
DN
9849/* For all variables that were not actually used within the context,
9850 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
9851
9852static int
9853gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
9854{
f014c653
JJ
9855 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
9856 gimple_seq *pre_p
9857 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
953ff289
DN
9858 tree decl = (tree) n->key;
9859 unsigned flags = n->value;
aaf46ef9 9860 enum omp_clause_code code;
953ff289
DN
9861 tree clause;
9862 bool private_debug;
9863
7e47198b
JJ
9864 if (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
9865 && (flags & GOVD_LASTPRIVATE_CONDITIONAL) != 0)
9866 flags = GOVD_SHARED | GOVD_SEEN | GOVD_WRITTEN;
953ff289
DN
9867 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
9868 return 0;
9869 if ((flags & GOVD_SEEN) == 0)
9870 return 0;
4fd872bc
JB
9871 if ((flags & GOVD_MAP_HAS_ATTACHMENTS) != 0)
9872 return 0;
953ff289
DN
9873 if (flags & GOVD_DEBUG_PRIVATE)
9874 {
e9e2ef9f 9875 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
953ff289
DN
9876 private_debug = true;
9877 }
acf0174b
JJ
9878 else if (flags & GOVD_MAP)
9879 private_debug = false;
953ff289
DN
9880 else
9881 private_debug
9882 = lang_hooks.decls.omp_private_debug_clause (decl,
9883 !!(flags & GOVD_SHARED));
9884 if (private_debug)
9885 code = OMP_CLAUSE_PRIVATE;
acf0174b 9886 else if (flags & GOVD_MAP)
9dc5773f
JJ
9887 {
9888 code = OMP_CLAUSE_MAP;
9889 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
9890 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
9891 {
9892 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
9893 return 0;
9894 }
9bc3b95d
JJ
9895 if (VAR_P (decl)
9896 && DECL_IN_CONSTANT_POOL (decl)
9897 && !lookup_attribute ("omp declare target",
9898 DECL_ATTRIBUTES (decl)))
9899 {
9900 tree id = get_identifier ("omp declare target");
9901 DECL_ATTRIBUTES (decl)
9902 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
9903 varpool_node *node = varpool_node::get (decl);
9904 if (node)
9905 {
9906 node->offloadable = 1;
9907 if (ENABLE_OFFLOADING)
9908 g->have_offload = true;
9909 }
9910 }
9dc5773f 9911 }
953ff289
DN
9912 else if (flags & GOVD_SHARED)
9913 {
9914 if (is_global_var (decl))
64964499
JJ
9915 {
9916 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
9917 while (ctx != NULL)
9918 {
9919 splay_tree_node on
9920 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9921 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
74bf76ed 9922 | GOVD_PRIVATE | GOVD_REDUCTION
7de20fbd 9923 | GOVD_LINEAR | GOVD_MAP)) != 0)
64964499
JJ
9924 break;
9925 ctx = ctx->outer_context;
9926 }
9927 if (ctx == NULL)
9928 return 0;
9929 }
953ff289
DN
9930 code = OMP_CLAUSE_SHARED;
9931 }
9932 else if (flags & GOVD_PRIVATE)
9933 code = OMP_CLAUSE_PRIVATE;
9934 else if (flags & GOVD_FIRSTPRIVATE)
9dc5773f
JJ
9935 {
9936 code = OMP_CLAUSE_FIRSTPRIVATE;
9937 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
9938 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
9939 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
9940 {
9941 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
9942 "%<target%> construct", decl);
9943 return 0;
9944 }
9945 }
74bf76ed
JJ
9946 else if (flags & GOVD_LASTPRIVATE)
9947 code = OMP_CLAUSE_LASTPRIVATE;
28567c40 9948 else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
acf0174b 9949 return 0;
8221c30b
JJ
9950 else if (flags & GOVD_CONDTEMP)
9951 {
9952 code = OMP_CLAUSE__CONDTEMP_;
9953 gimple_add_tmp_var (decl);
9954 }
953ff289
DN
9955 else
9956 gcc_unreachable ();
9957
1a80d6b8
JJ
9958 if (((flags & GOVD_LASTPRIVATE)
9959 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
9960 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9961 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9962
3693a620 9963 tree chain = *list_p;
c2255bc4 9964 clause = build_omp_clause (input_location, code);
aaf46ef9 9965 OMP_CLAUSE_DECL (clause) = decl;
3693a620 9966 OMP_CLAUSE_CHAIN (clause) = chain;
953ff289
DN
9967 if (private_debug)
9968 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
a68ab351
JJ
9969 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
9970 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
1a80d6b8
JJ
9971 else if (code == OMP_CLAUSE_SHARED
9972 && (flags & GOVD_WRITTEN) == 0
9973 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9974 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
ec35ea45
JJ
9975 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
9976 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
d9a6bd32
JJ
9977 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
9978 {
9979 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
9980 OMP_CLAUSE_DECL (nc) = decl;
9981 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
9982 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
9983 OMP_CLAUSE_DECL (clause)
9984 = build_simple_mem_ref_loc (input_location, decl);
9985 OMP_CLAUSE_DECL (clause)
9986 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
9987 build_int_cst (build_pointer_type (char_type_node), 0));
9988 OMP_CLAUSE_SIZE (clause) = size_zero_node;
9989 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9990 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
9991 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
9992 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
3693a620 9993 OMP_CLAUSE_CHAIN (nc) = chain;
d9a6bd32
JJ
9994 OMP_CLAUSE_CHAIN (clause) = nc;
9995 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9996 gimplify_omp_ctxp = ctx->outer_context;
9997 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
9998 pre_p, NULL, is_gimple_val, fb_rvalue);
9999 gimplify_omp_ctxp = ctx;
10000 }
acf0174b
JJ
10001 else if (code == OMP_CLAUSE_MAP)
10002 {
7fd549d2
TS
10003 int kind;
10004 /* Not all combinations of these GOVD_MAP flags are actually valid. */
10005 switch (flags & (GOVD_MAP_TO_ONLY
10006 | GOVD_MAP_FORCE
28567c40
JJ
10007 | GOVD_MAP_FORCE_PRESENT
10008 | GOVD_MAP_ALLOC_ONLY
10009 | GOVD_MAP_FROM_ONLY))
7fd549d2
TS
10010 {
10011 case 0:
10012 kind = GOMP_MAP_TOFROM;
10013 break;
10014 case GOVD_MAP_FORCE:
10015 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
10016 break;
10017 case GOVD_MAP_TO_ONLY:
10018 kind = GOMP_MAP_TO;
10019 break;
28567c40
JJ
10020 case GOVD_MAP_FROM_ONLY:
10021 kind = GOMP_MAP_FROM;
10022 break;
10023 case GOVD_MAP_ALLOC_ONLY:
10024 kind = GOMP_MAP_ALLOC;
10025 break;
7fd549d2
TS
10026 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
10027 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
10028 break;
10029 case GOVD_MAP_FORCE_PRESENT:
10030 kind = GOMP_MAP_FORCE_PRESENT;
10031 break;
10032 default:
10033 gcc_unreachable ();
10034 }
db0f1c7a 10035 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
acf0174b
JJ
10036 if (DECL_SIZE (decl)
10037 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
10038 {
10039 tree decl2 = DECL_VALUE_EXPR (decl);
10040 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
10041 decl2 = TREE_OPERAND (decl2, 0);
10042 gcc_assert (DECL_P (decl2));
10043 tree mem = build_simple_mem_ref (decl2);
10044 OMP_CLAUSE_DECL (clause) = mem;
10045 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
10046 if (gimplify_omp_ctxp->outer_context)
10047 {
10048 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
10049 omp_notice_variable (ctx, decl2, true);
10050 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
10051 }
10052 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
10053 OMP_CLAUSE_MAP);
10054 OMP_CLAUSE_DECL (nc) = decl;
10055 OMP_CLAUSE_SIZE (nc) = size_zero_node;
d9a6bd32
JJ
10056 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
10057 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
10058 else
10059 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
acf0174b
JJ
10060 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
10061 OMP_CLAUSE_CHAIN (clause) = nc;
10062 }
e01d41e5
JJ
10063 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
10064 && lang_hooks.decls.omp_privatize_by_reference (decl))
10065 {
10066 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
10067 OMP_CLAUSE_SIZE (clause)
10068 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
10069 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10070 gimplify_omp_ctxp = ctx->outer_context;
10071 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
10072 pre_p, NULL, is_gimple_val, fb_rvalue);
10073 gimplify_omp_ctxp = ctx;
10074 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
10075 OMP_CLAUSE_MAP);
10076 OMP_CLAUSE_DECL (nc) = decl;
10077 OMP_CLAUSE_SIZE (nc) = size_zero_node;
10078 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
10079 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
10080 OMP_CLAUSE_CHAIN (clause) = nc;
10081 }
b46ebd6c
JJ
10082 else
10083 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
acf0174b 10084 }
95782571
JJ
10085 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
10086 {
10087 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
10088 OMP_CLAUSE_DECL (nc) = decl;
10089 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
3693a620 10090 OMP_CLAUSE_CHAIN (nc) = chain;
95782571 10091 OMP_CLAUSE_CHAIN (clause) = nc;
f014c653
JJ
10092 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10093 gimplify_omp_ctxp = ctx->outer_context;
10094 lang_hooks.decls.omp_finish_clause (nc, pre_p);
10095 gimplify_omp_ctxp = ctx;
95782571 10096 }
953ff289 10097 *list_p = clause;
f014c653
JJ
10098 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10099 gimplify_omp_ctxp = ctx->outer_context;
10100 lang_hooks.decls.omp_finish_clause (clause, pre_p);
3693a620
JJ
10101 if (gimplify_omp_ctxp)
10102 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
10103 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
10104 && DECL_P (OMP_CLAUSE_SIZE (clause)))
10105 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
10106 true);
f014c653 10107 gimplify_omp_ctxp = ctx;
953ff289
DN
10108 return 0;
10109}
10110
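/* Illustrative sketch, not part of gimplify.c: the _Atomic diagnostic in the
   implicit firstprivate case above fires for user code such as (hypothetical
   function, illustration only):

     void f (void)
     {
       _Atomic int a = 0;
       #pragma omp target        // 'a' would be implicitly firstprivate here
       a++;                      // error: '_Atomic' 'a' in implicit
                                 // 'firstprivate' clause on 'target' construct
     }
 */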
10111static void
1a80d6b8 10112gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
d9a6bd32 10113 enum tree_code code)
953ff289
DN
10114{
10115 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
bf38f7e9 10116 tree *orig_list_p = list_p;
953ff289 10117 tree c, decl;
bf38f7e9 10118 bool has_inscan_reductions = false;
953ff289 10119
1a80d6b8
JJ
10120 if (body)
10121 {
10122 struct gimplify_omp_ctx *octx;
10123 for (octx = ctx; octx; octx = octx->outer_context)
10124 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
10125 break;
10126 if (octx)
10127 {
10128 struct walk_stmt_info wi;
10129 memset (&wi, 0, sizeof (wi));
10130 walk_gimple_seq (body, omp_find_stores_stmt,
10131 omp_find_stores_op, &wi);
10132 }
10133 }
7e47198b 10134
ec03bc90
JJ
10135 if (ctx->add_safelen1)
10136 {
10137 /* If there are VLAs in the body of the simd loop, prevent
 10138 vectorization. */
10139 gcc_assert (ctx->region_type == ORT_SIMD);
10140 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
10141 OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
10142 OMP_CLAUSE_CHAIN (c) = *list_p;
10143 *list_p = c;
10144 list_p = &OMP_CLAUSE_CHAIN (c);
10145 }
10146
7e47198b
JJ
10147 if (ctx->region_type == ORT_WORKSHARE
10148 && ctx->outer_context
10149 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
10150 {
10151 for (c = ctx->outer_context->clauses; c; c = OMP_CLAUSE_CHAIN (c))
10152 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10153 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
10154 {
10155 decl = OMP_CLAUSE_DECL (c);
10156 splay_tree_node n
10157 = splay_tree_lookup (ctx->outer_context->variables,
10158 (splay_tree_key) decl);
10159 gcc_checking_assert (!splay_tree_lookup (ctx->variables,
10160 (splay_tree_key) decl));
10161 omp_add_variable (ctx, decl, n->value);
10162 tree c2 = copy_node (c);
10163 OMP_CLAUSE_CHAIN (c2) = *list_p;
10164 *list_p = c2;
10165 if ((n->value & GOVD_FIRSTPRIVATE) == 0)
10166 continue;
10167 c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10168 OMP_CLAUSE_FIRSTPRIVATE);
10169 OMP_CLAUSE_DECL (c2) = decl;
10170 OMP_CLAUSE_CHAIN (c2) = *list_p;
10171 *list_p = c2;
10172 }
10173 }
953ff289
DN
10174 while ((c = *list_p) != NULL)
10175 {
10176 splay_tree_node n;
10177 bool remove = false;
10178
aaf46ef9 10179 switch (OMP_CLAUSE_CODE (c))
953ff289 10180 {
9dc5773f
JJ
10181 case OMP_CLAUSE_FIRSTPRIVATE:
10182 if ((ctx->region_type & ORT_TARGET)
10183 && (ctx->region_type & ORT_ACC) == 0
10184 && TYPE_ATOMIC (strip_array_types
10185 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
10186 {
10187 error_at (OMP_CLAUSE_LOCATION (c),
10188 "%<_Atomic%> %qD in %<firstprivate%> clause on "
10189 "%<target%> construct", OMP_CLAUSE_DECL (c));
10190 remove = true;
10191 break;
10192 }
10193 /* FALLTHRU */
953ff289
DN
10194 case OMP_CLAUSE_PRIVATE:
10195 case OMP_CLAUSE_SHARED:
74bf76ed 10196 case OMP_CLAUSE_LINEAR:
953ff289
DN
10197 decl = OMP_CLAUSE_DECL (c);
10198 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10199 remove = !(n->value & GOVD_SEEN);
7e47198b
JJ
10200 if ((n->value & GOVD_LASTPRIVATE_CONDITIONAL) != 0
10201 && code == OMP_PARALLEL
10202 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10203 remove = true;
953ff289
DN
10204 if (! remove)
10205 {
aaf46ef9 10206 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
953ff289
DN
10207 if ((n->value & GOVD_DEBUG_PRIVATE)
10208 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
10209 {
10210 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
10211 || ((n->value & GOVD_DATA_SHARE_CLASS)
e9e2ef9f 10212 == GOVD_SHARED));
aaf46ef9 10213 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
953ff289
DN
10214 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
10215 }
1a80d6b8
JJ
10216 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
10217 && (n->value & GOVD_WRITTEN) == 0
10218 && DECL_P (decl)
10219 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10220 OMP_CLAUSE_SHARED_READONLY (c) = 1;
10221 else if (DECL_P (decl)
10222 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
cca6b724 10223 && (n->value & GOVD_WRITTEN) != 0)
1a80d6b8
JJ
10224 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10225 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
10226 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10227 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
953ff289
DN
10228 }
10229 break;
10230
10231 case OMP_CLAUSE_LASTPRIVATE:
10232 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
10233 accurately reflect the presence of a FIRSTPRIVATE clause. */
10234 decl = OMP_CLAUSE_DECL (c);
10235 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10236 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
10237 = (n->value & GOVD_FIRSTPRIVATE) != 0;
b4c3a85b
JJ
10238 if (code == OMP_DISTRIBUTE
10239 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
e01d41e5
JJ
10240 {
10241 remove = true;
10242 error_at (OMP_CLAUSE_LOCATION (c),
10243 "same variable used in %<firstprivate%> and "
10244 "%<lastprivate%> clauses on %<distribute%> "
10245 "construct");
10246 }
1a80d6b8
JJ
10247 if (!remove
10248 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10249 && DECL_P (decl)
10250 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10251 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
7e47198b
JJ
10252 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) && code == OMP_PARALLEL)
10253 remove = true;
953ff289 10254 break;
b8698a0f 10255
acf0174b
JJ
10256 case OMP_CLAUSE_ALIGNED:
10257 decl = OMP_CLAUSE_DECL (c);
10258 if (!is_global_var (decl))
10259 {
10260 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10261 remove = n == NULL || !(n->value & GOVD_SEEN);
10262 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
10263 {
10264 struct gimplify_omp_ctx *octx;
10265 if (n != NULL
10266 && (n->value & (GOVD_DATA_SHARE_CLASS
10267 & ~GOVD_FIRSTPRIVATE)))
10268 remove = true;
10269 else
10270 for (octx = ctx->outer_context; octx;
10271 octx = octx->outer_context)
10272 {
10273 n = splay_tree_lookup (octx->variables,
10274 (splay_tree_key) decl);
10275 if (n == NULL)
10276 continue;
10277 if (n->value & GOVD_LOCAL)
10278 break;
10279 /* We have to avoid assigning a shared variable
10280 to itself when trying to add
10281 __builtin_assume_aligned. */
10282 if (n->value & GOVD_SHARED)
10283 {
10284 remove = true;
10285 break;
10286 }
10287 }
10288 }
10289 }
10290 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
10291 {
10292 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10293 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
10294 remove = true;
10295 }
10296 break;
10297
28567c40
JJ
10298 case OMP_CLAUSE_NONTEMPORAL:
10299 decl = OMP_CLAUSE_DECL (c);
10300 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10301 remove = n == NULL || !(n->value & GOVD_SEEN);
10302 break;
10303
acf0174b 10304 case OMP_CLAUSE_MAP:
e01d41e5
JJ
10305 if (code == OMP_TARGET_EXIT_DATA
10306 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
10307 {
10308 remove = true;
10309 break;
10310 }
acf0174b 10311 decl = OMP_CLAUSE_DECL (c);
62aee289 10312 /* Data clauses associated with reductions must be
c42cfb5c
CP
10313 compatible with present_or_copy. Warn and adjust the clause
10314 if that is not the case. */
62aee289
MR
10315 if (ctx->region_type == ORT_ACC_PARALLEL
10316 || ctx->region_type == ORT_ACC_SERIAL)
c42cfb5c
CP
10317 {
10318 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
10319 n = NULL;
10320
10321 if (DECL_P (t))
10322 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
10323
10324 if (n && (n->value & GOVD_REDUCTION))
10325 {
10326 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
10327
10328 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
10329 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
10330 && kind != GOMP_MAP_FORCE_PRESENT
10331 && kind != GOMP_MAP_POINTER)
10332 {
10333 warning_at (OMP_CLAUSE_LOCATION (c), 0,
10334 "incompatible data clause with reduction "
a9c697b8 10335 "on %qE; promoting to %<present_or_copy%>",
c42cfb5c
CP
10336 DECL_NAME (t));
10337 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
10338 }
10339 }
10340 }
acf0174b 10341 if (!DECL_P (decl))
d9a6bd32
JJ
10342 {
10343 if ((ctx->region_type & ORT_TARGET) != 0
10344 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
10345 {
10346 if (TREE_CODE (decl) == INDIRECT_REF
10347 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
10348 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
10349 == REFERENCE_TYPE))
10350 decl = TREE_OPERAND (decl, 0);
10351 if (TREE_CODE (decl) == COMPONENT_REF)
10352 {
10353 while (TREE_CODE (decl) == COMPONENT_REF)
10354 decl = TREE_OPERAND (decl, 0);
10355 if (DECL_P (decl))
10356 {
10357 n = splay_tree_lookup (ctx->variables,
10358 (splay_tree_key) decl);
10359 if (!(n->value & GOVD_SEEN))
10360 remove = true;
10361 }
10362 }
10363 }
10364 break;
10365 }
acf0174b 10366 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
d9a6bd32
JJ
10367 if ((ctx->region_type & ORT_TARGET) != 0
10368 && !(n->value & GOVD_SEEN)
4a38b02b 10369 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
56f71478
JJ
10370 && (!is_global_var (decl)
10371 || !lookup_attribute ("omp declare target link",
10372 DECL_ATTRIBUTES (decl))))
d9a6bd32
JJ
10373 {
10374 remove = true;
10375 /* For struct element mapping, if the struct is never referenced
 10376 in the target block and none of the mappings has the always
 10377 modifier, remove all the struct element mappings, which
 10378 immediately follow the GOMP_MAP_STRUCT map clause. */
10379 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
10380 {
10381 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
10382 while (cnt--)
10383 OMP_CLAUSE_CHAIN (c)
10384 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
10385 }
10386 }
10387 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
10388 && code == OMP_TARGET_EXIT_DATA)
acf0174b
JJ
10389 remove = true;
10390 else if (DECL_SIZE (decl)
10391 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
d9a6bd32 10392 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
e01d41e5
JJ
10393 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
10394 && (OMP_CLAUSE_MAP_KIND (c)
10395 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
acf0174b 10396 {
41dbbb37
TS
10397 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
10398 for these, TREE_CODE (DECL_SIZE (decl)) will always be
10399 INTEGER_CST. */
10400 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
10401
acf0174b
JJ
10402 tree decl2 = DECL_VALUE_EXPR (decl);
10403 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
10404 decl2 = TREE_OPERAND (decl2, 0);
10405 gcc_assert (DECL_P (decl2));
10406 tree mem = build_simple_mem_ref (decl2);
10407 OMP_CLAUSE_DECL (c) = mem;
10408 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
10409 if (ctx->outer_context)
10410 {
10411 omp_notice_variable (ctx->outer_context, decl2, true);
10412 omp_notice_variable (ctx->outer_context,
10413 OMP_CLAUSE_SIZE (c), true);
10414 }
d9a6bd32
JJ
10415 if (((ctx->region_type & ORT_TARGET) != 0
10416 || !ctx->target_firstprivatize_array_bases)
10417 && ((n->value & GOVD_SEEN) == 0
10418 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
10419 {
10420 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10421 OMP_CLAUSE_MAP);
10422 OMP_CLAUSE_DECL (nc) = decl;
10423 OMP_CLAUSE_SIZE (nc) = size_zero_node;
10424 if (ctx->target_firstprivatize_array_bases)
10425 OMP_CLAUSE_SET_MAP_KIND (nc,
10426 GOMP_MAP_FIRSTPRIVATE_POINTER);
10427 else
10428 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
10429 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
10430 OMP_CLAUSE_CHAIN (c) = nc;
10431 c = nc;
10432 }
10433 }
10434 else
10435 {
10436 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
10437 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
e01d41e5
JJ
10438 gcc_assert ((n->value & GOVD_SEEN) == 0
10439 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
10440 == 0));
acf0174b
JJ
10441 }
10442 break;
10443
10444 case OMP_CLAUSE_TO:
10445 case OMP_CLAUSE_FROM:
41dbbb37 10446 case OMP_CLAUSE__CACHE_:
acf0174b
JJ
10447 decl = OMP_CLAUSE_DECL (c);
10448 if (!DECL_P (decl))
10449 break;
10450 if (DECL_SIZE (decl)
10451 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
10452 {
10453 tree decl2 = DECL_VALUE_EXPR (decl);
10454 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
10455 decl2 = TREE_OPERAND (decl2, 0);
10456 gcc_assert (DECL_P (decl2));
10457 tree mem = build_simple_mem_ref (decl2);
10458 OMP_CLAUSE_DECL (c) = mem;
10459 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
10460 if (ctx->outer_context)
10461 {
10462 omp_notice_variable (ctx->outer_context, decl2, true);
10463 omp_notice_variable (ctx->outer_context,
10464 OMP_CLAUSE_SIZE (c), true);
10465 }
10466 }
b46ebd6c
JJ
10467 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
10468 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
acf0174b
JJ
10469 break;
10470
953ff289 10471 case OMP_CLAUSE_REDUCTION:
bf38f7e9
JJ
10472 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
10473 {
10474 decl = OMP_CLAUSE_DECL (c);
10475 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10476 if ((n->value & GOVD_REDUCTION_INSCAN) == 0)
10477 {
10478 remove = true;
10479 error_at (OMP_CLAUSE_LOCATION (c),
10480 "%qD specified in %<inscan%> %<reduction%> clause "
10481 "but not in %<scan%> directive clause", decl);
10482 break;
10483 }
10484 has_inscan_reductions = true;
10485 }
10486 /* FALLTHRU */
28567c40
JJ
10487 case OMP_CLAUSE_IN_REDUCTION:
10488 case OMP_CLAUSE_TASK_REDUCTION:
1a80d6b8 10489 decl = OMP_CLAUSE_DECL (c);
c42cfb5c 10490 /* OpenACC reductions need a present_or_copy data clause.
bd1cab35 10491 Add one if necessary. Emit an error when the reduction is private. */
62aee289
MR
10492 if (ctx->region_type == ORT_ACC_PARALLEL
10493 || ctx->region_type == ORT_ACC_SERIAL)
c42cfb5c
CP
10494 {
10495 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10496 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
bd1cab35
CLT
10497 {
10498 remove = true;
10499 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
10500 "reduction on %qE", DECL_NAME (decl));
10501 }
c42cfb5c
CP
10502 else if ((n->value & GOVD_MAP) == 0)
10503 {
10504 tree next = OMP_CLAUSE_CHAIN (c);
10505 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
10506 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
10507 OMP_CLAUSE_DECL (nc) = decl;
10508 OMP_CLAUSE_CHAIN (c) = nc;
10509 lang_hooks.decls.omp_finish_clause (nc, pre_p);
10510 while (1)
10511 {
10512 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
10513 if (OMP_CLAUSE_CHAIN (nc) == NULL)
10514 break;
10515 nc = OMP_CLAUSE_CHAIN (nc);
10516 }
10517 OMP_CLAUSE_CHAIN (nc) = next;
10518 n->value |= GOVD_MAP;
10519 }
10520 }
1a80d6b8
JJ
10521 if (DECL_P (decl)
10522 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10523 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10524 break;
953ff289
DN
10525 case OMP_CLAUSE_COPYIN:
10526 case OMP_CLAUSE_COPYPRIVATE:
10527 case OMP_CLAUSE_IF:
10528 case OMP_CLAUSE_NUM_THREADS:
acf0174b
JJ
10529 case OMP_CLAUSE_NUM_TEAMS:
10530 case OMP_CLAUSE_THREAD_LIMIT:
10531 case OMP_CLAUSE_DIST_SCHEDULE:
10532 case OMP_CLAUSE_DEVICE:
953ff289
DN
10533 case OMP_CLAUSE_SCHEDULE:
10534 case OMP_CLAUSE_NOWAIT:
10535 case OMP_CLAUSE_ORDERED:
10536 case OMP_CLAUSE_DEFAULT:
a68ab351
JJ
10537 case OMP_CLAUSE_UNTIED:
10538 case OMP_CLAUSE_COLLAPSE:
20906c66
JJ
10539 case OMP_CLAUSE_FINAL:
10540 case OMP_CLAUSE_MERGEABLE:
acf0174b 10541 case OMP_CLAUSE_PROC_BIND:
74bf76ed 10542 case OMP_CLAUSE_SAFELEN:
d9a6bd32 10543 case OMP_CLAUSE_SIMDLEN:
acf0174b 10544 case OMP_CLAUSE_DEPEND:
d9a6bd32
JJ
10545 case OMP_CLAUSE_PRIORITY:
10546 case OMP_CLAUSE_GRAINSIZE:
10547 case OMP_CLAUSE_NUM_TASKS:
10548 case OMP_CLAUSE_NOGROUP:
10549 case OMP_CLAUSE_THREADS:
10550 case OMP_CLAUSE_SIMD:
10551 case OMP_CLAUSE_HINT:
10552 case OMP_CLAUSE_DEFAULTMAP:
1fdd6f04 10553 case OMP_CLAUSE_ORDER:
554a530f 10554 case OMP_CLAUSE_BIND:
d9a6bd32 10555 case OMP_CLAUSE_USE_DEVICE_PTR:
398e3feb 10556 case OMP_CLAUSE_USE_DEVICE_ADDR:
d9a6bd32 10557 case OMP_CLAUSE_IS_DEVICE_PTR:
41dbbb37
TS
10558 case OMP_CLAUSE_ASYNC:
10559 case OMP_CLAUSE_WAIT:
41dbbb37
TS
10560 case OMP_CLAUSE_INDEPENDENT:
10561 case OMP_CLAUSE_NUM_GANGS:
10562 case OMP_CLAUSE_NUM_WORKERS:
10563 case OMP_CLAUSE_VECTOR_LENGTH:
10564 case OMP_CLAUSE_GANG:
10565 case OMP_CLAUSE_WORKER:
10566 case OMP_CLAUSE_VECTOR:
10567 case OMP_CLAUSE_AUTO:
10568 case OMP_CLAUSE_SEQ:
7a5e4956 10569 case OMP_CLAUSE_TILE:
829c6349
CLT
10570 case OMP_CLAUSE_IF_PRESENT:
10571 case OMP_CLAUSE_FINALIZE:
bf38f7e9
JJ
10572 case OMP_CLAUSE_INCLUSIVE:
10573 case OMP_CLAUSE_EXCLUSIVE:
953ff289
DN
10574 break;
10575
10576 default:
10577 gcc_unreachable ();
10578 }
10579
10580 if (remove)
10581 *list_p = OMP_CLAUSE_CHAIN (c);
10582 else
10583 list_p = &OMP_CLAUSE_CHAIN (c);
10584 }
10585
10586 /* Add in any implicit data sharing. */
f014c653
JJ
10587 struct gimplify_adjust_omp_clauses_data data;
10588 data.list_p = list_p;
10589 data.pre_p = pre_p;
10590 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
b8698a0f 10591
bf38f7e9
JJ
10592 if (has_inscan_reductions)
10593 for (c = *orig_list_p; c; c = OMP_CLAUSE_CHAIN (c))
10594 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10595 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
10596 {
10597 error_at (OMP_CLAUSE_LOCATION (c),
10598 "%<inscan%> %<reduction%> clause used together with "
10599 "%<linear%> clause for a variable other than loop "
10600 "iterator");
10601 break;
10602 }
10603
953ff289
DN
10604 gimplify_omp_ctxp = ctx->outer_context;
10605 delete_omp_context (ctx);
10606}
10607
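/* Illustrative sketch, not part of gimplify.c: the OMP_DISTRIBUTE check
   above rejects, for example, the following user code (with 'i' assumed
   declared earlier; illustration only):

     #pragma omp teams
     #pragma omp distribute firstprivate (i) lastprivate (i)
     for (i = 0; i < 64; i++)
       ;
     // error: same variable used in 'firstprivate' and 'lastprivate'
     // clauses on 'distribute' construct
 */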
135df52c
JJ
10608/* Return 0 if CONSTRUCTS selectors don't match the OpenMP context,
10609 -1 if unknown yet (simd is involved, won't be known until vectorization)
d0c464d2
JJ
10610 and 1 if they do. If SCORES is non-NULL, it should point to an array
10611 of at least 2*NCONSTRUCTS+2 ints, and will be filled with the positions
10612 of the CONSTRUCTS (position -1 if it will never match) followed by
 10613 the number of constructs in the OpenMP context construct trait. If
 10614 the score depends on whether it will be in a declare simd clone or
 10615 not, the function returns 2 and there will be two sets of scores:
 10616 the first for the case that it is not in a declare simd clone, the
 10617 second for the case that it is. */
10618
10619int
10620omp_construct_selector_matches (enum tree_code *constructs, int nconstructs,
10621 int *scores)
135df52c
JJ
10622{
10623 int matched = 0, cnt = 0;
10624 bool simd_seen = false;
d0c464d2
JJ
10625 bool target_seen = false;
10626 int declare_simd_cnt = -1;
10627 auto_vec<enum tree_code, 16> codes;
135df52c
JJ
10628 for (struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; ctx;)
10629 {
10630 if (((ctx->region_type & ORT_PARALLEL) && ctx->code == OMP_PARALLEL)
10631 || ((ctx->region_type & (ORT_TARGET | ORT_IMPLICIT_TARGET | ORT_ACC))
10632 == ORT_TARGET && ctx->code == OMP_TARGET)
10633 || ((ctx->region_type & ORT_TEAMS) && ctx->code == OMP_TEAMS)
10634 || (ctx->region_type == ORT_WORKSHARE && ctx->code == OMP_FOR)
10635 || (ctx->region_type == ORT_SIMD
10636 && ctx->code == OMP_SIMD
10637 && !omp_find_clause (ctx->clauses, OMP_CLAUSE_BIND)))
10638 {
10639 ++cnt;
d0c464d2
JJ
10640 if (scores)
10641 codes.safe_push (ctx->code);
10642 else if (matched < nconstructs && ctx->code == constructs[matched])
135df52c
JJ
10643 {
10644 if (ctx->code == OMP_SIMD)
10645 {
10646 if (matched)
10647 return 0;
10648 simd_seen = true;
10649 }
10650 ++matched;
10651 }
10652 if (ctx->code == OMP_TARGET)
d0c464d2
JJ
10653 {
10654 if (scores == NULL)
10655 return matched < nconstructs ? 0 : simd_seen ? -1 : 1;
10656 target_seen = true;
10657 break;
10658 }
135df52c
JJ
10659 }
10660 else if (ctx->region_type == ORT_WORKSHARE
10661 && ctx->code == OMP_LOOP
10662 && ctx->outer_context
10663 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL
10664 && ctx->outer_context->outer_context
10665 && ctx->outer_context->outer_context->code == OMP_LOOP
10666 && ctx->outer_context->outer_context->distribute)
10667 ctx = ctx->outer_context->outer_context;
10668 ctx = ctx->outer_context;
10669 }
d0c464d2 10670 if (!target_seen
135df52c
JJ
10671 && lookup_attribute ("omp declare simd",
10672 DECL_ATTRIBUTES (current_function_decl)))
10673 {
10674 /* Declare simd is a 'maybe' case: it is supposed to be added only to
 10675 the clones created by omp-simd-clone.c, not to the base function. */
d0c464d2
JJ
10676 declare_simd_cnt = cnt++;
10677 if (scores)
10678 codes.safe_push (OMP_SIMD);
10679 else if (cnt == 0
10680 && constructs[0] == OMP_SIMD)
10681 {
10682 gcc_assert (matched == 0);
10683 simd_seen = true;
10684 if (++matched == nconstructs)
10685 return -1;
10686 }
135df52c
JJ
10687 }
10688 if (tree attr = lookup_attribute ("omp declare variant variant",
10689 DECL_ATTRIBUTES (current_function_decl)))
10690 {
10691 enum tree_code variant_constructs[5];
d0c464d2
JJ
10692 int variant_nconstructs = 0;
10693 if (!target_seen)
10694 variant_nconstructs
10695 = omp_constructor_traits_to_codes (TREE_VALUE (attr),
10696 variant_constructs);
135df52c
JJ
10697 for (int i = 0; i < variant_nconstructs; i++)
10698 {
10699 ++cnt;
d0c464d2
JJ
10700 if (scores)
10701 codes.safe_push (variant_constructs[i]);
10702 else if (matched < nconstructs
10703 && variant_constructs[i] == constructs[matched])
135df52c
JJ
10704 {
10705 if (variant_constructs[i] == OMP_SIMD)
10706 {
10707 if (matched)
10708 return 0;
10709 simd_seen = true;
10710 }
10711 ++matched;
10712 }
10713 }
10714 }
d0c464d2
JJ
10715 if (!target_seen
10716 && lookup_attribute ("omp declare target block",
10717 DECL_ATTRIBUTES (current_function_decl)))
135df52c 10718 {
d0c464d2
JJ
10719 if (scores)
10720 codes.safe_push (OMP_TARGET);
10721 else if (matched < nconstructs && constructs[matched] == OMP_TARGET)
135df52c
JJ
10722 ++matched;
10723 }
d0c464d2
JJ
10724 if (scores)
10725 {
10726 for (int pass = 0; pass < (declare_simd_cnt == -1 ? 1 : 2); pass++)
10727 {
10728 int j = codes.length () - 1;
10729 for (int i = nconstructs - 1; i >= 0; i--)
10730 {
10731 while (j >= 0
10732 && (pass != 0 || declare_simd_cnt != j)
10733 && constructs[i] != codes[j])
10734 --j;
10735 if (pass == 0 && declare_simd_cnt != -1 && j > declare_simd_cnt)
10736 *scores++ = j - 1;
10737 else
10738 *scores++ = j;
10739 }
10740 *scores++ = ((pass == 0 && declare_simd_cnt != -1)
10741 ? codes.length () - 1 : codes.length ());
10742 }
10743 return declare_simd_cnt == -1 ? 1 : 2;
10744 }
135df52c 10745 if (matched == nconstructs)
d0c464d2 10746 return simd_seen ? -1 : 1;
135df52c
JJ
10747 return 0;
10748}
10749
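/* Illustrative sketch, not part of gimplify.c: for code being gimplified
   inside

     #pragma omp target
     #pragma omp teams
     #pragma omp parallel
     #pragma omp for
     ...

   the OpenMP context construct trait contains target, teams, parallel and
   for, so e.g. a construct selector naming just 'target' or just 'parallel'
   matches (result 1), while one naming 'simd' does not match here
   (result 0) unless a declare simd clone might be involved; with SCORES
   non-NULL the positions of the matched constructs in that trait are
   recorded for score computation.  */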
41dbbb37
TS
10750/* Gimplify OACC_CACHE. */
10751
10752static void
10753gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
10754{
10755 tree expr = *expr_p;
10756
182190f2 10757 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
d9a6bd32 10758 OACC_CACHE);
1a80d6b8
JJ
10759 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
10760 OACC_CACHE);
41dbbb37
TS
10761
10762 /* TODO: Do something sensible with this information. */
10763
10764 *expr_p = NULL_TREE;
10765}
10766
6e232ba4
JN
10767/* Helper function of gimplify_oacc_declare. The helper's purpose is to,
10768 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
10769 kind. The entry kind will replace the one in CLAUSE, while the exit
10770 kind will be used in a new omp_clause and returned to the caller. */
10771
10772static tree
10773gimplify_oacc_declare_1 (tree clause)
10774{
10775 HOST_WIDE_INT kind, new_op;
10776 bool ret = false;
10777 tree c = NULL;
10778
10779 kind = OMP_CLAUSE_MAP_KIND (clause);
10780
10781 switch (kind)
10782 {
10783 case GOMP_MAP_ALLOC:
829c6349 10784 new_op = GOMP_MAP_RELEASE;
6e232ba4
JN
10785 ret = true;
10786 break;
10787
10788 case GOMP_MAP_FROM:
10789 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
10790 new_op = GOMP_MAP_FROM;
10791 ret = true;
10792 break;
10793
10794 case GOMP_MAP_TOFROM:
10795 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
10796 new_op = GOMP_MAP_FROM;
10797 ret = true;
10798 break;
10799
10800 case GOMP_MAP_DEVICE_RESIDENT:
10801 case GOMP_MAP_FORCE_DEVICEPTR:
10802 case GOMP_MAP_FORCE_PRESENT:
10803 case GOMP_MAP_LINK:
10804 case GOMP_MAP_POINTER:
10805 case GOMP_MAP_TO:
10806 break;
10807
10808 default:
10809 gcc_unreachable ();
10810 break;
10811 }
10812
10813 if (ret)
10814 {
10815 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
10816 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
10817 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
10818 }
10819
10820 return c;
10821}
10822
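/* Illustrative summary, not part of gimplify.c: per the switch above, a
   clause of kind GOMP_MAP_TOFROM is rewritten to GOMP_MAP_TO for the entry
   and paired with a new GOMP_MAP_FROM clause for the exit, GOMP_MAP_FROM
   becomes GOMP_MAP_FORCE_ALLOC on entry with GOMP_MAP_FROM on exit, and
   GOMP_MAP_ALLOC keeps its entry kind but gains a GOMP_MAP_RELEASE exit
   clause; kinds such as GOMP_MAP_TO or GOMP_MAP_LINK need no exit
   counterpart.  (Which OpenACC 'declare' clause the front end lowers to
   each of these kinds is an assumption left out here on purpose.)  */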
10823/* Gimplify OACC_DECLARE. */
10824
10825static void
10826gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
10827{
10828 tree expr = *expr_p;
10829 gomp_target *stmt;
7ba8651e 10830 tree clauses, t, decl;
6e232ba4
JN
10831
10832 clauses = OACC_DECLARE_CLAUSES (expr);
10833
10834 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
7ba8651e 10835 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
6e232ba4
JN
10836
10837 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
10838 {
7ba8651e 10839 decl = OMP_CLAUSE_DECL (t);
6e232ba4
JN
10840
10841 if (TREE_CODE (decl) == MEM_REF)
7ba8651e
CP
10842 decl = TREE_OPERAND (decl, 0);
10843
10844 if (VAR_P (decl) && !is_oacc_declared (decl))
10845 {
10846 tree attr = get_identifier ("oacc declare target");
10847 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
10848 DECL_ATTRIBUTES (decl));
10849 }
6e232ba4 10850
8813a647 10851 if (VAR_P (decl)
6e232ba4
JN
10852 && !is_global_var (decl)
10853 && DECL_CONTEXT (decl) == current_function_decl)
10854 {
10855 tree c = gimplify_oacc_declare_1 (t);
10856 if (c)
10857 {
10858 if (oacc_declare_returns == NULL)
10859 oacc_declare_returns = new hash_map<tree, tree>;
10860
10861 oacc_declare_returns->put (decl, c);
10862 }
10863 }
10864
7ba8651e
CP
10865 if (gimplify_omp_ctxp)
10866 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
6e232ba4
JN
10867 }
10868
10869 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
10870 clauses);
10871
10872 gimplify_seq_add_stmt (pre_p, stmt);
10873
10874 *expr_p = NULL_TREE;
10875}
10876
953ff289
DN
10877/* Gimplify the contents of an OMP_PARALLEL statement. This involves
10878 gimplification of the body, as well as scanning the body for used
10879 variables. We need to do this scan now, because variable-sized
10880 decls will be decomposed during gimplification. */
10881
726a989a
RB
10882static void
10883gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
953ff289
DN
10884{
10885 tree expr = *expr_p;
355fe088 10886 gimple *g;
726a989a 10887 gimple_seq body = NULL;
953ff289 10888
a68ab351
JJ
10889 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
10890 OMP_PARALLEL_COMBINED (expr)
10891 ? ORT_COMBINED_PARALLEL
d9a6bd32 10892 : ORT_PARALLEL, OMP_PARALLEL);
953ff289 10893
45852dcc 10894 push_gimplify_context ();
953ff289 10895
726a989a
RB
10896 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
10897 if (gimple_code (g) == GIMPLE_BIND)
10898 pop_gimplify_context (g);
50674e96 10899 else
726a989a 10900 pop_gimplify_context (NULL);
953ff289 10901
1a80d6b8 10902 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
d9a6bd32 10903 OMP_PARALLEL);
953ff289 10904
726a989a
RB
10905 g = gimple_build_omp_parallel (body,
10906 OMP_PARALLEL_CLAUSES (expr),
10907 NULL_TREE, NULL_TREE);
10908 if (OMP_PARALLEL_COMBINED (expr))
10909 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
10910 gimplify_seq_add_stmt (pre_p, g);
10911 *expr_p = NULL_TREE;
953ff289
DN
10912}
10913
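/* Illustrative sketch, not part of gimplify.c: OMP_PARALLEL_COMBINED is what
   distinguishes the combined construct from the equivalent pair of separate
   directives ('i' and 'n' assumed declared; illustration only):

     #pragma omp parallel for        // combined -> ORT_COMBINED_PARALLEL,
     for (i = 0; i < n; i++)         //             GF_OMP_PARALLEL_COMBINED
       ;

     #pragma omp parallel            // separate directives -> plain ORT_PARALLEL
     {
       #pragma omp for
       for (i = 0; i < n; i++)
         ;
     }
 */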
a68ab351
JJ
10914/* Gimplify the contents of an OMP_TASK statement. This involves
10915 gimplification of the body, as well as scanning the body for used
10916 variables. We need to do this scan now, because variable-sized
10917 decls will be decomposed during gimplification. */
953ff289 10918
726a989a
RB
10919static void
10920gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
953ff289 10921{
a68ab351 10922 tree expr = *expr_p;
355fe088 10923 gimple *g;
726a989a 10924 gimple_seq body = NULL;
953ff289 10925
28567c40
JJ
10926 if (OMP_TASK_BODY (expr) == NULL_TREE)
10927 for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
10928 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10929 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
10930 {
10931 error_at (OMP_CLAUSE_LOCATION (c),
10932 "%<mutexinoutset%> kind in %<depend%> clause on a "
10933 "%<taskwait%> construct");
10934 break;
10935 }
10936
f22f4340 10937 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
629b3d75 10938 omp_find_clause (OMP_TASK_CLAUSES (expr),
f22f4340 10939 OMP_CLAUSE_UNTIED)
d9a6bd32 10940 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
953ff289 10941
28567c40
JJ
10942 if (OMP_TASK_BODY (expr))
10943 {
10944 push_gimplify_context ();
953ff289 10945
28567c40
JJ
10946 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
10947 if (gimple_code (g) == GIMPLE_BIND)
10948 pop_gimplify_context (g);
10949 else
10950 pop_gimplify_context (NULL);
10951 }
953ff289 10952
1a80d6b8
JJ
10953 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
10954 OMP_TASK);
917948d3 10955
726a989a
RB
10956 g = gimple_build_omp_task (body,
10957 OMP_TASK_CLAUSES (expr),
10958 NULL_TREE, NULL_TREE,
10959 NULL_TREE, NULL_TREE, NULL_TREE);
28567c40
JJ
10960 if (OMP_TASK_BODY (expr) == NULL_TREE)
10961 gimple_omp_task_set_taskwait_p (g, true);
726a989a
RB
10962 gimplify_seq_add_stmt (pre_p, g);
10963 *expr_p = NULL_TREE;
a68ab351
JJ
10964}
10965
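/* Illustrative sketch, not part of gimplify.c: a "#pragma omp taskwait" with
   a depend clause is represented as an OMP_TASK with a NULL body, which is
   why the code above only builds a body when OMP_TASK_BODY is non-NULL and
   otherwise marks the GIMPLE_OMP_TASK via gimple_omp_task_set_taskwait_p.
   For example ('x' assumed declared; illustration only):

     #pragma omp taskwait depend(in: x)             // accepted
     #pragma omp taskwait depend(mutexinoutset: x)  // error: 'mutexinoutset'
                                                    // kind in 'depend' clause
                                                    // on a 'taskwait' construct
 */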
acf0174b 10966/* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
0b27c3ed
JJ
10967 with non-NULL OMP_FOR_INIT. Also, fill in pdata array,
10968 pdata[0] non-NULL if there is anything non-trivial in between, pdata[1]
10969 is address of OMP_PARALLEL in between if any, pdata[2] is address of
10970 OMP_FOR in between if any and pdata[3] is address of the inner
10971 OMP_FOR/OMP_SIMD. */
acf0174b
JJ
10972
10973static tree
0b27c3ed 10974find_combined_omp_for (tree *tp, int *walk_subtrees, void *data)
acf0174b 10975{
0b27c3ed 10976 tree **pdata = (tree **) data;
acf0174b
JJ
10977 *walk_subtrees = 0;
10978 switch (TREE_CODE (*tp))
10979 {
10980 case OMP_FOR:
0b27c3ed
JJ
10981 if (OMP_FOR_INIT (*tp) != NULL_TREE)
10982 {
10983 pdata[3] = tp;
10984 return *tp;
10985 }
10986 pdata[2] = tp;
acf0174b 10987 *walk_subtrees = 1;
0b27c3ed 10988 break;
acf0174b
JJ
10989 case OMP_SIMD:
10990 if (OMP_FOR_INIT (*tp) != NULL_TREE)
0b27c3ed
JJ
10991 {
10992 pdata[3] = tp;
10993 return *tp;
10994 }
acf0174b
JJ
10995 break;
10996 case BIND_EXPR:
0b27c3ed
JJ
10997 if (BIND_EXPR_VARS (*tp)
10998 || (BIND_EXPR_BLOCK (*tp)
10999 && BLOCK_VARS (BIND_EXPR_BLOCK (*tp))))
11000 pdata[0] = tp;
11001 *walk_subtrees = 1;
11002 break;
acf0174b 11003 case STATEMENT_LIST:
0b27c3ed
JJ
11004 if (!tsi_one_before_end_p (tsi_start (*tp)))
11005 pdata[0] = tp;
11006 *walk_subtrees = 1;
11007 break;
11008 case TRY_FINALLY_EXPR:
11009 pdata[0] = tp;
11010 *walk_subtrees = 1;
11011 break;
acf0174b 11012 case OMP_PARALLEL:
0b27c3ed 11013 pdata[1] = tp;
acf0174b
JJ
11014 *walk_subtrees = 1;
11015 break;
11016 default:
11017 break;
11018 }
11019 return NULL_TREE;
11020}
11021
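/* Illustrative sketch, not part of gimplify.c: for a combined construct such
   as ('i' and 'n' assumed declared; illustration only)

     #pragma omp distribute parallel for
     for (i = 0; i < n; i++)
       ;

   the outer OMP_DISTRIBUTE has a NULL OMP_FOR_INIT and its body contains the
   OMP_PARALLEL, which in turn contains the innermost OMP_FOR; the walk above
   then fills pdata[1] with the address of that OMP_PARALLEL and pdata[3]
   with the address of the innermost OMP_FOR, while pdata[0] records any
   non-trivial statements or variable declarations in between.  */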
a68ab351
JJ
11022/* Gimplify the gross structure of an OMP_FOR statement. */
11023
11024static enum gimplify_status
726a989a 11025gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
a68ab351 11026{
9ce1688b 11027 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
32e8bb8e
ILT
11028 enum gimplify_status ret = GS_ALL_DONE;
11029 enum gimplify_status tret;
538dd0b7 11030 gomp_for *gfor;
726a989a 11031 gimple_seq for_body, for_pre_body;
a68ab351 11032 int i;
74bf76ed 11033 bitmap has_decl_expr = NULL;
d9a6bd32 11034 enum omp_region_type ort = ORT_WORKSHARE;
a68ab351 11035
acf0174b 11036 orig_for_stmt = for_stmt = *expr_p;
a68ab351 11037
d81ab49d
JJ
11038 bool loop_p = (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_BIND)
11039 != NULL_TREE);
0b27c3ed
JJ
11040 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
11041 {
11042 tree *data[4] = { NULL, NULL, NULL, NULL };
11043 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
11044 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
11045 find_combined_omp_for, data, NULL);
11046 if (inner_for_stmt == NULL_TREE)
11047 {
11048 gcc_assert (seen_error ());
11049 *expr_p = NULL_TREE;
11050 return GS_ERROR;
11051 }
11052 if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
11053 {
11054 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
11055 &OMP_FOR_PRE_BODY (for_stmt));
11056 OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
11057 }
11058 if (OMP_FOR_PRE_BODY (inner_for_stmt))
11059 {
11060 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
11061 &OMP_FOR_PRE_BODY (for_stmt));
11062 OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
11063 }
11064
11065 if (data[0])
11066 {
11067 /* We have some statements or variable declarations in between
11068 the composite construct directives. Move them around the
11069 inner_for_stmt. */
11070 data[0] = expr_p;
11071 for (i = 0; i < 3; i++)
11072 if (data[i])
11073 {
11074 tree t = *data[i];
11075 if (i < 2 && data[i + 1] == &OMP_BODY (t))
11076 data[i + 1] = data[i];
11077 *data[i] = OMP_BODY (t);
11078 tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
11079 NULL_TREE, make_node (BLOCK));
11080 OMP_BODY (t) = body;
11081 append_to_statement_list_force (inner_for_stmt,
11082 &BIND_EXPR_BODY (body));
11083 *data[3] = t;
11084 data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
11085 gcc_assert (*data[3] == inner_for_stmt);
11086 }
11087 return GS_OK;
11088 }
11089
11090 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
d81ab49d
JJ
11091 if (!loop_p
11092 && OMP_FOR_ORIG_DECLS (inner_for_stmt)
0b27c3ed 11093 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
28567c40
JJ
11094 i)) == TREE_LIST
11095 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11096 i)))
0b27c3ed
JJ
11097 {
11098 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
11099 /* Class iterators aren't allowed on OMP_SIMD, so the only
d81ab49d
JJ
11100 case we need to solve is distribute parallel for. They are
11101 allowed on the loop construct, but that is already handled
11102 in gimplify_omp_loop. */
0b27c3ed
JJ
11103 gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
11104 && TREE_CODE (for_stmt) == OMP_DISTRIBUTE
11105 && data[1]);
11106 tree orig_decl = TREE_PURPOSE (orig);
11107 tree last = TREE_VALUE (orig);
11108 tree *pc;
11109 for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
11110 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
11111 if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
11112 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
11113 && OMP_CLAUSE_DECL (*pc) == orig_decl)
11114 break;
c3ac76aa
JJ
11115 if (*pc == NULL_TREE)
11116 {
11117 tree *spc;
11118 for (spc = &OMP_PARALLEL_CLAUSES (*data[1]);
11119 *spc; spc = &OMP_CLAUSE_CHAIN (*spc))
11120 if (OMP_CLAUSE_CODE (*spc) == OMP_CLAUSE_PRIVATE
11121 && OMP_CLAUSE_DECL (*spc) == orig_decl)
11122 break;
11123 if (*spc)
11124 {
11125 tree c = *spc;
11126 *spc = OMP_CLAUSE_CHAIN (c);
11127 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
11128 *pc = c;
11129 }
11130 }
0b27c3ed
JJ
11131 if (*pc == NULL_TREE)
11132 ;
11133 else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
11134 {
11135 /* private clause will appear only on inner_for_stmt.
11136 Change it into firstprivate, and add private clause
11137 on for_stmt. */
11138 tree c = copy_node (*pc);
11139 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
11140 OMP_FOR_CLAUSES (for_stmt) = c;
11141 OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
11142 lang_hooks.decls.omp_finish_clause (*pc, pre_p);
11143 }
11144 else
11145 {
11146 /* lastprivate clause will appear on both inner_for_stmt
11147 and for_stmt. Add firstprivate clause to
11148 inner_for_stmt. */
11149 tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
11150 OMP_CLAUSE_FIRSTPRIVATE);
11151 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
11152 OMP_CLAUSE_CHAIN (c) = *pc;
11153 *pc = c;
11154 lang_hooks.decls.omp_finish_clause (*pc, pre_p);
11155 }
11156 tree c = build_omp_clause (UNKNOWN_LOCATION,
11157 OMP_CLAUSE_FIRSTPRIVATE);
11158 OMP_CLAUSE_DECL (c) = last;
11159 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11160 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11161 c = build_omp_clause (UNKNOWN_LOCATION,
11162 *pc ? OMP_CLAUSE_SHARED
11163 : OMP_CLAUSE_FIRSTPRIVATE);
11164 OMP_CLAUSE_DECL (c) = orig_decl;
11165 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11166 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11167 }
28567c40
JJ
11168 /* Similarly, take care of C++ range-for temporaries; those should
 11169 be firstprivate on OMP_PARALLEL if any. */
11170 if (data[1])
11171 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
11172 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
11173 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11174 i)) == TREE_LIST
11175 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11176 i)))
11177 {
11178 tree orig
11179 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
11180 tree v = TREE_CHAIN (orig);
11181 tree c = build_omp_clause (UNKNOWN_LOCATION,
11182 OMP_CLAUSE_FIRSTPRIVATE);
11183 /* First add a firstprivate clause for the __for_end artificial
11184 decl. */
11185 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
11186 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
11187 == REFERENCE_TYPE)
11188 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
11189 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11190 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11191 if (TREE_VEC_ELT (v, 0))
11192 {
11193 /* And now do the same for the __for_range artificial decl if it
11194 exists. */
11195 c = build_omp_clause (UNKNOWN_LOCATION,
11196 OMP_CLAUSE_FIRSTPRIVATE);
11197 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
11198 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
11199 == REFERENCE_TYPE)
11200 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
11201 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11202 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11203 }
11204 }
0b27c3ed
JJ
11205 }
11206
41dbbb37
TS
11207 switch (TREE_CODE (for_stmt))
11208 {
11209 case OMP_FOR:
41dbbb37 11210 case OMP_DISTRIBUTE:
182190f2 11211 break;
41dbbb37 11212 case OACC_LOOP:
182190f2 11213 ort = ORT_ACC;
d9a6bd32
JJ
11214 break;
11215 case OMP_TASKLOOP:
629b3d75 11216 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
28567c40 11217 ort = ORT_UNTIED_TASKLOOP;
d9a6bd32 11218 else
28567c40 11219 ort = ORT_TASKLOOP;
41dbbb37
TS
11220 break;
11221 case OMP_SIMD:
d9a6bd32 11222 ort = ORT_SIMD;
41dbbb37
TS
11223 break;
11224 default:
11225 gcc_unreachable ();
11226 }
11227
41b37d5e
JJ
11228 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
11229 clause for the IV. */
d9a6bd32 11230 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
41b37d5e
JJ
11231 {
11232 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
11233 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11234 decl = TREE_OPERAND (t, 0);
11235 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11236 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11237 && OMP_CLAUSE_DECL (c) == decl)
11238 {
11239 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
11240 break;
11241 }
11242 }
11243
d9a6bd32
JJ
11244 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
11245 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
554a530f
JJ
11246 loop_p && TREE_CODE (for_stmt) != OMP_SIMD
11247 ? OMP_LOOP : TREE_CODE (for_stmt));
d9a6bd32 11248
9cf32741
JJ
11249 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
11250 gimplify_omp_ctxp->distribute = true;
917948d3 11251
726a989a
RB
11252 /* Handle OMP_FOR_INIT. */
11253 for_pre_body = NULL;
0b27c3ed
JJ
11254 if ((ort == ORT_SIMD
11255 || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
11256 && OMP_FOR_PRE_BODY (for_stmt))
74bf76ed
JJ
11257 {
11258 has_decl_expr = BITMAP_ALLOC (NULL);
11259 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
11260 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
acf0174b 11261 == VAR_DECL)
74bf76ed
JJ
11262 {
11263 t = OMP_FOR_PRE_BODY (for_stmt);
11264 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
11265 }
11266 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
11267 {
11268 tree_stmt_iterator si;
11269 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
11270 tsi_next (&si))
11271 {
11272 t = tsi_stmt (si);
11273 if (TREE_CODE (t) == DECL_EXPR
11274 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
11275 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
11276 }
11277 }
11278 }
d9a6bd32
JJ
11279 if (OMP_FOR_PRE_BODY (for_stmt))
11280 {
11281 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
11282 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
11283 else
11284 {
11285 struct gimplify_omp_ctx ctx;
11286 memset (&ctx, 0, sizeof (ctx));
11287 ctx.region_type = ORT_NONE;
11288 gimplify_omp_ctxp = &ctx;
11289 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
11290 gimplify_omp_ctxp = NULL;
11291 }
11292 }
726a989a 11293 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
a68ab351 11294
acf0174b 11295 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
d9a6bd32
JJ
11296 for_stmt = inner_for_stmt;
11297
11298 /* For taskloop, need to gimplify the start, end and step before the
11299 taskloop, outside of the taskloop omp context. */
11300 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
acf0174b 11301 {
d9a6bd32
JJ
11302 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11303 {
11304 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11305 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
11306 {
3a106211 11307 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
d9a6bd32
JJ
11308 TREE_OPERAND (t, 1)
11309 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
3a106211
JJ
11310 gimple_seq_empty_p (for_pre_body)
11311 ? pre_p : &for_pre_body, NULL,
11312 false);
11313 /* Reference to pointer conversion is considered useless,
11314 but is significant for firstprivate clause. Force it
11315 here. */
11316 if (TREE_CODE (type) == POINTER_TYPE
11317 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1)))
11318 == REFERENCE_TYPE))
11319 {
11320 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
11321 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v,
11322 TREE_OPERAND (t, 1));
11323 gimplify_and_add (m, gimple_seq_empty_p (for_pre_body)
11324 ? pre_p : &for_pre_body);
11325 TREE_OPERAND (t, 1) = v;
11326 }
d9a6bd32
JJ
11327 tree c = build_omp_clause (input_location,
11328 OMP_CLAUSE_FIRSTPRIVATE);
11329 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
11330 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
11331 OMP_FOR_CLAUSES (orig_for_stmt) = c;
11332 }
11333
11334 /* Handle OMP_FOR_COND. */
11335 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
11336 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
11337 {
3a106211 11338 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
d9a6bd32
JJ
11339 TREE_OPERAND (t, 1)
11340 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
11341 gimple_seq_empty_p (for_pre_body)
381cdae4
RB
11342 ? pre_p : &for_pre_body, NULL,
11343 false);
3a106211
JJ
11344 /* Reference to pointer conversion is considered useless,
11345 but is significant for firstprivate clause. Force it
11346 here. */
11347 if (TREE_CODE (type) == POINTER_TYPE
11348 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1)))
11349 == REFERENCE_TYPE))
11350 {
11351 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
11352 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v,
11353 TREE_OPERAND (t, 1));
11354 gimplify_and_add (m, gimple_seq_empty_p (for_pre_body)
11355 ? pre_p : &for_pre_body);
11356 TREE_OPERAND (t, 1) = v;
11357 }
d9a6bd32
JJ
11358 tree c = build_omp_clause (input_location,
11359 OMP_CLAUSE_FIRSTPRIVATE);
11360 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
11361 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
11362 OMP_FOR_CLAUSES (orig_for_stmt) = c;
11363 }
11364
11365 /* Handle OMP_FOR_INCR. */
11366 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11367 if (TREE_CODE (t) == MODIFY_EXPR)
11368 {
11369 decl = TREE_OPERAND (t, 0);
11370 t = TREE_OPERAND (t, 1);
11371 tree *tp = &TREE_OPERAND (t, 1);
11372 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
11373 tp = &TREE_OPERAND (t, 0);
11374
11375 if (!is_gimple_constant (*tp))
11376 {
11377 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
11378 ? pre_p : &for_pre_body;
381cdae4 11379 *tp = get_initialized_tmp_var (*tp, seq, NULL, false);
d9a6bd32
JJ
11380 tree c = build_omp_clause (input_location,
11381 OMP_CLAUSE_FIRSTPRIVATE);
11382 OMP_CLAUSE_DECL (c) = *tp;
11383 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
11384 OMP_FOR_CLAUSES (orig_for_stmt) = c;
11385 }
11386 }
11387 }
11388
11389 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
11390 OMP_TASKLOOP);
acf0174b
JJ
11391 }
11392
d9a6bd32
JJ
11393 if (orig_for_stmt != for_stmt)
11394 gimplify_omp_ctxp->combined_loop = true;
11395
355a7673 11396 for_body = NULL;
a68ab351
JJ
11397 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
11398 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
11399 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
11400 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
d9a6bd32 11401
629b3d75 11402 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
d9a6bd32
JJ
11403 bool is_doacross = false;
11404 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
11405 {
11406 is_doacross = true;
11407 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
11408 (OMP_FOR_INIT (for_stmt))
11409 * 2);
11410 }
02889d23 11411 int collapse = 1, tile = 0;
629b3d75 11412 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
d9a6bd32
JJ
11413 if (c)
11414 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
02889d23
CLT
11415 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
11416 if (c)
11417 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
a68ab351
JJ
11418 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11419 {
11420 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
726a989a
RB
11421 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11422 decl = TREE_OPERAND (t, 0);
a68ab351
JJ
11423 gcc_assert (DECL_P (decl));
11424 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
11425 || POINTER_TYPE_P (TREE_TYPE (decl)));
d9a6bd32
JJ
11426 if (is_doacross)
11427 {
11428 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
0b27c3ed
JJ
11429 {
11430 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
11431 if (TREE_CODE (orig_decl) == TREE_LIST)
28567c40
JJ
11432 {
11433 orig_decl = TREE_PURPOSE (orig_decl);
11434 if (!orig_decl)
11435 orig_decl = decl;
11436 }
0b27c3ed
JJ
11437 gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
11438 }
d9a6bd32
JJ
11439 else
11440 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
11441 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
11442 }
a68ab351
JJ
11443
11444 /* Make sure the iteration variable is private. */
74bf76ed 11445 tree c = NULL_TREE;
f7468577 11446 tree c2 = NULL_TREE;
acf0174b 11447 if (orig_for_stmt != for_stmt)
0b27c3ed
JJ
11448 {
11449 /* Preserve this information until we gimplify the inner simd. */
11450 if (has_decl_expr
11451 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
11452 TREE_PRIVATE (t) = 1;
11453 }
d9a6bd32 11454 else if (ort == ORT_SIMD)
74bf76ed
JJ
11455 {
11456 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
d9a6bd32 11457 (splay_tree_key) decl);
f7468577
JJ
11458 omp_is_private (gimplify_omp_ctxp, decl,
11459 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
11460 != 1));
74bf76ed 11461 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6c7ae8c5
JJ
11462 {
11463 omp_notice_variable (gimplify_omp_ctxp, decl, true);
11464 if (n->value & GOVD_LASTPRIVATE_CONDITIONAL)
11465 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
11466 OMP_CLAUSE_LASTPRIVATE);
11467 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
11468 OMP_CLAUSE_LASTPRIVATE))
11469 if (OMP_CLAUSE_DECL (c3) == decl)
11470 {
11471 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
11472 "conditional %<lastprivate%> on loop "
11473 "iterator %qD ignored", decl);
11474 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
11475 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
11476 }
11477 }
554a530f 11478 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1 && !loop_p)
74bf76ed
JJ
11479 {
11480 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
11481 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
41b37d5e 11482 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
0b27c3ed
JJ
11483 if ((has_decl_expr
11484 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
11485 || TREE_PRIVATE (t))
41b37d5e
JJ
11486 {
11487 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
11488 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11489 }
499c20bb
JJ
11490 struct gimplify_omp_ctx *outer
11491 = gimplify_omp_ctxp->outer_context;
11492 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11493 {
11494 if (outer->region_type == ORT_WORKSHARE
11495 && outer->combined_loop)
11496 {
11497 n = splay_tree_lookup (outer->variables,
11498 (splay_tree_key)decl);
11499 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
11500 {
11501 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
11502 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11503 }
e01d41e5
JJ
11504 else
11505 {
11506 struct gimplify_omp_ctx *octx = outer->outer_context;
11507 if (octx
11508 && octx->region_type == ORT_COMBINED_PARALLEL
11509 && octx->outer_context
11510 && (octx->outer_context->region_type
11511 == ORT_WORKSHARE)
11512 && octx->outer_context->combined_loop)
11513 {
11514 octx = octx->outer_context;
11515 n = splay_tree_lookup (octx->variables,
11516 (splay_tree_key)decl);
11517 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
11518 {
11519 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
11520 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11521 }
11522 }
11523 }
499c20bb
JJ
11524 }
11525 }
11526
74bf76ed
JJ
11527 OMP_CLAUSE_DECL (c) = decl;
11528 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
11529 OMP_FOR_CLAUSES (for_stmt) = c;
41b37d5e 11530 omp_add_variable (gimplify_omp_ctxp, decl, flags);
41b37d5e
JJ
11531 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11532 {
11533 if (outer->region_type == ORT_WORKSHARE
11534 && outer->combined_loop)
11535 {
11536 if (outer->outer_context
11537 && (outer->outer_context->region_type
11538 == ORT_COMBINED_PARALLEL))
11539 outer = outer->outer_context;
11540 else if (omp_check_private (outer, decl, false))
11541 outer = NULL;
11542 }
28567c40
JJ
11543 else if (((outer->region_type & ORT_TASKLOOP)
11544 == ORT_TASKLOOP)
d9a6bd32
JJ
11545 && outer->combined_loop
11546 && !omp_check_private (gimplify_omp_ctxp,
11547 decl, false))
11548 ;
41b37d5e 11549 else if (outer->region_type != ORT_COMBINED_PARALLEL)
84311083
JJ
11550 {
11551 omp_notice_variable (outer, decl, true);
11552 outer = NULL;
11553 }
41b37d5e
JJ
11554 if (outer)
11555 {
cbdfbde8
JJ
11556 n = splay_tree_lookup (outer->variables,
11557 (splay_tree_key)decl);
11558 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11559 {
11560 omp_add_variable (outer, decl,
11561 GOVD_LASTPRIVATE | GOVD_SEEN);
e01d41e5
JJ
11562 if (outer->region_type == ORT_COMBINED_PARALLEL
11563 && outer->outer_context
11564 && (outer->outer_context->region_type
11565 == ORT_WORKSHARE)
11566 && outer->outer_context->combined_loop)
11567 {
11568 outer = outer->outer_context;
11569 n = splay_tree_lookup (outer->variables,
11570 (splay_tree_key)decl);
11571 if (omp_check_private (outer, decl, false))
11572 outer = NULL;
11573 else if (n == NULL
11574 || ((n->value & GOVD_DATA_SHARE_CLASS)
11575 == 0))
11576 omp_add_variable (outer, decl,
11577 GOVD_LASTPRIVATE
11578 | GOVD_SEEN);
11579 else
11580 outer = NULL;
11581 }
11582 if (outer && outer->outer_context
28567c40
JJ
11583 && ((outer->outer_context->region_type
11584 & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS
11585 || (((outer->region_type & ORT_TASKLOOP)
11586 == ORT_TASKLOOP)
11587 && (outer->outer_context->region_type
11588 == ORT_COMBINED_PARALLEL))))
e01d41e5
JJ
11589 {
11590 outer = outer->outer_context;
11591 n = splay_tree_lookup (outer->variables,
11592 (splay_tree_key)decl);
11593 if (n == NULL
11594 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11595 omp_add_variable (outer, decl,
11596 GOVD_SHARED | GOVD_SEEN);
11597 else
11598 outer = NULL;
11599 }
11600 if (outer && outer->outer_context)
cbdfbde8
JJ
11601 omp_notice_variable (outer->outer_context, decl,
11602 true);
11603 }
41b37d5e
JJ
11604 }
11605 }
74bf76ed
JJ
11606 }
11607 else
11608 {
11609 bool lastprivate
11610 = (!has_decl_expr
b4c3a85b 11611 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
0b27c3ed
JJ
11612 if (TREE_PRIVATE (t))
11613 lastprivate = false;
d81ab49d
JJ
11614 if (loop_p && OMP_FOR_ORIG_DECLS (for_stmt))
11615 {
11616 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
11617 if (TREE_CODE (elt) == TREE_LIST && TREE_PURPOSE (elt))
11618 lastprivate = false;
11619 }
11620
41b37d5e
JJ
11621 struct gimplify_omp_ctx *outer
11622 = gimplify_omp_ctxp->outer_context;
11623 if (outer && lastprivate)
56ad0e38 11624 {
41b37d5e
JJ
11625 if (outer->region_type == ORT_WORKSHARE
11626 && outer->combined_loop)
11627 {
499c20bb
JJ
11628 n = splay_tree_lookup (outer->variables,
11629 (splay_tree_key)decl);
11630 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
11631 {
11632 lastprivate = false;
11633 outer = NULL;
11634 }
11635 else if (outer->outer_context
11636 && (outer->outer_context->region_type
11637 == ORT_COMBINED_PARALLEL))
41b37d5e
JJ
11638 outer = outer->outer_context;
11639 else if (omp_check_private (outer, decl, false))
11640 outer = NULL;
11641 }
28567c40
JJ
11642 else if (((outer->region_type & ORT_TASKLOOP)
11643 == ORT_TASKLOOP)
d9a6bd32
JJ
11644 && outer->combined_loop
11645 && !omp_check_private (gimplify_omp_ctxp,
11646 decl, false))
11647 ;
41b37d5e 11648 else if (outer->region_type != ORT_COMBINED_PARALLEL)
84311083
JJ
11649 {
11650 omp_notice_variable (outer, decl, true);
11651 outer = NULL;
11652 }
41b37d5e 11653 if (outer)
56ad0e38 11654 {
cbdfbde8
JJ
11655 n = splay_tree_lookup (outer->variables,
11656 (splay_tree_key)decl);
11657 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11658 {
11659 omp_add_variable (outer, decl,
11660 GOVD_LASTPRIVATE | GOVD_SEEN);
e01d41e5
JJ
11661 if (outer->region_type == ORT_COMBINED_PARALLEL
11662 && outer->outer_context
11663 && (outer->outer_context->region_type
11664 == ORT_WORKSHARE)
11665 && outer->outer_context->combined_loop)
11666 {
11667 outer = outer->outer_context;
11668 n = splay_tree_lookup (outer->variables,
11669 (splay_tree_key)decl);
11670 if (omp_check_private (outer, decl, false))
11671 outer = NULL;
11672 else if (n == NULL
11673 || ((n->value & GOVD_DATA_SHARE_CLASS)
11674 == 0))
11675 omp_add_variable (outer, decl,
11676 GOVD_LASTPRIVATE
11677 | GOVD_SEEN);
11678 else
11679 outer = NULL;
11680 }
11681 if (outer && outer->outer_context
28567c40
JJ
11682 && ((outer->outer_context->region_type
11683 & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS
11684 || (((outer->region_type & ORT_TASKLOOP)
11685 == ORT_TASKLOOP)
11686 && (outer->outer_context->region_type
11687 == ORT_COMBINED_PARALLEL))))
e01d41e5
JJ
11688 {
11689 outer = outer->outer_context;
11690 n = splay_tree_lookup (outer->variables,
11691 (splay_tree_key)decl);
11692 if (n == NULL
11693 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11694 omp_add_variable (outer, decl,
11695 GOVD_SHARED | GOVD_SEEN);
11696 else
11697 outer = NULL;
11698 }
11699 if (outer && outer->outer_context)
cbdfbde8
JJ
11700 omp_notice_variable (outer->outer_context, decl,
11701 true);
11702 }
56ad0e38
JJ
11703 }
11704 }
41b37d5e 11705
74bf76ed
JJ
11706 c = build_omp_clause (input_location,
11707 lastprivate ? OMP_CLAUSE_LASTPRIVATE
11708 : OMP_CLAUSE_PRIVATE);
11709 OMP_CLAUSE_DECL (c) = decl;
11710 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
f7468577 11711 OMP_FOR_CLAUSES (for_stmt) = c;
74bf76ed
JJ
11712 omp_add_variable (gimplify_omp_ctxp, decl,
11713 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
f7468577 11714 | GOVD_EXPLICIT | GOVD_SEEN);
74bf76ed
JJ
11715 c = NULL_TREE;
11716 }
11717 }
f7468577 11718 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
6c7ae8c5
JJ
11719 {
11720 omp_notice_variable (gimplify_omp_ctxp, decl, true);
11721 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
11722 (splay_tree_key) decl);
11723 if (n && (n->value & GOVD_LASTPRIVATE_CONDITIONAL))
11724 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
11725 OMP_CLAUSE_LASTPRIVATE);
11726 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
11727 OMP_CLAUSE_LASTPRIVATE))
11728 if (OMP_CLAUSE_DECL (c3) == decl)
11729 {
11730 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
11731 "conditional %<lastprivate%> on loop "
11732 "iterator %qD ignored", decl);
11733 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
11734 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
11735 }
11736 }
a68ab351
JJ
11737 else
11738 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
11739
      /* If DECL is not a gimple register, create a temporary variable to act
         as an iteration counter.  This is valid, since DECL cannot be
         modified in the body of the loop.  Similarly for any iteration vars
         in simd with collapse > 1 where the iterator vars must be
         lastprivate.  */
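      /* For instance (an illustrative sketch; the temporary names are
         assumed): with "#pragma omp simd collapse(2)" both iterators I and J
         must be lastprivate, so fresh temporaries I.tmp and J.tmp drive the
         generated loops and the loop body is prefixed with I = I.tmp;
         J = J.tmp; via the gimple_build_assign call below.  */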
acf0174b
JJ
11745 if (orig_for_stmt != for_stmt)
11746 var = decl;
56ad0e38 11747 else if (!is_gimple_reg (decl)
d9a6bd32
JJ
11748 || (ort == ORT_SIMD
11749 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
a68ab351 11750 {
ab62397a
JJ
11751 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11752 /* Make sure omp_add_variable is not called on it prematurely.
11753 We call it ourselves a few lines later. */
11754 gimplify_omp_ctxp = NULL;
a68ab351 11755 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
ab62397a 11756 gimplify_omp_ctxp = ctx;
726a989a 11757 TREE_OPERAND (t, 0) = var;
b8698a0f 11758
726a989a 11759 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
953ff289 11760
d9a6bd32
JJ
11761 if (ort == ORT_SIMD
11762 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
f7468577
JJ
11763 {
11764 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
11765 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
11766 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
11767 OMP_CLAUSE_DECL (c2) = var;
11768 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
11769 OMP_FOR_CLAUSES (for_stmt) = c2;
11770 omp_add_variable (gimplify_omp_ctxp, var,
11771 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
11772 if (c == NULL_TREE)
11773 {
11774 c = c2;
11775 c2 = NULL_TREE;
11776 }
11777 }
11778 else
11779 omp_add_variable (gimplify_omp_ctxp, var,
11780 GOVD_PRIVATE | GOVD_SEEN);
a68ab351
JJ
11781 }
11782 else
11783 var = decl;
07beea0d 11784
32e8bb8e 11785 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
381cdae4 11786 is_gimple_val, fb_rvalue, false);
32e8bb8e 11787 ret = MIN (ret, tret);
726a989a
RB
11788 if (ret == GS_ERROR)
11789 return ret;
953ff289 11790
726a989a 11791 /* Handle OMP_FOR_COND. */
a68ab351
JJ
11792 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
11793 gcc_assert (COMPARISON_CLASS_P (t));
726a989a 11794 gcc_assert (TREE_OPERAND (t, 0) == decl);
b56b9fe3 11795
32e8bb8e 11796 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
381cdae4 11797 is_gimple_val, fb_rvalue, false);
32e8bb8e 11798 ret = MIN (ret, tret);
917948d3 11799
726a989a 11800 /* Handle OMP_FOR_INCR. */
a68ab351 11801 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
953ff289
DN
11802 switch (TREE_CODE (t))
11803 {
a68ab351
JJ
11804 case PREINCREMENT_EXPR:
11805 case POSTINCREMENT_EXPR:
c02065fc
AH
11806 {
11807 tree decl = TREE_OPERAND (t, 0);
da6f124d
JJ
11808 /* c_omp_for_incr_canonicalize_ptr() should have been
11809 called to massage things appropriately. */
c02065fc
AH
11810 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
11811
11812 if (orig_for_stmt != for_stmt)
11813 break;
11814 t = build_int_cst (TREE_TYPE (decl), 1);
11815 if (c)
11816 OMP_CLAUSE_LINEAR_STEP (c) = t;
11817 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
11818 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
11819 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
acf0174b 11820 break;
c02065fc 11821 }
a68ab351
JJ
11822
11823 case PREDECREMENT_EXPR:
11824 case POSTDECREMENT_EXPR:
da6f124d
JJ
11825 /* c_omp_for_incr_canonicalize_ptr() should have been
11826 called to massage things appropriately. */
11827 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
acf0174b
JJ
11828 if (orig_for_stmt != for_stmt)
11829 break;
a68ab351 11830 t = build_int_cst (TREE_TYPE (decl), -1);
74bf76ed
JJ
11831 if (c)
11832 OMP_CLAUSE_LINEAR_STEP (c) = t;
a68ab351 11833 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
726a989a 11834 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
a68ab351
JJ
11835 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
11836 break;
11837
726a989a
RB
11838 case MODIFY_EXPR:
11839 gcc_assert (TREE_OPERAND (t, 0) == decl);
11840 TREE_OPERAND (t, 0) = var;
a68ab351 11841
726a989a 11842 t = TREE_OPERAND (t, 1);
a68ab351 11843 switch (TREE_CODE (t))
953ff289 11844 {
a68ab351
JJ
11845 case PLUS_EXPR:
11846 if (TREE_OPERAND (t, 1) == decl)
11847 {
11848 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
11849 TREE_OPERAND (t, 0) = var;
11850 break;
11851 }
11852
11853 /* Fallthru. */
11854 case MINUS_EXPR:
11855 case POINTER_PLUS_EXPR:
11856 gcc_assert (TREE_OPERAND (t, 0) == decl);
917948d3 11857 TREE_OPERAND (t, 0) = var;
953ff289 11858 break;
a68ab351
JJ
11859 default:
11860 gcc_unreachable ();
953ff289 11861 }
917948d3 11862
32e8bb8e 11863 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
381cdae4 11864 is_gimple_val, fb_rvalue, false);
32e8bb8e 11865 ret = MIN (ret, tret);
74bf76ed
JJ
11866 if (c)
11867 {
da6f124d
JJ
11868 tree step = TREE_OPERAND (t, 1);
11869 tree stept = TREE_TYPE (decl);
11870 if (POINTER_TYPE_P (stept))
11871 stept = sizetype;
11872 step = fold_convert (stept, step);
74bf76ed 11873 if (TREE_CODE (t) == MINUS_EXPR)
da6f124d
JJ
11874 step = fold_build1 (NEGATE_EXPR, stept, step);
11875 OMP_CLAUSE_LINEAR_STEP (c) = step;
11876 if (step != TREE_OPERAND (t, 1))
74bf76ed 11877 {
74bf76ed
JJ
11878 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
11879 &for_pre_body, NULL,
381cdae4 11880 is_gimple_val, fb_rvalue, false);
74bf76ed
JJ
11881 ret = MIN (ret, tret);
11882 }
11883 }
953ff289 11884 break;
a68ab351 11885
953ff289
DN
11886 default:
11887 gcc_unreachable ();
11888 }
11889
f7468577
JJ
11890 if (c2)
11891 {
11892 gcc_assert (c);
11893 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
11894 }
11895
02889d23 11896 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
a68ab351 11897 {
a68ab351 11898 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
f7468577
JJ
11899 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
11900 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
11901 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11902 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
11903 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
11904 && OMP_CLAUSE_DECL (c) == decl)
726a989a 11905 {
d9a6bd32
JJ
11906 if (is_doacross && (collapse == 1 || i >= collapse))
11907 t = var;
11908 else
11909 {
11910 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11911 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11912 gcc_assert (TREE_OPERAND (t, 0) == var);
11913 t = TREE_OPERAND (t, 1);
11914 gcc_assert (TREE_CODE (t) == PLUS_EXPR
11915 || TREE_CODE (t) == MINUS_EXPR
11916 || TREE_CODE (t) == POINTER_PLUS_EXPR);
11917 gcc_assert (TREE_OPERAND (t, 0) == var);
11918 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
11919 is_doacross ? var : decl,
11920 TREE_OPERAND (t, 1));
11921 }
f7468577
JJ
11922 gimple_seq *seq;
11923 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
11924 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
11925 else
11926 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
be3a87e7 11927 push_gimplify_context ();
f7468577 11928 gimplify_assign (decl, t, seq);
be3a87e7
JJ
11929 gimple *bind = NULL;
11930 if (gimplify_ctxp->temps)
11931 {
11932 bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
11933 *seq = NULL;
11934 gimplify_seq_add_stmt (seq, bind);
11935 }
11936 pop_gimplify_context (bind);
11937 }
a68ab351 11938 }
953ff289
DN
11939 }
11940
74bf76ed
JJ
11941 BITMAP_FREE (has_decl_expr);
11942
d81ab49d
JJ
11943 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
11944 || (loop_p && orig_for_stmt == for_stmt))
d9a6bd32
JJ
11945 {
11946 push_gimplify_context ();
11947 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
11948 {
11949 OMP_FOR_BODY (orig_for_stmt)
11950 = build3 (BIND_EXPR, void_type_node, NULL,
11951 OMP_FOR_BODY (orig_for_stmt), NULL);
11952 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
11953 }
11954 }
11955
11956 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
11957 &for_body);
11958
d81ab49d
JJ
11959 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
11960 || (loop_p && orig_for_stmt == for_stmt))
d9a6bd32
JJ
11961 {
11962 if (gimple_code (g) == GIMPLE_BIND)
11963 pop_gimplify_context (g);
11964 else
11965 pop_gimplify_context (NULL);
11966 }
726a989a 11967
acf0174b
JJ
11968 if (orig_for_stmt != for_stmt)
11969 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11970 {
11971 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11972 decl = TREE_OPERAND (t, 0);
d9a6bd32
JJ
11973 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11974 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11975 gimplify_omp_ctxp = ctx->outer_context;
acf0174b 11976 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
d9a6bd32 11977 gimplify_omp_ctxp = ctx;
acf0174b
JJ
11978 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
11979 TREE_OPERAND (t, 0) = var;
11980 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11981 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
11982 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
11983 }
11984
1a80d6b8
JJ
11985 gimplify_adjust_omp_clauses (pre_p, for_body,
11986 &OMP_FOR_CLAUSES (orig_for_stmt),
d9a6bd32 11987 TREE_CODE (orig_for_stmt));
953ff289 11988
  int kind;
  switch (TREE_CODE (orig_for_stmt))
    {
    case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
    case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
    case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
    case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
    case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
    default:
      gcc_unreachable ();
    }
  if (loop_p && kind == GF_OMP_FOR_KIND_SIMD)
    {
      gimplify_seq_add_seq (pre_p, for_pre_body);
      for_pre_body = NULL;
    }
acf0174b 12005 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
726a989a
RB
12006 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
12007 for_pre_body);
acf0174b
JJ
12008 if (orig_for_stmt != for_stmt)
12009 gimple_omp_for_set_combined_p (gfor, true);
12010 if (gimplify_omp_ctxp
12011 && (gimplify_omp_ctxp->combined_loop
12012 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
12013 && gimplify_omp_ctxp->outer_context
12014 && gimplify_omp_ctxp->outer_context->combined_loop)))
12015 {
12016 gimple_omp_for_set_combined_into_p (gfor, true);
12017 if (gimplify_omp_ctxp->combined_loop)
12018 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
12019 else
12020 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
12021 }
726a989a
RB
12022
12023 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12024 {
12025 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
12026 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
12027 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
12028 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
12029 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
12030 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
12031 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
12032 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
12033 }
12034
  /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
     constructs with GIMPLE_OMP_TASK sandwiched in between them.
     The outer taskloop stands for computing the number of iterations,
     counts for collapsed loops and holding taskloop specific clauses.
     The task construct stands for the effect of data sharing on the
     explicit task it creates and the inner taskloop stands for expansion
     of the static loop inside of the explicit task construct.  */
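  /* Illustrative sketch (not the literal GIMPLE that is produced):

	#pragma omp taskloop grainsize(4) firstprivate(a)
	for (i = 0; i < n; i++) ...

     becomes approximately

	GIMPLE_OMP_FOR (taskloop, outer)    <- grainsize, iteration counts
	  GIMPLE_OMP_TASK                   <- firstprivate(a), data sharing
	    GIMPLE_OMP_FOR (taskloop, inner)
	      for (i = 0; i < n; i++) ...  */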
12042 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
12043 {
12044 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
12045 tree task_clauses = NULL_TREE;
12046 tree c = *gfor_clauses_ptr;
12047 tree *gtask_clauses_ptr = &task_clauses;
12048 tree outer_for_clauses = NULL_TREE;
12049 tree *gforo_clauses_ptr = &outer_for_clauses;
12050 for (; c; c = OMP_CLAUSE_CHAIN (c))
12051 switch (OMP_CLAUSE_CODE (c))
12052 {
12053 /* These clauses are allowed on task, move them there. */
12054 case OMP_CLAUSE_SHARED:
12055 case OMP_CLAUSE_FIRSTPRIVATE:
12056 case OMP_CLAUSE_DEFAULT:
12057 case OMP_CLAUSE_IF:
12058 case OMP_CLAUSE_UNTIED:
12059 case OMP_CLAUSE_FINAL:
12060 case OMP_CLAUSE_MERGEABLE:
12061 case OMP_CLAUSE_PRIORITY:
28567c40
JJ
12062 case OMP_CLAUSE_REDUCTION:
12063 case OMP_CLAUSE_IN_REDUCTION:
d9a6bd32
JJ
12064 *gtask_clauses_ptr = c;
12065 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12066 break;
12067 case OMP_CLAUSE_PRIVATE:
12068 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
12069 {
12070 /* We want private on outer for and firstprivate
12071 on task. */
12072 *gtask_clauses_ptr
12073 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12074 OMP_CLAUSE_FIRSTPRIVATE);
12075 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
12076 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
12077 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12078 *gforo_clauses_ptr = c;
12079 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12080 }
12081 else
12082 {
12083 *gtask_clauses_ptr = c;
12084 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12085 }
12086 break;
12087 /* These clauses go into outer taskloop clauses. */
12088 case OMP_CLAUSE_GRAINSIZE:
12089 case OMP_CLAUSE_NUM_TASKS:
12090 case OMP_CLAUSE_NOGROUP:
12091 *gforo_clauses_ptr = c;
12092 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12093 break;
12094 /* Taskloop clause we duplicate on both taskloops. */
12095 case OMP_CLAUSE_COLLAPSE:
12096 *gfor_clauses_ptr = c;
12097 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12098 *gforo_clauses_ptr = copy_node (c);
12099 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
12100 break;
12101 /* For lastprivate, keep the clause on inner taskloop, and add
12102 a shared clause on task. If the same decl is also firstprivate,
12103 add also firstprivate clause on the inner taskloop. */
12104 case OMP_CLAUSE_LASTPRIVATE:
d81ab49d 12105 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
d9a6bd32
JJ
12106 {
12107 /* For taskloop C++ lastprivate IVs, we want:
12108 1) private on outer taskloop
12109 2) firstprivate and shared on task
12110 3) lastprivate on inner taskloop */
12111 *gtask_clauses_ptr
12112 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12113 OMP_CLAUSE_FIRSTPRIVATE);
12114 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
12115 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
12116 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12117 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
12118 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12119 OMP_CLAUSE_PRIVATE);
12120 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
12121 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
12122 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
12123 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
12124 }
12125 *gfor_clauses_ptr = c;
12126 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12127 *gtask_clauses_ptr
12128 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
12129 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
12130 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
12131 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
12132 gtask_clauses_ptr
12133 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12134 break;
12135 default:
12136 gcc_unreachable ();
12137 }
12138 *gfor_clauses_ptr = NULL_TREE;
12139 *gtask_clauses_ptr = NULL_TREE;
12140 *gforo_clauses_ptr = NULL_TREE;
12141 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
12142 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
12143 NULL_TREE, NULL_TREE, NULL_TREE);
12144 gimple_omp_task_set_taskloop_p (g, true);
12145 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
12146 gomp_for *gforo
12147 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
12148 gimple_omp_for_collapse (gfor),
12149 gimple_omp_for_pre_body (gfor));
12150 gimple_omp_for_set_pre_body (gfor, NULL);
12151 gimple_omp_for_set_combined_p (gforo, true);
12152 gimple_omp_for_set_combined_into_p (gfor, true);
12153 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
12154 {
eebc5e2d
JJ
12155 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
12156 tree v = create_tmp_var (type);
12157 gimple_omp_for_set_index (gforo, i, v);
d9a6bd32
JJ
12158 t = unshare_expr (gimple_omp_for_initial (gfor, i));
12159 gimple_omp_for_set_initial (gforo, i, t);
12160 gimple_omp_for_set_cond (gforo, i,
12161 gimple_omp_for_cond (gfor, i));
12162 t = unshare_expr (gimple_omp_for_final (gfor, i));
12163 gimple_omp_for_set_final (gforo, i, t);
12164 t = unshare_expr (gimple_omp_for_incr (gfor, i));
eebc5e2d
JJ
12165 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
12166 TREE_OPERAND (t, 0) = v;
d9a6bd32 12167 gimple_omp_for_set_incr (gforo, i, t);
eebc5e2d
JJ
12168 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
12169 OMP_CLAUSE_DECL (t) = v;
12170 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
12171 gimple_omp_for_set_clauses (gforo, t);
d9a6bd32
JJ
12172 }
12173 gimplify_seq_add_stmt (pre_p, gforo);
12174 }
12175 else
12176 gimplify_seq_add_stmt (pre_p, gfor);
8221c30b
JJ
12177
12178 if (TREE_CODE (orig_for_stmt) == OMP_FOR)
12179 {
12180 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12181 unsigned lastprivate_conditional = 0;
12182 while (ctx
12183 && (ctx->region_type == ORT_TARGET_DATA
12184 || ctx->region_type == ORT_TASKGROUP))
12185 ctx = ctx->outer_context;
12186 if (ctx && (ctx->region_type & ORT_PARALLEL) != 0)
12187 for (tree c = gimple_omp_for_clauses (gfor);
12188 c; c = OMP_CLAUSE_CHAIN (c))
12189 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12190 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12191 ++lastprivate_conditional;
12192 if (lastprivate_conditional)
12193 {
12194 struct omp_for_data fd;
12195 omp_extract_for_data (gfor, &fd, NULL);
12196 tree type = build_array_type_nelts (unsigned_type_for (fd.iter_type),
12197 lastprivate_conditional);
12198 tree var = create_tmp_var_raw (type);
12199 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
12200 OMP_CLAUSE_DECL (c) = var;
12201 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
12202 gimple_omp_for_set_clauses (gfor, c);
12203 omp_add_variable (ctx, var, GOVD_CONDTEMP | GOVD_SEEN);
12204 }
12205 }
e7393c89
JJ
12206 else if (TREE_CODE (orig_for_stmt) == OMP_SIMD)
12207 {
12208 unsigned lastprivate_conditional = 0;
12209 for (tree c = gimple_omp_for_clauses (gfor); c; c = OMP_CLAUSE_CHAIN (c))
12210 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12211 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12212 ++lastprivate_conditional;
12213 if (lastprivate_conditional)
12214 {
12215 struct omp_for_data fd;
12216 omp_extract_for_data (gfor, &fd, NULL);
12217 tree type = unsigned_type_for (fd.iter_type);
12218 while (lastprivate_conditional--)
12219 {
12220 tree c = build_omp_clause (UNKNOWN_LOCATION,
12221 OMP_CLAUSE__CONDTEMP_);
12222 OMP_CLAUSE_DECL (c) = create_tmp_var (type);
12223 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
12224 gimple_omp_for_set_clauses (gfor, c);
12225 }
12226 }
12227 }
8221c30b 12228
74bf76ed
JJ
12229 if (ret != GS_ALL_DONE)
12230 return GS_ERROR;
12231 *expr_p = NULL_TREE;
12232 return GS_ALL_DONE;
12233}
12234
/* Helper for gimplify_omp_loop, called through walk_tree.  */

static tree
replace_reduction_placeholders (tree *tp, int *walk_subtrees, void *data)
{
  if (DECL_P (*tp))
    {
      tree *d = (tree *) data;
      if (*tp == OMP_CLAUSE_REDUCTION_PLACEHOLDER (d[0]))
        {
          *tp = OMP_CLAUSE_REDUCTION_PLACEHOLDER (d[1]);
          *walk_subtrees = 0;
        }
      else if (*tp == OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (d[0]))
        {
          *tp = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (d[1]);
          *walk_subtrees = 0;
        }
    }
  return NULL_TREE;
}

/* Gimplify the gross structure of an OMP_LOOP statement.  */
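/* Rough lowering sketch (an approximation of the passes coded below):
   depending on the effective bind clause, the loop construct is rewritten
   before being handed back to gimplify_omp_for as

	bind(thread)    ->  simd
	bind(parallel)  ->  for simd                      (in the binding parallel)
	bind(teams)     ->  distribute parallel for simd  (in the binding teams)  */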
12258
12259static enum gimplify_status
12260gimplify_omp_loop (tree *expr_p, gimple_seq *pre_p)
12261{
12262 tree for_stmt = *expr_p;
12263 tree clauses = OMP_FOR_CLAUSES (for_stmt);
12264 struct gimplify_omp_ctx *octx = gimplify_omp_ctxp;
12265 enum omp_clause_bind_kind kind = OMP_CLAUSE_BIND_THREAD;
12266 int i;
12267
12268 /* If order is not present, the behavior is as if order(concurrent)
12269 appeared. */
12270 tree order = omp_find_clause (clauses, OMP_CLAUSE_ORDER);
12271 if (order == NULL_TREE)
12272 {
12273 order = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_ORDER);
12274 OMP_CLAUSE_CHAIN (order) = clauses;
12275 OMP_FOR_CLAUSES (for_stmt) = clauses = order;
12276 }
12277
12278 tree bind = omp_find_clause (clauses, OMP_CLAUSE_BIND);
12279 if (bind == NULL_TREE)
12280 {
12281 if (!flag_openmp) /* flag_openmp_simd */
12282 ;
12283 else if (octx && (octx->region_type & ORT_TEAMS) != 0)
12284 kind = OMP_CLAUSE_BIND_TEAMS;
12285 else if (octx && (octx->region_type & ORT_PARALLEL) != 0)
12286 kind = OMP_CLAUSE_BIND_PARALLEL;
12287 else
12288 {
12289 for (; octx; octx = octx->outer_context)
12290 {
12291 if ((octx->region_type & ORT_ACC) != 0
12292 || octx->region_type == ORT_NONE
12293 || octx->region_type == ORT_IMPLICIT_TARGET)
12294 continue;
12295 break;
12296 }
12297 if (octx == NULL && !in_omp_construct)
12298 error_at (EXPR_LOCATION (for_stmt),
12299 "%<bind%> clause not specified on a %<loop%> "
12300 "construct not nested inside another OpenMP construct");
12301 }
12302 bind = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_BIND);
12303 OMP_CLAUSE_CHAIN (bind) = clauses;
12304 OMP_CLAUSE_BIND_KIND (bind) = kind;
12305 OMP_FOR_CLAUSES (for_stmt) = bind;
12306 }
12307 else
12308 switch (OMP_CLAUSE_BIND_KIND (bind))
12309 {
12310 case OMP_CLAUSE_BIND_THREAD:
12311 break;
12312 case OMP_CLAUSE_BIND_PARALLEL:
12313 if (!flag_openmp) /* flag_openmp_simd */
12314 {
12315 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12316 break;
12317 }
12318 for (; octx; octx = octx->outer_context)
12319 if (octx->region_type == ORT_SIMD
12320 && omp_find_clause (octx->clauses, OMP_CLAUSE_BIND) == NULL_TREE)
12321 {
12322 error_at (EXPR_LOCATION (for_stmt),
12323 "%<bind(parallel)%> on a %<loop%> construct nested "
12324 "inside %<simd%> construct");
12325 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12326 break;
12327 }
12328 kind = OMP_CLAUSE_BIND_PARALLEL;
12329 break;
12330 case OMP_CLAUSE_BIND_TEAMS:
12331 if (!flag_openmp) /* flag_openmp_simd */
12332 {
12333 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12334 break;
12335 }
12336 if ((octx
12337 && octx->region_type != ORT_IMPLICIT_TARGET
12338 && octx->region_type != ORT_NONE
12339 && (octx->region_type & ORT_TEAMS) == 0)
12340 || in_omp_construct)
12341 {
12342 error_at (EXPR_LOCATION (for_stmt),
12343 "%<bind(teams)%> on a %<loop%> region not strictly "
12344 "nested inside of a %<teams%> region");
12345 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12346 break;
12347 }
12348 kind = OMP_CLAUSE_BIND_TEAMS;
12349 break;
12350 default:
12351 gcc_unreachable ();
12352 }
12353
12354 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
12355 switch (OMP_CLAUSE_CODE (*pc))
12356 {
12357 case OMP_CLAUSE_REDUCTION:
12358 if (OMP_CLAUSE_REDUCTION_INSCAN (*pc))
12359 {
12360 error_at (OMP_CLAUSE_LOCATION (*pc),
12361 "%<inscan%> %<reduction%> clause on "
12362 "%qs construct", "loop");
12363 OMP_CLAUSE_REDUCTION_INSCAN (*pc) = 0;
12364 }
12365 if (OMP_CLAUSE_REDUCTION_TASK (*pc))
12366 {
12367 error_at (OMP_CLAUSE_LOCATION (*pc),
12368 "invalid %<task%> reduction modifier on construct "
12369 "other than %<parallel%>, %<for%> or %<sections%>");
12370 OMP_CLAUSE_REDUCTION_TASK (*pc) = 0;
12371 }
12372 pc = &OMP_CLAUSE_CHAIN (*pc);
12373 break;
12374 case OMP_CLAUSE_LASTPRIVATE:
12375 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12376 {
12377 tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
12378 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
12379 if (OMP_CLAUSE_DECL (*pc) == TREE_OPERAND (t, 0))
12380 break;
12381 if (OMP_FOR_ORIG_DECLS (for_stmt)
12382 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
12383 i)) == TREE_LIST
12384 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
12385 i)))
12386 {
12387 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
12388 if (OMP_CLAUSE_DECL (*pc) == TREE_PURPOSE (orig))
12389 break;
12390 }
12391 }
12392 if (i == TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)))
12393 {
12394 error_at (OMP_CLAUSE_LOCATION (*pc),
12395 "%<lastprivate%> clause on a %<loop%> construct refers "
12396 "to a variable %qD which is not the loop iterator",
12397 OMP_CLAUSE_DECL (*pc));
12398 *pc = OMP_CLAUSE_CHAIN (*pc);
12399 break;
12400 }
12401 pc = &OMP_CLAUSE_CHAIN (*pc);
12402 break;
12403 default:
12404 pc = &OMP_CLAUSE_CHAIN (*pc);
12405 break;
12406 }
12407
12408 TREE_SET_CODE (for_stmt, OMP_SIMD);
12409
12410 int last;
12411 switch (kind)
12412 {
12413 case OMP_CLAUSE_BIND_THREAD: last = 0; break;
12414 case OMP_CLAUSE_BIND_PARALLEL: last = 1; break;
12415 case OMP_CLAUSE_BIND_TEAMS: last = 2; break;
12416 }
12417 for (int pass = 1; pass <= last; pass++)
12418 {
12419 if (pass == 2)
12420 {
12421 tree bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
12422 append_to_statement_list (*expr_p, &BIND_EXPR_BODY (bind));
12423 *expr_p = make_node (OMP_PARALLEL);
12424 TREE_TYPE (*expr_p) = void_type_node;
12425 OMP_PARALLEL_BODY (*expr_p) = bind;
12426 OMP_PARALLEL_COMBINED (*expr_p) = 1;
12427 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (for_stmt));
d81ab49d
JJ
12428 tree *pc = &OMP_PARALLEL_CLAUSES (*expr_p);
12429 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12430 if (OMP_FOR_ORIG_DECLS (for_stmt)
12431 && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i))
12432 == TREE_LIST))
12433 {
12434 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
12435 if (TREE_PURPOSE (elt) && TREE_VALUE (elt))
12436 {
12437 *pc = build_omp_clause (UNKNOWN_LOCATION,
12438 OMP_CLAUSE_FIRSTPRIVATE);
12439 OMP_CLAUSE_DECL (*pc) = TREE_VALUE (elt);
12440 pc = &OMP_CLAUSE_CHAIN (*pc);
12441 }
12442 }
554a530f
JJ
12443 }
12444 tree t = make_node (pass == 2 ? OMP_DISTRIBUTE : OMP_FOR);
12445 tree *pc = &OMP_FOR_CLAUSES (t);
12446 TREE_TYPE (t) = void_type_node;
12447 OMP_FOR_BODY (t) = *expr_p;
12448 SET_EXPR_LOCATION (t, EXPR_LOCATION (for_stmt));
12449 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12450 switch (OMP_CLAUSE_CODE (c))
12451 {
12452 case OMP_CLAUSE_BIND:
12453 case OMP_CLAUSE_ORDER:
12454 case OMP_CLAUSE_COLLAPSE:
12455 *pc = copy_node (c);
12456 pc = &OMP_CLAUSE_CHAIN (*pc);
12457 break;
12458 case OMP_CLAUSE_PRIVATE:
d81ab49d 12459 case OMP_CLAUSE_FIRSTPRIVATE:
554a530f
JJ
12460 /* Only needed on innermost. */
12461 break;
12462 case OMP_CLAUSE_LASTPRIVATE:
d81ab49d
JJ
12463 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) && pass != last)
12464 {
12465 *pc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12466 OMP_CLAUSE_FIRSTPRIVATE);
12467 OMP_CLAUSE_DECL (*pc) = OMP_CLAUSE_DECL (c);
12468 lang_hooks.decls.omp_finish_clause (*pc, NULL);
12469 pc = &OMP_CLAUSE_CHAIN (*pc);
12470 }
554a530f
JJ
12471 *pc = copy_node (c);
12472 OMP_CLAUSE_LASTPRIVATE_STMT (*pc) = NULL_TREE;
12473 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
d81ab49d
JJ
12474 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
12475 {
12476 if (pass != last)
12477 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc) = 1;
12478 else
12479 lang_hooks.decls.omp_finish_clause (*pc, NULL);
12480 OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc) = 0;
12481 }
554a530f
JJ
12482 pc = &OMP_CLAUSE_CHAIN (*pc);
12483 break;
12484 case OMP_CLAUSE_REDUCTION:
12485 *pc = copy_node (c);
12486 OMP_CLAUSE_DECL (*pc) = unshare_expr (OMP_CLAUSE_DECL (c));
12487 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
12488 OMP_CLAUSE_REDUCTION_INIT (*pc)
12489 = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c));
12490 OMP_CLAUSE_REDUCTION_MERGE (*pc)
12491 = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c));
12492 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc))
12493 {
12494 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc)
12495 = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c));
12496 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
12497 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc)
12498 = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c));
12499 tree nc = *pc;
12500 tree data[2] = { c, nc };
12501 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (nc),
12502 replace_reduction_placeholders,
12503 data);
12504 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (nc),
12505 replace_reduction_placeholders,
12506 data);
12507 }
12508 pc = &OMP_CLAUSE_CHAIN (*pc);
12509 break;
12510 default:
12511 gcc_unreachable ();
12512 }
12513 *pc = NULL_TREE;
12514 *expr_p = t;
12515 }
12516 return gimplify_omp_for (expr_p, pre_p);
12517}
12518
12519
/* Helper function of optimize_target_teams, find OMP_TEAMS inside
   of OMP_TARGET's body.  */

static tree
find_omp_teams (tree *tp, int *walk_subtrees, void *)
{
  *walk_subtrees = 0;
  switch (TREE_CODE (*tp))
    {
    case OMP_TEAMS:
      return *tp;
    case BIND_EXPR:
    case STATEMENT_LIST:
      *walk_subtrees = 1;
      break;
    default:
      break;
    }
  return NULL_TREE;
}
12540
12541/* Helper function of optimize_target_teams, determine if the expression
12542 can be computed safely before the target construct on the host. */
12543
12544static tree
12545computable_teams_clause (tree *tp, int *walk_subtrees, void *)
12546{
12547 splay_tree_node n;
12548
12549 if (TYPE_P (*tp))
12550 {
12551 *walk_subtrees = 0;
12552 return NULL_TREE;
12553 }
12554 switch (TREE_CODE (*tp))
12555 {
12556 case VAR_DECL:
12557 case PARM_DECL:
12558 case RESULT_DECL:
12559 *walk_subtrees = 0;
12560 if (error_operand_p (*tp)
12561 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
12562 || DECL_HAS_VALUE_EXPR_P (*tp)
12563 || DECL_THREAD_LOCAL_P (*tp)
12564 || TREE_SIDE_EFFECTS (*tp)
12565 || TREE_THIS_VOLATILE (*tp))
12566 return *tp;
12567 if (is_global_var (*tp)
12568 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
12569 || lookup_attribute ("omp declare target link",
12570 DECL_ATTRIBUTES (*tp))))
12571 return *tp;
b4c3a85b
JJ
12572 if (VAR_P (*tp)
12573 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
12574 && !is_global_var (*tp)
12575 && decl_function_context (*tp) == current_function_decl)
12576 return *tp;
e01d41e5
JJ
12577 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
12578 (splay_tree_key) *tp);
12579 if (n == NULL)
12580 {
28567c40 12581 if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
e01d41e5
JJ
12582 return NULL_TREE;
12583 return *tp;
12584 }
12585 else if (n->value & GOVD_LOCAL)
12586 return *tp;
12587 else if (n->value & GOVD_FIRSTPRIVATE)
12588 return NULL_TREE;
12589 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
12590 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
12591 return NULL_TREE;
12592 return *tp;
12593 case INTEGER_CST:
12594 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
12595 return *tp;
12596 return NULL_TREE;
12597 case TARGET_EXPR:
12598 if (TARGET_EXPR_INITIAL (*tp)
12599 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
12600 return *tp;
12601 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
12602 walk_subtrees, NULL);
12603 /* Allow some reasonable subset of integral arithmetics. */
12604 case PLUS_EXPR:
12605 case MINUS_EXPR:
12606 case MULT_EXPR:
12607 case TRUNC_DIV_EXPR:
12608 case CEIL_DIV_EXPR:
12609 case FLOOR_DIV_EXPR:
12610 case ROUND_DIV_EXPR:
12611 case TRUNC_MOD_EXPR:
12612 case CEIL_MOD_EXPR:
12613 case FLOOR_MOD_EXPR:
12614 case ROUND_MOD_EXPR:
12615 case RDIV_EXPR:
12616 case EXACT_DIV_EXPR:
12617 case MIN_EXPR:
12618 case MAX_EXPR:
12619 case LSHIFT_EXPR:
12620 case RSHIFT_EXPR:
12621 case BIT_IOR_EXPR:
12622 case BIT_XOR_EXPR:
12623 case BIT_AND_EXPR:
12624 case NEGATE_EXPR:
12625 case ABS_EXPR:
12626 case BIT_NOT_EXPR:
12627 case NON_LVALUE_EXPR:
12628 CASE_CONVERT:
12629 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
12630 return *tp;
12631 return NULL_TREE;
12632 /* And disallow anything else, except for comparisons. */
12633 default:
12634 if (COMPARISON_CLASS_P (*tp))
12635 return NULL_TREE;
12636 return *tp;
12637 }
12638}
12639
/* Try to determine if the num_teams and/or thread_limit expressions
   can have their values determined already before entering the
   target construct.
   INTEGER_CSTs trivially can, as can integral decls that are
   firstprivate (explicitly or implicitly) or explicitly
   map(always, to:) or map(always, tofrom:) on the target region,
   and expressions involving simple arithmetic on those; function
   calls are not OK, nor is dereferencing something, etc.
   Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
   EXPR based on what we find:
   0 stands for clause not specified at all, use implementation default;
   -1 stands for a value that can't be determined easily before entering
   the target construct.
   If no teams construct is present at all, use 1 for num_teams
   and 0 for thread_limit (only one team is involved, and the thread
   limit is implementation defined).  */
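/* For example (illustrative only):

	int n = foo ();
	#pragma omp target map(always, to: n)
	#pragma omp teams num_teams(4) thread_limit(n + 2)

   lets both expressions be evaluated on the host, so NUM_TEAMS 4 and
   THREAD_LIMIT n + 2 clauses are added to the OMP_TARGET, whereas
   something like thread_limit(bar ()) cannot be precomputed and is
   recorded as -1.  */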

static void
optimize_target_teams (tree target, gimple_seq *pre_p)
{
  tree body = OMP_BODY (target);
  tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
  tree num_teams = integer_zero_node;
  tree thread_limit = integer_zero_node;
  location_t num_teams_loc = EXPR_LOCATION (target);
  location_t thread_limit_loc = EXPR_LOCATION (target);
  tree c, *p, expr;
  struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;

12669 if (teams == NULL_TREE)
12670 num_teams = integer_one_node;
12671 else
12672 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
12673 {
12674 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
12675 {
12676 p = &num_teams;
12677 num_teams_loc = OMP_CLAUSE_LOCATION (c);
12678 }
12679 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
12680 {
12681 p = &thread_limit;
12682 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
12683 }
12684 else
12685 continue;
12686 expr = OMP_CLAUSE_OPERAND (c, 0);
12687 if (TREE_CODE (expr) == INTEGER_CST)
12688 {
12689 *p = expr;
12690 continue;
12691 }
12692 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
12693 {
12694 *p = integer_minus_one_node;
12695 continue;
12696 }
12697 *p = expr;
12698 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
381cdae4 12699 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
e01d41e5
JJ
12700 == GS_ERROR)
12701 {
12702 gimplify_omp_ctxp = target_ctx;
12703 *p = integer_minus_one_node;
12704 continue;
12705 }
12706 gimplify_omp_ctxp = target_ctx;
12707 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
12708 OMP_CLAUSE_OPERAND (c, 0) = *p;
12709 }
12710 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
12711 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
12712 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
12713 OMP_TARGET_CLAUSES (target) = c;
12714 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
12715 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
12716 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
12717 OMP_TARGET_CLAUSES (target) = c;
12718}
12719
/* Gimplify the gross structure of several OMP constructs.  */

static void
gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple *stmt;
  gimple_seq body = NULL;
  enum omp_region_type ort;

acf0174b
JJ
12730 switch (TREE_CODE (expr))
12731 {
12732 case OMP_SECTIONS:
12733 case OMP_SINGLE:
41dbbb37 12734 ort = ORT_WORKSHARE;
acf0174b 12735 break;
d9a6bd32
JJ
12736 case OMP_TARGET:
12737 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
12738 break;
41dbbb37 12739 case OACC_KERNELS:
182190f2
NS
12740 ort = ORT_ACC_KERNELS;
12741 break;
41dbbb37 12742 case OACC_PARALLEL:
182190f2 12743 ort = ORT_ACC_PARALLEL;
acf0174b 12744 break;
62aee289
MR
12745 case OACC_SERIAL:
12746 ort = ORT_ACC_SERIAL;
12747 break;
41dbbb37 12748 case OACC_DATA:
182190f2
NS
12749 ort = ORT_ACC_DATA;
12750 break;
acf0174b
JJ
12751 case OMP_TARGET_DATA:
12752 ort = ORT_TARGET_DATA;
12753 break;
12754 case OMP_TEAMS:
41b37d5e 12755 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
28567c40 12756 if (gimplify_omp_ctxp == NULL
554a530f 12757 || gimplify_omp_ctxp->region_type == ORT_IMPLICIT_TARGET)
28567c40 12758 ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
acf0174b 12759 break;
37d5ad46
JB
12760 case OACC_HOST_DATA:
12761 ort = ORT_ACC_HOST_DATA;
12762 break;
acf0174b
JJ
12763 default:
12764 gcc_unreachable ();
12765 }
554a530f
JJ
12766
12767 bool save_in_omp_construct = in_omp_construct;
12768 if ((ort & ORT_ACC) == 0)
12769 in_omp_construct = false;
d9a6bd32
JJ
12770 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
12771 TREE_CODE (expr));
e01d41e5
JJ
12772 if (TREE_CODE (expr) == OMP_TARGET)
12773 optimize_target_teams (expr, pre_p);
28567c40
JJ
12774 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
12775 || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
acf0174b 12776 {
45852dcc 12777 push_gimplify_context ();
355fe088 12778 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
acf0174b
JJ
12779 if (gimple_code (g) == GIMPLE_BIND)
12780 pop_gimplify_context (g);
12781 else
12782 pop_gimplify_context (NULL);
182190f2 12783 if ((ort & ORT_TARGET_DATA) != 0)
acf0174b 12784 {
41dbbb37
TS
12785 enum built_in_function end_ix;
12786 switch (TREE_CODE (expr))
12787 {
12788 case OACC_DATA:
37d5ad46 12789 case OACC_HOST_DATA:
41dbbb37
TS
12790 end_ix = BUILT_IN_GOACC_DATA_END;
12791 break;
12792 case OMP_TARGET_DATA:
12793 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
12794 break;
12795 default:
12796 gcc_unreachable ();
12797 }
12798 tree fn = builtin_decl_explicit (end_ix);
acf0174b 12799 g = gimple_build_call (fn, 0);
41dbbb37 12800 gimple_seq cleanup = NULL;
acf0174b
JJ
12801 gimple_seq_add_stmt (&cleanup, g);
12802 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
12803 body = NULL;
12804 gimple_seq_add_stmt (&body, g);
12805 }
12806 }
12807 else
12808 gimplify_and_add (OMP_BODY (expr), &body);
1a80d6b8
JJ
12809 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
12810 TREE_CODE (expr));
554a530f 12811 in_omp_construct = save_in_omp_construct;
953ff289 12812
acf0174b
JJ
12813 switch (TREE_CODE (expr))
12814 {
41dbbb37
TS
12815 case OACC_DATA:
12816 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
12817 OMP_CLAUSES (expr));
12818 break;
37d5ad46 12819 case OACC_HOST_DATA:
b3b75e66
TS
12820 if (omp_find_clause (OMP_CLAUSES (expr), OMP_CLAUSE_IF_PRESENT))
12821 {
12822 for (tree c = OMP_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
12823 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
12824 OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c) = 1;
12825 }
12826
37d5ad46
JB
12827 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
12828 OMP_CLAUSES (expr));
12829 break;
b3b75e66
TS
12830 case OACC_KERNELS:
12831 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
12832 OMP_CLAUSES (expr));
12833 break;
41dbbb37
TS
12834 case OACC_PARALLEL:
12835 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
12836 OMP_CLAUSES (expr));
12837 break;
62aee289
MR
12838 case OACC_SERIAL:
12839 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_SERIAL,
12840 OMP_CLAUSES (expr));
12841 break;
acf0174b
JJ
12842 case OMP_SECTIONS:
12843 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
12844 break;
12845 case OMP_SINGLE:
12846 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
12847 break;
12848 case OMP_TARGET:
12849 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
12850 OMP_CLAUSES (expr));
12851 break;
12852 case OMP_TARGET_DATA:
      /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
         to be evaluated before the use_device_{ptr,addr} clauses if they
         refer to the same variables.  */
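      /* E.g. (an illustrative case):
	   #pragma omp target data map(tofrom: p[0:n]) use_device_ptr(p)
         needs the map of p[0:n] processed first so that use_device_ptr(p)
         can hand out the corresponding device pointer.  */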
12856 {
12857 tree use_device_clauses;
12858 tree *pc, *uc = &use_device_clauses;
12859 for (pc = &OMP_CLAUSES (expr); *pc; )
12860 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_PTR
12861 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_ADDR)
12862 {
12863 *uc = *pc;
12864 *pc = OMP_CLAUSE_CHAIN (*pc);
12865 uc = &OMP_CLAUSE_CHAIN (*uc);
12866 }
12867 else
12868 pc = &OMP_CLAUSE_CHAIN (*pc);
12869 *uc = NULL_TREE;
12870 *pc = use_device_clauses;
12871 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
12872 OMP_CLAUSES (expr));
12873 }
acf0174b
JJ
12874 break;
12875 case OMP_TEAMS:
12876 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
28567c40
JJ
12877 if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
12878 gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
acf0174b
JJ
12879 break;
12880 default:
12881 gcc_unreachable ();
12882 }
12883
12884 gimplify_seq_add_stmt (pre_p, stmt);
12885 *expr_p = NULL_TREE;
12886}
12887
/* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
   target update constructs.  */

static void
gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  int kind;
  gomp_target *stmt;
  enum omp_region_type ort = ORT_WORKSHARE;

41dbbb37
TS
12899 switch (TREE_CODE (expr))
12900 {
12901 case OACC_ENTER_DATA:
41dbbb37 12902 case OACC_EXIT_DATA:
41dbbb37 12903 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
182190f2 12904 ort = ORT_ACC;
41dbbb37
TS
12905 break;
12906 case OACC_UPDATE:
41dbbb37 12907 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
182190f2 12908 ort = ORT_ACC;
41dbbb37
TS
12909 break;
12910 case OMP_TARGET_UPDATE:
41dbbb37
TS
12911 kind = GF_OMP_TARGET_KIND_UPDATE;
12912 break;
d9a6bd32
JJ
12913 case OMP_TARGET_ENTER_DATA:
12914 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
12915 break;
12916 case OMP_TARGET_EXIT_DATA:
12917 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
12918 break;
41dbbb37
TS
12919 default:
12920 gcc_unreachable ();
12921 }
b811915d 12922 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
182190f2 12923 ort, TREE_CODE (expr));
1a80d6b8 12924 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
d9a6bd32 12925 TREE_CODE (expr));
829c6349
CLT
12926 if (TREE_CODE (expr) == OACC_UPDATE
12927 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
12928 OMP_CLAUSE_IF_PRESENT))
12929 {
12930 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
12931 clause. */
12932 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
12933 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
12934 switch (OMP_CLAUSE_MAP_KIND (c))
12935 {
12936 case GOMP_MAP_FORCE_TO:
12937 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
12938 break;
12939 case GOMP_MAP_FORCE_FROM:
12940 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
12941 break;
12942 default:
12943 break;
12944 }
12945 }
12946 else if (TREE_CODE (expr) == OACC_EXIT_DATA
12947 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
12948 OMP_CLAUSE_FINALIZE))
12949 {
32128577
TS
12950 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
12951 semantics. */
12952 bool have_clause = false;
829c6349
CLT
12953 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
12954 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
12955 switch (OMP_CLAUSE_MAP_KIND (c))
12956 {
12957 case GOMP_MAP_FROM:
12958 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
32128577 12959 have_clause = true;
829c6349
CLT
12960 break;
12961 case GOMP_MAP_RELEASE:
12962 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
32128577 12963 have_clause = true;
829c6349 12964 break;
32128577
TS
12965 case GOMP_MAP_POINTER:
12966 case GOMP_MAP_TO_PSET:
12967 /* TODO PR92929: we may see these here, but they'll always follow
12968 one of the clauses above, and will be handled by libgomp as
12969 one group, so no handling required here. */
12970 gcc_assert (have_clause);
829c6349 12971 break;
4fd872bc
JB
12972 case GOMP_MAP_DETACH:
12973 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_DETACH);
12974 have_clause = false;
12975 break;
12976 case GOMP_MAP_STRUCT:
12977 have_clause = false;
12978 break;
32128577
TS
12979 default:
12980 gcc_unreachable ();
829c6349
CLT
12981 }
12982 }
b811915d 12983 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
726a989a
RB
12984
12985 gimplify_seq_add_stmt (pre_p, stmt);
acf0174b 12986 *expr_p = NULL_TREE;
953ff289
DN
12987}
12988
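/* Illustrative sketch (not part of this pass): user-level OpenACC directives
   that exercise the clause rewriting above.  With "if_present" the update
   map kinds are downgraded from GOMP_MAP_FORCE_{TO,FROM} to
   GOMP_MAP_{TO,FROM}, so data that is not present is silently skipped; with
   "finalize" on "exit data" the FROM/RELEASE map kinds become
   GOMP_MAP_FORCE_FROM/GOMP_MAP_DELETE so the reference count is forced to
   zero.  Compile with -fopenacc.  */

void
drain (float *a, int n)
{
#pragma acc enter data copyin(a[0:n])
  /* ... device kernels elsewhere would update A here ...  */
#pragma acc update self(a[0:n]) if_present
#pragma acc exit data copyout(a[0:n]) finalize
}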
12989/* A subroutine of gimplify_omp_atomic. The front end is supposed to have
b8698a0f 12990 stabilized the lhs of the atomic operation as *ADDR. Return true if
953ff289
DN
12991 EXPR is this stabilized form. */
12992
12993static bool
a509ebb5 12994goa_lhs_expr_p (tree expr, tree addr)
953ff289
DN
12995{
12996 /* Also include casts to other type variants. The C front end is fond
b8698a0f 12997 of adding these for e.g. volatile variables. This is like
953ff289 12998 STRIP_TYPE_NOPS but includes the main variant lookup. */
9600efe1 12999 STRIP_USELESS_TYPE_CONVERSION (expr);
953ff289 13000
78e47463
JJ
13001 if (TREE_CODE (expr) == INDIRECT_REF)
13002 {
13003 expr = TREE_OPERAND (expr, 0);
13004 while (expr != addr
1043771b 13005 && (CONVERT_EXPR_P (expr)
78e47463
JJ
13006 || TREE_CODE (expr) == NON_LVALUE_EXPR)
13007 && TREE_CODE (expr) == TREE_CODE (addr)
9600efe1 13008 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
78e47463
JJ
13009 {
13010 expr = TREE_OPERAND (expr, 0);
13011 addr = TREE_OPERAND (addr, 0);
13012 }
251923f5
JJ
13013 if (expr == addr)
13014 return true;
71458b8a
JJ
13015 return (TREE_CODE (addr) == ADDR_EXPR
13016 && TREE_CODE (expr) == ADDR_EXPR
251923f5 13017 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
78e47463 13018 }
953ff289
DN
13019 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
13020 return true;
13021 return false;
13022}
13023
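/* Illustrative sketch (not part of this pass): for the atomic update below
   the front end stabilizes the lhs as *ADDR with ADDR == &counter, and the
   C front end may wrap the dereference in casts to a volatile-qualified type
   variant, which goa_lhs_expr_p has to look through.  Compile with
   -fopenmp.  */

volatile long counter;

void
bump (long delta)
{
#pragma omp atomic
  counter += delta;
}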
ad19c4be
EB
13024/* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
13025 expression does not involve the lhs, evaluate it into a temporary.
13026 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
13027 or -1 if an error was encountered. */
953ff289
DN
13028
13029static int
726a989a
RB
13030goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
13031 tree lhs_var)
953ff289
DN
13032{
13033 tree expr = *expr_p;
13034 int saw_lhs;
13035
13036 if (goa_lhs_expr_p (expr, lhs_addr))
13037 {
13038 *expr_p = lhs_var;
13039 return 1;
13040 }
13041 if (is_gimple_val (expr))
13042 return 0;
b8698a0f 13043
953ff289
DN
13044 saw_lhs = 0;
13045 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
13046 {
13047 case tcc_binary:
067dd3c9 13048 case tcc_comparison:
726a989a
RB
13049 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
13050 lhs_var);
191816a3 13051 /* FALLTHRU */
953ff289 13052 case tcc_unary:
726a989a
RB
13053 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
13054 lhs_var);
953ff289 13055 break;
067dd3c9
JJ
13056 case tcc_expression:
13057 switch (TREE_CODE (expr))
13058 {
13059 case TRUTH_ANDIF_EXPR:
13060 case TRUTH_ORIF_EXPR:
f2b11865
JJ
13061 case TRUTH_AND_EXPR:
13062 case TRUTH_OR_EXPR:
13063 case TRUTH_XOR_EXPR:
56b5041c 13064 case BIT_INSERT_EXPR:
067dd3c9
JJ
13065 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
13066 lhs_addr, lhs_var);
191816a3 13067 /* FALLTHRU */
f2b11865 13068 case TRUTH_NOT_EXPR:
067dd3c9
JJ
13069 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
13070 lhs_addr, lhs_var);
13071 break;
4063e61b
JM
13072 case COMPOUND_EXPR:
13073 /* Break out any preevaluations from cp_build_modify_expr. */
13074 for (; TREE_CODE (expr) == COMPOUND_EXPR;
13075 expr = TREE_OPERAND (expr, 1))
13076 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
13077 *expr_p = expr;
13078 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
067dd3c9
JJ
13079 default:
13080 break;
13081 }
13082 break;
56b5041c
JJ
13083 case tcc_reference:
13084 if (TREE_CODE (expr) == BIT_FIELD_REF)
13085 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
13086 lhs_addr, lhs_var);
13087 break;
953ff289
DN
13088 default:
13089 break;
13090 }
13091
13092 if (saw_lhs == 0)
13093 {
13094 enum gimplify_status gs;
13095 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
13096 if (gs != GS_ALL_DONE)
13097 saw_lhs = -1;
13098 }
13099
13100 return saw_lhs;
13101}
13102
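/* Illustrative sketch (not part of this pass): in the statement below the
   subexpression "f (y)" does not mention the atomic lhs, so
   goa_stabilize_expr gimplifies it into a temporary emitted on PRE_P,
   leaving only "x + tmp" to be expanded as the atomic operation.  Compile
   with -fopenmp; "f" is just a placeholder function.  */

extern int f (int);
int x;

void
accumulate (int y)
{
#pragma omp atomic
  x = x + f (y);
}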
953ff289
DN
13103/* Gimplify an OMP_ATOMIC statement. */
13104
13105static enum gimplify_status
726a989a 13106gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
953ff289
DN
13107{
13108 tree addr = TREE_OPERAND (*expr_p, 0);
20906c66
JJ
13109 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
13110 ? NULL : TREE_OPERAND (*expr_p, 1);
953ff289 13111 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
726a989a 13112 tree tmp_load;
538dd0b7
DM
13113 gomp_atomic_load *loadstmt;
13114 gomp_atomic_store *storestmt;
953ff289 13115
b731b390 13116 tmp_load = create_tmp_reg (type);
20906c66
JJ
13117 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
13118 return GS_ERROR;
13119
13120 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
13121 != GS_ALL_DONE)
13122 return GS_ERROR;
953ff289 13123
28567c40
JJ
13124 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
13125 OMP_ATOMIC_MEMORY_ORDER (*expr_p));
20906c66 13126 gimplify_seq_add_stmt (pre_p, loadstmt);
18a23298
JJ
13127 if (rhs)
13128 {
13129 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
13130 representatives. Use BIT_FIELD_REF on the lhs instead. */
13131 if (TREE_CODE (rhs) == BIT_INSERT_EXPR
13132 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
13133 {
13134 tree bitpos = TREE_OPERAND (rhs, 2);
13135 tree op1 = TREE_OPERAND (rhs, 1);
13136 tree bitsize;
13137 tree tmp_store = tmp_load;
13138 if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
8e5993e2 13139 tmp_store = get_initialized_tmp_var (tmp_load, pre_p);
18a23298
JJ
13140 if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
13141 bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
13142 else
13143 bitsize = TYPE_SIZE (TREE_TYPE (op1));
13144 gcc_assert (TREE_OPERAND (rhs, 0) == tmp_load);
13145 tree t = build2_loc (EXPR_LOCATION (rhs),
13146 MODIFY_EXPR, void_type_node,
13147 build3_loc (EXPR_LOCATION (rhs), BIT_FIELD_REF,
13148 TREE_TYPE (op1), tmp_store, bitsize,
13149 bitpos), op1);
13150 gimplify_and_add (t, pre_p);
13151 rhs = tmp_store;
13152 }
13153 if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
13154 != GS_ALL_DONE)
13155 return GS_ERROR;
13156 }
953ff289 13157
20906c66
JJ
13158 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
13159 rhs = tmp_load;
28567c40
JJ
13160 storestmt
13161 = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
20906c66
JJ
13162 gimplify_seq_add_stmt (pre_p, storestmt);
13163 switch (TREE_CODE (*expr_p))
13164 {
13165 case OMP_ATOMIC_READ:
13166 case OMP_ATOMIC_CAPTURE_OLD:
13167 *expr_p = tmp_load;
13168 gimple_omp_atomic_set_need_value (loadstmt);
13169 break;
13170 case OMP_ATOMIC_CAPTURE_NEW:
13171 *expr_p = rhs;
13172 gimple_omp_atomic_set_need_value (storestmt);
13173 break;
13174 default:
13175 *expr_p = NULL;
13176 break;
13177 }
a509ebb5 13178
acf0174b 13179 return GS_ALL_DONE;
953ff289 13180}
6de9cd9a 13181
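/* Illustrative sketch (not part of this pass): the two capture forms below
   map onto the OMP_ATOMIC_CAPTURE_OLD and OMP_ATOMIC_CAPTURE_NEW cases
   handled above; the first marks the GIMPLE_OMP_ATOMIC_LOAD as needing a
   value, the second marks the GIMPLE_OMP_ATOMIC_STORE.  Compile with
   -fopenmp.  */

int x;

int
fetch_then_add (int delta)
{
  int old;
#pragma omp atomic capture
  { old = x; x += delta; }
  return old;
}

int
add_then_fetch (int delta)
{
  int val;
#pragma omp atomic capture
  { x += delta; val = x; }
  return val;
}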
0a35513e
AH
13182/* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
13183 body, and adding some EH bits. */
13184
13185static enum gimplify_status
13186gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
13187{
13188 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
355fe088 13189 gimple *body_stmt;
538dd0b7 13190 gtransaction *trans_stmt;
0a35513e 13191 gimple_seq body = NULL;
0a35513e
AH
13192 int subcode = 0;
13193
13194 /* Wrap the transaction body in a BIND_EXPR so we have a context
41dbbb37 13195 in which to put decls for OMP. */
0a35513e
AH
13196 if (TREE_CODE (tbody) != BIND_EXPR)
13197 {
13198 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
13199 TREE_SIDE_EFFECTS (bind) = 1;
13200 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
13201 TRANSACTION_EXPR_BODY (expr) = bind;
13202 }
13203
45852dcc 13204 push_gimplify_context ();
0a35513e
AH
13205 temp = voidify_wrapper_expr (*expr_p, NULL);
13206
538dd0b7
DM
13207 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
13208 pop_gimplify_context (body_stmt);
0a35513e 13209
7c11b0fe 13210 trans_stmt = gimple_build_transaction (body);
0a35513e
AH
13211 if (TRANSACTION_EXPR_OUTER (expr))
13212 subcode = GTMA_IS_OUTER;
13213 else if (TRANSACTION_EXPR_RELAXED (expr))
13214 subcode = GTMA_IS_RELAXED;
538dd0b7 13215 gimple_transaction_set_subcode (trans_stmt, subcode);
0a35513e 13216
538dd0b7 13217 gimplify_seq_add_stmt (pre_p, trans_stmt);
0a35513e
AH
13218
13219 if (temp)
13220 {
13221 *expr_p = temp;
13222 return GS_OK;
13223 }
13224
13225 *expr_p = NULL_TREE;
13226 return GS_ALL_DONE;
13227}
13228
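/* Illustrative sketch (not part of this pass): the first function below
   gimplifies to a transaction with no extra subcode, the second to one with
   GTMA_IS_RELAXED set, as handled above.  Requires -fgnu-tm; "log_event" is
   a placeholder for a transaction-unsafe call that is only legal in the
   relaxed form.  */

extern void log_event (int);
int balance;

void
deposit_atomic (int amount)
{
  __transaction_atomic { balance += amount; }
}

void
deposit_relaxed (int amount)
{
  __transaction_relaxed { balance += amount; log_event (amount); }
}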
d9a6bd32
JJ
13229/* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
13230 is the OMP_BODY of the original EXPR (which has already been
13231 gimplified so it's not present in the EXPR).
13232
13233 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
13234
13235static gimple *
13236gimplify_omp_ordered (tree expr, gimple_seq body)
13237{
13238 tree c, decls;
13239 int failures = 0;
13240 unsigned int i;
13241 tree source_c = NULL_TREE;
13242 tree sink_c = NULL_TREE;
13243
13244 if (gimplify_omp_ctxp)
6b37bdaf
PP
13245 {
13246 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
13247 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
13248 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
13249 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
13250 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
13251 {
13252 error_at (OMP_CLAUSE_LOCATION (c),
13253 "%<ordered%> construct with %<depend%> clause must be "
13254 "closely nested inside a loop with %<ordered%> clause "
13255 "with a parameter");
13256 failures++;
13257 }
13258 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
13259 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
13260 {
13261 bool fail = false;
13262 for (decls = OMP_CLAUSE_DECL (c), i = 0;
13263 decls && TREE_CODE (decls) == TREE_LIST;
13264 decls = TREE_CHAIN (decls), ++i)
13265 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
13266 continue;
13267 else if (TREE_VALUE (decls)
13268 != gimplify_omp_ctxp->loop_iter_var[2 * i])
13269 {
13270 error_at (OMP_CLAUSE_LOCATION (c),
13271 "variable %qE is not an iteration "
13272 "of outermost loop %d, expected %qE",
13273 TREE_VALUE (decls), i + 1,
13274 gimplify_omp_ctxp->loop_iter_var[2 * i]);
13275 fail = true;
13276 failures++;
13277 }
13278 else
13279 TREE_VALUE (decls)
13280 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
13281 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
13282 {
13283 error_at (OMP_CLAUSE_LOCATION (c),
90a0bf4e
JJ
13284 "number of variables in %<depend%> clause with "
13285 "%<sink%> modifier does not match number of "
6b37bdaf
PP
13286 "iteration variables");
13287 failures++;
13288 }
13289 sink_c = c;
13290 }
13291 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
13292 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
13293 {
13294 if (source_c)
d9a6bd32
JJ
13295 {
13296 error_at (OMP_CLAUSE_LOCATION (c),
90a0bf4e
JJ
13297 "more than one %<depend%> clause with %<source%> "
13298 "modifier on an %<ordered%> construct");
d9a6bd32
JJ
13299 failures++;
13300 }
13301 else
6b37bdaf
PP
13302 source_c = c;
13303 }
13304 }
d9a6bd32
JJ
13305 if (source_c && sink_c)
13306 {
13307 error_at (OMP_CLAUSE_LOCATION (source_c),
90a0bf4e
JJ
13308 "%<depend%> clause with %<source%> modifier specified "
13309 "together with %<depend%> clauses with %<sink%> modifier "
13310 "on the same construct");
d9a6bd32
JJ
13311 failures++;
13312 }
13313
13314 if (failures)
13315 return gimple_build_nop ();
13316 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
13317}
13318
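/* Illustrative sketch (not part of this pass): a doacross loop whose
   depend(sink)/depend(source) clauses are checked above against the loop's
   iteration variable.  Naming anything other than "i" in the sink clause,
   or listing more variables than the ordered(1) parameter allows, triggers
   the diagnostics emitted by gimplify_omp_ordered.  Compile with
   -fopenmp.  */

void
scan (int *a, int n)
{
#pragma omp parallel for ordered(1)
  for (int i = 1; i < n; i++)
    {
#pragma omp ordered depend(sink: i - 1)
      a[i] += a[i - 1];
#pragma omp ordered depend(source)
    }
}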
ad19c4be 13319/* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
726a989a
RB
13320 expression produces a value to be used as an operand inside a GIMPLE
13321 statement, the value will be stored back in *EXPR_P. This value will
13322 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
13323 an SSA_NAME. The corresponding sequence of GIMPLE statements is
13324 emitted in PRE_P and POST_P.
13325
13326 Additionally, this process may overwrite parts of the input
13327 expression during gimplification. Ideally, it should be
13328 possible to do non-destructive gimplification.
13329
13330 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
13331 the expression needs to evaluate to a value to be used as
13332 an operand in a GIMPLE statement, this value will be stored in
13333 *EXPR_P on exit. This happens when the caller specifies one
13334 of fb_lvalue or fb_rvalue fallback flags.
13335
13336 PRE_P will contain the sequence of GIMPLE statements corresponding
13337 to the evaluation of EXPR and all the side-effects that must
13338 be executed before the main expression. On exit, the last
13339 statement of PRE_P is the core statement being gimplified. For
13340 instance, when gimplifying 'if (++a)' the last statement in
13341 PRE_P will be 'if (t.1)' where t.1 is the result of
13342 pre-incrementing 'a'.
13343
13344 POST_P will contain the sequence of GIMPLE statements corresponding
13345 to the evaluation of all the side-effects that must be executed
13346 after the main expression. If this is NULL, the post
13347 side-effects are stored at the end of PRE_P.
13348
13349 The reason why the output is split in two is to handle post
13350 side-effects explicitly. In some cases, an expression may have
13351 inner and outer post side-effects which need to be emitted in
13352 an order different from the one given by the recursive
13353 traversal. For instance, for the expression (*p--)++ the post
13354 side-effects of '--' must actually occur *after* the post
13355 side-effects of '++'. However, gimplification will first visit
13356 the inner expression, so if a separate POST sequence was not
13357 used, the resulting sequence would be:
13358
13359 1 t.1 = *p
13360 2 p = p - 1
13361 3 t.2 = t.1 + 1
13362 4 *p = t.2
13363
13364 However, the post-decrement operation in line #2 must not be
13365 evaluated until after the store to *p at line #4, so the
13366 correct sequence should be:
13367
13368 1 t.1 = *p
13369 2 t.2 = t.1 + 1
13370 3 *p = t.2
13371 4 p = p - 1
13372
13373 So, by specifying a separate post queue, it is possible
13374 to emit the post side-effects in the correct order.
13375 If POST_P is NULL, an internal queue will be used. Before
13376 returning to the caller, the sequence POST_P is appended to
13377 the main output sequence PRE_P.
13378
13379 GIMPLE_TEST_F points to a function that takes a tree T and
13380 returns nonzero if T is in the GIMPLE form requested by the
12947319 13381 caller. The GIMPLE predicates are in gimple.c.
726a989a
RB
13382
13383 FALLBACK tells the function what sort of a temporary we want if
13384 gimplification cannot produce an expression that complies with
13385 GIMPLE_TEST_F.
13386
13387 fb_none means that no temporary should be generated
13388 fb_rvalue means that an rvalue is OK to generate
13389 fb_lvalue means that an lvalue is OK to generate
13390 fb_either means that either is OK, but an lvalue is preferable.
13391 fb_mayfail means that gimplification may fail (in which case
13392 GS_ERROR will be returned)
13393
13394 The return value is either GS_ERROR or GS_ALL_DONE, since this
13395 function iterates until EXPR is completely gimplified or an error
13396 occurs. */
6de9cd9a
DN
13397
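/* Illustrative sketch (not part of GCC's documentation): a typical internal
   use of this routine forces an operand into a register-sized value, for
   example

     ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                          is_gimple_val, fb_rvalue);

   with any statements needed to compute the operand appended to PRE_P and
   the operand itself replaced by a temporary that satisfies is_gimple_val.  */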
13398enum gimplify_status
726a989a
RB
13399gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
13400 bool (*gimple_test_f) (tree), fallback_t fallback)
6de9cd9a
DN
13401{
13402 tree tmp;
726a989a
RB
13403 gimple_seq internal_pre = NULL;
13404 gimple_seq internal_post = NULL;
6de9cd9a 13405 tree save_expr;
726a989a 13406 bool is_statement;
6de9cd9a
DN
13407 location_t saved_location;
13408 enum gimplify_status ret;
726a989a 13409 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
6dc4a604 13410 tree label;
6de9cd9a
DN
13411
13412 save_expr = *expr_p;
13413 if (save_expr == NULL_TREE)
13414 return GS_ALL_DONE;
13415
726a989a
RB
13416 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
13417 is_statement = gimple_test_f == is_gimple_stmt;
13418 if (is_statement)
13419 gcc_assert (pre_p);
13420
13421 /* Consistency checks. */
13422 if (gimple_test_f == is_gimple_reg)
13423 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
13424 else if (gimple_test_f == is_gimple_val
726a989a
RB
13425 || gimple_test_f == is_gimple_call_addr
13426 || gimple_test_f == is_gimple_condexpr
70e2a30a 13427 || gimple_test_f == is_gimple_condexpr_for_cond
726a989a 13428 || gimple_test_f == is_gimple_mem_rhs
ba4d8f9d 13429 || gimple_test_f == is_gimple_mem_rhs_or_call
726a989a 13430 || gimple_test_f == is_gimple_reg_rhs
ba4d8f9d 13431 || gimple_test_f == is_gimple_reg_rhs_or_call
70f34814
RG
13432 || gimple_test_f == is_gimple_asm_val
13433 || gimple_test_f == is_gimple_mem_ref_addr)
726a989a
RB
13434 gcc_assert (fallback & fb_rvalue);
13435 else if (gimple_test_f == is_gimple_min_lval
13436 || gimple_test_f == is_gimple_lvalue)
13437 gcc_assert (fallback & fb_lvalue);
13438 else if (gimple_test_f == is_gimple_addressable)
13439 gcc_assert (fallback & fb_either);
13440 else if (gimple_test_f == is_gimple_stmt)
13441 gcc_assert (fallback == fb_none);
13442 else
13443 {
13444 /* We should have recognized the GIMPLE_TEST_F predicate to
13445 know what kind of fallback to use in case a temporary is
13446 needed to hold the value or address of *EXPR_P. */
13447 gcc_unreachable ();
13448 }
13449
6de9cd9a
DN
13450 /* We used to check the predicate here and return immediately if it
13451 succeeds. This is wrong; the design is for gimplification to be
13452 idempotent, and for the predicates to only test for valid forms, not
13453 whether they are fully simplified. */
6de9cd9a
DN
13454 if (pre_p == NULL)
13455 pre_p = &internal_pre;
726a989a 13456
6de9cd9a
DN
13457 if (post_p == NULL)
13458 post_p = &internal_post;
13459
726a989a
RB
13460 /* Remember the last statements added to PRE_P and POST_P. Every
13461 new statement added by the gimplification helpers needs to be
13462 annotated with location information. To centralize the
13463 responsibility, we remember the last statement that had been
13464 added to both queues before gimplifying *EXPR_P. If
13465 gimplification produces new statements in PRE_P and POST_P, those
13466 statements will be annotated with the same location information
13467 as *EXPR_P. */
13468 pre_last_gsi = gsi_last (*pre_p);
13469 post_last_gsi = gsi_last (*post_p);
13470
6de9cd9a 13471 saved_location = input_location;
a281759f
PB
13472 if (save_expr != error_mark_node
13473 && EXPR_HAS_LOCATION (*expr_p))
13474 input_location = EXPR_LOCATION (*expr_p);
6de9cd9a
DN
13475
13476 /* Loop over the specific gimplifiers until the toplevel node
13477 remains the same. */
13478 do
13479 {
73d6ddef
RK
13480 /* Strip away as many useless type conversions as possible
13481 at the toplevel. */
13482 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
6de9cd9a
DN
13483
13484 /* Remember the expr. */
13485 save_expr = *expr_p;
13486
13487 /* Die, die, die, my darling. */
d0c4eaee 13488 if (error_operand_p (save_expr))
6de9cd9a
DN
13489 {
13490 ret = GS_ERROR;
13491 break;
13492 }
13493
13494 /* Do any language-specific gimplification. */
32e8bb8e
ILT
13495 ret = ((enum gimplify_status)
13496 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
6de9cd9a
DN
13497 if (ret == GS_OK)
13498 {
13499 if (*expr_p == NULL_TREE)
13500 break;
13501 if (*expr_p != save_expr)
13502 continue;
13503 }
13504 else if (ret != GS_UNHANDLED)
13505 break;
13506
941f78d1
JM
13507 /* Make sure that all the cases set 'ret' appropriately. */
13508 ret = GS_UNHANDLED;
6de9cd9a
DN
13509 switch (TREE_CODE (*expr_p))
13510 {
13511 /* First deal with the special cases. */
13512
13513 case POSTINCREMENT_EXPR:
13514 case POSTDECREMENT_EXPR:
13515 case PREINCREMENT_EXPR:
13516 case PREDECREMENT_EXPR:
13517 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
cc3c4f62
RB
13518 fallback != fb_none,
13519 TREE_TYPE (*expr_p));
6de9cd9a
DN
13520 break;
13521
0bd34ae4 13522 case VIEW_CONVERT_EXPR:
f47f687a
RB
13523 if ((fallback & fb_rvalue)
13524 && is_gimple_reg_type (TREE_TYPE (*expr_p))
0bd34ae4
RB
13525 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
13526 {
13527 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13528 post_p, is_gimple_val, fb_rvalue);
13529 recalculate_side_effects (*expr_p);
13530 break;
13531 }
13532 /* Fallthru. */
13533
6de9cd9a 13534 case ARRAY_REF:
44de5aeb
RK
13535 case ARRAY_RANGE_REF:
13536 case REALPART_EXPR:
13537 case IMAGPART_EXPR:
6de9cd9a
DN
13538 case COMPONENT_REF:
13539 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
90051e16 13540 fallback ? fallback : fb_rvalue);
6de9cd9a
DN
13541 break;
13542
13543 case COND_EXPR:
dae7ec87 13544 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
726a989a 13545
0223e4f5
JM
13546 /* C99 code may assign to an array in a structure value of a
13547 conditional expression, and this has undefined behavior
13548 only on execution, so create a temporary if an lvalue is
13549 required. */
13550 if (fallback == fb_lvalue)
13551 {
381cdae4 13552 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
936d04b6 13553 mark_addressable (*expr_p);
941f78d1 13554 ret = GS_OK;
0223e4f5 13555 }
6de9cd9a
DN
13556 break;
13557
13558 case CALL_EXPR:
90051e16 13559 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
726a989a 13560
0223e4f5
JM
13561 /* C99 code may assign to an array in a structure returned
13562 from a function, and this has undefined behavior only on
13563 execution, so create a temporary if an lvalue is
13564 required. */
13565 if (fallback == fb_lvalue)
13566 {
381cdae4 13567 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
936d04b6 13568 mark_addressable (*expr_p);
941f78d1 13569 ret = GS_OK;
0223e4f5 13570 }
6de9cd9a
DN
13571 break;
13572
13573 case TREE_LIST:
282899df 13574 gcc_unreachable ();
6de9cd9a
DN
13575
13576 case COMPOUND_EXPR:
13577 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
13578 break;
13579
2ec5deb5 13580 case COMPOUND_LITERAL_EXPR:
4c53d183
MM
13581 ret = gimplify_compound_literal_expr (expr_p, pre_p,
13582 gimple_test_f, fallback);
2ec5deb5
PB
13583 break;
13584
6de9cd9a
DN
13585 case MODIFY_EXPR:
13586 case INIT_EXPR:
ebad5233
JM
13587 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
13588 fallback != fb_none);
6de9cd9a
DN
13589 break;
13590
13591 case TRUTH_ANDIF_EXPR:
13592 case TRUTH_ORIF_EXPR:
1d15f620
KT
13593 {
13594 /* Preserve the original type of the expression and the
13595 source location of the outer expression. */
13596 tree org_type = TREE_TYPE (*expr_p);
13597 *expr_p = gimple_boolify (*expr_p);
4b4455e5 13598 *expr_p = build3_loc (input_location, COND_EXPR,
1d15f620
KT
13599 org_type, *expr_p,
13600 fold_convert_loc
4b4455e5 13601 (input_location,
1d15f620
KT
13602 org_type, boolean_true_node),
13603 fold_convert_loc
4b4455e5 13604 (input_location,
1d15f620
KT
13605 org_type, boolean_false_node));
13606 ret = GS_OK;
13607 break;
13608 }
6de9cd9a
DN
13609
13610 case TRUTH_NOT_EXPR:
3c6cbf7a 13611 {
53020648
RG
13612 tree type = TREE_TYPE (*expr_p);
13613 /* The parsers are careful to generate TRUTH_NOT_EXPR
13614 only with operands that are always zero or one.
13615 We do not fold here but handle the only interesting case
13616 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
3c6cbf7a 13617 *expr_p = gimple_boolify (*expr_p);
53020648
RG
13618 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
13619 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
13620 TREE_TYPE (*expr_p),
13621 TREE_OPERAND (*expr_p, 0));
13622 else
13623 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
13624 TREE_TYPE (*expr_p),
13625 TREE_OPERAND (*expr_p, 0),
13626 build_int_cst (TREE_TYPE (*expr_p), 1));
13627 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
13628 *expr_p = fold_convert_loc (input_location, type, *expr_p);
13629 ret = GS_OK;
bd5d002e 13630 break;
3c6cbf7a 13631 }
67339062 13632
6de9cd9a
DN
13633 case ADDR_EXPR:
13634 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
13635 break;
13636
8170608b
TB
13637 case ANNOTATE_EXPR:
13638 {
13639 tree cond = TREE_OPERAND (*expr_p, 0);
718c4601 13640 tree kind = TREE_OPERAND (*expr_p, 1);
ac9effed 13641 tree data = TREE_OPERAND (*expr_p, 2);
664ceb1e
JJ
13642 tree type = TREE_TYPE (cond);
13643 if (!INTEGRAL_TYPE_P (type))
13644 {
13645 *expr_p = cond;
13646 ret = GS_OK;
13647 break;
13648 }
b731b390 13649 tree tmp = create_tmp_var (type);
8170608b 13650 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
538dd0b7 13651 gcall *call
ac9effed 13652 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
8170608b
TB
13653 gimple_call_set_lhs (call, tmp);
13654 gimplify_seq_add_stmt (pre_p, call);
13655 *expr_p = tmp;
13656 ret = GS_ALL_DONE;
13657 break;
13658 }
13659
6de9cd9a 13660 case VA_ARG_EXPR:
cd3ce9b4 13661 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
6de9cd9a
DN
13662 break;
13663
1043771b 13664 CASE_CONVERT:
6de9cd9a
DN
13665 if (IS_EMPTY_STMT (*expr_p))
13666 {
13667 ret = GS_ALL_DONE;
13668 break;
13669 }
13670
13671 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
13672 || fallback == fb_none)
13673 {
13674 /* Just strip a conversion to void (or in void context) and
13675 try again. */
13676 *expr_p = TREE_OPERAND (*expr_p, 0);
941f78d1 13677 ret = GS_OK;
6de9cd9a
DN
13678 break;
13679 }
13680
13681 ret = gimplify_conversion (expr_p);
13682 if (ret == GS_ERROR)
13683 break;
13684 if (*expr_p != save_expr)
13685 break;
13686 /* FALLTHRU */
13687
13688 case FIX_TRUNC_EXPR:
6de9cd9a
DN
13689 /* unary_expr: ... | '(' cast ')' val | ... */
13690 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13691 is_gimple_val, fb_rvalue);
13692 recalculate_side_effects (*expr_p);
13693 break;
13694
6a720599 13695 case INDIRECT_REF:
70f34814
RG
13696 {
13697 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
3748f5c9 13698 bool notrap = TREE_THIS_NOTRAP (*expr_p);
70f34814
RG
13699 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
13700
13701 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
13702 if (*expr_p != save_expr)
13703 {
13704 ret = GS_OK;
13705 break;
13706 }
13707
13708 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13709 is_gimple_reg, fb_rvalue);
dca26746
RG
13710 if (ret == GS_ERROR)
13711 break;
70f34814 13712
dca26746 13713 recalculate_side_effects (*expr_p);
70f34814
RG
13714 *expr_p = fold_build2_loc (input_location, MEM_REF,
13715 TREE_TYPE (*expr_p),
13716 TREE_OPERAND (*expr_p, 0),
13717 build_int_cst (saved_ptr_type, 0));
13718 TREE_THIS_VOLATILE (*expr_p) = volatilep;
3748f5c9 13719 TREE_THIS_NOTRAP (*expr_p) = notrap;
70f34814
RG
13720 ret = GS_OK;
13721 break;
13722 }
13723
13724 /* We arrive here through the various re-gimplification paths. */
13725 case MEM_REF:
13726 /* First try re-folding the whole thing. */
13727 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
13728 TREE_OPERAND (*expr_p, 0),
13729 TREE_OPERAND (*expr_p, 1));
13730 if (tmp)
941f78d1 13731 {
ee45a32d
EB
13732 REF_REVERSE_STORAGE_ORDER (tmp)
13733 = REF_REVERSE_STORAGE_ORDER (*expr_p);
70f34814
RG
13734 *expr_p = tmp;
13735 recalculate_side_effects (*expr_p);
941f78d1
JM
13736 ret = GS_OK;
13737 break;
13738 }
01718e96
RG
13739 /* Avoid re-gimplifying the address operand if it is already
13740 in suitable form. Re-gimplifying would mark the address
13741 operand addressable. Always gimplify when not in SSA form
13742 as we still may have to gimplify decls with value-exprs. */
ebc1b29e 13743 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
01718e96
RG
13744 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
13745 {
13746 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13747 is_gimple_mem_ref_addr, fb_rvalue);
13748 if (ret == GS_ERROR)
13749 break;
13750 }
6de9cd9a 13751 recalculate_side_effects (*expr_p);
70f34814 13752 ret = GS_ALL_DONE;
6de9cd9a
DN
13753 break;
13754
01718e96 13755 /* Constants need not be gimplified. */
6de9cd9a
DN
13756 case INTEGER_CST:
13757 case REAL_CST:
325217ed 13758 case FIXED_CST:
6de9cd9a
DN
13759 case STRING_CST:
13760 case COMPLEX_CST:
13761 case VECTOR_CST:
3f5c390d
RB
13762 /* Drop the overflow flag on constants, we do not want
13763 that in the GIMPLE IL. */
13764 if (TREE_OVERFLOW_P (*expr_p))
13765 *expr_p = drop_tree_overflow (*expr_p);
6de9cd9a
DN
13766 ret = GS_ALL_DONE;
13767 break;
13768
13769 case CONST_DECL:
0534fa56 13770 /* If we require an lvalue, such as for ADDR_EXPR, retain the
2a7e31df 13771 CONST_DECL node. Otherwise the decl is replaceable by its
0534fa56
RH
13772 value. */
13773 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
13774 if (fallback & fb_lvalue)
13775 ret = GS_ALL_DONE;
13776 else
941f78d1
JM
13777 {
13778 *expr_p = DECL_INITIAL (*expr_p);
13779 ret = GS_OK;
13780 }
6de9cd9a
DN
13781 break;
13782
350fae66 13783 case DECL_EXPR:
726a989a 13784 ret = gimplify_decl_expr (expr_p, pre_p);
350fae66
RK
13785 break;
13786
6de9cd9a 13787 case BIND_EXPR:
c6c7698d 13788 ret = gimplify_bind_expr (expr_p, pre_p);
6de9cd9a
DN
13789 break;
13790
13791 case LOOP_EXPR:
13792 ret = gimplify_loop_expr (expr_p, pre_p);
13793 break;
13794
13795 case SWITCH_EXPR:
13796 ret = gimplify_switch_expr (expr_p, pre_p);
13797 break;
13798
6de9cd9a
DN
13799 case EXIT_EXPR:
13800 ret = gimplify_exit_expr (expr_p);
13801 break;
13802
13803 case GOTO_EXPR:
13804 /* If the target is not a LABEL_DECL, then it is a computed jump
13805 and the target needs to be gimplified. */
13806 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
8c50b495
JJ
13807 {
13808 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
13809 NULL, is_gimple_val, fb_rvalue);
13810 if (ret == GS_ERROR)
13811 break;
13812 }
726a989a
RB
13813 gimplify_seq_add_stmt (pre_p,
13814 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
941f78d1 13815 ret = GS_ALL_DONE;
6de9cd9a
DN
13816 break;
13817
2e28e797 13818 case PREDICT_EXPR:
726a989a
RB
13819 gimplify_seq_add_stmt (pre_p,
13820 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
13821 PREDICT_EXPR_OUTCOME (*expr_p)));
13822 ret = GS_ALL_DONE;
13823 break;
2e28e797 13824
6de9cd9a 13825 case LABEL_EXPR:
81fea426 13826 ret = gimplify_label_expr (expr_p, pre_p);
6dc4a604
ML
13827 label = LABEL_EXPR_LABEL (*expr_p);
13828 gcc_assert (decl_function_context (label) == current_function_decl);
13829
13830 /* If the label is used in a goto statement, or the address of the label
13831 is taken, we need to unpoison all variables that were seen so far.
13832 Doing so prevents us from reporting false positives. */
6ff92497 13833 if (asan_poisoned_variables
6dc4a604
ML
13834 && asan_used_labels != NULL
13835 && asan_used_labels->contains (label))
13836 asan_poison_variables (asan_poisoned_variables, false, pre_p);
6de9cd9a
DN
13837 break;
13838
13839 case CASE_LABEL_EXPR:
726a989a 13840 ret = gimplify_case_label_expr (expr_p, pre_p);
6dc4a604
ML
13841
13842 if (gimplify_ctxp->live_switch_vars)
13843 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
13844 pre_p);
6de9cd9a
DN
13845 break;
13846
13847 case RETURN_EXPR:
13848 ret = gimplify_return_expr (*expr_p, pre_p);
13849 break;
13850
13851 case CONSTRUCTOR:
48eb4e53
RK
13852 /* Don't reduce this in place; let gimplify_init_constructor work its
13853 magic. Buf if we're just elaborating this for side effects, just
13854 gimplify any element that has side-effects. */
13855 if (fallback == fb_none)
13856 {
4038c495 13857 unsigned HOST_WIDE_INT ix;
ac47786e 13858 tree val;
08330ec2 13859 tree temp = NULL_TREE;
ac47786e
NF
13860 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
13861 if (TREE_SIDE_EFFECTS (val))
13862 append_to_statement_list (val, &temp);
48eb4e53 13863
08330ec2 13864 *expr_p = temp;
941f78d1 13865 ret = temp ? GS_OK : GS_ALL_DONE;
48eb4e53 13866 }
ca0b7d18
AP
13867 /* C99 code may assign to an array in a constructed
13868 structure or union, and this has undefined behavior only
13869 on execution, so create a temporary if an lvalue is
13870 required. */
13871 else if (fallback == fb_lvalue)
13872 {
381cdae4 13873 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
936d04b6 13874 mark_addressable (*expr_p);
941f78d1 13875 ret = GS_OK;
ca0b7d18 13876 }
08330ec2
AP
13877 else
13878 ret = GS_ALL_DONE;
6de9cd9a
DN
13879 break;
13880
13881 /* The following are special cases that are not handled by the
13882 original GIMPLE grammar. */
13883
13884 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
13885 eliminated. */
13886 case SAVE_EXPR:
13887 ret = gimplify_save_expr (expr_p, pre_p, post_p);
13888 break;
13889
13890 case BIT_FIELD_REF:
ea814c66
EB
13891 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13892 post_p, is_gimple_lvalue, fb_either);
13893 recalculate_side_effects (*expr_p);
6de9cd9a
DN
13894 break;
13895
150e3929
RG
13896 case TARGET_MEM_REF:
13897 {
13898 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
13899
23a534a1 13900 if (TMR_BASE (*expr_p))
150e3929 13901 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
4d948885 13902 post_p, is_gimple_mem_ref_addr, fb_either);
150e3929
RG
13903 if (TMR_INDEX (*expr_p))
13904 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
13905 post_p, is_gimple_val, fb_rvalue);
4d948885
RG
13906 if (TMR_INDEX2 (*expr_p))
13907 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
13908 post_p, is_gimple_val, fb_rvalue);
150e3929
RG
13909 /* TMR_STEP and TMR_OFFSET are always integer constants. */
13910 ret = MIN (r0, r1);
13911 }
13912 break;
13913
6de9cd9a
DN
13914 case NON_LVALUE_EXPR:
13915 /* This should have been stripped above. */
282899df 13916 gcc_unreachable ();
6de9cd9a
DN
13917
13918 case ASM_EXPR:
13919 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
13920 break;
13921
13922 case TRY_FINALLY_EXPR:
13923 case TRY_CATCH_EXPR:
726a989a
RB
13924 {
13925 gimple_seq eval, cleanup;
538dd0b7 13926 gtry *try_;
726a989a 13927
820055a0
DC
13928 /* Calls to destructors are generated automatically in FINALLY/CATCH
13929 block. They should have location as UNKNOWN_LOCATION. However,
13930 gimplify_call_expr will reset these call stmts to input_location
13931 if it finds stmt's location is unknown. To prevent resetting for
13932 destructors, we set the input_location to unknown.
13933 Note that this only affects the destructor calls in FINALLY/CATCH
13934 block, and will automatically reset to its original value by the
13935 end of gimplify_expr. */
13936 input_location = UNKNOWN_LOCATION;
726a989a
RB
13937 eval = cleanup = NULL;
13938 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
ebebc928
AO
13939 if (TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
13940 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == EH_ELSE_EXPR)
13941 {
13942 gimple_seq n = NULL, e = NULL;
13943 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
13944 0), &n);
13945 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
13946 1), &e);
13947 if (!gimple_seq_empty_p (n) && !gimple_seq_empty_p (e))
13948 {
13949 geh_else *stmt = gimple_build_eh_else (n, e);
13950 gimple_seq_add_stmt (&cleanup, stmt);
13951 }
13952 }
13953 else
13954 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
cc8b343d
JJ
13955 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
13956 if (gimple_seq_empty_p (cleanup))
13957 {
13958 gimple_seq_add_seq (pre_p, eval);
13959 ret = GS_ALL_DONE;
13960 break;
13961 }
726a989a
RB
13962 try_ = gimple_build_try (eval, cleanup,
13963 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
13964 ? GIMPLE_TRY_FINALLY
13965 : GIMPLE_TRY_CATCH);
220d905f 13966 if (EXPR_HAS_LOCATION (save_expr))
e368f44f 13967 gimple_set_location (try_, EXPR_LOCATION (save_expr));
220d905f
AH
13968 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
13969 gimple_set_location (try_, saved_location);
726a989a
RB
13970 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
13971 gimple_try_set_catch_is_cleanup (try_,
13972 TRY_CATCH_IS_CLEANUP (*expr_p));
13973 gimplify_seq_add_stmt (pre_p, try_);
13974 ret = GS_ALL_DONE;
13975 break;
13976 }
6de9cd9a
DN
13977
13978 case CLEANUP_POINT_EXPR:
13979 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
13980 break;
13981
13982 case TARGET_EXPR:
13983 ret = gimplify_target_expr (expr_p, pre_p, post_p);
13984 break;
13985
13986 case CATCH_EXPR:
726a989a 13987 {
355fe088 13988 gimple *c;
726a989a
RB
13989 gimple_seq handler = NULL;
13990 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
13991 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
13992 gimplify_seq_add_stmt (pre_p, c);
13993 ret = GS_ALL_DONE;
13994 break;
13995 }
6de9cd9a
DN
13996
13997 case EH_FILTER_EXPR:
726a989a 13998 {
355fe088 13999 gimple *ehf;
726a989a
RB
14000 gimple_seq failure = NULL;
14001
14002 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
14003 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
d665b6e5 14004 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
726a989a
RB
14005 gimplify_seq_add_stmt (pre_p, ehf);
14006 ret = GS_ALL_DONE;
14007 break;
14008 }
6de9cd9a 14009
0f59171d
RH
14010 case OBJ_TYPE_REF:
14011 {
14012 enum gimplify_status r0, r1;
726a989a
RB
14013 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
14014 post_p, is_gimple_val, fb_rvalue);
14015 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
14016 post_p, is_gimple_val, fb_rvalue);
0f3a057a 14017 TREE_SIDE_EFFECTS (*expr_p) = 0;
0f59171d
RH
14018 ret = MIN (r0, r1);
14019 }
6de9cd9a
DN
14020 break;
14021
6de9cd9a
DN
14022 case LABEL_DECL:
14023 /* We get here when taking the address of a label. We mark
14024 the label as "forced", meaning it can never be removed and
14025 it is a potential target for any computed goto. */
14026 FORCED_LABEL (*expr_p) = 1;
14027 ret = GS_ALL_DONE;
14028 break;
14029
14030 case STATEMENT_LIST:
c6c7698d 14031 ret = gimplify_statement_list (expr_p, pre_p);
6de9cd9a
DN
14032 break;
14033
d25cee4d
RH
14034 case WITH_SIZE_EXPR:
14035 {
70e2829d
KH
14036 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14037 post_p == &internal_post ? NULL : post_p,
14038 gimple_test_f, fallback);
14039 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
14040 is_gimple_val, fb_rvalue);
941f78d1 14041 ret = GS_ALL_DONE;
d25cee4d
RH
14042 }
14043 break;
14044
6de9cd9a 14045 case VAR_DECL:
4744afba 14046 case PARM_DECL:
a9f7c570 14047 ret = gimplify_var_or_parm_decl (expr_p);
6de9cd9a
DN
14048 break;
14049
077b0dfb 14050 case RESULT_DECL:
41dbbb37 14051 /* When within an OMP context, notice uses of variables. */
077b0dfb
JJ
14052 if (gimplify_omp_ctxp)
14053 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
14054 ret = GS_ALL_DONE;
14055 break;
14056
96a95ac1
AO
14057 case DEBUG_EXPR_DECL:
14058 gcc_unreachable ();
14059
14060 case DEBUG_BEGIN_STMT:
14061 gimplify_seq_add_stmt (pre_p,
14062 gimple_build_debug_begin_stmt
14063 (TREE_BLOCK (*expr_p),
14064 EXPR_LOCATION (*expr_p)));
14065 ret = GS_ALL_DONE;
14066 *expr_p = NULL;
14067 break;
14068
71956db3
RH
14069 case SSA_NAME:
14070 /* Allow callbacks into the gimplifier during optimization. */
14071 ret = GS_ALL_DONE;
14072 break;
14073
953ff289 14074 case OMP_PARALLEL:
726a989a
RB
14075 gimplify_omp_parallel (expr_p, pre_p);
14076 ret = GS_ALL_DONE;
953ff289
DN
14077 break;
14078
a68ab351 14079 case OMP_TASK:
726a989a
RB
14080 gimplify_omp_task (expr_p, pre_p);
14081 ret = GS_ALL_DONE;
a68ab351
JJ
14082 break;
14083
953ff289 14084 case OMP_FOR:
74bf76ed 14085 case OMP_SIMD:
acf0174b 14086 case OMP_DISTRIBUTE:
d9a6bd32 14087 case OMP_TASKLOOP:
41dbbb37 14088 case OACC_LOOP:
953ff289
DN
14089 ret = gimplify_omp_for (expr_p, pre_p);
14090 break;
14091
554a530f
JJ
14092 case OMP_LOOP:
14093 ret = gimplify_omp_loop (expr_p, pre_p);
14094 break;
14095
41dbbb37
TS
14096 case OACC_CACHE:
14097 gimplify_oacc_cache (expr_p, pre_p);
14098 ret = GS_ALL_DONE;
14099 break;
14100
6e232ba4
JN
14101 case OACC_DECLARE:
14102 gimplify_oacc_declare (expr_p, pre_p);
14103 ret = GS_ALL_DONE;
14104 break;
14105
37d5ad46 14106 case OACC_HOST_DATA:
88bae6f4 14107 case OACC_DATA:
41dbbb37 14108 case OACC_KERNELS:
41dbbb37 14109 case OACC_PARALLEL:
62aee289 14110 case OACC_SERIAL:
953ff289
DN
14111 case OMP_SECTIONS:
14112 case OMP_SINGLE:
acf0174b
JJ
14113 case OMP_TARGET:
14114 case OMP_TARGET_DATA:
14115 case OMP_TEAMS:
726a989a
RB
14116 gimplify_omp_workshare (expr_p, pre_p);
14117 ret = GS_ALL_DONE;
953ff289
DN
14118 break;
14119
41dbbb37
TS
14120 case OACC_ENTER_DATA:
14121 case OACC_EXIT_DATA:
14122 case OACC_UPDATE:
acf0174b 14123 case OMP_TARGET_UPDATE:
d9a6bd32
JJ
14124 case OMP_TARGET_ENTER_DATA:
14125 case OMP_TARGET_EXIT_DATA:
acf0174b
JJ
14126 gimplify_omp_target_update (expr_p, pre_p);
14127 ret = GS_ALL_DONE;
14128 break;
14129
953ff289
DN
14130 case OMP_SECTION:
14131 case OMP_MASTER:
14132 case OMP_ORDERED:
14133 case OMP_CRITICAL:
bf38f7e9 14134 case OMP_SCAN:
726a989a
RB
14135 {
14136 gimple_seq body = NULL;
355fe088 14137 gimple *g;
554a530f 14138 bool saved_in_omp_construct = in_omp_construct;
726a989a 14139
554a530f 14140 in_omp_construct = true;
726a989a 14141 gimplify_and_add (OMP_BODY (*expr_p), &body);
554a530f 14142 in_omp_construct = saved_in_omp_construct;
726a989a
RB
14143 switch (TREE_CODE (*expr_p))
14144 {
14145 case OMP_SECTION:
14146 g = gimple_build_omp_section (body);
14147 break;
14148 case OMP_MASTER:
14149 g = gimple_build_omp_master (body);
14150 break;
14151 case OMP_ORDERED:
d9a6bd32 14152 g = gimplify_omp_ordered (*expr_p, body);
726a989a
RB
14153 break;
14154 case OMP_CRITICAL:
d9a6bd32
JJ
14155 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
14156 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
1a80d6b8 14157 gimplify_adjust_omp_clauses (pre_p, body,
d9a6bd32
JJ
14158 &OMP_CRITICAL_CLAUSES (*expr_p),
14159 OMP_CRITICAL);
726a989a 14160 g = gimple_build_omp_critical (body,
d9a6bd32
JJ
14161 OMP_CRITICAL_NAME (*expr_p),
14162 OMP_CRITICAL_CLAUSES (*expr_p));
726a989a 14163 break;
bf38f7e9
JJ
14164 case OMP_SCAN:
14165 gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p),
14166 pre_p, ORT_WORKSHARE, OMP_SCAN);
14167 gimplify_adjust_omp_clauses (pre_p, body,
14168 &OMP_SCAN_CLAUSES (*expr_p),
14169 OMP_SCAN);
14170 g = gimple_build_omp_scan (body, OMP_SCAN_CLAUSES (*expr_p));
14171 break;
726a989a
RB
14172 default:
14173 gcc_unreachable ();
14174 }
14175 gimplify_seq_add_stmt (pre_p, g);
14176 ret = GS_ALL_DONE;
14177 break;
14178 }
953ff289 14179
28567c40
JJ
14180 case OMP_TASKGROUP:
14181 {
14182 gimple_seq body = NULL;
14183
14184 tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
554a530f 14185 bool saved_in_omp_construct = in_omp_construct;
28567c40
JJ
14186 gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
14187 OMP_TASKGROUP);
14188 gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);
554a530f
JJ
14189
14190 in_omp_construct = true;
28567c40 14191 gimplify_and_add (OMP_BODY (*expr_p), &body);
554a530f 14192 in_omp_construct = saved_in_omp_construct;
28567c40
JJ
14193 gimple_seq cleanup = NULL;
14194 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
14195 gimple *g = gimple_build_call (fn, 0);
14196 gimple_seq_add_stmt (&cleanup, g);
14197 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
14198 body = NULL;
14199 gimple_seq_add_stmt (&body, g);
14200 g = gimple_build_omp_taskgroup (body, *pclauses);
14201 gimplify_seq_add_stmt (pre_p, g);
14202 ret = GS_ALL_DONE;
14203 break;
14204 }
14205
953ff289 14206 case OMP_ATOMIC:
20906c66
JJ
14207 case OMP_ATOMIC_READ:
14208 case OMP_ATOMIC_CAPTURE_OLD:
14209 case OMP_ATOMIC_CAPTURE_NEW:
953ff289
DN
14210 ret = gimplify_omp_atomic (expr_p, pre_p);
14211 break;
14212
0a35513e
AH
14213 case TRANSACTION_EXPR:
14214 ret = gimplify_transaction (expr_p, pre_p);
14215 break;
14216
16949072
RG
14217 case TRUTH_AND_EXPR:
14218 case TRUTH_OR_EXPR:
14219 case TRUTH_XOR_EXPR:
1d15f620 14220 {
bd5d002e 14221 tree orig_type = TREE_TYPE (*expr_p);
fc1f4caf 14222 tree new_type, xop0, xop1;
1d15f620 14223 *expr_p = gimple_boolify (*expr_p);
fc1f4caf
KT
14224 new_type = TREE_TYPE (*expr_p);
14225 if (!useless_type_conversion_p (orig_type, new_type))
1d15f620 14226 {
4b4455e5 14227 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
1d15f620
KT
14228 ret = GS_OK;
14229 break;
14230 }
da5fb469 14231
bd5d002e
RG
14232 /* Boolified binary truth expressions are semantically equivalent
14233 to bitwise binary expressions. Canonicalize them to the
14234 bitwise variant. */
14235 switch (TREE_CODE (*expr_p))
14236 {
14237 case TRUTH_AND_EXPR:
14238 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
14239 break;
14240 case TRUTH_OR_EXPR:
14241 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
14242 break;
14243 case TRUTH_XOR_EXPR:
14244 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
14245 break;
14246 default:
14247 break;
14248 }
fc1f4caf
KT
14249 /* Now make sure that operands have compatible type to
14250 expression's new_type. */
14251 xop0 = TREE_OPERAND (*expr_p, 0);
14252 xop1 = TREE_OPERAND (*expr_p, 1);
14253 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
14254 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
14255 new_type,
14256 xop0);
14257 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
14258 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
14259 new_type,
14260 xop1);
bd5d002e
RG
14261 /* Continue classified as tcc_binary. */
14262 goto expr_2;
da5fb469 14263 }
16949072 14264
e6ed43b0 14265 case VEC_COND_EXPR:
e93ed60e
RH
14266 {
14267 enum gimplify_status r0, r1, r2;
14268
14269 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14270 post_p, is_gimple_condexpr, fb_rvalue);
14271 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14272 post_p, is_gimple_val, fb_rvalue);
14273 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
14274 post_p, is_gimple_val, fb_rvalue);
14275
14276 ret = MIN (MIN (r0, r1), r2);
14277 recalculate_side_effects (*expr_p);
14278 }
14279 break;
14280
2205ed25 14281 case VEC_PERM_EXPR:
16949072
RG
14282 /* Classified as tcc_expression. */
14283 goto expr_3;
14284
483c6429
RG
14285 case BIT_INSERT_EXPR:
14286 /* Argument 3 is a constant. */
14287 goto expr_2;
14288
5be014d5 14289 case POINTER_PLUS_EXPR:
315f5f1b
RG
14290 {
14291 enum gimplify_status r0, r1;
14292 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14293 post_p, is_gimple_val, fb_rvalue);
14294 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14295 post_p, is_gimple_val, fb_rvalue);
14296 recalculate_side_effects (*expr_p);
14297 ret = MIN (r0, r1);
315f5f1b
RG
14298 break;
14299 }
726a989a 14300
6de9cd9a 14301 default:
282899df 14302 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
6de9cd9a 14303 {
6615c446 14304 case tcc_comparison:
61c25908
OH
14305 /* Handle comparison of objects of non-scalar-mode aggregates
14306 with a call to memcmp. It would be nice to only have to do
14307 this for variable-sized objects, but then we'd have to allow
14308 the same nest of reference nodes we allow for MODIFY_EXPR and
14309 that's too complex.
14310
14311 Compare scalar mode aggregates as scalar mode values. Using
14312 memcmp for them would be very inefficient at best, and is
14313 plain wrong if bitfields are involved. */
726a989a
RB
14314 {
14315 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
61c25908 14316
544d960a
AS
14317 /* Vector comparisons need no boolification. */
14318 if (TREE_CODE (type) == VECTOR_TYPE)
14319 goto expr_2;
14320 else if (!AGGREGATE_TYPE_P (type))
7f3ff782
KT
14321 {
14322 tree org_type = TREE_TYPE (*expr_p);
14323 *expr_p = gimple_boolify (*expr_p);
14324 if (!useless_type_conversion_p (org_type,
14325 TREE_TYPE (*expr_p)))
14326 {
14327 *expr_p = fold_convert_loc (input_location,
14328 org_type, *expr_p);
14329 ret = GS_OK;
14330 }
14331 else
14332 goto expr_2;
14333 }
726a989a
RB
14334 else if (TYPE_MODE (type) != BLKmode)
14335 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
14336 else
14337 ret = gimplify_variable_sized_compare (expr_p);
61c25908 14338
726a989a 14339 break;
61c25908 14340 }
d3147f64 14341
282899df
NS
14342 /* If *EXPR_P does not need to be special-cased, handle it
14343 according to its class. */
6615c446 14344 case tcc_unary:
282899df
NS
14345 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14346 post_p, is_gimple_val, fb_rvalue);
14347 break;
6de9cd9a 14348
6615c446 14349 case tcc_binary:
282899df
NS
14350 expr_2:
14351 {
14352 enum gimplify_status r0, r1;
d3147f64 14353
282899df 14354 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
726a989a 14355 post_p, is_gimple_val, fb_rvalue);
282899df
NS
14356 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14357 post_p, is_gimple_val, fb_rvalue);
d3147f64 14358
282899df
NS
14359 ret = MIN (r0, r1);
14360 break;
14361 }
d3147f64 14362
16949072
RG
14363 expr_3:
14364 {
14365 enum gimplify_status r0, r1, r2;
14366
14367 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14368 post_p, is_gimple_val, fb_rvalue);
14369 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14370 post_p, is_gimple_val, fb_rvalue);
14371 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
14372 post_p, is_gimple_val, fb_rvalue);
14373
14374 ret = MIN (MIN (r0, r1), r2);
14375 break;
14376 }
14377
6615c446
JO
14378 case tcc_declaration:
14379 case tcc_constant:
6de9cd9a 14380 ret = GS_ALL_DONE;
282899df 14381 goto dont_recalculate;
d3147f64 14382
282899df 14383 default:
16949072 14384 gcc_unreachable ();
6de9cd9a 14385 }
6de9cd9a
DN
14386
14387 recalculate_side_effects (*expr_p);
726a989a 14388
282899df 14389 dont_recalculate:
6de9cd9a
DN
14390 break;
14391 }
d3147f64 14392
941f78d1 14393 gcc_assert (*expr_p || ret != GS_OK);
6de9cd9a
DN
14394 }
14395 while (ret == GS_OK);
14396
14397 /* If we encountered an error_mark somewhere nested inside, either
14398 stub out the statement or propagate the error back out. */
14399 if (ret == GS_ERROR)
14400 {
14401 if (is_statement)
65355d53 14402 *expr_p = NULL;
6de9cd9a
DN
14403 goto out;
14404 }
14405
6de9cd9a
DN
14406 /* This was only valid as a return value from the langhook, which
14407 we handled. Make sure it doesn't escape from any other context. */
282899df 14408 gcc_assert (ret != GS_UNHANDLED);
6de9cd9a 14409
65355d53 14410 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
6de9cd9a
DN
14411 {
14412 /* We aren't looking for a value, and we don't have a valid
80480cee
MP
14413 statement. If it doesn't have side-effects, throw it away.
14414 We can also get here with code such as "*&&L;", where L is
14415 a LABEL_DECL that is marked as FORCED_LABEL. */
14416 if (TREE_CODE (*expr_p) == LABEL_DECL
14417 || !TREE_SIDE_EFFECTS (*expr_p))
65355d53 14418 *expr_p = NULL;
6de9cd9a 14419 else if (!TREE_THIS_VOLATILE (*expr_p))
44de5aeb
RK
14420 {
14421 /* This is probably a _REF that contains something nested that
14422 has side effects. Recurse through the operands to find it. */
14423 enum tree_code code = TREE_CODE (*expr_p);
14424
282899df 14425 switch (code)
44de5aeb 14426 {
282899df 14427 case COMPONENT_REF:
02a5eac4
EB
14428 case REALPART_EXPR:
14429 case IMAGPART_EXPR:
14430 case VIEW_CONVERT_EXPR:
282899df
NS
14431 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
14432 gimple_test_f, fallback);
14433 break;
14434
a9e64c63
EB
14435 case ARRAY_REF:
14436 case ARRAY_RANGE_REF:
44de5aeb
RK
14437 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
14438 gimple_test_f, fallback);
14439 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
282899df
NS
14440 gimple_test_f, fallback);
14441 break;
14442
14443 default:
14444 /* Anything else with side-effects must be converted to
a9e64c63 14445 a valid statement before we get here. */
282899df 14446 gcc_unreachable ();
44de5aeb 14447 }
44de5aeb 14448
65355d53 14449 *expr_p = NULL;
44de5aeb 14450 }
a9e64c63
EB
14451 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
14452 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
6de9cd9a 14453 {
a9e64c63
EB
14454 /* Historically, the compiler has treated a bare reference
14455 to a non-BLKmode volatile lvalue as forcing a load. */
af62f6f9 14456 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
726a989a 14457
c22b1771 14458 /* Normally, we do not want to create a temporary for a
a38578e1
MM
14459 TREE_ADDRESSABLE type because such a type should not be
14460 copied by bitwise-assignment. However, we make an
14461 exception here, as all we are doing here is ensuring that
14462 we read the bytes that make up the type. We use
14463 create_tmp_var_raw because create_tmp_var will abort when
57b51d4d 14464 given a TREE_ADDRESSABLE type. */
a38578e1
MM
14465 tree tmp = create_tmp_var_raw (type, "vol");
14466 gimple_add_tmp_var (tmp);
726a989a
RB
14467 gimplify_assign (tmp, *expr_p, pre_p);
14468 *expr_p = NULL;
6de9cd9a
DN
14469 }
14470 else
14471 /* We can't do anything useful with a volatile reference to
a9e64c63
EB
14472 an incomplete type, so just throw it away. Likewise for
14473 a BLKmode type, since any implicit inner load should
14474 already have been turned into an explicit one by the
14475 gimplification process. */
65355d53 14476 *expr_p = NULL;
6de9cd9a
DN
14477 }
14478
14479 /* If we are gimplifying at the statement level, we're done. Tack
726a989a 14480 everything together and return. */
325c3691 14481 if (fallback == fb_none || is_statement)
6de9cd9a 14482 {
726a989a
RB
14483 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
14484 it out for GC to reclaim it. */
14485 *expr_p = NULL_TREE;
14486
14487 if (!gimple_seq_empty_p (internal_pre)
14488 || !gimple_seq_empty_p (internal_post))
be00f578 14489 {
726a989a
RB
14490 gimplify_seq_add_seq (&internal_pre, internal_post);
14491 gimplify_seq_add_seq (pre_p, internal_pre);
be00f578 14492 }
726a989a
RB
14493
14494 /* The result of gimplifying *EXPR_P is going to be the last few
14495 statements in *PRE_P and *POST_P. Add location information
14496 to all the statements that were added by the gimplification
14497 helpers. */
14498 if (!gimple_seq_empty_p (*pre_p))
14499 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
14500
14501 if (!gimple_seq_empty_p (*post_p))
14502 annotate_all_with_location_after (*post_p, post_last_gsi,
14503 input_location);
14504
6de9cd9a
DN
14505 goto out;
14506 }
14507
726a989a
RB
14508#ifdef ENABLE_GIMPLE_CHECKING
14509 if (*expr_p)
14510 {
14511 enum tree_code code = TREE_CODE (*expr_p);
14512 /* These expressions should already be in gimple IR form. */
14513 gcc_assert (code != MODIFY_EXPR
14514 && code != ASM_EXPR
14515 && code != BIND_EXPR
14516 && code != CATCH_EXPR
6fc4fb06 14517 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
726a989a
RB
14518 && code != EH_FILTER_EXPR
14519 && code != GOTO_EXPR
14520 && code != LABEL_EXPR
14521 && code != LOOP_EXPR
726a989a
RB
14522 && code != SWITCH_EXPR
14523 && code != TRY_FINALLY_EXPR
ebebc928 14524 && code != EH_ELSE_EXPR
41dbbb37
TS
14525 && code != OACC_PARALLEL
14526 && code != OACC_KERNELS
62aee289 14527 && code != OACC_SERIAL
41dbbb37
TS
14528 && code != OACC_DATA
14529 && code != OACC_HOST_DATA
14530 && code != OACC_DECLARE
14531 && code != OACC_UPDATE
14532 && code != OACC_ENTER_DATA
14533 && code != OACC_EXIT_DATA
14534 && code != OACC_CACHE
726a989a
RB
14535 && code != OMP_CRITICAL
14536 && code != OMP_FOR
41dbbb37 14537 && code != OACC_LOOP
726a989a 14538 && code != OMP_MASTER
acf0174b 14539 && code != OMP_TASKGROUP
726a989a
RB
14540 && code != OMP_ORDERED
14541 && code != OMP_PARALLEL
bf38f7e9 14542 && code != OMP_SCAN
726a989a
RB
14543 && code != OMP_SECTIONS
14544 && code != OMP_SECTION
14545 && code != OMP_SINGLE);
14546 }
14547#endif
6de9cd9a 14548
726a989a
RB
14549 /* Otherwise we're gimplifying a subexpression, so the resulting
14550 value is interesting. If it's a valid operand that matches
14551 GIMPLE_TEST_F, we're done. Unless we are handling some
14552 post-effects internally; if that's the case, we need to copy into
14553 a temporary before adding the post-effects to POST_P. */
14554 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
6de9cd9a
DN
14555 goto out;
14556
14557 /* Otherwise, we need to create a new temporary for the gimplified
14558 expression. */
14559
14560 /* We can't return an lvalue if we have an internal postqueue. The
14561 object the lvalue refers to would (probably) be modified by the
14562 postqueue; we need to copy the value out first, which means an
14563 rvalue. */
726a989a
RB
14564 if ((fallback & fb_lvalue)
14565 && gimple_seq_empty_p (internal_post)
e847cc68 14566 && is_gimple_addressable (*expr_p))
6de9cd9a
DN
14567 {
14568 /* An lvalue will do. Take the address of the expression, store it
 14569 in a temporary, and replace the expression with a MEM_REF of
14570 that temporary. */
468e1ef4
AO
14571 tree ref_alias_type = reference_alias_ptr_type (*expr_p);
14572 unsigned int ref_align = get_object_alignment (*expr_p);
14573 tree ref_type = TREE_TYPE (*expr_p);
db3927fb 14574 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
6de9cd9a 14575 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
468e1ef4
AO
14576 if (TYPE_ALIGN (ref_type) != ref_align)
14577 ref_type = build_aligned_type (ref_type, ref_align);
14578 *expr_p = build2 (MEM_REF, ref_type,
14579 tmp, build_zero_cst (ref_alias_type));
6de9cd9a 14580 }
ba4d8f9d 14581 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
6de9cd9a 14582 {
726a989a
RB
14583 /* An rvalue will do. Assign the gimplified expression into a
14584 new temporary TMP and replace the original expression with
14585 TMP. First, make sure that the expression has a type so that
14586 it can be assigned into a temporary. */
282899df 14587 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
412a1d9e 14588 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
6de9cd9a 14589 }
282899df 14590 else
6de9cd9a 14591 {
726a989a 14592#ifdef ENABLE_GIMPLE_CHECKING
282899df
NS
14593 if (!(fallback & fb_mayfail))
14594 {
14595 fprintf (stderr, "gimplification failed:\n");
ef6cb4c7 14596 print_generic_expr (stderr, *expr_p);
282899df
NS
14597 debug_tree (*expr_p);
14598 internal_error ("gimplification failed");
14599 }
14600#endif
14601 gcc_assert (fallback & fb_mayfail);
726a989a 14602
282899df 14603 /* If this is an asm statement, and the user asked for the
535a42b1 14604 impossible, don't die. Fail and let gimplify_asm_expr
282899df 14605 issue an error. */
6de9cd9a
DN
14606 ret = GS_ERROR;
14607 goto out;
14608 }
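   /* Illustrative sketch (hypothetical examples) of the fallback paths
      above: an addressable lvalue such as a.b[i], with an empty internal
      postqueue, is rewritten as roughly

	  tmp = &a.b[i];	/* *EXPR_P becomes a MEM_REF of tmp.  */

      while a remaining rvalue such as a + tmp1 is captured by
      get_formal_tmp_var as

	  tmp2 = a + tmp1;	/* *EXPR_P is replaced by tmp2.  */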
6de9cd9a 14609
6de9cd9a 14610 /* Make sure the temporary matches our predicate. */
282899df 14611 gcc_assert ((*gimple_test_f) (*expr_p));
6de9cd9a 14612
726a989a 14613 if (!gimple_seq_empty_p (internal_post))
6de9cd9a 14614 {
726a989a
RB
14615 annotate_all_with_location (internal_post, input_location);
14616 gimplify_seq_add_seq (pre_p, internal_post);
6de9cd9a
DN
14617 }
14618
14619 out:
14620 input_location = saved_location;
14621 return ret;
14622}
14623
381cdae4
RB
14624/* Like gimplify_expr but make sure the gimplified result is not itself
14625 an SSA name (a decl is used instead). Temporaries required by
14626 evaluating *EXPR_P may still be SSA names. */
14627
14628static enum gimplify_status
14629gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
14630 bool (*gimple_test_f) (tree), fallback_t fallback,
14631 bool allow_ssa)
14632{
14633 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
14634 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
14635 gimple_test_f, fallback);
14636 if (! allow_ssa
14637 && TREE_CODE (*expr_p) == SSA_NAME)
14638 {
14639 tree name = *expr_p;
14640 if (was_ssa_name_p)
14641 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
14642 else
14643 {
14644 /* Avoid the extra copy if possible. */
14645 *expr_p = create_tmp_reg (TREE_TYPE (name));
00294b18
RB
14646 if (!gimple_nop_p (SSA_NAME_DEF_STMT (name)))
14647 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
381cdae4
RB
14648 release_ssa_name (name);
14649 }
14650 }
14651 return ret;
14652}
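/* Illustrative sketch (call pattern mirroring the size gimplification
   below): callers that must not see an SSA name pass ALLOW_SSA = false,
   e.g.

     gimplify_expr (&size, stmt_p, NULL, is_gimple_val, fb_rvalue, false);

   so that the result is an invariant or a decl; an SSA-name result is
   either copied into a fresh temporary or its defining statement is
   retargeted to one.  */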
14653
44de5aeb 14654/* Look through TYPE for variable-sized objects and gimplify each such
65355d53 14655 size that we find. Add to LIST_P any statements generated. */
44de5aeb 14656
65355d53 14657void
726a989a 14658gimplify_type_sizes (tree type, gimple_seq *list_p)
44de5aeb 14659{
ad50bc8d
RH
14660 tree field, t;
14661
19dbbf36 14662 if (type == NULL || type == error_mark_node)
8e0a600b 14663 return;
ad50bc8d 14664
6c6cfbfd 14665 /* We first do the main variant, then copy into any other variants. */
ad50bc8d 14666 type = TYPE_MAIN_VARIANT (type);
44de5aeb 14667
8e0a600b 14668 /* Avoid infinite recursion. */
19dbbf36 14669 if (TYPE_SIZES_GIMPLIFIED (type))
8e0a600b
JJ
14670 return;
14671
14672 TYPE_SIZES_GIMPLIFIED (type) = 1;
14673
44de5aeb
RK
14674 switch (TREE_CODE (type))
14675 {
44de5aeb
RK
14676 case INTEGER_TYPE:
14677 case ENUMERAL_TYPE:
14678 case BOOLEAN_TYPE:
44de5aeb 14679 case REAL_TYPE:
325217ed 14680 case FIXED_POINT_TYPE:
65355d53
RH
14681 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
14682 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
ad50bc8d
RH
14683
14684 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
14685 {
14686 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
14687 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
ad50bc8d 14688 }
44de5aeb
RK
14689 break;
14690
14691 case ARRAY_TYPE:
ad50bc8d 14692 /* These types may not have declarations, so handle them here. */
8e0a600b
JJ
14693 gimplify_type_sizes (TREE_TYPE (type), list_p);
14694 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
2e957792
JJ
14695 /* Ensure VLA bounds aren't removed: for -O0 they should be variables
14696 with assigned stack slots; for -O1+ -g they should be tracked
14697 by VTA. */
08d78391
EB
14698 if (!(TYPE_NAME (type)
14699 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
14700 && DECL_IGNORED_P (TYPE_NAME (type)))
14701 && TYPE_DOMAIN (type)
802e9f8e
JJ
14702 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
14703 {
14704 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8813a647 14705 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
802e9f8e
JJ
14706 DECL_IGNORED_P (t) = 0;
14707 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8813a647 14708 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
802e9f8e
JJ
14709 DECL_IGNORED_P (t) = 0;
14710 }
44de5aeb
RK
14711 break;
14712
14713 case RECORD_TYPE:
14714 case UNION_TYPE:
14715 case QUAL_UNION_TYPE:
910ad8de 14716 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
44de5aeb 14717 if (TREE_CODE (field) == FIELD_DECL)
8e0a600b
JJ
14718 {
14719 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
9a9ba8d9
JJ
14720 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
14721 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
8e0a600b
JJ
14722 gimplify_type_sizes (TREE_TYPE (field), list_p);
14723 }
14724 break;
14725
14726 case POINTER_TYPE:
14727 case REFERENCE_TYPE:
706c4bb7
OH
14728 /* We used to recurse on the pointed-to type here, which turned out to
14729 be incorrect because its definition might refer to variables not
14730 yet initialized at this point if a forward declaration is involved.
14731
14732 It was actually useful for anonymous pointed-to types to ensure
14733 that the sizes evaluation dominates every possible later use of the
14734 values. Restricting to such types here would be safe since there
f63645be
KH
14735 is no possible forward declaration around, but would introduce an
14736 undesirable middle-end semantic to anonymity. We then defer to
14737 front-ends the responsibility of ensuring that the sizes are
14738 evaluated both early and late enough, e.g. by attaching artificial
706c4bb7 14739 type declarations to the tree. */
44de5aeb
RK
14740 break;
14741
14742 default:
14743 break;
14744 }
14745
65355d53
RH
14746 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
14747 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
44de5aeb 14748
ad50bc8d 14749 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
b4830636 14750 {
ad50bc8d
RH
14751 TYPE_SIZE (t) = TYPE_SIZE (type);
14752 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
14753 TYPE_SIZES_GIMPLIFIED (t) = 1;
b4830636 14754 }
b4830636
RH
14755}
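/* Illustrative sketch (hypothetical example): for a VLA such as

     void f (int n) { char a[n * 2]; ... }

   the ARRAY_TYPE and its domain are walked, so the non-constant domain
   bounds and the TYPE_SIZE/TYPE_SIZE_UNIT expressions end up as assignments
   to artificial variables appended to LIST_P (roughly D.1 = n * 2 - 1 for
   the domain maximum and D.2 = n * 2 for the size in bytes), and the
   gimplified values are then copied to every variant of the type.  */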
14756
14757/* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
14758 a size or position, has had all of its SAVE_EXPRs evaluated.
726a989a 14759 We add any required statements to *STMT_P. */
44de5aeb
RK
14760
14761void
726a989a 14762gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
44de5aeb 14763{
3ac8781c 14764 tree expr = *expr_p;
a9c5ddf9 14765
44de5aeb 14766 /* We don't do anything if the value isn't there, is constant, or contains
1e748a2b 14767 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
aabcd309 14768 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
1e748a2b
RK
14769 will want to replace it with a new variable, but that will cause problems
 14770 if this type is from outside the function. Such a variable is OK to have here. */
47c268c4
JJ
14771 if (expr == NULL_TREE
14772 || is_gimple_constant (expr)
14773 || TREE_CODE (expr) == VAR_DECL
14774 || CONTAINS_PLACEHOLDER_P (expr))
44de5aeb
RK
14775 return;
14776
a9c5ddf9
RH
14777 *expr_p = unshare_expr (expr);
14778
381cdae4
RB
14779 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
14780 if the def vanishes. */
14781 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
47c268c4
JJ
14782
14783 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
14784 FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
 14785 via gimplify_vla_decl even when they would have all sizes INTEGER_CSTs. */
14786 if (is_gimple_constant (*expr_p))
14787 *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
44de5aeb 14788}
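/* Illustrative sketch: as in gimplify_type_sizes above, a caller gimplifies
   a size or position in place, e.g.

     gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);

   which typically either leaves the original constant, VAR_DECL or
   PLACEHOLDER_EXPR untouched, or leaves a fresh temporary whose
   initialization has been appended to *LIST_P.  */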
6de9cd9a 14789
3ad065ef
EB
14790/* Gimplify the body of FNDECL and return a GIMPLE_BIND node
14791 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
14792 is true, also gimplify the parameters. */
726a989a 14793
538dd0b7 14794gbind *
3ad065ef 14795gimplify_body (tree fndecl, bool do_parms)
6de9cd9a
DN
14796{
14797 location_t saved_location = input_location;
6aee2fd0 14798 gimple_seq parm_stmts, parm_cleanup = NULL, seq;
355fe088 14799 gimple *outer_stmt;
538dd0b7 14800 gbind *outer_bind;
6de9cd9a
DN
14801
14802 timevar_push (TV_TREE_GIMPLIFY);
953ff289 14803
381cdae4
RB
14804 init_tree_ssa (cfun);
14805
f66d6761
SB
14806 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
14807 gimplification. */
14808 default_rtl_profile ();
14809
953ff289 14810 gcc_assert (gimplify_ctxp == NULL);
381cdae4 14811 push_gimplify_context (true);
6de9cd9a 14812
41dbbb37 14813 if (flag_openacc || flag_openmp)
acf0174b
JJ
14814 {
14815 gcc_assert (gimplify_omp_ctxp == NULL);
14816 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
554a530f 14817 gimplify_omp_ctxp = new_omp_context (ORT_IMPLICIT_TARGET);
acf0174b
JJ
14818 }
14819
44de5aeb
RK
14820 /* Unshare most shared trees in the body and in that of any nested functions.
14821 It would seem we don't have to do this for nested functions because
14822 they are supposed to be output and then the outer function gimplified
14823 first, but the g++ front end doesn't always do it that way. */
3ad065ef
EB
14824 unshare_body (fndecl);
14825 unvisit_body (fndecl);
6de9cd9a 14826
fa10beec 14827 /* Make sure input_location isn't set to something weird. */
6de9cd9a
DN
14828 input_location = DECL_SOURCE_LOCATION (fndecl);
14829
4744afba
RH
14830 /* Resolve callee-copies. This has to be done before processing
14831 the body so that DECL_VALUE_EXPR gets processed correctly. */
6aee2fd0 14832 parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;
4744afba 14833
6de9cd9a 14834 /* Gimplify the function's body. */
726a989a 14835 seq = NULL;
3ad065ef 14836 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
10ea09ee 14837 outer_stmt = gimple_seq_first_nondebug_stmt (seq);
538dd0b7 14838 if (!outer_stmt)
6de9cd9a 14839 {
538dd0b7
DM
14840 outer_stmt = gimple_build_nop ();
14841 gimplify_seq_add_stmt (&seq, outer_stmt);
6de9cd9a 14842 }
44de5aeb 14843
726a989a
RB
14844 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
14845 not the case, wrap everything in a GIMPLE_BIND to make it so. */
538dd0b7 14846 if (gimple_code (outer_stmt) == GIMPLE_BIND
10ea09ee
JJ
14847 && (gimple_seq_first_nondebug_stmt (seq)
14848 == gimple_seq_last_nondebug_stmt (seq)))
14849 {
14850 outer_bind = as_a <gbind *> (outer_stmt);
14851 if (gimple_seq_first_stmt (seq) != outer_stmt
14852 || gimple_seq_last_stmt (seq) != outer_stmt)
14853 {
14854 /* If there are debug stmts before or after outer_stmt, move them
 14855 inside the body of outer_bind. */
14856 gimple_stmt_iterator gsi = gsi_for_stmt (outer_stmt, &seq);
14857 gimple_seq second_seq = NULL;
14858 if (gimple_seq_first_stmt (seq) != outer_stmt
14859 && gimple_seq_last_stmt (seq) != outer_stmt)
14860 {
14861 second_seq = gsi_split_seq_after (gsi);
14862 gsi_remove (&gsi, false);
14863 }
14864 else if (gimple_seq_first_stmt (seq) != outer_stmt)
14865 gsi_remove (&gsi, false);
14866 else
14867 {
14868 gsi_remove (&gsi, false);
14869 second_seq = seq;
14870 seq = NULL;
14871 }
14872 gimple_seq_add_seq_without_update (&seq,
14873 gimple_bind_body (outer_bind));
14874 gimple_seq_add_seq_without_update (&seq, second_seq);
14875 gimple_bind_set_body (outer_bind, seq);
14876 }
14877 }
726a989a
RB
14878 else
14879 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
14880
3ad065ef 14881 DECL_SAVED_TREE (fndecl) = NULL_TREE;
4744afba
RH
14882
14883 /* If we had callee-copies statements, insert them at the beginning
f0c10f0f 14884 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
726a989a 14885 if (!gimple_seq_empty_p (parm_stmts))
4744afba 14886 {
f0c10f0f
RG
14887 tree parm;
14888
726a989a 14889 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
6aee2fd0
JJ
14890 if (parm_cleanup)
14891 {
14892 gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
14893 GIMPLE_TRY_FINALLY);
14894 parm_stmts = NULL;
14895 gimple_seq_add_stmt (&parm_stmts, g);
14896 }
726a989a 14897 gimple_bind_set_body (outer_bind, parm_stmts);
f0c10f0f
RG
14898
14899 for (parm = DECL_ARGUMENTS (current_function_decl);
910ad8de 14900 parm; parm = DECL_CHAIN (parm))
f0c10f0f
RG
14901 if (DECL_HAS_VALUE_EXPR_P (parm))
14902 {
14903 DECL_HAS_VALUE_EXPR_P (parm) = 0;
14904 DECL_IGNORED_P (parm) = 0;
14905 }
4744afba
RH
14906 }
14907
41dbbb37
TS
14908 if ((flag_openacc || flag_openmp || flag_openmp_simd)
14909 && gimplify_omp_ctxp)
acf0174b
JJ
14910 {
14911 delete_omp_context (gimplify_omp_ctxp);
14912 gimplify_omp_ctxp = NULL;
14913 }
14914
726a989a 14915 pop_gimplify_context (outer_bind);
953ff289 14916 gcc_assert (gimplify_ctxp == NULL);
6de9cd9a 14917
b2b29377 14918 if (flag_checking && !seen_error ())
34019e28 14919 verify_gimple_in_seq (gimple_bind_body (outer_bind));
6de9cd9a
DN
14920
14921 timevar_pop (TV_TREE_GIMPLIFY);
14922 input_location = saved_location;
726a989a
RB
14923
14924 return outer_bind;
6de9cd9a
DN
14925}
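/* Illustrative sketch (rough shape only): the result is always a single
   outer bind, i.e. the body looks like

     f ()
     {
       <parameter setup statements, if DO_PARMS>
       <gimplified former DECL_SAVED_TREE>
     }

   which gimplify_function_tree below installs as the GIMPLE body of
   FNDECL; DECL_SAVED_TREE itself has already been cleared by this point.  */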
14926
6a1f6c9c 14927typedef char *char_p; /* For DEF_VEC_P. */
6a1f6c9c
JM
14928
14929/* Return whether we should exclude FNDECL from instrumentation. */
14930
14931static bool
14932flag_instrument_functions_exclude_p (tree fndecl)
14933{
9771b263 14934 vec<char_p> *v;
6a1f6c9c 14935
9771b263
DN
14936 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
14937 if (v && v->length () > 0)
6a1f6c9c
JM
14938 {
14939 const char *name;
14940 int i;
14941 char *s;
14942
efab3e3a 14943 name = lang_hooks.decl_printable_name (fndecl, 1);
9771b263 14944 FOR_EACH_VEC_ELT (*v, i, s)
6a1f6c9c
JM
14945 if (strstr (name, s) != NULL)
14946 return true;
14947 }
14948
9771b263
DN
14949 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
14950 if (v && v->length () > 0)
6a1f6c9c
JM
14951 {
14952 const char *name;
14953 int i;
14954 char *s;
14955
14956 name = DECL_SOURCE_FILE (fndecl);
9771b263 14957 FOR_EACH_VEC_ELT (*v, i, s)
6a1f6c9c
JM
14958 if (strstr (name, s) != NULL)
14959 return true;
14960 }
14961
14962 return false;
14963}
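/* Illustrative sketch (hypothetical option values): the two vectors above
   are populated from the command-line lists, e.g.

     -finstrument-functions-exclude-function-list=mem,lock_
     -finstrument-functions-exclude-file-list=include/sys

   and matching is a plain substring test (strstr) against the printable
   function name or the source file name, respectively.  */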
14964
6de9cd9a 14965/* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
726a989a 14966 node for the function we want to gimplify.
b8698a0f 14967
ad19c4be 14968 The resulting sequence of GIMPLE statements is attached to FNDECL
726a989a 14969 via gimple_set_body; nothing is returned. */
6de9cd9a
DN
14970
14971void
14972gimplify_function_tree (tree fndecl)
14973{
726a989a 14974 gimple_seq seq;
538dd0b7 14975 gbind *bind;
6de9cd9a 14976
a406865a
RG
14977 gcc_assert (!gimple_body (fndecl));
14978
db2960f4
SL
14979 if (DECL_STRUCT_FUNCTION (fndecl))
14980 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
14981 else
14982 push_struct_function (fndecl);
6de9cd9a 14983
d67cb100
TV
14984 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
14985 if necessary. */
14986 cfun->curr_properties |= PROP_gimple_lva;
14987
45b2222a 14988 if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS))
6ff92497 14989 asan_poisoned_variables = new hash_set<tree> ();
3ad065ef 14990 bind = gimplify_body (fndecl, true);
6ff92497
ML
14991 if (asan_poisoned_variables)
14992 {
14993 delete asan_poisoned_variables;
14994 asan_poisoned_variables = NULL;
14995 }
726a989a
RB
14996
14997 /* The tree body of the function is no longer needed, replace it
14998 with the new GIMPLE body. */
355a7673 14999 seq = NULL;
726a989a
RB
15000 gimple_seq_add_stmt (&seq, bind);
15001 gimple_set_body (fndecl, seq);
6de9cd9a
DN
15002
15003 /* If we're instrumenting function entry/exit, then prepend the call to
15004 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
15005 catch the exit hook. */
15006 /* ??? Add some way to ignore exceptions for this TFE. */
15007 if (flag_instrument_function_entry_exit
8d5a7d1f 15008 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
d964ba07
RB
15009 /* Do not instrument extern inline functions. */
15010 && !(DECL_DECLARED_INLINE_P (fndecl)
15011 && DECL_EXTERNAL (fndecl)
15012 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
8d5a7d1f 15013 && !flag_instrument_functions_exclude_p (fndecl))
6de9cd9a 15014 {
726a989a 15015 tree x;
538dd0b7 15016 gbind *new_bind;
355fe088 15017 gimple *tf;
726a989a 15018 gimple_seq cleanup = NULL, body = NULL;
0fb6181e 15019 tree tmp_var, this_fn_addr;
538dd0b7 15020 gcall *call;
b01890ff 15021
0fb6181e
OH
15022 /* The instrumentation hooks aren't going to call the instrumented
15023 function and the address they receive is expected to be matchable
15024 against symbol addresses. Make sure we don't create a trampoline,
15025 in case the current function is nested. */
15026 this_fn_addr = build_fold_addr_expr (current_function_decl);
15027 TREE_NO_TRAMPOLINE (this_fn_addr) = 1;
15028
e79983f4 15029 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
59527282 15030 call = gimple_build_call (x, 1, integer_zero_node);
b01890ff
JH
15031 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
15032 gimple_call_set_lhs (call, tmp_var);
15033 gimplify_seq_add_stmt (&cleanup, call);
e79983f4 15034 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
0fb6181e 15035 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
b01890ff 15036 gimplify_seq_add_stmt (&cleanup, call);
726a989a 15037 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
6de9cd9a 15038
e79983f4 15039 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
59527282 15040 call = gimple_build_call (x, 1, integer_zero_node);
b01890ff
JH
15041 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
15042 gimple_call_set_lhs (call, tmp_var);
15043 gimplify_seq_add_stmt (&body, call);
e79983f4 15044 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
0fb6181e 15045 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
b01890ff 15046 gimplify_seq_add_stmt (&body, call);
726a989a 15047 gimplify_seq_add_stmt (&body, tf);
d6b1fea2 15048 new_bind = gimple_build_bind (NULL, body, NULL);
6de9cd9a 15049
726a989a
RB
15050 /* Replace the current function body with the body
15051 wrapped in the try/finally TF. */
355a7673 15052 seq = NULL;
726a989a
RB
15053 gimple_seq_add_stmt (&seq, new_bind);
15054 gimple_set_body (fndecl, seq);
fca4adf2
JJ
15055 bind = new_bind;
15056 }
15057
45b2222a 15058 if (sanitize_flags_p (SANITIZE_THREAD))
fca4adf2
JJ
15059 {
15060 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
355fe088 15061 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
d6b1fea2 15062 gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
fca4adf2
JJ
15063 /* Replace the current function body with the body
15064 wrapped in the try/finally TF. */
15065 seq = NULL;
15066 gimple_seq_add_stmt (&seq, new_bind);
15067 gimple_set_body (fndecl, seq);
6de9cd9a
DN
15068 }
15069
726a989a 15070 DECL_SAVED_TREE (fndecl) = NULL_TREE;
d67cb100 15071 cfun->curr_properties |= PROP_gimple_any;
726a989a 15072
db2960f4 15073 pop_cfun ();
88d91afd 15074
363dc72c 15075 dump_function (TDI_gimple, fndecl);
6de9cd9a 15076}
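/* Illustrative sketch (rough shape, not a literal dump): with
   -finstrument-functions the body built above becomes

     f ()
     {
       return_addr.1 = __builtin_return_address (0);
       __cyg_profile_func_enter (&f, return_addr.1);
       try
	 {
	   <original body>
	 }
       finally
	 {
	   return_addr.2 = __builtin_return_address (0);
	   __cyg_profile_func_exit (&f, return_addr.2);
	 }
     }

   and with -fsanitize=thread a further try/finally invoking the internal
   TSAN_FUNC_EXIT function is wrapped around that.  */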
726a989a 15077
4a7cb16f
AM
15078/* Return a dummy expression of type TYPE in order to keep going after an
15079 error. */
b184c8f1 15080
4a7cb16f
AM
15081static tree
15082dummy_object (tree type)
b184c8f1 15083{
4a7cb16f
AM
15084 tree t = build_int_cst (build_pointer_type (type), 0);
15085 return build2 (MEM_REF, type, t, t);
b184c8f1
AM
15086}
15087
4a7cb16f
AM
15088/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
15089 builtin function, but a very special sort of operator. */
b184c8f1 15090
4a7cb16f 15091enum gimplify_status
f8e89441
TV
15092gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
15093 gimple_seq *post_p ATTRIBUTE_UNUSED)
4a7cb16f
AM
15094{
15095 tree promoted_type, have_va_type;
15096 tree valist = TREE_OPERAND (*expr_p, 0);
15097 tree type = TREE_TYPE (*expr_p);
33f0852f 15098 tree t, tag, aptag;
4a7cb16f 15099 location_t loc = EXPR_LOCATION (*expr_p);
b184c8f1 15100
4a7cb16f
AM
15101 /* Verify that valist is of the proper type. */
15102 have_va_type = TREE_TYPE (valist);
15103 if (have_va_type == error_mark_node)
15104 return GS_ERROR;
15105 have_va_type = targetm.canonical_va_list_type (have_va_type);
5b28efbb 15106 if (have_va_type == NULL_TREE
3071bfa9 15107 && POINTER_TYPE_P (TREE_TYPE (valist)))
5b28efbb
TV
15108 /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg. */
15109 have_va_type
15110 = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
ba9bbd6f 15111 gcc_assert (have_va_type != NULL_TREE);
b184c8f1 15112
4a7cb16f
AM
15113 /* Generate a diagnostic for requesting data of a type that cannot
15114 be passed through `...' due to type promotion at the call site. */
15115 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
15116 != type)
15117 {
15118 static bool gave_help;
15119 bool warned;
bd02f00f
MP
15120 /* Use the expansion point to handle cases such as passing bool (defined
15121 in a system header) through `...'. */
620e594b 15122 location_t xloc
bd02f00f 15123 = expansion_point_location_if_in_system_header (loc);
b184c8f1 15124
4a7cb16f
AM
15125 /* Unfortunately, this is merely undefined, rather than a constraint
15126 violation, so we cannot make this an error. If this call is never
15127 executed, the program is still strictly conforming. */
097f82ec 15128 auto_diagnostic_group d;
bd02f00f
MP
15129 warned = warning_at (xloc, 0,
15130 "%qT is promoted to %qT when passed through %<...%>",
4a7cb16f
AM
15131 type, promoted_type);
15132 if (!gave_help && warned)
15133 {
15134 gave_help = true;
bd02f00f 15135 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
4a7cb16f
AM
15136 promoted_type, type);
15137 }
b184c8f1 15138
4a7cb16f
AM
15139 /* We can, however, treat "undefined" any way we please.
15140 Call abort to encourage the user to fix the program. */
15141 if (warned)
bd02f00f 15142 inform (xloc, "if this code is reached, the program will abort");
4a7cb16f
AM
15143 /* Before the abort, allow the evaluation of the va_list
15144 expression to exit or longjmp. */
15145 gimplify_and_add (valist, pre_p);
15146 t = build_call_expr_loc (loc,
15147 builtin_decl_implicit (BUILT_IN_TRAP), 0);
b184c8f1
AM
15148 gimplify_and_add (t, pre_p);
15149
4a7cb16f
AM
15150 /* This is dead code, but go ahead and finish so that the
15151 mode of the result comes out right. */
15152 *expr_p = dummy_object (type);
15153 return GS_ALL_DONE;
b184c8f1 15154 }
b184c8f1 15155
f8e89441 15156 tag = build_int_cst (build_pointer_type (type), 0);
33f0852f
JJ
15157 aptag = build_int_cst (TREE_TYPE (valist), 0);
15158
15159 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
15160 valist, tag, aptag);
b184c8f1 15161
d67cb100
TV
15162 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
15163 needs to be expanded. */
15164 cfun->curr_properties &= ~PROP_gimple_lva;
15165
f8e89441 15166 return GS_OK;
b184c8f1 15167}
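/* Illustrative sketch (hypothetical examples): for

     short s = va_arg (ap, short);

   the promotion check above warns that 'short int' is promoted to 'int',
   emits a trap and substitutes a dummy object, whereas a well-formed

     int i = va_arg (ap, int);

   is lowered to a call to the internal IFN_VA_ARG function (shown as
   VA_ARG in dumps), which is expanded later because PROP_gimple_lva has
   been cleared.  */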
bcf71673 15168
45b0be94
AM
15169/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
15170
15171 DST/SRC are the destination and source respectively. You can pass
15172 ungimplified trees in DST or SRC, in which case they will be
15173 converted to a gimple operand if necessary.
15174
15175 This function returns the newly created GIMPLE_ASSIGN tuple. */
15176
355fe088 15177gimple *
45b0be94
AM
15178gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
15179{
15180 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
15181 gimplify_and_add (t, seq_p);
15182 ggc_free (t);
15183 return gimple_seq_last_stmt (*seq_p);
15184}
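/* Illustrative sketch (hypothetical caller):

     tree rhs = build2 (PLUS_EXPR, TREE_TYPE (dst), a, b);
     gimple *g = gimplify_assign (dst, rhs, &seq);

   gimplifies "dst = a + b", appends the needed statements to SEQ and
   returns the last one appended (normally the GIMPLE_ASSIGN itself).  */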
15185
18f429e2 15186inline hashval_t
67f58944 15187gimplify_hasher::hash (const elt_t *p)
18f429e2
AM
15188{
15189 tree t = p->val;
15190 return iterative_hash_expr (t, 0);
15191}
15192
15193inline bool
67f58944 15194gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
18f429e2
AM
15195{
15196 tree t1 = p1->val;
15197 tree t2 = p2->val;
15198 enum tree_code code = TREE_CODE (t1);
15199
15200 if (TREE_CODE (t2) != code
15201 || TREE_TYPE (t1) != TREE_TYPE (t2))
15202 return false;
15203
15204 if (!operand_equal_p (t1, t2, 0))
15205 return false;
15206
18f429e2
AM
15207 /* Only allow them to compare equal if they also hash equal; otherwise
 15208 results are nondeterministic, and we fail bootstrap comparison. */
b2b29377 15209 gcc_checking_assert (hash (p1) == hash (p2));
18f429e2
AM
15210
15211 return true;
15212}