]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/gimplify.c
Update copyright years.
[thirdparty/gcc.git] / gcc / gimplify.c
CommitLineData
6de9cd9a
DN
1/* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
818ab71a 3 Copyright (C) 2002-2016 Free Software Foundation, Inc.
6de9cd9a
DN
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
6
7This file is part of GCC.
8
9GCC is free software; you can redistribute it and/or modify it under
10the terms of the GNU General Public License as published by the Free
9dcd6f09 11Software Foundation; either version 3, or (at your option) any later
6de9cd9a
DN
12version.
13
14GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15WARRANTY; without even the implied warranty of MERCHANTABILITY or
16FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17for more details.
18
19You should have received a copy of the GNU General Public License
9dcd6f09
NC
20along with GCC; see the file COPYING3. If not see
21<http://www.gnu.org/licenses/>. */
6de9cd9a
DN
22
23#include "config.h"
24#include "system.h"
25#include "coretypes.h"
c7131fb2 26#include "backend.h"
957060b5
AM
27#include "target.h"
28#include "rtl.h"
6de9cd9a 29#include "tree.h"
c7131fb2 30#include "gimple.h"
9fdcd34e 31#include "gimple-predict.h"
957060b5 32#include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
c7131fb2 33#include "ssa.h"
957060b5
AM
34#include "cgraph.h"
35#include "tree-pretty-print.h"
36#include "diagnostic-core.h"
37#include "alias.h"
c7131fb2 38#include "fold-const.h"
36566b39 39#include "calls.h"
36566b39
PK
40#include "varasm.h"
41#include "stmt.h"
42#include "expr.h"
2fb9a547
AM
43#include "gimple-fold.h"
44#include "tree-eh.h"
45b0be94 45#include "gimplify.h"
5be5c238 46#include "gimple-iterator.h"
d8a2d370 47#include "stor-layout.h"
d8a2d370 48#include "print-tree.h"
726a989a 49#include "tree-iterator.h"
6de9cd9a 50#include "tree-inline.h"
6de9cd9a 51#include "langhooks.h"
442b4905 52#include "tree-cfg.h"
442b4905 53#include "tree-ssa.h"
0645c1a2 54#include "omp-low.h"
4484a35a 55#include "gimple-low.h"
939b37da 56#include "cilk.h"
41dbbb37 57#include "gomp-constants.h"
88d91afd 58#include "tree-dump.h"
1a80d6b8 59#include "gimple-walk.h"
7ee2468b 60#include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
9b2b7279 61#include "builtins.h"
953ff289
DN
62
/* Per-variable data-sharing flags used while gimplifying OpenMP and
   OpenACC constructs.  The low-order bits are the data-sharing classes
   collected in GOVD_DATA_SHARE_CLASS; the higher bits are modifier
   flags attached to those classes.  */

enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 8192,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,

  GOVD_MAP_0LEN_ARRAY = 32768,

  /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 65536,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 131072,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 262144,

  /* Mask of the bits that denote a data-sharing class proper, as
     opposed to the modifier flags above.  */
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
100
726a989a 101
a68ab351
JJ
/* Kind of OMP/OpenACC region being gimplified.  The values are
   bit-encoded: combined constructs OR an extra bit into their base
   region kind (e.g. ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1), and
   OpenACC regions OR in ORT_ACC.  */

enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_SIMD = 0x01,

  ORT_PARALLEL = 0x02,
  ORT_COMBINED_PARALLEL = 0x03,

  ORT_TASK = 0x04,
  ORT_UNTIED_TASK = 0x05,

  ORT_TEAMS = 0x08,
  ORT_COMBINED_TEAMS = 0x09,

  /* Data region.  */
  ORT_TARGET_DATA = 0x10,

  /* Data region with offloading.  */
  ORT_TARGET = 0x20,
  ORT_COMBINED_TARGET = 0x21,

  /* OpenACC variants.  */
  ORT_ACC = 0x40,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 0x80,  /* Kernels construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 0x80,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x100
};
134
45852dcc
AM
/* Gimplify hashtable helper: hash/equality callbacks for elt_t entries
   in the formal temporary table (see lookup_tmp_var).  Entries are
   freed with free () when the table is destroyed.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};
142
/* State for one gimplification context.  Contexts nest through
   prev_context; see push_gimplify_context / pop_gimplify_context.  */

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;   /* Enclosing context, or NULL.  */

  vec<gbind *> bind_expr_stack;	  /* Stack of open GIMPLE_BINDs.  */
  tree temps;			  /* DECL_CHAIN of temporaries created.  */
  gimple_seq conditional_cleanups; /* Cleanups seen inside COND_EXPRs,
				      flushed by gimple_pop_condition.  */
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;		  /* Current COND_EXPR nesting depth.  */
  unsigned into_ssa : 1;	  /* Create SSA names for register temps.  */
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
};
164
/* State for gimplifying one OMP/OpenACC region.  Regions nest through
   outer_context.  */

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  /* Map from DECL to gimplify_omp_var_data flags; keyed by DECL_UID
     via splay_tree_compare_decl_uid.  */
  splay_tree variables;
  hash_set<tree> *privatized_types;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;		/* input_location when the region began.  */
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  bool combined_loop;
  bool distribute;
  bool target_map_scalars_firstprivate;
  bool target_map_pointers_as_0len_arrays;
  bool target_firstprivatize_array_bases;
};
181
/* The innermost gimplification context and OMP region context
   currently active, or NULL when not gimplifying.  */
static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;
eb6127a4 188
a1a6c5b2
JJ
/* Append statement GS to the end of *SEQ_P without scanning its
   operands.  Shorter alias name for the above function for use in
   gimplify.c only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}
197
726a989a
RB
198/* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
199 NULL, a new sequence is allocated. This function is
200 similar to gimple_seq_add_seq, but does not scan the operands.
201 During gimplification, we need to manipulate statement sequences
202 before the def/use vectors have been constructed. */
203
204static void
205gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
206{
207 gimple_stmt_iterator si;
208
209 if (src == NULL)
210 return;
211
726a989a
RB
212 si = gsi_last (*dst_p);
213 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
214}
215
45852dcc
AM
216
217/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
218 and popping gimplify contexts. */
219
220static struct gimplify_ctx *ctx_pool = NULL;
221
222/* Return a gimplify context struct from the pool. */
223
224static inline struct gimplify_ctx *
225ctx_alloc (void)
226{
227 struct gimplify_ctx * c = ctx_pool;
228
229 if (c)
230 ctx_pool = c->prev_context;
231 else
232 c = XNEW (struct gimplify_ctx);
233
234 memset (c, '\0', sizeof (*c));
235 return c;
236}
237
/* Put gimplify context C back into the pool.  The pool is a singly
   linked list threaded through prev_context; C becomes its new head.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}
246
247/* Free allocated ctx stack memory. */
248
249void
250free_gimplify_stack (void)
251{
252 struct gimplify_ctx *c;
253
254 while ((c = ctx_pool))
255 {
256 ctx_pool = c->prev_context;
257 free (c);
258 }
259}
260
261
6de9cd9a
DN
262/* Set up a context for the gimplifier. */
263
264void
45852dcc 265push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
6de9cd9a 266{
45852dcc
AM
267 struct gimplify_ctx *c = ctx_alloc ();
268
953ff289 269 c->prev_context = gimplify_ctxp;
953ff289 270 gimplify_ctxp = c;
45852dcc
AM
271 gimplify_ctxp->into_ssa = in_ssa;
272 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
6de9cd9a
DN
273}
274
/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  /* All GIMPLE_BINDs must have been closed by now.  */
  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  /* Drop the formal temporary table and recycle the context struct.  */
  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}
301
ad19c4be
EB
/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  /* Pre-reserve a small capacity to avoid repeated reallocation for
     typical nesting depths.  */
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the first element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the first element of the stack of bindings, i.e. the
   innermost GIMPLE_BIND currently open.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}
334
/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  /* On entry to the outermost condition no conditional cleanups may be
     pending from a previous condition.  */
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      /* Back at unconditional scope: flush the accumulated cleanups.  */
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}
371
953ff289
DN
372/* A stable comparison routine for use with splay trees and DECLs. */
373
374static int
375splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
376{
377 tree a = (tree) xa;
378 tree b = (tree) xb;
379
380 return DECL_UID (a) - DECL_UID (b);
381}
382
/* Create a new omp construct that deals with variable remapping.
   REGION_TYPE selects the kind of region; task regions get an
   unspecified default data-sharing kind, all others default to
   "shared".  The result must be released with delete_omp_context.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  /* Chain onto whatever region was current at creation time.  */
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;

  return c;
}
403
/* Destroy an omp construct that deals with variable remapping,
   releasing everything new_omp_context allocated.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}

static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
417
726a989a
RB
/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}
427
428/* Gimplify statement T into sequence *SEQ_P, and return the first
429 tuple in the sequence of generated tuples for this statement.
430 Return NULL if gimplifying T produced no tuples. */
431
355fe088 432static gimple *
726a989a 433gimplify_and_return_first (tree t, gimple_seq *seq_p)
cd3ce9b4 434{
726a989a
RB
435 gimple_stmt_iterator last = gsi_last (*seq_p);
436
437 gimplify_and_add (t, seq_p);
438
439 if (!gsi_end_p (last))
440 {
441 gsi_next (&last);
442 return gsi_stmt (last);
443 }
444 else
445 return gimple_seq_first_stmt (*seq_p);
cd3ce9b4
JM
446}
447
216820a4
RG
448/* Returns true iff T is a valid RHS for an assignment to an un-renamed
449 LHS, or for a call argument. */
450
451static bool
452is_gimple_mem_rhs (tree t)
453{
454 /* If we're dealing with a renamable type, either source or dest must be
455 a renamed variable. */
456 if (is_gimple_reg_type (TREE_TYPE (t)))
457 return is_gimple_val (t);
458 else
459 return is_gimple_val (t) || is_gimple_lvalue (t);
460}
461
726a989a 462/* Return true if T is a CALL_EXPR or an expression that can be
12947319 463 assigned to a temporary. Note that this predicate should only be
726a989a
RB
464 used during gimplification. See the rationale for this in
465 gimplify_modify_expr. */
466
467static bool
ba4d8f9d 468is_gimple_reg_rhs_or_call (tree t)
726a989a 469{
ba4d8f9d
RG
470 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
471 || TREE_CODE (t) == CALL_EXPR);
726a989a
RB
472}
473
474/* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
475 this predicate should only be used during gimplification. See the
476 rationale for this in gimplify_modify_expr. */
477
478static bool
ba4d8f9d 479is_gimple_mem_rhs_or_call (tree t)
726a989a
RB
480{
481 /* If we're dealing with a renamable type, either source or dest must be
050bbfeb
RG
482 a renamed variable. */
483 if (is_gimple_reg_type (TREE_TYPE (t)))
726a989a
RB
484 return is_gimple_val (t);
485 else
ba4d8f9d
RG
486 return (is_gimple_val (t) || is_gimple_lvalue (t)
487 || TREE_CODE (t) == CALL_EXPR);
726a989a
RB
488}
489
2ad728d2
RG
/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  /* Complex and vector temporaries are flagged as gimple registers.  */
  if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (var) = 1;
  return var;
}
504
/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary by looking VAL up in the formal
   temporary table of the current gimplify context.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      /* The table is created lazily on the first formal temporary.  */
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  /* First time we see VAL: create a temporary and remember it.  */
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  /* Reuse the temporary previously created for VAL.  */
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}
545
/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  Gimplify
   VAL to an rvalue, pick a temporary (an SSA name when gimplifying into
   SSA form and VAL has a register type), emit the initialization into
   *PRE_P, and return the temporary.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
  else
    t = lookup_tmp_var (val, is_formal);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  /* Give the initialization VAL's location if it has one.  */
  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}
575
/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true);
}

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
{
  return internal_get_tmp_var (val, pre_p, post_p, false);
}
602
ad19c4be
EB
/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  GS must be a
   GIMPLE_BIND.  VARS is expected in reverse order; it is nreverse'd
   before being attached.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  /* No block, or no debug info wanted: just prepend the chain
	     to the bind's variable list.  */
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}
642
a441447f
OH
/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (TREE_CODE (var) == VAR_DECL);

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  /* A negative result means no bound could be determined.  */
  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}
666
ad19c4be
EB
/* Push the temporary variable TMP into the binding of function FN,
   recording it in FN's local decls.  TMP must not already be chained
   into another list nor have been seen in a bind expression.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}
685
/* Push the temporary variable TMP into the current binding.  Depending
   on context, TMP is chained onto the gimplifier's temporaries, recorded
   with the current function, or declared in a nested function's body.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  /* Skip region kinds that don't own the temporary themselves.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    ctx = ctx->outer_context;
	  if (ctx)
	    omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}
732
726a989a 733
616f1431
EB

/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplication of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only few specific cases of node sharing across functions, it is
   probably easier for a front-end to unshare the expressions manually.  On the
   contrary, if the expressions generated at the global level are as widespread
   as expressions generated within functions, deep unsharing is very likely the
   way to go.  */

/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant
	   /* We can't do anything sensible with a BLOCK used as an
	      expression, but we also can't just die when we see it
	      because of non-expression uses.  So we avert our eyes
	      and cross our fingers.  Silly Java.  */
	   || code == BLOCK)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}
814
3ad065ef
EB
/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}
853
3ad065ef
EB
/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

static inline void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}
862
3ad065ef
EB
/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  /* Unshare the saved body and the size trees of the result decl.  */
  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  /* Recurse into nested functions recorded in the call graph.  */
  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unshare_body (cgn->decl);
}
885
616f1431
EB
/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}
912
44de5aeb
RK
913/* Likewise, but mark all trees as not visited. */
914
915static void
3ad065ef 916unvisit_body (tree fndecl)
44de5aeb 917{
d52f5295 918 struct cgraph_node *cgn = cgraph_node::get (fndecl);
44de5aeb 919
3ad065ef
EB
920 unmark_visited (&DECL_SAVED_TREE (fndecl));
921 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
922 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
616f1431 923
3ad065ef 924 if (cgn)
48eb4e53 925 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
67348ccc 926 unvisit_body (cgn->decl);
44de5aeb
RK
927}
928
6de9cd9a
DN
929/* Unconditionally make an unshared copy of EXPR. This is used when using
930 stored expressions which span multiple functions, such as BINFO_VTABLE,
931 as the normal unsharing process can't tell that they're shared. */
932
933tree
934unshare_expr (tree expr)
935{
936 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
937 return expr;
938}
d1f98542
RB
939
940/* Worker for unshare_expr_without_location. */
941
942static tree
943prune_expr_location (tree *tp, int *walk_subtrees, void *)
944{
945 if (EXPR_P (*tp))
946 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
947 else
948 *walk_subtrees = 0;
949 return NULL_TREE;
950}
951
/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  /* Only expressions have locations; skip the second walk otherwise.  */
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}
6de9cd9a
DN
963\f
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.

   TEMP, if non-null, must be an INIT_EXPR or MODIFY_EXPR whose RHS is
   being pushed down into the wrapper's innermost value position.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		/* The value position of a statement list is its last
		   statement; an empty list terminates the descent.  */
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  /* No assignment supplied: capture the value in a fresh
	     temporary at the innermost value position.  */
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
1060
/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  /* The saved stack pointer flows from the save call's LHS into the
     restore call's single argument.  */
  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}
1077
/* Gimplify a BIND_EXPR.  Just voidify and recurse.

   *EXPR_P is the BIND_EXPR being lowered; the resulting GIMPLE_BIND is
   appended to PRE_P.  On return *EXPR_P is the value temporary created
   by voidify_wrapper_expr, or NULL_TREE if the bind was void.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
	      && (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL))
	    {
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (t)
		  && !TREE_STATIC (t))
		omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
	      else
		omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}

      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (t)
	  && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
	  && !needs_to_live_in_memory (t))
	DECL_GIMPLE_REG_P (t) = 1;
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  /* Reset both flags so we can detect alloca/VLA use inside this body;
     the saved values are merged back below.  */
  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl
	  && !DECL_HARD_REGISTER (t)
	  && !TREE_THIS_VOLATILE (t)
	  && !DECL_HAS_VALUE_EXPR_P (t)
	  /* Only care for variables that have to be in memory.  Others
	     will be rewritten into SSA names, hence moved to the top-level.  */
	  && !is_gimple_reg (t)
	  && flag_stack_reuse != SR_NONE)
	{
	  /* An empty-CONSTRUCTOR assignment with TREE_THIS_VOLATILE set
	     is GIMPLE's representation of a storage clobber.  */
	  tree clobber = build_constructor (TREE_TYPE (t), NULL);
	  gimple *clobber_stmt;
	  TREE_THIS_VOLATILE (clobber) = 1;
	  clobber_stmt = gimple_build_assign (t, clobber);
	  gimple_set_location (clobber_stmt, end_locus);
	  gimplify_seq_add_stmt (&cleanup, clobber_stmt);

	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree *c = oacc_declare_returns->get (t);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = *c;

		  oacc_declare_returns->remove (t);

		  if (oacc_declare_returns->elements () == 0)
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}
    }

  if (ret_clauses)
    {
      /* Emit the accumulated OpenACC declare-return clauses ahead of the
	 other cleanup statements.  */
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      /* Wrap the body in a try/finally so the cleanup runs on every
	 exit path; the stack_save precedes the try.  */
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
1263
1264/* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1265 GIMPLE value, it is assigned to a new temporary and the statement is
1266 re-written to return the temporary.
1267
726a989a 1268 PRE_P points to the sequence where side effects that must happen before
6de9cd9a
DN
1269 STMT should be stored. */
1270
1271static enum gimplify_status
726a989a 1272gimplify_return_expr (tree stmt, gimple_seq *pre_p)
6de9cd9a 1273{
538dd0b7 1274 greturn *ret;
6de9cd9a 1275 tree ret_expr = TREE_OPERAND (stmt, 0);
71877985 1276 tree result_decl, result;
6de9cd9a 1277
726a989a
RB
1278 if (ret_expr == error_mark_node)
1279 return GS_ERROR;
1280
939b37da
BI
1281 /* Implicit _Cilk_sync must be inserted right before any return statement
1282 if there is a _Cilk_spawn in the function. If the user has provided a
1283 _Cilk_sync, the optimizer should remove this duplicate one. */
1284 if (fn_contains_cilk_spawn_p (cfun))
1285 {
1286 tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
1287 gimplify_and_add (impl_sync, pre_p);
1288 }
1289
726a989a
RB
1290 if (!ret_expr
1291 || TREE_CODE (ret_expr) == RESULT_DECL
55e99d52 1292 || ret_expr == error_mark_node)
726a989a 1293 {
538dd0b7 1294 greturn *ret = gimple_build_return (ret_expr);
726a989a
RB
1295 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1296 gimplify_seq_add_stmt (pre_p, ret);
1297 return GS_ALL_DONE;
1298 }
6de9cd9a 1299
6de9cd9a 1300 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
71877985 1301 result_decl = NULL_TREE;
6de9cd9a
DN
1302 else
1303 {
726a989a
RB
1304 result_decl = TREE_OPERAND (ret_expr, 0);
1305
1306 /* See through a return by reference. */
cc77ae10 1307 if (TREE_CODE (result_decl) == INDIRECT_REF)
cc77ae10 1308 result_decl = TREE_OPERAND (result_decl, 0);
282899df
NS
1309
1310 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1311 || TREE_CODE (ret_expr) == INIT_EXPR)
1312 && TREE_CODE (result_decl) == RESULT_DECL);
6de9cd9a
DN
1313 }
1314
71877985
RH
1315 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1316 Recall that aggregate_value_p is FALSE for any aggregate type that is
1317 returned in registers. If we're returning values in registers, then
1318 we don't want to extend the lifetime of the RESULT_DECL, particularly
d3147f64 1319 across another call. In addition, for those aggregates for which
535a42b1 1320 hard_function_value generates a PARALLEL, we'll die during normal
71877985
RH
1321 expansion of structure assignments; there's special code in expand_return
1322 to handle this case that does not exist in expand_expr. */
ca361dec
EB
1323 if (!result_decl)
1324 result = NULL_TREE;
1325 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1326 {
1327 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1328 {
1329 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1330 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1331 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1332 should be effectively allocated by the caller, i.e. all calls to
1333 this function must be subject to the Return Slot Optimization. */
1334 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1335 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1336 }
1337 result = result_decl;
1338 }
71877985
RH
1339 else if (gimplify_ctxp->return_temp)
1340 result = gimplify_ctxp->return_temp;
1341 else
1342 {
b731b390 1343 result = create_tmp_reg (TREE_TYPE (result_decl));
ff98621c
RH
1344
1345 /* ??? With complex control flow (usually involving abnormal edges),
1346 we can wind up warning about an uninitialized value for this. Due
1347 to how this variable is constructed and initialized, this is never
1348 true. Give up and never warn. */
1349 TREE_NO_WARNING (result) = 1;
1350
71877985
RH
1351 gimplify_ctxp->return_temp = result;
1352 }
1353
726a989a 1354 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
71877985
RH
1355 Then gimplify the whole thing. */
1356 if (result != result_decl)
726a989a 1357 TREE_OPERAND (ret_expr, 0) = result;
fff34d35
RK
1358
1359 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
6de9cd9a 1360
726a989a
RB
1361 ret = gimple_build_return (result);
1362 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1363 gimplify_seq_add_stmt (pre_p, ret);
6de9cd9a 1364
6de9cd9a
DN
1365 return GS_ALL_DONE;
1366}
1367
ad19c4be
EB
/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  /* The dereference of the alloca result cannot trap.  */
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  /* Allocate the backing storage with __builtin_alloca_with_align and
     store the pointer into ADDR.  */
  t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
  t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
		       size_int (DECL_ALIGN (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);
}
1407
45b0be94
AM
1408/* A helper function to be called via walk_tree. Mark all labels under *TP
1409 as being forced. To be called for DECL_INITIAL of static variables. */
1410
1411static tree
1412force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1413{
1414 if (TYPE_P (*tp))
1415 *walk_subtrees = 0;
1416 if (TREE_CODE (*tp) == LABEL_DECL)
1417 FORCED_LABEL (*tp) = 1;
1418
1419 return NULL_TREE;
1420}
1421
/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.  Generated statements go to SEQ_P.  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  /* The DECL_EXPR itself is consumed here; only side effects remain.  */
  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    gimplify_type_sizes (TREE_TYPE (decl), seq_p);

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);

  if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);

      /* Variable-sized decls, and large decls under generic stack
	 checking, get explicit alloca-based allocation.  */
      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && compare_tree_int (DECL_SIZE_UNIT (decl),
				   STACK_CHECK_MAX_VAR_SIZE) > 0))
	gimplify_vla_decl (decl, seq_p);

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      /* Turn the initializer into an explicit INIT_EXPR
		 statement; the tree node can then be reclaimed.  */
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
    }

  return GS_ALL_DONE;
}
1485
6de9cd9a
DN
/* Gimplify a LOOP_EXPR.  Normally this just involves gimplifying the body
   and replacing the LOOP_EXPR with goto, but if the loop contains an
   EXIT_EXPR, we need to append a label for it to jump to.  */

static enum gimplify_status
gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
{
  /* Save the enclosing loop's exit label; loops may nest.  */
  tree saved_label = gimplify_ctxp->exit_label;
  tree start_label = create_artificial_label (UNKNOWN_LOCATION);

  gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));

  gimplify_ctxp->exit_label = NULL_TREE;

  gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);

  /* The back edge: jump unconditionally to the loop start.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));

  /* exit_label is only created if the body contained an EXIT_EXPR
     (see gimplify_exit_expr / build_and_jump).  */
  if (gimplify_ctxp->exit_label)
    gimplify_seq_add_stmt (pre_p,
			   gimple_build_label (gimplify_ctxp->exit_label));

  gimplify_ctxp->exit_label = saved_label;

  *expr_p = NULL;
  return GS_ALL_DONE;
}
1513
/* Gimplify a statement list onto a sequence.  These may be created either
   by an enlightened front-end, or by shortcut_cond_expr.  */

static enum gimplify_status
gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
{
  /* If the list is used for its value, voidify it; TEMP then carries
     the value (see voidify_wrapper_expr).  */
  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  tree_stmt_iterator i = tsi_start (*expr_p);

  while (!tsi_end_p (i))
    {
      gimplify_stmt (tsi_stmt_ptr (i), pre_p);
      /* tsi_delink removes the statement and advances I.  */
      tsi_delink (&i);
    }

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  return GS_ALL_DONE;
}
0f1f6967 1538
68e72840
SB
1539\f
/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
   branch to.  */

static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;
  tree index_type = TREE_TYPE (switch_expr);
  if (index_type == NULL_TREE)
    index_type = TREE_TYPE (SWITCH_COND (switch_expr));

  /* The controlling expression must be a GIMPLE value.  */
  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
		       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      vec<tree> labels;
      vec<tree> saved_labels;
      tree default_case = NULL_TREE;
      gswitch *switch_stmt;

      /* If someone can be bothered to fill in the labels, they can
	 be bothered to null out the body too.  */
      gcc_assert (!SWITCH_LABELS (switch_expr));

      /* Save old labels, get new ones from body, then restore the old
	 labels.  Save all the things from the switch body to append after.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels.create (8);

      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      preprocess_case_label_vec_for_gimple (labels, index_type,
					    &default_case);

      /* If no default case was found, synthesize an empty one at the
	 end of the switch body.  */
      if (!default_case)
	{
	  glabel *new_default;

	  default_case
	    = build_case_label (NULL_TREE, NULL_TREE,
				create_artificial_label (UNKNOWN_LOCATION));
	  new_default = gimple_build_label (CASE_LABEL (default_case));
	  gimplify_seq_add_stmt (&switch_body_seq, new_default);
	}

      switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
					 default_case, labels);
      gimplify_seq_add_stmt (pre_p, switch_stmt);
      gimplify_seq_add_seq (pre_p, switch_body_seq);
      labels.release ();
    }
  else
    gcc_assert (SWITCH_LABELS (switch_expr));

  return GS_ALL_DONE;
}
1603
/* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P.  */

static enum gimplify_status
gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
{
  struct gimplify_ctx *ctxp;
  glabel *label_stmt;

  /* Invalid programs can play Duff's Device type games with, for example,
     #pragma omp parallel.  At least in the C front end, we don't
     detect such invalid branches until after gimplification, in the
     diagnose_omp_blocks pass.  */
  for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
    if (ctxp->case_labels.exists ())
      break;

  /* Record the label in the innermost context that is collecting case
     labels (the enclosing switch), and emit the label statement.  */
  label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
  ctxp->case_labels.safe_push (*expr_p);
  gimplify_seq_add_stmt (pre_p, label_stmt);

  return GS_ALL_DONE;
}
1626
6de9cd9a
DN
1627/* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1628 if necessary. */
1629
1630tree
1631build_and_jump (tree *label_p)
1632{
1633 if (label_p == NULL)
1634 /* If there's nowhere to jump, just fall through. */
65355d53 1635 return NULL_TREE;
6de9cd9a
DN
1636
1637 if (*label_p == NULL_TREE)
1638 {
c2255bc4 1639 tree label = create_artificial_label (UNKNOWN_LOCATION);
6de9cd9a
DN
1640 *label_p = label;
1641 }
1642
1643 return build1 (GOTO_EXPR, void_type_node, *label_p);
1644}
1645
1646/* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1647 This also involves building a label to jump to and communicating it to
1648 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1649
1650static enum gimplify_status
1651gimplify_exit_expr (tree *expr_p)
1652{
1653 tree cond = TREE_OPERAND (*expr_p, 0);
1654 tree expr;
1655
1656 expr = build_and_jump (&gimplify_ctxp->exit_label);
b4257cfc 1657 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
6de9cd9a
DN
1658 *expr_p = expr;
1659
1660 return GS_OK;
1661}
1662
26d44ae2
RH
/* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
   different from its canonical type, wrap the whole thing inside a
   NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
   type.

   The canonical type of a COMPONENT_REF is the type of the field being
   referenced--unless the field is a bit-field which can be read directly
   in a smaller mode, in which case the canonical type is the
   sign-appropriate type corresponding to that mode.  */

static void
canonicalize_component_ref (tree *expr_p)
{
  tree expr = *expr_p;
  tree type;

  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);

  /* For integral references, let get_unwidened pick the narrowest
     mode-appropriate type (the bit-field case).  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
  else
    type = TREE_TYPE (TREE_OPERAND (expr, 1));

  /* One could argue that all the stuff below is not necessary for
     the non-bitfield case and declare it a FE error if type
     adjustment would be needed.  */
  if (TREE_TYPE (expr) != type)
    {
#ifdef ENABLE_TYPES_CHECKING
      tree old_type = TREE_TYPE (expr);
#endif
      int type_quals;

      /* We need to preserve qualifiers and propagate them from
	 operand 0.  */
      type_quals = TYPE_QUALS (type)
	| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
      if (TYPE_QUALS (type) != type_quals)
	type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);

      /* Set the type of the COMPONENT_REF to the underlying type.  */
      TREE_TYPE (expr) = type;

#ifdef ENABLE_TYPES_CHECKING
      /* It is now a FE error, if the conversion from the canonical
	 type to the original expression type is not useless.  */
      gcc_assert (useless_type_conversion_p (old_type, type));
#endif
    }
}
6de9cd9a 1713
/* If a NOP conversion is changing a pointer to array of foo to a pointer
   to foo, embed that change in the ADDR_EXPR by converting
      T array[U];
      (T *)&array
   ==>
      &array[L]
   where L is the lower bound.  For simplicity, only do this for constant
   lower bound.
   The constraint is that the type of &array[L] is trivially convertible
   to T *.  */

static void
canonicalize_addr_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree addr_expr = TREE_OPERAND (expr, 0);
  tree datype, ddatype, pddatype;

  /* We simplify only conversions from an ADDR_EXPR to a pointer type.  */
  if (!POINTER_TYPE_P (TREE_TYPE (expr))
      || TREE_CODE (addr_expr) != ADDR_EXPR)
    return;

  /* The addr_expr type should be a pointer to an array.  */
  datype = TREE_TYPE (TREE_TYPE (addr_expr));
  if (TREE_CODE (datype) != ARRAY_TYPE)
    return;

  /* The pointer to element type shall be trivially convertible to
     the expression pointer type.  */
  ddatype = TREE_TYPE (datype);
  pddatype = build_pointer_type (ddatype);
  if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
				  pddatype))
    return;

  /* The lower bound and element sizes must be constant.  */
  if (!TYPE_SIZE_UNIT (ddatype)
      || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
      || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
      || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
    return;

  /* All checks succeeded.  Build a new node to merge the cast.  */
  *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
		    TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
		    NULL_TREE, NULL_TREE);
  *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);

  /* We can have stripped a required restrict qualifier above.  */
  if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
    *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
}
6de9cd9a 1767
26d44ae2
RH
/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR.  Remove it and/or other conversions
   underneath as appropriate.  */

static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  gcc_assert (CONVERT_EXPR_P (*expr_p));

  /* Then strip away all but the outermost conversion.  */
  STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));

  /* And remove the outermost conversion if it's useless.  */
  if (tree_ssa_useless_type_conversion (*expr_p))
    *expr_p = TREE_OPERAND (*expr_p, 0);

  /* If we still have a conversion at the toplevel,
     then canonicalize some constructs.  */
  if (CONVERT_EXPR_P (*expr_p))
    {
      tree sub = TREE_OPERAND (*expr_p, 0);

      /* If a NOP conversion is changing the type of a COMPONENT_REF
	 expression, then canonicalize its type now in order to expose more
	 redundant conversions.  */
      if (TREE_CODE (sub) == COMPONENT_REF)
	canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));

      /* If a NOP conversion is changing a pointer to array of foo
	 to a pointer to foo, embed that change in the ADDR_EXPR.  */
      else if (TREE_CODE (sub) == ADDR_EXPR)
	canonicalize_addr_expr (expr_p);
    }

  /* If we have a conversion to a non-register type force the
     use of a VIEW_CONVERT_EXPR instead.  */
  if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
    *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
			       TREE_OPERAND (*expr_p, 0));

  /* Canonicalize CONVERT_EXPR to NOP_EXPR.  */
  if (TREE_CODE (*expr_p) == CONVERT_EXPR)
    TREE_SET_CODE (*expr_p, NOP_EXPR);

  return GS_OK;
}
1814
/* Nonlocal VLAs seen in the current function.  Consulted and extended by
   gimplify_var_or_parm_decl so that each nonlocal VLA gets at most one
   local debug decl.  */
static hash_set<tree> *nonlocal_vlas;

/* The VAR_DECLs created for nonlocal VLAs for debug info purposes,
   chained through DECL_CHAIN.  */
static tree nonlocal_vla_vars;
1820
/* Gimplify a VAR_DECL or PARM_DECL.  Return GS_OK if we expanded a
   DECL_VALUE_EXPR, and it's worth re-examining things.  Returns GS_ERROR
   for leaked local decls (see below), GS_ALL_DONE otherwise.  */

static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
  tree decl = *expr_p;

  /* ??? If this is a local variable, and it has not been seen in any
     outer BIND_EXPR, then it's probably the result of a duplicate
     declaration, for which we've already issued an error.  It would
     be really nice if the front end wouldn't leak these at all.
     Currently the only known culprit is C++ destructors, as seen
     in g++.old-deja/g++.jason/binding.C.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !DECL_SEEN_IN_BIND_EXPR_P (decl)
      && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
      && decl_function_context (decl) == current_function_decl)
    {
      gcc_assert (seen_error ());
      return GS_ERROR;
    }

  /* When within an OMP context, notice uses of variables.  */
  if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
    return GS_ALL_DONE;

  /* If the decl is an alias for another expression, substitute it now.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value_expr = DECL_VALUE_EXPR (decl);

      /* For referenced nonlocal VLAs add a decl for debugging purposes
	 to the current function.  */
      if (TREE_CODE (decl) == VAR_DECL
	  && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  && nonlocal_vlas != NULL
	  && TREE_CODE (value_expr) == INDIRECT_REF
	  && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
	  && decl_function_context (decl) != current_function_decl)
	{
	  /* Skip workshare/simd/acc contexts: the debug decl belongs to
	     the enclosing function only if we are not inside any other
	     kind of OMP region.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    ctx = ctx->outer_context;
	  /* hash_set::add returns true if DECL was already present, so
	     the copy is made at most once per nonlocal VLA.  */
	  if (!ctx && !nonlocal_vlas->add (decl))
	    {
	      tree copy = copy_node (decl);

	      lang_hooks.dup_lang_specific_decl (copy);
	      SET_DECL_RTL (copy, 0);
	      TREE_USED (copy) = 1;
	      DECL_CHAIN (copy) = nonlocal_vla_vars;
	      nonlocal_vla_vars = copy;
	      SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
	      DECL_HAS_VALUE_EXPR_P (copy) = 1;
	    }
	}

      *expr_p = unshare_expr (value_expr);
      return GS_OK;
    }

  return GS_ALL_DONE;
}
1888
66c14933
EB
1889/* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
1890
1891static void
2fb9a547
AM
1892recalculate_side_effects (tree t)
1893{
1894 enum tree_code code = TREE_CODE (t);
1895 int len = TREE_OPERAND_LENGTH (t);
1896 int i;
1897
1898 switch (TREE_CODE_CLASS (code))
1899 {
1900 case tcc_expression:
1901 switch (code)
1902 {
1903 case INIT_EXPR:
1904 case MODIFY_EXPR:
1905 case VA_ARG_EXPR:
1906 case PREDECREMENT_EXPR:
1907 case PREINCREMENT_EXPR:
1908 case POSTDECREMENT_EXPR:
1909 case POSTINCREMENT_EXPR:
1910 /* All of these have side-effects, no matter what their
1911 operands are. */
1912 return;
1913
1914 default:
1915 break;
1916 }
1917 /* Fall through. */
1918
1919 case tcc_comparison: /* a comparison expression */
1920 case tcc_unary: /* a unary arithmetic expression */
1921 case tcc_binary: /* a binary arithmetic expression */
1922 case tcc_reference: /* a reference */
1923 case tcc_vl_exp: /* a function call */
1924 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
1925 for (i = 0; i < len; ++i)
1926 {
1927 tree op = TREE_OPERAND (t, i);
1928 if (op && TREE_SIDE_EFFECTS (op))
1929 TREE_SIDE_EFFECTS (t) = 1;
1930 }
1931 break;
1932
1933 case tcc_constant:
1934 /* No side-effects. */
1935 return;
1936
1937 default:
1938 gcc_unreachable ();
1939 }
1940}
1941
/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
   node *EXPR_P.

      compound_lval
	      : min_lval '[' val ']'
	      | min_lval '.' ID
	      | compound_lval '[' val ']'
	      | compound_lval '.' ID

   This is not part of the original SIMPLE definition, which separates
   array and member references, but it seems reasonable to handle them
   together.  Also, this way we don't run into problems with union
   aliasing; gcc requires that for accesses through a union to alias, the
   union reference must be explicit, which was not always the case when we
   were splitting up array and member refs.

   PRE_P points to the sequence where side effects that must happen before
     *EXPR_P should be stored.

   POST_P points to the sequence where side effects that must happen after
     *EXPR_P should be stored.

   FALLBACK indicates what kind of GIMPLE value the caller can accept
   (rvalue/lvalue/either).  Returns the minimum gimplify_status over all
   sub-gimplifications performed.  */

static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			fallback_t fallback)
{
  tree *p;
  enum gimplify_status ret = GS_ALL_DONE, tret;
  int i;
  location_t loc = EXPR_LOCATION (*expr_p);
  tree expr = *expr_p;

  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  auto_vec<tree, 10> expr_stack;

  /* We can handle anything that get_inner_reference can deal with.  */
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
    {
    restart:
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
      if (TREE_CODE (*p) == INDIRECT_REF)
	*p = fold_indirect_ref_loc (loc, *p);

      if (handled_component_p (*p))
	;
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
	 additional COMPONENT_REFs.  */
      else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
	       && gimplify_var_or_parm_decl (p) == GS_OK)
	goto restart;
      else
	break;

      expr_stack.safe_push (*p);
    }

  gcc_assert (expr_stack.length ());

  /* Now EXPR_STACK is a stack of pointers to all the refs we've
     walked through and P points to the innermost expression.

     Java requires that we elaborated nodes in source order.  That
     means we must gimplify the inner expression followed by each of
     the indices, in order.  But we can't gimplify the inner
     expression until we deal with any variable bounds, sizes, or
     positions in order to deal with PLACEHOLDER_EXPRs.

     So we do this in three steps.  First we deal with the annotations
     for any variables in the components, then we gimplify the base,
     then we gimplify any indices, from left to right.  */
  for (i = expr_stack.length () - 1; i >= 0; i--)
    {
      tree t = expr_stack[i];

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the low bound and element type size and put them into
	     the ARRAY_REF.  If these values are set, they have already been
	     gimplified.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree low = unshare_expr (array_ref_low_bound (t));
	      if (!is_gimple_min_invariant (low))
		{
		  TREE_OPERAND (t, 2) = low;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }

	  if (TREE_OPERAND (t, 3) == NULL_TREE)
	    {
	      tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
	      tree elmt_size = unshare_expr (array_ref_element_size (t));
	      tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

	      /* Divide the element size by the alignment of the element
		 type (above).  */
	      elmt_size
		= size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);

	      if (!is_gimple_min_invariant (elmt_size))
		{
		  TREE_OPERAND (t, 3) = elmt_size;
		  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  /* Set the field offset into T and gimplify it.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree offset = unshare_expr (component_ref_field_offset (t));
	      tree field = TREE_OPERAND (t, 1);
	      tree factor
		= size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);

	      /* Divide the offset by its alignment.  */
	      offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);

	      if (!is_gimple_min_invariant (offset))
		{
		  TREE_OPERAND (t, 2) = offset;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
    }

  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
     so as to match the min_lval predicate.  Failure to do so may result
     in the creation of large aggregate temporaries.  */
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
			fallback | fb_lvalue);
  ret = MIN (ret, tret);

  /* And finally, the indices and operands of ARRAY_REF.  During this
     loop we also remove any useless conversions.  */
  for (; expr_stack.length () > 0; )
    {
      tree t = expr_stack.pop ();

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the dimension.  */
	  if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				    is_gimple_val, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}

      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));

      /* The innermost expression P may have originally had
	 TREE_SIDE_EFFECTS set which would have caused all the outer
	 expressions in *EXPR_P leading to P to also have had
	 TREE_SIDE_EFFECTS set.  */
      recalculate_side_effects (t);
    }

  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
    {
      canonicalize_component_ref (expr_p);
    }

  expr_stack.release ();

  /* If nothing changed, *EXPR_P must be untouched.  */
  gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);

  return ret;
}
2144
/* Gimplify the self modifying expression pointed to by EXPR_P
   (++, --, +=, -=).

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
   *EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
   in another expression.

   ARITH_TYPE is the type the computation should be performed in.

   Returns GS_ALL_DONE for the (rewritten) postfix case, GS_OK for the
   prefix case, or GS_ERROR if gimplifying an operand failed.  */

enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			bool want_value, tree arith_type)
{
  enum tree_code code;
  tree lhs, lvalue, rhs, t1;
  gimple_seq post = NULL, *orig_post_p = post_p;
  bool postfix;
  enum tree_code arith_code;
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  code = TREE_CODE (*expr_p);

  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
	      || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);

  /* Prefix or postfix?  */
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
    /* Faster to treat as prefix if result is not used.  */
    postfix = want_value;
  else
    postfix = false;

  /* For postfix, make sure the inner expression's post side effects
     are executed after side effects from this expression.  */
  if (postfix)
    post_p = &post;

  /* Add or subtract?  */
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
    arith_code = PLUS_EXPR;
  else
    arith_code = MINUS_EXPR;

  /* Gimplify the LHS into a GIMPLE lvalue.  */
  lvalue = TREE_OPERAND (*expr_p, 0);
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Extract the operands to the arithmetic operation.  */
  lhs = lvalue;
  rhs = TREE_OPERAND (*expr_p, 1);

  /* For postfix operator, we evaluate the LHS to an rvalue and then use
     that as the result value and in the postqueue operation.  */
  if (postfix)
    {
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;

      /* Snapshot the old value in a temporary so it survives the
	 store below; this temporary is the expression's result.  */
      lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
    }

  /* For POINTERs increment, use POINTER_PLUS_EXPR.  */
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
    {
      rhs = convert_to_ptrofftype_loc (loc, rhs);
      /* There is no POINTER_MINUS_EXPR; negate the offset instead.  */
      if (arith_code == MINUS_EXPR)
	rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
      t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
    }
  else
    t1 = fold_convert (TREE_TYPE (*expr_p),
		       fold_build2 (arith_code, arith_type,
				    fold_convert (arith_type, lhs),
				    fold_convert (arith_type, rhs)));

  if (postfix)
    {
      gimplify_assign (lvalue, t1, pre_p);
      gimplify_seq_add_seq (orig_post_p, post);
      *expr_p = lhs;
      return GS_ALL_DONE;
    }
  else
    {
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
      return GS_OK;
    }
}
2242
d25cee4d
RH
2243/* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2244
2245static void
2246maybe_with_size_expr (tree *expr_p)
2247{
61025d1b
RK
2248 tree expr = *expr_p;
2249 tree type = TREE_TYPE (expr);
2250 tree size;
d25cee4d 2251
61025d1b
RK
2252 /* If we've already wrapped this or the type is error_mark_node, we can't do
2253 anything. */
2254 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2255 || type == error_mark_node)
d25cee4d
RH
2256 return;
2257
61025d1b 2258 /* If the size isn't known or is a constant, we have nothing to do. */
d25cee4d 2259 size = TYPE_SIZE_UNIT (type);
61025d1b
RK
2260 if (!size || TREE_CODE (size) == INTEGER_CST)
2261 return;
2262
2263 /* Otherwise, make a WITH_SIZE_EXPR. */
2264 size = unshare_expr (size);
2265 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2266 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
d25cee4d
RH
2267}
2268
726a989a 2269/* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
1282697f
AH
2270 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2271 the CALL_EXPR. */
e4f78bd4 2272
fe6ebcf1 2273enum gimplify_status
1282697f 2274gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
e4f78bd4
JM
2275{
2276 bool (*test) (tree);
2277 fallback_t fb;
2278
2279 /* In general, we allow lvalues for function arguments to avoid
2280 extra overhead of copying large aggregates out of even larger
2281 aggregates into temporaries only to copy the temporaries to
2282 the argument list. Make optimizers happy by pulling out to
2283 temporaries those types that fit in registers. */
726a989a 2284 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
e4f78bd4
JM
2285 test = is_gimple_val, fb = fb_rvalue;
2286 else
b4ef8aac
JM
2287 {
2288 test = is_gimple_lvalue, fb = fb_either;
2289 /* Also strip a TARGET_EXPR that would force an extra copy. */
2290 if (TREE_CODE (*arg_p) == TARGET_EXPR)
2291 {
2292 tree init = TARGET_EXPR_INITIAL (*arg_p);
2293 if (init
2294 && !VOID_TYPE_P (TREE_TYPE (init)))
2295 *arg_p = init;
2296 }
2297 }
e4f78bd4 2298
d25cee4d 2299 /* If this is a variable sized type, we must remember the size. */
726a989a 2300 maybe_with_size_expr (arg_p);
d25cee4d 2301
c2255bc4 2302 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
1282697f
AH
2303 /* Make sure arguments have the same location as the function call
2304 itself. */
2305 protected_set_expr_location (*arg_p, call_location);
2306
e4f78bd4
JM
2307 /* There is a sequence point before a function call. Side effects in
2308 the argument list must occur before the actual call. So, when
2309 gimplifying arguments, force gimplify_expr to use an internal
2310 post queue which is then appended to the end of PRE_P. */
726a989a 2311 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
e4f78bd4
JM
2312}
2313
d26fc979
JJ
2314/* Don't fold inside offloading or taskreg regions: it can break code by
2315 adding decl references that weren't in the source. We'll do it during
2316 omplower pass instead. */
88ac13da
TS
2317
2318static bool
2319maybe_fold_stmt (gimple_stmt_iterator *gsi)
2320{
2321 struct gimplify_omp_ctx *ctx;
2322 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
d9a6bd32 2323 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
88ac13da
TS
2324 return false;
2325 return fold_stmt (gsi);
2326}
2327
/* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
   WANT_VALUE is true if the result of the call is desired.  When
   WANT_VALUE is false the call is emitted directly into PRE_P as a
   GIMPLE_CALL and *EXPR_P is cleared; otherwise the (gimplified)
   CALL_EXPR is left in *EXPR_P for gimplify_modify_expr to consume.  */

static enum gimplify_status
gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
{
  tree fndecl, parms, p, fnptrtype;
  enum gimplify_status ret;
  int i, nargs;
  gcall *call;
  bool builtin_va_start_p = false;
  location_t loc = EXPR_LOCATION (*expr_p);

  gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);

  /* For reliable diagnostics during inlining, it is necessary that
     every call_expr be annotated with file and line.  */
  if (! EXPR_HAS_LOCATION (*expr_p))
    SET_EXPR_LOCATION (*expr_p, input_location);

  /* Gimplify internal functions created in the FEs.  Internal calls
     have no function decl (CALL_EXPR_FN is NULL).  */
  if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
    {
      if (want_value)
	return GS_ALL_DONE;

      nargs = call_expr_nargs (*expr_p);
      enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
      auto_vec<tree> vargs (nargs);

      for (i = 0; i < nargs; i++)
	{
	  gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
			EXPR_LOCATION (*expr_p));
	  vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
	}
      gimple *call = gimple_build_call_internal_vec (ifn, vargs);
      gimplify_seq_add_stmt (pre_p, call);
      return GS_ALL_DONE;
    }

  /* This may be a call to a builtin function.

     Builtin function calls may be transformed into different
     (and more efficient) builtin function calls under certain
     circumstances.  Unfortunately, gimplification can muck things
     up enough that the builtin expanders are not aware that certain
     transformations are still valid.

     So we attempt transformation/gimplification of the call before
     we gimplify the CALL_EXPR.  At this time we do not manage to
     transform all calls in the same manner as the expanders do, but
     we do transform most of them.  */
  fndecl = get_callee_fndecl (*expr_p);
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
	/* If the call has been built for a variable-sized object, then we
	   want to restore the stack level when the enclosing BIND_EXPR is
	   exited to reclaim the allocated space; otherwise, we precisely
	   need to do the opposite and preserve the latest stack level.  */
	if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
	  gimplify_ctxp->save_stack = true;
	else
	  gimplify_ctxp->keep_stack = true;
	break;

      case BUILT_IN_VA_START:
	{
	  builtin_va_start_p = TRUE;
	  if (call_expr_nargs (*expr_p) < 2)
	    {
	      error ("too few arguments to function %<va_start%>");
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }

	  if (fold_builtin_next_arg (*expr_p, true))
	    {
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }
	  break;
	}
      case BUILT_IN_LINE:
	{
	  /* Fold __builtin_LINE to the line of the call site.  */
	  *expr_p = build_int_cst (TREE_TYPE (*expr_p),
				   LOCATION_LINE (EXPR_LOCATION (*expr_p)));
	  return GS_OK;
	}
      case BUILT_IN_FILE:
	{
	  /* Fold __builtin_FILE to a string literal for the file name.  */
	  const char *locfile = LOCATION_FILE (EXPR_LOCATION (*expr_p));
	  *expr_p = build_string_literal (strlen (locfile) + 1, locfile);
	  return GS_OK;
	}
      case BUILT_IN_FUNCTION:
	{
	  /* Fold __builtin_FUNCTION to the enclosing function's name.  */
	  const char *function;
	  function = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
	  *expr_p = build_string_literal (strlen (function) + 1, function);
	  return GS_OK;
	}
      default:
	;
      }
  if (fndecl && DECL_BUILT_IN (fndecl))
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }

  /* Remember the original function pointer type.  */
  fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));

  /* There is a sequence point before the call, so any side effects in
     the calling expression must occur before the actual call.  Force
     gimplify_expr to use an internal post queue.  */
  ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
		       is_gimple_call_addr, fb_rvalue);

  nargs = call_expr_nargs (*expr_p);

  /* Get argument types for verification.  */
  fndecl = get_callee_fndecl (*expr_p);
  parms = NULL_TREE;
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));

  if (fndecl && DECL_ARGUMENTS (fndecl))
    p = DECL_ARGUMENTS (fndecl);
  else if (parms)
    p = parms;
  else
    p = NULL_TREE;
  /* Advance P past the named parameters; if it runs out before the
     arguments do, the trailing arguments are variadic.  */
  for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
    ;

  /* If the last argument is __builtin_va_arg_pack () and it is not
     passed as a named argument, decrease the number of CALL_EXPR
     arguments and set instead the CALL_EXPR_VA_ARG_PACK flag.  */
  if (!p
      && i < nargs
      && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
    {
      tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
      tree last_arg_fndecl = get_callee_fndecl (last_arg);

      if (last_arg_fndecl
	  && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
	{
	  tree call = *expr_p;

	  --nargs;
	  *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
					  CALL_EXPR_FN (call),
					  nargs, CALL_EXPR_ARGP (call));

	  /* Copy all CALL_EXPR flags, location and block, except
	     CALL_EXPR_VA_ARG_PACK flag.  */
	  CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
	  CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
	  CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
	    = CALL_EXPR_RETURN_SLOT_OPT (call);
	  CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
	  SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));

	  /* Set CALL_EXPR_VA_ARG_PACK.  */
	  CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
	}
    }

  /* Gimplify the function arguments.  */
  if (nargs > 0)
    {
      for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
	   PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
	   PUSH_ARGS_REVERSED ? i-- : i++)
	{
	  enum gimplify_status t;

	  /* Avoid gimplifying the second argument to va_start, which needs to
	     be the plain PARM_DECL.  */
	  if ((i != 1) || !builtin_va_start_p)
	    {
	      t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
				EXPR_LOCATION (*expr_p));

	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
    }

  /* Gimplify the static chain.  */
  if (CALL_EXPR_STATIC_CHAIN (*expr_p))
    {
      if (fndecl && !DECL_STATIC_CHAIN (fndecl))
	CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
      else
	{
	  enum gimplify_status t;
	  t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
			    EXPR_LOCATION (*expr_p));
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
    }

  /* Verify the function result.  */
  if (want_value && fndecl
      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
    {
      error_at (loc, "using result of function returning %<void%>");
      ret = GS_ERROR;
    }

  /* Try this again in case gimplification exposed something.  */
  if (ret != GS_ERROR)
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);

      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }
  else
    {
      *expr_p = error_mark_node;
      return GS_ERROR;
    }

  /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
     decl.  This allows us to eliminate redundant or useless
     calls to "const" functions.  */
  if (TREE_CODE (*expr_p) == CALL_EXPR)
    {
      int flags = call_expr_flags (*expr_p);
      if (flags & (ECF_CONST | ECF_PURE)
	  /* An infinite loop is considered a side effect.  */
	  && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
	TREE_SIDE_EFFECTS (*expr_p) = 0;
    }

  /* If the value is not needed by the caller, emit a new GIMPLE_CALL
     and clear *EXPR_P.  Otherwise, leave *EXPR_P in its gimplified
     form and delegate the creation of a GIMPLE_CALL to
     gimplify_modify_expr.  This is always possible because when
     WANT_VALUE is true, the caller wants the result of this call into
     a temporary, which means that we will emit an INIT_EXPR in
     internal_get_tmp_var which will then be handled by
     gimplify_modify_expr.  */
  if (!want_value)
    {
      /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
	 have to do is replicate it as a GIMPLE_CALL tuple.  */
      gimple_stmt_iterator gsi;
      call = gimple_build_call_from_tree (*expr_p);
      gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
      notice_special_calls (call);
      gimplify_seq_add_stmt (pre_p, call);
      gsi = gsi_last (*pre_p);
      maybe_fold_stmt (&gsi);
      *expr_p = NULL_TREE;
    }
  else
    /* Remember the original function type.  */
    CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
				     CALL_EXPR_FN (*expr_p));

  return ret;
}
2619
/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
   rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.

   TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
   condition is true or false, respectively.  If null, we should generate
   our own to skip over the evaluation of this specific expression.

   LOCUS is the source location of the COND_EXPR.

   This function is the tree equivalent of do_jump.

   Returns the rewritten statement tree.

   shortcut_cond_r should only be called by shortcut_cond_expr.  */

static tree
shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
		 location_t locus)
{
  tree local_label = NULL_TREE;
  tree t, expr = NULL;

  /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
     retain the shortcut semantics.  Just insert the gotos here;
     shortcut_cond_expr will append the real blocks later.  */
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a && b) into

	 if (a); else goto no;
	 if (b) goto yes; else goto no;
	 (no:) */

      if (false_label_p == NULL)
	false_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the && on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a || b) into

	 if (a) goto yes;
	 if (b) goto yes; else goto no;
	 (yes:) */

      if (true_label_p == NULL)
	true_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the || on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == COND_EXPR
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
    {
      location_t new_locus;

      /* As long as we're messing with gotos, turn if (a ? b : c) into
	 if (a)
	   if (b) goto yes; else goto no;
	 else
	   if (c) goto yes; else goto no;

	 Don't do this if one of the arms has void type, which can happen
	 in C++ when the arm is throw.  */

      /* Keep the original source location on the first 'if'.  Set the source
	 location of the ? on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
		     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
				      false_label_p, locus),
		     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
				      false_label_p, new_locus));
    }
  else
    {
      /* Base case: a simple predicate; emit a single conditional jump.
	 build_and_jump creates the label lazily through the label
	 pointers when they were NULL.  */
      expr = build3 (COND_EXPR, void_type_node, pred,
		     build_and_jump (true_label_p),
		     build_and_jump (false_label_p));
      SET_EXPR_LOCATION (expr, locus);
    }

  /* If we created a local fall-through label above, emit it after the
     rewritten statements.  */
  if (local_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, local_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
2729
/* Given a conditional expression EXPR with short-circuit boolean
   predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
   predicate apart into the equivalent sequence of conditionals.
   Returns the rewritten expression, which may be EXPR itself when no
   rewriting was needed.  */

static tree
shortcut_cond_expr (tree expr)
{
  tree pred = TREE_OPERAND (expr, 0);
  tree then_ = TREE_OPERAND (expr, 1);
  tree else_ = TREE_OPERAND (expr, 2);
  tree true_label, false_label, end_label, t;
  tree *true_label_p;
  tree *false_label_p;
  bool emit_end, emit_false, jump_over_else;
  bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
  bool else_se = else_ && TREE_SIDE_EFFECTS (else_);

  /* First do simple transformations.  */
  if (!else_se)
    {
      /* If there is no 'else', turn
	   if (a && b) then c
	 into
	   if (a) if (b) then c.  */
      while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the && on the second 'if'.  */
	  if (EXPR_HAS_LOCATION (pred))
	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
	  then_ = shortcut_cond_expr (expr);
	  then_se = then_ && TREE_SIDE_EFFECTS (then_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  if (!then_se)
    {
      /* If there is no 'then', turn
	   if (a || b); else d
	 into
	   if (a); else if (b); else d.  */
      while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the || on the second 'if'.  */
	  if (EXPR_HAS_LOCATION (pred))
	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
	  else_ = shortcut_cond_expr (expr);
	  else_se = else_ && TREE_SIDE_EFFECTS (else_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  /* If we're done, great.  */
  if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
      && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
    return expr;

  /* Otherwise we need to mess with gotos.  Change
       if (a) c; else d;
     to
       if (a); else goto no;
       c; goto end;
       no: d; end:
     and recursively gimplify the condition.  */

  true_label = false_label = end_label = NULL_TREE;

  /* If our arms just jump somewhere, hijack those labels so we don't
     generate jumps to jumps.  */

  if (then_
      && TREE_CODE (then_) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
    {
      true_label = GOTO_DESTINATION (then_);
      then_ = NULL;
      then_se = false;
    }

  if (else_
      && TREE_CODE (else_) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
    {
      false_label = GOTO_DESTINATION (else_);
      else_ = NULL;
      else_se = false;
    }

  /* If we aren't hijacking a label for the 'then' branch, it falls through. */
  if (true_label)
    true_label_p = &true_label;
  else
    true_label_p = NULL;

  /* The 'else' branch also needs a label if it contains interesting code.  */
  if (false_label || else_se)
    false_label_p = &false_label;
  else
    false_label_p = NULL;

  /* If there was nothing else in our arms, just forward the label(s).  */
  if (!then_se && !else_se)
    return shortcut_cond_r (pred, true_label_p, false_label_p,
			    EXPR_LOC_OR_LOC (expr, input_location));

  /* If our last subexpression already has a terminal label, reuse it.  */
  if (else_se)
    t = expr_last (else_);
  else if (then_se)
    t = expr_last (then_);
  else
    t = NULL;
  if (t && TREE_CODE (t) == LABEL_EXPR)
    end_label = LABEL_EXPR_LABEL (t);

  /* If we don't care about jumping to the 'else' branch, jump to the end
     if the condition is false.  */
  if (!false_label_p)
    false_label_p = &end_label;

  /* We only want to emit these labels if we aren't hijacking them.  */
  emit_end = (end_label == NULL_TREE);
  emit_false = (false_label == NULL_TREE);

  /* We only emit the jump over the else clause if we have to--if the
     then clause may fall through.  Otherwise we can wind up with a
     useless jump and a useless label at the end of gimplified code,
     which will cause us to think that this conditional as a whole
     falls through even if it doesn't.  If we then inline a function
     which ends with such a condition, that can cause us to issue an
     inappropriate warning about control reaching the end of a
     non-void function.  */
  jump_over_else = block_may_fallthru (then_);

  pred = shortcut_cond_r (pred, true_label_p, false_label_p,
			  EXPR_LOC_OR_LOC (expr, input_location));

  /* Now assemble the result: condition jumps, then-arm, jump over the
     else-arm, the else-arm itself, then any labels we own.  */
  expr = NULL;
  append_to_statement_list (pred, &expr);

  append_to_statement_list (then_, &expr);
  if (else_se)
    {
      if (jump_over_else)
	{
	  tree last = expr_last (expr);
	  t = build_and_jump (&end_label);
	  /* Give the skip-jump the location of the last statement of the
	     then-arm, not of the whole conditional.  */
	  if (EXPR_HAS_LOCATION (last))
	    SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
	  append_to_statement_list (t, &expr);
	}
      if (emit_false)
	{
	  t = build1 (LABEL_EXPR, void_type_node, false_label);
	  append_to_statement_list (t, &expr);
	}
      append_to_statement_list (else_, &expr);
    }
  if (emit_end && end_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, end_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
2906
2907/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
2908
50674e96 2909tree
6de9cd9a
DN
2910gimple_boolify (tree expr)
2911{
2912 tree type = TREE_TYPE (expr);
db3927fb 2913 location_t loc = EXPR_LOCATION (expr);
6de9cd9a 2914
554cf330
JJ
2915 if (TREE_CODE (expr) == NE_EXPR
2916 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
2917 && integer_zerop (TREE_OPERAND (expr, 1)))
2918 {
2919 tree call = TREE_OPERAND (expr, 0);
2920 tree fn = get_callee_fndecl (call);
2921
d53c73e0
JJ
2922 /* For __builtin_expect ((long) (x), y) recurse into x as well
2923 if x is truth_value_p. */
554cf330
JJ
2924 if (fn
2925 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
2926 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
2927 && call_expr_nargs (call) == 2)
2928 {
2929 tree arg = CALL_EXPR_ARG (call, 0);
2930 if (arg)
2931 {
2932 if (TREE_CODE (arg) == NOP_EXPR
2933 && TREE_TYPE (arg) == TREE_TYPE (call))
2934 arg = TREE_OPERAND (arg, 0);
d53c73e0
JJ
2935 if (truth_value_p (TREE_CODE (arg)))
2936 {
2937 arg = gimple_boolify (arg);
2938 CALL_EXPR_ARG (call, 0)
2939 = fold_convert_loc (loc, TREE_TYPE (call), arg);
2940 }
554cf330
JJ
2941 }
2942 }
2943 }
2944
6de9cd9a
DN
2945 switch (TREE_CODE (expr))
2946 {
2947 case TRUTH_AND_EXPR:
2948 case TRUTH_OR_EXPR:
2949 case TRUTH_XOR_EXPR:
2950 case TRUTH_ANDIF_EXPR:
2951 case TRUTH_ORIF_EXPR:
2952 /* Also boolify the arguments of truth exprs. */
2953 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2954 /* FALLTHRU */
2955
2956 case TRUTH_NOT_EXPR:
2957 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
6de9cd9a 2958
6de9cd9a 2959 /* These expressions always produce boolean results. */
7f3ff782
KT
2960 if (TREE_CODE (type) != BOOLEAN_TYPE)
2961 TREE_TYPE (expr) = boolean_type_node;
6de9cd9a 2962 return expr;
d3147f64 2963
8170608b 2964 case ANNOTATE_EXPR:
718c4601 2965 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
8170608b 2966 {
718c4601
EB
2967 case annot_expr_ivdep_kind:
2968 case annot_expr_no_vector_kind:
2969 case annot_expr_vector_kind:
8170608b
TB
2970 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2971 if (TREE_CODE (type) != BOOLEAN_TYPE)
2972 TREE_TYPE (expr) = boolean_type_node;
2973 return expr;
718c4601
EB
2974 default:
2975 gcc_unreachable ();
8170608b 2976 }
8170608b 2977
6de9cd9a 2978 default:
7f3ff782
KT
2979 if (COMPARISON_CLASS_P (expr))
2980 {
2981 /* There expressions always prduce boolean results. */
2982 if (TREE_CODE (type) != BOOLEAN_TYPE)
2983 TREE_TYPE (expr) = boolean_type_node;
2984 return expr;
2985 }
6de9cd9a
DN
2986 /* Other expressions that get here must have boolean values, but
2987 might need to be converted to the appropriate mode. */
7f3ff782 2988 if (TREE_CODE (type) == BOOLEAN_TYPE)
1d15f620 2989 return expr;
db3927fb 2990 return fold_convert_loc (loc, boolean_type_node, expr);
6de9cd9a
DN
2991 }
2992}
2993
aea74440
JJ
2994/* Given a conditional expression *EXPR_P without side effects, gimplify
2995 its operands. New statements are inserted to PRE_P. */
2996
2997static enum gimplify_status
726a989a 2998gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
aea74440
JJ
2999{
3000 tree expr = *expr_p, cond;
3001 enum gimplify_status ret, tret;
3002 enum tree_code code;
3003
3004 cond = gimple_boolify (COND_EXPR_COND (expr));
3005
3006 /* We need to handle && and || specially, as their gimplification
3007 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
3008 code = TREE_CODE (cond);
3009 if (code == TRUTH_ANDIF_EXPR)
3010 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3011 else if (code == TRUTH_ORIF_EXPR)
3012 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
726a989a 3013 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
aea74440
JJ
3014 COND_EXPR_COND (*expr_p) = cond;
3015
3016 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3017 is_gimple_val, fb_rvalue);
3018 ret = MIN (ret, tret);
3019 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3020 is_gimple_val, fb_rvalue);
3021
3022 return MIN (ret, tret);
3023}
3024
ad19c4be 3025/* Return true if evaluating EXPR could trap.
aea74440
JJ
3026 EXPR is GENERIC, while tree_could_trap_p can be called
3027 only on GIMPLE. */
3028
3029static bool
3030generic_expr_could_trap_p (tree expr)
3031{
3032 unsigned i, n;
3033
3034 if (!expr || is_gimple_val (expr))
3035 return false;
3036
3037 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3038 return true;
3039
3040 n = TREE_OPERAND_LENGTH (expr);
3041 for (i = 0; i < n; i++)
3042 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3043 return true;
3044
3045 return false;
3046}
3047
/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
   into

    if (p)			if (p)
      t1 = a;			  a;
    else		or	else
      t1 = b;			  b;
    t1;

   The second form is used when *EXPR_P is of type void.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.  FALLBACK indicates which kinds of values
   (rvalue/lvalue) are acceptable as the result.  */

static enum gimplify_status
gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);
  tree tmp, arm1, arm2;
  enum gimplify_status ret;
  tree label_true, label_false, label_cont;
  bool have_then_clause_p, have_else_clause_p;
  gcond *cond_stmt;
  enum tree_code pred_code;
  gimple_seq seq = NULL;

  /* If this COND_EXPR has a value, copy the values into a temporary within
     the arms.  */
  if (!VOID_TYPE_P (type))
    {
      tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
      tree result;

      /* If either an rvalue is ok or we do not require an lvalue, create the
	 temporary.  But we cannot do that if the type is addressable.  */
      if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
	  && !TREE_ADDRESSABLE (type))
	{
	  if (gimplify_ctxp->allow_rhs_cond_expr
	      /* If either branch has side effects or could trap, it can't be
		 evaluated unconditionally.  */
	      && !TREE_SIDE_EFFECTS (then_)
	      && !generic_expr_could_trap_p (then_)
	      && !TREE_SIDE_EFFECTS (else_)
	      && !generic_expr_could_trap_p (else_))
	    return gimplify_pure_cond_expr (expr_p, pre_p);

	  tmp = create_tmp_var (type, "iftmp");
	  result = tmp;
	}

      /* Otherwise, only create and copy references to the values.  */
      else
	{
	  type = build_pointer_type (type);

	  if (!VOID_TYPE_P (TREE_TYPE (then_)))
	    then_ = build_fold_addr_expr_loc (loc, then_);

	  if (!VOID_TYPE_P (TREE_TYPE (else_)))
	    else_ = build_fold_addr_expr_loc (loc, else_);

	  expr
	    = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);

	  tmp = create_tmp_var (type, "iftmp");
	  result = build_simple_mem_ref_loc (loc, tmp);
	}

      /* Build the new then clause, `tmp = then_;'.  But don't build the
	 assignment if the value is void; in C++ it can be if it's a throw.  */
      if (!VOID_TYPE_P (TREE_TYPE (then_)))
	TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);

      /* Similarly, build the new else clause, `tmp = else_;'.  */
      if (!VOID_TYPE_P (TREE_TYPE (else_)))
	TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);

      TREE_TYPE (expr) = void_type_node;
      recalculate_side_effects (expr);

      /* Move the COND_EXPR to the prequeue.  */
      gimplify_stmt (&expr, pre_p);

      *expr_p = result;
      return GS_ALL_DONE;
    }

  /* Remove any COMPOUND_EXPR so the following cases will be caught.  */
  STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
    gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);

  /* Make sure the condition has BOOLEAN_TYPE.  */
  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

  /* Break apart && and || conditions.  */
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
      || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
    {
      expr = shortcut_cond_expr (expr);

      if (expr != *expr_p)
	{
	  *expr_p = expr;

	  /* We can't rely on gimplify_expr to re-gimplify the expanded
	     form properly, as cleanups might cause the target labels to be
	     wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
	     set up a conditional context.  */
	  gimple_push_condition ();
	  gimplify_stmt (expr_p, &seq);
	  gimple_pop_condition (pre_p);
	  gimple_seq_add_seq (pre_p, seq);

	  return GS_ALL_DONE;
	}
    }

  /* Now do the normal gimplification.  */

  /* Gimplify condition.  */
  ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
		       fb_rvalue);
  if (ret == GS_ERROR)
    return GS_ERROR;
  gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);

  gimple_push_condition ();

  /* If an arm is just a goto to a local label, reuse that label directly
     in the GIMPLE_COND instead of emitting a fresh label plus a jump.  */
  have_then_clause_p = have_else_clause_p = false;
  if (TREE_OPERAND (expr, 1) != NULL
      && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
	  == current_function_decl)
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
    {
      label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
      have_then_clause_p = true;
    }
  else
    label_true = create_artificial_label (UNKNOWN_LOCATION);
  if (TREE_OPERAND (expr, 2) != NULL
      && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
	  == current_function_decl)
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
    {
      label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
      have_else_clause_p = true;
    }
  else
    label_false = create_artificial_label (UNKNOWN_LOCATION);

  gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
				 &arm2);
  cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
				 label_false);
  gimplify_seq_add_stmt (&seq, cond_stmt);
  gimple_stmt_iterator gsi = gsi_last (seq);
  maybe_fold_stmt (&gsi);

  label_cont = NULL_TREE;
  if (!have_then_clause_p)
    {
      /* For if (...) {} else { code; } put label_true after
	 the else block.  */
      if (TREE_OPERAND (expr, 1) == NULL_TREE
	  && !have_else_clause_p
	  && TREE_OPERAND (expr, 2) != NULL_TREE)
	label_cont = label_true;
      else
	{
	  gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
	  have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
	  /* For if (...) { code; } else {} or
	     if (...) { code; } else goto label; or
	     if (...) { code; return; } else { ... }
	     label_cont isn't needed.  */
	  if (!have_else_clause_p
	      && TREE_OPERAND (expr, 2) != NULL_TREE
	      && gimple_seq_may_fallthru (seq))
	    {
	      gimple *g;
	      label_cont = create_artificial_label (UNKNOWN_LOCATION);

	      g = gimple_build_goto (label_cont);

	      /* GIMPLE_COND's are very low level; they have embedded
		 gotos.  This particular embedded goto should not be marked
		 with the location of the original COND_EXPR, as it would
		 correspond to the COND_EXPR's condition, not the ELSE or the
		 THEN arms.  To avoid marking it with the wrong location, flag
		 it as "no location".  */
	      gimple_set_do_not_emit_location (g);

	      gimplify_seq_add_stmt (&seq, g);
	    }
	}
    }
  if (!have_else_clause_p)
    {
      gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
      have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
    }
  if (label_cont)
    gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));

  gimple_pop_condition (pre_p);
  gimple_seq_add_seq (pre_p, seq);

  if (ret == GS_ERROR)
    ; /* Do nothing.  */
  else if (have_then_clause_p || have_else_clause_p)
    ret = GS_ALL_DONE;
  else
    {
      /* Both arms are empty; replace the COND_EXPR with its predicate.  */
      expr = TREE_OPERAND (expr, 0);
      gimplify_stmt (&expr, pre_p);
    }

  *expr_p = NULL;
  return ret;
}
3289
f76d6e6f
EB
3290/* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3291 to be marked addressable.
3292
3293 We cannot rely on such an expression being directly markable if a temporary
3294 has been created by the gimplification. In this case, we create another
3295 temporary and initialize it with a copy, which will become a store after we
3296 mark it addressable. This can happen if the front-end passed us something
3297 that it could not mark addressable yet, like a Fortran pass-by-reference
3298 parameter (int) floatvar. */
3299
3300static void
3301prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3302{
3303 while (handled_component_p (*expr_p))
3304 expr_p = &TREE_OPERAND (*expr_p, 0);
3305 if (is_gimple_reg (*expr_p))
947ca6a0
RB
3306 {
3307 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3308 DECL_GIMPLE_REG_P (var) = 0;
3309 *expr_p = var;
3310 }
f76d6e6f
EB
3311}
3312
726a989a
RB
3313/* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3314 a call to __builtin_memcpy. */
3315
3316static enum gimplify_status
3317gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3318 gimple_seq *seq_p)
26d44ae2 3319{
5039610b 3320 tree t, to, to_ptr, from, from_ptr;
538dd0b7 3321 gcall *gs;
db3927fb 3322 location_t loc = EXPR_LOCATION (*expr_p);
26d44ae2 3323
726a989a
RB
3324 to = TREE_OPERAND (*expr_p, 0);
3325 from = TREE_OPERAND (*expr_p, 1);
26d44ae2 3326
f76d6e6f
EB
3327 /* Mark the RHS addressable. Beware that it may not be possible to do so
3328 directly if a temporary has been created by the gimplification. */
3329 prepare_gimple_addressable (&from, seq_p);
3330
628c189e 3331 mark_addressable (from);
db3927fb
AH
3332 from_ptr = build_fold_addr_expr_loc (loc, from);
3333 gimplify_arg (&from_ptr, seq_p, loc);
26d44ae2 3334
628c189e 3335 mark_addressable (to);
db3927fb
AH
3336 to_ptr = build_fold_addr_expr_loc (loc, to);
3337 gimplify_arg (&to_ptr, seq_p, loc);
726a989a 3338
e79983f4 3339 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
726a989a
RB
3340
3341 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
26d44ae2
RH
3342
3343 if (want_value)
3344 {
726a989a 3345 /* tmp = memcpy() */
b731b390 3346 t = create_tmp_var (TREE_TYPE (to_ptr));
726a989a
RB
3347 gimple_call_set_lhs (gs, t);
3348 gimplify_seq_add_stmt (seq_p, gs);
3349
70f34814 3350 *expr_p = build_simple_mem_ref (t);
726a989a 3351 return GS_ALL_DONE;
26d44ae2
RH
3352 }
3353
726a989a
RB
3354 gimplify_seq_add_stmt (seq_p, gs);
3355 *expr_p = NULL;
3356 return GS_ALL_DONE;
26d44ae2
RH
3357}
3358
3359/* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3360 a call to __builtin_memset. In this case we know that the RHS is
3361 a CONSTRUCTOR with an empty element list. */
3362
3363static enum gimplify_status
726a989a
RB
3364gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3365 gimple_seq *seq_p)
26d44ae2 3366{
1a13360e 3367 tree t, from, to, to_ptr;
538dd0b7 3368 gcall *gs;
db3927fb 3369 location_t loc = EXPR_LOCATION (*expr_p);
26d44ae2 3370
1a13360e
OH
3371 /* Assert our assumptions, to abort instead of producing wrong code
3372 silently if they are not met. Beware that the RHS CONSTRUCTOR might
3373 not be immediately exposed. */
b8698a0f 3374 from = TREE_OPERAND (*expr_p, 1);
1a13360e
OH
3375 if (TREE_CODE (from) == WITH_SIZE_EXPR)
3376 from = TREE_OPERAND (from, 0);
3377
3378 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
9771b263 3379 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
1a13360e
OH
3380
3381 /* Now proceed. */
726a989a 3382 to = TREE_OPERAND (*expr_p, 0);
26d44ae2 3383
db3927fb
AH
3384 to_ptr = build_fold_addr_expr_loc (loc, to);
3385 gimplify_arg (&to_ptr, seq_p, loc);
e79983f4 3386 t = builtin_decl_implicit (BUILT_IN_MEMSET);
726a989a
RB
3387
3388 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
26d44ae2
RH
3389
3390 if (want_value)
3391 {
726a989a 3392 /* tmp = memset() */
b731b390 3393 t = create_tmp_var (TREE_TYPE (to_ptr));
726a989a
RB
3394 gimple_call_set_lhs (gs, t);
3395 gimplify_seq_add_stmt (seq_p, gs);
3396
3397 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3398 return GS_ALL_DONE;
26d44ae2
RH
3399 }
3400
726a989a
RB
3401 gimplify_seq_add_stmt (seq_p, gs);
3402 *expr_p = NULL;
3403 return GS_ALL_DONE;
26d44ae2
RH
3404}
3405
/* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
   determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
   assignment.  Return non-null if we detect a potential overlap.  */

/* Context passed through walk_tree describing the lhs being assigned.  */

struct gimplify_init_ctor_preeval_data
{
  /* The base decl of the lhs object.  May be NULL, in which case we
     have to assume the lhs is indirect.  */
  tree lhs_base_decl;

  /* The alias set of the lhs object.  */
  alias_set_type lhs_alias_set;
};
3419
/* walk_tree callback: return *TP if it could overlap the lhs described
   by XDATA (a gimplify_init_ctor_preeval_data), NULL otherwise.  */

static tree
gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
{
  struct gimplify_init_ctor_preeval_data *data
    = (struct gimplify_init_ctor_preeval_data *) xdata;
  tree t = *tp;

  /* If we find the base object, obviously we have overlap.  */
  if (data->lhs_base_decl == t)
    return t;

  /* If the constructor component is indirect, determine if we have a
     potential overlap with the lhs.  The only bits of information we
     have to go on at this point are addressability and alias sets.  */
  if ((INDIRECT_REF_P (t)
       || TREE_CODE (t) == MEM_REF)
      && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
      && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
    return t;

  /* If the constructor component is a call, determine if it can hide a
     potential overlap with the lhs through an INDIRECT_REF like above.
     ??? Ugh - this is completely broken.  In fact this whole analysis
     doesn't look conservative.  */
  if (TREE_CODE (t) == CALL_EXPR)
    {
      tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));

      /* Check every pointer parameter type of the callee against the
	 lhs alias set.  */
      for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
	if (POINTER_TYPE_P (TREE_VALUE (type))
	    && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
	    && alias_sets_conflict_p (data->lhs_alias_set,
				      get_alias_set
					(TREE_TYPE (TREE_VALUE (type)))))
	  return t;
    }

  /* Types and decls have no subtrees worth walking.  */
  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  return NULL;
}
3461
/* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
   force values that overlap with the lhs (as described by *DATA)
   into temporaries.  New statements go to PRE_P/POST_P.  */

static void
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			    struct gimplify_init_ctor_preeval_data *data)
{
  enum gimplify_status one;

  /* If the value is constant, then there's nothing to pre-evaluate.  */
  if (TREE_CONSTANT (*expr_p))
    {
      /* Ensure it does not have side effects, it might contain a reference to
	 the object we're initializing.  */
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
      return;
    }

  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
    return;

  /* Recurse for nested constructors.  */
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT ix;
      constructor_elt *ce;
      vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);

      FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
	gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);

      return;
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (expr_p);

  /* Gimplify the constructor element to something appropriate for the rhs
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
     the gimplifier will consider this a store to memory.  Doing this
     gimplification now means that we won't have to deal with complicated
     language-specific trees, nor trees like SAVE_EXPR that can induce
     exponential search behavior.  */
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
  if (one == GS_ERROR)
    {
      *expr_p = NULL;
      return;
    }

  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
     always be true for all scalars, since is_gimple_mem_rhs insists on a
     temporary variable for them.  */
  if (DECL_P (*expr_p))
    return;

  /* If this is of variable size, we have no choice but to assume it doesn't
     overlap since we can't make a temporary for it.  */
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
    return;

  /* Otherwise, we must search for overlap ...  */
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
    return;

  /* ... and if found, force the value into a temporary.  */
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
}
3533
6fa91b48
SB
/* A subroutine of gimplify_init_ctor_eval.  Create a loop for
   a RANGE_EXPR in a CONSTRUCTOR for an array.

   OBJECT is the array being initialized, LOWER and UPPER are the
   (inclusive) bounds taken from the RANGE_EXPR index, VALUE is the
   value stored into each element, ARRAY_ELT_TYPE is the element type
   used for the ARRAY_REF, and CLEARED is passed through to
   gimplify_init_ctor_eval for nested constructors.  The emitted
   GIMPLE has the shape:

      var = lower;
    loop_entry:
      object[var] = value;
      if (var == upper)
	goto loop_exit;
      var = var + 1;
      goto loop_entry;
    loop_exit:

   We increment var _after_ the loop exit check because we might otherwise
   fail if upper == TYPE_MAX_VALUE (type for upper).

   Note that we never have to deal with SAVE_EXPRs here, because this has
   already been taken care of for us, in gimplify_init_ctor_preeval().  */

/* Forward declaration: the range loop and element evaluation are
   mutually recursive when a range's VALUE is itself a CONSTRUCTOR.  */
static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
				     gimple_seq *, bool);

static void
gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
			       tree value, tree array_elt_type,
			       gimple_seq *pre_p, bool cleared)
{
  tree loop_entry_label, loop_exit_label, fall_thru_label;
  tree var, var_type, cref, tmp;

  loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
  loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
  fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);

  /* Create and initialize the index variable.  */
  var_type = TREE_TYPE (upper);
  var = create_tmp_var (var_type);
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));

  /* Add the loop entry label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));

  /* Build the reference.  */
  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
		 var, NULL_TREE, NULL_TREE);

  /* If we are a constructor, just call gimplify_init_ctor_eval to do
     the store.  Otherwise just assign value to the reference.  */

  if (TREE_CODE (value) == CONSTRUCTOR)
    /* NB we might have to call ourself recursively through
       gimplify_init_ctor_eval if the value is a constructor.  */
    gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
			     pre_p, cleared);
  else
    gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));

  /* We exit the loop when the index var is equal to the upper bound.  */
  gimplify_seq_add_stmt (pre_p,
			 gimple_build_cond (EQ_EXPR, var, upper,
					    loop_exit_label, fall_thru_label));

  gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));

  /* Otherwise, increment the index var...  */
  tmp = build2 (PLUS_EXPR, var_type, var,
		fold_convert (var_type, integer_one_node));
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));

  /* ...and jump back to the loop entry.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));

  /* Add the loop exit label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
}
3608
292a398f 3609/* Return true if FDECL is accessing a field that is zero sized. */
b8698a0f 3610
292a398f 3611static bool
22ea9ec0 3612zero_sized_field_decl (const_tree fdecl)
292a398f 3613{
b8698a0f 3614 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
292a398f
DB
3615 && integer_zerop (DECL_SIZE (fdecl)))
3616 return true;
3617 return false;
3618}
3619
d06526b7 3620/* Return true if TYPE is zero sized. */
b8698a0f 3621
d06526b7 3622static bool
22ea9ec0 3623zero_sized_type (const_tree type)
d06526b7
AP
3624{
3625 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3626 && integer_zerop (TYPE_SIZE (type)))
3627 return true;
3628 return false;
3629}
3630
57d1dd87
RH
/* A subroutine of gimplify_init_constructor.  Generate individual
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
   zeroed first.  The generated statements are appended to PRE_P.  */

static void
gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
			 gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  /* For arrays, remember the element type so ARRAY_REFs below get the
     main variant rather than a possibly qualified variant.  */
  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
	continue;

      /* Zero stores are redundant once the whole object was cleared.  */
      if (cleared && initializer_zerop (value))
	continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
	 so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
	 happen with calls to functions returning a zero-sized type, which
	 we shouldn't discard.  As a number of downstream passes don't
	 expect sets of zero-sized fields, we rely on the gimplification of
	 the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
	continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
	 whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lower = TREE_OPERAND (purpose, 0);
	  tree upper = TREE_OPERAND (purpose, 1);

	  /* If the lower bound is equal to upper, just treat it as if
	     upper was the index.  */
	  if (simple_cst_equal (lower, upper))
	    purpose = upper;
	  else
	    {
	      gimplify_init_ctor_eval_range (object, lower, upper, value,
					     array_elt_type, pre_p, cleared);
	      continue;
	    }
	}

      if (array_elt_type)
	{
	  /* Do not use bitsizetype for ARRAY_REF indices.  */
	  if (TYPE_DOMAIN (TREE_TYPE (object)))
	    purpose
	      = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
			      purpose);
	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
			 purpose, NULL_TREE, NULL_TREE);
	}
      else
	{
	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
			 unshare_expr (object), purpose, NULL_TREE);
	}

      /* Recurse into nested CONSTRUCTORs, except for vectors, which are
	 kept as CONSTRUCTOR rvalues throughout gimplification.  */
      if (TREE_CODE (value) == CONSTRUCTOR
	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
				 pre_p, cleared);
      else
	{
	  /* Emit "cref = value" and free the scratch INIT_EXPR node once
	     gimplify_and_add has consumed it.  */
	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
	  gimplify_and_add (init, pre_p);
	  ggc_free (init);
	}
    }
}
3719
ad19c4be 3720/* Return the appropriate RHS predicate for this LHS. */
726a989a 3721
18f429e2 3722gimple_predicate
726a989a
RB
3723rhs_predicate_for (tree lhs)
3724{
ba4d8f9d
RG
3725 if (is_gimple_reg (lhs))
3726 return is_gimple_reg_rhs_or_call;
726a989a 3727 else
ba4d8f9d 3728 return is_gimple_mem_rhs_or_call;
726a989a
RB
3729}
3730
2ec5deb5
PB
/* Gimplify a C99 compound literal expression.  This just means adding
   the DECL_EXPR before the current statement and using its anonymous
   decl instead.

   *EXPR_P is the COMPOUND_LITERAL_EXPR; statements are emitted to
   PRE_P.  GIMPLE_TEST_F and FALLBACK describe what the caller will
   accept, and let us substitute the literal's initializer directly
   when no lvalue is required.  Returns GS_OK.  */

static enum gimplify_status
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
				bool (*gimple_test_f) (tree),
				fallback_t fallback)
{
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
  tree decl = DECL_EXPR_DECL (decl_s);
  tree init = DECL_INITIAL (decl);
  /* Mark the decl as addressable if the compound literal
     expression is addressable now, otherwise it is marked too late
     after we gimplify the initialization expression.  */
  if (TREE_ADDRESSABLE (*expr_p))
    TREE_ADDRESSABLE (decl) = 1;
  /* Otherwise, if we don't need an lvalue and have a literal directly
     substitute it.  Check if it matches the gimple predicate, as
     otherwise we'd generate a new temporary, and we can as well just
     use the decl we already have.  */
  else if (!TREE_ADDRESSABLE (decl)
	   && init
	   && (fallback & fb_lvalue) == 0
	   && gimple_test_f (init))
    {
      *expr_p = init;
      return GS_OK;
    }

  /* Preliminarily mark non-addressed complex variables as eligible
     for promotion to gimple registers.  We'll transform their uses
     as we find them.  */
  if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (decl)
      && !needs_to_live_in_memory (decl))
    DECL_GIMPLE_REG_P (decl) = 1;

  /* If the decl is not addressable, then it is being used in some
     expression or on the right hand side of a statement, and it can
     be put into a readonly data section.  */
  if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
    TREE_READONLY (decl) = 1;

  /* This decl isn't mentioned in the enclosing block, so add it to the
     list of temps.  FIXME it seems a bit of a kludge to say that
     anonymous artificial vars aren't pushed, but everything else is.  */
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
    gimple_add_tmp_var (decl);

  /* Emit the DECL_EXPR (which gimplifies the initializer) and replace
     the compound literal by its decl.  */
  gimplify_and_add (decl_s, pre_p);
  *expr_p = decl;
  return GS_OK;
}
3786
/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
   return a new CONSTRUCTOR if something changed.  Non-addressable
   compound literals whose initializer is itself a CONSTRUCTOR are
   replaced by that (recursively optimized) initializer.  The input
   CONSTRUCTOR is never modified; a copy is made lazily on the first
   replacement (copy-on-write), so callers may share ORIG_CTOR.  */

static tree
optimize_compound_literals_in_ctor (tree orig_ctor)
{
  tree ctor = orig_ctor;
  vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
  unsigned int idx, num = vec_safe_length (elts);

  for (idx = 0; idx < num; idx++)
    {
      tree value = (*elts)[idx].value;
      tree newval = value;
      if (TREE_CODE (value) == CONSTRUCTOR)
	newval = optimize_compound_literals_in_ctor (value);
      else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
	{
	  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
	  tree decl = DECL_EXPR_DECL (decl_s);
	  tree init = DECL_INITIAL (decl);

	  /* Only substitute when neither the literal nor its decl is
	     addressable, so dropping the decl cannot change semantics.  */
	  if (!TREE_ADDRESSABLE (value)
	      && !TREE_ADDRESSABLE (decl)
	      && init
	      && TREE_CODE (init) == CONSTRUCTOR)
	    newval = optimize_compound_literals_in_ctor (init);
	}
      if (newval == value)
	continue;

      /* First change: unshare the node and its element vector so the
	 original constructor stays intact.  */
      if (ctor == orig_ctor)
	{
	  ctor = copy_node (orig_ctor);
	  CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
	  elts = CONSTRUCTOR_ELTS (ctor);
	}
      (*elts)[idx].value = newval;
    }
  return ctor;
}
3828
26d44ae2
RH
/* A subroutine of gimplify_modify_expr.  Break out elements of a
   CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.

   Note that we still need to clear any elements that don't have explicit
   initializers, so if not all elements are initialized we keep the
   original MODIFY_EXPR, we just remove all of the constructor elements.

   If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
   GS_ERROR if we would have to create a temporary when gimplifying
   this constructor.  Otherwise, return GS_OK.

   If NOTIFY_TEMP_CREATION is false, just do the gimplification.

   *EXPR_P is the assignment whose RHS is a CONSTRUCTOR; statements go
   to PRE_P/POST_P.  If WANT_VALUE, *EXPR_P is replaced by the LHS
   object so the caller can use the assigned value.  */

static enum gimplify_status
gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			   bool want_value, bool notify_temp_creation)
{
  tree object, ctor, type;
  enum gimplify_status ret;
  vec<constructor_elt, va_gc> *elts;

  gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);

  /* Gimplify the LHS first unless we are only probing for temporaries.  */
  if (!notify_temp_creation)
    {
      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  object = TREE_OPERAND (*expr_p, 0);
  ctor = TREE_OPERAND (*expr_p, 1) =
    optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
  type = TREE_TYPE (ctor);
  elts = CONSTRUCTOR_ELTS (ctor);
  ret = GS_ALL_DONE;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case ARRAY_TYPE:
      {
	struct gimplify_init_ctor_preeval_data preeval_data;
	HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
	bool cleared, complete_p, valid_const_initializer;

	/* Aggregate types must lower constructors to initialization of
	   individual elements.  The exception is that a CONSTRUCTOR node
	   with no elements indicates zero-initialization of the whole.  */
	if (vec_safe_is_empty (elts))
	  {
	    if (notify_temp_creation)
	      return GS_OK;
	    break;
	  }

	/* Fetch information about the constructor to direct later processing.
	   We might want to make static versions of it in various cases, and
	   can only do so if it known to be a valid constant initializer.  */
	valid_const_initializer
	  = categorize_ctor_elements (ctor, &num_nonzero_elements,
				      &num_ctor_elements, &complete_p);

	/* If a const aggregate variable is being initialized, then it
	   should never be a lose to promote the variable to be static.  */
	if (valid_const_initializer
	    && num_nonzero_elements > 1
	    && TREE_READONLY (object)
	    && TREE_CODE (object) == VAR_DECL
	    && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
	  {
	    if (notify_temp_creation)
	      return GS_ERROR;
	    DECL_INITIAL (object) = ctor;
	    TREE_STATIC (object) = 1;
	    if (!DECL_NAME (object))
	      DECL_NAME (object) = create_tmp_var_name ("C");
	    walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);

	    /* ??? C++ doesn't automatically append a .<number> to the
	       assembler name, and even when it does, it looks at FE private
	       data structures to figure out what that number should be,
	       which are not set for this variable.  I suppose this is
	       important for local statics for inline functions, which aren't
	       "local" in the object file sense.  So in order to get a unique
	       TU-local symbol, we must invoke the lhd version now.  */
	    lhd_set_decl_assembler_name (object);

	    *expr_p = NULL_TREE;
	    break;
	  }

	/* If there are "lots" of initialized elements, even discounting
	   those that are not address constants (and thus *must* be
	   computed at runtime), then partition the constructor into
	   constant and non-constant parts.  Block copy the constant
	   parts in, then generate code for the non-constant parts.  */
	/* TODO.  There's code in cp/typeck.c to do this.  */

	if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
	  /* store_constructor will ignore the clearing of variable-sized
	     objects.  Initializers for such objects must explicitly set
	     every field that needs to be set.  */
	  cleared = false;
	else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
	  /* If the constructor isn't complete, clear the whole object
	     beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.

	     ??? This ought not to be needed.  For any element not present
	     in the initializer, we should simply set them to zero.  Except
	     we'd need to *find* the elements that are not present, and that
	     requires trickery to avoid quadratic compile-time behavior in
	     large cases or excessive memory use in small cases.  */
	  cleared = true;
	else if (num_ctor_elements - num_nonzero_elements
		 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
		 && num_nonzero_elements < num_ctor_elements / 4)
	  /* If there are "lots" of zeros, it's more efficient to clear
	     the memory and then set the nonzero elements.  */
	  cleared = true;
	else
	  cleared = false;

	/* If there are "lots" of initialized elements, and all of them
	   are valid address constants, then the entire initializer can
	   be dropped to memory, and then memcpy'd out.  Don't do this
	   for sparse arrays, though, as it's more efficient to follow
	   the standard CONSTRUCTOR behavior of memset followed by
	   individual element initialization.  Also don't do this for small
	   all-zero initializers (which aren't big enough to merit
	   clearing), and don't try to make bitwise copies of
	   TREE_ADDRESSABLE types.

	   We cannot apply such transformation when compiling chkp static
	   initializer because creation of initializer image in the memory
	   will require static initialization of bounds for it.  It should
	   result in another gimplification of similar initializer and we
	   may fall into infinite loop.  */
	if (valid_const_initializer
	    && !(cleared || num_nonzero_elements == 0)
	    && !TREE_ADDRESSABLE (type)
	    && (!current_function_decl
		|| !lookup_attribute ("chkp ctor",
				      DECL_ATTRIBUTES (current_function_decl))))
	  {
	    HOST_WIDE_INT size = int_size_in_bytes (type);
	    unsigned int align;

	    /* ??? We can still get unbounded array types, at least
	       from the C++ front end.  This seems wrong, but attempt
	       to work around it for now.  */
	    if (size < 0)
	      {
		size = int_size_in_bytes (TREE_TYPE (object));
		if (size >= 0)
		  TREE_TYPE (ctor) = type = TREE_TYPE (object);
	      }

	    /* Find the maximum alignment we can assume for the object.  */
	    /* ??? Make use of DECL_OFFSET_ALIGN.  */
	    if (DECL_P (object))
	      align = DECL_ALIGN (object);
	    else
	      align = TYPE_ALIGN (type);

	    /* Do a block move either if the size is so small as to make
	       each individual move a sub-unit move on average, or if it
	       is so large as to make individual moves inefficient.  */
	    if (size > 0
		&& num_nonzero_elements > 1
		&& (size < num_nonzero_elements
		    || !can_move_by_pieces (size, align)))
	      {
		if (notify_temp_creation)
		  return GS_ERROR;

		walk_tree (&ctor, force_labels_r, NULL, NULL);
		ctor = tree_output_constant_def (ctor);
		if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
		  ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
		TREE_OPERAND (*expr_p, 1) = ctor;

		/* This is no longer an assignment of a CONSTRUCTOR, but
		   we still may have processing to do on the LHS.  So
		   pretend we didn't do anything here to let that happen.  */
		return GS_UNHANDLED;
	      }
	  }

	/* If the target is volatile, we have non-zero elements and more than
	   one field to assign, initialize the target from a temporary.  */
	if (TREE_THIS_VOLATILE (object)
	    && !TREE_ADDRESSABLE (type)
	    && num_nonzero_elements > 0
	    && vec_safe_length (elts) > 1)
	  {
	    tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
	    TREE_OPERAND (*expr_p, 0) = temp;
	    *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
			      *expr_p,
			      build2 (MODIFY_EXPR, void_type_node,
				      object, temp));
	    return GS_OK;
	  }

	if (notify_temp_creation)
	  return GS_OK;

	/* If there are nonzero elements and if needed, pre-evaluate to capture
	   elements overlapping with the lhs into temporaries.  We must do this
	   before clearing to fetch the values before they are zeroed-out.  */
	if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
	  {
	    preeval_data.lhs_base_decl = get_base_address (object);
	    if (!DECL_P (preeval_data.lhs_base_decl))
	      preeval_data.lhs_base_decl = NULL;
	    preeval_data.lhs_alias_set = get_alias_set (object);

	    gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
					pre_p, post_p, &preeval_data);
	  }

	/* Record side effects before the element list may be zapped.  */
	bool ctor_has_side_effects_p
	  = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));

	if (cleared)
	  {
	    /* Zap the CONSTRUCTOR element list, which simplifies this case.
	       Note that we still have to gimplify, in order to handle the
	       case of variable sized types.  Avoid shared tree structures.  */
	    CONSTRUCTOR_ELTS (ctor) = NULL;
	    TREE_SIDE_EFFECTS (ctor) = 0;
	    object = unshare_expr (object);
	    gimplify_stmt (expr_p, pre_p);
	  }

	/* If we have not block cleared the object, or if there are nonzero
	   elements in the constructor, or if the constructor has side effects,
	   add assignments to the individual scalar fields of the object.  */
	if (!cleared
	    || num_nonzero_elements > 0
	    || ctor_has_side_effects_p)
	  gimplify_init_ctor_eval (object, elts, pre_p, cleared);

	*expr_p = NULL_TREE;
      }
      break;

    case COMPLEX_TYPE:
      {
	tree r, i;

	if (notify_temp_creation)
	  return GS_OK;

	/* Extract the real and imaginary parts out of the ctor.  */
	gcc_assert (elts->length () == 2);
	r = (*elts)[0].value;
	i = (*elts)[1].value;
	if (r == NULL || i == NULL)
	  {
	    /* Missing parts default to zero.  */
	    tree zero = build_zero_cst (TREE_TYPE (type));
	    if (r == NULL)
	      r = zero;
	    if (i == NULL)
	      i = zero;
	  }

	/* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
	   represent creation of a complex value.  */
	if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
	  {
	    ctor = build_complex (type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	  }
	else
	  {
	    ctor = build2 (COMPLEX_EXPR, type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
				 pre_p,
				 post_p,
				 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
				 fb_rvalue);
	  }
      }
      break;

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT ix;
	constructor_elt *ce;

	if (notify_temp_creation)
	  return GS_OK;

	/* Go ahead and simplify constant constructors to VECTOR_CST.  */
	if (TREE_CONSTANT (ctor))
	  {
	    bool constant_p = true;
	    tree value;

	    /* Even when ctor is constant, it might contain non-*_CST
	       elements, such as addresses or trapping values like
	       1.0/0.0 - 1.0/0.0.  Such expressions don't belong
	       in VECTOR_CST nodes.  */
	    FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
	      if (!CONSTANT_CLASS_P (value))
		{
		  constant_p = false;
		  break;
		}

	    if (constant_p)
	      {
		TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
		break;
	      }

	    TREE_CONSTANT (ctor) = 0;
	  }

	/* Vector types use CONSTRUCTOR all the way through gimple
	   compilation as a general initializer.  */
	FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
	  {
	    enum gimplify_status tret;
	    tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
				  fb_rvalue);
	    if (tret == GS_ERROR)
	      ret = GS_ERROR;
	  }
	/* A memory LHS cannot take a CONSTRUCTOR rhs directly; force it
	   into a formal temporary.  */
	if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
      }
      break;

    default:
      /* So how did we get a CONSTRUCTOR for a scalar type?  */
      gcc_unreachable ();
    }

  if (ret == GS_ERROR)
    return GS_ERROR;
  else if (want_value)
    {
      *expr_p = object;
      return GS_OK;
    }
  else
    {
      /* If we have gimplified both sides of the initializer but have
	 not emitted an assignment, do so now.  */
      if (*expr_p)
	{
	  tree lhs = TREE_OPERAND (*expr_p, 0);
	  tree rhs = TREE_OPERAND (*expr_p, 1);
	  gassign *init = gimple_build_assign (lhs, rhs);
	  gimplify_seq_add_stmt (pre_p, init);
	  *expr_p = NULL;
	}

      return GS_ALL_DONE;
    }
}
6de9cd9a 4197
de4af523
JJ
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  This may only be applied to a rhs of an expression.
   Note that the resulting type may be different from the type pointed
   to in the sense that it is still compatible from the langhooks
   point of view.  */

static tree
gimple_fold_indirect_ref_rhs (tree t)
{
  /* Currently a plain wrapper; kept separate to document the
     rhs-only restriction at call sites.  */
  return gimple_fold_indirect_ref (t);
}
4210
4caa08da
AH
4211/* Subroutine of gimplify_modify_expr to do simplifications of
4212 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
4213 something changes. */
6de9cd9a 4214
26d44ae2 4215static enum gimplify_status
726a989a
RB
4216gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
4217 gimple_seq *pre_p, gimple_seq *post_p,
4218 bool want_value)
26d44ae2 4219{
6d729f28
JM
4220 enum gimplify_status ret = GS_UNHANDLED;
4221 bool changed;
6de9cd9a 4222
6d729f28
JM
4223 do
4224 {
4225 changed = false;
4226 switch (TREE_CODE (*from_p))
4227 {
4228 case VAR_DECL:
4229 /* If we're assigning from a read-only variable initialized with
4230 a constructor, do the direct assignment from the constructor,
4231 but only if neither source nor target are volatile since this
4232 latter assignment might end up being done on a per-field basis. */
4233 if (DECL_INITIAL (*from_p)
4234 && TREE_READONLY (*from_p)
4235 && !TREE_THIS_VOLATILE (*from_p)
4236 && !TREE_THIS_VOLATILE (*to_p)
4237 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
4238 {
4239 tree old_from = *from_p;
4240 enum gimplify_status subret;
4241
4242 /* Move the constructor into the RHS. */
4243 *from_p = unshare_expr (DECL_INITIAL (*from_p));
4244
4245 /* Let's see if gimplify_init_constructor will need to put
4246 it in memory. */
4247 subret = gimplify_init_constructor (expr_p, NULL, NULL,
4248 false, true);
4249 if (subret == GS_ERROR)
4250 {
4251 /* If so, revert the change. */
4252 *from_p = old_from;
4253 }
4254 else
4255 {
4256 ret = GS_OK;
4257 changed = true;
4258 }
4259 }
4260 break;
4261 case INDIRECT_REF:
4caa08da 4262 {
6d729f28 4263 /* If we have code like
ffed8a01 4264
6d729f28 4265 *(const A*)(A*)&x
ffed8a01 4266
6d729f28
JM
4267 where the type of "x" is a (possibly cv-qualified variant
4268 of "A"), treat the entire expression as identical to "x".
4269 This kind of code arises in C++ when an object is bound
4270 to a const reference, and if "x" is a TARGET_EXPR we want
4271 to take advantage of the optimization below. */
06baaba3 4272 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
6d729f28
JM
4273 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
4274 if (t)
ffed8a01 4275 {
06baaba3
RG
4276 if (TREE_THIS_VOLATILE (t) != volatile_p)
4277 {
3a65ee74 4278 if (DECL_P (t))
06baaba3
RG
4279 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
4280 build_fold_addr_expr (t));
4281 if (REFERENCE_CLASS_P (t))
4282 TREE_THIS_VOLATILE (t) = volatile_p;
4283 }
6d729f28
JM
4284 *from_p = t;
4285 ret = GS_OK;
4286 changed = true;
ffed8a01 4287 }
6d729f28
JM
4288 break;
4289 }
4290
4291 case TARGET_EXPR:
4292 {
4293 /* If we are initializing something from a TARGET_EXPR, strip the
4294 TARGET_EXPR and initialize it directly, if possible. This can't
4295 be done if the initializer is void, since that implies that the
4296 temporary is set in some non-trivial way.
4297
4298 ??? What about code that pulls out the temp and uses it
4299 elsewhere? I think that such code never uses the TARGET_EXPR as
4300 an initializer. If I'm wrong, we'll die because the temp won't
4301 have any RTL. In that case, I guess we'll need to replace
4302 references somehow. */
4303 tree init = TARGET_EXPR_INITIAL (*from_p);
4304
4305 if (init
4306 && !VOID_TYPE_P (TREE_TYPE (init)))
ffed8a01 4307 {
6d729f28 4308 *from_p = init;
ffed8a01 4309 ret = GS_OK;
6d729f28 4310 changed = true;
ffed8a01 4311 }
4caa08da 4312 }
6d729f28 4313 break;
f98625f6 4314
6d729f28
JM
4315 case COMPOUND_EXPR:
4316 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
4317 caught. */
4318 gimplify_compound_expr (from_p, pre_p, true);
4319 ret = GS_OK;
4320 changed = true;
4321 break;
f98625f6 4322
6d729f28 4323 case CONSTRUCTOR:
ce3beba3
JM
4324 /* If we already made some changes, let the front end have a
4325 crack at this before we break it down. */
4326 if (ret != GS_UNHANDLED)
4327 break;
6d729f28
JM
4328 /* If we're initializing from a CONSTRUCTOR, break this into
4329 individual MODIFY_EXPRs. */
4330 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
4331 false);
4332
4333 case COND_EXPR:
4334 /* If we're assigning to a non-register type, push the assignment
4335 down into the branches. This is mandatory for ADDRESSABLE types,
4336 since we cannot generate temporaries for such, but it saves a
4337 copy in other cases as well. */
4338 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
f98625f6 4339 {
6d729f28
JM
4340 /* This code should mirror the code in gimplify_cond_expr. */
4341 enum tree_code code = TREE_CODE (*expr_p);
4342 tree cond = *from_p;
4343 tree result = *to_p;
4344
4345 ret = gimplify_expr (&result, pre_p, post_p,
4346 is_gimple_lvalue, fb_lvalue);
4347 if (ret != GS_ERROR)
4348 ret = GS_OK;
4349
4350 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
4351 TREE_OPERAND (cond, 1)
4352 = build2 (code, void_type_node, result,
4353 TREE_OPERAND (cond, 1));
4354 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
4355 TREE_OPERAND (cond, 2)
4356 = build2 (code, void_type_node, unshare_expr (result),
4357 TREE_OPERAND (cond, 2));
4358
4359 TREE_TYPE (cond) = void_type_node;
4360 recalculate_side_effects (cond);
4361
4362 if (want_value)
4363 {
4364 gimplify_and_add (cond, pre_p);
4365 *expr_p = unshare_expr (result);
4366 }
4367 else
4368 *expr_p = cond;
4369 return ret;
f98625f6 4370 }
f98625f6 4371 break;
f98625f6 4372
6d729f28
JM
4373 case CALL_EXPR:
4374 /* For calls that return in memory, give *to_p as the CALL_EXPR's
4375 return slot so that we don't generate a temporary. */
4376 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4377 && aggregate_value_p (*from_p, *from_p))
26d44ae2 4378 {
6d729f28
JM
4379 bool use_target;
4380
4381 if (!(rhs_predicate_for (*to_p))(*from_p))
4382 /* If we need a temporary, *to_p isn't accurate. */
4383 use_target = false;
ad19c4be 4384 /* It's OK to use the return slot directly unless it's an NRV. */
6d729f28
JM
4385 else if (TREE_CODE (*to_p) == RESULT_DECL
4386 && DECL_NAME (*to_p) == NULL_TREE
4387 && needs_to_live_in_memory (*to_p))
6d729f28
JM
4388 use_target = true;
4389 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4390 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4391 /* Don't force regs into memory. */
4392 use_target = false;
4393 else if (TREE_CODE (*expr_p) == INIT_EXPR)
4394 /* It's OK to use the target directly if it's being
4395 initialized. */
4396 use_target = true;
e6a54b01
EB
4397 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
4398 != INTEGER_CST)
aabb90e5
RG
4399 /* Always use the target and thus RSO for variable-sized types.
4400 GIMPLE cannot deal with a variable-sized assignment
4401 embedded in a call statement. */
4402 use_target = true;
345ae177
AH
4403 else if (TREE_CODE (*to_p) != SSA_NAME
4404 && (!is_gimple_variable (*to_p)
4405 || needs_to_live_in_memory (*to_p)))
6d729f28
JM
4406 /* Don't use the original target if it's already addressable;
4407 if its address escapes, and the called function uses the
4408 NRV optimization, a conforming program could see *to_p
4409 change before the called function returns; see c++/19317.
4410 When optimizing, the return_slot pass marks more functions
4411 as safe after we have escape info. */
4412 use_target = false;
4413 else
4414 use_target = true;
4415
4416 if (use_target)
4417 {
4418 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4419 mark_addressable (*to_p);
4420 }
26d44ae2 4421 }
6d729f28 4422 break;
6de9cd9a 4423
6d729f28
JM
4424 case WITH_SIZE_EXPR:
4425 /* Likewise for calls that return an aggregate of non-constant size,
4426 since we would not be able to generate a temporary at all. */
4427 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
4428 {
4429 *from_p = TREE_OPERAND (*from_p, 0);
ebad5233
JM
4430 /* We don't change ret in this case because the
4431 WITH_SIZE_EXPR might have been added in
4432 gimplify_modify_expr, so returning GS_OK would lead to an
4433 infinite loop. */
6d729f28
JM
4434 changed = true;
4435 }
4436 break;
6de9cd9a 4437
6d729f28
JM
4438 /* If we're initializing from a container, push the initialization
4439 inside it. */
4440 case CLEANUP_POINT_EXPR:
4441 case BIND_EXPR:
4442 case STATEMENT_LIST:
26d44ae2 4443 {
6d729f28
JM
4444 tree wrap = *from_p;
4445 tree t;
dae7ec87 4446
6d729f28
JM
4447 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4448 fb_lvalue);
dae7ec87
JM
4449 if (ret != GS_ERROR)
4450 ret = GS_OK;
4451
6d729f28
JM
4452 t = voidify_wrapper_expr (wrap, *expr_p);
4453 gcc_assert (t == *expr_p);
dae7ec87
JM
4454
4455 if (want_value)
4456 {
6d729f28
JM
4457 gimplify_and_add (wrap, pre_p);
4458 *expr_p = unshare_expr (*to_p);
dae7ec87
JM
4459 }
4460 else
6d729f28
JM
4461 *expr_p = wrap;
4462 return GS_OK;
26d44ae2 4463 }
6de9cd9a 4464
6d729f28 4465 case COMPOUND_LITERAL_EXPR:
fa47911c 4466 {
6d729f28
JM
4467 tree complit = TREE_OPERAND (*expr_p, 1);
4468 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
4469 tree decl = DECL_EXPR_DECL (decl_s);
4470 tree init = DECL_INITIAL (decl);
4471
4472 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
4473 into struct T x = { 0, 1, 2 } if the address of the
4474 compound literal has never been taken. */
4475 if (!TREE_ADDRESSABLE (complit)
4476 && !TREE_ADDRESSABLE (decl)
4477 && init)
fa47911c 4478 {
6d729f28
JM
4479 *expr_p = copy_node (*expr_p);
4480 TREE_OPERAND (*expr_p, 1) = init;
4481 return GS_OK;
fa47911c
JM
4482 }
4483 }
4484
6d729f28
JM
4485 default:
4486 break;
2ec5deb5 4487 }
6d729f28
JM
4488 }
4489 while (changed);
6de9cd9a 4490
6de9cd9a
DN
4491 return ret;
4492}
4493
216820a4
RG
4494
4495/* Return true if T looks like a valid GIMPLE statement. */
4496
4497static bool
4498is_gimple_stmt (tree t)
4499{
4500 const enum tree_code code = TREE_CODE (t);
4501
4502 switch (code)
4503 {
4504 case NOP_EXPR:
4505 /* The only valid NOP_EXPR is the empty statement. */
4506 return IS_EMPTY_STMT (t);
4507
4508 case BIND_EXPR:
4509 case COND_EXPR:
4510 /* These are only valid if they're void. */
4511 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
4512
4513 case SWITCH_EXPR:
4514 case GOTO_EXPR:
4515 case RETURN_EXPR:
4516 case LABEL_EXPR:
4517 case CASE_LABEL_EXPR:
4518 case TRY_CATCH_EXPR:
4519 case TRY_FINALLY_EXPR:
4520 case EH_FILTER_EXPR:
4521 case CATCH_EXPR:
4522 case ASM_EXPR:
4523 case STATEMENT_LIST:
41dbbb37
TS
4524 case OACC_PARALLEL:
4525 case OACC_KERNELS:
4526 case OACC_DATA:
4527 case OACC_HOST_DATA:
4528 case OACC_DECLARE:
4529 case OACC_UPDATE:
4530 case OACC_ENTER_DATA:
4531 case OACC_EXIT_DATA:
4532 case OACC_CACHE:
216820a4
RG
4533 case OMP_PARALLEL:
4534 case OMP_FOR:
74bf76ed 4535 case OMP_SIMD:
c02065fc 4536 case CILK_SIMD:
acf0174b 4537 case OMP_DISTRIBUTE:
41dbbb37 4538 case OACC_LOOP:
216820a4
RG
4539 case OMP_SECTIONS:
4540 case OMP_SECTION:
4541 case OMP_SINGLE:
4542 case OMP_MASTER:
acf0174b 4543 case OMP_TASKGROUP:
216820a4
RG
4544 case OMP_ORDERED:
4545 case OMP_CRITICAL:
4546 case OMP_TASK:
d9a6bd32
JJ
4547 case OMP_TARGET:
4548 case OMP_TARGET_DATA:
4549 case OMP_TARGET_UPDATE:
4550 case OMP_TARGET_ENTER_DATA:
4551 case OMP_TARGET_EXIT_DATA:
4552 case OMP_TASKLOOP:
4553 case OMP_TEAMS:
216820a4
RG
4554 /* These are always void. */
4555 return true;
4556
4557 case CALL_EXPR:
4558 case MODIFY_EXPR:
4559 case PREDICT_EXPR:
4560 /* These are valid regardless of their type. */
4561 return true;
4562
4563 default:
4564 return false;
4565 }
4566}
4567
4568
d9c2d296
AP
4569/* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4570 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
7b7e6ecd
EB
4571 DECL_GIMPLE_REG_P set.
4572
4573 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4574 other, unmodified part of the complex object just before the total store.
4575 As a consequence, if the object is still uninitialized, an undefined value
4576 will be loaded into a register, which may result in a spurious exception
4577 if the register is floating-point and the value happens to be a signaling
4578 NaN for example. Then the fully-fledged complex operations lowering pass
4579 followed by a DCE pass are necessary in order to fix things up. */
d9c2d296
AP
4580
4581static enum gimplify_status
726a989a
RB
4582gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4583 bool want_value)
d9c2d296
AP
4584{
4585 enum tree_code code, ocode;
4586 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4587
726a989a
RB
4588 lhs = TREE_OPERAND (*expr_p, 0);
4589 rhs = TREE_OPERAND (*expr_p, 1);
d9c2d296
AP
4590 code = TREE_CODE (lhs);
4591 lhs = TREE_OPERAND (lhs, 0);
4592
4593 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4594 other = build1 (ocode, TREE_TYPE (rhs), lhs);
8d2b0410 4595 TREE_NO_WARNING (other) = 1;
d9c2d296
AP
4596 other = get_formal_tmp_var (other, pre_p);
4597
4598 realpart = code == REALPART_EXPR ? rhs : other;
4599 imagpart = code == REALPART_EXPR ? other : rhs;
4600
4601 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4602 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4603 else
4604 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4605
726a989a
RB
4606 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4607 *expr_p = (want_value) ? rhs : NULL_TREE;
d9c2d296
AP
4608
4609 return GS_ALL_DONE;
4610}
4611
206048bd 4612/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
6de9cd9a
DN
4613
4614 modify_expr
4615 : varname '=' rhs
4616 | '*' ID '=' rhs
4617
4618 PRE_P points to the list where side effects that must happen before
4619 *EXPR_P should be stored.
4620
4621 POST_P points to the list where side effects that must happen after
4622 *EXPR_P should be stored.
4623
4624 WANT_VALUE is nonzero iff we want to use the value of this expression
4625 in another expression. */
4626
4627static enum gimplify_status
726a989a
RB
4628gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4629 bool want_value)
6de9cd9a 4630{
726a989a
RB
4631 tree *from_p = &TREE_OPERAND (*expr_p, 1);
4632 tree *to_p = &TREE_OPERAND (*expr_p, 0);
44de5aeb 4633 enum gimplify_status ret = GS_UNHANDLED;
355fe088 4634 gimple *assign;
db3927fb 4635 location_t loc = EXPR_LOCATION (*expr_p);
6da8be89 4636 gimple_stmt_iterator gsi;
6de9cd9a 4637
282899df
NS
4638 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4639 || TREE_CODE (*expr_p) == INIT_EXPR);
6de9cd9a 4640
d0ad58f9
JM
4641 /* Trying to simplify a clobber using normal logic doesn't work,
4642 so handle it here. */
4643 if (TREE_CLOBBER_P (*from_p))
4644 {
5d751b0c
JJ
4645 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4646 if (ret == GS_ERROR)
4647 return ret;
4648 gcc_assert (!want_value
4649 && (TREE_CODE (*to_p) == VAR_DECL
4650 || TREE_CODE (*to_p) == MEM_REF));
d0ad58f9
JM
4651 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
4652 *expr_p = NULL;
4653 return GS_ALL_DONE;
4654 }
4655
1b24a790
RG
4656 /* Insert pointer conversions required by the middle-end that are not
4657 required by the frontend. This fixes middle-end type checking for
4658 for example gcc.dg/redecl-6.c. */
daad0278 4659 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
1b24a790
RG
4660 {
4661 STRIP_USELESS_TYPE_CONVERSION (*from_p);
4662 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
db3927fb 4663 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
1b24a790
RG
4664 }
4665
83d7e8f0
JM
4666 /* See if any simplifications can be done based on what the RHS is. */
4667 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4668 want_value);
4669 if (ret != GS_UNHANDLED)
4670 return ret;
4671
4672 /* For zero sized types only gimplify the left hand side and right hand
4673 side as statements and throw away the assignment. Do this after
4674 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4675 types properly. */
753b34d7 4676 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
412f8986 4677 {
726a989a
RB
4678 gimplify_stmt (from_p, pre_p);
4679 gimplify_stmt (to_p, pre_p);
412f8986
AP
4680 *expr_p = NULL_TREE;
4681 return GS_ALL_DONE;
4682 }
6de9cd9a 4683
d25cee4d
RH
4684 /* If the value being copied is of variable width, compute the length
4685 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
4686 before gimplifying any of the operands so that we can resolve any
4687 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
4688 the size of the expression to be copied, not of the destination, so
726a989a 4689 that is what we must do here. */
d25cee4d 4690 maybe_with_size_expr (from_p);
6de9cd9a 4691
44de5aeb
RK
4692 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4693 if (ret == GS_ERROR)
4694 return ret;
6de9cd9a 4695
726a989a
RB
4696 /* As a special case, we have to temporarily allow for assignments
4697 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
4698 a toplevel statement, when gimplifying the GENERIC expression
4699 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4700 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4701
4702 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
4703 prevent gimplify_expr from trying to create a new temporary for
4704 foo's LHS, we tell it that it should only gimplify until it
4705 reaches the CALL_EXPR. On return from gimplify_expr, the newly
4706 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4707 and all we need to do here is set 'a' to be its LHS. */
4708 ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
4709 fb_rvalue);
6de9cd9a
DN
4710 if (ret == GS_ERROR)
4711 return ret;
4712
f8e89441 4713 /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
026c3cfd 4714 size as argument to the call. */
f8e89441
TV
4715 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4716 {
4717 tree call = TREE_OPERAND (*from_p, 0);
4718 tree vlasize = TREE_OPERAND (*from_p, 1);
4719
4720 if (TREE_CODE (call) == CALL_EXPR
4721 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
4722 {
2fe1d762 4723 int nargs = call_expr_nargs (call);
f8e89441
TV
4724 tree type = TREE_TYPE (call);
4725 tree ap = CALL_EXPR_ARG (call, 0);
4726 tree tag = CALL_EXPR_ARG (call, 1);
4727 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
2fe1d762
TV
4728 IFN_VA_ARG, type,
4729 nargs + 1, ap, tag,
c7b38fd5 4730 vlasize);
f8e89441
TV
4731 tree *call_p = &(TREE_OPERAND (*from_p, 0));
4732 *call_p = newcall;
4733 }
4734 }
4735
44de5aeb
RK
4736 /* Now see if the above changed *from_p to something we handle specially. */
4737 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4738 want_value);
6de9cd9a
DN
4739 if (ret != GS_UNHANDLED)
4740 return ret;
4741
d25cee4d
RH
4742 /* If we've got a variable sized assignment between two lvalues (i.e. does
4743 not involve a call), then we can make things a bit more straightforward
4744 by converting the assignment to memcpy or memset. */
4745 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4746 {
4747 tree from = TREE_OPERAND (*from_p, 0);
4748 tree size = TREE_OPERAND (*from_p, 1);
4749
4750 if (TREE_CODE (from) == CONSTRUCTOR)
726a989a
RB
4751 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4752
e847cc68 4753 if (is_gimple_addressable (from))
d25cee4d
RH
4754 {
4755 *from_p = from;
726a989a
RB
4756 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4757 pre_p);
d25cee4d
RH
4758 }
4759 }
4760
e41d82f5
RH
4761 /* Transform partial stores to non-addressable complex variables into
4762 total stores. This allows us to use real instead of virtual operands
4763 for these variables, which improves optimization. */
4764 if ((TREE_CODE (*to_p) == REALPART_EXPR
4765 || TREE_CODE (*to_p) == IMAGPART_EXPR)
4766 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4767 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4768
f173837a 4769 /* Try to alleviate the effects of the gimplification creating artificial
b4771722
EB
4770 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
4771 make sure not to create DECL_DEBUG_EXPR links across functions. */
f173837a 4772 if (!gimplify_ctxp->into_ssa
f2896bc9 4773 && TREE_CODE (*from_p) == VAR_DECL
726a989a
RB
4774 && DECL_IGNORED_P (*from_p)
4775 && DECL_P (*to_p)
b4771722
EB
4776 && !DECL_IGNORED_P (*to_p)
4777 && decl_function_context (*to_p) == current_function_decl)
f173837a
EB
4778 {
4779 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4780 DECL_NAME (*from_p)
4781 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
839b422f 4782 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
f173837a 4783 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
726a989a
RB
4784 }
4785
8f0fe813
NS
4786 if (want_value && TREE_THIS_VOLATILE (*to_p))
4787 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
4788
726a989a
RB
4789 if (TREE_CODE (*from_p) == CALL_EXPR)
4790 {
4791 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4792 instead of a GIMPLE_ASSIGN. */
538dd0b7 4793 gcall *call_stmt;
1304953e
JJ
4794 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
4795 {
4796 /* Gimplify internal functions created in the FEs. */
4797 int nargs = call_expr_nargs (*from_p), i;
4798 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
4799 auto_vec<tree> vargs (nargs);
4800
4801 for (i = 0; i < nargs; i++)
4802 {
4803 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
4804 EXPR_LOCATION (*from_p));
4805 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
4806 }
538dd0b7
DM
4807 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
4808 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
1304953e 4809 }
ed9c79e1
JJ
4810 else
4811 {
1304953e
JJ
4812 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
4813 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
4814 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
4815 tree fndecl = get_callee_fndecl (*from_p);
4816 if (fndecl
4817 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4818 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
4819 && call_expr_nargs (*from_p) == 3)
538dd0b7
DM
4820 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
4821 CALL_EXPR_ARG (*from_p, 0),
4822 CALL_EXPR_ARG (*from_p, 1),
4823 CALL_EXPR_ARG (*from_p, 2));
1304953e
JJ
4824 else
4825 {
538dd0b7
DM
4826 call_stmt = gimple_build_call_from_tree (*from_p);
4827 gimple_call_set_fntype (call_stmt, TREE_TYPE (fnptrtype));
1304953e 4828 }
ed9c79e1 4829 }
538dd0b7
DM
4830 notice_special_calls (call_stmt);
4831 if (!gimple_call_noreturn_p (call_stmt))
4832 gimple_call_set_lhs (call_stmt, *to_p);
4833 assign = call_stmt;
f173837a 4834 }
726a989a 4835 else
c2255bc4
AH
4836 {
4837 assign = gimple_build_assign (*to_p, *from_p);
4838 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4839 }
f173837a 4840
726a989a 4841 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
6de9cd9a 4842 {
2ad728d2
RG
4843 /* We should have got an SSA name from the start. */
4844 gcc_assert (TREE_CODE (*to_p) == SSA_NAME);
726a989a 4845 }
07beea0d 4846
6da8be89
MM
4847 gimplify_seq_add_stmt (pre_p, assign);
4848 gsi = gsi_last (*pre_p);
88ac13da 4849 maybe_fold_stmt (&gsi);
6da8be89 4850
726a989a
RB
4851 if (want_value)
4852 {
8f0fe813 4853 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
17ad5b5e 4854 return GS_OK;
6de9cd9a 4855 }
726a989a
RB
4856 else
4857 *expr_p = NULL;
6de9cd9a 4858
17ad5b5e 4859 return GS_ALL_DONE;
6de9cd9a
DN
4860}
4861
ad19c4be
EB
4862/* Gimplify a comparison between two variable-sized objects. Do this
4863 with a call to BUILT_IN_MEMCMP. */
44de5aeb
RK
4864
4865static enum gimplify_status
4866gimplify_variable_sized_compare (tree *expr_p)
4867{
692ad9aa 4868 location_t loc = EXPR_LOCATION (*expr_p);
44de5aeb
RK
4869 tree op0 = TREE_OPERAND (*expr_p, 0);
4870 tree op1 = TREE_OPERAND (*expr_p, 1);
692ad9aa 4871 tree t, arg, dest, src, expr;
5039610b
SL
4872
4873 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4874 arg = unshare_expr (arg);
4875 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
db3927fb
AH
4876 src = build_fold_addr_expr_loc (loc, op1);
4877 dest = build_fold_addr_expr_loc (loc, op0);
e79983f4 4878 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
db3927fb 4879 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
692ad9aa
EB
4880
4881 expr
b4257cfc 4882 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
692ad9aa
EB
4883 SET_EXPR_LOCATION (expr, loc);
4884 *expr_p = expr;
44de5aeb
RK
4885
4886 return GS_OK;
4887}
4888
ad19c4be
EB
4889/* Gimplify a comparison between two aggregate objects of integral scalar
4890 mode as a comparison between the bitwise equivalent scalar values. */
61c25908
OH
4891
4892static enum gimplify_status
4893gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4894{
db3927fb 4895 location_t loc = EXPR_LOCATION (*expr_p);
61c25908
OH
4896 tree op0 = TREE_OPERAND (*expr_p, 0);
4897 tree op1 = TREE_OPERAND (*expr_p, 1);
4898
4899 tree type = TREE_TYPE (op0);
4900 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4901
db3927fb
AH
4902 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4903 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
61c25908
OH
4904
4905 *expr_p
db3927fb 4906 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
61c25908
OH
4907
4908 return GS_OK;
4909}
4910
ad19c4be
EB
4911/* Gimplify an expression sequence. This function gimplifies each
4912 expression and rewrites the original expression with the last
6de9cd9a
DN
4913 expression of the sequence in GIMPLE form.
4914
4915 PRE_P points to the list where the side effects for all the
4916 expressions in the sequence will be emitted.
d3147f64 4917
6de9cd9a 4918 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
6de9cd9a
DN
4919
4920static enum gimplify_status
726a989a 4921gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6de9cd9a
DN
4922{
4923 tree t = *expr_p;
4924
4925 do
4926 {
4927 tree *sub_p = &TREE_OPERAND (t, 0);
4928
4929 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4930 gimplify_compound_expr (sub_p, pre_p, false);
4931 else
726a989a 4932 gimplify_stmt (sub_p, pre_p);
6de9cd9a
DN
4933
4934 t = TREE_OPERAND (t, 1);
4935 }
4936 while (TREE_CODE (t) == COMPOUND_EXPR);
4937
4938 *expr_p = t;
4939 if (want_value)
4940 return GS_OK;
4941 else
4942 {
726a989a 4943 gimplify_stmt (expr_p, pre_p);
6de9cd9a
DN
4944 return GS_ALL_DONE;
4945 }
4946}
4947
726a989a
RB
4948/* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
4949 gimplify. After gimplification, EXPR_P will point to a new temporary
4950 that holds the original value of the SAVE_EXPR node.
6de9cd9a 4951
726a989a 4952 PRE_P points to the list where side effects that must happen before
ad19c4be 4953 *EXPR_P should be stored. */
6de9cd9a
DN
4954
4955static enum gimplify_status
726a989a 4956gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6de9cd9a
DN
4957{
4958 enum gimplify_status ret = GS_ALL_DONE;
4959 tree val;
4960
282899df 4961 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
6de9cd9a
DN
4962 val = TREE_OPERAND (*expr_p, 0);
4963
7f5e6307
RH
4964 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
4965 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
17ad5b5e 4966 {
7f5e6307
RH
4967 /* The operand may be a void-valued expression such as SAVE_EXPRs
4968 generated by the Java frontend for class initialization. It is
4969 being executed only for its side-effects. */
4970 if (TREE_TYPE (val) == void_type_node)
4971 {
4972 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4973 is_gimple_stmt, fb_none);
7f5e6307
RH
4974 val = NULL;
4975 }
4976 else
4977 val = get_initialized_tmp_var (val, pre_p, post_p);
4978
4979 TREE_OPERAND (*expr_p, 0) = val;
4980 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
17ad5b5e 4981 }
6de9cd9a 4982
7f5e6307
RH
4983 *expr_p = val;
4984
6de9cd9a
DN
4985 return ret;
4986}
4987
ad19c4be 4988/* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6de9cd9a
DN
4989
4990 unary_expr
4991 : ...
4992 | '&' varname
4993 ...
4994
4995 PRE_P points to the list where side effects that must happen before
4996 *EXPR_P should be stored.
4997
4998 POST_P points to the list where side effects that must happen after
4999 *EXPR_P should be stored. */
5000
5001static enum gimplify_status
726a989a 5002gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6de9cd9a
DN
5003{
5004 tree expr = *expr_p;
5005 tree op0 = TREE_OPERAND (expr, 0);
5006 enum gimplify_status ret;
db3927fb 5007 location_t loc = EXPR_LOCATION (*expr_p);
6de9cd9a
DN
5008
5009 switch (TREE_CODE (op0))
5010 {
5011 case INDIRECT_REF:
67f23620 5012 do_indirect_ref:
6de9cd9a
DN
5013 /* Check if we are dealing with an expression of the form '&*ptr'.
5014 While the front end folds away '&*ptr' into 'ptr', these
5015 expressions may be generated internally by the compiler (e.g.,
5016 builtins like __builtin_va_end). */
67f23620
RH
5017 /* Caution: the silent array decomposition semantics we allow for
5018 ADDR_EXPR means we can't always discard the pair. */
c87ac7e8
AO
5019 /* Gimplification of the ADDR_EXPR operand may drop
5020 cv-qualification conversions, so make sure we add them if
5021 needed. */
67f23620
RH
5022 {
5023 tree op00 = TREE_OPERAND (op0, 0);
5024 tree t_expr = TREE_TYPE (expr);
5025 tree t_op00 = TREE_TYPE (op00);
5026
f4088621 5027 if (!useless_type_conversion_p (t_expr, t_op00))
db3927fb 5028 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
67f23620
RH
5029 *expr_p = op00;
5030 ret = GS_OK;
5031 }
6de9cd9a
DN
5032 break;
5033
44de5aeb
RK
5034 case VIEW_CONVERT_EXPR:
5035 /* Take the address of our operand and then convert it to the type of
af72267c
RK
5036 this ADDR_EXPR.
5037
5038 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
5039 all clear. The impact of this transformation is even less clear. */
91804752
EB
5040
5041 /* If the operand is a useless conversion, look through it. Doing so
5042 guarantees that the ADDR_EXPR and its operand will remain of the
5043 same type. */
5044 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
317c0092 5045 op0 = TREE_OPERAND (op0, 0);
91804752 5046
db3927fb
AH
5047 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
5048 build_fold_addr_expr_loc (loc,
5049 TREE_OPERAND (op0, 0)));
44de5aeb 5050 ret = GS_OK;
6de9cd9a
DN
5051 break;
5052
903eccd1
EB
5053 case MEM_REF:
5054 if (integer_zerop (TREE_OPERAND (op0, 1)))
5055 goto do_indirect_ref;
5056
5057 /* ... fall through ... */
5058
6de9cd9a 5059 default:
cbf5d0e7
RB
5060 /* If we see a call to a declared builtin or see its address
5061 being taken (we can unify those cases here) then we can mark
5062 the builtin for implicit generation by GCC. */
5063 if (TREE_CODE (op0) == FUNCTION_DECL
5064 && DECL_BUILT_IN_CLASS (op0) == BUILT_IN_NORMAL
5065 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
5066 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
5067
6de9cd9a 5068 /* We use fb_either here because the C frontend sometimes takes
5201931e
JM
5069 the address of a call that returns a struct; see
5070 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
5071 the implied temporary explicit. */
936d04b6 5072
f76d6e6f 5073 /* Make the operand addressable. */
6de9cd9a 5074 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
e847cc68 5075 is_gimple_addressable, fb_either);
8b17cc05
RG
5076 if (ret == GS_ERROR)
5077 break;
67f23620 5078
f76d6e6f
EB
5079 /* Then mark it. Beware that it may not be possible to do so directly
5080 if a temporary has been created by the gimplification. */
5081 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
9e51aaf5 5082
8b17cc05 5083 op0 = TREE_OPERAND (expr, 0);
6de9cd9a 5084
8b17cc05
RG
5085 /* For various reasons, the gimplification of the expression
5086 may have made a new INDIRECT_REF. */
5087 if (TREE_CODE (op0) == INDIRECT_REF)
5088 goto do_indirect_ref;
5089
6b8b9e42
RG
5090 mark_addressable (TREE_OPERAND (expr, 0));
5091
5092 /* The FEs may end up building ADDR_EXPRs early on a decl with
5093 an incomplete type. Re-build ADDR_EXPRs in canonical form
5094 here. */
5095 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
5096 *expr_p = build_fold_addr_expr (op0);
5097
8b17cc05 5098 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6b8b9e42
RG
5099 recompute_tree_invariant_for_addr_expr (*expr_p);
5100
5101 /* If we re-built the ADDR_EXPR add a conversion to the original type
5102 if required. */
5103 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
5104 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
8b17cc05 5105
6de9cd9a
DN
5106 break;
5107 }
5108
6de9cd9a
DN
5109 return ret;
5110}
5111
5112/* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
5113 value; output operands should be a gimple lvalue. */
5114
5115static enum gimplify_status
726a989a 5116gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6de9cd9a 5117{
726a989a
RB
5118 tree expr;
5119 int noutputs;
5120 const char **oconstraints;
6de9cd9a
DN
5121 int i;
5122 tree link;
5123 const char *constraint;
5124 bool allows_mem, allows_reg, is_inout;
5125 enum gimplify_status ret, tret;
538dd0b7 5126 gasm *stmt;
9771b263
DN
5127 vec<tree, va_gc> *inputs;
5128 vec<tree, va_gc> *outputs;
5129 vec<tree, va_gc> *clobbers;
5130 vec<tree, va_gc> *labels;
726a989a 5131 tree link_next;
b8698a0f 5132
726a989a
RB
5133 expr = *expr_p;
5134 noutputs = list_length (ASM_OUTPUTS (expr));
5135 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
5136
9771b263
DN
5137 inputs = NULL;
5138 outputs = NULL;
5139 clobbers = NULL;
5140 labels = NULL;
6de9cd9a 5141
6de9cd9a 5142 ret = GS_ALL_DONE;
726a989a
RB
5143 link_next = NULL_TREE;
5144 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6de9cd9a 5145 {
2c68ba8e 5146 bool ok;
726a989a
RB
5147 size_t constraint_len;
5148
5149 link_next = TREE_CHAIN (link);
5150
5151 oconstraints[i]
5152 = constraint
6de9cd9a 5153 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6db081f1
AP
5154 constraint_len = strlen (constraint);
5155 if (constraint_len == 0)
5156 continue;
6de9cd9a 5157
2c68ba8e
LB
5158 ok = parse_output_constraint (&constraint, i, 0, 0,
5159 &allows_mem, &allows_reg, &is_inout);
5160 if (!ok)
5161 {
5162 ret = GS_ERROR;
5163 is_inout = false;
5164 }
6de9cd9a
DN
5165
5166 if (!allows_reg && allows_mem)
936d04b6 5167 mark_addressable (TREE_VALUE (link));
6de9cd9a
DN
5168
5169 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5170 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
5171 fb_lvalue | fb_mayfail);
5172 if (tret == GS_ERROR)
5173 {
5174 error ("invalid lvalue in asm output %d", i);
5175 ret = tret;
5176 }
5177
9771b263 5178 vec_safe_push (outputs, link);
726a989a
RB
5179 TREE_CHAIN (link) = NULL_TREE;
5180
6de9cd9a
DN
5181 if (is_inout)
5182 {
5183 /* An input/output operand. To give the optimizers more
5184 flexibility, split it into separate input and output
5185 operands. */
5186 tree input;
5187 char buf[10];
6de9cd9a
DN
5188
5189 /* Turn the in/out constraint into an output constraint. */
5190 char *p = xstrdup (constraint);
5191 p[0] = '=';
5192 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6de9cd9a
DN
5193
5194 /* And add a matching input constraint. */
5195 if (allows_reg)
5196 {
5197 sprintf (buf, "%d", i);
372d72d9
JJ
5198
5199 /* If there are multiple alternatives in the constraint,
5200 handle each of them individually. Those that allow register
5201 will be replaced with operand number, the others will stay
5202 unchanged. */
5203 if (strchr (p, ',') != NULL)
5204 {
5205 size_t len = 0, buflen = strlen (buf);
5206 char *beg, *end, *str, *dst;
5207
5208 for (beg = p + 1;;)
5209 {
5210 end = strchr (beg, ',');
5211 if (end == NULL)
5212 end = strchr (beg, '\0');
5213 if ((size_t) (end - beg) < buflen)
5214 len += buflen + 1;
5215 else
5216 len += end - beg + 1;
5217 if (*end)
5218 beg = end + 1;
5219 else
5220 break;
5221 }
5222
858904db 5223 str = (char *) alloca (len);
372d72d9
JJ
5224 for (beg = p + 1, dst = str;;)
5225 {
5226 const char *tem;
5227 bool mem_p, reg_p, inout_p;
5228
5229 end = strchr (beg, ',');
5230 if (end)
5231 *end = '\0';
5232 beg[-1] = '=';
5233 tem = beg - 1;
5234 parse_output_constraint (&tem, i, 0, 0,
5235 &mem_p, &reg_p, &inout_p);
5236 if (dst != str)
5237 *dst++ = ',';
5238 if (reg_p)
5239 {
5240 memcpy (dst, buf, buflen);
5241 dst += buflen;
5242 }
5243 else
5244 {
5245 if (end)
5246 len = end - beg;
5247 else
5248 len = strlen (beg);
5249 memcpy (dst, beg, len);
5250 dst += len;
5251 }
5252 if (end)
5253 beg = end + 1;
5254 else
5255 break;
5256 }
5257 *dst = '\0';
5258 input = build_string (dst - str, str);
5259 }
5260 else
5261 input = build_string (strlen (buf), buf);
6de9cd9a
DN
5262 }
5263 else
5264 input = build_string (constraint_len - 1, constraint + 1);
372d72d9
JJ
5265
5266 free (p);
5267
6de9cd9a
DN
5268 input = build_tree_list (build_tree_list (NULL_TREE, input),
5269 unshare_expr (TREE_VALUE (link)));
5270 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
5271 }
5272 }
5273
726a989a
RB
5274 link_next = NULL_TREE;
5275 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6de9cd9a 5276 {
726a989a
RB
5277 link_next = TREE_CHAIN (link);
5278 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6de9cd9a
DN
5279 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5280 oconstraints, &allows_mem, &allows_reg);
5281
f497c16c
JJ
5282 /* If we can't make copies, we can only accept memory. */
5283 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
5284 {
5285 if (allows_mem)
5286 allows_reg = 0;
5287 else
5288 {
5289 error ("impossible constraint in %<asm%>");
5290 error ("non-memory input %d must stay in memory", i);
5291 return GS_ERROR;
5292 }
5293 }
5294
6de9cd9a
DN
5295 /* If the operand is a memory input, it should be an lvalue. */
5296 if (!allows_reg && allows_mem)
5297 {
502c5084
JJ
5298 tree inputv = TREE_VALUE (link);
5299 STRIP_NOPS (inputv);
5300 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
5301 || TREE_CODE (inputv) == PREINCREMENT_EXPR
5302 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
8f1e28e0
MP
5303 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
5304 || TREE_CODE (inputv) == MODIFY_EXPR)
502c5084 5305 TREE_VALUE (link) = error_mark_node;
6de9cd9a
DN
5306 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5307 is_gimple_lvalue, fb_lvalue | fb_mayfail);
936d04b6 5308 mark_addressable (TREE_VALUE (link));
6de9cd9a
DN
5309 if (tret == GS_ERROR)
5310 {
6a3799eb
AH
5311 if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
5312 input_location = EXPR_LOCATION (TREE_VALUE (link));
6de9cd9a
DN
5313 error ("memory input %d is not directly addressable", i);
5314 ret = tret;
5315 }
5316 }
5317 else
5318 {
5319 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
e670d9e4 5320 is_gimple_asm_val, fb_rvalue);
6de9cd9a
DN
5321 if (tret == GS_ERROR)
5322 ret = tret;
5323 }
726a989a
RB
5324
5325 TREE_CHAIN (link) = NULL_TREE;
9771b263 5326 vec_safe_push (inputs, link);
6de9cd9a 5327 }
b8698a0f 5328
ca081cc8
EB
5329 link_next = NULL_TREE;
5330 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
5331 {
5332 link_next = TREE_CHAIN (link);
5333 TREE_CHAIN (link) = NULL_TREE;
5334 vec_safe_push (clobbers, link);
5335 }
1c384bf1 5336
ca081cc8
EB
5337 link_next = NULL_TREE;
5338 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
5339 {
5340 link_next = TREE_CHAIN (link);
5341 TREE_CHAIN (link) = NULL_TREE;
5342 vec_safe_push (labels, link);
5343 }
726a989a 5344
a406865a
RG
5345 /* Do not add ASMs with errors to the gimple IL stream. */
5346 if (ret != GS_ERROR)
5347 {
5348 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
1c384bf1 5349 inputs, outputs, clobbers, labels);
726a989a 5350
15a85b05 5351 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
a406865a
RG
5352 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
5353
5354 gimplify_seq_add_stmt (pre_p, stmt);
5355 }
6de9cd9a
DN
5356
5357 return ret;
5358}
5359
/* Gimplify a CLEANUP_POINT_EXPR.  Currently this works by adding
   GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
   gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
   return to this function.

   FIXME should we complexify the prequeue handling instead?  Or use flags
   for all the cleanups and let the optimizer tighten them up?  The current
   code seems pretty fragile; it will break on a cleanup within any
   non-conditional nesting.  But any such nesting would be broken, anyway;
   we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
   and continues out of it.  We can do that at the RTL level, though, so
   having an optimizer to tighten up try/finally regions would be a Good
   Thing.  */

static enum gimplify_status
gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
{
  gimple_stmt_iterator iter;
  gimple_seq body_sequence = NULL;

  /* If the wrapped expression computes a value, voidify_wrapper_expr
     gives us a temporary to hold it; the CLEANUP_POINT_EXPR itself is
     then replaced by that temporary at the end.  */
  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  /* We only care about the number of conditions between the innermost
     CLEANUP_POINT_EXPR and the cleanup.  So save and reset the count and
     any cleanups collected outside the CLEANUP_POINT_EXPR.  */
  int old_conds = gimplify_ctxp->conditions;
  gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
  bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
  gimplify_ctxp->conditions = 0;
  gimplify_ctxp->conditional_cleanups = NULL;
  gimplify_ctxp->in_cleanup_point_expr = true;

  /* Gimplify the body into a private sequence so that the WCE markers
     it accumulates can be rewritten below before being emitted.  */
  gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);

  gimplify_ctxp->conditions = old_conds;
  gimplify_ctxp->conditional_cleanups = old_cleanups;
  gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;

  /* Walk the gimplified body, turning each GIMPLE_WITH_CLEANUP_EXPR
     marker into a GIMPLE_TRY that protects all statements following
     the marker.  */
  for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
    {
      gimple *wce = gsi_stmt (iter);

      if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
	{
	  if (gsi_one_before_end_p (iter))
	    {
	      /* The marker is the last statement: nothing follows that
		 needs protecting, so emit the cleanup inline (unless it
		 is EH-only, in which case it can simply be dropped).
		 Note that gsi_insert_seq_before and gsi_remove do not
		 scan operands, unlike some other sequence mutators.  */
	      if (!gimple_wce_cleanup_eh_only (wce))
		gsi_insert_seq_before_without_update (&iter,
						      gimple_wce_cleanup (wce),
						      GSI_SAME_STMT);
	      gsi_remove (&iter, true);
	      break;
	    }
	  else
	    {
	      gtry *gtry;
	      gimple_seq seq;
	      enum gimple_try_flags kind;

	      if (gimple_wce_cleanup_eh_only (wce))
		kind = GIMPLE_TRY_CATCH;
	      else
		kind = GIMPLE_TRY_FINALLY;
	      /* Everything after the marker becomes the protected body.  */
	      seq = gsi_split_seq_after (iter);

	      gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
	      /* Do not use gsi_replace here, as it may scan operands.
		 We want to do a simple structural modification only.  */
	      gsi_set_stmt (&iter, gtry);
	      /* Continue scanning inside the new try body, so nested
		 markers are converted as well.  */
	      iter = gsi_start (gtry->eval);
	    }
	}
      else
	gsi_next (&iter);
    }

  gimplify_seq_add_seq (pre_p, body_sequence);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}
5450
/* Insert a cleanup marker for gimplify_cleanup_point_expr.  CLEANUP
   is the cleanup action required.  EH_ONLY is true if the cleanup should
   only be executed if an exception is thrown, not on normal exit.  */

static void
gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
{
  gimple *wce;
  gimple_seq cleanup_stmts = NULL;

  /* Errors can result in improperly nested cleanups.  Which results in
     confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
  if (seen_error ())
    return;

  if (gimple_conditional_context ())
    {
      /* If we're in a conditional context, this is more complex.  We only
	 want to run the cleanup if we actually ran the initialization that
	 necessitates it, but we want to run it after the end of the
	 conditional context.  So we wrap the try/finally around the
	 condition and use a flag to determine whether or not to actually
	 run the destructor.  Thus

	   test ? f(A()) : 0

	 becomes (approximately)

	   flag = 0;
	   try {
	     if (test) { A::A(temp); flag = 1; val = f(temp); }
	     else { val = 0; }
	   } finally {
	     if (flag) A::~A(temp);
	   }
	   val
      */
      tree flag = create_tmp_var (boolean_type_node, "cleanup");
      gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
      gassign *ftrue = gimple_build_assign (flag, boolean_true_node);

      /* Guard the cleanup with the flag so it only runs if the
	 initialization was actually reached.  */
      cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);

      /* "flag = false" and the guarded cleanup go on the conditional
	 cleanup queue (emitted outside the conditional context);
	 "flag = true" goes right here, next to the initialization.  */
      gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
      gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
      gimplify_seq_add_stmt (pre_p, ftrue);

      /* Because of this manipulation, and the EH edges that jump
	 threading cannot redirect, the temporary (VAR) will appear
	 to be used uninitialized.  Don't warn.  */
      TREE_NO_WARNING (var) = 1;
    }
  else
    {
      /* Unconditional context: just wrap the gimplified cleanup in a
	 WCE marker; gimplify_cleanup_point_expr converts it into a
	 GIMPLE_TRY later.  */
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);
      gimple_wce_set_cleanup_eh_only (wce, eh_only);
      gimplify_seq_add_stmt (pre_p, wce);
    }
}
5513
/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR.  */

static enum gimplify_status
gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree targ = *expr_p;
  tree temp = TARGET_EXPR_SLOT (targ);
  tree init = TARGET_EXPR_INITIAL (targ);
  enum gimplify_status ret;

  if (init)
    {
      tree cleanup = NULL_TREE;

      /* TARGET_EXPR temps aren't part of the enclosing block, so add it
	 to the temps list.  Handle also variable length TARGET_EXPRs.  */
      if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
	    gimplify_type_sizes (TREE_TYPE (temp), pre_p);
	  gimplify_vla_decl (temp, pre_p);
	}
      else
	gimple_add_tmp_var (temp);

      /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
	 expression is supposed to initialize the slot.  */
      if (VOID_TYPE_P (TREE_TYPE (init)))
	ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
      else
	{
	  /* Otherwise build "temp = init" and gimplify that; the
	     INIT_EXPR node itself is freed once fully gimplified.  */
	  tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
	  init = init_expr;
	  ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
	  init = NULL;
	  ggc_free (init_expr);
	}
      if (ret == GS_ERROR)
	{
	  /* PR c++/28266 Make sure this is expanded only once. */
	  TARGET_EXPR_INITIAL (targ) = NULL_TREE;
	  return GS_ERROR;
	}
      /* If gimplification left a residual initializer, emit it now.  */
      if (init)
	gimplify_and_add (init, pre_p);

      /* If needed, push the cleanup for the temp.  An EH-only cleanup
	 is pushed immediately; a normal cleanup is held in CLEANUP so a
	 stack-reuse clobber can be chained onto it below.  */
      if (TARGET_EXPR_CLEANUP (targ))
	{
	  if (CLEANUP_EH_ONLY (targ))
	    gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
				 CLEANUP_EH_ONLY (targ), pre_p);
	  else
	    cleanup = TARGET_EXPR_CLEANUP (targ);
	}

      /* Add a clobber for the temporary going out of scope, like
	 gimplify_bind_expr.  */
      if (gimplify_ctxp->in_cleanup_point_expr
	  && needs_to_live_in_memory (temp)
	  && flag_stack_reuse == SR_ALL)
	{
	  tree clobber = build_constructor (TREE_TYPE (temp),
					    NULL);
	  TREE_THIS_VOLATILE (clobber) = true;
	  clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
	  if (cleanup)
	    cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
			      clobber);
	  else
	    cleanup = clobber;
	}

      if (cleanup)
	gimple_push_cleanup (temp, cleanup, false, pre_p);

      /* Only expand this once.  Stash the (possibly null) gimplified
	 initializer in operand 3 and clear TARGET_EXPR_INITIAL so the
	 initialization cannot be emitted a second time.  */
      TREE_OPERAND (targ, 3) = init;
      TARGET_EXPR_INITIAL (targ) = NULL_TREE;
    }
  else
    /* We should have expanded this before.  */
    gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));

  /* The TARGET_EXPR as a whole evaluates to its slot.  */
  *expr_p = temp;
  return GS_OK;
}
5601
5602/* Gimplification of expression trees. */
5603
726a989a
RB
5604/* Gimplify an expression which appears at statement context. The
5605 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5606 NULL, a new sequence is allocated.
6de9cd9a 5607
726a989a
RB
5608 Return true if we actually added a statement to the queue. */
5609
5610bool
5611gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6de9cd9a 5612{
726a989a 5613 gimple_seq_node last;
6de9cd9a 5614
726a989a
RB
5615 last = gimple_seq_last (*seq_p);
5616 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5617 return last != gimple_seq_last (*seq_p);
6de9cd9a
DN
5618}
5619
953ff289
DN
/* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
   to CTX.  If entries already exist, force them to be some flavor of private.
   If there is no enclosing parallel, do nothing.  */

void
omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
{
  splay_tree_node n;

  if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
    return;

  /* Walk outward through the enclosing OMP contexts, adjusting or
     adding an entry for DECL in each one.  */
  do
    {
      n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
      if (n != NULL)
	{
	  /* An existing SHARED entry is demoted to FIRSTPRIVATE
	     (preserving only the SEEN bit); a MAP entry is restricted
	     to to-only; any other existing entry stops the walk.  */
	  if (n->value & GOVD_SHARED)
	    n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
	  else if (n->value & GOVD_MAP)
	    n->value |= GOVD_MAP_TO_ONLY;
	  else
	    return;
	}
      else if ((ctx->region_type & ORT_TARGET) != 0)
	{
	  if (ctx->target_map_scalars_firstprivate)
	    omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
	  else
	    omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
	}
      else if (ctx->region_type != ORT_WORKSHARE
	       && ctx->region_type != ORT_SIMD
	       && ctx->region_type != ORT_ACC
	       && !(ctx->region_type & ORT_TARGET_DATA))
	omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);

      ctx = ctx->outer_context;
    }
  while (ctx);
}
5661
5662/* Similarly for each of the type sizes of TYPE. */
5663
5664static void
5665omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5666{
5667 if (type == NULL || type == error_mark_node)
5668 return;
5669 type = TYPE_MAIN_VARIANT (type);
5670
6e2830c3 5671 if (ctx->privatized_types->add (type))
953ff289
DN
5672 return;
5673
5674 switch (TREE_CODE (type))
5675 {
5676 case INTEGER_TYPE:
5677 case ENUMERAL_TYPE:
5678 case BOOLEAN_TYPE:
953ff289 5679 case REAL_TYPE:
325217ed 5680 case FIXED_POINT_TYPE:
953ff289
DN
5681 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5682 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5683 break;
5684
5685 case ARRAY_TYPE:
5686 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5687 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5688 break;
5689
5690 case RECORD_TYPE:
5691 case UNION_TYPE:
5692 case QUAL_UNION_TYPE:
5693 {
5694 tree field;
910ad8de 5695 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
953ff289
DN
5696 if (TREE_CODE (field) == FIELD_DECL)
5697 {
5698 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5699 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5700 }
5701 }
5702 break;
5703
5704 case POINTER_TYPE:
5705 case REFERENCE_TYPE:
5706 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5707 break;
5708
5709 default:
5710 break;
5711 }
5712
5713 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5714 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5715 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5716}
5717
/* Add an entry for DECL in the OMP context CTX with FLAGS.  */

static void
omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
{
  splay_tree_node n;
  unsigned int nflags;
  tree t;

  if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
    return;

  /* Never elide decls whose type has TREE_ADDRESSABLE set.  This means
     there are constructors involved somewhere.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (decl))
      || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
    flags |= GOVD_SEEN;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
    {
      /* We shouldn't be re-adding the decl with the same data
	 sharing class.  */
      gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
      nflags = n->value | flags;
      /* The only combination of data sharing classes we should see is
	 FIRSTPRIVATE and LASTPRIVATE.  However, OpenACC permits
	 reduction variables to be used in data sharing clauses.  */
      gcc_assert ((ctx->region_type & ORT_ACC) != 0
		  || ((nflags & GOVD_DATA_SHARE_CLASS)
		      == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
		  || (flags & GOVD_DATA_SHARE_CLASS) == 0);
      n->value = nflags;
      return;
    }

  /* When adding a variable-sized variable, we have to handle all sorts
     of additional bits of data: the pointer replacement variable, and
     the parameters of the type.  */
  if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      /* Add the pointer replacement variable as PRIVATE if the variable
	 replacement is private, else FIRSTPRIVATE since we'll need the
	 address of the original variable either for SHARED, or for the
	 copy into or out of the context.  */
      if (!(flags & GOVD_LOCAL))
	{
	  if (flags & GOVD_MAP)
	    nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
	  else if (flags & GOVD_PRIVATE)
	    nflags = GOVD_PRIVATE;
	  else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
		   && (flags & GOVD_FIRSTPRIVATE))
	    nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
	  else
	    nflags = GOVD_FIRSTPRIVATE;
	  nflags |= flags & GOVD_SEEN;
	  /* The VLA's DECL_VALUE_EXPR is *ptr; recurse on the pointer
	     replacement variable itself.  */
	  t = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  omp_add_variable (ctx, t, nflags);
	}

      /* Add all of the variable and type parameters (which should have
	 been gimplified to a formal temporary) as FIRSTPRIVATE.  */
      omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
      omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* The variable-sized variable itself is never SHARED, only some form
	 of PRIVATE.  The sharing would take place via the pointer variable
	 which we remapped above.  */
      if (flags & GOVD_SHARED)
	flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
		| (flags & (GOVD_SEEN | GOVD_EXPLICIT));

      /* We're going to make use of the TYPE_SIZE_UNIT at least in the
	 alloca statement we generate for the variable, so make sure it
	 is available.  This isn't automatically needed for the SHARED
	 case, since we won't be allocating local storage then.
	 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
	 in this case omp_notice_variable will be called later
	 on when it is gimplified.  */
      else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
	       && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
	omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
    }
  else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
	   && lang_hooks.decls.omp_privatize_by_reference (decl))
    {
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* Similar to the direct variable sized case above, we'll need the
	 size of references being privatized.  */
      if ((flags & GOVD_SHARED) == 0)
	{
	  t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  if (DECL_P (t))
	    omp_notice_variable (ctx, t, true);
	}
    }

  /* Record the final flags: merge into an existing entry without a data
     sharing class, or create a fresh one.  */
  if (n != NULL)
    n->value |= flags;
  else
    splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
}
5826
41dbbb37 5827/* Notice a threadprivate variable DECL used in OMP context CTX.
f22f4340
JJ
5828 This just prints out diagnostics about threadprivate variable uses
5829 in untied tasks. If DECL2 is non-NULL, prevent this warning
5830 on that variable. */
5831
5832static bool
5833omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
5834 tree decl2)
5835{
5836 splay_tree_node n;
acf0174b
JJ
5837 struct gimplify_omp_ctx *octx;
5838
5839 for (octx = ctx; octx; octx = octx->outer_context)
d9a6bd32 5840 if ((octx->region_type & ORT_TARGET) != 0)
acf0174b
JJ
5841 {
5842 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
5843 if (n == NULL)
5844 {
5845 error ("threadprivate variable %qE used in target region",
5846 DECL_NAME (decl));
5847 error_at (octx->location, "enclosing target region");
5848 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
5849 }
5850 if (decl2)
5851 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
5852 }
f22f4340
JJ
5853
5854 if (ctx->region_type != ORT_UNTIED_TASK)
5855 return false;
5856 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5857 if (n == NULL)
5858 {
ad19c4be
EB
5859 error ("threadprivate variable %qE used in untied task",
5860 DECL_NAME (decl));
f22f4340
JJ
5861 error_at (ctx->location, "enclosing task");
5862 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
5863 }
5864 if (decl2)
5865 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
5866 return false;
5867}
5868
6e232ba4
JN
5869/* Return true if global var DECL is device resident. */
5870
5871static bool
5872device_resident_p (tree decl)
5873{
5874 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
5875
5876 if (!attr)
5877 return false;
5878
5879 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
5880 {
5881 tree c = TREE_VALUE (t);
5882 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
5883 return true;
5884 }
5885
5886 return false;
5887}
5888
72500605
NS
/* Determine outer default flags for DECL mentioned in an OMP region
   but not declared in an enclosing clause.

   ??? Some compiler-generated variables (like SAVE_EXPRs) could be
   remapped firstprivate instead of shared.  To some extent this is
   addressed in omp_firstprivatize_type_sizes, but not
   effectively.  */

static unsigned
omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
		    bool in_code, unsigned flags)
{
  enum omp_clause_default_kind default_kind = ctx->default_kind;
  enum omp_clause_default_kind kind;

  /* A language-predetermined sharing overrides the region's default
     clause.  */
  kind = lang_hooks.decls.omp_predetermined_sharing (decl);
  if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    default_kind = kind;

  switch (default_kind)
    {
    case OMP_CLAUSE_DEFAULT_NONE:
      {
	const char *rtype;

	if (ctx->region_type & ORT_PARALLEL)
	  rtype = "parallel";
	else if (ctx->region_type & ORT_TASK)
	  rtype = "task";
	else if (ctx->region_type & ORT_TEAMS)
	  rtype = "teams";
	else
	  gcc_unreachable ();

	error ("%qE not specified in enclosing %s",
	       DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
	error_at (ctx->location, "enclosing %s", rtype);
      }
      /* FALLTHRU */
    case OMP_CLAUSE_DEFAULT_SHARED:
      flags |= GOVD_SHARED;
      break;
    case OMP_CLAUSE_DEFAULT_PRIVATE:
      flags |= GOVD_PRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
      flags |= GOVD_FIRSTPRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
      /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
      gcc_assert ((ctx->region_type & ORT_TASK) != 0);
      if (struct gimplify_omp_ctx *octx = ctx->outer_context)
	{
	  /* Search enclosing contexts for an existing data-sharing
	     decision on DECL and mirror it here.  */
	  omp_notice_variable (octx, decl, in_code);
	  for (; octx; octx = octx->outer_context)
	    {
	      splay_tree_node n2;

	      n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      /* Skip target regions that impose no data sharing class.  */
	      if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
		  && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
		continue;
	      if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
		{
		  flags |= GOVD_FIRSTPRIVATE;
		  goto found_outer;
		}
	      if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
		{
		  flags |= GOVD_SHARED;
		  goto found_outer;
		}
	    }
	}

      /* No outer decision: parameters and function-local variables
	 default to firstprivate, everything else to shared.  */
      if (TREE_CODE (decl) == PARM_DECL
	  || (!is_global_var (decl)
	      && DECL_CONTEXT (decl) == current_function_decl))
	flags |= GOVD_FIRSTPRIVATE;
      else
	flags |= GOVD_SHARED;
    found_outer:
      break;

    default:
      gcc_unreachable ();
    }

  return flags;
}
5979
fffeedeb
NS
5980
/* Determine outer default flags for DECL mentioned in an OACC region
   but not declared in an enclosing clause.  */

static unsigned
oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
{
  const char *rkind;
  bool on_device = false;

  /* A device-resident global inside a parallel or kernels region is
     already on the device; map it to-only.  */
  if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
      && is_global_var (decl)
      && device_resident_p (decl))
    {
      on_device = true;
      flags |= GOVD_MAP_TO_ONLY;
    }

  switch (ctx->region_type)
    {
    default:
      gcc_unreachable ();

    case ORT_ACC_KERNELS:
      /* Scalars are default 'copy' under kernels, non-scalars are default
	 'present_or_copy'.  */
      flags |= GOVD_MAP;
      if (!AGGREGATE_TYPE_P (TREE_TYPE (decl)))
	flags |= GOVD_MAP_FORCE;

      rkind = "kernels";
      break;

    case ORT_ACC_PARALLEL:
      {
	/* Look through references and pointers when classifying the
	   underlying type.  */
	tree type = TREE_TYPE (decl);

	if (TREE_CODE (type) == REFERENCE_TYPE
	    || POINTER_TYPE_P (type))
	  type = TREE_TYPE (type);

	if (on_device || AGGREGATE_TYPE_P (type))
	  /* Aggregates default to 'present_or_copy'.  */
	  flags |= GOVD_MAP;
	else
	  /* Scalars default to 'firstprivate'.  */
	  flags |= GOVD_FIRSTPRIVATE;
	rkind = "parallel";
      }
      break;
    }

  if (DECL_ARTIFICIAL (decl))
    ; /* We can get compiler-generated decls, and should not complain
	 about them.  */
  else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
    {
      error ("%qE not specified in enclosing OpenACC %qs construct",
	     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
      inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
    }
  else
    gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);

  return flags;
}
6046
/* Record the fact that DECL was used within the OMP context CTX.
   IN_CODE is true when real code uses DECL, and false when we should
   merely emit default(none) errors.  Return true if DECL is going to
   be remapped and thus DECL shouldn't be gimplified into its
   DECL_VALUE_EXPR (if any).  */

static bool
omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
{
  splay_tree_node n;
  unsigned flags = in_code ? GOVD_SEEN : 0;
  bool ret = false, shared;

  if (error_operand_p (decl))
    return false;

  if (ctx->region_type == ORT_NONE)
    return lang_hooks.decls.omp_disregard_value_expr (decl, false);

  /* Threadprivate variables are predetermined.  */
  if (is_global_var (decl))
    {
      if (DECL_THREAD_LOCAL_P (decl))
	return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);

      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree value = get_base_address (DECL_VALUE_EXPR (decl));

	  if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
	    return omp_notice_threadprivate_variable (ctx, decl, value);
	}
    }

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if ((ctx->region_type & ORT_TARGET) != 0)
    {
      /* Target regions: compute the implicit mapping/firstprivate
	 treatment for a variable not mentioned in any clause.  */
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
      if (n == NULL)
	{
	  unsigned nflags = flags;
	  if (ctx->target_map_pointers_as_0len_arrays
	      || ctx->target_map_scalars_firstprivate)
	    {
	      bool is_declare_target = false;
	      bool is_scalar = false;
	      /* A global that is offloadable and has no data-sharing
		 entry in any enclosing context counts as "declare
		 target" and needs no implicit clause.  */
	      if (is_global_var (decl)
		  && varpool_node::get_create (decl)->offloadable)
		{
		  struct gimplify_omp_ctx *octx;
		  for (octx = ctx->outer_context;
		       octx; octx = octx->outer_context)
		    {
		      n = splay_tree_lookup (octx->variables,
					     (splay_tree_key)decl);
		      if (n
			  && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
			  && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
			break;
		    }
		  is_declare_target = octx == NULL;
		}
	      if (!is_declare_target && ctx->target_map_scalars_firstprivate)
		{
		  /* Peel references and complex to find scalar types.  */
		  tree type = TREE_TYPE (decl);
		  if (TREE_CODE (type) == REFERENCE_TYPE)
		    type = TREE_TYPE (type);
		  if (TREE_CODE (type) == COMPLEX_TYPE)
		    type = TREE_TYPE (type);
		  if (INTEGRAL_TYPE_P (type)
		      || SCALAR_FLOAT_TYPE_P (type)
		      || TREE_CODE (type) == POINTER_TYPE)
		    is_scalar = true;
		}
	      if (is_declare_target)
		;
	      else if (ctx->target_map_pointers_as_0len_arrays
		       && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
			   || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
			       && TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
				  == POINTER_TYPE)))
		nflags |= GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
	      else if (is_scalar)
		nflags |= GOVD_FIRSTPRIVATE;
	    }

	  struct gimplify_omp_ctx *octx = ctx->outer_context;
	  if ((ctx->region_type & ORT_ACC) && octx)
	    {
	      /* Look in outer OpenACC contexts, to see if there's a
		 data attribute for this variable.  */
	      omp_notice_variable (octx, decl, in_code);

	      for (; octx; octx = octx->outer_context)
		{
		  if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
		    break;
		  splay_tree_node n2
		    = splay_tree_lookup (octx->variables,
					 (splay_tree_key) decl);
		  if (n2)
		    {
		      if (octx->region_type == ORT_ACC_HOST_DATA)
		        error ("variable %qE declared in enclosing "
			       "%<host_data%> region", DECL_NAME (decl));
		      nflags |= GOVD_MAP;
		      goto found_outer;
		    }
		}
	    }

	  {
	    tree type = TREE_TYPE (decl);

	    /* NFLAGS == FLAGS means nothing above decided a treatment;
	       fall back to checking mappability and the region's
	       defaults.  */
	    if (nflags == flags
		&& gimplify_omp_ctxp->target_firstprivatize_array_bases
		&& lang_hooks.decls.omp_privatize_by_reference (decl))
	      type = TREE_TYPE (type);
	    if (nflags == flags
		&& !lang_hooks.types.omp_mappable_type (type))
	      {
		error ("%qD referenced in target region does not have "
		       "a mappable type", decl);
		nflags |= GOVD_MAP | GOVD_EXPLICIT;
	      }
	    else if (nflags == flags)
	      {
		if ((ctx->region_type & ORT_ACC) != 0)
		  nflags = oacc_default_clause (ctx, decl, flags);
		else
		  nflags |= GOVD_MAP;
	      }
	  }
	found_outer:
	  omp_add_variable (ctx, decl, nflags);
	}
      else
	{
	  /* If nothing changed, there's nothing left to do.  */
	  if ((n->value & flags) == flags)
	    return ret;
	  flags |= n->value;
	  n->value = flags;
	}
      goto do_outer;
    }

  if (n == NULL)
    {
      /* Regions without their own data-sharing attributes just defer
	 to the enclosing context.  */
      if (ctx->region_type == ORT_WORKSHARE
	  || ctx->region_type == ORT_SIMD
	  || ctx->region_type == ORT_ACC
	  || (ctx->region_type & ORT_TARGET_DATA) != 0)
	goto do_outer;

      flags = omp_default_clause (ctx, decl, in_code, flags);

      if ((flags & GOVD_PRIVATE)
	  && lang_hooks.decls.omp_private_outer_ref (decl))
	flags |= GOVD_PRIVATE_OUTER_REF;

      omp_add_variable (ctx, decl, flags);

      shared = (flags & GOVD_SHARED) != 0;
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
      goto do_outer;
    }

  /* For a variable-sized DECL first seen here, also mark its pointer
     replacement variable (from DECL_VALUE_EXPR = *ptr) as seen.  */
  if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
      && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
      && DECL_SIZE (decl)
      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      splay_tree_node n2;
      tree t = DECL_VALUE_EXPR (decl);
      gcc_assert (TREE_CODE (t) == INDIRECT_REF);
      t = TREE_OPERAND (t, 0);
      gcc_assert (DECL_P (t));
      n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
      n2->value |= GOVD_SEEN;
    }

  shared = ((flags | n->value) & GOVD_SHARED) != 0;
  ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);

  /* If nothing changed, there's nothing left to do.  */
  if ((n->value & flags) == flags)
    return ret;
  flags |= n->value;
  n->value = flags;

 do_outer:
  /* If the variable is private in the current context, then we don't
     need to propagate anything to an outer context.  */
  if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
    return ret;
  if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
		| GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  /* Otherwise notify the enclosing context as well.  */
  if (ctx->outer_context
      && omp_notice_variable (ctx->outer_context, decl, in_code))
    return true;
  return ret;
}
6255
/* Verify that DECL is private within CTX.  If there's specific information
   to the contrary in the innermost scope, generate an error.

   SIMD selects the wording of the diagnostics: NOTE(review) — from the
   comparisons below, 0 appears to mean a non-simd worksharing loop,
   1 and 2 distinct simd variants (2 using the "predetermined linear"
   wording); confirm the exact encoding against the callers.

   Returns true when CTX (or the combined parallel wrapped around the
   innermost context) is the context that determines DECL's privacy.  */

static bool
omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
{
  splay_tree_node n;

  /* Only consult this context's table here; outer contexts are handled
     by the recursive call at the bottom.  */
  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL)
    {
      if (n->value & GOVD_SHARED)
	{
	  /* A shared iteration variable is an error, but only diagnose
	     it in the innermost (current) gimplification context.  */
	  if (ctx == gimplify_omp_ctxp)
	    {
	      if (simd)
		error ("iteration variable %qE is predetermined linear",
		       DECL_NAME (decl));
	      else
		error ("iteration variable %qE should be private",
		       DECL_NAME (decl));
	      /* Downgrade to private so the error is not repeated.  */
	      n->value = GOVD_PRIVATE;
	      return true;
	    }
	  else
	    return false;
	}
      else if ((n->value & GOVD_EXPLICIT) != 0
	       && (ctx == gimplify_omp_ctxp
		   || (ctx->region_type == ORT_COMBINED_PARALLEL
		       && gimplify_omp_ctxp->outer_context == ctx)))
	{
	  /* DECL appeared explicitly on a clause of this construct (or
	     of the combined parallel enclosing the innermost context);
	     diagnose clause kinds that conflict with its role as an
	     iteration variable.  The order of these checks mirrors the
	     precedence of the clause kinds.  */
	  if ((n->value & GOVD_FIRSTPRIVATE) != 0)
	    error ("iteration variable %qE should not be firstprivate",
		   DECL_NAME (decl));
	  else if ((n->value & GOVD_REDUCTION) != 0)
	    error ("iteration variable %qE should not be reduction",
		   DECL_NAME (decl));
	  else if (simd == 0 && (n->value & GOVD_LINEAR) != 0)
	    error ("iteration variable %qE should not be linear",
		   DECL_NAME (decl));
	  else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
	    error ("iteration variable %qE should not be lastprivate",
		   DECL_NAME (decl));
	  else if (simd && (n->value & GOVD_PRIVATE) != 0)
	    error ("iteration variable %qE should not be private",
		   DECL_NAME (decl));
	  else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
	    error ("iteration variable %qE is predetermined linear",
		   DECL_NAME (decl));
	}
      return (ctx == gimplify_omp_ctxp
	      || (ctx->region_type == ORT_COMBINED_PARALLEL
		  && gimplify_omp_ctxp->outer_context == ctx));
    }

  /* DECL is not mentioned in this context.  Only worksharing, simd and
     OpenACC regions defer to their enclosing context; other region
     kinds (parallel, target, ...) decide the answer themselves.  */
  if (ctx->region_type != ORT_WORKSHARE
      && ctx->region_type != ORT_SIMD
      && ctx->region_type != ORT_ACC)
    return false;
  else if (ctx->outer_context)
    return omp_is_private (ctx->outer_context, decl, simd);
  return false;
}
6320
07b7aade
JJ
/* Return true if DECL is private within a parallel region
   that binds to the current construct's context or in parallel
   region's REDUCTION clause.

   COPYPRIVATE selects the conservative answer for by-reference
   privatization when the walk reaches the outermost context: for a
   copyprivate check, references are assumed private; otherwise they
   are assumed shared.  */

static bool
omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
{
  splay_tree_node n;

  /* Walk outward from CTX through the enclosing contexts.  The loop
     condition at the bottom stops the walk once a context that is not
     a worksharing/simd/OpenACC region has been examined.  */
  do
    {
      ctx = ctx->outer_context;
      if (ctx == NULL)
	{
	  /* Ran out of contexts: fall back to scope-based reasoning.
	     Globals are shared.  */
	  if (is_global_var (decl))
	    return false;

	  /* References might be private, but might be shared too,
	     when checking for copyprivate, assume they might be
	     private, otherwise assume they might be shared.  */
	  if (copyprivate)
	    return true;

	  if (lang_hooks.decls.omp_privatize_by_reference (decl))
	    return false;

	  /* Treat C++ privatized non-static data members outside
	     of the privatization the same.  */
	  if (omp_member_access_dummy_var (decl))
	    return false;

	  return true;
	}

      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);

      /* Skip over target/target-data contexts in which DECL has no
	 data-sharing class; they do not settle the question.  Note this
	 bypasses the loop's bottom condition, continuing the walk
	 regardless of the region type.  */
      if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
	  && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
	continue;

      if (n != NULL)
	{
	  /* A GOVD_LOCAL member-access dummy var does not count as
	     private for this purpose.  */
	  if ((n->value & GOVD_LOCAL) != 0
	      && omp_member_access_dummy_var (decl))
	    return false;
	  /* Private iff not shared in the first context that mentions
	     DECL.  */
	  return (n->value & GOVD_SHARED) == 0;
	}
    }
  while (ctx->region_type == ORT_WORKSHARE
	 || ctx->region_type == ORT_SIMD
	 || ctx->region_type == ORT_ACC);
  return false;
}
6374
41b37d5e
JJ
6375/* Return true if the CTX is combined with distribute and thus
6376 lastprivate can't be supported. */
6377
6378static bool
6379omp_no_lastprivate (struct gimplify_omp_ctx *ctx)
6380{
6381 do
6382 {
6383 if (ctx->outer_context == NULL)
6384 return false;
6385 ctx = ctx->outer_context;
6386 switch (ctx->region_type)
6387 {
6388 case ORT_WORKSHARE:
6389 if (!ctx->combined_loop)
6390 return false;
6391 if (ctx->distribute)
e01d41e5 6392 return lang_GNU_Fortran ();
41b37d5e
JJ
6393 break;
6394 case ORT_COMBINED_PARALLEL:
6395 break;
6396 case ORT_COMBINED_TEAMS:
e01d41e5 6397 return lang_GNU_Fortran ();
41b37d5e
JJ
6398 default:
6399 return false;
6400 }
6401 }
6402 while (1);
6403}
6404
d9a6bd32
JJ
/* Callback for walk_tree to find a DECL_EXPR for the given DECL.

   DATA is the decl being searched for.  Returns the DECL_EXPR node
   (terminating the walk) when found, NULL_TREE otherwise.  */

static tree
find_decl_expr (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;

  /* Found the DECL_EXPR that declares DATA: return it to stop the
     walk.  */
  if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
    return t;

  /* Do not descend into types or declarations.  */
  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  return NULL_TREE;
}
6420
41dbbb37 6421/* Scan the OMP clauses in *LIST_P, installing mappings into a new
953ff289
DN
6422 and previous omp contexts. */
6423
6424static void
726a989a 6425gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
d9a6bd32
JJ
6426 enum omp_region_type region_type,
6427 enum tree_code code)
953ff289
DN
6428{
6429 struct gimplify_omp_ctx *ctx, *outer_ctx;
6430 tree c;
d9a6bd32 6431 hash_map<tree, tree> *struct_map_to_clause = NULL;
e01d41e5 6432 tree *prev_list_p = NULL;
953ff289 6433
a68ab351 6434 ctx = new_omp_context (region_type);
953ff289 6435 outer_ctx = ctx->outer_context;
d9a6bd32
JJ
6436 if (code == OMP_TARGET && !lang_GNU_Fortran ())
6437 {
6438 ctx->target_map_pointers_as_0len_arrays = true;
6439 /* FIXME: For Fortran we want to set this too, when
6440 the Fortran FE is updated to OpenMP 4.5. */
6441 ctx->target_map_scalars_firstprivate = true;
6442 }
6443 if (!lang_GNU_Fortran ())
6444 switch (code)
6445 {
6446 case OMP_TARGET:
6447 case OMP_TARGET_DATA:
6448 case OMP_TARGET_ENTER_DATA:
6449 case OMP_TARGET_EXIT_DATA:
37d5ad46 6450 case OACC_HOST_DATA:
d9a6bd32
JJ
6451 ctx->target_firstprivatize_array_bases = true;
6452 default:
6453 break;
6454 }
953ff289
DN
6455
6456 while ((c = *list_p) != NULL)
6457 {
953ff289
DN
6458 bool remove = false;
6459 bool notice_outer = true;
07b7aade 6460 const char *check_non_private = NULL;
953ff289
DN
6461 unsigned int flags;
6462 tree decl;
6463
aaf46ef9 6464 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
6465 {
6466 case OMP_CLAUSE_PRIVATE:
6467 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
a68ab351
JJ
6468 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
6469 {
6470 flags |= GOVD_PRIVATE_OUTER_REF;
6471 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
6472 }
6473 else
6474 notice_outer = false;
953ff289
DN
6475 goto do_add;
6476 case OMP_CLAUSE_SHARED:
6477 flags = GOVD_SHARED | GOVD_EXPLICIT;
6478 goto do_add;
6479 case OMP_CLAUSE_FIRSTPRIVATE:
6480 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
07b7aade 6481 check_non_private = "firstprivate";
953ff289
DN
6482 goto do_add;
6483 case OMP_CLAUSE_LASTPRIVATE:
6484 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
07b7aade 6485 check_non_private = "lastprivate";
41b37d5e
JJ
6486 decl = OMP_CLAUSE_DECL (c);
6487 if (omp_no_lastprivate (ctx))
6488 {
6489 notice_outer = false;
6490 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
6491 }
6492 else if (error_operand_p (decl))
6493 goto do_add;
6494 else if (outer_ctx
e01d41e5
JJ
6495 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
6496 || outer_ctx->region_type == ORT_COMBINED_TEAMS)
41b37d5e
JJ
6497 && splay_tree_lookup (outer_ctx->variables,
6498 (splay_tree_key) decl) == NULL)
e01d41e5
JJ
6499 {
6500 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
6501 if (outer_ctx->outer_context)
6502 omp_notice_variable (outer_ctx->outer_context, decl, true);
6503 }
d9a6bd32
JJ
6504 else if (outer_ctx
6505 && (outer_ctx->region_type & ORT_TASK) != 0
6506 && outer_ctx->combined_loop
6507 && splay_tree_lookup (outer_ctx->variables,
6508 (splay_tree_key) decl) == NULL)
e01d41e5
JJ
6509 {
6510 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
6511 if (outer_ctx->outer_context)
6512 omp_notice_variable (outer_ctx->outer_context, decl, true);
6513 }
41b37d5e 6514 else if (outer_ctx
182190f2
NS
6515 && (outer_ctx->region_type == ORT_WORKSHARE
6516 || outer_ctx->region_type == ORT_ACC)
41b37d5e
JJ
6517 && outer_ctx->combined_loop
6518 && splay_tree_lookup (outer_ctx->variables,
6519 (splay_tree_key) decl) == NULL
6520 && !omp_check_private (outer_ctx, decl, false))
6521 {
6522 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
6523 if (outer_ctx->outer_context
6524 && (outer_ctx->outer_context->region_type
6525 == ORT_COMBINED_PARALLEL)
6526 && splay_tree_lookup (outer_ctx->outer_context->variables,
6527 (splay_tree_key) decl) == NULL)
e01d41e5
JJ
6528 {
6529 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
6530 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
6531 if (octx->outer_context)
6532 omp_notice_variable (octx->outer_context, decl, true);
6533 }
6534 else if (outer_ctx->outer_context)
6535 omp_notice_variable (outer_ctx->outer_context, decl, true);
41b37d5e 6536 }
953ff289
DN
6537 goto do_add;
6538 case OMP_CLAUSE_REDUCTION:
6539 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
182190f2
NS
6540 /* OpenACC permits reductions on private variables. */
6541 if (!(region_type & ORT_ACC))
6542 check_non_private = "reduction";
d9a6bd32
JJ
6543 decl = OMP_CLAUSE_DECL (c);
6544 if (TREE_CODE (decl) == MEM_REF)
6545 {
6546 tree type = TREE_TYPE (decl);
6547 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
6548 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
6549 {
6550 remove = true;
6551 break;
6552 }
6553 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
6554 if (DECL_P (v))
6555 {
6556 omp_firstprivatize_variable (ctx, v);
6557 omp_notice_variable (ctx, v, true);
6558 }
6559 decl = TREE_OPERAND (decl, 0);
e01d41e5
JJ
6560 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
6561 {
6562 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
6563 NULL, is_gimple_val, fb_rvalue)
6564 == GS_ERROR)
6565 {
6566 remove = true;
6567 break;
6568 }
6569 v = TREE_OPERAND (decl, 1);
6570 if (DECL_P (v))
6571 {
6572 omp_firstprivatize_variable (ctx, v);
6573 omp_notice_variable (ctx, v, true);
6574 }
6575 decl = TREE_OPERAND (decl, 0);
6576 }
d9a6bd32
JJ
6577 if (TREE_CODE (decl) == ADDR_EXPR
6578 || TREE_CODE (decl) == INDIRECT_REF)
6579 decl = TREE_OPERAND (decl, 0);
6580 }
6581 goto do_add_decl;
acf0174b
JJ
6582 case OMP_CLAUSE_LINEAR:
6583 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
6584 is_gimple_val, fb_rvalue) == GS_ERROR)
6585 {
6586 remove = true;
6587 break;
6588 }
41b37d5e
JJ
6589 else
6590 {
d9a6bd32
JJ
6591 if (code == OMP_SIMD
6592 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6593 {
6594 struct gimplify_omp_ctx *octx = outer_ctx;
6595 if (octx
6596 && octx->region_type == ORT_WORKSHARE
6597 && octx->combined_loop
6598 && !octx->distribute)
6599 {
6600 if (octx->outer_context
6601 && (octx->outer_context->region_type
6602 == ORT_COMBINED_PARALLEL))
6603 octx = octx->outer_context->outer_context;
6604 else
6605 octx = octx->outer_context;
6606 }
6607 if (octx
6608 && octx->region_type == ORT_WORKSHARE
6609 && octx->combined_loop
6610 && octx->distribute
6611 && !lang_GNU_Fortran ())
6612 {
6613 error_at (OMP_CLAUSE_LOCATION (c),
6614 "%<linear%> clause for variable other than "
6615 "loop iterator specified on construct "
6616 "combined with %<distribute%>");
6617 remove = true;
6618 break;
6619 }
6620 }
41b37d5e
JJ
6621 /* For combined #pragma omp parallel for simd, need to put
6622 lastprivate and perhaps firstprivate too on the
6623 parallel. Similarly for #pragma omp for simd. */
6624 struct gimplify_omp_ctx *octx = outer_ctx;
6625 decl = NULL_TREE;
6626 if (omp_no_lastprivate (ctx))
6627 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
6628 do
6629 {
6630 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6631 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6632 break;
6633 decl = OMP_CLAUSE_DECL (c);
6634 if (error_operand_p (decl))
6635 {
6636 decl = NULL_TREE;
6637 break;
6638 }
d9a6bd32
JJ
6639 flags = GOVD_SEEN;
6640 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6641 flags |= GOVD_FIRSTPRIVATE;
6642 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6643 flags |= GOVD_LASTPRIVATE;
41b37d5e
JJ
6644 if (octx
6645 && octx->region_type == ORT_WORKSHARE
6646 && octx->combined_loop)
6647 {
6648 if (octx->outer_context
6649 && (octx->outer_context->region_type
e01d41e5 6650 == ORT_COMBINED_PARALLEL))
41b37d5e
JJ
6651 octx = octx->outer_context;
6652 else if (omp_check_private (octx, decl, false))
6653 break;
6654 }
d9a6bd32
JJ
6655 else if (octx
6656 && (octx->region_type & ORT_TASK) != 0
6657 && octx->combined_loop)
6658 ;
6659 else if (octx
6660 && octx->region_type == ORT_COMBINED_PARALLEL
6661 && ctx->region_type == ORT_WORKSHARE
6662 && octx == outer_ctx)
6663 flags = GOVD_SEEN | GOVD_SHARED;
e01d41e5
JJ
6664 else if (octx
6665 && octx->region_type == ORT_COMBINED_TEAMS)
6666 flags = GOVD_SEEN | GOVD_SHARED;
d9a6bd32
JJ
6667 else if (octx
6668 && octx->region_type == ORT_COMBINED_TARGET)
e01d41e5
JJ
6669 {
6670 flags &= ~GOVD_LASTPRIVATE;
6671 if (flags == GOVD_SEEN)
6672 break;
6673 }
41b37d5e
JJ
6674 else
6675 break;
d9a6bd32
JJ
6676 splay_tree_node on
6677 = splay_tree_lookup (octx->variables,
6678 (splay_tree_key) decl);
6679 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
2ee10f81
JJ
6680 {
6681 octx = NULL;
6682 break;
6683 }
41b37d5e
JJ
6684 omp_add_variable (octx, decl, flags);
6685 if (octx->outer_context == NULL)
6686 break;
6687 octx = octx->outer_context;
6688 }
6689 while (1);
6690 if (octx
6691 && decl
6692 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6693 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
6694 omp_notice_variable (octx, decl, true);
6695 }
acf0174b 6696 flags = GOVD_LINEAR | GOVD_EXPLICIT;
41b37d5e
JJ
6697 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6698 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6699 {
6700 notice_outer = false;
6701 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
6702 }
acf0174b
JJ
6703 goto do_add;
6704
6705 case OMP_CLAUSE_MAP:
b46ebd6c
JJ
6706 decl = OMP_CLAUSE_DECL (c);
6707 if (error_operand_p (decl))
d9a6bd32
JJ
6708 remove = true;
6709 switch (code)
b46ebd6c 6710 {
d9a6bd32
JJ
6711 case OMP_TARGET:
6712 break;
6713 case OMP_TARGET_DATA:
6714 case OMP_TARGET_ENTER_DATA:
6715 case OMP_TARGET_EXIT_DATA:
37d5ad46 6716 case OACC_HOST_DATA:
e01d41e5
JJ
6717 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
6718 || (OMP_CLAUSE_MAP_KIND (c)
6719 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
d9a6bd32
JJ
6720 /* For target {,enter ,exit }data only the array slice is
6721 mapped, but not the pointer to it. */
6722 remove = true;
6723 break;
6724 default:
b46ebd6c
JJ
6725 break;
6726 }
d9a6bd32
JJ
6727 if (remove)
6728 break;
37d5ad46
JB
6729 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
6730 {
6731 struct gimplify_omp_ctx *octx;
6732 for (octx = outer_ctx; octx; octx = octx->outer_context)
6733 {
6734 if (octx->region_type != ORT_ACC_HOST_DATA)
6735 break;
6736 splay_tree_node n2
6737 = splay_tree_lookup (octx->variables,
6738 (splay_tree_key) decl);
6739 if (n2)
6740 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
6741 "declared in enclosing %<host_data%> region",
6742 DECL_NAME (decl));
6743 }
6744 }
b46ebd6c
JJ
6745 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
6746 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
6747 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
6748 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
6749 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
acf0174b
JJ
6750 {
6751 remove = true;
6752 break;
6753 }
e01d41e5
JJ
6754 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
6755 || (OMP_CLAUSE_MAP_KIND (c)
6756 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
d9a6bd32
JJ
6757 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
6758 {
6759 OMP_CLAUSE_SIZE (c)
6760 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL);
6761 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
6762 GOVD_FIRSTPRIVATE | GOVD_SEEN);
6763 }
acf0174b
JJ
6764 if (!DECL_P (decl))
6765 {
d9a6bd32
JJ
6766 tree d = decl, *pd;
6767 if (TREE_CODE (d) == ARRAY_REF)
6768 {
6769 while (TREE_CODE (d) == ARRAY_REF)
6770 d = TREE_OPERAND (d, 0);
6771 if (TREE_CODE (d) == COMPONENT_REF
6772 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
6773 decl = d;
6774 }
6775 pd = &OMP_CLAUSE_DECL (c);
6776 if (d == decl
6777 && TREE_CODE (decl) == INDIRECT_REF
6778 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
6779 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
6780 == REFERENCE_TYPE))
6781 {
6782 pd = &TREE_OPERAND (decl, 0);
6783 decl = TREE_OPERAND (decl, 0);
6784 }
6785 if (TREE_CODE (decl) == COMPONENT_REF)
6786 {
6787 while (TREE_CODE (decl) == COMPONENT_REF)
6788 decl = TREE_OPERAND (decl, 0);
6789 }
6790 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
acf0174b
JJ
6791 == GS_ERROR)
6792 {
6793 remove = true;
6794 break;
6795 }
d9a6bd32
JJ
6796 if (DECL_P (decl))
6797 {
6798 if (error_operand_p (decl))
6799 {
6800 remove = true;
6801 break;
6802 }
6803
6804 if (TYPE_SIZE_UNIT (TREE_TYPE (decl)) == NULL
6805 || (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (decl)))
6806 != INTEGER_CST))
6807 {
6808 error_at (OMP_CLAUSE_LOCATION (c),
6809 "mapping field %qE of variable length "
6810 "structure", OMP_CLAUSE_DECL (c));
6811 remove = true;
6812 break;
6813 }
6814
e01d41e5
JJ
6815 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
6816 {
6817 /* Error recovery. */
6818 if (prev_list_p == NULL)
6819 {
6820 remove = true;
6821 break;
6822 }
6823 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
6824 {
6825 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
6826 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
6827 {
6828 remove = true;
6829 break;
6830 }
6831 }
6832 }
6833
d9a6bd32
JJ
6834 tree offset;
6835 HOST_WIDE_INT bitsize, bitpos;
6836 machine_mode mode;
ee45a32d 6837 int unsignedp, reversep, volatilep = 0;
d9a6bd32
JJ
6838 tree base = OMP_CLAUSE_DECL (c);
6839 while (TREE_CODE (base) == ARRAY_REF)
6840 base = TREE_OPERAND (base, 0);
6841 if (TREE_CODE (base) == INDIRECT_REF)
6842 base = TREE_OPERAND (base, 0);
6843 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
ee45a32d 6844 &mode, &unsignedp, &reversep,
d9a6bd32
JJ
6845 &volatilep, false);
6846 gcc_assert (base == decl
6847 && (offset == NULL_TREE
6848 || TREE_CODE (offset) == INTEGER_CST));
6849
6850 splay_tree_node n
6851 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6852 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
e01d41e5
JJ
6853 == GOMP_MAP_ALWAYS_POINTER);
6854 if (n == NULL || (n->value & GOVD_MAP) == 0)
d9a6bd32 6855 {
e01d41e5
JJ
6856 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
6857 OMP_CLAUSE_MAP);
6858 OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
6859 OMP_CLAUSE_DECL (l) = decl;
6860 OMP_CLAUSE_SIZE (l) = size_int (1);
6861 if (struct_map_to_clause == NULL)
6862 struct_map_to_clause = new hash_map<tree, tree>;
6863 struct_map_to_clause->put (decl, l);
d9a6bd32
JJ
6864 if (ptr)
6865 {
e01d41e5
JJ
6866 enum gomp_map_kind mkind
6867 = code == OMP_TARGET_EXIT_DATA
6868 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
d9a6bd32 6869 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
e01d41e5
JJ
6870 OMP_CLAUSE_MAP);
6871 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
6872 OMP_CLAUSE_DECL (c2)
6873 = unshare_expr (OMP_CLAUSE_DECL (c));
6874 OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
6875 OMP_CLAUSE_SIZE (c2)
6876 = TYPE_SIZE_UNIT (ptr_type_node);
6877 OMP_CLAUSE_CHAIN (l) = c2;
6878 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
6879 {
6880 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
6881 tree c3
6882 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
6883 OMP_CLAUSE_MAP);
6884 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
6885 OMP_CLAUSE_DECL (c3)
6886 = unshare_expr (OMP_CLAUSE_DECL (c4));
6887 OMP_CLAUSE_SIZE (c3)
6888 = TYPE_SIZE_UNIT (ptr_type_node);
6889 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
6890 OMP_CLAUSE_CHAIN (c2) = c3;
6891 }
6892 *prev_list_p = l;
6893 prev_list_p = NULL;
6894 }
6895 else
6896 {
6897 OMP_CLAUSE_CHAIN (l) = c;
6898 *list_p = l;
6899 list_p = &OMP_CLAUSE_CHAIN (l);
d9a6bd32 6900 }
d9a6bd32 6901 flags = GOVD_MAP | GOVD_EXPLICIT;
e01d41e5 6902 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
d9a6bd32
JJ
6903 flags |= GOVD_SEEN;
6904 goto do_add_decl;
6905 }
6906 else
6907 {
6908 tree *osc = struct_map_to_clause->get (decl);
e01d41e5
JJ
6909 tree *sc = NULL, *scp = NULL;
6910 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
d9a6bd32
JJ
6911 n->value |= GOVD_SEEN;
6912 offset_int o1, o2;
6913 if (offset)
6914 o1 = wi::to_offset (offset);
6915 else
6916 o1 = 0;
6917 if (bitpos)
6918 o1 = o1 + bitpos / BITS_PER_UNIT;
e01d41e5
JJ
6919 for (sc = &OMP_CLAUSE_CHAIN (*osc);
6920 *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
6921 if (ptr && sc == prev_list_p)
6922 break;
6923 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
6924 != COMPONENT_REF
6925 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
6926 != INDIRECT_REF)
6927 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
6928 != ARRAY_REF))
d9a6bd32
JJ
6929 break;
6930 else
6931 {
6932 tree offset2;
6933 HOST_WIDE_INT bitsize2, bitpos2;
6934 base = OMP_CLAUSE_DECL (*sc);
6935 if (TREE_CODE (base) == ARRAY_REF)
6936 {
6937 while (TREE_CODE (base) == ARRAY_REF)
6938 base = TREE_OPERAND (base, 0);
6939 if (TREE_CODE (base) != COMPONENT_REF
6940 || (TREE_CODE (TREE_TYPE (base))
6941 != ARRAY_TYPE))
6942 break;
6943 }
6944 else if (TREE_CODE (base) == INDIRECT_REF
6945 && (TREE_CODE (TREE_OPERAND (base, 0))
6946 == COMPONENT_REF)
6947 && (TREE_CODE (TREE_TYPE
6948 (TREE_OPERAND (base, 0)))
6949 == REFERENCE_TYPE))
6950 base = TREE_OPERAND (base, 0);
6951 base = get_inner_reference (base, &bitsize2,
6952 &bitpos2, &offset2,
6953 &mode, &unsignedp,
ee45a32d
EB
6954 &reversep, &volatilep,
6955 false);
d9a6bd32
JJ
6956 if (base != decl)
6957 break;
e01d41e5
JJ
6958 if (scp)
6959 continue;
d9a6bd32
JJ
6960 gcc_assert (offset == NULL_TREE
6961 || TREE_CODE (offset) == INTEGER_CST);
6962 tree d1 = OMP_CLAUSE_DECL (*sc);
6963 tree d2 = OMP_CLAUSE_DECL (c);
6964 while (TREE_CODE (d1) == ARRAY_REF)
6965 d1 = TREE_OPERAND (d1, 0);
6966 while (TREE_CODE (d2) == ARRAY_REF)
6967 d2 = TREE_OPERAND (d2, 0);
6968 if (TREE_CODE (d1) == INDIRECT_REF)
6969 d1 = TREE_OPERAND (d1, 0);
6970 if (TREE_CODE (d2) == INDIRECT_REF)
6971 d2 = TREE_OPERAND (d2, 0);
6972 while (TREE_CODE (d1) == COMPONENT_REF)
6973 if (TREE_CODE (d2) == COMPONENT_REF
6974 && TREE_OPERAND (d1, 1)
6975 == TREE_OPERAND (d2, 1))
6976 {
6977 d1 = TREE_OPERAND (d1, 0);
6978 d2 = TREE_OPERAND (d2, 0);
6979 }
6980 else
6981 break;
6982 if (d1 == d2)
6983 {
6984 error_at (OMP_CLAUSE_LOCATION (c),
6985 "%qE appears more than once in map "
6986 "clauses", OMP_CLAUSE_DECL (c));
6987 remove = true;
6988 break;
6989 }
6990 if (offset2)
6991 o2 = wi::to_offset (offset2);
6992 else
6993 o2 = 0;
6994 if (bitpos2)
6995 o2 = o2 + bitpos2 / BITS_PER_UNIT;
6996 if (wi::ltu_p (o1, o2)
6997 || (wi::eq_p (o1, o2) && bitpos < bitpos2))
e01d41e5
JJ
6998 {
6999 if (ptr)
7000 scp = sc;
7001 else
7002 break;
7003 }
d9a6bd32 7004 }
e01d41e5
JJ
7005 if (remove)
7006 break;
7007 OMP_CLAUSE_SIZE (*osc)
7008 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
7009 size_one_node);
d9a6bd32
JJ
7010 if (ptr)
7011 {
e01d41e5
JJ
7012 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7013 OMP_CLAUSE_MAP);
7014 tree cl = NULL_TREE;
7015 enum gomp_map_kind mkind
7016 = code == OMP_TARGET_EXIT_DATA
7017 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
7018 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7019 OMP_CLAUSE_DECL (c2)
7020 = unshare_expr (OMP_CLAUSE_DECL (c));
7021 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
7022 OMP_CLAUSE_SIZE (c2)
7023 = TYPE_SIZE_UNIT (ptr_type_node);
7024 cl = scp ? *prev_list_p : c2;
7025 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7026 {
7027 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
7028 tree c3
7029 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7030 OMP_CLAUSE_MAP);
7031 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
7032 OMP_CLAUSE_DECL (c3)
7033 = unshare_expr (OMP_CLAUSE_DECL (c4));
7034 OMP_CLAUSE_SIZE (c3)
7035 = TYPE_SIZE_UNIT (ptr_type_node);
7036 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
7037 if (!scp)
7038 OMP_CLAUSE_CHAIN (c2) = c3;
7039 else
7040 cl = c3;
7041 }
7042 if (scp)
7043 *scp = c2;
7044 if (sc == prev_list_p)
7045 {
7046 *sc = cl;
7047 prev_list_p = NULL;
7048 }
7049 else
7050 {
7051 *prev_list_p = OMP_CLAUSE_CHAIN (c);
7052 list_p = prev_list_p;
7053 prev_list_p = NULL;
7054 OMP_CLAUSE_CHAIN (c) = *sc;
7055 *sc = cl;
7056 continue;
7057 }
d9a6bd32 7058 }
e01d41e5 7059 else if (*sc != c)
d9a6bd32
JJ
7060 {
7061 *list_p = OMP_CLAUSE_CHAIN (c);
7062 OMP_CLAUSE_CHAIN (c) = *sc;
7063 *sc = c;
7064 continue;
7065 }
7066 }
7067 }
e01d41e5
JJ
7068 if (!remove
7069 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
7070 && OMP_CLAUSE_CHAIN (c)
7071 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
7072 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
7073 == GOMP_MAP_ALWAYS_POINTER))
7074 prev_list_p = list_p;
acf0174b
JJ
7075 break;
7076 }
7077 flags = GOVD_MAP | GOVD_EXPLICIT;
e01d41e5
JJ
7078 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
7079 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
7080 flags |= GOVD_MAP_ALWAYS_TO;
acf0174b
JJ
7081 goto do_add;
7082
7083 case OMP_CLAUSE_DEPEND:
d9a6bd32
JJ
7084 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
7085 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
7086 {
7087 /* Nothing to do. OMP_CLAUSE_DECL will be lowered in
7088 omp-low.c. */
7089 break;
7090 }
acf0174b
JJ
7091 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
7092 {
7093 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
7094 NULL, is_gimple_val, fb_rvalue);
7095 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
7096 }
7097 if (error_operand_p (OMP_CLAUSE_DECL (c)))
7098 {
7099 remove = true;
7100 break;
7101 }
7102 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
7103 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
7104 is_gimple_val, fb_rvalue) == GS_ERROR)
7105 {
7106 remove = true;
7107 break;
7108 }
7109 break;
7110
7111 case OMP_CLAUSE_TO:
7112 case OMP_CLAUSE_FROM:
41dbbb37 7113 case OMP_CLAUSE__CACHE_:
b46ebd6c
JJ
7114 decl = OMP_CLAUSE_DECL (c);
7115 if (error_operand_p (decl))
acf0174b
JJ
7116 {
7117 remove = true;
7118 break;
7119 }
b46ebd6c
JJ
7120 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
7121 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
7122 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
7123 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
7124 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
acf0174b
JJ
7125 {
7126 remove = true;
7127 break;
7128 }
7129 if (!DECL_P (decl))
7130 {
7131 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
7132 NULL, is_gimple_lvalue, fb_lvalue)
7133 == GS_ERROR)
7134 {
7135 remove = true;
7136 break;
7137 }
7138 break;
7139 }
7140 goto do_notice;
953ff289 7141
d9a6bd32
JJ
7142 case OMP_CLAUSE_USE_DEVICE_PTR:
7143 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
7144 goto do_add;
7145 case OMP_CLAUSE_IS_DEVICE_PTR:
7146 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
7147 goto do_add;
7148
953ff289
DN
7149 do_add:
7150 decl = OMP_CLAUSE_DECL (c);
d9a6bd32 7151 do_add_decl:
b504a918 7152 if (error_operand_p (decl))
953ff289
DN
7153 {
7154 remove = true;
7155 break;
7156 }
d9a6bd32
JJ
7157 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
7158 {
7159 tree t = omp_member_access_dummy_var (decl);
7160 if (t)
7161 {
7162 tree v = DECL_VALUE_EXPR (decl);
7163 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
7164 if (outer_ctx)
7165 omp_notice_variable (outer_ctx, t, true);
7166 }
7167 }
953ff289 7168 omp_add_variable (ctx, decl, flags);
693d710f 7169 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
953ff289
DN
7170 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7171 {
7172 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
693d710f 7173 GOVD_LOCAL | GOVD_SEEN);
d9a6bd32
JJ
7174 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
7175 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
7176 find_decl_expr,
7177 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
7178 NULL) == NULL_TREE)
7179 omp_add_variable (ctx,
7180 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
7181 GOVD_LOCAL | GOVD_SEEN);
953ff289 7182 gimplify_omp_ctxp = ctx;
45852dcc 7183 push_gimplify_context ();
726a989a 7184
355a7673
MM
7185 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
7186 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
726a989a
RB
7187
7188 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
7189 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
7190 pop_gimplify_context
7191 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
45852dcc 7192 push_gimplify_context ();
726a989a
RB
7193 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
7194 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
b8698a0f 7195 pop_gimplify_context
726a989a
RB
7196 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
7197 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
7198 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
7199
953ff289
DN
7200 gimplify_omp_ctxp = outer_ctx;
7201 }
a68ab351
JJ
7202 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7203 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
7204 {
7205 gimplify_omp_ctxp = ctx;
45852dcc 7206 push_gimplify_context ();
a68ab351
JJ
7207 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
7208 {
7209 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
7210 NULL, NULL);
7211 TREE_SIDE_EFFECTS (bind) = 1;
7212 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
7213 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
7214 }
726a989a
RB
7215 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
7216 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
7217 pop_gimplify_context
7218 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
7219 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
7220
dd2fc525
JJ
7221 gimplify_omp_ctxp = outer_ctx;
7222 }
7223 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7224 && OMP_CLAUSE_LINEAR_STMT (c))
7225 {
7226 gimplify_omp_ctxp = ctx;
7227 push_gimplify_context ();
7228 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
7229 {
7230 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
7231 NULL, NULL);
7232 TREE_SIDE_EFFECTS (bind) = 1;
7233 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
7234 OMP_CLAUSE_LINEAR_STMT (c) = bind;
7235 }
7236 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
7237 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
7238 pop_gimplify_context
7239 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
7240 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
7241
a68ab351
JJ
7242 gimplify_omp_ctxp = outer_ctx;
7243 }
953ff289
DN
7244 if (notice_outer)
7245 goto do_notice;
7246 break;
7247
7248 case OMP_CLAUSE_COPYIN:
7249 case OMP_CLAUSE_COPYPRIVATE:
7250 decl = OMP_CLAUSE_DECL (c);
b504a918 7251 if (error_operand_p (decl))
953ff289
DN
7252 {
7253 remove = true;
7254 break;
7255 }
cab37c89
JJ
7256 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
7257 && !remove
7258 && !omp_check_private (ctx, decl, true))
7259 {
7260 remove = true;
7261 if (is_global_var (decl))
7262 {
7263 if (DECL_THREAD_LOCAL_P (decl))
7264 remove = false;
7265 else if (DECL_HAS_VALUE_EXPR_P (decl))
7266 {
7267 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7268
7269 if (value
7270 && DECL_P (value)
7271 && DECL_THREAD_LOCAL_P (value))
7272 remove = false;
7273 }
7274 }
7275 if (remove)
7276 error_at (OMP_CLAUSE_LOCATION (c),
7277 "copyprivate variable %qE is not threadprivate"
7278 " or private in outer context", DECL_NAME (decl));
7279 }
953ff289
DN
7280 do_notice:
7281 if (outer_ctx)
7282 omp_notice_variable (outer_ctx, decl, true);
07b7aade 7283 if (check_non_private
a68ab351 7284 && region_type == ORT_WORKSHARE
d9a6bd32
JJ
7285 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7286 || decl == OMP_CLAUSE_DECL (c)
7287 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
7288 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
e01d41e5
JJ
7289 == ADDR_EXPR
7290 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
7291 == POINTER_PLUS_EXPR
7292 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
7293 (OMP_CLAUSE_DECL (c), 0), 0))
7294 == ADDR_EXPR)))))
cab37c89 7295 && omp_check_private (ctx, decl, false))
07b7aade 7296 {
4f1e4960
JM
7297 error ("%s variable %qE is private in outer context",
7298 check_non_private, DECL_NAME (decl));
07b7aade
JJ
7299 remove = true;
7300 }
953ff289
DN
7301 break;
7302
953ff289 7303 case OMP_CLAUSE_IF:
d9a6bd32
JJ
7304 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
7305 && OMP_CLAUSE_IF_MODIFIER (c) != code)
7306 {
7307 const char *p[2];
7308 for (int i = 0; i < 2; i++)
7309 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
7310 {
7311 case OMP_PARALLEL: p[i] = "parallel"; break;
7312 case OMP_TASK: p[i] = "task"; break;
7313 case OMP_TASKLOOP: p[i] = "taskloop"; break;
7314 case OMP_TARGET_DATA: p[i] = "target data"; break;
7315 case OMP_TARGET: p[i] = "target"; break;
7316 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
7317 case OMP_TARGET_ENTER_DATA:
7318 p[i] = "target enter data"; break;
7319 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
7320 default: gcc_unreachable ();
7321 }
7322 error_at (OMP_CLAUSE_LOCATION (c),
7323 "expected %qs %<if%> clause modifier rather than %qs",
7324 p[0], p[1]);
7325 remove = true;
7326 }
7327 /* Fall through. */
7328
7329 case OMP_CLAUSE_FINAL:
d568d1a8
RS
7330 OMP_CLAUSE_OPERAND (c, 0)
7331 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
7332 /* Fall through. */
7333
7334 case OMP_CLAUSE_SCHEDULE:
953ff289 7335 case OMP_CLAUSE_NUM_THREADS:
acf0174b
JJ
7336 case OMP_CLAUSE_NUM_TEAMS:
7337 case OMP_CLAUSE_THREAD_LIMIT:
7338 case OMP_CLAUSE_DIST_SCHEDULE:
7339 case OMP_CLAUSE_DEVICE:
d9a6bd32
JJ
7340 case OMP_CLAUSE_PRIORITY:
7341 case OMP_CLAUSE_GRAINSIZE:
7342 case OMP_CLAUSE_NUM_TASKS:
7343 case OMP_CLAUSE_HINT:
9a771876 7344 case OMP_CLAUSE__CILK_FOR_COUNT_:
41dbbb37
TS
7345 case OMP_CLAUSE_ASYNC:
7346 case OMP_CLAUSE_WAIT:
7347 case OMP_CLAUSE_NUM_GANGS:
7348 case OMP_CLAUSE_NUM_WORKERS:
7349 case OMP_CLAUSE_VECTOR_LENGTH:
41dbbb37
TS
7350 case OMP_CLAUSE_WORKER:
7351 case OMP_CLAUSE_VECTOR:
726a989a
RB
7352 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
7353 is_gimple_val, fb_rvalue) == GS_ERROR)
acf0174b 7354 remove = true;
d9a6bd32
JJ
7355 break;
7356
7357 case OMP_CLAUSE_GANG:
7358 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
7359 is_gimple_val, fb_rvalue) == GS_ERROR)
7360 remove = true;
7361 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
7362 is_gimple_val, fb_rvalue) == GS_ERROR)
41dbbb37
TS
7363 remove = true;
7364 break;
7365
7a5e4956
CP
7366 case OMP_CLAUSE_TILE:
7367 for (tree list = OMP_CLAUSE_TILE_LIST (c); !remove && list;
7368 list = TREE_CHAIN (list))
7369 {
7370 if (gimplify_expr (&TREE_VALUE (list), pre_p, NULL,
7371 is_gimple_val, fb_rvalue) == GS_ERROR)
7372 remove = true;
7373 }
7374 break;
7375
41dbbb37 7376 case OMP_CLAUSE_DEVICE_RESIDENT:
41dbbb37 7377 remove = true;
953ff289
DN
7378 break;
7379
7380 case OMP_CLAUSE_NOWAIT:
7381 case OMP_CLAUSE_ORDERED:
a68ab351
JJ
7382 case OMP_CLAUSE_UNTIED:
7383 case OMP_CLAUSE_COLLAPSE:
41dbbb37
TS
7384 case OMP_CLAUSE_AUTO:
7385 case OMP_CLAUSE_SEQ:
7a5e4956 7386 case OMP_CLAUSE_INDEPENDENT:
20906c66 7387 case OMP_CLAUSE_MERGEABLE:
acf0174b 7388 case OMP_CLAUSE_PROC_BIND:
74bf76ed 7389 case OMP_CLAUSE_SAFELEN:
d9a6bd32
JJ
7390 case OMP_CLAUSE_SIMDLEN:
7391 case OMP_CLAUSE_NOGROUP:
7392 case OMP_CLAUSE_THREADS:
7393 case OMP_CLAUSE_SIMD:
7394 break;
7395
7396 case OMP_CLAUSE_DEFAULTMAP:
7397 ctx->target_map_scalars_firstprivate = false;
953ff289
DN
7398 break;
7399
acf0174b
JJ
7400 case OMP_CLAUSE_ALIGNED:
7401 decl = OMP_CLAUSE_DECL (c);
7402 if (error_operand_p (decl))
7403 {
7404 remove = true;
7405 break;
7406 }
b46ebd6c
JJ
7407 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
7408 is_gimple_val, fb_rvalue) == GS_ERROR)
7409 {
7410 remove = true;
7411 break;
7412 }
acf0174b
JJ
7413 if (!is_global_var (decl)
7414 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
7415 omp_add_variable (ctx, decl, GOVD_ALIGNED);
7416 break;
7417
953ff289
DN
7418 case OMP_CLAUSE_DEFAULT:
7419 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
7420 break;
7421
7422 default:
7423 gcc_unreachable ();
7424 }
7425
7426 if (remove)
7427 *list_p = OMP_CLAUSE_CHAIN (c);
7428 else
7429 list_p = &OMP_CLAUSE_CHAIN (c);
7430 }
7431
7432 gimplify_omp_ctxp = ctx;
d9a6bd32
JJ
7433 if (struct_map_to_clause)
7434 delete struct_map_to_clause;
953ff289
DN
7435}
7436
1a80d6b8
JJ
7437/* Return true if DECL is a candidate for shared to firstprivate
7438 optimization. We only consider non-addressable scalars, not
7439 too big, and not references. */
7440
7441static bool
7442omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
7443{
7444 if (TREE_ADDRESSABLE (decl))
7445 return false;
7446 tree type = TREE_TYPE (decl);
7447 if (!is_gimple_reg_type (type)
7448 || TREE_CODE (type) == REFERENCE_TYPE
7449 || TREE_ADDRESSABLE (type))
7450 return false;
7451 /* Don't optimize too large decls, as each thread/task will have
7452 its own. */
7453 HOST_WIDE_INT len = int_size_in_bytes (type);
7454 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
7455 return false;
7456 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7457 return false;
7458 return true;
7459}
7460
7461/* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
7462 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
7463 GOVD_WRITTEN in outer contexts. */
7464
7465static void
7466omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
7467{
7468 for (; ctx; ctx = ctx->outer_context)
7469 {
7470 splay_tree_node n = splay_tree_lookup (ctx->variables,
7471 (splay_tree_key) decl);
7472 if (n == NULL)
7473 continue;
7474 else if (n->value & GOVD_SHARED)
7475 {
7476 n->value |= GOVD_WRITTEN;
7477 return;
7478 }
7479 else if (n->value & GOVD_DATA_SHARE_CLASS)
7480 return;
7481 }
7482}
7483
7484/* Helper callback for walk_gimple_seq to discover possible stores
7485 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
7486 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
7487 for those. */
7488
7489static tree
7490omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
7491{
7492 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
7493
7494 *walk_subtrees = 0;
7495 if (!wi->is_lhs)
7496 return NULL_TREE;
7497
7498 tree op = *tp;
7499 do
7500 {
7501 if (handled_component_p (op))
7502 op = TREE_OPERAND (op, 0);
7503 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
7504 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
7505 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
7506 else
7507 break;
7508 }
7509 while (1);
7510 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
7511 return NULL_TREE;
7512
7513 omp_mark_stores (gimplify_omp_ctxp, op);
7514 return NULL_TREE;
7515}
7516
7517/* Helper callback for walk_gimple_seq to discover possible stores
7518 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
7519 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
7520 for those. */
7521
7522static tree
7523omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
7524 bool *handled_ops_p,
7525 struct walk_stmt_info *wi)
7526{
7527 gimple *stmt = gsi_stmt (*gsi_p);
7528 switch (gimple_code (stmt))
7529 {
7530 /* Don't recurse on OpenMP constructs for which
7531 gimplify_adjust_omp_clauses already handled the bodies,
7532 except handle gimple_omp_for_pre_body. */
7533 case GIMPLE_OMP_FOR:
7534 *handled_ops_p = true;
7535 if (gimple_omp_for_pre_body (stmt))
7536 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
7537 omp_find_stores_stmt, omp_find_stores_op, wi);
7538 break;
7539 case GIMPLE_OMP_PARALLEL:
7540 case GIMPLE_OMP_TASK:
7541 case GIMPLE_OMP_SECTIONS:
7542 case GIMPLE_OMP_SINGLE:
7543 case GIMPLE_OMP_TARGET:
7544 case GIMPLE_OMP_TEAMS:
7545 case GIMPLE_OMP_CRITICAL:
7546 *handled_ops_p = true;
7547 break;
7548 default:
7549 break;
7550 }
7551 return NULL_TREE;
7552}
7553
f014c653
JJ
7554struct gimplify_adjust_omp_clauses_data
7555{
7556 tree *list_p;
7557 gimple_seq *pre_p;
7558};
7559
953ff289
DN
7560/* For all variables that were not actually used within the context,
7561 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
7562
7563static int
7564gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
7565{
f014c653
JJ
7566 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
7567 gimple_seq *pre_p
7568 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
953ff289
DN
7569 tree decl = (tree) n->key;
7570 unsigned flags = n->value;
aaf46ef9 7571 enum omp_clause_code code;
953ff289
DN
7572 tree clause;
7573 bool private_debug;
7574
7575 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
7576 return 0;
7577 if ((flags & GOVD_SEEN) == 0)
7578 return 0;
7579 if (flags & GOVD_DEBUG_PRIVATE)
7580 {
7581 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
7582 private_debug = true;
7583 }
acf0174b
JJ
7584 else if (flags & GOVD_MAP)
7585 private_debug = false;
953ff289
DN
7586 else
7587 private_debug
7588 = lang_hooks.decls.omp_private_debug_clause (decl,
7589 !!(flags & GOVD_SHARED));
7590 if (private_debug)
7591 code = OMP_CLAUSE_PRIVATE;
acf0174b
JJ
7592 else if (flags & GOVD_MAP)
7593 code = OMP_CLAUSE_MAP;
953ff289
DN
7594 else if (flags & GOVD_SHARED)
7595 {
7596 if (is_global_var (decl))
64964499
JJ
7597 {
7598 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
7599 while (ctx != NULL)
7600 {
7601 splay_tree_node on
7602 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7603 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
74bf76ed 7604 | GOVD_PRIVATE | GOVD_REDUCTION
7de20fbd 7605 | GOVD_LINEAR | GOVD_MAP)) != 0)
64964499
JJ
7606 break;
7607 ctx = ctx->outer_context;
7608 }
7609 if (ctx == NULL)
7610 return 0;
7611 }
953ff289
DN
7612 code = OMP_CLAUSE_SHARED;
7613 }
7614 else if (flags & GOVD_PRIVATE)
7615 code = OMP_CLAUSE_PRIVATE;
7616 else if (flags & GOVD_FIRSTPRIVATE)
7617 code = OMP_CLAUSE_FIRSTPRIVATE;
74bf76ed
JJ
7618 else if (flags & GOVD_LASTPRIVATE)
7619 code = OMP_CLAUSE_LASTPRIVATE;
acf0174b
JJ
7620 else if (flags & GOVD_ALIGNED)
7621 return 0;
953ff289
DN
7622 else
7623 gcc_unreachable ();
7624
1a80d6b8
JJ
7625 if (((flags & GOVD_LASTPRIVATE)
7626 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
7627 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
7628 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
7629
c2255bc4 7630 clause = build_omp_clause (input_location, code);
aaf46ef9 7631 OMP_CLAUSE_DECL (clause) = decl;
953ff289
DN
7632 OMP_CLAUSE_CHAIN (clause) = *list_p;
7633 if (private_debug)
7634 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
a68ab351
JJ
7635 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
7636 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
1a80d6b8
JJ
7637 else if (code == OMP_CLAUSE_SHARED
7638 && (flags & GOVD_WRITTEN) == 0
7639 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
7640 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
d9a6bd32
JJ
7641 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
7642 {
7643 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
7644 OMP_CLAUSE_DECL (nc) = decl;
7645 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7646 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
7647 OMP_CLAUSE_DECL (clause)
7648 = build_simple_mem_ref_loc (input_location, decl);
7649 OMP_CLAUSE_DECL (clause)
7650 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
7651 build_int_cst (build_pointer_type (char_type_node), 0));
7652 OMP_CLAUSE_SIZE (clause) = size_zero_node;
7653 OMP_CLAUSE_SIZE (nc) = size_zero_node;
7654 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
7655 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
7656 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
7657 OMP_CLAUSE_CHAIN (nc) = *list_p;
7658 OMP_CLAUSE_CHAIN (clause) = nc;
7659 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7660 gimplify_omp_ctxp = ctx->outer_context;
7661 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
7662 pre_p, NULL, is_gimple_val, fb_rvalue);
7663 gimplify_omp_ctxp = ctx;
7664 }
acf0174b
JJ
7665 else if (code == OMP_CLAUSE_MAP)
7666 {
db0f1c7a
TV
7667 int kind = (flags & GOVD_MAP_TO_ONLY
7668 ? GOMP_MAP_TO
7669 : GOMP_MAP_TOFROM);
7670 if (flags & GOVD_MAP_FORCE)
7671 kind |= GOMP_MAP_FLAG_FORCE;
7672 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
acf0174b
JJ
7673 if (DECL_SIZE (decl)
7674 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7675 {
7676 tree decl2 = DECL_VALUE_EXPR (decl);
7677 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
7678 decl2 = TREE_OPERAND (decl2, 0);
7679 gcc_assert (DECL_P (decl2));
7680 tree mem = build_simple_mem_ref (decl2);
7681 OMP_CLAUSE_DECL (clause) = mem;
7682 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
7683 if (gimplify_omp_ctxp->outer_context)
7684 {
7685 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
7686 omp_notice_variable (ctx, decl2, true);
7687 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
7688 }
7689 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
7690 OMP_CLAUSE_MAP);
7691 OMP_CLAUSE_DECL (nc) = decl;
7692 OMP_CLAUSE_SIZE (nc) = size_zero_node;
d9a6bd32
JJ
7693 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
7694 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
7695 else
7696 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
acf0174b
JJ
7697 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
7698 OMP_CLAUSE_CHAIN (clause) = nc;
7699 }
e01d41e5
JJ
7700 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
7701 && lang_hooks.decls.omp_privatize_by_reference (decl))
7702 {
7703 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
7704 OMP_CLAUSE_SIZE (clause)
7705 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
7706 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7707 gimplify_omp_ctxp = ctx->outer_context;
7708 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
7709 pre_p, NULL, is_gimple_val, fb_rvalue);
7710 gimplify_omp_ctxp = ctx;
7711 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
7712 OMP_CLAUSE_MAP);
7713 OMP_CLAUSE_DECL (nc) = decl;
7714 OMP_CLAUSE_SIZE (nc) = size_zero_node;
7715 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
7716 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
7717 OMP_CLAUSE_CHAIN (clause) = nc;
7718 }
b46ebd6c
JJ
7719 else
7720 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
acf0174b 7721 }
95782571
JJ
7722 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
7723 {
7724 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
7725 OMP_CLAUSE_DECL (nc) = decl;
7726 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
7727 OMP_CLAUSE_CHAIN (nc) = *list_p;
7728 OMP_CLAUSE_CHAIN (clause) = nc;
f014c653
JJ
7729 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7730 gimplify_omp_ctxp = ctx->outer_context;
7731 lang_hooks.decls.omp_finish_clause (nc, pre_p);
7732 gimplify_omp_ctxp = ctx;
95782571 7733 }
953ff289 7734 *list_p = clause;
f014c653
JJ
7735 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7736 gimplify_omp_ctxp = ctx->outer_context;
7737 lang_hooks.decls.omp_finish_clause (clause, pre_p);
7738 gimplify_omp_ctxp = ctx;
953ff289
DN
7739 return 0;
7740}
7741
7742static void
1a80d6b8 7743gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
d9a6bd32 7744 enum tree_code code)
953ff289
DN
7745{
7746 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7747 tree c, decl;
7748
1a80d6b8
JJ
7749 if (body)
7750 {
7751 struct gimplify_omp_ctx *octx;
7752 for (octx = ctx; octx; octx = octx->outer_context)
7753 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
7754 break;
7755 if (octx)
7756 {
7757 struct walk_stmt_info wi;
7758 memset (&wi, 0, sizeof (wi));
7759 walk_gimple_seq (body, omp_find_stores_stmt,
7760 omp_find_stores_op, &wi);
7761 }
7762 }
953ff289
DN
7763 while ((c = *list_p) != NULL)
7764 {
7765 splay_tree_node n;
7766 bool remove = false;
7767
aaf46ef9 7768 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
7769 {
7770 case OMP_CLAUSE_PRIVATE:
7771 case OMP_CLAUSE_SHARED:
7772 case OMP_CLAUSE_FIRSTPRIVATE:
74bf76ed 7773 case OMP_CLAUSE_LINEAR:
953ff289
DN
7774 decl = OMP_CLAUSE_DECL (c);
7775 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7776 remove = !(n->value & GOVD_SEEN);
7777 if (! remove)
7778 {
aaf46ef9 7779 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
953ff289
DN
7780 if ((n->value & GOVD_DEBUG_PRIVATE)
7781 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
7782 {
7783 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
7784 || ((n->value & GOVD_DATA_SHARE_CLASS)
7785 == GOVD_PRIVATE));
aaf46ef9 7786 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
953ff289
DN
7787 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
7788 }
1a80d6b8
JJ
7789 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
7790 && (n->value & GOVD_WRITTEN) == 0
7791 && DECL_P (decl)
7792 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
7793 OMP_CLAUSE_SHARED_READONLY (c) = 1;
7794 else if (DECL_P (decl)
7795 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
7796 && (n->value & GOVD_WRITTEN) != 1)
7797 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7798 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7799 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
7800 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
953ff289
DN
7801 }
7802 break;
7803
7804 case OMP_CLAUSE_LASTPRIVATE:
7805 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
7806 accurately reflect the presence of a FIRSTPRIVATE clause. */
7807 decl = OMP_CLAUSE_DECL (c);
7808 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7809 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
7810 = (n->value & GOVD_FIRSTPRIVATE) != 0;
41b37d5e
JJ
7811 if (omp_no_lastprivate (ctx))
7812 {
7813 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
7814 remove = true;
7815 else
7816 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_PRIVATE;
7817 }
e01d41e5
JJ
7818 else if (code == OMP_DISTRIBUTE
7819 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
7820 {
7821 remove = true;
7822 error_at (OMP_CLAUSE_LOCATION (c),
7823 "same variable used in %<firstprivate%> and "
7824 "%<lastprivate%> clauses on %<distribute%> "
7825 "construct");
7826 }
1a80d6b8
JJ
7827 if (!remove
7828 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7829 && DECL_P (decl)
7830 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
7831 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
953ff289 7832 break;
b8698a0f 7833
acf0174b
JJ
7834 case OMP_CLAUSE_ALIGNED:
7835 decl = OMP_CLAUSE_DECL (c);
7836 if (!is_global_var (decl))
7837 {
7838 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7839 remove = n == NULL || !(n->value & GOVD_SEEN);
7840 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
7841 {
7842 struct gimplify_omp_ctx *octx;
7843 if (n != NULL
7844 && (n->value & (GOVD_DATA_SHARE_CLASS
7845 & ~GOVD_FIRSTPRIVATE)))
7846 remove = true;
7847 else
7848 for (octx = ctx->outer_context; octx;
7849 octx = octx->outer_context)
7850 {
7851 n = splay_tree_lookup (octx->variables,
7852 (splay_tree_key) decl);
7853 if (n == NULL)
7854 continue;
7855 if (n->value & GOVD_LOCAL)
7856 break;
7857 /* We have to avoid assigning a shared variable
7858 to itself when trying to add
7859 __builtin_assume_aligned. */
7860 if (n->value & GOVD_SHARED)
7861 {
7862 remove = true;
7863 break;
7864 }
7865 }
7866 }
7867 }
7868 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
7869 {
7870 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7871 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7872 remove = true;
7873 }
7874 break;
7875
7876 case OMP_CLAUSE_MAP:
e01d41e5
JJ
7877 if (code == OMP_TARGET_EXIT_DATA
7878 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
7879 {
7880 remove = true;
7881 break;
7882 }
acf0174b
JJ
7883 decl = OMP_CLAUSE_DECL (c);
7884 if (!DECL_P (decl))
d9a6bd32
JJ
7885 {
7886 if ((ctx->region_type & ORT_TARGET) != 0
7887 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
7888 {
7889 if (TREE_CODE (decl) == INDIRECT_REF
7890 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
7891 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7892 == REFERENCE_TYPE))
7893 decl = TREE_OPERAND (decl, 0);
7894 if (TREE_CODE (decl) == COMPONENT_REF)
7895 {
7896 while (TREE_CODE (decl) == COMPONENT_REF)
7897 decl = TREE_OPERAND (decl, 0);
7898 if (DECL_P (decl))
7899 {
7900 n = splay_tree_lookup (ctx->variables,
7901 (splay_tree_key) decl);
7902 if (!(n->value & GOVD_SEEN))
7903 remove = true;
7904 }
7905 }
7906 }
7907 break;
7908 }
acf0174b 7909 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
d9a6bd32
JJ
7910 if ((ctx->region_type & ORT_TARGET) != 0
7911 && !(n->value & GOVD_SEEN)
4a38b02b
IV
7912 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
7913 && !lookup_attribute ("omp declare target link",
7914 DECL_ATTRIBUTES (decl)))
d9a6bd32
JJ
7915 {
7916 remove = true;
7917 /* For struct element mapping, if struct is never referenced
7918 in target block and none of the mapping has always modifier,
7919 remove all the struct element mappings, which immediately
7920 follow the GOMP_MAP_STRUCT map clause. */
7921 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
7922 {
7923 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
7924 while (cnt--)
7925 OMP_CLAUSE_CHAIN (c)
7926 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
7927 }
7928 }
7929 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
7930 && code == OMP_TARGET_EXIT_DATA)
acf0174b
JJ
7931 remove = true;
7932 else if (DECL_SIZE (decl)
7933 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
d9a6bd32 7934 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
e01d41e5
JJ
7935 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
7936 && (OMP_CLAUSE_MAP_KIND (c)
7937 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
acf0174b 7938 {
41dbbb37
TS
7939 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
7940 for these, TREE_CODE (DECL_SIZE (decl)) will always be
7941 INTEGER_CST. */
7942 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
7943
acf0174b
JJ
7944 tree decl2 = DECL_VALUE_EXPR (decl);
7945 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
7946 decl2 = TREE_OPERAND (decl2, 0);
7947 gcc_assert (DECL_P (decl2));
7948 tree mem = build_simple_mem_ref (decl2);
7949 OMP_CLAUSE_DECL (c) = mem;
7950 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
7951 if (ctx->outer_context)
7952 {
7953 omp_notice_variable (ctx->outer_context, decl2, true);
7954 omp_notice_variable (ctx->outer_context,
7955 OMP_CLAUSE_SIZE (c), true);
7956 }
d9a6bd32
JJ
7957 if (((ctx->region_type & ORT_TARGET) != 0
7958 || !ctx->target_firstprivatize_array_bases)
7959 && ((n->value & GOVD_SEEN) == 0
7960 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
7961 {
7962 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7963 OMP_CLAUSE_MAP);
7964 OMP_CLAUSE_DECL (nc) = decl;
7965 OMP_CLAUSE_SIZE (nc) = size_zero_node;
7966 if (ctx->target_firstprivatize_array_bases)
7967 OMP_CLAUSE_SET_MAP_KIND (nc,
7968 GOMP_MAP_FIRSTPRIVATE_POINTER);
7969 else
7970 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
7971 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
7972 OMP_CLAUSE_CHAIN (c) = nc;
7973 c = nc;
7974 }
7975 }
7976 else
7977 {
7978 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
7979 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
e01d41e5
JJ
7980 gcc_assert ((n->value & GOVD_SEEN) == 0
7981 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
7982 == 0));
acf0174b
JJ
7983 }
7984 break;
7985
7986 case OMP_CLAUSE_TO:
7987 case OMP_CLAUSE_FROM:
41dbbb37 7988 case OMP_CLAUSE__CACHE_:
acf0174b
JJ
7989 decl = OMP_CLAUSE_DECL (c);
7990 if (!DECL_P (decl))
7991 break;
7992 if (DECL_SIZE (decl)
7993 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7994 {
7995 tree decl2 = DECL_VALUE_EXPR (decl);
7996 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
7997 decl2 = TREE_OPERAND (decl2, 0);
7998 gcc_assert (DECL_P (decl2));
7999 tree mem = build_simple_mem_ref (decl2);
8000 OMP_CLAUSE_DECL (c) = mem;
8001 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8002 if (ctx->outer_context)
8003 {
8004 omp_notice_variable (ctx->outer_context, decl2, true);
8005 omp_notice_variable (ctx->outer_context,
8006 OMP_CLAUSE_SIZE (c), true);
8007 }
8008 }
b46ebd6c
JJ
8009 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8010 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
acf0174b
JJ
8011 break;
8012
953ff289 8013 case OMP_CLAUSE_REDUCTION:
1a80d6b8
JJ
8014 decl = OMP_CLAUSE_DECL (c);
8015 if (DECL_P (decl)
8016 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8017 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8018 break;
953ff289
DN
8019 case OMP_CLAUSE_COPYIN:
8020 case OMP_CLAUSE_COPYPRIVATE:
8021 case OMP_CLAUSE_IF:
8022 case OMP_CLAUSE_NUM_THREADS:
acf0174b
JJ
8023 case OMP_CLAUSE_NUM_TEAMS:
8024 case OMP_CLAUSE_THREAD_LIMIT:
8025 case OMP_CLAUSE_DIST_SCHEDULE:
8026 case OMP_CLAUSE_DEVICE:
953ff289
DN
8027 case OMP_CLAUSE_SCHEDULE:
8028 case OMP_CLAUSE_NOWAIT:
8029 case OMP_CLAUSE_ORDERED:
8030 case OMP_CLAUSE_DEFAULT:
a68ab351
JJ
8031 case OMP_CLAUSE_UNTIED:
8032 case OMP_CLAUSE_COLLAPSE:
20906c66
JJ
8033 case OMP_CLAUSE_FINAL:
8034 case OMP_CLAUSE_MERGEABLE:
acf0174b 8035 case OMP_CLAUSE_PROC_BIND:
74bf76ed 8036 case OMP_CLAUSE_SAFELEN:
d9a6bd32 8037 case OMP_CLAUSE_SIMDLEN:
acf0174b 8038 case OMP_CLAUSE_DEPEND:
d9a6bd32
JJ
8039 case OMP_CLAUSE_PRIORITY:
8040 case OMP_CLAUSE_GRAINSIZE:
8041 case OMP_CLAUSE_NUM_TASKS:
8042 case OMP_CLAUSE_NOGROUP:
8043 case OMP_CLAUSE_THREADS:
8044 case OMP_CLAUSE_SIMD:
8045 case OMP_CLAUSE_HINT:
8046 case OMP_CLAUSE_DEFAULTMAP:
8047 case OMP_CLAUSE_USE_DEVICE_PTR:
8048 case OMP_CLAUSE_IS_DEVICE_PTR:
9a771876 8049 case OMP_CLAUSE__CILK_FOR_COUNT_:
41dbbb37
TS
8050 case OMP_CLAUSE_ASYNC:
8051 case OMP_CLAUSE_WAIT:
8052 case OMP_CLAUSE_DEVICE_RESIDENT:
41dbbb37
TS
8053 case OMP_CLAUSE_INDEPENDENT:
8054 case OMP_CLAUSE_NUM_GANGS:
8055 case OMP_CLAUSE_NUM_WORKERS:
8056 case OMP_CLAUSE_VECTOR_LENGTH:
8057 case OMP_CLAUSE_GANG:
8058 case OMP_CLAUSE_WORKER:
8059 case OMP_CLAUSE_VECTOR:
8060 case OMP_CLAUSE_AUTO:
8061 case OMP_CLAUSE_SEQ:
7a5e4956 8062 case OMP_CLAUSE_TILE:
953ff289
DN
8063 break;
8064
8065 default:
8066 gcc_unreachable ();
8067 }
8068
8069 if (remove)
8070 *list_p = OMP_CLAUSE_CHAIN (c);
8071 else
8072 list_p = &OMP_CLAUSE_CHAIN (c);
8073 }
8074
8075 /* Add in any implicit data sharing. */
f014c653
JJ
8076 struct gimplify_adjust_omp_clauses_data data;
8077 data.list_p = list_p;
8078 data.pre_p = pre_p;
8079 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
b8698a0f 8080
953ff289
DN
8081 gimplify_omp_ctxp = ctx->outer_context;
8082 delete_omp_context (ctx);
8083}
8084
41dbbb37
TS
8085/* Gimplify OACC_CACHE. */
8086
8087static void
8088gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
8089{
8090 tree expr = *expr_p;
8091
182190f2 8092 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
d9a6bd32 8093 OACC_CACHE);
1a80d6b8
JJ
8094 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
8095 OACC_CACHE);
41dbbb37
TS
8096
8097 /* TODO: Do something sensible with this information. */
8098
8099 *expr_p = NULL_TREE;
8100}
8101
6e232ba4
JN
8102/* Helper function of gimplify_oacc_declare. The helper's purpose is to,
8103 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
8104 kind. The entry kind will replace the one in CLAUSE, while the exit
8105 kind will be used in a new omp_clause and returned to the caller. */
8106
8107static tree
8108gimplify_oacc_declare_1 (tree clause)
8109{
8110 HOST_WIDE_INT kind, new_op;
8111 bool ret = false;
8112 tree c = NULL;
8113
8114 kind = OMP_CLAUSE_MAP_KIND (clause);
8115
8116 switch (kind)
8117 {
8118 case GOMP_MAP_ALLOC:
8119 case GOMP_MAP_FORCE_ALLOC:
8120 case GOMP_MAP_FORCE_TO:
8121 new_op = GOMP_MAP_FORCE_DEALLOC;
8122 ret = true;
8123 break;
8124
8125 case GOMP_MAP_FORCE_FROM:
8126 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
8127 new_op = GOMP_MAP_FORCE_FROM;
8128 ret = true;
8129 break;
8130
8131 case GOMP_MAP_FORCE_TOFROM:
8132 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_TO);
8133 new_op = GOMP_MAP_FORCE_FROM;
8134 ret = true;
8135 break;
8136
8137 case GOMP_MAP_FROM:
8138 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
8139 new_op = GOMP_MAP_FROM;
8140 ret = true;
8141 break;
8142
8143 case GOMP_MAP_TOFROM:
8144 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
8145 new_op = GOMP_MAP_FROM;
8146 ret = true;
8147 break;
8148
8149 case GOMP_MAP_DEVICE_RESIDENT:
8150 case GOMP_MAP_FORCE_DEVICEPTR:
8151 case GOMP_MAP_FORCE_PRESENT:
8152 case GOMP_MAP_LINK:
8153 case GOMP_MAP_POINTER:
8154 case GOMP_MAP_TO:
8155 break;
8156
8157 default:
8158 gcc_unreachable ();
8159 break;
8160 }
8161
8162 if (ret)
8163 {
8164 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
8165 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
8166 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
8167 }
8168
8169 return c;
8170}
8171
8172/* Gimplify OACC_DECLARE. */
8173
8174static void
8175gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
8176{
8177 tree expr = *expr_p;
8178 gomp_target *stmt;
8179 tree clauses, t;
8180
8181 clauses = OACC_DECLARE_CLAUSES (expr);
8182
8183 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
8184
8185 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
8186 {
8187 tree decl = OMP_CLAUSE_DECL (t);
8188
8189 if (TREE_CODE (decl) == MEM_REF)
8190 continue;
8191
8192 if (TREE_CODE (decl) == VAR_DECL
8193 && !is_global_var (decl)
8194 && DECL_CONTEXT (decl) == current_function_decl)
8195 {
8196 tree c = gimplify_oacc_declare_1 (t);
8197 if (c)
8198 {
8199 if (oacc_declare_returns == NULL)
8200 oacc_declare_returns = new hash_map<tree, tree>;
8201
8202 oacc_declare_returns->put (decl, c);
8203 }
8204 }
8205
8206 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
8207 }
8208
8209 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
8210 clauses);
8211
8212 gimplify_seq_add_stmt (pre_p, stmt);
8213
8214 *expr_p = NULL_TREE;
8215}
8216
953ff289
DN
8217/* Gimplify the contents of an OMP_PARALLEL statement. This involves
8218 gimplification of the body, as well as scanning the body for used
8219 variables. We need to do this scan now, because variable-sized
8220 decls will be decomposed during gimplification. */
8221
726a989a
RB
8222static void
8223gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
953ff289
DN
8224{
8225 tree expr = *expr_p;
355fe088 8226 gimple *g;
726a989a 8227 gimple_seq body = NULL;
953ff289 8228
a68ab351
JJ
8229 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
8230 OMP_PARALLEL_COMBINED (expr)
8231 ? ORT_COMBINED_PARALLEL
d9a6bd32 8232 : ORT_PARALLEL, OMP_PARALLEL);
953ff289 8233
45852dcc 8234 push_gimplify_context ();
953ff289 8235
726a989a
RB
8236 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
8237 if (gimple_code (g) == GIMPLE_BIND)
8238 pop_gimplify_context (g);
50674e96 8239 else
726a989a 8240 pop_gimplify_context (NULL);
953ff289 8241
1a80d6b8 8242 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
d9a6bd32 8243 OMP_PARALLEL);
953ff289 8244
726a989a
RB
8245 g = gimple_build_omp_parallel (body,
8246 OMP_PARALLEL_CLAUSES (expr),
8247 NULL_TREE, NULL_TREE);
8248 if (OMP_PARALLEL_COMBINED (expr))
8249 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
8250 gimplify_seq_add_stmt (pre_p, g);
8251 *expr_p = NULL_TREE;
953ff289
DN
8252}
8253
a68ab351
JJ
8254/* Gimplify the contents of an OMP_TASK statement. This involves
8255 gimplification of the body, as well as scanning the body for used
8256 variables. We need to do this scan now, because variable-sized
8257 decls will be decomposed during gimplification. */
953ff289 8258
726a989a
RB
8259static void
8260gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
953ff289 8261{
a68ab351 8262 tree expr = *expr_p;
355fe088 8263 gimple *g;
726a989a 8264 gimple_seq body = NULL;
953ff289 8265
f22f4340
JJ
8266 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
8267 find_omp_clause (OMP_TASK_CLAUSES (expr),
8268 OMP_CLAUSE_UNTIED)
d9a6bd32 8269 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
953ff289 8270
45852dcc 8271 push_gimplify_context ();
953ff289 8272
726a989a
RB
8273 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
8274 if (gimple_code (g) == GIMPLE_BIND)
8275 pop_gimplify_context (g);
953ff289 8276 else
726a989a 8277 pop_gimplify_context (NULL);
953ff289 8278
1a80d6b8
JJ
8279 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
8280 OMP_TASK);
917948d3 8281
726a989a
RB
8282 g = gimple_build_omp_task (body,
8283 OMP_TASK_CLAUSES (expr),
8284 NULL_TREE, NULL_TREE,
8285 NULL_TREE, NULL_TREE, NULL_TREE);
8286 gimplify_seq_add_stmt (pre_p, g);
8287 *expr_p = NULL_TREE;
a68ab351
JJ
8288}
8289
acf0174b
JJ
8290/* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
8291 with non-NULL OMP_FOR_INIT. */
8292
8293static tree
8294find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
8295{
8296 *walk_subtrees = 0;
8297 switch (TREE_CODE (*tp))
8298 {
8299 case OMP_FOR:
8300 *walk_subtrees = 1;
8301 /* FALLTHRU */
8302 case OMP_SIMD:
8303 if (OMP_FOR_INIT (*tp) != NULL_TREE)
8304 return *tp;
8305 break;
8306 case BIND_EXPR:
8307 case STATEMENT_LIST:
8308 case OMP_PARALLEL:
8309 *walk_subtrees = 1;
8310 break;
8311 default:
8312 break;
8313 }
8314 return NULL_TREE;
8315}
8316
a68ab351
JJ
8317/* Gimplify the gross structure of an OMP_FOR statement. */
8318
8319static enum gimplify_status
726a989a 8320gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
a68ab351 8321{
9ce1688b 8322 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
32e8bb8e
ILT
8323 enum gimplify_status ret = GS_ALL_DONE;
8324 enum gimplify_status tret;
538dd0b7 8325 gomp_for *gfor;
726a989a 8326 gimple_seq for_body, for_pre_body;
a68ab351 8327 int i;
74bf76ed 8328 bitmap has_decl_expr = NULL;
d9a6bd32 8329 enum omp_region_type ort = ORT_WORKSHARE;
a68ab351 8330
acf0174b 8331 orig_for_stmt = for_stmt = *expr_p;
a68ab351 8332
41dbbb37
TS
8333 switch (TREE_CODE (for_stmt))
8334 {
8335 case OMP_FOR:
8336 case CILK_FOR:
8337 case OMP_DISTRIBUTE:
182190f2 8338 break;
41dbbb37 8339 case OACC_LOOP:
182190f2 8340 ort = ORT_ACC;
d9a6bd32
JJ
8341 break;
8342 case OMP_TASKLOOP:
8343 if (find_omp_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
8344 ort = ORT_UNTIED_TASK;
8345 else
8346 ort = ORT_TASK;
41dbbb37
TS
8347 break;
8348 case OMP_SIMD:
8349 case CILK_SIMD:
d9a6bd32 8350 ort = ORT_SIMD;
41dbbb37
TS
8351 break;
8352 default:
8353 gcc_unreachable ();
8354 }
8355
41b37d5e
JJ
8356 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
8357 clause for the IV. */
d9a6bd32 8358 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
41b37d5e
JJ
8359 {
8360 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
8361 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
8362 decl = TREE_OPERAND (t, 0);
8363 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
8364 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8365 && OMP_CLAUSE_DECL (c) == decl)
8366 {
8367 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
8368 break;
8369 }
8370 }
8371
9ce1688b
JJ
8372 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
8373 {
8374 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
8375 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
8376 find_combined_omp_for, NULL, NULL);
8377 if (inner_for_stmt == NULL_TREE)
8378 {
8379 gcc_assert (seen_error ());
8380 *expr_p = NULL_TREE;
8381 return GS_ERROR;
8382 }
8383 }
8384
d9a6bd32
JJ
8385 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
8386 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
8387 TREE_CODE (for_stmt));
8388
9cf32741
JJ
8389 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
8390 gimplify_omp_ctxp->distribute = true;
917948d3 8391
726a989a
RB
8392 /* Handle OMP_FOR_INIT. */
8393 for_pre_body = NULL;
d9a6bd32 8394 if (ort == ORT_SIMD && OMP_FOR_PRE_BODY (for_stmt))
74bf76ed
JJ
8395 {
8396 has_decl_expr = BITMAP_ALLOC (NULL);
8397 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
8398 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
acf0174b 8399 == VAR_DECL)
74bf76ed
JJ
8400 {
8401 t = OMP_FOR_PRE_BODY (for_stmt);
8402 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
8403 }
8404 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
8405 {
8406 tree_stmt_iterator si;
8407 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
8408 tsi_next (&si))
8409 {
8410 t = tsi_stmt (si);
8411 if (TREE_CODE (t) == DECL_EXPR
8412 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
8413 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
8414 }
8415 }
8416 }
d9a6bd32
JJ
8417 if (OMP_FOR_PRE_BODY (for_stmt))
8418 {
8419 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
8420 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
8421 else
8422 {
8423 struct gimplify_omp_ctx ctx;
8424 memset (&ctx, 0, sizeof (ctx));
8425 ctx.region_type = ORT_NONE;
8426 gimplify_omp_ctxp = &ctx;
8427 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
8428 gimplify_omp_ctxp = NULL;
8429 }
8430 }
726a989a 8431 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
a68ab351 8432
acf0174b 8433 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
d9a6bd32
JJ
8434 for_stmt = inner_for_stmt;
8435
8436 /* For taskloop, need to gimplify the start, end and step before the
8437 taskloop, outside of the taskloop omp context. */
8438 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
acf0174b 8439 {
d9a6bd32
JJ
8440 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
8441 {
8442 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
8443 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
8444 {
8445 TREE_OPERAND (t, 1)
8446 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
8447 pre_p, NULL);
8448 tree c = build_omp_clause (input_location,
8449 OMP_CLAUSE_FIRSTPRIVATE);
8450 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
8451 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
8452 OMP_FOR_CLAUSES (orig_for_stmt) = c;
8453 }
8454
8455 /* Handle OMP_FOR_COND. */
8456 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
8457 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
8458 {
8459 TREE_OPERAND (t, 1)
8460 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
8461 gimple_seq_empty_p (for_pre_body)
8462 ? pre_p : &for_pre_body, NULL);
8463 tree c = build_omp_clause (input_location,
8464 OMP_CLAUSE_FIRSTPRIVATE);
8465 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
8466 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
8467 OMP_FOR_CLAUSES (orig_for_stmt) = c;
8468 }
8469
8470 /* Handle OMP_FOR_INCR. */
8471 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
8472 if (TREE_CODE (t) == MODIFY_EXPR)
8473 {
8474 decl = TREE_OPERAND (t, 0);
8475 t = TREE_OPERAND (t, 1);
8476 tree *tp = &TREE_OPERAND (t, 1);
8477 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
8478 tp = &TREE_OPERAND (t, 0);
8479
8480 if (!is_gimple_constant (*tp))
8481 {
8482 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
8483 ? pre_p : &for_pre_body;
8484 *tp = get_initialized_tmp_var (*tp, seq, NULL);
8485 tree c = build_omp_clause (input_location,
8486 OMP_CLAUSE_FIRSTPRIVATE);
8487 OMP_CLAUSE_DECL (c) = *tp;
8488 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
8489 OMP_FOR_CLAUSES (orig_for_stmt) = c;
8490 }
8491 }
8492 }
8493
8494 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
8495 OMP_TASKLOOP);
acf0174b
JJ
8496 }
8497
d9a6bd32
JJ
8498 if (orig_for_stmt != for_stmt)
8499 gimplify_omp_ctxp->combined_loop = true;
8500
355a7673 8501 for_body = NULL;
a68ab351
JJ
8502 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
8503 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
8504 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
8505 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
d9a6bd32
JJ
8506
8507 tree c = find_omp_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
8508 bool is_doacross = false;
8509 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
8510 {
8511 is_doacross = true;
8512 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
8513 (OMP_FOR_INIT (for_stmt))
8514 * 2);
8515 }
8516 int collapse = 1;
8517 c = find_omp_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
8518 if (c)
8519 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
a68ab351
JJ
8520 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
8521 {
8522 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
726a989a
RB
8523 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
8524 decl = TREE_OPERAND (t, 0);
a68ab351
JJ
8525 gcc_assert (DECL_P (decl));
8526 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
8527 || POINTER_TYPE_P (TREE_TYPE (decl)));
d9a6bd32
JJ
8528 if (is_doacross)
8529 {
8530 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
8531 gimplify_omp_ctxp->loop_iter_var.quick_push
8532 (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i));
8533 else
8534 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
8535 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
8536 }
a68ab351
JJ
8537
8538 /* Make sure the iteration variable is private. */
74bf76ed 8539 tree c = NULL_TREE;
f7468577 8540 tree c2 = NULL_TREE;
acf0174b
JJ
8541 if (orig_for_stmt != for_stmt)
8542 /* Do this only on innermost construct for combined ones. */;
d9a6bd32 8543 else if (ort == ORT_SIMD)
74bf76ed
JJ
8544 {
8545 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
d9a6bd32 8546 (splay_tree_key) decl);
f7468577
JJ
8547 omp_is_private (gimplify_omp_ctxp, decl,
8548 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
8549 != 1));
74bf76ed
JJ
8550 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
8551 omp_notice_variable (gimplify_omp_ctxp, decl, true);
8552 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
8553 {
8554 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
8555 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
41b37d5e
JJ
8556 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
8557 if ((has_decl_expr
8558 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
8559 || omp_no_lastprivate (gimplify_omp_ctxp))
8560 {
8561 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
8562 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8563 }
499c20bb
JJ
8564 struct gimplify_omp_ctx *outer
8565 = gimplify_omp_ctxp->outer_context;
8566 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8567 {
8568 if (outer->region_type == ORT_WORKSHARE
8569 && outer->combined_loop)
8570 {
8571 n = splay_tree_lookup (outer->variables,
8572 (splay_tree_key)decl);
8573 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
8574 {
8575 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
8576 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8577 }
e01d41e5
JJ
8578 else
8579 {
8580 struct gimplify_omp_ctx *octx = outer->outer_context;
8581 if (octx
8582 && octx->region_type == ORT_COMBINED_PARALLEL
8583 && octx->outer_context
8584 && (octx->outer_context->region_type
8585 == ORT_WORKSHARE)
8586 && octx->outer_context->combined_loop)
8587 {
8588 octx = octx->outer_context;
8589 n = splay_tree_lookup (octx->variables,
8590 (splay_tree_key)decl);
8591 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
8592 {
8593 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
8594 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8595 }
8596 }
8597 }
499c20bb
JJ
8598 }
8599 }
8600
74bf76ed
JJ
8601 OMP_CLAUSE_DECL (c) = decl;
8602 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
8603 OMP_FOR_CLAUSES (for_stmt) = c;
41b37d5e 8604 omp_add_variable (gimplify_omp_ctxp, decl, flags);
41b37d5e
JJ
8605 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8606 {
8607 if (outer->region_type == ORT_WORKSHARE
8608 && outer->combined_loop)
8609 {
8610 if (outer->outer_context
8611 && (outer->outer_context->region_type
8612 == ORT_COMBINED_PARALLEL))
8613 outer = outer->outer_context;
8614 else if (omp_check_private (outer, decl, false))
8615 outer = NULL;
8616 }
d9a6bd32
JJ
8617 else if (((outer->region_type & ORT_TASK) != 0)
8618 && outer->combined_loop
8619 && !omp_check_private (gimplify_omp_ctxp,
8620 decl, false))
8621 ;
41b37d5e
JJ
8622 else if (outer->region_type != ORT_COMBINED_PARALLEL)
8623 outer = NULL;
8624 if (outer)
8625 {
cbdfbde8
JJ
8626 n = splay_tree_lookup (outer->variables,
8627 (splay_tree_key)decl);
8628 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
8629 {
8630 omp_add_variable (outer, decl,
8631 GOVD_LASTPRIVATE | GOVD_SEEN);
e01d41e5
JJ
8632 if (outer->region_type == ORT_COMBINED_PARALLEL
8633 && outer->outer_context
8634 && (outer->outer_context->region_type
8635 == ORT_WORKSHARE)
8636 && outer->outer_context->combined_loop)
8637 {
8638 outer = outer->outer_context;
8639 n = splay_tree_lookup (outer->variables,
8640 (splay_tree_key)decl);
8641 if (omp_check_private (outer, decl, false))
8642 outer = NULL;
8643 else if (n == NULL
8644 || ((n->value & GOVD_DATA_SHARE_CLASS)
8645 == 0))
8646 omp_add_variable (outer, decl,
8647 GOVD_LASTPRIVATE
8648 | GOVD_SEEN);
8649 else
8650 outer = NULL;
8651 }
8652 if (outer && outer->outer_context
8653 && (outer->outer_context->region_type
8654 == ORT_COMBINED_TEAMS))
8655 {
8656 outer = outer->outer_context;
8657 n = splay_tree_lookup (outer->variables,
8658 (splay_tree_key)decl);
8659 if (n == NULL
8660 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
8661 omp_add_variable (outer, decl,
8662 GOVD_SHARED | GOVD_SEEN);
8663 else
8664 outer = NULL;
8665 }
8666 if (outer && outer->outer_context)
cbdfbde8
JJ
8667 omp_notice_variable (outer->outer_context, decl,
8668 true);
8669 }
41b37d5e
JJ
8670 }
8671 }
74bf76ed
JJ
8672 }
8673 else
8674 {
8675 bool lastprivate
8676 = (!has_decl_expr
41b37d5e
JJ
8677 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
8678 && !omp_no_lastprivate (gimplify_omp_ctxp);
8679 struct gimplify_omp_ctx *outer
8680 = gimplify_omp_ctxp->outer_context;
8681 if (outer && lastprivate)
56ad0e38 8682 {
41b37d5e
JJ
8683 if (outer->region_type == ORT_WORKSHARE
8684 && outer->combined_loop)
8685 {
499c20bb
JJ
8686 n = splay_tree_lookup (outer->variables,
8687 (splay_tree_key)decl);
8688 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
8689 {
8690 lastprivate = false;
8691 outer = NULL;
8692 }
8693 else if (outer->outer_context
8694 && (outer->outer_context->region_type
8695 == ORT_COMBINED_PARALLEL))
41b37d5e
JJ
8696 outer = outer->outer_context;
8697 else if (omp_check_private (outer, decl, false))
8698 outer = NULL;
8699 }
d9a6bd32
JJ
8700 else if (((outer->region_type & ORT_TASK) != 0)
8701 && outer->combined_loop
8702 && !omp_check_private (gimplify_omp_ctxp,
8703 decl, false))
8704 ;
41b37d5e
JJ
8705 else if (outer->region_type != ORT_COMBINED_PARALLEL)
8706 outer = NULL;
8707 if (outer)
56ad0e38 8708 {
cbdfbde8
JJ
8709 n = splay_tree_lookup (outer->variables,
8710 (splay_tree_key)decl);
8711 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
8712 {
8713 omp_add_variable (outer, decl,
8714 GOVD_LASTPRIVATE | GOVD_SEEN);
e01d41e5
JJ
8715 if (outer->region_type == ORT_COMBINED_PARALLEL
8716 && outer->outer_context
8717 && (outer->outer_context->region_type
8718 == ORT_WORKSHARE)
8719 && outer->outer_context->combined_loop)
8720 {
8721 outer = outer->outer_context;
8722 n = splay_tree_lookup (outer->variables,
8723 (splay_tree_key)decl);
8724 if (omp_check_private (outer, decl, false))
8725 outer = NULL;
8726 else if (n == NULL
8727 || ((n->value & GOVD_DATA_SHARE_CLASS)
8728 == 0))
8729 omp_add_variable (outer, decl,
8730 GOVD_LASTPRIVATE
8731 | GOVD_SEEN);
8732 else
8733 outer = NULL;
8734 }
8735 if (outer && outer->outer_context
8736 && (outer->outer_context->region_type
8737 == ORT_COMBINED_TEAMS))
8738 {
8739 outer = outer->outer_context;
8740 n = splay_tree_lookup (outer->variables,
8741 (splay_tree_key)decl);
8742 if (n == NULL
8743 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
8744 omp_add_variable (outer, decl,
8745 GOVD_SHARED | GOVD_SEEN);
8746 else
8747 outer = NULL;
8748 }
8749 if (outer && outer->outer_context)
cbdfbde8
JJ
8750 omp_notice_variable (outer->outer_context, decl,
8751 true);
8752 }
56ad0e38
JJ
8753 }
8754 }
41b37d5e 8755
74bf76ed
JJ
8756 c = build_omp_clause (input_location,
8757 lastprivate ? OMP_CLAUSE_LASTPRIVATE
8758 : OMP_CLAUSE_PRIVATE);
8759 OMP_CLAUSE_DECL (c) = decl;
8760 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
f7468577 8761 OMP_FOR_CLAUSES (for_stmt) = c;
74bf76ed
JJ
8762 omp_add_variable (gimplify_omp_ctxp, decl,
8763 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
f7468577 8764 | GOVD_EXPLICIT | GOVD_SEEN);
74bf76ed
JJ
8765 c = NULL_TREE;
8766 }
8767 }
f7468577 8768 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
a68ab351
JJ
8769 omp_notice_variable (gimplify_omp_ctxp, decl, true);
8770 else
8771 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
8772
8773 /* If DECL is not a gimple register, create a temporary variable to act
8774 as an iteration counter. This is valid, since DECL cannot be
56ad0e38
JJ
8775 modified in the body of the loop. Similarly for any iteration vars
8776 in simd with collapse > 1 where the iterator vars must be
8777 lastprivate. */
acf0174b
JJ
8778 if (orig_for_stmt != for_stmt)
8779 var = decl;
56ad0e38 8780 else if (!is_gimple_reg (decl)
d9a6bd32
JJ
8781 || (ort == ORT_SIMD
8782 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
a68ab351
JJ
8783 {
8784 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
726a989a 8785 TREE_OPERAND (t, 0) = var;
b8698a0f 8786
726a989a 8787 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
953ff289 8788
d9a6bd32
JJ
8789 if (ort == ORT_SIMD
8790 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
f7468577
JJ
8791 {
8792 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
8793 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
8794 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
8795 OMP_CLAUSE_DECL (c2) = var;
8796 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
8797 OMP_FOR_CLAUSES (for_stmt) = c2;
8798 omp_add_variable (gimplify_omp_ctxp, var,
8799 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
8800 if (c == NULL_TREE)
8801 {
8802 c = c2;
8803 c2 = NULL_TREE;
8804 }
8805 }
8806 else
8807 omp_add_variable (gimplify_omp_ctxp, var,
8808 GOVD_PRIVATE | GOVD_SEEN);
a68ab351
JJ
8809 }
8810 else
8811 var = decl;
07beea0d 8812
32e8bb8e 8813 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
726a989a 8814 is_gimple_val, fb_rvalue);
32e8bb8e 8815 ret = MIN (ret, tret);
726a989a
RB
8816 if (ret == GS_ERROR)
8817 return ret;
953ff289 8818
726a989a 8819 /* Handle OMP_FOR_COND. */
a68ab351
JJ
8820 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
8821 gcc_assert (COMPARISON_CLASS_P (t));
726a989a 8822 gcc_assert (TREE_OPERAND (t, 0) == decl);
b56b9fe3 8823
32e8bb8e 8824 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
726a989a 8825 is_gimple_val, fb_rvalue);
32e8bb8e 8826 ret = MIN (ret, tret);
917948d3 8827
726a989a 8828 /* Handle OMP_FOR_INCR. */
a68ab351 8829 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
953ff289
DN
8830 switch (TREE_CODE (t))
8831 {
a68ab351
JJ
8832 case PREINCREMENT_EXPR:
8833 case POSTINCREMENT_EXPR:
c02065fc
AH
8834 {
8835 tree decl = TREE_OPERAND (t, 0);
da6f124d
JJ
8836 /* c_omp_for_incr_canonicalize_ptr() should have been
8837 called to massage things appropriately. */
c02065fc
AH
8838 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
8839
8840 if (orig_for_stmt != for_stmt)
8841 break;
8842 t = build_int_cst (TREE_TYPE (decl), 1);
8843 if (c)
8844 OMP_CLAUSE_LINEAR_STEP (c) = t;
8845 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
8846 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
8847 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
acf0174b 8848 break;
c02065fc 8849 }
a68ab351
JJ
8850
8851 case PREDECREMENT_EXPR:
8852 case POSTDECREMENT_EXPR:
da6f124d
JJ
8853 /* c_omp_for_incr_canonicalize_ptr() should have been
8854 called to massage things appropriately. */
8855 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
acf0174b
JJ
8856 if (orig_for_stmt != for_stmt)
8857 break;
a68ab351 8858 t = build_int_cst (TREE_TYPE (decl), -1);
74bf76ed
JJ
8859 if (c)
8860 OMP_CLAUSE_LINEAR_STEP (c) = t;
a68ab351 8861 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
726a989a 8862 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
a68ab351
JJ
8863 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
8864 break;
8865
726a989a
RB
8866 case MODIFY_EXPR:
8867 gcc_assert (TREE_OPERAND (t, 0) == decl);
8868 TREE_OPERAND (t, 0) = var;
a68ab351 8869
726a989a 8870 t = TREE_OPERAND (t, 1);
a68ab351 8871 switch (TREE_CODE (t))
953ff289 8872 {
a68ab351
JJ
8873 case PLUS_EXPR:
8874 if (TREE_OPERAND (t, 1) == decl)
8875 {
8876 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
8877 TREE_OPERAND (t, 0) = var;
8878 break;
8879 }
8880
8881 /* Fallthru. */
8882 case MINUS_EXPR:
8883 case POINTER_PLUS_EXPR:
8884 gcc_assert (TREE_OPERAND (t, 0) == decl);
917948d3 8885 TREE_OPERAND (t, 0) = var;
953ff289 8886 break;
a68ab351
JJ
8887 default:
8888 gcc_unreachable ();
953ff289 8889 }
917948d3 8890
32e8bb8e 8891 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
726a989a 8892 is_gimple_val, fb_rvalue);
32e8bb8e 8893 ret = MIN (ret, tret);
74bf76ed
JJ
8894 if (c)
8895 {
da6f124d
JJ
8896 tree step = TREE_OPERAND (t, 1);
8897 tree stept = TREE_TYPE (decl);
8898 if (POINTER_TYPE_P (stept))
8899 stept = sizetype;
8900 step = fold_convert (stept, step);
74bf76ed 8901 if (TREE_CODE (t) == MINUS_EXPR)
da6f124d
JJ
8902 step = fold_build1 (NEGATE_EXPR, stept, step);
8903 OMP_CLAUSE_LINEAR_STEP (c) = step;
8904 if (step != TREE_OPERAND (t, 1))
74bf76ed 8905 {
74bf76ed
JJ
8906 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
8907 &for_pre_body, NULL,
8908 is_gimple_val, fb_rvalue);
8909 ret = MIN (ret, tret);
8910 }
8911 }
953ff289 8912 break;
a68ab351 8913
953ff289
DN
8914 default:
8915 gcc_unreachable ();
8916 }
8917
f7468577
JJ
8918 if (c2)
8919 {
8920 gcc_assert (c);
8921 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
8922 }
8923
d9a6bd32 8924 if ((var != decl || collapse > 1) && orig_for_stmt == for_stmt)
a68ab351 8925 {
a68ab351 8926 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
f7468577
JJ
8927 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8928 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
8929 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8930 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
8931 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
8932 && OMP_CLAUSE_DECL (c) == decl)
726a989a 8933 {
d9a6bd32
JJ
8934 if (is_doacross && (collapse == 1 || i >= collapse))
8935 t = var;
8936 else
8937 {
8938 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
8939 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
8940 gcc_assert (TREE_OPERAND (t, 0) == var);
8941 t = TREE_OPERAND (t, 1);
8942 gcc_assert (TREE_CODE (t) == PLUS_EXPR
8943 || TREE_CODE (t) == MINUS_EXPR
8944 || TREE_CODE (t) == POINTER_PLUS_EXPR);
8945 gcc_assert (TREE_OPERAND (t, 0) == var);
8946 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
8947 is_doacross ? var : decl,
8948 TREE_OPERAND (t, 1));
8949 }
f7468577
JJ
8950 gimple_seq *seq;
8951 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
8952 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
8953 else
8954 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
8955 gimplify_assign (decl, t, seq);
a68ab351
JJ
8956 }
8957 }
953ff289
DN
8958 }
8959
74bf76ed
JJ
8960 BITMAP_FREE (has_decl_expr);
8961
d9a6bd32
JJ
8962 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
8963 {
8964 push_gimplify_context ();
8965 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
8966 {
8967 OMP_FOR_BODY (orig_for_stmt)
8968 = build3 (BIND_EXPR, void_type_node, NULL,
8969 OMP_FOR_BODY (orig_for_stmt), NULL);
8970 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
8971 }
8972 }
8973
8974 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
8975 &for_body);
8976
8977 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
8978 {
8979 if (gimple_code (g) == GIMPLE_BIND)
8980 pop_gimplify_context (g);
8981 else
8982 pop_gimplify_context (NULL);
8983 }
726a989a 8984
acf0174b
JJ
8985 if (orig_for_stmt != for_stmt)
8986 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
8987 {
8988 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
8989 decl = TREE_OPERAND (t, 0);
d9a6bd32
JJ
8990 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8991 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
8992 gimplify_omp_ctxp = ctx->outer_context;
acf0174b 8993 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
d9a6bd32 8994 gimplify_omp_ctxp = ctx;
acf0174b
JJ
8995 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
8996 TREE_OPERAND (t, 0) = var;
8997 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
8998 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
8999 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
9000 }
9001
1a80d6b8
JJ
9002 gimplify_adjust_omp_clauses (pre_p, for_body,
9003 &OMP_FOR_CLAUSES (orig_for_stmt),
d9a6bd32 9004 TREE_CODE (orig_for_stmt));
953ff289 9005
74bf76ed 9006 int kind;
acf0174b 9007 switch (TREE_CODE (orig_for_stmt))
74bf76ed
JJ
9008 {
9009 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
9010 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
c02065fc 9011 case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
9a771876 9012 case CILK_FOR: kind = GF_OMP_FOR_KIND_CILKFOR; break;
acf0174b 9013 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
d9a6bd32 9014 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
41dbbb37 9015 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
74bf76ed
JJ
9016 default:
9017 gcc_unreachable ();
9018 }
acf0174b 9019 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
726a989a
RB
9020 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
9021 for_pre_body);
acf0174b
JJ
9022 if (orig_for_stmt != for_stmt)
9023 gimple_omp_for_set_combined_p (gfor, true);
9024 if (gimplify_omp_ctxp
9025 && (gimplify_omp_ctxp->combined_loop
9026 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
9027 && gimplify_omp_ctxp->outer_context
9028 && gimplify_omp_ctxp->outer_context->combined_loop)))
9029 {
9030 gimple_omp_for_set_combined_into_p (gfor, true);
9031 if (gimplify_omp_ctxp->combined_loop)
9032 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
9033 else
9034 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
9035 }
726a989a
RB
9036
9037 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9038 {
9039 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9040 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
9041 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
9042 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
9043 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
9044 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
9045 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9046 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
9047 }
9048
d9a6bd32
JJ
9049 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
9050 constructs with GIMPLE_OMP_TASK sandwiched in between them.
9051 The outer taskloop stands for computing the number of iterations,
9052 counts for collapsed loops and holding taskloop specific clauses.
9053 The task construct stands for the effect of data sharing on the
9054 explicit task it creates and the inner taskloop stands for expansion
9055 of the static loop inside of the explicit task construct. */
9056 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9057 {
9058 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
9059 tree task_clauses = NULL_TREE;
9060 tree c = *gfor_clauses_ptr;
9061 tree *gtask_clauses_ptr = &task_clauses;
9062 tree outer_for_clauses = NULL_TREE;
9063 tree *gforo_clauses_ptr = &outer_for_clauses;
9064 for (; c; c = OMP_CLAUSE_CHAIN (c))
9065 switch (OMP_CLAUSE_CODE (c))
9066 {
9067 /* These clauses are allowed on task, move them there. */
9068 case OMP_CLAUSE_SHARED:
9069 case OMP_CLAUSE_FIRSTPRIVATE:
9070 case OMP_CLAUSE_DEFAULT:
9071 case OMP_CLAUSE_IF:
9072 case OMP_CLAUSE_UNTIED:
9073 case OMP_CLAUSE_FINAL:
9074 case OMP_CLAUSE_MERGEABLE:
9075 case OMP_CLAUSE_PRIORITY:
9076 *gtask_clauses_ptr = c;
9077 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9078 break;
9079 case OMP_CLAUSE_PRIVATE:
9080 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
9081 {
9082 /* We want private on outer for and firstprivate
9083 on task. */
9084 *gtask_clauses_ptr
9085 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9086 OMP_CLAUSE_FIRSTPRIVATE);
9087 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
9088 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
9089 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
9090 *gforo_clauses_ptr = c;
9091 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9092 }
9093 else
9094 {
9095 *gtask_clauses_ptr = c;
9096 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9097 }
9098 break;
9099 /* These clauses go into outer taskloop clauses. */
9100 case OMP_CLAUSE_GRAINSIZE:
9101 case OMP_CLAUSE_NUM_TASKS:
9102 case OMP_CLAUSE_NOGROUP:
9103 *gforo_clauses_ptr = c;
9104 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9105 break;
9106 /* Taskloop clause we duplicate on both taskloops. */
9107 case OMP_CLAUSE_COLLAPSE:
9108 *gfor_clauses_ptr = c;
9109 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9110 *gforo_clauses_ptr = copy_node (c);
9111 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
9112 break;
9113 /* For lastprivate, keep the clause on inner taskloop, and add
9114 a shared clause on task. If the same decl is also firstprivate,
9115 add also firstprivate clause on the inner taskloop. */
9116 case OMP_CLAUSE_LASTPRIVATE:
9117 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
9118 {
9119 /* For taskloop C++ lastprivate IVs, we want:
9120 1) private on outer taskloop
9121 2) firstprivate and shared on task
9122 3) lastprivate on inner taskloop */
9123 *gtask_clauses_ptr
9124 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9125 OMP_CLAUSE_FIRSTPRIVATE);
9126 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
9127 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
9128 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
9129 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
9130 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9131 OMP_CLAUSE_PRIVATE);
9132 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
9133 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
9134 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
9135 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
9136 }
9137 *gfor_clauses_ptr = c;
9138 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9139 *gtask_clauses_ptr
9140 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
9141 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
9142 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
9143 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
9144 gtask_clauses_ptr
9145 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
9146 break;
9147 default:
9148 gcc_unreachable ();
9149 }
9150 *gfor_clauses_ptr = NULL_TREE;
9151 *gtask_clauses_ptr = NULL_TREE;
9152 *gforo_clauses_ptr = NULL_TREE;
9153 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
9154 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
9155 NULL_TREE, NULL_TREE, NULL_TREE);
9156 gimple_omp_task_set_taskloop_p (g, true);
9157 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
9158 gomp_for *gforo
9159 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
9160 gimple_omp_for_collapse (gfor),
9161 gimple_omp_for_pre_body (gfor));
9162 gimple_omp_for_set_pre_body (gfor, NULL);
9163 gimple_omp_for_set_combined_p (gforo, true);
9164 gimple_omp_for_set_combined_into_p (gfor, true);
9165 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
9166 {
9167 t = unshare_expr (gimple_omp_for_index (gfor, i));
9168 gimple_omp_for_set_index (gforo, i, t);
9169 t = unshare_expr (gimple_omp_for_initial (gfor, i));
9170 gimple_omp_for_set_initial (gforo, i, t);
9171 gimple_omp_for_set_cond (gforo, i,
9172 gimple_omp_for_cond (gfor, i));
9173 t = unshare_expr (gimple_omp_for_final (gfor, i));
9174 gimple_omp_for_set_final (gforo, i, t);
9175 t = unshare_expr (gimple_omp_for_incr (gfor, i));
9176 gimple_omp_for_set_incr (gforo, i, t);
9177 }
9178 gimplify_seq_add_stmt (pre_p, gforo);
9179 }
9180 else
9181 gimplify_seq_add_stmt (pre_p, gfor);
74bf76ed
JJ
9182 if (ret != GS_ALL_DONE)
9183 return GS_ERROR;
9184 *expr_p = NULL_TREE;
9185 return GS_ALL_DONE;
953ff289
DN
9186}
9187
e01d41e5
JJ
9188/* Helper function of optimize_target_teams, find OMP_TEAMS inside
9189 of OMP_TARGET's body. */
9190
9191static tree
9192find_omp_teams (tree *tp, int *walk_subtrees, void *)
9193{
9194 *walk_subtrees = 0;
9195 switch (TREE_CODE (*tp))
9196 {
9197 case OMP_TEAMS:
9198 return *tp;
9199 case BIND_EXPR:
9200 case STATEMENT_LIST:
9201 *walk_subtrees = 1;
9202 break;
9203 default:
9204 break;
9205 }
9206 return NULL_TREE;
9207}
9208
/* Helper function of optimize_target_teams, determine if the expression
   can be computed safely before the target construct on the host.
   walk_tree callback: returns the offending subtree if the expression is
   NOT host-computable, NULL_TREE if this subtree is fine.  */

static tree
computable_teams_clause (tree *tp, int *walk_subtrees, void *)
{
  splay_tree_node n;

  /* Types carry no runtime computation; skip them entirely.  */
  if (TYPE_P (*tp))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }
  switch (TREE_CODE (*tp))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      *walk_subtrees = 0;
      /* Reject decls whose host value may differ from the device value
	 or whose evaluation has side-effects.  */
      if (error_operand_p (*tp)
	  || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
	  || DECL_HAS_VALUE_EXPR_P (*tp)
	  || DECL_THREAD_LOCAL_P (*tp)
	  || TREE_SIDE_EFFECTS (*tp)
	  || TREE_THIS_VOLATILE (*tp))
	return *tp;
      /* "omp declare target" globals live on the device; their host
	 copy is not guaranteed to be current.  */
      if (is_global_var (*tp)
	  && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
	      || lookup_attribute ("omp declare target link",
				   DECL_ATTRIBUTES (*tp))))
	return *tp;
      /* Otherwise consult the gimplifier's data-sharing table: the decl
	 is usable if it is (or will be) firstprivate or map(always,to:)
	 on the target region.  */
      n = splay_tree_lookup (gimplify_omp_ctxp->variables,
			     (splay_tree_key) *tp);
      if (n == NULL)
	{
	  /* Not seen yet: OK only if scalars default to firstprivate.  */
	  if (gimplify_omp_ctxp->target_map_scalars_firstprivate)
	    return NULL_TREE;
	  return *tp;
	}
      else if (n->value & GOVD_LOCAL)
	return *tp;
      else if (n->value & GOVD_FIRSTPRIVATE)
	return NULL_TREE;
      else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	       == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	return NULL_TREE;
      return *tp;
    case INTEGER_CST:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    case TARGET_EXPR:
      /* Only a bare TARGET_EXPR slot (no initializer) is acceptable;
	 recurse on the slot variable itself.  */
      if (TARGET_EXPR_INITIAL (*tp)
	  || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
	return *tp;
      return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
				      walk_subtrees, NULL);
    /* Allow some reasonable subset of integral arithmetics.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case NON_LVALUE_EXPR:
    CASE_CONVERT:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    /* And disallow anything else, except for comparisons.  */
    default:
      if (COMPARISON_CLASS_P (*tp))
	return NULL_TREE;
      return *tp;
    }
}
9302
/* Try to determine if the num_teams and/or thread_limit expressions
   can have their values determined already before entering the
   target construct.
   INTEGER_CSTs trivially are,
   integral decls that are firstprivate (explicitly or implicitly)
   or explicitly map(always, to:) or map(always, tofrom:) on the target
   region too, and expressions involving simple arithmetics on those
   too, function calls are not ok, dereferencing something neither etc.
   Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
   EXPR based on what we find:
   0 stands for clause not specified at all, use implementation default
   -1 stands for value that can't be determined easily before entering
   the target construct.
   If teams construct is not present at all, use 1 for num_teams
   and 0 for thread_limit (only one team is involved, and the thread
   limit is implementation defined).  */

static void
optimize_target_teams (tree target, gimple_seq *pre_p)
{
  tree body = OMP_BODY (target);
  tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
  /* Defaults: 0 == "clause not specified, implementation default".  */
  tree num_teams = integer_zero_node;
  tree thread_limit = integer_zero_node;
  location_t num_teams_loc = EXPR_LOCATION (target);
  location_t thread_limit_loc = EXPR_LOCATION (target);
  tree c, *p, expr;
  struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;

  if (teams == NULL_TREE)
    /* No teams construct: exactly one team.  */
    num_teams = integer_one_node;
  else
    for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
      {
	/* P points at whichever of the two values this clause sets.  */
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
	  {
	    p = &num_teams;
	    num_teams_loc = OMP_CLAUSE_LOCATION (c);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
	  {
	    p = &thread_limit;
	    thread_limit_loc = OMP_CLAUSE_LOCATION (c);
	  }
	else
	  continue;
	expr = OMP_CLAUSE_OPERAND (c, 0);
	if (TREE_CODE (expr) == INTEGER_CST)
	  {
	    *p = expr;
	    continue;
	  }
	if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
	  {
	    /* Not host-computable: -1 == "can't be determined".  */
	    *p = integer_minus_one_node;
	    continue;
	  }
	*p = expr;
	/* Gimplify the expression in the context enclosing the target
	   region, since it is evaluated on the host before entry;
	   restore the target context afterwards on every path.  */
	gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
	if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue)
	    == GS_ERROR)
	  {
	    gimplify_omp_ctxp = target_ctx;
	    *p = integer_minus_one_node;
	    continue;
	  }
	gimplify_omp_ctxp = target_ctx;
	if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
	  OMP_CLAUSE_OPERAND (c, 0) = *p;
      }
  /* Prepend the computed THREAD_LIMIT and NUM_TEAMS clauses onto the
     target's clause chain.  */
  c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
  OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
  OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
  OMP_TARGET_CLAUSES (target) = c;
  c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
  OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
  OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
  OMP_TARGET_CLAUSES (target) = c;
}
9382
/* Gimplify the gross structure of several OMP constructs: OMP_SECTIONS,
   OMP_SINGLE, OMP_TARGET, OMP_TARGET_DATA, OMP_TEAMS, and the OpenACC
   counterparts OACC_KERNELS, OACC_PARALLEL, OACC_DATA and
   OACC_HOST_DATA.  The resulting GIMPLE statement is appended to PRE_P
   and *EXPR_P is cleared.  */

static void
gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple *stmt;
  gimple_seq body = NULL;
  enum omp_region_type ort;

  /* Map the tree code to the matching OMP region type, which drives
     clause scanning below.  */
  switch (TREE_CODE (expr))
    {
    case OMP_SECTIONS:
    case OMP_SINGLE:
      ort = ORT_WORKSHARE;
      break;
    case OMP_TARGET:
      ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
      break;
    case OACC_KERNELS:
      ort = ORT_ACC_KERNELS;
      break;
    case OACC_PARALLEL:
      ort = ORT_ACC_PARALLEL;
      break;
    case OACC_DATA:
      ort = ORT_ACC_DATA;
      break;
    case OMP_TARGET_DATA:
      ort = ORT_TARGET_DATA;
      break;
    case OMP_TEAMS:
      ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
      break;
    case OACC_HOST_DATA:
      ort = ORT_ACC_HOST_DATA;
      break;
    default:
      gcc_unreachable ();
    }
  gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
			     TREE_CODE (expr));
  /* For target regions, try to precompute num_teams/thread_limit
     on the host (must run after clause scanning, which populates the
     data-sharing table computable_teams_clause consults).  */
  if (TREE_CODE (expr) == OMP_TARGET)
    optimize_target_teams (expr, pre_p);
  if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
    {
      /* Target (data) bodies get their own gimplification context so
	 temporaries land inside the region.  */
      push_gimplify_context ();
      gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
      if (gimple_code (g) == GIMPLE_BIND)
	pop_gimplify_context (g);
      else
	pop_gimplify_context (NULL);
      if ((ort & ORT_TARGET_DATA) != 0)
	{
	  /* Data regions need a runtime call at region exit; wrap the
	     body in a GIMPLE_TRY_FINALLY invoking the right builtin so
	     the region is closed even on abnormal exits.  */
	  enum built_in_function end_ix;
	  switch (TREE_CODE (expr))
	    {
	    case OACC_DATA:
	    case OACC_HOST_DATA:
	      end_ix = BUILT_IN_GOACC_DATA_END;
	      break;
	    case OMP_TARGET_DATA:
	      end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  tree fn = builtin_decl_explicit (end_ix);
	  g = gimple_build_call (fn, 0);
	  gimple_seq cleanup = NULL;
	  gimple_seq_add_stmt (&cleanup, g);
	  g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
	  body = NULL;
	  gimple_seq_add_stmt (&body, g);
	}
    }
  else
    gimplify_and_add (OMP_BODY (expr), &body);
  gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
			       TREE_CODE (expr));

  /* Build the GIMPLE statement for the construct from the gimplified
     body and the (possibly adjusted) clauses.  */
  switch (TREE_CODE (expr))
    {
    case OACC_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OACC_KERNELS:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
				      OMP_CLAUSES (expr));
      break;
    case OACC_HOST_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OACC_PARALLEL:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
				      OMP_CLAUSES (expr));
      break;
    case OMP_SECTIONS:
      stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
      break;
    case OMP_SINGLE:
      stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
      break;
    case OMP_TARGET:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
				      OMP_CLAUSES (expr));
      break;
    case OMP_TARGET_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OMP_TEAMS:
      stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_seq_add_stmt (pre_p, stmt);
  *expr_p = NULL_TREE;
}
9506
41dbbb37
TS
9507/* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
9508 target update constructs. */
acf0174b
JJ
9509
9510static void
9511gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
9512{
b811915d 9513 tree expr = *expr_p;
41dbbb37 9514 int kind;
538dd0b7 9515 gomp_target *stmt;
182190f2 9516 enum omp_region_type ort = ORT_WORKSHARE;
acf0174b 9517
41dbbb37
TS
9518 switch (TREE_CODE (expr))
9519 {
9520 case OACC_ENTER_DATA:
41dbbb37 9521 case OACC_EXIT_DATA:
41dbbb37 9522 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
182190f2 9523 ort = ORT_ACC;
41dbbb37
TS
9524 break;
9525 case OACC_UPDATE:
41dbbb37 9526 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
182190f2 9527 ort = ORT_ACC;
41dbbb37
TS
9528 break;
9529 case OMP_TARGET_UPDATE:
41dbbb37
TS
9530 kind = GF_OMP_TARGET_KIND_UPDATE;
9531 break;
d9a6bd32
JJ
9532 case OMP_TARGET_ENTER_DATA:
9533 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
9534 break;
9535 case OMP_TARGET_EXIT_DATA:
9536 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
9537 break;
41dbbb37
TS
9538 default:
9539 gcc_unreachable ();
9540 }
b811915d 9541 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
182190f2 9542 ort, TREE_CODE (expr));
1a80d6b8 9543 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
d9a6bd32 9544 TREE_CODE (expr));
b811915d 9545 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
726a989a
RB
9546
9547 gimplify_seq_add_stmt (pre_p, stmt);
acf0174b 9548 *expr_p = NULL_TREE;
953ff289
DN
9549}
9550
9551/* A subroutine of gimplify_omp_atomic. The front end is supposed to have
b8698a0f 9552 stabilized the lhs of the atomic operation as *ADDR. Return true if
953ff289
DN
9553 EXPR is this stabilized form. */
9554
9555static bool
a509ebb5 9556goa_lhs_expr_p (tree expr, tree addr)
953ff289
DN
9557{
9558 /* Also include casts to other type variants. The C front end is fond
b8698a0f 9559 of adding these for e.g. volatile variables. This is like
953ff289 9560 STRIP_TYPE_NOPS but includes the main variant lookup. */
9600efe1 9561 STRIP_USELESS_TYPE_CONVERSION (expr);
953ff289 9562
78e47463
JJ
9563 if (TREE_CODE (expr) == INDIRECT_REF)
9564 {
9565 expr = TREE_OPERAND (expr, 0);
9566 while (expr != addr
1043771b 9567 && (CONVERT_EXPR_P (expr)
78e47463
JJ
9568 || TREE_CODE (expr) == NON_LVALUE_EXPR)
9569 && TREE_CODE (expr) == TREE_CODE (addr)
9600efe1 9570 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
78e47463
JJ
9571 {
9572 expr = TREE_OPERAND (expr, 0);
9573 addr = TREE_OPERAND (addr, 0);
9574 }
251923f5
JJ
9575 if (expr == addr)
9576 return true;
71458b8a
JJ
9577 return (TREE_CODE (addr) == ADDR_EXPR
9578 && TREE_CODE (expr) == ADDR_EXPR
251923f5 9579 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
78e47463 9580 }
953ff289
DN
9581 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
9582 return true;
9583 return false;
9584}
9585
/* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR.  If an
   expression does not involve the lhs, evaluate it into a temporary.
   Return 1 if the lhs appeared as a subexpression, 0 if it did not,
   or -1 if an error was encountered.  */

static int
goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
		    tree lhs_var)
{
  tree expr = *expr_p;
  int saw_lhs;

  /* The whole expression is the lhs itself: substitute the variable.  */
  if (goa_lhs_expr_p (expr, lhs_addr))
    {
      *expr_p = lhs_var;
      return 1;
    }
  if (is_gimple_val (expr))
    return 0;

  /* Recurse into operands, OR-ing together whether any contained the
     lhs.  Note the deliberate fallthroughs: binary cases handle operand
     1 then fall through to the unary case for operand 0.  */
  saw_lhs = 0;
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_binary:
    case tcc_comparison:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
				     lhs_var);
      /* FALLTHRU */
    case tcc_unary:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
				     lhs_var);
      break;
    case tcc_expression:
      switch (TREE_CODE (expr))
	{
	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var);
	  /* FALLTHRU */
	case TRUTH_NOT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var);
	  break;
	case COMPOUND_EXPR:
	  /* Break out any preevaluations from cp_build_modify_expr.  */
	  for (; TREE_CODE (expr) == COMPOUND_EXPR;
	       expr = TREE_OPERAND (expr, 1))
	    gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
	  *expr_p = expr;
	  return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
	default:
	  break;
	}
      break;
    default:
      break;
    }

  /* If the lhs did not appear anywhere below, the expression is
     independent of it: evaluate it into a temporary up front.  */
  if (saw_lhs == 0)
    {
      enum gimplify_status gs;
      gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
      if (gs != GS_ALL_DONE)
	saw_lhs = -1;
    }

  return saw_lhs;
}
9656
953ff289
DN
/* Gimplify an OMP_ATOMIC statement into a GIMPLE_OMP_ATOMIC_LOAD /
   GIMPLE_OMP_ATOMIC_STORE pair appended to PRE_P.  *EXPR_P is set to
   the captured value for the capture variants, otherwise cleared.  */

static enum gimplify_status
gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
{
  tree addr = TREE_OPERAND (*expr_p, 0);
  /* Atomic reads have no rhs operand.  */
  tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
	     ? NULL : TREE_OPERAND (*expr_p, 1);
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
  tree tmp_load;
  gomp_atomic_load *loadstmt;
  gomp_atomic_store *storestmt;

  /* TMP_LOAD receives the atomically loaded old value; occurrences of
     the lhs inside RHS are rewritten to refer to it.  */
  tmp_load = create_tmp_reg (type);
  if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
    return GS_ERROR;

  if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
  gimplify_seq_add_stmt (pre_p, loadstmt);
  /* RHS is gimplified after the load so its statements sit between the
     load and the store.  */
  if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  /* A plain atomic read stores back the loaded value unchanged.  */
  if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
    rhs = tmp_load;
  storestmt = gimple_build_omp_atomic_store (rhs);
  gimplify_seq_add_stmt (pre_p, storestmt);
  if (OMP_ATOMIC_SEQ_CST (*expr_p))
    {
      gimple_omp_atomic_set_seq_cst (loadstmt);
      gimple_omp_atomic_set_seq_cst (storestmt);
    }
  /* For capture forms, the statement's value is the old (load) or new
     (store) value respectively; flag the producing statement.  */
  switch (TREE_CODE (*expr_p))
    {
    case OMP_ATOMIC_READ:
    case OMP_ATOMIC_CAPTURE_OLD:
      *expr_p = tmp_load;
      gimple_omp_atomic_set_need_value (loadstmt);
      break;
    case OMP_ATOMIC_CAPTURE_NEW:
      *expr_p = rhs;
      gimple_omp_atomic_set_need_value (storestmt);
      break;
    default:
      *expr_p = NULL;
      break;
    }

  return GS_ALL_DONE;
}
6de9cd9a 9711
0a35513e
AH
/* Gimplify a TRANSACTION_EXPR.  This involves gimplification of the
   body into a GIMPLE_TRANSACTION appended to PRE_P, and adding some
   EH bits.  Returns GS_OK with *EXPR_P set to the wrapper temporary if
   the transaction produces a value, GS_ALL_DONE otherwise.  */

static enum gimplify_status
gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
  gimple *body_stmt;
  gtransaction *trans_stmt;
  gimple_seq body = NULL;
  int subcode = 0;

  /* Wrap the transaction body in a BIND_EXPR so we have a context
     where to put decls for OMP.  */
  if (TREE_CODE (tbody) != BIND_EXPR)
    {
      tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
      TREE_SIDE_EFFECTS (bind) = 1;
      SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
      TRANSACTION_EXPR_BODY (expr) = bind;
    }

  /* New gimplification context for the body; voidify_wrapper_expr must
     run inside it so any value-producing temporary is created there.  */
  push_gimplify_context ();
  temp = voidify_wrapper_expr (*expr_p, NULL);

  body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
  pop_gimplify_context (body_stmt);

  trans_stmt = gimple_build_transaction (body);
  /* OUTER takes precedence over RELAXED; both default to 0.  */
  if (TRANSACTION_EXPR_OUTER (expr))
    subcode = GTMA_IS_OUTER;
  else if (TRANSACTION_EXPR_RELAXED (expr))
    subcode = GTMA_IS_RELAXED;
  gimple_transaction_set_subcode (trans_stmt, subcode);

  gimplify_seq_add_stmt (pre_p, trans_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
9758
d9a6bd32
JJ
/* Gimplify an OMP_ORDERED construct.  EXPR is the tree version.  BODY
   is the OMP_BODY of the original EXPR (which has already been
   gimplified so it's not present in the EXPR).

   Return the gimplified GIMPLE_OMP_ORDERED tuple, or a GIMPLE_NOP if
   any depend-clause diagnostics were issued.  */

static gimple *
gimplify_omp_ordered (tree expr, gimple_seq body)
{
  tree c, decls;
  int failures = 0;
  unsigned int i;
  tree source_c = NULL_TREE;
  tree sink_c = NULL_TREE;

  /* Validate depend(sink:)/depend(source) clauses against the iteration
     variables recorded on the enclosing ordered loop (loop_iter_var
     holds pairs: original decl at 2*i, replacement at 2*i+1).  */
  if (gimplify_omp_ctxp)
    for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	  && gimplify_omp_ctxp->loop_iter_var.is_empty ()
	  && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
	      || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
	{
	  /* No iteration variables: the enclosing loop lacks an
	     ordered(n) clause with a parameter.  */
	  error_at (OMP_CLAUSE_LOCATION (c),
		    "%<ordered%> construct with %<depend%> clause must be "
		    "closely nested inside a loop with %<ordered%> clause "
		    "with a parameter");
	  failures++;
	}
      else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	       && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
	{
	  bool fail = false;
	  /* Each TREE_LIST entry in the sink vector must name the
	     corresponding loop iteration variable, outermost first;
	     on success replace it with the gimplifier's copy.  */
	  for (decls = OMP_CLAUSE_DECL (c), i = 0;
	       decls && TREE_CODE (decls) == TREE_LIST;
	       decls = TREE_CHAIN (decls), ++i)
	    if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
	      continue;
	    else if (TREE_VALUE (decls)
		     != gimplify_omp_ctxp->loop_iter_var[2 * i])
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "variable %qE is not an iteration "
			  "of outermost loop %d, expected %qE",
			  TREE_VALUE (decls), i + 1,
			  gimplify_omp_ctxp->loop_iter_var[2 * i]);
		fail = true;
		failures++;
	      }
	    else
	      TREE_VALUE (decls)
		= gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
	  if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"number of variables in %<depend(sink)%> "
			"clause does not match number of "
			"iteration variables");
	      failures++;
	    }
	  sink_c = c;
	}
      else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	       && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
	{
	  /* At most one depend(source) clause is allowed.  */
	  if (source_c)
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"more than one %<depend(source)%> clause on an "
			"%<ordered%> construct");
	      failures++;
	    }
	  else
	    source_c = c;
	}
  /* Source and sink clauses are mutually exclusive on one construct.  */
  if (source_c && sink_c)
    {
      error_at (OMP_CLAUSE_LOCATION (source_c),
		"%<depend(source)%> clause specified together with "
		"%<depend(sink:)%> clauses on the same construct");
      failures++;
    }

  if (failures)
    return gimple_build_nop ();
  return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
}
9845
ad19c4be 9846/* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
726a989a
RB
9847 expression produces a value to be used as an operand inside a GIMPLE
9848 statement, the value will be stored back in *EXPR_P. This value will
9849 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
9850 an SSA_NAME. The corresponding sequence of GIMPLE statements is
9851 emitted in PRE_P and POST_P.
9852
9853 Additionally, this process may overwrite parts of the input
9854 expression during gimplification. Ideally, it should be
9855 possible to do non-destructive gimplification.
9856
9857 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
9858 the expression needs to evaluate to a value to be used as
9859 an operand in a GIMPLE statement, this value will be stored in
9860 *EXPR_P on exit. This happens when the caller specifies one
9861 of fb_lvalue or fb_rvalue fallback flags.
9862
9863 PRE_P will contain the sequence of GIMPLE statements corresponding
9864 to the evaluation of EXPR and all the side-effects that must
9865 be executed before the main expression. On exit, the last
9866 statement of PRE_P is the core statement being gimplified. For
9867 instance, when gimplifying 'if (++a)' the last statement in
9868 PRE_P will be 'if (t.1)' where t.1 is the result of
9869 pre-incrementing 'a'.
9870
9871 POST_P will contain the sequence of GIMPLE statements corresponding
9872 to the evaluation of all the side-effects that must be executed
9873 after the main expression. If this is NULL, the post
9874 side-effects are stored at the end of PRE_P.
9875
9876 The reason why the output is split in two is to handle post
9877 side-effects explicitly. In some cases, an expression may have
9878 inner and outer post side-effects which need to be emitted in
9879 an order different from the one given by the recursive
9880 traversal. For instance, for the expression (*p--)++ the post
9881 side-effects of '--' must actually occur *after* the post
9882 side-effects of '++'. However, gimplification will first visit
9883 the inner expression, so if a separate POST sequence was not
9884 used, the resulting sequence would be:
9885
9886 1 t.1 = *p
9887 2 p = p - 1
9888 3 t.2 = t.1 + 1
9889 4 *p = t.2
9890
9891 However, the post-decrement operation in line #2 must not be
9892 evaluated until after the store to *p at line #4, so the
9893 correct sequence should be:
9894
9895 1 t.1 = *p
9896 2 t.2 = t.1 + 1
9897 3 *p = t.2
9898 4 p = p - 1
9899
9900 So, by specifying a separate post queue, it is possible
9901 to emit the post side-effects in the correct order.
9902 If POST_P is NULL, an internal queue will be used. Before
9903 returning to the caller, the sequence POST_P is appended to
9904 the main output sequence PRE_P.
9905
9906 GIMPLE_TEST_F points to a function that takes a tree T and
9907 returns nonzero if T is in the GIMPLE form requested by the
12947319 9908 caller. The GIMPLE predicates are in gimple.c.
726a989a
RB
9909
9910 FALLBACK tells the function what sort of a temporary we want if
9911 gimplification cannot produce an expression that complies with
9912 GIMPLE_TEST_F.
9913
9914 fb_none means that no temporary should be generated
9915 fb_rvalue means that an rvalue is OK to generate
9916 fb_lvalue means that an lvalue is OK to generate
9917 fb_either means that either is OK, but an lvalue is preferable.
9918 fb_mayfail means that gimplification may fail (in which case
9919 GS_ERROR will be returned)
9920
9921 The return value is either GS_ERROR or GS_ALL_DONE, since this
9922 function iterates until EXPR is completely gimplified or an error
9923 occurs. */
6de9cd9a
DN
9924
9925enum gimplify_status
726a989a
RB
9926gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
9927 bool (*gimple_test_f) (tree), fallback_t fallback)
6de9cd9a
DN
9928{
9929 tree tmp;
726a989a
RB
9930 gimple_seq internal_pre = NULL;
9931 gimple_seq internal_post = NULL;
6de9cd9a 9932 tree save_expr;
726a989a 9933 bool is_statement;
6de9cd9a
DN
9934 location_t saved_location;
9935 enum gimplify_status ret;
726a989a 9936 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
6de9cd9a
DN
9937
9938 save_expr = *expr_p;
9939 if (save_expr == NULL_TREE)
9940 return GS_ALL_DONE;
9941
726a989a
RB
9942 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
9943 is_statement = gimple_test_f == is_gimple_stmt;
9944 if (is_statement)
9945 gcc_assert (pre_p);
9946
9947 /* Consistency checks. */
9948 if (gimple_test_f == is_gimple_reg)
9949 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
9950 else if (gimple_test_f == is_gimple_val
726a989a
RB
9951 || gimple_test_f == is_gimple_call_addr
9952 || gimple_test_f == is_gimple_condexpr
9953 || gimple_test_f == is_gimple_mem_rhs
ba4d8f9d 9954 || gimple_test_f == is_gimple_mem_rhs_or_call
726a989a 9955 || gimple_test_f == is_gimple_reg_rhs
ba4d8f9d 9956 || gimple_test_f == is_gimple_reg_rhs_or_call
70f34814
RG
9957 || gimple_test_f == is_gimple_asm_val
9958 || gimple_test_f == is_gimple_mem_ref_addr)
726a989a
RB
9959 gcc_assert (fallback & fb_rvalue);
9960 else if (gimple_test_f == is_gimple_min_lval
9961 || gimple_test_f == is_gimple_lvalue)
9962 gcc_assert (fallback & fb_lvalue);
9963 else if (gimple_test_f == is_gimple_addressable)
9964 gcc_assert (fallback & fb_either);
9965 else if (gimple_test_f == is_gimple_stmt)
9966 gcc_assert (fallback == fb_none);
9967 else
9968 {
9969 /* We should have recognized the GIMPLE_TEST_F predicate to
9970 know what kind of fallback to use in case a temporary is
9971 needed to hold the value or address of *EXPR_P. */
9972 gcc_unreachable ();
9973 }
9974
6de9cd9a
DN
9975 /* We used to check the predicate here and return immediately if it
9976 succeeds. This is wrong; the design is for gimplification to be
9977 idempotent, and for the predicates to only test for valid forms, not
9978 whether they are fully simplified. */
6de9cd9a
DN
9979 if (pre_p == NULL)
9980 pre_p = &internal_pre;
726a989a 9981
6de9cd9a
DN
9982 if (post_p == NULL)
9983 post_p = &internal_post;
9984
726a989a
RB
9985 /* Remember the last statements added to PRE_P and POST_P. Every
9986 new statement added by the gimplification helpers needs to be
9987 annotated with location information. To centralize the
9988 responsibility, we remember the last statement that had been
9989 added to both queues before gimplifying *EXPR_P. If
9990 gimplification produces new statements in PRE_P and POST_P, those
9991 statements will be annotated with the same location information
9992 as *EXPR_P. */
9993 pre_last_gsi = gsi_last (*pre_p);
9994 post_last_gsi = gsi_last (*post_p);
9995
6de9cd9a 9996 saved_location = input_location;
a281759f
PB
9997 if (save_expr != error_mark_node
9998 && EXPR_HAS_LOCATION (*expr_p))
9999 input_location = EXPR_LOCATION (*expr_p);
6de9cd9a
DN
10000
10001 /* Loop over the specific gimplifiers until the toplevel node
10002 remains the same. */
10003 do
10004 {
73d6ddef
RK
10005 /* Strip away as many useless type conversions as possible
10006 at the toplevel. */
10007 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
6de9cd9a
DN
10008
10009 /* Remember the expr. */
10010 save_expr = *expr_p;
10011
10012 /* Die, die, die, my darling. */
10013 if (save_expr == error_mark_node
726a989a 10014 || (TREE_TYPE (save_expr)
65355d53 10015 && TREE_TYPE (save_expr) == error_mark_node))
6de9cd9a
DN
10016 {
10017 ret = GS_ERROR;
10018 break;
10019 }
10020
10021 /* Do any language-specific gimplification. */
32e8bb8e
ILT
10022 ret = ((enum gimplify_status)
10023 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
6de9cd9a
DN
10024 if (ret == GS_OK)
10025 {
10026 if (*expr_p == NULL_TREE)
10027 break;
10028 if (*expr_p != save_expr)
10029 continue;
10030 }
10031 else if (ret != GS_UNHANDLED)
10032 break;
10033
941f78d1
JM
10034 /* Make sure that all the cases set 'ret' appropriately. */
10035 ret = GS_UNHANDLED;
6de9cd9a
DN
10036 switch (TREE_CODE (*expr_p))
10037 {
10038 /* First deal with the special cases. */
10039
10040 case POSTINCREMENT_EXPR:
10041 case POSTDECREMENT_EXPR:
10042 case PREINCREMENT_EXPR:
10043 case PREDECREMENT_EXPR:
10044 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
cc3c4f62
RB
10045 fallback != fb_none,
10046 TREE_TYPE (*expr_p));
6de9cd9a
DN
10047 break;
10048
0bd34ae4
RB
10049 case VIEW_CONVERT_EXPR:
10050 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
10051 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
10052 {
10053 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10054 post_p, is_gimple_val, fb_rvalue);
10055 recalculate_side_effects (*expr_p);
10056 break;
10057 }
10058 /* Fallthru. */
10059
6de9cd9a 10060 case ARRAY_REF:
44de5aeb
RK
10061 case ARRAY_RANGE_REF:
10062 case REALPART_EXPR:
10063 case IMAGPART_EXPR:
6de9cd9a
DN
10064 case COMPONENT_REF:
10065 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
90051e16 10066 fallback ? fallback : fb_rvalue);
6de9cd9a
DN
10067 break;
10068
10069 case COND_EXPR:
dae7ec87 10070 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
726a989a 10071
0223e4f5
JM
10072 /* C99 code may assign to an array in a structure value of a
10073 conditional expression, and this has undefined behavior
10074 only on execution, so create a temporary if an lvalue is
10075 required. */
10076 if (fallback == fb_lvalue)
10077 {
10078 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
936d04b6 10079 mark_addressable (*expr_p);
941f78d1 10080 ret = GS_OK;
0223e4f5 10081 }
6de9cd9a
DN
10082 break;
10083
10084 case CALL_EXPR:
90051e16 10085 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
726a989a 10086
0223e4f5
JM
10087 /* C99 code may assign to an array in a structure returned
10088 from a function, and this has undefined behavior only on
10089 execution, so create a temporary if an lvalue is
10090 required. */
10091 if (fallback == fb_lvalue)
10092 {
10093 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
936d04b6 10094 mark_addressable (*expr_p);
941f78d1 10095 ret = GS_OK;
0223e4f5 10096 }
6de9cd9a
DN
10097 break;
10098
10099 case TREE_LIST:
282899df 10100 gcc_unreachable ();
6de9cd9a
DN
10101
10102 case COMPOUND_EXPR:
10103 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
10104 break;
10105
2ec5deb5 10106 case COMPOUND_LITERAL_EXPR:
4c53d183
MM
10107 ret = gimplify_compound_literal_expr (expr_p, pre_p,
10108 gimple_test_f, fallback);
2ec5deb5
PB
10109 break;
10110
6de9cd9a
DN
10111 case MODIFY_EXPR:
10112 case INIT_EXPR:
ebad5233
JM
10113 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
10114 fallback != fb_none);
6de9cd9a
DN
10115 break;
10116
10117 case TRUTH_ANDIF_EXPR:
10118 case TRUTH_ORIF_EXPR:
1d15f620
KT
10119 {
10120 /* Preserve the original type of the expression and the
10121 source location of the outer expression. */
10122 tree org_type = TREE_TYPE (*expr_p);
10123 *expr_p = gimple_boolify (*expr_p);
4b4455e5 10124 *expr_p = build3_loc (input_location, COND_EXPR,
1d15f620
KT
10125 org_type, *expr_p,
10126 fold_convert_loc
4b4455e5 10127 (input_location,
1d15f620
KT
10128 org_type, boolean_true_node),
10129 fold_convert_loc
4b4455e5 10130 (input_location,
1d15f620
KT
10131 org_type, boolean_false_node));
10132 ret = GS_OK;
10133 break;
10134 }
6de9cd9a
DN
10135
10136 case TRUTH_NOT_EXPR:
3c6cbf7a 10137 {
53020648
RG
10138 tree type = TREE_TYPE (*expr_p);
10139 /* The parsers are careful to generate TRUTH_NOT_EXPR
10140 only with operands that are always zero or one.
10141 We do not fold here but handle the only interesting case
10142 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
3c6cbf7a 10143 *expr_p = gimple_boolify (*expr_p);
53020648
RG
10144 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
10145 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
10146 TREE_TYPE (*expr_p),
10147 TREE_OPERAND (*expr_p, 0));
10148 else
10149 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
10150 TREE_TYPE (*expr_p),
10151 TREE_OPERAND (*expr_p, 0),
10152 build_int_cst (TREE_TYPE (*expr_p), 1));
10153 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
10154 *expr_p = fold_convert_loc (input_location, type, *expr_p);
10155 ret = GS_OK;
bd5d002e 10156 break;
3c6cbf7a 10157 }
67339062 10158
6de9cd9a
DN
10159 case ADDR_EXPR:
10160 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
10161 break;
10162
8170608b
TB
10163 case ANNOTATE_EXPR:
10164 {
10165 tree cond = TREE_OPERAND (*expr_p, 0);
718c4601 10166 tree kind = TREE_OPERAND (*expr_p, 1);
664ceb1e
JJ
10167 tree type = TREE_TYPE (cond);
10168 if (!INTEGRAL_TYPE_P (type))
10169 {
10170 *expr_p = cond;
10171 ret = GS_OK;
10172 break;
10173 }
b731b390 10174 tree tmp = create_tmp_var (type);
8170608b 10175 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
538dd0b7 10176 gcall *call
718c4601 10177 = gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind);
8170608b
TB
10178 gimple_call_set_lhs (call, tmp);
10179 gimplify_seq_add_stmt (pre_p, call);
10180 *expr_p = tmp;
10181 ret = GS_ALL_DONE;
10182 break;
10183 }
10184
6de9cd9a 10185 case VA_ARG_EXPR:
cd3ce9b4 10186 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
6de9cd9a
DN
10187 break;
10188
1043771b 10189 CASE_CONVERT:
6de9cd9a
DN
10190 if (IS_EMPTY_STMT (*expr_p))
10191 {
10192 ret = GS_ALL_DONE;
10193 break;
10194 }
10195
10196 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
10197 || fallback == fb_none)
10198 {
10199 /* Just strip a conversion to void (or in void context) and
10200 try again. */
10201 *expr_p = TREE_OPERAND (*expr_p, 0);
941f78d1 10202 ret = GS_OK;
6de9cd9a
DN
10203 break;
10204 }
10205
10206 ret = gimplify_conversion (expr_p);
10207 if (ret == GS_ERROR)
10208 break;
10209 if (*expr_p != save_expr)
10210 break;
10211 /* FALLTHRU */
10212
10213 case FIX_TRUNC_EXPR:
6de9cd9a
DN
10214 /* unary_expr: ... | '(' cast ')' val | ... */
10215 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10216 is_gimple_val, fb_rvalue);
10217 recalculate_side_effects (*expr_p);
10218 break;
10219
6a720599 10220 case INDIRECT_REF:
70f34814
RG
10221 {
10222 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
3748f5c9 10223 bool notrap = TREE_THIS_NOTRAP (*expr_p);
70f34814
RG
10224 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
10225
10226 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
10227 if (*expr_p != save_expr)
10228 {
10229 ret = GS_OK;
10230 break;
10231 }
10232
10233 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10234 is_gimple_reg, fb_rvalue);
dca26746
RG
10235 if (ret == GS_ERROR)
10236 break;
70f34814 10237
dca26746 10238 recalculate_side_effects (*expr_p);
70f34814
RG
10239 *expr_p = fold_build2_loc (input_location, MEM_REF,
10240 TREE_TYPE (*expr_p),
10241 TREE_OPERAND (*expr_p, 0),
10242 build_int_cst (saved_ptr_type, 0));
10243 TREE_THIS_VOLATILE (*expr_p) = volatilep;
3748f5c9 10244 TREE_THIS_NOTRAP (*expr_p) = notrap;
70f34814
RG
10245 ret = GS_OK;
10246 break;
10247 }
10248
10249 /* We arrive here through the various re-gimplifcation paths. */
10250 case MEM_REF:
10251 /* First try re-folding the whole thing. */
10252 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
10253 TREE_OPERAND (*expr_p, 0),
10254 TREE_OPERAND (*expr_p, 1));
10255 if (tmp)
941f78d1 10256 {
ee45a32d
EB
10257 REF_REVERSE_STORAGE_ORDER (tmp)
10258 = REF_REVERSE_STORAGE_ORDER (*expr_p);
70f34814
RG
10259 *expr_p = tmp;
10260 recalculate_side_effects (*expr_p);
941f78d1
JM
10261 ret = GS_OK;
10262 break;
10263 }
01718e96
RG
10264 /* Avoid re-gimplifying the address operand if it is already
10265 in suitable form. Re-gimplifying would mark the address
10266 operand addressable. Always gimplify when not in SSA form
10267 as we still may have to gimplify decls with value-exprs. */
10268 if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
10269 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
10270 {
10271 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10272 is_gimple_mem_ref_addr, fb_rvalue);
10273 if (ret == GS_ERROR)
10274 break;
10275 }
6de9cd9a 10276 recalculate_side_effects (*expr_p);
70f34814 10277 ret = GS_ALL_DONE;
6de9cd9a
DN
10278 break;
10279
01718e96 10280 /* Constants need not be gimplified. */
6de9cd9a
DN
10281 case INTEGER_CST:
10282 case REAL_CST:
325217ed 10283 case FIXED_CST:
6de9cd9a
DN
10284 case STRING_CST:
10285 case COMPLEX_CST:
10286 case VECTOR_CST:
3f5c390d
RB
10287 /* Drop the overflow flag on constants, we do not want
10288 that in the GIMPLE IL. */
10289 if (TREE_OVERFLOW_P (*expr_p))
10290 *expr_p = drop_tree_overflow (*expr_p);
6de9cd9a
DN
10291 ret = GS_ALL_DONE;
10292 break;
10293
10294 case CONST_DECL:
0534fa56 10295 /* If we require an lvalue, such as for ADDR_EXPR, retain the
2a7e31df 10296 CONST_DECL node. Otherwise the decl is replaceable by its
0534fa56
RH
10297 value. */
10298 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
10299 if (fallback & fb_lvalue)
10300 ret = GS_ALL_DONE;
10301 else
941f78d1
JM
10302 {
10303 *expr_p = DECL_INITIAL (*expr_p);
10304 ret = GS_OK;
10305 }
6de9cd9a
DN
10306 break;
10307
350fae66 10308 case DECL_EXPR:
726a989a 10309 ret = gimplify_decl_expr (expr_p, pre_p);
350fae66
RK
10310 break;
10311
6de9cd9a 10312 case BIND_EXPR:
c6c7698d 10313 ret = gimplify_bind_expr (expr_p, pre_p);
6de9cd9a
DN
10314 break;
10315
10316 case LOOP_EXPR:
10317 ret = gimplify_loop_expr (expr_p, pre_p);
10318 break;
10319
10320 case SWITCH_EXPR:
10321 ret = gimplify_switch_expr (expr_p, pre_p);
10322 break;
10323
6de9cd9a
DN
10324 case EXIT_EXPR:
10325 ret = gimplify_exit_expr (expr_p);
10326 break;
10327
10328 case GOTO_EXPR:
10329 /* If the target is not LABEL, then it is a computed jump
10330 and the target needs to be gimplified. */
10331 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
8c50b495
JJ
10332 {
10333 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
10334 NULL, is_gimple_val, fb_rvalue);
10335 if (ret == GS_ERROR)
10336 break;
10337 }
726a989a
RB
10338 gimplify_seq_add_stmt (pre_p,
10339 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
941f78d1 10340 ret = GS_ALL_DONE;
6de9cd9a
DN
10341 break;
10342
2e28e797 10343 case PREDICT_EXPR:
726a989a
RB
10344 gimplify_seq_add_stmt (pre_p,
10345 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
10346 PREDICT_EXPR_OUTCOME (*expr_p)));
10347 ret = GS_ALL_DONE;
10348 break;
2e28e797 10349
6de9cd9a
DN
10350 case LABEL_EXPR:
10351 ret = GS_ALL_DONE;
282899df
NS
10352 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
10353 == current_function_decl);
726a989a
RB
10354 gimplify_seq_add_stmt (pre_p,
10355 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
6de9cd9a
DN
10356 break;
10357
10358 case CASE_LABEL_EXPR:
726a989a 10359 ret = gimplify_case_label_expr (expr_p, pre_p);
6de9cd9a
DN
10360 break;
10361
10362 case RETURN_EXPR:
10363 ret = gimplify_return_expr (*expr_p, pre_p);
10364 break;
10365
10366 case CONSTRUCTOR:
48eb4e53
RK
10367 /* Don't reduce this in place; let gimplify_init_constructor work its
10368 magic. Buf if we're just elaborating this for side effects, just
10369 gimplify any element that has side-effects. */
10370 if (fallback == fb_none)
10371 {
4038c495 10372 unsigned HOST_WIDE_INT ix;
ac47786e 10373 tree val;
08330ec2 10374 tree temp = NULL_TREE;
ac47786e
NF
10375 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
10376 if (TREE_SIDE_EFFECTS (val))
10377 append_to_statement_list (val, &temp);
48eb4e53 10378
08330ec2 10379 *expr_p = temp;
941f78d1 10380 ret = temp ? GS_OK : GS_ALL_DONE;
48eb4e53 10381 }
ca0b7d18
AP
10382 /* C99 code may assign to an array in a constructed
10383 structure or union, and this has undefined behavior only
10384 on execution, so create a temporary if an lvalue is
10385 required. */
10386 else if (fallback == fb_lvalue)
10387 {
10388 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
936d04b6 10389 mark_addressable (*expr_p);
941f78d1 10390 ret = GS_OK;
ca0b7d18 10391 }
08330ec2
AP
10392 else
10393 ret = GS_ALL_DONE;
6de9cd9a
DN
10394 break;
10395
10396 /* The following are special cases that are not handled by the
10397 original GIMPLE grammar. */
10398
10399 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
10400 eliminated. */
10401 case SAVE_EXPR:
10402 ret = gimplify_save_expr (expr_p, pre_p, post_p);
10403 break;
10404
10405 case BIT_FIELD_REF:
ea814c66
EB
10406 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10407 post_p, is_gimple_lvalue, fb_either);
10408 recalculate_side_effects (*expr_p);
6de9cd9a
DN
10409 break;
10410
150e3929
RG
10411 case TARGET_MEM_REF:
10412 {
10413 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
10414
23a534a1 10415 if (TMR_BASE (*expr_p))
150e3929 10416 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
4d948885 10417 post_p, is_gimple_mem_ref_addr, fb_either);
150e3929
RG
10418 if (TMR_INDEX (*expr_p))
10419 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
10420 post_p, is_gimple_val, fb_rvalue);
4d948885
RG
10421 if (TMR_INDEX2 (*expr_p))
10422 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
10423 post_p, is_gimple_val, fb_rvalue);
150e3929
RG
10424 /* TMR_STEP and TMR_OFFSET are always integer constants. */
10425 ret = MIN (r0, r1);
10426 }
10427 break;
10428
6de9cd9a
DN
10429 case NON_LVALUE_EXPR:
10430 /* This should have been stripped above. */
282899df 10431 gcc_unreachable ();
6de9cd9a
DN
10432
10433 case ASM_EXPR:
10434 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
10435 break;
10436
10437 case TRY_FINALLY_EXPR:
10438 case TRY_CATCH_EXPR:
726a989a
RB
10439 {
10440 gimple_seq eval, cleanup;
538dd0b7 10441 gtry *try_;
726a989a 10442
820055a0
DC
10443 /* Calls to destructors are generated automatically in FINALLY/CATCH
10444 block. They should have location as UNKNOWN_LOCATION. However,
10445 gimplify_call_expr will reset these call stmts to input_location
10446 if it finds stmt's location is unknown. To prevent resetting for
10447 destructors, we set the input_location to unknown.
10448 Note that this only affects the destructor calls in FINALLY/CATCH
10449 block, and will automatically reset to its original value by the
10450 end of gimplify_expr. */
10451 input_location = UNKNOWN_LOCATION;
726a989a
RB
10452 eval = cleanup = NULL;
10453 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
10454 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
cc8b343d
JJ
10455 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
10456 if (gimple_seq_empty_p (cleanup))
10457 {
10458 gimple_seq_add_seq (pre_p, eval);
10459 ret = GS_ALL_DONE;
10460 break;
10461 }
726a989a
RB
10462 try_ = gimple_build_try (eval, cleanup,
10463 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
10464 ? GIMPLE_TRY_FINALLY
10465 : GIMPLE_TRY_CATCH);
220d905f 10466 if (EXPR_HAS_LOCATION (save_expr))
e368f44f 10467 gimple_set_location (try_, EXPR_LOCATION (save_expr));
220d905f
AH
10468 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
10469 gimple_set_location (try_, saved_location);
726a989a
RB
10470 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
10471 gimple_try_set_catch_is_cleanup (try_,
10472 TRY_CATCH_IS_CLEANUP (*expr_p));
10473 gimplify_seq_add_stmt (pre_p, try_);
10474 ret = GS_ALL_DONE;
10475 break;
10476 }
6de9cd9a
DN
10477
10478 case CLEANUP_POINT_EXPR:
10479 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
10480 break;
10481
10482 case TARGET_EXPR:
10483 ret = gimplify_target_expr (expr_p, pre_p, post_p);
10484 break;
10485
10486 case CATCH_EXPR:
726a989a 10487 {
355fe088 10488 gimple *c;
726a989a
RB
10489 gimple_seq handler = NULL;
10490 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
10491 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
10492 gimplify_seq_add_stmt (pre_p, c);
10493 ret = GS_ALL_DONE;
10494 break;
10495 }
6de9cd9a
DN
10496
10497 case EH_FILTER_EXPR:
726a989a 10498 {
355fe088 10499 gimple *ehf;
726a989a
RB
10500 gimple_seq failure = NULL;
10501
10502 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
10503 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
d665b6e5 10504 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
726a989a
RB
10505 gimplify_seq_add_stmt (pre_p, ehf);
10506 ret = GS_ALL_DONE;
10507 break;
10508 }
6de9cd9a 10509
0f59171d
RH
10510 case OBJ_TYPE_REF:
10511 {
10512 enum gimplify_status r0, r1;
726a989a
RB
10513 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
10514 post_p, is_gimple_val, fb_rvalue);
10515 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
10516 post_p, is_gimple_val, fb_rvalue);
0f3a057a 10517 TREE_SIDE_EFFECTS (*expr_p) = 0;
0f59171d
RH
10518 ret = MIN (r0, r1);
10519 }
6de9cd9a
DN
10520 break;
10521
6de9cd9a
DN
10522 case LABEL_DECL:
10523 /* We get here when taking the address of a label. We mark
10524 the label as "forced"; meaning it can never be removed and
10525 it is a potential target for any computed goto. */
10526 FORCED_LABEL (*expr_p) = 1;
10527 ret = GS_ALL_DONE;
10528 break;
10529
10530 case STATEMENT_LIST:
c6c7698d 10531 ret = gimplify_statement_list (expr_p, pre_p);
6de9cd9a
DN
10532 break;
10533
d25cee4d
RH
10534 case WITH_SIZE_EXPR:
10535 {
70e2829d
KH
10536 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10537 post_p == &internal_post ? NULL : post_p,
10538 gimple_test_f, fallback);
10539 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
10540 is_gimple_val, fb_rvalue);
941f78d1 10541 ret = GS_ALL_DONE;
d25cee4d
RH
10542 }
10543 break;
10544
6de9cd9a 10545 case VAR_DECL:
4744afba 10546 case PARM_DECL:
a9f7c570 10547 ret = gimplify_var_or_parm_decl (expr_p);
6de9cd9a
DN
10548 break;
10549
077b0dfb 10550 case RESULT_DECL:
41dbbb37 10551 /* When within an OMP context, notice uses of variables. */
077b0dfb
JJ
10552 if (gimplify_omp_ctxp)
10553 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
10554 ret = GS_ALL_DONE;
10555 break;
10556
71956db3
RH
10557 case SSA_NAME:
10558 /* Allow callbacks into the gimplifier during optimization. */
10559 ret = GS_ALL_DONE;
10560 break;
10561
953ff289 10562 case OMP_PARALLEL:
726a989a
RB
10563 gimplify_omp_parallel (expr_p, pre_p);
10564 ret = GS_ALL_DONE;
953ff289
DN
10565 break;
10566
a68ab351 10567 case OMP_TASK:
726a989a
RB
10568 gimplify_omp_task (expr_p, pre_p);
10569 ret = GS_ALL_DONE;
a68ab351
JJ
10570 break;
10571
953ff289 10572 case OMP_FOR:
74bf76ed 10573 case OMP_SIMD:
c02065fc 10574 case CILK_SIMD:
9a771876 10575 case CILK_FOR:
acf0174b 10576 case OMP_DISTRIBUTE:
d9a6bd32 10577 case OMP_TASKLOOP:
41dbbb37 10578 case OACC_LOOP:
953ff289
DN
10579 ret = gimplify_omp_for (expr_p, pre_p);
10580 break;
10581
41dbbb37
TS
10582 case OACC_CACHE:
10583 gimplify_oacc_cache (expr_p, pre_p);
10584 ret = GS_ALL_DONE;
10585 break;
10586
6e232ba4
JN
10587 case OACC_DECLARE:
10588 gimplify_oacc_declare (expr_p, pre_p);
10589 ret = GS_ALL_DONE;
10590 break;
10591
37d5ad46 10592 case OACC_HOST_DATA:
88bae6f4 10593 case OACC_DATA:
41dbbb37 10594 case OACC_KERNELS:
41dbbb37 10595 case OACC_PARALLEL:
953ff289
DN
10596 case OMP_SECTIONS:
10597 case OMP_SINGLE:
acf0174b
JJ
10598 case OMP_TARGET:
10599 case OMP_TARGET_DATA:
10600 case OMP_TEAMS:
726a989a
RB
10601 gimplify_omp_workshare (expr_p, pre_p);
10602 ret = GS_ALL_DONE;
953ff289
DN
10603 break;
10604
41dbbb37
TS
10605 case OACC_ENTER_DATA:
10606 case OACC_EXIT_DATA:
10607 case OACC_UPDATE:
acf0174b 10608 case OMP_TARGET_UPDATE:
d9a6bd32
JJ
10609 case OMP_TARGET_ENTER_DATA:
10610 case OMP_TARGET_EXIT_DATA:
acf0174b
JJ
10611 gimplify_omp_target_update (expr_p, pre_p);
10612 ret = GS_ALL_DONE;
10613 break;
10614
953ff289
DN
10615 case OMP_SECTION:
10616 case OMP_MASTER:
acf0174b 10617 case OMP_TASKGROUP:
953ff289
DN
10618 case OMP_ORDERED:
10619 case OMP_CRITICAL:
726a989a
RB
10620 {
10621 gimple_seq body = NULL;
355fe088 10622 gimple *g;
726a989a
RB
10623
10624 gimplify_and_add (OMP_BODY (*expr_p), &body);
10625 switch (TREE_CODE (*expr_p))
10626 {
10627 case OMP_SECTION:
10628 g = gimple_build_omp_section (body);
10629 break;
10630 case OMP_MASTER:
10631 g = gimple_build_omp_master (body);
10632 break;
acf0174b
JJ
10633 case OMP_TASKGROUP:
10634 {
10635 gimple_seq cleanup = NULL;
10636 tree fn
10637 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
10638 g = gimple_build_call (fn, 0);
10639 gimple_seq_add_stmt (&cleanup, g);
10640 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
10641 body = NULL;
10642 gimple_seq_add_stmt (&body, g);
10643 g = gimple_build_omp_taskgroup (body);
10644 }
10645 break;
726a989a 10646 case OMP_ORDERED:
d9a6bd32 10647 g = gimplify_omp_ordered (*expr_p, body);
726a989a
RB
10648 break;
10649 case OMP_CRITICAL:
d9a6bd32
JJ
10650 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
10651 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
1a80d6b8 10652 gimplify_adjust_omp_clauses (pre_p, body,
d9a6bd32
JJ
10653 &OMP_CRITICAL_CLAUSES (*expr_p),
10654 OMP_CRITICAL);
726a989a 10655 g = gimple_build_omp_critical (body,
d9a6bd32
JJ
10656 OMP_CRITICAL_NAME (*expr_p),
10657 OMP_CRITICAL_CLAUSES (*expr_p));
726a989a
RB
10658 break;
10659 default:
10660 gcc_unreachable ();
10661 }
10662 gimplify_seq_add_stmt (pre_p, g);
10663 ret = GS_ALL_DONE;
10664 break;
10665 }
953ff289
DN
10666
10667 case OMP_ATOMIC:
20906c66
JJ
10668 case OMP_ATOMIC_READ:
10669 case OMP_ATOMIC_CAPTURE_OLD:
10670 case OMP_ATOMIC_CAPTURE_NEW:
953ff289
DN
10671 ret = gimplify_omp_atomic (expr_p, pre_p);
10672 break;
10673
0a35513e
AH
10674 case TRANSACTION_EXPR:
10675 ret = gimplify_transaction (expr_p, pre_p);
10676 break;
10677
16949072
RG
10678 case TRUTH_AND_EXPR:
10679 case TRUTH_OR_EXPR:
10680 case TRUTH_XOR_EXPR:
1d15f620 10681 {
bd5d002e 10682 tree orig_type = TREE_TYPE (*expr_p);
fc1f4caf 10683 tree new_type, xop0, xop1;
1d15f620 10684 *expr_p = gimple_boolify (*expr_p);
fc1f4caf
KT
10685 new_type = TREE_TYPE (*expr_p);
10686 if (!useless_type_conversion_p (orig_type, new_type))
1d15f620 10687 {
4b4455e5 10688 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
1d15f620
KT
10689 ret = GS_OK;
10690 break;
10691 }
da5fb469 10692
bd5d002e
RG
10693 /* Boolified binary truth expressions are semantically equivalent
10694 to bitwise binary expressions. Canonicalize them to the
10695 bitwise variant. */
10696 switch (TREE_CODE (*expr_p))
10697 {
10698 case TRUTH_AND_EXPR:
10699 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
10700 break;
10701 case TRUTH_OR_EXPR:
10702 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
10703 break;
10704 case TRUTH_XOR_EXPR:
10705 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
10706 break;
10707 default:
10708 break;
10709 }
fc1f4caf
KT
10710 /* Now make sure that operands have compatible type to
10711 expression's new_type. */
10712 xop0 = TREE_OPERAND (*expr_p, 0);
10713 xop1 = TREE_OPERAND (*expr_p, 1);
10714 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
10715 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
10716 new_type,
10717 xop0);
10718 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
10719 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
10720 new_type,
10721 xop1);
bd5d002e
RG
10722 /* Continue classified as tcc_binary. */
10723 goto expr_2;
da5fb469 10724 }
16949072
RG
10725
10726 case FMA_EXPR:
e6ed43b0 10727 case VEC_COND_EXPR:
2205ed25 10728 case VEC_PERM_EXPR:
16949072
RG
10729 /* Classified as tcc_expression. */
10730 goto expr_3;
10731
5be014d5 10732 case POINTER_PLUS_EXPR:
315f5f1b
RG
10733 {
10734 enum gimplify_status r0, r1;
10735 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10736 post_p, is_gimple_val, fb_rvalue);
10737 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
10738 post_p, is_gimple_val, fb_rvalue);
10739 recalculate_side_effects (*expr_p);
10740 ret = MIN (r0, r1);
315f5f1b
RG
10741 break;
10742 }
726a989a 10743
939b37da
BI
10744 case CILK_SYNC_STMT:
10745 {
10746 if (!fn_contains_cilk_spawn_p (cfun))
10747 {
10748 error_at (EXPR_LOCATION (*expr_p),
10749 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
10750 ret = GS_ERROR;
10751 }
10752 else
10753 {
10754 gimplify_cilk_sync (expr_p, pre_p);
10755 ret = GS_ALL_DONE;
10756 }
10757 break;
10758 }
10759
6de9cd9a 10760 default:
282899df 10761 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
6de9cd9a 10762 {
6615c446 10763 case tcc_comparison:
61c25908
OH
10764 /* Handle comparison of objects of non scalar mode aggregates
10765 with a call to memcmp. It would be nice to only have to do
10766 this for variable-sized objects, but then we'd have to allow
10767 the same nest of reference nodes we allow for MODIFY_EXPR and
10768 that's too complex.
10769
10770 Compare scalar mode aggregates as scalar mode values. Using
10771 memcmp for them would be very inefficient at best, and is
10772 plain wrong if bitfields are involved. */
726a989a
RB
10773 {
10774 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
61c25908 10775
544d960a
AS
10776 /* Vector comparisons need no boolification. */
10777 if (TREE_CODE (type) == VECTOR_TYPE)
10778 goto expr_2;
10779 else if (!AGGREGATE_TYPE_P (type))
7f3ff782
KT
10780 {
10781 tree org_type = TREE_TYPE (*expr_p);
10782 *expr_p = gimple_boolify (*expr_p);
10783 if (!useless_type_conversion_p (org_type,
10784 TREE_TYPE (*expr_p)))
10785 {
10786 *expr_p = fold_convert_loc (input_location,
10787 org_type, *expr_p);
10788 ret = GS_OK;
10789 }
10790 else
10791 goto expr_2;
10792 }
726a989a
RB
10793 else if (TYPE_MODE (type) != BLKmode)
10794 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
10795 else
10796 ret = gimplify_variable_sized_compare (expr_p);
61c25908 10797
726a989a 10798 break;
61c25908 10799 }
d3147f64 10800
282899df
NS
10801 /* If *EXPR_P does not need to be special-cased, handle it
10802 according to its class. */
6615c446 10803 case tcc_unary:
282899df
NS
10804 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10805 post_p, is_gimple_val, fb_rvalue);
10806 break;
6de9cd9a 10807
6615c446 10808 case tcc_binary:
282899df
NS
10809 expr_2:
10810 {
10811 enum gimplify_status r0, r1;
d3147f64 10812
282899df 10813 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
726a989a 10814 post_p, is_gimple_val, fb_rvalue);
282899df
NS
10815 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
10816 post_p, is_gimple_val, fb_rvalue);
d3147f64 10817
282899df
NS
10818 ret = MIN (r0, r1);
10819 break;
10820 }
d3147f64 10821
16949072
RG
10822 expr_3:
10823 {
10824 enum gimplify_status r0, r1, r2;
10825
10826 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10827 post_p, is_gimple_val, fb_rvalue);
10828 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
10829 post_p, is_gimple_val, fb_rvalue);
10830 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
10831 post_p, is_gimple_val, fb_rvalue);
10832
10833 ret = MIN (MIN (r0, r1), r2);
10834 break;
10835 }
10836
6615c446
JO
10837 case tcc_declaration:
10838 case tcc_constant:
6de9cd9a 10839 ret = GS_ALL_DONE;
282899df 10840 goto dont_recalculate;
d3147f64 10841
282899df 10842 default:
16949072 10843 gcc_unreachable ();
6de9cd9a 10844 }
6de9cd9a
DN
10845
10846 recalculate_side_effects (*expr_p);
726a989a 10847
282899df 10848 dont_recalculate:
6de9cd9a
DN
10849 break;
10850 }
d3147f64 10851
941f78d1 10852 gcc_assert (*expr_p || ret != GS_OK);
6de9cd9a
DN
10853 }
10854 while (ret == GS_OK);
10855
10856 /* If we encountered an error_mark somewhere nested inside, either
10857 stub out the statement or propagate the error back out. */
10858 if (ret == GS_ERROR)
10859 {
10860 if (is_statement)
65355d53 10861 *expr_p = NULL;
6de9cd9a
DN
10862 goto out;
10863 }
10864
6de9cd9a
DN
10865 /* This was only valid as a return value from the langhook, which
10866 we handled. Make sure it doesn't escape from any other context. */
282899df 10867 gcc_assert (ret != GS_UNHANDLED);
6de9cd9a 10868
65355d53 10869 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
6de9cd9a
DN
10870 {
10871 /* We aren't looking for a value, and we don't have a valid
10872 statement. If it doesn't have side-effects, throw it away. */
10873 if (!TREE_SIDE_EFFECTS (*expr_p))
65355d53 10874 *expr_p = NULL;
6de9cd9a 10875 else if (!TREE_THIS_VOLATILE (*expr_p))
44de5aeb
RK
10876 {
10877 /* This is probably a _REF that contains something nested that
10878 has side effects. Recurse through the operands to find it. */
10879 enum tree_code code = TREE_CODE (*expr_p);
10880
282899df 10881 switch (code)
44de5aeb 10882 {
282899df 10883 case COMPONENT_REF:
02a5eac4
EB
10884 case REALPART_EXPR:
10885 case IMAGPART_EXPR:
10886 case VIEW_CONVERT_EXPR:
282899df
NS
10887 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10888 gimple_test_f, fallback);
10889 break;
10890
a9e64c63
EB
10891 case ARRAY_REF:
10892 case ARRAY_RANGE_REF:
44de5aeb
RK
10893 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10894 gimple_test_f, fallback);
10895 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
282899df
NS
10896 gimple_test_f, fallback);
10897 break;
10898
10899 default:
10900 /* Anything else with side-effects must be converted to
a9e64c63 10901 a valid statement before we get here. */
282899df 10902 gcc_unreachable ();
44de5aeb 10903 }
44de5aeb 10904
65355d53 10905 *expr_p = NULL;
44de5aeb 10906 }
a9e64c63
EB
10907 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
10908 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
6de9cd9a 10909 {
a9e64c63
EB
10910 /* Historically, the compiler has treated a bare reference
10911 to a non-BLKmode volatile lvalue as forcing a load. */
af62f6f9 10912 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
726a989a 10913
c22b1771 10914 /* Normally, we do not want to create a temporary for a
a38578e1
MM
10915 TREE_ADDRESSABLE type because such a type should not be
10916 copied by bitwise-assignment. However, we make an
10917 exception here, as all we are doing here is ensuring that
10918 we read the bytes that make up the type. We use
10919 create_tmp_var_raw because create_tmp_var will abort when
57b51d4d 10920 given a TREE_ADDRESSABLE type. */
a38578e1
MM
10921 tree tmp = create_tmp_var_raw (type, "vol");
10922 gimple_add_tmp_var (tmp);
726a989a
RB
10923 gimplify_assign (tmp, *expr_p, pre_p);
10924 *expr_p = NULL;
6de9cd9a
DN
10925 }
10926 else
10927 /* We can't do anything useful with a volatile reference to
a9e64c63
EB
10928 an incomplete type, so just throw it away. Likewise for
10929 a BLKmode type, since any implicit inner load should
10930 already have been turned into an explicit one by the
10931 gimplification process. */
65355d53 10932 *expr_p = NULL;
6de9cd9a
DN
10933 }
10934
10935 /* If we are gimplifying at the statement level, we're done. Tack
726a989a 10936 everything together and return. */
325c3691 10937 if (fallback == fb_none || is_statement)
6de9cd9a 10938 {
726a989a
RB
10939 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
10940 it out for GC to reclaim it. */
10941 *expr_p = NULL_TREE;
10942
10943 if (!gimple_seq_empty_p (internal_pre)
10944 || !gimple_seq_empty_p (internal_post))
be00f578 10945 {
726a989a
RB
10946 gimplify_seq_add_seq (&internal_pre, internal_post);
10947 gimplify_seq_add_seq (pre_p, internal_pre);
be00f578 10948 }
726a989a
RB
10949
10950 /* The result of gimplifying *EXPR_P is going to be the last few
10951 statements in *PRE_P and *POST_P. Add location information
10952 to all the statements that were added by the gimplification
10953 helpers. */
10954 if (!gimple_seq_empty_p (*pre_p))
10955 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
10956
10957 if (!gimple_seq_empty_p (*post_p))
10958 annotate_all_with_location_after (*post_p, post_last_gsi,
10959 input_location);
10960
6de9cd9a
DN
10961 goto out;
10962 }
10963
726a989a
RB
10964#ifdef ENABLE_GIMPLE_CHECKING
10965 if (*expr_p)
10966 {
10967 enum tree_code code = TREE_CODE (*expr_p);
10968 /* These expressions should already be in gimple IR form. */
10969 gcc_assert (code != MODIFY_EXPR
10970 && code != ASM_EXPR
10971 && code != BIND_EXPR
10972 && code != CATCH_EXPR
6fc4fb06 10973 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
726a989a
RB
10974 && code != EH_FILTER_EXPR
10975 && code != GOTO_EXPR
10976 && code != LABEL_EXPR
10977 && code != LOOP_EXPR
726a989a
RB
10978 && code != SWITCH_EXPR
10979 && code != TRY_FINALLY_EXPR
41dbbb37
TS
10980 && code != OACC_PARALLEL
10981 && code != OACC_KERNELS
10982 && code != OACC_DATA
10983 && code != OACC_HOST_DATA
10984 && code != OACC_DECLARE
10985 && code != OACC_UPDATE
10986 && code != OACC_ENTER_DATA
10987 && code != OACC_EXIT_DATA
10988 && code != OACC_CACHE
726a989a
RB
10989 && code != OMP_CRITICAL
10990 && code != OMP_FOR
41dbbb37 10991 && code != OACC_LOOP
726a989a 10992 && code != OMP_MASTER
acf0174b 10993 && code != OMP_TASKGROUP
726a989a
RB
10994 && code != OMP_ORDERED
10995 && code != OMP_PARALLEL
10996 && code != OMP_SECTIONS
10997 && code != OMP_SECTION
10998 && code != OMP_SINGLE);
10999 }
11000#endif
6de9cd9a 11001
726a989a
RB
11002 /* Otherwise we're gimplifying a subexpression, so the resulting
11003 value is interesting. If it's a valid operand that matches
11004 GIMPLE_TEST_F, we're done. Unless we are handling some
11005 post-effects internally; if that's the case, we need to copy into
11006 a temporary before adding the post-effects to POST_P. */
11007 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
6de9cd9a
DN
11008 goto out;
11009
11010 /* Otherwise, we need to create a new temporary for the gimplified
11011 expression. */
11012
11013 /* We can't return an lvalue if we have an internal postqueue. The
11014 object the lvalue refers to would (probably) be modified by the
11015 postqueue; we need to copy the value out first, which means an
11016 rvalue. */
726a989a
RB
11017 if ((fallback & fb_lvalue)
11018 && gimple_seq_empty_p (internal_post)
e847cc68 11019 && is_gimple_addressable (*expr_p))
6de9cd9a
DN
11020 {
11021 /* An lvalue will do. Take the address of the expression, store it
11022 in a temporary, and replace the expression with an INDIRECT_REF of
11023 that temporary. */
db3927fb 11024 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
6de9cd9a 11025 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
7f5ad6d7 11026 *expr_p = build_simple_mem_ref (tmp);
6de9cd9a 11027 }
ba4d8f9d 11028 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
6de9cd9a 11029 {
726a989a
RB
11030 /* An rvalue will do. Assign the gimplified expression into a
11031 new temporary TMP and replace the original expression with
11032 TMP. First, make sure that the expression has a type so that
11033 it can be assigned into a temporary. */
282899df 11034 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
412a1d9e 11035 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
6de9cd9a 11036 }
282899df 11037 else
6de9cd9a 11038 {
726a989a 11039#ifdef ENABLE_GIMPLE_CHECKING
282899df
NS
11040 if (!(fallback & fb_mayfail))
11041 {
11042 fprintf (stderr, "gimplification failed:\n");
11043 print_generic_expr (stderr, *expr_p, 0);
11044 debug_tree (*expr_p);
11045 internal_error ("gimplification failed");
11046 }
11047#endif
11048 gcc_assert (fallback & fb_mayfail);
726a989a 11049
282899df 11050 /* If this is an asm statement, and the user asked for the
535a42b1 11051 impossible, don't die. Fail and let gimplify_asm_expr
282899df 11052 issue an error. */
6de9cd9a
DN
11053 ret = GS_ERROR;
11054 goto out;
11055 }
6de9cd9a 11056
6de9cd9a 11057 /* Make sure the temporary matches our predicate. */
282899df 11058 gcc_assert ((*gimple_test_f) (*expr_p));
6de9cd9a 11059
726a989a 11060 if (!gimple_seq_empty_p (internal_post))
6de9cd9a 11061 {
726a989a
RB
11062 annotate_all_with_location (internal_post, input_location);
11063 gimplify_seq_add_seq (pre_p, internal_post);
6de9cd9a
DN
11064 }
11065
11066 out:
11067 input_location = saved_location;
11068 return ret;
11069}
11070
44de5aeb 11071/* Look through TYPE for variable-sized objects and gimplify each such
65355d53 11072 size that we find. Add to LIST_P any statements generated. */
44de5aeb 11073
65355d53 11074void
726a989a 11075gimplify_type_sizes (tree type, gimple_seq *list_p)
44de5aeb 11076{
ad50bc8d
RH
11077 tree field, t;
11078
19dbbf36 11079 if (type == NULL || type == error_mark_node)
8e0a600b 11080 return;
ad50bc8d 11081
6c6cfbfd 11082 /* We first do the main variant, then copy into any other variants. */
ad50bc8d 11083 type = TYPE_MAIN_VARIANT (type);
44de5aeb 11084
8e0a600b 11085 /* Avoid infinite recursion. */
19dbbf36 11086 if (TYPE_SIZES_GIMPLIFIED (type))
8e0a600b
JJ
11087 return;
11088
11089 TYPE_SIZES_GIMPLIFIED (type) = 1;
11090
44de5aeb
RK
11091 switch (TREE_CODE (type))
11092 {
44de5aeb
RK
11093 case INTEGER_TYPE:
11094 case ENUMERAL_TYPE:
11095 case BOOLEAN_TYPE:
44de5aeb 11096 case REAL_TYPE:
325217ed 11097 case FIXED_POINT_TYPE:
65355d53
RH
11098 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
11099 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
ad50bc8d
RH
11100
11101 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
11102 {
11103 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
11104 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
ad50bc8d 11105 }
44de5aeb
RK
11106 break;
11107
11108 case ARRAY_TYPE:
ad50bc8d 11109 /* These types may not have declarations, so handle them here. */
8e0a600b
JJ
11110 gimplify_type_sizes (TREE_TYPE (type), list_p);
11111 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
2e957792
JJ
11112 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
11113 with assigned stack slots, for -O1+ -g they should be tracked
11114 by VTA. */
08d78391
EB
11115 if (!(TYPE_NAME (type)
11116 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
11117 && DECL_IGNORED_P (TYPE_NAME (type)))
11118 && TYPE_DOMAIN (type)
802e9f8e
JJ
11119 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
11120 {
11121 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
11122 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
11123 DECL_IGNORED_P (t) = 0;
11124 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
11125 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
11126 DECL_IGNORED_P (t) = 0;
11127 }
44de5aeb
RK
11128 break;
11129
11130 case RECORD_TYPE:
11131 case UNION_TYPE:
11132 case QUAL_UNION_TYPE:
910ad8de 11133 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
44de5aeb 11134 if (TREE_CODE (field) == FIELD_DECL)
8e0a600b
JJ
11135 {
11136 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
9a9ba8d9
JJ
11137 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
11138 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
8e0a600b
JJ
11139 gimplify_type_sizes (TREE_TYPE (field), list_p);
11140 }
11141 break;
11142
11143 case POINTER_TYPE:
11144 case REFERENCE_TYPE:
706c4bb7
OH
11145 /* We used to recurse on the pointed-to type here, which turned out to
11146 be incorrect because its definition might refer to variables not
11147 yet initialized at this point if a forward declaration is involved.
11148
11149 It was actually useful for anonymous pointed-to types to ensure
11150 that the sizes evaluation dominates every possible later use of the
11151 values. Restricting to such types here would be safe since there
f63645be
KH
11152 is no possible forward declaration around, but would introduce an
11153 undesirable middle-end semantic to anonymity. We then defer to
11154 front-ends the responsibility of ensuring that the sizes are
11155 evaluated both early and late enough, e.g. by attaching artificial
706c4bb7 11156 type declarations to the tree. */
44de5aeb
RK
11157 break;
11158
11159 default:
11160 break;
11161 }
11162
65355d53
RH
11163 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
11164 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
44de5aeb 11165
ad50bc8d 11166 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
b4830636 11167 {
ad50bc8d
RH
11168 TYPE_SIZE (t) = TYPE_SIZE (type);
11169 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
11170 TYPE_SIZES_GIMPLIFIED (t) = 1;
b4830636 11171 }
b4830636
RH
11172}
11173
11174/* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
11175 a size or position, has had all of its SAVE_EXPRs evaluated.
726a989a 11176 We add any required statements to *STMT_P. */
44de5aeb
RK
11177
11178void
726a989a 11179gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
44de5aeb 11180{
3ac8781c 11181 tree expr = *expr_p;
a9c5ddf9 11182
44de5aeb 11183 /* We don't do anything if the value isn't there, is constant, or contains
1e748a2b 11184 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
aabcd309 11185 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
1e748a2b
RK
11186 will want to replace it with a new variable, but that will cause problems
11187 if this type is from outside the function. It's OK to have that here. */
848be094 11188 if (is_gimple_sizepos (expr))
44de5aeb
RK
11189 return;
11190
a9c5ddf9
RH
11191 *expr_p = unshare_expr (expr);
11192
ad50bc8d 11193 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
44de5aeb 11194}
6de9cd9a 11195
3ad065ef
EB
11196/* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
11197 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
11198 is true, also gimplify the parameters. */
726a989a 11199
538dd0b7 11200gbind *
3ad065ef 11201gimplify_body (tree fndecl, bool do_parms)
6de9cd9a
DN
11202{
11203 location_t saved_location = input_location;
726a989a 11204 gimple_seq parm_stmts, seq;
355fe088 11205 gimple *outer_stmt;
538dd0b7 11206 gbind *outer_bind;
9f9ebcdf 11207 struct cgraph_node *cgn;
6de9cd9a
DN
11208
11209 timevar_push (TV_TREE_GIMPLIFY);
953ff289 11210
f66d6761
SB
11211 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
11212 gimplification. */
11213 default_rtl_profile ();
11214
953ff289 11215 gcc_assert (gimplify_ctxp == NULL);
45852dcc 11216 push_gimplify_context ();
6de9cd9a 11217
41dbbb37 11218 if (flag_openacc || flag_openmp)
acf0174b
JJ
11219 {
11220 gcc_assert (gimplify_omp_ctxp == NULL);
11221 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
11222 gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
11223 }
11224
44de5aeb
RK
11225 /* Unshare most shared trees in the body and in that of any nested functions.
11226 It would seem we don't have to do this for nested functions because
11227 they are supposed to be output and then the outer function gimplified
11228 first, but the g++ front end doesn't always do it that way. */
3ad065ef
EB
11229 unshare_body (fndecl);
11230 unvisit_body (fndecl);
6de9cd9a 11231
d52f5295 11232 cgn = cgraph_node::get (fndecl);
9f9ebcdf 11233 if (cgn && cgn->origin)
6e2830c3 11234 nonlocal_vlas = new hash_set<tree>;
77f2a970 11235
fa10beec 11236 /* Make sure input_location isn't set to something weird. */
6de9cd9a
DN
11237 input_location = DECL_SOURCE_LOCATION (fndecl);
11238
4744afba
RH
11239 /* Resolve callee-copies. This has to be done before processing
11240 the body so that DECL_VALUE_EXPR gets processed correctly. */
3ad065ef 11241 parm_stmts = do_parms ? gimplify_parameters () : NULL;
4744afba 11242
6de9cd9a 11243 /* Gimplify the function's body. */
726a989a 11244 seq = NULL;
3ad065ef 11245 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
538dd0b7
DM
11246 outer_stmt = gimple_seq_first_stmt (seq);
11247 if (!outer_stmt)
6de9cd9a 11248 {
538dd0b7
DM
11249 outer_stmt = gimple_build_nop ();
11250 gimplify_seq_add_stmt (&seq, outer_stmt);
6de9cd9a 11251 }
44de5aeb 11252
726a989a
RB
11253 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
11254 not the case, wrap everything in a GIMPLE_BIND to make it so. */
538dd0b7 11255 if (gimple_code (outer_stmt) == GIMPLE_BIND
726a989a 11256 && gimple_seq_first (seq) == gimple_seq_last (seq))
538dd0b7 11257 outer_bind = as_a <gbind *> (outer_stmt);
726a989a
RB
11258 else
11259 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
11260
3ad065ef 11261 DECL_SAVED_TREE (fndecl) = NULL_TREE;
4744afba
RH
11262
11263 /* If we had callee-copies statements, insert them at the beginning
f0c10f0f 11264 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
726a989a 11265 if (!gimple_seq_empty_p (parm_stmts))
4744afba 11266 {
f0c10f0f
RG
11267 tree parm;
11268
726a989a
RB
11269 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
11270 gimple_bind_set_body (outer_bind, parm_stmts);
f0c10f0f
RG
11271
11272 for (parm = DECL_ARGUMENTS (current_function_decl);
910ad8de 11273 parm; parm = DECL_CHAIN (parm))
f0c10f0f
RG
11274 if (DECL_HAS_VALUE_EXPR_P (parm))
11275 {
11276 DECL_HAS_VALUE_EXPR_P (parm) = 0;
11277 DECL_IGNORED_P (parm) = 0;
11278 }
4744afba
RH
11279 }
11280
77f2a970
JJ
11281 if (nonlocal_vlas)
11282 {
96ddb7ec
JJ
11283 if (nonlocal_vla_vars)
11284 {
11285 /* tree-nested.c may later on call declare_vars (..., true);
11286 which relies on BLOCK_VARS chain to be the tail of the
11287 gimple_bind_vars chain. Ensure we don't violate that
11288 assumption. */
11289 if (gimple_bind_block (outer_bind)
11290 == DECL_INITIAL (current_function_decl))
11291 declare_vars (nonlocal_vla_vars, outer_bind, true);
11292 else
11293 BLOCK_VARS (DECL_INITIAL (current_function_decl))
11294 = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
11295 nonlocal_vla_vars);
11296 nonlocal_vla_vars = NULL_TREE;
11297 }
6e2830c3 11298 delete nonlocal_vlas;
77f2a970
JJ
11299 nonlocal_vlas = NULL;
11300 }
11301
41dbbb37
TS
11302 if ((flag_openacc || flag_openmp || flag_openmp_simd)
11303 && gimplify_omp_ctxp)
acf0174b
JJ
11304 {
11305 delete_omp_context (gimplify_omp_ctxp);
11306 gimplify_omp_ctxp = NULL;
11307 }
11308
726a989a 11309 pop_gimplify_context (outer_bind);
953ff289 11310 gcc_assert (gimplify_ctxp == NULL);
6de9cd9a 11311
b2b29377 11312 if (flag_checking && !seen_error ())
34019e28 11313 verify_gimple_in_seq (gimple_bind_body (outer_bind));
6de9cd9a
DN
11314
11315 timevar_pop (TV_TREE_GIMPLIFY);
11316 input_location = saved_location;
726a989a
RB
11317
11318 return outer_bind;
6de9cd9a
DN
11319}
11320
6a1f6c9c 11321typedef char *char_p; /* For DEF_VEC_P. */
6a1f6c9c
JM
11322
11323/* Return whether we should exclude FNDECL from instrumentation. */
11324
11325static bool
11326flag_instrument_functions_exclude_p (tree fndecl)
11327{
9771b263 11328 vec<char_p> *v;
6a1f6c9c 11329
9771b263
DN
11330 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
11331 if (v && v->length () > 0)
6a1f6c9c
JM
11332 {
11333 const char *name;
11334 int i;
11335 char *s;
11336
11337 name = lang_hooks.decl_printable_name (fndecl, 0);
9771b263 11338 FOR_EACH_VEC_ELT (*v, i, s)
6a1f6c9c
JM
11339 if (strstr (name, s) != NULL)
11340 return true;
11341 }
11342
9771b263
DN
11343 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
11344 if (v && v->length () > 0)
6a1f6c9c
JM
11345 {
11346 const char *name;
11347 int i;
11348 char *s;
11349
11350 name = DECL_SOURCE_FILE (fndecl);
9771b263 11351 FOR_EACH_VEC_ELT (*v, i, s)
6a1f6c9c
JM
11352 if (strstr (name, s) != NULL)
11353 return true;
11354 }
11355
11356 return false;
11357}
11358
6de9cd9a 11359/* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
726a989a 11360 node for the function we want to gimplify.
b8698a0f 11361
ad19c4be 11362 Return the sequence of GIMPLE statements corresponding to the body
726a989a 11363 of FNDECL. */
6de9cd9a
DN
11364
11365void
11366gimplify_function_tree (tree fndecl)
11367{
af16bc76 11368 tree parm, ret;
726a989a 11369 gimple_seq seq;
538dd0b7 11370 gbind *bind;
6de9cd9a 11371
a406865a
RG
11372 gcc_assert (!gimple_body (fndecl));
11373
db2960f4
SL
11374 if (DECL_STRUCT_FUNCTION (fndecl))
11375 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
11376 else
11377 push_struct_function (fndecl);
6de9cd9a 11378
d67cb100
TV
11379 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
11380 if necessary. */
11381 cfun->curr_properties |= PROP_gimple_lva;
11382
910ad8de 11383 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
e41d82f5
RH
11384 {
11385 /* Preliminarily mark non-addressed complex variables as eligible
11386 for promotion to gimple registers. We'll transform their uses
11387 as we find them. */
0890b981
AP
11388 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
11389 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
e41d82f5
RH
11390 && !TREE_THIS_VOLATILE (parm)
11391 && !needs_to_live_in_memory (parm))
0890b981 11392 DECL_GIMPLE_REG_P (parm) = 1;
e41d82f5
RH
11393 }
11394
11395 ret = DECL_RESULT (fndecl);
0890b981 11396 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
7b7e6ecd 11397 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
e41d82f5 11398 && !needs_to_live_in_memory (ret))
0890b981 11399 DECL_GIMPLE_REG_P (ret) = 1;
e41d82f5 11400
3ad065ef 11401 bind = gimplify_body (fndecl, true);
726a989a
RB
11402
11403 /* The tree body of the function is no longer needed, replace it
11404 with the new GIMPLE body. */
355a7673 11405 seq = NULL;
726a989a
RB
11406 gimple_seq_add_stmt (&seq, bind);
11407 gimple_set_body (fndecl, seq);
6de9cd9a
DN
11408
11409 /* If we're instrumenting function entry/exit, then prepend the call to
11410 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
11411 catch the exit hook. */
11412 /* ??? Add some way to ignore exceptions for this TFE. */
11413 if (flag_instrument_function_entry_exit
8d5a7d1f
ILT
11414 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
11415 && !flag_instrument_functions_exclude_p (fndecl))
6de9cd9a 11416 {
726a989a 11417 tree x;
538dd0b7 11418 gbind *new_bind;
355fe088 11419 gimple *tf;
726a989a 11420 gimple_seq cleanup = NULL, body = NULL;
b01890ff 11421 tree tmp_var;
538dd0b7 11422 gcall *call;
b01890ff 11423
e79983f4 11424 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
59527282 11425 call = gimple_build_call (x, 1, integer_zero_node);
b01890ff
JH
11426 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
11427 gimple_call_set_lhs (call, tmp_var);
11428 gimplify_seq_add_stmt (&cleanup, call);
e79983f4 11429 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
b01890ff
JH
11430 call = gimple_build_call (x, 2,
11431 build_fold_addr_expr (current_function_decl),
11432 tmp_var);
11433 gimplify_seq_add_stmt (&cleanup, call);
726a989a 11434 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
6de9cd9a 11435
e79983f4 11436 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
59527282 11437 call = gimple_build_call (x, 1, integer_zero_node);
b01890ff
JH
11438 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
11439 gimple_call_set_lhs (call, tmp_var);
11440 gimplify_seq_add_stmt (&body, call);
e79983f4 11441 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
b01890ff
JH
11442 call = gimple_build_call (x, 2,
11443 build_fold_addr_expr (current_function_decl),
11444 tmp_var);
11445 gimplify_seq_add_stmt (&body, call);
726a989a 11446 gimplify_seq_add_stmt (&body, tf);
32001f69 11447 new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
726a989a
RB
11448 /* Clear the block for BIND, since it is no longer directly inside
11449 the function, but within a try block. */
32001f69 11450 gimple_bind_set_block (bind, NULL);
6de9cd9a 11451
726a989a
RB
11452 /* Replace the current function body with the body
11453 wrapped in the try/finally TF. */
355a7673 11454 seq = NULL;
726a989a
RB
11455 gimple_seq_add_stmt (&seq, new_bind);
11456 gimple_set_body (fndecl, seq);
fca4adf2
JJ
11457 bind = new_bind;
11458 }
11459
2f3c4b69
BE
11460 if ((flag_sanitize & SANITIZE_THREAD) != 0
11461 && !lookup_attribute ("no_sanitize_thread", DECL_ATTRIBUTES (fndecl)))
fca4adf2
JJ
11462 {
11463 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
355fe088 11464 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
fca4adf2
JJ
11465 gbind *new_bind = gimple_build_bind (NULL, tf, gimple_bind_block (bind));
11466 /* Clear the block for BIND, since it is no longer directly inside
11467 the function, but within a try block. */
11468 gimple_bind_set_block (bind, NULL);
11469 /* Replace the current function body with the body
11470 wrapped in the try/finally TF. */
11471 seq = NULL;
11472 gimple_seq_add_stmt (&seq, new_bind);
11473 gimple_set_body (fndecl, seq);
6de9cd9a
DN
11474 }
11475
726a989a 11476 DECL_SAVED_TREE (fndecl) = NULL_TREE;
d67cb100 11477 cfun->curr_properties |= PROP_gimple_any;
726a989a 11478
db2960f4 11479 pop_cfun ();
88d91afd
TV
11480
11481 dump_function (TDI_generic, fndecl);
6de9cd9a 11482}
726a989a 11483
4a7cb16f
AM
11484/* Return a dummy expression of type TYPE in order to keep going after an
11485 error. */
b184c8f1 11486
4a7cb16f
AM
11487static tree
11488dummy_object (tree type)
b184c8f1 11489{
4a7cb16f
AM
11490 tree t = build_int_cst (build_pointer_type (type), 0);
11491 return build2 (MEM_REF, type, t, t);
b184c8f1
AM
11492}
11493
4a7cb16f
AM
11494/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
11495 builtin function, but a very special sort of operator. */
b184c8f1 11496
4a7cb16f 11497enum gimplify_status
f8e89441
TV
11498gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
11499 gimple_seq *post_p ATTRIBUTE_UNUSED)
4a7cb16f
AM
11500{
11501 tree promoted_type, have_va_type;
11502 tree valist = TREE_OPERAND (*expr_p, 0);
11503 tree type = TREE_TYPE (*expr_p);
2fe1d762 11504 tree t, tag;
4a7cb16f 11505 location_t loc = EXPR_LOCATION (*expr_p);
b184c8f1 11506
4a7cb16f
AM
11507 /* Verify that valist is of the proper type. */
11508 have_va_type = TREE_TYPE (valist);
11509 if (have_va_type == error_mark_node)
11510 return GS_ERROR;
11511 have_va_type = targetm.canonical_va_list_type (have_va_type);
b184c8f1 11512
4a7cb16f
AM
11513 if (have_va_type == NULL_TREE)
11514 {
11515 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
11516 return GS_ERROR;
11517 }
b184c8f1 11518
4a7cb16f
AM
11519 /* Generate a diagnostic for requesting data of a type that cannot
11520 be passed through `...' due to type promotion at the call site. */
11521 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
11522 != type)
11523 {
11524 static bool gave_help;
11525 bool warned;
b184c8f1 11526
4a7cb16f
AM
11527 /* Unfortunately, this is merely undefined, rather than a constraint
11528 violation, so we cannot make this an error. If this call is never
11529 executed, the program is still strictly conforming. */
11530 warned = warning_at (loc, 0,
11531 "%qT is promoted to %qT when passed through %<...%>",
11532 type, promoted_type);
11533 if (!gave_help && warned)
11534 {
11535 gave_help = true;
11536 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
11537 promoted_type, type);
11538 }
b184c8f1 11539
4a7cb16f
AM
11540 /* We can, however, treat "undefined" any way we please.
11541 Call abort to encourage the user to fix the program. */
11542 if (warned)
11543 inform (loc, "if this code is reached, the program will abort");
11544 /* Before the abort, allow the evaluation of the va_list
11545 expression to exit or longjmp. */
11546 gimplify_and_add (valist, pre_p);
11547 t = build_call_expr_loc (loc,
11548 builtin_decl_implicit (BUILT_IN_TRAP), 0);
b184c8f1
AM
11549 gimplify_and_add (t, pre_p);
11550
4a7cb16f
AM
11551 /* This is dead code, but go ahead and finish so that the
11552 mode of the result comes out right. */
11553 *expr_p = dummy_object (type);
11554 return GS_ALL_DONE;
b184c8f1 11555 }
b184c8f1 11556
f8e89441 11557 tag = build_int_cst (build_pointer_type (type), 0);
2fe1d762 11558 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 2, valist, tag);
b184c8f1 11559
d67cb100
TV
11560 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
11561 needs to be expanded. */
11562 cfun->curr_properties &= ~PROP_gimple_lva;
11563
f8e89441 11564 return GS_OK;
b184c8f1 11565}
bcf71673 11566
45b0be94
AM
11567/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
11568
11569 DST/SRC are the destination and source respectively. You can pass
11570 ungimplified trees in DST or SRC, in which case they will be
11571 converted to a gimple operand if necessary.
11572
11573 This function returns the newly created GIMPLE_ASSIGN tuple. */
11574
355fe088 11575gimple *
45b0be94
AM
11576gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
11577{
11578 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
11579 gimplify_and_add (t, seq_p);
11580 ggc_free (t);
11581 return gimple_seq_last_stmt (*seq_p);
11582}
11583
18f429e2 11584inline hashval_t
67f58944 11585gimplify_hasher::hash (const elt_t *p)
18f429e2
AM
11586{
11587 tree t = p->val;
11588 return iterative_hash_expr (t, 0);
11589}
11590
11591inline bool
67f58944 11592gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
18f429e2
AM
11593{
11594 tree t1 = p1->val;
11595 tree t2 = p2->val;
11596 enum tree_code code = TREE_CODE (t1);
11597
11598 if (TREE_CODE (t2) != code
11599 || TREE_TYPE (t1) != TREE_TYPE (t2))
11600 return false;
11601
11602 if (!operand_equal_p (t1, t2, 0))
11603 return false;
11604
18f429e2
AM
11605 /* Only allow them to compare equal if they also hash equal; otherwise
11606 results are nondeterminate, and we fail bootstrap comparison. */
b2b29377 11607 gcc_checking_assert (hash (p1) == hash (p2));
18f429e2
AM
11608
11609 return true;
11610}