/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2016 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "gimple-predict.h"
#include "tree-pass.h"	/* FIXME: only for PROP_gimple_any */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "cilk.h"
#include "gomp-constants.h"
#include "tree-dump.h"
#include "gimple-walk.h"
#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
#include "builtins.h"

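/* Data-sharing flags for variables seen while gimplifying OpenMP and
   OpenACC constructs.  They are combined bitwise and recorded per DECL
   in the enclosing gimplify_omp_ctx's splay tree of variables (see
   omp_add_variable and omp_notice_variable below).  */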
enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 8192,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,

  GOVD_MAP_0LEN_ARRAY = 32768,

  /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 65536,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 131072,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 262144,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};

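/* The kind of OpenMP/OpenACC construct a gimplify_omp_ctx describes; stored
   in its region_type field and used, among other things, to pick the
   data-sharing default for the construct.  */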
enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_SIMD = 0x01,

  ORT_PARALLEL = 0x02,
  ORT_COMBINED_PARALLEL = 0x03,

  ORT_TASK = 0x04,
  ORT_UNTIED_TASK = 0x05,

  ORT_TEAMS = 0x08,
  ORT_COMBINED_TEAMS = 0x09,

  /* Data region.  */
  ORT_TARGET_DATA = 0x10,

  /* Data region with offloading.  */
  ORT_TARGET = 0x20,
  ORT_COMBINED_TARGET = 0x21,

  /* OpenACC variants.  */
  ORT_ACC = 0x40,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct.  */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 0x80,  /* Kernels construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 0x80,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x100
};

/* Gimplify hashtable helper.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};

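/* State for one gimplification context.  A context is pushed for each
   function body being gimplified (see push_gimplify_context) and holds the
   temporaries, the stack of GIMPLE_BINDs and related bookkeeping created
   while lowering that body.  */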
struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gbind *> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
};

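/* Per-construct state used while gimplifying OpenMP/OpenACC regions; one of
   these is created by new_omp_context for each nested construct and linked
   to the enclosing one through outer_context.  */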
struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  bool combined_loop;
  bool distribute;
  bool target_map_scalars_firstprivate;
  bool target_map_pointers_as_0len_arrays;
  bool target_firstprivatize_array_bases;
};

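/* The gimplification context of the function currently being lowered, and
   the innermost OpenMP/OpenACC context being processed, if any.  */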
static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;

/* Forward declarations.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);

/* Shorter alias name for gimple_seq_add_stmt_without_update, for use in
   gimplify.c only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}

/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;

/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx * c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}

/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}

/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}

/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}
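
/* A sketch of the usual pairing, for illustration: a caller does
   push_gimplify_context (), gimplifies a function body, and then calls
   pop_gimplify_context (body_stmt) so that the temporaries created in the
   meantime are declared in BODY_STMT (or recorded in the local decls).  */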

/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}

/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the first element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the first element of the stack of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}

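/* The routines below track whether we are gimplifying inside a COND_EXPR,
   so that cleanups generated there can be held back until we return to
   unconditional scope (see gimple_push_cleanup and gimple_pop_condition).  */
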
ad19c4be 337/* Return true iff there is a COND_EXPR between us and the innermost
6de9cd9a
DN
338 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
339
340static bool
341gimple_conditional_context (void)
342{
343 return gimplify_ctxp->conditions > 0;
344}
345
346/* Note that we've entered a COND_EXPR. */
347
348static void
349gimple_push_condition (void)
350{
726a989a 351#ifdef ENABLE_GIMPLE_CHECKING
d775bc45 352 if (gimplify_ctxp->conditions == 0)
726a989a 353 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
d775bc45 354#endif
6de9cd9a
DN
355 ++(gimplify_ctxp->conditions);
356}
357
358/* Note that we've left a COND_EXPR. If we're back at unconditional scope
359 now, add any conditional cleanups we've seen to the prequeue. */
360
361static void
726a989a 362gimple_pop_condition (gimple_seq *pre_p)
6de9cd9a
DN
363{
364 int conds = --(gimplify_ctxp->conditions);
aa4a53af 365
282899df 366 gcc_assert (conds >= 0);
6de9cd9a
DN
367 if (conds == 0)
368 {
726a989a
RB
369 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
370 gimplify_ctxp->conditional_cleanups = NULL;
6de9cd9a 371 }
6de9cd9a
DN
372}
373
953ff289
DN
374/* A stable comparison routine for use with splay trees and DECLs. */
375
376static int
377splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
378{
379 tree a = (tree) xa;
380 tree b = (tree) xb;
381
382 return DECL_UID (a) - DECL_UID (b);
383}
384
385/* Create a new omp construct that deals with variable remapping. */
386
387static struct gimplify_omp_ctx *
a68ab351 388new_omp_context (enum omp_region_type region_type)
953ff289
DN
389{
390 struct gimplify_omp_ctx *c;
391
392 c = XCNEW (struct gimplify_omp_ctx);
393 c->outer_context = gimplify_omp_ctxp;
394 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
6e2830c3 395 c->privatized_types = new hash_set<tree>;
953ff289 396 c->location = input_location;
a68ab351 397 c->region_type = region_type;
f22f4340 398 if ((region_type & ORT_TASK) == 0)
a68ab351
JJ
399 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
400 else
401 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
953ff289
DN
402
403 return c;
404}
405
406/* Destroy an omp construct that deals with variable remapping. */
407
408static void
409delete_omp_context (struct gimplify_omp_ctx *c)
410{
411 splay_tree_delete (c->variables);
6e2830c3 412 delete c->privatized_types;
d9a6bd32 413 c->loop_iter_var.release ();
953ff289
DN
414 XDELETE (c);
415}
416
417static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
418static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
419
726a989a
RB
420/* Both gimplify the statement T and append it to *SEQ_P. This function
421 behaves exactly as gimplify_stmt, but you don't have to pass T as a
422 reference. */
cd3ce9b4
JM
423
424void
726a989a
RB
425gimplify_and_add (tree t, gimple_seq *seq_p)
426{
427 gimplify_stmt (&t, seq_p);
428}
429
430/* Gimplify statement T into sequence *SEQ_P, and return the first
431 tuple in the sequence of generated tuples for this statement.
432 Return NULL if gimplifying T produced no tuples. */
433
355fe088 434static gimple *
726a989a 435gimplify_and_return_first (tree t, gimple_seq *seq_p)
cd3ce9b4 436{
726a989a
RB
437 gimple_stmt_iterator last = gsi_last (*seq_p);
438
439 gimplify_and_add (t, seq_p);
440
441 if (!gsi_end_p (last))
442 {
443 gsi_next (&last);
444 return gsi_stmt (last);
445 }
446 else
447 return gimple_seq_first_stmt (*seq_p);
cd3ce9b4
JM
448}
449
216820a4
RG
450/* Returns true iff T is a valid RHS for an assignment to an un-renamed
451 LHS, or for a call argument. */
452
453static bool
454is_gimple_mem_rhs (tree t)
455{
456 /* If we're dealing with a renamable type, either source or dest must be
457 a renamed variable. */
458 if (is_gimple_reg_type (TREE_TYPE (t)))
459 return is_gimple_val (t);
460 else
461 return is_gimple_val (t) || is_gimple_lvalue (t);
462}
463
726a989a 464/* Return true if T is a CALL_EXPR or an expression that can be
12947319 465 assigned to a temporary. Note that this predicate should only be
726a989a
RB
466 used during gimplification. See the rationale for this in
467 gimplify_modify_expr. */
468
469static bool
ba4d8f9d 470is_gimple_reg_rhs_or_call (tree t)
726a989a 471{
ba4d8f9d
RG
472 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
473 || TREE_CODE (t) == CALL_EXPR);
726a989a
RB
474}
475
476/* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
477 this predicate should only be used during gimplification. See the
478 rationale for this in gimplify_modify_expr. */
479
480static bool
ba4d8f9d 481is_gimple_mem_rhs_or_call (tree t)
726a989a
RB
482{
483 /* If we're dealing with a renamable type, either source or dest must be
050bbfeb
RG
484 a renamed variable. */
485 if (is_gimple_reg_type (TREE_TYPE (t)))
726a989a
RB
486 return is_gimple_val (t);
487 else
ba4d8f9d
RG
488 return (is_gimple_val (t) || is_gimple_lvalue (t)
489 || TREE_CODE (t) == CALL_EXPR);
726a989a
RB
490}
491
2ad728d2
RG
492/* Create a temporary with a name derived from VAL. Subroutine of
493 lookup_tmp_var; nobody else should call this function. */
494
495static inline tree
947ca6a0 496create_tmp_from_val (tree val)
2ad728d2
RG
497{
498 /* Drop all qualifiers and address-space information from the value type. */
499 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
500 tree var = create_tmp_var (type, get_name (val));
947ca6a0
RB
501 if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
502 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
2ad728d2
RG
503 DECL_GIMPLE_REG_P (var) = 1;
504 return var;
505}
506
507/* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
508 an existing expression temporary. */
509
510static tree
511lookup_tmp_var (tree val, bool is_formal)
512{
513 tree ret;
514
515 /* If not optimizing, never really reuse a temporary. local-alloc
516 won't allocate any variable that is used in more than one basic
517 block, which means it will go into memory, causing much extra
518 work in reload and final and poorer code generation, outweighing
519 the extra memory allocation here. */
520 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
947ca6a0 521 ret = create_tmp_from_val (val);
2ad728d2
RG
522 else
523 {
524 elt_t elt, *elt_p;
4a8fb1a1 525 elt_t **slot;
2ad728d2
RG
526
527 elt.val = val;
c203e8a7
TS
528 if (!gimplify_ctxp->temp_htab)
529 gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
530 slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
2ad728d2
RG
531 if (*slot == NULL)
532 {
533 elt_p = XNEW (elt_t);
534 elt_p->val = val;
947ca6a0 535 elt_p->temp = ret = create_tmp_from_val (val);
4a8fb1a1 536 *slot = elt_p;
2ad728d2
RG
537 }
538 else
539 {
4a8fb1a1 540 elt_p = *slot;
2ad728d2
RG
541 ret = elt_p->temp;
542 }
543 }
544
545 return ret;
546}
547
ba4d8f9d 548/* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
6de9cd9a
DN
549
550static tree
726a989a 551internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
381cdae4 552 bool is_formal, bool allow_ssa)
6de9cd9a
DN
553{
554 tree t, mod;
6de9cd9a 555
726a989a
RB
556 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
557 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
ba4d8f9d 558 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
726a989a 559 fb_rvalue);
6de9cd9a 560
381cdae4
RB
561 if (allow_ssa
562 && gimplify_ctxp->into_ssa
2ad728d2 563 && is_gimple_reg_type (TREE_TYPE (val)))
381cdae4
RB
564 {
565 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
566 if (! gimple_in_ssa_p (cfun))
567 {
568 const char *name = get_name (val);
569 if (name)
570 SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
571 }
572 }
2ad728d2
RG
573 else
574 t = lookup_tmp_var (val, is_formal);
e41d82f5 575
2e929cf3 576 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
6de9cd9a 577
8400e75e 578 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
6de9cd9a 579
fff34d35
RK
580 /* gimplify_modify_expr might want to reduce this further. */
581 gimplify_and_add (mod, pre_p);
726a989a 582 ggc_free (mod);
8b11a64c 583
6de9cd9a
DN
584 return t;
585}
586
ad19c4be 587/* Return a formal temporary variable initialized with VAL. PRE_P is as
ba4d8f9d
RG
588 in gimplify_expr. Only use this function if:
589
590 1) The value of the unfactored expression represented by VAL will not
591 change between the initialization and use of the temporary, and
592 2) The temporary will not be otherwise modified.
593
594 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
595 and #2 means it is inappropriate for && temps.
596
597 For other cases, use get_initialized_tmp_var instead. */
50674e96 598
6de9cd9a 599tree
726a989a 600get_formal_tmp_var (tree val, gimple_seq *pre_p)
6de9cd9a 601{
381cdae4 602 return internal_get_tmp_var (val, pre_p, NULL, true, true);
6de9cd9a
DN
603}
604
ad19c4be 605/* Return a temporary variable initialized with VAL. PRE_P and POST_P
6de9cd9a
DN
606 are as in gimplify_expr. */
607
608tree
381cdae4
RB
609get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
610 bool allow_ssa)
6de9cd9a 611{
381cdae4 612 return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
6de9cd9a
DN
613}
614
ad19c4be
EB
615/* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
616 generate debug info for them; otherwise don't. */
6de9cd9a
DN
617
618void
355fe088 619declare_vars (tree vars, gimple *gs, bool debug_info)
6de9cd9a
DN
620{
621 tree last = vars;
622 if (last)
623 {
5123ad09 624 tree temps, block;
6de9cd9a 625
538dd0b7 626 gbind *scope = as_a <gbind *> (gs);
6de9cd9a
DN
627
628 temps = nreverse (last);
5123ad09 629
524d9a45 630 block = gimple_bind_block (scope);
726a989a 631 gcc_assert (!block || TREE_CODE (block) == BLOCK);
5123ad09
EB
632 if (!block || !debug_info)
633 {
910ad8de 634 DECL_CHAIN (last) = gimple_bind_vars (scope);
726a989a 635 gimple_bind_set_vars (scope, temps);
5123ad09
EB
636 }
637 else
638 {
639 /* We need to attach the nodes both to the BIND_EXPR and to its
640 associated BLOCK for debugging purposes. The key point here
641 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
642 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
643 if (BLOCK_VARS (block))
644 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
645 else
646 {
726a989a
RB
647 gimple_bind_set_vars (scope,
648 chainon (gimple_bind_vars (scope), temps));
5123ad09
EB
649 BLOCK_VARS (block) = temps;
650 }
651 }
6de9cd9a
DN
652 }
653}
654
a441447f
OH
655/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
656 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
657 no such upper bound can be obtained. */
658
659static void
660force_constant_size (tree var)
661{
662 /* The only attempt we make is by querying the maximum size of objects
663 of the variable's type. */
664
665 HOST_WIDE_INT max_size;
666
667 gcc_assert (TREE_CODE (var) == VAR_DECL);
668
669 max_size = max_int_size_in_bytes (TREE_TYPE (var));
670
671 gcc_assert (max_size >= 0);
672
673 DECL_SIZE_UNIT (var)
674 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
675 DECL_SIZE (var)
676 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
677}
678
ad19c4be
EB
679/* Push the temporary variable TMP into the current binding. */
680
45b62594
RB
681void
682gimple_add_tmp_var_fn (struct function *fn, tree tmp)
683{
684 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
685
686 /* Later processing assumes that the object size is constant, which might
687 not be true at this point. Force the use of a constant upper bound in
688 this case. */
689 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
690 force_constant_size (tmp);
691
692 DECL_CONTEXT (tmp) = fn->decl;
693 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
694
695 record_vars_into (tmp, fn->decl);
696}
697
698/* Push the temporary variable TMP into the current binding. */
699
6de9cd9a
DN
700void
701gimple_add_tmp_var (tree tmp)
702{
910ad8de 703 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
6de9cd9a 704
a441447f
OH
705 /* Later processing assumes that the object size is constant, which might
706 not be true at this point. Force the use of a constant upper bound in
707 this case. */
cc269bb6 708 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
a441447f
OH
709 force_constant_size (tmp);
710
6de9cd9a 711 DECL_CONTEXT (tmp) = current_function_decl;
48eb4e53 712 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
6de9cd9a
DN
713
714 if (gimplify_ctxp)
715 {
910ad8de 716 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
6de9cd9a 717 gimplify_ctxp->temps = tmp;
953ff289
DN
718
719 /* Mark temporaries local within the nearest enclosing parallel. */
720 if (gimplify_omp_ctxp)
721 {
722 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
74bf76ed
JJ
723 while (ctx
724 && (ctx->region_type == ORT_WORKSHARE
182190f2
NS
725 || ctx->region_type == ORT_SIMD
726 || ctx->region_type == ORT_ACC))
953ff289
DN
727 ctx = ctx->outer_context;
728 if (ctx)
729 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
730 }
6de9cd9a
DN
731 }
732 else if (cfun)
733 record_vars (tmp);
734 else
726a989a
RB
735 {
736 gimple_seq body_seq;
737
738 /* This case is for nested functions. We need to expose the locals
739 they create. */
740 body_seq = gimple_body (current_function_decl);
741 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
742 }
743}
744
726a989a 745
616f1431
EB
746\f
747/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
748 nodes that are referenced more than once in GENERIC functions. This is
749 necessary because gimplification (translation into GIMPLE) is performed
750 by modifying tree nodes in-place, so gimplication of a shared node in a
751 first context could generate an invalid GIMPLE form in a second context.
752
753 This is achieved with a simple mark/copy/unmark algorithm that walks the
754 GENERIC representation top-down, marks nodes with TREE_VISITED the first
755 time it encounters them, duplicates them if they already have TREE_VISITED
756 set, and finally removes the TREE_VISITED marks it has set.
757
758 The algorithm works only at the function level, i.e. it generates a GENERIC
759 representation of a function with no nodes shared within the function when
760 passed a GENERIC function (except for nodes that are allowed to be shared).
761
762 At the global level, it is also necessary to unshare tree nodes that are
763 referenced in more than one function, for the same aforementioned reason.
764 This requires some cooperation from the front-end. There are 2 strategies:
765
766 1. Manual unsharing. The front-end needs to call unshare_expr on every
767 expression that might end up being shared across functions.
768
769 2. Deep unsharing. This is an extension of regular unsharing. Instead
770 of calling unshare_expr on expressions that might be shared across
771 functions, the front-end pre-marks them with TREE_VISITED. This will
772 ensure that they are unshared on the first reference within functions
773 when the regular unsharing algorithm runs. The counterpart is that
774 this algorithm must look deeper than for manual unsharing, which is
775 specified by LANG_HOOKS_DEEP_UNSHARING.
776
777 If there are only few specific cases of node sharing across functions, it is
778 probably easier for a front-end to unshare the expressions manually. On the
779 contrary, if the expressions generated at the global level are as widespread
780 as expressions generated within functions, deep unsharing is very likely the
781 way to go. */
782
783/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
3ad065ef
EB
784 These nodes model computations that must be done once. If we were to
785 unshare something like SAVE_EXPR(i++), the gimplification process would
786 create wrong code. However, if DATA is non-null, it must hold a pointer
787 set that is used to unshare the subtrees of these nodes. */
6de9cd9a
DN
788
789static tree
790mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
791{
616f1431
EB
792 tree t = *tp;
793 enum tree_code code = TREE_CODE (t);
794
6687b740
EB
795 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
796 copy their subtrees if we can make sure to do it only once. */
797 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
616f1431 798 {
6e2830c3 799 if (data && !((hash_set<tree> *)data)->add (t))
616f1431
EB
800 ;
801 else
802 *walk_subtrees = 0;
803 }
804
805 /* Stop at types, decls, constants like copy_tree_r. */
806 else if (TREE_CODE_CLASS (code) == tcc_type
807 || TREE_CODE_CLASS (code) == tcc_declaration
808 || TREE_CODE_CLASS (code) == tcc_constant
809 /* We can't do anything sensible with a BLOCK used as an
810 expression, but we also can't just die when we see it
811 because of non-expression uses. So we avert our eyes
812 and cross our fingers. Silly Java. */
813 || code == BLOCK)
6de9cd9a 814 *walk_subtrees = 0;
616f1431
EB
815
816 /* Cope with the statement expression extension. */
817 else if (code == STATEMENT_LIST)
818 ;
819
820 /* Leave the bulk of the work to copy_tree_r itself. */
6de9cd9a 821 else
6687b740 822 copy_tree_r (tp, walk_subtrees, NULL);
6de9cd9a
DN
823
824 return NULL_TREE;
825}
826
3ad065ef
EB
827/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
828 If *TP has been visited already, then *TP is deeply copied by calling
829 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
6de9cd9a
DN
830
831static tree
616f1431 832copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
6de9cd9a 833{
f0638e1d
RH
834 tree t = *tp;
835 enum tree_code code = TREE_CODE (t);
836
44de5aeb
RK
837 /* Skip types, decls, and constants. But we do want to look at their
838 types and the bounds of types. Mark them as visited so we properly
839 unmark their subtrees on the unmark pass. If we've already seen them,
840 don't look down further. */
6615c446
JO
841 if (TREE_CODE_CLASS (code) == tcc_type
842 || TREE_CODE_CLASS (code) == tcc_declaration
843 || TREE_CODE_CLASS (code) == tcc_constant)
44de5aeb
RK
844 {
845 if (TREE_VISITED (t))
846 *walk_subtrees = 0;
847 else
848 TREE_VISITED (t) = 1;
849 }
f0638e1d 850
6de9cd9a
DN
851 /* If this node has been visited already, unshare it and don't look
852 any deeper. */
f0638e1d 853 else if (TREE_VISITED (t))
6de9cd9a 854 {
616f1431 855 walk_tree (tp, mostly_copy_tree_r, data, NULL);
6de9cd9a
DN
856 *walk_subtrees = 0;
857 }
f0638e1d 858
616f1431 859 /* Otherwise, mark the node as visited and keep looking. */
6de9cd9a 860 else
77c9db77 861 TREE_VISITED (t) = 1;
f0638e1d 862
6de9cd9a
DN
863 return NULL_TREE;
864}
865
3ad065ef
EB
866/* Unshare most of the shared trees rooted at *TP. DATA is passed to the
867 copy_if_shared_r callback unmodified. */
6de9cd9a 868
616f1431 869static inline void
3ad065ef 870copy_if_shared (tree *tp, void *data)
616f1431 871{
3ad065ef 872 walk_tree (tp, copy_if_shared_r, data, NULL);
6de9cd9a
DN
873}
874
3ad065ef
EB
875/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
876 any nested functions. */
44de5aeb
RK
877
878static void
3ad065ef 879unshare_body (tree fndecl)
44de5aeb 880{
d52f5295 881 struct cgraph_node *cgn = cgraph_node::get (fndecl);
3ad065ef
EB
882 /* If the language requires deep unsharing, we need a pointer set to make
883 sure we don't repeatedly unshare subtrees of unshareable nodes. */
6e2830c3
TS
884 hash_set<tree> *visited
885 = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
44de5aeb 886
3ad065ef
EB
887 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
888 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
889 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
890
6e2830c3 891 delete visited;
616f1431 892
3ad065ef 893 if (cgn)
48eb4e53 894 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
67348ccc 895 unshare_body (cgn->decl);
44de5aeb
RK
896}
897
616f1431
EB
898/* Callback for walk_tree to unmark the visited trees rooted at *TP.
899 Subtrees are walked until the first unvisited node is encountered. */
900
901static tree
902unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
903{
904 tree t = *tp;
905
906 /* If this node has been visited, unmark it and keep looking. */
907 if (TREE_VISITED (t))
908 TREE_VISITED (t) = 0;
909
910 /* Otherwise, don't look any deeper. */
911 else
912 *walk_subtrees = 0;
913
914 return NULL_TREE;
915}
916
917/* Unmark the visited trees rooted at *TP. */
918
919static inline void
920unmark_visited (tree *tp)
921{
922 walk_tree (tp, unmark_visited_r, NULL, NULL);
923}
924
44de5aeb
RK
925/* Likewise, but mark all trees as not visited. */
926
927static void
3ad065ef 928unvisit_body (tree fndecl)
44de5aeb 929{
d52f5295 930 struct cgraph_node *cgn = cgraph_node::get (fndecl);
44de5aeb 931
3ad065ef
EB
932 unmark_visited (&DECL_SAVED_TREE (fndecl));
933 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
934 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
616f1431 935
3ad065ef 936 if (cgn)
48eb4e53 937 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
67348ccc 938 unvisit_body (cgn->decl);
44de5aeb
RK
939}
940
6de9cd9a
DN
941/* Unconditionally make an unshared copy of EXPR. This is used when using
942 stored expressions which span multiple functions, such as BINFO_VTABLE,
943 as the normal unsharing process can't tell that they're shared. */
944
945tree
946unshare_expr (tree expr)
947{
948 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
949 return expr;
950}
d1f98542
RB
951
952/* Worker for unshare_expr_without_location. */
953
954static tree
955prune_expr_location (tree *tp, int *walk_subtrees, void *)
956{
957 if (EXPR_P (*tp))
958 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
959 else
960 *walk_subtrees = 0;
961 return NULL_TREE;
962}
963
964/* Similar to unshare_expr but also prune all expression locations
965 from EXPR. */
966
967tree
968unshare_expr_without_location (tree expr)
969{
970 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
971 if (EXPR_P (expr))
972 walk_tree (&expr, prune_expr_location, NULL, NULL);
973 return expr;
974}
6de9cd9a
DN
975\f
976/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
977 contain statements and have a value. Assign its value to a temporary
ad19c4be 978 and give it void_type_node. Return the temporary, or NULL_TREE if
6de9cd9a
DN
979 WRAPPER was already void. */
980
981tree
325c3691 982voidify_wrapper_expr (tree wrapper, tree temp)
6de9cd9a 983{
4832214a
JM
984 tree type = TREE_TYPE (wrapper);
985 if (type && !VOID_TYPE_P (type))
6de9cd9a 986 {
c6c7698d 987 tree *p;
6de9cd9a 988
c6c7698d
JM
989 /* Set p to point to the body of the wrapper. Loop until we find
990 something that isn't a wrapper. */
991 for (p = &wrapper; p && *p; )
d3147f64 992 {
c6c7698d 993 switch (TREE_CODE (*p))
6de9cd9a 994 {
c6c7698d
JM
995 case BIND_EXPR:
996 TREE_SIDE_EFFECTS (*p) = 1;
997 TREE_TYPE (*p) = void_type_node;
998 /* For a BIND_EXPR, the body is operand 1. */
999 p = &BIND_EXPR_BODY (*p);
1000 break;
1001
1002 case CLEANUP_POINT_EXPR:
1003 case TRY_FINALLY_EXPR:
1004 case TRY_CATCH_EXPR:
6de9cd9a
DN
1005 TREE_SIDE_EFFECTS (*p) = 1;
1006 TREE_TYPE (*p) = void_type_node;
c6c7698d
JM
1007 p = &TREE_OPERAND (*p, 0);
1008 break;
1009
1010 case STATEMENT_LIST:
1011 {
1012 tree_stmt_iterator i = tsi_last (*p);
1013 TREE_SIDE_EFFECTS (*p) = 1;
1014 TREE_TYPE (*p) = void_type_node;
1015 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1016 }
1017 break;
1018
1019 case COMPOUND_EXPR:
ad19c4be
EB
1020 /* Advance to the last statement. Set all container types to
1021 void. */
c6c7698d
JM
1022 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1023 {
1024 TREE_SIDE_EFFECTS (*p) = 1;
1025 TREE_TYPE (*p) = void_type_node;
1026 }
1027 break;
1028
0a35513e
AH
1029 case TRANSACTION_EXPR:
1030 TREE_SIDE_EFFECTS (*p) = 1;
1031 TREE_TYPE (*p) = void_type_node;
1032 p = &TRANSACTION_EXPR_BODY (*p);
1033 break;
1034
c6c7698d 1035 default:
5f23640f
TR
1036 /* Assume that any tree upon which voidify_wrapper_expr is
1037 directly called is a wrapper, and that its body is op0. */
1038 if (p == &wrapper)
1039 {
1040 TREE_SIDE_EFFECTS (*p) = 1;
1041 TREE_TYPE (*p) = void_type_node;
1042 p = &TREE_OPERAND (*p, 0);
1043 break;
1044 }
c6c7698d 1045 goto out;
6de9cd9a
DN
1046 }
1047 }
1048
c6c7698d 1049 out:
325c3691 1050 if (p == NULL || IS_EMPTY_STMT (*p))
c6c7698d
JM
1051 temp = NULL_TREE;
1052 else if (temp)
6de9cd9a 1053 {
c6c7698d
JM
1054 /* The wrapper is on the RHS of an assignment that we're pushing
1055 down. */
1056 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1057 || TREE_CODE (temp) == MODIFY_EXPR);
726a989a 1058 TREE_OPERAND (temp, 1) = *p;
c6c7698d 1059 *p = temp;
6de9cd9a
DN
1060 }
1061 else
1062 {
c6c7698d
JM
1063 temp = create_tmp_var (type, "retval");
1064 *p = build2 (INIT_EXPR, type, temp, *p);
6de9cd9a
DN
1065 }
1066
6de9cd9a
DN
1067 return temp;
1068 }
1069
1070 return NULL_TREE;
1071}
1072
1073/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1ea7e6ad 1074 a temporary through which they communicate. */
6de9cd9a
DN
1075
1076static void
538dd0b7 1077build_stack_save_restore (gcall **save, gcall **restore)
6de9cd9a 1078{
726a989a 1079 tree tmp_var;
6de9cd9a 1080
e79983f4 1081 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
6de9cd9a 1082 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
726a989a 1083 gimple_call_set_lhs (*save, tmp_var);
6de9cd9a 1084
ad19c4be 1085 *restore
e79983f4 1086 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
ad19c4be 1087 1, tmp_var);
6de9cd9a
DN
1088}
1089
1090/* Gimplify a BIND_EXPR. Just voidify and recurse. */
1091
1092static enum gimplify_status
726a989a 1093gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a
DN
1094{
1095 tree bind_expr = *expr_p;
03c00798 1096 bool old_keep_stack = gimplify_ctxp->keep_stack;
6de9cd9a
DN
1097 bool old_save_stack = gimplify_ctxp->save_stack;
1098 tree t;
538dd0b7 1099 gbind *bind_stmt;
47598145 1100 gimple_seq body, cleanup;
538dd0b7 1101 gcall *stack_save;
a5852bea 1102 location_t start_locus = 0, end_locus = 0;
6e232ba4 1103 tree ret_clauses = NULL;
6de9cd9a 1104
c6c7698d 1105 tree temp = voidify_wrapper_expr (bind_expr, NULL);
325c3691 1106
6de9cd9a 1107 /* Mark variables seen in this bind expr. */
910ad8de 1108 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
e41d82f5 1109 {
820cc88f 1110 if (TREE_CODE (t) == VAR_DECL)
8cb86b65
JJ
1111 {
1112 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1113
1114 /* Mark variable as local. */
d9a6bd32 1115 if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
8cb86b65
JJ
1116 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1117 || splay_tree_lookup (ctx->variables,
1118 (splay_tree_key) t) == NULL))
c74559df
JJ
1119 {
1120 if (ctx->region_type == ORT_SIMD
1121 && TREE_ADDRESSABLE (t)
1122 && !TREE_STATIC (t))
1123 omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
1124 else
1125 omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
1126 }
8cb86b65
JJ
1127
1128 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
fc3103e7
JJ
1129
1130 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1131 cfun->has_local_explicit_reg_vars = true;
8cb86b65 1132 }
e41d82f5
RH
1133
1134 /* Preliminarily mark non-addressed complex variables as eligible
1135 for promotion to gimple registers. We'll transform their uses
bd2e63a1
RG
1136 as we find them. */
1137 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1138 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
e41d82f5
RH
1139 && !TREE_THIS_VOLATILE (t)
1140 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1141 && !needs_to_live_in_memory (t))
0890b981 1142 DECL_GIMPLE_REG_P (t) = 1;
e41d82f5 1143 }
6de9cd9a 1144
538dd0b7 1145 bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
03c00798 1146 BIND_EXPR_BLOCK (bind_expr));
538dd0b7 1147 gimple_push_bind_expr (bind_stmt);
726a989a 1148
03c00798 1149 gimplify_ctxp->keep_stack = false;
6de9cd9a
DN
1150 gimplify_ctxp->save_stack = false;
1151
726a989a
RB
1152 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1153 body = NULL;
1154 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
538dd0b7 1155 gimple_bind_set_body (bind_stmt, body);
6de9cd9a 1156
a5852bea
OH
1157 /* Source location wise, the cleanup code (stack_restore and clobbers)
1158 belongs to the end of the block, so propagate what we have. The
1159 stack_save operation belongs to the beginning of block, which we can
1160 infer from the bind_expr directly if the block has no explicit
1161 assignment. */
1162 if (BIND_EXPR_BLOCK (bind_expr))
1163 {
1164 end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1165 start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1166 }
1167 if (start_locus == 0)
1168 start_locus = EXPR_LOCATION (bind_expr);
1169
47598145
MM
1170 cleanup = NULL;
1171 stack_save = NULL;
03c00798
EB
1172
1173 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1174 the stack space allocated to the VLAs. */
1175 if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
6de9cd9a 1176 {
538dd0b7 1177 gcall *stack_restore;
6de9cd9a
DN
1178
1179 /* Save stack on entry and restore it on exit. Add a try_finally
98906124 1180 block to achieve this. */
6de9cd9a
DN
1181 build_stack_save_restore (&stack_save, &stack_restore);
1182
a5852bea
OH
1183 gimple_set_location (stack_save, start_locus);
1184 gimple_set_location (stack_restore, end_locus);
1185
726a989a 1186 gimplify_seq_add_stmt (&cleanup, stack_restore);
47598145
MM
1187 }
1188
1189 /* Add clobbers for all variables that go out of scope. */
1190 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1191 {
1192 if (TREE_CODE (t) == VAR_DECL
1193 && !is_global_var (t)
1194 && DECL_CONTEXT (t) == current_function_decl
1195 && !DECL_HARD_REGISTER (t)
1196 && !TREE_THIS_VOLATILE (t)
1197 && !DECL_HAS_VALUE_EXPR_P (t)
1198 /* Only care for variables that have to be in memory. Others
1199 will be rewritten into SSA names, hence moved to the top-level. */
87e2a8fd
XDL
1200 && !is_gimple_reg (t)
1201 && flag_stack_reuse != SR_NONE)
47598145 1202 {
a5852bea 1203 tree clobber = build_constructor (TREE_TYPE (t), NULL);
355fe088 1204 gimple *clobber_stmt;
47598145 1205 TREE_THIS_VOLATILE (clobber) = 1;
a5852bea
OH
1206 clobber_stmt = gimple_build_assign (t, clobber);
1207 gimple_set_location (clobber_stmt, end_locus);
1208 gimplify_seq_add_stmt (&cleanup, clobber_stmt);
6e232ba4
JN
1209
1210 if (flag_openacc && oacc_declare_returns != NULL)
1211 {
1212 tree *c = oacc_declare_returns->get (t);
1213 if (c != NULL)
1214 {
1215 if (ret_clauses)
1216 OMP_CLAUSE_CHAIN (*c) = ret_clauses;
1217
1218 ret_clauses = *c;
1219
1220 oacc_declare_returns->remove (t);
1221
1222 if (oacc_declare_returns->elements () == 0)
1223 {
1224 delete oacc_declare_returns;
1225 oacc_declare_returns = NULL;
1226 }
1227 }
1228 }
47598145
MM
1229 }
1230 }
1231
6e232ba4
JN
1232 if (ret_clauses)
1233 {
1234 gomp_target *stmt;
1235 gimple_stmt_iterator si = gsi_start (cleanup);
1236
1237 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
1238 ret_clauses);
1239 gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
1240 }
1241
47598145
MM
1242 if (cleanup)
1243 {
538dd0b7 1244 gtry *gs;
47598145
MM
1245 gimple_seq new_body;
1246
1247 new_body = NULL;
538dd0b7 1248 gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
726a989a 1249 GIMPLE_TRY_FINALLY);
6de9cd9a 1250
47598145
MM
1251 if (stack_save)
1252 gimplify_seq_add_stmt (&new_body, stack_save);
726a989a 1253 gimplify_seq_add_stmt (&new_body, gs);
538dd0b7 1254 gimple_bind_set_body (bind_stmt, new_body);
6de9cd9a
DN
1255 }
1256
03c00798
EB
1257 /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1258 if (!gimplify_ctxp->keep_stack)
1259 gimplify_ctxp->keep_stack = old_keep_stack;
6de9cd9a 1260 gimplify_ctxp->save_stack = old_save_stack;
03c00798 1261
6de9cd9a
DN
1262 gimple_pop_bind_expr ();
1263
538dd0b7 1264 gimplify_seq_add_stmt (pre_p, bind_stmt);
726a989a 1265
6de9cd9a
DN
1266 if (temp)
1267 {
1268 *expr_p = temp;
6de9cd9a
DN
1269 return GS_OK;
1270 }
726a989a
RB
1271
1272 *expr_p = NULL_TREE;
1273 return GS_ALL_DONE;
6de9cd9a
DN
1274}
1275
1276/* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1277 GIMPLE value, it is assigned to a new temporary and the statement is
1278 re-written to return the temporary.
1279
726a989a 1280 PRE_P points to the sequence where side effects that must happen before
6de9cd9a
DN
1281 STMT should be stored. */
1282
1283static enum gimplify_status
726a989a 1284gimplify_return_expr (tree stmt, gimple_seq *pre_p)
6de9cd9a 1285{
538dd0b7 1286 greturn *ret;
6de9cd9a 1287 tree ret_expr = TREE_OPERAND (stmt, 0);
71877985 1288 tree result_decl, result;
6de9cd9a 1289
726a989a
RB
1290 if (ret_expr == error_mark_node)
1291 return GS_ERROR;
1292
939b37da
BI
1293 /* Implicit _Cilk_sync must be inserted right before any return statement
1294 if there is a _Cilk_spawn in the function. If the user has provided a
1295 _Cilk_sync, the optimizer should remove this duplicate one. */
1296 if (fn_contains_cilk_spawn_p (cfun))
1297 {
1298 tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
1299 gimplify_and_add (impl_sync, pre_p);
1300 }
1301
726a989a
RB
1302 if (!ret_expr
1303 || TREE_CODE (ret_expr) == RESULT_DECL
55e99d52 1304 || ret_expr == error_mark_node)
726a989a 1305 {
538dd0b7 1306 greturn *ret = gimple_build_return (ret_expr);
726a989a
RB
1307 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1308 gimplify_seq_add_stmt (pre_p, ret);
1309 return GS_ALL_DONE;
1310 }
6de9cd9a 1311
6de9cd9a 1312 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
71877985 1313 result_decl = NULL_TREE;
6de9cd9a
DN
1314 else
1315 {
726a989a
RB
1316 result_decl = TREE_OPERAND (ret_expr, 0);
1317
1318 /* See through a return by reference. */
cc77ae10 1319 if (TREE_CODE (result_decl) == INDIRECT_REF)
cc77ae10 1320 result_decl = TREE_OPERAND (result_decl, 0);
282899df
NS
1321
1322 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1323 || TREE_CODE (ret_expr) == INIT_EXPR)
1324 && TREE_CODE (result_decl) == RESULT_DECL);
6de9cd9a
DN
1325 }
1326
71877985
RH
1327 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1328 Recall that aggregate_value_p is FALSE for any aggregate type that is
1329 returned in registers. If we're returning values in registers, then
1330 we don't want to extend the lifetime of the RESULT_DECL, particularly
d3147f64 1331 across another call. In addition, for those aggregates for which
535a42b1 1332 hard_function_value generates a PARALLEL, we'll die during normal
71877985
RH
1333 expansion of structure assignments; there's special code in expand_return
1334 to handle this case that does not exist in expand_expr. */
ca361dec
EB
1335 if (!result_decl)
1336 result = NULL_TREE;
1337 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1338 {
1339 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1340 {
1341 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1342 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1343 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1344 should be effectively allocated by the caller, i.e. all calls to
1345 this function must be subject to the Return Slot Optimization. */
1346 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1347 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1348 }
1349 result = result_decl;
1350 }
71877985
RH
1351 else if (gimplify_ctxp->return_temp)
1352 result = gimplify_ctxp->return_temp;
1353 else
1354 {
b731b390 1355 result = create_tmp_reg (TREE_TYPE (result_decl));
ff98621c
RH
1356
1357 /* ??? With complex control flow (usually involving abnormal edges),
1358 we can wind up warning about an uninitialized value for this. Due
1359 to how this variable is constructed and initialized, this is never
1360 true. Give up and never warn. */
1361 TREE_NO_WARNING (result) = 1;
1362
71877985
RH
1363 gimplify_ctxp->return_temp = result;
1364 }
1365
726a989a 1366 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
71877985
RH
1367 Then gimplify the whole thing. */
1368 if (result != result_decl)
726a989a 1369 TREE_OPERAND (ret_expr, 0) = result;
fff34d35
RK
1370
1371 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
6de9cd9a 1372
726a989a
RB
1373 ret = gimple_build_return (result);
1374 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1375 gimplify_seq_add_stmt (pre_p, ret);
6de9cd9a 1376
6de9cd9a
DN
1377 return GS_ALL_DONE;
1378}
1379
ad19c4be
EB
1380/* Gimplify a variable-length array DECL. */
1381
786025ea 1382static void
726a989a 1383gimplify_vla_decl (tree decl, gimple_seq *seq_p)
786025ea
JJ
1384{
1385 /* This is a variable-sized decl. Simplify its size and mark it
98906124 1386 for deferred expansion. */
786025ea
JJ
1387 tree t, addr, ptr_type;
1388
726a989a
RB
1389 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1390 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
786025ea 1391
0138d6b2
JM
1392 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1393 if (DECL_HAS_VALUE_EXPR_P (decl))
1394 return;
1395
786025ea
JJ
1396 /* All occurrences of this decl in final gimplified code will be
1397 replaced by indirection. Setting DECL_VALUE_EXPR does two
1398 things: First, it lets the rest of the gimplifier know what
1399 replacement to use. Second, it lets the debug info know
1400 where to find the value. */
1401 ptr_type = build_pointer_type (TREE_TYPE (decl));
1402 addr = create_tmp_var (ptr_type, get_name (decl));
1403 DECL_IGNORED_P (addr) = 0;
1404 t = build_fold_indirect_ref (addr);
31408f60 1405 TREE_THIS_NOTRAP (t) = 1;
786025ea
JJ
1406 SET_DECL_VALUE_EXPR (decl, t);
1407 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1408
e79983f4 1409 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
13e49da9
TV
1410 t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
1411 size_int (DECL_ALIGN (decl)));
d3c12306 1412 /* The call has been built for a variable-sized object. */
63d2a353 1413 CALL_ALLOCA_FOR_VAR_P (t) = 1;
786025ea 1414 t = fold_convert (ptr_type, t);
726a989a 1415 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
786025ea 1416
726a989a 1417 gimplify_and_add (t, seq_p);
786025ea
JJ
1418}
1419
45b0be94
AM
1420/* A helper function to be called via walk_tree. Mark all labels under *TP
1421 as being forced. To be called for DECL_INITIAL of static variables. */
1422
1423static tree
1424force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1425{
1426 if (TYPE_P (*tp))
1427 *walk_subtrees = 0;
1428 if (TREE_CODE (*tp) == LABEL_DECL)
aa43616c
RH
1429 {
1430 FORCED_LABEL (*tp) = 1;
1431 cfun->has_forced_label_in_static = 1;
1432 }
45b0be94
AM
1433
1434 return NULL_TREE;
1435}
1436
ad19c4be 1437/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
350fae66
RK
1438 and initialization explicit. */
1439
1440static enum gimplify_status
726a989a 1441gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
350fae66
RK
1442{
1443 tree stmt = *stmt_p;
1444 tree decl = DECL_EXPR_DECL (stmt);
1445
1446 *stmt_p = NULL_TREE;
1447
1448 if (TREE_TYPE (decl) == error_mark_node)
1449 return GS_ERROR;
1450
8e0a600b
JJ
1451 if ((TREE_CODE (decl) == TYPE_DECL
1452 || TREE_CODE (decl) == VAR_DECL)
1453 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
da3d46cb
JJ
1454 {
1455 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1456 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1457 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
1458 }
350fae66 1459
d400d17e
EB
1460 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1461 in case its size expressions contain problematic nodes like CALL_EXPR. */
1462 if (TREE_CODE (decl) == TYPE_DECL
1463 && DECL_ORIGINAL_TYPE (decl)
1464 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
da3d46cb
JJ
1465 {
1466 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1467 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
1468 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
1469 }
d400d17e 1470
8e0a600b 1471 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
350fae66
RK
1472 {
1473 tree init = DECL_INITIAL (decl);
1474
b38f3813
EB
1475 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1476 || (!TREE_STATIC (decl)
1477 && flag_stack_check == GENERIC_STACK_CHECK
1478 && compare_tree_int (DECL_SIZE_UNIT (decl),
1479 STACK_CHECK_MAX_VAR_SIZE) > 0))
726a989a 1480 gimplify_vla_decl (decl, seq_p);
350fae66 1481
22192559
JM
1482 /* Some front ends do not explicitly declare all anonymous
1483 artificial variables. We compensate here by declaring the
1484 variables, though it would be better if the front ends would
1485 explicitly declare them. */
1486 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1487 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1488 gimple_add_tmp_var (decl);
1489
350fae66
RK
1490 if (init && init != error_mark_node)
1491 {
1492 if (!TREE_STATIC (decl))
1493 {
1494 DECL_INITIAL (decl) = NULL_TREE;
dae7ec87 1495 init = build2 (INIT_EXPR, void_type_node, decl, init);
726a989a
RB
1496 gimplify_and_add (init, seq_p);
1497 ggc_free (init);
350fae66
RK
1498 }
1499 else
1500 /* We must still examine initializers for static variables
1501 as they may contain a label address. */
1502 walk_tree (&init, force_labels_r, NULL, NULL);
1503 }
350fae66
RK
1504 }
1505
1506 return GS_ALL_DONE;
1507}
1508
6de9cd9a
DN
1509/* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1510 and replacing the LOOP_EXPR with goto, but if the loop contains an
1511 EXIT_EXPR, we need to append a label for it to jump to. */
1512
1513static enum gimplify_status
726a989a 1514gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a
DN
1515{
1516 tree saved_label = gimplify_ctxp->exit_label;
c2255bc4 1517 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
6de9cd9a 1518
726a989a 1519 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
6de9cd9a
DN
1520
1521 gimplify_ctxp->exit_label = NULL_TREE;
1522
fff34d35 1523 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
6de9cd9a 1524
726a989a
RB
1525 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1526
6de9cd9a 1527 if (gimplify_ctxp->exit_label)
ad19c4be
EB
1528 gimplify_seq_add_stmt (pre_p,
1529 gimple_build_label (gimplify_ctxp->exit_label));
726a989a
RB
1530
1531 gimplify_ctxp->exit_label = saved_label;
1532
1533 *expr_p = NULL;
1534 return GS_ALL_DONE;
1535}
1536
ad19c4be 1537/* Gimplify a statement list onto a sequence. These may be created either
726a989a
RB
1538 by an enlightened front-end, or by shortcut_cond_expr. */
1539
1540static enum gimplify_status
1541gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1542{
1543 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1544
1545 tree_stmt_iterator i = tsi_start (*expr_p);
1546
1547 while (!tsi_end_p (i))
6de9cd9a 1548 {
726a989a
RB
1549 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1550 tsi_delink (&i);
6de9cd9a 1551 }
6de9cd9a 1552
726a989a
RB
1553 if (temp)
1554 {
1555 *expr_p = temp;
1556 return GS_OK;
1557 }
6de9cd9a
DN
1558
1559 return GS_ALL_DONE;
1560}
0f1f6967 1561
146c55da
MP
1562/* Callback for walk_gimple_seq. */
1563
1564static tree
1565warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
1566 struct walk_stmt_info *wi)
1567{
1568 gimple *stmt = gsi_stmt (*gsi_p);
1569
1570 *handled_ops_p = true;
1571 switch (gimple_code (stmt))
1572 {
1573 case GIMPLE_TRY:
1574 /* A compiler-generated cleanup or a user-written try block.
1575 If it's empty, don't dive into it--that would result in
1576 worse location info. */
1577 if (gimple_try_eval (stmt) == NULL)
1578 {
1579 wi->info = stmt;
1580 return integer_zero_node;
1581 }
1582 /* Fall through. */
1583 case GIMPLE_BIND:
1584 case GIMPLE_CATCH:
1585 case GIMPLE_EH_FILTER:
1586 case GIMPLE_TRANSACTION:
1587 /* Walk the sub-statements. */
1588 *handled_ops_p = false;
1589 break;
1590 default:
1591 /* Save the first "real" statement (not a decl/lexical scope/...). */
1592 wi->info = stmt;
1593 return integer_zero_node;
1594 }
1595 return NULL_TREE;
1596}
1597
1598/* Possibly warn about unreachable statements between switch's controlling
1599 expression and the first case. SEQ is the body of a switch expression. */
1600
1601static void
1602maybe_warn_switch_unreachable (gimple_seq seq)
1603{
1604 if (!warn_switch_unreachable
1605 /* This warning doesn't play well with Fortran when optimizations
1606 are on. */
1607 || lang_GNU_Fortran ()
1608 || seq == NULL)
1609 return;
1610
1611 struct walk_stmt_info wi;
1612 memset (&wi, 0, sizeof (wi));
1613 walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1614 gimple *stmt = (gimple *) wi.info;
1615
1616 if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1617 {
1618 if (gimple_code (stmt) == GIMPLE_GOTO
1619 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1620 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1621 /* Don't warn for compiler-generated gotos. These occur
1622 in Duff's devices, for example. */;
1623 else
1624 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1625 "statement will never be executed");
1626 }
1627}
1628
68e72840
SB
1629\f
1630/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
6de9cd9a
DN
1631 branch to. */
1632
1633static enum gimplify_status
726a989a 1634gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a
DN
1635{
1636 tree switch_expr = *expr_p;
726a989a 1637 gimple_seq switch_body_seq = NULL;
6de9cd9a 1638 enum gimplify_status ret;
0cd2402d
SB
1639 tree index_type = TREE_TYPE (switch_expr);
1640 if (index_type == NULL_TREE)
1641 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
6de9cd9a 1642
726a989a
RB
1643 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
1644 fb_rvalue);
1645 if (ret == GS_ERROR || ret == GS_UNHANDLED)
1646 return ret;
6de9cd9a
DN
1647
1648 if (SWITCH_BODY (switch_expr))
1649 {
9771b263
DN
1650 vec<tree> labels;
1651 vec<tree> saved_labels;
726a989a 1652 tree default_case = NULL_TREE;
538dd0b7 1653 gswitch *switch_stmt;
b8698a0f 1654
6de9cd9a
DN
1655 /* If someone can be bothered to fill in the labels, they can
1656 be bothered to null out the body too. */
282899df 1657 gcc_assert (!SWITCH_LABELS (switch_expr));
6de9cd9a 1658
0cd2402d 1659 /* Save old labels, get new ones from body, then restore the old
726a989a 1660 labels. Save all the things from the switch body to append after. */
6de9cd9a 1661 saved_labels = gimplify_ctxp->case_labels;
9771b263 1662 gimplify_ctxp->case_labels.create (8);
6de9cd9a 1663
726a989a 1664 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
a7dc5980 1665
146c55da
MP
1666 maybe_warn_switch_unreachable (switch_body_seq);
1667
6de9cd9a
DN
1668 labels = gimplify_ctxp->case_labels;
1669 gimplify_ctxp->case_labels = saved_labels;
b8698a0f 1670
68e72840
SB
1671 preprocess_case_label_vec_for_gimple (labels, index_type,
1672 &default_case);
32f579f6 1673
726a989a 1674 if (!default_case)
6de9cd9a 1675 {
538dd0b7 1676 glabel *new_default;
6de9cd9a 1677
68e72840
SB
1678 default_case
1679 = build_case_label (NULL_TREE, NULL_TREE,
1680 create_artificial_label (UNKNOWN_LOCATION));
1681 new_default = gimple_build_label (CASE_LABEL (default_case));
1682 gimplify_seq_add_stmt (&switch_body_seq, new_default);
32f579f6 1683 }
f667741c 1684
538dd0b7 1685 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
fd8d363e 1686 default_case, labels);
538dd0b7 1687 gimplify_seq_add_stmt (pre_p, switch_stmt);
726a989a 1688 gimplify_seq_add_seq (pre_p, switch_body_seq);
9771b263 1689 labels.release ();
6de9cd9a 1690 }
282899df
NS
1691 else
1692 gcc_assert (SWITCH_LABELS (switch_expr));
6de9cd9a 1693
726a989a 1694 return GS_ALL_DONE;
6de9cd9a
DN
1695}
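/* Illustrative sketch (not part of gimplify.c): for a switch with no default,
   such as

     switch (x) { case 1: y = 1; break; }

   the body is gimplified into SWITCH_BODY_SEQ, the CASE_LABEL_EXPRs seen
   while doing so become the label vector, and an artificial default label is
   appended, so the emitted tuple looks roughly like

     switch (x) <default: D.2, case 1: D.1>

   followed by the gimplified body.  The label names are assumptions; the
   dump syntax varies between GCC versions.  */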
1696
ad19c4be 1697/* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
726a989a 1698
6de9cd9a 1699static enum gimplify_status
726a989a 1700gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a 1701{
953ff289 1702 struct gimplify_ctx *ctxp;
538dd0b7 1703 glabel *label_stmt;
953ff289 1704
41dbbb37 1705 /* Invalid programs can play Duff's Device type games with, for example,
953ff289 1706 #pragma omp parallel. At least in the C front end, we don't
41dbbb37
TS
1707 detect such invalid branches until after gimplification, in the
1708 diagnose_omp_blocks pass. */
953ff289 1709 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
9771b263 1710 if (ctxp->case_labels.exists ())
953ff289 1711 break;
282899df 1712
538dd0b7 1713 label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
9771b263 1714 ctxp->case_labels.safe_push (*expr_p);
538dd0b7 1715 gimplify_seq_add_stmt (pre_p, label_stmt);
726a989a 1716
6de9cd9a
DN
1717 return GS_ALL_DONE;
1718}
1719
6de9cd9a
DN
1720/* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1721 if necessary. */
1722
1723tree
1724build_and_jump (tree *label_p)
1725{
1726 if (label_p == NULL)
1727 /* If there's nowhere to jump, just fall through. */
65355d53 1728 return NULL_TREE;
6de9cd9a
DN
1729
1730 if (*label_p == NULL_TREE)
1731 {
c2255bc4 1732 tree label = create_artificial_label (UNKNOWN_LOCATION);
6de9cd9a
DN
1733 *label_p = label;
1734 }
1735
1736 return build1 (GOTO_EXPR, void_type_node, *label_p);
1737}
1738
1739/* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1740 This also involves building a label to jump to and communicating it to
1741 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1742
1743static enum gimplify_status
1744gimplify_exit_expr (tree *expr_p)
1745{
1746 tree cond = TREE_OPERAND (*expr_p, 0);
1747 tree expr;
1748
1749 expr = build_and_jump (&gimplify_ctxp->exit_label);
b4257cfc 1750 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
6de9cd9a
DN
1751 *expr_p = expr;
1752
1753 return GS_OK;
1754}
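/* Illustrative sketch (not part of gimplify.c): an EXIT_EXPR <cond>, e.g. the
   node a front end emits for "leave the loop if cond" inside a LOOP_EXPR,
   is rewritten here as

     if (cond) goto exit_label;

   where exit_label is shared through gimplify_ctxp->exit_label so that
   gimplify_loop_expr can place the label just after the loop body.  */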
1755
26d44ae2
RH
1756/* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1757 different from its canonical type, wrap the whole thing inside a
1758 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1759 type.
6de9cd9a 1760
26d44ae2
RH
1761 The canonical type of a COMPONENT_REF is the type of the field being
1762 referenced--unless the field is a bit-field which can be read directly
1763 in a smaller mode, in which case the canonical type is the
1764 sign-appropriate type corresponding to that mode. */
6de9cd9a 1765
26d44ae2
RH
1766static void
1767canonicalize_component_ref (tree *expr_p)
6de9cd9a 1768{
26d44ae2
RH
1769 tree expr = *expr_p;
1770 tree type;
6de9cd9a 1771
282899df 1772 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
6de9cd9a 1773
26d44ae2
RH
1774 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1775 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1776 else
1777 type = TREE_TYPE (TREE_OPERAND (expr, 1));
6de9cd9a 1778
b26c6d55
RG
1779 /* One could argue that all the stuff below is not necessary for
1780 the non-bitfield case and declare it a FE error if type
1781 adjustment would be needed. */
26d44ae2 1782 if (TREE_TYPE (expr) != type)
6de9cd9a 1783 {
b26c6d55 1784#ifdef ENABLE_TYPES_CHECKING
26d44ae2 1785 tree old_type = TREE_TYPE (expr);
b26c6d55
RG
1786#endif
1787 int type_quals;
1788
1789 /* We need to preserve qualifiers and propagate them from
1790 operand 0. */
1791 type_quals = TYPE_QUALS (type)
1792 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1793 if (TYPE_QUALS (type) != type_quals)
1794 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
6de9cd9a 1795
26d44ae2
RH
1796 /* Set the type of the COMPONENT_REF to the underlying type. */
1797 TREE_TYPE (expr) = type;
6de9cd9a 1798
b26c6d55
RG
1799#ifdef ENABLE_TYPES_CHECKING
1800 /* It is now a FE error, if the conversion from the canonical
1801 type to the original expression type is not useless. */
1802 gcc_assert (useless_type_conversion_p (old_type, type));
1803#endif
26d44ae2
RH
1804 }
1805}
6de9cd9a 1806
26d44ae2 1807/* If a NOP conversion is changing a pointer to array of foo to a pointer
d3147f64 1808 to foo, embed that change in the ADDR_EXPR by converting
26d44ae2
RH
1809 T array[U];
1810 (T *)&array
1811 ==>
1812 &array[L]
1813 where L is the lower bound. For simplicity, only do this for constant
04d86531
RG
1814 lower bound.
1815 The constraint is that the type of &array[L] is trivially convertible
1816 to T *. */
6de9cd9a 1817
26d44ae2
RH
1818static void
1819canonicalize_addr_expr (tree *expr_p)
1820{
1821 tree expr = *expr_p;
26d44ae2 1822 tree addr_expr = TREE_OPERAND (expr, 0);
04d86531 1823 tree datype, ddatype, pddatype;
6de9cd9a 1824
04d86531
RG
1825 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
1826 if (!POINTER_TYPE_P (TREE_TYPE (expr))
1827 || TREE_CODE (addr_expr) != ADDR_EXPR)
26d44ae2 1828 return;
6de9cd9a 1829
26d44ae2 1830 /* The addr_expr type should be a pointer to an array. */
04d86531 1831 datype = TREE_TYPE (TREE_TYPE (addr_expr));
26d44ae2
RH
1832 if (TREE_CODE (datype) != ARRAY_TYPE)
1833 return;
6de9cd9a 1834
04d86531
RG
1835 /* The pointer to element type shall be trivially convertible to
1836 the expression pointer type. */
26d44ae2 1837 ddatype = TREE_TYPE (datype);
04d86531 1838 pddatype = build_pointer_type (ddatype);
e5fdcd8c
RG
1839 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
1840 pddatype))
26d44ae2 1841 return;
6de9cd9a 1842
26d44ae2 1843 /* The lower bound and element sizes must be constant. */
04d86531
RG
1844 if (!TYPE_SIZE_UNIT (ddatype)
1845 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
26d44ae2
RH
1846 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1847 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1848 return;
6de9cd9a 1849
26d44ae2 1850 /* All checks succeeded. Build a new node to merge the cast. */
04d86531 1851 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
26d44ae2 1852 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
5852948c 1853 NULL_TREE, NULL_TREE);
04d86531 1854 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
e5fdcd8c
RG
1855
1856 /* We can have stripped a required restrict qualifier above. */
1857 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
1858 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
26d44ae2 1859}
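/* Illustrative only (not part of gimplify.c): given

     int a[10];
     int *p = (int *) &a;

   the conversion of &a from int(*)[10] to int * is folded here into &a[0],
   since the lower bound (0) and the element size are constant and int * is
   trivially convertible to the pointer-to-element type.  */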
6de9cd9a 1860
26d44ae2
RH
1861/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1862 underneath as appropriate. */
6de9cd9a 1863
26d44ae2
RH
1864static enum gimplify_status
1865gimplify_conversion (tree *expr_p)
d3147f64 1866{
db3927fb 1867 location_t loc = EXPR_LOCATION (*expr_p);
1043771b 1868 gcc_assert (CONVERT_EXPR_P (*expr_p));
c2255bc4 1869
0710ccff
NS
1870 /* Then strip away all but the outermost conversion. */
1871 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1872
1873 /* And remove the outermost conversion if it's useless. */
1874 if (tree_ssa_useless_type_conversion (*expr_p))
1875 *expr_p = TREE_OPERAND (*expr_p, 0);
6de9cd9a 1876
26d44ae2
RH
1877 /* If we still have a conversion at the toplevel,
1878 then canonicalize some constructs. */
1043771b 1879 if (CONVERT_EXPR_P (*expr_p))
26d44ae2
RH
1880 {
1881 tree sub = TREE_OPERAND (*expr_p, 0);
6de9cd9a 1882
26d44ae2
RH
1883 /* If a NOP conversion is changing the type of a COMPONENT_REF
1884 expression, then canonicalize its type now in order to expose more
1885 redundant conversions. */
1886 if (TREE_CODE (sub) == COMPONENT_REF)
1887 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
6de9cd9a 1888
26d44ae2
RH
1889 /* If a NOP conversion is changing a pointer to array of foo
1890 to a pointer to foo, embed that change in the ADDR_EXPR. */
1891 else if (TREE_CODE (sub) == ADDR_EXPR)
1892 canonicalize_addr_expr (expr_p);
1893 }
6de9cd9a 1894
8b17cc05
RG
1895 /* If we have a conversion to a non-register type force the
1896 use of a VIEW_CONVERT_EXPR instead. */
4f934809 1897 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
db3927fb 1898 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
4f934809 1899 TREE_OPERAND (*expr_p, 0));
8b17cc05 1900
741233cf
RB
1901 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
1902 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
1903 TREE_SET_CODE (*expr_p, NOP_EXPR);
1904
6de9cd9a
DN
1905 return GS_OK;
1906}
1907
77f2a970 1908/* Nonlocal VLAs seen in the current function. */
6e2830c3 1909static hash_set<tree> *nonlocal_vlas;
77f2a970 1910
96ddb7ec
JJ
1911/* The VAR_DECLs created for nonlocal VLAs for debug info purposes. */
1912static tree nonlocal_vla_vars;
1913
ad19c4be 1914/* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
a9f7c570
RH
1915 DECL_VALUE_EXPR, and it's worth re-examining things. */
1916
1917static enum gimplify_status
1918gimplify_var_or_parm_decl (tree *expr_p)
1919{
1920 tree decl = *expr_p;
1921
1922 /* ??? If this is a local variable, and it has not been seen in any
1923 outer BIND_EXPR, then it's probably the result of a duplicate
1924 declaration, for which we've already issued an error. It would
1925 be really nice if the front end wouldn't leak these at all.
1926 Currently the only known culprit is C++ destructors, as seen
1927 in g++.old-deja/g++.jason/binding.C. */
1928 if (TREE_CODE (decl) == VAR_DECL
1929 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1930 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1931 && decl_function_context (decl) == current_function_decl)
1932 {
1da2ed5f 1933 gcc_assert (seen_error ());
a9f7c570
RH
1934 return GS_ERROR;
1935 }
1936
41dbbb37 1937 /* When within an OMP context, notice uses of variables. */
953ff289
DN
1938 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1939 return GS_ALL_DONE;
1940
a9f7c570
RH
1941 /* If the decl is an alias for another expression, substitute it now. */
1942 if (DECL_HAS_VALUE_EXPR_P (decl))
1943 {
77f2a970
JJ
1944 tree value_expr = DECL_VALUE_EXPR (decl);
1945
1946 /* For referenced nonlocal VLAs add a decl for debugging purposes
1947 to the current function. */
1948 if (TREE_CODE (decl) == VAR_DECL
1949 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1950 && nonlocal_vlas != NULL
1951 && TREE_CODE (value_expr) == INDIRECT_REF
1952 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
1953 && decl_function_context (decl) != current_function_decl)
1954 {
1955 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
74bf76ed
JJ
1956 while (ctx
1957 && (ctx->region_type == ORT_WORKSHARE
182190f2
NS
1958 || ctx->region_type == ORT_SIMD
1959 || ctx->region_type == ORT_ACC))
77f2a970 1960 ctx = ctx->outer_context;
6e2830c3 1961 if (!ctx && !nonlocal_vlas->add (decl))
77f2a970 1962 {
96ddb7ec 1963 tree copy = copy_node (decl);
77f2a970
JJ
1964
1965 lang_hooks.dup_lang_specific_decl (copy);
2eb79bbb 1966 SET_DECL_RTL (copy, 0);
77f2a970 1967 TREE_USED (copy) = 1;
96ddb7ec
JJ
1968 DECL_CHAIN (copy) = nonlocal_vla_vars;
1969 nonlocal_vla_vars = copy;
77f2a970
JJ
1970 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
1971 DECL_HAS_VALUE_EXPR_P (copy) = 1;
1972 }
1973 }
1974
1975 *expr_p = unshare_expr (value_expr);
a9f7c570
RH
1976 return GS_OK;
1977 }
1978
1979 return GS_ALL_DONE;
1980}
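/* Illustrative sketch (not part of gimplify.c): with GNU C nested functions,

     void outer (int n)
     {
       int vla[n];
       void inner (void) { vla[0] = 0; }
       inner ();
     }

   gimplifying INNER sees a use of VLA whose DECL_VALUE_EXPR is an
   INDIRECT_REF of a pointer belonging to OUTER; the code above then records
   a local copy of the decl in nonlocal_vla_vars purely so that debug info
   for the nested function can still name the array.  */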
1981
66c14933
EB
1982/* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
1983
1984static void
2fb9a547
AM
1985recalculate_side_effects (tree t)
1986{
1987 enum tree_code code = TREE_CODE (t);
1988 int len = TREE_OPERAND_LENGTH (t);
1989 int i;
1990
1991 switch (TREE_CODE_CLASS (code))
1992 {
1993 case tcc_expression:
1994 switch (code)
1995 {
1996 case INIT_EXPR:
1997 case MODIFY_EXPR:
1998 case VA_ARG_EXPR:
1999 case PREDECREMENT_EXPR:
2000 case PREINCREMENT_EXPR:
2001 case POSTDECREMENT_EXPR:
2002 case POSTINCREMENT_EXPR:
2003 /* All of these have side-effects, no matter what their
2004 operands are. */
2005 return;
2006
2007 default:
2008 break;
2009 }
2010 /* Fall through. */
2011
2012 case tcc_comparison: /* a comparison expression */
2013 case tcc_unary: /* a unary arithmetic expression */
2014 case tcc_binary: /* a binary arithmetic expression */
2015 case tcc_reference: /* a reference */
2016 case tcc_vl_exp: /* a function call */
2017 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2018 for (i = 0; i < len; ++i)
2019 {
2020 tree op = TREE_OPERAND (t, i);
2021 if (op && TREE_SIDE_EFFECTS (op))
2022 TREE_SIDE_EFFECTS (t) = 1;
2023 }
2024 break;
2025
2026 case tcc_constant:
2027 /* No side-effects. */
2028 return;
2029
2030 default:
2031 gcc_unreachable ();
2032 }
2033}
2034
6de9cd9a 2035/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
726a989a 2036 node *EXPR_P.
6de9cd9a
DN
2037
2038 compound_lval
2039 : min_lval '[' val ']'
2040 | min_lval '.' ID
2041 | compound_lval '[' val ']'
2042 | compound_lval '.' ID
2043
2044 This is not part of the original SIMPLE definition, which separates
2045 array and member references, but it seems reasonable to handle them
2046 together. Also, this way we don't run into problems with union
2047 aliasing; gcc requires that for accesses through a union to alias, the
2048 union reference must be explicit, which was not always the case when we
2049 were splitting up array and member refs.
2050
726a989a 2051 PRE_P points to the sequence where side effects that must happen before
6de9cd9a
DN
2052 *EXPR_P should be stored.
2053
726a989a 2054 POST_P points to the sequence where side effects that must happen after
6de9cd9a
DN
2055 *EXPR_P should be stored. */
2056
2057static enum gimplify_status
726a989a
RB
2058gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2059 fallback_t fallback)
6de9cd9a
DN
2060{
2061 tree *p;
941f78d1 2062 enum gimplify_status ret = GS_ALL_DONE, tret;
af72267c 2063 int i;
db3927fb 2064 location_t loc = EXPR_LOCATION (*expr_p);
941f78d1 2065 tree expr = *expr_p;
6de9cd9a 2066
6de9cd9a 2067 /* Create a stack of the subexpressions so later we can walk them in
ec234842 2068 order from inner to outer. */
00f96dc9 2069 auto_vec<tree, 10> expr_stack;
6de9cd9a 2070
afe84921 2071 /* We can handle anything that get_inner_reference can deal with. */
6a720599
JM
2072 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2073 {
a9f7c570 2074 restart:
6a720599
JM
2075 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2076 if (TREE_CODE (*p) == INDIRECT_REF)
db3927fb 2077 *p = fold_indirect_ref_loc (loc, *p);
a9f7c570
RH
2078
2079 if (handled_component_p (*p))
2080 ;
2081 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2082 additional COMPONENT_REFs. */
2083 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
2084 && gimplify_var_or_parm_decl (p) == GS_OK)
2085 goto restart;
2086 else
6a720599 2087 break;
b8698a0f 2088
9771b263 2089 expr_stack.safe_push (*p);
6a720599 2090 }
6de9cd9a 2091
9771b263 2092 gcc_assert (expr_stack.length ());
9e51aaf5 2093
0823efed
DN
2094 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2095 walked through and P points to the innermost expression.
6de9cd9a 2096
af72267c
RK
 2097 Java requires that we elaborate nodes in source order. That

2098 means we must gimplify the inner expression followed by each of
2099 the indices, in order. But we can't gimplify the inner
2100 expression until we deal with any variable bounds, sizes, or
2101 positions in order to deal with PLACEHOLDER_EXPRs.
2102
2103 So we do this in three steps. First we deal with the annotations
2104 for any variables in the components, then we gimplify the base,
2105 then we gimplify any indices, from left to right. */
9771b263 2106 for (i = expr_stack.length () - 1; i >= 0; i--)
6de9cd9a 2107 {
9771b263 2108 tree t = expr_stack[i];
44de5aeb
RK
2109
2110 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6de9cd9a 2111 {
44de5aeb
RK
2112 /* Gimplify the low bound and element type size and put them into
2113 the ARRAY_REF. If these values are set, they have already been
2114 gimplified. */
726a989a 2115 if (TREE_OPERAND (t, 2) == NULL_TREE)
44de5aeb 2116 {
a7cc468a
RH
2117 tree low = unshare_expr (array_ref_low_bound (t));
2118 if (!is_gimple_min_invariant (low))
44de5aeb 2119 {
726a989a
RB
2120 TREE_OPERAND (t, 2) = low;
2121 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
ba4d8f9d 2122 post_p, is_gimple_reg,
726a989a 2123 fb_rvalue);
44de5aeb
RK
2124 ret = MIN (ret, tret);
2125 }
2126 }
19c44640
JJ
2127 else
2128 {
2129 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2130 is_gimple_reg, fb_rvalue);
2131 ret = MIN (ret, tret);
2132 }
44de5aeb 2133
19c44640 2134 if (TREE_OPERAND (t, 3) == NULL_TREE)
44de5aeb
RK
2135 {
2136 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2137 tree elmt_size = unshare_expr (array_ref_element_size (t));
a4e9ffe5 2138 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
44de5aeb
RK
2139
2140 /* Divide the element size by the alignment of the element
2141 type (above). */
ad19c4be
EB
2142 elmt_size
2143 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
44de5aeb 2144
a7cc468a 2145 if (!is_gimple_min_invariant (elmt_size))
44de5aeb 2146 {
726a989a
RB
2147 TREE_OPERAND (t, 3) = elmt_size;
2148 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
ba4d8f9d 2149 post_p, is_gimple_reg,
726a989a 2150 fb_rvalue);
44de5aeb
RK
2151 ret = MIN (ret, tret);
2152 }
6de9cd9a 2153 }
19c44640
JJ
2154 else
2155 {
2156 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
2157 is_gimple_reg, fb_rvalue);
2158 ret = MIN (ret, tret);
2159 }
6de9cd9a 2160 }
44de5aeb
RK
2161 else if (TREE_CODE (t) == COMPONENT_REF)
2162 {
2163 /* Set the field offset into T and gimplify it. */
19c44640 2164 if (TREE_OPERAND (t, 2) == NULL_TREE)
44de5aeb
RK
2165 {
2166 tree offset = unshare_expr (component_ref_field_offset (t));
2167 tree field = TREE_OPERAND (t, 1);
2168 tree factor
2169 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2170
2171 /* Divide the offset by its alignment. */
db3927fb 2172 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
44de5aeb 2173
a7cc468a 2174 if (!is_gimple_min_invariant (offset))
44de5aeb 2175 {
726a989a
RB
2176 TREE_OPERAND (t, 2) = offset;
2177 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
ba4d8f9d 2178 post_p, is_gimple_reg,
726a989a 2179 fb_rvalue);
44de5aeb
RK
2180 ret = MIN (ret, tret);
2181 }
2182 }
19c44640
JJ
2183 else
2184 {
2185 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2186 is_gimple_reg, fb_rvalue);
2187 ret = MIN (ret, tret);
2188 }
44de5aeb 2189 }
af72267c
RK
2190 }
2191
a9f7c570
RH
2192 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2193 so as to match the min_lval predicate. Failure to do so may result
2194 in the creation of large aggregate temporaries. */
2195 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2196 fallback | fb_lvalue);
af72267c
RK
2197 ret = MIN (ret, tret);
2198
ea814c66 2199 /* And finally, the indices and operands of ARRAY_REF. During this
48eb4e53 2200 loop we also remove any useless conversions. */
9771b263 2201 for (; expr_stack.length () > 0; )
af72267c 2202 {
9771b263 2203 tree t = expr_stack.pop ();
af72267c
RK
2204
2205 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2206 {
ba4d8f9d 2207 /* Gimplify the dimension. */
af72267c
RK
2208 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2209 {
2210 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
ba4d8f9d 2211 is_gimple_val, fb_rvalue);
af72267c
RK
2212 ret = MIN (ret, tret);
2213 }
2214 }
48eb4e53
RK
2215
2216 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2217
726a989a
RB
2218 /* The innermost expression P may have originally had
2219 TREE_SIDE_EFFECTS set which would have caused all the outer
2220 expressions in *EXPR_P leading to P to also have had
2221 TREE_SIDE_EFFECTS set. */
6de9cd9a 2222 recalculate_side_effects (t);
6de9cd9a
DN
2223 }
2224
2225 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
90051e16 2226 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
6de9cd9a
DN
2227 {
2228 canonicalize_component_ref (expr_p);
6de9cd9a
DN
2229 }
2230
9771b263 2231 expr_stack.release ();
07724022 2232
941f78d1
JM
2233 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
2234
6de9cd9a
DN
2235 return ret;
2236}
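/* Illustrative sketch (not part of gimplify.c): for

     void f (int (*p)[8], int i, int j) { (*p)[i + j] = 0; }

   the index expression is reduced here to an is_gimple_val operand so that
   the ARRAY_REF contains only simple values, giving roughly

     D.1 = i + j;
     (*p)[D.1] = 0;

   The temporary name is an assumption; -fdump-tree-gimple output varies.  */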
2237
206048bd
VR
2238/* Gimplify the self modifying expression pointed to by EXPR_P
2239 (++, --, +=, -=).
6de9cd9a
DN
2240
2241 PRE_P points to the list where side effects that must happen before
2242 *EXPR_P should be stored.
2243
2244 POST_P points to the list where side effects that must happen after
2245 *EXPR_P should be stored.
2246
2247 WANT_VALUE is nonzero iff we want to use the value of this expression
cc3c4f62 2248 in another expression.
6de9cd9a 2249
cc3c4f62
RB
2250 ARITH_TYPE is the type the computation should be performed in. */
2251
2252enum gimplify_status
726a989a 2253gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
cc3c4f62 2254 bool want_value, tree arith_type)
6de9cd9a
DN
2255{
2256 enum tree_code code;
726a989a
RB
2257 tree lhs, lvalue, rhs, t1;
2258 gimple_seq post = NULL, *orig_post_p = post_p;
6de9cd9a
DN
2259 bool postfix;
2260 enum tree_code arith_code;
2261 enum gimplify_status ret;
db3927fb 2262 location_t loc = EXPR_LOCATION (*expr_p);
6de9cd9a
DN
2263
2264 code = TREE_CODE (*expr_p);
2265
282899df
NS
2266 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2267 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
6de9cd9a
DN
2268
2269 /* Prefix or postfix? */
2270 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2271 /* Faster to treat as prefix if result is not used. */
2272 postfix = want_value;
2273 else
2274 postfix = false;
2275
82181741
JJ
2276 /* For postfix, make sure the inner expression's post side effects
2277 are executed after side effects from this expression. */
2278 if (postfix)
2279 post_p = &post;
2280
6de9cd9a
DN
2281 /* Add or subtract? */
2282 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2283 arith_code = PLUS_EXPR;
2284 else
2285 arith_code = MINUS_EXPR;
2286
2287 /* Gimplify the LHS into a GIMPLE lvalue. */
2288 lvalue = TREE_OPERAND (*expr_p, 0);
2289 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2290 if (ret == GS_ERROR)
2291 return ret;
2292
2293 /* Extract the operands to the arithmetic operation. */
2294 lhs = lvalue;
2295 rhs = TREE_OPERAND (*expr_p, 1);
2296
 2297 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
d97c9b22 2298 that both as the result value and in the post queue operation. */
6de9cd9a
DN
2299 if (postfix)
2300 {
2301 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2302 if (ret == GS_ERROR)
2303 return ret;
6de9cd9a 2304
d97c9b22
JJ
2305 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
2306 }
cc3c4f62 2307
5be014d5
AP
2308 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
2309 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2310 {
0d82a1c8 2311 rhs = convert_to_ptrofftype_loc (loc, rhs);
5be014d5 2312 if (arith_code == MINUS_EXPR)
db3927fb 2313 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
cc3c4f62 2314 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
5be014d5 2315 }
cc3c4f62
RB
2316 else
2317 t1 = fold_convert (TREE_TYPE (*expr_p),
2318 fold_build2 (arith_code, arith_type,
2319 fold_convert (arith_type, lhs),
2320 fold_convert (arith_type, rhs)));
5be014d5 2321
6de9cd9a
DN
2322 if (postfix)
2323 {
cf1867a0 2324 gimplify_assign (lvalue, t1, pre_p);
726a989a 2325 gimplify_seq_add_seq (orig_post_p, post);
cc3c4f62 2326 *expr_p = lhs;
6de9cd9a
DN
2327 return GS_ALL_DONE;
2328 }
2329 else
2330 {
726a989a 2331 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
6de9cd9a
DN
2332 return GS_OK;
2333 }
2334}
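/* Illustrative sketch (not part of gimplify.c): a postfix increment whose
   value is used, e.g.

     int f (int *p) { return (*p)++; }

   is rewritten here by reading the lvalue into a temporary, storing the
   incremented value back, and using the temporary as the result, roughly

     D.1 = *p;
     *p = D.1 + 1;
     return D.1;

   For pointer operands the addition becomes POINTER_PLUS_EXPR with the
   offset converted to sizetype.  Temporary names are assumptions.  */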
2335
d25cee4d
RH
2336/* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2337
2338static void
2339maybe_with_size_expr (tree *expr_p)
2340{
61025d1b
RK
2341 tree expr = *expr_p;
2342 tree type = TREE_TYPE (expr);
2343 tree size;
d25cee4d 2344
61025d1b
RK
2345 /* If we've already wrapped this or the type is error_mark_node, we can't do
2346 anything. */
2347 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2348 || type == error_mark_node)
d25cee4d
RH
2349 return;
2350
61025d1b 2351 /* If the size isn't known or is a constant, we have nothing to do. */
d25cee4d 2352 size = TYPE_SIZE_UNIT (type);
61025d1b
RK
2353 if (!size || TREE_CODE (size) == INTEGER_CST)
2354 return;
2355
2356 /* Otherwise, make a WITH_SIZE_EXPR. */
2357 size = unshare_expr (size);
2358 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2359 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
d25cee4d
RH
2360}
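/* Illustrative sketch (not part of gimplify.c): variable-sized rvalues mostly
   come from front ends with dynamically sized aggregates (Ada, for
   instance).  At the GENERIC level a value EXPR of such a type TYPE is
   wrapped here as

     WITH_SIZE_EXPR <EXPR, TYPE_SIZE_UNIT (TYPE)>

   with any PLACEHOLDER_EXPR in the size replaced by EXPR itself, so that
   later assignments and calls know how many bytes are involved.  */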
2361
726a989a 2362/* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
1282697f 2363 Store any side-effects in PRE_P. CALL_LOCATION is the location of
381cdae4
RB
2364 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
2365 gimplified to an SSA name. */
e4f78bd4 2366
fe6ebcf1 2367enum gimplify_status
381cdae4
RB
2368gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
2369 bool allow_ssa)
e4f78bd4
JM
2370{
2371 bool (*test) (tree);
2372 fallback_t fb;
2373
2374 /* In general, we allow lvalues for function arguments to avoid
2375 extra overhead of copying large aggregates out of even larger
2376 aggregates into temporaries only to copy the temporaries to
2377 the argument list. Make optimizers happy by pulling out to
2378 temporaries those types that fit in registers. */
726a989a 2379 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
e4f78bd4
JM
2380 test = is_gimple_val, fb = fb_rvalue;
2381 else
b4ef8aac
JM
2382 {
2383 test = is_gimple_lvalue, fb = fb_either;
2384 /* Also strip a TARGET_EXPR that would force an extra copy. */
2385 if (TREE_CODE (*arg_p) == TARGET_EXPR)
2386 {
2387 tree init = TARGET_EXPR_INITIAL (*arg_p);
2388 if (init
2389 && !VOID_TYPE_P (TREE_TYPE (init)))
2390 *arg_p = init;
2391 }
2392 }
e4f78bd4 2393
d25cee4d 2394 /* If this is a variable sized type, we must remember the size. */
726a989a 2395 maybe_with_size_expr (arg_p);
d25cee4d 2396
c2255bc4 2397 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
1282697f
AH
2398 /* Make sure arguments have the same location as the function call
2399 itself. */
2400 protected_set_expr_location (*arg_p, call_location);
2401
e4f78bd4
JM
2402 /* There is a sequence point before a function call. Side effects in
2403 the argument list must occur before the actual call. So, when
2404 gimplifying arguments, force gimplify_expr to use an internal
2405 post queue which is then appended to the end of PRE_P. */
381cdae4 2406 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
e4f78bd4
JM
2407}
2408
d26fc979
JJ
2409/* Don't fold inside offloading or taskreg regions: it can break code by
 2410 adding decl references that weren't in the source. We'll do it during
 2411 the omplower pass instead. */
88ac13da
TS
2412
2413static bool
2414maybe_fold_stmt (gimple_stmt_iterator *gsi)
2415{
2416 struct gimplify_omp_ctx *ctx;
2417 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
d9a6bd32 2418 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
88ac13da
TS
2419 return false;
2420 return fold_stmt (gsi);
2421}
2422
726a989a 2423/* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
90051e16 2424 WANT_VALUE is true if the result of the call is desired. */
6de9cd9a
DN
2425
2426static enum gimplify_status
726a989a 2427gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6de9cd9a 2428{
f20ca725 2429 tree fndecl, parms, p, fnptrtype;
6de9cd9a 2430 enum gimplify_status ret;
5039610b 2431 int i, nargs;
538dd0b7 2432 gcall *call;
ed9c79e1 2433 bool builtin_va_start_p = false;
db3927fb 2434 location_t loc = EXPR_LOCATION (*expr_p);
6de9cd9a 2435
282899df 2436 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
6de9cd9a 2437
d3147f64 2438 /* For reliable diagnostics during inlining, it is necessary that
6de9cd9a 2439 every call_expr be annotated with file and line. */
a281759f
PB
2440 if (! EXPR_HAS_LOCATION (*expr_p))
2441 SET_EXPR_LOCATION (*expr_p, input_location);
6de9cd9a 2442
0e37a2f3
MP
2443 /* Gimplify internal functions created in the FEs. */
2444 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
2445 {
1304953e
JJ
2446 if (want_value)
2447 return GS_ALL_DONE;
2448
0e37a2f3
MP
2449 nargs = call_expr_nargs (*expr_p);
2450 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
2451 auto_vec<tree> vargs (nargs);
2452
2453 for (i = 0; i < nargs; i++)
2454 {
2455 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2456 EXPR_LOCATION (*expr_p));
2457 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
2458 }
355fe088 2459 gimple *call = gimple_build_call_internal_vec (ifn, vargs);
0e37a2f3
MP
2460 gimplify_seq_add_stmt (pre_p, call);
2461 return GS_ALL_DONE;
2462 }
2463
6de9cd9a
DN
2464 /* This may be a call to a builtin function.
2465
2466 Builtin function calls may be transformed into different
2467 (and more efficient) builtin function calls under certain
2468 circumstances. Unfortunately, gimplification can muck things
2469 up enough that the builtin expanders are not aware that certain
2470 transformations are still valid.
2471
2472 So we attempt transformation/gimplification of the call before
2473 we gimplify the CALL_EXPR. At this time we do not manage to
2474 transform all calls in the same manner as the expanders do, but
2475 we do transform most of them. */
726a989a 2476 fndecl = get_callee_fndecl (*expr_p);
3537a0cd
RG
2477 if (fndecl
2478 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2479 switch (DECL_FUNCTION_CODE (fndecl))
2480 {
03c00798
EB
2481 case BUILT_IN_ALLOCA:
2482 case BUILT_IN_ALLOCA_WITH_ALIGN:
2483 /* If the call has been built for a variable-sized object, then we
2484 want to restore the stack level when the enclosing BIND_EXPR is
2485 exited to reclaim the allocated space; otherwise, we precisely
2486 need to do the opposite and preserve the latest stack level. */
2487 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
2488 gimplify_ctxp->save_stack = true;
2489 else
2490 gimplify_ctxp->keep_stack = true;
2491 break;
2492
3537a0cd 2493 case BUILT_IN_VA_START:
2efcfa4e 2494 {
726a989a 2495 builtin_va_start_p = TRUE;
5039610b 2496 if (call_expr_nargs (*expr_p) < 2)
2efcfa4e
AP
2497 {
2498 error ("too few arguments to function %<va_start%>");
c2255bc4 2499 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2efcfa4e
AP
2500 return GS_OK;
2501 }
b8698a0f 2502
5039610b 2503 if (fold_builtin_next_arg (*expr_p, true))
2efcfa4e 2504 {
c2255bc4 2505 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2efcfa4e
AP
2506 return GS_OK;
2507 }
3537a0cd
RG
2508 break;
2509 }
b25aad5f 2510
3537a0cd
RG
2511 default:
2512 ;
2513 }
2514 if (fndecl && DECL_BUILT_IN (fndecl))
2515 {
2516 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2517 if (new_tree && new_tree != *expr_p)
2518 {
2519 /* There was a transformation of this call which computes the
2520 same value, but in a more efficient way. Return and try
2521 again. */
2522 *expr_p = new_tree;
2523 return GS_OK;
2efcfa4e 2524 }
6de9cd9a
DN
2525 }
2526
f20ca725
RG
2527 /* Remember the original function pointer type. */
2528 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
2529
6de9cd9a
DN
2530 /* There is a sequence point before the call, so any side effects in
2531 the calling expression must occur before the actual call. Force
2532 gimplify_expr to use an internal post queue. */
5039610b 2533 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
0f59171d 2534 is_gimple_call_addr, fb_rvalue);
6de9cd9a 2535
5039610b
SL
2536 nargs = call_expr_nargs (*expr_p);
2537
e36711f3 2538 /* Get argument types for verification. */
726a989a 2539 fndecl = get_callee_fndecl (*expr_p);
e36711f3 2540 parms = NULL_TREE;
726a989a
RB
2541 if (fndecl)
2542 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
a96c6a62
RB
2543 else
2544 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
e36711f3 2545
726a989a 2546 if (fndecl && DECL_ARGUMENTS (fndecl))
f9487002 2547 p = DECL_ARGUMENTS (fndecl);
004e2fa7 2548 else if (parms)
f9487002 2549 p = parms;
6ef5231b 2550 else
498e51ca 2551 p = NULL_TREE;
f9487002
JJ
2552 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2553 ;
6ef5231b
JJ
2554
2555 /* If the last argument is __builtin_va_arg_pack () and it is not
2556 passed as a named argument, decrease the number of CALL_EXPR
2557 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
2558 if (!p
2559 && i < nargs
2560 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2561 {
2562 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2563 tree last_arg_fndecl = get_callee_fndecl (last_arg);
2564
2565 if (last_arg_fndecl
2566 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2567 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2568 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2569 {
2570 tree call = *expr_p;
2571
2572 --nargs;
db3927fb
AH
2573 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
2574 CALL_EXPR_FN (call),
2575 nargs, CALL_EXPR_ARGP (call));
726a989a
RB
2576
2577 /* Copy all CALL_EXPR flags, location and block, except
6ef5231b
JJ
2578 CALL_EXPR_VA_ARG_PACK flag. */
2579 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2580 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2581 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2582 = CALL_EXPR_RETURN_SLOT_OPT (call);
2583 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
5e278028 2584 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
726a989a 2585
6ef5231b
JJ
2586 /* Set CALL_EXPR_VA_ARG_PACK. */
2587 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2588 }
2589 }
e36711f3 2590
381cdae4
RB
2591 /* If the call returns twice then after building the CFG the call
2592 argument computations will no longer dominate the call because
2593 we add an abnormal incoming edge to the call. So do not use SSA
2594 vars there. */
2595 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
2596
f2d3d07e 2597 /* Gimplify the function arguments. */
726a989a 2598 if (nargs > 0)
6de9cd9a 2599 {
726a989a
RB
2600 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2601 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2602 PUSH_ARGS_REVERSED ? i-- : i++)
2603 {
2604 enum gimplify_status t;
6de9cd9a 2605
726a989a
RB
2606 /* Avoid gimplifying the second argument to va_start, which needs to
2607 be the plain PARM_DECL. */
2608 if ((i != 1) || !builtin_va_start_p)
2609 {
1282697f 2610 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
381cdae4 2611 EXPR_LOCATION (*expr_p), ! returns_twice);
6de9cd9a 2612
726a989a
RB
2613 if (t == GS_ERROR)
2614 ret = GS_ERROR;
2615 }
2616 }
6de9cd9a 2617 }
6de9cd9a 2618
f2d3d07e
RH
2619 /* Gimplify the static chain. */
2620 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
2621 {
2622 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
2623 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
2624 else
2625 {
2626 enum gimplify_status t;
2627 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
381cdae4 2628 EXPR_LOCATION (*expr_p), ! returns_twice);
f2d3d07e
RH
2629 if (t == GS_ERROR)
2630 ret = GS_ERROR;
2631 }
2632 }
2633
33922890
RG
2634 /* Verify the function result. */
2635 if (want_value && fndecl
f20ca725 2636 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
33922890
RG
2637 {
2638 error_at (loc, "using result of function returning %<void%>");
2639 ret = GS_ERROR;
2640 }
2641
6de9cd9a 2642 /* Try this again in case gimplification exposed something. */
6f538523 2643 if (ret != GS_ERROR)
6de9cd9a 2644 {
db3927fb 2645 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
6f538523 2646
82d6e6fc 2647 if (new_tree && new_tree != *expr_p)
5039610b
SL
2648 {
2649 /* There was a transformation of this call which computes the
2650 same value, but in a more efficient way. Return and try
2651 again. */
82d6e6fc 2652 *expr_p = new_tree;
5039610b 2653 return GS_OK;
6de9cd9a
DN
2654 }
2655 }
726a989a
RB
2656 else
2657 {
df8fa700 2658 *expr_p = error_mark_node;
726a989a
RB
2659 return GS_ERROR;
2660 }
6de9cd9a
DN
2661
2662 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
2663 decl. This allows us to eliminate redundant or useless
2664 calls to "const" functions. */
becfd6e5
KZ
2665 if (TREE_CODE (*expr_p) == CALL_EXPR)
2666 {
2667 int flags = call_expr_flags (*expr_p);
2668 if (flags & (ECF_CONST | ECF_PURE)
2669 /* An infinite loop is considered a side effect. */
2670 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2671 TREE_SIDE_EFFECTS (*expr_p) = 0;
2672 }
726a989a
RB
2673
2674 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2675 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
2676 form and delegate the creation of a GIMPLE_CALL to
2677 gimplify_modify_expr. This is always possible because when
2678 WANT_VALUE is true, the caller wants the result of this call into
2679 a temporary, which means that we will emit an INIT_EXPR in
2680 internal_get_tmp_var which will then be handled by
2681 gimplify_modify_expr. */
2682 if (!want_value)
2683 {
2684 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2685 have to do is replicate it as a GIMPLE_CALL tuple. */
64e0f5ff 2686 gimple_stmt_iterator gsi;
726a989a 2687 call = gimple_build_call_from_tree (*expr_p);
f20ca725 2688 gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
f6b64c35 2689 notice_special_calls (call);
726a989a 2690 gimplify_seq_add_stmt (pre_p, call);
64e0f5ff 2691 gsi = gsi_last (*pre_p);
88ac13da 2692 maybe_fold_stmt (&gsi);
726a989a
RB
2693 *expr_p = NULL_TREE;
2694 }
f20ca725
RG
2695 else
2696 /* Remember the original function type. */
2697 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
2698 CALL_EXPR_FN (*expr_p));
726a989a 2699
6de9cd9a
DN
2700 return ret;
2701}
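/* Illustrative sketch (not part of gimplify.c): because there is a sequence
   point before a call, side effects in the callee expression and in every
   argument are forced out ahead of the call, e.g.

     h (f (), g ());

   gimplifies roughly to

     D.1 = f ();
     D.2 = g ();
     h (D.1, D.2);

   and when the result is unused the CALL_EXPR is emitted here directly as a
   GIMPLE_CALL rather than being left for gimplify_modify_expr.  Temporary
   names are assumptions.  */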
2702
2703/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2704 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2705
2706 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2707 condition is true or false, respectively. If null, we should generate
2708 our own to skip over the evaluation of this specific expression.
2709
ca80e52b
EB
2710 LOCUS is the source location of the COND_EXPR.
2711
6de9cd9a
DN
2712 This function is the tree equivalent of do_jump.
2713
2714 shortcut_cond_r should only be called by shortcut_cond_expr. */
2715
2716static tree
ca80e52b
EB
2717shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
2718 location_t locus)
6de9cd9a
DN
2719{
2720 tree local_label = NULL_TREE;
2721 tree t, expr = NULL;
2722
2723 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2724 retain the shortcut semantics. Just insert the gotos here;
2725 shortcut_cond_expr will append the real blocks later. */
2726 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2727 {
ca80e52b
EB
2728 location_t new_locus;
2729
6de9cd9a
DN
2730 /* Turn if (a && b) into
2731
2732 if (a); else goto no;
2733 if (b) goto yes; else goto no;
2734 (no:) */
2735
2736 if (false_label_p == NULL)
2737 false_label_p = &local_label;
2738
ca80e52b
EB
2739 /* Keep the original source location on the first 'if'. */
2740 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
6de9cd9a
DN
2741 append_to_statement_list (t, &expr);
2742
ca80e52b
EB
2743 /* Set the source location of the && on the second 'if'. */
2744 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2745 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2746 new_locus);
6de9cd9a
DN
2747 append_to_statement_list (t, &expr);
2748 }
2749 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2750 {
ca80e52b
EB
2751 location_t new_locus;
2752
6de9cd9a
DN
2753 /* Turn if (a || b) into
2754
2755 if (a) goto yes;
2756 if (b) goto yes; else goto no;
2757 (yes:) */
2758
2759 if (true_label_p == NULL)
2760 true_label_p = &local_label;
2761
ca80e52b
EB
2762 /* Keep the original source location on the first 'if'. */
2763 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
6de9cd9a
DN
2764 append_to_statement_list (t, &expr);
2765
ca80e52b
EB
2766 /* Set the source location of the || on the second 'if'. */
2767 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2768 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2769 new_locus);
6de9cd9a
DN
2770 append_to_statement_list (t, &expr);
2771 }
1537737f
JJ
2772 else if (TREE_CODE (pred) == COND_EXPR
2773 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
2774 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
6de9cd9a 2775 {
ca80e52b
EB
2776 location_t new_locus;
2777
6de9cd9a
DN
2778 /* As long as we're messing with gotos, turn if (a ? b : c) into
2779 if (a)
2780 if (b) goto yes; else goto no;
2781 else
1537737f
JJ
2782 if (c) goto yes; else goto no;
2783
2784 Don't do this if one of the arms has void type, which can happen
 2785 in C++ when the arm is a throw. */
ca80e52b
EB
2786
2787 /* Keep the original source location on the first 'if'. Set the source
2788 location of the ? on the second 'if'. */
2789 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
b4257cfc
RG
2790 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2791 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
ca80e52b 2792 false_label_p, locus),
b4257cfc 2793 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
ca80e52b 2794 false_label_p, new_locus));
6de9cd9a
DN
2795 }
2796 else
2797 {
b4257cfc
RG
2798 expr = build3 (COND_EXPR, void_type_node, pred,
2799 build_and_jump (true_label_p),
2800 build_and_jump (false_label_p));
ca80e52b 2801 SET_EXPR_LOCATION (expr, locus);
6de9cd9a
DN
2802 }
2803
2804 if (local_label)
2805 {
2806 t = build1 (LABEL_EXPR, void_type_node, local_label);
2807 append_to_statement_list (t, &expr);
2808 }
2809
2810 return expr;
2811}
2812
726a989a
RB
2813/* Given a conditional expression EXPR with short-circuit boolean
2814 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
073a8998 2815 predicate apart into the equivalent sequence of conditionals. */
726a989a 2816
6de9cd9a
DN
2817static tree
2818shortcut_cond_expr (tree expr)
2819{
2820 tree pred = TREE_OPERAND (expr, 0);
2821 tree then_ = TREE_OPERAND (expr, 1);
2822 tree else_ = TREE_OPERAND (expr, 2);
2823 tree true_label, false_label, end_label, t;
2824 tree *true_label_p;
2825 tree *false_label_p;
089efaa4 2826 bool emit_end, emit_false, jump_over_else;
65355d53
RH
2827 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2828 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
6de9cd9a
DN
2829
2830 /* First do simple transformations. */
65355d53 2831 if (!else_se)
6de9cd9a 2832 {
ca80e52b
EB
2833 /* If there is no 'else', turn
2834 if (a && b) then c
2835 into
2836 if (a) if (b) then c. */
6de9cd9a
DN
2837 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2838 {
ca80e52b 2839 /* Keep the original source location on the first 'if'. */
8400e75e 2840 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
6de9cd9a 2841 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
ca80e52b
EB
2842 /* Set the source location of the && on the second 'if'. */
2843 if (EXPR_HAS_LOCATION (pred))
2844 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
6de9cd9a 2845 then_ = shortcut_cond_expr (expr);
4356a1bf 2846 then_se = then_ && TREE_SIDE_EFFECTS (then_);
6de9cd9a 2847 pred = TREE_OPERAND (pred, 0);
b4257cfc 2848 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
ca80e52b 2849 SET_EXPR_LOCATION (expr, locus);
6de9cd9a
DN
2850 }
2851 }
726a989a 2852
65355d53 2853 if (!then_se)
6de9cd9a
DN
2854 {
2855 /* If there is no 'then', turn
2856 if (a || b); else d
2857 into
2858 if (a); else if (b); else d. */
2859 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2860 {
ca80e52b 2861 /* Keep the original source location on the first 'if'. */
8400e75e 2862 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
6de9cd9a 2863 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
ca80e52b
EB
2864 /* Set the source location of the || on the second 'if'. */
2865 if (EXPR_HAS_LOCATION (pred))
2866 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
6de9cd9a 2867 else_ = shortcut_cond_expr (expr);
4356a1bf 2868 else_se = else_ && TREE_SIDE_EFFECTS (else_);
6de9cd9a 2869 pred = TREE_OPERAND (pred, 0);
b4257cfc 2870 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
ca80e52b 2871 SET_EXPR_LOCATION (expr, locus);
6de9cd9a
DN
2872 }
2873 }
2874
2875 /* If we're done, great. */
2876 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2877 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2878 return expr;
2879
2880 /* Otherwise we need to mess with gotos. Change
2881 if (a) c; else d;
2882 to
2883 if (a); else goto no;
2884 c; goto end;
2885 no: d; end:
2886 and recursively gimplify the condition. */
2887
2888 true_label = false_label = end_label = NULL_TREE;
2889
2890 /* If our arms just jump somewhere, hijack those labels so we don't
2891 generate jumps to jumps. */
2892
65355d53
RH
2893 if (then_
2894 && TREE_CODE (then_) == GOTO_EXPR
6de9cd9a
DN
2895 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2896 {
2897 true_label = GOTO_DESTINATION (then_);
65355d53
RH
2898 then_ = NULL;
2899 then_se = false;
6de9cd9a
DN
2900 }
2901
65355d53
RH
2902 if (else_
2903 && TREE_CODE (else_) == GOTO_EXPR
6de9cd9a
DN
2904 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2905 {
2906 false_label = GOTO_DESTINATION (else_);
65355d53
RH
2907 else_ = NULL;
2908 else_se = false;
6de9cd9a
DN
2909 }
2910
9cf737f8 2911 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
6de9cd9a
DN
2912 if (true_label)
2913 true_label_p = &true_label;
2914 else
2915 true_label_p = NULL;
2916
2917 /* The 'else' branch also needs a label if it contains interesting code. */
65355d53 2918 if (false_label || else_se)
6de9cd9a
DN
2919 false_label_p = &false_label;
2920 else
2921 false_label_p = NULL;
2922
2923 /* If there was nothing else in our arms, just forward the label(s). */
65355d53 2924 if (!then_se && !else_se)
ca80e52b 2925 return shortcut_cond_r (pred, true_label_p, false_label_p,
8400e75e 2926 EXPR_LOC_OR_LOC (expr, input_location));
6de9cd9a
DN
2927
2928 /* If our last subexpression already has a terminal label, reuse it. */
65355d53 2929 if (else_se)
ca80e52b 2930 t = expr_last (else_);
65355d53 2931 else if (then_se)
ca80e52b 2932 t = expr_last (then_);
65355d53 2933 else
ca80e52b
EB
2934 t = NULL;
2935 if (t && TREE_CODE (t) == LABEL_EXPR)
2936 end_label = LABEL_EXPR_LABEL (t);
6de9cd9a
DN
2937
2938 /* If we don't care about jumping to the 'else' branch, jump to the end
2939 if the condition is false. */
2940 if (!false_label_p)
2941 false_label_p = &end_label;
2942
2943 /* We only want to emit these labels if we aren't hijacking them. */
2944 emit_end = (end_label == NULL_TREE);
2945 emit_false = (false_label == NULL_TREE);
2946
089efaa4
ILT
2947 /* We only emit the jump over the else clause if we have to--if the
2948 then clause may fall through. Otherwise we can wind up with a
2949 useless jump and a useless label at the end of gimplified code,
2950 which will cause us to think that this conditional as a whole
2951 falls through even if it doesn't. If we then inline a function
2952 which ends with such a condition, that can cause us to issue an
2953 inappropriate warning about control reaching the end of a
2954 non-void function. */
2955 jump_over_else = block_may_fallthru (then_);
2956
ca80e52b 2957 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
8400e75e 2958 EXPR_LOC_OR_LOC (expr, input_location));
6de9cd9a
DN
2959
2960 expr = NULL;
2961 append_to_statement_list (pred, &expr);
2962
2963 append_to_statement_list (then_, &expr);
65355d53 2964 if (else_se)
6de9cd9a 2965 {
089efaa4
ILT
2966 if (jump_over_else)
2967 {
ca80e52b 2968 tree last = expr_last (expr);
089efaa4 2969 t = build_and_jump (&end_label);
ca80e52b
EB
2970 if (EXPR_HAS_LOCATION (last))
2971 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
089efaa4
ILT
2972 append_to_statement_list (t, &expr);
2973 }
6de9cd9a
DN
2974 if (emit_false)
2975 {
2976 t = build1 (LABEL_EXPR, void_type_node, false_label);
2977 append_to_statement_list (t, &expr);
2978 }
2979 append_to_statement_list (else_, &expr);
2980 }
2981 if (emit_end && end_label)
2982 {
2983 t = build1 (LABEL_EXPR, void_type_node, end_label);
2984 append_to_statement_list (t, &expr);
2985 }
2986
2987 return expr;
2988}
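/* Illustrative sketch (not part of gimplify.c): the rewrite described above,
   shown on a concrete statement.  The C source

     if (a && b) c (); else d ();

   is first turned into the GENERIC equivalent of

     if (a) ; else goto no;
     if (b) ; else goto no;
     c (); goto end;
     no: d ();
     end: ;

   before each simple condition is gimplified.  Label names are assumptions.  */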
2989
2990/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
2991
50674e96 2992tree
6de9cd9a
DN
2993gimple_boolify (tree expr)
2994{
2995 tree type = TREE_TYPE (expr);
db3927fb 2996 location_t loc = EXPR_LOCATION (expr);
6de9cd9a 2997
554cf330
JJ
2998 if (TREE_CODE (expr) == NE_EXPR
2999 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
3000 && integer_zerop (TREE_OPERAND (expr, 1)))
3001 {
3002 tree call = TREE_OPERAND (expr, 0);
3003 tree fn = get_callee_fndecl (call);
3004
d53c73e0
JJ
3005 /* For __builtin_expect ((long) (x), y) recurse into x as well
3006 if x is truth_value_p. */
554cf330
JJ
3007 if (fn
3008 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
3009 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
3010 && call_expr_nargs (call) == 2)
3011 {
3012 tree arg = CALL_EXPR_ARG (call, 0);
3013 if (arg)
3014 {
3015 if (TREE_CODE (arg) == NOP_EXPR
3016 && TREE_TYPE (arg) == TREE_TYPE (call))
3017 arg = TREE_OPERAND (arg, 0);
d53c73e0
JJ
3018 if (truth_value_p (TREE_CODE (arg)))
3019 {
3020 arg = gimple_boolify (arg);
3021 CALL_EXPR_ARG (call, 0)
3022 = fold_convert_loc (loc, TREE_TYPE (call), arg);
3023 }
554cf330
JJ
3024 }
3025 }
3026 }
3027
6de9cd9a
DN
3028 switch (TREE_CODE (expr))
3029 {
3030 case TRUTH_AND_EXPR:
3031 case TRUTH_OR_EXPR:
3032 case TRUTH_XOR_EXPR:
3033 case TRUTH_ANDIF_EXPR:
3034 case TRUTH_ORIF_EXPR:
3035 /* Also boolify the arguments of truth exprs. */
3036 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
3037 /* FALLTHRU */
3038
3039 case TRUTH_NOT_EXPR:
3040 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
6de9cd9a 3041
6de9cd9a 3042 /* These expressions always produce boolean results. */
7f3ff782
KT
3043 if (TREE_CODE (type) != BOOLEAN_TYPE)
3044 TREE_TYPE (expr) = boolean_type_node;
6de9cd9a 3045 return expr;
d3147f64 3046
8170608b 3047 case ANNOTATE_EXPR:
718c4601 3048 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
8170608b 3049 {
718c4601
EB
3050 case annot_expr_ivdep_kind:
3051 case annot_expr_no_vector_kind:
3052 case annot_expr_vector_kind:
8170608b
TB
3053 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3054 if (TREE_CODE (type) != BOOLEAN_TYPE)
3055 TREE_TYPE (expr) = boolean_type_node;
3056 return expr;
718c4601
EB
3057 default:
3058 gcc_unreachable ();
8170608b 3059 }
8170608b 3060
6de9cd9a 3061 default:
7f3ff782
KT
3062 if (COMPARISON_CLASS_P (expr))
3063 {
 3064 /* These expressions always produce boolean results. */
3065 if (TREE_CODE (type) != BOOLEAN_TYPE)
3066 TREE_TYPE (expr) = boolean_type_node;
3067 return expr;
3068 }
6de9cd9a
DN
3069 /* Other expressions that get here must have boolean values, but
3070 might need to be converted to the appropriate mode. */
7f3ff782 3071 if (TREE_CODE (type) == BOOLEAN_TYPE)
1d15f620 3072 return expr;
db3927fb 3073 return fold_convert_loc (loc, boolean_type_node, expr);
6de9cd9a
DN
3074 }
3075}
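/* Illustrative sketch (not part of gimplify.c): given an int-valued
   condition such as

     int x;  ...  if (x) ...

   gimple_boolify converts X to boolean_type_node (semantically x != 0), and
   truth operators like a && b get both operands boolified and their own type
   forced to boolean_type_node.  When __builtin_expect ((long) (a == b), 1)
   is used as a condition, i.e. compared against zero, the comparison inside
   its first argument is boolified as well, as handled above.  */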
3076
aea74440
JJ
3077/* Given a conditional expression *EXPR_P without side effects, gimplify
3078 its operands. New statements are inserted to PRE_P. */
3079
3080static enum gimplify_status
726a989a 3081gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
aea74440
JJ
3082{
3083 tree expr = *expr_p, cond;
3084 enum gimplify_status ret, tret;
3085 enum tree_code code;
3086
3087 cond = gimple_boolify (COND_EXPR_COND (expr));
3088
3089 /* We need to handle && and || specially, as their gimplification
3090 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
3091 code = TREE_CODE (cond);
3092 if (code == TRUTH_ANDIF_EXPR)
3093 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3094 else if (code == TRUTH_ORIF_EXPR)
3095 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
726a989a 3096 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
aea74440
JJ
3097 COND_EXPR_COND (*expr_p) = cond;
3098
3099 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3100 is_gimple_val, fb_rvalue);
3101 ret = MIN (ret, tret);
3102 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3103 is_gimple_val, fb_rvalue);
3104
3105 return MIN (ret, tret);
3106}
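/* Illustrative sketch (not part of gimplify.c): in contexts where
   gimplify_ctxp->allow_rhs_cond_expr is set, a value-producing COND_EXPR
   whose arms have no side effects and cannot trap, e.g. p ? a : b with
   simple A and B, is kept as a conditional rvalue here: the predicate is
   boolified, && and || inside it are demoted to TRUTH_AND_EXPR and
   TRUTH_OR_EXPR to avoid re-creating a short-circuit COND_EXPR, and the
   three operands are reduced to is_gimple_val form instead of the whole
   expression being expanded into branches.  */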
3107
ad19c4be 3108/* Return true if evaluating EXPR could trap.
aea74440
JJ
3109 EXPR is GENERIC, while tree_could_trap_p can be called
3110 only on GIMPLE. */
3111
3112static bool
3113generic_expr_could_trap_p (tree expr)
3114{
3115 unsigned i, n;
3116
3117 if (!expr || is_gimple_val (expr))
3118 return false;
3119
3120 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3121 return true;
3122
3123 n = TREE_OPERAND_LENGTH (expr);
3124 for (i = 0; i < n; i++)
3125 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3126 return true;
3127
3128 return false;
3129}
3130
206048bd 3131/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
6de9cd9a
DN
3132 into
3133
3134 if (p) if (p)
3135 t1 = a; a;
3136 else or else
3137 t1 = b; b;
3138 t1;
3139
3140 The second form is used when *EXPR_P is of type void.
3141
3142 PRE_P points to the list where side effects that must happen before
dae7ec87 3143 *EXPR_P should be stored. */
6de9cd9a
DN
3144
3145static enum gimplify_status
726a989a 3146gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
6de9cd9a
DN
3147{
3148 tree expr = *expr_p;
06ec59e6
EB
3149 tree type = TREE_TYPE (expr);
3150 location_t loc = EXPR_LOCATION (expr);
3151 tree tmp, arm1, arm2;
6de9cd9a 3152 enum gimplify_status ret;
726a989a
RB
3153 tree label_true, label_false, label_cont;
3154 bool have_then_clause_p, have_else_clause_p;
538dd0b7 3155 gcond *cond_stmt;
726a989a
RB
3156 enum tree_code pred_code;
3157 gimple_seq seq = NULL;
26d44ae2
RH
3158
3159 /* If this COND_EXPR has a value, copy the values into a temporary within
3160 the arms. */
06ec59e6 3161 if (!VOID_TYPE_P (type))
26d44ae2 3162 {
06ec59e6 3163 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
aff98faf
AO
3164 tree result;
3165
06ec59e6
EB
3166 /* If either an rvalue is ok or we do not require an lvalue, create the
3167 temporary. But we cannot do that if the type is addressable. */
3168 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
c3e203cf 3169 && !TREE_ADDRESSABLE (type))
aff98faf 3170 {
aea74440
JJ
3171 if (gimplify_ctxp->allow_rhs_cond_expr
3172 /* If either branch has side effects or could trap, it can't be
3173 evaluated unconditionally. */
06ec59e6
EB
3174 && !TREE_SIDE_EFFECTS (then_)
3175 && !generic_expr_could_trap_p (then_)
3176 && !TREE_SIDE_EFFECTS (else_)
3177 && !generic_expr_could_trap_p (else_))
aea74440
JJ
3178 return gimplify_pure_cond_expr (expr_p, pre_p);
3179
06ec59e6
EB
3180 tmp = create_tmp_var (type, "iftmp");
3181 result = tmp;
aff98faf 3182 }
06ec59e6
EB
3183
3184 /* Otherwise, only create and copy references to the values. */
26d44ae2
RH
3185 else
3186 {
06ec59e6 3187 type = build_pointer_type (type);
aff98faf 3188
06ec59e6
EB
3189 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3190 then_ = build_fold_addr_expr_loc (loc, then_);
aff98faf 3191
06ec59e6
EB
3192 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3193 else_ = build_fold_addr_expr_loc (loc, else_);
3194
3195 expr
3196 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
aea74440 3197
726a989a 3198 tmp = create_tmp_var (type, "iftmp");
70f34814 3199 result = build_simple_mem_ref_loc (loc, tmp);
26d44ae2
RH
3200 }
3201
06ec59e6
EB
3202 /* Build the new then clause, `tmp = then_;'. But don't build the
3203 assignment if the value is void; in C++ it can be if it's a throw. */
3204 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3205 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
26d44ae2 3206
06ec59e6
EB
3207 /* Similarly, build the new else clause, `tmp = else_;'. */
3208 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3209 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
26d44ae2
RH
3210
3211 TREE_TYPE (expr) = void_type_node;
3212 recalculate_side_effects (expr);
3213
d91ba7b0 3214 /* Move the COND_EXPR to the prequeue. */
726a989a 3215 gimplify_stmt (&expr, pre_p);
26d44ae2 3216
aff98faf 3217 *expr_p = result;
726a989a 3218 return GS_ALL_DONE;
26d44ae2
RH
3219 }
3220
f2f81d57
EB
3221 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
3222 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
3223 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
3224 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
3225
26d44ae2
RH
3226 /* Make sure the condition has BOOLEAN_TYPE. */
3227 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3228
3229 /* Break apart && and || conditions. */
3230 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
3231 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
3232 {
3233 expr = shortcut_cond_expr (expr);
3234
3235 if (expr != *expr_p)
3236 {
3237 *expr_p = expr;
3238
3239 /* We can't rely on gimplify_expr to re-gimplify the expanded
3240 form properly, as cleanups might cause the target labels to be
3241 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
3242 set up a conditional context. */
3243 gimple_push_condition ();
726a989a 3244 gimplify_stmt (expr_p, &seq);
26d44ae2 3245 gimple_pop_condition (pre_p);
726a989a 3246 gimple_seq_add_seq (pre_p, seq);
26d44ae2
RH
3247
3248 return GS_ALL_DONE;
3249 }
3250 }
3251
3252 /* Now do the normal gimplification. */
26d44ae2 3253
726a989a
RB
3254 /* Gimplify condition. */
3255 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
3256 fb_rvalue);
26d44ae2 3257 if (ret == GS_ERROR)
726a989a
RB
3258 return GS_ERROR;
3259 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
3260
3261 gimple_push_condition ();
26d44ae2 3262
726a989a
RB
3263 have_then_clause_p = have_else_clause_p = false;
3264 if (TREE_OPERAND (expr, 1) != NULL
3265 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
3266 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
3267 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
3268 == current_function_decl)
3269 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3270 have different locations, otherwise we end up with incorrect
3271 location information on the branches. */
3272 && (optimize
3273 || !EXPR_HAS_LOCATION (expr)
3274 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3275 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
3276 {
3277 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
3278 have_then_clause_p = true;
26d44ae2
RH
3279 }
3280 else
c2255bc4 3281 label_true = create_artificial_label (UNKNOWN_LOCATION);
726a989a
RB
3282 if (TREE_OPERAND (expr, 2) != NULL
3283 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3284 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3285 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3286 == current_function_decl)
3287 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3288 have different locations, otherwise we end up with incorrect
3289 location information on the branches. */
3290 && (optimize
3291 || !EXPR_HAS_LOCATION (expr)
3292 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3293 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3294 {
3295 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3296 have_else_clause_p = true;
3297 }
3298 else
c2255bc4 3299 label_false = create_artificial_label (UNKNOWN_LOCATION);
26d44ae2 3300
726a989a
RB
3301 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3302 &arm2);
538dd0b7 3303 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
c3bea076 3304 label_false);
932c0da4 3305 gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
538dd0b7 3306 gimplify_seq_add_stmt (&seq, cond_stmt);
c3bea076
RB
3307 gimple_stmt_iterator gsi = gsi_last (seq);
3308 maybe_fold_stmt (&gsi);
3309
726a989a
RB
3310 label_cont = NULL_TREE;
3311 if (!have_then_clause_p)
3312 {
3313 /* For if (...) {} else { code; } put label_true after
3314 the else block. */
3315 if (TREE_OPERAND (expr, 1) == NULL_TREE
3316 && !have_else_clause_p
3317 && TREE_OPERAND (expr, 2) != NULL_TREE)
3318 label_cont = label_true;
3319 else
3320 {
3321 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
3322 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
3323 /* For if (...) { code; } else {} or
3324 if (...) { code; } else goto label; or
3325 if (...) { code; return; } else { ... }
3326 label_cont isn't needed. */
3327 if (!have_else_clause_p
3328 && TREE_OPERAND (expr, 2) != NULL_TREE
3329 && gimple_seq_may_fallthru (seq))
3330 {
355fe088 3331 gimple *g;
c2255bc4 3332 label_cont = create_artificial_label (UNKNOWN_LOCATION);
726a989a
RB
3333
3334 g = gimple_build_goto (label_cont);
3335
3336 /* GIMPLE_COND's are very low level; they have embedded
3337 gotos. This particular embedded goto should not be marked
3338 with the location of the original COND_EXPR, as it would
3339 correspond to the COND_EXPR's condition, not the ELSE or the
3340 THEN arms. To avoid marking it with the wrong location, flag
3341 it as "no location". */
3342 gimple_set_do_not_emit_location (g);
3343
3344 gimplify_seq_add_stmt (&seq, g);
3345 }
3346 }
3347 }
3348 if (!have_else_clause_p)
3349 {
3350 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
3351 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
3352 }
3353 if (label_cont)
3354 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
3355
3356 gimple_pop_condition (pre_p);
3357 gimple_seq_add_seq (pre_p, seq);
3358
3359 if (ret == GS_ERROR)
3360 ; /* Do nothing. */
3361 else if (have_then_clause_p || have_else_clause_p)
3362 ret = GS_ALL_DONE;
3363 else
3364 {
3365 /* Both arms are empty; replace the COND_EXPR with its predicate. */
3366 expr = TREE_OPERAND (expr, 0);
3367 gimplify_stmt (&expr, pre_p);
3368 }
3369
3370 *expr_p = NULL;
3371 return ret;
3372}
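/* Worked example (approximate): a short-circuit condition such as

     if (a && b)
       body;

   arrives as a COND_EXPR whose condition is a TRUTH_ANDIF_EXPR.
   shortcut_cond_expr rewrites it into nested conditionals, conceptually

     if (a)
       {
         if (b)
           body;
       }

   and the rewritten form is re-gimplified under gimple_push_condition so
   that the labels and embedded gotos of the resulting GIMPLE_CONDs are
   laid out correctly even in the presence of cleanups.  */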
3373
f76d6e6f
EB
3374/* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3375 to be marked addressable.
3376
3377 We cannot rely on such an expression being directly markable if a temporary
3378 has been created by the gimplification. In this case, we create another
3379 temporary and initialize it with a copy, which will become a store after we
3380 mark it addressable. This can happen if the front-end passed us something
3381 that it could not mark addressable yet, such as the conversion
3382 (int) floatvar passed by reference in Fortran. */
3383
3384static void
3385prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3386{
3387 while (handled_component_p (*expr_p))
3388 expr_p = &TREE_OPERAND (*expr_p, 0);
3389 if (is_gimple_reg (*expr_p))
947ca6a0 3390 {
381cdae4
RB
3391 /* Do not allow an SSA name as the temporary. */
3392 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
947ca6a0
RB
3393 DECL_GIMPLE_REG_P (var) = 0;
3394 *expr_p = var;
3395 }
f76d6e6f
EB
3396}
3397
726a989a
RB
3398/* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3399 a call to __builtin_memcpy. */
3400
3401static enum gimplify_status
3402gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3403 gimple_seq *seq_p)
26d44ae2 3404{
5039610b 3405 tree t, to, to_ptr, from, from_ptr;
538dd0b7 3406 gcall *gs;
db3927fb 3407 location_t loc = EXPR_LOCATION (*expr_p);
26d44ae2 3408
726a989a
RB
3409 to = TREE_OPERAND (*expr_p, 0);
3410 from = TREE_OPERAND (*expr_p, 1);
26d44ae2 3411
f76d6e6f
EB
3412 /* Mark the RHS addressable. Beware that it may not be possible to do so
3413 directly if a temporary has been created by the gimplification. */
3414 prepare_gimple_addressable (&from, seq_p);
3415
628c189e 3416 mark_addressable (from);
db3927fb
AH
3417 from_ptr = build_fold_addr_expr_loc (loc, from);
3418 gimplify_arg (&from_ptr, seq_p, loc);
26d44ae2 3419
628c189e 3420 mark_addressable (to);
db3927fb
AH
3421 to_ptr = build_fold_addr_expr_loc (loc, to);
3422 gimplify_arg (&to_ptr, seq_p, loc);
726a989a 3423
e79983f4 3424 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
726a989a
RB
3425
3426 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
26d44ae2
RH
3427
3428 if (want_value)
3429 {
726a989a 3430 /* tmp = memcpy() */
b731b390 3431 t = create_tmp_var (TREE_TYPE (to_ptr));
726a989a
RB
3432 gimple_call_set_lhs (gs, t);
3433 gimplify_seq_add_stmt (seq_p, gs);
3434
70f34814 3435 *expr_p = build_simple_mem_ref (t);
726a989a 3436 return GS_ALL_DONE;
26d44ae2
RH
3437 }
3438
726a989a
RB
3439 gimplify_seq_add_stmt (seq_p, gs);
3440 *expr_p = NULL;
3441 return GS_ALL_DONE;
26d44ae2
RH
3442}
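/* Illustrative sketch: for an assignment whose length is only known at run
   time (SIZE carries the value computed by the earlier WITH_SIZE_EXPR),
   the MODIFY_EXPR "to = from" is replaced by, roughly,

     __builtin_memcpy (&to, &from, size);

   and, when the value of the assignment is still needed, by "*tmp" where
   tmp is a temporary holding the pointer returned by the call.  */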
3443
3444/* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3445 a call to __builtin_memset. In this case we know that the RHS is
3446 a CONSTRUCTOR with an empty element list. */
3447
3448static enum gimplify_status
726a989a
RB
3449gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3450 gimple_seq *seq_p)
26d44ae2 3451{
1a13360e 3452 tree t, from, to, to_ptr;
538dd0b7 3453 gcall *gs;
db3927fb 3454 location_t loc = EXPR_LOCATION (*expr_p);
26d44ae2 3455
1a13360e
OH
3456 /* Assert our assumptions, to abort instead of producing wrong code
3457 silently if they are not met. Beware that the RHS CONSTRUCTOR might
3458 not be immediately exposed. */
b8698a0f 3459 from = TREE_OPERAND (*expr_p, 1);
1a13360e
OH
3460 if (TREE_CODE (from) == WITH_SIZE_EXPR)
3461 from = TREE_OPERAND (from, 0);
3462
3463 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
9771b263 3464 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
1a13360e
OH
3465
3466 /* Now proceed. */
726a989a 3467 to = TREE_OPERAND (*expr_p, 0);
26d44ae2 3468
db3927fb
AH
3469 to_ptr = build_fold_addr_expr_loc (loc, to);
3470 gimplify_arg (&to_ptr, seq_p, loc);
e79983f4 3471 t = builtin_decl_implicit (BUILT_IN_MEMSET);
726a989a
RB
3472
3473 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
26d44ae2
RH
3474
3475 if (want_value)
3476 {
726a989a 3477 /* tmp = memset() */
b731b390 3478 t = create_tmp_var (TREE_TYPE (to_ptr));
726a989a
RB
3479 gimple_call_set_lhs (gs, t);
3480 gimplify_seq_add_stmt (seq_p, gs);
3481
3482 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3483 return GS_ALL_DONE;
26d44ae2
RH
3484 }
3485
726a989a
RB
3486 gimplify_seq_add_stmt (seq_p, gs);
3487 *expr_p = NULL;
3488 return GS_ALL_DONE;
26d44ae2
RH
3489}
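/* Illustrative sketch: when the RHS is an empty CONSTRUCTOR, i.e. the
   object is simply being cleared, the assignment becomes, roughly,

     __builtin_memset (&to, 0, size);

   again with an INDIRECT_REF of the returned pointer standing in for the
   value of the assignment if one is requested.  */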
3490
57d1dd87
RH
3491/* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
3492 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
ad19c4be 3493 assignment. Return non-null if we detect a potential overlap. */
57d1dd87
RH
3494
3495struct gimplify_init_ctor_preeval_data
3496{
3497 /* The base decl of the lhs object. May be NULL, in which case we
3498 have to assume the lhs is indirect. */
3499 tree lhs_base_decl;
3500
3501 /* The alias set of the lhs object. */
4862826d 3502 alias_set_type lhs_alias_set;
57d1dd87
RH
3503};
3504
3505static tree
3506gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3507{
3508 struct gimplify_init_ctor_preeval_data *data
3509 = (struct gimplify_init_ctor_preeval_data *) xdata;
3510 tree t = *tp;
3511
3512 /* If we find the base object, obviously we have overlap. */
3513 if (data->lhs_base_decl == t)
3514 return t;
3515
3516 /* If the constructor component is indirect, determine if we have a
3517 potential overlap with the lhs. The only bits of information we
3518 have to go on at this point are addressability and alias sets. */
70f34814
RG
3519 if ((INDIRECT_REF_P (t)
3520 || TREE_CODE (t) == MEM_REF)
57d1dd87
RH
3521 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3522 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3523 return t;
3524
df10ee2a 3525 /* If the constructor component is a call, determine if it can hide a
70f34814
RG
3526 potential overlap with the lhs through an INDIRECT_REF like above.
3527 ??? Ugh - this is completely broken. In fact this whole analysis
3528 doesn't look conservative. */
df10ee2a
EB
3529 if (TREE_CODE (t) == CALL_EXPR)
3530 {
3531 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3532
3533 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3534 if (POINTER_TYPE_P (TREE_VALUE (type))
3535 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3536 && alias_sets_conflict_p (data->lhs_alias_set,
3537 get_alias_set
3538 (TREE_TYPE (TREE_VALUE (type)))))
3539 return t;
3540 }
3541
6615c446 3542 if (IS_TYPE_OR_DECL_P (t))
57d1dd87
RH
3543 *walk_subtrees = 0;
3544 return NULL;
3545}
3546
726a989a 3547/* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
57d1dd87
RH
3548 force values that overlap with the lhs (as described by *DATA)
3549 into temporaries. */
3550
3551static void
726a989a 3552gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
57d1dd87
RH
3553 struct gimplify_init_ctor_preeval_data *data)
3554{
3555 enum gimplify_status one;
3556
51eed280
PB
3557 /* If the value is constant, then there's nothing to pre-evaluate. */
3558 if (TREE_CONSTANT (*expr_p))
3559 {
3560 /* Ensure it does not have side effects, it might contain a reference to
3561 the object we're initializing. */
3562 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
3563 return;
3564 }
57d1dd87
RH
3565
3566 /* If the type has non-trivial constructors, we can't pre-evaluate. */
3567 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
3568 return;
3569
3570 /* Recurse for nested constructors. */
3571 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
3572 {
4038c495
GB
3573 unsigned HOST_WIDE_INT ix;
3574 constructor_elt *ce;
9771b263 3575 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4038c495 3576
9771b263 3577 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4038c495 3578 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
726a989a 3579
57d1dd87
RH
3580 return;
3581 }
3582
0461b801
EB
3583 /* If this is a variable sized type, we must remember the size. */
3584 maybe_with_size_expr (expr_p);
57d1dd87
RH
3585
3586 /* Gimplify the constructor element to something appropriate for the rhs
726a989a 3587 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
d3147f64 3588 the gimplifier will consider this a store to memory. Doing this
57d1dd87
RH
3589 gimplification now means that we won't have to deal with complicated
3590 language-specific trees, nor trees like SAVE_EXPR that can induce
b01d837f 3591 exponential search behavior. */
57d1dd87
RH
3592 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
3593 if (one == GS_ERROR)
3594 {
3595 *expr_p = NULL;
3596 return;
3597 }
3598
3599 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
3600 with the lhs, since "a = { .x=a }" doesn't make sense. This will
3601 always be true for all scalars, since is_gimple_mem_rhs insists on a
3602 temporary variable for them. */
3603 if (DECL_P (*expr_p))
3604 return;
3605
3606 /* If this is of variable size, we have no choice but to assume it doesn't
3607 overlap since we can't make a temporary for it. */
4c923c28 3608 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
57d1dd87
RH
3609 return;
3610
3611 /* Otherwise, we must search for overlap ... */
3612 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
3613 return;
3614
3615 /* ... and if found, force the value into a temporary. */
3616 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
3617}
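/* Worked example (hypothetical variables): in

     struct S { int x, y; } a;
     a = (struct S) { .x = a.y + 1, .y = 0 };

   the element "a.y + 1" reads the object being initialized, so the walk
   above reports an overlap and the value is forced into a temporary,
   conceptually

     t = a.y + 1;
     a.x = t;
     a.y = 0;

   so that the later per-field stores cannot clobber an operand that is
   still needed.  */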
3618
6fa91b48
SB
3619/* A subroutine of gimplify_init_ctor_eval. Create a loop for
3620 a RANGE_EXPR in a CONSTRUCTOR for an array.
3621
3622 var = lower;
3623 loop_entry:
3624 object[var] = value;
3625 if (var == upper)
3626 goto loop_exit;
3627 var = var + 1;
3628 goto loop_entry;
3629 loop_exit:
3630
3631 We increment var _after_ the loop exit check because, if UPPER equals
3632 TYPE_MAX_VALUE of its type, an increment before the check would wrap.
3633
3634 Note that we never have to deal with SAVE_EXPRs here, because this has
3635 already been taken care of for us, in gimplify_init_ctor_preeval(). */
3636
9771b263 3637static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
726a989a 3638 gimple_seq *, bool);
6fa91b48
SB
3639
3640static void
3641gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3642 tree value, tree array_elt_type,
726a989a 3643 gimple_seq *pre_p, bool cleared)
6fa91b48 3644{
726a989a 3645 tree loop_entry_label, loop_exit_label, fall_thru_label;
b56b9fe3 3646 tree var, var_type, cref, tmp;
6fa91b48 3647
c2255bc4
AH
3648 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
3649 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
3650 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
6fa91b48
SB
3651
3652 /* Create and initialize the index variable. */
3653 var_type = TREE_TYPE (upper);
b731b390 3654 var = create_tmp_var (var_type);
726a989a 3655 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
6fa91b48
SB
3656
3657 /* Add the loop entry label. */
726a989a 3658 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
6fa91b48
SB
3659
3660 /* Build the reference. */
3661 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3662 var, NULL_TREE, NULL_TREE);
3663
3664 /* If we are a constructor, just call gimplify_init_ctor_eval to do
3665 the store. Otherwise just assign value to the reference. */
3666
3667 if (TREE_CODE (value) == CONSTRUCTOR)
3668 /* NB we might have to call ourself recursively through
3669 gimplify_init_ctor_eval if the value is a constructor. */
3670 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3671 pre_p, cleared);
3672 else
726a989a 3673 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
6fa91b48
SB
3674
3675 /* We exit the loop when the index var is equal to the upper bound. */
726a989a
RB
3676 gimplify_seq_add_stmt (pre_p,
3677 gimple_build_cond (EQ_EXPR, var, upper,
3678 loop_exit_label, fall_thru_label));
3679
3680 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
6fa91b48
SB
3681
3682 /* Otherwise, increment the index var... */
b56b9fe3
RS
3683 tmp = build2 (PLUS_EXPR, var_type, var,
3684 fold_convert (var_type, integer_one_node));
726a989a 3685 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
6fa91b48
SB
3686
3687 /* ...and jump back to the loop entry. */
726a989a 3688 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
6fa91b48
SB
3689
3690 /* Add the loop exit label. */
726a989a 3691 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
6fa91b48
SB
3692}
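/* Illustration: the GNU C range designator in

     int v[8] = { [2 ... 5] = n };

   is represented as a RANGE_EXPR with lower bound 2 and upper bound 5 in
   the CONSTRUCTOR, and (for a non-constant value like n) is emitted as the
   loop shown above instead of four separate element stores.  */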
3693
292a398f 3694/* Return true if FDECL is accessing a field that is zero sized. */
b8698a0f 3695
292a398f 3696static bool
22ea9ec0 3697zero_sized_field_decl (const_tree fdecl)
292a398f 3698{
b8698a0f 3699 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
292a398f
DB
3700 && integer_zerop (DECL_SIZE (fdecl)))
3701 return true;
3702 return false;
3703}
3704
d06526b7 3705/* Return true if TYPE is zero sized. */
b8698a0f 3706
d06526b7 3707static bool
22ea9ec0 3708zero_sized_type (const_tree type)
d06526b7
AP
3709{
3710 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3711 && integer_zerop (TYPE_SIZE (type)))
3712 return true;
3713 return false;
3714}
3715
57d1dd87
RH
3716/* A subroutine of gimplify_init_constructor. Generate individual
3717 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4038c495 3718 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
57d1dd87
RH
3719 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
3720 zeroed first. */
3721
3722static void
9771b263 3723gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
726a989a 3724 gimple_seq *pre_p, bool cleared)
57d1dd87
RH
3725{
3726 tree array_elt_type = NULL;
4038c495
GB
3727 unsigned HOST_WIDE_INT ix;
3728 tree purpose, value;
57d1dd87
RH
3729
3730 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
3731 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
3732
4038c495 3733 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
57d1dd87 3734 {
726a989a 3735 tree cref;
57d1dd87
RH
3736
3737 /* NULL values are created above for gimplification errors. */
3738 if (value == NULL)
3739 continue;
3740
3741 if (cleared && initializer_zerop (value))
3742 continue;
3743
6fa91b48
SB
3744 /* ??? Here's to hoping the front end fills in all of the indices,
3745 so we don't have to figure out what's missing ourselves. */
3746 gcc_assert (purpose);
3747
816fa80a
OH
3748 /* Skip zero-sized fields, unless value has side-effects. This can
3749 happen with calls to functions returning a zero-sized type, which
3750 we shouldn't discard. As a number of downstream passes don't
3751 expect sets of zero-sized fields, we rely on the gimplification of
3752 the MODIFY_EXPR we make below to drop the assignment statement. */
3753 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
292a398f
DB
3754 continue;
3755
6fa91b48
SB
3756 /* If we have a RANGE_EXPR, we have to build a loop to assign the
3757 whole range. */
3758 if (TREE_CODE (purpose) == RANGE_EXPR)
57d1dd87 3759 {
6fa91b48
SB
3760 tree lower = TREE_OPERAND (purpose, 0);
3761 tree upper = TREE_OPERAND (purpose, 1);
3762
3763 /* If the lower bound is equal to upper, just treat it as if
3764 upper was the index. */
3765 if (simple_cst_equal (lower, upper))
3766 purpose = upper;
3767 else
3768 {
3769 gimplify_init_ctor_eval_range (object, lower, upper, value,
3770 array_elt_type, pre_p, cleared);
3771 continue;
3772 }
3773 }
57d1dd87 3774
6fa91b48
SB
3775 if (array_elt_type)
3776 {
1a1640db
RG
3777 /* Do not use bitsizetype for ARRAY_REF indices. */
3778 if (TYPE_DOMAIN (TREE_TYPE (object)))
ad19c4be
EB
3779 purpose
3780 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
3781 purpose);
b4257cfc
RG
3782 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3783 purpose, NULL_TREE, NULL_TREE);
57d1dd87
RH
3784 }
3785 else
cf0efa6a
ILT
3786 {
3787 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
b4257cfc
RG
3788 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
3789 unshare_expr (object), purpose, NULL_TREE);
cf0efa6a 3790 }
57d1dd87 3791
cf0efa6a
ILT
3792 if (TREE_CODE (value) == CONSTRUCTOR
3793 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
57d1dd87
RH
3794 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3795 pre_p, cleared);
3796 else
3797 {
726a989a 3798 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
57d1dd87 3799 gimplify_and_add (init, pre_p);
726a989a 3800 ggc_free (init);
57d1dd87
RH
3801 }
3802 }
3803}
3804
ad19c4be 3805/* Return the appropriate RHS predicate for this LHS. */
726a989a 3806
18f429e2 3807gimple_predicate
726a989a
RB
3808rhs_predicate_for (tree lhs)
3809{
ba4d8f9d
RG
3810 if (is_gimple_reg (lhs))
3811 return is_gimple_reg_rhs_or_call;
726a989a 3812 else
ba4d8f9d 3813 return is_gimple_mem_rhs_or_call;
726a989a
RB
3814}
3815
8a1b7b7f
JM
3816/* Return the initial guess for an appropriate RHS predicate for this LHS,
3817 before the LHS has been gimplified. */
3818
3819static gimple_predicate
3820initial_rhs_predicate_for (tree lhs)
3821{
3822 if (is_gimple_reg_type (TREE_TYPE (lhs)))
3823 return is_gimple_reg_rhs_or_call;
3824 else
3825 return is_gimple_mem_rhs_or_call;
3826}
3827
2ec5deb5
PB
3828/* Gimplify a C99 compound literal expression. This just means adding
3829 the DECL_EXPR before the current statement and using its anonymous
3830 decl instead. */
3831
3832static enum gimplify_status
a845a7f5 3833gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
4c53d183 3834 bool (*gimple_test_f) (tree),
a845a7f5 3835 fallback_t fallback)
2ec5deb5
PB
3836{
3837 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
3838 tree decl = DECL_EXPR_DECL (decl_s);
4c53d183 3839 tree init = DECL_INITIAL (decl);
2ec5deb5
PB
3840 /* Mark the decl as addressable if the compound literal
3841 expression is addressable now, otherwise it is marked too late
3842 after we gimplify the initialization expression. */
3843 if (TREE_ADDRESSABLE (*expr_p))
3844 TREE_ADDRESSABLE (decl) = 1;
4c53d183
MM
3845 /* Otherwise, if we don't need an lvalue and have a literal directly
3846 substitute it. Check if it matches the gimple predicate, as
3847 otherwise we'd generate a new temporary, and we can as well just
3848 use the decl we already have. */
3849 else if (!TREE_ADDRESSABLE (decl)
3850 && init
3851 && (fallback & fb_lvalue) == 0
3852 && gimple_test_f (init))
3853 {
3854 *expr_p = init;
3855 return GS_OK;
3856 }
2ec5deb5
PB
3857
3858 /* Preliminarily mark non-addressed complex variables as eligible
3859 for promotion to gimple registers. We'll transform their uses
3860 as we find them. */
3861 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
3862 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
3863 && !TREE_THIS_VOLATILE (decl)
3864 && !needs_to_live_in_memory (decl))
3865 DECL_GIMPLE_REG_P (decl) = 1;
3866
a845a7f5
ILT
3867 /* If the decl is not addressable, then it is being used in some
3868 expression or on the right hand side of a statement, and it can
3869 be put into a readonly data section. */
3870 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
3871 TREE_READONLY (decl) = 1;
3872
2ec5deb5
PB
3873 /* This decl isn't mentioned in the enclosing block, so add it to the
3874 list of temps. FIXME it seems a bit of a kludge to say that
3875 anonymous artificial vars aren't pushed, but everything else is. */
3876 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
3877 gimple_add_tmp_var (decl);
3878
3879 gimplify_and_add (decl_s, pre_p);
3880 *expr_p = decl;
3881 return GS_OK;
3882}
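/* Illustrative sketch (hypothetical code): for the C99 compound literal in

     int *p = (int []) { 1, 2, 3 };

   the DECL_EXPR of the literal's anonymous object is emitted before the
   current statement and the expression is replaced by that object, so the
   result is conceptually

     <anonymous array> D.1 = { 1, 2, 3 };
     p = &D.1;

   When the literal is not addressable and a plain rvalue suffices, its
   initializer may be substituted directly instead, as done above.  */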
3883
3884/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3885 return a new CONSTRUCTOR if something changed. */
3886
3887static tree
3888optimize_compound_literals_in_ctor (tree orig_ctor)
3889{
3890 tree ctor = orig_ctor;
9771b263
DN
3891 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
3892 unsigned int idx, num = vec_safe_length (elts);
2ec5deb5
PB
3893
3894 for (idx = 0; idx < num; idx++)
3895 {
9771b263 3896 tree value = (*elts)[idx].value;
2ec5deb5
PB
3897 tree newval = value;
3898 if (TREE_CODE (value) == CONSTRUCTOR)
3899 newval = optimize_compound_literals_in_ctor (value);
3900 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3901 {
3902 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3903 tree decl = DECL_EXPR_DECL (decl_s);
3904 tree init = DECL_INITIAL (decl);
3905
3906 if (!TREE_ADDRESSABLE (value)
3907 && !TREE_ADDRESSABLE (decl)
6f8f67e9
JJ
3908 && init
3909 && TREE_CODE (init) == CONSTRUCTOR)
2ec5deb5
PB
3910 newval = optimize_compound_literals_in_ctor (init);
3911 }
3912 if (newval == value)
3913 continue;
3914
3915 if (ctor == orig_ctor)
3916 {
3917 ctor = copy_node (orig_ctor);
9771b263 3918 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
2ec5deb5
PB
3919 elts = CONSTRUCTOR_ELTS (ctor);
3920 }
9771b263 3921 (*elts)[idx].value = newval;
2ec5deb5
PB
3922 }
3923 return ctor;
3924}
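/* Illustration (hypothetical types): in an initializer such as

     struct pair { int a, b; };
     struct outer { struct pair p; } o = { .p = (struct pair) { 1, 2 } };

   where the embedded compound literal is never addressed, the literal is
   replaced by its own CONSTRUCTOR, so the CONSTRUCTOR for o directly
   contains { 1, 2 } and no separate temporary is created for it.  */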
3925
26d44ae2
RH
3926/* A subroutine of gimplify_modify_expr. Break out elements of a
3927 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3928
3929 Note that we still need to clear any elements that don't have explicit
3930 initializers, so if not all elements are initialized we keep the
ffed8a01
AH
3931 original MODIFY_EXPR, we just remove all of the constructor elements.
3932
3933 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3934 GS_ERROR if we would have to create a temporary when gimplifying
3935 this constructor. Otherwise, return GS_OK.
3936
3937 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
26d44ae2
RH
3938
3939static enum gimplify_status
726a989a
RB
3940gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3941 bool want_value, bool notify_temp_creation)
26d44ae2 3942{
f5a1f0d0 3943 tree object, ctor, type;
26d44ae2 3944 enum gimplify_status ret;
9771b263 3945 vec<constructor_elt, va_gc> *elts;
26d44ae2 3946
f5a1f0d0 3947 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
26d44ae2 3948
ffed8a01
AH
3949 if (!notify_temp_creation)
3950 {
726a989a 3951 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
ffed8a01
AH
3952 is_gimple_lvalue, fb_lvalue);
3953 if (ret == GS_ERROR)
3954 return ret;
3955 }
57d1dd87 3956
726a989a 3957 object = TREE_OPERAND (*expr_p, 0);
f5a1f0d0
PB
3958 ctor = TREE_OPERAND (*expr_p, 1) =
3959 optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
3960 type = TREE_TYPE (ctor);
3961 elts = CONSTRUCTOR_ELTS (ctor);
26d44ae2 3962 ret = GS_ALL_DONE;
726a989a 3963
26d44ae2
RH
3964 switch (TREE_CODE (type))
3965 {
3966 case RECORD_TYPE:
3967 case UNION_TYPE:
3968 case QUAL_UNION_TYPE:
3969 case ARRAY_TYPE:
3970 {
57d1dd87 3971 struct gimplify_init_ctor_preeval_data preeval_data;
953d0c90
RS
3972 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
3973 bool cleared, complete_p, valid_const_initializer;
26d44ae2
RH
3974
3975 /* Aggregate types must lower constructors to initialization of
3976 individual elements. The exception is that a CONSTRUCTOR node
3977 with no elements indicates zero-initialization of the whole. */
9771b263 3978 if (vec_safe_is_empty (elts))
ffed8a01
AH
3979 {
3980 if (notify_temp_creation)
3981 return GS_OK;
3982 break;
3983 }
b8698a0f 3984
fe24d485
OH
3985 /* Fetch information about the constructor to direct later processing.
3986 We might want to make static versions of it in various cases, and
3987 can only do so if it known to be a valid constant initializer. */
3988 valid_const_initializer
3989 = categorize_ctor_elements (ctor, &num_nonzero_elements,
953d0c90 3990 &num_ctor_elements, &complete_p);
26d44ae2
RH
3991
3992 /* If a const aggregate variable is being initialized, then it
3993 should never be a loss to promote the variable to be static. */
fe24d485 3994 if (valid_const_initializer
6f642f98 3995 && num_nonzero_elements > 1
26d44ae2 3996 && TREE_READONLY (object)
d0ea0759
SE
3997 && TREE_CODE (object) == VAR_DECL
3998 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
26d44ae2 3999 {
ffed8a01
AH
4000 if (notify_temp_creation)
4001 return GS_ERROR;
26d44ae2
RH
4002 DECL_INITIAL (object) = ctor;
4003 TREE_STATIC (object) = 1;
4004 if (!DECL_NAME (object))
4005 DECL_NAME (object) = create_tmp_var_name ("C");
4006 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
4007
4008 /* ??? C++ doesn't automatically append a .<number> to the
6bdf3519 4009 assembler name, and even when it does, it looks at FE private
26d44ae2
RH
4010 data structures to figure out what that number should be,
4011 which are not set for this variable. I suppose this is
4012 important for local statics for inline functions, which aren't
4013 "local" in the object file sense. So in order to get a unique
4014 TU-local symbol, we must invoke the lhd version now. */
4015 lhd_set_decl_assembler_name (object);
4016
4017 *expr_p = NULL_TREE;
4018 break;
4019 }
4020
cce70747
JC
4021 /* If there are "lots" of initialized elements, even discounting
4022 those that are not address constants (and thus *must* be
4023 computed at runtime), then partition the constructor into
4024 constant and non-constant parts. Block copy the constant
4025 parts in, then generate code for the non-constant parts. */
4026 /* TODO. There's code in cp/typeck.c to do this. */
4027
953d0c90
RS
4028 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
4029 /* store_constructor will ignore the clearing of variable-sized
4030 objects. Initializers for such objects must explicitly set
4031 every field that needs to be set. */
4032 cleared = false;
d368135f 4033 else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
953d0c90 4034 /* If the constructor isn't complete, clear the whole object
d368135f 4035 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
953d0c90
RS
4036
4037 ??? This ought not to be needed. For any element not present
4038 in the initializer, we should simply set them to zero. Except
4039 we'd need to *find* the elements that are not present, and that
4040 requires trickery to avoid quadratic compile-time behavior in
4041 large cases or excessive memory use in small cases. */
73ed17ff 4042 cleared = true;
953d0c90 4043 else if (num_ctor_elements - num_nonzero_elements
e04ad03d 4044 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
953d0c90
RS
4045 && num_nonzero_elements < num_ctor_elements / 4)
4046 /* If there are "lots" of zeros, it's more efficient to clear
4047 the memory and then set the nonzero elements. */
cce70747 4048 cleared = true;
953d0c90
RS
4049 else
4050 cleared = false;
cce70747 4051
26d44ae2
RH
4052 /* If there are "lots" of initialized elements, and all of them
4053 are valid address constants, then the entire initializer can
cce70747
JC
4054 be dropped to memory, and then memcpy'd out. Don't do this
4055 for sparse arrays, though, as it's more efficient to follow
4056 the standard CONSTRUCTOR behavior of memset followed by
8afd015a
JM
4057 individual element initialization. Also don't do this for small
4058 all-zero initializers (which aren't big enough to merit
4059 clearing), and don't try to make bitwise copies of
d5e254e1
IE
4060 TREE_ADDRESSABLE types.
4061
4062 We cannot apply such transformation when compiling chkp static
4063 initializer because creation of initializer image in the memory
4064 will require static initialization of bounds for it. It should
4065 result in another gimplification of similar initializer and we
4066 may fall into infinite loop. */
8afd015a
JM
4067 if (valid_const_initializer
4068 && !(cleared || num_nonzero_elements == 0)
d5e254e1
IE
4069 && !TREE_ADDRESSABLE (type)
4070 && (!current_function_decl
4071 || !lookup_attribute ("chkp ctor",
4072 DECL_ATTRIBUTES (current_function_decl))))
26d44ae2
RH
4073 {
4074 HOST_WIDE_INT size = int_size_in_bytes (type);
4075 unsigned int align;
4076
4077 /* ??? We can still get unbounded array types, at least
4078 from the C++ front end. This seems wrong, but attempt
4079 to work around it for now. */
4080 if (size < 0)
4081 {
4082 size = int_size_in_bytes (TREE_TYPE (object));
4083 if (size >= 0)
4084 TREE_TYPE (ctor) = type = TREE_TYPE (object);
4085 }
4086
4087 /* Find the maximum alignment we can assume for the object. */
4088 /* ??? Make use of DECL_OFFSET_ALIGN. */
4089 if (DECL_P (object))
4090 align = DECL_ALIGN (object);
4091 else
4092 align = TYPE_ALIGN (type);
4093
f301837e
EB
4094 /* Do a block move either if the size is so small as to make
4095 each individual move a sub-unit move on average, or if it
4096 is so large as to make individual moves inefficient. */
329ad380
JJ
4097 if (size > 0
4098 && num_nonzero_elements > 1
f301837e
EB
4099 && (size < num_nonzero_elements
4100 || !can_move_by_pieces (size, align)))
26d44ae2 4101 {
ffed8a01
AH
4102 if (notify_temp_creation)
4103 return GS_ERROR;
4104
46314d3e
EB
4105 walk_tree (&ctor, force_labels_r, NULL, NULL);
4106 ctor = tree_output_constant_def (ctor);
4107 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
4108 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
4109 TREE_OPERAND (*expr_p, 1) = ctor;
57d1dd87
RH
4110
4111 /* This is no longer an assignment of a CONSTRUCTOR, but
4112 we still may have processing to do on the LHS. So
4113 pretend we didn't do anything here to let that happen. */
4114 return GS_UNHANDLED;
26d44ae2
RH
4115 }
4116 }
4117
558af7ca
EB
4118 /* If the target is volatile, we have non-zero elements and more than
4119 one field to assign, initialize the target from a temporary. */
61c7cbf8
RG
4120 if (TREE_THIS_VOLATILE (object)
4121 && !TREE_ADDRESSABLE (type)
558af7ca 4122 && num_nonzero_elements > 0
9771b263 4123 && vec_safe_length (elts) > 1)
61c7cbf8 4124 {
b731b390 4125 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
61c7cbf8
RG
4126 TREE_OPERAND (*expr_p, 0) = temp;
4127 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
4128 *expr_p,
4129 build2 (MODIFY_EXPR, void_type_node,
4130 object, temp));
4131 return GS_OK;
4132 }
4133
ffed8a01
AH
4134 if (notify_temp_creation)
4135 return GS_OK;
4136
675c873b
EB
4137 /* If there are nonzero elements and if needed, pre-evaluate to capture
4138 elements overlapping with the lhs into temporaries. We must do this
4139 before clearing to fetch the values before they are zeroed-out. */
4140 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
85d89e76
OH
4141 {
4142 preeval_data.lhs_base_decl = get_base_address (object);
4143 if (!DECL_P (preeval_data.lhs_base_decl))
4144 preeval_data.lhs_base_decl = NULL;
4145 preeval_data.lhs_alias_set = get_alias_set (object);
4146
726a989a 4147 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
85d89e76
OH
4148 pre_p, post_p, &preeval_data);
4149 }
4150
2234a9cb
PP
4151 bool ctor_has_side_effects_p
4152 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
4153
26d44ae2
RH
4154 if (cleared)
4155 {
4156 /* Zap the CONSTRUCTOR element list, which simplifies this case.
4157 Note that we still have to gimplify, in order to handle the
57d1dd87 4158 case of variable sized types. Avoid shared tree structures. */
4038c495 4159 CONSTRUCTOR_ELTS (ctor) = NULL;
726a989a 4160 TREE_SIDE_EFFECTS (ctor) = 0;
57d1dd87 4161 object = unshare_expr (object);
726a989a 4162 gimplify_stmt (expr_p, pre_p);
26d44ae2
RH
4163 }
4164
6fa91b48 4165 /* If we have not block cleared the object, or if there are nonzero
2234a9cb
PP
4166 elements in the constructor, or if the constructor has side effects,
4167 add assignments to the individual scalar fields of the object. */
4168 if (!cleared
4169 || num_nonzero_elements > 0
4170 || ctor_has_side_effects_p)
85d89e76 4171 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
26d44ae2
RH
4172
4173 *expr_p = NULL_TREE;
4174 }
4175 break;
4176
4177 case COMPLEX_TYPE:
4178 {
4179 tree r, i;
4180
ffed8a01
AH
4181 if (notify_temp_creation)
4182 return GS_OK;
4183
26d44ae2 4184 /* Extract the real and imaginary parts out of the ctor. */
9771b263
DN
4185 gcc_assert (elts->length () == 2);
4186 r = (*elts)[0].value;
4187 i = (*elts)[1].value;
26d44ae2
RH
4188 if (r == NULL || i == NULL)
4189 {
e8160c9a 4190 tree zero = build_zero_cst (TREE_TYPE (type));
26d44ae2
RH
4191 if (r == NULL)
4192 r = zero;
4193 if (i == NULL)
4194 i = zero;
4195 }
4196
4197 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
4198 represent creation of a complex value. */
4199 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
4200 {
4201 ctor = build_complex (type, r, i);
4202 TREE_OPERAND (*expr_p, 1) = ctor;
4203 }
4204 else
4205 {
b4257cfc 4206 ctor = build2 (COMPLEX_EXPR, type, r, i);
26d44ae2 4207 TREE_OPERAND (*expr_p, 1) = ctor;
726a989a
RB
4208 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
4209 pre_p,
4210 post_p,
17ad5b5e
RH
4211 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
4212 fb_rvalue);
26d44ae2
RH
4213 }
4214 }
4215 break;
506e2710 4216
26d44ae2 4217 case VECTOR_TYPE:
4038c495
GB
4218 {
4219 unsigned HOST_WIDE_INT ix;
4220 constructor_elt *ce;
e89be13b 4221
ffed8a01
AH
4222 if (notify_temp_creation)
4223 return GS_OK;
4224
4038c495
GB
4225 /* Go ahead and simplify constant constructors to VECTOR_CST. */
4226 if (TREE_CONSTANT (ctor))
4227 {
4228 bool constant_p = true;
4229 tree value;
4230
4231 /* Even when ctor is constant, it might contain non-*_CST
9f1da821
RS
4232 elements, such as addresses or trapping values like
4233 1.0/0.0 - 1.0/0.0. Such expressions don't belong
4234 in VECTOR_CST nodes. */
4038c495
GB
4235 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
4236 if (!CONSTANT_CLASS_P (value))
4237 {
4238 constant_p = false;
4239 break;
4240 }
e89be13b 4241
4038c495
GB
4242 if (constant_p)
4243 {
4244 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
4245 break;
4246 }
84816907 4247
9f1da821 4248 TREE_CONSTANT (ctor) = 0;
4038c495 4249 }
e89be13b 4250
4038c495 4251 /* Vector types use CONSTRUCTOR all the way through gimple
37947cd0 4252 compilation as a general initializer. */
9771b263 4253 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
4038c495
GB
4254 {
4255 enum gimplify_status tret;
726a989a
RB
4256 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
4257 fb_rvalue);
4038c495
GB
4258 if (tret == GS_ERROR)
4259 ret = GS_ERROR;
37947cd0
JJ
4260 else if (TREE_STATIC (ctor)
4261 && !initializer_constant_valid_p (ce->value,
4262 TREE_TYPE (ce->value)))
4263 TREE_STATIC (ctor) = 0;
4038c495 4264 }
726a989a
RB
4265 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
4266 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
4038c495 4267 }
26d44ae2 4268 break;
6de9cd9a 4269
26d44ae2
RH
4270 default:
4271 /* So how did we get a CONSTRUCTOR for a scalar type? */
282899df 4272 gcc_unreachable ();
26d44ae2 4273 }
6de9cd9a 4274
26d44ae2
RH
4275 if (ret == GS_ERROR)
4276 return GS_ERROR;
4277 else if (want_value)
4278 {
26d44ae2
RH
4279 *expr_p = object;
4280 return GS_OK;
6de9cd9a 4281 }
26d44ae2 4282 else
726a989a
RB
4283 {
4284 /* If we have gimplified both sides of the initializer but have
4285 not emitted an assignment, do so now. */
4286 if (*expr_p)
4287 {
4288 tree lhs = TREE_OPERAND (*expr_p, 0);
4289 tree rhs = TREE_OPERAND (*expr_p, 1);
538dd0b7 4290 gassign *init = gimple_build_assign (lhs, rhs);
726a989a
RB
4291 gimplify_seq_add_stmt (pre_p, init);
4292 *expr_p = NULL;
4293 }
4294
4295 return GS_ALL_DONE;
4296 }
26d44ae2 4297}
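/* Worked example (approximate): an automatic aggregate with a sparse,
   non-constant initializer such as

     int a[100] = { [3] = n };

   has an incomplete element list, so CLEARED is chosen: the element list
   is zapped and the whole object is block-cleared first, then the single
   nonzero element is assigned, conceptually

     a = {};
     a[3] = n;

   A mostly-constant initializer may instead be dropped to static memory
   and block-copied, as handled earlier in the switch.  */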
6de9cd9a 4298
de4af523
JJ
4299/* Given a pointer value OP0, return a simplified version of an
4300 indirection through OP0, or NULL_TREE if no simplification is
4301 possible. This may only be applied to a rhs of an expression.
4302 Note that the resulting type may be different from the type pointed
4303 to in the sense that it is still compatible from the langhooks
4304 point of view. */
4305
4306static tree
4307gimple_fold_indirect_ref_rhs (tree t)
4308{
4309 return gimple_fold_indirect_ref (t);
4310}
4311
4caa08da
AH
4312/* Subroutine of gimplify_modify_expr to do simplifications of
4313 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
4314 something changes. */
6de9cd9a 4315
26d44ae2 4316static enum gimplify_status
726a989a
RB
4317gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
4318 gimple_seq *pre_p, gimple_seq *post_p,
4319 bool want_value)
26d44ae2 4320{
6d729f28
JM
4321 enum gimplify_status ret = GS_UNHANDLED;
4322 bool changed;
6de9cd9a 4323
6d729f28
JM
4324 do
4325 {
4326 changed = false;
4327 switch (TREE_CODE (*from_p))
4328 {
4329 case VAR_DECL:
4330 /* If we're assigning from a read-only variable initialized with
4331 a constructor, do the direct assignment from the constructor,
4332 but only if neither source nor target are volatile since this
4333 latter assignment might end up being done on a per-field basis. */
4334 if (DECL_INITIAL (*from_p)
4335 && TREE_READONLY (*from_p)
4336 && !TREE_THIS_VOLATILE (*from_p)
4337 && !TREE_THIS_VOLATILE (*to_p)
4338 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
4339 {
4340 tree old_from = *from_p;
4341 enum gimplify_status subret;
4342
4343 /* Move the constructor into the RHS. */
4344 *from_p = unshare_expr (DECL_INITIAL (*from_p));
4345
4346 /* Let's see if gimplify_init_constructor will need to put
4347 it in memory. */
4348 subret = gimplify_init_constructor (expr_p, NULL, NULL,
4349 false, true);
4350 if (subret == GS_ERROR)
4351 {
4352 /* If so, revert the change. */
4353 *from_p = old_from;
4354 }
4355 else
4356 {
4357 ret = GS_OK;
4358 changed = true;
4359 }
4360 }
4361 break;
4362 case INDIRECT_REF:
4caa08da 4363 {
6d729f28 4364 /* If we have code like
ffed8a01 4365
6d729f28 4366 *(const A*)(A*)&x
ffed8a01 4367
6d729f28
JM
4368 where the type of "x" is a (possibly cv-qualified variant
4369 of "A"), treat the entire expression as identical to "x".
4370 This kind of code arises in C++ when an object is bound
4371 to a const reference, and if "x" is a TARGET_EXPR we want
4372 to take advantage of the optimization below. */
06baaba3 4373 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
6d729f28
JM
4374 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
4375 if (t)
ffed8a01 4376 {
06baaba3
RG
4377 if (TREE_THIS_VOLATILE (t) != volatile_p)
4378 {
3a65ee74 4379 if (DECL_P (t))
06baaba3
RG
4380 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
4381 build_fold_addr_expr (t));
4382 if (REFERENCE_CLASS_P (t))
4383 TREE_THIS_VOLATILE (t) = volatile_p;
4384 }
6d729f28
JM
4385 *from_p = t;
4386 ret = GS_OK;
4387 changed = true;
ffed8a01 4388 }
6d729f28
JM
4389 break;
4390 }
4391
4392 case TARGET_EXPR:
4393 {
4394 /* If we are initializing something from a TARGET_EXPR, strip the
4395 TARGET_EXPR and initialize it directly, if possible. This can't
4396 be done if the initializer is void, since that implies that the
4397 temporary is set in some non-trivial way.
4398
4399 ??? What about code that pulls out the temp and uses it
4400 elsewhere? I think that such code never uses the TARGET_EXPR as
4401 an initializer. If I'm wrong, we'll die because the temp won't
4402 have any RTL. In that case, I guess we'll need to replace
4403 references somehow. */
4404 tree init = TARGET_EXPR_INITIAL (*from_p);
4405
4406 if (init
4407 && !VOID_TYPE_P (TREE_TYPE (init)))
ffed8a01 4408 {
6d729f28 4409 *from_p = init;
ffed8a01 4410 ret = GS_OK;
6d729f28 4411 changed = true;
ffed8a01 4412 }
4caa08da 4413 }
6d729f28 4414 break;
f98625f6 4415
6d729f28
JM
4416 case COMPOUND_EXPR:
4417 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
4418 caught. */
4419 gimplify_compound_expr (from_p, pre_p, true);
4420 ret = GS_OK;
4421 changed = true;
4422 break;
f98625f6 4423
6d729f28 4424 case CONSTRUCTOR:
ce3beba3
JM
4425 /* If we already made some changes, let the front end have a
4426 crack at this before we break it down. */
4427 if (ret != GS_UNHANDLED)
4428 break;
6d729f28
JM
4429 /* If we're initializing from a CONSTRUCTOR, break this into
4430 individual MODIFY_EXPRs. */
4431 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
4432 false);
4433
4434 case COND_EXPR:
4435 /* If we're assigning to a non-register type, push the assignment
4436 down into the branches. This is mandatory for ADDRESSABLE types,
4437 since we cannot generate temporaries for such, but it saves a
4438 copy in other cases as well. */
4439 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
f98625f6 4440 {
6d729f28
JM
4441 /* This code should mirror the code in gimplify_cond_expr. */
4442 enum tree_code code = TREE_CODE (*expr_p);
4443 tree cond = *from_p;
4444 tree result = *to_p;
4445
4446 ret = gimplify_expr (&result, pre_p, post_p,
4447 is_gimple_lvalue, fb_lvalue);
4448 if (ret != GS_ERROR)
4449 ret = GS_OK;
4450
4451 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
4452 TREE_OPERAND (cond, 1)
4453 = build2 (code, void_type_node, result,
4454 TREE_OPERAND (cond, 1));
4455 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
4456 TREE_OPERAND (cond, 2)
4457 = build2 (code, void_type_node, unshare_expr (result),
4458 TREE_OPERAND (cond, 2));
4459
4460 TREE_TYPE (cond) = void_type_node;
4461 recalculate_side_effects (cond);
4462
4463 if (want_value)
4464 {
4465 gimplify_and_add (cond, pre_p);
4466 *expr_p = unshare_expr (result);
4467 }
4468 else
4469 *expr_p = cond;
4470 return ret;
f98625f6 4471 }
f98625f6 4472 break;
f98625f6 4473
6d729f28
JM
4474 case CALL_EXPR:
4475 /* For calls that return in memory, give *to_p as the CALL_EXPR's
4476 return slot so that we don't generate a temporary. */
4477 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4478 && aggregate_value_p (*from_p, *from_p))
26d44ae2 4479 {
6d729f28
JM
4480 bool use_target;
4481
4482 if (!(rhs_predicate_for (*to_p))(*from_p))
4483 /* If we need a temporary, *to_p isn't accurate. */
4484 use_target = false;
ad19c4be 4485 /* It's OK to use the return slot directly unless it's an NRV. */
6d729f28
JM
4486 else if (TREE_CODE (*to_p) == RESULT_DECL
4487 && DECL_NAME (*to_p) == NULL_TREE
4488 && needs_to_live_in_memory (*to_p))
6d729f28
JM
4489 use_target = true;
4490 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4491 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4492 /* Don't force regs into memory. */
4493 use_target = false;
4494 else if (TREE_CODE (*expr_p) == INIT_EXPR)
4495 /* It's OK to use the target directly if it's being
4496 initialized. */
4497 use_target = true;
e6a54b01
EB
4498 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
4499 != INTEGER_CST)
aabb90e5
RG
4500 /* Always use the target and thus RSO for variable-sized types.
4501 GIMPLE cannot deal with a variable-sized assignment
4502 embedded in a call statement. */
4503 use_target = true;
345ae177
AH
4504 else if (TREE_CODE (*to_p) != SSA_NAME
4505 && (!is_gimple_variable (*to_p)
4506 || needs_to_live_in_memory (*to_p)))
6d729f28
JM
4507 /* Don't use the original target if it's already addressable;
4508 if its address escapes, and the called function uses the
4509 NRV optimization, a conforming program could see *to_p
4510 change before the called function returns; see c++/19317.
4511 When optimizing, the return_slot pass marks more functions
4512 as safe after we have escape info. */
4513 use_target = false;
4514 else
4515 use_target = true;
4516
4517 if (use_target)
4518 {
4519 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4520 mark_addressable (*to_p);
4521 }
26d44ae2 4522 }
6d729f28 4523 break;
6de9cd9a 4524
6d729f28
JM
4525 case WITH_SIZE_EXPR:
4526 /* Likewise for calls that return an aggregate of non-constant size,
4527 since we would not be able to generate a temporary at all. */
4528 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
4529 {
4530 *from_p = TREE_OPERAND (*from_p, 0);
ebad5233
JM
4531 /* We don't change ret in this case because the
4532 WITH_SIZE_EXPR might have been added in
4533 gimplify_modify_expr, so returning GS_OK would lead to an
4534 infinite loop. */
6d729f28
JM
4535 changed = true;
4536 }
4537 break;
6de9cd9a 4538
6d729f28
JM
4539 /* If we're initializing from a container, push the initialization
4540 inside it. */
4541 case CLEANUP_POINT_EXPR:
4542 case BIND_EXPR:
4543 case STATEMENT_LIST:
26d44ae2 4544 {
6d729f28
JM
4545 tree wrap = *from_p;
4546 tree t;
dae7ec87 4547
6d729f28
JM
4548 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4549 fb_lvalue);
dae7ec87
JM
4550 if (ret != GS_ERROR)
4551 ret = GS_OK;
4552
6d729f28
JM
4553 t = voidify_wrapper_expr (wrap, *expr_p);
4554 gcc_assert (t == *expr_p);
dae7ec87
JM
4555
4556 if (want_value)
4557 {
6d729f28
JM
4558 gimplify_and_add (wrap, pre_p);
4559 *expr_p = unshare_expr (*to_p);
dae7ec87
JM
4560 }
4561 else
6d729f28
JM
4562 *expr_p = wrap;
4563 return GS_OK;
26d44ae2 4564 }
6de9cd9a 4565
6d729f28 4566 case COMPOUND_LITERAL_EXPR:
fa47911c 4567 {
6d729f28
JM
4568 tree complit = TREE_OPERAND (*expr_p, 1);
4569 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
4570 tree decl = DECL_EXPR_DECL (decl_s);
4571 tree init = DECL_INITIAL (decl);
4572
4573 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
4574 into struct T x = { 0, 1, 2 } if the address of the
4575 compound literal has never been taken. */
4576 if (!TREE_ADDRESSABLE (complit)
4577 && !TREE_ADDRESSABLE (decl)
4578 && init)
fa47911c 4579 {
6d729f28
JM
4580 *expr_p = copy_node (*expr_p);
4581 TREE_OPERAND (*expr_p, 1) = init;
4582 return GS_OK;
fa47911c
JM
4583 }
4584 }
4585
6d729f28
JM
4586 default:
4587 break;
2ec5deb5 4588 }
6d729f28
JM
4589 }
4590 while (changed);
6de9cd9a 4591
6de9cd9a
DN
4592 return ret;
4593}
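/* Illustration (hypothetical code): for a call whose aggregate result is
   returned in memory, e.g.

     struct big x = f ();

   the CALL_EXPR case above sets CALL_EXPR_RETURN_SLOT_OPT and marks x
   addressable, so f constructs its return value directly in x instead of
   in a temporary that would then have to be copied.  */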
4594
216820a4
RG
4595
4596/* Return true if T looks like a valid GIMPLE statement. */
4597
4598static bool
4599is_gimple_stmt (tree t)
4600{
4601 const enum tree_code code = TREE_CODE (t);
4602
4603 switch (code)
4604 {
4605 case NOP_EXPR:
4606 /* The only valid NOP_EXPR is the empty statement. */
4607 return IS_EMPTY_STMT (t);
4608
4609 case BIND_EXPR:
4610 case COND_EXPR:
4611 /* These are only valid if they're void. */
4612 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
4613
4614 case SWITCH_EXPR:
4615 case GOTO_EXPR:
4616 case RETURN_EXPR:
4617 case LABEL_EXPR:
4618 case CASE_LABEL_EXPR:
4619 case TRY_CATCH_EXPR:
4620 case TRY_FINALLY_EXPR:
4621 case EH_FILTER_EXPR:
4622 case CATCH_EXPR:
4623 case ASM_EXPR:
4624 case STATEMENT_LIST:
41dbbb37
TS
4625 case OACC_PARALLEL:
4626 case OACC_KERNELS:
4627 case OACC_DATA:
4628 case OACC_HOST_DATA:
4629 case OACC_DECLARE:
4630 case OACC_UPDATE:
4631 case OACC_ENTER_DATA:
4632 case OACC_EXIT_DATA:
4633 case OACC_CACHE:
216820a4
RG
4634 case OMP_PARALLEL:
4635 case OMP_FOR:
74bf76ed 4636 case OMP_SIMD:
c02065fc 4637 case CILK_SIMD:
acf0174b 4638 case OMP_DISTRIBUTE:
41dbbb37 4639 case OACC_LOOP:
216820a4
RG
4640 case OMP_SECTIONS:
4641 case OMP_SECTION:
4642 case OMP_SINGLE:
4643 case OMP_MASTER:
acf0174b 4644 case OMP_TASKGROUP:
216820a4
RG
4645 case OMP_ORDERED:
4646 case OMP_CRITICAL:
4647 case OMP_TASK:
d9a6bd32
JJ
4648 case OMP_TARGET:
4649 case OMP_TARGET_DATA:
4650 case OMP_TARGET_UPDATE:
4651 case OMP_TARGET_ENTER_DATA:
4652 case OMP_TARGET_EXIT_DATA:
4653 case OMP_TASKLOOP:
4654 case OMP_TEAMS:
216820a4
RG
4655 /* These are always void. */
4656 return true;
4657
4658 case CALL_EXPR:
4659 case MODIFY_EXPR:
4660 case PREDICT_EXPR:
4661 /* These are valid regardless of their type. */
4662 return true;
4663
4664 default:
4665 return false;
4666 }
4667}
4668
4669
d9c2d296
AP
4670/* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4671 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
7b7e6ecd
EB
4672 DECL_GIMPLE_REG_P set.
4673
4674 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4675 other, unmodified part of the complex object just before the total store.
4676 As a consequence, if the object is still uninitialized, an undefined value
4677 will be loaded into a register, which may result in a spurious exception
4678 if the register is floating-point and the value happens to be a signaling
4679 NaN for example. Then the fully-fledged complex operations lowering pass
4680 followed by a DCE pass are necessary in order to fix things up. */
d9c2d296
AP
4681
4682static enum gimplify_status
726a989a
RB
4683gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4684 bool want_value)
d9c2d296
AP
4685{
4686 enum tree_code code, ocode;
4687 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4688
726a989a
RB
4689 lhs = TREE_OPERAND (*expr_p, 0);
4690 rhs = TREE_OPERAND (*expr_p, 1);
d9c2d296
AP
4691 code = TREE_CODE (lhs);
4692 lhs = TREE_OPERAND (lhs, 0);
4693
4694 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4695 other = build1 (ocode, TREE_TYPE (rhs), lhs);
8d2b0410 4696 TREE_NO_WARNING (other) = 1;
d9c2d296
AP
4697 other = get_formal_tmp_var (other, pre_p);
4698
4699 realpart = code == REALPART_EXPR ? rhs : other;
4700 imagpart = code == REALPART_EXPR ? other : rhs;
4701
4702 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4703 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4704 else
4705 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4706
726a989a
RB
4707 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4708 *expr_p = (want_value) ? rhs : NULL_TREE;
d9c2d296
AP
4709
4710 return GS_ALL_DONE;
4711}
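/* Illustrative sketch (hypothetical variable c of complex type with
   DECL_GIMPLE_REG_P set): the partial store

     __real__ c = r;

   is promoted to a total store by first loading the untouched part,
   roughly

     D.1 = __imag__ c;
     c = COMPLEX_EXPR <r, D.1>;

   which is what lets c remain a gimple register; see the note above about
   the possible load of an uninitialized part.  */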
4712
206048bd 4713/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
6de9cd9a
DN
4714
4715 modify_expr
4716 : varname '=' rhs
4717 | '*' ID '=' rhs
4718
4719 PRE_P points to the list where side effects that must happen before
4720 *EXPR_P should be stored.
4721
4722 POST_P points to the list where side effects that must happen after
4723 *EXPR_P should be stored.
4724
4725 WANT_VALUE is nonzero iff we want to use the value of this expression
4726 in another expression. */
4727
4728static enum gimplify_status
726a989a
RB
4729gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4730 bool want_value)
6de9cd9a 4731{
726a989a
RB
4732 tree *from_p = &TREE_OPERAND (*expr_p, 1);
4733 tree *to_p = &TREE_OPERAND (*expr_p, 0);
44de5aeb 4734 enum gimplify_status ret = GS_UNHANDLED;
355fe088 4735 gimple *assign;
db3927fb 4736 location_t loc = EXPR_LOCATION (*expr_p);
6da8be89 4737 gimple_stmt_iterator gsi;
6de9cd9a 4738
282899df
NS
4739 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4740 || TREE_CODE (*expr_p) == INIT_EXPR);
6de9cd9a 4741
d0ad58f9
JM
4742 /* Trying to simplify a clobber using normal logic doesn't work,
4743 so handle it here. */
4744 if (TREE_CLOBBER_P (*from_p))
4745 {
5d751b0c
JJ
4746 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4747 if (ret == GS_ERROR)
4748 return ret;
4749 gcc_assert (!want_value
4750 && (TREE_CODE (*to_p) == VAR_DECL
4751 || TREE_CODE (*to_p) == MEM_REF));
d0ad58f9
JM
4752 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
4753 *expr_p = NULL;
4754 return GS_ALL_DONE;
4755 }
4756
1b24a790
RG
4757 /* Insert pointer conversions required by the middle-end that are not
4758 required by the frontend. This fixes middle-end type checking for,
4759 for example, gcc.dg/redecl-6.c. */
daad0278 4760 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
1b24a790
RG
4761 {
4762 STRIP_USELESS_TYPE_CONVERSION (*from_p);
4763 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
db3927fb 4764 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
1b24a790
RG
4765 }
4766
83d7e8f0
JM
4767 /* See if any simplifications can be done based on what the RHS is. */
4768 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4769 want_value);
4770 if (ret != GS_UNHANDLED)
4771 return ret;
4772
4773 /* For zero sized types only gimplify the left hand side and right hand
4774 side as statements and throw away the assignment. Do this after
4775 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4776 types properly. */
753b34d7 4777 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
412f8986 4778 {
726a989a
RB
4779 gimplify_stmt (from_p, pre_p);
4780 gimplify_stmt (to_p, pre_p);
412f8986
AP
4781 *expr_p = NULL_TREE;
4782 return GS_ALL_DONE;
4783 }
6de9cd9a 4784
d25cee4d
RH
4785 /* If the value being copied is of variable width, compute the length
4786 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
4787 before gimplifying any of the operands so that we can resolve any
4788 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
4789 the size of the expression to be copied, not of the destination, so
726a989a 4790 that is what we must do here. */
d25cee4d 4791 maybe_with_size_expr (from_p);
6de9cd9a 4792
726a989a
RB
4793 /* As a special case, we have to temporarily allow for assignments
4794 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
4795 a toplevel statement, when gimplifying the GENERIC expression
4796 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4797 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4798
4799 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
4800 prevent gimplify_expr from trying to create a new temporary for
4801 foo's LHS, we tell it that it should only gimplify until it
4802 reaches the CALL_EXPR. On return from gimplify_expr, the newly
4803 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4804 and all we need to do here is set 'a' to be its LHS. */
8a1b7b7f
JM
4805
4806 /* Gimplify the RHS first for C++17 and bug 71104. */
4807 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
4808 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
4809 if (ret == GS_ERROR)
4810 return ret;
4811
4812 /* Then gimplify the LHS. */
7f15b177
RB
4813 /* If we gimplified the RHS to a CALL_EXPR and that call may return
4814 twice we have to make sure to gimplify into non-SSA as otherwise
4815 the abnormal edge added later will make those defs not dominate
4816 their uses.
4817 ??? Technically this applies only to the registers used in the
4818 resulting non-register *TO_P. */
4819 bool saved_into_ssa = gimplify_ctxp->into_ssa;
4820 if (saved_into_ssa
4821 && TREE_CODE (*from_p) == CALL_EXPR
4822 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
4823 gimplify_ctxp->into_ssa = false;
8a1b7b7f 4824 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
7f15b177 4825 gimplify_ctxp->into_ssa = saved_into_ssa;
6de9cd9a
DN
4826 if (ret == GS_ERROR)
4827 return ret;
4828
8a1b7b7f
JM
4829 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
4830 guess for the predicate was wrong. */
4831 gimple_predicate final_pred = rhs_predicate_for (*to_p);
4832 if (final_pred != initial_pred)
4833 {
4834 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
4835 if (ret == GS_ERROR)
4836 return ret;
4837 }
4838
f8e89441 4839 /* In case of va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the type
026c3cfd 4840 size as argument to the call. */
f8e89441
TV
4841 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4842 {
4843 tree call = TREE_OPERAND (*from_p, 0);
4844 tree vlasize = TREE_OPERAND (*from_p, 1);
4845
4846 if (TREE_CODE (call) == CALL_EXPR
4847 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
4848 {
2fe1d762 4849 int nargs = call_expr_nargs (call);
f8e89441
TV
4850 tree type = TREE_TYPE (call);
4851 tree ap = CALL_EXPR_ARG (call, 0);
4852 tree tag = CALL_EXPR_ARG (call, 1);
33f0852f 4853 tree aptag = CALL_EXPR_ARG (call, 2);
f8e89441 4854 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
2fe1d762
TV
4855 IFN_VA_ARG, type,
4856 nargs + 1, ap, tag,
33f0852f
JJ
4857 aptag, vlasize);
4858 TREE_OPERAND (*from_p, 0) = newcall;
f8e89441
TV
4859 }
4860 }
4861
44de5aeb
RK
4862 /* Now see if the above changed *from_p to something we handle specially. */
4863 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4864 want_value);
6de9cd9a
DN
4865 if (ret != GS_UNHANDLED)
4866 return ret;
4867
d25cee4d
RH
4868 /* If we've got a variable-sized assignment between two lvalues (i.e. one that
4869 does not involve a call), then we can make things a bit more straightforward
4870 by converting the assignment to memcpy or memset. */
4871 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4872 {
4873 tree from = TREE_OPERAND (*from_p, 0);
4874 tree size = TREE_OPERAND (*from_p, 1);
4875
4876 if (TREE_CODE (from) == CONSTRUCTOR)
726a989a
RB
4877 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4878
e847cc68 4879 if (is_gimple_addressable (from))
d25cee4d
RH
4880 {
4881 *from_p = from;
726a989a
RB
4882 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4883 pre_p);
d25cee4d
RH
4884 }
4885 }
4886
e41d82f5
RH
4887 /* Transform partial stores to non-addressable complex variables into
4888 total stores. This allows us to use real instead of virtual operands
4889 for these variables, which improves optimization. */
4890 if ((TREE_CODE (*to_p) == REALPART_EXPR
4891 || TREE_CODE (*to_p) == IMAGPART_EXPR)
4892 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4893 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4894
f173837a 4895 /* Try to alleviate the effects of the gimplification creating artificial
b4771722
EB
4896 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
4897 make sure not to create DECL_DEBUG_EXPR links across functions. */
f173837a 4898 if (!gimplify_ctxp->into_ssa
f2896bc9 4899 && TREE_CODE (*from_p) == VAR_DECL
726a989a
RB
4900 && DECL_IGNORED_P (*from_p)
4901 && DECL_P (*to_p)
b4771722
EB
4902 && !DECL_IGNORED_P (*to_p)
4903 && decl_function_context (*to_p) == current_function_decl)
f173837a
EB
4904 {
4905 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4906 DECL_NAME (*from_p)
4907 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
839b422f 4908 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
f173837a 4909 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
726a989a
RB
4910 }
4911
8f0fe813
NS
4912 if (want_value && TREE_THIS_VOLATILE (*to_p))
4913 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
4914
726a989a
RB
4915 if (TREE_CODE (*from_p) == CALL_EXPR)
4916 {
4917 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4918 instead of a GIMPLE_ASSIGN. */
538dd0b7 4919 gcall *call_stmt;
1304953e
JJ
4920 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
4921 {
4922 /* Gimplify internal functions created in the FEs. */
4923 int nargs = call_expr_nargs (*from_p), i;
4924 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
4925 auto_vec<tree> vargs (nargs);
4926
4927 for (i = 0; i < nargs; i++)
4928 {
4929 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
4930 EXPR_LOCATION (*from_p));
4931 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
4932 }
538dd0b7
DM
4933 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
4934 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
1304953e 4935 }
ed9c79e1
JJ
4936 else
4937 {
1304953e
JJ
4938 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
4939 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
4940 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
4941 tree fndecl = get_callee_fndecl (*from_p);
4942 if (fndecl
4943 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4944 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
4945 && call_expr_nargs (*from_p) == 3)
538dd0b7
DM
4946 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
4947 CALL_EXPR_ARG (*from_p, 0),
4948 CALL_EXPR_ARG (*from_p, 1),
4949 CALL_EXPR_ARG (*from_p, 2));
1304953e
JJ
4950 else
4951 {
538dd0b7
DM
4952 call_stmt = gimple_build_call_from_tree (*from_p);
4953 gimple_call_set_fntype (call_stmt, TREE_TYPE (fnptrtype));
1304953e 4954 }
ed9c79e1 4955 }
538dd0b7 4956 notice_special_calls (call_stmt);
abd3a68c 4957 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
538dd0b7 4958 gimple_call_set_lhs (call_stmt, *to_p);
381cdae4
RB
4959 else if (TREE_CODE (*to_p) == SSA_NAME)
4960 /* The above is somewhat premature, avoid ICEing later for a
4961 SSA name w/o a definition. We may have uses in the GIMPLE IL.
4962 ??? This doesn't make it a default-def. */
4963 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
538dd0b7 4964 assign = call_stmt;
f173837a 4965 }
726a989a 4966 else
c2255bc4
AH
4967 {
4968 assign = gimple_build_assign (*to_p, *from_p);
4969 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4cde512c
JJ
4970 if (COMPARISON_CLASS_P (*from_p))
4971 gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
c2255bc4 4972 }
f173837a 4973
726a989a 4974 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
6de9cd9a 4975 {
2ad728d2 4976 /* We should have got an SSA name from the start. */
381cdae4
RB
4977 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
4978 || ! gimple_in_ssa_p (cfun));
726a989a 4979 }
07beea0d 4980
6da8be89
MM
4981 gimplify_seq_add_stmt (pre_p, assign);
4982 gsi = gsi_last (*pre_p);
88ac13da 4983 maybe_fold_stmt (&gsi);
6da8be89 4984
726a989a
RB
4985 if (want_value)
4986 {
8f0fe813 4987 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
17ad5b5e 4988 return GS_OK;
6de9cd9a 4989 }
726a989a
RB
4990 else
4991 *expr_p = NULL;
6de9cd9a 4992
17ad5b5e 4993 return GS_ALL_DONE;
6de9cd9a
DN
4994}
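A hedged illustration of two of the cases gimplify_modify_expr handles (not part of the sources): with -fdump-tree-gimple, a call on the right-hand side becomes a GIMPLE_CALL whose LHS is the destination, while a compound right-hand side is flattened through temporaries emitted into the prequeue. The function names are invented for the example.

/* Illustrative only. */
extern int produce (void);

int consume (int a, int b)
{
  int x = produce ();   /* emitted as a single GIMPLE_CALL: x = produce (); */
  int y = a * b + x;    /* RHS side effects and temporaries go to the
                           prequeue, leaving a simple final assignment */
  return y;
}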
4995
ad19c4be
EB
4996/* Gimplify a comparison between two variable-sized objects. Do this
4997 with a call to BUILT_IN_MEMCMP. */
44de5aeb
RK
4998
4999static enum gimplify_status
5000gimplify_variable_sized_compare (tree *expr_p)
5001{
692ad9aa 5002 location_t loc = EXPR_LOCATION (*expr_p);
44de5aeb
RK
5003 tree op0 = TREE_OPERAND (*expr_p, 0);
5004 tree op1 = TREE_OPERAND (*expr_p, 1);
692ad9aa 5005 tree t, arg, dest, src, expr;
5039610b
SL
5006
5007 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
5008 arg = unshare_expr (arg);
5009 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
db3927fb
AH
5010 src = build_fold_addr_expr_loc (loc, op1);
5011 dest = build_fold_addr_expr_loc (loc, op0);
e79983f4 5012 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
db3927fb 5013 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
692ad9aa
EB
5014
5015 expr
b4257cfc 5016 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
692ad9aa
EB
5017 SET_EXPR_LOCATION (expr, loc);
5018 *expr_p = expr;
44de5aeb
RK
5019
5020 return GS_OK;
5021}
5022
ad19c4be
EB
5023/* Gimplify a comparison between two aggregate objects of integral scalar
5024 mode as a comparison between the bitwise equivalent scalar values. */
61c25908
OH
5025
5026static enum gimplify_status
5027gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5028{
db3927fb 5029 location_t loc = EXPR_LOCATION (*expr_p);
61c25908
OH
5030 tree op0 = TREE_OPERAND (*expr_p, 0);
5031 tree op1 = TREE_OPERAND (*expr_p, 1);
5032
5033 tree type = TREE_TYPE (op0);
5034 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
5035
db3927fb
AH
5036 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
5037 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
61c25908
OH
5038
5039 *expr_p
db3927fb 5040 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
61c25908
OH
5041
5042 return GS_OK;
5043}
5044
ad19c4be
EB
5045/* Gimplify an expression sequence. This function gimplifies each
5046 expression and rewrites the original expression with the last
6de9cd9a
DN
5047 expression of the sequence in GIMPLE form.
5048
5049 PRE_P points to the list where the side effects for all the
5050 expressions in the sequence will be emitted.
d3147f64 5051
6de9cd9a 5052 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
6de9cd9a
DN
5053
5054static enum gimplify_status
726a989a 5055gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6de9cd9a
DN
5056{
5057 tree t = *expr_p;
5058
5059 do
5060 {
5061 tree *sub_p = &TREE_OPERAND (t, 0);
5062
5063 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
5064 gimplify_compound_expr (sub_p, pre_p, false);
5065 else
726a989a 5066 gimplify_stmt (sub_p, pre_p);
6de9cd9a
DN
5067
5068 t = TREE_OPERAND (t, 1);
5069 }
5070 while (TREE_CODE (t) == COMPOUND_EXPR);
5071
5072 *expr_p = t;
5073 if (want_value)
5074 return GS_OK;
5075 else
5076 {
726a989a 5077 gimplify_stmt (expr_p, pre_p);
6de9cd9a
DN
5078 return GS_ALL_DONE;
5079 }
5080}
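A hedged sketch of how gimplify_compound_expr behaves (not part of the sources): every expression but the last is emitted purely for its side effects, and only the last one supplies the value. Function names are invented.

/* Illustrative only. */
extern int next_token (void);
extern void skip_blanks (void);

int read_token (void)
{
  /* skip_blanks () is gimplified as a statement; the value of the whole
     comma expression is the result of next_token (). */
  return (skip_blanks (), next_token ());
}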
5081
726a989a
RB
5082/* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
5083 gimplify. After gimplification, EXPR_P will point to a new temporary
5084 that holds the original value of the SAVE_EXPR node.
6de9cd9a 5085
726a989a 5086 PRE_P points to the list where side effects that must happen before
ad19c4be 5087 *EXPR_P should be stored. */
6de9cd9a
DN
5088
5089static enum gimplify_status
726a989a 5090gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6de9cd9a
DN
5091{
5092 enum gimplify_status ret = GS_ALL_DONE;
5093 tree val;
5094
282899df 5095 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
6de9cd9a
DN
5096 val = TREE_OPERAND (*expr_p, 0);
5097
7f5e6307
RH
5098 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
5099 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
17ad5b5e 5100 {
7f5e6307
RH
5101 /* The operand may be a void-valued expression such as SAVE_EXPRs
5102 generated by the Java frontend for class initialization. It is
5103 being executed only for its side-effects. */
5104 if (TREE_TYPE (val) == void_type_node)
5105 {
5106 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5107 is_gimple_stmt, fb_none);
7f5e6307
RH
5108 val = NULL;
5109 }
5110 else
381cdae4
RB
5111 /* The temporary may not be an SSA name as later abnormal and EH
5112 control flow may invalidate use/def domination. */
5113 val = get_initialized_tmp_var (val, pre_p, post_p, false);
7f5e6307
RH
5114
5115 TREE_OPERAND (*expr_p, 0) = val;
5116 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
17ad5b5e 5117 }
6de9cd9a 5118
7f5e6307
RH
5119 *expr_p = val;
5120
6de9cd9a
DN
5121 return ret;
5122}
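A hedged sketch related to gimplify_save_expr (not part of the sources). It assumes the usual middle-end behaviour that variable array bounds are wrapped in SAVE_EXPRs so the size is evaluated once; treat that as an assumption of the example rather than a guarantee.

/* Illustrative only: the VLA bound n + 1 is evaluated once; later uses of
   the size (e.g. sizeof) reuse the saved value resolved to a temporary. */
int last_of (int n)
{
  int a[n + 1];
  a[n] = n;
  return a[n] + (int) sizeof (a);
}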
5123
ad19c4be 5124/* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6de9cd9a
DN
5125
5126 unary_expr
5127 : ...
5128 | '&' varname
5129 ...
5130
5131 PRE_P points to the list where side effects that must happen before
5132 *EXPR_P should be stored.
5133
5134 POST_P points to the list where side effects that must happen after
5135 *EXPR_P should be stored. */
5136
5137static enum gimplify_status
726a989a 5138gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6de9cd9a
DN
5139{
5140 tree expr = *expr_p;
5141 tree op0 = TREE_OPERAND (expr, 0);
5142 enum gimplify_status ret;
db3927fb 5143 location_t loc = EXPR_LOCATION (*expr_p);
6de9cd9a
DN
5144
5145 switch (TREE_CODE (op0))
5146 {
5147 case INDIRECT_REF:
67f23620 5148 do_indirect_ref:
6de9cd9a
DN
5149 /* Check if we are dealing with an expression of the form '&*ptr'.
5150 While the front end folds away '&*ptr' into 'ptr', these
5151 expressions may be generated internally by the compiler (e.g.,
5152 builtins like __builtin_va_end). */
67f23620
RH
5153 /* Caution: the silent array decomposition semantics we allow for
5154 ADDR_EXPR means we can't always discard the pair. */
c87ac7e8
AO
5155 /* Gimplification of the ADDR_EXPR operand may drop
5156 cv-qualification conversions, so make sure we add them if
5157 needed. */
67f23620
RH
5158 {
5159 tree op00 = TREE_OPERAND (op0, 0);
5160 tree t_expr = TREE_TYPE (expr);
5161 tree t_op00 = TREE_TYPE (op00);
5162
f4088621 5163 if (!useless_type_conversion_p (t_expr, t_op00))
db3927fb 5164 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
67f23620
RH
5165 *expr_p = op00;
5166 ret = GS_OK;
5167 }
6de9cd9a
DN
5168 break;
5169
44de5aeb
RK
5170 case VIEW_CONVERT_EXPR:
5171 /* Take the address of our operand and then convert it to the type of
af72267c
RK
5172 this ADDR_EXPR.
5173
5174 ??? The interactions of VIEW_CONVERT_EXPR and aliasing are not at
5175 all clear. The impact of this transformation is even less clear. */
91804752
EB
5176
5177 /* If the operand is a useless conversion, look through it. Doing so
5178 guarantees that the ADDR_EXPR and its operand will remain of the
5179 same type. */
5180 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
317c0092 5181 op0 = TREE_OPERAND (op0, 0);
91804752 5182
db3927fb
AH
5183 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
5184 build_fold_addr_expr_loc (loc,
5185 TREE_OPERAND (op0, 0)));
44de5aeb 5186 ret = GS_OK;
6de9cd9a
DN
5187 break;
5188
903eccd1
EB
5189 case MEM_REF:
5190 if (integer_zerop (TREE_OPERAND (op0, 1)))
5191 goto do_indirect_ref;
5192
191816a3 5193 /* fall through */
903eccd1 5194
6de9cd9a 5195 default:
cbf5d0e7
RB
5196 /* If we see a call to a declared builtin or see its address
5197 being taken (we can unify those cases here) then we can mark
5198 the builtin for implicit generation by GCC. */
5199 if (TREE_CODE (op0) == FUNCTION_DECL
5200 && DECL_BUILT_IN_CLASS (op0) == BUILT_IN_NORMAL
5201 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
5202 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
5203
6de9cd9a 5204 /* We use fb_either here because the C frontend sometimes takes
5201931e
JM
5205 the address of a call that returns a struct; see
5206 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
5207 the implied temporary explicit. */
936d04b6 5208
f76d6e6f 5209 /* Make the operand addressable. */
6de9cd9a 5210 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
e847cc68 5211 is_gimple_addressable, fb_either);
8b17cc05
RG
5212 if (ret == GS_ERROR)
5213 break;
67f23620 5214
f76d6e6f
EB
5215 /* Then mark it. Beware that it may not be possible to do so directly
5216 if a temporary has been created by the gimplification. */
5217 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
9e51aaf5 5218
8b17cc05 5219 op0 = TREE_OPERAND (expr, 0);
6de9cd9a 5220
8b17cc05
RG
5221 /* For various reasons, the gimplification of the expression
5222 may have made a new INDIRECT_REF. */
5223 if (TREE_CODE (op0) == INDIRECT_REF)
5224 goto do_indirect_ref;
5225
6b8b9e42
RG
5226 mark_addressable (TREE_OPERAND (expr, 0));
5227
5228 /* The FEs may end up building ADDR_EXPRs early on a decl with
5229 an incomplete type. Re-build ADDR_EXPRs in canonical form
5230 here. */
5231 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
5232 *expr_p = build_fold_addr_expr (op0);
5233
8b17cc05 5234 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6b8b9e42
RG
5235 recompute_tree_invariant_for_addr_expr (*expr_p);
5236
5237 /* If we re-built the ADDR_EXPR add a conversion to the original type
5238 if required. */
5239 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
5240 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
8b17cc05 5241
6de9cd9a
DN
5242 break;
5243 }
5244
6de9cd9a
DN
5245 return ret;
5246}
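A hedged illustration of the INDIRECT_REF case in gimplify_addr_expr (not part of the sources): as the comment notes, builtins such as __builtin_va_end can internally produce '&*ptr' forms, which this function folds back to 'ptr'.

/* Illustrative only: va_end expands to __builtin_va_end, one internal
   source of the '&*ptr'-style ADDR_EXPRs mentioned above. */
#include <stdarg.h>

int sum2 (int count, ...)
{
  va_list ap;
  va_start (ap, count);
  int r = va_arg (ap, int) + va_arg (ap, int);
  va_end (ap);
  return r;
}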
5247
5248/* Gimplify the operands of an ASM_EXPR. Input operands should be gimple
5249 values; output operands should be gimple lvalues. */
5250
5251static enum gimplify_status
726a989a 5252gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6de9cd9a 5253{
726a989a
RB
5254 tree expr;
5255 int noutputs;
5256 const char **oconstraints;
6de9cd9a
DN
5257 int i;
5258 tree link;
5259 const char *constraint;
5260 bool allows_mem, allows_reg, is_inout;
5261 enum gimplify_status ret, tret;
538dd0b7 5262 gasm *stmt;
9771b263
DN
5263 vec<tree, va_gc> *inputs;
5264 vec<tree, va_gc> *outputs;
5265 vec<tree, va_gc> *clobbers;
5266 vec<tree, va_gc> *labels;
726a989a 5267 tree link_next;
b8698a0f 5268
726a989a
RB
5269 expr = *expr_p;
5270 noutputs = list_length (ASM_OUTPUTS (expr));
5271 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
5272
9771b263
DN
5273 inputs = NULL;
5274 outputs = NULL;
5275 clobbers = NULL;
5276 labels = NULL;
6de9cd9a 5277
6de9cd9a 5278 ret = GS_ALL_DONE;
726a989a
RB
5279 link_next = NULL_TREE;
5280 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6de9cd9a 5281 {
2c68ba8e 5282 bool ok;
726a989a
RB
5283 size_t constraint_len;
5284
5285 link_next = TREE_CHAIN (link);
5286
5287 oconstraints[i]
5288 = constraint
6de9cd9a 5289 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6db081f1
AP
5290 constraint_len = strlen (constraint);
5291 if (constraint_len == 0)
5292 continue;
6de9cd9a 5293
2c68ba8e
LB
5294 ok = parse_output_constraint (&constraint, i, 0, 0,
5295 &allows_mem, &allows_reg, &is_inout);
5296 if (!ok)
5297 {
5298 ret = GS_ERROR;
5299 is_inout = false;
5300 }
6de9cd9a
DN
5301
5302 if (!allows_reg && allows_mem)
936d04b6 5303 mark_addressable (TREE_VALUE (link));
6de9cd9a
DN
5304
5305 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5306 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
5307 fb_lvalue | fb_mayfail);
5308 if (tret == GS_ERROR)
5309 {
5310 error ("invalid lvalue in asm output %d", i);
5311 ret = tret;
5312 }
5313
ed87de55
RB
5314 /* If the constraint does not allow memory make sure we gimplify
5315 it to a register if it is not already but its base is. This
5316 happens for complex and vector components. */
5317 if (!allows_mem)
5318 {
5319 tree op = TREE_VALUE (link);
5320 if (! is_gimple_val (op)
5321 && is_gimple_reg_type (TREE_TYPE (op))
5322 && is_gimple_reg (get_base_address (op)))
5323 {
5324 tree tem = create_tmp_reg (TREE_TYPE (op));
5325 tree ass;
5326 if (is_inout)
5327 {
5328 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
5329 tem, unshare_expr (op));
5330 gimplify_and_add (ass, pre_p);
5331 }
5332 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
5333 gimplify_and_add (ass, post_p);
5334
5335 TREE_VALUE (link) = tem;
5336 tret = GS_OK;
5337 }
5338 }
5339
9771b263 5340 vec_safe_push (outputs, link);
726a989a
RB
5341 TREE_CHAIN (link) = NULL_TREE;
5342
6de9cd9a
DN
5343 if (is_inout)
5344 {
5345 /* An input/output operand. To give the optimizers more
5346 flexibility, split it into separate input and output
5347 operands. */
5348 tree input;
3d7b83b6
MS
5349 /* Buffer big enough to format a 32-bit UINT_MAX into. */
5350 char buf[11];
6de9cd9a
DN
5351
5352 /* Turn the in/out constraint into an output constraint. */
5353 char *p = xstrdup (constraint);
5354 p[0] = '=';
5355 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6de9cd9a
DN
5356
5357 /* And add a matching input constraint. */
5358 if (allows_reg)
5359 {
3d7b83b6 5360 sprintf (buf, "%u", i);
372d72d9
JJ
5361
5362 /* If there are multiple alternatives in the constraint,
5363 handle each of them individually. Those that allow register
5364 will be replaced with operand number, the others will stay
5365 unchanged. */
5366 if (strchr (p, ',') != NULL)
5367 {
5368 size_t len = 0, buflen = strlen (buf);
5369 char *beg, *end, *str, *dst;
5370
5371 for (beg = p + 1;;)
5372 {
5373 end = strchr (beg, ',');
5374 if (end == NULL)
5375 end = strchr (beg, '\0');
5376 if ((size_t) (end - beg) < buflen)
5377 len += buflen + 1;
5378 else
5379 len += end - beg + 1;
5380 if (*end)
5381 beg = end + 1;
5382 else
5383 break;
5384 }
5385
858904db 5386 str = (char *) alloca (len);
372d72d9
JJ
5387 for (beg = p + 1, dst = str;;)
5388 {
5389 const char *tem;
5390 bool mem_p, reg_p, inout_p;
5391
5392 end = strchr (beg, ',');
5393 if (end)
5394 *end = '\0';
5395 beg[-1] = '=';
5396 tem = beg - 1;
5397 parse_output_constraint (&tem, i, 0, 0,
5398 &mem_p, &reg_p, &inout_p);
5399 if (dst != str)
5400 *dst++ = ',';
5401 if (reg_p)
5402 {
5403 memcpy (dst, buf, buflen);
5404 dst += buflen;
5405 }
5406 else
5407 {
5408 if (end)
5409 len = end - beg;
5410 else
5411 len = strlen (beg);
5412 memcpy (dst, beg, len);
5413 dst += len;
5414 }
5415 if (end)
5416 beg = end + 1;
5417 else
5418 break;
5419 }
5420 *dst = '\0';
5421 input = build_string (dst - str, str);
5422 }
5423 else
5424 input = build_string (strlen (buf), buf);
6de9cd9a
DN
5425 }
5426 else
5427 input = build_string (constraint_len - 1, constraint + 1);
372d72d9
JJ
5428
5429 free (p);
5430
6de9cd9a
DN
5431 input = build_tree_list (build_tree_list (NULL_TREE, input),
5432 unshare_expr (TREE_VALUE (link)));
5433 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
5434 }
5435 }
5436
726a989a
RB
5437 link_next = NULL_TREE;
5438 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6de9cd9a 5439 {
726a989a
RB
5440 link_next = TREE_CHAIN (link);
5441 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6de9cd9a
DN
5442 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5443 oconstraints, &allows_mem, &allows_reg);
5444
f497c16c
JJ
5445 /* If we can't make copies, we can only accept memory. */
5446 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
5447 {
5448 if (allows_mem)
5449 allows_reg = 0;
5450 else
5451 {
5452 error ("impossible constraint in %<asm%>");
5453 error ("non-memory input %d must stay in memory", i);
5454 return GS_ERROR;
5455 }
5456 }
5457
6de9cd9a
DN
5458 /* If the operand is a memory input, it should be an lvalue. */
5459 if (!allows_reg && allows_mem)
5460 {
502c5084
JJ
5461 tree inputv = TREE_VALUE (link);
5462 STRIP_NOPS (inputv);
5463 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
5464 || TREE_CODE (inputv) == PREINCREMENT_EXPR
5465 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
8f1e28e0
MP
5466 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
5467 || TREE_CODE (inputv) == MODIFY_EXPR)
502c5084 5468 TREE_VALUE (link) = error_mark_node;
6de9cd9a
DN
5469 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5470 is_gimple_lvalue, fb_lvalue | fb_mayfail);
bdd3aea6
JJ
5471 if (tret != GS_ERROR)
5472 {
5473 /* Unlike output operands, memory inputs are not guaranteed
5474 to be lvalues by the FE, and while the expressions are
5475 marked addressable there, if it is e.g. a statement
5476 expression, temporaries in it might not end up being
5477 addressable. They might already be used in the IL and thus
5478 it is too late to make them addressable now though. */
5479 tree x = TREE_VALUE (link);
5480 while (handled_component_p (x))
5481 x = TREE_OPERAND (x, 0);
5482 if (TREE_CODE (x) == MEM_REF
5483 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
5484 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
5485 if ((TREE_CODE (x) == VAR_DECL
5486 || TREE_CODE (x) == PARM_DECL
5487 || TREE_CODE (x) == RESULT_DECL)
5488 && !TREE_ADDRESSABLE (x)
5489 && is_gimple_reg (x))
5490 {
5491 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
5492 input_location), 0,
5493 "memory input %d is not directly addressable",
5494 i);
5495 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
5496 }
5497 }
936d04b6 5498 mark_addressable (TREE_VALUE (link));
6de9cd9a
DN
5499 if (tret == GS_ERROR)
5500 {
bdd3aea6
JJ
5501 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
5502 "memory input %d is not directly addressable", i);
6de9cd9a
DN
5503 ret = tret;
5504 }
5505 }
5506 else
5507 {
5508 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
e670d9e4 5509 is_gimple_asm_val, fb_rvalue);
6de9cd9a
DN
5510 if (tret == GS_ERROR)
5511 ret = tret;
5512 }
726a989a
RB
5513
5514 TREE_CHAIN (link) = NULL_TREE;
9771b263 5515 vec_safe_push (inputs, link);
6de9cd9a 5516 }
b8698a0f 5517
ca081cc8
EB
5518 link_next = NULL_TREE;
5519 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
5520 {
5521 link_next = TREE_CHAIN (link);
5522 TREE_CHAIN (link) = NULL_TREE;
5523 vec_safe_push (clobbers, link);
5524 }
1c384bf1 5525
ca081cc8
EB
5526 link_next = NULL_TREE;
5527 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
5528 {
5529 link_next = TREE_CHAIN (link);
5530 TREE_CHAIN (link) = NULL_TREE;
5531 vec_safe_push (labels, link);
5532 }
726a989a 5533
a406865a
RG
5534 /* Do not add ASMs with errors to the gimple IL stream. */
5535 if (ret != GS_ERROR)
5536 {
5537 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
1c384bf1 5538 inputs, outputs, clobbers, labels);
726a989a 5539
15a85b05 5540 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
a406865a
RG
5541 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
5542
5543 gimplify_seq_add_stmt (pre_p, stmt);
5544 }
6de9cd9a
DN
5545
5546 return ret;
5547}
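A hedged illustration of the in/out handling in gimplify_asm_expr (not part of the sources): a "+r" read-write operand is split during gimplification into an "=r" output plus a matching numbered input, as described in the loop over ASM_OUTPUTS above.

/* Illustrative only: the empty template keeps the example portable;
   the point is the "+r" read-write constraint. */
int launder (int x)
{
  asm ("" : "+r" (x));   /* split into "=r" (x) output + "0" (x) input */
  return x;
}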
5548
5549/* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
726a989a 5550 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6de9cd9a
DN
5551 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
5552 return to this function.
5553
5554 FIXME should we complexify the prequeue handling instead? Or use flags
5555 for all the cleanups and let the optimizer tighten them up? The current
5556 code seems pretty fragile; it will break on a cleanup within any
5557 non-conditional nesting. But any such nesting would be broken, anyway;
5558 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
5559 and continues out of it. We can do that at the RTL level, though, so
5560 having an optimizer to tighten up try/finally regions would be a Good
5561 Thing. */
5562
5563static enum gimplify_status
726a989a 5564gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a 5565{
726a989a
RB
5566 gimple_stmt_iterator iter;
5567 gimple_seq body_sequence = NULL;
6de9cd9a 5568
325c3691 5569 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6de9cd9a
DN
5570
5571 /* We only care about the number of conditions between the innermost
df77f454
JM
5572 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
5573 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6de9cd9a 5574 int old_conds = gimplify_ctxp->conditions;
726a989a 5575 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
32be32af 5576 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6de9cd9a 5577 gimplify_ctxp->conditions = 0;
726a989a 5578 gimplify_ctxp->conditional_cleanups = NULL;
32be32af 5579 gimplify_ctxp->in_cleanup_point_expr = true;
6de9cd9a 5580
726a989a 5581 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6de9cd9a
DN
5582
5583 gimplify_ctxp->conditions = old_conds;
df77f454 5584 gimplify_ctxp->conditional_cleanups = old_cleanups;
32be32af 5585 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6de9cd9a 5586
726a989a 5587 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6de9cd9a 5588 {
355fe088 5589 gimple *wce = gsi_stmt (iter);
6de9cd9a 5590
726a989a 5591 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6de9cd9a 5592 {
726a989a 5593 if (gsi_one_before_end_p (iter))
6de9cd9a 5594 {
726a989a
RB
5595 /* Note that gsi_insert_seq_before and gsi_remove do not
5596 scan operands, unlike some other sequence mutators. */
ae0595b0
RG
5597 if (!gimple_wce_cleanup_eh_only (wce))
5598 gsi_insert_seq_before_without_update (&iter,
5599 gimple_wce_cleanup (wce),
5600 GSI_SAME_STMT);
726a989a 5601 gsi_remove (&iter, true);
6de9cd9a
DN
5602 break;
5603 }
5604 else
5605 {
538dd0b7 5606 gtry *gtry;
726a989a
RB
5607 gimple_seq seq;
5608 enum gimple_try_flags kind;
40aac948 5609
726a989a
RB
5610 if (gimple_wce_cleanup_eh_only (wce))
5611 kind = GIMPLE_TRY_CATCH;
40aac948 5612 else
726a989a
RB
5613 kind = GIMPLE_TRY_FINALLY;
5614 seq = gsi_split_seq_after (iter);
5615
82d6e6fc 5616 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
726a989a
RB
5617 /* Do not use gsi_replace here, as it may scan operands.
5618 We want to do a simple structural modification only. */
355a7673 5619 gsi_set_stmt (&iter, gtry);
daa6e488 5620 iter = gsi_start (gtry->eval);
6de9cd9a
DN
5621 }
5622 }
5623 else
726a989a 5624 gsi_next (&iter);
6de9cd9a
DN
5625 }
5626
726a989a 5627 gimplify_seq_add_seq (pre_p, body_sequence);
6de9cd9a
DN
5628 if (temp)
5629 {
5630 *expr_p = temp;
6de9cd9a
DN
5631 return GS_OK;
5632 }
5633 else
5634 {
726a989a 5635 *expr_p = NULL;
6de9cd9a
DN
5636 return GS_ALL_DONE;
5637 }
5638}
5639
5640/* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
726a989a
RB
5641 is the cleanup action required. EH_ONLY is true if the cleanup should
5642 only be executed if an exception is thrown, not on normal exit. */
6de9cd9a
DN
5643
5644static void
726a989a 5645gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
6de9cd9a 5646{
355fe088 5647 gimple *wce;
726a989a 5648 gimple_seq cleanup_stmts = NULL;
6de9cd9a
DN
5649
5650 /* Errors can result in improperly nested cleanups. Which results in
726a989a 5651 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
1da2ed5f 5652 if (seen_error ())
6de9cd9a
DN
5653 return;
5654
5655 if (gimple_conditional_context ())
5656 {
5657 /* If we're in a conditional context, this is more complex. We only
5658 want to run the cleanup if we actually ran the initialization that
5659 necessitates it, but we want to run it after the end of the
5660 conditional context. So we wrap the try/finally around the
5661 condition and use a flag to determine whether or not to actually
5662 run the destructor. Thus
5663
5664 test ? f(A()) : 0
5665
5666 becomes (approximately)
5667
5668 flag = 0;
5669 try {
5670 if (test) { A::A(temp); flag = 1; val = f(temp); }
5671 else { val = 0; }
5672 } finally {
5673 if (flag) A::~A(temp);
5674 }
5675 val
5676 */
6de9cd9a 5677 tree flag = create_tmp_var (boolean_type_node, "cleanup");
538dd0b7
DM
5678 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
5679 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
726a989a 5680
b4257cfc 5681 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
726a989a
RB
5682 gimplify_stmt (&cleanup, &cleanup_stmts);
5683 wce = gimple_build_wce (cleanup_stmts);
5684
5685 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5686 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5687 gimplify_seq_add_stmt (pre_p, ftrue);
6de9cd9a
DN
5688
5689 /* Because of this manipulation, and the EH edges that jump
5690 threading cannot redirect, the temporary (VAR) will appear
5691 to be used uninitialized. Don't warn. */
5692 TREE_NO_WARNING (var) = 1;
5693 }
5694 else
5695 {
726a989a
RB
5696 gimplify_stmt (&cleanup, &cleanup_stmts);
5697 wce = gimple_build_wce (cleanup_stmts);
5698 gimple_wce_set_cleanup_eh_only (wce, eh_only);
5699 gimplify_seq_add_stmt (pre_p, wce);
6de9cd9a 5700 }
6de9cd9a
DN
5701}
5702
5703/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
5704
5705static enum gimplify_status
726a989a 5706gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6de9cd9a
DN
5707{
5708 tree targ = *expr_p;
5709 tree temp = TARGET_EXPR_SLOT (targ);
5710 tree init = TARGET_EXPR_INITIAL (targ);
5711 enum gimplify_status ret;
5712
5713 if (init)
5714 {
d0ad58f9
JM
5715 tree cleanup = NULL_TREE;
5716
3a5b9284 5717 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
786025ea
JJ
5718 to the temps list. Handle also variable length TARGET_EXPRs. */
5719 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5720 {
5721 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5722 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
5723 gimplify_vla_decl (temp, pre_p);
5724 }
5725 else
5726 gimple_add_tmp_var (temp);
6de9cd9a 5727
3a5b9284
RH
5728 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5729 expression is supposed to initialize the slot. */
5730 if (VOID_TYPE_P (TREE_TYPE (init)))
5731 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5732 else
325c3691 5733 {
726a989a
RB
5734 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5735 init = init_expr;
5736 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5737 init = NULL;
5738 ggc_free (init_expr);
325c3691 5739 }
3a5b9284 5740 if (ret == GS_ERROR)
abc67de1
SM
5741 {
5742 /* PR c++/28266 Make sure this is expanded only once. */
5743 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5744 return GS_ERROR;
5745 }
726a989a
RB
5746 if (init)
5747 gimplify_and_add (init, pre_p);
6de9cd9a
DN
5748
5749 /* If needed, push the cleanup for the temp. */
5750 if (TARGET_EXPR_CLEANUP (targ))
d0ad58f9
JM
5751 {
5752 if (CLEANUP_EH_ONLY (targ))
5753 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5754 CLEANUP_EH_ONLY (targ), pre_p);
5755 else
5756 cleanup = TARGET_EXPR_CLEANUP (targ);
5757 }
5758
5759 /* Add a clobber for the temporary going out of scope, like
5760 gimplify_bind_expr. */
32be32af 5761 if (gimplify_ctxp->in_cleanup_point_expr
87e2a8fd
XDL
5762 && needs_to_live_in_memory (temp)
5763 && flag_stack_reuse == SR_ALL)
d0ad58f9 5764 {
9771b263
DN
5765 tree clobber = build_constructor (TREE_TYPE (temp),
5766 NULL);
d0ad58f9
JM
5767 TREE_THIS_VOLATILE (clobber) = true;
5768 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
5769 if (cleanup)
5770 cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
5771 clobber);
5772 else
5773 cleanup = clobber;
5774 }
5775
5776 if (cleanup)
5777 gimple_push_cleanup (temp, cleanup, false, pre_p);
6de9cd9a
DN
5778
5779 /* Only expand this once. */
5780 TREE_OPERAND (targ, 3) = init;
5781 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5782 }
282899df 5783 else
6de9cd9a 5784 /* We should have expanded this before. */
282899df 5785 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
6de9cd9a
DN
5786
5787 *expr_p = temp;
5788 return GS_OK;
5789}
5790
5791/* Gimplification of expression trees. */
5792
726a989a
RB
5793/* Gimplify an expression which appears at statement context. The
5794 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5795 NULL, a new sequence is allocated.
6de9cd9a 5796
726a989a
RB
5797 Return true if we actually added a statement to the queue. */
5798
5799bool
5800gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6de9cd9a 5801{
726a989a 5802 gimple_seq_node last;
6de9cd9a 5803
726a989a
RB
5804 last = gimple_seq_last (*seq_p);
5805 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5806 return last != gimple_seq_last (*seq_p);
6de9cd9a
DN
5807}
5808
953ff289
DN
5809/* Add FIRSTPRIVATE entries for DECL in the surrounding OpenMP parallels
5810 to CTX. If entries already exist, force them to be some flavor of private.
5811 If there is no enclosing parallel, do nothing. */
5812
5813void
5814omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5815{
5816 splay_tree_node n;
5817
d9a6bd32 5818 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
953ff289
DN
5819 return;
5820
5821 do
5822 {
5823 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5824 if (n != NULL)
5825 {
5826 if (n->value & GOVD_SHARED)
5827 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
acf0174b
JJ
5828 else if (n->value & GOVD_MAP)
5829 n->value |= GOVD_MAP_TO_ONLY;
953ff289
DN
5830 else
5831 return;
5832 }
d9a6bd32
JJ
5833 else if ((ctx->region_type & ORT_TARGET) != 0)
5834 {
5835 if (ctx->target_map_scalars_firstprivate)
5836 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5837 else
5838 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
5839 }
74bf76ed 5840 else if (ctx->region_type != ORT_WORKSHARE
acf0174b 5841 && ctx->region_type != ORT_SIMD
182190f2
NS
5842 && ctx->region_type != ORT_ACC
5843 && !(ctx->region_type & ORT_TARGET_DATA))
953ff289
DN
5844 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5845
5846 ctx = ctx->outer_context;
5847 }
5848 while (ctx);
5849}
5850
5851/* Similarly for each of the type sizes of TYPE. */
5852
5853static void
5854omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5855{
5856 if (type == NULL || type == error_mark_node)
5857 return;
5858 type = TYPE_MAIN_VARIANT (type);
5859
6e2830c3 5860 if (ctx->privatized_types->add (type))
953ff289
DN
5861 return;
5862
5863 switch (TREE_CODE (type))
5864 {
5865 case INTEGER_TYPE:
5866 case ENUMERAL_TYPE:
5867 case BOOLEAN_TYPE:
953ff289 5868 case REAL_TYPE:
325217ed 5869 case FIXED_POINT_TYPE:
953ff289
DN
5870 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5871 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5872 break;
5873
5874 case ARRAY_TYPE:
5875 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5876 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5877 break;
5878
5879 case RECORD_TYPE:
5880 case UNION_TYPE:
5881 case QUAL_UNION_TYPE:
5882 {
5883 tree field;
910ad8de 5884 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
953ff289
DN
5885 if (TREE_CODE (field) == FIELD_DECL)
5886 {
5887 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5888 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5889 }
5890 }
5891 break;
5892
5893 case POINTER_TYPE:
5894 case REFERENCE_TYPE:
5895 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5896 break;
5897
5898 default:
5899 break;
5900 }
5901
5902 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5903 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5904 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5905}
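A hedged OpenMP sketch of why omp_firstprivatize_variable and omp_firstprivatize_type_sizes exist (not part of the sources): privatizing a variable-length array inside a parallel region needs the array's size on each thread, so the gimplified size expressions and type bounds are firstprivatized on the enclosing parallel. The sketch assumes the usual OpenMP C rule that variably modified types may appear in a private clause.

/* Illustrative only. */
void scratch (int n)
{
  int a[n];
  #pragma omp parallel private(a)
  {
    a[0] = 0;   /* each thread allocates its own n-element copy, so n's
                   size temporaries must be firstprivate on the parallel */
  }
}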
5906
41dbbb37 5907/* Add an entry for DECL in the OMP context CTX with FLAGS. */
953ff289
DN
5908
5909static void
5910omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5911{
5912 splay_tree_node n;
5913 unsigned int nflags;
5914 tree t;
5915
d9a6bd32 5916 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
953ff289
DN
5917 return;
5918
5919 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
5920 there are constructors involved somewhere. */
5921 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5922 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5923 flags |= GOVD_SEEN;
5924
5925 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
d9a6bd32 5926 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
953ff289
DN
5927 {
5928 /* We shouldn't be re-adding the decl with the same data
5929 sharing class. */
5930 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
953ff289 5931 nflags = n->value | flags;
182190f2
NS
5932 /* The only combination of data sharing classes we should see is
5933 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
5934 reduction variables to be used in data sharing clauses. */
5935 gcc_assert ((ctx->region_type & ORT_ACC) != 0
5936 || ((nflags & GOVD_DATA_SHARE_CLASS)
5937 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
74bf76ed 5938 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
953ff289
DN
5939 n->value = nflags;
5940 return;
5941 }
5942
5943 /* When adding a variable-sized variable, we have to handle all sorts
b8698a0f 5944 of additional bits of data: the pointer replacement variable, and
953ff289 5945 the parameters of the type. */
4c923c28 5946 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
953ff289
DN
5947 {
5948 /* Add the pointer replacement variable as PRIVATE if the variable
5949 replacement is private, else FIRSTPRIVATE since we'll need the
5950 address of the original variable either for SHARED, or for the
5951 copy into or out of the context. */
5952 if (!(flags & GOVD_LOCAL))
5953 {
41dbbb37
TS
5954 if (flags & GOVD_MAP)
5955 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
5956 else if (flags & GOVD_PRIVATE)
5957 nflags = GOVD_PRIVATE;
d9a6bd32
JJ
5958 else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
5959 && (flags & GOVD_FIRSTPRIVATE))
5960 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
41dbbb37
TS
5961 else
5962 nflags = GOVD_FIRSTPRIVATE;
953ff289
DN
5963 nflags |= flags & GOVD_SEEN;
5964 t = DECL_VALUE_EXPR (decl);
5965 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5966 t = TREE_OPERAND (t, 0);
5967 gcc_assert (DECL_P (t));
5968 omp_add_variable (ctx, t, nflags);
5969 }
5970
5971 /* Add all of the variable and type parameters (which should have
5972 been gimplified to a formal temporary) as FIRSTPRIVATE. */
5973 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5974 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5975 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5976
5977 /* The variable-sized variable itself is never SHARED, only some form
5978 of PRIVATE. The sharing would take place via the pointer variable
5979 which we remapped above. */
5980 if (flags & GOVD_SHARED)
5981 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5982 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5983
b8698a0f 5984 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
953ff289
DN
5985 alloca statement we generate for the variable, so make sure it
5986 is available. This isn't automatically needed for the SHARED
4288fea2
JJ
5987 case, since we won't be allocating local storage then.
5988 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
5989 in this case omp_notice_variable will be called later
5990 on when it is gimplified. */
acf0174b 5991 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
423ed416 5992 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
953ff289
DN
5993 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5994 }
acf0174b
JJ
5995 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
5996 && lang_hooks.decls.omp_privatize_by_reference (decl))
953ff289 5997 {
953ff289
DN
5998 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5999
6000 /* Similar to the direct variable sized case above, we'll need the
6001 size of references being privatized. */
6002 if ((flags & GOVD_SHARED) == 0)
6003 {
6004 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
d9a6bd32 6005 if (DECL_P (t))
953ff289
DN
6006 omp_notice_variable (ctx, t, true);
6007 }
6008 }
6009
74bf76ed
JJ
6010 if (n != NULL)
6011 n->value |= flags;
6012 else
6013 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
2c71d454
CLT
6014
6015 /* For reduction clauses in OpenACC loop directives, by default create a
6016 copy clause on the enclosing parallel construct for carrying back the
6017 results. */
6018 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
6019 {
6020 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
6021 while (outer_ctx)
6022 {
6023 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
6024 if (n != NULL)
6025 {
6026 /* Ignore local variables and explicitly declared clauses. */
6027 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
6028 break;
6029 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
6030 {
6031 /* According to the OpenACC spec, such a reduction variable
6032 should already have a copy map on a kernels construct,
6033 verify that here. */
6034 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
6035 && (n->value & GOVD_MAP));
6036 }
6037 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6038 {
6039 /* Remove firstprivate and make it a copy map. */
6040 n->value &= ~GOVD_FIRSTPRIVATE;
6041 n->value |= GOVD_MAP;
6042 }
6043 }
6044 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6045 {
6046 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
6047 GOVD_MAP | GOVD_SEEN);
6048 break;
6049 }
6050 outer_ctx = outer_ctx->outer_context;
6051 }
6052 }
953ff289
DN
6053}
6054
41dbbb37 6055/* Notice a threadprivate variable DECL used in OMP context CTX.
f22f4340
JJ
6056 This just prints out diagnostics about threadprivate variable uses
6057 in untied tasks. If DECL2 is non-NULL, prevent this warning
6058 on that variable. */
6059
6060static bool
6061omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
6062 tree decl2)
6063{
6064 splay_tree_node n;
acf0174b
JJ
6065 struct gimplify_omp_ctx *octx;
6066
6067 for (octx = ctx; octx; octx = octx->outer_context)
d9a6bd32 6068 if ((octx->region_type & ORT_TARGET) != 0)
acf0174b
JJ
6069 {
6070 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
6071 if (n == NULL)
6072 {
6073 error ("threadprivate variable %qE used in target region",
6074 DECL_NAME (decl));
6075 error_at (octx->location, "enclosing target region");
6076 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
6077 }
6078 if (decl2)
6079 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
6080 }
f22f4340
JJ
6081
6082 if (ctx->region_type != ORT_UNTIED_TASK)
6083 return false;
6084 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6085 if (n == NULL)
6086 {
ad19c4be
EB
6087 error ("threadprivate variable %qE used in untied task",
6088 DECL_NAME (decl));
f22f4340
JJ
6089 error_at (ctx->location, "enclosing task");
6090 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
6091 }
6092 if (decl2)
6093 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
6094 return false;
6095}
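A hedged illustration of the diagnostic issued by omp_notice_threadprivate_variable (not part of the sources): referencing a threadprivate variable inside an untied task is rejected, since the task may migrate between threads. The exact error wording may differ by GCC version.

/* Illustrative only: expected to trigger the "used in untied task" error. */
int counter;
#pragma omp threadprivate (counter)

void bump (void)
{
  #pragma omp task untied
  counter++;
}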
6096
6e232ba4
JN
6097/* Return true if global var DECL is device resident. */
6098
6099static bool
6100device_resident_p (tree decl)
6101{
6102 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
6103
6104 if (!attr)
6105 return false;
6106
6107 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
6108 {
6109 tree c = TREE_VALUE (t);
6110 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
6111 return true;
6112 }
6113
6114 return false;
6115}
6116
72500605
NS
6117/* Determine outer default flags for DECL mentioned in an OMP region
6118 but not declared in an enclosing clause.
6119
6120 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
6121 remapped firstprivate instead of shared. To some extent this is
6122 addressed in omp_firstprivatize_type_sizes, but not
6123 effectively. */
6124
6125static unsigned
6126omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
6127 bool in_code, unsigned flags)
6128{
6129 enum omp_clause_default_kind default_kind = ctx->default_kind;
6130 enum omp_clause_default_kind kind;
6131
6132 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
6133 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
6134 default_kind = kind;
6135
6136 switch (default_kind)
6137 {
6138 case OMP_CLAUSE_DEFAULT_NONE:
6139 {
6140 const char *rtype;
6141
6142 if (ctx->region_type & ORT_PARALLEL)
6143 rtype = "parallel";
6144 else if (ctx->region_type & ORT_TASK)
6145 rtype = "task";
6146 else if (ctx->region_type & ORT_TEAMS)
6147 rtype = "teams";
6148 else
6149 gcc_unreachable ();
6150
6151 error ("%qE not specified in enclosing %s",
6152 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
6153 error_at (ctx->location, "enclosing %s", rtype);
6154 }
6155 /* FALLTHRU */
6156 case OMP_CLAUSE_DEFAULT_SHARED:
6157 flags |= GOVD_SHARED;
6158 break;
6159 case OMP_CLAUSE_DEFAULT_PRIVATE:
6160 flags |= GOVD_PRIVATE;
6161 break;
6162 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
6163 flags |= GOVD_FIRSTPRIVATE;
6164 break;
6165 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
6166 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
6167 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
6168 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
6169 {
6170 omp_notice_variable (octx, decl, in_code);
6171 for (; octx; octx = octx->outer_context)
6172 {
6173 splay_tree_node n2;
6174
72500605 6175 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
e01d41e5
JJ
6176 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
6177 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
6178 continue;
72500605
NS
6179 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
6180 {
6181 flags |= GOVD_FIRSTPRIVATE;
6182 goto found_outer;
6183 }
6184 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
6185 {
6186 flags |= GOVD_SHARED;
6187 goto found_outer;
6188 }
6189 }
6190 }
6191
6192 if (TREE_CODE (decl) == PARM_DECL
6193 || (!is_global_var (decl)
6194 && DECL_CONTEXT (decl) == current_function_decl))
6195 flags |= GOVD_FIRSTPRIVATE;
6196 else
6197 flags |= GOVD_SHARED;
6198 found_outer:
6199 break;
6200
6201 default:
6202 gcc_unreachable ();
6203 }
6204
6205 return flags;
6206}
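A hedged illustration of the OMP_CLAUSE_DEFAULT_NONE branch of omp_default_clause (not part of the sources): with default(none), every variable referenced in the region needs an explicit data-sharing clause, otherwise the "not specified in enclosing" error above is emitted.

/* Illustrative only: compiles cleanly because every variable is listed;
   dropping shared(a, n) would trigger the default(none) error above. */
int total (int n, const int *a)
{
  int s = 0;
  #pragma omp parallel for default(none) shared(a, n) reduction(+:s)
  for (int i = 0; i < n; i++)
    s += a[i];
  return s;
}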
6207
fffeedeb
NS
6208
6209/* Determine outer default flags for DECL mentioned in an OACC region
6210 but not declared in an enclosing clause. */
6211
6212static unsigned
6213oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
6214{
6215 const char *rkind;
6e232ba4 6216 bool on_device = false;
33a126a6
CP
6217 tree type = TREE_TYPE (decl);
6218
6219 if (lang_hooks.decls.omp_privatize_by_reference (decl))
6220 type = TREE_TYPE (type);
6e232ba4
JN
6221
6222 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
6223 && is_global_var (decl)
6224 && device_resident_p (decl))
6225 {
6226 on_device = true;
6227 flags |= GOVD_MAP_TO_ONLY;
6228 }
fffeedeb
NS
6229
6230 switch (ctx->region_type)
6231 {
6232 default:
6233 gcc_unreachable ();
6234
6235 case ORT_ACC_KERNELS:
db0f1c7a
TV
6236 /* Scalars default to 'copy' under kernels, non-scalars default to
6237 'present_or_copy'. */
fffeedeb 6238 flags |= GOVD_MAP;
33a126a6 6239 if (!AGGREGATE_TYPE_P (type))
db0f1c7a
TV
6240 flags |= GOVD_MAP_FORCE;
6241
fffeedeb
NS
6242 rkind = "kernels";
6243 break;
6244
6245 case ORT_ACC_PARALLEL:
6246 {
6e232ba4 6247 if (on_device || AGGREGATE_TYPE_P (type))
fffeedeb
NS
6248 /* Aggregates default to 'present_or_copy'. */
6249 flags |= GOVD_MAP;
6250 else
6251 /* Scalars default to 'firstprivate'. */
6252 flags |= GOVD_FIRSTPRIVATE;
6253 rkind = "parallel";
6254 }
6255 break;
6256 }
6257
6258 if (DECL_ARTIFICIAL (decl))
6259 ; /* We can get compiler-generated decls, and should not complain
6260 about them. */
6261 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
6262 {
7c602779 6263 error ("%qE not specified in enclosing OpenACC %qs construct",
fffeedeb 6264 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
20e8b68f 6265 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
fffeedeb
NS
6266 }
6267 else
6268 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
6269
6270 return flags;
6271}
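A hedged OpenACC sketch of the defaults computed by oacc_default_clause (not part of the sources): on a parallel construct, unlisted aggregates get an implicit (present_or_)copy map while unlisted scalars become firstprivate. The exact implicit clauses shown in the comments are an assumption about typical GCC behaviour.

/* Illustrative only. */
#define N 1024
float data[N];

void scale (float s)
{
  #pragma acc parallel loop   /* data: implicit copy map; s: firstprivate */
  for (int i = 0; i < N; i++)
    data[i] *= s;
}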
6272
41dbbb37 6273/* Record the fact that DECL was used within the OMP context CTX.
953ff289
DN
6274 IN_CODE is true when real code uses DECL, and false when we should
6275 merely emit default(none) errors. Return true if DECL is going to
6276 be remapped and thus DECL shouldn't be gimplified into its
6277 DECL_VALUE_EXPR (if any). */
6278
6279static bool
6280omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
6281{
6282 splay_tree_node n;
6283 unsigned flags = in_code ? GOVD_SEEN : 0;
6284 bool ret = false, shared;
6285
b504a918 6286 if (error_operand_p (decl))
953ff289
DN
6287 return false;
6288
d9a6bd32
JJ
6289 if (ctx->region_type == ORT_NONE)
6290 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
6291
953ff289
DN
6292 if (is_global_var (decl))
6293 {
eb077516 6294 /* Threadprivate variables are predetermined. */
953ff289 6295 if (DECL_THREAD_LOCAL_P (decl))
f22f4340 6296 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
953ff289
DN
6297
6298 if (DECL_HAS_VALUE_EXPR_P (decl))
6299 {
6300 tree value = get_base_address (DECL_VALUE_EXPR (decl));
6301
6302 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
f22f4340 6303 return omp_notice_threadprivate_variable (ctx, decl, value);
953ff289 6304 }
eb077516
JN
6305
6306 if (gimplify_omp_ctxp->outer_context == NULL
6307 && VAR_P (decl)
6308 && get_oacc_fn_attrib (current_function_decl))
6309 {
6310 location_t loc = DECL_SOURCE_LOCATION (decl);
6311
6312 if (lookup_attribute ("omp declare target link",
6313 DECL_ATTRIBUTES (decl)))
6314 {
6315 error_at (loc,
6316 "%qE with %<link%> clause used in %<routine%> function",
6317 DECL_NAME (decl));
6318 return false;
6319 }
6320 else if (!lookup_attribute ("omp declare target",
6321 DECL_ATTRIBUTES (decl)))
6322 {
6323 error_at (loc,
6324 "%qE requires a %<declare%> directive for use "
6325 "in a %<routine%> function", DECL_NAME (decl));
6326 return false;
6327 }
6328 }
953ff289
DN
6329 }
6330
6331 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
d9a6bd32 6332 if ((ctx->region_type & ORT_TARGET) != 0)
acf0174b 6333 {
f014c653 6334 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
acf0174b
JJ
6335 if (n == NULL)
6336 {
d9a6bd32
JJ
6337 unsigned nflags = flags;
6338 if (ctx->target_map_pointers_as_0len_arrays
6339 || ctx->target_map_scalars_firstprivate)
6340 {
6341 bool is_declare_target = false;
6342 bool is_scalar = false;
6343 if (is_global_var (decl)
6344 && varpool_node::get_create (decl)->offloadable)
6345 {
6346 struct gimplify_omp_ctx *octx;
6347 for (octx = ctx->outer_context;
6348 octx; octx = octx->outer_context)
6349 {
6350 n = splay_tree_lookup (octx->variables,
6351 (splay_tree_key)decl);
6352 if (n
6353 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
6354 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6355 break;
6356 }
6357 is_declare_target = octx == NULL;
6358 }
6359 if (!is_declare_target && ctx->target_map_scalars_firstprivate)
6360 {
6361 tree type = TREE_TYPE (decl);
6362 if (TREE_CODE (type) == REFERENCE_TYPE)
6363 type = TREE_TYPE (type);
6364 if (TREE_CODE (type) == COMPLEX_TYPE)
6365 type = TREE_TYPE (type);
6366 if (INTEGRAL_TYPE_P (type)
6367 || SCALAR_FLOAT_TYPE_P (type)
6368 || TREE_CODE (type) == POINTER_TYPE)
6369 is_scalar = true;
6370 }
6371 if (is_declare_target)
6372 ;
6373 else if (ctx->target_map_pointers_as_0len_arrays
6374 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
6375 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
6376 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
6377 == POINTER_TYPE)))
6378 nflags |= GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
6379 else if (is_scalar)
6380 nflags |= GOVD_FIRSTPRIVATE;
6381 }
182190f2
NS
6382
6383 struct gimplify_omp_ctx *octx = ctx->outer_context;
6384 if ((ctx->region_type & ORT_ACC) && octx)
acf0174b 6385 {
182190f2
NS
6386 /* Look in outer OpenACC contexts, to see if there's a
6387 data attribute for this variable. */
6388 omp_notice_variable (octx, decl, in_code);
6389
6390 for (; octx; octx = octx->outer_context)
6391 {
6392 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
6393 break;
6394 splay_tree_node n2
6395 = splay_tree_lookup (octx->variables,
6396 (splay_tree_key) decl);
6397 if (n2)
6398 {
37d5ad46
JB
6399 if (octx->region_type == ORT_ACC_HOST_DATA)
6400 error ("variable %qE declared in enclosing "
6401 "%<host_data%> region", DECL_NAME (decl));
182190f2 6402 nflags |= GOVD_MAP;
e46c7770
CP
6403 if (octx->region_type == ORT_ACC_DATA
6404 && (n2->value & GOVD_MAP_0LEN_ARRAY))
6405 nflags |= GOVD_MAP_0LEN_ARRAY;
182190f2
NS
6406 goto found_outer;
6407 }
6408 }
acf0174b 6409 }
182190f2
NS
6410
6411 {
6412 tree type = TREE_TYPE (decl);
6413
6414 if (nflags == flags
6415 && gimplify_omp_ctxp->target_firstprivatize_array_bases
6416 && lang_hooks.decls.omp_privatize_by_reference (decl))
6417 type = TREE_TYPE (type);
6418 if (nflags == flags
6419 && !lang_hooks.types.omp_mappable_type (type))
6420 {
6421 error ("%qD referenced in target region does not have "
6422 "a mappable type", decl);
6423 nflags |= GOVD_MAP | GOVD_EXPLICIT;
6424 }
6425 else if (nflags == flags)
fffeedeb
NS
6426 {
6427 if ((ctx->region_type & ORT_ACC) != 0)
6428 nflags = oacc_default_clause (ctx, decl, flags);
6429 else
6430 nflags |= GOVD_MAP;
6431 }
182190f2
NS
6432 }
6433 found_outer:
d9a6bd32 6434 omp_add_variable (ctx, decl, nflags);
acf0174b
JJ
6435 }
6436 else
f014c653
JJ
6437 {
6438 /* If nothing changed, there's nothing left to do. */
6439 if ((n->value & flags) == flags)
6440 return ret;
1a80d6b8
JJ
6441 flags |= n->value;
6442 n->value = flags;
f014c653 6443 }
acf0174b
JJ
6444 goto do_outer;
6445 }
6446
953ff289
DN
6447 if (n == NULL)
6448 {
74bf76ed 6449 if (ctx->region_type == ORT_WORKSHARE
acf0174b 6450 || ctx->region_type == ORT_SIMD
182190f2
NS
6451 || ctx->region_type == ORT_ACC
6452 || (ctx->region_type & ORT_TARGET_DATA) != 0)
953ff289
DN
6453 goto do_outer;
6454
72500605 6455 flags = omp_default_clause (ctx, decl, in_code, flags);
953ff289 6456
a68ab351
JJ
6457 if ((flags & GOVD_PRIVATE)
6458 && lang_hooks.decls.omp_private_outer_ref (decl))
6459 flags |= GOVD_PRIVATE_OUTER_REF;
6460
953ff289
DN
6461 omp_add_variable (ctx, decl, flags);
6462
6463 shared = (flags & GOVD_SHARED) != 0;
6464 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
6465 goto do_outer;
6466 }
6467
3ad6b266
JJ
6468 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
6469 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
da3d46cb
JJ
6470 && DECL_SIZE (decl))
6471 {
6472 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6473 {
6474 splay_tree_node n2;
6475 tree t = DECL_VALUE_EXPR (decl);
6476 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6477 t = TREE_OPERAND (t, 0);
6478 gcc_assert (DECL_P (t));
6479 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
6480 n2->value |= GOVD_SEEN;
6481 }
6482 else if (lang_hooks.decls.omp_privatize_by_reference (decl)
6483 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
6484 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
6485 != INTEGER_CST))
6486 {
6487 splay_tree_node n2;
6488 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
6489 gcc_assert (DECL_P (t));
6490 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
6491 if (n2)
6492 n2->value |= GOVD_SEEN;
6493 }
3ad6b266
JJ
6494 }
6495
953ff289
DN
6496 shared = ((flags | n->value) & GOVD_SHARED) != 0;
6497 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
6498
6499 /* If nothing changed, there's nothing left to do. */
6500 if ((n->value & flags) == flags)
6501 return ret;
6502 flags |= n->value;
6503 n->value = flags;
6504
6505 do_outer:
6506 /* If the variable is private in the current context, then we don't
6507 need to propagate anything to an outer context. */
a68ab351 6508 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
953ff289 6509 return ret;
41b37d5e
JJ
6510 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
6511 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
6512 return ret;
6513 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
6514 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
6515 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
6516 return ret;
953ff289
DN
6517 if (ctx->outer_context
6518 && omp_notice_variable (ctx->outer_context, decl, in_code))
6519 return true;
6520 return ret;
6521}
6522
6523/* Verify that DECL is private within CTX. If there's specific information
6524 to the contrary in the innermost scope, generate an error. */
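/* Illustrative note: for

       #pragma omp parallel for shared(i)
       for (i = 0; i < n; i++) ...

   the check below rejects the shared iteration variable with an
   "iteration variable ... should be private" error.  */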
6525
6526static bool
f7468577 6527omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
953ff289
DN
6528{
6529 splay_tree_node n;
6530
6531 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6532 if (n != NULL)
6533 {
6534 if (n->value & GOVD_SHARED)
6535 {
6536 if (ctx == gimplify_omp_ctxp)
f6a5ffbf 6537 {
74bf76ed
JJ
6538 if (simd)
6539 error ("iteration variable %qE is predetermined linear",
6540 DECL_NAME (decl));
6541 else
6542 error ("iteration variable %qE should be private",
6543 DECL_NAME (decl));
f6a5ffbf
JJ
6544 n->value = GOVD_PRIVATE;
6545 return true;
6546 }
6547 else
6548 return false;
953ff289 6549 }
761041be
JJ
6550 else if ((n->value & GOVD_EXPLICIT) != 0
6551 && (ctx == gimplify_omp_ctxp
a68ab351 6552 || (ctx->region_type == ORT_COMBINED_PARALLEL
761041be
JJ
6553 && gimplify_omp_ctxp->outer_context == ctx)))
6554 {
6555 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
4f1e4960
JM
6556 error ("iteration variable %qE should not be firstprivate",
6557 DECL_NAME (decl));
761041be 6558 else if ((n->value & GOVD_REDUCTION) != 0)
4f1e4960
JM
6559 error ("iteration variable %qE should not be reduction",
6560 DECL_NAME (decl));
e01d41e5
JJ
6561 else if (simd == 0 && (n->value & GOVD_LINEAR) != 0)
6562 error ("iteration variable %qE should not be linear",
6563 DECL_NAME (decl));
f7468577 6564 else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
74bf76ed
JJ
6565 error ("iteration variable %qE should not be lastprivate",
6566 DECL_NAME (decl));
6567 else if (simd && (n->value & GOVD_PRIVATE) != 0)
6568 error ("iteration variable %qE should not be private",
6569 DECL_NAME (decl));
f7468577 6570 else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
74bf76ed
JJ
6571 error ("iteration variable %qE is predetermined linear",
6572 DECL_NAME (decl));
761041be 6573 }
ca2b1311
JJ
6574 return (ctx == gimplify_omp_ctxp
6575 || (ctx->region_type == ORT_COMBINED_PARALLEL
6576 && gimplify_omp_ctxp->outer_context == ctx));
953ff289
DN
6577 }
6578
74bf76ed 6579 if (ctx->region_type != ORT_WORKSHARE
182190f2
NS
6580 && ctx->region_type != ORT_SIMD
6581 && ctx->region_type != ORT_ACC)
953ff289 6582 return false;
f6a5ffbf 6583 else if (ctx->outer_context)
74bf76ed 6584 return omp_is_private (ctx->outer_context, decl, simd);
ca2b1311 6585 return false;
953ff289
DN
6586}
6587
07b7aade
JJ
6588/* Return true if DECL is private within a parallel region
 6589   that binds to the current construct's context, or that appears in
 6590   the parallel region's REDUCTION clause.  */
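/* Illustrative note: this is used e.g. when validating copyprivate;
   for

       #pragma omp single copyprivate(x)

   'x' must be threadprivate or private in the outer context, otherwise
   the OMP_CLAUSE_COPYPRIVATE handling below reports an error.  */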
6591
6592static bool
cab37c89 6593omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
07b7aade
JJ
6594{
6595 splay_tree_node n;
6596
6597 do
6598 {
6599 ctx = ctx->outer_context;
6600 if (ctx == NULL)
d9a6bd32
JJ
6601 {
6602 if (is_global_var (decl))
6603 return false;
6604
 6605	  /* References might be private, but might be shared too.
 6606	     When checking for copyprivate, assume they might be
 6607	     private; otherwise assume they might be shared.  */
6608 if (copyprivate)
6609 return true;
6610
6611 if (lang_hooks.decls.omp_privatize_by_reference (decl))
6612 return false;
6613
6614 /* Treat C++ privatized non-static data members outside
6615 of the privatization the same. */
6616 if (omp_member_access_dummy_var (decl))
6617 return false;
6618
6619 return true;
6620 }
07b7aade 6621
e01d41e5
JJ
6622 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6623
6624 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6625 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
acf0174b
JJ
6626 continue;
6627
07b7aade 6628 if (n != NULL)
d9a6bd32
JJ
6629 {
6630 if ((n->value & GOVD_LOCAL) != 0
6631 && omp_member_access_dummy_var (decl))
6632 return false;
6633 return (n->value & GOVD_SHARED) == 0;
6634 }
07b7aade 6635 }
74bf76ed 6636 while (ctx->region_type == ORT_WORKSHARE
182190f2
NS
6637 || ctx->region_type == ORT_SIMD
6638 || ctx->region_type == ORT_ACC);
07b7aade
JJ
6639 return false;
6640}
6641
41b37d5e
JJ
6642/* Return true if the CTX is combined with distribute and thus
6643 lastprivate can't be supported. */
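/* Descriptive note: when this returns true, the lastprivate request is
   not propagated outward; see the OMP_CLAUSE_LASTPRIVATE and
   OMP_CLAUSE_LINEAR handling in gimplify_scan_omp_clauses and the
   clause adjustment in gimplify_adjust_omp_clauses below.  */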
6644
6645static bool
6646omp_no_lastprivate (struct gimplify_omp_ctx *ctx)
6647{
6648 do
6649 {
6650 if (ctx->outer_context == NULL)
6651 return false;
6652 ctx = ctx->outer_context;
6653 switch (ctx->region_type)
6654 {
6655 case ORT_WORKSHARE:
6656 if (!ctx->combined_loop)
6657 return false;
6658 if (ctx->distribute)
e01d41e5 6659 return lang_GNU_Fortran ();
41b37d5e
JJ
6660 break;
6661 case ORT_COMBINED_PARALLEL:
6662 break;
6663 case ORT_COMBINED_TEAMS:
e01d41e5 6664 return lang_GNU_Fortran ();
41b37d5e
JJ
6665 default:
6666 return false;
6667 }
6668 }
6669 while (1);
6670}
6671
d9a6bd32
JJ
6672/* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
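/* Descriptive note: used below to check whether the reduction
   placeholder is already declared inside OMP_CLAUSE_REDUCTION_INIT;
   if no DECL_EXPR is found, the placeholder is added to the context
   explicitly.  */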
6673
6674static tree
6675find_decl_expr (tree *tp, int *walk_subtrees, void *data)
6676{
6677 tree t = *tp;
6678
 6679  /* If this is the DECL_EXPR for the decl we are looking for, return it.  */
6680 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
6681 return t;
6682
6683 if (IS_TYPE_OR_DECL_P (t))
6684 *walk_subtrees = 0;
6685 return NULL_TREE;
6686}
6687
41dbbb37 6688/* Scan the OMP clauses in *LIST_P, installing mappings into a new
953ff289
DN
 6689   omp context and into previous omp contexts.  */
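/* Rough illustration (not normative): for

       #pragma omp parallel firstprivate(a) reduction(+:s)

   'a' is recorded in the new context as GOVD_FIRSTPRIVATE | GOVD_EXPLICIT
   and 's' as GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT, while enclosing
   contexts are informed via omp_notice_variable.  */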
6690
6691static void
726a989a 6692gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
d9a6bd32
JJ
6693 enum omp_region_type region_type,
6694 enum tree_code code)
953ff289
DN
6695{
6696 struct gimplify_omp_ctx *ctx, *outer_ctx;
6697 tree c;
d9a6bd32 6698 hash_map<tree, tree> *struct_map_to_clause = NULL;
e01d41e5 6699 tree *prev_list_p = NULL;
953ff289 6700
a68ab351 6701 ctx = new_omp_context (region_type);
953ff289 6702 outer_ctx = ctx->outer_context;
d9a6bd32
JJ
6703 if (code == OMP_TARGET && !lang_GNU_Fortran ())
6704 {
6705 ctx->target_map_pointers_as_0len_arrays = true;
6706 /* FIXME: For Fortran we want to set this too, when
6707 the Fortran FE is updated to OpenMP 4.5. */
6708 ctx->target_map_scalars_firstprivate = true;
6709 }
6710 if (!lang_GNU_Fortran ())
6711 switch (code)
6712 {
6713 case OMP_TARGET:
6714 case OMP_TARGET_DATA:
6715 case OMP_TARGET_ENTER_DATA:
6716 case OMP_TARGET_EXIT_DATA:
37d5ad46 6717 case OACC_HOST_DATA:
d9a6bd32
JJ
6718 ctx->target_firstprivatize_array_bases = true;
6719 default:
6720 break;
6721 }
953ff289
DN
6722
6723 while ((c = *list_p) != NULL)
6724 {
953ff289
DN
6725 bool remove = false;
6726 bool notice_outer = true;
07b7aade 6727 const char *check_non_private = NULL;
953ff289
DN
6728 unsigned int flags;
6729 tree decl;
6730
aaf46ef9 6731 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
6732 {
6733 case OMP_CLAUSE_PRIVATE:
6734 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
a68ab351
JJ
6735 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
6736 {
6737 flags |= GOVD_PRIVATE_OUTER_REF;
6738 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
6739 }
6740 else
6741 notice_outer = false;
953ff289
DN
6742 goto do_add;
6743 case OMP_CLAUSE_SHARED:
6744 flags = GOVD_SHARED | GOVD_EXPLICIT;
6745 goto do_add;
6746 case OMP_CLAUSE_FIRSTPRIVATE:
6747 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
07b7aade 6748 check_non_private = "firstprivate";
953ff289
DN
6749 goto do_add;
6750 case OMP_CLAUSE_LASTPRIVATE:
6751 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
07b7aade 6752 check_non_private = "lastprivate";
41b37d5e
JJ
6753 decl = OMP_CLAUSE_DECL (c);
6754 if (omp_no_lastprivate (ctx))
6755 {
6756 notice_outer = false;
6757 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
6758 }
6759 else if (error_operand_p (decl))
6760 goto do_add;
6761 else if (outer_ctx
e01d41e5
JJ
6762 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
6763 || outer_ctx->region_type == ORT_COMBINED_TEAMS)
41b37d5e
JJ
6764 && splay_tree_lookup (outer_ctx->variables,
6765 (splay_tree_key) decl) == NULL)
e01d41e5
JJ
6766 {
6767 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
6768 if (outer_ctx->outer_context)
6769 omp_notice_variable (outer_ctx->outer_context, decl, true);
6770 }
d9a6bd32
JJ
6771 else if (outer_ctx
6772 && (outer_ctx->region_type & ORT_TASK) != 0
6773 && outer_ctx->combined_loop
6774 && splay_tree_lookup (outer_ctx->variables,
6775 (splay_tree_key) decl) == NULL)
e01d41e5
JJ
6776 {
6777 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
6778 if (outer_ctx->outer_context)
6779 omp_notice_variable (outer_ctx->outer_context, decl, true);
6780 }
41b37d5e 6781 else if (outer_ctx
182190f2
NS
6782 && (outer_ctx->region_type == ORT_WORKSHARE
6783 || outer_ctx->region_type == ORT_ACC)
41b37d5e
JJ
6784 && outer_ctx->combined_loop
6785 && splay_tree_lookup (outer_ctx->variables,
6786 (splay_tree_key) decl) == NULL
6787 && !omp_check_private (outer_ctx, decl, false))
6788 {
6789 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
6790 if (outer_ctx->outer_context
6791 && (outer_ctx->outer_context->region_type
6792 == ORT_COMBINED_PARALLEL)
6793 && splay_tree_lookup (outer_ctx->outer_context->variables,
6794 (splay_tree_key) decl) == NULL)
e01d41e5
JJ
6795 {
6796 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
6797 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
6798 if (octx->outer_context)
6799 omp_notice_variable (octx->outer_context, decl, true);
6800 }
6801 else if (outer_ctx->outer_context)
6802 omp_notice_variable (outer_ctx->outer_context, decl, true);
41b37d5e 6803 }
953ff289
DN
6804 goto do_add;
6805 case OMP_CLAUSE_REDUCTION:
6806 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
182190f2
NS
6807 /* OpenACC permits reductions on private variables. */
6808 if (!(region_type & ORT_ACC))
6809 check_non_private = "reduction";
d9a6bd32
JJ
6810 decl = OMP_CLAUSE_DECL (c);
6811 if (TREE_CODE (decl) == MEM_REF)
6812 {
6813 tree type = TREE_TYPE (decl);
6814 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
381cdae4
RB
6815 NULL, is_gimple_val, fb_rvalue, false)
6816 == GS_ERROR)
d9a6bd32
JJ
6817 {
6818 remove = true;
6819 break;
6820 }
6821 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
6822 if (DECL_P (v))
6823 {
6824 omp_firstprivatize_variable (ctx, v);
6825 omp_notice_variable (ctx, v, true);
6826 }
6827 decl = TREE_OPERAND (decl, 0);
e01d41e5
JJ
6828 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
6829 {
6830 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
381cdae4 6831 NULL, is_gimple_val, fb_rvalue, false)
e01d41e5
JJ
6832 == GS_ERROR)
6833 {
6834 remove = true;
6835 break;
6836 }
6837 v = TREE_OPERAND (decl, 1);
6838 if (DECL_P (v))
6839 {
6840 omp_firstprivatize_variable (ctx, v);
6841 omp_notice_variable (ctx, v, true);
6842 }
6843 decl = TREE_OPERAND (decl, 0);
6844 }
d9a6bd32
JJ
6845 if (TREE_CODE (decl) == ADDR_EXPR
6846 || TREE_CODE (decl) == INDIRECT_REF)
6847 decl = TREE_OPERAND (decl, 0);
6848 }
6849 goto do_add_decl;
acf0174b
JJ
6850 case OMP_CLAUSE_LINEAR:
6851 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
6852 is_gimple_val, fb_rvalue) == GS_ERROR)
6853 {
6854 remove = true;
6855 break;
6856 }
41b37d5e
JJ
6857 else
6858 {
d9a6bd32
JJ
6859 if (code == OMP_SIMD
6860 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6861 {
6862 struct gimplify_omp_ctx *octx = outer_ctx;
6863 if (octx
6864 && octx->region_type == ORT_WORKSHARE
6865 && octx->combined_loop
6866 && !octx->distribute)
6867 {
6868 if (octx->outer_context
6869 && (octx->outer_context->region_type
6870 == ORT_COMBINED_PARALLEL))
6871 octx = octx->outer_context->outer_context;
6872 else
6873 octx = octx->outer_context;
6874 }
6875 if (octx
6876 && octx->region_type == ORT_WORKSHARE
6877 && octx->combined_loop
6878 && octx->distribute
6879 && !lang_GNU_Fortran ())
6880 {
6881 error_at (OMP_CLAUSE_LOCATION (c),
6882 "%<linear%> clause for variable other than "
6883 "loop iterator specified on construct "
6884 "combined with %<distribute%>");
6885 remove = true;
6886 break;
6887 }
6888 }
41b37d5e
JJ
6889 /* For combined #pragma omp parallel for simd, need to put
6890 lastprivate and perhaps firstprivate too on the
6891 parallel. Similarly for #pragma omp for simd. */
6892 struct gimplify_omp_ctx *octx = outer_ctx;
6893 decl = NULL_TREE;
6894 if (omp_no_lastprivate (ctx))
6895 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
6896 do
6897 {
6898 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6899 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6900 break;
6901 decl = OMP_CLAUSE_DECL (c);
6902 if (error_operand_p (decl))
6903 {
6904 decl = NULL_TREE;
6905 break;
6906 }
d9a6bd32
JJ
6907 flags = GOVD_SEEN;
6908 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6909 flags |= GOVD_FIRSTPRIVATE;
6910 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6911 flags |= GOVD_LASTPRIVATE;
41b37d5e
JJ
6912 if (octx
6913 && octx->region_type == ORT_WORKSHARE
6914 && octx->combined_loop)
6915 {
6916 if (octx->outer_context
6917 && (octx->outer_context->region_type
e01d41e5 6918 == ORT_COMBINED_PARALLEL))
41b37d5e
JJ
6919 octx = octx->outer_context;
6920 else if (omp_check_private (octx, decl, false))
6921 break;
6922 }
d9a6bd32
JJ
6923 else if (octx
6924 && (octx->region_type & ORT_TASK) != 0
6925 && octx->combined_loop)
6926 ;
6927 else if (octx
6928 && octx->region_type == ORT_COMBINED_PARALLEL
6929 && ctx->region_type == ORT_WORKSHARE
6930 && octx == outer_ctx)
6931 flags = GOVD_SEEN | GOVD_SHARED;
e01d41e5
JJ
6932 else if (octx
6933 && octx->region_type == ORT_COMBINED_TEAMS)
6934 flags = GOVD_SEEN | GOVD_SHARED;
d9a6bd32
JJ
6935 else if (octx
6936 && octx->region_type == ORT_COMBINED_TARGET)
e01d41e5
JJ
6937 {
6938 flags &= ~GOVD_LASTPRIVATE;
6939 if (flags == GOVD_SEEN)
6940 break;
6941 }
41b37d5e
JJ
6942 else
6943 break;
d9a6bd32
JJ
6944 splay_tree_node on
6945 = splay_tree_lookup (octx->variables,
6946 (splay_tree_key) decl);
6947 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
2ee10f81
JJ
6948 {
6949 octx = NULL;
6950 break;
6951 }
41b37d5e
JJ
6952 omp_add_variable (octx, decl, flags);
6953 if (octx->outer_context == NULL)
6954 break;
6955 octx = octx->outer_context;
6956 }
6957 while (1);
6958 if (octx
6959 && decl
6960 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6961 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
6962 omp_notice_variable (octx, decl, true);
6963 }
acf0174b 6964 flags = GOVD_LINEAR | GOVD_EXPLICIT;
41b37d5e
JJ
6965 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6966 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6967 {
6968 notice_outer = false;
6969 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
6970 }
acf0174b
JJ
6971 goto do_add;
6972
6973 case OMP_CLAUSE_MAP:
b46ebd6c
JJ
6974 decl = OMP_CLAUSE_DECL (c);
6975 if (error_operand_p (decl))
d9a6bd32
JJ
6976 remove = true;
6977 switch (code)
b46ebd6c 6978 {
d9a6bd32
JJ
6979 case OMP_TARGET:
6980 break;
e46c7770
CP
6981 case OACC_DATA:
6982 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
6983 break;
191816a3 6984 /* FALLTHRU */
d9a6bd32
JJ
6985 case OMP_TARGET_DATA:
6986 case OMP_TARGET_ENTER_DATA:
6987 case OMP_TARGET_EXIT_DATA:
e46c7770
CP
6988 case OACC_ENTER_DATA:
6989 case OACC_EXIT_DATA:
37d5ad46 6990 case OACC_HOST_DATA:
e01d41e5
JJ
6991 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
6992 || (OMP_CLAUSE_MAP_KIND (c)
6993 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
d9a6bd32
JJ
6994 /* For target {,enter ,exit }data only the array slice is
6995 mapped, but not the pointer to it. */
6996 remove = true;
6997 break;
6998 default:
b46ebd6c
JJ
6999 break;
7000 }
d9a6bd32
JJ
7001 if (remove)
7002 break;
37d5ad46
JB
7003 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
7004 {
7005 struct gimplify_omp_ctx *octx;
7006 for (octx = outer_ctx; octx; octx = octx->outer_context)
7007 {
7008 if (octx->region_type != ORT_ACC_HOST_DATA)
7009 break;
7010 splay_tree_node n2
7011 = splay_tree_lookup (octx->variables,
7012 (splay_tree_key) decl);
7013 if (n2)
7014 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
7015 "declared in enclosing %<host_data%> region",
7016 DECL_NAME (decl));
7017 }
7018 }
b46ebd6c
JJ
7019 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
7020 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
7021 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
7022 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
7023 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
acf0174b
JJ
7024 {
7025 remove = true;
7026 break;
7027 }
e01d41e5
JJ
7028 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7029 || (OMP_CLAUSE_MAP_KIND (c)
7030 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
d9a6bd32
JJ
7031 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
7032 {
7033 OMP_CLAUSE_SIZE (c)
381cdae4
RB
7034 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
7035 false);
d9a6bd32
JJ
7036 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
7037 GOVD_FIRSTPRIVATE | GOVD_SEEN);
7038 }
acf0174b
JJ
7039 if (!DECL_P (decl))
7040 {
d9a6bd32
JJ
7041 tree d = decl, *pd;
7042 if (TREE_CODE (d) == ARRAY_REF)
7043 {
7044 while (TREE_CODE (d) == ARRAY_REF)
7045 d = TREE_OPERAND (d, 0);
7046 if (TREE_CODE (d) == COMPONENT_REF
7047 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
7048 decl = d;
7049 }
7050 pd = &OMP_CLAUSE_DECL (c);
7051 if (d == decl
7052 && TREE_CODE (decl) == INDIRECT_REF
7053 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
7054 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7055 == REFERENCE_TYPE))
7056 {
7057 pd = &TREE_OPERAND (decl, 0);
7058 decl = TREE_OPERAND (decl, 0);
7059 }
7060 if (TREE_CODE (decl) == COMPONENT_REF)
7061 {
7062 while (TREE_CODE (decl) == COMPONENT_REF)
7063 decl = TREE_OPERAND (decl, 0);
283635f9
JJ
7064 if (TREE_CODE (decl) == INDIRECT_REF
7065 && DECL_P (TREE_OPERAND (decl, 0))
7066 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7067 == REFERENCE_TYPE))
7068 decl = TREE_OPERAND (decl, 0);
d9a6bd32
JJ
7069 }
7070 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
acf0174b
JJ
7071 == GS_ERROR)
7072 {
7073 remove = true;
7074 break;
7075 }
d9a6bd32
JJ
7076 if (DECL_P (decl))
7077 {
7078 if (error_operand_p (decl))
7079 {
7080 remove = true;
7081 break;
7082 }
7083
283635f9
JJ
7084 tree stype = TREE_TYPE (decl);
7085 if (TREE_CODE (stype) == REFERENCE_TYPE)
7086 stype = TREE_TYPE (stype);
7087 if (TYPE_SIZE_UNIT (stype) == NULL
7088 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
d9a6bd32
JJ
7089 {
7090 error_at (OMP_CLAUSE_LOCATION (c),
7091 "mapping field %qE of variable length "
7092 "structure", OMP_CLAUSE_DECL (c));
7093 remove = true;
7094 break;
7095 }
7096
e01d41e5
JJ
7097 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
7098 {
7099 /* Error recovery. */
7100 if (prev_list_p == NULL)
7101 {
7102 remove = true;
7103 break;
7104 }
7105 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7106 {
7107 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
7108 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
7109 {
7110 remove = true;
7111 break;
7112 }
7113 }
7114 }
7115
d9a6bd32
JJ
7116 tree offset;
7117 HOST_WIDE_INT bitsize, bitpos;
7118 machine_mode mode;
ee45a32d 7119 int unsignedp, reversep, volatilep = 0;
d9a6bd32
JJ
7120 tree base = OMP_CLAUSE_DECL (c);
7121 while (TREE_CODE (base) == ARRAY_REF)
7122 base = TREE_OPERAND (base, 0);
7123 if (TREE_CODE (base) == INDIRECT_REF)
7124 base = TREE_OPERAND (base, 0);
7125 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
ee45a32d 7126 &mode, &unsignedp, &reversep,
25b75a48 7127 &volatilep);
283635f9
JJ
7128 tree orig_base = base;
7129 if ((TREE_CODE (base) == INDIRECT_REF
7130 || (TREE_CODE (base) == MEM_REF
7131 && integer_zerop (TREE_OPERAND (base, 1))))
7132 && DECL_P (TREE_OPERAND (base, 0))
7133 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
7134 == REFERENCE_TYPE))
7135 base = TREE_OPERAND (base, 0);
d9a6bd32
JJ
7136 gcc_assert (base == decl
7137 && (offset == NULL_TREE
7138 || TREE_CODE (offset) == INTEGER_CST));
7139
7140 splay_tree_node n
7141 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7142 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
e01d41e5
JJ
7143 == GOMP_MAP_ALWAYS_POINTER);
7144 if (n == NULL || (n->value & GOVD_MAP) == 0)
d9a6bd32 7145 {
e01d41e5
JJ
7146 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7147 OMP_CLAUSE_MAP);
7148 OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
283635f9
JJ
7149 if (orig_base != base)
7150 OMP_CLAUSE_DECL (l) = unshare_expr (orig_base);
7151 else
7152 OMP_CLAUSE_DECL (l) = decl;
e01d41e5
JJ
7153 OMP_CLAUSE_SIZE (l) = size_int (1);
7154 if (struct_map_to_clause == NULL)
7155 struct_map_to_clause = new hash_map<tree, tree>;
7156 struct_map_to_clause->put (decl, l);
d9a6bd32
JJ
7157 if (ptr)
7158 {
e01d41e5
JJ
7159 enum gomp_map_kind mkind
7160 = code == OMP_TARGET_EXIT_DATA
7161 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
d9a6bd32 7162 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
e01d41e5
JJ
7163 OMP_CLAUSE_MAP);
7164 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7165 OMP_CLAUSE_DECL (c2)
7166 = unshare_expr (OMP_CLAUSE_DECL (c));
7167 OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
7168 OMP_CLAUSE_SIZE (c2)
7169 = TYPE_SIZE_UNIT (ptr_type_node);
7170 OMP_CLAUSE_CHAIN (l) = c2;
7171 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7172 {
7173 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
7174 tree c3
7175 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7176 OMP_CLAUSE_MAP);
7177 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
7178 OMP_CLAUSE_DECL (c3)
7179 = unshare_expr (OMP_CLAUSE_DECL (c4));
7180 OMP_CLAUSE_SIZE (c3)
7181 = TYPE_SIZE_UNIT (ptr_type_node);
7182 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
7183 OMP_CLAUSE_CHAIN (c2) = c3;
7184 }
7185 *prev_list_p = l;
7186 prev_list_p = NULL;
7187 }
7188 else
7189 {
7190 OMP_CLAUSE_CHAIN (l) = c;
7191 *list_p = l;
7192 list_p = &OMP_CLAUSE_CHAIN (l);
d9a6bd32 7193 }
283635f9
JJ
7194 if (orig_base != base && code == OMP_TARGET)
7195 {
7196 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7197 OMP_CLAUSE_MAP);
7198 enum gomp_map_kind mkind
7199 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
7200 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7201 OMP_CLAUSE_DECL (c2) = decl;
7202 OMP_CLAUSE_SIZE (c2) = size_zero_node;
7203 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
7204 OMP_CLAUSE_CHAIN (l) = c2;
7205 }
d9a6bd32 7206 flags = GOVD_MAP | GOVD_EXPLICIT;
e01d41e5 7207 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
d9a6bd32
JJ
7208 flags |= GOVD_SEEN;
7209 goto do_add_decl;
7210 }
7211 else
7212 {
7213 tree *osc = struct_map_to_clause->get (decl);
e01d41e5
JJ
7214 tree *sc = NULL, *scp = NULL;
7215 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
d9a6bd32
JJ
7216 n->value |= GOVD_SEEN;
7217 offset_int o1, o2;
7218 if (offset)
7219 o1 = wi::to_offset (offset);
7220 else
7221 o1 = 0;
7222 if (bitpos)
7223 o1 = o1 + bitpos / BITS_PER_UNIT;
283635f9
JJ
7224 sc = &OMP_CLAUSE_CHAIN (*osc);
7225 if (*sc != c
7226 && (OMP_CLAUSE_MAP_KIND (*sc)
7227 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7228 sc = &OMP_CLAUSE_CHAIN (*sc);
7229 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
e01d41e5
JJ
7230 if (ptr && sc == prev_list_p)
7231 break;
7232 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7233 != COMPONENT_REF
7234 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7235 != INDIRECT_REF)
7236 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7237 != ARRAY_REF))
d9a6bd32
JJ
7238 break;
7239 else
7240 {
7241 tree offset2;
7242 HOST_WIDE_INT bitsize2, bitpos2;
7243 base = OMP_CLAUSE_DECL (*sc);
7244 if (TREE_CODE (base) == ARRAY_REF)
7245 {
7246 while (TREE_CODE (base) == ARRAY_REF)
7247 base = TREE_OPERAND (base, 0);
7248 if (TREE_CODE (base) != COMPONENT_REF
7249 || (TREE_CODE (TREE_TYPE (base))
7250 != ARRAY_TYPE))
7251 break;
7252 }
7253 else if (TREE_CODE (base) == INDIRECT_REF
7254 && (TREE_CODE (TREE_OPERAND (base, 0))
7255 == COMPONENT_REF)
7256 && (TREE_CODE (TREE_TYPE
7257 (TREE_OPERAND (base, 0)))
7258 == REFERENCE_TYPE))
7259 base = TREE_OPERAND (base, 0);
7260 base = get_inner_reference (base, &bitsize2,
7261 &bitpos2, &offset2,
7262 &mode, &unsignedp,
25b75a48 7263 &reversep, &volatilep);
283635f9
JJ
7264 if ((TREE_CODE (base) == INDIRECT_REF
7265 || (TREE_CODE (base) == MEM_REF
7266 && integer_zerop (TREE_OPERAND (base,
7267 1))))
7268 && DECL_P (TREE_OPERAND (base, 0))
7269 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base,
7270 0)))
7271 == REFERENCE_TYPE))
7272 base = TREE_OPERAND (base, 0);
d9a6bd32
JJ
7273 if (base != decl)
7274 break;
e01d41e5
JJ
7275 if (scp)
7276 continue;
d9a6bd32
JJ
7277 gcc_assert (offset == NULL_TREE
7278 || TREE_CODE (offset) == INTEGER_CST);
7279 tree d1 = OMP_CLAUSE_DECL (*sc);
7280 tree d2 = OMP_CLAUSE_DECL (c);
7281 while (TREE_CODE (d1) == ARRAY_REF)
7282 d1 = TREE_OPERAND (d1, 0);
7283 while (TREE_CODE (d2) == ARRAY_REF)
7284 d2 = TREE_OPERAND (d2, 0);
7285 if (TREE_CODE (d1) == INDIRECT_REF)
7286 d1 = TREE_OPERAND (d1, 0);
7287 if (TREE_CODE (d2) == INDIRECT_REF)
7288 d2 = TREE_OPERAND (d2, 0);
7289 while (TREE_CODE (d1) == COMPONENT_REF)
7290 if (TREE_CODE (d2) == COMPONENT_REF
7291 && TREE_OPERAND (d1, 1)
7292 == TREE_OPERAND (d2, 1))
7293 {
7294 d1 = TREE_OPERAND (d1, 0);
7295 d2 = TREE_OPERAND (d2, 0);
7296 }
7297 else
7298 break;
7299 if (d1 == d2)
7300 {
7301 error_at (OMP_CLAUSE_LOCATION (c),
7302 "%qE appears more than once in map "
7303 "clauses", OMP_CLAUSE_DECL (c));
7304 remove = true;
7305 break;
7306 }
7307 if (offset2)
7308 o2 = wi::to_offset (offset2);
7309 else
7310 o2 = 0;
7311 if (bitpos2)
7312 o2 = o2 + bitpos2 / BITS_PER_UNIT;
7313 if (wi::ltu_p (o1, o2)
7314 || (wi::eq_p (o1, o2) && bitpos < bitpos2))
e01d41e5
JJ
7315 {
7316 if (ptr)
7317 scp = sc;
7318 else
7319 break;
7320 }
d9a6bd32 7321 }
e01d41e5
JJ
7322 if (remove)
7323 break;
7324 OMP_CLAUSE_SIZE (*osc)
7325 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
7326 size_one_node);
d9a6bd32
JJ
7327 if (ptr)
7328 {
e01d41e5
JJ
7329 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7330 OMP_CLAUSE_MAP);
7331 tree cl = NULL_TREE;
7332 enum gomp_map_kind mkind
7333 = code == OMP_TARGET_EXIT_DATA
7334 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
7335 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7336 OMP_CLAUSE_DECL (c2)
7337 = unshare_expr (OMP_CLAUSE_DECL (c));
7338 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
7339 OMP_CLAUSE_SIZE (c2)
7340 = TYPE_SIZE_UNIT (ptr_type_node);
7341 cl = scp ? *prev_list_p : c2;
7342 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7343 {
7344 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
7345 tree c3
7346 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7347 OMP_CLAUSE_MAP);
7348 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
7349 OMP_CLAUSE_DECL (c3)
7350 = unshare_expr (OMP_CLAUSE_DECL (c4));
7351 OMP_CLAUSE_SIZE (c3)
7352 = TYPE_SIZE_UNIT (ptr_type_node);
7353 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
7354 if (!scp)
7355 OMP_CLAUSE_CHAIN (c2) = c3;
7356 else
7357 cl = c3;
7358 }
7359 if (scp)
7360 *scp = c2;
7361 if (sc == prev_list_p)
7362 {
7363 *sc = cl;
7364 prev_list_p = NULL;
7365 }
7366 else
7367 {
7368 *prev_list_p = OMP_CLAUSE_CHAIN (c);
7369 list_p = prev_list_p;
7370 prev_list_p = NULL;
7371 OMP_CLAUSE_CHAIN (c) = *sc;
7372 *sc = cl;
7373 continue;
7374 }
d9a6bd32 7375 }
e01d41e5 7376 else if (*sc != c)
d9a6bd32
JJ
7377 {
7378 *list_p = OMP_CLAUSE_CHAIN (c);
7379 OMP_CLAUSE_CHAIN (c) = *sc;
7380 *sc = c;
7381 continue;
7382 }
7383 }
7384 }
e01d41e5
JJ
7385 if (!remove
7386 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
7387 && OMP_CLAUSE_CHAIN (c)
7388 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
7389 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
7390 == GOMP_MAP_ALWAYS_POINTER))
7391 prev_list_p = list_p;
acf0174b
JJ
7392 break;
7393 }
7394 flags = GOVD_MAP | GOVD_EXPLICIT;
e01d41e5
JJ
7395 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
7396 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
7397 flags |= GOVD_MAP_ALWAYS_TO;
acf0174b
JJ
7398 goto do_add;
7399
7400 case OMP_CLAUSE_DEPEND:
d9a6bd32
JJ
7401 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
7402 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
7403 {
7404 /* Nothing to do. OMP_CLAUSE_DECL will be lowered in
7405 omp-low.c. */
7406 break;
7407 }
acf0174b
JJ
7408 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
7409 {
7410 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
7411 NULL, is_gimple_val, fb_rvalue);
7412 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
7413 }
7414 if (error_operand_p (OMP_CLAUSE_DECL (c)))
7415 {
7416 remove = true;
7417 break;
7418 }
7419 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
7420 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
7421 is_gimple_val, fb_rvalue) == GS_ERROR)
7422 {
7423 remove = true;
7424 break;
7425 }
7426 break;
7427
7428 case OMP_CLAUSE_TO:
7429 case OMP_CLAUSE_FROM:
41dbbb37 7430 case OMP_CLAUSE__CACHE_:
b46ebd6c
JJ
7431 decl = OMP_CLAUSE_DECL (c);
7432 if (error_operand_p (decl))
acf0174b
JJ
7433 {
7434 remove = true;
7435 break;
7436 }
b46ebd6c
JJ
7437 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
7438 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
7439 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
7440 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
7441 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
acf0174b
JJ
7442 {
7443 remove = true;
7444 break;
7445 }
7446 if (!DECL_P (decl))
7447 {
7448 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
7449 NULL, is_gimple_lvalue, fb_lvalue)
7450 == GS_ERROR)
7451 {
7452 remove = true;
7453 break;
7454 }
7455 break;
7456 }
7457 goto do_notice;
953ff289 7458
d9a6bd32
JJ
7459 case OMP_CLAUSE_USE_DEVICE_PTR:
7460 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
7461 goto do_add;
7462 case OMP_CLAUSE_IS_DEVICE_PTR:
7463 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
7464 goto do_add;
7465
953ff289
DN
7466 do_add:
7467 decl = OMP_CLAUSE_DECL (c);
d9a6bd32 7468 do_add_decl:
b504a918 7469 if (error_operand_p (decl))
953ff289
DN
7470 {
7471 remove = true;
7472 break;
7473 }
d9a6bd32
JJ
7474 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
7475 {
7476 tree t = omp_member_access_dummy_var (decl);
7477 if (t)
7478 {
7479 tree v = DECL_VALUE_EXPR (decl);
7480 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
7481 if (outer_ctx)
7482 omp_notice_variable (outer_ctx, t, true);
7483 }
7484 }
e46c7770
CP
7485 if (code == OACC_DATA
7486 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7487 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
7488 flags |= GOVD_MAP_0LEN_ARRAY;
953ff289 7489 omp_add_variable (ctx, decl, flags);
693d710f 7490 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
953ff289
DN
7491 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7492 {
7493 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
693d710f 7494 GOVD_LOCAL | GOVD_SEEN);
d9a6bd32
JJ
7495 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
7496 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
7497 find_decl_expr,
7498 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
7499 NULL) == NULL_TREE)
7500 omp_add_variable (ctx,
7501 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
7502 GOVD_LOCAL | GOVD_SEEN);
953ff289 7503 gimplify_omp_ctxp = ctx;
45852dcc 7504 push_gimplify_context ();
726a989a 7505
355a7673
MM
7506 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
7507 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
726a989a
RB
7508
7509 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
7510 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
7511 pop_gimplify_context
7512 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
45852dcc 7513 push_gimplify_context ();
726a989a
RB
7514 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
7515 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
b8698a0f 7516 pop_gimplify_context
726a989a
RB
7517 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
7518 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
7519 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
7520
953ff289
DN
7521 gimplify_omp_ctxp = outer_ctx;
7522 }
a68ab351
JJ
7523 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7524 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
7525 {
7526 gimplify_omp_ctxp = ctx;
45852dcc 7527 push_gimplify_context ();
a68ab351
JJ
7528 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
7529 {
7530 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
7531 NULL, NULL);
7532 TREE_SIDE_EFFECTS (bind) = 1;
7533 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
7534 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
7535 }
726a989a
RB
7536 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
7537 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
7538 pop_gimplify_context
7539 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
7540 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
7541
dd2fc525
JJ
7542 gimplify_omp_ctxp = outer_ctx;
7543 }
7544 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7545 && OMP_CLAUSE_LINEAR_STMT (c))
7546 {
7547 gimplify_omp_ctxp = ctx;
7548 push_gimplify_context ();
7549 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
7550 {
7551 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
7552 NULL, NULL);
7553 TREE_SIDE_EFFECTS (bind) = 1;
7554 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
7555 OMP_CLAUSE_LINEAR_STMT (c) = bind;
7556 }
7557 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
7558 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
7559 pop_gimplify_context
7560 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
7561 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
7562
a68ab351
JJ
7563 gimplify_omp_ctxp = outer_ctx;
7564 }
953ff289
DN
7565 if (notice_outer)
7566 goto do_notice;
7567 break;
7568
7569 case OMP_CLAUSE_COPYIN:
7570 case OMP_CLAUSE_COPYPRIVATE:
7571 decl = OMP_CLAUSE_DECL (c);
b504a918 7572 if (error_operand_p (decl))
953ff289
DN
7573 {
7574 remove = true;
7575 break;
7576 }
cab37c89
JJ
7577 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
7578 && !remove
7579 && !omp_check_private (ctx, decl, true))
7580 {
7581 remove = true;
7582 if (is_global_var (decl))
7583 {
7584 if (DECL_THREAD_LOCAL_P (decl))
7585 remove = false;
7586 else if (DECL_HAS_VALUE_EXPR_P (decl))
7587 {
7588 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7589
7590 if (value
7591 && DECL_P (value)
7592 && DECL_THREAD_LOCAL_P (value))
7593 remove = false;
7594 }
7595 }
7596 if (remove)
7597 error_at (OMP_CLAUSE_LOCATION (c),
7598 "copyprivate variable %qE is not threadprivate"
7599 " or private in outer context", DECL_NAME (decl));
7600 }
953ff289
DN
7601 do_notice:
7602 if (outer_ctx)
7603 omp_notice_variable (outer_ctx, decl, true);
07b7aade 7604 if (check_non_private
a68ab351 7605 && region_type == ORT_WORKSHARE
d9a6bd32
JJ
7606 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7607 || decl == OMP_CLAUSE_DECL (c)
7608 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
7609 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
e01d41e5
JJ
7610 == ADDR_EXPR
7611 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
7612 == POINTER_PLUS_EXPR
7613 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
7614 (OMP_CLAUSE_DECL (c), 0), 0))
7615 == ADDR_EXPR)))))
cab37c89 7616 && omp_check_private (ctx, decl, false))
07b7aade 7617 {
4f1e4960
JM
7618 error ("%s variable %qE is private in outer context",
7619 check_non_private, DECL_NAME (decl));
07b7aade
JJ
7620 remove = true;
7621 }
953ff289
DN
7622 break;
7623
953ff289 7624 case OMP_CLAUSE_IF:
d9a6bd32
JJ
7625 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
7626 && OMP_CLAUSE_IF_MODIFIER (c) != code)
7627 {
7628 const char *p[2];
7629 for (int i = 0; i < 2; i++)
7630 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
7631 {
7632 case OMP_PARALLEL: p[i] = "parallel"; break;
7633 case OMP_TASK: p[i] = "task"; break;
7634 case OMP_TASKLOOP: p[i] = "taskloop"; break;
7635 case OMP_TARGET_DATA: p[i] = "target data"; break;
7636 case OMP_TARGET: p[i] = "target"; break;
7637 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
7638 case OMP_TARGET_ENTER_DATA:
7639 p[i] = "target enter data"; break;
7640 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
7641 default: gcc_unreachable ();
7642 }
7643 error_at (OMP_CLAUSE_LOCATION (c),
7644 "expected %qs %<if%> clause modifier rather than %qs",
7645 p[0], p[1]);
7646 remove = true;
7647 }
7648 /* Fall through. */
7649
7650 case OMP_CLAUSE_FINAL:
d568d1a8
RS
7651 OMP_CLAUSE_OPERAND (c, 0)
7652 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
7653 /* Fall through. */
7654
7655 case OMP_CLAUSE_SCHEDULE:
953ff289 7656 case OMP_CLAUSE_NUM_THREADS:
acf0174b
JJ
7657 case OMP_CLAUSE_NUM_TEAMS:
7658 case OMP_CLAUSE_THREAD_LIMIT:
7659 case OMP_CLAUSE_DIST_SCHEDULE:
7660 case OMP_CLAUSE_DEVICE:
d9a6bd32
JJ
7661 case OMP_CLAUSE_PRIORITY:
7662 case OMP_CLAUSE_GRAINSIZE:
7663 case OMP_CLAUSE_NUM_TASKS:
7664 case OMP_CLAUSE_HINT:
9a771876 7665 case OMP_CLAUSE__CILK_FOR_COUNT_:
41dbbb37
TS
7666 case OMP_CLAUSE_ASYNC:
7667 case OMP_CLAUSE_WAIT:
7668 case OMP_CLAUSE_NUM_GANGS:
7669 case OMP_CLAUSE_NUM_WORKERS:
7670 case OMP_CLAUSE_VECTOR_LENGTH:
41dbbb37
TS
7671 case OMP_CLAUSE_WORKER:
7672 case OMP_CLAUSE_VECTOR:
726a989a
RB
7673 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
7674 is_gimple_val, fb_rvalue) == GS_ERROR)
acf0174b 7675 remove = true;
d9a6bd32
JJ
7676 break;
7677
7678 case OMP_CLAUSE_GANG:
7679 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
7680 is_gimple_val, fb_rvalue) == GS_ERROR)
7681 remove = true;
7682 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
7683 is_gimple_val, fb_rvalue) == GS_ERROR)
41dbbb37
TS
7684 remove = true;
7685 break;
7686
7a5e4956
CP
7687 case OMP_CLAUSE_TILE:
7688 for (tree list = OMP_CLAUSE_TILE_LIST (c); !remove && list;
7689 list = TREE_CHAIN (list))
7690 {
7691 if (gimplify_expr (&TREE_VALUE (list), pre_p, NULL,
7692 is_gimple_val, fb_rvalue) == GS_ERROR)
7693 remove = true;
7694 }
7695 break;
7696
953ff289
DN
7697 case OMP_CLAUSE_NOWAIT:
7698 case OMP_CLAUSE_ORDERED:
a68ab351
JJ
7699 case OMP_CLAUSE_UNTIED:
7700 case OMP_CLAUSE_COLLAPSE:
41dbbb37
TS
7701 case OMP_CLAUSE_AUTO:
7702 case OMP_CLAUSE_SEQ:
7a5e4956 7703 case OMP_CLAUSE_INDEPENDENT:
20906c66 7704 case OMP_CLAUSE_MERGEABLE:
acf0174b 7705 case OMP_CLAUSE_PROC_BIND:
74bf76ed 7706 case OMP_CLAUSE_SAFELEN:
d9a6bd32
JJ
7707 case OMP_CLAUSE_SIMDLEN:
7708 case OMP_CLAUSE_NOGROUP:
7709 case OMP_CLAUSE_THREADS:
7710 case OMP_CLAUSE_SIMD:
7711 break;
7712
7713 case OMP_CLAUSE_DEFAULTMAP:
7714 ctx->target_map_scalars_firstprivate = false;
953ff289
DN
7715 break;
7716
acf0174b
JJ
7717 case OMP_CLAUSE_ALIGNED:
7718 decl = OMP_CLAUSE_DECL (c);
7719 if (error_operand_p (decl))
7720 {
7721 remove = true;
7722 break;
7723 }
b46ebd6c
JJ
7724 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
7725 is_gimple_val, fb_rvalue) == GS_ERROR)
7726 {
7727 remove = true;
7728 break;
7729 }
acf0174b
JJ
7730 if (!is_global_var (decl)
7731 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
7732 omp_add_variable (ctx, decl, GOVD_ALIGNED);
7733 break;
7734
953ff289
DN
7735 case OMP_CLAUSE_DEFAULT:
7736 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
7737 break;
7738
7739 default:
7740 gcc_unreachable ();
7741 }
7742
e46c7770
CP
7743 if (code == OACC_DATA
7744 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7745 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
7746 remove = true;
953ff289
DN
7747 if (remove)
7748 *list_p = OMP_CLAUSE_CHAIN (c);
7749 else
7750 list_p = &OMP_CLAUSE_CHAIN (c);
7751 }
7752
7753 gimplify_omp_ctxp = ctx;
d9a6bd32
JJ
7754 if (struct_map_to_clause)
7755 delete struct_map_to_clause;
953ff289
DN
7756}
7757
1a80d6b8
JJ
7758/* Return true if DECL is a candidate for shared to firstprivate
 7759   optimization.  We only consider non-addressable scalars that are not
 7760   too large and are not references.  */
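/* Size note (illustrative, assuming 64-bit pointers): the cutoff below,
   4 * POINTER_SIZE / BITS_PER_UNIT, works out to 4 * 64 / 8 = 32 bytes;
   anything larger is not considered for the optimization.  */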
7761
7762static bool
7763omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
7764{
7765 if (TREE_ADDRESSABLE (decl))
7766 return false;
7767 tree type = TREE_TYPE (decl);
7768 if (!is_gimple_reg_type (type)
7769 || TREE_CODE (type) == REFERENCE_TYPE
7770 || TREE_ADDRESSABLE (type))
7771 return false;
7772 /* Don't optimize too large decls, as each thread/task will have
 7773	     its own copy.  */
7774 HOST_WIDE_INT len = int_size_in_bytes (type);
7775 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
7776 return false;
7777 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7778 return false;
7779 return true;
7780}
7781
7782/* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
7783   For a DECL satisfying omp_shared_to_firstprivate_optimizable_decl_p,
7784   mark it as GOVD_WRITTEN in the outer context where it is GOVD_SHARED.  */
7785
7786static void
7787omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
7788{
7789 for (; ctx; ctx = ctx->outer_context)
7790 {
7791 splay_tree_node n = splay_tree_lookup (ctx->variables,
7792 (splay_tree_key) decl);
7793 if (n == NULL)
7794 continue;
7795 else if (n->value & GOVD_SHARED)
7796 {
7797 n->value |= GOVD_WRITTEN;
7798 return;
7799 }
7800 else if (n->value & GOVD_DATA_SHARE_CLASS)
7801 return;
7802 }
7803}
7804
7805/* Helper callback for walk_gimple_seq to discover possible stores
7806   to omp_shared_to_firstprivate_optimizable_decl_p decls and, for
7807   those that are GOVD_SHARED in some outer context, mark them
7808   GOVD_WRITTEN there.  */
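/* Descriptive note: a store through e.g. a.b[i] or *&x on the LHS is
   attributed to the base decl 'a' or 'x' by the stripping loop below.  */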
7809
7810static tree
7811omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
7812{
7813 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
7814
7815 *walk_subtrees = 0;
7816 if (!wi->is_lhs)
7817 return NULL_TREE;
7818
7819 tree op = *tp;
7820 do
7821 {
7822 if (handled_component_p (op))
7823 op = TREE_OPERAND (op, 0);
7824 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
7825 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
7826 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
7827 else
7828 break;
7829 }
7830 while (1);
7831 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
7832 return NULL_TREE;
7833
7834 omp_mark_stores (gimplify_omp_ctxp, op);
7835 return NULL_TREE;
7836}
7837
7838/* Helper callback for walk_gimple_seq to discover possible stores
7839   to omp_shared_to_firstprivate_optimizable_decl_p decls and, for
7840   those that are GOVD_SHARED in some outer context, mark them
7841   GOVD_WRITTEN there.  */
7842
7843static tree
7844omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
7845 bool *handled_ops_p,
7846 struct walk_stmt_info *wi)
7847{
7848 gimple *stmt = gsi_stmt (*gsi_p);
7849 switch (gimple_code (stmt))
7850 {
7851 /* Don't recurse on OpenMP constructs for which
7852 gimplify_adjust_omp_clauses already handled the bodies,
7853 except handle gimple_omp_for_pre_body. */
7854 case GIMPLE_OMP_FOR:
7855 *handled_ops_p = true;
7856 if (gimple_omp_for_pre_body (stmt))
7857 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
7858 omp_find_stores_stmt, omp_find_stores_op, wi);
7859 break;
7860 case GIMPLE_OMP_PARALLEL:
7861 case GIMPLE_OMP_TASK:
7862 case GIMPLE_OMP_SECTIONS:
7863 case GIMPLE_OMP_SINGLE:
7864 case GIMPLE_OMP_TARGET:
7865 case GIMPLE_OMP_TEAMS:
7866 case GIMPLE_OMP_CRITICAL:
7867 *handled_ops_p = true;
7868 break;
7869 default:
7870 break;
7871 }
7872 return NULL_TREE;
7873}
7874
f014c653
JJ
7875struct gimplify_adjust_omp_clauses_data
7876{
7877 tree *list_p;
7878 gimple_seq *pre_p;
7879};
7880
953ff289
DN
7881/* For all variables that were not actually used within the context,
7882 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
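/* Descriptive note: this splay-tree callback also synthesizes the
   implicit clauses collected in the context, e.g. GOVD_SHARED becomes
   OMP_CLAUSE_SHARED, GOVD_MAP becomes OMP_CLAUSE_MAP and
   GOVD_FIRSTPRIVATE becomes OMP_CLAUSE_FIRSTPRIVATE; explicit, local
   and unseen entries are skipped early.  */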
7883
7884static int
7885gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
7886{
f014c653
JJ
7887 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
7888 gimple_seq *pre_p
7889 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
953ff289
DN
7890 tree decl = (tree) n->key;
7891 unsigned flags = n->value;
aaf46ef9 7892 enum omp_clause_code code;
953ff289
DN
7893 tree clause;
7894 bool private_debug;
7895
7896 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
7897 return 0;
7898 if ((flags & GOVD_SEEN) == 0)
7899 return 0;
7900 if (flags & GOVD_DEBUG_PRIVATE)
7901 {
7902 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
7903 private_debug = true;
7904 }
acf0174b
JJ
7905 else if (flags & GOVD_MAP)
7906 private_debug = false;
953ff289
DN
7907 else
7908 private_debug
7909 = lang_hooks.decls.omp_private_debug_clause (decl,
7910 !!(flags & GOVD_SHARED));
7911 if (private_debug)
7912 code = OMP_CLAUSE_PRIVATE;
acf0174b
JJ
7913 else if (flags & GOVD_MAP)
7914 code = OMP_CLAUSE_MAP;
953ff289
DN
7915 else if (flags & GOVD_SHARED)
7916 {
7917 if (is_global_var (decl))
64964499
JJ
7918 {
7919 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
7920 while (ctx != NULL)
7921 {
7922 splay_tree_node on
7923 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7924 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
74bf76ed 7925 | GOVD_PRIVATE | GOVD_REDUCTION
7de20fbd 7926 | GOVD_LINEAR | GOVD_MAP)) != 0)
64964499
JJ
7927 break;
7928 ctx = ctx->outer_context;
7929 }
7930 if (ctx == NULL)
7931 return 0;
7932 }
953ff289
DN
7933 code = OMP_CLAUSE_SHARED;
7934 }
7935 else if (flags & GOVD_PRIVATE)
7936 code = OMP_CLAUSE_PRIVATE;
7937 else if (flags & GOVD_FIRSTPRIVATE)
7938 code = OMP_CLAUSE_FIRSTPRIVATE;
74bf76ed
JJ
7939 else if (flags & GOVD_LASTPRIVATE)
7940 code = OMP_CLAUSE_LASTPRIVATE;
acf0174b
JJ
7941 else if (flags & GOVD_ALIGNED)
7942 return 0;
953ff289
DN
7943 else
7944 gcc_unreachable ();
7945
1a80d6b8
JJ
7946 if (((flags & GOVD_LASTPRIVATE)
7947 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
7948 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
7949 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
7950
c2255bc4 7951 clause = build_omp_clause (input_location, code);
aaf46ef9 7952 OMP_CLAUSE_DECL (clause) = decl;
953ff289
DN
7953 OMP_CLAUSE_CHAIN (clause) = *list_p;
7954 if (private_debug)
7955 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
a68ab351
JJ
7956 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
7957 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
1a80d6b8
JJ
7958 else if (code == OMP_CLAUSE_SHARED
7959 && (flags & GOVD_WRITTEN) == 0
7960 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
7961 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
ec35ea45
JJ
7962 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
7963 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
d9a6bd32
JJ
7964 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
7965 {
7966 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
7967 OMP_CLAUSE_DECL (nc) = decl;
7968 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7969 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
7970 OMP_CLAUSE_DECL (clause)
7971 = build_simple_mem_ref_loc (input_location, decl);
7972 OMP_CLAUSE_DECL (clause)
7973 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
7974 build_int_cst (build_pointer_type (char_type_node), 0));
7975 OMP_CLAUSE_SIZE (clause) = size_zero_node;
7976 OMP_CLAUSE_SIZE (nc) = size_zero_node;
7977 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
7978 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
7979 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
7980 OMP_CLAUSE_CHAIN (nc) = *list_p;
7981 OMP_CLAUSE_CHAIN (clause) = nc;
7982 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7983 gimplify_omp_ctxp = ctx->outer_context;
7984 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
7985 pre_p, NULL, is_gimple_val, fb_rvalue);
7986 gimplify_omp_ctxp = ctx;
7987 }
acf0174b
JJ
7988 else if (code == OMP_CLAUSE_MAP)
7989 {
db0f1c7a
TV
7990 int kind = (flags & GOVD_MAP_TO_ONLY
7991 ? GOMP_MAP_TO
7992 : GOMP_MAP_TOFROM);
7993 if (flags & GOVD_MAP_FORCE)
7994 kind |= GOMP_MAP_FLAG_FORCE;
7995 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
acf0174b
JJ
7996 if (DECL_SIZE (decl)
7997 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7998 {
7999 tree decl2 = DECL_VALUE_EXPR (decl);
8000 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
8001 decl2 = TREE_OPERAND (decl2, 0);
8002 gcc_assert (DECL_P (decl2));
8003 tree mem = build_simple_mem_ref (decl2);
8004 OMP_CLAUSE_DECL (clause) = mem;
8005 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8006 if (gimplify_omp_ctxp->outer_context)
8007 {
8008 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
8009 omp_notice_variable (ctx, decl2, true);
8010 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
8011 }
8012 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
8013 OMP_CLAUSE_MAP);
8014 OMP_CLAUSE_DECL (nc) = decl;
8015 OMP_CLAUSE_SIZE (nc) = size_zero_node;
d9a6bd32
JJ
8016 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
8017 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
8018 else
8019 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
acf0174b
JJ
8020 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
8021 OMP_CLAUSE_CHAIN (clause) = nc;
8022 }
e01d41e5
JJ
8023 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
8024 && lang_hooks.decls.omp_privatize_by_reference (decl))
8025 {
8026 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
8027 OMP_CLAUSE_SIZE (clause)
8028 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
8029 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8030 gimplify_omp_ctxp = ctx->outer_context;
8031 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
8032 pre_p, NULL, is_gimple_val, fb_rvalue);
8033 gimplify_omp_ctxp = ctx;
8034 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
8035 OMP_CLAUSE_MAP);
8036 OMP_CLAUSE_DECL (nc) = decl;
8037 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8038 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
8039 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
8040 OMP_CLAUSE_CHAIN (clause) = nc;
8041 }
b46ebd6c
JJ
8042 else
8043 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
acf0174b 8044 }
95782571
JJ
8045 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
8046 {
8047 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
8048 OMP_CLAUSE_DECL (nc) = decl;
8049 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
8050 OMP_CLAUSE_CHAIN (nc) = *list_p;
8051 OMP_CLAUSE_CHAIN (clause) = nc;
f014c653
JJ
8052 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8053 gimplify_omp_ctxp = ctx->outer_context;
8054 lang_hooks.decls.omp_finish_clause (nc, pre_p);
8055 gimplify_omp_ctxp = ctx;
95782571 8056 }
953ff289 8057 *list_p = clause;
f014c653
JJ
8058 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8059 gimplify_omp_ctxp = ctx->outer_context;
8060 lang_hooks.decls.omp_finish_clause (clause, pre_p);
8061 gimplify_omp_ctxp = ctx;
953ff289
DN
8062 return 0;
8063}
8064
8065static void
1a80d6b8 8066gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
d9a6bd32 8067 enum tree_code code)
953ff289
DN
8068{
8069 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8070 tree c, decl;
8071
1a80d6b8
JJ
8072 if (body)
8073 {
8074 struct gimplify_omp_ctx *octx;
8075 for (octx = ctx; octx; octx = octx->outer_context)
8076 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
8077 break;
8078 if (octx)
8079 {
8080 struct walk_stmt_info wi;
8081 memset (&wi, 0, sizeof (wi));
8082 walk_gimple_seq (body, omp_find_stores_stmt,
8083 omp_find_stores_op, &wi);
8084 }
8085 }
953ff289
DN
8086 while ((c = *list_p) != NULL)
8087 {
8088 splay_tree_node n;
8089 bool remove = false;
8090
aaf46ef9 8091 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
8092 {
8093 case OMP_CLAUSE_PRIVATE:
8094 case OMP_CLAUSE_SHARED:
8095 case OMP_CLAUSE_FIRSTPRIVATE:
74bf76ed 8096 case OMP_CLAUSE_LINEAR:
953ff289
DN
8097 decl = OMP_CLAUSE_DECL (c);
8098 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8099 remove = !(n->value & GOVD_SEEN);
8100 if (! remove)
8101 {
aaf46ef9 8102 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
953ff289
DN
8103 if ((n->value & GOVD_DEBUG_PRIVATE)
8104 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
8105 {
8106 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
8107 || ((n->value & GOVD_DATA_SHARE_CLASS)
8108 == GOVD_PRIVATE));
aaf46ef9 8109 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
953ff289
DN
8110 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
8111 }
1a80d6b8
JJ
8112 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8113 && (n->value & GOVD_WRITTEN) == 0
8114 && DECL_P (decl)
8115 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8116 OMP_CLAUSE_SHARED_READONLY (c) = 1;
8117 else if (DECL_P (decl)
8118 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8119 && (n->value & GOVD_WRITTEN) != 0)
8120 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8121 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
8122 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8123 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
953ff289
DN
8124 }
8125 break;
8126
8127 case OMP_CLAUSE_LASTPRIVATE:
8128 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
8129 accurately reflect the presence of a FIRSTPRIVATE clause. */
8130 decl = OMP_CLAUSE_DECL (c);
8131 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8132 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
8133 = (n->value & GOVD_FIRSTPRIVATE) != 0;
41b37d5e
JJ
8134 if (omp_no_lastprivate (ctx))
8135 {
8136 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8137 remove = true;
8138 else
8139 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_PRIVATE;
8140 }
e01d41e5
JJ
8141 else if (code == OMP_DISTRIBUTE
8142 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8143 {
8144 remove = true;
8145 error_at (OMP_CLAUSE_LOCATION (c),
8146 "same variable used in %<firstprivate%> and "
8147 "%<lastprivate%> clauses on %<distribute%> "
8148 "construct");
8149 }
1a80d6b8
JJ
8150 if (!remove
8151 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8152 && DECL_P (decl)
8153 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8154 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
953ff289 8155 break;
b8698a0f 8156
acf0174b
JJ
8157 case OMP_CLAUSE_ALIGNED:
8158 decl = OMP_CLAUSE_DECL (c);
8159 if (!is_global_var (decl))
8160 {
8161 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8162 remove = n == NULL || !(n->value & GOVD_SEEN);
8163 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
8164 {
8165 struct gimplify_omp_ctx *octx;
8166 if (n != NULL
8167 && (n->value & (GOVD_DATA_SHARE_CLASS
8168 & ~GOVD_FIRSTPRIVATE)))
8169 remove = true;
8170 else
8171 for (octx = ctx->outer_context; octx;
8172 octx = octx->outer_context)
8173 {
8174 n = splay_tree_lookup (octx->variables,
8175 (splay_tree_key) decl);
8176 if (n == NULL)
8177 continue;
8178 if (n->value & GOVD_LOCAL)
8179 break;
8180 /* We have to avoid assigning a shared variable
8181 to itself when trying to add
8182 __builtin_assume_aligned. */
8183 if (n->value & GOVD_SHARED)
8184 {
8185 remove = true;
8186 break;
8187 }
8188 }
8189 }
8190 }
8191 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
8192 {
8193 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8194 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
8195 remove = true;
8196 }
8197 break;
8198
8199 case OMP_CLAUSE_MAP:
e01d41e5
JJ
8200 if (code == OMP_TARGET_EXIT_DATA
8201 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
8202 {
8203 remove = true;
8204 break;
8205 }
acf0174b 8206 decl = OMP_CLAUSE_DECL (c);
e46c7770 8207 /* Data clauses associated with acc parallel reductions must be
c42cfb5c
CP
8208 compatible with present_or_copy. Warn and adjust the clause
8209 if that is not the case. */
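	  /* For illustration (a sketch; the exact map kinds depend on how the
	     front end lowers the data clauses): with
	       #pragma acc parallel copy(sum) reduction(+:sum)
	     the copy clause already behaves like present_or_copy and is left
	     alone, whereas something like
	       #pragma acc parallel create(sum) reduction(+:sum)
	     triggers the warning below and has its map kind promoted to
	     GOMP_MAP_TOFROM.  */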
8210 if (ctx->region_type == ORT_ACC_PARALLEL)
8211 {
8212 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
8213 n = NULL;
8214
8215 if (DECL_P (t))
8216 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
8217
8218 if (n && (n->value & GOVD_REDUCTION))
8219 {
8220 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
8221
8222 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
8223 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
8224 && kind != GOMP_MAP_FORCE_PRESENT
8225 && kind != GOMP_MAP_POINTER)
8226 {
8227 warning_at (OMP_CLAUSE_LOCATION (c), 0,
8228 "incompatible data clause with reduction "
8229 "on %qE; promoting to present_or_copy",
8230 DECL_NAME (t));
8231 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
8232 }
8233 }
8234 }
acf0174b 8235 if (!DECL_P (decl))
d9a6bd32
JJ
8236 {
8237 if ((ctx->region_type & ORT_TARGET) != 0
8238 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8239 {
8240 if (TREE_CODE (decl) == INDIRECT_REF
8241 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
8242 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8243 == REFERENCE_TYPE))
8244 decl = TREE_OPERAND (decl, 0);
8245 if (TREE_CODE (decl) == COMPONENT_REF)
8246 {
8247 while (TREE_CODE (decl) == COMPONENT_REF)
8248 decl = TREE_OPERAND (decl, 0);
8249 if (DECL_P (decl))
8250 {
8251 n = splay_tree_lookup (ctx->variables,
8252 (splay_tree_key) decl);
8253 if (!(n->value & GOVD_SEEN))
8254 remove = true;
8255 }
8256 }
8257 }
8258 break;
8259 }
acf0174b 8260 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
d9a6bd32
JJ
8261 if ((ctx->region_type & ORT_TARGET) != 0
8262 && !(n->value & GOVD_SEEN)
4a38b02b
IV
8263 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
8264 && !lookup_attribute ("omp declare target link",
8265 DECL_ATTRIBUTES (decl)))
d9a6bd32
JJ
8266 {
8267 remove = true;
8268 /* For struct element mapping, if the struct is never referenced
8269 in the target block and none of the mappings has an always modifier,
8270 remove all the struct element mappings, which immediately
8271 follow the GOMP_MAP_STRUCT map clause. */
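	      /* An illustrative sketch, assuming the front end groups
	         component mappings this way:
	           #pragma omp target map(to: s.x, s.y)
	         yields a GOMP_MAP_STRUCT clause for S whose OMP_CLAUSE_SIZE
	         is 2, followed by the two element mappings; when S is unused
	         in the region, the loop below drops those element mappings
	         along with the GOMP_MAP_STRUCT clause itself.  */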
8272 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
8273 {
8274 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
8275 while (cnt--)
8276 OMP_CLAUSE_CHAIN (c)
8277 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
8278 }
8279 }
8280 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
8281 && code == OMP_TARGET_EXIT_DATA)
acf0174b
JJ
8282 remove = true;
8283 else if (DECL_SIZE (decl)
8284 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
d9a6bd32 8285 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
e01d41e5
JJ
8286 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
8287 && (OMP_CLAUSE_MAP_KIND (c)
8288 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
acf0174b 8289 {
41dbbb37
TS
8290 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
8291 for these, TREE_CODE (DECL_SIZE (decl)) will always be
8292 INTEGER_CST. */
8293 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
8294
acf0174b
JJ
8295 tree decl2 = DECL_VALUE_EXPR (decl);
8296 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
8297 decl2 = TREE_OPERAND (decl2, 0);
8298 gcc_assert (DECL_P (decl2));
8299 tree mem = build_simple_mem_ref (decl2);
8300 OMP_CLAUSE_DECL (c) = mem;
8301 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8302 if (ctx->outer_context)
8303 {
8304 omp_notice_variable (ctx->outer_context, decl2, true);
8305 omp_notice_variable (ctx->outer_context,
8306 OMP_CLAUSE_SIZE (c), true);
8307 }
d9a6bd32
JJ
8308 if (((ctx->region_type & ORT_TARGET) != 0
8309 || !ctx->target_firstprivatize_array_bases)
8310 && ((n->value & GOVD_SEEN) == 0
8311 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
8312 {
8313 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8314 OMP_CLAUSE_MAP);
8315 OMP_CLAUSE_DECL (nc) = decl;
8316 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8317 if (ctx->target_firstprivatize_array_bases)
8318 OMP_CLAUSE_SET_MAP_KIND (nc,
8319 GOMP_MAP_FIRSTPRIVATE_POINTER);
8320 else
8321 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
8322 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
8323 OMP_CLAUSE_CHAIN (c) = nc;
8324 c = nc;
8325 }
8326 }
8327 else
8328 {
8329 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8330 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
e01d41e5
JJ
8331 gcc_assert ((n->value & GOVD_SEEN) == 0
8332 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
8333 == 0));
acf0174b
JJ
8334 }
8335 break;
8336
8337 case OMP_CLAUSE_TO:
8338 case OMP_CLAUSE_FROM:
41dbbb37 8339 case OMP_CLAUSE__CACHE_:
acf0174b
JJ
8340 decl = OMP_CLAUSE_DECL (c);
8341 if (!DECL_P (decl))
8342 break;
8343 if (DECL_SIZE (decl)
8344 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
8345 {
8346 tree decl2 = DECL_VALUE_EXPR (decl);
8347 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
8348 decl2 = TREE_OPERAND (decl2, 0);
8349 gcc_assert (DECL_P (decl2));
8350 tree mem = build_simple_mem_ref (decl2);
8351 OMP_CLAUSE_DECL (c) = mem;
8352 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8353 if (ctx->outer_context)
8354 {
8355 omp_notice_variable (ctx->outer_context, decl2, true);
8356 omp_notice_variable (ctx->outer_context,
8357 OMP_CLAUSE_SIZE (c), true);
8358 }
8359 }
b46ebd6c
JJ
8360 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8361 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
acf0174b
JJ
8362 break;
8363
953ff289 8364 case OMP_CLAUSE_REDUCTION:
1a80d6b8 8365 decl = OMP_CLAUSE_DECL (c);
c42cfb5c
CP
8366 /* OpenACC reductions need a present_or_copy data clause.
8367 Add one if necessary. Error if the reduction is private. */
8368 if (ctx->region_type == ORT_ACC_PARALLEL)
8369 {
8370 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8371 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
8372 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
8373 "reduction on %qE", DECL_NAME (decl));
8374 else if ((n->value & GOVD_MAP) == 0)
8375 {
8376 tree next = OMP_CLAUSE_CHAIN (c);
8377 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
8378 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
8379 OMP_CLAUSE_DECL (nc) = decl;
8380 OMP_CLAUSE_CHAIN (c) = nc;
8381 lang_hooks.decls.omp_finish_clause (nc, pre_p);
8382 while (1)
8383 {
8384 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
8385 if (OMP_CLAUSE_CHAIN (nc) == NULL)
8386 break;
8387 nc = OMP_CLAUSE_CHAIN (nc);
8388 }
8389 OMP_CLAUSE_CHAIN (nc) = next;
8390 n->value |= GOVD_MAP;
8391 }
8392 }
1a80d6b8
JJ
8393 if (DECL_P (decl)
8394 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8395 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8396 break;
953ff289
DN
8397 case OMP_CLAUSE_COPYIN:
8398 case OMP_CLAUSE_COPYPRIVATE:
8399 case OMP_CLAUSE_IF:
8400 case OMP_CLAUSE_NUM_THREADS:
acf0174b
JJ
8401 case OMP_CLAUSE_NUM_TEAMS:
8402 case OMP_CLAUSE_THREAD_LIMIT:
8403 case OMP_CLAUSE_DIST_SCHEDULE:
8404 case OMP_CLAUSE_DEVICE:
953ff289
DN
8405 case OMP_CLAUSE_SCHEDULE:
8406 case OMP_CLAUSE_NOWAIT:
8407 case OMP_CLAUSE_ORDERED:
8408 case OMP_CLAUSE_DEFAULT:
a68ab351
JJ
8409 case OMP_CLAUSE_UNTIED:
8410 case OMP_CLAUSE_COLLAPSE:
20906c66
JJ
8411 case OMP_CLAUSE_FINAL:
8412 case OMP_CLAUSE_MERGEABLE:
acf0174b 8413 case OMP_CLAUSE_PROC_BIND:
74bf76ed 8414 case OMP_CLAUSE_SAFELEN:
d9a6bd32 8415 case OMP_CLAUSE_SIMDLEN:
acf0174b 8416 case OMP_CLAUSE_DEPEND:
d9a6bd32
JJ
8417 case OMP_CLAUSE_PRIORITY:
8418 case OMP_CLAUSE_GRAINSIZE:
8419 case OMP_CLAUSE_NUM_TASKS:
8420 case OMP_CLAUSE_NOGROUP:
8421 case OMP_CLAUSE_THREADS:
8422 case OMP_CLAUSE_SIMD:
8423 case OMP_CLAUSE_HINT:
8424 case OMP_CLAUSE_DEFAULTMAP:
8425 case OMP_CLAUSE_USE_DEVICE_PTR:
8426 case OMP_CLAUSE_IS_DEVICE_PTR:
9a771876 8427 case OMP_CLAUSE__CILK_FOR_COUNT_:
41dbbb37
TS
8428 case OMP_CLAUSE_ASYNC:
8429 case OMP_CLAUSE_WAIT:
41dbbb37
TS
8430 case OMP_CLAUSE_INDEPENDENT:
8431 case OMP_CLAUSE_NUM_GANGS:
8432 case OMP_CLAUSE_NUM_WORKERS:
8433 case OMP_CLAUSE_VECTOR_LENGTH:
8434 case OMP_CLAUSE_GANG:
8435 case OMP_CLAUSE_WORKER:
8436 case OMP_CLAUSE_VECTOR:
8437 case OMP_CLAUSE_AUTO:
8438 case OMP_CLAUSE_SEQ:
27d6ba88
TS
8439 break;
8440
7a5e4956 8441 case OMP_CLAUSE_TILE:
27d6ba88
TS
8442 /* We're not yet making use of the information provided by OpenACC
8443 tile clauses. Discard these here, to simplify later middle end
8444 processing. */
8445 remove = true;
953ff289
DN
8446 break;
8447
8448 default:
8449 gcc_unreachable ();
8450 }
8451
8452 if (remove)
8453 *list_p = OMP_CLAUSE_CHAIN (c);
8454 else
8455 list_p = &OMP_CLAUSE_CHAIN (c);
8456 }
8457
8458 /* Add in any implicit data sharing. */
f014c653
JJ
8459 struct gimplify_adjust_omp_clauses_data data;
8460 data.list_p = list_p;
8461 data.pre_p = pre_p;
8462 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
b8698a0f 8463
953ff289
DN
8464 gimplify_omp_ctxp = ctx->outer_context;
8465 delete_omp_context (ctx);
8466}
8467
41dbbb37
TS
8468/* Gimplify OACC_CACHE. */
8469
8470static void
8471gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
8472{
8473 tree expr = *expr_p;
8474
182190f2 8475 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
d9a6bd32 8476 OACC_CACHE);
1a80d6b8
JJ
8477 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
8478 OACC_CACHE);
41dbbb37
TS
8479
8480 /* TODO: Do something sensible with this information. */
8481
8482 *expr_p = NULL_TREE;
8483}
8484
6e232ba4
JN
8485/* Helper function of gimplify_oacc_declare. Its purpose is to translate,
8486 if required, the 'kind' in CLAUSE into an 'entry' kind and an 'exit'
8487 kind. The entry kind will replace the one in CLAUSE, while the exit
8488 kind will be used in a new omp_clause and returned to the caller. */
8489
8490static tree
8491gimplify_oacc_declare_1 (tree clause)
8492{
8493 HOST_WIDE_INT kind, new_op;
8494 bool ret = false;
8495 tree c = NULL;
8496
8497 kind = OMP_CLAUSE_MAP_KIND (clause);
8498
8499 switch (kind)
8500 {
8501 case GOMP_MAP_ALLOC:
8502 case GOMP_MAP_FORCE_ALLOC:
8503 case GOMP_MAP_FORCE_TO:
91106e84 8504 new_op = GOMP_MAP_DELETE;
6e232ba4
JN
8505 ret = true;
8506 break;
8507
8508 case GOMP_MAP_FORCE_FROM:
8509 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
8510 new_op = GOMP_MAP_FORCE_FROM;
8511 ret = true;
8512 break;
8513
8514 case GOMP_MAP_FORCE_TOFROM:
8515 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_TO);
8516 new_op = GOMP_MAP_FORCE_FROM;
8517 ret = true;
8518 break;
8519
8520 case GOMP_MAP_FROM:
8521 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
8522 new_op = GOMP_MAP_FROM;
8523 ret = true;
8524 break;
8525
8526 case GOMP_MAP_TOFROM:
8527 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
8528 new_op = GOMP_MAP_FROM;
8529 ret = true;
8530 break;
8531
8532 case GOMP_MAP_DEVICE_RESIDENT:
8533 case GOMP_MAP_FORCE_DEVICEPTR:
8534 case GOMP_MAP_FORCE_PRESENT:
8535 case GOMP_MAP_LINK:
8536 case GOMP_MAP_POINTER:
8537 case GOMP_MAP_TO:
8538 break;
8539
8540 default:
8541 gcc_unreachable ();
8542 break;
8543 }
8544
8545 if (ret)
8546 {
8547 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
8548 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
8549 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
8550 }
8551
8552 return c;
8553}
8554
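/* A sketch of the effect of gimplify_oacc_declare_1, assuming the front end
   lowers an OpenACC 'declare copy (x)' clause to GOMP_MAP_TOFROM: the switch
   above rewrites the entry kind in the clause to GOMP_MAP_TO and returns a
   new clause with GOMP_MAP_FROM, so X is copied to the device where the
   declare directive takes effect and copied back on the matching exit.  */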
8555/* Gimplify OACC_DECLARE. */
8556
8557static void
8558gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
8559{
8560 tree expr = *expr_p;
8561 gomp_target *stmt;
8562 tree clauses, t;
8563
8564 clauses = OACC_DECLARE_CLAUSES (expr);
8565
8566 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
8567
8568 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
8569 {
8570 tree decl = OMP_CLAUSE_DECL (t);
8571
8572 if (TREE_CODE (decl) == MEM_REF)
8573 continue;
8574
8575 if (TREE_CODE (decl) == VAR_DECL
8576 && !is_global_var (decl)
8577 && DECL_CONTEXT (decl) == current_function_decl)
8578 {
8579 tree c = gimplify_oacc_declare_1 (t);
8580 if (c)
8581 {
8582 if (oacc_declare_returns == NULL)
8583 oacc_declare_returns = new hash_map<tree, tree>;
8584
8585 oacc_declare_returns->put (decl, c);
8586 }
8587 }
8588
8589 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
8590 }
8591
8592 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
8593 clauses);
8594
8595 gimplify_seq_add_stmt (pre_p, stmt);
8596
8597 *expr_p = NULL_TREE;
8598}
8599
953ff289
DN
8600/* Gimplify the contents of an OMP_PARALLEL statement. This involves
8601 gimplification of the body, as well as scanning the body for used
8602 variables. We need to do this scan now, because variable-sized
8603 decls will be decomposed during gimplification. */
8604
726a989a
RB
8605static void
8606gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
953ff289
DN
8607{
8608 tree expr = *expr_p;
355fe088 8609 gimple *g;
726a989a 8610 gimple_seq body = NULL;
953ff289 8611
a68ab351
JJ
8612 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
8613 OMP_PARALLEL_COMBINED (expr)
8614 ? ORT_COMBINED_PARALLEL
d9a6bd32 8615 : ORT_PARALLEL, OMP_PARALLEL);
953ff289 8616
45852dcc 8617 push_gimplify_context ();
953ff289 8618
726a989a
RB
8619 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
8620 if (gimple_code (g) == GIMPLE_BIND)
8621 pop_gimplify_context (g);
50674e96 8622 else
726a989a 8623 pop_gimplify_context (NULL);
953ff289 8624
1a80d6b8 8625 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
d9a6bd32 8626 OMP_PARALLEL);
953ff289 8627
726a989a
RB
8628 g = gimple_build_omp_parallel (body,
8629 OMP_PARALLEL_CLAUSES (expr),
8630 NULL_TREE, NULL_TREE);
8631 if (OMP_PARALLEL_COMBINED (expr))
8632 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
8633 gimplify_seq_add_stmt (pre_p, g);
8634 *expr_p = NULL_TREE;
953ff289
DN
8635}
8636
a68ab351
JJ
8637/* Gimplify the contents of an OMP_TASK statement. This involves
8638 gimplification of the body, as well as scanning the body for used
8639 variables. We need to do this scan now, because variable-sized
8640 decls will be decomposed during gimplification. */
953ff289 8641
726a989a
RB
8642static void
8643gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
953ff289 8644{
a68ab351 8645 tree expr = *expr_p;
355fe088 8646 gimple *g;
726a989a 8647 gimple_seq body = NULL;
953ff289 8648
f22f4340
JJ
8649 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
8650 find_omp_clause (OMP_TASK_CLAUSES (expr),
8651 OMP_CLAUSE_UNTIED)
d9a6bd32 8652 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
953ff289 8653
45852dcc 8654 push_gimplify_context ();
953ff289 8655
726a989a
RB
8656 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
8657 if (gimple_code (g) == GIMPLE_BIND)
8658 pop_gimplify_context (g);
953ff289 8659 else
726a989a 8660 pop_gimplify_context (NULL);
953ff289 8661
1a80d6b8
JJ
8662 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
8663 OMP_TASK);
917948d3 8664
726a989a
RB
8665 g = gimple_build_omp_task (body,
8666 OMP_TASK_CLAUSES (expr),
8667 NULL_TREE, NULL_TREE,
8668 NULL_TREE, NULL_TREE, NULL_TREE);
8669 gimplify_seq_add_stmt (pre_p, g);
8670 *expr_p = NULL_TREE;
a68ab351
JJ
8671}
8672
acf0174b
JJ
8673/* Helper function of gimplify_omp_for, find an OMP_FOR or OMP_SIMD
8674 with non-NULL OMP_FOR_INIT. */
8675
8676static tree
8677find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
8678{
8679 *walk_subtrees = 0;
8680 switch (TREE_CODE (*tp))
8681 {
8682 case OMP_FOR:
8683 *walk_subtrees = 1;
8684 /* FALLTHRU */
8685 case OMP_SIMD:
8686 if (OMP_FOR_INIT (*tp) != NULL_TREE)
8687 return *tp;
8688 break;
8689 case BIND_EXPR:
8690 case STATEMENT_LIST:
8691 case OMP_PARALLEL:
8692 *walk_subtrees = 1;
8693 break;
8694 default:
8695 break;
8696 }
8697 return NULL_TREE;
8698}
8699
a68ab351
JJ
8700/* Gimplify the gross structure of an OMP_FOR statement. */
8701
8702static enum gimplify_status
726a989a 8703gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
a68ab351 8704{
9ce1688b 8705 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
32e8bb8e
ILT
8706 enum gimplify_status ret = GS_ALL_DONE;
8707 enum gimplify_status tret;
538dd0b7 8708 gomp_for *gfor;
726a989a 8709 gimple_seq for_body, for_pre_body;
a68ab351 8710 int i;
74bf76ed 8711 bitmap has_decl_expr = NULL;
d9a6bd32 8712 enum omp_region_type ort = ORT_WORKSHARE;
a68ab351 8713
acf0174b 8714 orig_for_stmt = for_stmt = *expr_p;
a68ab351 8715
41dbbb37
TS
8716 switch (TREE_CODE (for_stmt))
8717 {
8718 case OMP_FOR:
8719 case CILK_FOR:
8720 case OMP_DISTRIBUTE:
182190f2 8721 break;
41dbbb37 8722 case OACC_LOOP:
182190f2 8723 ort = ORT_ACC;
d9a6bd32
JJ
8724 break;
8725 case OMP_TASKLOOP:
8726 if (find_omp_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
8727 ort = ORT_UNTIED_TASK;
8728 else
8729 ort = ORT_TASK;
41dbbb37
TS
8730 break;
8731 case OMP_SIMD:
8732 case CILK_SIMD:
d9a6bd32 8733 ort = ORT_SIMD;
41dbbb37
TS
8734 break;
8735 default:
8736 gcc_unreachable ();
8737 }
8738
41b37d5e
JJ
8739 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
8740 clause for the IV. */
d9a6bd32 8741 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
41b37d5e
JJ
8742 {
8743 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
8744 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
8745 decl = TREE_OPERAND (t, 0);
8746 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
8747 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8748 && OMP_CLAUSE_DECL (c) == decl)
8749 {
8750 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
8751 break;
8752 }
8753 }
8754
9ce1688b
JJ
8755 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
8756 {
8757 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
8758 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
8759 find_combined_omp_for, NULL, NULL);
8760 if (inner_for_stmt == NULL_TREE)
8761 {
8762 gcc_assert (seen_error ());
8763 *expr_p = NULL_TREE;
8764 return GS_ERROR;
8765 }
8766 }
8767
d9a6bd32
JJ
8768 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
8769 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
8770 TREE_CODE (for_stmt));
8771
9cf32741
JJ
8772 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
8773 gimplify_omp_ctxp->distribute = true;
917948d3 8774
726a989a
RB
8775 /* Handle OMP_FOR_INIT. */
8776 for_pre_body = NULL;
d9a6bd32 8777 if (ort == ORT_SIMD && OMP_FOR_PRE_BODY (for_stmt))
74bf76ed
JJ
8778 {
8779 has_decl_expr = BITMAP_ALLOC (NULL);
8780 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
8781 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
acf0174b 8782 == VAR_DECL)
74bf76ed
JJ
8783 {
8784 t = OMP_FOR_PRE_BODY (for_stmt);
8785 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
8786 }
8787 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
8788 {
8789 tree_stmt_iterator si;
8790 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
8791 tsi_next (&si))
8792 {
8793 t = tsi_stmt (si);
8794 if (TREE_CODE (t) == DECL_EXPR
8795 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
8796 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
8797 }
8798 }
8799 }
d9a6bd32
JJ
8800 if (OMP_FOR_PRE_BODY (for_stmt))
8801 {
8802 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
8803 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
8804 else
8805 {
8806 struct gimplify_omp_ctx ctx;
8807 memset (&ctx, 0, sizeof (ctx));
8808 ctx.region_type = ORT_NONE;
8809 gimplify_omp_ctxp = &ctx;
8810 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
8811 gimplify_omp_ctxp = NULL;
8812 }
8813 }
726a989a 8814 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
a68ab351 8815
acf0174b 8816 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
d9a6bd32
JJ
8817 for_stmt = inner_for_stmt;
8818
8819 /* For taskloop, we need to gimplify the start, end and step before the
8820 taskloop, outside of the taskloop omp context. */
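      /* An illustrative sketch (hypothetical loop): for
           #pragma omp taskloop
           for (i = 0; i < x + y; i += step)
         the bound X + Y and the non-constant STEP are evaluated into
         temporaries here, before the construct, and those temporaries are
         added as firstprivate clauses on the taskloop so the explicit task
         sees stable copies of them.  */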
8821 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
acf0174b 8822 {
d9a6bd32
JJ
8823 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
8824 {
8825 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
8826 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
8827 {
8828 TREE_OPERAND (t, 1)
8829 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
381cdae4 8830 pre_p, NULL, false);
d9a6bd32
JJ
8831 tree c = build_omp_clause (input_location,
8832 OMP_CLAUSE_FIRSTPRIVATE);
8833 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
8834 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
8835 OMP_FOR_CLAUSES (orig_for_stmt) = c;
8836 }
8837
8838 /* Handle OMP_FOR_COND. */
8839 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
8840 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
8841 {
8842 TREE_OPERAND (t, 1)
8843 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
8844 gimple_seq_empty_p (for_pre_body)
381cdae4
RB
8845 ? pre_p : &for_pre_body, NULL,
8846 false);
d9a6bd32
JJ
8847 tree c = build_omp_clause (input_location,
8848 OMP_CLAUSE_FIRSTPRIVATE);
8849 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
8850 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
8851 OMP_FOR_CLAUSES (orig_for_stmt) = c;
8852 }
8853
8854 /* Handle OMP_FOR_INCR. */
8855 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
8856 if (TREE_CODE (t) == MODIFY_EXPR)
8857 {
8858 decl = TREE_OPERAND (t, 0);
8859 t = TREE_OPERAND (t, 1);
8860 tree *tp = &TREE_OPERAND (t, 1);
8861 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
8862 tp = &TREE_OPERAND (t, 0);
8863
8864 if (!is_gimple_constant (*tp))
8865 {
8866 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
8867 ? pre_p : &for_pre_body;
381cdae4 8868 *tp = get_initialized_tmp_var (*tp, seq, NULL, false);
d9a6bd32
JJ
8869 tree c = build_omp_clause (input_location,
8870 OMP_CLAUSE_FIRSTPRIVATE);
8871 OMP_CLAUSE_DECL (c) = *tp;
8872 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
8873 OMP_FOR_CLAUSES (orig_for_stmt) = c;
8874 }
8875 }
8876 }
8877
8878 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
8879 OMP_TASKLOOP);
acf0174b
JJ
8880 }
8881
d9a6bd32
JJ
8882 if (orig_for_stmt != for_stmt)
8883 gimplify_omp_ctxp->combined_loop = true;
8884
355a7673 8885 for_body = NULL;
a68ab351
JJ
8886 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
8887 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
8888 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
8889 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
d9a6bd32
JJ
8890
8891 tree c = find_omp_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
8892 bool is_doacross = false;
8893 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
8894 {
8895 is_doacross = true;
8896 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
8897 (OMP_FOR_INIT (for_stmt))
8898 * 2);
8899 }
8900 int collapse = 1;
8901 c = find_omp_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
8902 if (c)
8903 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
a68ab351
JJ
8904 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
8905 {
8906 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
726a989a
RB
8907 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
8908 decl = TREE_OPERAND (t, 0);
a68ab351
JJ
8909 gcc_assert (DECL_P (decl));
8910 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
8911 || POINTER_TYPE_P (TREE_TYPE (decl)));
d9a6bd32
JJ
8912 if (is_doacross)
8913 {
8914 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
8915 gimplify_omp_ctxp->loop_iter_var.quick_push
8916 (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i));
8917 else
8918 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
8919 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
8920 }
a68ab351
JJ
8921
8922 /* Make sure the iteration variable is private. */
74bf76ed 8923 tree c = NULL_TREE;
f7468577 8924 tree c2 = NULL_TREE;
acf0174b
JJ
8925 if (orig_for_stmt != for_stmt)
8926 /* Do this only on innermost construct for combined ones. */;
d9a6bd32 8927 else if (ort == ORT_SIMD)
74bf76ed
JJ
8928 {
8929 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
d9a6bd32 8930 (splay_tree_key) decl);
f7468577
JJ
8931 omp_is_private (gimplify_omp_ctxp, decl,
8932 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
8933 != 1));
74bf76ed
JJ
8934 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
8935 omp_notice_variable (gimplify_omp_ctxp, decl, true);
8936 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
8937 {
8938 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
8939 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
41b37d5e
JJ
8940 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
8941 if ((has_decl_expr
8942 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
8943 || omp_no_lastprivate (gimplify_omp_ctxp))
8944 {
8945 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
8946 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8947 }
499c20bb
JJ
8948 struct gimplify_omp_ctx *outer
8949 = gimplify_omp_ctxp->outer_context;
8950 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8951 {
8952 if (outer->region_type == ORT_WORKSHARE
8953 && outer->combined_loop)
8954 {
8955 n = splay_tree_lookup (outer->variables,
8956 (splay_tree_key)decl);
8957 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
8958 {
8959 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
8960 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8961 }
e01d41e5
JJ
8962 else
8963 {
8964 struct gimplify_omp_ctx *octx = outer->outer_context;
8965 if (octx
8966 && octx->region_type == ORT_COMBINED_PARALLEL
8967 && octx->outer_context
8968 && (octx->outer_context->region_type
8969 == ORT_WORKSHARE)
8970 && octx->outer_context->combined_loop)
8971 {
8972 octx = octx->outer_context;
8973 n = splay_tree_lookup (octx->variables,
8974 (splay_tree_key)decl);
8975 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
8976 {
8977 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
8978 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8979 }
8980 }
8981 }
499c20bb
JJ
8982 }
8983 }
8984
74bf76ed
JJ
8985 OMP_CLAUSE_DECL (c) = decl;
8986 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
8987 OMP_FOR_CLAUSES (for_stmt) = c;
41b37d5e 8988 omp_add_variable (gimplify_omp_ctxp, decl, flags);
41b37d5e
JJ
8989 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8990 {
8991 if (outer->region_type == ORT_WORKSHARE
8992 && outer->combined_loop)
8993 {
8994 if (outer->outer_context
8995 && (outer->outer_context->region_type
8996 == ORT_COMBINED_PARALLEL))
8997 outer = outer->outer_context;
8998 else if (omp_check_private (outer, decl, false))
8999 outer = NULL;
9000 }
d9a6bd32
JJ
9001 else if (((outer->region_type & ORT_TASK) != 0)
9002 && outer->combined_loop
9003 && !omp_check_private (gimplify_omp_ctxp,
9004 decl, false))
9005 ;
41b37d5e 9006 else if (outer->region_type != ORT_COMBINED_PARALLEL)
84311083
JJ
9007 {
9008 omp_notice_variable (outer, decl, true);
9009 outer = NULL;
9010 }
41b37d5e
JJ
9011 if (outer)
9012 {
cbdfbde8
JJ
9013 n = splay_tree_lookup (outer->variables,
9014 (splay_tree_key)decl);
9015 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9016 {
9017 omp_add_variable (outer, decl,
9018 GOVD_LASTPRIVATE | GOVD_SEEN);
e01d41e5
JJ
9019 if (outer->region_type == ORT_COMBINED_PARALLEL
9020 && outer->outer_context
9021 && (outer->outer_context->region_type
9022 == ORT_WORKSHARE)
9023 && outer->outer_context->combined_loop)
9024 {
9025 outer = outer->outer_context;
9026 n = splay_tree_lookup (outer->variables,
9027 (splay_tree_key)decl);
9028 if (omp_check_private (outer, decl, false))
9029 outer = NULL;
9030 else if (n == NULL
9031 || ((n->value & GOVD_DATA_SHARE_CLASS)
9032 == 0))
9033 omp_add_variable (outer, decl,
9034 GOVD_LASTPRIVATE
9035 | GOVD_SEEN);
9036 else
9037 outer = NULL;
9038 }
9039 if (outer && outer->outer_context
9040 && (outer->outer_context->region_type
9041 == ORT_COMBINED_TEAMS))
9042 {
9043 outer = outer->outer_context;
9044 n = splay_tree_lookup (outer->variables,
9045 (splay_tree_key)decl);
9046 if (n == NULL
9047 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9048 omp_add_variable (outer, decl,
9049 GOVD_SHARED | GOVD_SEEN);
9050 else
9051 outer = NULL;
9052 }
9053 if (outer && outer->outer_context)
cbdfbde8
JJ
9054 omp_notice_variable (outer->outer_context, decl,
9055 true);
9056 }
41b37d5e
JJ
9057 }
9058 }
74bf76ed
JJ
9059 }
9060 else
9061 {
9062 bool lastprivate
9063 = (!has_decl_expr
41b37d5e
JJ
9064 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
9065 && !omp_no_lastprivate (gimplify_omp_ctxp);
9066 struct gimplify_omp_ctx *outer
9067 = gimplify_omp_ctxp->outer_context;
9068 if (outer && lastprivate)
56ad0e38 9069 {
41b37d5e
JJ
9070 if (outer->region_type == ORT_WORKSHARE
9071 && outer->combined_loop)
9072 {
499c20bb
JJ
9073 n = splay_tree_lookup (outer->variables,
9074 (splay_tree_key)decl);
9075 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9076 {
9077 lastprivate = false;
9078 outer = NULL;
9079 }
9080 else if (outer->outer_context
9081 && (outer->outer_context->region_type
9082 == ORT_COMBINED_PARALLEL))
41b37d5e
JJ
9083 outer = outer->outer_context;
9084 else if (omp_check_private (outer, decl, false))
9085 outer = NULL;
9086 }
d9a6bd32
JJ
9087 else if (((outer->region_type & ORT_TASK) != 0)
9088 && outer->combined_loop
9089 && !omp_check_private (gimplify_omp_ctxp,
9090 decl, false))
9091 ;
41b37d5e 9092 else if (outer->region_type != ORT_COMBINED_PARALLEL)
84311083
JJ
9093 {
9094 omp_notice_variable (outer, decl, true);
9095 outer = NULL;
9096 }
41b37d5e 9097 if (outer)
56ad0e38 9098 {
cbdfbde8
JJ
9099 n = splay_tree_lookup (outer->variables,
9100 (splay_tree_key)decl);
9101 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9102 {
9103 omp_add_variable (outer, decl,
9104 GOVD_LASTPRIVATE | GOVD_SEEN);
e01d41e5
JJ
9105 if (outer->region_type == ORT_COMBINED_PARALLEL
9106 && outer->outer_context
9107 && (outer->outer_context->region_type
9108 == ORT_WORKSHARE)
9109 && outer->outer_context->combined_loop)
9110 {
9111 outer = outer->outer_context;
9112 n = splay_tree_lookup (outer->variables,
9113 (splay_tree_key)decl);
9114 if (omp_check_private (outer, decl, false))
9115 outer = NULL;
9116 else if (n == NULL
9117 || ((n->value & GOVD_DATA_SHARE_CLASS)
9118 == 0))
9119 omp_add_variable (outer, decl,
9120 GOVD_LASTPRIVATE
9121 | GOVD_SEEN);
9122 else
9123 outer = NULL;
9124 }
9125 if (outer && outer->outer_context
9126 && (outer->outer_context->region_type
9127 == ORT_COMBINED_TEAMS))
9128 {
9129 outer = outer->outer_context;
9130 n = splay_tree_lookup (outer->variables,
9131 (splay_tree_key)decl);
9132 if (n == NULL
9133 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9134 omp_add_variable (outer, decl,
9135 GOVD_SHARED | GOVD_SEEN);
9136 else
9137 outer = NULL;
9138 }
9139 if (outer && outer->outer_context)
cbdfbde8
JJ
9140 omp_notice_variable (outer->outer_context, decl,
9141 true);
9142 }
56ad0e38
JJ
9143 }
9144 }
41b37d5e 9145
74bf76ed
JJ
9146 c = build_omp_clause (input_location,
9147 lastprivate ? OMP_CLAUSE_LASTPRIVATE
9148 : OMP_CLAUSE_PRIVATE);
9149 OMP_CLAUSE_DECL (c) = decl;
9150 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
f7468577 9151 OMP_FOR_CLAUSES (for_stmt) = c;
74bf76ed
JJ
9152 omp_add_variable (gimplify_omp_ctxp, decl,
9153 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
f7468577 9154 | GOVD_EXPLICIT | GOVD_SEEN);
74bf76ed
JJ
9155 c = NULL_TREE;
9156 }
9157 }
f7468577 9158 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
a68ab351
JJ
9159 omp_notice_variable (gimplify_omp_ctxp, decl, true);
9160 else
9161 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
9162
9163 /* If DECL is not a gimple register, create a temporary variable to act
9164 as an iteration counter. This is valid, since DECL cannot be
56ad0e38
JJ
9165 modified in the body of the loop. Similarly for any iteration vars
9166 in simd with collapse > 1 where the iterator vars must be
9167 lastprivate. */
acf0174b
JJ
9168 if (orig_for_stmt != for_stmt)
9169 var = decl;
56ad0e38 9170 else if (!is_gimple_reg (decl)
d9a6bd32
JJ
9171 || (ort == ORT_SIMD
9172 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
a68ab351 9173 {
ab62397a
JJ
9174 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9175 /* Make sure omp_add_variable is not called on it prematurely.
9176 We call it ourselves a few lines later. */
9177 gimplify_omp_ctxp = NULL;
a68ab351 9178 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
ab62397a 9179 gimplify_omp_ctxp = ctx;
726a989a 9180 TREE_OPERAND (t, 0) = var;
b8698a0f 9181
726a989a 9182 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
953ff289 9183
d9a6bd32
JJ
9184 if (ort == ORT_SIMD
9185 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
f7468577
JJ
9186 {
9187 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
9188 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
9189 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
9190 OMP_CLAUSE_DECL (c2) = var;
9191 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
9192 OMP_FOR_CLAUSES (for_stmt) = c2;
9193 omp_add_variable (gimplify_omp_ctxp, var,
9194 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
9195 if (c == NULL_TREE)
9196 {
9197 c = c2;
9198 c2 = NULL_TREE;
9199 }
9200 }
9201 else
9202 omp_add_variable (gimplify_omp_ctxp, var,
9203 GOVD_PRIVATE | GOVD_SEEN);
a68ab351
JJ
9204 }
9205 else
9206 var = decl;
07beea0d 9207
32e8bb8e 9208 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
381cdae4 9209 is_gimple_val, fb_rvalue, false);
32e8bb8e 9210 ret = MIN (ret, tret);
726a989a
RB
9211 if (ret == GS_ERROR)
9212 return ret;
953ff289 9213
726a989a 9214 /* Handle OMP_FOR_COND. */
a68ab351
JJ
9215 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
9216 gcc_assert (COMPARISON_CLASS_P (t));
726a989a 9217 gcc_assert (TREE_OPERAND (t, 0) == decl);
b56b9fe3 9218
32e8bb8e 9219 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
381cdae4 9220 is_gimple_val, fb_rvalue, false);
32e8bb8e 9221 ret = MIN (ret, tret);
917948d3 9222
726a989a 9223 /* Handle OMP_FOR_INCR. */
a68ab351 9224 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
953ff289
DN
9225 switch (TREE_CODE (t))
9226 {
a68ab351
JJ
9227 case PREINCREMENT_EXPR:
9228 case POSTINCREMENT_EXPR:
c02065fc
AH
9229 {
9230 tree decl = TREE_OPERAND (t, 0);
da6f124d
JJ
9231 /* c_omp_for_incr_canonicalize_ptr() should have been
9232 called to massage things appropriately. */
c02065fc
AH
9233 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
9234
9235 if (orig_for_stmt != for_stmt)
9236 break;
9237 t = build_int_cst (TREE_TYPE (decl), 1);
9238 if (c)
9239 OMP_CLAUSE_LINEAR_STEP (c) = t;
9240 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
9241 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
9242 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
acf0174b 9243 break;
c02065fc 9244 }
a68ab351
JJ
9245
9246 case PREDECREMENT_EXPR:
9247 case POSTDECREMENT_EXPR:
da6f124d
JJ
9248 /* c_omp_for_incr_canonicalize_ptr() should have been
9249 called to massage things appropriately. */
9250 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
acf0174b
JJ
9251 if (orig_for_stmt != for_stmt)
9252 break;
a68ab351 9253 t = build_int_cst (TREE_TYPE (decl), -1);
74bf76ed
JJ
9254 if (c)
9255 OMP_CLAUSE_LINEAR_STEP (c) = t;
a68ab351 9256 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
726a989a 9257 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
a68ab351
JJ
9258 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
9259 break;
9260
726a989a
RB
9261 case MODIFY_EXPR:
9262 gcc_assert (TREE_OPERAND (t, 0) == decl);
9263 TREE_OPERAND (t, 0) = var;
a68ab351 9264
726a989a 9265 t = TREE_OPERAND (t, 1);
a68ab351 9266 switch (TREE_CODE (t))
953ff289 9267 {
a68ab351
JJ
9268 case PLUS_EXPR:
9269 if (TREE_OPERAND (t, 1) == decl)
9270 {
9271 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
9272 TREE_OPERAND (t, 0) = var;
9273 break;
9274 }
9275
9276 /* Fallthru. */
9277 case MINUS_EXPR:
9278 case POINTER_PLUS_EXPR:
9279 gcc_assert (TREE_OPERAND (t, 0) == decl);
917948d3 9280 TREE_OPERAND (t, 0) = var;
953ff289 9281 break;
a68ab351
JJ
9282 default:
9283 gcc_unreachable ();
953ff289 9284 }
917948d3 9285
32e8bb8e 9286 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
381cdae4 9287 is_gimple_val, fb_rvalue, false);
32e8bb8e 9288 ret = MIN (ret, tret);
74bf76ed
JJ
9289 if (c)
9290 {
da6f124d
JJ
9291 tree step = TREE_OPERAND (t, 1);
9292 tree stept = TREE_TYPE (decl);
9293 if (POINTER_TYPE_P (stept))
9294 stept = sizetype;
9295 step = fold_convert (stept, step);
74bf76ed 9296 if (TREE_CODE (t) == MINUS_EXPR)
da6f124d
JJ
9297 step = fold_build1 (NEGATE_EXPR, stept, step);
9298 OMP_CLAUSE_LINEAR_STEP (c) = step;
9299 if (step != TREE_OPERAND (t, 1))
74bf76ed 9300 {
74bf76ed
JJ
9301 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
9302 &for_pre_body, NULL,
381cdae4 9303 is_gimple_val, fb_rvalue, false);
74bf76ed
JJ
9304 ret = MIN (ret, tret);
9305 }
9306 }
953ff289 9307 break;
a68ab351 9308
953ff289
DN
9309 default:
9310 gcc_unreachable ();
9311 }
9312
f7468577
JJ
9313 if (c2)
9314 {
9315 gcc_assert (c);
9316 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
9317 }
9318
d9a6bd32 9319 if ((var != decl || collapse > 1) && orig_for_stmt == for_stmt)
a68ab351 9320 {
a68ab351 9321 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
f7468577
JJ
9322 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
9323 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
9324 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9325 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
9326 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
9327 && OMP_CLAUSE_DECL (c) == decl)
726a989a 9328 {
d9a6bd32
JJ
9329 if (is_doacross && (collapse == 1 || i >= collapse))
9330 t = var;
9331 else
9332 {
9333 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9334 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9335 gcc_assert (TREE_OPERAND (t, 0) == var);
9336 t = TREE_OPERAND (t, 1);
9337 gcc_assert (TREE_CODE (t) == PLUS_EXPR
9338 || TREE_CODE (t) == MINUS_EXPR
9339 || TREE_CODE (t) == POINTER_PLUS_EXPR);
9340 gcc_assert (TREE_OPERAND (t, 0) == var);
9341 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
9342 is_doacross ? var : decl,
9343 TREE_OPERAND (t, 1));
9344 }
f7468577
JJ
9345 gimple_seq *seq;
9346 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
9347 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
9348 else
9349 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
9350 gimplify_assign (decl, t, seq);
a68ab351
JJ
9351 }
9352 }
953ff289
DN
9353 }
9354
74bf76ed
JJ
9355 BITMAP_FREE (has_decl_expr);
9356
d9a6bd32
JJ
9357 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9358 {
9359 push_gimplify_context ();
9360 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
9361 {
9362 OMP_FOR_BODY (orig_for_stmt)
9363 = build3 (BIND_EXPR, void_type_node, NULL,
9364 OMP_FOR_BODY (orig_for_stmt), NULL);
9365 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
9366 }
9367 }
9368
9369 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
9370 &for_body);
9371
9372 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9373 {
9374 if (gimple_code (g) == GIMPLE_BIND)
9375 pop_gimplify_context (g);
9376 else
9377 pop_gimplify_context (NULL);
9378 }
726a989a 9379
acf0174b
JJ
9380 if (orig_for_stmt != for_stmt)
9381 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9382 {
9383 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9384 decl = TREE_OPERAND (t, 0);
d9a6bd32
JJ
9385 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9386 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9387 gimplify_omp_ctxp = ctx->outer_context;
acf0174b 9388 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
d9a6bd32 9389 gimplify_omp_ctxp = ctx;
acf0174b
JJ
9390 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
9391 TREE_OPERAND (t, 0) = var;
9392 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9393 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
9394 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
9395 }
9396
1a80d6b8
JJ
9397 gimplify_adjust_omp_clauses (pre_p, for_body,
9398 &OMP_FOR_CLAUSES (orig_for_stmt),
d9a6bd32 9399 TREE_CODE (orig_for_stmt));
953ff289 9400
74bf76ed 9401 int kind;
acf0174b 9402 switch (TREE_CODE (orig_for_stmt))
74bf76ed
JJ
9403 {
9404 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
9405 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
c02065fc 9406 case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
9a771876 9407 case CILK_FOR: kind = GF_OMP_FOR_KIND_CILKFOR; break;
acf0174b 9408 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
d9a6bd32 9409 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
41dbbb37 9410 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
74bf76ed
JJ
9411 default:
9412 gcc_unreachable ();
9413 }
acf0174b 9414 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
726a989a
RB
9415 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
9416 for_pre_body);
acf0174b
JJ
9417 if (orig_for_stmt != for_stmt)
9418 gimple_omp_for_set_combined_p (gfor, true);
9419 if (gimplify_omp_ctxp
9420 && (gimplify_omp_ctxp->combined_loop
9421 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
9422 && gimplify_omp_ctxp->outer_context
9423 && gimplify_omp_ctxp->outer_context->combined_loop)))
9424 {
9425 gimple_omp_for_set_combined_into_p (gfor, true);
9426 if (gimplify_omp_ctxp->combined_loop)
9427 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
9428 else
9429 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
9430 }
726a989a
RB
9431
9432 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9433 {
9434 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9435 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
9436 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
9437 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
9438 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
9439 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
9440 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9441 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
9442 }
9443
d9a6bd32
JJ
9444 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
9445 constructs with a GIMPLE_OMP_TASK sandwiched in between them.
9446 The outer taskloop computes the number of iterations and the counts
9447 for collapsed loops, and holds the taskloop specific clauses.
9448 The task construct represents the effect of data sharing on the
9449 explicit task it creates, and the inner taskloop is the expansion
9450 of the static loop inside the explicit task construct. */
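     /* A rough sketch of the resulting nesting (illustrative only):
          #pragma omp taskloop grainsize(4)
          for (i = 0; i < n; i++)
            body;
        becomes approximately
          GIMPLE_OMP_FOR (taskloop)        <- iteration counts, grainsize(4)
            GIMPLE_OMP_TASK                <- data sharing for the explicit task
              GIMPLE_OMP_FOR (taskloop)    <- the static loop
                body  */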
9451 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9452 {
9453 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
9454 tree task_clauses = NULL_TREE;
9455 tree c = *gfor_clauses_ptr;
9456 tree *gtask_clauses_ptr = &task_clauses;
9457 tree outer_for_clauses = NULL_TREE;
9458 tree *gforo_clauses_ptr = &outer_for_clauses;
9459 for (; c; c = OMP_CLAUSE_CHAIN (c))
9460 switch (OMP_CLAUSE_CODE (c))
9461 {
9462 /* These clauses are allowed on task; move them there. */
9463 case OMP_CLAUSE_SHARED:
9464 case OMP_CLAUSE_FIRSTPRIVATE:
9465 case OMP_CLAUSE_DEFAULT:
9466 case OMP_CLAUSE_IF:
9467 case OMP_CLAUSE_UNTIED:
9468 case OMP_CLAUSE_FINAL:
9469 case OMP_CLAUSE_MERGEABLE:
9470 case OMP_CLAUSE_PRIORITY:
9471 *gtask_clauses_ptr = c;
9472 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9473 break;
9474 case OMP_CLAUSE_PRIVATE:
9475 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
9476 {
9477 /* We want private on outer for and firstprivate
9478 on task. */
9479 *gtask_clauses_ptr
9480 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9481 OMP_CLAUSE_FIRSTPRIVATE);
9482 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
9483 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
9484 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
9485 *gforo_clauses_ptr = c;
9486 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9487 }
9488 else
9489 {
9490 *gtask_clauses_ptr = c;
9491 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9492 }
9493 break;
9494 /* These clauses go into outer taskloop clauses. */
9495 case OMP_CLAUSE_GRAINSIZE:
9496 case OMP_CLAUSE_NUM_TASKS:
9497 case OMP_CLAUSE_NOGROUP:
9498 *gforo_clauses_ptr = c;
9499 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9500 break;
9501 /* Taskloop clause we duplicate on both taskloops. */
9502 case OMP_CLAUSE_COLLAPSE:
9503 *gfor_clauses_ptr = c;
9504 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9505 *gforo_clauses_ptr = copy_node (c);
9506 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
9507 break;
9508 /* For lastprivate, keep the clause on the inner taskloop, and add
9509 a shared clause on the task. If the same decl is also firstprivate,
9510 also add a firstprivate clause on the inner taskloop. */
9511 case OMP_CLAUSE_LASTPRIVATE:
9512 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
9513 {
9514 /* For taskloop C++ lastprivate IVs, we want:
9515 1) private on outer taskloop
9516 2) firstprivate and shared on task
9517 3) lastprivate on inner taskloop */
9518 *gtask_clauses_ptr
9519 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9520 OMP_CLAUSE_FIRSTPRIVATE);
9521 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
9522 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
9523 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
9524 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
9525 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9526 OMP_CLAUSE_PRIVATE);
9527 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
9528 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
9529 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
9530 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
9531 }
9532 *gfor_clauses_ptr = c;
9533 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9534 *gtask_clauses_ptr
9535 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
9536 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
9537 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
9538 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
9539 gtask_clauses_ptr
9540 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
9541 break;
9542 default:
9543 gcc_unreachable ();
9544 }
9545 *gfor_clauses_ptr = NULL_TREE;
9546 *gtask_clauses_ptr = NULL_TREE;
9547 *gforo_clauses_ptr = NULL_TREE;
9548 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
9549 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
9550 NULL_TREE, NULL_TREE, NULL_TREE);
9551 gimple_omp_task_set_taskloop_p (g, true);
9552 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
9553 gomp_for *gforo
9554 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
9555 gimple_omp_for_collapse (gfor),
9556 gimple_omp_for_pre_body (gfor));
9557 gimple_omp_for_set_pre_body (gfor, NULL);
9558 gimple_omp_for_set_combined_p (gforo, true);
9559 gimple_omp_for_set_combined_into_p (gfor, true);
9560 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
9561 {
9562 t = unshare_expr (gimple_omp_for_index (gfor, i));
9563 gimple_omp_for_set_index (gforo, i, t);
9564 t = unshare_expr (gimple_omp_for_initial (gfor, i));
9565 gimple_omp_for_set_initial (gforo, i, t);
9566 gimple_omp_for_set_cond (gforo, i,
9567 gimple_omp_for_cond (gfor, i));
9568 t = unshare_expr (gimple_omp_for_final (gfor, i));
9569 gimple_omp_for_set_final (gforo, i, t);
9570 t = unshare_expr (gimple_omp_for_incr (gfor, i));
9571 gimple_omp_for_set_incr (gforo, i, t);
9572 }
9573 gimplify_seq_add_stmt (pre_p, gforo);
9574 }
9575 else
9576 gimplify_seq_add_stmt (pre_p, gfor);
74bf76ed
JJ
9577 if (ret != GS_ALL_DONE)
9578 return GS_ERROR;
9579 *expr_p = NULL_TREE;
9580 return GS_ALL_DONE;
953ff289
DN
9581}
9582
e01d41e5
JJ
9583/* Helper function of optimize_target_teams, find OMP_TEAMS inside
9584 of OMP_TARGET's body. */
9585
9586static tree
9587find_omp_teams (tree *tp, int *walk_subtrees, void *)
9588{
9589 *walk_subtrees = 0;
9590 switch (TREE_CODE (*tp))
9591 {
9592 case OMP_TEAMS:
9593 return *tp;
9594 case BIND_EXPR:
9595 case STATEMENT_LIST:
9596 *walk_subtrees = 1;
9597 break;
9598 default:
9599 break;
9600 }
9601 return NULL_TREE;
9602}
9603
9604/* Helper function of optimize_target_teams, determine if the expression
9605 can be computed safely before the target construct on the host. */
9606
9607static tree
9608computable_teams_clause (tree *tp, int *walk_subtrees, void *)
9609{
9610 splay_tree_node n;
9611
9612 if (TYPE_P (*tp))
9613 {
9614 *walk_subtrees = 0;
9615 return NULL_TREE;
9616 }
9617 switch (TREE_CODE (*tp))
9618 {
9619 case VAR_DECL:
9620 case PARM_DECL:
9621 case RESULT_DECL:
9622 *walk_subtrees = 0;
9623 if (error_operand_p (*tp)
9624 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
9625 || DECL_HAS_VALUE_EXPR_P (*tp)
9626 || DECL_THREAD_LOCAL_P (*tp)
9627 || TREE_SIDE_EFFECTS (*tp)
9628 || TREE_THIS_VOLATILE (*tp))
9629 return *tp;
9630 if (is_global_var (*tp)
9631 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
9632 || lookup_attribute ("omp declare target link",
9633 DECL_ATTRIBUTES (*tp))))
9634 return *tp;
9635 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
9636 (splay_tree_key) *tp);
9637 if (n == NULL)
9638 {
9639 if (gimplify_omp_ctxp->target_map_scalars_firstprivate)
9640 return NULL_TREE;
9641 return *tp;
9642 }
9643 else if (n->value & GOVD_LOCAL)
9644 return *tp;
9645 else if (n->value & GOVD_FIRSTPRIVATE)
9646 return NULL_TREE;
9647 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
9648 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
9649 return NULL_TREE;
9650 return *tp;
9651 case INTEGER_CST:
9652 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
9653 return *tp;
9654 return NULL_TREE;
9655 case TARGET_EXPR:
9656 if (TARGET_EXPR_INITIAL (*tp)
9657 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
9658 return *tp;
9659 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
9660 walk_subtrees, NULL);
9661 /* Allow some reasonable subset of integral arithmetics. */
9662 case PLUS_EXPR:
9663 case MINUS_EXPR:
9664 case MULT_EXPR:
9665 case TRUNC_DIV_EXPR:
9666 case CEIL_DIV_EXPR:
9667 case FLOOR_DIV_EXPR:
9668 case ROUND_DIV_EXPR:
9669 case TRUNC_MOD_EXPR:
9670 case CEIL_MOD_EXPR:
9671 case FLOOR_MOD_EXPR:
9672 case ROUND_MOD_EXPR:
9673 case RDIV_EXPR:
9674 case EXACT_DIV_EXPR:
9675 case MIN_EXPR:
9676 case MAX_EXPR:
9677 case LSHIFT_EXPR:
9678 case RSHIFT_EXPR:
9679 case BIT_IOR_EXPR:
9680 case BIT_XOR_EXPR:
9681 case BIT_AND_EXPR:
9682 case NEGATE_EXPR:
9683 case ABS_EXPR:
9684 case BIT_NOT_EXPR:
9685 case NON_LVALUE_EXPR:
9686 CASE_CONVERT:
9687 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
9688 return *tp;
9689 return NULL_TREE;
9690 /* And disallow anything else, except for comparisons. */
9691 default:
9692 if (COMPARISON_CLASS_P (*tp))
9693 return NULL_TREE;
9694 return *tp;
9695 }
9696}
9697
9698/* Try to determine if the num_teams and/or thread_limit expressions
9699 can have their values determined already before entering the
9700 target construct.
9701 INTEGER_CSTs trivially are,
9702 as are integral decls that are firstprivate (explicitly or implicitly)
9703 or explicitly map(always, to:) or map(always, tofrom:) on the target
9704 region, and expressions involving simple arithmetics on those;
9705 function calls are not OK, nor is dereferencing something, etc.
9706 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
9707 TARGET based on what we find:
9708 0 stands for a clause not specified at all; use the implementation default.
9709 -1 stands for a value that can't be determined easily before entering
9710 the target construct.
9711 If the teams construct is not present at all, use 1 for num_teams
9712 and 0 for thread_limit (only one team is involved, and the thread
9713 limit is implementation defined). */
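/* For example (an illustrative sketch): given
     #pragma omp target teams num_teams (8) thread_limit (n + 1)
   num_teams is recorded as 8, while thread_limit is recorded as N + 1 only
   when N can be evaluated on the host (e.g. it is firstprivate or mapped
   always to/tofrom on the target region); otherwise -1 is recorded and the
   expression is evaluated inside the target region instead.  */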
9714
9715static void
9716optimize_target_teams (tree target, gimple_seq *pre_p)
9717{
9718 tree body = OMP_BODY (target);
9719 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
9720 tree num_teams = integer_zero_node;
9721 tree thread_limit = integer_zero_node;
9722 location_t num_teams_loc = EXPR_LOCATION (target);
9723 location_t thread_limit_loc = EXPR_LOCATION (target);
9724 tree c, *p, expr;
9725 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
9726
9727 if (teams == NULL_TREE)
9728 num_teams = integer_one_node;
9729 else
9730 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
9731 {
9732 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
9733 {
9734 p = &num_teams;
9735 num_teams_loc = OMP_CLAUSE_LOCATION (c);
9736 }
9737 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
9738 {
9739 p = &thread_limit;
9740 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
9741 }
9742 else
9743 continue;
9744 expr = OMP_CLAUSE_OPERAND (c, 0);
9745 if (TREE_CODE (expr) == INTEGER_CST)
9746 {
9747 *p = expr;
9748 continue;
9749 }
9750 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
9751 {
9752 *p = integer_minus_one_node;
9753 continue;
9754 }
9755 *p = expr;
9756 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
381cdae4 9757 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
9758 == GS_ERROR)
9759 {
9760 gimplify_omp_ctxp = target_ctx;
9761 *p = integer_minus_one_node;
9762 continue;
9763 }
9764 gimplify_omp_ctxp = target_ctx;
9765 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
9766 OMP_CLAUSE_OPERAND (c, 0) = *p;
9767 }
9768 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
9769 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
9770 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
9771 OMP_TARGET_CLAUSES (target) = c;
9772 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
9773 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
9774 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
9775 OMP_TARGET_CLAUSES (target) = c;
9776}
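/* Hedged example of the effect (not from the source): for

     #pragma omp target
     #pragma omp teams num_teams (4)

   the teams construct is found in the body, the INTEGER_CST 4 is taken
   as is, and the target gets NUM_TEAMS (4) and THREAD_LIMIT (0) clauses;
   with no teams construct at all it gets NUM_TEAMS (1) and
   THREAD_LIMIT (0), as described in the comment above.  */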
9777
41dbbb37 9778/* Gimplify the gross structure of several OMP constructs. */
953ff289 9779
726a989a
RB
9780static void
9781gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
953ff289 9782{
726a989a 9783 tree expr = *expr_p;
355fe088 9784 gimple *stmt;
726a989a 9785 gimple_seq body = NULL;
41dbbb37 9786 enum omp_region_type ort;
953ff289 9787
acf0174b
JJ
9788 switch (TREE_CODE (expr))
9789 {
9790 case OMP_SECTIONS:
9791 case OMP_SINGLE:
41dbbb37 9792 ort = ORT_WORKSHARE;
acf0174b 9793 break;
d9a6bd32
JJ
9794 case OMP_TARGET:
9795 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
9796 break;
41dbbb37 9797 case OACC_KERNELS:
182190f2
NS
9798 ort = ORT_ACC_KERNELS;
9799 break;
41dbbb37 9800 case OACC_PARALLEL:
182190f2 9801 ort = ORT_ACC_PARALLEL;
acf0174b 9802 break;
41dbbb37 9803 case OACC_DATA:
182190f2
NS
9804 ort = ORT_ACC_DATA;
9805 break;
acf0174b
JJ
9806 case OMP_TARGET_DATA:
9807 ort = ORT_TARGET_DATA;
9808 break;
9809 case OMP_TEAMS:
41b37d5e 9810 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
acf0174b 9811 break;
37d5ad46
JB
9812 case OACC_HOST_DATA:
9813 ort = ORT_ACC_HOST_DATA;
9814 break;
acf0174b
JJ
9815 default:
9816 gcc_unreachable ();
9817 }
d9a6bd32
JJ
9818 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
9819 TREE_CODE (expr));
e01d41e5
JJ
9820 if (TREE_CODE (expr) == OMP_TARGET)
9821 optimize_target_teams (expr, pre_p);
d9a6bd32 9822 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
acf0174b 9823 {
45852dcc 9824 push_gimplify_context ();
355fe088 9825 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
acf0174b
JJ
9826 if (gimple_code (g) == GIMPLE_BIND)
9827 pop_gimplify_context (g);
9828 else
9829 pop_gimplify_context (NULL);
182190f2 9830 if ((ort & ORT_TARGET_DATA) != 0)
acf0174b 9831 {
41dbbb37
TS
9832 enum built_in_function end_ix;
9833 switch (TREE_CODE (expr))
9834 {
9835 case OACC_DATA:
37d5ad46 9836 case OACC_HOST_DATA:
41dbbb37
TS
9837 end_ix = BUILT_IN_GOACC_DATA_END;
9838 break;
9839 case OMP_TARGET_DATA:
9840 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
9841 break;
9842 default:
9843 gcc_unreachable ();
9844 }
9845 tree fn = builtin_decl_explicit (end_ix);
acf0174b 9846 g = gimple_build_call (fn, 0);
41dbbb37 9847 gimple_seq cleanup = NULL;
acf0174b
JJ
9848 gimple_seq_add_stmt (&cleanup, g);
9849 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
9850 body = NULL;
9851 gimple_seq_add_stmt (&body, g);
9852 }
9853 }
9854 else
9855 gimplify_and_add (OMP_BODY (expr), &body);
1a80d6b8
JJ
9856 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
9857 TREE_CODE (expr));
953ff289 9858
acf0174b
JJ
9859 switch (TREE_CODE (expr))
9860 {
41dbbb37
TS
9861 case OACC_DATA:
9862 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
9863 OMP_CLAUSES (expr));
9864 break;
9865 case OACC_KERNELS:
9866 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
9867 OMP_CLAUSES (expr));
9868 break;
37d5ad46
JB
9869 case OACC_HOST_DATA:
9870 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
9871 OMP_CLAUSES (expr));
9872 break;
41dbbb37
TS
9873 case OACC_PARALLEL:
9874 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
9875 OMP_CLAUSES (expr));
9876 break;
acf0174b
JJ
9877 case OMP_SECTIONS:
9878 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
9879 break;
9880 case OMP_SINGLE:
9881 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
9882 break;
9883 case OMP_TARGET:
9884 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
9885 OMP_CLAUSES (expr));
9886 break;
9887 case OMP_TARGET_DATA:
9888 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
9889 OMP_CLAUSES (expr));
9890 break;
9891 case OMP_TEAMS:
9892 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
9893 break;
9894 default:
9895 gcc_unreachable ();
9896 }
9897
9898 gimplify_seq_add_stmt (pre_p, stmt);
9899 *expr_p = NULL_TREE;
9900}
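/* Illustrative note (a sketch, not from the source): for
   #pragma omp target data the gimplified body is wrapped above in a
   GIMPLE_TRY_FINALLY whose cleanup calls BUILT_IN_GOMP_TARGET_END_DATA,
   so the device data environment is torn down even if the region is
   left abnormally; the OpenACC data constructs get the analogous
   BUILT_IN_GOACC_DATA_END cleanup.  */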
9901
41dbbb37
TS
9902/* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
9903 target update constructs. */
acf0174b
JJ
9904
9905static void
9906gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
9907{
b811915d 9908 tree expr = *expr_p;
41dbbb37 9909 int kind;
538dd0b7 9910 gomp_target *stmt;
182190f2 9911 enum omp_region_type ort = ORT_WORKSHARE;
acf0174b 9912
41dbbb37
TS
9913 switch (TREE_CODE (expr))
9914 {
9915 case OACC_ENTER_DATA:
41dbbb37 9916 case OACC_EXIT_DATA:
41dbbb37 9917 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
182190f2 9918 ort = ORT_ACC;
41dbbb37
TS
9919 break;
9920 case OACC_UPDATE:
41dbbb37 9921 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
182190f2 9922 ort = ORT_ACC;
41dbbb37
TS
9923 break;
9924 case OMP_TARGET_UPDATE:
41dbbb37
TS
9925 kind = GF_OMP_TARGET_KIND_UPDATE;
9926 break;
d9a6bd32
JJ
9927 case OMP_TARGET_ENTER_DATA:
9928 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
9929 break;
9930 case OMP_TARGET_EXIT_DATA:
9931 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
9932 break;
41dbbb37
TS
9933 default:
9934 gcc_unreachable ();
9935 }
b811915d 9936 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
182190f2 9937 ort, TREE_CODE (expr));
1a80d6b8 9938 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
d9a6bd32 9939 TREE_CODE (expr));
b811915d 9940 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
726a989a
RB
9941
9942 gimplify_seq_add_stmt (pre_p, stmt);
acf0174b 9943 *expr_p = NULL_TREE;
953ff289
DN
9944}
9945
9946/* A subroutine of gimplify_omp_atomic. The front end is supposed to have
b8698a0f 9947 stabilized the lhs of the atomic operation as *ADDR. Return true if
9948 EXPR is this stabilized form. */
9949
9950static bool
a509ebb5 9951goa_lhs_expr_p (tree expr, tree addr)
953ff289
DN
9952{
9953 /* Also include casts to other type variants. The C front end is fond
b8698a0f 9954 of adding these for e.g. volatile variables. This is like
953ff289 9955 STRIP_TYPE_NOPS but includes the main variant lookup. */
9600efe1 9956 STRIP_USELESS_TYPE_CONVERSION (expr);
953ff289 9957
78e47463
JJ
9958 if (TREE_CODE (expr) == INDIRECT_REF)
9959 {
9960 expr = TREE_OPERAND (expr, 0);
9961 while (expr != addr
1043771b 9962 && (CONVERT_EXPR_P (expr)
9963 || TREE_CODE (expr) == NON_LVALUE_EXPR)
9964 && TREE_CODE (expr) == TREE_CODE (addr)
9600efe1 9965 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
9966 {
9967 expr = TREE_OPERAND (expr, 0);
9968 addr = TREE_OPERAND (addr, 0);
9969 }
251923f5
JJ
9970 if (expr == addr)
9971 return true;
71458b8a
JJ
9972 return (TREE_CODE (addr) == ADDR_EXPR
9973 && TREE_CODE (expr) == ADDR_EXPR
251923f5 9974 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
78e47463 9975 }
953ff289
DN
9976 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
9977 return true;
9978 return false;
9979}
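/* Small example (for illustration only): if the front end stabilized
   the atomic lhs as *&x, then with ADDR == &x both "x" and "*&x"
   (possibly wrapped in useless type conversions) are recognized here
   as the stabilized form.  */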
9980
ad19c4be
EB
9981/* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
9982 expression does not involve the lhs, evaluate it into a temporary.
9983 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
9984 or -1 if an error was encountered. */
953ff289
DN
9985
9986static int
726a989a
RB
9987goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
9988 tree lhs_var)
953ff289
DN
9989{
9990 tree expr = *expr_p;
9991 int saw_lhs;
9992
9993 if (goa_lhs_expr_p (expr, lhs_addr))
9994 {
9995 *expr_p = lhs_var;
9996 return 1;
9997 }
9998 if (is_gimple_val (expr))
9999 return 0;
b8698a0f 10000
953ff289
DN
10001 saw_lhs = 0;
10002 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
10003 {
10004 case tcc_binary:
067dd3c9 10005 case tcc_comparison:
726a989a
RB
10006 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
10007 lhs_var);
191816a3 10008 /* FALLTHRU */
953ff289 10009 case tcc_unary:
726a989a
RB
10010 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
10011 lhs_var);
953ff289 10012 break;
067dd3c9
JJ
10013 case tcc_expression:
10014 switch (TREE_CODE (expr))
10015 {
10016 case TRUTH_ANDIF_EXPR:
10017 case TRUTH_ORIF_EXPR:
f2b11865
JJ
10018 case TRUTH_AND_EXPR:
10019 case TRUTH_OR_EXPR:
10020 case TRUTH_XOR_EXPR:
067dd3c9
JJ
10021 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
10022 lhs_addr, lhs_var);
191816a3 10023 /* FALLTHRU */
f2b11865 10024 case TRUTH_NOT_EXPR:
067dd3c9
JJ
10025 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
10026 lhs_addr, lhs_var);
10027 break;
4063e61b
JM
10028 case COMPOUND_EXPR:
10029 /* Break out any preevaluations from cp_build_modify_expr. */
10030 for (; TREE_CODE (expr) == COMPOUND_EXPR;
10031 expr = TREE_OPERAND (expr, 1))
10032 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
10033 *expr_p = expr;
10034 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
067dd3c9
JJ
10035 default:
10036 break;
10037 }
10038 break;
953ff289
DN
10039 default:
10040 break;
10041 }
10042
10043 if (saw_lhs == 0)
10044 {
10045 enum gimplify_status gs;
10046 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
10047 if (gs != GS_ALL_DONE)
10048 saw_lhs = -1;
10049 }
10050
10051 return saw_lhs;
10052}
10053
953ff289
DN
10054/* Gimplify an OMP_ATOMIC statement. */
10055
10056static enum gimplify_status
726a989a 10057gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
953ff289
DN
10058{
10059 tree addr = TREE_OPERAND (*expr_p, 0);
20906c66
JJ
10060 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
10061 ? NULL : TREE_OPERAND (*expr_p, 1);
953ff289 10062 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
726a989a 10063 tree tmp_load;
538dd0b7
DM
10064 gomp_atomic_load *loadstmt;
10065 gomp_atomic_store *storestmt;
953ff289 10066
b731b390 10067 tmp_load = create_tmp_reg (type);
20906c66
JJ
10068 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
10069 return GS_ERROR;
10070
10071 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
10072 != GS_ALL_DONE)
10073 return GS_ERROR;
953ff289 10074
20906c66
JJ
10075 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
10076 gimplify_seq_add_stmt (pre_p, loadstmt);
10077 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
10078 != GS_ALL_DONE)
10079 return GS_ERROR;
953ff289 10080
20906c66
JJ
10081 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
10082 rhs = tmp_load;
10083 storestmt = gimple_build_omp_atomic_store (rhs);
10084 gimplify_seq_add_stmt (pre_p, storestmt);
acf0174b
JJ
10085 if (OMP_ATOMIC_SEQ_CST (*expr_p))
10086 {
10087 gimple_omp_atomic_set_seq_cst (loadstmt);
10088 gimple_omp_atomic_set_seq_cst (storestmt);
10089 }
20906c66
JJ
10090 switch (TREE_CODE (*expr_p))
10091 {
10092 case OMP_ATOMIC_READ:
10093 case OMP_ATOMIC_CAPTURE_OLD:
10094 *expr_p = tmp_load;
10095 gimple_omp_atomic_set_need_value (loadstmt);
10096 break;
10097 case OMP_ATOMIC_CAPTURE_NEW:
10098 *expr_p = rhs;
10099 gimple_omp_atomic_set_need_value (storestmt);
10100 break;
10101 default:
10102 *expr_p = NULL;
10103 break;
10104 }
a509ebb5 10105
acf0174b 10106 return GS_ALL_DONE;
953ff289 10107}
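/* Rough sketch of the result (illustrative only): for

     #pragma omp atomic
     x = x + 1;

   the statements emitted above are essentially
     GIMPLE_OMP_ATOMIC_LOAD  (tmp_load from &x)
     GIMPLE_OMP_ATOMIC_STORE (tmp_load + 1)
   with the seq_cst flag set on both when the seq_cst clause is
   present.  */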
6de9cd9a 10108
0a35513e
AH
10109/* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
10110 body, and adding some EH bits. */
10111
10112static enum gimplify_status
10113gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
10114{
10115 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
355fe088 10116 gimple *body_stmt;
538dd0b7 10117 gtransaction *trans_stmt;
0a35513e 10118 gimple_seq body = NULL;
0a35513e
AH
10119 int subcode = 0;
10120
10121 /* Wrap the transaction body in a BIND_EXPR so we have a context
41dbbb37 10122 where to put decls for OMP. */
0a35513e
AH
10123 if (TREE_CODE (tbody) != BIND_EXPR)
10124 {
10125 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
10126 TREE_SIDE_EFFECTS (bind) = 1;
10127 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
10128 TRANSACTION_EXPR_BODY (expr) = bind;
10129 }
10130
45852dcc 10131 push_gimplify_context ();
0a35513e
AH
10132 temp = voidify_wrapper_expr (*expr_p, NULL);
10133
538dd0b7
DM
10134 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
10135 pop_gimplify_context (body_stmt);
0a35513e 10136
7c11b0fe 10137 trans_stmt = gimple_build_transaction (body);
0a35513e
AH
10138 if (TRANSACTION_EXPR_OUTER (expr))
10139 subcode = GTMA_IS_OUTER;
10140 else if (TRANSACTION_EXPR_RELAXED (expr))
10141 subcode = GTMA_IS_RELAXED;
538dd0b7 10142 gimple_transaction_set_subcode (trans_stmt, subcode);
0a35513e 10143
538dd0b7 10144 gimplify_seq_add_stmt (pre_p, trans_stmt);
0a35513e
AH
10145
10146 if (temp)
10147 {
10148 *expr_p = temp;
10149 return GS_OK;
10150 }
10151
10152 *expr_p = NULL_TREE;
10153 return GS_ALL_DONE;
10154}
10155
d9a6bd32
JJ
10156/* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
10157 is the OMP_BODY of the original EXPR (which has already been
10158 gimplified so it's not present in the EXPR).
10159
10160 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
10161
10162static gimple *
10163gimplify_omp_ordered (tree expr, gimple_seq body)
10164{
10165 tree c, decls;
10166 int failures = 0;
10167 unsigned int i;
10168 tree source_c = NULL_TREE;
10169 tree sink_c = NULL_TREE;
10170
10171 if (gimplify_omp_ctxp)
6b37bdaf
PP
10172 {
10173 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
10174 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10175 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
10176 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
10177 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
10178 {
10179 error_at (OMP_CLAUSE_LOCATION (c),
10180 "%<ordered%> construct with %<depend%> clause must be "
10181 "closely nested inside a loop with %<ordered%> clause "
10182 "with a parameter");
10183 failures++;
10184 }
10185 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10186 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
10187 {
10188 bool fail = false;
10189 for (decls = OMP_CLAUSE_DECL (c), i = 0;
10190 decls && TREE_CODE (decls) == TREE_LIST;
10191 decls = TREE_CHAIN (decls), ++i)
10192 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
10193 continue;
10194 else if (TREE_VALUE (decls)
10195 != gimplify_omp_ctxp->loop_iter_var[2 * i])
10196 {
10197 error_at (OMP_CLAUSE_LOCATION (c),
10198 "variable %qE is not an iteration "
10199 "of outermost loop %d, expected %qE",
10200 TREE_VALUE (decls), i + 1,
10201 gimplify_omp_ctxp->loop_iter_var[2 * i]);
10202 fail = true;
10203 failures++;
10204 }
10205 else
10206 TREE_VALUE (decls)
10207 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
10208 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
10209 {
10210 error_at (OMP_CLAUSE_LOCATION (c),
10211 "number of variables in %<depend(sink)%> "
10212 "clause does not match number of "
10213 "iteration variables");
10214 failures++;
10215 }
10216 sink_c = c;
10217 }
10218 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10219 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
10220 {
10221 if (source_c)
d9a6bd32
JJ
10222 {
10223 error_at (OMP_CLAUSE_LOCATION (c),
10224 "more than one %<depend(source)%> clause on an "
10225 "%<ordered%> construct");
d9a6bd32
JJ
10226 failures++;
10227 }
10228 else
10229 source_c = c;
10230 }
10231 }
d9a6bd32
JJ
10232 if (source_c && sink_c)
10233 {
10234 error_at (OMP_CLAUSE_LOCATION (source_c),
10235 "%<depend(source)%> clause specified together with "
10236 "%<depend(sink:)%> clauses on the same construct");
10237 failures++;
10238 }
10239
10240 if (failures)
10241 return gimple_build_nop ();
10242 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
10243}
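/* Example of the checks above (illustrative, not from the source): in

     #pragma omp for ordered(2)
     for (i = 0; i < n; i++)
       for (j = 0; j < m; j++)
         {
           #pragma omp ordered depend(sink: i - 1, j)
           ...
           #pragma omp ordered depend(source)
         }

   each depend(sink:) must list exactly the two iteration variables in
   loop order, and depend(source) may appear at most once and never
   together with depend(sink:) on the same construct; otherwise the
   errors above are emitted and a GIMPLE_NOP replaces the
   GIMPLE_OMP_ORDERED.  */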
10244
ad19c4be 10245/* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
10246 expression produces a value to be used as an operand inside a GIMPLE
10247 statement, the value will be stored back in *EXPR_P. This value will
10248 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
10249 an SSA_NAME. The corresponding sequence of GIMPLE statements is
10250 emitted in PRE_P and POST_P.
10251
10252 Additionally, this process may overwrite parts of the input
10253 expression during gimplification. Ideally, it should be
10254 possible to do non-destructive gimplification.
10255
10256 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
10257 the expression needs to evaluate to a value to be used as
10258 an operand in a GIMPLE statement, this value will be stored in
10259 *EXPR_P on exit. This happens when the caller specifies one
10260 of fb_lvalue or fb_rvalue fallback flags.
10261
10262 PRE_P will contain the sequence of GIMPLE statements corresponding
10263 to the evaluation of EXPR and all the side-effects that must
10264 be executed before the main expression. On exit, the last
10265 statement of PRE_P is the core statement being gimplified. For
10266 instance, when gimplifying 'if (++a)' the last statement in
10267 PRE_P will be 'if (t.1)' where t.1 is the result of
10268 pre-incrementing 'a'.
10269
10270 POST_P will contain the sequence of GIMPLE statements corresponding
10271 to the evaluation of all the side-effects that must be executed
10272 after the main expression. If this is NULL, the post
10273 side-effects are stored at the end of PRE_P.
10274
10275 The reason why the output is split in two is to handle post
10276 side-effects explicitly. In some cases, an expression may have
10277 inner and outer post side-effects which need to be emitted in
10278 an order different from the one given by the recursive
10279 traversal. For instance, for the expression (*p--)++ the post
10280 side-effects of '--' must actually occur *after* the post
10281 side-effects of '++'. However, gimplification will first visit
10282 the inner expression, so if a separate POST sequence was not
10283 used, the resulting sequence would be:
10284
10285 1 t.1 = *p
10286 2 p = p - 1
10287 3 t.2 = t.1 + 1
10288 4 *p = t.2
10289
10290 However, the post-decrement operation in line #2 must not be
10291 evaluated until after the store to *p at line #4, so the
10292 correct sequence should be:
10293
10294 1 t.1 = *p
10295 2 t.2 = t.1 + 1
10296 3 *p = t.2
10297 4 p = p - 1
10298
10299 So, by specifying a separate post queue, it is possible
10300 to emit the post side-effects in the correct order.
10301 If POST_P is NULL, an internal queue will be used. Before
10302 returning to the caller, the sequence POST_P is appended to
10303 the main output sequence PRE_P.
10304
10305 GIMPLE_TEST_F points to a function that takes a tree T and
10306 returns nonzero if T is in the GIMPLE form requested by the
12947319 10307 caller. The GIMPLE predicates are in gimple.c.
726a989a
RB
10308
10309 FALLBACK tells the function what sort of a temporary we want if
10310 gimplification cannot produce an expression that complies with
10311 GIMPLE_TEST_F.
10312
10313 fb_none means that no temporary should be generated
10314 fb_rvalue means that an rvalue is OK to generate
10315 fb_lvalue means that an lvalue is OK to generate
10316 fb_either means that either is OK, but an lvalue is preferable.
10317 fb_mayfail means that gimplification may fail (in which case
10318 GS_ERROR will be returned)
10319
10320 The return value is either GS_ERROR or GS_ALL_DONE, since this
10321 function iterates until EXPR is completely gimplified or an error
10322 occurs. */
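/* A typical call, as used throughout this file (shown only as a usage
   reminder, not new functionality):

     ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                          is_gimple_val, fb_rvalue);

   i.e. the caller names the predicate the operand must satisfy and the
   fallback it can accept, and collects any new statements from *PRE_P
   and *POST_P.  */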
6de9cd9a
DN
10323
10324enum gimplify_status
726a989a
RB
10325gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
10326 bool (*gimple_test_f) (tree), fallback_t fallback)
6de9cd9a
DN
10327{
10328 tree tmp;
726a989a
RB
10329 gimple_seq internal_pre = NULL;
10330 gimple_seq internal_post = NULL;
6de9cd9a 10331 tree save_expr;
726a989a 10332 bool is_statement;
6de9cd9a
DN
10333 location_t saved_location;
10334 enum gimplify_status ret;
726a989a 10335 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
6de9cd9a
DN
10336
10337 save_expr = *expr_p;
10338 if (save_expr == NULL_TREE)
10339 return GS_ALL_DONE;
10340
726a989a
RB
10341 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
10342 is_statement = gimple_test_f == is_gimple_stmt;
10343 if (is_statement)
10344 gcc_assert (pre_p);
10345
10346 /* Consistency checks. */
10347 if (gimple_test_f == is_gimple_reg)
10348 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
10349 else if (gimple_test_f == is_gimple_val
10350 || gimple_test_f == is_gimple_call_addr
10351 || gimple_test_f == is_gimple_condexpr
10352 || gimple_test_f == is_gimple_mem_rhs
ba4d8f9d 10353 || gimple_test_f == is_gimple_mem_rhs_or_call
726a989a 10354 || gimple_test_f == is_gimple_reg_rhs
ba4d8f9d 10355 || gimple_test_f == is_gimple_reg_rhs_or_call
10356 || gimple_test_f == is_gimple_asm_val
10357 || gimple_test_f == is_gimple_mem_ref_addr)
10358 gcc_assert (fallback & fb_rvalue);
10359 else if (gimple_test_f == is_gimple_min_lval
10360 || gimple_test_f == is_gimple_lvalue)
10361 gcc_assert (fallback & fb_lvalue);
10362 else if (gimple_test_f == is_gimple_addressable)
10363 gcc_assert (fallback & fb_either);
10364 else if (gimple_test_f == is_gimple_stmt)
10365 gcc_assert (fallback == fb_none);
10366 else
10367 {
10368 /* We should have recognized the GIMPLE_TEST_F predicate to
10369 know what kind of fallback to use in case a temporary is
10370 needed to hold the value or address of *EXPR_P. */
10371 gcc_unreachable ();
10372 }
10373
6de9cd9a
DN
10374 /* We used to check the predicate here and return immediately if it
10375 succeeds. This is wrong; the design is for gimplification to be
10376 idempotent, and for the predicates to only test for valid forms, not
10377 whether they are fully simplified. */
6de9cd9a
DN
10378 if (pre_p == NULL)
10379 pre_p = &internal_pre;
726a989a 10380
6de9cd9a
DN
10381 if (post_p == NULL)
10382 post_p = &internal_post;
10383
726a989a
RB
10384 /* Remember the last statements added to PRE_P and POST_P. Every
10385 new statement added by the gimplification helpers needs to be
10386 annotated with location information. To centralize the
10387 responsibility, we remember the last statement that had been
10388 added to both queues before gimplifying *EXPR_P. If
10389 gimplification produces new statements in PRE_P and POST_P, those
10390 statements will be annotated with the same location information
10391 as *EXPR_P. */
10392 pre_last_gsi = gsi_last (*pre_p);
10393 post_last_gsi = gsi_last (*post_p);
10394
6de9cd9a 10395 saved_location = input_location;
a281759f
PB
10396 if (save_expr != error_mark_node
10397 && EXPR_HAS_LOCATION (*expr_p))
10398 input_location = EXPR_LOCATION (*expr_p);
6de9cd9a
DN
10399
10400 /* Loop over the specific gimplifiers until the toplevel node
10401 remains the same. */
10402 do
10403 {
73d6ddef
RK
10404 /* Strip away as many useless type conversions as possible
10405 at the toplevel. */
10406 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
6de9cd9a
DN
10407
10408 /* Remember the expr. */
10409 save_expr = *expr_p;
10410
10411 /* Die, die, die, my darling. */
10412 if (save_expr == error_mark_node
726a989a 10413 || (TREE_TYPE (save_expr)
65355d53 10414 && TREE_TYPE (save_expr) == error_mark_node))
6de9cd9a
DN
10415 {
10416 ret = GS_ERROR;
10417 break;
10418 }
10419
10420 /* Do any language-specific gimplification. */
32e8bb8e
ILT
10421 ret = ((enum gimplify_status)
10422 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
6de9cd9a
DN
10423 if (ret == GS_OK)
10424 {
10425 if (*expr_p == NULL_TREE)
10426 break;
10427 if (*expr_p != save_expr)
10428 continue;
10429 }
10430 else if (ret != GS_UNHANDLED)
10431 break;
10432
941f78d1
JM
10433 /* Make sure that all the cases set 'ret' appropriately. */
10434 ret = GS_UNHANDLED;
6de9cd9a
DN
10435 switch (TREE_CODE (*expr_p))
10436 {
10437 /* First deal with the special cases. */
10438
10439 case POSTINCREMENT_EXPR:
10440 case POSTDECREMENT_EXPR:
10441 case PREINCREMENT_EXPR:
10442 case PREDECREMENT_EXPR:
10443 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
10444 fallback != fb_none,
10445 TREE_TYPE (*expr_p));
6de9cd9a
DN
10446 break;
10447
0bd34ae4
RB
10448 case VIEW_CONVERT_EXPR:
10449 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
10450 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
10451 {
10452 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10453 post_p, is_gimple_val, fb_rvalue);
10454 recalculate_side_effects (*expr_p);
10455 break;
10456 }
10457 /* Fallthru. */
10458
6de9cd9a 10459 case ARRAY_REF:
44de5aeb
RK
10460 case ARRAY_RANGE_REF:
10461 case REALPART_EXPR:
10462 case IMAGPART_EXPR:
6de9cd9a
DN
10463 case COMPONENT_REF:
10464 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
90051e16 10465 fallback ? fallback : fb_rvalue);
6de9cd9a
DN
10466 break;
10467
10468 case COND_EXPR:
dae7ec87 10469 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
726a989a 10470
0223e4f5
JM
10471 /* C99 code may assign to an array in a structure value of a
10472 conditional expression, and this has undefined behavior
10473 only on execution, so create a temporary if an lvalue is
10474 required. */
10475 if (fallback == fb_lvalue)
10476 {
381cdae4 10477 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
936d04b6 10478 mark_addressable (*expr_p);
941f78d1 10479 ret = GS_OK;
0223e4f5 10480 }
6de9cd9a
DN
10481 break;
10482
10483 case CALL_EXPR:
90051e16 10484 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
726a989a 10485
0223e4f5
JM
10486 /* C99 code may assign to an array in a structure returned
10487 from a function, and this has undefined behavior only on
10488 execution, so create a temporary if an lvalue is
10489 required. */
10490 if (fallback == fb_lvalue)
10491 {
381cdae4 10492 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
936d04b6 10493 mark_addressable (*expr_p);
941f78d1 10494 ret = GS_OK;
0223e4f5 10495 }
6de9cd9a
DN
10496 break;
10497
10498 case TREE_LIST:
282899df 10499 gcc_unreachable ();
6de9cd9a
DN
10500
10501 case COMPOUND_EXPR:
10502 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
10503 break;
10504
2ec5deb5 10505 case COMPOUND_LITERAL_EXPR:
4c53d183
MM
10506 ret = gimplify_compound_literal_expr (expr_p, pre_p,
10507 gimple_test_f, fallback);
2ec5deb5
PB
10508 break;
10509
6de9cd9a
DN
10510 case MODIFY_EXPR:
10511 case INIT_EXPR:
ebad5233
JM
10512 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
10513 fallback != fb_none);
6de9cd9a
DN
10514 break;
10515
10516 case TRUTH_ANDIF_EXPR:
10517 case TRUTH_ORIF_EXPR:
1d15f620
KT
10518 {
10519 /* Preserve the original type of the expression and the
10520 source location of the outer expression. */
10521 tree org_type = TREE_TYPE (*expr_p);
10522 *expr_p = gimple_boolify (*expr_p);
4b4455e5 10523 *expr_p = build3_loc (input_location, COND_EXPR,
10524 org_type, *expr_p,
10525 fold_convert_loc
4b4455e5 10526 (input_location,
10527 org_type, boolean_true_node),
10528 fold_convert_loc
4b4455e5 10529 (input_location,
10530 org_type, boolean_false_node));
10531 ret = GS_OK;
10532 break;
10533 }
6de9cd9a
DN
10534
10535 case TRUTH_NOT_EXPR:
3c6cbf7a 10536 {
53020648
RG
10537 tree type = TREE_TYPE (*expr_p);
10538 /* The parsers are careful to generate TRUTH_NOT_EXPR
10539 only with operands that are always zero or one.
10540 We do not fold here but handle the only interesting case
10541 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
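/* E.g. (sketch): for a 1-bit boolean operand b, !b becomes ~b;
   for a wider boolean-valued operand x it becomes x ^ 1, with the
   result converted back to the original type if needed.  */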
3c6cbf7a 10542 *expr_p = gimple_boolify (*expr_p);
53020648
RG
10543 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
10544 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
10545 TREE_TYPE (*expr_p),
10546 TREE_OPERAND (*expr_p, 0));
10547 else
10548 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
10549 TREE_TYPE (*expr_p),
10550 TREE_OPERAND (*expr_p, 0),
10551 build_int_cst (TREE_TYPE (*expr_p), 1));
10552 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
10553 *expr_p = fold_convert_loc (input_location, type, *expr_p);
10554 ret = GS_OK;
bd5d002e 10555 break;
3c6cbf7a 10556 }
67339062 10557
6de9cd9a
DN
10558 case ADDR_EXPR:
10559 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
10560 break;
10561
8170608b
TB
10562 case ANNOTATE_EXPR:
10563 {
10564 tree cond = TREE_OPERAND (*expr_p, 0);
718c4601 10565 tree kind = TREE_OPERAND (*expr_p, 1);
664ceb1e
JJ
10566 tree type = TREE_TYPE (cond);
10567 if (!INTEGRAL_TYPE_P (type))
10568 {
10569 *expr_p = cond;
10570 ret = GS_OK;
10571 break;
10572 }
b731b390 10573 tree tmp = create_tmp_var (type);
8170608b 10574 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
538dd0b7 10575 gcall *call
718c4601 10576 = gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind);
8170608b
TB
10577 gimple_call_set_lhs (call, tmp);
10578 gimplify_seq_add_stmt (pre_p, call);
10579 *expr_p = tmp;
10580 ret = GS_ALL_DONE;
10581 break;
10582 }
10583
6de9cd9a 10584 case VA_ARG_EXPR:
cd3ce9b4 10585 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
6de9cd9a
DN
10586 break;
10587
1043771b 10588 CASE_CONVERT:
6de9cd9a
DN
10589 if (IS_EMPTY_STMT (*expr_p))
10590 {
10591 ret = GS_ALL_DONE;
10592 break;
10593 }
10594
10595 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
10596 || fallback == fb_none)
10597 {
10598 /* Just strip a conversion to void (or in void context) and
10599 try again. */
10600 *expr_p = TREE_OPERAND (*expr_p, 0);
941f78d1 10601 ret = GS_OK;
6de9cd9a
DN
10602 break;
10603 }
10604
10605 ret = gimplify_conversion (expr_p);
10606 if (ret == GS_ERROR)
10607 break;
10608 if (*expr_p != save_expr)
10609 break;
10610 /* FALLTHRU */
10611
10612 case FIX_TRUNC_EXPR:
6de9cd9a
DN
10613 /* unary_expr: ... | '(' cast ')' val | ... */
10614 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10615 is_gimple_val, fb_rvalue);
10616 recalculate_side_effects (*expr_p);
10617 break;
10618
6a720599 10619 case INDIRECT_REF:
70f34814
RG
10620 {
10621 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
3748f5c9 10622 bool notrap = TREE_THIS_NOTRAP (*expr_p);
70f34814
RG
10623 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
10624
10625 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
10626 if (*expr_p != save_expr)
10627 {
10628 ret = GS_OK;
10629 break;
10630 }
10631
10632 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10633 is_gimple_reg, fb_rvalue);
dca26746
RG
10634 if (ret == GS_ERROR)
10635 break;
70f34814 10636
dca26746 10637 recalculate_side_effects (*expr_p);
70f34814
RG
10638 *expr_p = fold_build2_loc (input_location, MEM_REF,
10639 TREE_TYPE (*expr_p),
10640 TREE_OPERAND (*expr_p, 0),
10641 build_int_cst (saved_ptr_type, 0));
10642 TREE_THIS_VOLATILE (*expr_p) = volatilep;
3748f5c9 10643 TREE_THIS_NOTRAP (*expr_p) = notrap;
70f34814
RG
10644 ret = GS_OK;
10645 break;
10646 }
10647
10648 /* We arrive here through the various re-gimplification paths. */
10649 case MEM_REF:
10650 /* First try re-folding the whole thing. */
10651 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
10652 TREE_OPERAND (*expr_p, 0),
10653 TREE_OPERAND (*expr_p, 1));
10654 if (tmp)
941f78d1 10655 {
ee45a32d
EB
10656 REF_REVERSE_STORAGE_ORDER (tmp)
10657 = REF_REVERSE_STORAGE_ORDER (*expr_p);
70f34814
RG
10658 *expr_p = tmp;
10659 recalculate_side_effects (*expr_p);
941f78d1
JM
10660 ret = GS_OK;
10661 break;
10662 }
01718e96
RG
10663 /* Avoid re-gimplifying the address operand if it is already
10664 in suitable form. Re-gimplifying would mark the address
10665 operand addressable. Always gimplify when not in SSA form
10666 as we still may have to gimplify decls with value-exprs. */
ebc1b29e 10667 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
10668 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
10669 {
10670 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10671 is_gimple_mem_ref_addr, fb_rvalue);
10672 if (ret == GS_ERROR)
10673 break;
10674 }
6de9cd9a 10675 recalculate_side_effects (*expr_p);
70f34814 10676 ret = GS_ALL_DONE;
6de9cd9a
DN
10677 break;
10678
01718e96 10679 /* Constants need not be gimplified. */
6de9cd9a
DN
10680 case INTEGER_CST:
10681 case REAL_CST:
325217ed 10682 case FIXED_CST:
6de9cd9a
DN
10683 case STRING_CST:
10684 case COMPLEX_CST:
10685 case VECTOR_CST:
3f5c390d
RB
10686 /* Drop the overflow flag on constants, we do not want
10687 that in the GIMPLE IL. */
10688 if (TREE_OVERFLOW_P (*expr_p))
10689 *expr_p = drop_tree_overflow (*expr_p);
6de9cd9a
DN
10690 ret = GS_ALL_DONE;
10691 break;
10692
10693 case CONST_DECL:
0534fa56 10694 /* If we require an lvalue, such as for ADDR_EXPR, retain the
2a7e31df 10695 CONST_DECL node. Otherwise the decl is replaceable by its
10696 value. */
10697 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
10698 if (fallback & fb_lvalue)
10699 ret = GS_ALL_DONE;
10700 else
941f78d1
JM
10701 {
10702 *expr_p = DECL_INITIAL (*expr_p);
10703 ret = GS_OK;
10704 }
6de9cd9a
DN
10705 break;
10706
350fae66 10707 case DECL_EXPR:
726a989a 10708 ret = gimplify_decl_expr (expr_p, pre_p);
350fae66
RK
10709 break;
10710
6de9cd9a 10711 case BIND_EXPR:
c6c7698d 10712 ret = gimplify_bind_expr (expr_p, pre_p);
6de9cd9a
DN
10713 break;
10714
10715 case LOOP_EXPR:
10716 ret = gimplify_loop_expr (expr_p, pre_p);
10717 break;
10718
10719 case SWITCH_EXPR:
10720 ret = gimplify_switch_expr (expr_p, pre_p);
10721 break;
10722
6de9cd9a
DN
10723 case EXIT_EXPR:
10724 ret = gimplify_exit_expr (expr_p);
10725 break;
10726
10727 case GOTO_EXPR:
10728 /* If the target is not LABEL, then it is a computed jump
10729 and the target needs to be gimplified. */
10730 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
8c50b495
JJ
10731 {
10732 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
10733 NULL, is_gimple_val, fb_rvalue);
10734 if (ret == GS_ERROR)
10735 break;
10736 }
726a989a
RB
10737 gimplify_seq_add_stmt (pre_p,
10738 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
941f78d1 10739 ret = GS_ALL_DONE;
6de9cd9a
DN
10740 break;
10741
2e28e797 10742 case PREDICT_EXPR:
726a989a
RB
10743 gimplify_seq_add_stmt (pre_p,
10744 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
10745 PREDICT_EXPR_OUTCOME (*expr_p)));
10746 ret = GS_ALL_DONE;
10747 break;
2e28e797 10748
6de9cd9a
DN
10749 case LABEL_EXPR:
10750 ret = GS_ALL_DONE;
282899df
NS
10751 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
10752 == current_function_decl);
726a989a
RB
10753 gimplify_seq_add_stmt (pre_p,
10754 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
6de9cd9a
DN
10755 break;
10756
10757 case CASE_LABEL_EXPR:
726a989a 10758 ret = gimplify_case_label_expr (expr_p, pre_p);
6de9cd9a
DN
10759 break;
10760
10761 case RETURN_EXPR:
10762 ret = gimplify_return_expr (*expr_p, pre_p);
10763 break;
10764
10765 case CONSTRUCTOR:
48eb4e53
RK
10766 /* Don't reduce this in place; let gimplify_init_constructor work its
10767 magic. But if we're just elaborating this for side effects, just
10768 gimplify any element that has side-effects. */
10769 if (fallback == fb_none)
10770 {
4038c495 10771 unsigned HOST_WIDE_INT ix;
ac47786e 10772 tree val;
08330ec2 10773 tree temp = NULL_TREE;
ac47786e
NF
10774 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
10775 if (TREE_SIDE_EFFECTS (val))
10776 append_to_statement_list (val, &temp);
48eb4e53 10777
08330ec2 10778 *expr_p = temp;
941f78d1 10779 ret = temp ? GS_OK : GS_ALL_DONE;
48eb4e53 10780 }
ca0b7d18
AP
10781 /* C99 code may assign to an array in a constructed
10782 structure or union, and this has undefined behavior only
10783 on execution, so create a temporary if an lvalue is
10784 required. */
10785 else if (fallback == fb_lvalue)
10786 {
381cdae4 10787 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
936d04b6 10788 mark_addressable (*expr_p);
941f78d1 10789 ret = GS_OK;
ca0b7d18 10790 }
08330ec2
AP
10791 else
10792 ret = GS_ALL_DONE;
6de9cd9a
DN
10793 break;
10794
10795 /* The following are special cases that are not handled by the
10796 original GIMPLE grammar. */
10797
10798 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
10799 eliminated. */
10800 case SAVE_EXPR:
10801 ret = gimplify_save_expr (expr_p, pre_p, post_p);
10802 break;
10803
10804 case BIT_FIELD_REF:
ea814c66
EB
10805 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10806 post_p, is_gimple_lvalue, fb_either);
10807 recalculate_side_effects (*expr_p);
6de9cd9a
DN
10808 break;
10809
150e3929
RG
10810 case TARGET_MEM_REF:
10811 {
10812 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
10813
23a534a1 10814 if (TMR_BASE (*expr_p))
150e3929 10815 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
4d948885 10816 post_p, is_gimple_mem_ref_addr, fb_either);
150e3929
RG
10817 if (TMR_INDEX (*expr_p))
10818 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
10819 post_p, is_gimple_val, fb_rvalue);
4d948885
RG
10820 if (TMR_INDEX2 (*expr_p))
10821 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
10822 post_p, is_gimple_val, fb_rvalue);
150e3929
RG
10823 /* TMR_STEP and TMR_OFFSET are always integer constants. */
10824 ret = MIN (r0, r1);
10825 }
10826 break;
10827
6de9cd9a
DN
10828 case NON_LVALUE_EXPR:
10829 /* This should have been stripped above. */
282899df 10830 gcc_unreachable ();
6de9cd9a
DN
10831
10832 case ASM_EXPR:
10833 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
10834 break;
10835
10836 case TRY_FINALLY_EXPR:
10837 case TRY_CATCH_EXPR:
726a989a
RB
10838 {
10839 gimple_seq eval, cleanup;
538dd0b7 10840 gtry *try_;
726a989a 10841
820055a0
DC
10842 /* Calls to destructors are generated automatically in FINALLY/CATCH
10843 block. They should have location as UNKNOWN_LOCATION. However,
10844 gimplify_call_expr will reset these call stmts to input_location
10845 if it finds stmt's location is unknown. To prevent resetting for
10846 destructors, we set the input_location to unknown.
10847 Note that this only affects the destructor calls in FINALLY/CATCH
10848 block, and will automatically reset to its original value by the
10849 end of gimplify_expr. */
10850 input_location = UNKNOWN_LOCATION;
726a989a
RB
10851 eval = cleanup = NULL;
10852 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
10853 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
cc8b343d
JJ
10854 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
10855 if (gimple_seq_empty_p (cleanup))
10856 {
10857 gimple_seq_add_seq (pre_p, eval);
10858 ret = GS_ALL_DONE;
10859 break;
10860 }
726a989a
RB
10861 try_ = gimple_build_try (eval, cleanup,
10862 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
10863 ? GIMPLE_TRY_FINALLY
10864 : GIMPLE_TRY_CATCH);
220d905f 10865 if (EXPR_HAS_LOCATION (save_expr))
e368f44f 10866 gimple_set_location (try_, EXPR_LOCATION (save_expr));
220d905f
AH
10867 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
10868 gimple_set_location (try_, saved_location);
726a989a
RB
10869 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
10870 gimple_try_set_catch_is_cleanup (try_,
10871 TRY_CATCH_IS_CLEANUP (*expr_p));
10872 gimplify_seq_add_stmt (pre_p, try_);
10873 ret = GS_ALL_DONE;
10874 break;
10875 }
6de9cd9a
DN
10876
10877 case CLEANUP_POINT_EXPR:
10878 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
10879 break;
10880
10881 case TARGET_EXPR:
10882 ret = gimplify_target_expr (expr_p, pre_p, post_p);
10883 break;
10884
10885 case CATCH_EXPR:
726a989a 10886 {
355fe088 10887 gimple *c;
726a989a
RB
10888 gimple_seq handler = NULL;
10889 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
10890 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
10891 gimplify_seq_add_stmt (pre_p, c);
10892 ret = GS_ALL_DONE;
10893 break;
10894 }
6de9cd9a
DN
10895
10896 case EH_FILTER_EXPR:
726a989a 10897 {
355fe088 10898 gimple *ehf;
726a989a
RB
10899 gimple_seq failure = NULL;
10900
10901 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
10902 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
d665b6e5 10903 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
726a989a
RB
10904 gimplify_seq_add_stmt (pre_p, ehf);
10905 ret = GS_ALL_DONE;
10906 break;
10907 }
6de9cd9a 10908
0f59171d
RH
10909 case OBJ_TYPE_REF:
10910 {
10911 enum gimplify_status r0, r1;
726a989a
RB
10912 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
10913 post_p, is_gimple_val, fb_rvalue);
10914 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
10915 post_p, is_gimple_val, fb_rvalue);
0f3a057a 10916 TREE_SIDE_EFFECTS (*expr_p) = 0;
0f59171d
RH
10917 ret = MIN (r0, r1);
10918 }
6de9cd9a
DN
10919 break;
10920
6de9cd9a
DN
10921 case LABEL_DECL:
10922 /* We get here when taking the address of a label. We mark
10923 the label as "forced", meaning it can never be removed and
10924 it is a potential target for any computed goto. */
10925 FORCED_LABEL (*expr_p) = 1;
10926 ret = GS_ALL_DONE;
10927 break;
10928
10929 case STATEMENT_LIST:
c6c7698d 10930 ret = gimplify_statement_list (expr_p, pre_p);
6de9cd9a
DN
10931 break;
10932
d25cee4d
RH
10933 case WITH_SIZE_EXPR:
10934 {
70e2829d
KH
10935 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10936 post_p == &internal_post ? NULL : post_p,
10937 gimple_test_f, fallback);
10938 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
10939 is_gimple_val, fb_rvalue);
941f78d1 10940 ret = GS_ALL_DONE;
d25cee4d
RH
10941 }
10942 break;
10943
6de9cd9a 10944 case VAR_DECL:
4744afba 10945 case PARM_DECL:
a9f7c570 10946 ret = gimplify_var_or_parm_decl (expr_p);
6de9cd9a
DN
10947 break;
10948
077b0dfb 10949 case RESULT_DECL:
41dbbb37 10950 /* When within an OMP context, notice uses of variables. */
077b0dfb
JJ
10951 if (gimplify_omp_ctxp)
10952 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
10953 ret = GS_ALL_DONE;
10954 break;
10955
71956db3
RH
10956 case SSA_NAME:
10957 /* Allow callbacks into the gimplifier during optimization. */
10958 ret = GS_ALL_DONE;
10959 break;
10960
953ff289 10961 case OMP_PARALLEL:
726a989a
RB
10962 gimplify_omp_parallel (expr_p, pre_p);
10963 ret = GS_ALL_DONE;
953ff289
DN
10964 break;
10965
a68ab351 10966 case OMP_TASK:
726a989a
RB
10967 gimplify_omp_task (expr_p, pre_p);
10968 ret = GS_ALL_DONE;
a68ab351
JJ
10969 break;
10970
953ff289 10971 case OMP_FOR:
74bf76ed 10972 case OMP_SIMD:
c02065fc 10973 case CILK_SIMD:
9a771876 10974 case CILK_FOR:
acf0174b 10975 case OMP_DISTRIBUTE:
d9a6bd32 10976 case OMP_TASKLOOP:
41dbbb37 10977 case OACC_LOOP:
953ff289
DN
10978 ret = gimplify_omp_for (expr_p, pre_p);
10979 break;
10980
41dbbb37
TS
10981 case OACC_CACHE:
10982 gimplify_oacc_cache (expr_p, pre_p);
10983 ret = GS_ALL_DONE;
10984 break;
10985
6e232ba4
JN
10986 case OACC_DECLARE:
10987 gimplify_oacc_declare (expr_p, pre_p);
10988 ret = GS_ALL_DONE;
10989 break;
10990
37d5ad46 10991 case OACC_HOST_DATA:
88bae6f4 10992 case OACC_DATA:
41dbbb37 10993 case OACC_KERNELS:
41dbbb37 10994 case OACC_PARALLEL:
953ff289
DN
10995 case OMP_SECTIONS:
10996 case OMP_SINGLE:
acf0174b
JJ
10997 case OMP_TARGET:
10998 case OMP_TARGET_DATA:
10999 case OMP_TEAMS:
726a989a
RB
11000 gimplify_omp_workshare (expr_p, pre_p);
11001 ret = GS_ALL_DONE;
953ff289
DN
11002 break;
11003
41dbbb37
TS
11004 case OACC_ENTER_DATA:
11005 case OACC_EXIT_DATA:
11006 case OACC_UPDATE:
acf0174b 11007 case OMP_TARGET_UPDATE:
d9a6bd32
JJ
11008 case OMP_TARGET_ENTER_DATA:
11009 case OMP_TARGET_EXIT_DATA:
acf0174b
JJ
11010 gimplify_omp_target_update (expr_p, pre_p);
11011 ret = GS_ALL_DONE;
11012 break;
11013
953ff289
DN
11014 case OMP_SECTION:
11015 case OMP_MASTER:
acf0174b 11016 case OMP_TASKGROUP:
953ff289
DN
11017 case OMP_ORDERED:
11018 case OMP_CRITICAL:
726a989a
RB
11019 {
11020 gimple_seq body = NULL;
355fe088 11021 gimple *g;
726a989a
RB
11022
11023 gimplify_and_add (OMP_BODY (*expr_p), &body);
11024 switch (TREE_CODE (*expr_p))
11025 {
11026 case OMP_SECTION:
11027 g = gimple_build_omp_section (body);
11028 break;
11029 case OMP_MASTER:
11030 g = gimple_build_omp_master (body);
11031 break;
acf0174b
JJ
11032 case OMP_TASKGROUP:
11033 {
11034 gimple_seq cleanup = NULL;
11035 tree fn
11036 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
11037 g = gimple_build_call (fn, 0);
11038 gimple_seq_add_stmt (&cleanup, g);
11039 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
11040 body = NULL;
11041 gimple_seq_add_stmt (&body, g);
11042 g = gimple_build_omp_taskgroup (body);
11043 }
11044 break;
726a989a 11045 case OMP_ORDERED:
d9a6bd32 11046 g = gimplify_omp_ordered (*expr_p, body);
726a989a
RB
11047 break;
11048 case OMP_CRITICAL:
d9a6bd32
JJ
11049 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
11050 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
1a80d6b8 11051 gimplify_adjust_omp_clauses (pre_p, body,
11052 &OMP_CRITICAL_CLAUSES (*expr_p),
11053 OMP_CRITICAL);
726a989a 11054 g = gimple_build_omp_critical (body,
11055 OMP_CRITICAL_NAME (*expr_p),
11056 OMP_CRITICAL_CLAUSES (*expr_p));
726a989a
RB
11057 break;
11058 default:
11059 gcc_unreachable ();
11060 }
11061 gimplify_seq_add_stmt (pre_p, g);
11062 ret = GS_ALL_DONE;
11063 break;
11064 }
953ff289
DN
11065
11066 case OMP_ATOMIC:
20906c66
JJ
11067 case OMP_ATOMIC_READ:
11068 case OMP_ATOMIC_CAPTURE_OLD:
11069 case OMP_ATOMIC_CAPTURE_NEW:
953ff289
DN
11070 ret = gimplify_omp_atomic (expr_p, pre_p);
11071 break;
11072
0a35513e
AH
11073 case TRANSACTION_EXPR:
11074 ret = gimplify_transaction (expr_p, pre_p);
11075 break;
11076
16949072
RG
11077 case TRUTH_AND_EXPR:
11078 case TRUTH_OR_EXPR:
11079 case TRUTH_XOR_EXPR:
1d15f620 11080 {
bd5d002e 11081 tree orig_type = TREE_TYPE (*expr_p);
fc1f4caf 11082 tree new_type, xop0, xop1;
1d15f620 11083 *expr_p = gimple_boolify (*expr_p);
fc1f4caf
KT
11084 new_type = TREE_TYPE (*expr_p);
11085 if (!useless_type_conversion_p (orig_type, new_type))
1d15f620 11086 {
4b4455e5 11087 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
1d15f620
KT
11088 ret = GS_OK;
11089 break;
11090 }
da5fb469 11091
bd5d002e
RG
11092 /* Boolified binary truth expressions are semantically equivalent
11093 to bitwise binary expressions. Canonicalize them to the
11094 bitwise variant. */
11095 switch (TREE_CODE (*expr_p))
11096 {
11097 case TRUTH_AND_EXPR:
11098 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
11099 break;
11100 case TRUTH_OR_EXPR:
11101 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
11102 break;
11103 case TRUTH_XOR_EXPR:
11104 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
11105 break;
11106 default:
11107 break;
11108 }
fc1f4caf
KT
11109 /* Now make sure that operands have compatible type to
11110 expression's new_type. */
11111 xop0 = TREE_OPERAND (*expr_p, 0);
11112 xop1 = TREE_OPERAND (*expr_p, 1);
11113 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
11114 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
11115 new_type,
11116 xop0);
11117 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
11118 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
11119 new_type,
11120 xop1);
bd5d002e
RG
11121 /* Continue classified as tcc_binary. */
11122 goto expr_2;
da5fb469 11123 }
16949072 11124
e6ed43b0 11125 case VEC_COND_EXPR:
e93ed60e
RH
11126 {
11127 enum gimplify_status r0, r1, r2;
11128
11129 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11130 post_p, is_gimple_condexpr, fb_rvalue);
11131 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11132 post_p, is_gimple_val, fb_rvalue);
11133 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
11134 post_p, is_gimple_val, fb_rvalue);
11135
11136 ret = MIN (MIN (r0, r1), r2);
11137 recalculate_side_effects (*expr_p);
11138 }
11139 break;
11140
11141 case FMA_EXPR:
2205ed25 11142 case VEC_PERM_EXPR:
16949072
RG
11143 /* Classified as tcc_expression. */
11144 goto expr_3;
11145
483c6429
RG
11146 case BIT_INSERT_EXPR:
11147 /* Argument 3 is a constant. */
11148 goto expr_2;
11149
5be014d5 11150 case POINTER_PLUS_EXPR:
315f5f1b
RG
11151 {
11152 enum gimplify_status r0, r1;
11153 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11154 post_p, is_gimple_val, fb_rvalue);
11155 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11156 post_p, is_gimple_val, fb_rvalue);
11157 recalculate_side_effects (*expr_p);
11158 ret = MIN (r0, r1);
315f5f1b
RG
11159 break;
11160 }
726a989a 11161
939b37da
BI
11162 case CILK_SYNC_STMT:
11163 {
11164 if (!fn_contains_cilk_spawn_p (cfun))
11165 {
11166 error_at (EXPR_LOCATION (*expr_p),
11167 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
11168 ret = GS_ERROR;
11169 }
11170 else
11171 {
11172 gimplify_cilk_sync (expr_p, pre_p);
11173 ret = GS_ALL_DONE;
11174 }
11175 break;
11176 }
11177
6de9cd9a 11178 default:
282899df 11179 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
6de9cd9a 11180 {
6615c446 11181 case tcc_comparison:
61c25908
OH
11182 /* Handle comparison of objects of non scalar mode aggregates
11183 with a call to memcmp. It would be nice to only have to do
11184 this for variable-sized objects, but then we'd have to allow
11185 the same nest of reference nodes we allow for MODIFY_EXPR and
11186 that's too complex.
11187
11188 Compare scalar mode aggregates as scalar mode values. Using
11189 memcmp for them would be very inefficient at best, and is
11190 plain wrong if bitfields are involved. */
726a989a
RB
11191 {
11192 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
61c25908 11193
544d960a
AS
11194 /* Vector comparisons need no boolification. */
11195 if (TREE_CODE (type) == VECTOR_TYPE)
11196 goto expr_2;
11197 else if (!AGGREGATE_TYPE_P (type))
7f3ff782
KT
11198 {
11199 tree org_type = TREE_TYPE (*expr_p);
11200 *expr_p = gimple_boolify (*expr_p);
11201 if (!useless_type_conversion_p (org_type,
11202 TREE_TYPE (*expr_p)))
11203 {
11204 *expr_p = fold_convert_loc (input_location,
11205 org_type, *expr_p);
11206 ret = GS_OK;
11207 }
11208 else
11209 goto expr_2;
11210 }
726a989a
RB
11211 else if (TYPE_MODE (type) != BLKmode)
11212 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
11213 else
11214 ret = gimplify_variable_sized_compare (expr_p);
61c25908 11215
726a989a 11216 break;
61c25908 11217 }
d3147f64 11218
282899df
NS
11219 /* If *EXPR_P does not need to be special-cased, handle it
11220 according to its class. */
6615c446 11221 case tcc_unary:
282899df
NS
11222 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11223 post_p, is_gimple_val, fb_rvalue);
11224 break;
6de9cd9a 11225
6615c446 11226 case tcc_binary:
282899df
NS
11227 expr_2:
11228 {
11229 enum gimplify_status r0, r1;
d3147f64 11230
282899df 11231 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
726a989a 11232 post_p, is_gimple_val, fb_rvalue);
282899df
NS
11233 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11234 post_p, is_gimple_val, fb_rvalue);
d3147f64 11235
282899df
NS
11236 ret = MIN (r0, r1);
11237 break;
11238 }
d3147f64 11239
16949072
RG
11240 expr_3:
11241 {
11242 enum gimplify_status r0, r1, r2;
11243
11244 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11245 post_p, is_gimple_val, fb_rvalue);
11246 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11247 post_p, is_gimple_val, fb_rvalue);
11248 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
11249 post_p, is_gimple_val, fb_rvalue);
11250
11251 ret = MIN (MIN (r0, r1), r2);
11252 break;
11253 }
11254
6615c446
JO
11255 case tcc_declaration:
11256 case tcc_constant:
6de9cd9a 11257 ret = GS_ALL_DONE;
282899df 11258 goto dont_recalculate;
d3147f64 11259
282899df 11260 default:
16949072 11261 gcc_unreachable ();
6de9cd9a 11262 }
6de9cd9a
DN
11263
11264 recalculate_side_effects (*expr_p);
726a989a 11265
282899df 11266 dont_recalculate:
6de9cd9a
DN
11267 break;
11268 }
d3147f64 11269
941f78d1 11270 gcc_assert (*expr_p || ret != GS_OK);
6de9cd9a
DN
11271 }
11272 while (ret == GS_OK);
11273
11274 /* If we encountered an error_mark somewhere nested inside, either
11275 stub out the statement or propagate the error back out. */
11276 if (ret == GS_ERROR)
11277 {
11278 if (is_statement)
65355d53 11279 *expr_p = NULL;
6de9cd9a
DN
11280 goto out;
11281 }
11282
6de9cd9a
DN
11283 /* This was only valid as a return value from the langhook, which
11284 we handled. Make sure it doesn't escape from any other context. */
282899df 11285 gcc_assert (ret != GS_UNHANDLED);
6de9cd9a 11286
65355d53 11287 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
6de9cd9a
DN
11288 {
11289 /* We aren't looking for a value, and we don't have a valid
11290 statement. If it doesn't have side-effects, throw it away. */
11291 if (!TREE_SIDE_EFFECTS (*expr_p))
65355d53 11292 *expr_p = NULL;
6de9cd9a 11293 else if (!TREE_THIS_VOLATILE (*expr_p))
44de5aeb
RK
11294 {
11295 /* This is probably a _REF that contains something nested that
11296 has side effects. Recurse through the operands to find it. */
11297 enum tree_code code = TREE_CODE (*expr_p);
11298
282899df 11299 switch (code)
44de5aeb 11300 {
282899df 11301 case COMPONENT_REF:
02a5eac4
EB
11302 case REALPART_EXPR:
11303 case IMAGPART_EXPR:
11304 case VIEW_CONVERT_EXPR:
282899df
NS
11305 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11306 gimple_test_f, fallback);
11307 break;
11308
a9e64c63
EB
11309 case ARRAY_REF:
11310 case ARRAY_RANGE_REF:
44de5aeb
RK
11311 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11312 gimple_test_f, fallback);
11313 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
11314 gimple_test_f, fallback);
11315 break;
11316
11317 default:
11318 /* Anything else with side-effects must be converted to
a9e64c63 11319 a valid statement before we get here. */
282899df 11320 gcc_unreachable ();
44de5aeb 11321 }
44de5aeb 11322
65355d53 11323 *expr_p = NULL;
44de5aeb 11324 }
a9e64c63
EB
11325 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
11326 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
6de9cd9a 11327 {
a9e64c63
EB
11328 /* Historically, the compiler has treated a bare reference
11329 to a non-BLKmode volatile lvalue as forcing a load. */
af62f6f9 11330 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
726a989a 11331
c22b1771 11332 /* Normally, we do not want to create a temporary for a
11333 TREE_ADDRESSABLE type because such a type should not be
11334 copied by bitwise-assignment. However, we make an
11335 exception here, as all we are doing here is ensuring that
11336 we read the bytes that make up the type. We use
11337 create_tmp_var_raw because create_tmp_var will abort when
57b51d4d 11338 given a TREE_ADDRESSABLE type. */
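/* Illustrative example (an assumption, not from the source): a
   bare statement "v;" where v is a volatile int is kept as a load
   into a fresh temporary named "vol" rather than being dropped,
   preserving the historical access semantics described above.  */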
a38578e1
MM
11339 tree tmp = create_tmp_var_raw (type, "vol");
11340 gimple_add_tmp_var (tmp);
726a989a
RB
11341 gimplify_assign (tmp, *expr_p, pre_p);
11342 *expr_p = NULL;
6de9cd9a
DN
11343 }
11344 else
11345 /* We can't do anything useful with a volatile reference to
11346 an incomplete type, so just throw it away. Likewise for
11347 a BLKmode type, since any implicit inner load should
11348 already have been turned into an explicit one by the
11349 gimplification process. */
65355d53 11350 *expr_p = NULL;
6de9cd9a
DN
11351 }
11352
11353 /* If we are gimplifying at the statement level, we're done. Tack
726a989a 11354 everything together and return. */
325c3691 11355 if (fallback == fb_none || is_statement)
6de9cd9a 11356 {
726a989a
RB
11357 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
11358 it out for GC to reclaim it. */
11359 *expr_p = NULL_TREE;
11360
11361 if (!gimple_seq_empty_p (internal_pre)
11362 || !gimple_seq_empty_p (internal_post))
be00f578 11363 {
726a989a
RB
11364 gimplify_seq_add_seq (&internal_pre, internal_post);
11365 gimplify_seq_add_seq (pre_p, internal_pre);
be00f578 11366 }
726a989a
RB
11367
11368 /* The result of gimplifying *EXPR_P is going to be the last few
11369 statements in *PRE_P and *POST_P. Add location information
11370 to all the statements that were added by the gimplification
11371 helpers. */
11372 if (!gimple_seq_empty_p (*pre_p))
11373 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
11374
11375 if (!gimple_seq_empty_p (*post_p))
11376 annotate_all_with_location_after (*post_p, post_last_gsi,
11377 input_location);
11378
6de9cd9a
DN
11379 goto out;
11380 }
11381
726a989a
RB
11382#ifdef ENABLE_GIMPLE_CHECKING
11383 if (*expr_p)
11384 {
11385 enum tree_code code = TREE_CODE (*expr_p);
11386 /* These expressions should already be in gimple IR form. */
11387 gcc_assert (code != MODIFY_EXPR
11388 && code != ASM_EXPR
11389 && code != BIND_EXPR
11390 && code != CATCH_EXPR
6fc4fb06 11391 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
726a989a
RB
11392 && code != EH_FILTER_EXPR
11393 && code != GOTO_EXPR
11394 && code != LABEL_EXPR
11395 && code != LOOP_EXPR
726a989a
RB
11396 && code != SWITCH_EXPR
11397 && code != TRY_FINALLY_EXPR
41dbbb37
TS
11398 && code != OACC_PARALLEL
11399 && code != OACC_KERNELS
11400 && code != OACC_DATA
11401 && code != OACC_HOST_DATA
11402 && code != OACC_DECLARE
11403 && code != OACC_UPDATE
11404 && code != OACC_ENTER_DATA
11405 && code != OACC_EXIT_DATA
11406 && code != OACC_CACHE
726a989a
RB
11407 && code != OMP_CRITICAL
11408 && code != OMP_FOR
41dbbb37 11409 && code != OACC_LOOP
726a989a 11410 && code != OMP_MASTER
acf0174b 11411 && code != OMP_TASKGROUP
726a989a
RB
11412 && code != OMP_ORDERED
11413 && code != OMP_PARALLEL
11414 && code != OMP_SECTIONS
11415 && code != OMP_SECTION
11416 && code != OMP_SINGLE);
11417 }
11418#endif
6de9cd9a 11419
726a989a
RB
11420 /* Otherwise we're gimplifying a subexpression, so the resulting
11421 value is interesting. If it's a valid operand that matches
11422 GIMPLE_TEST_F, we're done. Unless we are handling some
11423 post-effects internally; if that's the case, we need to copy into
11424 a temporary before adding the post-effects to POST_P. */
11425 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
6de9cd9a
DN
11426 goto out;
11427
11428 /* Otherwise, we need to create a new temporary for the gimplified
11429 expression. */
11430
11431 /* We can't return an lvalue if we have an internal postqueue. The
11432 object the lvalue refers to would (probably) be modified by the
11433 postqueue; we need to copy the value out first, which means an
11434 rvalue. */
726a989a
RB
11435 if ((fallback & fb_lvalue)
11436 && gimple_seq_empty_p (internal_post)
e847cc68 11437 && is_gimple_addressable (*expr_p))
6de9cd9a
DN
11438 {
11439 /* An lvalue will do. Take the address of the expression, store it
11440 in a temporary, and replace the expression with an INDIRECT_REF of
11441 that temporary. */
db3927fb 11442 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
6de9cd9a 11443 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
7f5ad6d7 11444 *expr_p = build_simple_mem_ref (tmp);
6de9cd9a 11445 }
ba4d8f9d 11446 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
6de9cd9a 11447 {
726a989a
RB
11448 /* An rvalue will do. Assign the gimplified expression into a
11449 new temporary TMP and replace the original expression with
11450 TMP. First, make sure that the expression has a type so that
11451 it can be assigned into a temporary. */
282899df 11452 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
412a1d9e 11453 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
6de9cd9a 11454 }
282899df 11455 else
6de9cd9a 11456 {
726a989a 11457#ifdef ENABLE_GIMPLE_CHECKING
282899df
NS
11458 if (!(fallback & fb_mayfail))
11459 {
11460 fprintf (stderr, "gimplification failed:\n");
11461 print_generic_expr (stderr, *expr_p, 0);
11462 debug_tree (*expr_p);
11463 internal_error ("gimplification failed");
11464 }
11465#endif
11466 gcc_assert (fallback & fb_mayfail);
726a989a 11467
282899df 11468 /* If this is an asm statement, and the user asked for the
535a42b1 11469 impossible, don't die. Fail and let gimplify_asm_expr
282899df 11470 issue an error. */
6de9cd9a
DN
11471 ret = GS_ERROR;
11472 goto out;
11473 }
6de9cd9a 11474
6de9cd9a 11475 /* Make sure the temporary matches our predicate. */
282899df 11476 gcc_assert ((*gimple_test_f) (*expr_p));
6de9cd9a 11477
726a989a 11478 if (!gimple_seq_empty_p (internal_post))
6de9cd9a 11479 {
726a989a
RB
11480 annotate_all_with_location (internal_post, input_location);
11481 gimplify_seq_add_seq (pre_p, internal_post);
6de9cd9a
DN
11482 }
11483
11484 out:
11485 input_location = saved_location;
11486 return ret;
11487}
11488
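/* Illustrative sketch, not part of gimplify.c: the volatile handling in
   gimplify_expr above is what preserves a bare read of a volatile,
   non-BLKmode lvalue used as a statement.  For user code like the
   following, the gimplifier is expected to load the value into a "vol"
   temporary (created with create_tmp_var_raw above) instead of discarding
   the reference; the temporary name seen in dumps (e.g. "vol.0") is only
   indicative.  */

volatile int status_reg;	/* hypothetical memory-mapped register */

void
poll_status (void)
{
  status_reg;			/* bare volatile read; kept as a real load */
}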
381cdae4
RB
11489/* Like gimplify_expr but make sure the gimplified result is not itself
 11490 an SSA name (but a decl if it would otherwise be one).  Temporaries
 11491 required by evaluating *EXPR_P may still be SSA names. */
11492
11493static enum gimplify_status
11494gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
11495 bool (*gimple_test_f) (tree), fallback_t fallback,
11496 bool allow_ssa)
11497{
11498 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
11499 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
11500 gimple_test_f, fallback);
11501 if (! allow_ssa
11502 && TREE_CODE (*expr_p) == SSA_NAME)
11503 {
11504 tree name = *expr_p;
11505 if (was_ssa_name_p)
11506 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
11507 else
11508 {
11509 /* Avoid the extra copy if possible. */
11510 *expr_p = create_tmp_reg (TREE_TYPE (name));
11511 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
11512 release_ssa_name (name);
11513 }
11514 }
11515 return ret;
11516}
11517
44de5aeb 11518/* Look through TYPE for variable-sized objects and gimplify each such
65355d53 11519 size that we find. Add to LIST_P any statements generated. */
44de5aeb 11520
65355d53 11521void
726a989a 11522gimplify_type_sizes (tree type, gimple_seq *list_p)
44de5aeb 11523{
ad50bc8d
RH
11524 tree field, t;
11525
19dbbf36 11526 if (type == NULL || type == error_mark_node)
8e0a600b 11527 return;
ad50bc8d 11528
6c6cfbfd 11529 /* We first do the main variant, then copy into any other variants. */
ad50bc8d 11530 type = TYPE_MAIN_VARIANT (type);
44de5aeb 11531
8e0a600b 11532 /* Avoid infinite recursion. */
19dbbf36 11533 if (TYPE_SIZES_GIMPLIFIED (type))
8e0a600b
JJ
11534 return;
11535
11536 TYPE_SIZES_GIMPLIFIED (type) = 1;
11537
44de5aeb
RK
11538 switch (TREE_CODE (type))
11539 {
44de5aeb
RK
11540 case INTEGER_TYPE:
11541 case ENUMERAL_TYPE:
11542 case BOOLEAN_TYPE:
44de5aeb 11543 case REAL_TYPE:
325217ed 11544 case FIXED_POINT_TYPE:
65355d53
RH
11545 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
11546 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
ad50bc8d
RH
11547
11548 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
11549 {
11550 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
11551 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
ad50bc8d 11552 }
44de5aeb
RK
11553 break;
11554
11555 case ARRAY_TYPE:
ad50bc8d 11556 /* These types may not have declarations, so handle them here. */
8e0a600b
JJ
11557 gimplify_type_sizes (TREE_TYPE (type), list_p);
11558 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
2e957792
JJ
11559 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
11560 with assigned stack slots, for -O1+ -g they should be tracked
11561 by VTA. */
08d78391
EB
11562 if (!(TYPE_NAME (type)
11563 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
11564 && DECL_IGNORED_P (TYPE_NAME (type)))
11565 && TYPE_DOMAIN (type)
802e9f8e
JJ
11566 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
11567 {
11568 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
11569 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
11570 DECL_IGNORED_P (t) = 0;
11571 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
11572 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
11573 DECL_IGNORED_P (t) = 0;
11574 }
44de5aeb
RK
11575 break;
11576
11577 case RECORD_TYPE:
11578 case UNION_TYPE:
11579 case QUAL_UNION_TYPE:
910ad8de 11580 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
44de5aeb 11581 if (TREE_CODE (field) == FIELD_DECL)
8e0a600b
JJ
11582 {
11583 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
9a9ba8d9
JJ
11584 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
11585 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
8e0a600b
JJ
11586 gimplify_type_sizes (TREE_TYPE (field), list_p);
11587 }
11588 break;
11589
11590 case POINTER_TYPE:
11591 case REFERENCE_TYPE:
706c4bb7
OH
11592 /* We used to recurse on the pointed-to type here, which turned out to
11593 be incorrect because its definition might refer to variables not
11594 yet initialized at this point if a forward declaration is involved.
11595
11596 It was actually useful for anonymous pointed-to types to ensure
11597 that the sizes evaluation dominates every possible later use of the
11598 values. Restricting to such types here would be safe since there
f63645be
KH
11599 is no possible forward declaration around, but would introduce an
11600 undesirable middle-end semantic to anonymity. We then defer to
11601 front-ends the responsibility of ensuring that the sizes are
11602 evaluated both early and late enough, e.g. by attaching artificial
706c4bb7 11603 type declarations to the tree. */
44de5aeb
RK
11604 break;
11605
11606 default:
11607 break;
11608 }
11609
65355d53
RH
11610 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
11611 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
44de5aeb 11612
ad50bc8d 11613 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
b4830636 11614 {
ad50bc8d
RH
11615 TYPE_SIZE (t) = TYPE_SIZE (type);
11616 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
11617 TYPE_SIZES_GIMPLIFIED (t) = 1;
b4830636 11618 }
b4830636
RH
11619}
11620
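/* Illustrative sketch, not part of gimplify.c: gimplify_type_sizes above is
   what turns variable array bounds and sizes into gimplified values.  For a
   C99 VLA such as the one below, the bound of TYPE_DOMAIN (derived from
   "n + 1") and the TYPE_SIZE expressions are gimplified, with the needed
   statements added to LIST_P; the temporary names seen in dumps are
   compiler-generated.  */

void
use_vla (int n)
{
  int a[n + 1];			/* variable-sized domain and size */
  a[0] = 0;
}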
11621/* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
11622 a size or position, has had all of its SAVE_EXPRs evaluated.
726a989a 11623 We add any required statements to *STMT_P. */
44de5aeb
RK
11624
11625void
726a989a 11626gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
44de5aeb 11627{
3ac8781c 11628 tree expr = *expr_p;
a9c5ddf9 11629
44de5aeb 11630 /* We don't do anything if the value isn't there, is constant, or contains
1e748a2b 11631 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
aabcd309 11632 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
1e748a2b
RK
11633 will want to replace it with a new variable, but that will cause problems
11634 if this type is from outside the function. It's OK to have that here. */
848be094 11635 if (is_gimple_sizepos (expr))
44de5aeb
RK
11636 return;
11637
a9c5ddf9
RH
11638 *expr_p = unshare_expr (expr);
11639
381cdae4
RB
11640 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
11641 if the def vanishes. */
11642 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
44de5aeb 11643}
6de9cd9a 11644
3ad065ef
EB
11645/* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
11646 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
11647 is true, also gimplify the parameters. */
726a989a 11648
538dd0b7 11649gbind *
3ad065ef 11650gimplify_body (tree fndecl, bool do_parms)
6de9cd9a
DN
11651{
11652 location_t saved_location = input_location;
726a989a 11653 gimple_seq parm_stmts, seq;
355fe088 11654 gimple *outer_stmt;
538dd0b7 11655 gbind *outer_bind;
9f9ebcdf 11656 struct cgraph_node *cgn;
6de9cd9a
DN
11657
11658 timevar_push (TV_TREE_GIMPLIFY);
953ff289 11659
381cdae4
RB
11660 init_tree_ssa (cfun);
11661
f66d6761
SB
11662 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
11663 gimplification. */
11664 default_rtl_profile ();
11665
953ff289 11666 gcc_assert (gimplify_ctxp == NULL);
381cdae4 11667 push_gimplify_context (true);
6de9cd9a 11668
41dbbb37 11669 if (flag_openacc || flag_openmp)
acf0174b
JJ
11670 {
11671 gcc_assert (gimplify_omp_ctxp == NULL);
11672 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
11673 gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
11674 }
11675
44de5aeb
RK
11676 /* Unshare most shared trees in the body and in that of any nested functions.
11677 It would seem we don't have to do this for nested functions because
11678 they are supposed to be output and then the outer function gimplified
11679 first, but the g++ front end doesn't always do it that way. */
3ad065ef
EB
11680 unshare_body (fndecl);
11681 unvisit_body (fndecl);
6de9cd9a 11682
d52f5295 11683 cgn = cgraph_node::get (fndecl);
9f9ebcdf 11684 if (cgn && cgn->origin)
6e2830c3 11685 nonlocal_vlas = new hash_set<tree>;
77f2a970 11686
fa10beec 11687 /* Make sure input_location isn't set to something weird. */
6de9cd9a
DN
11688 input_location = DECL_SOURCE_LOCATION (fndecl);
11689
4744afba
RH
11690 /* Resolve callee-copies. This has to be done before processing
11691 the body so that DECL_VALUE_EXPR gets processed correctly. */
3ad065ef 11692 parm_stmts = do_parms ? gimplify_parameters () : NULL;
4744afba 11693
6de9cd9a 11694 /* Gimplify the function's body. */
726a989a 11695 seq = NULL;
3ad065ef 11696 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
538dd0b7
DM
11697 outer_stmt = gimple_seq_first_stmt (seq);
11698 if (!outer_stmt)
6de9cd9a 11699 {
538dd0b7
DM
11700 outer_stmt = gimple_build_nop ();
11701 gimplify_seq_add_stmt (&seq, outer_stmt);
6de9cd9a 11702 }
44de5aeb 11703
726a989a
RB
11704 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
11705 not the case, wrap everything in a GIMPLE_BIND to make it so. */
538dd0b7 11706 if (gimple_code (outer_stmt) == GIMPLE_BIND
726a989a 11707 && gimple_seq_first (seq) == gimple_seq_last (seq))
538dd0b7 11708 outer_bind = as_a <gbind *> (outer_stmt);
726a989a
RB
11709 else
11710 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
11711
3ad065ef 11712 DECL_SAVED_TREE (fndecl) = NULL_TREE;
4744afba
RH
11713
11714 /* If we had callee-copies statements, insert them at the beginning
f0c10f0f 11715 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
726a989a 11716 if (!gimple_seq_empty_p (parm_stmts))
4744afba 11717 {
f0c10f0f
RG
11718 tree parm;
11719
726a989a
RB
11720 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
11721 gimple_bind_set_body (outer_bind, parm_stmts);
f0c10f0f
RG
11722
11723 for (parm = DECL_ARGUMENTS (current_function_decl);
910ad8de 11724 parm; parm = DECL_CHAIN (parm))
f0c10f0f
RG
11725 if (DECL_HAS_VALUE_EXPR_P (parm))
11726 {
11727 DECL_HAS_VALUE_EXPR_P (parm) = 0;
11728 DECL_IGNORED_P (parm) = 0;
11729 }
4744afba
RH
11730 }
11731
77f2a970
JJ
11732 if (nonlocal_vlas)
11733 {
96ddb7ec
JJ
11734 if (nonlocal_vla_vars)
11735 {
11736 /* tree-nested.c may later on call declare_vars (..., true);
11737 which relies on BLOCK_VARS chain to be the tail of the
11738 gimple_bind_vars chain. Ensure we don't violate that
11739 assumption. */
11740 if (gimple_bind_block (outer_bind)
11741 == DECL_INITIAL (current_function_decl))
11742 declare_vars (nonlocal_vla_vars, outer_bind, true);
11743 else
11744 BLOCK_VARS (DECL_INITIAL (current_function_decl))
11745 = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
11746 nonlocal_vla_vars);
11747 nonlocal_vla_vars = NULL_TREE;
11748 }
6e2830c3 11749 delete nonlocal_vlas;
77f2a970
JJ
11750 nonlocal_vlas = NULL;
11751 }
11752
41dbbb37
TS
11753 if ((flag_openacc || flag_openmp || flag_openmp_simd)
11754 && gimplify_omp_ctxp)
acf0174b
JJ
11755 {
11756 delete_omp_context (gimplify_omp_ctxp);
11757 gimplify_omp_ctxp = NULL;
11758 }
11759
726a989a 11760 pop_gimplify_context (outer_bind);
953ff289 11761 gcc_assert (gimplify_ctxp == NULL);
6de9cd9a 11762
b2b29377 11763 if (flag_checking && !seen_error ())
34019e28 11764 verify_gimple_in_seq (gimple_bind_body (outer_bind));
6de9cd9a
DN
11765
11766 timevar_pop (TV_TREE_GIMPLIFY);
11767 input_location = saved_location;
726a989a
RB
11768
11769 return outer_bind;
6de9cd9a
DN
11770}
11771
6a1f6c9c 11772typedef char *char_p; /* For DEF_VEC_P. */
6a1f6c9c
JM
11773
11774/* Return whether we should exclude FNDECL from instrumentation. */
11775
11776static bool
11777flag_instrument_functions_exclude_p (tree fndecl)
11778{
9771b263 11779 vec<char_p> *v;
6a1f6c9c 11780
9771b263
DN
11781 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
11782 if (v && v->length () > 0)
6a1f6c9c
JM
11783 {
11784 const char *name;
11785 int i;
11786 char *s;
11787
11788 name = lang_hooks.decl_printable_name (fndecl, 0);
9771b263 11789 FOR_EACH_VEC_ELT (*v, i, s)
6a1f6c9c
JM
11790 if (strstr (name, s) != NULL)
11791 return true;
11792 }
11793
9771b263
DN
11794 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
11795 if (v && v->length () > 0)
6a1f6c9c
JM
11796 {
11797 const char *name;
11798 int i;
11799 char *s;
11800
11801 name = DECL_SOURCE_FILE (fndecl);
9771b263 11802 FOR_EACH_VEC_ELT (*v, i, s)
6a1f6c9c
JM
11803 if (strstr (name, s) != NULL)
11804 return true;
11805 }
11806
11807 return false;
11808}
11809
6de9cd9a 11810/* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
726a989a 11811 node for the function we want to gimplify.
b8698a0f 11812
ad19c4be 11813 The resulting sequence of GIMPLE statements is stored as the body
726a989a 11814 of FNDECL; this function itself returns nothing. */
6de9cd9a
DN
11815
11816void
11817gimplify_function_tree (tree fndecl)
11818{
af16bc76 11819 tree parm, ret;
726a989a 11820 gimple_seq seq;
538dd0b7 11821 gbind *bind;
6de9cd9a 11822
a406865a
RG
11823 gcc_assert (!gimple_body (fndecl));
11824
db2960f4
SL
11825 if (DECL_STRUCT_FUNCTION (fndecl))
11826 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
11827 else
11828 push_struct_function (fndecl);
6de9cd9a 11829
d67cb100
TV
11830 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
11831 if necessary. */
11832 cfun->curr_properties |= PROP_gimple_lva;
11833
910ad8de 11834 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
e41d82f5
RH
11835 {
11836 /* Preliminarily mark non-addressed complex variables as eligible
11837 for promotion to gimple registers. We'll transform their uses
11838 as we find them. */
0890b981
AP
11839 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
11840 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
e41d82f5
RH
11841 && !TREE_THIS_VOLATILE (parm)
11842 && !needs_to_live_in_memory (parm))
0890b981 11843 DECL_GIMPLE_REG_P (parm) = 1;
e41d82f5
RH
11844 }
11845
11846 ret = DECL_RESULT (fndecl);
0890b981 11847 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
7b7e6ecd 11848 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
e41d82f5 11849 && !needs_to_live_in_memory (ret))
0890b981 11850 DECL_GIMPLE_REG_P (ret) = 1;
e41d82f5 11851
3ad065ef 11852 bind = gimplify_body (fndecl, true);
726a989a
RB
11853
11854 /* The tree body of the function is no longer needed, replace it
11855 with the new GIMPLE body. */
355a7673 11856 seq = NULL;
726a989a
RB
11857 gimple_seq_add_stmt (&seq, bind);
11858 gimple_set_body (fndecl, seq);
6de9cd9a
DN
11859
11860 /* If we're instrumenting function entry/exit, then prepend the call to
11861 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
11862 catch the exit hook. */
11863 /* ??? Add some way to ignore exceptions for this TFE. */
11864 if (flag_instrument_function_entry_exit
8d5a7d1f
ILT
11865 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
11866 && !flag_instrument_functions_exclude_p (fndecl))
6de9cd9a 11867 {
726a989a 11868 tree x;
538dd0b7 11869 gbind *new_bind;
355fe088 11870 gimple *tf;
726a989a 11871 gimple_seq cleanup = NULL, body = NULL;
b01890ff 11872 tree tmp_var;
538dd0b7 11873 gcall *call;
b01890ff 11874
e79983f4 11875 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
59527282 11876 call = gimple_build_call (x, 1, integer_zero_node);
b01890ff
JH
11877 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
11878 gimple_call_set_lhs (call, tmp_var);
11879 gimplify_seq_add_stmt (&cleanup, call);
e79983f4 11880 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
b01890ff
JH
11881 call = gimple_build_call (x, 2,
11882 build_fold_addr_expr (current_function_decl),
11883 tmp_var);
11884 gimplify_seq_add_stmt (&cleanup, call);
726a989a 11885 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
6de9cd9a 11886
e79983f4 11887 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
59527282 11888 call = gimple_build_call (x, 1, integer_zero_node);
b01890ff
JH
11889 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
11890 gimple_call_set_lhs (call, tmp_var);
11891 gimplify_seq_add_stmt (&body, call);
e79983f4 11892 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
b01890ff
JH
11893 call = gimple_build_call (x, 2,
11894 build_fold_addr_expr (current_function_decl),
11895 tmp_var);
11896 gimplify_seq_add_stmt (&body, call);
726a989a 11897 gimplify_seq_add_stmt (&body, tf);
32001f69 11898 new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
726a989a
RB
11899 /* Clear the block for BIND, since it is no longer directly inside
11900 the function, but within a try block. */
32001f69 11901 gimple_bind_set_block (bind, NULL);
6de9cd9a 11902
726a989a
RB
11903 /* Replace the current function body with the body
11904 wrapped in the try/finally TF. */
355a7673 11905 seq = NULL;
726a989a
RB
11906 gimple_seq_add_stmt (&seq, new_bind);
11907 gimple_set_body (fndecl, seq);
fca4adf2
JJ
11908 bind = new_bind;
11909 }
11910
2f3c4b69
BE
11911 if ((flag_sanitize & SANITIZE_THREAD) != 0
11912 && !lookup_attribute ("no_sanitize_thread", DECL_ATTRIBUTES (fndecl)))
fca4adf2
JJ
11913 {
11914 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
355fe088 11915 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
fca4adf2
JJ
11916 gbind *new_bind = gimple_build_bind (NULL, tf, gimple_bind_block (bind));
11917 /* Clear the block for BIND, since it is no longer directly inside
11918 the function, but within a try block. */
11919 gimple_bind_set_block (bind, NULL);
11920 /* Replace the current function body with the body
11921 wrapped in the try/finally TF. */
11922 seq = NULL;
11923 gimple_seq_add_stmt (&seq, new_bind);
11924 gimple_set_body (fndecl, seq);
6de9cd9a
DN
11925 }
11926
726a989a 11927 DECL_SAVED_TREE (fndecl) = NULL_TREE;
d67cb100 11928 cfun->curr_properties |= PROP_gimple_any;
726a989a 11929
db2960f4 11930 pop_cfun ();
88d91afd
TV
11931
11932 dump_function (TDI_generic, fndecl);
6de9cd9a 11933}
726a989a 11934
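/* Illustrative sketch, not part of gimplify.c: with -finstrument-functions,
   the wrapping done in gimplify_function_tree above surrounds each body with
   a try/finally that calls the user-supplied hooks __cyg_profile_func_enter
   and __cyg_profile_func_exit, unless the function is excluded via
   flag_instrument_functions_exclude_p (the -finstrument-functions-exclude-*
   lists) or carries the attribute shown below.  Function names here are
   hypothetical.  */

void
traced (void)
{
  /* Body gets the entry hook prepended and the exit hook in a finally.  */
}

__attribute__ ((no_instrument_function))
void
not_traced (void)
{
  /* DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT is set, so no wrapping here.  */
}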
4a7cb16f
AM
11935/* Return a dummy expression of type TYPE in order to keep going after an
11936 error. */
b184c8f1 11937
4a7cb16f
AM
11938static tree
11939dummy_object (tree type)
b184c8f1 11940{
4a7cb16f
AM
11941 tree t = build_int_cst (build_pointer_type (type), 0);
11942 return build2 (MEM_REF, type, t, t);
b184c8f1
AM
11943}
11944
4a7cb16f
AM
11945/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
11946 builtin function, but a very special sort of operator. */
b184c8f1 11947
4a7cb16f 11948enum gimplify_status
f8e89441
TV
11949gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
11950 gimple_seq *post_p ATTRIBUTE_UNUSED)
4a7cb16f
AM
11951{
11952 tree promoted_type, have_va_type;
11953 tree valist = TREE_OPERAND (*expr_p, 0);
11954 tree type = TREE_TYPE (*expr_p);
33f0852f 11955 tree t, tag, aptag;
4a7cb16f 11956 location_t loc = EXPR_LOCATION (*expr_p);
b184c8f1 11957
4a7cb16f
AM
11958 /* Verify that valist is of the proper type. */
11959 have_va_type = TREE_TYPE (valist);
11960 if (have_va_type == error_mark_node)
11961 return GS_ERROR;
11962 have_va_type = targetm.canonical_va_list_type (have_va_type);
ba9bbd6f 11963 gcc_assert (have_va_type != NULL_TREE);
b184c8f1 11964
4a7cb16f
AM
11965 /* Generate a diagnostic for requesting data of a type that cannot
11966 be passed through `...' due to type promotion at the call site. */
11967 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
11968 != type)
11969 {
11970 static bool gave_help;
11971 bool warned;
bd02f00f
MP
11972 /* Use the expansion point to handle cases such as passing bool (defined
11973 in a system header) through `...'. */
11974 source_location xloc
11975 = expansion_point_location_if_in_system_header (loc);
b184c8f1 11976
4a7cb16f
AM
11977 /* Unfortunately, this is merely undefined, rather than a constraint
11978 violation, so we cannot make this an error. If this call is never
11979 executed, the program is still strictly conforming. */
bd02f00f
MP
11980 warned = warning_at (xloc, 0,
11981 "%qT is promoted to %qT when passed through %<...%>",
4a7cb16f
AM
11982 type, promoted_type);
11983 if (!gave_help && warned)
11984 {
11985 gave_help = true;
bd02f00f 11986 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
4a7cb16f
AM
11987 promoted_type, type);
11988 }
b184c8f1 11989
4a7cb16f
AM
11990 /* We can, however, treat "undefined" any way we please.
11991 Call abort to encourage the user to fix the program. */
11992 if (warned)
bd02f00f 11993 inform (xloc, "if this code is reached, the program will abort");
4a7cb16f
AM
11994 /* Before the abort, allow the evaluation of the va_list
11995 expression to exit or longjmp. */
11996 gimplify_and_add (valist, pre_p);
11997 t = build_call_expr_loc (loc,
11998 builtin_decl_implicit (BUILT_IN_TRAP), 0);
b184c8f1
AM
11999 gimplify_and_add (t, pre_p);
12000
4a7cb16f
AM
12001 /* This is dead code, but go ahead and finish so that the
12002 mode of the result comes out right. */
12003 *expr_p = dummy_object (type);
12004 return GS_ALL_DONE;
b184c8f1 12005 }
b184c8f1 12006
f8e89441 12007 tag = build_int_cst (build_pointer_type (type), 0);
33f0852f
JJ
12008 aptag = build_int_cst (TREE_TYPE (valist), 0);
12009
12010 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
12011 valist, tag, aptag);
b184c8f1 12012
d67cb100
TV
12013 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
12014 needs to be expanded. */
12015 cfun->curr_properties &= ~PROP_gimple_lva;
12016
f8e89441 12017 return GS_OK;
b184c8f1 12018}
bcf71673 12019
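/* Illustrative sketch, not part of gimplify.c: gimplify_va_arg_expr above
   warns when the requested type is one that is promoted when passed through
   `...'.  The portable form reads the promoted type, as below; writing
   va_arg (ap, char) or va_arg (ap, float) instead would trigger the warning
   and the trap call generated above.  */

#include <stdarg.h>

int
first_vararg (int count, ...)
{
  va_list ap;
  int c;

  va_start (ap, count);
  c = va_arg (ap, int);		/* char arguments arrive promoted to int */
  va_end (ap);
  return c;
}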
45b0be94
AM
12020/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
12021
12022 DST/SRC are the destination and source respectively. You can pass
12023 ungimplified trees in DST or SRC, in which case they will be
12024 converted to a gimple operand if necessary.
12025
12026 This function returns the newly created GIMPLE_ASSIGN tuple. */
12027
355fe088 12028gimple *
45b0be94
AM
12029gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
12030{
12031 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12032 gimplify_and_add (t, seq_p);
12033 ggc_free (t);
12034 return gimple_seq_last_stmt (*seq_p);
12035}
12036
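/* Minimal usage sketch (hypothetical helper, not part of gimplify.c):
   gimplify_assign above accepts possibly ungimplified DST/SRC, appends the
   statements it needs to SEQ_P, and returns the final GIMPLE_ASSIGN.  */

static gimple *
copy_into_new_tmp (tree val, gimple_seq *seq_p)
{
  tree tmp = create_tmp_var (TREE_TYPE (val), "copy");	/* fresh temporary */
  return gimplify_assign (tmp, val, seq_p);		/* tmp = val */
}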
18f429e2 12037inline hashval_t
67f58944 12038gimplify_hasher::hash (const elt_t *p)
18f429e2
AM
12039{
12040 tree t = p->val;
12041 return iterative_hash_expr (t, 0);
12042}
12043
12044inline bool
67f58944 12045gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
18f429e2
AM
12046{
12047 tree t1 = p1->val;
12048 tree t2 = p2->val;
12049 enum tree_code code = TREE_CODE (t1);
12050
12051 if (TREE_CODE (t2) != code
12052 || TREE_TYPE (t1) != TREE_TYPE (t2))
12053 return false;
12054
12055 if (!operand_equal_p (t1, t2, 0))
12056 return false;
12057
18f429e2
AM
12058 /* Only allow them to compare equal if they also hash equal; otherwise
 12059 results are nondeterministic, and we fail bootstrap comparison. */
b2b29377 12060 gcc_checking_assert (hash (p1) == hash (p2));
18f429e2
AM
12061
12062 return true;
12063}