/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2022 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "tm_p.h"
#include "gimple.h"
#include "gimple-predict.h"
#include "tree-pass.h"  /* FIXME: only for PROP_gimple_any */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "tree-hash-traits.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "gomp-constants.h"
#include "splay-tree.h"
#include "gimple-walk.h"
#include "langhooks-def.h"  /* FIXME: for lhd_set_decl_assembler_name */
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "dbgcnt.h"
#include "omp-offload.h"
#include "context.h"
#include "tree-nested.h"

/* Hash set of poisoned variables in a bind expr.  */
static hash_set<tree> *asan_poisoned_variables = NULL;

enum gimplify_omp_var_data
{
  GOVD_SEEN = 0x000001,
  GOVD_EXPLICIT = 0x000002,
  GOVD_SHARED = 0x000004,
  GOVD_PRIVATE = 0x000008,
  GOVD_FIRSTPRIVATE = 0x000010,
  GOVD_LASTPRIVATE = 0x000020,
  GOVD_REDUCTION = 0x000040,
  GOVD_LOCAL = 0x000080,
  GOVD_MAP = 0x000100,
  GOVD_DEBUG_PRIVATE = 0x000200,
  GOVD_PRIVATE_OUTER_REF = 0x000400,
  GOVD_LINEAR = 0x000800,
  GOVD_ALIGNED = 0x001000,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 0x002000,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,

  GOVD_MAP_0LEN_ARRAY = 0x008000,

  /* Flag for GOVD_MAP: the mapping is 'always,to' or 'always,tofrom'.  */
  GOVD_MAP_ALWAYS_TO = 0x010000,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 0x020000,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 0x040000,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 0x080000,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 0x100000,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 0x200000,

  GOVD_NONTEMPORAL = 0x400000,

  /* Flag for GOVD_LASTPRIVATE: conditional modifier.  */
  GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,

  GOVD_CONDTEMP = 0x1000000,

  /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause.  */
  GOVD_REDUCTION_INSCAN = 0x2000000,

  /* Flag for GOVD_FIRSTPRIVATE: OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT.  */
  GOVD_FIRSTPRIVATE_IMPLICIT = 0x4000000,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
                           | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
                           | GOVD_LOCAL)
};


enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_SIMD = 0x04,

  ORT_PARALLEL = 0x08,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK = 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS = 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET = 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,
  ORT_IMPLICIT_TARGET = ORT_TARGET | 2,

  /* OpenACC variants.  */
  ORT_ACC = 0x100,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct.  */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2,  /* Kernels construct.  */
  ORT_ACC_SERIAL = ORT_ACC | ORT_TARGET | 4,  /* Serial construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};

/* Gimplify hashtable helper.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gbind *> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
};

enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_SCALAR_TARGET,  /* With Fortran's TARGET attribute; implicit
                          mapping only.  */
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;
  tree clauses;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  enum tree_code code;
  bool combined_loop;
  bool distribute;
  bool target_firstprivatize_array_bases;
  bool add_safelen1;
  bool order_concurrent;
  bool has_depend;
  bool in_for_exprs;
  int defaultmap[5];
};

static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
static bool in_omp_construct;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
                                           bool (*) (tree), fallback_t, bool);
static void prepare_gimple_addressable (tree *, gimple_seq *);

/* Shorter alias name for the above function for use in gimplify.cc
   only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}


/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;

/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx * c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}

/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}

/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}


/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}

/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
              && (!c->bind_expr_stack.exists ()
                  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}

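/* As an illustration, a caller that gimplifies a free-standing tree T
   typically brackets the work like so:

     push_gimplify_context ();
     gimple_seq seq = NULL;
     gimplify_stmt (&t, &seq);
     pop_gimplify_context (NULL);

   Temporaries created while gimplifying T are then recorded in the
   function's local_decls by the NULL-body pop above.  */
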
/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the first element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the first element of the stack of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}

/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

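/* Note that comparing DECL_UIDs rather than the pointers themselves
   gives an ordering that is stable from run to run; ordering by
   address would vary with memory layout and could make the
   gimplifier's output nondeterministic.  */
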
/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
  c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
  c->defaultmap[GDMK_SCALAR_TARGET] = GOVD_MAP;
  c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
  c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
  c->defaultmap[GDMK_POINTER] = GOVD_MAP;

  return c;
}

/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}

static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}

/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
          || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t)
            || is_gimple_lvalue (t)
            || TREE_CLOBBER_P (t)
            || TREE_CODE (t) == CALL_EXPR);
}

/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  return var;
}

/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  If NOT_GIMPLE_REG, mark it as such.  */

static tree
lookup_tmp_var (tree val, bool is_formal, bool not_gimple_reg)
{
  tree ret;

  /* We cannot mark a formal temporary with DECL_NOT_GIMPLE_REG_P.  */
  gcc_assert (!is_formal || !not_gimple_reg);

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    {
      ret = create_tmp_from_val (val);
      DECL_NOT_GIMPLE_REG_P (ret) = not_gimple_reg;
    }
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
        gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
        {
          elt_p = XNEW (elt_t);
          elt_p->val = val;
          elt_p->temp = ret = create_tmp_from_val (val);
          *slot = elt_p;
        }
      else
        {
          elt_p = *slot;
          ret = elt_p->temp;
        }
    }

  return ret;
}

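/* For instance, when optimizing, two formal-temporary requests for
   structurally identical values hit the same hash-table slot, so a
   value such as "a + b" gimplified twice in one context may come back
   as a single shared temporary

     D.1234 = a + b;

   rather than as two distinct temporaries.  */
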
/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
                      bool is_formal, bool allow_ssa, bool not_gimple_reg)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
                 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
        {
          const char *name = get_name (val);
          if (name)
            SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
        }
    }
  else
    t = lookup_tmp_var (val, is_formal, not_gimple_reg);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}

/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true, false);
}

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p,
                         gimple_seq *post_p /* = NULL */,
                         bool allow_ssa /* = true */)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa, false);
}

/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
        {
          DECL_CHAIN (last) = gimple_bind_vars (scope);
          gimple_bind_set_vars (scope, temps);
        }
      else
        {
          /* We need to attach the nodes both to the BIND_EXPR and to its
             associated BLOCK for debugging purposes.  The key point here
             is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
             is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
          if (BLOCK_VARS (block))
            BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
          else
            {
              gimple_bind_set_vars (scope,
                                    chainon (gimple_bind_vars (scope), temps));
              BLOCK_VARS (block) = temps;
            }
        }
    }
}

/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (VAR_P (var));

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
        {
          struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
          int flag = GOVD_LOCAL | GOVD_SEEN;
          while (ctx
                 && (ctx->region_type == ORT_WORKSHARE
                     || ctx->region_type == ORT_TASKGROUP
                     || ctx->region_type == ORT_SIMD
                     || ctx->region_type == ORT_ACC))
            {
              if (ctx->region_type == ORT_SIMD
                  && TREE_ADDRESSABLE (tmp)
                  && !TREE_STATIC (tmp))
                {
                  if (TREE_CODE (DECL_SIZE_UNIT (tmp)) != INTEGER_CST)
                    ctx->add_safelen1 = true;
                  else if (ctx->in_for_exprs)
                    flag = GOVD_PRIVATE;
                  else
                    flag = GOVD_PRIVATE | GOVD_SEEN;
                  break;
                }
              ctx = ctx->outer_context;
            }
          if (ctx)
            omp_add_variable (ctx, tmp, flag);
        }
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
         they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}


\f
/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only few specific cases of node sharing across functions, it is
   probably easier for a front-end to unshare the expressions manually.  On the
   contrary, if the expressions generated at the global level are as widespread
   as expressions generated within functions, deep unsharing is very likely the
   way to go.  */

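/* As a concrete example of why unsharing matters: a front-end may reuse
   one tree for the size expression of two array types.  If gimplification
   rewrote that shared node in place for the first type, the second type
   would silently see the half-gimplified form instead of the original
   GENERIC, so the shared node must be duplicated first.  */
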
/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
        ;
      else
        *walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
           || TREE_CODE_CLASS (code) == tcc_declaration
           || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}

/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
        *walk_subtrees = 0;
      else
        TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}

/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}

/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  if (cgn)
    for (cgn = first_nested_function (cgn); cgn;
         cgn = next_nested_function (cgn))
      unshare_body (cgn->decl);
}

/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}

/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = first_nested_function (cgn);
         cgn; cgn = next_nested_function (cgn))
      unvisit_body (cgn->decl);
}

/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}

/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}

/* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
   one, OR_ELSE otherwise.  The location of a STATEMENT_LIST
   comprising at least one DEBUG_BEGIN_STMT followed by exactly one
   EXPR is the location of that EXPR.  */

static location_t
rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
{
  if (!expr)
    return or_else;

  if (EXPR_HAS_LOCATION (expr))
    return EXPR_LOCATION (expr);

  if (TREE_CODE (expr) != STATEMENT_LIST)
    return or_else;

  tree_stmt_iterator i = tsi_start (expr);

  bool found = false;
  while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
    {
      found = true;
      tsi_next (&i);
    }

  if (!found || !tsi_one_before_end_p (i))
    return or_else;

  return rexpr_location (tsi_stmt (i), or_else);
}

/* Return TRUE iff EXPR (maybe recursively) has a location; see
   rexpr_location for the potential recursion.  */

static inline bool
rexpr_has_location (tree expr)
{
  return rexpr_location (expr) != UNKNOWN_LOCATION;
}

\f
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
         something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
        {
          switch (TREE_CODE (*p))
            {
            case BIND_EXPR:
              TREE_SIDE_EFFECTS (*p) = 1;
              TREE_TYPE (*p) = void_type_node;
              /* For a BIND_EXPR, the body is operand 1.  */
              p = &BIND_EXPR_BODY (*p);
              break;

            case CLEANUP_POINT_EXPR:
            case TRY_FINALLY_EXPR:
            case TRY_CATCH_EXPR:
              TREE_SIDE_EFFECTS (*p) = 1;
              TREE_TYPE (*p) = void_type_node;
              p = &TREE_OPERAND (*p, 0);
              break;

            case STATEMENT_LIST:
              {
                tree_stmt_iterator i = tsi_last (*p);
                TREE_SIDE_EFFECTS (*p) = 1;
                TREE_TYPE (*p) = void_type_node;
                p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
              }
              break;

            case COMPOUND_EXPR:
              /* Advance to the last statement.  Set all container types to
                 void.  */
              for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
                {
                  TREE_SIDE_EFFECTS (*p) = 1;
                  TREE_TYPE (*p) = void_type_node;
                }
              break;

            case TRANSACTION_EXPR:
              TREE_SIDE_EFFECTS (*p) = 1;
              TREE_TYPE (*p) = void_type_node;
              p = &TRANSACTION_EXPR_BODY (*p);
              break;

            default:
              /* Assume that any tree upon which voidify_wrapper_expr is
                 directly called is a wrapper, and that its body is op0.  */
              if (p == &wrapper)
                {
                  TREE_SIDE_EFFECTS (*p) = 1;
                  TREE_TYPE (*p) = void_type_node;
                  p = &TREE_OPERAND (*p, 0);
                  break;
                }
              goto out;
            }
        }

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
        temp = NULL_TREE;
      else if (temp)
        {
          /* The wrapper is on the RHS of an assignment that we're pushing
             down.  */
          gcc_assert (TREE_CODE (temp) == INIT_EXPR
                      || TREE_CODE (temp) == MODIFY_EXPR);
          TREE_OPERAND (temp, 1) = *p;
          *p = temp;
        }
      else
        {
          temp = create_tmp_var (type, "retval");
          *p = build2 (INIT_EXPR, type, temp, *p);
        }

      return temp;
    }

  return NULL_TREE;
}

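/* For example, a GNU statement expression used as an rvalue, roughly
   "x = ({ foo (); y; })", reaches here as a wrapper whose last
   statement is "y".  voidify_wrapper_expr gives the wrapper void type
   and rewrites that last statement into "retval = y" (or pushes an
   existing assignment down onto it), returning the temporary so the
   caller can use it in place of the wrapper's value.  */
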
/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
                         1, tmp_var);
}

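/* In a GIMPLE dump the resulting pair looks roughly like

     saved_stack.1 = __builtin_stack_save ();
     ...
     __builtin_stack_restore (saved_stack.1);

   with the caller responsible for placing the two calls around the
   region whose stack allocation should be reclaimed.  */
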
/* Generate an IFN_ASAN_MARK call that poisons the shadow memory of
   variable DECL.  */

static tree
build_asan_poison_call_expr (tree decl)
{
  /* Do not poison variables that have size equal to zero.  */
  tree unit_size = DECL_SIZE_UNIT (decl);
  if (zerop (unit_size))
    return NULL_TREE;

  tree base = build_fold_addr_expr (decl);

  return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
                                       void_type_node, 3,
                                       build_int_cst (integer_type_node,
                                                      ASAN_MARK_POISON),
                                       base, unit_size);
}

/* Generate an IFN_ASAN_MARK call that poisons or unpoisons, depending
   on the POISON flag, the shadow memory of variable DECL.  The call is
   inserted at the location identified by iterator IT; the BEFORE flag
   selects whether it goes before or after that position.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
                      bool before)
{
  tree unit_size = DECL_SIZE_UNIT (decl);
  tree base = build_fold_addr_expr (decl);

  /* Do not poison variables that have size equal to zero.  */
  if (zerop (unit_size))
    return;

  /* It's necessary to have all stack variables aligned to ASAN granularity
     bytes.  */
  gcc_assert (!hwasan_sanitize_p () || hwasan_sanitize_stack_p ());
  unsigned shadow_granularity
    = hwasan_sanitize_p () ? HWASAN_TAG_GRANULE_SIZE : ASAN_SHADOW_GRANULARITY;
  if (DECL_ALIGN_UNIT (decl) <= shadow_granularity)
    SET_DECL_ALIGN (decl, BITS_PER_UNIT * shadow_granularity);

  HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;

  gimple *g
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
                                  build_int_cst (integer_type_node, flags),
                                  base, unit_size);

  if (before)
    gsi_insert_before (it, g, GSI_NEW_STMT);
  else
    gsi_insert_after (it, g, GSI_NEW_STMT);
}

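/* In GIMPLE dumps such a call appears along the lines of

     .ASAN_MARK (POISON, &x, 4);

   telling the sanitizer runtime that the 4-byte variable x has left
   (or, with UNPOISON, entered) its live scope.  */
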
/* Generate an IFN_ASAN_MARK internal call that, depending on the POISON
   flag, either poisons or unpoisons DECL.  The created statement is
   appended to the SEQ_P gimple sequence.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
{
  gimple_stmt_iterator it = gsi_last (*seq_p);
  bool before = false;

  if (gsi_end_p (it))
    before = true;

  asan_poison_variable (decl, poison, &it, before);
}

/* Sort pair of VAR_DECLs A and B by DECL_UID.  */

static int
sort_by_decl_uid (const void *a, const void *b)
{
  const tree *t1 = (const tree *)a;
  const tree *t2 = (const tree *)b;

  int uid1 = DECL_UID (*t1);
  int uid2 = DECL_UID (*t2);

  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  else
    return 0;
}

/* Generate an IFN_ASAN_MARK internal call for each variable in VARIABLES,
   poisoning or unpoisoning according to the POISON flag.  The created
   statements are appended to the SEQ_P gimple sequence.  */

static void
asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
{
  unsigned c = variables->elements ();
  if (c == 0)
    return;

  auto_vec<tree> sorted_variables (c);

  for (hash_set<tree>::iterator it = variables->begin ();
       it != variables->end (); ++it)
    sorted_variables.safe_push (*it);

  sorted_variables.qsort (sort_by_decl_uid);

  unsigned i;
  tree var;
  FOR_EACH_VEC_ELT (sorted_variables, i, var)
    {
      asan_poison_variable (var, poison, seq_p);

      /* Add the use_after_scope_memory attribute to the variable so
         that it is not rewritten into SSA form.  */
      if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
                             DECL_ATTRIBUTES (var)))
        DECL_ATTRIBUTES (var)
          = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
                       integer_one_node,
                       DECL_ATTRIBUTES (var));
    }
}

/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
        {
          struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

          /* Mark variable as local.  */
          if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t))
            {
              if (! DECL_SEEN_IN_BIND_EXPR_P (t)
                  || splay_tree_lookup (ctx->variables,
                                        (splay_tree_key) t) == NULL)
                {
                  int flag = GOVD_LOCAL;
                  if (ctx->region_type == ORT_SIMD
                      && TREE_ADDRESSABLE (t)
                      && !TREE_STATIC (t))
                    {
                      if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
                        ctx->add_safelen1 = true;
                      else
                        flag = GOVD_PRIVATE;
                    }
                  omp_add_variable (ctx, t, flag | GOVD_SEEN);
                }
              /* Static locals inside of target construct or offloaded
                 routines need to be "omp declare target".  */
              if (TREE_STATIC (t))
                for (; ctx; ctx = ctx->outer_context)
                  if ((ctx->region_type & ORT_TARGET) != 0)
                    {
                      if (!lookup_attribute ("omp declare target",
                                             DECL_ATTRIBUTES (t)))
                        {
                          tree id = get_identifier ("omp declare target");
                          DECL_ATTRIBUTES (t)
                            = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (t));
                          varpool_node *node = varpool_node::get (t);
                          if (node)
                            {
                              node->offloadable = 1;
                              if (ENABLE_OFFLOADING && !DECL_EXTERNAL (t))
                                {
                                  g->have_offload = true;
                                  if (!in_lto_p)
                                    vec_safe_push (offload_vars, t);
                                }
                            }
                        }
                      break;
                    }
            }

          DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

          if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
            cfun->has_local_explicit_reg_vars = true;
        }
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
                                 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
         block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
          && !is_global_var (t)
          && DECL_CONTEXT (t) == current_function_decl)
        {
          if (!DECL_HARD_REGISTER (t)
              && !TREE_THIS_VOLATILE (t)
              && !DECL_HAS_VALUE_EXPR_P (t)
              /* Only care for variables that have to be in memory.  Others
                 will be rewritten into SSA names, hence moved to the
                 top-level.  */
              && !is_gimple_reg (t)
              && flag_stack_reuse != SR_NONE)
            {
              tree clobber = build_clobber (TREE_TYPE (t), CLOBBER_EOL);
              gimple *clobber_stmt;
              clobber_stmt = gimple_build_assign (t, clobber);
              gimple_set_location (clobber_stmt, end_locus);
              gimplify_seq_add_stmt (&cleanup, clobber_stmt);
            }

          if (flag_openacc && oacc_declare_returns != NULL)
            {
              tree key = t;
              if (DECL_HAS_VALUE_EXPR_P (key))
                {
                  key = DECL_VALUE_EXPR (key);
                  if (TREE_CODE (key) == INDIRECT_REF)
                    key = TREE_OPERAND (key, 0);
                }
              tree *c = oacc_declare_returns->get (key);
              if (c != NULL)
                {
                  if (ret_clauses)
                    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

                  ret_clauses = unshare_expr (*c);

                  oacc_declare_returns->remove (key);

                  if (oacc_declare_returns->is_empty ())
                    {
                      delete oacc_declare_returns;
                      oacc_declare_returns = NULL;
                    }
                }
            }
        }

      if (asan_poisoned_variables != NULL
          && asan_poisoned_variables->contains (t))
        {
          asan_poisoned_variables->remove (t);
          asan_poison_variable (t, true, &cleanup);
        }

      if (gimplify_ctxp->live_switch_vars != NULL
          && gimplify_ctxp->live_switch_vars->contains (t))
        gimplify_ctxp->live_switch_vars->remove (t);
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
                                      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
                             GIMPLE_TRY_FINALLY);

      if (stack_save)
        gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}

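/* For a block that needs both a stack save/restore and end-of-scope
   clobbers, the GIMPLE_BIND built above has roughly this shape:

     saved_stack.1 = __builtin_stack_save ();
     try
       {
         <gimplified body>
       }
     finally
       {
         __builtin_stack_restore (saved_stack.1);
         x = {CLOBBER(eol)};
       }

   so the cleanups run on every exit path out of the block.  */
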
/* Maybe add early return predict statement to PRE_P sequence.  */

static void
maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
{
  /* If we are in a conditional context, add a PREDICT statement.  */
  if (gimple_conditional_context ())
    {
      gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
                                              NOT_TAKEN);
      gimplify_seq_add_stmt (pre_p, predict);
    }
}

/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL)
    {
      maybe_add_early_return_predict_stmt (pre_p);
      greturn *ret = gimple_build_return (ret_expr);
      copy_warning (ret, stmt);
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else if (TREE_CODE (ret_expr) == COMPOUND_EXPR)
    {
      /* Used in C++ for handling EH cleanup of the return value if a local
         cleanup throws.  Assume the front-end knows what it's doing.  */
      result_decl = DECL_RESULT (current_function_decl);
      /* But crash if we end up trying to modify ret_expr below.  */
      ret_expr = NULL_TREE;
    }
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (TREE_CODE (result_decl) == INDIRECT_REF)
        result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
                   || TREE_CODE (ret_expr) == INIT_EXPR)
                  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (!poly_int_tree_p (DECL_SIZE (result_decl)))
        {
          if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
            gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
          /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
             should be effectively allocated by the caller, i.e. all calls to
             this function must be subject to the Return Slot Optimization.  */
          gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
          gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
        }
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl));

      /* ??? With complex control flow (usually involving abnormal edges),
         we can wind up warning about an uninitialized value for this.  Due
         to how this variable is constructed and initialized, this is never
         true.  Give up and never warn.  */
      suppress_warning (result, OPT_Wuninitialized);

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  maybe_add_early_return_predict_stmt (pre_p);
  ret = gimple_build_return (result);
  copy_warning (ret, stmt);
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}

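/* As an example of the common (non-aggregate) path above, a GENERIC
   statement "return a + b;" arrives as RETURN_EXPR <MODIFY_EXPR
   <RESULT_DECL, a + b>> and is lowered to roughly

     retval.0 = a + b;
     return retval.0;

   with retval.0 cached in gimplify_ctxp->return_temp so every return
   statement in the function reuses the same temporary.  */
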
/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
                              max_int_size_in_bytes (TREE_TYPE (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);

  /* Record the dynamic allocation associated with DECL if requested.  */
  if (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC)
    record_dynamic_alloc (decl);
}

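/* For a C declaration such as "int a[n];" the net effect is roughly

     a.1 = __builtin_alloca_with_align (n * 4, 32);

   with DECL_VALUE_EXPR (a) set to "*a.1", so later references to the
   array go through the pointer temporary.  (The exact size, alignment
   and builtin variant depend on the target and on the type.)  */
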
/* A helper function to be called via walk_tree.  Mark all labels under *TP
   as being forced.  To be called for DECL_INITIAL of static variables.  */

static tree
force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  if (TREE_CODE (*tp) == LABEL_DECL)
    {
      FORCED_LABEL (*tp) = 1;
      cfun->has_forced_label_in_static = 1;
    }

  return NULL_TREE;
}

/* Generate an initialization to automatic variable DECL based on INIT_TYPE.
   Build a call to internal const function DEFERRED_INIT:
   1st argument: SIZE of the DECL;
   2nd argument: INIT_TYPE;
   3rd argument: NAME of the DECL;

   as LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, NAME of the DECL).  */

static void
gimple_add_init_for_auto_var (tree decl,
                              enum auto_init_type init_type,
                              gimple_seq *seq_p)
{
  gcc_assert (auto_var_p (decl));
  gcc_assert (init_type > AUTO_INIT_UNINITIALIZED);
  location_t loc = EXPR_LOCATION (decl);
  tree decl_size = TYPE_SIZE_UNIT (TREE_TYPE (decl));

  tree init_type_node
    = build_int_cst (integer_type_node, (int) init_type);

  tree decl_name = NULL_TREE;
  if (DECL_NAME (decl))
    decl_name = build_string_literal (IDENTIFIER_LENGTH (DECL_NAME (decl)) + 1,
                                      IDENTIFIER_POINTER (DECL_NAME (decl)));
  else
    {
      char *decl_name_anonymous = xasprintf ("D.%u", DECL_UID (decl));
      decl_name = build_string_literal (strlen (decl_name_anonymous) + 1,
                                        decl_name_anonymous);
      free (decl_name_anonymous);
    }

  tree call = build_call_expr_internal_loc (loc, IFN_DEFERRED_INIT,
                                            TREE_TYPE (decl), 3,
                                            decl_size, init_type_node,
                                            decl_name);

  gimplify_assign (decl, call, seq_p);
}

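/* With -ftrivial-auto-var-init=zero, for instance, a 4-byte local "x"
   is initialized by a statement that dumps roughly as

     x = .DEFERRED_INIT (4, 2, &"x"[0]);

   which a later pass expands into an actual zero (or pattern) store;
   keeping it as a call this long preserves the variable's
   "uninitialized" status for warning analysis.  */
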
/* Generate padding initialization for automatic variable DECL.
   C guarantees that brace-initialization with fewer initializers than
   members will initialize the rest of the aggregate as if it were
   static initialization, and static initialization in turn guarantees
   that padding is initialized to zero bits.  So, we always initialize
   the padding to zeroes regardless of INIT_TYPE.
   To do the padding initialization, we insert a call to
   __builtin_clear_padding (&decl, for_auto_init = true).
   Note that we add an additional dummy argument 'for_auto_init' for
   __builtin_clear_padding to distinguish whether this call is for
   automatic variable initialization or not.  */
static void
gimple_add_padding_init_for_auto_var (tree decl, bool is_vla,
                                      gimple_seq *seq_p)
{
  tree addr_of_decl = NULL_TREE;
  tree fn = builtin_decl_explicit (BUILT_IN_CLEAR_PADDING);

  if (is_vla)
    {
      /* The temporary address variable for this vla should be
         created in gimplify_vla_decl.  */
      gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
      gcc_assert (TREE_CODE (DECL_VALUE_EXPR (decl)) == INDIRECT_REF);
      addr_of_decl = TREE_OPERAND (DECL_VALUE_EXPR (decl), 0);
    }
  else
    {
      mark_addressable (decl);
      addr_of_decl = build_fold_addr_expr (decl);
    }

  gimple *call = gimple_build_call (fn, 2, addr_of_decl,
                                    build_one_cst (TREE_TYPE (addr_of_decl)));
  gimplify_seq_add_stmt (seq_p, call);
}

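/* For a padded struct such as "struct { char c; int i; } s;" under
   -ftrivial-auto-var-init, the emitted statement dumps roughly as

     __builtin_clear_padding (&s, 1);

   zeroing only the bytes between the members; the trailing constant is
   the 'for_auto_init' marker described above.  */
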
/* Return true if DECL needs to be automatically initialized by the
   compiler.  */
1833 static bool
1834 is_var_need_auto_init (tree decl)
1835 {
1836 if (auto_var_p (decl)
1837 && (TREE_CODE (decl) != VAR_DECL
1838 || !DECL_HARD_REGISTER (decl))
1839 && (flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
1840 && (!lookup_attribute ("uninitialized", DECL_ATTRIBUTES (decl)))
1841 && !OPAQUE_TYPE_P (TREE_TYPE (decl))
1842 && !is_empty_type (TREE_TYPE (decl)))
1843 return true;
1844 return false;
1845 }
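/* Illustrative sketch: the "uninitialized" attribute checked above lets
   a user opt a single variable out of -ftrivial-auto-var-init, e.g.

       int scratch __attribute__ ((uninitialized));

   for which this predicate returns false and no .DEFERRED_INIT call
   is emitted. */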
1846
1847 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1848 and initialization explicit. */
1849
1850 static enum gimplify_status
1851 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1852 {
1853 tree stmt = *stmt_p;
1854 tree decl = DECL_EXPR_DECL (stmt);
1855
1856 *stmt_p = NULL_TREE;
1857
1858 if (TREE_TYPE (decl) == error_mark_node)
1859 return GS_ERROR;
1860
1861 if ((TREE_CODE (decl) == TYPE_DECL
1862 || VAR_P (decl))
1863 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1864 {
1865 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1866 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1867 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
1868 }
1869
1870 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1871 in case its size expressions contain problematic nodes like CALL_EXPR. */
1872 if (TREE_CODE (decl) == TYPE_DECL
1873 && DECL_ORIGINAL_TYPE (decl)
1874 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1875 {
1876 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1877 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
1878 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
1879 }
1880
1881 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
1882 {
1883 tree init = DECL_INITIAL (decl);
1884 bool is_vla = false;
1885 /* Check whether a decl has FE created VALUE_EXPR here BEFORE
1886 gimplify_vla_decl creates VALUE_EXPR for a vla decl.
1887 If the decl has a VALUE_EXPR that was created by the FE (usually
1888 the C++ FE), it's a proxy variable, and the FE already initialized
1889 its VALUE_EXPR; we should not initialize it again. */
1890 bool decl_had_value_expr_p = DECL_HAS_VALUE_EXPR_P (decl);
1891
1892 poly_uint64 size;
1893 if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &size)
1894 || (!TREE_STATIC (decl)
1895 && flag_stack_check == GENERIC_STACK_CHECK
1896 && maybe_gt (size,
1897 (unsigned HOST_WIDE_INT) STACK_CHECK_MAX_VAR_SIZE)))
1898 {
1899 gimplify_vla_decl (decl, seq_p);
1900 is_vla = true;
1901 }
1902
1903 if (asan_poisoned_variables
1904 && !is_vla
1905 && TREE_ADDRESSABLE (decl)
1906 && !TREE_STATIC (decl)
1907 && !DECL_HAS_VALUE_EXPR_P (decl)
1908 && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
1909 && dbg_cnt (asan_use_after_scope)
1910 && !gimplify_omp_ctxp
1911 /* GNAT introduces temporaries to hold return values of calls in
1912 initializers of variables defined in other units, so the
1913 declaration of the variable is discarded completely. We do not
1914 want to issue poison calls for such dropped variables. */
1915 && (DECL_SEEN_IN_BIND_EXPR_P (decl)
1916 || (DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)))
1917 {
1918 asan_poisoned_variables->add (decl);
1919 asan_poison_variable (decl, false, seq_p);
1920 if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
1921 gimplify_ctxp->live_switch_vars->add (decl);
1922 }
1923
1924 /* Some front ends do not explicitly declare all anonymous
1925 artificial variables. We compensate here by declaring the
1926 variables, though it would be better if the front ends would
1927 explicitly declare them. */
1928 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1929 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1930 gimple_add_tmp_var (decl);
1931
1932 if (init && init != error_mark_node)
1933 {
1934 if (!TREE_STATIC (decl))
1935 {
1936 DECL_INITIAL (decl) = NULL_TREE;
1937 init = build2 (INIT_EXPR, void_type_node, decl, init);
1938 gimplify_and_add (init, seq_p);
1939 ggc_free (init);
1940 /* Clear TREE_READONLY if we really have an initialization. */
1941 if (!DECL_INITIAL (decl)
1942 && !omp_privatize_by_reference (decl))
1943 TREE_READONLY (decl) = 0;
1944 }
1945 else
1946 /* We must still examine initializers for static variables
1947 as they may contain a label address. */
1948 walk_tree (&init, force_labels_r, NULL, NULL);
1949 }
1950 /* When there is no explicit initializer and the user requested it,
1951 insert an artificial initializer for this automatic
1952 variable. */
1953 else if (is_var_need_auto_init (decl)
1954 && !decl_had_value_expr_p)
1955 {
1956 gimple_add_init_for_auto_var (decl,
1957 flag_auto_var_init,
1958 seq_p);
1959 /* The expansion of a call to the above .DEFERRED_INIT will apply
1960 block initialization to the whole space covered by this variable.
1961 As a result, all the padding will be initialized to zeroes
1962 for zero initialization and 0xFE byte-repeatable patterns for
1963 pattern initialization.
1964 In order to make the padding zero for pattern init, we
1965 add a call to __builtin_clear_padding to clear the
1966 padding, for compatibility with Clang.
1967 We cannot insert this call if the variable is a gimple register
1968 since __builtin_clear_padding will take the address of the
1969 variable. As a result, if a long double/_Complex long double
1970 variable is spilled onto the stack later, its padding is 0xFE. */
1971 if (flag_auto_var_init == AUTO_INIT_PATTERN
1972 && !is_gimple_reg (decl)
1973 && clear_padding_type_may_have_padding_p (TREE_TYPE (decl)))
1974 gimple_add_padding_init_for_auto_var (decl, is_vla, seq_p);
1975 }
1976 }
1977
1978 return GS_ALL_DONE;
1979 }
1980
1981 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1982 and replacing the LOOP_EXPR with goto, but if the loop contains an
1983 EXIT_EXPR, we need to append a label for it to jump to. */
1984
1985 static enum gimplify_status
1986 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1987 {
1988 tree saved_label = gimplify_ctxp->exit_label;
1989 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1990
1991 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1992
1993 gimplify_ctxp->exit_label = NULL_TREE;
1994
1995 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1996
1997 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1998
1999 if (gimplify_ctxp->exit_label)
2000 gimplify_seq_add_stmt (pre_p,
2001 gimple_build_label (gimplify_ctxp->exit_label));
2002
2003 gimplify_ctxp->exit_label = saved_label;
2004
2005 *expr_p = NULL;
2006 return GS_ALL_DONE;
2007 }
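/* Illustrative sketch: a front end may represent an infinite loop such
   as the body of

       while (1) f ();

   as a LOOP_EXPR, which is expected to lower to roughly

       start:
         f ();
         goto start;

   with the exit label emitted only when an EXIT_EXPR (e.g. a break
   condition) was gimplified inside the body. */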
2008
2009 /* Gimplify a statement list onto a sequence. These may be created either
2010 by an enlightened front-end, or by shortcut_cond_expr. */
2011
2012 static enum gimplify_status
2013 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
2014 {
2015 tree temp = voidify_wrapper_expr (*expr_p, NULL);
2016
2017 tree_stmt_iterator i = tsi_start (*expr_p);
2018
2019 while (!tsi_end_p (i))
2020 {
2021 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
2022 tsi_delink (&i);
2023 }
2024
2025 if (temp)
2026 {
2027 *expr_p = temp;
2028 return GS_OK;
2029 }
2030
2031 return GS_ALL_DONE;
2032 }
2033
2034
2035 /* Emit a warning for the unreachable statement STMT if needed.
2036 Return the statement when the warning is emitted, otherwise
2037 return NULL. */
2038 static gimple *
2039 emit_warn_switch_unreachable (gimple *stmt)
2040 {
2041 if (gimple_code (stmt) == GIMPLE_GOTO
2042 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
2043 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
2044 /* Don't warn for compiler-generated gotos. These occur
2045 in Duff's devices, for example. */
2046 return NULL;
2047 else if ((flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
2048 && ((gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
2049 || (gimple_call_builtin_p (stmt, BUILT_IN_CLEAR_PADDING)
2050 && (bool) TREE_INT_CST_LOW (gimple_call_arg (stmt, 1)))
2051 || (is_gimple_assign (stmt)
2052 && gimple_assign_single_p (stmt)
2053 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2054 && gimple_call_internal_p (
2055 SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt)),
2056 IFN_DEFERRED_INIT))))
2057 /* Don't warn for compiler-generated initializations for
2058 -ftrivial-auto-var-init.
2059 There are 3 cases:
2060 case 1: a call to .DEFERRED_INIT;
2061 case 2: a call to __builtin_clear_padding with the 2nd argument
2062 present and non-zero;
2063 case 3: a gimple assign store right after the call to .DEFERRED_INIT
2064 that has the LHS of .DEFERRED_INIT as the RHS, as follows:
2065 _1 = .DEFERRED_INIT (4, 2, &"i1"[0]);
2066 i1 = _1. */
2067 return NULL;
2068 else
2069 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
2070 "statement will never be executed");
2071 return stmt;
2072 }
2073
2074 /* Callback for walk_gimple_seq. */
2075
2076 static tree
2077 warn_switch_unreachable_and_auto_init_r (gimple_stmt_iterator *gsi_p,
2078 bool *handled_ops_p,
2079 struct walk_stmt_info *wi)
2080 {
2081 gimple *stmt = gsi_stmt (*gsi_p);
2082 bool unreachable_issued = wi->info != NULL;
2083
2084 *handled_ops_p = true;
2085 switch (gimple_code (stmt))
2086 {
2087 case GIMPLE_TRY:
2088 /* A compiler-generated cleanup or a user-written try block.
2089 If it's empty, don't dive into it--that would result in
2090 worse location info. */
2091 if (gimple_try_eval (stmt) == NULL)
2092 {
2093 if (warn_switch_unreachable && !unreachable_issued)
2094 wi->info = emit_warn_switch_unreachable (stmt);
2095
2096 /* Stop when auto var init warning is not on. */
2097 if (!warn_trivial_auto_var_init)
2098 return integer_zero_node;
2099 }
2100 /* Fall through. */
2101 case GIMPLE_BIND:
2102 case GIMPLE_CATCH:
2103 case GIMPLE_EH_FILTER:
2104 case GIMPLE_TRANSACTION:
2105 /* Walk the sub-statements. */
2106 *handled_ops_p = false;
2107 break;
2108
2109 case GIMPLE_DEBUG:
2110 /* Ignore these. We may generate them before declarations that
2111 are never executed. If there's something to warn about,
2112 there will be non-debug stmts too, and we'll catch those. */
2113 break;
2114
2115 case GIMPLE_LABEL:
2116 /* Stop at the first label. */
2117 return integer_zero_node;
2118 case GIMPLE_CALL:
2119 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2120 {
2121 *handled_ops_p = false;
2122 break;
2123 }
2124 if (warn_trivial_auto_var_init
2125 && flag_auto_var_init > AUTO_INIT_UNINITIALIZED
2126 && gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
2127 {
2128 /* Get the variable name from the 3rd argument of call. */
2129 tree var_name = gimple_call_arg (stmt, 2);
2130 var_name = TREE_OPERAND (TREE_OPERAND (var_name, 0), 0);
2131 const char *var_name_str = TREE_STRING_POINTER (var_name);
2132
2133 warning_at (gimple_location (stmt), OPT_Wtrivial_auto_var_init,
2134 "%qs cannot be initialized with"
2135 "%<-ftrivial-auto-var_init%>",
2136 var_name_str);
2137 break;
2138 }
2139
2140 /* Fall through. */
2141 default:
2142 /* check the first "real" statement (not a decl/lexical scope/...), issue
2143 warning if needed. */
2144 if (warn_switch_unreachable && !unreachable_issued)
2145 wi->info = emit_warn_switch_unreachable (stmt);
2146 /* Stop when auto var init warning is not on. */
2147 if (!warn_trivial_auto_var_init)
2148 return integer_zero_node;
2149 break;
2150 }
2151 return NULL_TREE;
2152 }
2153
2154
2155 /* Possibly warn about unreachable statements between switch's controlling
2156 expression and the first case. Also warn when -ftrivial-auto-var-init
2157 cannot initialize the auto variable in such a situation.
2158 SEQ is the body of a switch expression. */
2159
2160 static void
2161 maybe_warn_switch_unreachable_and_auto_init (gimple_seq seq)
2162 {
2163 if ((!warn_switch_unreachable && !warn_trivial_auto_var_init)
2164 /* This warning doesn't play well with Fortran when optimizations
2165 are on. */
2166 || lang_GNU_Fortran ()
2167 || seq == NULL)
2168 return;
2169
2170 struct walk_stmt_info wi;
2171
2172 memset (&wi, 0, sizeof (wi));
2173 walk_gimple_seq (seq, warn_switch_unreachable_and_auto_init_r, NULL, &wi);
2174 }
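/* Illustrative sketch: -Wswitch-unreachable fires for code such as

       switch (x)
         {
           int i = g ();   // "statement will never be executed"
         case 1:
           use (i);
           break;
         }

   because the initialization of i sits between the controlling
   expression and the first case label and is skipped by every jump
   into the switch body. */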
2175
2176
2177 /* A label entry that pairs a label with a location. */
2178 struct label_entry
2179 {
2180 tree label;
2181 location_t loc;
2182 };
2183
2184 /* Find LABEL in vector of label entries VEC. */
2185
2186 static struct label_entry *
2187 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
2188 {
2189 unsigned int i;
2190 struct label_entry *l;
2191
2192 FOR_EACH_VEC_ELT (*vec, i, l)
2193 if (l->label == label)
2194 return l;
2195 return NULL;
2196 }
2197
2198 /* Return true if LABEL, a LABEL_DECL, represents a case label
2199 in a vector of labels CASES. */
2200
2201 static bool
2202 case_label_p (const vec<tree> *cases, tree label)
2203 {
2204 unsigned int i;
2205 tree l;
2206
2207 FOR_EACH_VEC_ELT (*cases, i, l)
2208 if (CASE_LABEL (l) == label)
2209 return true;
2210 return false;
2211 }
2212
2213 /* Find the last nondebug statement in a scope STMT. */
2214
2215 static gimple *
2216 last_stmt_in_scope (gimple *stmt)
2217 {
2218 if (!stmt)
2219 return NULL;
2220
2221 switch (gimple_code (stmt))
2222 {
2223 case GIMPLE_BIND:
2224 {
2225 gbind *bind = as_a <gbind *> (stmt);
2226 stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
2227 return last_stmt_in_scope (stmt);
2228 }
2229
2230 case GIMPLE_TRY:
2231 {
2232 gtry *try_stmt = as_a <gtry *> (stmt);
2233 stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
2234 gimple *last_eval = last_stmt_in_scope (stmt);
2235 if (gimple_stmt_may_fallthru (last_eval)
2236 && (last_eval == NULL
2237 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
2238 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
2239 {
2240 stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
2241 return last_stmt_in_scope (stmt);
2242 }
2243 else
2244 return last_eval;
2245 }
2246
2247 case GIMPLE_DEBUG:
2248 gcc_unreachable ();
2249
2250 default:
2251 return stmt;
2252 }
2253 }
2254
2255 /* Collect labels that may fall through into LABELS and return the statement
2256 preceding another case label, or a user-defined label. Store a location
2257 useful to give warnings at *PREVLOC (usually the location of the returned
2258 statement or of its surrounding scope). */
2259
2260 static gimple *
2261 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
2262 auto_vec <struct label_entry> *labels,
2263 location_t *prevloc)
2264 {
2265 gimple *prev = NULL;
2266
2267 *prevloc = UNKNOWN_LOCATION;
2268 do
2269 {
2270 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
2271 {
2272 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2273 which starts on a GIMPLE_SWITCH and ends with a break label.
2274 Handle that as a single statement that can fall through. */
2275 gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
2276 gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
2277 gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
2278 if (last
2279 && gimple_code (first) == GIMPLE_SWITCH
2280 && gimple_code (last) == GIMPLE_LABEL)
2281 {
2282 tree label = gimple_label_label (as_a <glabel *> (last));
2283 if (SWITCH_BREAK_LABEL_P (label))
2284 {
2285 prev = bind;
2286 gsi_next (gsi_p);
2287 continue;
2288 }
2289 }
2290 }
2291 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
2292 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
2293 {
2294 /* Nested scope. Only look at the last statement of
2295 the innermost scope. */
2296 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
2297 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
2298 if (last)
2299 {
2300 prev = last;
2301 /* It might be a label without a location. Use the
2302 location of the scope then. */
2303 if (!gimple_has_location (prev))
2304 *prevloc = bind_loc;
2305 }
2306 gsi_next (gsi_p);
2307 continue;
2308 }
2309
2310 /* Ifs are tricky. */
2311 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
2312 {
2313 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
2314 tree false_lab = gimple_cond_false_label (cond_stmt);
2315 location_t if_loc = gimple_location (cond_stmt);
2316
2317 /* If we have e.g.
2318 if (i > 1) goto <D.2259>; else goto D;
2319 we can't do much with the else-branch. */
2320 if (!DECL_ARTIFICIAL (false_lab))
2321 break;
2322
2323 /* Go on until the false label, then one step back. */
2324 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
2325 {
2326 gimple *stmt = gsi_stmt (*gsi_p);
2327 if (gimple_code (stmt) == GIMPLE_LABEL
2328 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
2329 break;
2330 }
2331
2332 /* Not found? Oops. */
2333 if (gsi_end_p (*gsi_p))
2334 break;
2335
2336 /* A dead label can't fall through. */
2337 if (!UNUSED_LABEL_P (false_lab))
2338 {
2339 struct label_entry l = { false_lab, if_loc };
2340 labels->safe_push (l);
2341 }
2342
2343 /* Go to the last statement of the then branch. */
2344 gsi_prev (gsi_p);
2345
2346 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2347 <D.1759>:
2348 <stmt>;
2349 goto <D.1761>;
2350 <D.1760>:
2351 */
2352 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
2353 && !gimple_has_location (gsi_stmt (*gsi_p)))
2354 {
2355 /* Look at the statement before, it might be
2356 attribute fallthrough, in which case don't warn. */
2357 gsi_prev (gsi_p);
2358 bool fallthru_before_dest
2359 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
2360 gsi_next (gsi_p);
2361 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
2362 if (!fallthru_before_dest)
2363 {
2364 struct label_entry l = { goto_dest, if_loc };
2365 labels->safe_push (l);
2366 }
2367 }
2368 /* This case is about
2369 if (1 != 0) goto <D.2022>; else goto <D.2023>;
2370 <D.2022>:
2371 n = n + 1; // #1
2372 <D.2023>: // #2
2373 <D.1988>: // #3
2374 where #2 is UNUSED_LABEL_P and we want to warn about #1 falling
2375 through to #3. So set PREV to #1. */
2376 else if (UNUSED_LABEL_P (false_lab))
2377 prev = gsi_stmt (*gsi_p);
2378
2379 /* And move back. */
2380 gsi_next (gsi_p);
2381 }
2382
2383 /* Remember the last statement. Skip labels that are of no interest
2384 to us. */
2385 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2386 {
2387 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
2388 if (find_label_entry (labels, label))
2389 prev = gsi_stmt (*gsi_p);
2390 }
2391 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
2392 ;
2393 else if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_PREDICT)
2394 ;
2395 else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
2396 prev = gsi_stmt (*gsi_p);
2397 gsi_next (gsi_p);
2398 }
2399 while (!gsi_end_p (*gsi_p)
2400 /* Stop if we find a case or a user-defined label. */
2401 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2402 || !gimple_has_location (gsi_stmt (*gsi_p))));
2403
2404 if (prev && gimple_has_location (prev))
2405 *prevloc = gimple_location (prev);
2406 return prev;
2407 }
2408
2409 /* Return true if the switch fallthrough warning should occur. LABEL is
2410 the label statement that we're falling through to. */
2411
2412 static bool
2413 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2414 {
2415 gimple_stmt_iterator gsi = *gsi_p;
2416
2417 /* Don't warn if the label is marked with a "falls through" comment. */
2418 if (FALLTHROUGH_LABEL_P (label))
2419 return false;
2420
2421 /* Don't warn for non-case labels followed by a statement:
2422 case 0:
2423 foo ();
2424 label:
2425 bar ();
2426 as these are likely intentional. */
2427 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2428 {
2429 tree l;
2430 while (!gsi_end_p (gsi)
2431 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2432 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2433 && !case_label_p (&gimplify_ctxp->case_labels, l))
2434 gsi_next_nondebug (&gsi);
2435 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2436 return false;
2437 }
2438
2439 /* Don't warn for terminated branches, i.e. when the subsequent case label
2440 immediately breaks. */
2441 gsi = *gsi_p;
2442
2443 /* Skip all immediately following labels. */
2444 while (!gsi_end_p (gsi)
2445 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2446 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2447 gsi_next_nondebug (&gsi);
2448
2449 /* { ... something; default:; } */
2450 if (gsi_end_p (gsi)
2451 /* { ... something; default: break; } or
2452 { ... something; default: goto L; } */
2453 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2454 /* { ... something; default: return; } */
2455 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2456 return false;
2457
2458 return true;
2459 }
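/* Illustrative sketch: -Wimplicit-fallthrough stays quiet when the
   fall-through is marked explicitly, e.g.

       case 1:
         f ();
         [[fallthrough]];   // becomes an IFN_FALLTHROUGH call
       case 2:
         g ();
         break;

   or when the next label carries FALLTHROUGH_LABEL_P because a
   "falls through" style comment preceded it. */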
2460
2461 /* Callback for walk_gimple_seq. */
2462
2463 static tree
2464 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2465 struct walk_stmt_info *)
2466 {
2467 gimple *stmt = gsi_stmt (*gsi_p);
2468
2469 *handled_ops_p = true;
2470 switch (gimple_code (stmt))
2471 {
2472 case GIMPLE_TRY:
2473 case GIMPLE_BIND:
2474 case GIMPLE_CATCH:
2475 case GIMPLE_EH_FILTER:
2476 case GIMPLE_TRANSACTION:
2477 /* Walk the sub-statements. */
2478 *handled_ops_p = false;
2479 break;
2480
2481 /* Find a sequence of form:
2482
2483 GIMPLE_LABEL
2484 [...]
2485 <may fallthru stmt>
2486 GIMPLE_LABEL
2487
2488 and possibly warn. */
2489 case GIMPLE_LABEL:
2490 {
2491 /* Found a label. Skip all immediately following labels. */
2492 while (!gsi_end_p (*gsi_p)
2493 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2494 gsi_next_nondebug (gsi_p);
2495
2496 /* There might be no more statements. */
2497 if (gsi_end_p (*gsi_p))
2498 return integer_zero_node;
2499
2500 /* Vector of labels that fall through. */
2501 auto_vec <struct label_entry> labels;
2502 location_t prevloc;
2503 gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);
2504
2505 /* There might be no more statements. */
2506 if (gsi_end_p (*gsi_p))
2507 return integer_zero_node;
2508
2509 gimple *next = gsi_stmt (*gsi_p);
2510 tree label;
2511 /* If what follows is a label, then we may have a fallthrough. */
2512 if (gimple_code (next) == GIMPLE_LABEL
2513 && gimple_has_location (next)
2514 && (label = gimple_label_label (as_a <glabel *> (next)))
2515 && prev != NULL)
2516 {
2517 struct label_entry *l;
2518 bool warned_p = false;
2519 auto_diagnostic_group d;
2520 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2521 /* Quiet. */;
2522 else if (gimple_code (prev) == GIMPLE_LABEL
2523 && (label = gimple_label_label (as_a <glabel *> (prev)))
2524 && (l = find_label_entry (&labels, label)))
2525 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2526 "this statement may fall through");
2527 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2528 /* Try to be clever and don't warn when the statement
2529 can't actually fall through. */
2530 && gimple_stmt_may_fallthru (prev)
2531 && prevloc != UNKNOWN_LOCATION)
2532 warned_p = warning_at (prevloc,
2533 OPT_Wimplicit_fallthrough_,
2534 "this statement may fall through");
2535 if (warned_p)
2536 inform (gimple_location (next), "here");
2537
2538 /* Mark this label as processed so as to prevent multiple
2539 warnings in nested switches. */
2540 FALLTHROUGH_LABEL_P (label) = true;
2541
2542 /* So that the next warn_implicit_fallthrough_r will start looking
2543 for a new sequence starting with this label. */
2544 gsi_prev (gsi_p);
2545 }
2546 }
2547 break;
2548 default:
2549 break;
2550 }
2551 return NULL_TREE;
2552 }
2553
2554 /* Warn when a switch case falls through. */
2555
2556 static void
2557 maybe_warn_implicit_fallthrough (gimple_seq seq)
2558 {
2559 if (!warn_implicit_fallthrough)
2560 return;
2561
2562 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2563 if (!(lang_GNU_C ()
2564 || lang_GNU_CXX ()
2565 || lang_GNU_OBJC ()))
2566 return;
2567
2568 struct walk_stmt_info wi;
2569 memset (&wi, 0, sizeof (wi));
2570 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2571 }
2572
2573 /* Callback for walk_gimple_seq. */
2574
2575 static tree
2576 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2577 struct walk_stmt_info *wi)
2578 {
2579 gimple *stmt = gsi_stmt (*gsi_p);
2580
2581 *handled_ops_p = true;
2582 switch (gimple_code (stmt))
2583 {
2584 case GIMPLE_TRY:
2585 case GIMPLE_BIND:
2586 case GIMPLE_CATCH:
2587 case GIMPLE_EH_FILTER:
2588 case GIMPLE_TRANSACTION:
2589 /* Walk the sub-statements. */
2590 *handled_ops_p = false;
2591 break;
2592 case GIMPLE_CALL:
2593 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2594 {
2595 gsi_remove (gsi_p, true);
2596 if (gsi_end_p (*gsi_p))
2597 {
2598 *static_cast<location_t *>(wi->info) = gimple_location (stmt);
2599 return integer_zero_node;
2600 }
2601
2602 bool found = false;
2603 location_t loc = gimple_location (stmt);
2604
2605 gimple_stmt_iterator gsi2 = *gsi_p;
2606 stmt = gsi_stmt (gsi2);
2607 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2608 {
2609 /* Go on until the artificial label. */
2610 tree goto_dest = gimple_goto_dest (stmt);
2611 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2612 {
2613 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2614 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2615 == goto_dest)
2616 break;
2617 }
2618
2619 /* Not found? Stop. */
2620 if (gsi_end_p (gsi2))
2621 break;
2622
2623 /* Look one past it. */
2624 gsi_next (&gsi2);
2625 }
2626
2627 /* We're looking for a case label or default label here. */
2628 while (!gsi_end_p (gsi2))
2629 {
2630 stmt = gsi_stmt (gsi2);
2631 if (gimple_code (stmt) == GIMPLE_LABEL)
2632 {
2633 tree label = gimple_label_label (as_a <glabel *> (stmt));
2634 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2635 {
2636 found = true;
2637 break;
2638 }
2639 }
2640 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2641 ;
2642 else if (!is_gimple_debug (stmt))
2643 /* Anything else is not expected. */
2644 break;
2645 gsi_next (&gsi2);
2646 }
2647 if (!found)
2648 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2649 "a case label or default label");
2650 }
2651 break;
2652 default:
2653 break;
2654 }
2655 return NULL_TREE;
2656 }
2657
2658 /* Expand all FALLTHROUGH () calls in SEQ. */
2659
2660 static void
2661 expand_FALLTHROUGH (gimple_seq *seq_p)
2662 {
2663 struct walk_stmt_info wi;
2664 location_t loc;
2665 memset (&wi, 0, sizeof (wi));
2666 wi.info = (void *) &loc;
2667 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2668 if (wi.callback_result == integer_zero_node)
2669 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2670 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2671 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2672 "a case label or default label");
2673 }
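/* Illustrative sketch: the pedwarns above reject an attribute that is
   not directly followed by a case or default label, e.g.

       switch (x)
         {
         case 1:
           f ();
           [[fallthrough]];   // nothing follows but '}': pedwarn
         }
*/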
2674
2675 \f
2676 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2677 branch to. */
2678
2679 static enum gimplify_status
2680 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2681 {
2682 tree switch_expr = *expr_p;
2683 gimple_seq switch_body_seq = NULL;
2684 enum gimplify_status ret;
2685 tree index_type = TREE_TYPE (switch_expr);
2686 if (index_type == NULL_TREE)
2687 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2688
2689 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2690 fb_rvalue);
2691 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2692 return ret;
2693
2694 if (SWITCH_BODY (switch_expr))
2695 {
2696 vec<tree> labels;
2697 vec<tree> saved_labels;
2698 hash_set<tree> *saved_live_switch_vars = NULL;
2699 tree default_case = NULL_TREE;
2700 gswitch *switch_stmt;
2701
2702 /* Save old labels, get new ones from body, then restore the old
2703 labels. Save all the things from the switch body to append after. */
2704 saved_labels = gimplify_ctxp->case_labels;
2705 gimplify_ctxp->case_labels.create (8);
2706
2707 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2708 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2709 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2710 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2711 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2712 else
2713 gimplify_ctxp->live_switch_vars = NULL;
2714
2715 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2716 gimplify_ctxp->in_switch_expr = true;
2717
2718 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2719
2720 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2721 maybe_warn_switch_unreachable_and_auto_init (switch_body_seq);
2722 maybe_warn_implicit_fallthrough (switch_body_seq);
2723 /* Only do this for the outermost GIMPLE_SWITCH. */
2724 if (!gimplify_ctxp->in_switch_expr)
2725 expand_FALLTHROUGH (&switch_body_seq);
2726
2727 labels = gimplify_ctxp->case_labels;
2728 gimplify_ctxp->case_labels = saved_labels;
2729
2730 if (gimplify_ctxp->live_switch_vars)
2731 {
2732 gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
2733 delete gimplify_ctxp->live_switch_vars;
2734 }
2735 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2736
2737 preprocess_case_label_vec_for_gimple (labels, index_type,
2738 &default_case);
2739
2740 bool add_bind = false;
2741 if (!default_case)
2742 {
2743 glabel *new_default;
2744
2745 default_case
2746 = build_case_label (NULL_TREE, NULL_TREE,
2747 create_artificial_label (UNKNOWN_LOCATION));
2748 if (old_in_switch_expr)
2749 {
2750 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
2751 add_bind = true;
2752 }
2753 new_default = gimple_build_label (CASE_LABEL (default_case));
2754 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2755 }
2756 else if (old_in_switch_expr)
2757 {
2758 gimple *last = gimple_seq_last_stmt (switch_body_seq);
2759 if (last && gimple_code (last) == GIMPLE_LABEL)
2760 {
2761 tree label = gimple_label_label (as_a <glabel *> (last));
2762 if (SWITCH_BREAK_LABEL_P (label))
2763 add_bind = true;
2764 }
2765 }
2766
2767 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2768 default_case, labels);
2769 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2770 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2771 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2772 so that we can easily find the start and end of the switch
2773 statement. */
2774 if (add_bind)
2775 {
2776 gimple_seq bind_body = NULL;
2777 gimplify_seq_add_stmt (&bind_body, switch_stmt);
2778 gimple_seq_add_seq (&bind_body, switch_body_seq);
2779 gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
2780 gimple_set_location (bind, EXPR_LOCATION (switch_expr));
2781 gimplify_seq_add_stmt (pre_p, bind);
2782 }
2783 else
2784 {
2785 gimplify_seq_add_stmt (pre_p, switch_stmt);
2786 gimplify_seq_add_seq (pre_p, switch_body_seq);
2787 }
2788 labels.release ();
2789 }
2790 else
2791 gcc_unreachable ();
2792
2793 return GS_ALL_DONE;
2794 }
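/* Illustrative sketch: for

       switch (x) { case 1: f (); }

   no default case exists, so build_case_label above synthesizes an
   artificial one and the resulting gswitch is expected to look
   roughly like

       switch (x) <default: <D.1234>, case 1: <L0>>

   followed by the gimplified body and the new default label. */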
2795
2796 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2797
2798 static enum gimplify_status
2799 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2800 {
2801 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2802 == current_function_decl);
2803
2804 tree label = LABEL_EXPR_LABEL (*expr_p);
2805 glabel *label_stmt = gimple_build_label (label);
2806 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2807 gimplify_seq_add_stmt (pre_p, label_stmt);
2808
2809 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2810 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2811 NOT_TAKEN));
2812 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2813 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2814 TAKEN));
2815
2816 return GS_ALL_DONE;
2817 }
2818
2819 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2820
2821 static enum gimplify_status
2822 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2823 {
2824 struct gimplify_ctx *ctxp;
2825 glabel *label_stmt;
2826
2827 /* Invalid programs can play Duff's Device type games with, for example,
2828 #pragma omp parallel. At least in the C front end, we don't
2829 detect such invalid branches until after gimplification, in the
2830 diagnose_omp_blocks pass. */
2831 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2832 if (ctxp->case_labels.exists ())
2833 break;
2834
2835 tree label = CASE_LABEL (*expr_p);
2836 label_stmt = gimple_build_label (label);
2837 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2838 ctxp->case_labels.safe_push (*expr_p);
2839 gimplify_seq_add_stmt (pre_p, label_stmt);
2840
2841 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2842 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2843 NOT_TAKEN));
2844 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2845 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2846 TAKEN));
2847
2848 return GS_ALL_DONE;
2849 }
2850
2851 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2852 if necessary. */
2853
2854 tree
2855 build_and_jump (tree *label_p)
2856 {
2857 if (label_p == NULL)
2858 /* If there's nowhere to jump, just fall through. */
2859 return NULL_TREE;
2860
2861 if (*label_p == NULL_TREE)
2862 {
2863 tree label = create_artificial_label (UNKNOWN_LOCATION);
2864 *label_p = label;
2865 }
2866
2867 return build1 (GOTO_EXPR, void_type_node, *label_p);
2868 }
2869
2870 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2871 This also involves building a label to jump to and communicating it to
2872 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2873
2874 static enum gimplify_status
2875 gimplify_exit_expr (tree *expr_p)
2876 {
2877 tree cond = TREE_OPERAND (*expr_p, 0);
2878 tree expr;
2879
2880 expr = build_and_jump (&gimplify_ctxp->exit_label);
2881 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2882 *expr_p = expr;
2883
2884 return GS_OK;
2885 }
2886
2887 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2888 different from its canonical type, wrap the whole thing inside a
2889 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2890 type.
2891
2892 The canonical type of a COMPONENT_REF is the type of the field being
2893 referenced--unless the field is a bit-field which can be read directly
2894 in a smaller mode, in which case the canonical type is the
2895 sign-appropriate type corresponding to that mode. */
2896
2897 static void
2898 canonicalize_component_ref (tree *expr_p)
2899 {
2900 tree expr = *expr_p;
2901 tree type;
2902
2903 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
2904
2905 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2906 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2907 else
2908 type = TREE_TYPE (TREE_OPERAND (expr, 1));
2909
2910 /* One could argue that all the stuff below is not necessary for
2911 the non-bitfield case and declare it a FE error if type
2912 adjustment would be needed. */
2913 if (TREE_TYPE (expr) != type)
2914 {
2915 #ifdef ENABLE_TYPES_CHECKING
2916 tree old_type = TREE_TYPE (expr);
2917 #endif
2918 int type_quals;
2919
2920 /* We need to preserve qualifiers and propagate them from
2921 operand 0. */
2922 type_quals = TYPE_QUALS (type)
2923 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2924 if (TYPE_QUALS (type) != type_quals)
2925 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2926
2927 /* Set the type of the COMPONENT_REF to the underlying type. */
2928 TREE_TYPE (expr) = type;
2929
2930 #ifdef ENABLE_TYPES_CHECKING
2931 /* It is now a FE error, if the conversion from the canonical
2932 type to the original expression type is not useless. */
2933 gcc_assert (useless_type_conversion_p (old_type, type));
2934 #endif
2935 }
2936 }
2937
2938 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2939 to foo, embed that change in the ADDR_EXPR by converting
2940 T array[U];
2941 (T *)&array
2942 ==>
2943 &array[L]
2944 where L is the lower bound. For simplicity, only do this for constant
2945 lower bound.
2946 The constraint is that the type of &array[L] is trivially convertible
2947 to T *. */
2948
2949 static void
2950 canonicalize_addr_expr (tree *expr_p)
2951 {
2952 tree expr = *expr_p;
2953 tree addr_expr = TREE_OPERAND (expr, 0);
2954 tree datype, ddatype, pddatype;
2955
2956 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2957 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2958 || TREE_CODE (addr_expr) != ADDR_EXPR)
2959 return;
2960
2961 /* The addr_expr type should be a pointer to an array. */
2962 datype = TREE_TYPE (TREE_TYPE (addr_expr));
2963 if (TREE_CODE (datype) != ARRAY_TYPE)
2964 return;
2965
2966 /* The pointer to element type shall be trivially convertible to
2967 the expression pointer type. */
2968 ddatype = TREE_TYPE (datype);
2969 pddatype = build_pointer_type (ddatype);
2970 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2971 pddatype))
2972 return;
2973
2974 /* The lower bound and element sizes must be constant. */
2975 if (!TYPE_SIZE_UNIT (ddatype)
2976 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2977 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2978 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2979 return;
2980
2981 /* All checks succeeded. Build a new node to merge the cast. */
2982 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2983 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2984 NULL_TREE, NULL_TREE);
2985 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2986
2987 /* We can have stripped a required restrict qualifier above. */
2988 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2989 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
2990 }
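/* Illustrative sketch of the rewrite described above:

       int a[10];
       int *p = (int *) &a;

   has its NOP conversion folded into the address, yielding

       int *p = &a[0];

   since the lower bound 0 and the element size are constant. */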
2991
2992 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2993 underneath as appropriate. */
2994
2995 static enum gimplify_status
2996 gimplify_conversion (tree *expr_p)
2997 {
2998 location_t loc = EXPR_LOCATION (*expr_p);
2999 gcc_assert (CONVERT_EXPR_P (*expr_p));
3000
3001 /* Then strip away all but the outermost conversion. */
3002 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
3003
3004 /* And remove the outermost conversion if it's useless. */
3005 if (tree_ssa_useless_type_conversion (*expr_p))
3006 *expr_p = TREE_OPERAND (*expr_p, 0);
3007
3008 /* If we still have a conversion at the toplevel,
3009 then canonicalize some constructs. */
3010 if (CONVERT_EXPR_P (*expr_p))
3011 {
3012 tree sub = TREE_OPERAND (*expr_p, 0);
3013
3014 /* If a NOP conversion is changing the type of a COMPONENT_REF
3015 expression, then canonicalize its type now in order to expose more
3016 redundant conversions. */
3017 if (TREE_CODE (sub) == COMPONENT_REF)
3018 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
3019
3020 /* If a NOP conversion is changing a pointer to array of foo
3021 to a pointer to foo, embed that change in the ADDR_EXPR. */
3022 else if (TREE_CODE (sub) == ADDR_EXPR)
3023 canonicalize_addr_expr (expr_p);
3024 }
3025
3026 /* If we have a conversion to a non-register type force the
3027 use of a VIEW_CONVERT_EXPR instead. */
3028 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
3029 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
3030 TREE_OPERAND (*expr_p, 0));
3031
3032 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
3033 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
3034 TREE_SET_CODE (*expr_p, NOP_EXPR);
3035
3036 return GS_OK;
3037 }
3038
3039 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
3040 DECL_VALUE_EXPR, and it's worth re-examining things. */
3041
3042 static enum gimplify_status
3043 gimplify_var_or_parm_decl (tree *expr_p)
3044 {
3045 tree decl = *expr_p;
3046
3047 /* ??? If this is a local variable, and it has not been seen in any
3048 outer BIND_EXPR, then it's probably the result of a duplicate
3049 declaration, for which we've already issued an error. It would
3050 be really nice if the front end wouldn't leak these at all.
3051 Currently the only known culprit is C++ destructors, as seen
3052 in g++.old-deja/g++.jason/binding.C.
3053 Another possible culprit is size expressions for variably modified
3054 types which are lost in the FE or not gimplified correctly. */
3055 if (VAR_P (decl)
3056 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
3057 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
3058 && decl_function_context (decl) == current_function_decl)
3059 {
3060 gcc_assert (seen_error ());
3061 return GS_ERROR;
3062 }
3063
3064 /* When within an OMP context, notice uses of variables. */
3065 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
3066 return GS_ALL_DONE;
3067
3068 /* If the decl is an alias for another expression, substitute it now. */
3069 if (DECL_HAS_VALUE_EXPR_P (decl))
3070 {
3071 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
3072 return GS_OK;
3073 }
3074
3075 return GS_ALL_DONE;
3076 }
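/* Illustrative sketch: for a VLA

       char buf[n];

   gimplify_vla_decl gives buf a DECL_VALUE_EXPR of the form *buf.1,
   where buf.1 (a hypothetical name for the temporary) holds the
   allocated address, so the substitution above rewrites every later
   use of buf into *buf.1. */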
3077
3078 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
3079
3080 static void
3081 recalculate_side_effects (tree t)
3082 {
3083 enum tree_code code = TREE_CODE (t);
3084 int len = TREE_OPERAND_LENGTH (t);
3085 int i;
3086
3087 switch (TREE_CODE_CLASS (code))
3088 {
3089 case tcc_expression:
3090 switch (code)
3091 {
3092 case INIT_EXPR:
3093 case MODIFY_EXPR:
3094 case VA_ARG_EXPR:
3095 case PREDECREMENT_EXPR:
3096 case PREINCREMENT_EXPR:
3097 case POSTDECREMENT_EXPR:
3098 case POSTINCREMENT_EXPR:
3099 /* All of these have side-effects, no matter what their
3100 operands are. */
3101 return;
3102
3103 default:
3104 break;
3105 }
3106 /* Fall through. */
3107
3108 case tcc_comparison: /* a comparison expression */
3109 case tcc_unary: /* a unary arithmetic expression */
3110 case tcc_binary: /* a binary arithmetic expression */
3111 case tcc_reference: /* a reference */
3112 case tcc_vl_exp: /* a function call */
3113 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
3114 for (i = 0; i < len; ++i)
3115 {
3116 tree op = TREE_OPERAND (t, i);
3117 if (op && TREE_SIDE_EFFECTS (op))
3118 TREE_SIDE_EFFECTS (t) = 1;
3119 }
3120 break;
3121
3122 case tcc_constant:
3123 /* No side-effects. */
3124 return;
3125
3126 default:
3127 gcc_unreachable ();
3128 }
3129 }
3130
3131 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
3132 node *EXPR_P.
3133
3134 compound_lval
3135 : min_lval '[' val ']'
3136 | min_lval '.' ID
3137 | compound_lval '[' val ']'
3138 | compound_lval '.' ID
3139
3140 This is not part of the original SIMPLE definition, which separates
3141 array and member references, but it seems reasonable to handle them
3142 together. Also, this way we don't run into problems with union
3143 aliasing; gcc requires that for accesses through a union to alias, the
3144 union reference must be explicit, which was not always the case when we
3145 were splitting up array and member refs.
3146
3147 PRE_P points to the sequence where side effects that must happen before
3148 *EXPR_P should be stored.
3149
3150 POST_P points to the sequence where side effects that must happen after
3151 *EXPR_P should be stored. */
3152
3153 static enum gimplify_status
3154 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3155 fallback_t fallback)
3156 {
3157 tree *p;
3158 enum gimplify_status ret = GS_ALL_DONE, tret;
3159 int i;
3160 location_t loc = EXPR_LOCATION (*expr_p);
3161 tree expr = *expr_p;
3162
3163 /* Create a stack of the subexpressions so later we can walk them in
3164 order from inner to outer. */
3165 auto_vec<tree, 10> expr_stack;
3166
3167 /* We can handle anything that get_inner_reference can deal with. */
3168 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
3169 {
3170 restart:
3171 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
3172 if (TREE_CODE (*p) == INDIRECT_REF)
3173 *p = fold_indirect_ref_loc (loc, *p);
3174
3175 if (handled_component_p (*p))
3176 ;
3177 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
3178 additional COMPONENT_REFs. */
3179 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
3180 && gimplify_var_or_parm_decl (p) == GS_OK)
3181 goto restart;
3182 else
3183 break;
3184
3185 expr_stack.safe_push (*p);
3186 }
3187
3188 gcc_assert (expr_stack.length ());
3189
3190 /* Now EXPR_STACK is a stack of pointers to all the refs we've
3191 walked through and P points to the innermost expression.
3192
3193 Java requires that we elaborate nodes in source order. That
3194 means we must gimplify the inner expression followed by each of
3195 the indices, in order. But we can't gimplify the inner
3196 expression until we deal with any variable bounds, sizes, or
3197 positions in order to deal with PLACEHOLDER_EXPRs.
3198
3199 The base expression may contain a statement expression that
3200 has declarations used in size expressions, so has to be
3201 gimplified before gimplifying the size expressions.
3202
3203 So we do this in three steps. First we deal with variable
3204 bounds, sizes, and positions, then we gimplify the base and
3205 ensure it is memory if needed, then we deal with the annotations
3206 for any variables in the components and any indices, from left
3207 to right. */
3208
3209 bool need_non_reg = false;
3210 for (i = expr_stack.length () - 1; i >= 0; i--)
3211 {
3212 tree t = expr_stack[i];
3213
3214 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3215 {
3216 /* Deal with the low bound and element type size and put them into
3217 the ARRAY_REF. If these values are set, they have already been
3218 gimplified. */
3219 if (TREE_OPERAND (t, 2) == NULL_TREE)
3220 {
3221 tree low = unshare_expr (array_ref_low_bound (t));
3222 if (!is_gimple_min_invariant (low))
3223 {
3224 TREE_OPERAND (t, 2) = low;
3225 }
3226 }
3227
3228 if (TREE_OPERAND (t, 3) == NULL_TREE)
3229 {
3230 tree elmt_size = array_ref_element_size (t);
3231 if (!is_gimple_min_invariant (elmt_size))
3232 {
3233 elmt_size = unshare_expr (elmt_size);
3234 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
3235 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
3236
3237 /* Divide the element size by the alignment of the element
3238 type (above). */
3239 elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR,
3240 elmt_size, factor);
3241
3242 TREE_OPERAND (t, 3) = elmt_size;
3243 }
3244 }
3245 need_non_reg = true;
3246 }
3247 else if (TREE_CODE (t) == COMPONENT_REF)
3248 {
3249 /* Set the field offset into T and gimplify it. */
3250 if (TREE_OPERAND (t, 2) == NULL_TREE)
3251 {
3252 tree offset = component_ref_field_offset (t);
3253 if (!is_gimple_min_invariant (offset))
3254 {
3255 offset = unshare_expr (offset);
3256 tree field = TREE_OPERAND (t, 1);
3257 tree factor
3258 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
3259
3260 /* Divide the offset by its alignment. */
3261 offset = size_binop_loc (loc, EXACT_DIV_EXPR,
3262 offset, factor);
3263
3264 TREE_OPERAND (t, 2) = offset;
3265 }
3266 }
3267 need_non_reg = true;
3268 }
3269 }
3270
3271 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
3272 so as to match the min_lval predicate. Failure to do so may result
3273 in the creation of large aggregate temporaries. */
3274 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
3275 fallback | fb_lvalue);
3276 ret = MIN (ret, tret);
3277
3278 /* Step 2a: if we have component references we do not support on
3279 registers then make sure the base isn't a register. Of course
3280 we can only do so if an rvalue is OK. */
3281 if (need_non_reg && (fallback & fb_rvalue))
3282 prepare_gimple_addressable (p, pre_p);
3283
3284 /* Step 3: gimplify size expressions and the indices and operands of
3285 ARRAY_REF. During this loop we also remove any useless conversions. */
3286
3287 for (; expr_stack.length () > 0; )
3288 {
3289 tree t = expr_stack.pop ();
3290
3291 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3292 {
3293 /* Gimplify the low bound and element type size. */
3294 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3295 is_gimple_reg, fb_rvalue);
3296 ret = MIN (ret, tret);
3297
3298 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
3299 is_gimple_reg, fb_rvalue);
3300 ret = MIN (ret, tret);
3301
3302 /* Gimplify the dimension. */
3303 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
3304 is_gimple_val, fb_rvalue);
3305 ret = MIN (ret, tret);
3306 }
3307 else if (TREE_CODE (t) == COMPONENT_REF)
3308 {
3309 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3310 is_gimple_reg, fb_rvalue);
3311 ret = MIN (ret, tret);
3312 }
3313
3314 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
3315
3316 /* The innermost expression P may have originally had
3317 TREE_SIDE_EFFECTS set which would have caused all the outer
3318 expressions in *EXPR_P leading to P to also have had
3319 TREE_SIDE_EFFECTS set. */
3320 recalculate_side_effects (t);
3321 }
3322
3323 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
3324 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
3325 {
3326 canonicalize_component_ref (expr_p);
3327 }
3328
3329 expr_stack.release ();
3330
3331 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
3332
3333 return ret;
3334 }
3335
3336 /* Gimplify the self modifying expression pointed to by EXPR_P
3337 (++, --, +=, -=).
3338
3339 PRE_P points to the list where side effects that must happen before
3340 *EXPR_P should be stored.
3341
3342 POST_P points to the list where side effects that must happen after
3343 *EXPR_P should be stored.
3344
3345 WANT_VALUE is nonzero iff we want to use the value of this expression
3346 in another expression.
3347
3348 ARITH_TYPE is the type the computation should be performed in. */
3349
3350 enum gimplify_status
3351 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3352 bool want_value, tree arith_type)
3353 {
3354 enum tree_code code;
3355 tree lhs, lvalue, rhs, t1;
3356 gimple_seq post = NULL, *orig_post_p = post_p;
3357 bool postfix;
3358 enum tree_code arith_code;
3359 enum gimplify_status ret;
3360 location_t loc = EXPR_LOCATION (*expr_p);
3361
3362 code = TREE_CODE (*expr_p);
3363
3364 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
3365 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
3366
3367 /* Prefix or postfix? */
3368 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
3369 /* Faster to treat as prefix if result is not used. */
3370 postfix = want_value;
3371 else
3372 postfix = false;
3373
3374 /* For postfix, make sure the inner expression's post side effects
3375 are executed after side effects from this expression. */
3376 if (postfix)
3377 post_p = &post;
3378
3379 /* Add or subtract? */
3380 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
3381 arith_code = PLUS_EXPR;
3382 else
3383 arith_code = MINUS_EXPR;
3384
3385 /* Gimplify the LHS into a GIMPLE lvalue. */
3386 lvalue = TREE_OPERAND (*expr_p, 0);
3387 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3388 if (ret == GS_ERROR)
3389 return ret;
3390
3391 /* Extract the operands to the arithmetic operation. */
3392 lhs = lvalue;
3393 rhs = TREE_OPERAND (*expr_p, 1);
3394
3395 /* For postfix operator, we evaluate the LHS to an rvalue and then use
3396 that as the result value and in the postqueue operation. */
3397 if (postfix)
3398 {
3399 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
3400 if (ret == GS_ERROR)
3401 return ret;
3402
3403 lhs = get_initialized_tmp_var (lhs, pre_p);
3404 }
3405
3406 /* For pointer increment and decrement, use POINTER_PLUS_EXPR. */
3407 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
3408 {
3409 rhs = convert_to_ptrofftype_loc (loc, rhs);
3410 if (arith_code == MINUS_EXPR)
3411 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
3412 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
3413 }
3414 else
3415 t1 = fold_convert (TREE_TYPE (*expr_p),
3416 fold_build2 (arith_code, arith_type,
3417 fold_convert (arith_type, lhs),
3418 fold_convert (arith_type, rhs)));
3419
3420 if (postfix)
3421 {
3422 gimplify_assign (lvalue, t1, pre_p);
3423 gimplify_seq_add_seq (orig_post_p, post);
3424 *expr_p = lhs;
3425 return GS_ALL_DONE;
3426 }
3427 else
3428 {
3429 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3430 return GS_OK;
3431 }
3432 }
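/* Illustrative sketch: with WANT_VALUE set, the postfix expression

       b = a++;

   is expected to gimplify to roughly

       a.0 = a;
       a = a.0 + 1;
       b = a.0;

   the saved rvalue a.0 serving both as the operand of the increment
   and as the value of the whole expression. */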
3433
3434 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3435
3436 static void
3437 maybe_with_size_expr (tree *expr_p)
3438 {
3439 tree expr = *expr_p;
3440 tree type = TREE_TYPE (expr);
3441 tree size;
3442
3443 /* If we've already wrapped this or the type is error_mark_node, we can't do
3444 anything. */
3445 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3446 || type == error_mark_node)
3447 return;
3448
3449 /* If the size isn't known or is a constant, we have nothing to do. */
3450 size = TYPE_SIZE_UNIT (type);
3451 if (!size || poly_int_tree_p (size))
3452 return;
3453
3454 /* Otherwise, make a WITH_SIZE_EXPR. */
3455 size = unshare_expr (size);
3456 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3457 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3458 }
3459
3460 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
3461 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3462 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3463 gimplified to an SSA name. */
3464
3465 enum gimplify_status
3466 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3467 bool allow_ssa)
3468 {
3469 bool (*test) (tree);
3470 fallback_t fb;
3471
3472 /* In general, we allow lvalues for function arguments to avoid
3473 extra overhead of copying large aggregates out of even larger
3474 aggregates into temporaries only to copy the temporaries to
3475 the argument list. Make optimizers happy by pulling out to
3476 temporaries those types that fit in registers. */
3477 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3478 test = is_gimple_val, fb = fb_rvalue;
3479 else
3480 {
3481 test = is_gimple_lvalue, fb = fb_either;
3482 /* Also strip a TARGET_EXPR that would force an extra copy. */
3483 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3484 {
3485 tree init = TARGET_EXPR_INITIAL (*arg_p);
3486 if (init
3487 && !VOID_TYPE_P (TREE_TYPE (init)))
3488 *arg_p = init;
3489 }
3490 }
3491
3492 /* If this is a variable sized type, we must remember the size. */
3493 maybe_with_size_expr (arg_p);
3494
3495 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3496 /* Make sure arguments have the same location as the function call
3497 itself. */
3498 protected_set_expr_location (*arg_p, call_location);
3499
3500 /* There is a sequence point before a function call. Side effects in
3501 the argument list must occur before the actual call. So, when
3502 gimplifying arguments, force gimplify_expr to use an internal
3503 post queue which is then appended to the end of PRE_P. */
3504 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3505 }
3506
3507 /* Don't fold inside offloading or taskreg regions: it can break code by
3508 adding decl references that weren't in the source. We'll do it during
3509 omplower pass instead. */
3510
3511 static bool
3512 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3513 {
3514 struct gimplify_omp_ctx *ctx;
3515 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3516 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3517 return false;
3518 else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
3519 return false;
3520 /* Delay folding of builtins until the IL is in consistent state
3521 so the diagnostic machinery can do a better job. */
3522 if (gimple_call_builtin_p (gsi_stmt (*gsi)))
3523 return false;
3524 return fold_stmt (gsi);
3525 }
3526
3527 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3528 WANT_VALUE is true if the result of the call is desired. */
3529
3530 static enum gimplify_status
3531 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3532 {
3533 tree fndecl, parms, p, fnptrtype;
3534 enum gimplify_status ret;
3535 int i, nargs;
3536 gcall *call;
3537 bool builtin_va_start_p = false;
3538 location_t loc = EXPR_LOCATION (*expr_p);
3539
3540 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3541
3542 /* For reliable diagnostics during inlining, it is necessary that
3543 every call_expr be annotated with file and line. */
3544 if (! EXPR_HAS_LOCATION (*expr_p))
3545 SET_EXPR_LOCATION (*expr_p, input_location);
3546
3547 /* Gimplify internal functions created in the FEs. */
3548 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3549 {
3550 if (want_value)
3551 return GS_ALL_DONE;
3552
3553 nargs = call_expr_nargs (*expr_p);
3554 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3555 auto_vec<tree> vargs (nargs);
3556
3557 if (ifn == IFN_ASSUME)
3558 {
3559 if (simple_condition_p (CALL_EXPR_ARG (*expr_p, 0)))
3560 {
3561 /* If the [[assume (cond)]]; condition is simple
3562 enough and can be evaluated unconditionally
3563 without side-effects, expand it as
3564 if (!cond) __builtin_unreachable (); */
3565 tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
3566 *expr_p = build3 (COND_EXPR, void_type_node,
3567 CALL_EXPR_ARG (*expr_p, 0), void_node,
3568 build_call_expr_loc (EXPR_LOCATION (*expr_p),
3569 fndecl, 0));
3570 return GS_OK;
3571 }
3572 /* If not optimizing, ignore the assumptions. */
3573 if (!optimize)
3574 {
3575 *expr_p = NULL_TREE;
3576 return GS_ALL_DONE;
3577 }
3578 /* Temporarily, until gimple lowering, transform
3579 .ASSUME (cond);
3580 into:
3581 [[assume (guard)]]
3582 {
3583 guard = cond;
3584 }
3585 such that gimple lowering can outline the condition into
3586 a separate function easily. */
3587 tree guard = create_tmp_var (boolean_type_node);
3588 *expr_p = build2 (MODIFY_EXPR, void_type_node, guard,
3589 CALL_EXPR_ARG (*expr_p, 0));
3590 *expr_p = build3 (BIND_EXPR, void_type_node, NULL, *expr_p, NULL);
3591 push_gimplify_context ();
3592 gimple_seq body = NULL;
3593 gimple *g = gimplify_and_return_first (*expr_p, &body);
3594 pop_gimplify_context (g);
3595 g = gimple_build_assume (guard, body);
3596 gimple_set_location (g, loc);
3597 gimplify_seq_add_stmt (pre_p, g);
3598 *expr_p = NULL_TREE;
3599 return GS_ALL_DONE;
3600 }
3601
3602 for (i = 0; i < nargs; i++)
3603 {
3604 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3605 EXPR_LOCATION (*expr_p));
3606 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3607 }
3608
3609 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3610 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3611 gimplify_seq_add_stmt (pre_p, call);
3612 return GS_ALL_DONE;
3613 }
3614
3615 /* This may be a call to a builtin function.
3616
3617 Builtin function calls may be transformed into different
3618 (and more efficient) builtin function calls under certain
3619 circumstances. Unfortunately, gimplification can muck things
3620 up enough that the builtin expanders are not aware that certain
3621 transformations are still valid.
3622
3623 So we attempt transformation/gimplification of the call before
3624 we gimplify the CALL_EXPR. At this time we do not manage to
3625 transform all calls in the same manner as the expanders do, but
3626 we do transform most of them. */
3627 fndecl = get_callee_fndecl (*expr_p);
3628 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3629 switch (DECL_FUNCTION_CODE (fndecl))
3630 {
3631 CASE_BUILT_IN_ALLOCA:
3632 /* If the call has been built for a variable-sized object, then we
3633 want to restore the stack level when the enclosing BIND_EXPR is
3634 exited to reclaim the allocated space; otherwise, we precisely
3635 need to do the opposite and preserve the latest stack level. */
3636 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3637 gimplify_ctxp->save_stack = true;
3638 else
3639 gimplify_ctxp->keep_stack = true;
3640 break;
3641
3642 case BUILT_IN_VA_START:
3643 {
3644 builtin_va_start_p = true;
3645 if (call_expr_nargs (*expr_p) < 2)
3646 {
3647 error ("too few arguments to function %<va_start%>");
3648 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3649 return GS_OK;
3650 }
3651
3652 if (fold_builtin_next_arg (*expr_p, true))
3653 {
3654 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3655 return GS_OK;
3656 }
3657 break;
3658 }
3659
3660 case BUILT_IN_EH_RETURN:
3661 cfun->calls_eh_return = true;
3662 break;
3663
3664 case BUILT_IN_CLEAR_PADDING:
3665 if (call_expr_nargs (*expr_p) == 1)
3666 {
3667 /* Remember the original type of the argument in an internal
3668 dummy second argument, since in GIMPLE pointer conversions
3669 are useless. Also mark this call as not for automatic
3670 initialization in the internal dummy third argument. */
3671 p = CALL_EXPR_ARG (*expr_p, 0);
3672 *expr_p
3673 = build_call_expr_loc (EXPR_LOCATION (*expr_p), fndecl, 2, p,
3674 build_zero_cst (TREE_TYPE (p)));
3675 return GS_OK;
3676 }
3677 break;
3678
3679 default:
3680 ;
3681 }
3682 if (fndecl && fndecl_built_in_p (fndecl))
3683 {
3684 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3685 if (new_tree && new_tree != *expr_p)
3686 {
3687 /* There was a transformation of this call which computes the
3688 same value, but in a more efficient way. Return and try
3689 again. */
3690 *expr_p = new_tree;
3691 return GS_OK;
3692 }
3693 }
3694
3695 /* Remember the original function pointer type. */
3696 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3697
3698 if (flag_openmp
3699 && fndecl
3700 && cfun
3701 && (cfun->curr_properties & PROP_gimple_any) == 0)
3702 {
3703 tree variant = omp_resolve_declare_variant (fndecl);
3704 if (variant != fndecl)
3705 CALL_EXPR_FN (*expr_p) = build1 (ADDR_EXPR, fnptrtype, variant);
3706 }
3707
3708 /* There is a sequence point before the call, so any side effects in
3709 the calling expression must occur before the actual call. Force
3710 gimplify_expr to use an internal post queue. */
3711 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3712 is_gimple_call_addr, fb_rvalue);
3713
3714 nargs = call_expr_nargs (*expr_p);
3715
3716 /* Get argument types for verification. */
3717 fndecl = get_callee_fndecl (*expr_p);
3718 parms = NULL_TREE;
3719 if (fndecl)
3720 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3721 else
3722 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3723
3724 if (fndecl && DECL_ARGUMENTS (fndecl))
3725 p = DECL_ARGUMENTS (fndecl);
3726 else if (parms)
3727 p = parms;
3728 else
3729 p = NULL_TREE;
3730 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3731 ;
3732
3733 /* If the last argument is __builtin_va_arg_pack () and it is not
3734 passed as a named argument, decrease the number of CALL_EXPR
3735 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3736 if (!p
3737 && i < nargs
3738 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3739 {
3740 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3741 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3742
3743 if (last_arg_fndecl
3744 && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
3745 {
3746 tree call = *expr_p;
3747
3748 --nargs;
3749 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3750 CALL_EXPR_FN (call),
3751 nargs, CALL_EXPR_ARGP (call));
3752
3753 /* Copy all CALL_EXPR flags, location and block, except
3754 CALL_EXPR_VA_ARG_PACK flag. */
3755 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3756 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3757 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3758 = CALL_EXPR_RETURN_SLOT_OPT (call);
3759 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3760 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3761
3762 /* Set CALL_EXPR_VA_ARG_PACK. */
3763 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3764 }
3765 }
3766
3767 /* If the call returns twice, then after building the CFG the call
3768 argument computations will no longer dominate the call because
3769 we add an abnormal incoming edge to the call. So do not use SSA
3770 vars there. */
3771 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3772
3773 /* Gimplify the function arguments. */
3774 if (nargs > 0)
3775 {
3776 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3777 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3778 PUSH_ARGS_REVERSED ? i-- : i++)
3779 {
3780 enum gimplify_status t;
3781
3782 /* Avoid gimplifying the second argument to va_start, which needs to
3783 be the plain PARM_DECL. */
3784 if ((i != 1) || !builtin_va_start_p)
3785 {
3786 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3787 EXPR_LOCATION (*expr_p), ! returns_twice);
3788
3789 if (t == GS_ERROR)
3790 ret = GS_ERROR;
3791 }
3792 }
3793 }
3794
3795 /* Gimplify the static chain. */
3796 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3797 {
3798 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3799 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3800 else
3801 {
3802 enum gimplify_status t;
3803 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3804 EXPR_LOCATION (*expr_p), ! returns_twice);
3805 if (t == GS_ERROR)
3806 ret = GS_ERROR;
3807 }
3808 }
3809
3810 /* Verify the function result. */
3811 if (want_value && fndecl
3812 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3813 {
3814 error_at (loc, "using result of function returning %<void%>");
3815 ret = GS_ERROR;
3816 }
3817
3818 /* Try this again in case gimplification exposed something. */
3819 if (ret != GS_ERROR)
3820 {
3821 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3822
3823 if (new_tree && new_tree != *expr_p)
3824 {
3825 /* There was a transformation of this call which computes the
3826 same value, but in a more efficient way. Return and try
3827 again. */
3828 *expr_p = new_tree;
3829 return GS_OK;
3830 }
3831 }
3832 else
3833 {
3834 *expr_p = error_mark_node;
3835 return GS_ERROR;
3836 }
3837
3838 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3839 decl. This allows us to eliminate redundant or useless
3840 calls to "const" functions. */
3841 if (TREE_CODE (*expr_p) == CALL_EXPR)
3842 {
3843 int flags = call_expr_flags (*expr_p);
3844 if (flags & (ECF_CONST | ECF_PURE)
3845 /* An infinite loop is considered a side effect. */
3846 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3847 TREE_SIDE_EFFECTS (*expr_p) = 0;
3848 }
3849
3850 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3851 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3852 form and delegate the creation of a GIMPLE_CALL to
3853 gimplify_modify_expr. This is always possible because when
3854 WANT_VALUE is true, the caller wants the result of this call into
3855 a temporary, which means that we will emit an INIT_EXPR in
3856 internal_get_tmp_var which will then be handled by
3857 gimplify_modify_expr. */
3858 if (!want_value)
3859 {
3860 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3861 have to do is replicate it as a GIMPLE_CALL tuple. */
3862 gimple_stmt_iterator gsi;
3863 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
3864 notice_special_calls (call);
3865 gimplify_seq_add_stmt (pre_p, call);
3866 gsi = gsi_last (*pre_p);
3867 maybe_fold_stmt (&gsi);
3868 *expr_p = NULL_TREE;
3869 }
3870 else
3871 /* Remember the original function type. */
3872 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3873 CALL_EXPR_FN (*expr_p));
3874
3875 return ret;
3876 }
3877
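/* For illustration, in the body of a vararg wrapper such as

     f (x, __builtin_va_arg_pack ());

   the trailing __builtin_va_arg_pack () call is dropped above and the
   remaining call to f is marked with CALL_EXPR_VA_ARG_PACK, so the
   caller's surplus arguments can be substituted when the wrapper is
   inlined.  Here f and x are made-up names.  */
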
3878 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3879 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3880
3881 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3882 condition is true or false, respectively. If null, we should generate
3883 our own to skip over the evaluation of this specific expression.
3884
3885 LOCUS is the source location of the COND_EXPR.
3886
3887 This function is the tree equivalent of do_jump.
3888
3889 shortcut_cond_r should only be called by shortcut_cond_expr. */
3890
3891 static tree
3892 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3893 location_t locus)
3894 {
3895 tree local_label = NULL_TREE;
3896 tree t, expr = NULL;
3897
3898 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3899 retain the shortcut semantics. Just insert the gotos here;
3900 shortcut_cond_expr will append the real blocks later. */
3901 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3902 {
3903 location_t new_locus;
3904
3905 /* Turn if (a && b) into
3906
3907 if (a); else goto no;
3908 if (b) goto yes; else goto no;
3909 (no:) */
3910
3911 if (false_label_p == NULL)
3912 false_label_p = &local_label;
3913
3914 /* Keep the original source location on the first 'if'. */
3915 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
3916 append_to_statement_list (t, &expr);
3917
3918 /* Set the source location of the && on the second 'if'. */
3919 new_locus = rexpr_location (pred, locus);
3920 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3921 new_locus);
3922 append_to_statement_list (t, &expr);
3923 }
3924 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3925 {
3926 location_t new_locus;
3927
3928 /* Turn if (a || b) into
3929
3930 if (a) goto yes;
3931 if (b) goto yes; else goto no;
3932 (yes:) */
3933
3934 if (true_label_p == NULL)
3935 true_label_p = &local_label;
3936
3937 /* Keep the original source location on the first 'if'. */
3938 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
3939 append_to_statement_list (t, &expr);
3940
3941 /* Set the source location of the || on the second 'if'. */
3942 new_locus = rexpr_location (pred, locus);
3943 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3944 new_locus);
3945 append_to_statement_list (t, &expr);
3946 }
3947 else if (TREE_CODE (pred) == COND_EXPR
3948 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3949 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
3950 {
3951 location_t new_locus;
3952
3953 /* As long as we're messing with gotos, turn if (a ? b : c) into
3954 if (a)
3955 if (b) goto yes; else goto no;
3956 else
3957 if (c) goto yes; else goto no;
3958
3959 Don't do this if one of the arms has void type, which can happen
3960 in C++ when the arm is throw. */
3961
3962 /* Keep the original source location on the first 'if'. Set the source
3963 location of the ? on the second 'if'. */
3964 new_locus = rexpr_location (pred, locus);
3965 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3966 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
3967 false_label_p, locus),
3968 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
3969 false_label_p, new_locus));
3970 }
3971 else
3972 {
3973 expr = build3 (COND_EXPR, void_type_node, pred,
3974 build_and_jump (true_label_p),
3975 build_and_jump (false_label_p));
3976 SET_EXPR_LOCATION (expr, locus);
3977 }
3978
3979 if (local_label)
3980 {
3981 t = build1 (LABEL_EXPR, void_type_node, local_label);
3982 append_to_statement_list (t, &expr);
3983 }
3984
3985 return expr;
3986 }
3987
3988 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
3989 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
3990 statement, if it is the last one. Otherwise, return NULL. */
3991
3992 static tree
3993 find_goto (tree expr)
3994 {
3995 if (!expr)
3996 return NULL_TREE;
3997
3998 if (TREE_CODE (expr) == GOTO_EXPR)
3999 return expr;
4000
4001 if (TREE_CODE (expr) != STATEMENT_LIST)
4002 return NULL_TREE;
4003
4004 tree_stmt_iterator i = tsi_start (expr);
4005
4006 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
4007 tsi_next (&i);
4008
4009 if (!tsi_one_before_end_p (i))
4010 return NULL_TREE;
4011
4012 return find_goto (tsi_stmt (i));
4013 }
4014
4015 /* Same as find_goto, except that it returns NULL if the destination
4016 is not a LABEL_DECL. */
4017
4018 static inline tree
4019 find_goto_label (tree expr)
4020 {
4021 tree dest = find_goto (expr);
4022 if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
4023 return dest;
4024 return NULL_TREE;
4025 }
4026
4027 /* Given a conditional expression EXPR with short-circuit boolean
4028 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
4029 predicate apart into the equivalent sequence of conditionals. */
4030
4031 static tree
4032 shortcut_cond_expr (tree expr)
4033 {
4034 tree pred = TREE_OPERAND (expr, 0);
4035 tree then_ = TREE_OPERAND (expr, 1);
4036 tree else_ = TREE_OPERAND (expr, 2);
4037 tree true_label, false_label, end_label, t;
4038 tree *true_label_p;
4039 tree *false_label_p;
4040 bool emit_end, emit_false, jump_over_else;
4041 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
4042 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
4043
4044 /* First do simple transformations. */
4045 if (!else_se)
4046 {
4047 /* If there is no 'else', turn
4048 if (a && b) then c
4049 into
4050 if (a) if (b) then c. */
4051 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
4052 {
4053 /* Keep the original source location on the first 'if'. */
4054 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
4055 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
4056 /* Set the source location of the && on the second 'if'. */
4057 if (rexpr_has_location (pred))
4058 SET_EXPR_LOCATION (expr, rexpr_location (pred));
4059 then_ = shortcut_cond_expr (expr);
4060 then_se = then_ && TREE_SIDE_EFFECTS (then_);
4061 pred = TREE_OPERAND (pred, 0);
4062 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
4063 SET_EXPR_LOCATION (expr, locus);
4064 }
4065 }
4066
4067 if (!then_se)
4068 {
4069 /* If there is no 'then', turn
4070 if (a || b); else d
4071 into
4072 if (a); else if (b); else d. */
4073 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
4074 {
4075 /* Keep the original source location on the first 'if'. */
4076 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
4077 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
4078 /* Set the source location of the || on the second 'if'. */
4079 if (rexpr_has_location (pred))
4080 SET_EXPR_LOCATION (expr, rexpr_location (pred));
4081 else_ = shortcut_cond_expr (expr);
4082 else_se = else_ && TREE_SIDE_EFFECTS (else_);
4083 pred = TREE_OPERAND (pred, 0);
4084 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
4085 SET_EXPR_LOCATION (expr, locus);
4086 }
4087 }
4088
4089 /* If we're done, great. */
4090 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
4091 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
4092 return expr;
4093
4094 /* Otherwise we need to mess with gotos. Change
4095 if (a) c; else d;
4096 to
4097 if (a); else goto no;
4098 c; goto end;
4099 no: d; end:
4100 and recursively gimplify the condition. */
4101
4102 true_label = false_label = end_label = NULL_TREE;
4103
4104 /* If our arms just jump somewhere, hijack those labels so we don't
4105 generate jumps to jumps. */
4106
4107 if (tree then_goto = find_goto_label (then_))
4108 {
4109 true_label = GOTO_DESTINATION (then_goto);
4110 then_ = NULL;
4111 then_se = false;
4112 }
4113
4114 if (tree else_goto = find_goto_label (else_))
4115 {
4116 false_label = GOTO_DESTINATION (else_goto);
4117 else_ = NULL;
4118 else_se = false;
4119 }
4120
4121 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
4122 if (true_label)
4123 true_label_p = &true_label;
4124 else
4125 true_label_p = NULL;
4126
4127 /* The 'else' branch also needs a label if it contains interesting code. */
4128 if (false_label || else_se)
4129 false_label_p = &false_label;
4130 else
4131 false_label_p = NULL;
4132
4133 /* If there was nothing else in our arms, just forward the label(s). */
4134 if (!then_se && !else_se)
4135 return shortcut_cond_r (pred, true_label_p, false_label_p,
4136 EXPR_LOC_OR_LOC (expr, input_location));
4137
4138 /* If our last subexpression already has a terminal label, reuse it. */
4139 if (else_se)
4140 t = expr_last (else_);
4141 else if (then_se)
4142 t = expr_last (then_);
4143 else
4144 t = NULL;
4145 if (t && TREE_CODE (t) == LABEL_EXPR)
4146 end_label = LABEL_EXPR_LABEL (t);
4147
4148 /* If we don't care about jumping to the 'else' branch, jump to the end
4149 if the condition is false. */
4150 if (!false_label_p)
4151 false_label_p = &end_label;
4152
4153 /* We only want to emit these labels if we aren't hijacking them. */
4154 emit_end = (end_label == NULL_TREE);
4155 emit_false = (false_label == NULL_TREE);
4156
4157 /* We only emit the jump over the else clause if we have to--if the
4158 then clause may fall through. Otherwise we can wind up with a
4159 useless jump and a useless label at the end of gimplified code,
4160 which will cause us to think that this conditional as a whole
4161 falls through even if it doesn't. If we then inline a function
4162 which ends with such a condition, that can cause us to issue an
4163 inappropriate warning about control reaching the end of a
4164 non-void function. */
4165 jump_over_else = block_may_fallthru (then_);
4166
4167 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
4168 EXPR_LOC_OR_LOC (expr, input_location));
4169
4170 expr = NULL;
4171 append_to_statement_list (pred, &expr);
4172
4173 append_to_statement_list (then_, &expr);
4174 if (else_se)
4175 {
4176 if (jump_over_else)
4177 {
4178 tree last = expr_last (expr);
4179 t = build_and_jump (&end_label);
4180 if (rexpr_has_location (last))
4181 SET_EXPR_LOCATION (t, rexpr_location (last));
4182 append_to_statement_list (t, &expr);
4183 }
4184 if (emit_false)
4185 {
4186 t = build1 (LABEL_EXPR, void_type_node, false_label);
4187 append_to_statement_list (t, &expr);
4188 }
4189 append_to_statement_list (else_, &expr);
4190 }
4191 if (emit_end && end_label)
4192 {
4193 t = build1 (LABEL_EXPR, void_type_node, end_label);
4194 append_to_statement_list (t, &expr);
4195 }
4196
4197 return expr;
4198 }
4199
4200 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
4201
4202 tree
4203 gimple_boolify (tree expr)
4204 {
4205 tree type = TREE_TYPE (expr);
4206 location_t loc = EXPR_LOCATION (expr);
4207
4208 if (TREE_CODE (expr) == NE_EXPR
4209 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
4210 && integer_zerop (TREE_OPERAND (expr, 1)))
4211 {
4212 tree call = TREE_OPERAND (expr, 0);
4213 tree fn = get_callee_fndecl (call);
4214
4215 /* For __builtin_expect ((long) (x), y) recurse into x as well
4216 if x is truth_value_p. */
4217 if (fn
4218 && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
4219 && call_expr_nargs (call) == 2)
4220 {
4221 tree arg = CALL_EXPR_ARG (call, 0);
4222 if (arg)
4223 {
4224 if (TREE_CODE (arg) == NOP_EXPR
4225 && TREE_TYPE (arg) == TREE_TYPE (call))
4226 arg = TREE_OPERAND (arg, 0);
4227 if (truth_value_p (TREE_CODE (arg)))
4228 {
4229 arg = gimple_boolify (arg);
4230 CALL_EXPR_ARG (call, 0)
4231 = fold_convert_loc (loc, TREE_TYPE (call), arg);
4232 }
4233 }
4234 }
4235 }
4236
4237 switch (TREE_CODE (expr))
4238 {
4239 case TRUTH_AND_EXPR:
4240 case TRUTH_OR_EXPR:
4241 case TRUTH_XOR_EXPR:
4242 case TRUTH_ANDIF_EXPR:
4243 case TRUTH_ORIF_EXPR:
4244 /* Also boolify the arguments of truth exprs. */
4245 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
4246 /* FALLTHRU */
4247
4248 case TRUTH_NOT_EXPR:
4249 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4250
4251 /* These expressions always produce boolean results. */
4252 if (TREE_CODE (type) != BOOLEAN_TYPE)
4253 TREE_TYPE (expr) = boolean_type_node;
4254 return expr;
4255
4256 case ANNOTATE_EXPR:
4257 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
4258 {
4259 case annot_expr_ivdep_kind:
4260 case annot_expr_unroll_kind:
4261 case annot_expr_no_vector_kind:
4262 case annot_expr_vector_kind:
4263 case annot_expr_parallel_kind:
4264 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4265 if (TREE_CODE (type) != BOOLEAN_TYPE)
4266 TREE_TYPE (expr) = boolean_type_node;
4267 return expr;
4268 default:
4269 gcc_unreachable ();
4270 }
4271
4272 default:
4273 if (COMPARISON_CLASS_P (expr))
4274 {
4275 /* These expressions always produce boolean results. */
4276 if (TREE_CODE (type) != BOOLEAN_TYPE)
4277 TREE_TYPE (expr) = boolean_type_node;
4278 return expr;
4279 }
4280 /* Other expressions that get here must have boolean values, but
4281 might need to be converted to the appropriate mode. */
4282 if (TREE_CODE (type) == BOOLEAN_TYPE)
4283 return expr;
4284 return fold_convert_loc (loc, boolean_type_node, expr);
4285 }
4286 }
4287
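/* For illustration: for an int-valued i used as a condition,
   gimple_boolify yields roughly (bool) i; for a && b both operands are
   boolified recursively and the TRUTH_ANDIF_EXPR itself is retyped to
   boolean_type_node; a comparison such as a < b only has its type
   adjusted, since it already produces a boolean result.  */
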
4288 /* Given a conditional expression *EXPR_P without side effects, gimplify
4289 its operands. New statements are inserted to PRE_P. */
4290
4291 static enum gimplify_status
4292 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
4293 {
4294 tree expr = *expr_p, cond;
4295 enum gimplify_status ret, tret;
4296 enum tree_code code;
4297
4298 cond = gimple_boolify (COND_EXPR_COND (expr));
4299
4300 /* We need to handle && and || specially, as their gimplification
4301 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
4302 code = TREE_CODE (cond);
4303 if (code == TRUTH_ANDIF_EXPR)
4304 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
4305 else if (code == TRUTH_ORIF_EXPR)
4306 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
4307 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_val, fb_rvalue);
4308 COND_EXPR_COND (*expr_p) = cond;
4309
4310 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
4311 is_gimple_val, fb_rvalue);
4312 ret = MIN (ret, tret);
4313 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
4314 is_gimple_val, fb_rvalue);
4315
4316 return MIN (ret, tret);
4317 }
4318
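/* For illustration, when neither arm has side effects or can trap,

     tmp = b ? x + 1 : y;

   may legitimately stay a COND_EXPR on the rhs; the operands are only
   reduced to is_gimple_val form, e.g.

     t1 = x + 1;
     tmp = b ? t1 : y;

   with t1 a made-up temporary name.  */
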
4319 /* Return true if evaluating EXPR could trap.
4320 EXPR is GENERIC, while tree_could_trap_p can be called
4321 only on GIMPLE. */
4322
4323 bool
4324 generic_expr_could_trap_p (tree expr)
4325 {
4326 unsigned i, n;
4327
4328 if (!expr || is_gimple_val (expr))
4329 return false;
4330
4331 if (!EXPR_P (expr) || tree_could_trap_p (expr))
4332 return true;
4333
4334 n = TREE_OPERAND_LENGTH (expr);
4335 for (i = 0; i < n; i++)
4336 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
4337 return true;
4338
4339 return false;
4340 }
4341
4342 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
4343 into
4344
4345 if (p) if (p)
4346 t1 = a; a;
4347 else or else
4348 t1 = b; b;
4349 t1;
4350
4351 The second form is used when *EXPR_P is of type void.
4352
4353 PRE_P points to the list where side effects that must happen before
4354 *EXPR_P should be stored. */
4355
4356 static enum gimplify_status
4357 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
4358 {
4359 tree expr = *expr_p;
4360 tree type = TREE_TYPE (expr);
4361 location_t loc = EXPR_LOCATION (expr);
4362 tree tmp, arm1, arm2;
4363 enum gimplify_status ret;
4364 tree label_true, label_false, label_cont;
4365 bool have_then_clause_p, have_else_clause_p;
4366 gcond *cond_stmt;
4367 enum tree_code pred_code;
4368 gimple_seq seq = NULL;
4369
4370 /* If this COND_EXPR has a value, copy the values into a temporary within
4371 the arms. */
4372 if (!VOID_TYPE_P (type))
4373 {
4374 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
4375 tree result;
4376
4377 /* If either an rvalue is ok or we do not require an lvalue, create the
4378 temporary. But we cannot do that if the type is addressable. */
4379 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
4380 && !TREE_ADDRESSABLE (type))
4381 {
4382 if (gimplify_ctxp->allow_rhs_cond_expr
4383 /* If either branch has side effects or could trap, it can't be
4384 evaluated unconditionally. */
4385 && !TREE_SIDE_EFFECTS (then_)
4386 && !generic_expr_could_trap_p (then_)
4387 && !TREE_SIDE_EFFECTS (else_)
4388 && !generic_expr_could_trap_p (else_))
4389 return gimplify_pure_cond_expr (expr_p, pre_p);
4390
4391 tmp = create_tmp_var (type, "iftmp");
4392 result = tmp;
4393 }
4394
4395 /* Otherwise, only create and copy references to the values. */
4396 else
4397 {
4398 type = build_pointer_type (type);
4399
4400 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4401 then_ = build_fold_addr_expr_loc (loc, then_);
4402
4403 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4404 else_ = build_fold_addr_expr_loc (loc, else_);
4405
4406 expr
4407 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
4408
4409 tmp = create_tmp_var (type, "iftmp");
4410 result = build_simple_mem_ref_loc (loc, tmp);
4411 }
4412
4413 /* Build the new then clause, `tmp = then_;'. But don't build the
4414 assignment if the value is void; in C++ that happens when the arm is a throw. */
4415 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4416 TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);
4417
4418 /* Similarly, build the new else clause, `tmp = else_;'. */
4419 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4420 TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);
4421
4422 TREE_TYPE (expr) = void_type_node;
4423 recalculate_side_effects (expr);
4424
4425 /* Move the COND_EXPR to the prequeue. */
4426 gimplify_stmt (&expr, pre_p);
4427
4428 *expr_p = result;
4429 return GS_ALL_DONE;
4430 }
4431
4432 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4433 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
4434 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
4435 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
4436
4437 /* Make sure the condition has BOOLEAN_TYPE. */
4438 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4439
4440 /* Break apart && and || conditions. */
4441 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
4442 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
4443 {
4444 expr = shortcut_cond_expr (expr);
4445
4446 if (expr != *expr_p)
4447 {
4448 *expr_p = expr;
4449
4450 /* We can't rely on gimplify_expr to re-gimplify the expanded
4451 form properly, as cleanups might cause the target labels to be
4452 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4453 set up a conditional context. */
4454 gimple_push_condition ();
4455 gimplify_stmt (expr_p, &seq);
4456 gimple_pop_condition (pre_p);
4457 gimple_seq_add_seq (pre_p, seq);
4458
4459 return GS_ALL_DONE;
4460 }
4461 }
4462
4463 /* Now do the normal gimplification. */
4464
4465 /* Gimplify condition. */
4466 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
4467 is_gimple_condexpr_for_cond, fb_rvalue);
4468 if (ret == GS_ERROR)
4469 return GS_ERROR;
4470 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
4471
4472 gimple_push_condition ();
4473
4474 have_then_clause_p = have_else_clause_p = false;
4475 label_true = find_goto_label (TREE_OPERAND (expr, 1));
4476 if (label_true
4477 && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
4478 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4479 have different locations, otherwise we end up with incorrect
4480 location information on the branches. */
4481 && (optimize
4482 || !EXPR_HAS_LOCATION (expr)
4483 || !rexpr_has_location (label_true)
4484 || EXPR_LOCATION (expr) == rexpr_location (label_true)))
4485 {
4486 have_then_clause_p = true;
4487 label_true = GOTO_DESTINATION (label_true);
4488 }
4489 else
4490 label_true = create_artificial_label (UNKNOWN_LOCATION);
4491 label_false = find_goto_label (TREE_OPERAND (expr, 2));
4492 if (label_false
4493 && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
4494 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4495 have different locations, otherwise we end up with incorrect
4496 location information on the branches. */
4497 && (optimize
4498 || !EXPR_HAS_LOCATION (expr)
4499 || !rexpr_has_location (label_false)
4500 || EXPR_LOCATION (expr) == rexpr_location (label_false)))
4501 {
4502 have_else_clause_p = true;
4503 label_false = GOTO_DESTINATION (label_false);
4504 }
4505 else
4506 label_false = create_artificial_label (UNKNOWN_LOCATION);
4507
4508 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
4509 &arm2);
4510 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
4511 label_false);
4512 gimple_set_location (cond_stmt, EXPR_LOCATION (expr));
4513 copy_warning (cond_stmt, COND_EXPR_COND (expr));
4514 gimplify_seq_add_stmt (&seq, cond_stmt);
4515 gimple_stmt_iterator gsi = gsi_last (seq);
4516 maybe_fold_stmt (&gsi);
4517
4518 label_cont = NULL_TREE;
4519 if (!have_then_clause_p)
4520 {
4521 /* For if (...) {} else { code; } put label_true after
4522 the else block. */
4523 if (TREE_OPERAND (expr, 1) == NULL_TREE
4524 && !have_else_clause_p
4525 && TREE_OPERAND (expr, 2) != NULL_TREE)
4526 {
4527 /* For if (0) {} else { code; } tell -Wimplicit-fallthrough
4528 handling that label_cont == label_true can only be reached
4529 through fallthrough from { code; }. */
4530 if (integer_zerop (COND_EXPR_COND (expr)))
4531 UNUSED_LABEL_P (label_true) = 1;
4532 label_cont = label_true;
4533 }
4534 else
4535 {
4536 bool then_side_effects
4537 = (TREE_OPERAND (expr, 1)
4538 && TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 1)));
4539 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4540 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4541 /* For if (...) { code; } else {} or
4542 if (...) { code; } else goto label; or
4543 if (...) { code; return; } else { ... }
4544 label_cont isn't needed. */
4545 if (!have_else_clause_p
4546 && TREE_OPERAND (expr, 2) != NULL_TREE
4547 && gimple_seq_may_fallthru (seq))
4548 {
4549 gimple *g;
4550 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4551
4552 /* For if (0) { non-side-effect-code } else { code }
4553 tell -Wimplicit-fallthrough handling that label_cont can
4554 only be reached through fallthrough from { code }. */
4555 if (integer_zerop (COND_EXPR_COND (expr)))
4556 {
4557 UNUSED_LABEL_P (label_true) = 1;
4558 if (!then_side_effects)
4559 UNUSED_LABEL_P (label_cont) = 1;
4560 }
4561
4562 g = gimple_build_goto (label_cont);
4563
4564 /* GIMPLE_COND's are very low level; they have embedded
4565 gotos. This particular embedded goto should not be marked
4566 with the location of the original COND_EXPR, as it would
4567 correspond to the COND_EXPR's condition, not the ELSE or the
4568 THEN arms. To avoid marking it with the wrong location, flag
4569 it as "no location". */
4570 gimple_set_do_not_emit_location (g);
4571
4572 gimplify_seq_add_stmt (&seq, g);
4573 }
4574 }
4575 }
4576 if (!have_else_clause_p)
4577 {
4578 /* For if (1) { code } or if (1) { code } else { non-side-effect-code }
4579 tell -Wimplicit-fallthrough handling that label_false can only be
4580 reached through fallthrough from { code }. */
4581 if (integer_nonzerop (COND_EXPR_COND (expr))
4582 && (TREE_OPERAND (expr, 2) == NULL_TREE
4583 || !TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 2))))
4584 UNUSED_LABEL_P (label_false) = 1;
4585 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4586 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4587 }
4588 if (label_cont)
4589 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4590
4591 gimple_pop_condition (pre_p);
4592 gimple_seq_add_seq (pre_p, seq);
4593
4594 if (ret == GS_ERROR)
4595 ; /* Do nothing. */
4596 else if (have_then_clause_p || have_else_clause_p)
4597 ret = GS_ALL_DONE;
4598 else
4599 {
4600 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4601 expr = TREE_OPERAND (expr, 0);
4602 gimplify_stmt (&expr, pre_p);
4603 }
4604
4605 *expr_p = NULL;
4606 return ret;
4607 }
4608
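/* For illustration, the statement form

     if (p) f (); else g ();

   is lowered above to roughly

     if (p) goto LT; else goto LF;
     LT: f (); goto LC;
     LF: g ();
     LC:

   where LT, LF and LC stand for the artificial true, false and
   continue labels created during gimplification.  */
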
4609 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4610 to be marked addressable.
4611
4612 We cannot rely on such an expression being directly markable if a temporary
4613 has been created by the gimplification. In this case, we create another
4614 temporary and initialize it with a copy, which will become a store after we
4615 mark it addressable. This can happen if the front-end passed us something
4616 that it could not mark addressable yet, like a Fortran pass-by-reference
4617 parameter (int) floatvar. */
4618
4619 static void
4620 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4621 {
4622 while (handled_component_p (*expr_p))
4623 expr_p = &TREE_OPERAND (*expr_p, 0);
4624
4625 /* Do not allow an SSA name as the temporary. */
4626 if (is_gimple_reg (*expr_p))
4627 *expr_p = internal_get_tmp_var (*expr_p, seq_p, NULL, false, false, true);
4628 }
4629
4630 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4631 a call to __builtin_memcpy. */
4632
4633 static enum gimplify_status
4634 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4635 gimple_seq *seq_p)
4636 {
4637 tree t, to, to_ptr, from, from_ptr;
4638 gcall *gs;
4639 location_t loc = EXPR_LOCATION (*expr_p);
4640
4641 to = TREE_OPERAND (*expr_p, 0);
4642 from = TREE_OPERAND (*expr_p, 1);
4643
4644 /* Mark the RHS addressable. Beware that it may not be possible to do so
4645 directly if a temporary has been created by the gimplification. */
4646 prepare_gimple_addressable (&from, seq_p);
4647
4648 mark_addressable (from);
4649 from_ptr = build_fold_addr_expr_loc (loc, from);
4650 gimplify_arg (&from_ptr, seq_p, loc);
4651
4652 mark_addressable (to);
4653 to_ptr = build_fold_addr_expr_loc (loc, to);
4654 gimplify_arg (&to_ptr, seq_p, loc);
4655
4656 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4657
4658 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4659 gimple_call_set_alloca_for_var (gs, true);
4660
4661 if (want_value)
4662 {
4663 /* tmp = memcpy() */
4664 t = create_tmp_var (TREE_TYPE (to_ptr));
4665 gimple_call_set_lhs (gs, t);
4666 gimplify_seq_add_stmt (seq_p, gs);
4667
4668 *expr_p = build_simple_mem_ref (t);
4669 return GS_ALL_DONE;
4670 }
4671
4672 gimplify_seq_add_stmt (seq_p, gs);
4673 *expr_p = NULL;
4674 return GS_ALL_DONE;
4675 }
4676
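/* For illustration, a MODIFY_EXPR between two objects of variable-sized
   type is emitted above as roughly

     __builtin_memcpy (&to, &from, SIZE);

   where SIZE is the size recorded by maybe_with_size_expr.  When the
   value is wanted, the call's result (a pointer to the destination) is
   stored in a temporary and the whole expression becomes a dereference
   of that temporary.  */
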
4677 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4678 a call to __builtin_memset. In this case we know that the RHS is
4679 a CONSTRUCTOR with an empty element list. */
4680
4681 static enum gimplify_status
4682 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4683 gimple_seq *seq_p)
4684 {
4685 tree t, from, to, to_ptr;
4686 gcall *gs;
4687 location_t loc = EXPR_LOCATION (*expr_p);
4688
4689 /* Assert our assumptions, to abort instead of producing wrong code
4690 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4691 not be immediately exposed. */
4692 from = TREE_OPERAND (*expr_p, 1);
4693 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4694 from = TREE_OPERAND (from, 0);
4695
4696 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4697 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4698
4699 /* Now proceed. */
4700 to = TREE_OPERAND (*expr_p, 0);
4701
4702 to_ptr = build_fold_addr_expr_loc (loc, to);
4703 gimplify_arg (&to_ptr, seq_p, loc);
4704 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4705
4706 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4707
4708 if (want_value)
4709 {
4710 /* tmp = memset() */
4711 t = create_tmp_var (TREE_TYPE (to_ptr));
4712 gimple_call_set_lhs (gs, t);
4713 gimplify_seq_add_stmt (seq_p, gs);
4714
4715 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4716 return GS_ALL_DONE;
4717 }
4718
4719 gimplify_seq_add_stmt (seq_p, gs);
4720 *expr_p = NULL;
4721 return GS_ALL_DONE;
4722 }
4723
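/* For illustration, zero-initialization from an empty CONSTRUCTOR, as in

     s = (struct S) {};

   is emitted above as roughly

     __builtin_memset (&s, 0, sizeof (struct S));

   where s and struct S are made-up names.  */
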
4724 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4725 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4726 assignment. Return non-null if we detect a potential overlap. */
4727
4728 struct gimplify_init_ctor_preeval_data
4729 {
4730 /* The base decl of the lhs object. May be NULL, in which case we
4731 have to assume the lhs is indirect. */
4732 tree lhs_base_decl;
4733
4734 /* The alias set of the lhs object. */
4735 alias_set_type lhs_alias_set;
4736 };
4737
4738 static tree
4739 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4740 {
4741 struct gimplify_init_ctor_preeval_data *data
4742 = (struct gimplify_init_ctor_preeval_data *) xdata;
4743 tree t = *tp;
4744
4745 /* If we find the base object, obviously we have overlap. */
4746 if (data->lhs_base_decl == t)
4747 return t;
4748
4749 /* If the constructor component is indirect, determine if we have a
4750 potential overlap with the lhs. The only bits of information we
4751 have to go on at this point are addressability and alias sets. */
4752 if ((INDIRECT_REF_P (t)
4753 || TREE_CODE (t) == MEM_REF)
4754 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4755 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4756 return t;
4757
4758 /* If the constructor component is a call, determine if it can hide a
4759 potential overlap with the lhs through an INDIRECT_REF like above.
4760 ??? Ugh - this is completely broken. In fact this whole analysis
4761 doesn't look conservative. */
4762 if (TREE_CODE (t) == CALL_EXPR)
4763 {
4764 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4765
4766 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4767 if (POINTER_TYPE_P (TREE_VALUE (type))
4768 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4769 && alias_sets_conflict_p (data->lhs_alias_set,
4770 get_alias_set
4771 (TREE_TYPE (TREE_VALUE (type)))))
4772 return t;
4773 }
4774
4775 if (IS_TYPE_OR_DECL_P (t))
4776 *walk_subtrees = 0;
4777 return NULL;
4778 }
4779
4780 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4781 force values that overlap with the lhs (as described by *DATA)
4782 into temporaries. */
4783
4784 static void
4785 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4786 struct gimplify_init_ctor_preeval_data *data)
4787 {
4788 enum gimplify_status one;
4789
4790 /* If the value is constant, then there's nothing to pre-evaluate. */
4791 if (TREE_CONSTANT (*expr_p))
4792 {
4793 /* Ensure it does not have side effects, it might contain a reference to
4794 the object we're initializing. */
4795 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4796 return;
4797 }
4798
4799 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4800 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4801 return;
4802
4803 /* Recurse for nested constructors. */
4804 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4805 {
4806 unsigned HOST_WIDE_INT ix;
4807 constructor_elt *ce;
4808 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4809
4810 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4811 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
4812
4813 return;
4814 }
4815
4816 /* If this is a variable sized type, we must remember the size. */
4817 maybe_with_size_expr (expr_p);
4818
4819 /* Gimplify the constructor element to something appropriate for the rhs
4820 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4821 the gimplifier will consider this a store to memory. Doing this
4822 gimplification now means that we won't have to deal with complicated
4823 language-specific trees, nor trees like SAVE_EXPR that can induce
4824 exponential search behavior. */
4825 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4826 if (one == GS_ERROR)
4827 {
4828 *expr_p = NULL;
4829 return;
4830 }
4831
4832 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4833 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4834 always be true for all scalars, since is_gimple_mem_rhs insists on a
4835 temporary variable for them. */
4836 if (DECL_P (*expr_p))
4837 return;
4838
4839 /* If this is of variable size, we have no choice but to assume it doesn't
4840 overlap since we can't make a temporary for it. */
4841 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
4842 return;
4843
4844 /* Otherwise, we must search for overlap ... */
4845 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4846 return;
4847
4848 /* ... and if found, force the value into a temporary. */
4849 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
4850 }
4851
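/* For illustration, with aggregate members u and v (made-up names),

     a = (struct S) { .u = a.v };

   has the element a.v overlapping the lhs object a, so it is forced
   into a formal temporary before the element-wise stores:

     t1 = a.v;
     a.u = t1;

   Scalar elements need no such walk, since is_gimple_mem_rhs already
   pulls them into temporaries.  */
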
4852 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4853 a RANGE_EXPR in a CONSTRUCTOR for an array.
4854
4855 var = lower;
4856 loop_entry:
4857 object[var] = value;
4858 if (var == upper)
4859 goto loop_exit;
4860 var = var + 1;
4861 goto loop_entry;
4862 loop_exit:
4863
4864 We increment var _after_ the loop exit check because we might otherwise
4865 fail if upper == TYPE_MAX_VALUE (type for upper).
4866
4867 Note that we never have to deal with SAVE_EXPRs here, because this has
4868 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4869
4870 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
4871 gimple_seq *, bool);
4872
4873 static void
4874 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4875 tree value, tree array_elt_type,
4876 gimple_seq *pre_p, bool cleared)
4877 {
4878 tree loop_entry_label, loop_exit_label, fall_thru_label;
4879 tree var, var_type, cref, tmp;
4880
4881 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4882 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4883 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
4884
4885 /* Create and initialize the index variable. */
4886 var_type = TREE_TYPE (upper);
4887 var = create_tmp_var (var_type);
4888 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
4889
4890 /* Add the loop entry label. */
4891 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
4892
4893 /* Build the reference. */
4894 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4895 var, NULL_TREE, NULL_TREE);
4896
4897 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4898 the store. Otherwise just assign value to the reference. */
4899
4900 if (TREE_CODE (value) == CONSTRUCTOR)
4901 /* NB we might have to call ourselves recursively through
4902 gimplify_init_ctor_eval if the value is a constructor. */
4903 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4904 pre_p, cleared);
4905 else
4906 {
4907 if (gimplify_expr (&value, pre_p, NULL, is_gimple_val, fb_rvalue)
4908 != GS_ERROR)
4909 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
4910 }
4911
4912 /* We exit the loop when the index var is equal to the upper bound. */
4913 gimplify_seq_add_stmt (pre_p,
4914 gimple_build_cond (EQ_EXPR, var, upper,
4915 loop_exit_label, fall_thru_label));
4916
4917 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
4918
4919 /* Otherwise, increment the index var... */
4920 tmp = build2 (PLUS_EXPR, var_type, var,
4921 fold_convert (var_type, integer_one_node));
4922 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
4923
4924 /* ...and jump back to the loop entry. */
4925 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
4926
4927 /* Add the loop exit label. */
4928 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
4929 }
4930
4931 /* A subroutine of gimplify_init_constructor. Generate individual
4932 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4933 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4934 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4935 zeroed first. */
4936
4937 static void
4938 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
4939 gimple_seq *pre_p, bool cleared)
4940 {
4941 tree array_elt_type = NULL;
4942 unsigned HOST_WIDE_INT ix;
4943 tree purpose, value;
4944
4945 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
4946 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
4947
4948 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
4949 {
4950 tree cref;
4951
4952 /* NULL values are created above for gimplification errors. */
4953 if (value == NULL)
4954 continue;
4955
4956 if (cleared && initializer_zerop (value))
4957 continue;
4958
4959 /* ??? Here's to hoping the front end fills in all of the indices,
4960 so we don't have to figure out what's missing ourselves. */
4961 gcc_assert (purpose);
4962
4963 /* Skip zero-sized fields, unless value has side-effects. This can
4964 happen with calls to functions returning an empty type, which
4965 we shouldn't discard. As a number of downstream passes don't
4966 expect sets of empty type fields, we rely on the gimplification of
4967 the MODIFY_EXPR we make below to drop the assignment statement. */
4968 if (!TREE_SIDE_EFFECTS (value)
4969 && TREE_CODE (purpose) == FIELD_DECL
4970 && is_empty_type (TREE_TYPE (purpose)))
4971 continue;
4972
4973 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4974 whole range. */
4975 if (TREE_CODE (purpose) == RANGE_EXPR)
4976 {
4977 tree lower = TREE_OPERAND (purpose, 0);
4978 tree upper = TREE_OPERAND (purpose, 1);
4979
4980 /* If the lower bound is equal to upper, just treat it as if
4981 upper was the index. */
4982 if (simple_cst_equal (lower, upper))
4983 purpose = upper;
4984 else
4985 {
4986 gimplify_init_ctor_eval_range (object, lower, upper, value,
4987 array_elt_type, pre_p, cleared);
4988 continue;
4989 }
4990 }
4991
4992 if (array_elt_type)
4993 {
4994 /* Do not use bitsizetype for ARRAY_REF indices. */
4995 if (TYPE_DOMAIN (TREE_TYPE (object)))
4996 purpose
4997 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
4998 purpose);
4999 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
5000 purpose, NULL_TREE, NULL_TREE);
5001 }
5002 else
5003 {
5004 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
5005 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
5006 unshare_expr (object), purpose, NULL_TREE);
5007 }
5008
5009 if (TREE_CODE (value) == CONSTRUCTOR
5010 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
5011 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
5012 pre_p, cleared);
5013 else
5014 {
5015 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
5016 gimplify_and_add (init, pre_p);
5017 ggc_free (init);
5018 }
5019 }
5020 }
5021
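/* For illustration, with CLEARED false,

     int a[3] = { 1, 2, 3 };

   is broken up above into the element-wise stores

     a[0] = 1;
     a[1] = 2;
     a[2] = 3;

   while with CLEARED true any zero-valued elements are simply
   skipped.  */
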
5022 /* Return the appropriate RHS predicate for this LHS. */
5023
5024 gimple_predicate
5025 rhs_predicate_for (tree lhs)
5026 {
5027 if (is_gimple_reg (lhs))
5028 return is_gimple_reg_rhs_or_call;
5029 else
5030 return is_gimple_mem_rhs_or_call;
5031 }
5032
5033 /* Return the initial guess for an appropriate RHS predicate for this LHS,
5034 before the LHS has been gimplified. */
5035
5036 static gimple_predicate
5037 initial_rhs_predicate_for (tree lhs)
5038 {
5039 if (is_gimple_reg_type (TREE_TYPE (lhs)))
5040 return is_gimple_reg_rhs_or_call;
5041 else
5042 return is_gimple_mem_rhs_or_call;
5043 }
5044
5045 /* Gimplify a C99 compound literal expression. This just means adding
5046 the DECL_EXPR before the current statement and using its anonymous
5047 decl instead. */
5048
5049 static enum gimplify_status
5050 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
5051 bool (*gimple_test_f) (tree),
5052 fallback_t fallback)
5053 {
5054 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
5055 tree decl = DECL_EXPR_DECL (decl_s);
5056 tree init = DECL_INITIAL (decl);
5057 /* Mark the decl as addressable if the compound literal
5058 expression is addressable now, otherwise it is marked too late
5059 after we gimplify the initialization expression. */
5060 if (TREE_ADDRESSABLE (*expr_p))
5061 TREE_ADDRESSABLE (decl) = 1;
5062 /* Otherwise, if we don't need an lvalue and have a literal, directly
5063 substitute it. Check that it matches the gimple predicate, as
5064 otherwise we'd generate a new temporary, and we may as well just
5065 use the decl we already have. */
5066 else if (!TREE_ADDRESSABLE (decl)
5067 && !TREE_THIS_VOLATILE (decl)
5068 && init
5069 && (fallback & fb_lvalue) == 0
5070 && gimple_test_f (init))
5071 {
5072 *expr_p = init;
5073 return GS_OK;
5074 }
5075
5076 /* If the decl is not addressable, then it is being used in some
5077 expression or on the right hand side of a statement, and it can
5078 be put into a readonly data section. */
5079 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
5080 TREE_READONLY (decl) = 1;
5081
5082 /* This decl isn't mentioned in the enclosing block, so add it to the
5083 list of temps. FIXME it seems a bit of a kludge to say that
5084 anonymous artificial vars aren't pushed, but everything else is. */
5085 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
5086 gimple_add_tmp_var (decl);
5087
5088 gimplify_and_add (decl_s, pre_p);
5089 *expr_p = decl;
5090 return GS_OK;
5091 }
5092
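/* For illustration, a compound literal use such as

     int *p = (int []) { 1, 2 };

   has the literal's anonymous decl (call it D) emitted through its
   DECL_EXPR ahead of the statement, leaving roughly

     D[0] = 1; D[1] = 2;
     p = &D;

   where D stands for the front end's anonymous compound-literal
   variable.  */
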
5093 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
5094 return a new CONSTRUCTOR if something changed. */
5095
5096 static tree
5097 optimize_compound_literals_in_ctor (tree orig_ctor)
5098 {
5099 tree ctor = orig_ctor;
5100 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
5101 unsigned int idx, num = vec_safe_length (elts);
5102
5103 for (idx = 0; idx < num; idx++)
5104 {
5105 tree value = (*elts)[idx].value;
5106 tree newval = value;
5107 if (TREE_CODE (value) == CONSTRUCTOR)
5108 newval = optimize_compound_literals_in_ctor (value);
5109 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
5110 {
5111 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
5112 tree decl = DECL_EXPR_DECL (decl_s);
5113 tree init = DECL_INITIAL (decl);
5114
5115 if (!TREE_ADDRESSABLE (value)
5116 && !TREE_ADDRESSABLE (decl)
5117 && init
5118 && TREE_CODE (init) == CONSTRUCTOR)
5119 newval = optimize_compound_literals_in_ctor (init);
5120 }
5121 if (newval == value)
5122 continue;
5123
5124 if (ctor == orig_ctor)
5125 {
5126 ctor = copy_node (orig_ctor);
5127 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
5128 elts = CONSTRUCTOR_ELTS (ctor);
5129 }
5130 (*elts)[idx].value = newval;
5131 }
5132 return ctor;
5133 }
5134
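/* For illustration, a nested compound literal inside an initializer,

     struct P q = { .t = (struct T) { 1, 2 } };

   is rewritten above into the equivalent direct form

     struct P q = { .t = { 1, 2 } };

   provided neither the literal nor its decl is addressable (P, T, q
   and t are made-up names).  */
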
5135 /* A subroutine of gimplify_modify_expr. Break out elements of a
5136 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
5137
5138 Note that we still need to clear any elements that don't have explicit
5139 initializers, so if not all elements are initialized we keep the
5140 original MODIFY_EXPR, we just remove all of the constructor elements.
5141
5142 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
5143 GS_ERROR if we would have to create a temporary when gimplifying
5144 this constructor. Otherwise, return GS_OK.
5145
5146 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
5147
5148 static enum gimplify_status
5149 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5150 bool want_value, bool notify_temp_creation)
5151 {
5152 tree object, ctor, type;
5153 enum gimplify_status ret;
5154 vec<constructor_elt, va_gc> *elts;
5155 bool cleared = false;
5156 bool is_empty_ctor = false;
5157 bool is_init_expr = (TREE_CODE (*expr_p) == INIT_EXPR);
5158
5159 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
5160
5161 if (!notify_temp_creation)
5162 {
5163 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5164 is_gimple_lvalue, fb_lvalue);
5165 if (ret == GS_ERROR)
5166 return ret;
5167 }
5168
5169 object = TREE_OPERAND (*expr_p, 0);
5170 ctor = TREE_OPERAND (*expr_p, 1)
5171 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
5172 type = TREE_TYPE (ctor);
5173 elts = CONSTRUCTOR_ELTS (ctor);
5174 ret = GS_ALL_DONE;
5175
5176 switch (TREE_CODE (type))
5177 {
5178 case RECORD_TYPE:
5179 case UNION_TYPE:
5180 case QUAL_UNION_TYPE:
5181 case ARRAY_TYPE:
5182 {
5183 /* Use readonly data for initializers of this or smaller size
5184 regardless of the num_nonzero_elements / num_unique_nonzero_elements
5185 ratio. */
5186 const HOST_WIDE_INT min_unique_size = 64;
5187 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
5188 is smaller than this, use readonly data. */
5189 const int unique_nonzero_ratio = 8;
5190 /* True if a single access of the object must be ensured. This is the
5191 case if the target is volatile, the type is non-addressable and more
5192 than one field needs to be assigned. */
5193 const bool ensure_single_access
5194 = TREE_THIS_VOLATILE (object)
5195 && !TREE_ADDRESSABLE (type)
5196 && vec_safe_length (elts) > 1;
5197 struct gimplify_init_ctor_preeval_data preeval_data;
5198 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
5199 HOST_WIDE_INT num_unique_nonzero_elements;
5200 bool complete_p, valid_const_initializer;
5201
5202 /* Aggregate types must lower constructors to initialization of
5203 individual elements. The exception is that a CONSTRUCTOR node
5204 with no elements indicates zero-initialization of the whole. */
5205 if (vec_safe_is_empty (elts))
5206 {
5207 if (notify_temp_creation)
5208 return GS_OK;
5209
5210 /* The var will be initialized and so appear on the lhs of an
5211 assignment; it can't be TREE_READONLY anymore. */
5212 if (VAR_P (object))
5213 TREE_READONLY (object) = 0;
5214
5215 is_empty_ctor = true;
5216 break;
5217 }
5218
5219 /* Fetch information about the constructor to direct later processing.
5220 We might want to make static versions of it in various cases, and
5221 can only do so if it is known to be a valid constant initializer. */
5222 valid_const_initializer
5223 = categorize_ctor_elements (ctor, &num_nonzero_elements,
5224 &num_unique_nonzero_elements,
5225 &num_ctor_elements, &complete_p);
5226
5227 /* If a const aggregate variable is being initialized, then it
5228 should never be a loss to promote the variable to be static. */
5229 if (valid_const_initializer
5230 && num_nonzero_elements > 1
5231 && TREE_READONLY (object)
5232 && VAR_P (object)
5233 && !DECL_REGISTER (object)
5234 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object))
5235 /* For ctors that have many repeated nonzero elements
5236 represented through RANGE_EXPRs, prefer initializing
5237 those through runtime loops over copies of large amounts
5238 of data from readonly data section. */
5239 && (num_unique_nonzero_elements
5240 > num_nonzero_elements / unique_nonzero_ratio
5241 || ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
5242 <= (unsigned HOST_WIDE_INT) min_unique_size)))
5243 {
5244 if (notify_temp_creation)
5245 return GS_ERROR;
5246
5247 DECL_INITIAL (object) = ctor;
5248 TREE_STATIC (object) = 1;
5249 if (!DECL_NAME (object))
5250 DECL_NAME (object) = create_tmp_var_name ("C");
5251 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
5252
5253 /* ??? C++ doesn't automatically append a .<number> to the
5254 assembler name, and even when it does, it looks at FE private
5255 data structures to figure out what that number should be,
5256 which are not set for this variable. I suppose this is
5257 important for local statics for inline functions, which aren't
5258 "local" in the object file sense. So in order to get a unique
5259 TU-local symbol, we must invoke the lhd version now. */
5260 lhd_set_decl_assembler_name (object);
5261
5262 *expr_p = NULL_TREE;
5263 break;
5264 }
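	    /* To illustrate the promotion above: for a function-local

		 const int tbl[4] = { 1, 2, 3, 4 };

	       the constructor becomes DECL_INITIAL of TBL, TBL is made
	       TREE_STATIC and so is emitted in the read-only data section,
	       and no per-element stores remain to be done at run time.  */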
5265
5266 /* The var will be initialized and so appear on the lhs of an
5267 assignment; it can't be TREE_READONLY anymore. */
5268 if (VAR_P (object) && !notify_temp_creation)
5269 TREE_READONLY (object) = 0;
5270
5271 /* If there are "lots" of initialized elements, even discounting
5272 those that are not address constants (and thus *must* be
5273 computed at runtime), then partition the constructor into
5274 constant and non-constant parts. Block copy the constant
5275 parts in, then generate code for the non-constant parts. */
5276 /* TODO. There's code in cp/typeck.cc to do this. */
5277
5278 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
5279 /* store_constructor will ignore the clearing of variable-sized
5280 objects. Initializers for such objects must explicitly set
5281 every field that needs to be set. */
5282 cleared = false;
5283 else if (!complete_p)
5284 /* If the constructor isn't complete, clear the whole object
5285 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
5286
5287 ??? This ought not to be needed. For any elements not present
5288 in the initializer, we should simply set them to zero. Except
5289 we'd need to *find* the elements that are not present, and that
5290 requires trickery to avoid quadratic compile-time behavior in
5291 large cases or excessive memory use in small cases. */
5292 cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
5293 else if (num_ctor_elements - num_nonzero_elements
5294 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
5295 && num_nonzero_elements < num_ctor_elements / 4)
5296 /* If there are "lots" of zeros, it's more efficient to clear
5297 the memory and then set the nonzero elements. */
5298 cleared = true;
5299 else if (ensure_single_access && num_nonzero_elements == 0)
5300 /* If a single access to the target must be ensured and all elements
5301 are zero, then it's optimal to clear the object, whatever their number. */
5302 cleared = true;
5303 else
5304 cleared = false;
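	/* For example, under the heuristics above a constructor such as

	     int a[100] = { [0] = 1, [60] = 2 };

	   is typically handled by clearing the whole of A first and then
	   storing the two nonzero elements, whereas a mostly-nonzero
	   initializer is left to per-element stores or a block copy.  */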
5305
5306 /* If there are "lots" of initialized elements, and all of them
5307 are valid address constants, then the entire initializer can
5308 be dropped to memory, and then memcpy'd out. Don't do this
5309 for sparse arrays, though, as it's more efficient to follow
5310 the standard CONSTRUCTOR behavior of memset followed by
5311 individual element initialization. Also don't do this for small
5312 all-zero initializers (which aren't big enough to merit
5313 clearing), and don't try to make bitwise copies of
5314 TREE_ADDRESSABLE types. */
5315 if (valid_const_initializer
5316 && complete_p
5317 && !(cleared || num_nonzero_elements == 0)
5318 && !TREE_ADDRESSABLE (type))
5319 {
5320 HOST_WIDE_INT size = int_size_in_bytes (type);
5321 unsigned int align;
5322
5323 /* ??? We can still get unbounded array types, at least
5324 from the C++ front end. This seems wrong, but attempt
5325 to work around it for now. */
5326 if (size < 0)
5327 {
5328 size = int_size_in_bytes (TREE_TYPE (object));
5329 if (size >= 0)
5330 TREE_TYPE (ctor) = type = TREE_TYPE (object);
5331 }
5332
5333 /* Find the maximum alignment we can assume for the object. */
5334 /* ??? Make use of DECL_OFFSET_ALIGN. */
5335 if (DECL_P (object))
5336 align = DECL_ALIGN (object);
5337 else
5338 align = TYPE_ALIGN (type);
5339
5340 /* Do a block move either if the size is so small as to make
5341 each individual move a sub-unit move on average, or if it
5342 is so large as to make individual moves inefficient. */
5343 if (size > 0
5344 && num_nonzero_elements > 1
5345 /* For ctors that have many repeated nonzero elements
5346 represented through RANGE_EXPRs, prefer initializing
5347 those through runtime loops over copies of large amounts
5348 of data from the readonly data section. */
5349 && (num_unique_nonzero_elements
5350 > num_nonzero_elements / unique_nonzero_ratio
5351 || size <= min_unique_size)
5352 && (size < num_nonzero_elements
5353 || !can_move_by_pieces (size, align)))
5354 {
5355 if (notify_temp_creation)
5356 return GS_ERROR;
5357
5358 walk_tree (&ctor, force_labels_r, NULL, NULL);
5359 ctor = tree_output_constant_def (ctor);
5360 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
5361 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
5362 TREE_OPERAND (*expr_p, 1) = ctor;
5363
5364 /* This is no longer an assignment of a CONSTRUCTOR, but
5365 we still may have processing to do on the LHS. So
5366 pretend we didn't do anything here to let that happen. */
5367 return GS_UNHANDLED;
5368 }
5369 }
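	/* When the branch above is taken, an initialization such as

	     struct S s = { 1, 2, 3, 4, 5, 6, 7, 8 };

	   has its constructor replaced by a constant-pool object C.0
	   built by tree_output_constant_def, so the assignment is later
	   gimplified as a plain block copy from the read-only data
	   section rather than as eight separate stores.  */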
5370
5371 /* If a single access to the target must be ensured and there are
5372 nonzero elements or the zero elements are not assigned en masse,
5373 initialize the target from a temporary. */
5374 if (ensure_single_access && (num_nonzero_elements > 0 || !cleared))
5375 {
5376 if (notify_temp_creation)
5377 return GS_ERROR;
5378
5379 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
5380 TREE_OPERAND (*expr_p, 0) = temp;
5381 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
5382 *expr_p,
5383 build2 (MODIFY_EXPR, void_type_node,
5384 object, temp));
5385 return GS_OK;
5386 }
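	/* For instance, initializing a volatile two-field struct OBJECT
	   becomes, approximately,

	     temp.a = 1;
	     temp.b = 2;
	     object = temp;

	   so that OBJECT itself is written by a single access.  */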
5387
5388 if (notify_temp_creation)
5389 return GS_OK;
5390
5391 /* If there are nonzero elements and if needed, pre-evaluate to capture
5392 elements overlapping with the lhs into temporaries. We must do this
5393 before clearing to fetch the values before they are zeroed out. */
5394 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
5395 {
5396 preeval_data.lhs_base_decl = get_base_address (object);
5397 if (!DECL_P (preeval_data.lhs_base_decl))
5398 preeval_data.lhs_base_decl = NULL;
5399 preeval_data.lhs_alias_set = get_alias_set (object);
5400
5401 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
5402 pre_p, post_p, &preeval_data);
5403 }
5404
5405 bool ctor_has_side_effects_p
5406 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
5407
5408 if (cleared)
5409 {
5410 /* Zap the CONSTRUCTOR element list, which simplifies this case.
5411 Note that we still have to gimplify, in order to handle the
5412 case of variable sized types. Avoid shared tree structures. */
5413 CONSTRUCTOR_ELTS (ctor) = NULL;
5414 TREE_SIDE_EFFECTS (ctor) = 0;
5415 object = unshare_expr (object);
5416 gimplify_stmt (expr_p, pre_p);
5417 }
5418
5419 /* If we have not block cleared the object, or if there are nonzero
5420 elements in the constructor, or if the constructor has side effects,
5421 add assignments to the individual scalar fields of the object. */
5422 if (!cleared
5423 || num_nonzero_elements > 0
5424 || ctor_has_side_effects_p)
5425 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
5426
5427 *expr_p = NULL_TREE;
5428 }
5429 break;
5430
5431 case COMPLEX_TYPE:
5432 {
5433 tree r, i;
5434
5435 if (notify_temp_creation)
5436 return GS_OK;
5437
5438 /* Extract the real and imaginary parts out of the ctor. */
5439 gcc_assert (elts->length () == 2);
5440 r = (*elts)[0].value;
5441 i = (*elts)[1].value;
5442 if (r == NULL || i == NULL)
5443 {
5444 tree zero = build_zero_cst (TREE_TYPE (type));
5445 if (r == NULL)
5446 r = zero;
5447 if (i == NULL)
5448 i = zero;
5449 }
5450
5451 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5452 represent creation of a complex value. */
5453 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
5454 {
5455 ctor = build_complex (type, r, i);
5456 TREE_OPERAND (*expr_p, 1) = ctor;
5457 }
5458 else
5459 {
5460 ctor = build2 (COMPLEX_EXPR, type, r, i);
5461 TREE_OPERAND (*expr_p, 1) = ctor;
5462 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
5463 pre_p,
5464 post_p,
5465 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
5466 fb_rvalue);
5467 }
5468 }
5469 break;
5470
5471 case VECTOR_TYPE:
5472 {
5473 unsigned HOST_WIDE_INT ix;
5474 constructor_elt *ce;
5475
5476 if (notify_temp_creation)
5477 return GS_OK;
5478
5479 /* Vector types use CONSTRUCTOR all the way through gimple
5480 compilation as a general initializer. */
5481 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
5482 {
5483 enum gimplify_status tret;
5484 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
5485 fb_rvalue);
5486 if (tret == GS_ERROR)
5487 ret = GS_ERROR;
5488 else if (TREE_STATIC (ctor)
5489 && !initializer_constant_valid_p (ce->value,
5490 TREE_TYPE (ce->value)))
5491 TREE_STATIC (ctor) = 0;
5492 }
5493 recompute_constructor_flags (ctor);
5494
5495 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5496 if (TREE_CONSTANT (ctor))
5497 {
5498 bool constant_p = true;
5499 tree value;
5500
5501 /* Even when ctor is constant, it might contain non-*_CST
5502 elements, such as addresses or trapping values like
5503 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5504 in VECTOR_CST nodes. */
5505 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
5506 if (!CONSTANT_CLASS_P (value))
5507 {
5508 constant_p = false;
5509 break;
5510 }
5511
5512 if (constant_p)
5513 {
5514 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
5515 break;
5516 }
5517 }
5518
5519 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
5520 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
5521 }
5522 break;
5523
5524 default:
5525 /* So how did we get a CONSTRUCTOR for a scalar type? */
5526 gcc_unreachable ();
5527 }
5528
5529 if (ret == GS_ERROR)
5530 return GS_ERROR;
5531 /* If we have gimplified both sides of the initializer but have
5532 not emitted an assignment, do so now. */
5533 if (*expr_p
5534 /* If the type is an empty type, we don't need to emit the
5535 assignment. */
5536 && !is_empty_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
5537 {
5538 tree lhs = TREE_OPERAND (*expr_p, 0);
5539 tree rhs = TREE_OPERAND (*expr_p, 1);
5540 if (want_value && object == lhs)
5541 lhs = unshare_expr (lhs);
5542 gassign *init = gimple_build_assign (lhs, rhs);
5543 gimplify_seq_add_stmt (pre_p, init);
5544 }
5545 if (want_value)
5546 {
5547 *expr_p = object;
5548 ret = GS_OK;
5549 }
5550 else
5551 {
5552 *expr_p = NULL;
5553 ret = GS_ALL_DONE;
5554 }
5555
5556 /* If the user requests initialization of automatic variables, we
5557 should also initialize the padding inside the variable. Add a call to
5558 __builtin_clear_padding (&object, 0, for_auto_init = true) to
5559 initialize the padding of the object to zero regardless of
5560 INIT_TYPE. Note, we will not insert this call if the aggregate
5561 variable has already been completely cleared or if it's initialized
5562 with an empty constructor. We cannot insert this call if the
5563 variable is a gimple register since __builtin_clear_padding will take
5564 the address of the variable. As a result, if a long double/_Complex long
5565 double variable is spilled onto the stack later, its padding cannot
5566 be cleared with __builtin_clear_padding; we should clear its padding
5567 when it is spilled into memory. */
5568 if (is_init_expr
5569 && !is_gimple_reg (object)
5570 && clear_padding_type_may_have_padding_p (type)
5571 && ((AGGREGATE_TYPE_P (type) && !cleared && !is_empty_ctor)
5572 || !AGGREGATE_TYPE_P (type))
5573 && is_var_need_auto_init (object))
5574 gimple_add_padding_init_for_auto_var (object, false, pre_p);
5575
5576 return ret;
5577 }
5578
5579 /* Given a pointer value OP0, return a simplified version of an
5580 indirection through OP0, or NULL_TREE if no simplification is
5581 possible. This may only be applied to a rhs of an expression.
5582 Note that the resulting type may differ from the pointed-to type,
5583 but only in the sense that it is still compatible from the langhooks
5584 point of view. */
5585
5586 static tree
5587 gimple_fold_indirect_ref_rhs (tree t)
5588 {
5589 return gimple_fold_indirect_ref (t);
5590 }
5591
5592 /* Subroutine of gimplify_modify_expr to do simplifications of
5593 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5594 something changes. */
5595
5596 static enum gimplify_status
5597 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5598 gimple_seq *pre_p, gimple_seq *post_p,
5599 bool want_value)
5600 {
5601 enum gimplify_status ret = GS_UNHANDLED;
5602 bool changed;
5603
5604 do
5605 {
5606 changed = false;
5607 switch (TREE_CODE (*from_p))
5608 {
5609 case VAR_DECL:
5610 /* If we're assigning from a read-only variable initialized with
5611 a constructor and not volatile, do the direct assignment from
5612 the constructor, but only if the target is not volatile either
5613 since this latter assignment might end up being done on a
5614 per-field basis. However, if the target is volatile and the type
5615 is aggregate and non-addressable, gimplify_init_constructor
5616 knows that it needs to ensure a single access to the target
5617 and it will return GS_OK only in this case. */
5618 if (TREE_READONLY (*from_p)
5619 && DECL_INITIAL (*from_p)
5620 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR
5621 && !TREE_THIS_VOLATILE (*from_p)
5622 && (!TREE_THIS_VOLATILE (*to_p)
5623 || (AGGREGATE_TYPE_P (TREE_TYPE (*to_p))
5624 && !TREE_ADDRESSABLE (TREE_TYPE (*to_p)))))
5625 {
5626 tree old_from = *from_p;
5627 enum gimplify_status subret;
5628
5629 /* Move the constructor into the RHS. */
5630 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5631
5632 /* Let's see if gimplify_init_constructor will need to put
5633 it in memory. */
5634 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5635 false, true);
5636 if (subret == GS_ERROR)
5637 {
5638 /* If so, revert the change. */
5639 *from_p = old_from;
5640 }
5641 else
5642 {
5643 ret = GS_OK;
5644 changed = true;
5645 }
5646 }
5647 break;
5648 case INDIRECT_REF:
5649 if (!TREE_ADDRESSABLE (TREE_TYPE (*from_p)))
5650 /* If we have code like
5651
5652 *(const A*)(A*)&x
5653
5654 where the type of "x" is a (possibly cv-qualified variant
5655 of "A"), treat the entire expression as identical to "x".
5656 This kind of code arises in C++ when an object is bound
5657 to a const reference, and if "x" is a TARGET_EXPR we want
5658 to take advantage of the optimization below. But not if
5659 the type is TREE_ADDRESSABLE; then C++17 says that the
5660 TARGET_EXPR needs to be a temporary. */
5661 if (tree t
5662 = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0)))
5663 {
5664 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5665 if (TREE_THIS_VOLATILE (t) != volatile_p)
5666 {
5667 if (DECL_P (t))
5668 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5669 build_fold_addr_expr (t));
5670 if (REFERENCE_CLASS_P (t))
5671 TREE_THIS_VOLATILE (t) = volatile_p;
5672 }
5673 *from_p = t;
5674 ret = GS_OK;
5675 changed = true;
5676 }
5677 break;
5678
5679 case TARGET_EXPR:
5680 {
5681 /* If we are initializing something from a TARGET_EXPR, strip the
5682 TARGET_EXPR and initialize it directly, if possible. This can't
5683 be done if the initializer is void, since that implies that the
5684 temporary is set in some non-trivial way.
5685
5686 ??? What about code that pulls out the temp and uses it
5687 elsewhere? I think that such code never uses the TARGET_EXPR as
5688 an initializer. If I'm wrong, we'll die because the temp won't
5689 have any RTL. In that case, I guess we'll need to replace
5690 references somehow. */
5691 tree init = TARGET_EXPR_INITIAL (*from_p);
5692
5693 if (init
5694 && (TREE_CODE (*expr_p) != MODIFY_EXPR
5695 || !TARGET_EXPR_NO_ELIDE (*from_p))
5696 && !VOID_TYPE_P (TREE_TYPE (init)))
5697 {
5698 *from_p = init;
5699 ret = GS_OK;
5700 changed = true;
5701 }
5702 }
5703 break;
5704
5705 case COMPOUND_EXPR:
5706 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5707 caught. */
5708 gimplify_compound_expr (from_p, pre_p, true);
5709 ret = GS_OK;
5710 changed = true;
5711 break;
5712
5713 case CONSTRUCTOR:
5714 /* If we already made some changes, let the front end have a
5715 crack at this before we break it down. */
5716 if (ret != GS_UNHANDLED)
5717 break;
5718
5719 /* If we're initializing from a CONSTRUCTOR, break this into
5720 individual MODIFY_EXPRs. */
5721 ret = gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5722 false);
5723 return ret;
5724
5725 case COND_EXPR:
5726 /* If we're assigning to a non-register type, push the assignment
5727 down into the branches. This is mandatory for ADDRESSABLE types,
5728 since we cannot generate temporaries for such, but it saves a
5729 copy in other cases as well. */
5730 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5731 {
5732 /* This code should mirror the code in gimplify_cond_expr. */
5733 enum tree_code code = TREE_CODE (*expr_p);
5734 tree cond = *from_p;
5735 tree result = *to_p;
5736
5737 ret = gimplify_expr (&result, pre_p, post_p,
5738 is_gimple_lvalue, fb_lvalue);
5739 if (ret != GS_ERROR)
5740 ret = GS_OK;
5741
5742 /* If we are going to write RESULT more than once, clear
5743 TREE_READONLY flag, otherwise we might incorrectly promote
5744 the variable to static const and initialize it at compile
5745 time in one of the branches. */
5746 if (VAR_P (result)
5747 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5748 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5749 TREE_READONLY (result) = 0;
5750 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5751 TREE_OPERAND (cond, 1)
5752 = build2 (code, void_type_node, result,
5753 TREE_OPERAND (cond, 1));
5754 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5755 TREE_OPERAND (cond, 2)
5756 = build2 (code, void_type_node, unshare_expr (result),
5757 TREE_OPERAND (cond, 2));
5758
5759 TREE_TYPE (cond) = void_type_node;
5760 recalculate_side_effects (cond);
5761
5762 if (want_value)
5763 {
5764 gimplify_and_add (cond, pre_p);
5765 *expr_p = unshare_expr (result);
5766 }
5767 else
5768 *expr_p = cond;
5769 return ret;
5770 }
5771 break;
5772
5773 case CALL_EXPR:
5774 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5775 return slot so that we don't generate a temporary. */
5776 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5777 && aggregate_value_p (*from_p, *from_p))
5778 {
5779 bool use_target;
5780
5781 if (!(rhs_predicate_for (*to_p))(*from_p))
5782 /* If we need a temporary, *to_p isn't accurate. */
5783 use_target = false;
5784 /* It's OK to use the return slot directly unless it's an NRV. */
5785 else if (TREE_CODE (*to_p) == RESULT_DECL
5786 && DECL_NAME (*to_p) == NULL_TREE
5787 && needs_to_live_in_memory (*to_p))
5788 use_target = true;
5789 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5790 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5791 /* Don't force regs into memory. */
5792 use_target = false;
5793 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5794 /* It's OK to use the target directly if it's being
5795 initialized. */
5796 use_target = true;
5797 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5798 != INTEGER_CST)
5799 /* Always use the target and thus RSO for variable-sized types.
5800 GIMPLE cannot deal with a variable-sized assignment
5801 embedded in a call statement. */
5802 use_target = true;
5803 else if (TREE_CODE (*to_p) != SSA_NAME
5804 && (!is_gimple_variable (*to_p)
5805 || needs_to_live_in_memory (*to_p)))
5806 /* Don't use the original target if it's already addressable;
5807 if its address escapes, and the called function uses the
5808 NRV optimization, a conforming program could see *to_p
5809 change before the called function returns; see c++/19317.
5810 When optimizing, the return_slot pass marks more functions
5811 as safe after we have escape info. */
5812 use_target = false;
5813 else
5814 use_target = true;
5815
5816 if (use_target)
5817 {
5818 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5819 mark_addressable (*to_p);
5820 }
5821 }
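	  /* E.g. for "s = foo ()" where FOO returns a struct in memory,
	     setting CALL_EXPR_RETURN_SLOT_OPT lets FOO construct its
	     return value directly into S instead of into a temporary
	     that would then be copied.  */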
5822 break;
5823
5824 case WITH_SIZE_EXPR:
5825 /* Likewise for calls that return an aggregate of non-constant size,
5826 since we would not be able to generate a temporary at all. */
5827 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5828 {
5829 *from_p = TREE_OPERAND (*from_p, 0);
5830 /* We don't change ret in this case because the
5831 WITH_SIZE_EXPR might have been added in
5832 gimplify_modify_expr, so returning GS_OK would lead to an
5833 infinite loop. */
5834 changed = true;
5835 }
5836 break;
5837
5838 /* If we're initializing from a container, push the initialization
5839 inside it. */
5840 case CLEANUP_POINT_EXPR:
5841 case BIND_EXPR:
5842 case STATEMENT_LIST:
5843 {
5844 tree wrap = *from_p;
5845 tree t;
5846
5847 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5848 fb_lvalue);
5849 if (ret != GS_ERROR)
5850 ret = GS_OK;
5851
5852 t = voidify_wrapper_expr (wrap, *expr_p);
5853 gcc_assert (t == *expr_p);
5854
5855 if (want_value)
5856 {
5857 gimplify_and_add (wrap, pre_p);
5858 *expr_p = unshare_expr (*to_p);
5859 }
5860 else
5861 *expr_p = wrap;
5862 return GS_OK;
5863 }
5864
5865 case NOP_EXPR:
5866 /* Pull out compound literal expressions from a NOP_EXPR.
5867 Those are created in the C FE to drop qualifiers during
5868 lvalue conversion. */
5869 if ((TREE_CODE (TREE_OPERAND (*from_p, 0)) == COMPOUND_LITERAL_EXPR)
5870 && tree_ssa_useless_type_conversion (*from_p))
5871 {
5872 *from_p = TREE_OPERAND (*from_p, 0);
5873 ret = GS_OK;
5874 changed = true;
5875 }
5876 break;
5877
5878 case COMPOUND_LITERAL_EXPR:
5879 {
5880 tree complit = TREE_OPERAND (*expr_p, 1);
5881 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5882 tree decl = DECL_EXPR_DECL (decl_s);
5883 tree init = DECL_INITIAL (decl);
5884
5885 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5886 into struct T x = { 0, 1, 2 } if the address of the
5887 compound literal has never been taken. */
5888 if (!TREE_ADDRESSABLE (complit)
5889 && !TREE_ADDRESSABLE (decl)
5890 && init)
5891 {
5892 *expr_p = copy_node (*expr_p);
5893 TREE_OPERAND (*expr_p, 1) = init;
5894 return GS_OK;
5895 }
5896 }
5897
5898 default:
5899 break;
5900 }
5901 }
5902 while (changed);
5903
5904 return ret;
5905 }
5906
5907
5908 /* Return true if T looks like a valid GIMPLE statement. */
5909
5910 static bool
5911 is_gimple_stmt (tree t)
5912 {
5913 const enum tree_code code = TREE_CODE (t);
5914
5915 switch (code)
5916 {
5917 case NOP_EXPR:
5918 /* The only valid NOP_EXPR is the empty statement. */
5919 return IS_EMPTY_STMT (t);
5920
5921 case BIND_EXPR:
5922 case COND_EXPR:
5923 /* These are only valid if they're void. */
5924 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5925
5926 case SWITCH_EXPR:
5927 case GOTO_EXPR:
5928 case RETURN_EXPR:
5929 case LABEL_EXPR:
5930 case CASE_LABEL_EXPR:
5931 case TRY_CATCH_EXPR:
5932 case TRY_FINALLY_EXPR:
5933 case EH_FILTER_EXPR:
5934 case CATCH_EXPR:
5935 case ASM_EXPR:
5936 case STATEMENT_LIST:
5937 case OACC_PARALLEL:
5938 case OACC_KERNELS:
5939 case OACC_SERIAL:
5940 case OACC_DATA:
5941 case OACC_HOST_DATA:
5942 case OACC_DECLARE:
5943 case OACC_UPDATE:
5944 case OACC_ENTER_DATA:
5945 case OACC_EXIT_DATA:
5946 case OACC_CACHE:
5947 case OMP_PARALLEL:
5948 case OMP_FOR:
5949 case OMP_SIMD:
5950 case OMP_DISTRIBUTE:
5951 case OMP_LOOP:
5952 case OACC_LOOP:
5953 case OMP_SCAN:
5954 case OMP_SCOPE:
5955 case OMP_SECTIONS:
5956 case OMP_SECTION:
5957 case OMP_SINGLE:
5958 case OMP_MASTER:
5959 case OMP_MASKED:
5960 case OMP_TASKGROUP:
5961 case OMP_ORDERED:
5962 case OMP_CRITICAL:
5963 case OMP_TASK:
5964 case OMP_TARGET:
5965 case OMP_TARGET_DATA:
5966 case OMP_TARGET_UPDATE:
5967 case OMP_TARGET_ENTER_DATA:
5968 case OMP_TARGET_EXIT_DATA:
5969 case OMP_TASKLOOP:
5970 case OMP_TEAMS:
5971 /* These are always void. */
5972 return true;
5973
5974 case CALL_EXPR:
5975 case MODIFY_EXPR:
5976 case PREDICT_EXPR:
5977 /* These are valid regardless of their type. */
5978 return true;
5979
5980 default:
5981 return false;
5982 }
5983 }
5984
5985
5986 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5987 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a gimple register.
5988
5989 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5990 other, unmodified part of the complex object just before the total store.
5991 As a consequence, if the object is still uninitialized, an undefined value
5992 will be loaded into a register, which may result in a spurious exception
5993 if the register is floating-point and the value happens to be a signaling
5994 NaN for example. Then the fully-fledged complex operations lowering pass
5995 followed by a DCE pass are necessary in order to fix things up. */
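/* For example, assuming Z is a non-addressable _Complex variable and R a
   scalar of the component type, the partial store

     __real__ z = r;

   is promoted, approximately, to

     D.1 = __imag__ z;
     z = COMPLEX_EXPR <r, D.1>;

   so that Z is always assigned as a whole.  */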
5996
5997 static enum gimplify_status
5998 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5999 bool want_value)
6000 {
6001 enum tree_code code, ocode;
6002 tree lhs, rhs, new_rhs, other, realpart, imagpart;
6003
6004 lhs = TREE_OPERAND (*expr_p, 0);
6005 rhs = TREE_OPERAND (*expr_p, 1);
6006 code = TREE_CODE (lhs);
6007 lhs = TREE_OPERAND (lhs, 0);
6008
6009 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
6010 other = build1 (ocode, TREE_TYPE (rhs), lhs);
6011 suppress_warning (other);
6012 other = get_formal_tmp_var (other, pre_p);
6013
6014 realpart = code == REALPART_EXPR ? rhs : other;
6015 imagpart = code == REALPART_EXPR ? other : rhs;
6016
6017 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
6018 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
6019 else
6020 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
6021
6022 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
6023 *expr_p = (want_value) ? rhs : NULL_TREE;
6024
6025 return GS_ALL_DONE;
6026 }
6027
6028 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
6029
6030 modify_expr
6031 : varname '=' rhs
6032 | '*' ID '=' rhs
6033
6034 PRE_P points to the list where side effects that must happen before
6035 *EXPR_P should be stored.
6036
6037 POST_P points to the list where side effects that must happen after
6038 *EXPR_P should be stored.
6039
6040 WANT_VALUE is nonzero iff we want to use the value of this expression
6041 in another expression. */
6042
6043 static enum gimplify_status
6044 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
6045 bool want_value)
6046 {
6047 tree *from_p = &TREE_OPERAND (*expr_p, 1);
6048 tree *to_p = &TREE_OPERAND (*expr_p, 0);
6049 enum gimplify_status ret = GS_UNHANDLED;
6050 gimple *assign;
6051 location_t loc = EXPR_LOCATION (*expr_p);
6052 gimple_stmt_iterator gsi;
6053
6054 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
6055 || TREE_CODE (*expr_p) == INIT_EXPR);
6056
6057 /* Trying to simplify a clobber using normal logic doesn't work,
6058 so handle it here. */
6059 if (TREE_CLOBBER_P (*from_p))
6060 {
6061 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
6062 if (ret == GS_ERROR)
6063 return ret;
6064 gcc_assert (!want_value);
6065 if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
6066 {
6067 tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
6068 pre_p, post_p);
6069 *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
6070 }
6071 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
6072 *expr_p = NULL;
6073 return GS_ALL_DONE;
6074 }
6075
6076 /* Convert initialization from an empty variable-size CONSTRUCTOR to
6077 memset. */
6078 if (TREE_TYPE (*from_p) != error_mark_node
6079 && TYPE_SIZE_UNIT (TREE_TYPE (*from_p))
6080 && !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (*from_p)))
6081 && TREE_CODE (*from_p) == CONSTRUCTOR
6082 && CONSTRUCTOR_NELTS (*from_p) == 0)
6083 {
6084 maybe_with_size_expr (from_p);
6085 gcc_assert (TREE_CODE (*from_p) == WITH_SIZE_EXPR);
6086 return gimplify_modify_expr_to_memset (expr_p,
6087 TREE_OPERAND (*from_p, 1),
6088 want_value, pre_p);
6089 }
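  /* Thus an assignment of an empty CONSTRUCTOR to a variable-sized
     object X, something the front ends can produce internally, is
     emitted, approximately, as

       __builtin_memset (&x, 0, <run-time size of X>);  */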
6090
6091 /* Insert pointer conversions required by the middle-end that are not
6092 required by the frontend. This fixes middle-end type checking
6093 for e.g. gcc.dg/redecl-6.c. */
6094 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
6095 {
6096 STRIP_USELESS_TYPE_CONVERSION (*from_p);
6097 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
6098 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
6099 }
6100
6101 /* See if any simplifications can be done based on what the RHS is. */
6102 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
6103 want_value);
6104 if (ret != GS_UNHANDLED)
6105 return ret;
6106
6107 /* For empty types, only gimplify the left hand side and right hand
6108 side as statements and throw away the assignment. Do this after
6109 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
6110 types properly. */
6111 if (is_empty_type (TREE_TYPE (*from_p))
6112 && !want_value
6113 /* Don't do this for calls that return addressable types, expand_call
6114 relies on those having a lhs. */
6115 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
6116 && TREE_CODE (*from_p) == CALL_EXPR))
6117 {
6118 gimplify_stmt (from_p, pre_p);
6119 gimplify_stmt (to_p, pre_p);
6120 *expr_p = NULL_TREE;
6121 return GS_ALL_DONE;
6122 }
6123
6124 /* If the value being copied is of variable width, compute the length
6125 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
6126 before gimplifying any of the operands so that we can resolve any
6127 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
6128 the size of the expression to be copied, not of the destination, so
6129 that is what we must do here. */
6130 maybe_with_size_expr (from_p);
6131
6132 /* As a special case, we have to temporarily allow for assignments
6133 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
6134 a toplevel statement, when gimplifying the GENERIC expression
6135 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
6136 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
6137
6138 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
6139 prevent gimplify_expr from trying to create a new temporary for
6140 foo's LHS, we tell it that it should only gimplify until it
6141 reaches the CALL_EXPR. On return from gimplify_expr, the newly
6142 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
6143 and all we need to do here is set 'a' to be its LHS. */
6144
6145 /* Gimplify the RHS first for C++17 and bug 71104. */
6146 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
6147 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
6148 if (ret == GS_ERROR)
6149 return ret;
6150
6151 /* Then gimplify the LHS. */
6152 /* If we gimplified the RHS to a CALL_EXPR and that call may return
6153 twice we have to make sure to gimplify into non-SSA as otherwise
6154 the abnormal edge added later will make those defs not dominate
6155 their uses.
6156 ??? Technically this applies only to the registers used in the
6157 resulting non-register *TO_P. */
6158 bool saved_into_ssa = gimplify_ctxp->into_ssa;
6159 if (saved_into_ssa
6160 && TREE_CODE (*from_p) == CALL_EXPR
6161 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
6162 gimplify_ctxp->into_ssa = false;
6163 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
6164 gimplify_ctxp->into_ssa = saved_into_ssa;
6165 if (ret == GS_ERROR)
6166 return ret;
6167
6168 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
6169 guess for the predicate was wrong. */
6170 gimple_predicate final_pred = rhs_predicate_for (*to_p);
6171 if (final_pred != initial_pred)
6172 {
6173 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
6174 if (ret == GS_ERROR)
6175 return ret;
6176 }
6177
6178 /* In case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the type
6179 size as an argument to the call. */
6180 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
6181 {
6182 tree call = TREE_OPERAND (*from_p, 0);
6183 tree vlasize = TREE_OPERAND (*from_p, 1);
6184
6185 if (TREE_CODE (call) == CALL_EXPR
6186 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
6187 {
6188 int nargs = call_expr_nargs (call);
6189 tree type = TREE_TYPE (call);
6190 tree ap = CALL_EXPR_ARG (call, 0);
6191 tree tag = CALL_EXPR_ARG (call, 1);
6192 tree aptag = CALL_EXPR_ARG (call, 2);
6193 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
6194 IFN_VA_ARG, type,
6195 nargs + 1, ap, tag,
6196 aptag, vlasize);
6197 TREE_OPERAND (*from_p, 0) = newcall;
6198 }
6199 }
6200
6201 /* Now see if the above changed *from_p to something we handle specially. */
6202 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
6203 want_value);
6204 if (ret != GS_UNHANDLED)
6205 return ret;
6206
6207 /* If we've got a variable-sized assignment between two lvalues (i.e. one
6208 that does not involve a call), then we can make things a bit more
6209 straightforward by converting the assignment to memcpy or memset. */
6210 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
6211 {
6212 tree from = TREE_OPERAND (*from_p, 0);
6213 tree size = TREE_OPERAND (*from_p, 1);
6214
6215 if (TREE_CODE (from) == CONSTRUCTOR)
6216 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
6217
6218 if (is_gimple_addressable (from))
6219 {
6220 *from_p = from;
6221 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
6222 pre_p);
6223 }
6224 }
6225
6226 /* Transform partial stores to non-addressable complex variables into
6227 total stores. This allows us to use real instead of virtual operands
6228 for these variables, which improves optimization. */
6229 if ((TREE_CODE (*to_p) == REALPART_EXPR
6230 || TREE_CODE (*to_p) == IMAGPART_EXPR)
6231 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
6232 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
6233
6234 /* Try to alleviate the effects of the gimplification creating artificial
6235 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
6236 make sure not to create DECL_DEBUG_EXPR links across functions. */
6237 if (!gimplify_ctxp->into_ssa
6238 && VAR_P (*from_p)
6239 && DECL_IGNORED_P (*from_p)
6240 && DECL_P (*to_p)
6241 && !DECL_IGNORED_P (*to_p)
6242 && decl_function_context (*to_p) == current_function_decl
6243 && decl_function_context (*from_p) == current_function_decl)
6244 {
6245 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
6246 DECL_NAME (*from_p)
6247 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
6248 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
6249 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
6250 }
6251
6252 if (want_value && TREE_THIS_VOLATILE (*to_p))
6253 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
6254
6255 if (TREE_CODE (*from_p) == CALL_EXPR)
6256 {
6257 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
6258 instead of a GIMPLE_ASSIGN. */
6259 gcall *call_stmt;
6260 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
6261 {
6262 /* Gimplify internal functions created in the FEs. */
6263 int nargs = call_expr_nargs (*from_p), i;
6264 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
6265 auto_vec<tree> vargs (nargs);
6266
6267 for (i = 0; i < nargs; i++)
6268 {
6269 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
6270 EXPR_LOCATION (*from_p));
6271 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
6272 }
6273 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
6274 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
6275 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
6276 }
6277 else
6278 {
6279 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
6280 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
6281 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
6282 tree fndecl = get_callee_fndecl (*from_p);
6283 if (fndecl
6284 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
6285 && call_expr_nargs (*from_p) == 3)
6286 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
6287 CALL_EXPR_ARG (*from_p, 0),
6288 CALL_EXPR_ARG (*from_p, 1),
6289 CALL_EXPR_ARG (*from_p, 2));
6290 else
6291 {
6292 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
6293 }
6294 }
6295 notice_special_calls (call_stmt);
6296 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
6297 gimple_call_set_lhs (call_stmt, *to_p);
6298 else if (TREE_CODE (*to_p) == SSA_NAME)
6299 /* The above is somewhat premature, avoid ICEing later for an
6300 SSA name w/o a definition. We may have uses in the GIMPLE IL.
6301 ??? This doesn't make it a default-def. */
6302 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
6303
6304 assign = call_stmt;
6305 }
6306 else
6307 {
6308 assign = gimple_build_assign (*to_p, *from_p);
6309 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
6310 if (COMPARISON_CLASS_P (*from_p))
6311 copy_warning (assign, *from_p);
6312 }
6313
6314 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
6315 {
6316 /* We should have got an SSA name from the start. */
6317 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
6318 || ! gimple_in_ssa_p (cfun));
6319 }
6320
6321 gimplify_seq_add_stmt (pre_p, assign);
6322 gsi = gsi_last (*pre_p);
6323 maybe_fold_stmt (&gsi);
6324
6325 if (want_value)
6326 {
6327 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
6328 return GS_OK;
6329 }
6330 else
6331 *expr_p = NULL;
6332
6333 return GS_ALL_DONE;
6334 }
6335
6336 /* Gimplify a comparison between two variable-sized objects. Do this
6337 with a call to BUILT_IN_MEMCMP. */
6338
6339 static enum gimplify_status
6340 gimplify_variable_sized_compare (tree *expr_p)
6341 {
6342 location_t loc = EXPR_LOCATION (*expr_p);
6343 tree op0 = TREE_OPERAND (*expr_p, 0);
6344 tree op1 = TREE_OPERAND (*expr_p, 1);
6345 tree t, arg, dest, src, expr;
6346
6347 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
6348 arg = unshare_expr (arg);
6349 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
6350 src = build_fold_addr_expr_loc (loc, op1);
6351 dest = build_fold_addr_expr_loc (loc, op0);
6352 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
6353 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
6354
6355 expr
6356 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
6357 SET_EXPR_LOCATION (expr, loc);
6358 *expr_p = expr;
6359
6360 return GS_OK;
6361 }
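/* For instance, the EQ_EXPR "a == b" on two variable-sized objects is
   rewritten above, approximately, into

     __builtin_memcmp (&a, &b, size) == 0

   where SIZE is the TYPE_SIZE_UNIT of A's type with any
   PLACEHOLDER_EXPRs substituted from A itself.  */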
6362
6363 /* Gimplify a comparison between two aggregate objects of integral scalar
6364 mode as a comparison between the bitwise equivalent scalar values. */
6365
6366 static enum gimplify_status
6367 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
6368 {
6369 location_t loc = EXPR_LOCATION (*expr_p);
6370 tree op0 = TREE_OPERAND (*expr_p, 0);
6371 tree op1 = TREE_OPERAND (*expr_p, 1);
6372
6373 tree type = TREE_TYPE (op0);
6374 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
6375
6376 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
6377 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
6378
6379 *expr_p
6380 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
6381
6382 return GS_OK;
6383 }
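/* For instance, an equality comparison of two 4-byte structures whose
   mode is SImode is reduced above, approximately, to

     VIEW_CONVERT_EXPR<unsigned int>(a) == VIEW_CONVERT_EXPR<unsigned int>(b)

   i.e. a single scalar comparison of the bitwise images.  */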
6384
6385 /* Gimplify an expression sequence. This function gimplifies each
6386 expression and rewrites the original expression with the last
6387 expression of the sequence in GIMPLE form.
6388
6389 PRE_P points to the list where the side effects for all the
6390 expressions in the sequence will be emitted.
6391
6392 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
6393
6394 static enum gimplify_status
6395 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6396 {
6397 tree t = *expr_p;
6398
6399 do
6400 {
6401 tree *sub_p = &TREE_OPERAND (t, 0);
6402
6403 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
6404 gimplify_compound_expr (sub_p, pre_p, false);
6405 else
6406 gimplify_stmt (sub_p, pre_p);
6407
6408 t = TREE_OPERAND (t, 1);
6409 }
6410 while (TREE_CODE (t) == COMPOUND_EXPR);
6411
6412 *expr_p = t;
6413 if (want_value)
6414 return GS_OK;
6415 else
6416 {
6417 gimplify_stmt (expr_p, pre_p);
6418 return GS_ALL_DONE;
6419 }
6420 }
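/* E.g. for "x = (a, b, c)" the subexpressions A and B are gimplified
   as statements into PRE_P, and *EXPR_P is left pointing at C, whose
   value (if WANT_VALUE) is that of the whole COMPOUND_EXPR.  */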
6421
6422 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
6423 gimplify. After gimplification, EXPR_P will point to a new temporary
6424 that holds the original value of the SAVE_EXPR node.
6425
6426 PRE_P points to the list where side effects that must happen before
6427 *EXPR_P should be stored. */
6428
6429 static enum gimplify_status
6430 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6431 {
6432 enum gimplify_status ret = GS_ALL_DONE;
6433 tree val;
6434
6435 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
6436 val = TREE_OPERAND (*expr_p, 0);
6437
6438 if (TREE_TYPE (val) == error_mark_node)
6439 return GS_ERROR;
6440
6441 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
6442 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
6443 {
6444 /* The operand may be a void-valued expression. It is
6445 being executed only for its side-effects. */
6446 if (TREE_TYPE (val) == void_type_node)
6447 {
6448 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6449 is_gimple_stmt, fb_none);
6450 val = NULL;
6451 }
6452 else
6453 /* The temporary may not be an SSA name as later abnormal and EH
6454 control flow may invalidate use/def domination. When in SSA
6455 form, assume there are no such issues and SAVE_EXPRs only
6456 appear via GENERIC foldings. */
6457 val = get_initialized_tmp_var (val, pre_p, post_p,
6458 gimple_in_ssa_p (cfun));
6459
6460 TREE_OPERAND (*expr_p, 0) = val;
6461 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
6462 }
6463
6464 *expr_p = val;
6465
6466 return ret;
6467 }
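/* E.g. in GENERIC such as

     SAVE_EXPR <n * 4> + SAVE_EXPR <n * 4>

   where both operands share one SAVE_EXPR node, the operand N * 4 is
   evaluated once into a temporary D.1 on the first visit; subsequent
   visits see SAVE_EXPR_RESOLVED_P and reuse D.1.  */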
6468
6469 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6470
6471 unary_expr
6472 : ...
6473 | '&' varname
6474 ...
6475
6476 PRE_P points to the list where side effects that must happen before
6477 *EXPR_P should be stored.
6478
6479 POST_P points to the list where side effects that must happen after
6480 *EXPR_P should be stored. */
6481
6482 static enum gimplify_status
6483 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6484 {
6485 tree expr = *expr_p;
6486 tree op0 = TREE_OPERAND (expr, 0);
6487 enum gimplify_status ret;
6488 location_t loc = EXPR_LOCATION (*expr_p);
6489
6490 switch (TREE_CODE (op0))
6491 {
6492 case INDIRECT_REF:
6493 do_indirect_ref:
6494 /* Check if we are dealing with an expression of the form '&*ptr'.
6495 While the front end folds away '&*ptr' into 'ptr', these
6496 expressions may be generated internally by the compiler (e.g.,
6497 builtins like __builtin_va_end). */
6498 /* Caution: the silent array decomposition semantics we allow for
6499 ADDR_EXPR means we can't always discard the pair. */
6500 /* Gimplification of the ADDR_EXPR operand may drop
6501 cv-qualification conversions, so make sure we add them if
6502 needed. */
6503 {
6504 tree op00 = TREE_OPERAND (op0, 0);
6505 tree t_expr = TREE_TYPE (expr);
6506 tree t_op00 = TREE_TYPE (op00);
6507
6508 if (!useless_type_conversion_p (t_expr, t_op00))
6509 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
6510 *expr_p = op00;
6511 ret = GS_OK;
6512 }
6513 break;
6514
6515 case VIEW_CONVERT_EXPR:
6516 /* Take the address of our operand and then convert it to the type of
6517 this ADDR_EXPR.
6518
6519 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
6520 all clear. The impact of this transformation is even less clear. */
6521
6522 /* If the operand is a useless conversion, look through it. Doing so
6523 guarantees that the ADDR_EXPR and its operand will remain of the
6524 same type. */
6525 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
6526 op0 = TREE_OPERAND (op0, 0);
6527
6528 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
6529 build_fold_addr_expr_loc (loc,
6530 TREE_OPERAND (op0, 0)));
6531 ret = GS_OK;
6532 break;
6533
6534 case MEM_REF:
6535 if (integer_zerop (TREE_OPERAND (op0, 1)))
6536 goto do_indirect_ref;
6537
6538 /* fall through */
6539
6540 default:
6541 /* If we see a call to a declared builtin or see its address
6542 being taken (we can unify those cases here) then we can mark
6543 the builtin for implicit generation by GCC. */
6544 if (TREE_CODE (op0) == FUNCTION_DECL
6545 && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
6546 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
6547 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
6548
6549 /* We use fb_either here because the C frontend sometimes takes
6550 the address of a call that returns a struct; see
6551 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6552 the implied temporary explicit. */
6553
6554 /* Make the operand addressable. */
6555 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
6556 is_gimple_addressable, fb_either);
6557 if (ret == GS_ERROR)
6558 break;
6559
6560 /* Then mark it. Beware that it may not be possible to do so directly
6561 if a temporary has been created by the gimplification. */
6562 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
6563
6564 op0 = TREE_OPERAND (expr, 0);
6565
6566 /* For various reasons, the gimplification of the expression
6567 may have made a new INDIRECT_REF. */
6568 if (TREE_CODE (op0) == INDIRECT_REF
6569 || (TREE_CODE (op0) == MEM_REF
6570 && integer_zerop (TREE_OPERAND (op0, 1))))
6571 goto do_indirect_ref;
6572
6573 mark_addressable (TREE_OPERAND (expr, 0));
6574
6575 /* The FEs may end up building ADDR_EXPRs early on a decl with
6576 an incomplete type. Re-build ADDR_EXPRs in canonical form
6577 here. */
6578 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
6579 *expr_p = build_fold_addr_expr (op0);
6580
6581 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6582 recompute_tree_invariant_for_addr_expr (*expr_p);
6583
6584 /* If we re-built the ADDR_EXPR add a conversion to the original type
6585 if required. */
6586 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
6587 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
6588
6589 break;
6590 }
6591
6592 return ret;
6593 }
6594
6595 /* Gimplify the operands of an ASM_EXPR. Input operands should be gimple
6596 values; output operands should be gimple lvalues. */
6597
6598 static enum gimplify_status
6599 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6600 {
6601 tree expr;
6602 int noutputs;
6603 const char **oconstraints;
6604 int i;
6605 tree link;
6606 const char *constraint;
6607 bool allows_mem, allows_reg, is_inout;
6608 enum gimplify_status ret, tret;
6609 gasm *stmt;
6610 vec<tree, va_gc> *inputs;
6611 vec<tree, va_gc> *outputs;
6612 vec<tree, va_gc> *clobbers;
6613 vec<tree, va_gc> *labels;
6614 tree link_next;
6615
6616 expr = *expr_p;
6617 noutputs = list_length (ASM_OUTPUTS (expr));
6618 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
6619
6620 inputs = NULL;
6621 outputs = NULL;
6622 clobbers = NULL;
6623 labels = NULL;
6624
6625 ret = GS_ALL_DONE;
6626 link_next = NULL_TREE;
6627 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6628 {
6629 bool ok;
6630 size_t constraint_len;
6631
6632 link_next = TREE_CHAIN (link);
6633
6634 oconstraints[i]
6635 = constraint
6636 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6637 constraint_len = strlen (constraint);
6638 if (constraint_len == 0)
6639 continue;
6640
6641 ok = parse_output_constraint (&constraint, i, 0, 0,
6642 &allows_mem, &allows_reg, &is_inout);
6643 if (!ok)
6644 {
6645 ret = GS_ERROR;
6646 is_inout = false;
6647 }
6648
6649 /* If we can't make copies, we can only accept memory.
6650 Similarly for VLAs. */
6651 tree outtype = TREE_TYPE (TREE_VALUE (link));
6652 if (outtype != error_mark_node
6653 && (TREE_ADDRESSABLE (outtype)
6654 || !COMPLETE_TYPE_P (outtype)
6655 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype))))
6656 {
6657 if (allows_mem)
6658 allows_reg = 0;
6659 else
6660 {
6661 error ("impossible constraint in %<asm%>");
6662 error ("non-memory output %d must stay in memory", i);
6663 return GS_ERROR;
6664 }
6665 }
6666
6667 if (!allows_reg && allows_mem)
6668 mark_addressable (TREE_VALUE (link));
6669
6670 tree orig = TREE_VALUE (link);
6671 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6672 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6673 fb_lvalue | fb_mayfail);
6674 if (tret == GS_ERROR)
6675 {
6676 if (orig != error_mark_node)
6677 error ("invalid lvalue in %<asm%> output %d", i);
6678 ret = tret;
6679 }
6680
6681 /* If the constraint does not allow memory, make sure we gimplify
6682 it to a register if it is not one already but its base is. This
6683 happens for complex and vector components. */
6684 if (!allows_mem)
6685 {
6686 tree op = TREE_VALUE (link);
6687 if (! is_gimple_val (op)
6688 && is_gimple_reg_type (TREE_TYPE (op))
6689 && is_gimple_reg (get_base_address (op)))
6690 {
6691 tree tem = create_tmp_reg (TREE_TYPE (op));
6692 tree ass;
6693 if (is_inout)
6694 {
6695 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6696 tem, unshare_expr (op));
6697 gimplify_and_add (ass, pre_p);
6698 }
6699 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6700 gimplify_and_add (ass, post_p);
6701
6702 TREE_VALUE (link) = tem;
6703 tret = GS_OK;
6704 }
6705 }
6706
6707 vec_safe_push (outputs, link);
6708 TREE_CHAIN (link) = NULL_TREE;
6709
6710 if (is_inout)
6711 {
6712 /* An input/output operand. To give the optimizers more
6713 flexibility, split it into separate input and output
6714 operands. */
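	  /* For example (approximately)

	       __asm__ ("incl %0" : "+r" (x));

	     becomes the output constraint "=r" (x) plus a matching
	     input constraint "0" (x) referring back to operand 0.  */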
6715 tree input;
6716 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6717 char buf[11];
6718
6719 /* Turn the in/out constraint into an output constraint. */
6720 char *p = xstrdup (constraint);
6721 p[0] = '=';
6722 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6723
6724 /* And add a matching input constraint. */
6725 if (allows_reg)
6726 {
6727 sprintf (buf, "%u", i);
6728
6729 /* If there are multiple alternatives in the constraint,
6730 handle each of them individually. Those that allow a register
6731 will be replaced with the operand number; the others will stay
6732 unchanged. */
6733 if (strchr (p, ',') != NULL)
6734 {
6735 size_t len = 0, buflen = strlen (buf);
6736 char *beg, *end, *str, *dst;
6737
6738 for (beg = p + 1;;)
6739 {
6740 end = strchr (beg, ',');
6741 if (end == NULL)
6742 end = strchr (beg, '\0');
6743 if ((size_t) (end - beg) < buflen)
6744 len += buflen + 1;
6745 else
6746 len += end - beg + 1;
6747 if (*end)
6748 beg = end + 1;
6749 else
6750 break;
6751 }
6752
6753 str = (char *) alloca (len);
6754 for (beg = p + 1, dst = str;;)
6755 {
6756 const char *tem;
6757 bool mem_p, reg_p, inout_p;
6758
6759 end = strchr (beg, ',');
6760 if (end)
6761 *end = '\0';
6762 beg[-1] = '=';
6763 tem = beg - 1;
6764 parse_output_constraint (&tem, i, 0, 0,
6765 &mem_p, &reg_p, &inout_p);
6766 if (dst != str)
6767 *dst++ = ',';
6768 if (reg_p)
6769 {
6770 memcpy (dst, buf, buflen);
6771 dst += buflen;
6772 }
6773 else
6774 {
6775 if (end)
6776 len = end - beg;
6777 else
6778 len = strlen (beg);
6779 memcpy (dst, beg, len);
6780 dst += len;
6781 }
6782 if (end)
6783 beg = end + 1;
6784 else
6785 break;
6786 }
6787 *dst = '\0';
6788 input = build_string (dst - str, str);
6789 }
6790 else
6791 input = build_string (strlen (buf), buf);
6792 }
6793 else
6794 input = build_string (constraint_len - 1, constraint + 1);
6795
6796 free (p);
6797
6798 input = build_tree_list (build_tree_list (NULL_TREE, input),
6799 unshare_expr (TREE_VALUE (link)));
6800 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
6801 }
6802 }
6803
6804 link_next = NULL_TREE;
6805 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6806 {
6807 link_next = TREE_CHAIN (link);
6808 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6809 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6810 oconstraints, &allows_mem, &allows_reg);
6811
6812 /* If we can't make copies, we can only accept memory. */
6813 tree intype = TREE_TYPE (TREE_VALUE (link));
6814 if (intype != error_mark_node
6815 && (TREE_ADDRESSABLE (intype)
6816 || !COMPLETE_TYPE_P (intype)
6817 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype))))
6818 {
6819 if (allows_mem)
6820 allows_reg = 0;
6821 else
6822 {
6823 error ("impossible constraint in %<asm%>");
6824 error ("non-memory input %d must stay in memory", i);
6825 return GS_ERROR;
6826 }
6827 }
6828
6829 /* If the operand is a memory input, it should be an lvalue. */
6830 if (!allows_reg && allows_mem)
6831 {
6832 tree inputv = TREE_VALUE (link);
6833 STRIP_NOPS (inputv);
6834 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6835 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6836 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6837 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6838 || TREE_CODE (inputv) == MODIFY_EXPR)
6839 TREE_VALUE (link) = error_mark_node;
6840 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6841 is_gimple_lvalue, fb_lvalue | fb_mayfail);
6842 if (tret != GS_ERROR)
6843 {
6844 /* Unlike output operands, memory inputs are not guaranteed
6845 to be lvalues by the FE, and while the expressions are
6846 marked addressable there, if the input is e.g. a statement
6847 expression, temporaries in it might not end up being
6848 addressable. They might already be used in the IL and thus
6849 it is too late to make them addressable now though. */
6850 tree x = TREE_VALUE (link);
6851 while (handled_component_p (x))
6852 x = TREE_OPERAND (x, 0);
6853 if (TREE_CODE (x) == MEM_REF
6854 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6855 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6856 if ((VAR_P (x)
6857 || TREE_CODE (x) == PARM_DECL
6858 || TREE_CODE (x) == RESULT_DECL)
6859 && !TREE_ADDRESSABLE (x)
6860 && is_gimple_reg (x))
6861 {
6862 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6863 input_location), 0,
6864 "memory input %d is not directly addressable",
6865 i);
6866 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6867 }
6868 }
6869 mark_addressable (TREE_VALUE (link));
6870 if (tret == GS_ERROR)
6871 {
6872 if (inputv != error_mark_node)
6873 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6874 "memory input %d is not directly addressable", i);
6875 ret = tret;
6876 }
6877 }
6878 else
6879 {
6880 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6881 is_gimple_asm_val, fb_rvalue);
6882 if (tret == GS_ERROR)
6883 ret = tret;
6884 }
6885
6886 TREE_CHAIN (link) = NULL_TREE;
6887 vec_safe_push (inputs, link);
6888 }
6889
6890 link_next = NULL_TREE;
6891 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6892 {
6893 link_next = TREE_CHAIN (link);
6894 TREE_CHAIN (link) = NULL_TREE;
6895 vec_safe_push (clobbers, link);
6896 }
6897
6898 link_next = NULL_TREE;
6899 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6900 {
6901 link_next = TREE_CHAIN (link);
6902 TREE_CHAIN (link) = NULL_TREE;
6903 vec_safe_push (labels, link);
6904 }
6905
6906 /* Do not add ASMs with errors to the gimple IL stream. */
6907 if (ret != GS_ERROR)
6908 {
6909 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6910 inputs, outputs, clobbers, labels);
6911
6912 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
6913 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6914 gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));
6915
6916 gimplify_seq_add_stmt (pre_p, stmt);
6917 }
6918
6919 return ret;
6920 }
6921
6922 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6923 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6924 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6925 return to this function.
6926
6927 FIXME should we complexify the prequeue handling instead? Or use flags
6928 for all the cleanups and let the optimizer tighten them up? The current
6929 code seems pretty fragile; it will break on a cleanup within any
6930 non-conditional nesting. But any such nesting would be broken, anyway;
6931 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6932 and continues out of it. We can do that at the RTL level, though, so
6933 having an optimizer to tighten up try/finally regions would be a Good
6934 Thing. */
6935
6936 static enum gimplify_status
6937 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6938 {
6939 gimple_stmt_iterator iter;
6940 gimple_seq body_sequence = NULL;
6941
6942 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6943
6944 /* We only care about the number of conditions between the innermost
6945 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6946 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6947 int old_conds = gimplify_ctxp->conditions;
6948 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6949 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6950 gimplify_ctxp->conditions = 0;
6951 gimplify_ctxp->conditional_cleanups = NULL;
6952 gimplify_ctxp->in_cleanup_point_expr = true;
6953
6954 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6955
6956 gimplify_ctxp->conditions = old_conds;
6957 gimplify_ctxp->conditional_cleanups = old_cleanups;
6958 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6959
6960 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6961 {
6962 gimple *wce = gsi_stmt (iter);
6963
6964 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6965 {
6966 if (gsi_one_before_end_p (iter))
6967 {
6968 /* Note that gsi_insert_seq_before and gsi_remove do not
6969 scan operands, unlike some other sequence mutators. */
6970 if (!gimple_wce_cleanup_eh_only (wce))
6971 gsi_insert_seq_before_without_update (&iter,
6972 gimple_wce_cleanup (wce),
6973 GSI_SAME_STMT);
6974 gsi_remove (&iter, true);
6975 break;
6976 }
6977 else
6978 {
6979 gtry *gtry;
6980 gimple_seq seq;
6981 enum gimple_try_flags kind;
6982
6983 if (gimple_wce_cleanup_eh_only (wce))
6984 kind = GIMPLE_TRY_CATCH;
6985 else
6986 kind = GIMPLE_TRY_FINALLY;
6987 seq = gsi_split_seq_after (iter);
6988
6989 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
6990 /* Do not use gsi_replace here, as it may scan operands.
6991 We want to do a simple structural modification only. */
6992 gsi_set_stmt (&iter, gtry);
6993 iter = gsi_start (gtry->eval);
6994 }
6995 }
6996 else
6997 gsi_next (&iter);
6998 }
6999
7000 gimplify_seq_add_seq (pre_p, body_sequence);
7001 if (temp)
7002 {
7003 *expr_p = temp;
7004 return GS_OK;
7005 }
7006 else
7007 {
7008 *expr_p = NULL;
7009 return GS_ALL_DONE;
7010 }
7011 }
7012
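/* A rough sketch of the rewrite done by the loop above, with made-up
   names: a cleanup recorded while gimplifying the body, e.g. for a C++
   temporary with a destructor,

     D.1234 = f ();  WCE <~T (&D.1234)>;  g (D.1234);

   becomes

     D.1234 = f ();
     try
       {
         g (D.1234);
       }
     finally   (or a catch region, if the cleanup is EH-only)
       {
         ~T (&D.1234);
       }

   i.e. everything following the GIMPLE_WITH_CLEANUP_EXPR moves into
   the try body and the cleanup becomes the finally part.  */
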
7013 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
7014 is the cleanup action required. EH_ONLY is true if the cleanup should
7015 only be executed if an exception is thrown, not on normal exit.
7016 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
7017 only valid for clobbers. */
7018
7019 static void
7020 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
7021 bool force_uncond = false)
7022 {
7023 gimple *wce;
7024 gimple_seq cleanup_stmts = NULL;
7025
7026 /* Errors can result in improperly nested cleanups, which in turn cause
7027 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
7028 if (seen_error ())
7029 return;
7030
7031 if (gimple_conditional_context ())
7032 {
7033 /* If we're in a conditional context, this is more complex. We only
7034 want to run the cleanup if we actually ran the initialization that
7035 necessitates it, but we want to run it after the end of the
7036 conditional context. So we wrap the try/finally around the
7037 condition and use a flag to determine whether or not to actually
7038 run the destructor. Thus
7039
7040 test ? f(A()) : 0
7041
7042 becomes (approximately)
7043
7044 flag = 0;
7045 try {
7046 if (test) { A::A(temp); flag = 1; val = f(temp); }
7047 else { val = 0; }
7048 } finally {
7049 if (flag) A::~A(temp);
7050 }
7051 val
7052 */
7053 if (force_uncond)
7054 {
7055 gimplify_stmt (&cleanup, &cleanup_stmts);
7056 wce = gimple_build_wce (cleanup_stmts);
7057 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
7058 }
7059 else
7060 {
7061 tree flag = create_tmp_var (boolean_type_node, "cleanup");
7062 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
7063 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
7064
7065 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
7066 gimplify_stmt (&cleanup, &cleanup_stmts);
7067 wce = gimple_build_wce (cleanup_stmts);
7068 gimple_wce_set_cleanup_eh_only (wce, eh_only);
7069
7070 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
7071 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
7072 gimplify_seq_add_stmt (pre_p, ftrue);
7073
7074 /* Because of this manipulation, and the EH edges that jump
7075 threading cannot redirect, the temporary (VAR) will appear
7076 to be used uninitialized. Don't warn. */
7077 suppress_warning (var, OPT_Wuninitialized);
7078 }
7079 }
7080 else
7081 {
7082 gimplify_stmt (&cleanup, &cleanup_stmts);
7083 wce = gimple_build_wce (cleanup_stmts);
7084 gimple_wce_set_cleanup_eh_only (wce, eh_only);
7085 gimplify_seq_add_stmt (pre_p, wce);
7086 }
7087 }
7088
7089 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
7090
7091 static enum gimplify_status
7092 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
7093 {
7094 tree targ = *expr_p;
7095 tree temp = TARGET_EXPR_SLOT (targ);
7096 tree init = TARGET_EXPR_INITIAL (targ);
7097 enum gimplify_status ret;
7098
7099 bool unpoison_empty_seq = false;
7100 gimple_stmt_iterator unpoison_it;
7101
7102 if (init)
7103 {
7104 gimple_seq init_pre_p = NULL;
7105
7106 /* TARGET_EXPR temps aren't part of the enclosing block, so add them
7107 to the temps list. Also handle variable-length TARGET_EXPRs. */
7108 if (!poly_int_tree_p (DECL_SIZE (temp)))
7109 {
7110 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
7111 gimplify_type_sizes (TREE_TYPE (temp), &init_pre_p);
7112 /* FIXME: this is correct only when the size of the type does
7113 not depend on expressions evaluated in init. */
7114 gimplify_vla_decl (temp, &init_pre_p);
7115 }
7116 else
7117 {
7118 /* Save the location where we need to place unpoisoning. It's possible
7119 that the variable will later turn out to need to live in memory. */
7120 unpoison_it = gsi_last (*pre_p);
7121 unpoison_empty_seq = gsi_end_p (unpoison_it);
7122
7123 gimple_add_tmp_var (temp);
7124 }
7125
7126 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
7127 expression is supposed to initialize the slot. */
7128 if (VOID_TYPE_P (TREE_TYPE (init)))
7129 ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
7130 fb_none);
7131 else
7132 {
7133 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
7134 init = init_expr;
7135 ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
7136 fb_none);
7137 init = NULL;
7138 ggc_free (init_expr);
7139 }
7140 if (ret == GS_ERROR)
7141 {
7142 /* PR c++/28266 Make sure this is expanded only once. */
7143 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
7144 return GS_ERROR;
7145 }
7146
7147 if (init)
7148 gimplify_and_add (init, &init_pre_p);
7149
7150 /* Add a clobber for the temporary going out of scope, like
7151 gimplify_bind_expr. */
7152 if (gimplify_ctxp->in_cleanup_point_expr
7153 && needs_to_live_in_memory (temp))
7154 {
7155 if (flag_stack_reuse == SR_ALL)
7156 {
7157 tree clobber = build_clobber (TREE_TYPE (temp), CLOBBER_EOL);
7158 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
7159 gimple_push_cleanup (temp, clobber, false, pre_p, true);
7160 }
7161 if (asan_poisoned_variables
7162 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
7163 && !TREE_STATIC (temp)
7164 && dbg_cnt (asan_use_after_scope)
7165 && !gimplify_omp_ctxp)
7166 {
7167 tree asan_cleanup = build_asan_poison_call_expr (temp);
7168 if (asan_cleanup)
7169 {
7170 if (unpoison_empty_seq)
7171 unpoison_it = gsi_start (*pre_p);
7172
7173 asan_poison_variable (temp, false, &unpoison_it,
7174 unpoison_empty_seq);
7175 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
7176 }
7177 }
7178 }
7179
7180 gimple_seq_add_seq (pre_p, init_pre_p);
7181
7182 /* If needed, push the cleanup for the temp. */
7183 if (TARGET_EXPR_CLEANUP (targ))
7184 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
7185 CLEANUP_EH_ONLY (targ), pre_p);
7186
7187 /* Only expand this once. */
7188 TREE_OPERAND (targ, 3) = init;
7189 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
7190 }
7191 else
7192 /* We should have expanded this before. */
7193 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
7194
7195 *expr_p = temp;
7196 return GS_OK;
7197 }
7198
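/* For illustration (made-up names): gimplifying

     TARGET_EXPR <D.1234, f ()>

   in, say, a call argument yields roughly

     D.1234 = f ();
     g (D.1234);
     D.1234 = {CLOBBER(eol)};

   where the final clobber (and any ASan poisoning) is not emitted
   directly but registered as a cleanup through gimple_push_cleanup,
   provided we are inside a cleanup point and D.1234 needs to live in
   memory.  */
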
7199 /* Gimplification of expression trees. */
7200
7201 /* Gimplify an expression which appears at statement context. The
7202 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
7203 NULL, a new sequence is allocated.
7204
7205 Return true if we actually added a statement to the queue. */
7206
7207 bool
7208 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
7209 {
7210 gimple_seq_node last;
7211
7212 last = gimple_seq_last (*seq_p);
7213 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
7214 return last != gimple_seq_last (*seq_p);
7215 }
7216
7217 /* Add FIRSTPRIVATE entries for DECL to CTX and the surrounding OpenMP
7218 parallels. If entries already exist, force them to be some flavor of
7219 private. If there is no enclosing parallel, do nothing. */
7220
7221 void
7222 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
7223 {
7224 splay_tree_node n;
7225
7226 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
7227 return;
7228
7229 do
7230 {
7231 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7232 if (n != NULL)
7233 {
7234 if (n->value & GOVD_SHARED)
7235 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
7236 else if (n->value & GOVD_MAP)
7237 n->value |= GOVD_MAP_TO_ONLY;
7238 else
7239 return;
7240 }
7241 else if ((ctx->region_type & ORT_TARGET) != 0)
7242 {
7243 if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
7244 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
7245 else
7246 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
7247 }
7248 else if (ctx->region_type != ORT_WORKSHARE
7249 && ctx->region_type != ORT_TASKGROUP
7250 && ctx->region_type != ORT_SIMD
7251 && ctx->region_type != ORT_ACC
7252 && !(ctx->region_type & ORT_TARGET_DATA))
7253 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
7254
7255 ctx = ctx->outer_context;
7256 }
7257 while (ctx);
7258 }
7259
7260 /* Similarly for each of the type sizes of TYPE. */
7261
7262 static void
7263 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
7264 {
7265 if (type == NULL || type == error_mark_node)
7266 return;
7267 type = TYPE_MAIN_VARIANT (type);
7268
7269 if (ctx->privatized_types->add (type))
7270 return;
7271
7272 switch (TREE_CODE (type))
7273 {
7274 case INTEGER_TYPE:
7275 case ENUMERAL_TYPE:
7276 case BOOLEAN_TYPE:
7277 case REAL_TYPE:
7278 case FIXED_POINT_TYPE:
7279 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
7280 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
7281 break;
7282
7283 case ARRAY_TYPE:
7284 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
7285 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
7286 break;
7287
7288 case RECORD_TYPE:
7289 case UNION_TYPE:
7290 case QUAL_UNION_TYPE:
7291 {
7292 tree field;
7293 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
7294 if (TREE_CODE (field) == FIELD_DECL)
7295 {
7296 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
7297 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
7298 }
7299 }
7300 break;
7301
7302 case POINTER_TYPE:
7303 case REFERENCE_TYPE:
7304 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
7305 break;
7306
7307 default:
7308 break;
7309 }
7310
7311 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
7312 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
7313 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
7314 }
7315
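/* Sketch of the effect of the two functions above, on hypothetical code:

     void foo (int n)
     {
       int a[n];
     #pragma omp parallel private (a)
       use (a);
     }

   The gimplified temporaries behind TYPE_SIZE/TYPE_SIZE_UNIT of A's
   type (derived from N) are entered as firstprivate on the parallel,
   so each thread can lay out its privatized copy of the VLA.  */
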
7316 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
7317
7318 static void
7319 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
7320 {
7321 splay_tree_node n;
7322 unsigned int nflags;
7323 tree t;
7324
7325 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
7326 return;
7327
7328 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
7329 there are constructors involved somewhere. The exception is a shared
7330 clause: nothing is privatized in that case. */
7331 if ((flags & GOVD_SHARED) == 0
7332 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
7333 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
7334 flags |= GOVD_SEEN;
7335
7336 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7337 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7338 {
7339 /* We shouldn't be re-adding the decl with the same data
7340 sharing class. */
7341 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
7342 nflags = n->value | flags;
7343 /* The only combination of data sharing classes we should see is
7344 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
7345 reduction variables to be used in data sharing clauses. */
7346 gcc_assert ((ctx->region_type & ORT_ACC) != 0
7347 || ((nflags & GOVD_DATA_SHARE_CLASS)
7348 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
7349 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
7350 n->value = nflags;
7351 return;
7352 }
7353
7354 /* When adding a variable-sized variable, we have to handle all sorts
7355 of additional bits of data: the pointer replacement variable, and
7356 the parameters of the type. */
7357 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7358 {
7359 /* Add the pointer replacement variable as PRIVATE if the variable
7360 replacement is private, else FIRSTPRIVATE since we'll need the
7361 address of the original variable either for SHARED, or for the
7362 copy into or out of the context. */
7363 if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
7364 {
7365 if (flags & GOVD_MAP)
7366 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
7367 else if (flags & GOVD_PRIVATE)
7368 nflags = GOVD_PRIVATE;
7369 else if (((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7370 && (flags & GOVD_FIRSTPRIVATE))
7371 || (ctx->region_type == ORT_TARGET_DATA
7372 && (flags & GOVD_DATA_SHARE_CLASS) == 0))
7373 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
7374 else
7375 nflags = GOVD_FIRSTPRIVATE;
7376 nflags |= flags & GOVD_SEEN;
7377 t = DECL_VALUE_EXPR (decl);
7378 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7379 t = TREE_OPERAND (t, 0);
7380 gcc_assert (DECL_P (t));
7381 omp_add_variable (ctx, t, nflags);
7382 }
7383
7384 /* Add all of the variable and type parameters (which should have
7385 been gimplified to a formal temporary) as FIRSTPRIVATE. */
7386 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
7387 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
7388 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7389
7390 /* The variable-sized variable itself is never SHARED, only some form
7391 of PRIVATE. The sharing would take place via the pointer variable
7392 which we remapped above. */
7393 if (flags & GOVD_SHARED)
7394 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
7395 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
7396
7397 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
7398 alloca statement we generate for the variable, so make sure it
7399 is available. This isn't automatically needed for the SHARED
7400 case, since we won't be allocating local storage then.
7401 For local variables TYPE_SIZE_UNIT might not be gimplified yet;
7402 in that case omp_notice_variable will be called later on,
7403 when it is gimplified. */
7404 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
7405 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
7406 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
7407 }
7408 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
7409 && omp_privatize_by_reference (decl))
7410 {
7411 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7412
7413 /* Similar to the direct variable-sized case above, we'll need the
7414 size of the references being privatized. */
7415 if ((flags & GOVD_SHARED) == 0)
7416 {
7417 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7418 if (t && DECL_P (t))
7419 omp_notice_variable (ctx, t, true);
7420 }
7421 }
7422
7423 if (n != NULL)
7424 n->value |= flags;
7425 else
7426 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
7427
7428 /* For reduction clauses in OpenACC loop directives, by default create a
7429 copy clause on the enclosing parallel construct for carrying back the
7430 results. */
7431 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
7432 {
7433 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
7434 while (outer_ctx)
7435 {
7436 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
7437 if (n != NULL)
7438 {
7439 /* Ignore local variables and explicitly declared clauses. */
7440 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
7441 break;
7442 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
7443 {
7444 /* According to the OpenACC spec, such a reduction variable
7445 should already have a copy map on a kernels construct;
7446 verify that here. */
7447 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
7448 && (n->value & GOVD_MAP));
7449 }
7450 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7451 {
7452 /* Remove firstprivate and make it a copy map. */
7453 n->value &= ~GOVD_FIRSTPRIVATE;
7454 n->value |= GOVD_MAP;
7455 }
7456 }
7457 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7458 {
7459 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
7460 GOVD_MAP | GOVD_SEEN);
7461 break;
7462 }
7463 outer_ctx = outer_ctx->outer_context;
7464 }
7465 }
7466 }
7467
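/* A sketch of the variable-sized path above, on hypothetical code:

     int a[n];
   #pragma omp parallel shared (a)

   A's DECL_VALUE_EXPR is *A.PTR (A.PTR being a stand-in name for the
   pointer replacement variable); A.PTR is added as FIRSTPRIVATE since
   the address is what actually gets shared, the size temporaries
   derived from N are firstprivatized, and A itself is recorded as
   GOVD_SHARED | GOVD_DEBUG_PRIVATE.  */
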
7468 /* Notice a threadprivate variable DECL used in OMP context CTX.
7469 This just prints out diagnostics about threadprivate variable uses
7470 in untied tasks. If DECL2 is non-NULL, prevent this warning
7471 on that variable. */
7472
7473 static bool
7474 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
7475 tree decl2)
7476 {
7477 splay_tree_node n;
7478 struct gimplify_omp_ctx *octx;
7479
7480 for (octx = ctx; octx; octx = octx->outer_context)
7481 if ((octx->region_type & ORT_TARGET) != 0
7482 || octx->order_concurrent)
7483 {
7484 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
7485 if (n == NULL)
7486 {
7487 if (octx->order_concurrent)
7488 {
7489 error ("threadprivate variable %qE used in a region with"
7490 " %<order(concurrent)%> clause", DECL_NAME (decl));
7491 inform (octx->location, "enclosing region");
7492 }
7493 else
7494 {
7495 error ("threadprivate variable %qE used in target region",
7496 DECL_NAME (decl));
7497 inform (octx->location, "enclosing target region");
7498 }
7499 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
7500 }
7501 if (decl2)
7502 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
7503 }
7504
7505 if (ctx->region_type != ORT_UNTIED_TASK)
7506 return false;
7507 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7508 if (n == NULL)
7509 {
7510 error ("threadprivate variable %qE used in untied task",
7511 DECL_NAME (decl));
7512 inform (ctx->location, "enclosing task");
7513 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
7514 }
7515 if (decl2)
7516 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
7517 return false;
7518 }
7519
7520 /* Return true if global var DECL is device resident. */
7521
7522 static bool
7523 device_resident_p (tree decl)
7524 {
7525 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
7526
7527 if (!attr)
7528 return false;
7529
7530 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
7531 {
7532 tree c = TREE_VALUE (t);
7533 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
7534 return true;
7535 }
7536
7537 return false;
7538 }
7539
7540 /* Return true if DECL has an ACC DECLARE attribute. */
7541
7542 static bool
7543 is_oacc_declared (tree decl)
7544 {
7545 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
7546 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
7547 return declared != NULL_TREE;
7548 }
7549
7550 /* Determine outer default flags for DECL mentioned in an OMP region
7551 but not declared in an enclosing clause.
7552
7553 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7554 remapped firstprivate instead of shared. To some extent this is
7555 addressed in omp_firstprivatize_type_sizes, but not
7556 effectively. */
7557
7558 static unsigned
7559 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
7560 bool in_code, unsigned flags)
7561 {
7562 enum omp_clause_default_kind default_kind = ctx->default_kind;
7563 enum omp_clause_default_kind kind;
7564
7565 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
7566 if (ctx->region_type & ORT_TASK)
7567 {
7568 tree detach_clause = omp_find_clause (ctx->clauses, OMP_CLAUSE_DETACH);
7569
7570 /* The event-handle specified by a detach clause should always be firstprivate,
7571 regardless of the current default. */
7572 if (detach_clause && OMP_CLAUSE_DECL (detach_clause) == decl)
7573 kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
7574 }
7575 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
7576 default_kind = kind;
7577 else if (VAR_P (decl) && TREE_STATIC (decl) && DECL_IN_CONSTANT_POOL (decl))
7578 default_kind = OMP_CLAUSE_DEFAULT_SHARED;
7579 /* For C/C++ default({,first}private), variables with static storage duration
7580 declared in a namespace or at global scope and referenced in the construct
7581 must be explicitly specified, i.e. the default acts as default(none). */
7582 else if ((default_kind == OMP_CLAUSE_DEFAULT_PRIVATE
7583 || default_kind == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
7584 && VAR_P (decl)
7585 && is_global_var (decl)
7586 && (DECL_FILE_SCOPE_P (decl)
7587 || (DECL_CONTEXT (decl)
7588 && TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL))
7589 && !lang_GNU_Fortran ())
7590 default_kind = OMP_CLAUSE_DEFAULT_NONE;
7591
7592 switch (default_kind)
7593 {
7594 case OMP_CLAUSE_DEFAULT_NONE:
7595 {
7596 const char *rtype;
7597
7598 if (ctx->region_type & ORT_PARALLEL)
7599 rtype = "parallel";
7600 else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
7601 rtype = "taskloop";
7602 else if (ctx->region_type & ORT_TASK)
7603 rtype = "task";
7604 else if (ctx->region_type & ORT_TEAMS)
7605 rtype = "teams";
7606 else
7607 gcc_unreachable ();
7608
7609 error ("%qE not specified in enclosing %qs",
7610 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
7611 inform (ctx->location, "enclosing %qs", rtype);
7612 }
7613 /* FALLTHRU */
7614 case OMP_CLAUSE_DEFAULT_SHARED:
7615 flags |= GOVD_SHARED;
7616 break;
7617 case OMP_CLAUSE_DEFAULT_PRIVATE:
7618 flags |= GOVD_PRIVATE;
7619 break;
7620 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
7621 flags |= GOVD_FIRSTPRIVATE;
7622 break;
7623 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
7624 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7625 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
7626 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
7627 {
7628 omp_notice_variable (octx, decl, in_code);
7629 for (; octx; octx = octx->outer_context)
7630 {
7631 splay_tree_node n2;
7632
7633 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
7634 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
7635 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
7636 continue;
7637 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
7638 {
7639 flags |= GOVD_FIRSTPRIVATE;
7640 goto found_outer;
7641 }
7642 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
7643 {
7644 flags |= GOVD_SHARED;
7645 goto found_outer;
7646 }
7647 }
7648 }
7649
7650 if (TREE_CODE (decl) == PARM_DECL
7651 || (!is_global_var (decl)
7652 && DECL_CONTEXT (decl) == current_function_decl))
7653 flags |= GOVD_FIRSTPRIVATE;
7654 else
7655 flags |= GOVD_SHARED;
7656 found_outer:
7657 break;
7658
7659 default:
7660 gcc_unreachable ();
7661 }
7662
7663 return flags;
7664 }
7665
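/* For example, given

   #pragma omp parallel default(none)
     x++;

   with no data-sharing clause for X, the OMP_CLAUSE_DEFAULT_NONE case
   above reports "'x' not specified in enclosing 'parallel'".  */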
7666
7667 /* Determine outer default flags for DECL mentioned in an OACC region
7668 but not declared in an enclosing clause. */
7669
7670 static unsigned
7671 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
7672 {
7673 const char *rkind;
7674 bool on_device = false;
7675 bool is_private = false;
7676 bool declared = is_oacc_declared (decl);
7677 tree type = TREE_TYPE (decl);
7678
7679 if (omp_privatize_by_reference (decl))
7680 type = TREE_TYPE (type);
7681
7682 /* For Fortran COMMON blocks, only the variables used in those blocks are
7683 transferred and remapped. The block itself will have a private clause to
7684 avoid transferring the data twice.
7685 The hook evaluates to false by default. For a variable in a Fortran COMMON
7686 or EQUIVALENCE block it returns true (we pass shared=false), as only the
7687 variables in such a COMMON/EQUIVALENCE block shall be privatized, not
7688 the whole block. For C++ and Fortran, it can also be true under certain
7689 other conditions, if DECL_HAS_VALUE_EXPR. */
7690 if (RECORD_OR_UNION_TYPE_P (type))
7691 is_private = lang_hooks.decls.omp_disregard_value_expr (decl, false);
7692
7693 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
7694 && is_global_var (decl)
7695 && device_resident_p (decl)
7696 && !is_private)
7697 {
7698 on_device = true;
7699 flags |= GOVD_MAP_TO_ONLY;
7700 }
7701
7702 switch (ctx->region_type)
7703 {
7704 case ORT_ACC_KERNELS:
7705 rkind = "kernels";
7706
7707 if (is_private)
7708 flags |= GOVD_FIRSTPRIVATE;
7709 else if (AGGREGATE_TYPE_P (type))
7710 {
7711 /* Aggregates default to 'present_or_copy', or 'present'. */
7712 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7713 flags |= GOVD_MAP;
7714 else
7715 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7716 }
7717 else
7718 /* Scalars default to 'copy'. */
7719 flags |= GOVD_MAP | GOVD_MAP_FORCE;
7720
7721 break;
7722
7723 case ORT_ACC_PARALLEL:
7724 case ORT_ACC_SERIAL:
7725 rkind = ctx->region_type == ORT_ACC_PARALLEL ? "parallel" : "serial";
7726
7727 if (is_private)
7728 flags |= GOVD_FIRSTPRIVATE;
7729 else if (on_device || declared)
7730 flags |= GOVD_MAP;
7731 else if (AGGREGATE_TYPE_P (type))
7732 {
7733 /* Aggregates default to 'present_or_copy', or 'present'. */
7734 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7735 flags |= GOVD_MAP;
7736 else
7737 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7738 }
7739 else
7740 /* Scalars default to 'firstprivate'. */
7741 flags |= GOVD_FIRSTPRIVATE;
7742
7743 break;
7744
7745 default:
7746 gcc_unreachable ();
7747 }
7748
7749 if (DECL_ARTIFICIAL (decl))
7750 ; /* We can get compiler-generated decls, and should not complain
7751 about them. */
7752 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7753 {
7754 error ("%qE not specified in enclosing OpenACC %qs construct",
7755 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
7756 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
7757 }
7758 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7759 ; /* Handled above. */
7760 else
7761 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7762
7763 return flags;
7764 }
7765
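/* Rough examples of the OpenACC defaults computed above: inside
   '#pragma acc parallel' an unclaused scalar S defaults to
   GOVD_FIRSTPRIVATE while an unclaused aggregate A defaults to a
   present_or_copy style GOVD_MAP; inside '#pragma acc kernels' the
   scalar instead gets GOVD_MAP | GOVD_MAP_FORCE, i.e. a forced
   'copy'.  */
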
7766 /* Record the fact that DECL was used within the OMP context CTX.
7767 IN_CODE is true when real code uses DECL, and false when we should
7768 merely emit default(none) errors. Return true if DECL is going to
7769 be remapped and thus DECL shouldn't be gimplified into its
7770 DECL_VALUE_EXPR (if any). */
7771
7772 static bool
7773 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7774 {
7775 splay_tree_node n;
7776 unsigned flags = in_code ? GOVD_SEEN : 0;
7777 bool ret = false, shared;
7778
7779 if (error_operand_p (decl))
7780 return false;
7781
7782 if (ctx->region_type == ORT_NONE)
7783 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7784
7785 if (is_global_var (decl))
7786 {
7787 /* Threadprivate variables are predetermined. */
7788 if (DECL_THREAD_LOCAL_P (decl))
7789 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
7790
7791 if (DECL_HAS_VALUE_EXPR_P (decl))
7792 {
7793 if (ctx->region_type & ORT_ACC)
7794 /* For OpenACC, defer expansion of the value to avoid transferring
7795 privatized common block data instead of the implicitly or explicitly
7796 transferred variables that live in common blocks. */
7797 ;
7798 else
7799 {
7800 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7801
7802 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7803 return omp_notice_threadprivate_variable (ctx, decl, value);
7804 }
7805 }
7806
7807 if (gimplify_omp_ctxp->outer_context == NULL
7808 && VAR_P (decl)
7809 && oacc_get_fn_attrib (current_function_decl))
7810 {
7811 location_t loc = DECL_SOURCE_LOCATION (decl);
7812
7813 if (lookup_attribute ("omp declare target link",
7814 DECL_ATTRIBUTES (decl)))
7815 {
7816 error_at (loc,
7817 "%qE with %<link%> clause used in %<routine%> function",
7818 DECL_NAME (decl));
7819 return false;
7820 }
7821 else if (!lookup_attribute ("omp declare target",
7822 DECL_ATTRIBUTES (decl)))
7823 {
7824 error_at (loc,
7825 "%qE requires a %<declare%> directive for use "
7826 "in a %<routine%> function", DECL_NAME (decl));
7827 return false;
7828 }
7829 }
7830 }
7831
7832 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7833 if ((ctx->region_type & ORT_TARGET) != 0)
7834 {
7835 if (ctx->region_type & ORT_ACC)
7836 /* For OpenACC, as remarked above, defer expansion. */
7837 shared = false;
7838 else
7839 shared = true;
7840
7841 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7842 if (n == NULL)
7843 {
7844 unsigned nflags = flags;
7845 if ((ctx->region_type & ORT_ACC) == 0)
7846 {
7847 bool is_declare_target = false;
7848 if (is_global_var (decl)
7849 && varpool_node::get_create (decl)->offloadable)
7850 {
7851 struct gimplify_omp_ctx *octx;
7852 for (octx = ctx->outer_context;
7853 octx; octx = octx->outer_context)
7854 {
7855 n = splay_tree_lookup (octx->variables,
7856 (splay_tree_key)decl);
7857 if (n
7858 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7859 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7860 break;
7861 }
7862 is_declare_target = octx == NULL;
7863 }
7864 if (!is_declare_target)
7865 {
7866 int gdmk;
7867 enum omp_clause_defaultmap_kind kind;
7868 if (lang_hooks.decls.omp_allocatable_p (decl))
7869 gdmk = GDMK_ALLOCATABLE;
7870 else if (lang_hooks.decls.omp_scalar_target_p (decl))
7871 gdmk = GDMK_SCALAR_TARGET;
7872 else if (lang_hooks.decls.omp_scalar_p (decl, false))
7873 gdmk = GDMK_SCALAR;
7874 else if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7875 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7876 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7877 == POINTER_TYPE)))
7878 gdmk = GDMK_POINTER;
7879 else
7880 gdmk = GDMK_AGGREGATE;
7881 kind = lang_hooks.decls.omp_predetermined_mapping (decl);
7882 if (kind != OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED)
7883 {
7884 if (kind == OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE)
7885 nflags |= GOVD_FIRSTPRIVATE;
7886 else if (kind == OMP_CLAUSE_DEFAULTMAP_TO)
7887 nflags |= GOVD_MAP | GOVD_MAP_TO_ONLY;
7888 else
7889 gcc_unreachable ();
7890 }
7891 else if (ctx->defaultmap[gdmk] == 0)
7892 {
7893 tree d = lang_hooks.decls.omp_report_decl (decl);
7894 error ("%qE not specified in enclosing %<target%>",
7895 DECL_NAME (d));
7896 inform (ctx->location, "enclosing %<target%>");
7897 }
7898 else if (ctx->defaultmap[gdmk]
7899 & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
7900 nflags |= ctx->defaultmap[gdmk];
7901 else
7902 {
7903 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
7904 nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
7905 }
7906 }
7907 }
7908
7909 struct gimplify_omp_ctx *octx = ctx->outer_context;
7910 if ((ctx->region_type & ORT_ACC) && octx)
7911 {
7912 /* Look in outer OpenACC contexts, to see if there's a
7913 data attribute for this variable. */
7914 omp_notice_variable (octx, decl, in_code);
7915
7916 for (; octx; octx = octx->outer_context)
7917 {
7918 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7919 break;
7920 splay_tree_node n2
7921 = splay_tree_lookup (octx->variables,
7922 (splay_tree_key) decl);
7923 if (n2)
7924 {
7925 if (octx->region_type == ORT_ACC_HOST_DATA)
7926 error ("variable %qE declared in enclosing "
7927 "%<host_data%> region", DECL_NAME (decl));
7928 nflags |= GOVD_MAP;
7929 if (octx->region_type == ORT_ACC_DATA
7930 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7931 nflags |= GOVD_MAP_0LEN_ARRAY;
7932 goto found_outer;
7933 }
7934 }
7935 }
7936
7937 if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
7938 | GOVD_MAP_ALLOC_ONLY)) == flags)
7939 {
7940 tree type = TREE_TYPE (decl);
7941
7942 if (gimplify_omp_ctxp->target_firstprivatize_array_bases
7943 && omp_privatize_by_reference (decl))
7944 type = TREE_TYPE (type);
7945 if (!omp_mappable_type (type))
7946 {
7947 error ("%qD referenced in target region does not have "
7948 "a mappable type", decl);
7949 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7950 }
7951 else
7952 {
7953 if ((ctx->region_type & ORT_ACC) != 0)
7954 nflags = oacc_default_clause (ctx, decl, flags);
7955 else
7956 nflags |= GOVD_MAP;
7957 }
7958 }
7959 found_outer:
7960 omp_add_variable (ctx, decl, nflags);
7961 }
7962 else
7963 {
7964 /* If nothing changed, there's nothing left to do. */
7965 if ((n->value & flags) == flags)
7966 return ret;
7967 flags |= n->value;
7968 n->value = flags;
7969 }
7970 goto do_outer;
7971 }
7972
7973 if (n == NULL)
7974 {
7975 if (ctx->region_type == ORT_WORKSHARE
7976 || ctx->region_type == ORT_TASKGROUP
7977 || ctx->region_type == ORT_SIMD
7978 || ctx->region_type == ORT_ACC
7979 || (ctx->region_type & ORT_TARGET_DATA) != 0)
7980 goto do_outer;
7981
7982 flags = omp_default_clause (ctx, decl, in_code, flags);
7983
7984 if ((flags & GOVD_PRIVATE)
7985 && lang_hooks.decls.omp_private_outer_ref (decl))
7986 flags |= GOVD_PRIVATE_OUTER_REF;
7987
7988 omp_add_variable (ctx, decl, flags);
7989
7990 shared = (flags & GOVD_SHARED) != 0;
7991 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7992 goto do_outer;
7993 }
7994
7995 /* Don't mark as GOVD_SEEN addressable temporaries seen only in simd
7996 lb, b or incr expressions; those shouldn't be turned into simd arrays. */
7997 if (ctx->region_type == ORT_SIMD
7998 && ctx->in_for_exprs
7999 && ((n->value & (GOVD_PRIVATE | GOVD_SEEN | GOVD_EXPLICIT))
8000 == GOVD_PRIVATE))
8001 flags &= ~GOVD_SEEN;
8002
8003 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
8004 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
8005 && DECL_SIZE (decl))
8006 {
8007 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
8008 {
8009 splay_tree_node n2;
8010 tree t = DECL_VALUE_EXPR (decl);
8011 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
8012 t = TREE_OPERAND (t, 0);
8013 gcc_assert (DECL_P (t));
8014 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
8015 n2->value |= GOVD_SEEN;
8016 }
8017 else if (omp_privatize_by_reference (decl)
8018 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
8019 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
8020 != INTEGER_CST))
8021 {
8022 splay_tree_node n2;
8023 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
8024 gcc_assert (DECL_P (t));
8025 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
8026 if (n2)
8027 omp_notice_variable (ctx, t, true);
8028 }
8029 }
8030
8031 if (ctx->region_type & ORT_ACC)
8032 /* For OpenACC, as remarked above, defer expansion. */
8033 shared = false;
8034 else
8035 shared = ((flags | n->value) & GOVD_SHARED) != 0;
8036 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
8037
8038 /* If nothing changed, there's nothing left to do. */
8039 if ((n->value & flags) == flags)
8040 return ret;
8041 flags |= n->value;
8042 n->value = flags;
8043
8044 do_outer:
8045 /* If the variable is private in the current context, then we don't
8046 need to propagate anything to an outer context. */
8047 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
8048 return ret;
8049 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8050 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8051 return ret;
8052 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
8053 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8054 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
8055 return ret;
8056 if (ctx->outer_context
8057 && omp_notice_variable (ctx->outer_context, decl, in_code))
8058 return true;
8059 return ret;
8060 }
8061
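/* For instance (a sketch, ignoring declare-target variables and
   defaultmap clauses): an aggregate G referenced inside
   '#pragma omp target' with no explicit map clause reaches the
   ORT_TARGET path above with N == NULL and is implicitly given
   GOVD_MAP (a tofrom mapping), whereas an unclaused scalar is
   implicitly firstprivatized via ctx->defaultmap[GDMK_SCALAR].  */
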
8062 /* Verify that DECL is private within CTX. If there's specific information
8063 to the contrary in the innermost scope, generate an error. */
8064
8065 static bool
8066 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
8067 {
8068 splay_tree_node n;
8069
8070 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
8071 if (n != NULL)
8072 {
8073 if (n->value & GOVD_SHARED)
8074 {
8075 if (ctx == gimplify_omp_ctxp)
8076 {
8077 if (simd)
8078 error ("iteration variable %qE is predetermined linear",
8079 DECL_NAME (decl));
8080 else
8081 error ("iteration variable %qE should be private",
8082 DECL_NAME (decl));
8083 n->value = GOVD_PRIVATE;
8084 return true;
8085 }
8086 else
8087 return false;
8088 }
8089 else if ((n->value & GOVD_EXPLICIT) != 0
8090 && (ctx == gimplify_omp_ctxp
8091 || (ctx->region_type == ORT_COMBINED_PARALLEL
8092 && gimplify_omp_ctxp->outer_context == ctx)))
8093 {
8094 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
8095 error ("iteration variable %qE should not be firstprivate",
8096 DECL_NAME (decl));
8097 else if ((n->value & GOVD_REDUCTION) != 0)
8098 error ("iteration variable %qE should not be reduction",
8099 DECL_NAME (decl));
8100 else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
8101 error ("iteration variable %qE should not be linear",
8102 DECL_NAME (decl));
8103 }
8104 return (ctx == gimplify_omp_ctxp
8105 || (ctx->region_type == ORT_COMBINED_PARALLEL
8106 && gimplify_omp_ctxp->outer_context == ctx));
8107 }
8108
8109 if (ctx->region_type != ORT_WORKSHARE
8110 && ctx->region_type != ORT_TASKGROUP
8111 && ctx->region_type != ORT_SIMD
8112 && ctx->region_type != ORT_ACC)
8113 return false;
8114 else if (ctx->outer_context)
8115 return omp_is_private (ctx->outer_context, decl, simd);
8116 return false;
8117 }
8118
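/* For example:

   #pragma omp for firstprivate (i)
     for (i = 0; i < 64; i++)
       ;

   hits the GOVD_EXPLICIT check above and emits "iteration variable
   'i' should not be firstprivate".  */
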
8119 /* Return true if DECL is private within a parallel region
8120 that binds to the current construct's context, or appears in that
8121 parallel region's REDUCTION clause. */
8122
8123 static bool
8124 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
8125 {
8126 splay_tree_node n;
8127
8128 do
8129 {
8130 ctx = ctx->outer_context;
8131 if (ctx == NULL)
8132 {
8133 if (is_global_var (decl))
8134 return false;
8135
8136 /* References might be private, but they might be shared too.
8137 When checking for copyprivate, assume they might be
8138 private; otherwise assume they might be shared. */
8139 if (copyprivate)
8140 return true;
8141
8142 if (omp_privatize_by_reference (decl))
8143 return false;
8144
8145 /* Treat C++ privatized non-static data members outside
8146 of the privatization region the same way. */
8147 if (omp_member_access_dummy_var (decl))
8148 return false;
8149
8150 return true;
8151 }
8152
8153 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8154
8155 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
8156 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
8157 {
8158 if ((ctx->region_type & ORT_TARGET_DATA) != 0
8159 || n == NULL
8160 || (n->value & GOVD_MAP) == 0)
8161 continue;
8162 return false;
8163 }
8164
8165 if (n != NULL)
8166 {
8167 if ((n->value & GOVD_LOCAL) != 0
8168 && omp_member_access_dummy_var (decl))
8169 return false;
8170 return (n->value & GOVD_SHARED) == 0;
8171 }
8172
8173 if (ctx->region_type == ORT_WORKSHARE
8174 || ctx->region_type == ORT_TASKGROUP
8175 || ctx->region_type == ORT_SIMD
8176 || ctx->region_type == ORT_ACC)
8177 continue;
8178
8179 break;
8180 }
8181 while (1);
8182 return false;
8183 }
8184
8185 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
8186
8187 static tree
8188 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
8189 {
8190 tree t = *tp;
8191
8192 /* If this is the DECL_EXPR for the DECL we are looking for, return it. */
8193 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
8194 return t;
8195
8196 if (IS_TYPE_OR_DECL_P (t))
8197 *walk_subtrees = 0;
8198 return NULL_TREE;
8199 }
8200
8201
8202 /* Gimplify the affinity clause but effectively ignore it.
8203 Generate:
8204 var = begin;
8205 if ((step > 0) ? var <= end : var > end)
8206 locator_var_expr; */
8207
8208 static void
8209 gimplify_omp_affinity (tree *list_p, gimple_seq *pre_p)
8210 {
8211 tree last_iter = NULL_TREE;
8212 tree last_bind = NULL_TREE;
8213 tree label = NULL_TREE;
8214 tree *last_body = NULL;
8215 for (tree c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8216 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY)
8217 {
8218 tree t = OMP_CLAUSE_DECL (c);
8219 if (TREE_CODE (t) == TREE_LIST
8220 && TREE_PURPOSE (t)
8221 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8222 {
8223 if (TREE_VALUE (t) == null_pointer_node)
8224 continue;
8225 if (TREE_PURPOSE (t) != last_iter)
8226 {
8227 if (last_bind)
8228 {
8229 append_to_statement_list (label, last_body);
8230 gimplify_and_add (last_bind, pre_p);
8231 last_bind = NULL_TREE;
8232 }
8233 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8234 {
8235 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
8236 is_gimple_val, fb_rvalue) == GS_ERROR
8237 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
8238 is_gimple_val, fb_rvalue) == GS_ERROR
8239 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
8240 is_gimple_val, fb_rvalue) == GS_ERROR
8241 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
8242 is_gimple_val, fb_rvalue)
8243 == GS_ERROR))
8244 return;
8245 }
8246 last_iter = TREE_PURPOSE (t);
8247 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8248 last_bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
8249 NULL, block);
8250 last_body = &BIND_EXPR_BODY (last_bind);
8251 tree cond = NULL_TREE;
8252 location_t loc = OMP_CLAUSE_LOCATION (c);
8253 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8254 {
8255 tree var = TREE_VEC_ELT (it, 0);
8256 tree begin = TREE_VEC_ELT (it, 1);
8257 tree end = TREE_VEC_ELT (it, 2);
8258 tree step = TREE_VEC_ELT (it, 3);
8259 loc = DECL_SOURCE_LOCATION (var);
8260 tree tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8261 var, begin);
8262 append_to_statement_list_force (tem, last_body);
8263
8264 tree cond1 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8265 step, build_zero_cst (TREE_TYPE (step)));
8266 tree cond2 = fold_build2_loc (loc, LE_EXPR, boolean_type_node,
8267 var, end);
8268 tree cond3 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8269 var, end);
8270 cond1 = fold_build3_loc (loc, COND_EXPR, boolean_type_node,
8271 cond1, cond2, cond3);
8272 if (cond)
8273 cond = fold_build2_loc (loc, TRUTH_AND_EXPR,
8274 boolean_type_node, cond, cond1);
8275 else
8276 cond = cond1;
8277 }
8278 tree cont_label = create_artificial_label (loc);
8279 label = build1 (LABEL_EXPR, void_type_node, cont_label);
8280 tree tem = fold_build3_loc (loc, COND_EXPR, void_type_node, cond,
8281 void_node,
8282 build_and_jump (&cont_label));
8283 append_to_statement_list_force (tem, last_body);
8284 }
8285 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8286 {
8287 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t), 0),
8288 last_body);
8289 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8290 }
8291 if (error_operand_p (TREE_VALUE (t)))
8292 return;
8293 append_to_statement_list_force (TREE_VALUE (t), last_body);
8294 TREE_VALUE (t) = null_pointer_node;
8295 }
8296 else
8297 {
8298 if (last_bind)
8299 {
8300 append_to_statement_list (label, last_body);
8301 gimplify_and_add (last_bind, pre_p);
8302 last_bind = NULL_TREE;
8303 }
8304 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8305 {
8306 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8307 NULL, is_gimple_val, fb_rvalue);
8308 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8309 }
8310 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8311 return;
8312 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8313 is_gimple_lvalue, fb_lvalue) == GS_ERROR)
8314 return;
8315 gimplify_and_add (OMP_CLAUSE_DECL (c), pre_p);
8316 }
8317 }
8318 if (last_bind)
8319 {
8320 append_to_statement_list (label, last_body);
8321 gimplify_and_add (last_bind, pre_p);
8322 }
8323 return;
8324 }
8325
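/* For example (sketch):

   #pragma omp task affinity (iterator (i = 0 : n) : p[i])

   expands here into a loop over I that evaluates each locator p[i]
   merely for its side effects; the affinity hint itself is otherwise
   dropped.  */
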
8326 /* If *LIST_P contains any OpenMP depend clauses with iterators,
8327 lower all the depend clauses by populating the corresponding depend
8328 array. Return 0 if there are no such depend clauses, 2 if all
8329 depend clauses should be removed, and 1 otherwise. */
8330
8331 static int
8332 gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
8333 {
8334 tree c;
8335 gimple *g;
8336 size_t n[5] = { 0, 0, 0, 0, 0 };
8337 bool unused[5];
8338 tree counts[5] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
8339 tree last_iter = NULL_TREE, last_count = NULL_TREE;
8340 size_t i, j;
8341 location_t first_loc = UNKNOWN_LOCATION;
8342
8343 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8344 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8345 {
8346 switch (OMP_CLAUSE_DEPEND_KIND (c))
8347 {
8348 case OMP_CLAUSE_DEPEND_IN:
8349 i = 2;
8350 break;
8351 case OMP_CLAUSE_DEPEND_OUT:
8352 case OMP_CLAUSE_DEPEND_INOUT:
8353 i = 0;
8354 break;
8355 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8356 i = 1;
8357 break;
8358 case OMP_CLAUSE_DEPEND_DEPOBJ:
8359 i = 3;
8360 break;
8361 case OMP_CLAUSE_DEPEND_INOUTSET:
8362 i = 4;
8363 break;
8364 default:
8365 gcc_unreachable ();
8366 }
8367 tree t = OMP_CLAUSE_DECL (c);
8368 if (first_loc == UNKNOWN_LOCATION)
8369 first_loc = OMP_CLAUSE_LOCATION (c);
8370 if (TREE_CODE (t) == TREE_LIST
8371 && TREE_PURPOSE (t)
8372 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8373 {
8374 if (TREE_PURPOSE (t) != last_iter)
8375 {
8376 tree tcnt = size_one_node;
8377 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8378 {
8379 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
8380 is_gimple_val, fb_rvalue) == GS_ERROR
8381 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
8382 is_gimple_val, fb_rvalue) == GS_ERROR
8383 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
8384 is_gimple_val, fb_rvalue) == GS_ERROR
8385 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
8386 is_gimple_val, fb_rvalue)
8387 == GS_ERROR))
8388 return 2;
8389 tree var = TREE_VEC_ELT (it, 0);
8390 tree begin = TREE_VEC_ELT (it, 1);
8391 tree end = TREE_VEC_ELT (it, 2);
8392 tree step = TREE_VEC_ELT (it, 3);
8393 tree orig_step = TREE_VEC_ELT (it, 4);
8394 tree type = TREE_TYPE (var);
8395 tree stype = TREE_TYPE (step);
8396 location_t loc = DECL_SOURCE_LOCATION (var);
8397 tree endmbegin;
8398 /* Compute count for this iterator as
8399 orig_step > 0
8400 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
8401 : (begin > end ? (end - begin + (step + 1)) / step : 0)
8402 and compute product of those for the entire depend
8403 clause. */
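/* E.g. begin 0, end 10, step 3 with orig_step > 0 gives
   (10 - 0 + (3 - 1)) / 3 == 4 iterations (0, 3, 6, 9). */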
8404 if (POINTER_TYPE_P (type))
8405 endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
8406 stype, end, begin);
8407 else
8408 endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
8409 end, begin);
8410 tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
8411 step,
8412 build_int_cst (stype, 1));
8413 tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
8414 build_int_cst (stype, 1));
8415 tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
8416 unshare_expr (endmbegin),
8417 stepm1);
8418 pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
8419 pos, step);
8420 tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
8421 endmbegin, stepp1);
8422 if (TYPE_UNSIGNED (stype))
8423 {
8424 neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
8425 step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
8426 }
8427 neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
8428 neg, step);
8429 step = NULL_TREE;
8430 tree cond = fold_build2_loc (loc, LT_EXPR,
8431 boolean_type_node,
8432 begin, end);
8433 pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
8434 build_int_cst (stype, 0));
8435 cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
8436 end, begin);
8437 neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
8438 build_int_cst (stype, 0));
8439 tree osteptype = TREE_TYPE (orig_step);
8440 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8441 orig_step,
8442 build_int_cst (osteptype, 0));
8443 tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
8444 cond, pos, neg);
8445 cnt = fold_convert_loc (loc, sizetype, cnt);
8446 if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
8447 fb_rvalue) == GS_ERROR)
8448 return 2;
8449 tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
8450 }
8451 if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
8452 fb_rvalue) == GS_ERROR)
8453 return 2;
8454 last_iter = TREE_PURPOSE (t);
8455 last_count = tcnt;
8456 }
8457 if (counts[i] == NULL_TREE)
8458 counts[i] = last_count;
8459 else
8460 counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
8461 PLUS_EXPR, counts[i], last_count);
8462 }
8463 else
8464 n[i]++;
8465 }
8466 for (i = 0; i < 5; i++)
8467 if (counts[i])
8468 break;
8469 if (i == 5)
8470 return 0;
8471
8472 tree total = size_zero_node;
8473 for (i = 0; i < 5; i++)
8474 {
8475 unused[i] = counts[i] == NULL_TREE && n[i] == 0;
8476 if (counts[i] == NULL_TREE)
8477 counts[i] = size_zero_node;
8478 if (n[i])
8479 counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
8480 if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
8481 fb_rvalue) == GS_ERROR)
8482 return 2;
8483 total = size_binop (PLUS_EXPR, total, counts[i]);
8484 }
8485
8486 if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
8487 == GS_ERROR)
8488 return 2;
8489 bool is_old = unused[1] && unused[3] && unused[4];
8490 tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
8491 size_int (is_old ? 1 : 4));
8492 if (!unused[4])
8493 totalpx = size_binop (PLUS_EXPR, totalpx,
8494 size_binop (MULT_EXPR, counts[4], size_int (2)));
8495 tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
8496 tree array = create_tmp_var_raw (type);
8497 TREE_ADDRESSABLE (array) = 1;
8498 if (!poly_int_tree_p (totalpx))
8499 {
8500 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
8501 gimplify_type_sizes (TREE_TYPE (array), pre_p);
8502 if (gimplify_omp_ctxp)
8503 {
8504 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8505 while (ctx
8506 && (ctx->region_type == ORT_WORKSHARE
8507 || ctx->region_type == ORT_TASKGROUP
8508 || ctx->region_type == ORT_SIMD
8509 || ctx->region_type == ORT_ACC))
8510 ctx = ctx->outer_context;
8511 if (ctx)
8512 omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
8513 }
8514 gimplify_vla_decl (array, pre_p);
8515 }
8516 else
8517 gimple_add_tmp_var (array);
8518 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
8519 NULL_TREE);
8520 tree tem;
8521 if (!is_old)
8522 {
8523 tem = build2 (MODIFY_EXPR, void_type_node, r,
8524 build_int_cst (ptr_type_node, 0));
8525 gimplify_and_add (tem, pre_p);
8526 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
8527 NULL_TREE);
8528 }
8529 tem = build2 (MODIFY_EXPR, void_type_node, r,
8530 fold_convert (ptr_type_node, total));
8531 gimplify_and_add (tem, pre_p);
8532 for (i = 1; i < (is_old ? 2 : 4); i++)
8533 {
8534 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
8535 NULL_TREE, NULL_TREE);
8536 tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
8537 gimplify_and_add (tem, pre_p);
8538 }
8539
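/* A sketch of the resulting array layout (as consumed by libgomp):
   the legacy format, used when only in/out/inout clauses appear, is
     { total, n_out, addr0, addr1, ... }
   while the extended format is
     { 0, total, n_out, n_mutexinoutset, n_in, addr0, ... },
   with inoutset entries stored as an extra pair of slots at the end
   (address plus GOMP_DEPEND_INOUTSET tag) reached via CNTS[5]. */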
8540 tree cnts[6];
8541 for (j = 5; j; j--)
8542 if (!unused[j - 1])
8543 break;
8544 for (i = 0; i < 5; i++)
8545 {
8546 if (i && (i >= j || unused[i - 1]))
8547 {
8548 cnts[i] = cnts[i - 1];
8549 continue;
8550 }
8551 cnts[i] = create_tmp_var (sizetype);
8552 if (i == 0)
8553 g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
8554 else
8555 {
8556 tree t;
8557 if (is_old)
8558 t = size_binop (PLUS_EXPR, counts[0], size_int (2));
8559 else
8560 t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
8561 if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
8562 == GS_ERROR)
8563 return 2;
8564 g = gimple_build_assign (cnts[i], t);
8565 }
8566 gimple_seq_add_stmt (pre_p, g);
8567 }
8568 if (unused[4])
8569 cnts[5] = NULL_TREE;
8570 else
8571 {
8572 tree t = size_binop (PLUS_EXPR, total, size_int (5));
8573 cnts[5] = create_tmp_var (sizetype);
8574 g = gimple_build_assign (cnts[5], t);
8575 gimple_seq_add_stmt (pre_p, g);
8576 }
8577
8578 last_iter = NULL_TREE;
8579 tree last_bind = NULL_TREE;
8580 tree *last_body = NULL;
8581 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8582 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8583 {
8584 switch (OMP_CLAUSE_DEPEND_KIND (c))
8585 {
8586 case OMP_CLAUSE_DEPEND_IN:
8587 i = 2;
8588 break;
8589 case OMP_CLAUSE_DEPEND_OUT:
8590 case OMP_CLAUSE_DEPEND_INOUT:
8591 i = 0;
8592 break;
8593 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8594 i = 1;
8595 break;
8596 case OMP_CLAUSE_DEPEND_DEPOBJ:
8597 i = 3;
8598 break;
8599 case OMP_CLAUSE_DEPEND_INOUTSET:
8600 i = 4;
8601 break;
8602 default:
8603 gcc_unreachable ();
8604 }
8605 tree t = OMP_CLAUSE_DECL (c);
8606 if (TREE_CODE (t) == TREE_LIST
8607 && TREE_PURPOSE (t)
8608 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8609 {
8610 if (TREE_PURPOSE (t) != last_iter)
8611 {
8612 if (last_bind)
8613 gimplify_and_add (last_bind, pre_p);
8614 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8615 last_bind = build3 (BIND_EXPR, void_type_node,
8616 BLOCK_VARS (block), NULL, block);
8617 TREE_SIDE_EFFECTS (last_bind) = 1;
8618 SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
8619 tree *p = &BIND_EXPR_BODY (last_bind);
8620 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8621 {
8622 tree var = TREE_VEC_ELT (it, 0);
8623 tree begin = TREE_VEC_ELT (it, 1);
8624 tree end = TREE_VEC_ELT (it, 2);
8625 tree step = TREE_VEC_ELT (it, 3);
8626 tree orig_step = TREE_VEC_ELT (it, 4);
8627 tree type = TREE_TYPE (var);
8628 location_t loc = DECL_SOURCE_LOCATION (var);
8629 /* Emit:
8630 var = begin;
8631 goto cond_label;
8632 beg_label:
8633 ...
8634 var = var + step;
8635 cond_label:
8636 if (orig_step > 0) {
8637 if (var < end) goto beg_label;
8638 } else {
8639 if (var > end) goto beg_label;
8640 }
8641 for each iterator, with inner iterators added to
8642 the ... above. */
8643 tree beg_label = create_artificial_label (loc);
8644 tree cond_label = NULL_TREE;
8645 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8646 var, begin);
8647 append_to_statement_list_force (tem, p);
8648 tem = build_and_jump (&cond_label);
8649 append_to_statement_list_force (tem, p);
8650 tem = build1 (LABEL_EXPR, void_type_node, beg_label);
8651 append_to_statement_list (tem, p);
8652 tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
8653 NULL_TREE, NULL_TREE);
8654 TREE_SIDE_EFFECTS (bind) = 1;
8655 SET_EXPR_LOCATION (bind, loc);
8656 append_to_statement_list_force (bind, p);
8657 if (POINTER_TYPE_P (type))
8658 tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
8659 var, fold_convert_loc (loc, sizetype,
8660 step));
8661 else
8662 tem = build2_loc (loc, PLUS_EXPR, type, var, step);
8663 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8664 var, tem);
8665 append_to_statement_list_force (tem, p);
8666 tem = build1 (LABEL_EXPR, void_type_node, cond_label);
8667 append_to_statement_list (tem, p);
8668 tree cond = fold_build2_loc (loc, LT_EXPR,
8669 boolean_type_node,
8670 var, end);
8671 tree pos
8672 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8673 cond, build_and_jump (&beg_label),
8674 void_node);
8675 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8676 var, end);
8677 tree neg
8678 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8679 cond, build_and_jump (&beg_label),
8680 void_node);
8681 tree osteptype = TREE_TYPE (orig_step);
8682 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8683 orig_step,
8684 build_int_cst (osteptype, 0));
8685 tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
8686 cond, pos, neg);
8687 append_to_statement_list_force (tem, p);
8688 p = &BIND_EXPR_BODY (bind);
8689 }
8690 last_body = p;
8691 }
8692 last_iter = TREE_PURPOSE (t);
8693 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8694 {
8695 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
8696 0), last_body);
8697 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8698 }
8699 if (error_operand_p (TREE_VALUE (t)))
8700 return 2;
8701 if (TREE_VALUE (t) != null_pointer_node)
8702 TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
8703 if (i == 4)
8704 {
8705 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8706 NULL_TREE, NULL_TREE);
8707 tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
8708 NULL_TREE, NULL_TREE);
8709 r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
8710 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8711 void_type_node, r, r2);
8712 append_to_statement_list_force (tem, last_body);
8713 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8714 void_type_node, cnts[i],
8715 size_binop (PLUS_EXPR, cnts[i],
8716 size_int (1)));
8717 append_to_statement_list_force (tem, last_body);
8718 i = 5;
8719 }
8720 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8721 NULL_TREE, NULL_TREE);
8722 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8723 void_type_node, r, TREE_VALUE (t));
8724 append_to_statement_list_force (tem, last_body);
8725 if (i == 5)
8726 {
8727 r = build4 (ARRAY_REF, ptr_type_node, array,
8728 size_binop (PLUS_EXPR, cnts[i], size_int (1)),
8729 NULL_TREE, NULL_TREE);
8730 tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
8731 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8732 void_type_node, r, tem);
8733 append_to_statement_list_force (tem, last_body);
8734 }
8735 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8736 void_type_node, cnts[i],
8737 size_binop (PLUS_EXPR, cnts[i],
8738 size_int (1 + (i == 5))));
8739 append_to_statement_list_force (tem, last_body);
8740 TREE_VALUE (t) = null_pointer_node;
8741 }
8742 else
8743 {
8744 if (last_bind)
8745 {
8746 gimplify_and_add (last_bind, pre_p);
8747 last_bind = NULL_TREE;
8748 }
8749 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8750 {
8751 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8752 NULL, is_gimple_val, fb_rvalue);
8753 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8754 }
8755 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8756 return 2;
8757 if (OMP_CLAUSE_DECL (c) != null_pointer_node)
8758 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8759 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8760 is_gimple_val, fb_rvalue) == GS_ERROR)
8761 return 2;
8762 if (i == 4)
8763 {
8764 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8765 NULL_TREE, NULL_TREE);
8766 tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
8767 NULL_TREE, NULL_TREE);
8768 r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
8769 tem = build2 (MODIFY_EXPR, void_type_node, r, r2);
8770 gimplify_and_add (tem, pre_p);
8771 g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR,
8772 cnts[i],
8773 size_int (1)));
8774 gimple_seq_add_stmt (pre_p, g);
8775 i = 5;
8776 }
8777 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8778 NULL_TREE, NULL_TREE);
8779 tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
8780 gimplify_and_add (tem, pre_p);
8781 if (i == 5)
8782 {
8783 r = build4 (ARRAY_REF, ptr_type_node, array,
8784 size_binop (PLUS_EXPR, cnts[i], size_int (1)),
8785 NULL_TREE, NULL_TREE);
8786 tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
8787 tem = build2 (MODIFY_EXPR, void_type_node, r, tem);
8789 gimplify_and_add (tem, pre_p);
8790 }
8791 g = gimple_build_assign (cnts[i],
8792 size_binop (PLUS_EXPR, cnts[i],
8793 size_int (1 + (i == 5))));
8794 gimple_seq_add_stmt (pre_p, g);
8795 }
8796 }
8797 if (last_bind)
8798 gimplify_and_add (last_bind, pre_p);
8799 tree cond = boolean_false_node;
8800 if (is_old)
8801 {
8802 if (!unused[0])
8803 cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
8804 size_binop_loc (first_loc, PLUS_EXPR, counts[0],
8805 size_int (2)));
8806 if (!unused[2])
8807 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8808 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8809 cnts[2],
8810 size_binop_loc (first_loc, PLUS_EXPR,
8811 totalpx,
8812 size_int (1))));
8813 }
8814 else
8815 {
8816 tree prev = size_int (5);
8817 for (i = 0; i < 5; i++)
8818 {
8819 if (unused[i])
8820 continue;
8821 prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
8822 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8823 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8824 cnts[i], unshare_expr (prev)));
8825 }
8826 }
8827 tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
8828 build_call_expr_loc (first_loc,
8829 builtin_decl_explicit (BUILT_IN_TRAP),
8830 0), void_node);
8831 gimplify_and_add (tem, pre_p);
8832 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
8833 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
8834 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
8835 OMP_CLAUSE_CHAIN (c) = *list_p;
8836 *list_p = c;
8837 return 1;
8838 }
8839
8840 /* For a set of mappings describing an array section pointed to by a struct
8841 (or derived type, etc.) component, create an "alloc" or "release" node to
8842 insert into a list following a GOMP_MAP_STRUCT node. For some types of
8843 mapping (e.g. Fortran arrays with descriptors), an additional mapping may
8844 be created that is inserted into the list of mapping nodes attached to the
8845 directive being processed -- not part of the sorted list of nodes after
8846 GOMP_MAP_STRUCT.
8847
8848 CODE is the code of the directive being processed. GRP_START and GRP_END
8849 are the first and last of two or three nodes representing this array section
8850 mapping (e.g. a data movement node like GOMP_MAP_{TO,FROM}, optionally a
8851 GOMP_MAP_TO_PSET, and finally a GOMP_MAP_ALWAYS_POINTER). EXTRA_NODE is
8852 filled with the additional node described above, if needed.
8853
8854 This function does not add the new nodes to any lists itself. It is the
8855 responsibility of the caller to do that. */
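
/* As an illustrative sketch (hypothetical clause group; exact node layouts
vary by language and construct): for
"#pragma omp target enter data map(to: s.ptr[0:n])", with GRP_START the
GOMP_MAP_TO node for the section and GRP_END the trailing
GOMP_MAP_ATTACH_DETACH node for "s.ptr", this builds a pointer-sized
GOMP_MAP_ALLOC node for "s.ptr" and leaves *EXTRA_NODE as NULL_TREE. */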
8856
8857 static tree
8858 build_omp_struct_comp_nodes (enum tree_code code, tree grp_start, tree grp_end,
8859 tree *extra_node)
8860 {
8861 enum gomp_map_kind mkind
8862 = (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
8863 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8864
8865 gcc_assert (grp_start != grp_end);
8866
8867 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
8868 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8869 OMP_CLAUSE_DECL (c2) = unshare_expr (OMP_CLAUSE_DECL (grp_end));
8870 OMP_CLAUSE_CHAIN (c2) = NULL_TREE;
8871 tree grp_mid = NULL_TREE;
8872 if (OMP_CLAUSE_CHAIN (grp_start) != grp_end)
8873 grp_mid = OMP_CLAUSE_CHAIN (grp_start);
8874
8875 if (grp_mid
8876 && OMP_CLAUSE_CODE (grp_mid) == OMP_CLAUSE_MAP
8877 && OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_TO_PSET)
8878 OMP_CLAUSE_SIZE (c2) = OMP_CLAUSE_SIZE (grp_mid);
8879 else
8880 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (ptr_type_node);
8881
8882 if (grp_mid
8883 && OMP_CLAUSE_CODE (grp_mid) == OMP_CLAUSE_MAP
8884 && (OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_ALWAYS_POINTER
8885 || OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_ATTACH_DETACH))
8886 {
8887 tree c3
8888 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
8889 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8890 OMP_CLAUSE_DECL (c3) = unshare_expr (OMP_CLAUSE_DECL (grp_mid));
8891 OMP_CLAUSE_SIZE (c3) = TYPE_SIZE_UNIT (ptr_type_node);
8892 OMP_CLAUSE_CHAIN (c3) = NULL_TREE;
8893
8894 *extra_node = c3;
8895 }
8896 else
8897 *extra_node = NULL_TREE;
8898
8899 return c2;
8900 }
8901
8902 /* Strip ARRAY_REFs or an indirect ref off BASE, find the containing object,
8903 set *BITPOSP to the bit offset and *POFFSETP to the byte offset of the
8904 access, and return the containing object. */
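
/* For example (hypothetical layout): for BASE of "s.x.y", where member "x"
is at byte offset 16 in "s" and member "y" at byte offset 8 in "x", this
returns "s" with *BITPOSP set to 192 and *POFFSETP to 24. */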
8908
8909 static tree
8910 extract_base_bit_offset (tree base, poly_int64 *bitposp,
8911 poly_offset_int *poffsetp)
8912 {
8913 tree offset;
8914 poly_int64 bitsize, bitpos;
8915 machine_mode mode;
8916 int unsignedp, reversep, volatilep = 0;
8917 poly_offset_int poffset;
8918
8919 STRIP_NOPS (base);
8920
8921 base = get_inner_reference (base, &bitsize, &bitpos, &offset, &mode,
8922 &unsignedp, &reversep, &volatilep);
8923
8924 STRIP_NOPS (base);
8925
8926 if (offset && poly_int_tree_p (offset))
8927 {
8928 poffset = wi::to_poly_offset (offset);
8929 offset = NULL_TREE;
8930 }
8931 else
8932 poffset = 0;
8933
8934 if (maybe_ne (bitpos, 0))
8935 poffset += bits_to_bytes_round_down (bitpos);
8936
8937 *bitposp = bitpos;
8938 *poffsetp = poffset;
8939
8940 return base;
8941 }
8942
8943 /* Used for topological sorting of mapping groups. UNVISITED means we haven't
8944 started processing the group yet. The TEMPORARY mark is used when we first
8945 encounter a group on a depth-first traversal, and the PERMANENT mark is used
8946 when we have processed all the group's children (i.e. all the base pointers
8947 referred to by the group's mapping nodes, recursively). */
8948
8949 enum omp_tsort_mark {
8950 UNVISITED,
8951 TEMPORARY,
8952 PERMANENT
8953 };
8954
8955 /* A group of OMP_CLAUSE_MAP nodes that correspond to a single "map"
8956 clause. */
8957
8958 struct omp_mapping_group {
8959 tree *grp_start;
8960 tree grp_end;
8961 omp_tsort_mark mark;
8962 /* If we've removed the group but need to reindex, mark the group as
8963 deleted. */
8964 bool deleted;
8965 struct omp_mapping_group *sibling;
8966 struct omp_mapping_group *next;
8967 };
8968
8969 DEBUG_FUNCTION void
8970 debug_mapping_group (omp_mapping_group *grp)
8971 {
8972 tree tmp = OMP_CLAUSE_CHAIN (grp->grp_end);
8973 OMP_CLAUSE_CHAIN (grp->grp_end) = NULL;
8974 debug_generic_expr (*grp->grp_start);
8975 OMP_CLAUSE_CHAIN (grp->grp_end) = tmp;
8976 }
8977
8978 /* Return the OpenMP "base pointer" of an expression EXPR, or NULL if there
8979 isn't one. */
8980
8981 static tree
8982 omp_get_base_pointer (tree expr)
8983 {
8984 while (TREE_CODE (expr) == ARRAY_REF
8985 || TREE_CODE (expr) == COMPONENT_REF)
8986 expr = TREE_OPERAND (expr, 0);
8987
8988 if (TREE_CODE (expr) == INDIRECT_REF
8989 || (TREE_CODE (expr) == MEM_REF
8990 && integer_zerop (TREE_OPERAND (expr, 1))))
8991 {
8992 expr = TREE_OPERAND (expr, 0);
8993 while (TREE_CODE (expr) == COMPOUND_EXPR)
8994 expr = TREE_OPERAND (expr, 1);
8995 if (TREE_CODE (expr) == POINTER_PLUS_EXPR)
8996 expr = TREE_OPERAND (expr, 0);
8997 if (TREE_CODE (expr) == SAVE_EXPR)
8998 expr = TREE_OPERAND (expr, 0);
8999 STRIP_NOPS (expr);
9000 return expr;
9001 }
9002
9003 return NULL_TREE;
9004 }
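
/* For instance (an illustrative sketch): for EXPR of "p->q[4]", i.e.
ARRAY_REF (COMPONENT_REF (INDIRECT_REF (p), q), 4), the array and
component refs are stripped and the base pointer "p" is returned; for
"s.t[2]", where "s" is not accessed via a pointer, there is no base
pointer and NULL_TREE is returned. */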
9005
9006 /* Remove COMPONENT_REFS and indirections from EXPR. */
9007
9008 static tree
9009 omp_strip_components_and_deref (tree expr)
9010 {
9011 while (TREE_CODE (expr) == COMPONENT_REF
9012 || TREE_CODE (expr) == INDIRECT_REF
9013 || (TREE_CODE (expr) == MEM_REF
9014 && integer_zerop (TREE_OPERAND (expr, 1)))
9015 || TREE_CODE (expr) == POINTER_PLUS_EXPR
9016 || TREE_CODE (expr) == COMPOUND_EXPR)
9017 if (TREE_CODE (expr) == COMPOUND_EXPR)
9018 expr = TREE_OPERAND (expr, 1);
9019 else
9020 expr = TREE_OPERAND (expr, 0);
9021
9022 STRIP_NOPS (expr);
9023
9024 return expr;
9025 }
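
/* E.g. (illustrative): the function above reduces "p->a.b" all the way to
the pointer decl "p", whereas omp_strip_indirections below leaves
"p->a.b" untouched, since its outermost tree code is a COMPONENT_REF
rather than an indirection. */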
9026
9027 static tree
9028 omp_strip_indirections (tree expr)
9029 {
9030 while (TREE_CODE (expr) == INDIRECT_REF
9031 || (TREE_CODE (expr) == MEM_REF
9032 && integer_zerop (TREE_OPERAND (expr, 1))))
9033 expr = TREE_OPERAND (expr, 0);
9034
9035 return expr;
9036 }
9037
9038 /* An attach or detach operation depends directly on the address being
9039 attached/detached. Return that address, or NULL_TREE if there are no
9040 attachments/detachments. */
9041
9042 static tree
9043 omp_get_attachment (omp_mapping_group *grp)
9044 {
9045 tree node = *grp->grp_start;
9046
9047 switch (OMP_CLAUSE_MAP_KIND (node))
9048 {
9049 case GOMP_MAP_TO:
9050 case GOMP_MAP_FROM:
9051 case GOMP_MAP_TOFROM:
9052 case GOMP_MAP_ALWAYS_FROM:
9053 case GOMP_MAP_ALWAYS_TO:
9054 case GOMP_MAP_ALWAYS_TOFROM:
9055 case GOMP_MAP_FORCE_FROM:
9056 case GOMP_MAP_FORCE_TO:
9057 case GOMP_MAP_FORCE_TOFROM:
9058 case GOMP_MAP_FORCE_PRESENT:
9059 case GOMP_MAP_ALLOC:
9060 case GOMP_MAP_RELEASE:
9061 case GOMP_MAP_DELETE:
9062 case GOMP_MAP_FORCE_ALLOC:
9063 if (node == grp->grp_end)
9064 return NULL_TREE;
9065
9066 node = OMP_CLAUSE_CHAIN (node);
9067 if (node && OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_TO_PSET)
9068 {
9069 gcc_assert (node != grp->grp_end);
9070 node = OMP_CLAUSE_CHAIN (node);
9071 }
9072 if (node)
9073 switch (OMP_CLAUSE_MAP_KIND (node))
9074 {
9075 case GOMP_MAP_POINTER:
9076 case GOMP_MAP_ALWAYS_POINTER:
9077 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9078 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9079 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9080 return NULL_TREE;
9081
9082 case GOMP_MAP_ATTACH_DETACH:
9083 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9084 return OMP_CLAUSE_DECL (node);
9085
9086 default:
9087 internal_error ("unexpected mapping node");
9088 }
9089 return error_mark_node;
9090
9091 case GOMP_MAP_TO_PSET:
9092 gcc_assert (node != grp->grp_end);
9093 node = OMP_CLAUSE_CHAIN (node);
9094 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
9095 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
9096 return OMP_CLAUSE_DECL (node);
9097 else
9098 internal_error ("unexpected mapping node");
9099 return error_mark_node;
9100
9101 case GOMP_MAP_ATTACH:
9102 case GOMP_MAP_DETACH:
9103 node = OMP_CLAUSE_CHAIN (node);
9104 if (!node || *grp->grp_start == grp->grp_end)
9105 return OMP_CLAUSE_DECL (*grp->grp_start);
9106 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9107 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9108 return OMP_CLAUSE_DECL (*grp->grp_start);
9109 else
9110 internal_error ("unexpected mapping node");
9111 return error_mark_node;
9112
9113 case GOMP_MAP_STRUCT:
9114 case GOMP_MAP_FORCE_DEVICEPTR:
9115 case GOMP_MAP_DEVICE_RESIDENT:
9116 case GOMP_MAP_LINK:
9117 case GOMP_MAP_IF_PRESENT:
9118 case GOMP_MAP_FIRSTPRIVATE:
9119 case GOMP_MAP_FIRSTPRIVATE_INT:
9120 case GOMP_MAP_USE_DEVICE_PTR:
9121 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9122 return NULL_TREE;
9123
9124 default:
9125 internal_error ("unexpected mapping node");
9126 }
9127
9128 return error_mark_node;
9129 }
9130
9131 /* Given a pointer START_P to the start of a group of related (e.g. pointer)
9132 mappings, return a pointer to the chain whose target is the group's last node. */
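
/* As an illustrative sketch, assuming a C array section on a pointer: for
the list [GOMP_MAP_TO (p[0:n]), GOMP_MAP_FIRSTPRIVATE_POINTER (p),
GOMP_MAP_TOFROM (x)], calling this on the first node returns a pointer
whose target is the FIRSTPRIVATE_POINTER node; the TOFROM node starts a
new group. */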
9133
9134 static tree *
9135 omp_group_last (tree *start_p)
9136 {
9137 tree c = *start_p, nc, *grp_last_p = start_p;
9138
9139 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);
9140
9141 nc = OMP_CLAUSE_CHAIN (c);
9142
9143 if (!nc || OMP_CLAUSE_CODE (nc) != OMP_CLAUSE_MAP)
9144 return grp_last_p;
9145
9146 switch (OMP_CLAUSE_MAP_KIND (c))
9147 {
9148 default:
9149 while (nc
9150 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9151 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9152 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
9153 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH
9154 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
9155 || (OMP_CLAUSE_MAP_KIND (nc)
9156 == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
9157 || (OMP_CLAUSE_MAP_KIND (nc)
9158 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)
9159 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ALWAYS_POINTER
9160 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_TO_PSET))
9161 {
9162 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9163 c = nc;
9164 tree nc2 = OMP_CLAUSE_CHAIN (nc);
9165 if (nc2
9166 && OMP_CLAUSE_CODE (nc2) == OMP_CLAUSE_MAP
9167 && (OMP_CLAUSE_MAP_KIND (nc)
9168 == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
9169 && OMP_CLAUSE_MAP_KIND (nc2) == GOMP_MAP_ATTACH)
9170 {
9171 grp_last_p = &OMP_CLAUSE_CHAIN (nc);
9172 c = nc2;
9173 nc2 = OMP_CLAUSE_CHAIN (nc2);
9174 }
9175 nc = nc2;
9176 }
9177 break;
9178
9179 case GOMP_MAP_ATTACH:
9180 case GOMP_MAP_DETACH:
9181 /* This is a weird artifact of how directives are parsed: bare attach or
9182 detach clauses get a subsequent (meaningless) FIRSTPRIVATE_POINTER or
9183 FIRSTPRIVATE_REFERENCE node. FIXME. */
9184 if (nc
9185 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9186 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9187 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER))
9188 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9189 break;
9190
9191 case GOMP_MAP_TO_PSET:
9192 if (OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9193 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH
9194 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_DETACH))
9195 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9196 break;
9197
9198 case GOMP_MAP_STRUCT:
9199 {
9200 unsigned HOST_WIDE_INT num_mappings
9201 = tree_to_uhwi (OMP_CLAUSE_SIZE (c));
9202 if (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
9203 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9204 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH)
9205 grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
9206 for (unsigned i = 0; i < num_mappings; i++)
9207 grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
9208 }
9209 break;
9210 }
9211
9212 return grp_last_p;
9213 }
9214
9215 /* Walk through LIST_P and gather the groups of mappings found (e.g. a
9216 GOMP_MAP_{TO/FROM/TOFROM} node followed by one or two associated
9217 GOMP_MAP_POINTER mappings) into GROUPS, stopping at GATHER_SENTINEL if
9218 non-NULL. The wrapper below returns the vector, or NULL if it is empty. */
9219
9220 static void
9221 omp_gather_mapping_groups_1 (tree *list_p, vec<omp_mapping_group> *groups,
9222 tree gather_sentinel)
9223 {
9224 for (tree *cp = list_p;
9225 *cp && *cp != gather_sentinel;
9226 cp = &OMP_CLAUSE_CHAIN (*cp))
9227 {
9228 if (OMP_CLAUSE_CODE (*cp) != OMP_CLAUSE_MAP)
9229 continue;
9230
9231 tree *grp_last_p = omp_group_last (cp);
9232 omp_mapping_group grp;
9233
9234 grp.grp_start = cp;
9235 grp.grp_end = *grp_last_p;
9236 grp.mark = UNVISITED;
9237 grp.sibling = NULL;
9238 grp.deleted = false;
9239 grp.next = NULL;
9240 groups->safe_push (grp);
9241
9242 cp = grp_last_p;
9243 }
9244 }
9245
9246 static vec<omp_mapping_group> *
9247 omp_gather_mapping_groups (tree *list_p)
9248 {
9249 vec<omp_mapping_group> *groups = new vec<omp_mapping_group> ();
9250
9251 omp_gather_mapping_groups_1 (list_p, groups, NULL_TREE);
9252
9253 if (groups->length () > 0)
9254 return groups;
9255 else
9256 {
9257 delete groups;
9258 return NULL;
9259 }
9260 }
9261
9262 /* A pointer mapping group GRP may define a block of memory starting at some
9263 base address, and maybe also define a firstprivate pointer or firstprivate
9264 reference that points to that block. The return value is a node containing
9265 the former, and *FIRSTPRIVATE is set to the decl of the latter if we have
9266 one. If the group defines several base pointers, i.e. for a GOMP_MAP_STRUCT
9267 mapping, set *CHAINED to the number of consecutive chained nodes. */
9268
9269 static tree
9270 omp_group_base (omp_mapping_group *grp, unsigned int *chained,
9271 tree *firstprivate)
9272 {
9273 tree node = *grp->grp_start;
9274
9275 *firstprivate = NULL_TREE;
9276 *chained = 1;
9277
9278 switch (OMP_CLAUSE_MAP_KIND (node))
9279 {
9280 case GOMP_MAP_TO:
9281 case GOMP_MAP_FROM:
9282 case GOMP_MAP_TOFROM:
9283 case GOMP_MAP_ALWAYS_FROM:
9284 case GOMP_MAP_ALWAYS_TO:
9285 case GOMP_MAP_ALWAYS_TOFROM:
9286 case GOMP_MAP_FORCE_FROM:
9287 case GOMP_MAP_FORCE_TO:
9288 case GOMP_MAP_FORCE_TOFROM:
9289 case GOMP_MAP_FORCE_PRESENT:
9290 case GOMP_MAP_ALLOC:
9291 case GOMP_MAP_RELEASE:
9292 case GOMP_MAP_DELETE:
9293 case GOMP_MAP_FORCE_ALLOC:
9294 case GOMP_MAP_IF_PRESENT:
9295 if (node == grp->grp_end)
9296 return node;
9297
9298 node = OMP_CLAUSE_CHAIN (node);
9299 if (node && OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_TO_PSET)
9300 {
9301 if (node == grp->grp_end)
9302 return *grp->grp_start;
9303 node = OMP_CLAUSE_CHAIN (node);
9304 }
9305 if (node)
9306 switch (OMP_CLAUSE_MAP_KIND (node))
9307 {
9308 case GOMP_MAP_POINTER:
9309 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9310 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9311 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9312 *firstprivate = OMP_CLAUSE_DECL (node);
9313 return *grp->grp_start;
9314
9315 case GOMP_MAP_ALWAYS_POINTER:
9316 case GOMP_MAP_ATTACH_DETACH:
9317 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9318 return *grp->grp_start;
9319
9320 default:
9321 internal_error ("unexpected mapping node");
9322 }
9323 else
9324 internal_error ("unexpected mapping node");
9325 return error_mark_node;
9326
9327 case GOMP_MAP_TO_PSET:
9328 gcc_assert (node != grp->grp_end);
9329 node = OMP_CLAUSE_CHAIN (node);
9330 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
9331 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
9332 return NULL_TREE;
9333 else
9334 internal_error ("unexpected mapping node");
9335 return error_mark_node;
9336
9337 case GOMP_MAP_ATTACH:
9338 case GOMP_MAP_DETACH:
9339 node = OMP_CLAUSE_CHAIN (node);
9340 if (!node || *grp->grp_start == grp->grp_end)
9341 return NULL_TREE;
9342 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9343 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9344 {
9345 /* We're mapping the base pointer itself in a bare attach or detach
9346 node. This is a side effect of how parsing works, and the mapping
9347 will be removed anyway (at least for enter/exit data directives).
9348 We should ignore the mapping here. FIXME. */
9349 return NULL_TREE;
9350 }
9351 else
9352 internal_error ("unexpected mapping node");
9353 return error_mark_node;
9354
9355 case GOMP_MAP_STRUCT:
9356 {
9357 unsigned HOST_WIDE_INT num_mappings
9358 = tree_to_uhwi (OMP_CLAUSE_SIZE (node));
9359 node = OMP_CLAUSE_CHAIN (node);
9360 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9361 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9362 {
9363 *firstprivate = OMP_CLAUSE_DECL (node);
9364 node = OMP_CLAUSE_CHAIN (node);
9365 }
9366 *chained = num_mappings;
9367 return node;
9368 }
9369
9370 case GOMP_MAP_FORCE_DEVICEPTR:
9371 case GOMP_MAP_DEVICE_RESIDENT:
9372 case GOMP_MAP_LINK:
9373 case GOMP_MAP_FIRSTPRIVATE:
9374 case GOMP_MAP_FIRSTPRIVATE_INT:
9375 case GOMP_MAP_USE_DEVICE_PTR:
9376 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9377 return NULL_TREE;
9378
9379 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9380 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9381 case GOMP_MAP_POINTER:
9382 case GOMP_MAP_ALWAYS_POINTER:
9383 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9384 /* These shouldn't appear by themselves. */
9385 if (!seen_error ())
9386 internal_error ("unexpected pointer mapping node");
9387 return error_mark_node;
9388
9389 default:
9390 gcc_unreachable ();
9391 }
9392
9393 return error_mark_node;
9394 }
9395
9396 /* Given a vector of omp_mapping_groups, build a hash table so we can look up
9397 nodes by tree_operand_hash. */
9398
9399 static void
9400 omp_index_mapping_groups_1 (hash_map<tree_operand_hash,
9401 omp_mapping_group *> *grpmap,
9402 vec<omp_mapping_group> *groups,
9403 tree reindex_sentinel)
9404 {
9405 omp_mapping_group *grp;
9406 unsigned int i;
9407 bool reindexing = reindex_sentinel != NULL_TREE, above_hwm = false;
9408
9409 FOR_EACH_VEC_ELT (*groups, i, grp)
9410 {
9411 if (reindexing && *grp->grp_start == reindex_sentinel)
9412 above_hwm = true;
9413
9414 if (reindexing && !above_hwm)
9415 continue;
9416
9417 tree fpp;
9418 unsigned int chained;
9419 tree node = omp_group_base (grp, &chained, &fpp);
9420
9421 if (node == error_mark_node || (!node && !fpp))
9422 continue;
9423
9424 for (unsigned j = 0;
9425 node && j < chained;
9426 node = OMP_CLAUSE_CHAIN (node), j++)
9427 {
9428 tree decl = OMP_CLAUSE_DECL (node);
9429
9430 /* Sometimes we see zero-offset MEM_REF instead of INDIRECT_REF,
9431 meaning node-hash lookups don't work. This is a workaround for
9432 that, but ideally we should just create the INDIRECT_REF at
9433 source instead. FIXME. */
9434 if (TREE_CODE (decl) == MEM_REF
9435 && integer_zerop (TREE_OPERAND (decl, 1)))
9436 decl = build_fold_indirect_ref (TREE_OPERAND (decl, 0));
9437
9438 omp_mapping_group **prev = grpmap->get (decl);
9439
9440 if (prev && *prev == grp)
9441 /* Empty. */;
9442 else if (prev)
9443 {
9444 /* Mapping the same thing twice is normally diagnosed as an error,
9445 but can happen under some circumstances, e.g. in pr99928-16.c,
9446 the directive:
9447
9448 #pragma omp target simd reduction(+:a[:3]) \
9449 map(always, tofrom: a[:6])
9450 ...
9451
9452 will result in two "a[0]" mappings (of different sizes). */
9453
9454 grp->sibling = (*prev)->sibling;
9455 (*prev)->sibling = grp;
9456 }
9457 else
9458 grpmap->put (decl, grp);
9459 }
9460
9461 if (!fpp)
9462 continue;
9463
9464 omp_mapping_group **prev = grpmap->get (fpp);
9465 if (prev && *prev != grp)
9466 {
9467 grp->sibling = (*prev)->sibling;
9468 (*prev)->sibling = grp;
9469 }
9470 else
9471 grpmap->put (fpp, grp);
9472 }
9473 }
9474
9475 static hash_map<tree_operand_hash, omp_mapping_group *> *
9476 omp_index_mapping_groups (vec<omp_mapping_group> *groups)
9477 {
9478 hash_map<tree_operand_hash, omp_mapping_group *> *grpmap
9479 = new hash_map<tree_operand_hash, omp_mapping_group *>;
9480
9481 omp_index_mapping_groups_1 (grpmap, groups, NULL_TREE);
9482
9483 return grpmap;
9484 }
9485
9486 /* Rebuild group map from partially-processed clause list (during
9487 omp_build_struct_sibling_lists). We have already processed nodes up until
9488 a high-water mark (HWM). This is a bit tricky because the list is being
9489 reordered as it is scanned, but we know:
9490
9491 1. The list after HWM has not been touched yet, so we can reindex it safely.
9492
9493 2. The list before and including HWM has been altered, but remains
9494 well-formed throughout the sibling-list building operation.
9495
9496 So we can do the reindex operation in two parts, on the processed and
9497 then the unprocessed halves of the list. */
9498
9499 static hash_map<tree_operand_hash, omp_mapping_group *> *
9500 omp_reindex_mapping_groups (tree *list_p,
9501 vec<omp_mapping_group> *groups,
9502 vec<omp_mapping_group> *processed_groups,
9503 tree sentinel)
9504 {
9505 hash_map<tree_operand_hash, omp_mapping_group *> *grpmap
9506 = new hash_map<tree_operand_hash, omp_mapping_group *>;
9507
9508 processed_groups->truncate (0);
9509
9510 omp_gather_mapping_groups_1 (list_p, processed_groups, sentinel);
9511 omp_index_mapping_groups_1 (grpmap, processed_groups, NULL_TREE);
9512 if (sentinel)
9513 omp_index_mapping_groups_1 (grpmap, groups, sentinel);
9514
9515 return grpmap;
9516 }
9517
9518 /* Find the immediately-containing struct for a component ref (etc.)
9519 expression EXPR. */
9520
9521 static tree
9522 omp_containing_struct (tree expr)
9523 {
9524 tree expr0 = expr;
9525
9526 STRIP_NOPS (expr);
9527
9528 /* Note: don't strip NOPs unless we're also stripping off array refs or a
9529 component ref. */
9530 if (TREE_CODE (expr) != ARRAY_REF && TREE_CODE (expr) != COMPONENT_REF)
9531 return expr0;
9532
9533 while (TREE_CODE (expr) == ARRAY_REF)
9534 expr = TREE_OPERAND (expr, 0);
9535
9536 if (TREE_CODE (expr) == COMPONENT_REF)
9537 expr = TREE_OPERAND (expr, 0);
9538
9539 return expr;
9540 }
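
/* For example (illustrative): the immediately-containing struct expression
of "s.a.b[2]" is "s.a" (the outer array ref and one component ref are
stripped), while a bare decl "x" is returned unchanged. */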
9541
9542 /* Return TRUE if DECL describes a component that is part of a whole structure
9543 that is mapped elsewhere in GRPMAP. *MAPPED_BY_GROUP is set to the group
9544 that maps that structure, if present. */
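
/* E.g. (an illustrative sketch): if GRPMAP contains a group for "s" and
DECL is "s.a.b", the walk tries "s.a" and then "s", finds the latter's
group, stores it in *MAPPED_BY_GROUP and returns TRUE. */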
9545
9546 static bool
9547 omp_mapped_by_containing_struct (hash_map<tree_operand_hash,
9548 omp_mapping_group *> *grpmap,
9549 tree decl,
9550 omp_mapping_group **mapped_by_group)
9551 {
9552 tree wsdecl = NULL_TREE;
9553
9554 *mapped_by_group = NULL;
9555
9556 while (true)
9557 {
9558 wsdecl = omp_containing_struct (decl);
9559 if (wsdecl == decl)
9560 break;
9561 omp_mapping_group **wholestruct = grpmap->get (wsdecl);
9562 if (!wholestruct
9563 && TREE_CODE (wsdecl) == MEM_REF
9564 && integer_zerop (TREE_OPERAND (wsdecl, 1)))
9565 {
9566 tree deref = TREE_OPERAND (wsdecl, 0);
9567 deref = build_fold_indirect_ref (deref);
9568 wholestruct = grpmap->get (deref);
9569 }
9570 if (wholestruct)
9571 {
9572 *mapped_by_group = *wholestruct;
9573 return true;
9574 }
9575 decl = wsdecl;
9576 }
9577
9578 return false;
9579 }
9580
9581 /* Helper function for omp_tsort_mapping_groups. Returns TRUE on success, or
9582 FALSE on error. */
9583
9584 static bool
9585 omp_tsort_mapping_groups_1 (omp_mapping_group ***outlist,
9586 vec<omp_mapping_group> *groups,
9587 hash_map<tree_operand_hash, omp_mapping_group *>
9588 *grpmap,
9589 omp_mapping_group *grp)
9590 {
9591 if (grp->mark == PERMANENT)
9592 return true;
9593 if (grp->mark == TEMPORARY)
9594 {
9595 fprintf (stderr, "when processing group:\n");
9596 debug_mapping_group (grp);
9597 internal_error ("base pointer cycle detected");
9598 return false;
9599 }
9600 grp->mark = TEMPORARY;
9601
9602 tree attaches_to = omp_get_attachment (grp);
9603
9604 if (attaches_to)
9605 {
9606 omp_mapping_group **basep = grpmap->get (attaches_to);
9607
9608 if (basep && *basep != grp)
9609 {
9610 for (omp_mapping_group *w = *basep; w; w = w->sibling)
9611 if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
9612 return false;
9613 }
9614 }
9615
9616 tree decl = OMP_CLAUSE_DECL (*grp->grp_start);
9617
9618 while (decl)
9619 {
9620 tree base = omp_get_base_pointer (decl);
9621
9622 if (!base)
9623 break;
9624
9625 omp_mapping_group **innerp = grpmap->get (base);
9626 omp_mapping_group *wholestruct;
9627
9628 /* We should treat whole-structure mappings as if all (pointer, in this
9629 case) members are mapped as individual list items. Check if we have
9630 such a whole-structure mapping, if we don't have an explicit reference
9631 to the pointer member itself. */
9632 if (!innerp
9633 && TREE_CODE (base) == COMPONENT_REF
9634 && omp_mapped_by_containing_struct (grpmap, base, &wholestruct))
9635 innerp = &wholestruct;
9636
9637 if (innerp && *innerp != grp)
9638 {
9639 for (omp_mapping_group *w = *innerp; w; w = w->sibling)
9640 if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
9641 return false;
9642 break;
9643 }
9644
9645 decl = base;
9646 }
9647
9648 grp->mark = PERMANENT;
9649
9650 /* Emit grp to output list. */
9651
9652 **outlist = grp;
9653 *outlist = &grp->next;
9654
9655 return true;
9656 }
9657
9658 /* Topologically sort GROUPS, so that OMP 5.0-defined base pointers come
9659 before mappings that use those pointers. This is an implementation of the
9660 depth-first search algorithm, described e.g. at:
9661
9662 https://en.wikipedia.org/wiki/Topological_sorting
9663 */
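
/* For instance (illustrative): given "map(tofrom: p->x[0:n]) map(to: p)",
the group mapping the base pointer "p" is emitted to the output list
before the group mapping "p->x[0:n]", whatever the original clause
order. */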
9664
9665 static omp_mapping_group *
9666 omp_tsort_mapping_groups (vec<omp_mapping_group> *groups,
9667 hash_map<tree_operand_hash, omp_mapping_group *>
9668 *grpmap)
9669 {
9670 omp_mapping_group *grp, *outlist = NULL, **cursor;
9671 unsigned int i;
9672
9673 cursor = &outlist;
9674
9675 FOR_EACH_VEC_ELT (*groups, i, grp)
9676 {
9677 if (grp->mark != PERMANENT)
9678 if (!omp_tsort_mapping_groups_1 (&cursor, groups, grpmap, grp))
9679 return NULL;
9680 }
9681
9682 return outlist;
9683 }
9684
9685 /* Split INLIST into two parts, moving groups corresponding to
9686 ALLOC/RELEASE/DELETE mappings to one list, and other mappings to another.
9687 The former list is then appended to the latter. Each sub-list retains the
9688 order of the original list.
9689 Note that ATTACH nodes are later moved to the end of the list in
9690 gimplify_adjust_omp_clauses, for target regions. */
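
/* A sketch of the effect (illustrative mapping kinds only):

[alloc (a), to (b), release (c), tofrom (d)]
-->
[to (b), tofrom (d), alloc (a), release (c)] */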
9691
9692 static omp_mapping_group *
9693 omp_segregate_mapping_groups (omp_mapping_group *inlist)
9694 {
9695 omp_mapping_group *ard_groups = NULL, *tf_groups = NULL;
9696 omp_mapping_group **ard_tail = &ard_groups, **tf_tail = &tf_groups;
9697
9698 for (omp_mapping_group *w = inlist; w;)
9699 {
9700 tree c = *w->grp_start;
9701 omp_mapping_group *next = w->next;
9702
9703 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);
9704
9705 switch (OMP_CLAUSE_MAP_KIND (c))
9706 {
9707 case GOMP_MAP_ALLOC:
9708 case GOMP_MAP_RELEASE:
9709 case GOMP_MAP_DELETE:
9710 *ard_tail = w;
9711 w->next = NULL;
9712 ard_tail = &w->next;
9713 break;
9714
9715 default:
9716 *tf_tail = w;
9717 w->next = NULL;
9718 tf_tail = &w->next;
9719 }
9720
9721 w = next;
9722 }
9723
9724 /* Now splice the lists together... */
9725 *tf_tail = ard_groups;
9726
9727 return tf_groups;
9728 }
9729
9730 /* Given a list LIST_P containing groups of mappings given by GROUPS, reorder
9731 those groups based on the output list of omp_tsort_mapping_groups --
9732 singly-linked, threaded through each element's NEXT pointer starting at
9733 HEAD. Each list element appears exactly once in that linked list.
9734
9735 Each element of GROUPS may correspond to one or several mapping nodes.
9736 Node groups are kept together, and in the reordered list, the positions of
9737 the original groups are reused for the positions of the reordered list.
9738 Hence if we have e.g.
9739
9740 {to ptr ptr} firstprivate {tofrom ptr} ...
9741 ^ ^ ^
9742 first group non-"map" second group
9743
9744 and say the second group contains a base pointer for the first so must be
9745 moved before it, the resulting list will contain:
9746
9747 {tofrom ptr} firstprivate {to ptr ptr} ...
9748 ^ prev. second group ^ prev. first group
9749 */
9750
9751 static tree *
9752 omp_reorder_mapping_groups (vec<omp_mapping_group> *groups,
9753 omp_mapping_group *head,
9754 tree *list_p)
9755 {
9756 omp_mapping_group *grp;
9757 unsigned int i;
9758 unsigned numgroups = groups->length ();
9759 auto_vec<tree> old_heads (numgroups);
9760 auto_vec<tree *> old_headps (numgroups);
9761 auto_vec<tree> new_heads (numgroups);
9762 auto_vec<tree> old_succs (numgroups);
9763 bool map_at_start = (list_p == (*groups)[0].grp_start);
9764
9765 tree *new_grp_tail = NULL;
9766
9767 /* Stash the start & end nodes of each mapping group before we start
9768 modifying the list. */
9769 FOR_EACH_VEC_ELT (*groups, i, grp)
9770 {
9771 old_headps.quick_push (grp->grp_start);
9772 old_heads.quick_push (*grp->grp_start);
9773 old_succs.quick_push (OMP_CLAUSE_CHAIN (grp->grp_end));
9774 }
9775
9776 /* And similarly, the heads of the groups in the order we want to rearrange
9777 the list to. */
9778 for (omp_mapping_group *w = head; w; w = w->next)
9779 new_heads.quick_push (*w->grp_start);
9780
9781 FOR_EACH_VEC_ELT (*groups, i, grp)
9782 {
9783 gcc_assert (head);
9784
9785 if (new_grp_tail && old_succs[i - 1] == old_heads[i])
9786 {
9787 /* a {b c d} {e f g} h i j (original)
9788 -->
9789 a {k l m} {e f g} h i j (inserted new group on last iter)
9790 -->
9791 a {k l m} {n o p} h i j (this time, chain last group to new one)
9792 ^new_grp_tail
9793 */
9794 *new_grp_tail = new_heads[i];
9795 }
9796 else if (new_grp_tail)
9797 {
9798 /* a {b c d} e {f g h} i j k (original)
9799 -->
9800 a {l m n} e {f g h} i j k (gap after last iter's group)
9801 -->
9802 a {l m n} e {o p q} h i j (chain last group to old successor)
9803 ^new_grp_tail
9804 */
9805 *new_grp_tail = old_succs[i - 1];
9806 *old_headps[i] = new_heads[i];
9807 }
9808 else
9809 {
9810 /* The first inserted group -- point to new group, and leave end
9811 open.
9812 a {b c d} e f
9813 -->
9814 a {g h i...
9815 */
9816 *grp->grp_start = new_heads[i];
9817 }
9818
9819 new_grp_tail = &OMP_CLAUSE_CHAIN (head->grp_end);
9820
9821 head = head->next;
9822 }
9823
9824 if (new_grp_tail)
9825 *new_grp_tail = old_succs[numgroups - 1];
9826
9827 gcc_assert (!head);
9828
9829 return map_at_start ? (*groups)[0].grp_start : list_p;
9830 }
9831
9832 /* DECL is supposed to have lastprivate semantics in the outer contexts
9833 of combined/composite constructs, starting with OCTX.
9834 Add the needed lastprivate, shared or map clause if no data-sharing or
9835 mapping clause is present. IMPLICIT_P is true if it is an implicit
9836 clause (the IV on simd), in which case the lastprivate will not be
9837 copied to some constructs. */
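
/* As an illustrative sketch (hypothetical source): for
"#pragma omp target parallel for lastprivate(v)", V is added as
GOVD_MAP on the combined target context and as GOVD_SHARED on the
combined parallel context, so the lastprivate result can be seen after
the region. */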
9838
9839 static void
9840 omp_lastprivate_for_combined_outer_constructs (struct gimplify_omp_ctx *octx,
9841 tree decl, bool implicit_p)
9842 {
9843 struct gimplify_omp_ctx *orig_octx = octx;
9844 for (; octx; octx = octx->outer_context)
9845 {
9846 if ((octx->region_type == ORT_COMBINED_PARALLEL
9847 || (octx->region_type & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS)
9848 && splay_tree_lookup (octx->variables,
9849 (splay_tree_key) decl) == NULL)
9850 {
9851 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
9852 continue;
9853 }
9854 if ((octx->region_type & ORT_TASK) != 0
9855 && octx->combined_loop
9856 && splay_tree_lookup (octx->variables,
9857 (splay_tree_key) decl) == NULL)
9858 {
9859 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
9860 continue;
9861 }
9862 if (implicit_p
9863 && octx->region_type == ORT_WORKSHARE
9864 && octx->combined_loop
9865 && splay_tree_lookup (octx->variables,
9866 (splay_tree_key) decl) == NULL
9867 && octx->outer_context
9868 && octx->outer_context->region_type == ORT_COMBINED_PARALLEL
9869 && splay_tree_lookup (octx->outer_context->variables,
9870 (splay_tree_key) decl) == NULL)
9871 {
9872 octx = octx->outer_context;
9873 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
9874 continue;
9875 }
9876 if ((octx->region_type == ORT_WORKSHARE || octx->region_type == ORT_ACC)
9877 && octx->combined_loop
9878 && splay_tree_lookup (octx->variables,
9879 (splay_tree_key) decl) == NULL
9880 && !omp_check_private (octx, decl, false))
9881 {
9882 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
9883 continue;
9884 }
9885 if (octx->region_type == ORT_COMBINED_TARGET)
9886 {
9887 splay_tree_node n = splay_tree_lookup (octx->variables,
9888 (splay_tree_key) decl);
9889 if (n == NULL)
9890 {
9891 omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
9892 octx = octx->outer_context;
9893 }
9894 else if (!implicit_p
9895 && (n->value & GOVD_FIRSTPRIVATE_IMPLICIT))
9896 {
9897 n->value &= ~(GOVD_FIRSTPRIVATE
9898 | GOVD_FIRSTPRIVATE_IMPLICIT
9899 | GOVD_EXPLICIT);
9900 omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
9901 octx = octx->outer_context;
9902 }
9903 }
9904 break;
9905 }
9906 if (octx && (implicit_p || octx != orig_octx))
9907 omp_notice_variable (octx, decl, true);
9908 }
9909
9910 /* If we have mappings INNER and OUTER, where INNER is a component access and
9911 OUTER is a mapping of the whole containing struct, check that the mappings
9912 are compatible. We'll be deleting the inner mapping, so we need to make
9913 sure the outer mapping does (at least) the same transfers to/from the device
9914 as the inner mapping. */
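
/* E.g. (illustrative): an outer "map(tofrom: s)" is compatible with an
inner "map(to: s.a)", since TOFROM subsumes TO; an outer "map(to: s)"
with an inner "map(from: s.a)" is not, and is diagnosed. */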
9915
9916 bool
9917 omp_check_mapping_compatibility (location_t loc,
9918 omp_mapping_group *outer,
9919 omp_mapping_group *inner)
9920 {
9921 tree first_outer = *outer->grp_start, first_inner = *inner->grp_start;
9922
9923 gcc_assert (OMP_CLAUSE_CODE (first_outer) == OMP_CLAUSE_MAP);
9924 gcc_assert (OMP_CLAUSE_CODE (first_inner) == OMP_CLAUSE_MAP);
9925
9926 enum gomp_map_kind outer_kind = OMP_CLAUSE_MAP_KIND (first_outer);
9927 enum gomp_map_kind inner_kind = OMP_CLAUSE_MAP_KIND (first_inner);
9928
9929 if (outer_kind == inner_kind)
9930 return true;
9931
9932 switch (outer_kind)
9933 {
9934 case GOMP_MAP_ALWAYS_TO:
9935 if (inner_kind == GOMP_MAP_FORCE_PRESENT
9936 || inner_kind == GOMP_MAP_ALLOC
9937 || inner_kind == GOMP_MAP_TO)
9938 return true;
9939 break;
9940
9941 case GOMP_MAP_ALWAYS_FROM:
9942 if (inner_kind == GOMP_MAP_FORCE_PRESENT
9943 || inner_kind == GOMP_MAP_ALLOC
9944 || inner_kind == GOMP_MAP_FROM)
9945 return true;
9946 break;
9947
9948 case GOMP_MAP_TO:
9949 case GOMP_MAP_FROM:
9950 if (inner_kind == GOMP_MAP_FORCE_PRESENT
9951 || inner_kind == GOMP_MAP_ALLOC)
9952 return true;
9953 break;
9954
9955 case GOMP_MAP_ALWAYS_TOFROM:
9956 case GOMP_MAP_TOFROM:
9957 if (inner_kind == GOMP_MAP_FORCE_PRESENT
9958 || inner_kind == GOMP_MAP_ALLOC
9959 || inner_kind == GOMP_MAP_TO
9960 || inner_kind == GOMP_MAP_FROM
9961 || inner_kind == GOMP_MAP_TOFROM)
9962 return true;
9963 break;
9964
9965 default:
9966 ;
9967 }
9968
9969 error_at (loc, "data movement for component %qE is not compatible with "
9970 "movement for struct %qE", OMP_CLAUSE_DECL (first_inner),
9971 OMP_CLAUSE_DECL (first_outer));
9972
9973 return false;
9974 }
9975
9976 /* Similar to omp_resolve_clause_dependencies, but for OpenACC. The only
9977 clause dependencies we handle for now are struct element mappings and
9978 whole-struct mappings on the same directive, and duplicate clause
9979 detection. */
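
/* E.g. (an illustrative sketch): for
"#pragma acc enter data copyin(s) copyin(s.a)", the single-node "s.a"
group is checked for compatibility against the whole-struct "s" mapping
and then dropped as redundant, whereas a repeated "copyin(s.a)
copyin(s.a)" is diagnosed as a duplicate. */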
9980
9981 void
9982 oacc_resolve_clause_dependencies (vec<omp_mapping_group> *groups,
9983 hash_map<tree_operand_hash,
9984 omp_mapping_group *> *grpmap)
9985 {
9986 int i;
9987 omp_mapping_group *grp;
9988 hash_set<tree_operand_hash> *seen_components = NULL;
9989 hash_set<tree_operand_hash> *shown_error = NULL;
9990
9991 FOR_EACH_VEC_ELT (*groups, i, grp)
9992 {
9993 tree grp_end = grp->grp_end;
9994 tree decl = OMP_CLAUSE_DECL (grp_end);
9995
9996 gcc_assert (OMP_CLAUSE_CODE (grp_end) == OMP_CLAUSE_MAP);
9997
9998 if (DECL_P (grp_end))
9999 continue;
10000
10001 tree c = OMP_CLAUSE_DECL (*grp->grp_start);
10002 while (TREE_CODE (c) == ARRAY_REF)
10003 c = TREE_OPERAND (c, 0);
10004 if (TREE_CODE (c) != COMPONENT_REF)
10005 continue;
10006 if (!seen_components)
10007 seen_components = new hash_set<tree_operand_hash> ();
10008 if (!shown_error)
10009 shown_error = new hash_set<tree_operand_hash> ();
10010 if (seen_components->contains (c)
10011 && !shown_error->contains (c))
10012 {
10013 error_at (OMP_CLAUSE_LOCATION (grp_end),
10014 "%qE appears more than once in map clauses",
10015 OMP_CLAUSE_DECL (grp_end));
10016 shown_error->add (c);
10017 }
10018 else
10019 seen_components->add (c);
10020
10021 omp_mapping_group *struct_group;
10022 if (omp_mapped_by_containing_struct (grpmap, decl, &struct_group)
10023 && *grp->grp_start == grp_end)
10024 {
10025 omp_check_mapping_compatibility (OMP_CLAUSE_LOCATION (grp_end),
10026 struct_group, grp);
10027 /* Remove the whole of this mapping -- redundant. */
10028 grp->deleted = true;
10029 }
10030 }
10031
10032 if (seen_components)
10033 delete seen_components;
10034 if (shown_error)
10035 delete shown_error;
10036 }
10037
10038 /* Link node NEWNODE so it is pointed to by chain INSERT_AT. NEWNODE's chain
10039 is set to the node previously pointed to by INSERT_AT. */
10040
10041 static tree *
10042 omp_siblist_insert_node_after (tree newnode, tree *insert_at)
10043 {
10044 OMP_CLAUSE_CHAIN (newnode) = *insert_at;
10045 *insert_at = newnode;
10046 return &OMP_CLAUSE_CHAIN (newnode);
10047 }
10048
10049 /* Move NODE (which is currently pointed to by the chain OLD_POS) so it is
10050 pointed to by chain MOVE_AFTER instead. */
10051
10052 static void
10053 omp_siblist_move_node_after (tree node, tree *old_pos, tree *move_after)
10054 {
10055 gcc_assert (node == *old_pos);
10056 *old_pos = OMP_CLAUSE_CHAIN (node);
10057 OMP_CLAUSE_CHAIN (node) = *move_after;
10058 *move_after = node;
10059 }
10060
10061 /* Move the nodes from FIRST_PTR (pointed to by the previous node's chain)
10062 through LAST_NODE to after the MOVE_AFTER chain. Similar to the function
10063 below, but no new nodes are prepended to the list before splicing it into
10064 the new position. Return the position we should continue scanning the
10065 list at, or NULL to stay where we were. */
10066
10067 static tree *
10068 omp_siblist_move_nodes_after (tree *first_ptr, tree last_node,
10069 tree *move_after)
10070 {
10071 if (first_ptr == move_after)
10072 return NULL;
10073
10074 tree tmp = *first_ptr;
10075 *first_ptr = OMP_CLAUSE_CHAIN (last_node);
10076 OMP_CLAUSE_CHAIN (last_node) = *move_after;
10077 *move_after = tmp;
10078
10079 return first_ptr;
10080 }
10081
10082 /* Concatenate two lists described by [FIRST_NEW, LAST_NEW_TAIL] and
10083 [FIRST_PTR, LAST_NODE], and insert them in the OMP clause list after chain
10084 pointer MOVE_AFTER.
10085
10086 The latter list was previously part of the OMP clause list, and the former
10087 (prepended) part is comprised of new nodes.
10088
10089 We start with a list of nodes starting with a struct mapping node. We
10090 rearrange the list so that new nodes starting from FIRST_NEW and whose last
10091 node's chain is LAST_NEW_TAIL comes directly after MOVE_AFTER, followed by
10092 the group of mapping nodes we are currently processing (from the chain
10093 FIRST_PTR to LAST_NODE). The return value is the pointer to the next chain
10094 we should continue processing from, or NULL to stay where we were.
10095
10096 The transformation (in the case where MOVE_AFTER and FIRST_PTR are
10097 different) is worked through below. Here we are processing LAST_NODE, and
10098 FIRST_PTR points at the preceding mapping clause:
10099
10100 #. mapping node chain
10101 ---------------------------------------------------
10102 A. struct_node [->B]
10103 B. comp_1 [->C]
10104 C. comp_2 [->D (move_after)]
10105 D. map_to_3 [->E]
10106 E. attach_3 [->F (first_ptr)]
10107 F. map_to_4 [->G (continue_at)]
10108 G. attach_4 (last_node) [->H]
10109 H. ...
10110
10111 *last_new_tail = *first_ptr;
10112
10113 I. new_node (first_new) [->F (last_new_tail)]
10114
10115 *first_ptr = OMP_CLAUSE_CHAIN (last_node)
10116
10117 #. mapping node chain
10118 ----------------------------------------------------
10119 A. struct_node [->B]
10120 B. comp_1 [->C]
10121 C. comp_2 [->D (move_after)]
10122 D. map_to_3 [->E]
10123 E. attach_3 [->H (first_ptr)]
10124 F. map_to_4 [->G (continue_at)]
10125 G. attach_4 (last_node) [->H]
10126 H. ...
10127
10128 I. new_node (first_new) [->F (last_new_tail)]
10129
10130 OMP_CLAUSE_CHAIN (last_node) = *move_after;
10131
10132 #. mapping node chain
10133 ---------------------------------------------------
10134 A. struct_node [->B]
10135 B. comp_1 [->C]
10136 C. comp_2 [->D (move_after)]
10137 D. map_to_3 [->E]
10138 E. attach_3 [->H (continue_at)]
10139 F. map_to_4 [->G]
10140 G. attach_4 (last_node) [->D]
10141 H. ...
10142
10143 I. new_node (first_new) [->F (last_new_tail)]
10144
10145 *move_after = first_new;
10146
10147 #. mapping node chain
10148 ---------------------------------------------------
10149 A. struct_node [->B]
10150 B. comp_1 [->C]
10151 C. comp_2 [->I (move_after)]
10152 D. map_to_3 [->E]
10153 E. attach_3 [->H (continue_at)]
10154 F. map_to_4 [->G]
10155 G. attach_4 (last_node) [->D]
10156 H. ...
10157 I. new_node (first_new) [->F (last_new_tail)]
10158
10159 or, in order:
10160
10161 #. mapping node chain
10162 ---------------------------------------------------
10163 A. struct_node [->B]
10164 B. comp_1 [->C]
10165 C. comp_2 [->I (move_after)]
10166 I. new_node (first_new) [->F (last_new_tail)]
10167 F. map_to_4 [->G]
10168 G. attach_4 (last_node) [->D]
10169 D. map_to_3 [->E]
10170 E. attach_3 [->H (continue_at)]
10171 H. ...
10172 */
10173
10174 static tree *
10175 omp_siblist_move_concat_nodes_after (tree first_new, tree *last_new_tail,
10176 tree *first_ptr, tree last_node,
10177 tree *move_after)
10178 {
10179 tree *continue_at = NULL;
10180 *last_new_tail = *first_ptr;
10181 if (first_ptr == move_after)
10182 *move_after = first_new;
10183 else
10184 {
10185 *first_ptr = OMP_CLAUSE_CHAIN (last_node);
10186 continue_at = first_ptr;
10187 OMP_CLAUSE_CHAIN (last_node) = *move_after;
10188 *move_after = first_new;
10189 }
10190 return continue_at;
10191 }
10192
10193 /* Mapping struct members causes an additional set of nodes to be created,
10194 starting with GOMP_MAP_STRUCT followed by a number of mappings equal to the
10195 number of members being mapped, in order of ascending position (address or
10196 bit offset).
10197
10198 We scan through the list of mapping clauses, calling this function for each
10199 struct member mapping we find, and build up the list of mappings after the
10200 initial GOMP_MAP_STRUCT node. For pointer members, these will be
10201 newly-created ALLOC nodes. For non-pointer members, the existing mapping is
10202 moved into place in the sorted list.
10203
10204 struct {
10205 int *a;
10206 int *b;
10207 int c;
10208 int *d;
10209 };
10210
10211 #pragma (acc|omp directive) copy(struct.a[0:n], struct.b[0:n], struct.c,
10212 struct.d[0:n])
10213
10214 GOMP_MAP_STRUCT (4)
10215 [GOMP_MAP_FIRSTPRIVATE_REFERENCE -- for refs to structs]
10216 GOMP_MAP_ALLOC (struct.a)
10217 GOMP_MAP_ALLOC (struct.b)
10218 GOMP_MAP_TO (struct.c)
10219 GOMP_MAP_ALLOC (struct.d)
10220 ...
10221
10222 In the case where we are mapping references to pointers, or in Fortran if
10223 we are mapping an array with a descriptor, additional nodes may be created
10224 after the struct node list also.
10225
10226 The return value is either a pointer to the next node to process (if the
10227 list has been rearranged), or NULL to continue with the next node in the
10228 original list. */
10229
10230 static tree *
10231 omp_accumulate_sibling_list (enum omp_region_type region_type,
10232 enum tree_code code,
10233 hash_map<tree_operand_hash, tree>
10234 *&struct_map_to_clause, tree *grp_start_p,
10235 tree grp_end, tree *inner)
10236 {
10237 poly_offset_int coffset;
10238 poly_int64 cbitpos;
10239 tree ocd = OMP_CLAUSE_DECL (grp_end);
10240 bool openmp = !(region_type & ORT_ACC);
10241 tree *continue_at = NULL;
10242
10243 while (TREE_CODE (ocd) == ARRAY_REF)
10244 ocd = TREE_OPERAND (ocd, 0);
10245
10246 if (TREE_CODE (ocd) == INDIRECT_REF)
10247 ocd = TREE_OPERAND (ocd, 0);
10248
10249 tree base = extract_base_bit_offset (ocd, &cbitpos, &coffset);
10250
10251 bool ptr = (OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_ALWAYS_POINTER);
10252 bool attach_detach = ((OMP_CLAUSE_MAP_KIND (grp_end)
10253 == GOMP_MAP_ATTACH_DETACH)
10254 || (OMP_CLAUSE_MAP_KIND (grp_end)
10255 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION));
10256 bool attach = (OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_ATTACH
10257 || OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_DETACH);
10258
10259 /* FIXME: If we're not mapping the base pointer in some other clause on this
10260 directive, I think we want to create ALLOC/RELEASE here -- i.e. not
10261 early-exit. */
10262 if (openmp && attach_detach)
10263 return NULL;
10264
10265 if (!struct_map_to_clause || struct_map_to_clause->get (base) == NULL)
10266 {
10267 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
10268 gomp_map_kind k = attach ? GOMP_MAP_FORCE_PRESENT : GOMP_MAP_STRUCT;
10269
10270 OMP_CLAUSE_SET_MAP_KIND (l, k);
10271
10272 OMP_CLAUSE_DECL (l) = unshare_expr (base);
10273
10274 OMP_CLAUSE_SIZE (l)
10275 = (!attach ? size_int (1)
10276 : (DECL_P (OMP_CLAUSE_DECL (l))
10277 ? DECL_SIZE_UNIT (OMP_CLAUSE_DECL (l))
10278 : TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (l)))));
10279 if (struct_map_to_clause == NULL)
10280 struct_map_to_clause = new hash_map<tree_operand_hash, tree>;
10281 struct_map_to_clause->put (base, l);
10282
10283 if (ptr || attach_detach)
10284 {
10285 tree extra_node;
10286 tree alloc_node
10287 = build_omp_struct_comp_nodes (code, *grp_start_p, grp_end,
10288 &extra_node);
10289 OMP_CLAUSE_CHAIN (l) = alloc_node;
10290
10291 tree *insert_node_pos = grp_start_p;
10292
10293 if (extra_node)
10294 {
10295 OMP_CLAUSE_CHAIN (extra_node) = *insert_node_pos;
10296 OMP_CLAUSE_CHAIN (alloc_node) = extra_node;
10297 }
10298 else
10299 OMP_CLAUSE_CHAIN (alloc_node) = *insert_node_pos;
10300
10301 *insert_node_pos = l;
10302 }
10303 else
10304 {
10305 gcc_assert (*grp_start_p == grp_end);
10306 grp_start_p = omp_siblist_insert_node_after (l, grp_start_p);
10307 }
10308
10309 tree noind = omp_strip_indirections (base);
10310
10311 if (!openmp
10312 && (region_type & ORT_TARGET)
10313 && TREE_CODE (noind) == COMPONENT_REF)
10314 {
10315 /* The base for this component access is a struct component access
10316 itself. Insert a node to be processed on the next iteration of
10317 our caller's loop, which will subsequently be turned into a new,
10318 inner GOMP_MAP_STRUCT mapping.
10319
10320 We need to do this else the non-DECL_P base won't be
10321 rewritten correctly in the offloaded region. */
10322 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
10323 OMP_CLAUSE_MAP);
10324 OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_FORCE_PRESENT);
10325 OMP_CLAUSE_DECL (c2) = unshare_expr (noind);
10326 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (TREE_TYPE (noind));
10327 *inner = c2;
10328 return NULL;
10329 }
10330
10331 tree sdecl = omp_strip_components_and_deref (base);
10332
10333 if (POINTER_TYPE_P (TREE_TYPE (sdecl)) && (region_type & ORT_TARGET))
10334 {
10335 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
10336 OMP_CLAUSE_MAP);
10337 bool base_ref
10338 = (TREE_CODE (base) == INDIRECT_REF
10339 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
10340 == REFERENCE_TYPE)
10341 || ((TREE_CODE (TREE_OPERAND (base, 0))
10342 == INDIRECT_REF)
10343 && (TREE_CODE (TREE_TYPE (TREE_OPERAND
10344 (TREE_OPERAND (base, 0), 0)))
10345 == REFERENCE_TYPE))));
10346 enum gomp_map_kind mkind = base_ref ? GOMP_MAP_FIRSTPRIVATE_REFERENCE
10347 : GOMP_MAP_FIRSTPRIVATE_POINTER;
10348 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
10349 OMP_CLAUSE_DECL (c2) = sdecl;
10350 tree baddr = build_fold_addr_expr (base);
10351 baddr = fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end),
10352 ptrdiff_type_node, baddr);
10353 /* This isn't going to be good enough when we add support for more
10354 complicated lvalue expressions. FIXME. */
10355 if (TREE_CODE (TREE_TYPE (sdecl)) == REFERENCE_TYPE
10356 && TREE_CODE (TREE_TYPE (TREE_TYPE (sdecl))) == POINTER_TYPE)
10357 sdecl = build_simple_mem_ref (sdecl);
10358 tree decladdr = fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end),
10359 ptrdiff_type_node, sdecl);
10360 OMP_CLAUSE_SIZE (c2)
10361 = fold_build2_loc (OMP_CLAUSE_LOCATION (grp_end), MINUS_EXPR,
10362 ptrdiff_type_node, baddr, decladdr);
10363 /* Insert after struct node. */
10364 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
10365 OMP_CLAUSE_CHAIN (l) = c2;
10366 }
10367
10368 return NULL;
10369 }
10370 else if (struct_map_to_clause)
10371 {
10372 tree *osc = struct_map_to_clause->get (base);
10373 tree *sc = NULL, *scp = NULL;
10374 sc = &OMP_CLAUSE_CHAIN (*osc);
10375 /* The struct mapping might be immediately followed by a
10376 FIRSTPRIVATE_POINTER and/or FIRSTPRIVATE_REFERENCE -- if it's an
10377 indirect access or a reference, or both. (This added node is removed
10378 in omp-low.cc after it has been processed there.) */
10379 if (*sc != grp_end
10380 && (OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_FIRSTPRIVATE_POINTER
10381 || OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
10382 sc = &OMP_CLAUSE_CHAIN (*sc);
10383 for (; *sc != grp_end; sc = &OMP_CLAUSE_CHAIN (*sc))
10384 if ((ptr || attach_detach) && sc == grp_start_p)
10385 break;
10386 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc)) != COMPONENT_REF
10387 && TREE_CODE (OMP_CLAUSE_DECL (*sc)) != INDIRECT_REF
10388 && TREE_CODE (OMP_CLAUSE_DECL (*sc)) != ARRAY_REF)
10389 break;
10390 else
10391 {
10392 tree sc_decl = OMP_CLAUSE_DECL (*sc);
10393 poly_offset_int offset;
10394 poly_int64 bitpos;
10395
10396 if (TREE_CODE (sc_decl) == ARRAY_REF)
10397 {
10398 while (TREE_CODE (sc_decl) == ARRAY_REF)
10399 sc_decl = TREE_OPERAND (sc_decl, 0);
10400 if (TREE_CODE (sc_decl) != COMPONENT_REF
10401 || TREE_CODE (TREE_TYPE (sc_decl)) != ARRAY_TYPE)
10402 break;
10403 }
10404 else if (TREE_CODE (sc_decl) == INDIRECT_REF
10405 && TREE_CODE (TREE_OPERAND (sc_decl, 0)) == COMPONENT_REF
10406 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (sc_decl, 0)))
10407 == REFERENCE_TYPE))
10408 sc_decl = TREE_OPERAND (sc_decl, 0);
10409
10410 tree base2 = extract_base_bit_offset (sc_decl, &bitpos, &offset);
10411 if (!base2 || !operand_equal_p (base2, base, 0))
10412 break;
10413 if (scp)
10414 continue;
10415 if (maybe_lt (coffset, offset)
10416 || (known_eq (coffset, offset)
10417 && maybe_lt (cbitpos, bitpos)))
10418 {
10419 if (ptr || attach_detach)
10420 scp = sc;
10421 else
10422 break;
10423 }
10424 }
10425
10426 if (!attach)
10427 OMP_CLAUSE_SIZE (*osc)
10428 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc), size_one_node);
10429 if (ptr || attach_detach)
10430 {
10431 tree cl = NULL_TREE, extra_node;
10432 tree alloc_node = build_omp_struct_comp_nodes (code, *grp_start_p,
10433 grp_end, &extra_node);
10434 tree *tail_chain = NULL;
10435
10436 /* Here, we have:
10437
10438 grp_end : the last (or only) node in this group.
10439 grp_start_p : pointer to the first node in a pointer mapping group
10440 up to and including GRP_END.
10441 sc : pointer to the chain for the end of the struct component
10442 list.
10443 scp : pointer to the chain for the sorted position at which we
10444 should insert in the middle of the struct component list
10445 (else NULL to insert at end).
10446 alloc_node : the "alloc" node for the structure (pointer-type)
10447 component. We insert at SCP (if present), else SC
10448 (the end of the struct component list).
10449 extra_node : a newly-synthesized node for an additional indirect
10450 pointer mapping or a Fortran pointer set, if needed.
10451 cl : first node to prepend before grp_start_p.
10452 tail_chain : pointer to chain of last prepended node.
10453
10454 The general idea is we move the nodes for this struct mapping
10455 together: the alloc node goes into the sorted list directly after
10456 the struct mapping, and any extra nodes (together with the nodes
10457 mapping arrays pointed to by struct components) get moved after
10458 that list. When SCP is NULL, we insert the nodes at SC, i.e. at
10459 the end of the struct component mapping list. It's important that
10460 the alloc_node comes first in that case because it's part of the
10461 sorted component mapping list (but subsequent nodes are not!). */
10462
10463 if (scp)
10464 omp_siblist_insert_node_after (alloc_node, scp);
10465
10466 /* Make [cl,tail_chain] a list of the alloc node (if we haven't
10467 already inserted it) and the extra_node (if it is present). The
10468 list can be empty if we added alloc_node above and there is no
10469 extra node. */
10470 if (scp && extra_node)
10471 {
10472 cl = extra_node;
10473 tail_chain = &OMP_CLAUSE_CHAIN (extra_node);
10474 }
10475 else if (extra_node)
10476 {
10477 OMP_CLAUSE_CHAIN (alloc_node) = extra_node;
10478 cl = alloc_node;
10479 tail_chain = &OMP_CLAUSE_CHAIN (extra_node);
10480 }
10481 else if (!scp)
10482 {
10483 cl = alloc_node;
10484 tail_chain = &OMP_CLAUSE_CHAIN (alloc_node);
10485 }
10486
10487 continue_at
10488 = cl ? omp_siblist_move_concat_nodes_after (cl, tail_chain,
10489 grp_start_p, grp_end,
10490 sc)
10491 : omp_siblist_move_nodes_after (grp_start_p, grp_end, sc);
10492 }
10493 else if (*sc != grp_end)
10494 {
10495 gcc_assert (*grp_start_p == grp_end);
10496
10497 /* We are moving the current node back to a previous struct node:
10498 the node that used to point to the current node will now point to
10499 the next node. */
10500 continue_at = grp_start_p;
10501 /* In the non-pointer case, the mapping clause itself is moved into
10502 the correct position in the struct component list, which in this
10503 case is just SC. */
10504 omp_siblist_move_node_after (*grp_start_p, grp_start_p, sc);
10505 }
10506 }
10507 return continue_at;
10508 }
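/* A schematic example: for a hypothetical pair of clauses

     map(tofrom: s.a) map(tofrom: s.b)

   the accumulation above leaves the clause chain looking roughly like

     GOMP_MAP_STRUCT (s) [OMP_CLAUSE_SIZE = 2]
       -> map(tofrom: s.a) -> map(tofrom: s.b) -> ...

   with the member nodes sorted by offset within 's', and pointer-typed
   components followed by the synthesized alloc/attach nodes described
   in the comment above.  */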
10509
10510 /* Scan through GROUPS, and create sorted structure sibling lists without
10511 gimplifying. */
10512
10513 static bool
10514 omp_build_struct_sibling_lists (enum tree_code code,
10515 enum omp_region_type region_type,
10516 vec<omp_mapping_group> *groups,
10517 hash_map<tree_operand_hash, omp_mapping_group *>
10518 **grpmap,
10519 tree *list_p)
10520 {
10521 unsigned i;
10522 omp_mapping_group *grp;
10523 hash_map<tree_operand_hash, tree> *struct_map_to_clause = NULL;
10524 bool success = true;
10525 tree *new_next = NULL;
10526 tree *tail = &OMP_CLAUSE_CHAIN ((*groups)[groups->length () - 1].grp_end);
10527 auto_vec<omp_mapping_group> pre_hwm_groups;
10528
10529 FOR_EACH_VEC_ELT (*groups, i, grp)
10530 {
10531 tree c = grp->grp_end;
10532 tree decl = OMP_CLAUSE_DECL (c);
10533 tree grp_end = grp->grp_end;
10534 tree sentinel = OMP_CLAUSE_CHAIN (grp_end);
10535
10536 if (new_next)
10537 grp->grp_start = new_next;
10538
10539 new_next = NULL;
10540
10541 tree *grp_start_p = grp->grp_start;
10542
10543 if (DECL_P (decl))
10544 continue;
10545
10546 /* Skip groups we marked for deletion in
10547 oacc_resolve_clause_dependencies. */
10548 if (grp->deleted)
10549 continue;
10550
10551 if (OMP_CLAUSE_CHAIN (*grp_start_p)
10552 && OMP_CLAUSE_CHAIN (*grp_start_p) != grp_end)
10553 {
10554 /* Don't process as a struct an array descriptor that isn't inside a
10555 derived type (the GOMP_MAP_POINTER that follows will have the form
10556 "var.data"; such mappings are handled specially). */
10557 tree grpmid = OMP_CLAUSE_CHAIN (*grp_start_p);
10558 if (OMP_CLAUSE_CODE (grpmid) == OMP_CLAUSE_MAP
10559 && OMP_CLAUSE_MAP_KIND (grpmid) == GOMP_MAP_TO_PSET
10560 && DECL_P (OMP_CLAUSE_DECL (grpmid)))
10561 continue;
10562 }
10563
10564 tree d = decl;
10565 if (TREE_CODE (d) == ARRAY_REF)
10566 {
10567 while (TREE_CODE (d) == ARRAY_REF)
10568 d = TREE_OPERAND (d, 0);
10569 if (TREE_CODE (d) == COMPONENT_REF
10570 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
10571 decl = d;
10572 }
10573 if (d == decl
10574 && TREE_CODE (decl) == INDIRECT_REF
10575 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
10576 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
10577 == REFERENCE_TYPE)
10578 && (OMP_CLAUSE_MAP_KIND (c)
10579 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
10580 decl = TREE_OPERAND (decl, 0);
10581
10582 STRIP_NOPS (decl);
10583
10584 if (TREE_CODE (decl) != COMPONENT_REF)
10585 continue;
10586
10587 /* If we're mapping the whole struct in another node, skip adding this
10588 node to a sibling list. */
10589 omp_mapping_group *wholestruct;
10590 if (omp_mapped_by_containing_struct (*grpmap, OMP_CLAUSE_DECL (c),
10591 &wholestruct))
10592 {
10593 if (!(region_type & ORT_ACC)
10594 && *grp_start_p == grp_end)
10595 /* Remove the whole of this mapping -- redundant. */
10596 grp->deleted = true;
10597
10598 continue;
10599 }
10600
10601 if (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
10602 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
10603 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
10604 && code != OACC_UPDATE
10605 && code != OMP_TARGET_UPDATE)
10606 {
10607 if (error_operand_p (decl))
10608 {
10609 success = false;
10610 goto error_out;
10611 }
10612
10613 tree stype = TREE_TYPE (decl);
10614 if (TREE_CODE (stype) == REFERENCE_TYPE)
10615 stype = TREE_TYPE (stype);
10616 if (TYPE_SIZE_UNIT (stype) == NULL
10617 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
10618 {
10619 error_at (OMP_CLAUSE_LOCATION (c),
10620 "mapping field %qE of variable length "
10621 "structure", OMP_CLAUSE_DECL (c));
10622 success = false;
10623 goto error_out;
10624 }
10625
10626 tree inner = NULL_TREE;
10627
10628 new_next
10629 = omp_accumulate_sibling_list (region_type, code,
10630 struct_map_to_clause, grp_start_p,
10631 grp_end, &inner);
10632
10633 if (inner)
10634 {
10635 if (new_next && *new_next == NULL_TREE)
10636 *new_next = inner;
10637 else
10638 *tail = inner;
10639
10640 OMP_CLAUSE_CHAIN (inner) = NULL_TREE;
10641 omp_mapping_group newgrp;
10642 newgrp.grp_start = new_next ? new_next : tail;
10643 newgrp.grp_end = inner;
10644 newgrp.mark = UNVISITED;
10645 newgrp.sibling = NULL;
10646 newgrp.deleted = false;
10647 newgrp.next = NULL;
10648 groups->safe_push (newgrp);
10649
10650 /* !!! Growing GROUPS might invalidate the pointers in the group
10651 map. Rebuild it here. This is a bit inefficient, but
10652 shouldn't happen very often. */
10653 delete (*grpmap);
10654 *grpmap
10655 = omp_reindex_mapping_groups (list_p, groups, &pre_hwm_groups,
10656 sentinel);
10657
10658 tail = &OMP_CLAUSE_CHAIN (inner);
10659 }
10660 }
10661 }
10662
10663 /* Delete groups marked for deletion above. At this point the order of the
10664 groups may no longer correspond to the order of the underlying list,
10665 which complicates this a little. First clear out OMP_CLAUSE_DECL for
10666 deleted nodes... */
10667
10668 FOR_EACH_VEC_ELT (*groups, i, grp)
10669 if (grp->deleted)
10670 for (tree d = *grp->grp_start;
10671 d != OMP_CLAUSE_CHAIN (grp->grp_end);
10672 d = OMP_CLAUSE_CHAIN (d))
10673 OMP_CLAUSE_DECL (d) = NULL_TREE;
10674
10675 /* ...then sweep through the list removing the now-empty nodes. */
10676
10677 tail = list_p;
10678 while (*tail)
10679 {
10680 if (OMP_CLAUSE_CODE (*tail) == OMP_CLAUSE_MAP
10681 && OMP_CLAUSE_DECL (*tail) == NULL_TREE)
10682 *tail = OMP_CLAUSE_CHAIN (*tail);
10683 else
10684 tail = &OMP_CLAUSE_CHAIN (*tail);
10685 }
10686
10687 error_out:
10688 if (struct_map_to_clause)
10689 delete struct_map_to_clause;
10690
10691 return success;
10692 }
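/* A minimal sketch of the pointer-to-pointer unlinking idiom used by the
   sweep above, shown on a generic singly-linked list (names here are
   illustrative only):

     struct node { struct node *next; bool dead; };

     struct node **tail = &head;
     while (*tail)
       if ((*tail)->dead)
         *tail = (*tail)->next;    // Unlink without a "previous" pointer.
       else
         tail = &(*tail)->next;

   Clearing OMP_CLAUSE_DECL first lets the sweep recognize deleted nodes
   even though the group order may no longer match the list order.  */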
10693
10694 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
10695 omp context and, where required, into enclosing contexts. */
10696
10697 static void
10698 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
10699 enum omp_region_type region_type,
10700 enum tree_code code)
10701 {
10702 struct gimplify_omp_ctx *ctx, *outer_ctx;
10703 tree c;
10704 tree *prev_list_p = NULL, *orig_list_p = list_p;
10705 int handled_depend_iterators = -1;
10706 int nowait = -1;
10707
10708 ctx = new_omp_context (region_type);
10709 ctx->code = code;
10710 outer_ctx = ctx->outer_context;
10711 if (code == OMP_TARGET)
10712 {
10713 if (!lang_GNU_Fortran ())
10714 ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
10715 ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
10716 ctx->defaultmap[GDMK_SCALAR_TARGET] = (lang_GNU_Fortran ()
10717 ? GOVD_MAP : GOVD_FIRSTPRIVATE);
10718 }
10719 if (!lang_GNU_Fortran ())
10720 switch (code)
10721 {
10722 case OMP_TARGET:
10723 case OMP_TARGET_DATA:
10724 case OMP_TARGET_ENTER_DATA:
10725 case OMP_TARGET_EXIT_DATA:
10726 case OACC_DECLARE:
10727 case OACC_HOST_DATA:
10728 case OACC_PARALLEL:
10729 case OACC_KERNELS:
10730 ctx->target_firstprivatize_array_bases = true;
10731 default:
10732 break;
10733 }
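/* Roughly, TARGET_FIRSTPRIVATIZE_ARRAY_BASES means that the base pointer
   of an array section is firstprivatized on the construct (becoming a
   GOMP_MAP_FIRSTPRIVATE_POINTER) rather than mapped in its own right;
   Fortran is excluded above because its descriptor-based arrays are
   handled separately.  */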
10734
10735 if (code == OMP_TARGET
10736 || code == OMP_TARGET_DATA
10737 || code == OMP_TARGET_ENTER_DATA
10738 || code == OMP_TARGET_EXIT_DATA)
10739 {
10740 vec<omp_mapping_group> *groups;
10741 groups = omp_gather_mapping_groups (list_p);
10742 if (groups)
10743 {
10744 hash_map<tree_operand_hash, omp_mapping_group *> *grpmap;
10745 grpmap = omp_index_mapping_groups (groups);
10746
10747 omp_build_struct_sibling_lists (code, region_type, groups, &grpmap,
10748 list_p);
10749
10750 omp_mapping_group *outlist = NULL;
10751
10752 /* Topological sorting may fail if we have duplicate nodes, which
10753 we should have detected and shown an error for already. Skip
10754 sorting in that case. */
10755 if (seen_error ())
10756 goto failure;
10757
10758 delete grpmap;
10759 delete groups;
10760
10761 /* Rebuild now that we have struct sibling lists. */
10762 groups = omp_gather_mapping_groups (list_p);
10763 grpmap = omp_index_mapping_groups (groups);
10764
10765 outlist = omp_tsort_mapping_groups (groups, grpmap);
10766 outlist = omp_segregate_mapping_groups (outlist);
10767 list_p = omp_reorder_mapping_groups (groups, outlist, list_p);
10768
10769 failure:
10770 delete grpmap;
10771 delete groups;
10772 }
10773 }
10774 else if (region_type & ORT_ACC)
10775 {
10776 vec<omp_mapping_group> *groups;
10777 groups = omp_gather_mapping_groups (list_p);
10778 if (groups)
10779 {
10780 hash_map<tree_operand_hash, omp_mapping_group *> *grpmap;
10781 grpmap = omp_index_mapping_groups (groups);
10782
10783 oacc_resolve_clause_dependencies (groups, grpmap);
10784 omp_build_struct_sibling_lists (code, region_type, groups, &grpmap,
10785 list_p);
10786
10787 delete groups;
10788 delete grpmap;
10789 }
10790 }
10791
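/* Main scanning loop: each iteration classifies one clause, setting
   REMOVE when the clause is invalid or fully handled here; the clause is
   then either spliced out of the list or LIST_P is advanced at the bottom
   of the loop.  Many cases funnel into the shared do_add/do_add_decl/
   do_notice labels below, which install DECL into CTX with FLAGS and
   propagate data-sharing information to enclosing contexts.  */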
10792 while ((c = *list_p) != NULL)
10793 {
10794 bool remove = false;
10795 bool notice_outer = true;
10796 const char *check_non_private = NULL;
10797 unsigned int flags;
10798 tree decl;
10799
10800 switch (OMP_CLAUSE_CODE (c))
10801 {
10802 case OMP_CLAUSE_PRIVATE:
10803 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
10804 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
10805 {
10806 flags |= GOVD_PRIVATE_OUTER_REF;
10807 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
10808 }
10809 else
10810 notice_outer = false;
10811 goto do_add;
10812 case OMP_CLAUSE_SHARED:
10813 flags = GOVD_SHARED | GOVD_EXPLICIT;
10814 goto do_add;
10815 case OMP_CLAUSE_FIRSTPRIVATE:
10816 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
10817 check_non_private = "firstprivate";
10818 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
10819 {
10820 gcc_assert (code == OMP_TARGET);
10821 flags |= GOVD_FIRSTPRIVATE_IMPLICIT;
10822 }
10823 goto do_add;
10824 case OMP_CLAUSE_LASTPRIVATE:
10825 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
10826 switch (code)
10827 {
10828 case OMP_DISTRIBUTE:
10829 error_at (OMP_CLAUSE_LOCATION (c),
10830 "conditional %<lastprivate%> clause on "
10831 "%qs construct", "distribute");
10832 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
10833 break;
10834 case OMP_TASKLOOP:
10835 error_at (OMP_CLAUSE_LOCATION (c),
10836 "conditional %<lastprivate%> clause on "
10837 "%qs construct", "taskloop");
10838 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
10839 break;
10840 default:
10841 break;
10842 }
10843 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
10844 if (code != OMP_LOOP)
10845 check_non_private = "lastprivate";
10846 decl = OMP_CLAUSE_DECL (c);
10847 if (error_operand_p (decl))
10848 goto do_add;
10849 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
10850 && !lang_hooks.decls.omp_scalar_p (decl, true))
10851 {
10852 error_at (OMP_CLAUSE_LOCATION (c),
10853 "non-scalar variable %qD in conditional "
10854 "%<lastprivate%> clause", decl);
10855 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
10856 }
10857 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
10858 flags |= GOVD_LASTPRIVATE_CONDITIONAL;
10859 omp_lastprivate_for_combined_outer_constructs (outer_ctx, decl,
10860 false);
10861 goto do_add;
10862 case OMP_CLAUSE_REDUCTION:
10863 if (OMP_CLAUSE_REDUCTION_TASK (c))
10864 {
10865 if (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
10866 {
10867 if (nowait == -1)
10868 nowait = omp_find_clause (*list_p,
10869 OMP_CLAUSE_NOWAIT) != NULL_TREE;
10870 if (nowait
10871 && (outer_ctx == NULL
10872 || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
10873 {
10874 error_at (OMP_CLAUSE_LOCATION (c),
10875 "%<task%> reduction modifier on a construct "
10876 "with a %<nowait%> clause");
10877 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
10878 }
10879 }
10880 else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
10881 {
10882 error_at (OMP_CLAUSE_LOCATION (c),
10883 "invalid %<task%> reduction modifier on construct "
10884 "other than %<parallel%>, %qs, %<sections%> or "
10885 "%<scope%>", lang_GNU_Fortran () ? "do" : "for");
10886 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
10887 }
10888 }
10889 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
10890 switch (code)
10891 {
10892 case OMP_SECTIONS:
10893 error_at (OMP_CLAUSE_LOCATION (c),
10894 "%<inscan%> %<reduction%> clause on "
10895 "%qs construct", "sections");
10896 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
10897 break;
10898 case OMP_PARALLEL:
10899 error_at (OMP_CLAUSE_LOCATION (c),
10900 "%<inscan%> %<reduction%> clause on "
10901 "%qs construct", "parallel");
10902 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
10903 break;
10904 case OMP_TEAMS:
10905 error_at (OMP_CLAUSE_LOCATION (c),
10906 "%<inscan%> %<reduction%> clause on "
10907 "%qs construct", "teams");
10908 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
10909 break;
10910 case OMP_TASKLOOP:
10911 error_at (OMP_CLAUSE_LOCATION (c),
10912 "%<inscan%> %<reduction%> clause on "
10913 "%qs construct", "taskloop");
10914 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
10915 break;
10916 case OMP_SCOPE:
10917 error_at (OMP_CLAUSE_LOCATION (c),
10918 "%<inscan%> %<reduction%> clause on "
10919 "%qs construct", "scope");
10920 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
10921 break;
10922 default:
10923 break;
10924 }
10925 /* FALLTHRU */
10926 case OMP_CLAUSE_IN_REDUCTION:
10927 case OMP_CLAUSE_TASK_REDUCTION:
10928 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
10929 /* OpenACC permits reductions on private variables. */
10930 if (!(region_type & ORT_ACC)
10931 /* taskgroup is actually not a worksharing region. */
10932 && code != OMP_TASKGROUP)
10933 check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
10934 decl = OMP_CLAUSE_DECL (c);
10935 if (TREE_CODE (decl) == MEM_REF)
10936 {
10937 tree type = TREE_TYPE (decl);
10938 bool saved_into_ssa = gimplify_ctxp->into_ssa;
10939 gimplify_ctxp->into_ssa = false;
10940 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
10941 NULL, is_gimple_val, fb_rvalue, false)
10942 == GS_ERROR)
10943 {
10944 gimplify_ctxp->into_ssa = saved_into_ssa;
10945 remove = true;
10946 break;
10947 }
10948 gimplify_ctxp->into_ssa = saved_into_ssa;
10949 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
10950 if (DECL_P (v))
10951 {
10952 omp_firstprivatize_variable (ctx, v);
10953 omp_notice_variable (ctx, v, true);
10954 }
10955 decl = TREE_OPERAND (decl, 0);
10956 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
10957 {
10958 gimplify_ctxp->into_ssa = false;
10959 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
10960 NULL, is_gimple_val, fb_rvalue, false)
10961 == GS_ERROR)
10962 {
10963 gimplify_ctxp->into_ssa = saved_into_ssa;
10964 remove = true;
10965 break;
10966 }
10967 gimplify_ctxp->into_ssa = saved_into_ssa;
10968 v = TREE_OPERAND (decl, 1);
10969 if (DECL_P (v))
10970 {
10971 omp_firstprivatize_variable (ctx, v);
10972 omp_notice_variable (ctx, v, true);
10973 }
10974 decl = TREE_OPERAND (decl, 0);
10975 }
10976 if (TREE_CODE (decl) == ADDR_EXPR
10977 || TREE_CODE (decl) == INDIRECT_REF)
10978 decl = TREE_OPERAND (decl, 0);
10979 }
10980 goto do_add_decl;
10981 case OMP_CLAUSE_LINEAR:
10982 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
10983 is_gimple_val, fb_rvalue) == GS_ERROR)
10984 {
10985 remove = true;
10986 break;
10987 }
10988 else
10989 {
10990 if (code == OMP_SIMD
10991 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
10992 {
10993 struct gimplify_omp_ctx *octx = outer_ctx;
10994 if (octx
10995 && octx->region_type == ORT_WORKSHARE
10996 && octx->combined_loop
10997 && !octx->distribute)
10998 {
10999 if (octx->outer_context
11000 && (octx->outer_context->region_type
11001 == ORT_COMBINED_PARALLEL))
11002 octx = octx->outer_context->outer_context;
11003 else
11004 octx = octx->outer_context;
11005 }
11006 if (octx
11007 && octx->region_type == ORT_WORKSHARE
11008 && octx->combined_loop
11009 && octx->distribute)
11010 {
11011 error_at (OMP_CLAUSE_LOCATION (c),
11012 "%<linear%> clause for variable other than "
11013 "loop iterator specified on construct "
11014 "combined with %<distribute%>");
11015 remove = true;
11016 break;
11017 }
11018 }
11019 /* For combined #pragma omp parallel for simd, we need to put
11020 lastprivate and perhaps firstprivate on the parallel as well.
11021 Similarly for #pragma omp for simd. */
11022 struct gimplify_omp_ctx *octx = outer_ctx;
11023 bool taskloop_seen = false;
11024 decl = NULL_TREE;
11025 do
11026 {
11027 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
11028 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11029 break;
11030 decl = OMP_CLAUSE_DECL (c);
11031 if (error_operand_p (decl))
11032 {
11033 decl = NULL_TREE;
11034 break;
11035 }
11036 flags = GOVD_SEEN;
11037 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11038 flags |= GOVD_FIRSTPRIVATE;
11039 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11040 flags |= GOVD_LASTPRIVATE;
11041 if (octx
11042 && octx->region_type == ORT_WORKSHARE
11043 && octx->combined_loop)
11044 {
11045 if (octx->outer_context
11046 && (octx->outer_context->region_type
11047 == ORT_COMBINED_PARALLEL))
11048 octx = octx->outer_context;
11049 else if (omp_check_private (octx, decl, false))
11050 break;
11051 }
11052 else if (octx
11053 && (octx->region_type & ORT_TASK) != 0
11054 && octx->combined_loop)
11055 taskloop_seen = true;
11056 else if (octx
11057 && octx->region_type == ORT_COMBINED_PARALLEL
11058 && ((ctx->region_type == ORT_WORKSHARE
11059 && octx == outer_ctx)
11060 || taskloop_seen))
11061 flags = GOVD_SEEN | GOVD_SHARED;
11062 else if (octx
11063 && ((octx->region_type & ORT_COMBINED_TEAMS)
11064 == ORT_COMBINED_TEAMS))
11065 flags = GOVD_SEEN | GOVD_SHARED;
11066 else if (octx
11067 && octx->region_type == ORT_COMBINED_TARGET)
11068 {
11069 if (flags & GOVD_LASTPRIVATE)
11070 flags = GOVD_SEEN | GOVD_MAP;
11071 }
11072 else
11073 break;
11074 splay_tree_node on
11075 = splay_tree_lookup (octx->variables,
11076 (splay_tree_key) decl);
11077 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
11078 {
11079 octx = NULL;
11080 break;
11081 }
11082 omp_add_variable (octx, decl, flags);
11083 if (octx->outer_context == NULL)
11084 break;
11085 octx = octx->outer_context;
11086 }
11087 while (1);
11088 if (octx
11089 && decl
11090 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
11091 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
11092 omp_notice_variable (octx, decl, true);
11093 }
11094 flags = GOVD_LINEAR | GOVD_EXPLICIT;
11095 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
11096 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11097 {
11098 notice_outer = false;
11099 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11100 }
11101 goto do_add;
11102
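/* The OMP_CLAUSE_MAP handling below proceeds in several steps: adjust
   the map kind for constructs where only part of a mapping applies
   (e.g. 'target exit data' or 'update'), gimplify the clause size,
   handle GOMP_MAP_STRUCT bases, rewrite attach/detach operations to
   the kind appropriate for the directive, and finally fall through to
   do_add with GOVD_MAP | GOVD_EXPLICIT.  */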
11103 case OMP_CLAUSE_MAP:
11104 decl = OMP_CLAUSE_DECL (c);
11105 if (error_operand_p (decl))
11106 remove = true;
11107 switch (code)
11108 {
11109 case OMP_TARGET:
11110 break;
11111 case OACC_DATA:
11112 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
11113 break;
11114 /* FALLTHRU */
11115 case OMP_TARGET_DATA:
11116 case OMP_TARGET_ENTER_DATA:
11117 case OMP_TARGET_EXIT_DATA:
11118 case OACC_ENTER_DATA:
11119 case OACC_EXIT_DATA:
11120 case OACC_HOST_DATA:
11121 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11122 || (OMP_CLAUSE_MAP_KIND (c)
11123 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
11124 /* For target {,enter ,exit }data only the array slice is
11125 mapped, but not the pointer to it. */
11126 remove = true;
11127 break;
11128 default:
11129 break;
11130 }
11131 /* For Fortran, not only the pointer to the data is mapped but also
11132 the address of the pointer, the array descriptor etc.; for
11133 'exit data' - and in particular for 'delete:' - having an 'alloc:'
11134 does not make sense. Likewise, for 'update' only transferring the
11135 data itself is needed as the rest has been handled in previous
11136 directives. However, for 'exit data', the array descriptor needs
11137 to be deleted; hence, we turn the MAP_TO_PSET into a MAP_DELETE.
11138
11139 NOTE: Generally, it is not safe to perform "enter data" operations
11140 on arrays where the data *or the descriptor* may go out of scope
11141 before a corresponding "exit data" operation -- and such a
11142 descriptor may be synthesized temporarily, e.g. to pass an
11143 explicit-shape array to a function expecting an assumed-shape
11144 argument. Performing "enter data" inside the called function
11145 would thus be problematic. */
11146 if (code == OMP_TARGET_EXIT_DATA
11147 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET)
11148 OMP_CLAUSE_SET_MAP_KIND (c, OMP_CLAUSE_MAP_KIND (*prev_list_p)
11149 == GOMP_MAP_DELETE
11150 ? GOMP_MAP_DELETE : GOMP_MAP_RELEASE);
11151 else if ((code == OMP_TARGET_EXIT_DATA || code == OMP_TARGET_UPDATE)
11152 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11153 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET))
11154 remove = true;
11155
11156 if (remove)
11157 break;
11158 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
11159 {
11160 struct gimplify_omp_ctx *octx;
11161 for (octx = outer_ctx; octx; octx = octx->outer_context)
11162 {
11163 if (octx->region_type != ORT_ACC_HOST_DATA)
11164 break;
11165 splay_tree_node n2
11166 = splay_tree_lookup (octx->variables,
11167 (splay_tree_key) decl);
11168 if (n2)
11169 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
11170 "declared in enclosing %<host_data%> region",
11171 DECL_NAME (decl));
11172 }
11173 }
11174 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
11175 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
11176 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
11177 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
11178 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
11179 {
11180 remove = true;
11181 break;
11182 }
11183 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11184 || (OMP_CLAUSE_MAP_KIND (c)
11185 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
11186 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11187 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
11188 {
11189 OMP_CLAUSE_SIZE (c)
11190 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
11191 false);
11192 if ((region_type & ORT_TARGET) != 0)
11193 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
11194 GOVD_FIRSTPRIVATE | GOVD_SEEN);
11195 }
11196
11197 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
11198 {
11199 tree base = omp_strip_components_and_deref (decl);
11200 if (DECL_P (base))
11201 {
11202 decl = base;
11203 splay_tree_node n
11204 = splay_tree_lookup (ctx->variables,
11205 (splay_tree_key) decl);
11206 if (seen_error ()
11207 && n
11208 && (n->value & (GOVD_MAP | GOVD_FIRSTPRIVATE)) != 0)
11209 {
11210 remove = true;
11211 break;
11212 }
11213 flags = GOVD_MAP | GOVD_EXPLICIT;
11214
11215 goto do_add_decl;
11216 }
11217 }
11218
11219 if (TREE_CODE (decl) == TARGET_EXPR)
11220 {
11221 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
11222 is_gimple_lvalue, fb_lvalue)
11223 == GS_ERROR)
11224 remove = true;
11225 }
11226 else if (!DECL_P (decl))
11227 {
11228 tree d = decl, *pd;
11229 if (TREE_CODE (d) == ARRAY_REF)
11230 {
11231 while (TREE_CODE (d) == ARRAY_REF)
11232 d = TREE_OPERAND (d, 0);
11233 if (TREE_CODE (d) == COMPONENT_REF
11234 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
11235 decl = d;
11236 }
11237 pd = &OMP_CLAUSE_DECL (c);
11238 if (d == decl
11239 && TREE_CODE (decl) == INDIRECT_REF
11240 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
11241 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
11242 == REFERENCE_TYPE)
11243 && (OMP_CLAUSE_MAP_KIND (c)
11244 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
11245 {
11246 pd = &TREE_OPERAND (decl, 0);
11247 decl = TREE_OPERAND (decl, 0);
11248 }
11249 /* An "attach/detach" operation on an update directive should
11250 behave as a GOMP_MAP_ALWAYS_POINTER. Beware that
11251 unlike attach or detach map kinds, GOMP_MAP_ALWAYS_POINTER
11252 depends on the previous mapping. */
11253 if (code == OACC_UPDATE
11254 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11255 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_POINTER);
11256
11257 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11258 {
11259 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
11260 == ARRAY_TYPE)
11261 remove = true;
11262 else
11263 {
11264 gomp_map_kind k = ((code == OACC_EXIT_DATA
11265 || code == OMP_TARGET_EXIT_DATA)
11266 ? GOMP_MAP_DETACH : GOMP_MAP_ATTACH);
11267 OMP_CLAUSE_SET_MAP_KIND (c, k);
11268 }
11269 }
11270
11271 tree cref = decl;
11272
11273 while (TREE_CODE (cref) == ARRAY_REF)
11274 cref = TREE_OPERAND (cref, 0);
11275
11276 if (TREE_CODE (cref) == INDIRECT_REF)
11277 cref = TREE_OPERAND (cref, 0);
11278
11279 if (TREE_CODE (cref) == COMPONENT_REF)
11280 {
11281 tree base = cref;
11282 while (base && !DECL_P (base))
11283 {
11284 tree innerbase = omp_get_base_pointer (base);
11285 if (!innerbase)
11286 break;
11287 base = innerbase;
11288 }
11289 if (base
11290 && DECL_P (base)
11291 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
11292 && POINTER_TYPE_P (TREE_TYPE (base)))
11293 {
11294 splay_tree_node n
11295 = splay_tree_lookup (ctx->variables,
11296 (splay_tree_key) base);
11297 n->value |= GOVD_SEEN;
11298 }
11299 }
11300
11301 if (code == OMP_TARGET && OMP_CLAUSE_MAP_IN_REDUCTION (c))
11302 {
11303 /* Don't gimplify *pd fully at this point, as the base
11304 will need to be adjusted during omp lowering. */
11305 auto_vec<tree, 10> expr_stack;
11306 tree *p = pd;
11307 while (handled_component_p (*p)
11308 || TREE_CODE (*p) == INDIRECT_REF
11309 || TREE_CODE (*p) == ADDR_EXPR
11310 || TREE_CODE (*p) == MEM_REF
11311 || TREE_CODE (*p) == NON_LVALUE_EXPR)
11312 {
11313 expr_stack.safe_push (*p);
11314 p = &TREE_OPERAND (*p, 0);
11315 }
11316 for (int i = expr_stack.length () - 1; i >= 0; i--)
11317 {
11318 tree t = expr_stack[i];
11319 if (TREE_CODE (t) == ARRAY_REF
11320 || TREE_CODE (t) == ARRAY_RANGE_REF)
11321 {
11322 if (TREE_OPERAND (t, 2) == NULL_TREE)
11323 {
11324 tree low = unshare_expr (array_ref_low_bound (t));
11325 if (!is_gimple_min_invariant (low))
11326 {
11327 TREE_OPERAND (t, 2) = low;
11328 if (gimplify_expr (&TREE_OPERAND (t, 2),
11329 pre_p, NULL,
11330 is_gimple_reg,
11331 fb_rvalue) == GS_ERROR)
11332 remove = true;
11333 }
11334 }
11335 else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
11336 NULL, is_gimple_reg,
11337 fb_rvalue) == GS_ERROR)
11338 remove = true;
11339 if (TREE_OPERAND (t, 3) == NULL_TREE)
11340 {
11341 tree elmt_size = array_ref_element_size (t);
11342 if (!is_gimple_min_invariant (elmt_size))
11343 {
11344 elmt_size = unshare_expr (elmt_size);
11345 tree elmt_type
11346 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t,
11347 0)));
11348 tree factor
11349 = size_int (TYPE_ALIGN_UNIT (elmt_type));
11350 elmt_size
11351 = size_binop (EXACT_DIV_EXPR, elmt_size,
11352 factor);
11353 TREE_OPERAND (t, 3) = elmt_size;
11354 if (gimplify_expr (&TREE_OPERAND (t, 3),
11355 pre_p, NULL,
11356 is_gimple_reg,
11357 fb_rvalue) == GS_ERROR)
11358 remove = true;
11359 }
11360 }
11361 else if (gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
11362 NULL, is_gimple_reg,
11363 fb_rvalue) == GS_ERROR)
11364 remove = true;
11365 }
11366 else if (TREE_CODE (t) == COMPONENT_REF)
11367 {
11368 if (TREE_OPERAND (t, 2) == NULL_TREE)
11369 {
11370 tree offset = component_ref_field_offset (t);
11371 if (!is_gimple_min_invariant (offset))
11372 {
11373 offset = unshare_expr (offset);
11374 tree field = TREE_OPERAND (t, 1);
11375 tree factor
11376 = size_int (DECL_OFFSET_ALIGN (field)
11377 / BITS_PER_UNIT);
11378 offset = size_binop (EXACT_DIV_EXPR, offset,
11379 factor);
11380 TREE_OPERAND (t, 2) = offset;
11381 if (gimplify_expr (&TREE_OPERAND (t, 2),
11382 pre_p, NULL,
11383 is_gimple_reg,
11384 fb_rvalue) == GS_ERROR)
11385 remove = true;
11386 }
11387 }
11388 else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
11389 NULL, is_gimple_reg,
11390 fb_rvalue) == GS_ERROR)
11391 remove = true;
11392 }
11393 }
11394 for (; expr_stack.length () > 0; )
11395 {
11396 tree t = expr_stack.pop ();
11397
11398 if (TREE_CODE (t) == ARRAY_REF
11399 || TREE_CODE (t) == ARRAY_RANGE_REF)
11400 {
11401 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1))
11402 && gimplify_expr (&TREE_OPERAND (t, 1), pre_p,
11403 NULL, is_gimple_val,
11404 fb_rvalue) == GS_ERROR)
11405 remove = true;
11406 }
11407 }
11408 }
11409 else if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue,
11410 fb_lvalue) == GS_ERROR)
11411 {
11412 remove = true;
11413 break;
11414 }
11415
11416 if (!remove
11417 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
11418 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH_DETACH
11419 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
11420 && OMP_CLAUSE_CHAIN (c)
11421 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
11422 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
11423 == GOMP_MAP_ALWAYS_POINTER)
11424 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
11425 == GOMP_MAP_ATTACH_DETACH)
11426 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
11427 == GOMP_MAP_TO_PSET)))
11428 prev_list_p = list_p;
11429
11430 break;
11431 }
11432 flags = GOVD_MAP | GOVD_EXPLICIT;
11433 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
11434 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
11435 flags |= GOVD_MAP_ALWAYS_TO;
11436
11437 if ((code == OMP_TARGET
11438 || code == OMP_TARGET_DATA
11439 || code == OMP_TARGET_ENTER_DATA
11440 || code == OMP_TARGET_EXIT_DATA)
11441 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
11442 {
11443 for (struct gimplify_omp_ctx *octx = outer_ctx; octx;
11444 octx = octx->outer_context)
11445 {
11446 splay_tree_node n
11447 = splay_tree_lookup (octx->variables,
11448 (splay_tree_key) OMP_CLAUSE_DECL (c));
11449 /* If this is contained in an outer OpenMP region as a
11450 firstprivate value, remove the attach/detach. */
11451 if (n && (n->value & GOVD_FIRSTPRIVATE))
11452 {
11453 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FIRSTPRIVATE_POINTER);
11454 goto do_add;
11455 }
11456 }
11457
11458 enum gomp_map_kind map_kind = (code == OMP_TARGET_EXIT_DATA
11459 ? GOMP_MAP_DETACH
11460 : GOMP_MAP_ATTACH);
11461 OMP_CLAUSE_SET_MAP_KIND (c, map_kind);
11462 }
11463
11464 goto do_add;
11465
11466 case OMP_CLAUSE_AFFINITY:
11467 gimplify_omp_affinity (list_p, pre_p);
11468 remove = true;
11469 break;
11470 case OMP_CLAUSE_DOACROSS:
11471 if (OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
11472 {
11473 tree deps = OMP_CLAUSE_DECL (c);
11474 while (deps && TREE_CODE (deps) == TREE_LIST)
11475 {
11476 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
11477 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
11478 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
11479 pre_p, NULL, is_gimple_val, fb_rvalue);
11480 deps = TREE_CHAIN (deps);
11481 }
11482 }
11483 else
11484 gcc_assert (OMP_CLAUSE_DOACROSS_KIND (c)
11485 == OMP_CLAUSE_DOACROSS_SOURCE);
11486 break;
11487 case OMP_CLAUSE_DEPEND:
11488 if (handled_depend_iterators == -1)
11489 handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
11490 if (handled_depend_iterators)
11491 {
11492 if (handled_depend_iterators == 2)
11493 remove = true;
11494 break;
11495 }
11496 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
11497 {
11498 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
11499 NULL, is_gimple_val, fb_rvalue);
11500 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
11501 }
11502 if (error_operand_p (OMP_CLAUSE_DECL (c)))
11503 {
11504 remove = true;
11505 break;
11506 }
11507 if (OMP_CLAUSE_DECL (c) != null_pointer_node)
11508 {
11509 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
11510 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
11511 is_gimple_val, fb_rvalue) == GS_ERROR)
11512 {
11513 remove = true;
11514 break;
11515 }
11516 }
11517 if (code == OMP_TASK)
11518 ctx->has_depend = true;
11519 break;
11520
11521 case OMP_CLAUSE_TO:
11522 case OMP_CLAUSE_FROM:
11523 case OMP_CLAUSE__CACHE_:
11524 decl = OMP_CLAUSE_DECL (c);
11525 if (error_operand_p (decl))
11526 {
11527 remove = true;
11528 break;
11529 }
11530 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
11531 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
11532 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
11533 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
11534 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
11535 {
11536 remove = true;
11537 break;
11538 }
11539 if (!DECL_P (decl))
11540 {
11541 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
11542 NULL, is_gimple_lvalue, fb_lvalue)
11543 == GS_ERROR)
11544 {
11545 remove = true;
11546 break;
11547 }
11548 break;
11549 }
11550 goto do_notice;
11551
11552 case OMP_CLAUSE_USE_DEVICE_PTR:
11553 case OMP_CLAUSE_USE_DEVICE_ADDR:
11554 flags = GOVD_EXPLICIT;
11555 goto do_add;
11556
11557 case OMP_CLAUSE_HAS_DEVICE_ADDR:
11558 decl = OMP_CLAUSE_DECL (c);
11559 while (TREE_CODE (decl) == INDIRECT_REF
11560 || TREE_CODE (decl) == ARRAY_REF)
11561 decl = TREE_OPERAND (decl, 0);
11562 flags = GOVD_EXPLICIT;
11563 goto do_add_decl;
11564
11565 case OMP_CLAUSE_IS_DEVICE_PTR:
11566 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
11567 goto do_add;
11568
11569 do_add:
11570 decl = OMP_CLAUSE_DECL (c);
11571 do_add_decl:
11572 if (error_operand_p (decl))
11573 {
11574 remove = true;
11575 break;
11576 }
11577 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
11578 {
11579 tree t = omp_member_access_dummy_var (decl);
11580 if (t)
11581 {
11582 tree v = DECL_VALUE_EXPR (decl);
11583 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
11584 if (outer_ctx)
11585 omp_notice_variable (outer_ctx, t, true);
11586 }
11587 }
11588 if (code == OACC_DATA
11589 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11590 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
11591 flags |= GOVD_MAP_0LEN_ARRAY;
11592 omp_add_variable (ctx, decl, flags);
11593 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11594 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
11595 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
11596 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
11597 {
11598 struct gimplify_omp_ctx *pctx
11599 = code == OMP_TARGET ? outer_ctx : ctx;
11600 if (pctx)
11601 omp_add_variable (pctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
11602 GOVD_LOCAL | GOVD_SEEN);
11603 if (pctx
11604 && OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
11605 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
11606 find_decl_expr,
11607 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
11608 NULL) == NULL_TREE)
11609 omp_add_variable (pctx,
11610 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
11611 GOVD_LOCAL | GOVD_SEEN);
11612 gimplify_omp_ctxp = pctx;
11613 push_gimplify_context ();
11614
11615 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
11616 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
11617
11618 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
11619 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
11620 pop_gimplify_context
11621 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
11622 push_gimplify_context ();
11623 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
11624 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
11625 pop_gimplify_context
11626 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
11627 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
11628 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
11629
11630 gimplify_omp_ctxp = outer_ctx;
11631 }
11632 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
11633 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
11634 {
11635 gimplify_omp_ctxp = ctx;
11636 push_gimplify_context ();
11637 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
11638 {
11639 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
11640 NULL, NULL);
11641 TREE_SIDE_EFFECTS (bind) = 1;
11642 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
11643 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
11644 }
11645 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
11646 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
11647 pop_gimplify_context
11648 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
11649 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
11650
11651 gimplify_omp_ctxp = outer_ctx;
11652 }
11653 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11654 && OMP_CLAUSE_LINEAR_STMT (c))
11655 {
11656 gimplify_omp_ctxp = ctx;
11657 push_gimplify_context ();
11658 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
11659 {
11660 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
11661 NULL, NULL);
11662 TREE_SIDE_EFFECTS (bind) = 1;
11663 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
11664 OMP_CLAUSE_LINEAR_STMT (c) = bind;
11665 }
11666 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
11667 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
11668 pop_gimplify_context
11669 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
11670 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
11671
11672 gimplify_omp_ctxp = outer_ctx;
11673 }
11674 if (notice_outer)
11675 goto do_notice;
11676 break;
11677
11678 case OMP_CLAUSE_COPYIN:
11679 case OMP_CLAUSE_COPYPRIVATE:
11680 decl = OMP_CLAUSE_DECL (c);
11681 if (error_operand_p (decl))
11682 {
11683 remove = true;
11684 break;
11685 }
11686 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
11687 && !remove
11688 && !omp_check_private (ctx, decl, true))
11689 {
11690 remove = true;
11691 if (is_global_var (decl))
11692 {
11693 if (DECL_THREAD_LOCAL_P (decl))
11694 remove = false;
11695 else if (DECL_HAS_VALUE_EXPR_P (decl))
11696 {
11697 tree value = get_base_address (DECL_VALUE_EXPR (decl));
11698
11699 if (value
11700 && DECL_P (value)
11701 && DECL_THREAD_LOCAL_P (value))
11702 remove = false;
11703 }
11704 }
11705 if (remove)
11706 error_at (OMP_CLAUSE_LOCATION (c),
11707 "copyprivate variable %qE is not threadprivate"
11708 " or private in outer context", DECL_NAME (decl));
11709 }
11710 do_notice:
11711 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11712 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
11713 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
11714 && outer_ctx
11715 && ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
11716 || (region_type == ORT_WORKSHARE
11717 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11718 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
11719 || code == OMP_LOOP)))
11720 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
11721 || (code == OMP_LOOP
11722 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11723 && ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
11724 == ORT_COMBINED_TEAMS))))
11725 {
11726 splay_tree_node on
11727 = splay_tree_lookup (outer_ctx->variables,
11728 (splay_tree_key)decl);
11729 if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
11730 {
11731 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11732 && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
11733 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
11734 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
11735 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
11736 == POINTER_TYPE))))
11737 omp_firstprivatize_variable (outer_ctx, decl);
11738 else
11739 {
11740 omp_add_variable (outer_ctx, decl,
11741 GOVD_SEEN | GOVD_SHARED);
11742 if (outer_ctx->outer_context)
11743 omp_notice_variable (outer_ctx->outer_context, decl,
11744 true);
11745 }
11746 }
11747 }
11748 if (outer_ctx)
11749 omp_notice_variable (outer_ctx, decl, true);
11750 if (check_non_private
11751 && (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
11752 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
11753 || decl == OMP_CLAUSE_DECL (c)
11754 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
11755 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
11756 == ADDR_EXPR
11757 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
11758 == POINTER_PLUS_EXPR
11759 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
11760 (OMP_CLAUSE_DECL (c), 0), 0))
11761 == ADDR_EXPR)))))
11762 && omp_check_private (ctx, decl, false))
11763 {
11764 error ("%s variable %qE is private in outer context",
11765 check_non_private, DECL_NAME (decl));
11766 remove = true;
11767 }
11768 break;
11769
11770 case OMP_CLAUSE_DETACH:
11771 flags = GOVD_FIRSTPRIVATE | GOVD_SEEN;
11772 goto do_add;
11773
11774 case OMP_CLAUSE_IF:
11775 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
11776 && OMP_CLAUSE_IF_MODIFIER (c) != code)
11777 {
11778 const char *p[2];
11779 for (int i = 0; i < 2; i++)
11780 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
11781 {
11782 case VOID_CST: p[i] = "cancel"; break;
11783 case OMP_PARALLEL: p[i] = "parallel"; break;
11784 case OMP_SIMD: p[i] = "simd"; break;
11785 case OMP_TASK: p[i] = "task"; break;
11786 case OMP_TASKLOOP: p[i] = "taskloop"; break;
11787 case OMP_TARGET_DATA: p[i] = "target data"; break;
11788 case OMP_TARGET: p[i] = "target"; break;
11789 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
11790 case OMP_TARGET_ENTER_DATA:
11791 p[i] = "target enter data"; break;
11792 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
11793 default: gcc_unreachable ();
11794 }
11795 error_at (OMP_CLAUSE_LOCATION (c),
11796 "expected %qs %<if%> clause modifier rather than %qs",
11797 p[0], p[1]);
11798 remove = true;
11799 }
11800 /* Fall through. */
11801
11802 case OMP_CLAUSE_FINAL:
11803 OMP_CLAUSE_OPERAND (c, 0)
11804 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
11805 /* Fall through. */
11806
11807 case OMP_CLAUSE_NUM_TEAMS:
11808 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS
11809 && OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
11810 && !is_gimple_min_invariant (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
11811 {
11812 if (error_operand_p (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
11813 {
11814 remove = true;
11815 break;
11816 }
11817 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
11818 = get_initialized_tmp_var (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c),
11819 pre_p, NULL, true);
11820 }
11821 /* Fall through. */
11822
11823 case OMP_CLAUSE_SCHEDULE:
11824 case OMP_CLAUSE_NUM_THREADS:
11825 case OMP_CLAUSE_THREAD_LIMIT:
11826 case OMP_CLAUSE_DIST_SCHEDULE:
11827 case OMP_CLAUSE_DEVICE:
11828 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEVICE
11829 && OMP_CLAUSE_DEVICE_ANCESTOR (c))
11830 {
11831 if (code != OMP_TARGET)
11832 {
11833 error_at (OMP_CLAUSE_LOCATION (c),
11834 "%<device%> clause with %<ancestor%> is only "
11835 "allowed on %<target%> construct");
11836 remove = true;
11837 break;
11838 }
11839
11840 tree clauses = *orig_list_p;
11841 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
11842 if (OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEVICE
11843 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_FIRSTPRIVATE
11844 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_PRIVATE
11845 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEFAULTMAP
11846 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_MAP
11847 )
11848 {
11849 error_at (OMP_CLAUSE_LOCATION (c),
11850 "with %<ancestor%>, only the %<device%>, "
11851 "%<firstprivate%>, %<private%>, %<defaultmap%>, "
11852 "and %<map%> clauses may appear on the "
11853 "construct");
11854 remove = true;
11855 break;
11856 }
11857 }
11858 /* Fall through. */
11859
11860 case OMP_CLAUSE_PRIORITY:
11861 case OMP_CLAUSE_GRAINSIZE:
11862 case OMP_CLAUSE_NUM_TASKS:
11863 case OMP_CLAUSE_FILTER:
11864 case OMP_CLAUSE_HINT:
11865 case OMP_CLAUSE_ASYNC:
11866 case OMP_CLAUSE_WAIT:
11867 case OMP_CLAUSE_NUM_GANGS:
11868 case OMP_CLAUSE_NUM_WORKERS:
11869 case OMP_CLAUSE_VECTOR_LENGTH:
11870 case OMP_CLAUSE_WORKER:
11871 case OMP_CLAUSE_VECTOR:
11872 if (OMP_CLAUSE_OPERAND (c, 0)
11873 && !is_gimple_min_invariant (OMP_CLAUSE_OPERAND (c, 0)))
11874 {
11875 if (error_operand_p (OMP_CLAUSE_OPERAND (c, 0)))
11876 {
11877 remove = true;
11878 break;
11879 }
11880 /* All these clauses care about the value, not a particular decl,
11881 so try to force it into an SSA_NAME or a fresh temporary. */
11882 OMP_CLAUSE_OPERAND (c, 0)
11883 = get_initialized_tmp_var (OMP_CLAUSE_OPERAND (c, 0),
11884 pre_p, NULL, true);
11885 }
11886 break;
11887
11888 case OMP_CLAUSE_GANG:
11889 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
11890 is_gimple_val, fb_rvalue) == GS_ERROR)
11891 remove = true;
11892 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
11893 is_gimple_val, fb_rvalue) == GS_ERROR)
11894 remove = true;
11895 break;
11896
11897 case OMP_CLAUSE_NOWAIT:
11898 nowait = 1;
11899 break;
11900
11901 case OMP_CLAUSE_ORDERED:
11902 case OMP_CLAUSE_UNTIED:
11903 case OMP_CLAUSE_COLLAPSE:
11904 case OMP_CLAUSE_TILE:
11905 case OMP_CLAUSE_AUTO:
11906 case OMP_CLAUSE_SEQ:
11907 case OMP_CLAUSE_INDEPENDENT:
11908 case OMP_CLAUSE_MERGEABLE:
11909 case OMP_CLAUSE_PROC_BIND:
11910 case OMP_CLAUSE_SAFELEN:
11911 case OMP_CLAUSE_SIMDLEN:
11912 case OMP_CLAUSE_NOGROUP:
11913 case OMP_CLAUSE_THREADS:
11914 case OMP_CLAUSE_SIMD:
11915 case OMP_CLAUSE_BIND:
11916 case OMP_CLAUSE_IF_PRESENT:
11917 case OMP_CLAUSE_FINALIZE:
11918 break;
11919
11920 case OMP_CLAUSE_ORDER:
11921 ctx->order_concurrent = true;
11922 break;
11923
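/* As a worked example, a hypothetical 'defaultmap(firstprivate: scalar)'
   selects the category range [GDMK_SCALAR, GDMK_SCALAR_TARGET] below and
   records GOVD_FIRSTPRIVATE for each category in it, whereas an
   unqualified 'defaultmap(tofrom)' applies GOVD_MAP to every category
   from GDMK_SCALAR through GDMK_POINTER.  */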
11924 case OMP_CLAUSE_DEFAULTMAP:
11925 enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
11926 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
11927 {
11928 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
11929 gdmkmin = GDMK_SCALAR;
11930 gdmkmax = GDMK_POINTER;
11931 break;
11932 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
11933 gdmkmin = GDMK_SCALAR;
11934 gdmkmax = GDMK_SCALAR_TARGET;
11935 break;
11936 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
11937 gdmkmin = gdmkmax = GDMK_AGGREGATE;
11938 break;
11939 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
11940 gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
11941 break;
11942 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
11943 gdmkmin = gdmkmax = GDMK_POINTER;
11944 break;
11945 default:
11946 gcc_unreachable ();
11947 }
11948 for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
11949 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
11950 {
11951 case OMP_CLAUSE_DEFAULTMAP_ALLOC:
11952 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
11953 break;
11954 case OMP_CLAUSE_DEFAULTMAP_TO:
11955 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
11956 break;
11957 case OMP_CLAUSE_DEFAULTMAP_FROM:
11958 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
11959 break;
11960 case OMP_CLAUSE_DEFAULTMAP_TOFROM:
11961 ctx->defaultmap[gdmk] = GOVD_MAP;
11962 break;
11963 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
11964 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
11965 break;
11966 case OMP_CLAUSE_DEFAULTMAP_NONE:
11967 ctx->defaultmap[gdmk] = 0;
11968 break;
11969 case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
11970 switch (gdmk)
11971 {
11972 case GDMK_SCALAR:
11973 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
11974 break;
11975 case GDMK_SCALAR_TARGET:
11976 ctx->defaultmap[gdmk] = (lang_GNU_Fortran ()
11977 ? GOVD_MAP : GOVD_FIRSTPRIVATE);
11978 break;
11979 case GDMK_AGGREGATE:
11980 case GDMK_ALLOCATABLE:
11981 ctx->defaultmap[gdmk] = GOVD_MAP;
11982 break;
11983 case GDMK_POINTER:
11984 ctx->defaultmap[gdmk] = GOVD_MAP;
11985 if (!lang_GNU_Fortran ())
11986 ctx->defaultmap[gdmk] |= GOVD_MAP_0LEN_ARRAY;
11987 break;
11988 default:
11989 gcc_unreachable ();
11990 }
11991 break;
11992 default:
11993 gcc_unreachable ();
11994 }
11995 break;
11996
11997 case OMP_CLAUSE_ALIGNED:
11998 decl = OMP_CLAUSE_DECL (c);
11999 if (error_operand_p (decl))
12000 {
12001 remove = true;
12002 break;
12003 }
12004 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
12005 is_gimple_val, fb_rvalue) == GS_ERROR)
12006 {
12007 remove = true;
12008 break;
12009 }
12010 if (!is_global_var (decl)
12011 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
12012 omp_add_variable (ctx, decl, GOVD_ALIGNED);
12013 break;
12014
12015 case OMP_CLAUSE_NONTEMPORAL:
12016 decl = OMP_CLAUSE_DECL (c);
12017 if (error_operand_p (decl))
12018 {
12019 remove = true;
12020 break;
12021 }
12022 omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
12023 break;
12024
12025 case OMP_CLAUSE_ALLOCATE:
12026 decl = OMP_CLAUSE_DECL (c);
12027 if (error_operand_p (decl))
12028 {
12029 remove = true;
12030 break;
12031 }
12032 if (gimplify_expr (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (c), pre_p, NULL,
12033 is_gimple_val, fb_rvalue) == GS_ERROR)
12034 {
12035 remove = true;
12036 break;
12037 }
12038 else if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
12039 || (TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
12040 == INTEGER_CST))
12041 ;
12042 else if (code == OMP_TASKLOOP
12043 || !DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
12044 OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
12045 = get_initialized_tmp_var (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
12046 pre_p, NULL, false);
12047 break;
12048
12049 case OMP_CLAUSE_DEFAULT:
12050 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
12051 break;
12052
12053 case OMP_CLAUSE_INCLUSIVE:
12054 case OMP_CLAUSE_EXCLUSIVE:
12055 decl = OMP_CLAUSE_DECL (c);
12056 {
12057 splay_tree_node n = splay_tree_lookup (outer_ctx->variables,
12058 (splay_tree_key) decl);
12059 if (n == NULL || (n->value & GOVD_REDUCTION) == 0)
12060 {
12061 error_at (OMP_CLAUSE_LOCATION (c),
12062 "%qD specified in %qs clause but not in %<inscan%> "
12063 "%<reduction%> clause on the containing construct",
12064 decl, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
12065 remove = true;
12066 }
12067 else
12068 {
12069 n->value |= GOVD_REDUCTION_INSCAN;
12070 if (outer_ctx->region_type == ORT_SIMD
12071 && outer_ctx->outer_context
12072 && outer_ctx->outer_context->region_type == ORT_WORKSHARE)
12073 {
12074 n = splay_tree_lookup (outer_ctx->outer_context->variables,
12075 (splay_tree_key) decl);
12076 if (n && (n->value & GOVD_REDUCTION) != 0)
12077 n->value |= GOVD_REDUCTION_INSCAN;
12078 }
12079 }
12080 }
12081 break;
12082
12083 case OMP_CLAUSE_NOHOST:
12084 default:
12085 gcc_unreachable ();
12086 }
12087
12088 if (code == OACC_DATA
12089 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12090 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12091 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12092 remove = true;
12093 if (remove)
12094 *list_p = OMP_CLAUSE_CHAIN (c);
12095 else
12096 list_p = &OMP_CLAUSE_CHAIN (c);
12097 }
12098
12099 ctx->clauses = *orig_list_p;
12100 gimplify_omp_ctxp = ctx;
12101 }
12102
12103 /* Return true if DECL is a candidate for the shared-to-firstprivate
12104 optimization. We only consider non-addressable scalars that are not
12105 too big and are not references. */
12106
12107 static bool
12108 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
12109 {
12110 if (TREE_ADDRESSABLE (decl))
12111 return false;
12112 tree type = TREE_TYPE (decl);
12113 if (!is_gimple_reg_type (type)
12114 || TREE_CODE (type) == REFERENCE_TYPE
12115 || TREE_ADDRESSABLE (type))
12116 return false;
12117 /* Don't optimize overly large decls, as each thread/task will have
12118 its own copy. */
12119 HOST_WIDE_INT len = int_size_in_bytes (type);
12120 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
12121 return false;
12122 if (omp_privatize_by_reference (decl))
12123 return false;
12124 return true;
12125 }
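/* For instance, on a typical LP64 target POINTER_SIZE is 64, so the
   cut-off above is 4 * 64 / 8 = 32 bytes: a 'double' qualifies, while
   anything larger, or variable-sized (where int_size_in_bytes returns
   -1), is rejected.  */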
12126
12127 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
12128 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
12129 GOVD_WRITTEN in outer contexts. */
12130
12131 static void
12132 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
12133 {
12134 for (; ctx; ctx = ctx->outer_context)
12135 {
12136 splay_tree_node n = splay_tree_lookup (ctx->variables,
12137 (splay_tree_key) decl);
12138 if (n == NULL)
12139 continue;
12140 else if (n->value & GOVD_SHARED)
12141 {
12142 n->value |= GOVD_WRITTEN;
12143 return;
12144 }
12145 else if (n->value & GOVD_DATA_SHARE_CLASS)
12146 return;
12147 }
12148 }
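/* The walk stops at the innermost enclosing context that assigns DECL a
   data-sharing class: if DECL is GOVD_SHARED there, the store is recorded
   via GOVD_WRITTEN (defeating the read-only shared-to-firstprivate
   optimization); if DECL is privatized there, outer contexts see a
   different copy and need no marking.  */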
12149
12150 /* Helper callback for walk_gimple_seq to discover possible stores
12151 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
12152 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
12153 for those. */
12154
12155 static tree
12156 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
12157 {
12158 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
12159
12160 *walk_subtrees = 0;
12161 if (!wi->is_lhs)
12162 return NULL_TREE;
12163
12164 tree op = *tp;
12165 do
12166 {
12167 if (handled_component_p (op))
12168 op = TREE_OPERAND (op, 0);
12169 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
12170 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
12171 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
12172 else
12173 break;
12174 }
12175 while (1);
12176 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
12177 return NULL_TREE;
12178
12179 omp_mark_stores (gimplify_omp_ctxp, op);
12180 return NULL_TREE;
12181 }
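/* For example, for a store such as 's.a[i] = x' the loop above peels the
   component and array references (and any MEM_REF-of-ADDR_EXPR wrapper,
   as in '(*&v).f') down to the base 's' before checking whether it is an
   optimizable decl.  */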
12182
12183 /* Helper callback for walk_gimple_seq to discover possible stores
12184 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
12185 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
12186 for those. */
12187
12188 static tree
12189 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
12190 bool *handled_ops_p,
12191 struct walk_stmt_info *wi)
12192 {
12193 gimple *stmt = gsi_stmt (*gsi_p);
12194 switch (gimple_code (stmt))
12195 {
12196 /* Don't recurse on OpenMP constructs for which
12197 gimplify_adjust_omp_clauses already handled the bodies;
12198 do, however, walk gimple_omp_for_pre_body. */
12199 case GIMPLE_OMP_FOR:
12200 *handled_ops_p = true;
12201 if (gimple_omp_for_pre_body (stmt))
12202 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
12203 omp_find_stores_stmt, omp_find_stores_op, wi);
12204 break;
12205 case GIMPLE_OMP_PARALLEL:
12206 case GIMPLE_OMP_TASK:
12207 case GIMPLE_OMP_SECTIONS:
12208 case GIMPLE_OMP_SINGLE:
12209 case GIMPLE_OMP_SCOPE:
12210 case GIMPLE_OMP_TARGET:
12211 case GIMPLE_OMP_TEAMS:
12212 case GIMPLE_OMP_CRITICAL:
12213 *handled_ops_p = true;
12214 break;
12215 default:
12216 break;
12217 }
12218 return NULL_TREE;
12219 }
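/* GIMPLE_OMP_FOR is special-cased above: unlike the construct bodies,
   its pre-body (the loop's setup sequence) is not covered by the
   per-construct processing, so stores in it must still be scanned.  */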
12220
12221 struct gimplify_adjust_omp_clauses_data
12222 {
12223 tree *list_p;
12224 gimple_seq *pre_p;
12225 };
12226
12227 /* For all variables that were not actually used within the context,
12228 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
12229
12230 static int
12231 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
12232 {
12233 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
12234 gimple_seq *pre_p
12235 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
12236 tree decl = (tree) n->key;
12237 unsigned flags = n->value;
12238 enum omp_clause_code code;
12239 tree clause;
12240 bool private_debug;
12241
12242 if (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
12243 && (flags & GOVD_LASTPRIVATE_CONDITIONAL) != 0)
12244 flags = GOVD_SHARED | GOVD_SEEN | GOVD_WRITTEN;
12245 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
12246 return 0;
12247 if ((flags & GOVD_SEEN) == 0)
12248 return 0;
12249 if (flags & GOVD_DEBUG_PRIVATE)
12250 {
12251 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
12252 private_debug = true;
12253 }
12254 else if (flags & GOVD_MAP)
12255 private_debug = false;
12256 else
12257 private_debug
12258 = lang_hooks.decls.omp_private_debug_clause (decl,
12259 !!(flags & GOVD_SHARED));
12260 if (private_debug)
12261 code = OMP_CLAUSE_PRIVATE;
12262 else if (flags & GOVD_MAP)
12263 {
12264 code = OMP_CLAUSE_MAP;
12265 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
12266 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
12267 {
12268 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
12269 return 0;
12270 }
12271 if (VAR_P (decl)
12272 && DECL_IN_CONSTANT_POOL (decl)
12273 && !lookup_attribute ("omp declare target",
12274 DECL_ATTRIBUTES (decl)))
12275 {
12276 tree id = get_identifier ("omp declare target");
12277 DECL_ATTRIBUTES (decl)
12278 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
12279 varpool_node *node = varpool_node::get (decl);
12280 if (node)
12281 {
12282 node->offloadable = 1;
12283 if (ENABLE_OFFLOADING)
12284 g->have_offload = true;
12285 }
12286 }
12287 }
12288 else if (flags & GOVD_SHARED)
12289 {
12290 if (is_global_var (decl))
12291 {
12292 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
12293 while (ctx != NULL)
12294 {
12295 splay_tree_node on
12296 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12297 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
12298 | GOVD_PRIVATE | GOVD_REDUCTION
12299 | GOVD_LINEAR | GOVD_MAP)) != 0)
12300 break;
12301 ctx = ctx->outer_context;
12302 }
12303 if (ctx == NULL)
12304 return 0;
12305 }
12306 code = OMP_CLAUSE_SHARED;
12307 /* Don't optimize shared into firstprivate for read-only vars
12308 on tasks with a depend clause; we shouldn't try to copy them
12309 until the dependencies are satisfied. */
12310 if (gimplify_omp_ctxp->has_depend)
12311 flags |= GOVD_WRITTEN;
12312 }
12313 else if (flags & GOVD_PRIVATE)
12314 code = OMP_CLAUSE_PRIVATE;
12315 else if (flags & GOVD_FIRSTPRIVATE)
12316 {
12317 code = OMP_CLAUSE_FIRSTPRIVATE;
12318 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
12319 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
12320 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
12321 {
12322 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
12323 "%<target%> construct", decl);
12324 return 0;
12325 }
12326 }
12327 else if (flags & GOVD_LASTPRIVATE)
12328 code = OMP_CLAUSE_LASTPRIVATE;
12329 else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
12330 return 0;
12331 else if (flags & GOVD_CONDTEMP)
12332 {
12333 code = OMP_CLAUSE__CONDTEMP_;
12334 gimple_add_tmp_var (decl);
12335 }
12336 else
12337 gcc_unreachable ();
12338
12339 if (((flags & GOVD_LASTPRIVATE)
12340 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
12341 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12342 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
12343
12344 tree chain = *list_p;
12345 clause = build_omp_clause (input_location, code);
12346 OMP_CLAUSE_DECL (clause) = decl;
12347 OMP_CLAUSE_CHAIN (clause) = chain;
12348 if (private_debug)
12349 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
12350 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
12351 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
12352 else if (code == OMP_CLAUSE_SHARED
12353 && (flags & GOVD_WRITTEN) == 0
12354 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12355 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
12356 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
12357 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
12358 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
12359 {
12360 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
12361 OMP_CLAUSE_DECL (nc) = decl;
12362 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
12363 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
12364 OMP_CLAUSE_DECL (clause)
12365 = build_simple_mem_ref_loc (input_location, decl);
12366 OMP_CLAUSE_DECL (clause)
12367 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
12368 build_int_cst (build_pointer_type (char_type_node), 0));
12369 OMP_CLAUSE_SIZE (clause) = size_zero_node;
12370 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12371 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
12372 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
12373 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
12374 OMP_CLAUSE_CHAIN (nc) = chain;
12375 OMP_CLAUSE_CHAIN (clause) = nc;
12376 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12377 gimplify_omp_ctxp = ctx->outer_context;
12378 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
12379 pre_p, NULL, is_gimple_val, fb_rvalue);
12380 gimplify_omp_ctxp = ctx;
12381 }
12382 else if (code == OMP_CLAUSE_MAP)
12383 {
12384 int kind;
12385 /* Not all combinations of these GOVD_MAP flags are actually valid. */
12386 switch (flags & (GOVD_MAP_TO_ONLY
12387 | GOVD_MAP_FORCE
12388 | GOVD_MAP_FORCE_PRESENT
12389 | GOVD_MAP_ALLOC_ONLY
12390 | GOVD_MAP_FROM_ONLY))
12391 {
12392 case 0:
12393 kind = GOMP_MAP_TOFROM;
12394 break;
12395 case GOVD_MAP_FORCE:
12396 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
12397 break;
12398 case GOVD_MAP_TO_ONLY:
12399 kind = GOMP_MAP_TO;
12400 break;
12401 case GOVD_MAP_FROM_ONLY:
12402 kind = GOMP_MAP_FROM;
12403 break;
12404 case GOVD_MAP_ALLOC_ONLY:
12405 kind = GOMP_MAP_ALLOC;
12406 break;
12407 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
12408 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
12409 break;
12410 case GOVD_MAP_FORCE_PRESENT:
12411 kind = GOMP_MAP_FORCE_PRESENT;
12412 break;
12413 default:
12414 gcc_unreachable ();
12415 }
12416 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
12417 /* Setting the implicit flag for the runtime is currently disabled
12418 for OpenACC. */
12419 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
12420 OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (clause) = 1;
12421 if (DECL_SIZE (decl)
12422 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
12423 {
12424 tree decl2 = DECL_VALUE_EXPR (decl);
12425 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
12426 decl2 = TREE_OPERAND (decl2, 0);
12427 gcc_assert (DECL_P (decl2));
12428 tree mem = build_simple_mem_ref (decl2);
12429 OMP_CLAUSE_DECL (clause) = mem;
12430 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
12431 if (gimplify_omp_ctxp->outer_context)
12432 {
12433 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
12434 omp_notice_variable (ctx, decl2, true);
12435 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
12436 }
12437 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
12438 OMP_CLAUSE_MAP);
12439 OMP_CLAUSE_DECL (nc) = decl;
12440 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12441 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
12442 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
12443 else
12444 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
12445 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
12446 OMP_CLAUSE_CHAIN (clause) = nc;
12447 }
12448 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
12449 && omp_privatize_by_reference (decl))
12450 {
12451 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
12452 OMP_CLAUSE_SIZE (clause)
12453 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
12454 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12455 gimplify_omp_ctxp = ctx->outer_context;
12456 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
12457 pre_p, NULL, is_gimple_val, fb_rvalue);
12458 gimplify_omp_ctxp = ctx;
12459 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
12460 OMP_CLAUSE_MAP);
12461 OMP_CLAUSE_DECL (nc) = decl;
12462 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12463 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
12464 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
12465 OMP_CLAUSE_CHAIN (clause) = nc;
12466 }
12467 else
12468 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
12469 }
12470 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
12471 {
12472 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
12473 OMP_CLAUSE_DECL (nc) = decl;
12474 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
12475 OMP_CLAUSE_CHAIN (nc) = chain;
12476 OMP_CLAUSE_CHAIN (clause) = nc;
12477 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12478 gimplify_omp_ctxp = ctx->outer_context;
12479 lang_hooks.decls.omp_finish_clause (nc, pre_p,
12480 (ctx->region_type & ORT_ACC) != 0);
12481 gimplify_omp_ctxp = ctx;
12482 }
12483 *list_p = clause;
12484 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12485 gimplify_omp_ctxp = ctx->outer_context;
12486 /* Don't call omp_finish_clause on implicitly added OMP_CLAUSE_PRIVATE
12487 in simd. Those are only added for the local vars inside the simd
12488 body and they don't need to be e.g. default constructible. */
12489 if (code != OMP_CLAUSE_PRIVATE || ctx->region_type != ORT_SIMD)
12490 lang_hooks.decls.omp_finish_clause (clause, pre_p,
12491 (ctx->region_type & ORT_ACC) != 0);
12492 if (gimplify_omp_ctxp)
12493 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
12494 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
12495 && DECL_P (OMP_CLAUSE_SIZE (clause)))
12496 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
12497 true);
12498 gimplify_omp_ctxp = ctx;
12499 return 0;
12500 }
12501
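/* A minimal sketch (hypothetical names, not part of this file) of the
   implicit clauses that gimplify_adjust_omp_clauses_1 builds: under the
   usual OpenMP defaults the scalar A gets an implicit firstprivate
   clause on the target construct (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT set),
   while the array B typically gets an implicit map(tofrom:b) clause
   synthesized from its GOVD_MAP flags. */

void
sketch_implicit_target_clauses (void)
{
  int a = 1;
  int b[10];
  #pragma omp target	/* No explicit data clauses given. */
  {
    b[0] = a;		/* A -> firstprivate, B -> map(tofrom:b). */
  }
}
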
12502 static void
12503 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
12504 enum tree_code code)
12505 {
12506 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12507 tree *orig_list_p = list_p;
12508 tree c, decl;
12509 bool has_inscan_reductions = false;
12510
12511 if (body)
12512 {
12513 struct gimplify_omp_ctx *octx;
12514 for (octx = ctx; octx; octx = octx->outer_context)
12515 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
12516 break;
12517 if (octx)
12518 {
12519 struct walk_stmt_info wi;
12520 memset (&wi, 0, sizeof (wi));
12521 walk_gimple_seq (body, omp_find_stores_stmt,
12522 omp_find_stores_op, &wi);
12523 }
12524 }
12525
12526 if (ctx->add_safelen1)
12527 {
12528 /* If there are VLAs in the body of the simd loop, prevent
12529 vectorization. */
12530 gcc_assert (ctx->region_type == ORT_SIMD);
12531 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
12532 OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
12533 OMP_CLAUSE_CHAIN (c) = *list_p;
12534 *list_p = c;
12535 list_p = &OMP_CLAUSE_CHAIN (c);
12536 }
12537
12538 if (ctx->region_type == ORT_WORKSHARE
12539 && ctx->outer_context
12540 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
12541 {
12542 for (c = ctx->outer_context->clauses; c; c = OMP_CLAUSE_CHAIN (c))
12543 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12544 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12545 {
12546 decl = OMP_CLAUSE_DECL (c);
12547 splay_tree_node n
12548 = splay_tree_lookup (ctx->outer_context->variables,
12549 (splay_tree_key) decl);
12550 gcc_checking_assert (!splay_tree_lookup (ctx->variables,
12551 (splay_tree_key) decl));
12552 omp_add_variable (ctx, decl, n->value);
12553 tree c2 = copy_node (c);
12554 OMP_CLAUSE_CHAIN (c2) = *list_p;
12555 *list_p = c2;
12556 if ((n->value & GOVD_FIRSTPRIVATE) == 0)
12557 continue;
12558 c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12559 OMP_CLAUSE_FIRSTPRIVATE);
12560 OMP_CLAUSE_DECL (c2) = decl;
12561 OMP_CLAUSE_CHAIN (c2) = *list_p;
12562 *list_p = c2;
12563 }
12564 }
12565
12566 tree attach_list = NULL_TREE;
12567 tree *attach_tail = &attach_list;
12568
12569 while ((c = *list_p) != NULL)
12570 {
12571 splay_tree_node n;
12572 bool remove = false;
12573 bool move_attach = false;
12574
12575 switch (OMP_CLAUSE_CODE (c))
12576 {
12577 case OMP_CLAUSE_FIRSTPRIVATE:
12578 if ((ctx->region_type & ORT_TARGET)
12579 && (ctx->region_type & ORT_ACC) == 0
12580 && TYPE_ATOMIC (strip_array_types
12581 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
12582 {
12583 error_at (OMP_CLAUSE_LOCATION (c),
12584 "%<_Atomic%> %qD in %<firstprivate%> clause on "
12585 "%<target%> construct", OMP_CLAUSE_DECL (c));
12586 remove = true;
12587 break;
12588 }
12589 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12590 {
12591 decl = OMP_CLAUSE_DECL (c);
12592 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12593 if ((n->value & GOVD_MAP) != 0)
12594 {
12595 remove = true;
12596 break;
12597 }
12598 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c) = 0;
12599 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) = 0;
12600 }
12601 /* FALLTHRU */
12602 case OMP_CLAUSE_PRIVATE:
12603 case OMP_CLAUSE_SHARED:
12604 case OMP_CLAUSE_LINEAR:
12605 decl = OMP_CLAUSE_DECL (c);
12606 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12607 remove = !(n->value & GOVD_SEEN);
12608 if ((n->value & GOVD_LASTPRIVATE_CONDITIONAL) != 0
12609 && code == OMP_PARALLEL
12610 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12611 remove = true;
12612 if (! remove)
12613 {
12614 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
12615 if ((n->value & GOVD_DEBUG_PRIVATE)
12616 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
12617 {
12618 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
12619 || ((n->value & GOVD_DATA_SHARE_CLASS)
12620 == GOVD_SHARED));
12621 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
12622 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
12623 }
12624 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
12625 && ctx->has_depend
12626 && DECL_P (decl))
12627 n->value |= GOVD_WRITTEN;
12628 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
12629 && (n->value & GOVD_WRITTEN) == 0
12630 && DECL_P (decl)
12631 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12632 OMP_CLAUSE_SHARED_READONLY (c) = 1;
12633 else if (DECL_P (decl)
12634 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
12635 && (n->value & GOVD_WRITTEN) != 0)
12636 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
12637 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
12638 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12639 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
12640 }
12641 else
12642 n->value &= ~GOVD_EXPLICIT;
12643 break;
12644
12645 case OMP_CLAUSE_LASTPRIVATE:
12646 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
12647 accurately reflect the presence of a FIRSTPRIVATE clause. */
12648 decl = OMP_CLAUSE_DECL (c);
12649 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12650 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
12651 = (n->value & GOVD_FIRSTPRIVATE) != 0;
12652 if (code == OMP_DISTRIBUTE
12653 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
12654 {
12655 remove = true;
12656 error_at (OMP_CLAUSE_LOCATION (c),
12657 "same variable used in %<firstprivate%> and "
12658 "%<lastprivate%> clauses on %<distribute%> "
12659 "construct");
12660 }
12661 if (!remove
12662 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12663 && DECL_P (decl)
12664 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12665 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
12666 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) && code == OMP_PARALLEL)
12667 remove = true;
12668 break;
12669
12670 case OMP_CLAUSE_ALIGNED:
12671 decl = OMP_CLAUSE_DECL (c);
12672 if (!is_global_var (decl))
12673 {
12674 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12675 remove = n == NULL || !(n->value & GOVD_SEEN);
12676 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
12677 {
12678 struct gimplify_omp_ctx *octx;
12679 if (n != NULL
12680 && (n->value & (GOVD_DATA_SHARE_CLASS
12681 & ~GOVD_FIRSTPRIVATE)))
12682 remove = true;
12683 else
12684 for (octx = ctx->outer_context; octx;
12685 octx = octx->outer_context)
12686 {
12687 n = splay_tree_lookup (octx->variables,
12688 (splay_tree_key) decl);
12689 if (n == NULL)
12690 continue;
12691 if (n->value & GOVD_LOCAL)
12692 break;
12693 /* We have to avoid assigning a shared variable
12694 to itself when trying to add
12695 __builtin_assume_aligned. */
12696 if (n->value & GOVD_SHARED)
12697 {
12698 remove = true;
12699 break;
12700 }
12701 }
12702 }
12703 }
12704 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
12705 {
12706 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12707 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
12708 remove = true;
12709 }
12710 break;
12711
12712 case OMP_CLAUSE_HAS_DEVICE_ADDR:
12713 decl = OMP_CLAUSE_DECL (c);
12714 while (TREE_CODE (decl) == INDIRECT_REF
12715 || TREE_CODE (decl) == ARRAY_REF)
12716 decl = TREE_OPERAND (decl, 0);
12717 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12718 remove = n == NULL || !(n->value & GOVD_SEEN);
12719 break;
12720
12721 case OMP_CLAUSE_IS_DEVICE_PTR:
12722 case OMP_CLAUSE_NONTEMPORAL:
12723 decl = OMP_CLAUSE_DECL (c);
12724 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12725 remove = n == NULL || !(n->value & GOVD_SEEN);
12726 break;
12727
12728 case OMP_CLAUSE_MAP:
12729 if (code == OMP_TARGET_EXIT_DATA
12730 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
12731 {
12732 remove = true;
12733 break;
12734 }
12735 /* If we have a target region, we can push all the attaches to the
12736 end of the list (we may have standalone "attach" operations
12737 synthesized for GOMP_MAP_STRUCT nodes that must be processed after
12738 the attachment point AND the pointed-to block have been mapped).
12739 If we have something else, e.g. "enter data", we need to keep
12740 "attach" nodes together with the previous node they attach to so
12741 that separate "exit data" operations work properly (see
12742 libgomp/target.c). */
12743 if ((ctx->region_type & ORT_TARGET) != 0
12744 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12745 || (OMP_CLAUSE_MAP_KIND (c)
12746 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)))
12747 move_attach = true;
12748 decl = OMP_CLAUSE_DECL (c);
12749 /* Data clauses associated with reductions must be
12750 compatible with present_or_copy. Warn and adjust the clause
12751 if that is not the case. */
12752 if (ctx->region_type == ORT_ACC_PARALLEL
12753 || ctx->region_type == ORT_ACC_SERIAL)
12754 {
12755 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
12756 n = NULL;
12757
12758 if (DECL_P (t))
12759 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
12760
12761 if (n && (n->value & GOVD_REDUCTION))
12762 {
12763 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
12764
12765 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
12766 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
12767 && kind != GOMP_MAP_FORCE_PRESENT
12768 && kind != GOMP_MAP_POINTER)
12769 {
12770 warning_at (OMP_CLAUSE_LOCATION (c), 0,
12771 "incompatible data clause with reduction "
12772 "on %qE; promoting to %<present_or_copy%>",
12773 DECL_NAME (t));
12774 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
12775 }
12776 }
12777 }
12778 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
12779 && (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA))
12780 {
12781 remove = true;
12782 break;
12783 }
12784 if (!DECL_P (decl))
12785 {
12786 if ((ctx->region_type & ORT_TARGET) != 0
12787 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
12788 {
12789 if (TREE_CODE (decl) == INDIRECT_REF
12790 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
12791 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
12792 == REFERENCE_TYPE))
12793 decl = TREE_OPERAND (decl, 0);
12794 if (TREE_CODE (decl) == COMPONENT_REF)
12795 {
12796 while (TREE_CODE (decl) == COMPONENT_REF)
12797 decl = TREE_OPERAND (decl, 0);
12798 if (DECL_P (decl))
12799 {
12800 n = splay_tree_lookup (ctx->variables,
12801 (splay_tree_key) decl);
12802 if (!(n->value & GOVD_SEEN))
12803 remove = true;
12804 }
12805 }
12806 }
12807 break;
12808 }
12809 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12810 if ((ctx->region_type & ORT_TARGET) != 0
12811 && !(n->value & GOVD_SEEN)
12812 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
12813 && (!is_global_var (decl)
12814 || !lookup_attribute ("omp declare target link",
12815 DECL_ATTRIBUTES (decl))))
12816 {
12817 remove = true;
12818 /* For struct element mappings, if the struct is never referenced
12819 in the target block and none of the mappings has the always
12820 modifier, remove all the struct element mappings, which
12821 immediately follow the GOMP_MAP_STRUCT map clause. */
12822 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
12823 {
12824 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
12825 while (cnt--)
12826 OMP_CLAUSE_CHAIN (c)
12827 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
12828 }
12829 }
12830 else if (DECL_SIZE (decl)
12831 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
12832 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
12833 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
12834 && (OMP_CLAUSE_MAP_KIND (c)
12835 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12836 {
12837 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
12838 for these, TREE_CODE (DECL_SIZE (decl)) will always be
12839 INTEGER_CST. */
12840 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
12841
12842 tree decl2 = DECL_VALUE_EXPR (decl);
12843 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
12844 decl2 = TREE_OPERAND (decl2, 0);
12845 gcc_assert (DECL_P (decl2));
12846 tree mem = build_simple_mem_ref (decl2);
12847 OMP_CLAUSE_DECL (c) = mem;
12848 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
12849 if (ctx->outer_context)
12850 {
12851 omp_notice_variable (ctx->outer_context, decl2, true);
12852 omp_notice_variable (ctx->outer_context,
12853 OMP_CLAUSE_SIZE (c), true);
12854 }
12855 if (((ctx->region_type & ORT_TARGET) != 0
12856 || !ctx->target_firstprivatize_array_bases)
12857 && ((n->value & GOVD_SEEN) == 0
12858 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
12859 {
12860 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12861 OMP_CLAUSE_MAP);
12862 OMP_CLAUSE_DECL (nc) = decl;
12863 OMP_CLAUSE_SIZE (nc) = size_zero_node;
12864 if (ctx->target_firstprivatize_array_bases)
12865 OMP_CLAUSE_SET_MAP_KIND (nc,
12866 GOMP_MAP_FIRSTPRIVATE_POINTER);
12867 else
12868 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
12869 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
12870 OMP_CLAUSE_CHAIN (c) = nc;
12871 c = nc;
12872 }
12873 }
12874 else
12875 {
12876 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
12877 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
12878 gcc_assert ((n->value & GOVD_SEEN) == 0
12879 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
12880 == 0));
12881 }
12882 break;
12883
12884 case OMP_CLAUSE_TO:
12885 case OMP_CLAUSE_FROM:
12886 case OMP_CLAUSE__CACHE_:
12887 decl = OMP_CLAUSE_DECL (c);
12888 if (!DECL_P (decl))
12889 break;
12890 if (DECL_SIZE (decl)
12891 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
12892 {
12893 tree decl2 = DECL_VALUE_EXPR (decl);
12894 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
12895 decl2 = TREE_OPERAND (decl2, 0);
12896 gcc_assert (DECL_P (decl2));
12897 tree mem = build_simple_mem_ref (decl2);
12898 OMP_CLAUSE_DECL (c) = mem;
12899 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
12900 if (ctx->outer_context)
12901 {
12902 omp_notice_variable (ctx->outer_context, decl2, true);
12903 omp_notice_variable (ctx->outer_context,
12904 OMP_CLAUSE_SIZE (c), true);
12905 }
12906 }
12907 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
12908 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
12909 break;
12910
12911 case OMP_CLAUSE_REDUCTION:
12912 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
12913 {
12914 decl = OMP_CLAUSE_DECL (c);
12915 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12916 if ((n->value & GOVD_REDUCTION_INSCAN) == 0)
12917 {
12918 remove = true;
12919 error_at (OMP_CLAUSE_LOCATION (c),
12920 "%qD specified in %<inscan%> %<reduction%> clause "
12921 "but not in %<scan%> directive clause", decl);
12922 break;
12923 }
12924 has_inscan_reductions = true;
12925 }
12926 /* FALLTHRU */
12927 case OMP_CLAUSE_IN_REDUCTION:
12928 case OMP_CLAUSE_TASK_REDUCTION:
12929 decl = OMP_CLAUSE_DECL (c);
12930 /* OpenACC reductions need a present_or_copy data clause.
12931 Add one if necessary. Emit an error when the reduction is private. */
12932 if (ctx->region_type == ORT_ACC_PARALLEL
12933 || ctx->region_type == ORT_ACC_SERIAL)
12934 {
12935 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12936 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
12937 {
12938 remove = true;
12939 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
12940 "reduction on %qE", DECL_NAME (decl));
12941 }
12942 else if ((n->value & GOVD_MAP) == 0)
12943 {
12944 tree next = OMP_CLAUSE_CHAIN (c);
12945 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
12946 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
12947 OMP_CLAUSE_DECL (nc) = decl;
12948 OMP_CLAUSE_CHAIN (c) = nc;
12949 lang_hooks.decls.omp_finish_clause (nc, pre_p,
12950 (ctx->region_type
12951 & ORT_ACC) != 0);
12952 while (1)
12953 {
12954 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
12955 if (OMP_CLAUSE_CHAIN (nc) == NULL)
12956 break;
12957 nc = OMP_CLAUSE_CHAIN (nc);
12958 }
12959 OMP_CLAUSE_CHAIN (nc) = next;
12960 n->value |= GOVD_MAP;
12961 }
12962 }
12963 if (DECL_P (decl)
12964 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
12965 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
12966 break;
12967
12968 case OMP_CLAUSE_ALLOCATE:
12969 decl = OMP_CLAUSE_DECL (c);
12970 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
12971 if (n != NULL && !(n->value & GOVD_SEEN))
12972 {
12973 if ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE | GOVD_LINEAR))
12974 != 0
12975 && (n->value & (GOVD_REDUCTION | GOVD_LASTPRIVATE)) == 0)
12976 remove = true;
12977 }
12978 if (!remove
12979 && OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
12980 && TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)) != INTEGER_CST
12981 && ((ctx->region_type & (ORT_PARALLEL | ORT_TARGET)) != 0
12982 || (ctx->region_type & ORT_TASKLOOP) == ORT_TASK
12983 || (ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS))
12984 {
12985 tree allocator = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
12986 n = splay_tree_lookup (ctx->variables, (splay_tree_key) allocator);
12987 if (n == NULL)
12988 {
12989 enum omp_clause_default_kind default_kind
12990 = ctx->default_kind;
12991 ctx->default_kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
12992 omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
12993 true);
12994 ctx->default_kind = default_kind;
12995 }
12996 else
12997 omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
12998 true);
12999 }
13000 break;
13001
13002 case OMP_CLAUSE_COPYIN:
13003 case OMP_CLAUSE_COPYPRIVATE:
13004 case OMP_CLAUSE_IF:
13005 case OMP_CLAUSE_NUM_THREADS:
13006 case OMP_CLAUSE_NUM_TEAMS:
13007 case OMP_CLAUSE_THREAD_LIMIT:
13008 case OMP_CLAUSE_DIST_SCHEDULE:
13009 case OMP_CLAUSE_DEVICE:
13010 case OMP_CLAUSE_SCHEDULE:
13011 case OMP_CLAUSE_NOWAIT:
13012 case OMP_CLAUSE_ORDERED:
13013 case OMP_CLAUSE_DEFAULT:
13014 case OMP_CLAUSE_UNTIED:
13015 case OMP_CLAUSE_COLLAPSE:
13016 case OMP_CLAUSE_FINAL:
13017 case OMP_CLAUSE_MERGEABLE:
13018 case OMP_CLAUSE_PROC_BIND:
13019 case OMP_CLAUSE_SAFELEN:
13020 case OMP_CLAUSE_SIMDLEN:
13021 case OMP_CLAUSE_DEPEND:
13022 case OMP_CLAUSE_DOACROSS:
13023 case OMP_CLAUSE_PRIORITY:
13024 case OMP_CLAUSE_GRAINSIZE:
13025 case OMP_CLAUSE_NUM_TASKS:
13026 case OMP_CLAUSE_NOGROUP:
13027 case OMP_CLAUSE_THREADS:
13028 case OMP_CLAUSE_SIMD:
13029 case OMP_CLAUSE_FILTER:
13030 case OMP_CLAUSE_HINT:
13031 case OMP_CLAUSE_DEFAULTMAP:
13032 case OMP_CLAUSE_ORDER:
13033 case OMP_CLAUSE_BIND:
13034 case OMP_CLAUSE_DETACH:
13035 case OMP_CLAUSE_USE_DEVICE_PTR:
13036 case OMP_CLAUSE_USE_DEVICE_ADDR:
13037 case OMP_CLAUSE_ASYNC:
13038 case OMP_CLAUSE_WAIT:
13039 case OMP_CLAUSE_INDEPENDENT:
13040 case OMP_CLAUSE_NUM_GANGS:
13041 case OMP_CLAUSE_NUM_WORKERS:
13042 case OMP_CLAUSE_VECTOR_LENGTH:
13043 case OMP_CLAUSE_GANG:
13044 case OMP_CLAUSE_WORKER:
13045 case OMP_CLAUSE_VECTOR:
13046 case OMP_CLAUSE_AUTO:
13047 case OMP_CLAUSE_SEQ:
13048 case OMP_CLAUSE_TILE:
13049 case OMP_CLAUSE_IF_PRESENT:
13050 case OMP_CLAUSE_FINALIZE:
13051 case OMP_CLAUSE_INCLUSIVE:
13052 case OMP_CLAUSE_EXCLUSIVE:
13053 break;
13054
13055 case OMP_CLAUSE_NOHOST:
13056 default:
13057 gcc_unreachable ();
13058 }
13059
13060 if (remove)
13061 *list_p = OMP_CLAUSE_CHAIN (c);
13062 else if (move_attach)
13063 {
13064 /* Remove the attach node from here; separate it out into its own list. */
13065 *attach_tail = c;
13066 *list_p = OMP_CLAUSE_CHAIN (c);
13067 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
13068 attach_tail = &OMP_CLAUSE_CHAIN (c);
13069 }
13070 else
13071 list_p = &OMP_CLAUSE_CHAIN (c);
13072 }
13073
13074 /* Splice attach nodes at the end of the list. */
13075 if (attach_list)
13076 {
13077 *list_p = attach_list;
13078 list_p = attach_tail;
13079 }
13080
13081 /* Add in any implicit data sharing. */
13082 struct gimplify_adjust_omp_clauses_data data;
13083 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
13084 {
13085 /* OpenMP. Implicit clauses are added at the start of the clause list,
13086 but after any non-map clauses. */
13087 tree *implicit_add_list_p = orig_list_p;
13088 while (*implicit_add_list_p
13089 && OMP_CLAUSE_CODE (*implicit_add_list_p) != OMP_CLAUSE_MAP)
13090 implicit_add_list_p = &OMP_CLAUSE_CHAIN (*implicit_add_list_p);
13091 data.list_p = implicit_add_list_p;
13092 }
13093 else
13094 /* OpenACC. */
13095 data.list_p = list_p;
13096 data.pre_p = pre_p;
13097 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
13098
13099 if (has_inscan_reductions)
13100 for (c = *orig_list_p; c; c = OMP_CLAUSE_CHAIN (c))
13101 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
13102 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
13103 {
13104 error_at (OMP_CLAUSE_LOCATION (c),
13105 "%<inscan%> %<reduction%> clause used together with "
13106 "%<linear%> clause for a variable other than loop "
13107 "iterator");
13108 break;
13109 }
13110
13111 gimplify_omp_ctxp = ctx->outer_context;
13112 delete_omp_context (ctx);
13113 }
13114
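/* A minimal sketch (hypothetical names) of the pruning done above: a
   data-sharing clause whose variable never acquires GOVD_SEEN is removed
   from the clause list, so private(unused) below does not survive
   gimplification, while N, which is only read, can have
   OMP_CLAUSE_SHARED_READONLY set on its shared clause. */

void
sketch_clause_pruning (int n)
{
  int unused = 0;
  #pragma omp parallel private (unused) shared (n)
  {
    if (n > 0)		/* N is read but never written here. */
      __builtin_printf ("%d\n", n);
  }
}
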
13115 /* Return 0 if CONSTRUCTS selectors don't match the OpenMP context,
13116 -1 if not known yet (simd is involved; that won't be known until
13117 vectorization) and 1 if they do. If SCORES is non-NULL, it should
13118 point to an array of at least 2*NCONSTRUCTS+2 ints, and will be
13119 filled with the positions of the CONSTRUCTS (position -1 if one will
13120 never match) followed by the number of constructs in the OpenMP
13121 context construct trait. If the score depends on whether it will be
13122 in a declare simd clone or not, the function returns 2 and there will
13123 be two sets of scores, the first one for the case that it is not in
13124 a declare simd clone, the other for the case that it is. */
13125
13126 int
13127 omp_construct_selector_matches (enum tree_code *constructs, int nconstructs,
13128 int *scores)
13129 {
13130 int matched = 0, cnt = 0;
13131 bool simd_seen = false;
13132 bool target_seen = false;
13133 int declare_simd_cnt = -1;
13134 auto_vec<enum tree_code, 16> codes;
13135 for (struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; ctx;)
13136 {
13137 if (((ctx->region_type & ORT_PARALLEL) && ctx->code == OMP_PARALLEL)
13138 || ((ctx->region_type & (ORT_TARGET | ORT_IMPLICIT_TARGET | ORT_ACC))
13139 == ORT_TARGET && ctx->code == OMP_TARGET)
13140 || ((ctx->region_type & ORT_TEAMS) && ctx->code == OMP_TEAMS)
13141 || (ctx->region_type == ORT_WORKSHARE && ctx->code == OMP_FOR)
13142 || (ctx->region_type == ORT_SIMD
13143 && ctx->code == OMP_SIMD
13144 && !omp_find_clause (ctx->clauses, OMP_CLAUSE_BIND)))
13145 {
13146 ++cnt;
13147 if (scores)
13148 codes.safe_push (ctx->code);
13149 else if (matched < nconstructs && ctx->code == constructs[matched])
13150 {
13151 if (ctx->code == OMP_SIMD)
13152 {
13153 if (matched)
13154 return 0;
13155 simd_seen = true;
13156 }
13157 ++matched;
13158 }
13159 if (ctx->code == OMP_TARGET)
13160 {
13161 if (scores == NULL)
13162 return matched < nconstructs ? 0 : simd_seen ? -1 : 1;
13163 target_seen = true;
13164 break;
13165 }
13166 }
13167 else if (ctx->region_type == ORT_WORKSHARE
13168 && ctx->code == OMP_LOOP
13169 && ctx->outer_context
13170 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL
13171 && ctx->outer_context->outer_context
13172 && ctx->outer_context->outer_context->code == OMP_LOOP
13173 && ctx->outer_context->outer_context->distribute)
13174 ctx = ctx->outer_context->outer_context;
13175 ctx = ctx->outer_context;
13176 }
13177 if (!target_seen
13178 && lookup_attribute ("omp declare simd",
13179 DECL_ATTRIBUTES (current_function_decl)))
13180 {
13181 /* Declare simd is a maybe case: it is supposed to be added only to
13182 the clones created by omp-simd-clone.cc, not to the base function. */
13183 declare_simd_cnt = cnt++;
13184 if (scores)
13185 codes.safe_push (OMP_SIMD);
13186 else if (cnt == 0
13187 && constructs[0] == OMP_SIMD)
13188 {
13189 gcc_assert (matched == 0);
13190 simd_seen = true;
13191 if (++matched == nconstructs)
13192 return -1;
13193 }
13194 }
13195 if (tree attr = lookup_attribute ("omp declare variant variant",
13196 DECL_ATTRIBUTES (current_function_decl)))
13197 {
13198 enum tree_code variant_constructs[5];
13199 int variant_nconstructs = 0;
13200 if (!target_seen)
13201 variant_nconstructs
13202 = omp_constructor_traits_to_codes (TREE_VALUE (attr),
13203 variant_constructs);
13204 for (int i = 0; i < variant_nconstructs; i++)
13205 {
13206 ++cnt;
13207 if (scores)
13208 codes.safe_push (variant_constructs[i]);
13209 else if (matched < nconstructs
13210 && variant_constructs[i] == constructs[matched])
13211 {
13212 if (variant_constructs[i] == OMP_SIMD)
13213 {
13214 if (matched)
13215 return 0;
13216 simd_seen = true;
13217 }
13218 ++matched;
13219 }
13220 }
13221 }
13222 if (!target_seen
13223 && lookup_attribute ("omp declare target block",
13224 DECL_ATTRIBUTES (current_function_decl)))
13225 {
13226 if (scores)
13227 codes.safe_push (OMP_TARGET);
13228 else if (matched < nconstructs && constructs[matched] == OMP_TARGET)
13229 ++matched;
13230 }
13231 if (scores)
13232 {
13233 for (int pass = 0; pass < (declare_simd_cnt == -1 ? 1 : 2); pass++)
13234 {
13235 int j = codes.length () - 1;
13236 for (int i = nconstructs - 1; i >= 0; i--)
13237 {
13238 while (j >= 0
13239 && (pass != 0 || declare_simd_cnt != j)
13240 && constructs[i] != codes[j])
13241 --j;
13242 if (pass == 0 && declare_simd_cnt != -1 && j > declare_simd_cnt)
13243 *scores++ = j - 1;
13244 else
13245 *scores++ = j;
13246 }
13247 *scores++ = ((pass == 0 && declare_simd_cnt != -1)
13248 ? codes.length () - 1 : codes.length ());
13249 }
13250 return declare_simd_cnt == -1 ? 1 : 2;
13251 }
13252 if (matched == nconstructs)
13253 return simd_seen ? -1 : 1;
13254 return 0;
13255 }
13256
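/* A hedged usage sketch (hypothetical caller, not part of this file):
   for a context selector such as construct={target,parallel} on a
   declare variant, the construct codes are passed in CONSTRUCTS; a
   result of 1 means the current gimplification context matches, 0 that
   it cannot match, and -1 that a simd construct leaves the answer open
   until vectorization. */

static int
sketch_selector_query (void)
{
  enum tree_code cs[2] = { OMP_TARGET, OMP_PARALLEL };
  return omp_construct_selector_matches (cs, 2, NULL);
}
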
13257 /* Gimplify OACC_CACHE. */
13258
13259 static void
13260 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
13261 {
13262 tree expr = *expr_p;
13263
13264 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
13265 OACC_CACHE);
13266 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
13267 OACC_CACHE);
13268
13269 /* TODO: Do something sensible with this information. */
13270
13271 *expr_p = NULL_TREE;
13272 }
13273
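/* A minimal sketch (hypothetical names, not part of this file) of the
   construct handled above; the clause list is scanned and adjusted like
   any other OpenACC clause list, but the construct itself is then
   dropped (*expr_p = NULL_TREE), so per the TODO the caching hint is
   not used any further yet. */

void
sketch_oacc_cache (float *a, int n)
{
  #pragma acc parallel loop
  for (int i = 0; i < n; i++)
    {
      #pragma acc cache (a[0:n])	/* Handled by gimplify_oacc_cache. */
      a[i] += 1.0f;
    }
}
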
13274 /* Helper function of gimplify_oacc_declare. Its purpose is to
13275 translate, if required, the 'kind' in CLAUSE into an 'entry' kind and
13276 an 'exit' kind. The entry kind will replace the one in CLAUSE, while
13277 the exit kind will be used in a new omp_clause and returned to the caller. */
13278
13279 static tree
13280 gimplify_oacc_declare_1 (tree clause)
13281 {
13282 HOST_WIDE_INT kind, new_op;
13283 bool ret = false;
13284 tree c = NULL;
13285
13286 kind = OMP_CLAUSE_MAP_KIND (clause);
13287
13288 switch (kind)
13289 {
13290 case GOMP_MAP_ALLOC:
13291 new_op = GOMP_MAP_RELEASE;
13292 ret = true;
13293 break;
13294
13295 case GOMP_MAP_FROM:
13296 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
13297 new_op = GOMP_MAP_FROM;
13298 ret = true;
13299 break;
13300
13301 case GOMP_MAP_TOFROM:
13302 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
13303 new_op = GOMP_MAP_FROM;
13304 ret = true;
13305 break;
13306
13307 case GOMP_MAP_DEVICE_RESIDENT:
13308 case GOMP_MAP_FORCE_DEVICEPTR:
13309 case GOMP_MAP_FORCE_PRESENT:
13310 case GOMP_MAP_LINK:
13311 case GOMP_MAP_POINTER:
13312 case GOMP_MAP_TO:
13313 break;
13314
13315 default:
13316 gcc_unreachable ();
13317 break;
13318 }
13319
13320 if (ret)
13321 {
13322 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
13323 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
13324 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
13325 }
13326
13327 return c;
13328 }
13329
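/* A minimal sketch (hypothetical variable name) of the kind splitting
   performed above: for

     #pragma acc declare copyout (v)

   the clause arrives as GOMP_MAP_FROM; gimplify_oacc_declare_1 rewrites
   it to GOMP_MAP_FORCE_ALLOC for the entry to the scope and returns a
   fresh GOMP_MAP_FROM clause for the exit, so V is allocated on entry
   and copied back to the host on exit. */
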
13330 /* Gimplify OACC_DECLARE. */
13331
13332 static void
13333 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
13334 {
13335 tree expr = *expr_p;
13336 gomp_target *stmt;
13337 tree clauses, t, decl;
13338
13339 clauses = OACC_DECLARE_CLAUSES (expr);
13340
13341 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
13342 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
13343
13344 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
13345 {
13346 decl = OMP_CLAUSE_DECL (t);
13347
13348 if (TREE_CODE (decl) == MEM_REF)
13349 decl = TREE_OPERAND (decl, 0);
13350
13351 if (VAR_P (decl) && !is_oacc_declared (decl))
13352 {
13353 tree attr = get_identifier ("oacc declare target");
13354 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
13355 DECL_ATTRIBUTES (decl));
13356 }
13357
13358 if (VAR_P (decl)
13359 && !is_global_var (decl)
13360 && DECL_CONTEXT (decl) == current_function_decl)
13361 {
13362 tree c = gimplify_oacc_declare_1 (t);
13363 if (c)
13364 {
13365 if (oacc_declare_returns == NULL)
13366 oacc_declare_returns = new hash_map<tree, tree>;
13367
13368 oacc_declare_returns->put (decl, c);
13369 }
13370 }
13371
13372 if (gimplify_omp_ctxp)
13373 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
13374 }
13375
13376 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
13377 clauses);
13378
13379 gimplify_seq_add_stmt (pre_p, stmt);
13380
13381 *expr_p = NULL_TREE;
13382 }
13383
13384 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
13385 gimplification of the body, as well as scanning the body for used
13386 variables. We need to do this scan now, because variable-sized
13387 decls will be decomposed during gimplification. */
13388
13389 static void
13390 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
13391 {
13392 tree expr = *expr_p;
13393 gimple *g;
13394 gimple_seq body = NULL;
13395
13396 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
13397 OMP_PARALLEL_COMBINED (expr)
13398 ? ORT_COMBINED_PARALLEL
13399 : ORT_PARALLEL, OMP_PARALLEL);
13400
13401 push_gimplify_context ();
13402
13403 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
13404 if (gimple_code (g) == GIMPLE_BIND)
13405 pop_gimplify_context (g);
13406 else
13407 pop_gimplify_context (NULL);
13408
13409 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
13410 OMP_PARALLEL);
13411
13412 g = gimple_build_omp_parallel (body,
13413 OMP_PARALLEL_CLAUSES (expr),
13414 NULL_TREE, NULL_TREE);
13415 if (OMP_PARALLEL_COMBINED (expr))
13416 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
13417 gimplify_seq_add_stmt (pre_p, g);
13418 *expr_p = NULL_TREE;
13419 }
13420
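/* A minimal sketch (hypothetical names, not part of this file) of the
   input handled above; the body is gimplified into a GIMPLE_BIND, the
   clause list is adjusted for the implicit data sharing of X, and a
   GIMPLE_OMP_PARALLEL wrapping the body is appended to PRE_P. */

void
sketch_parallel (void)
{
  int x = 0;
  #pragma omp parallel	/* Becomes a GIMPLE_OMP_PARALLEL stmt. */
  x += 1;
}
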
13421 /* Gimplify the contents of an OMP_TASK statement. This involves
13422 gimplification of the body, as well as scanning the body for used
13423 variables. We need to do this scan now, because variable-sized
13424 decls will be decomposed during gimplification. */
13425
13426 static void
13427 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
13428 {
13429 tree expr = *expr_p;
13430 gimple *g;
13431 gimple_seq body = NULL;
13432 bool nowait = false;
13433 bool has_depend = false;
13434
13435 if (OMP_TASK_BODY (expr) == NULL_TREE)
13436 {
13437 for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
13438 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
13439 {
13440 has_depend = true;
13441 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
13442 {
13443 error_at (OMP_CLAUSE_LOCATION (c),
13444 "%<mutexinoutset%> kind in %<depend%> clause on a "
13445 "%<taskwait%> construct");
13446 break;
13447 }
13448 }
13449 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NOWAIT)
13450 nowait = true;
13451 if (nowait && !has_depend)
13452 {
13453 error_at (EXPR_LOCATION (expr),
13454 "%<taskwait%> construct with %<nowait%> clause but no "
13455 "%<depend%> clauses");
13456 *expr_p = NULL_TREE;
13457 return;
13458 }
13459 }
13460
13461 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
13462 omp_find_clause (OMP_TASK_CLAUSES (expr),
13463 OMP_CLAUSE_UNTIED)
13464 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
13465
13466 if (OMP_TASK_BODY (expr))
13467 {
13468 push_gimplify_context ();
13469
13470 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
13471 if (gimple_code (g) == GIMPLE_BIND)
13472 pop_gimplify_context (g);
13473 else
13474 pop_gimplify_context (NULL);
13475 }
13476
13477 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
13478 OMP_TASK);
13479
13480 g = gimple_build_omp_task (body,
13481 OMP_TASK_CLAUSES (expr),
13482 NULL_TREE, NULL_TREE,
13483 NULL_TREE, NULL_TREE, NULL_TREE);
13484 if (OMP_TASK_BODY (expr) == NULL_TREE)
13485 gimple_omp_task_set_taskwait_p (g, true);
13486 gimplify_seq_add_stmt (pre_p, g);
13487 *expr_p = NULL_TREE;
13488 }
13489
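/* A hedged example (hypothetical variable name) of the body-less case
   handled above:

     #pragma omp taskwait depend(in: x)

   is represented as an OMP_TASK with a NULL body and is gimplified into
   a GIMPLE_OMP_TASK with gimple_omp_task_taskwait_p set, rather than
   into a real task; nowait without any depend clause is rejected. */
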
13490 /* Helper function for gimplify_omp_for. If *TP is not a gimple constant,
13491 force it into a temporary initialized in PRE_P and add a firstprivate
13492 clause to ORIG_FOR_STMT. */
13493
13494 static void
13495 gimplify_omp_taskloop_expr (tree type, tree *tp, gimple_seq *pre_p,
13496 tree orig_for_stmt)
13497 {
13498 if (*tp == NULL || is_gimple_constant (*tp))
13499 return;
13500
13501 *tp = get_initialized_tmp_var (*tp, pre_p, NULL, false);
13502 /* A reference-to-pointer conversion is considered useless,
13503 but is significant for the firstprivate clause. Force it
13504 here. */
13505 if (type
13506 && TREE_CODE (type) == POINTER_TYPE
13507 && TREE_CODE (TREE_TYPE (*tp)) == REFERENCE_TYPE)
13508 {
13509 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
13510 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v, *tp);
13511 gimplify_and_add (m, pre_p);
13512 *tp = v;
13513 }
13514
13515 tree c = build_omp_clause (input_location, OMP_CLAUSE_FIRSTPRIVATE);
13516 OMP_CLAUSE_DECL (c) = *tp;
13517 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
13518 OMP_FOR_CLAUSES (orig_for_stmt) = c;
13519 }
13520
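/* A minimal sketch (hypothetical names) of the hoisting done above: for

     #pragma omp taskloop
     for (int i = 0; i < compute_bound (); i++)
       work (i);

   the call compute_bound () is evaluated into a temporary before the
   taskloop, and that temporary is added as firstprivate on the taskloop,
   so every generated task sees a bound computed exactly once in the
   enclosing context. */
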
13521 /* Helper function of gimplify_omp_for, find OMP_ORDERED with
13522 null OMP_ORDERED_BODY inside of OMP_FOR's body. */
13523
13524 static tree
13525 find_standalone_omp_ordered (tree *tp, int *walk_subtrees, void *)
13526 {
13527 switch (TREE_CODE (*tp))
13528 {
13529 case OMP_ORDERED:
13530 if (OMP_ORDERED_BODY (*tp) == NULL_TREE)
13531 return *tp;
13532 break;
13533 case OMP_SIMD:
13534 case OMP_PARALLEL:
13535 case OMP_TARGET:
13536 *walk_subtrees = 0;
13537 break;
13538 default:
13539 break;
13540 }
13541 return NULL_TREE;
13542 }
13543
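/* A minimal sketch of the stand-alone form this walker matches:

     #pragma omp ordered depend(sink: i - 1)

   has a NULL OMP_ORDERED_BODY, unlike the block form
   "#pragma omp ordered { ... }"; finding one inside the OMP_FOR body
   below causes OMP_CLAUSE_ORDERED_DOACROSS to be set on the loop's
   ordered clause. */
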
13544 /* Gimplify the gross structure of an OMP_FOR statement. */
13545
13546 static enum gimplify_status
13547 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
13548 {
13549 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
13550 enum gimplify_status ret = GS_ALL_DONE;
13551 enum gimplify_status tret;
13552 gomp_for *gfor;
13553 gimple_seq for_body, for_pre_body;
13554 int i;
13555 bitmap has_decl_expr = NULL;
13556 enum omp_region_type ort = ORT_WORKSHARE;
13557 bool openacc = TREE_CODE (*expr_p) == OACC_LOOP;
13558
13559 orig_for_stmt = for_stmt = *expr_p;
13560
13561 bool loop_p = (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_BIND)
13562 != NULL_TREE);
13563 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
13564 {
13565 tree *data[4] = { NULL, NULL, NULL, NULL };
13566 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
13567 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
13568 find_combined_omp_for, data, NULL);
13569 if (inner_for_stmt == NULL_TREE)
13570 {
13571 gcc_assert (seen_error ());
13572 *expr_p = NULL_TREE;
13573 return GS_ERROR;
13574 }
13575 if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
13576 {
13577 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
13578 &OMP_FOR_PRE_BODY (for_stmt));
13579 OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
13580 }
13581 if (OMP_FOR_PRE_BODY (inner_for_stmt))
13582 {
13583 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
13584 &OMP_FOR_PRE_BODY (for_stmt));
13585 OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
13586 }
13587
13588 if (data[0])
13589 {
13590 /* We have some statements or variable declarations in between
13591 the composite construct directives. Move them so that they wrap
13592 around the inner_for_stmt. */
13593 data[0] = expr_p;
13594 for (i = 0; i < 3; i++)
13595 if (data[i])
13596 {
13597 tree t = *data[i];
13598 if (i < 2 && data[i + 1] == &OMP_BODY (t))
13599 data[i + 1] = data[i];
13600 *data[i] = OMP_BODY (t);
13601 tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
13602 NULL_TREE, make_node (BLOCK));
13603 OMP_BODY (t) = body;
13604 append_to_statement_list_force (inner_for_stmt,
13605 &BIND_EXPR_BODY (body));
13606 *data[3] = t;
13607 data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
13608 gcc_assert (*data[3] == inner_for_stmt);
13609 }
13610 return GS_OK;
13611 }
13612
13613 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
13614 if (!loop_p
13615 && OMP_FOR_ORIG_DECLS (inner_for_stmt)
13616 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
13617 i)) == TREE_LIST
13618 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
13619 i)))
13620 {
13621 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
13622 /* Class iterators aren't allowed on OMP_SIMD, so the only
13623 case we need to solve is distribute parallel for. They are
13624 allowed on the loop construct, but that is already handled
13625 in gimplify_omp_loop. */
13626 gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
13627 && TREE_CODE (for_stmt) == OMP_DISTRIBUTE
13628 && data[1]);
13629 tree orig_decl = TREE_PURPOSE (orig);
13630 tree last = TREE_VALUE (orig);
13631 tree *pc;
13632 for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
13633 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
13634 if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
13635 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
13636 && OMP_CLAUSE_DECL (*pc) == orig_decl)
13637 break;
13638 if (*pc == NULL_TREE)
13639 {
13640 tree *spc;
13641 for (spc = &OMP_PARALLEL_CLAUSES (*data[1]);
13642 *spc; spc = &OMP_CLAUSE_CHAIN (*spc))
13643 if (OMP_CLAUSE_CODE (*spc) == OMP_CLAUSE_PRIVATE
13644 && OMP_CLAUSE_DECL (*spc) == orig_decl)
13645 break;
13646 if (*spc)
13647 {
13648 tree c = *spc;
13649 *spc = OMP_CLAUSE_CHAIN (c);
13650 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
13651 *pc = c;
13652 }
13653 }
13654 if (*pc == NULL_TREE)
13655 ;
13656 else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
13657 {
13658 /* A private clause will appear only on inner_for_stmt.
13659 Change it into firstprivate, and add a private clause
13660 on for_stmt. */
13661 tree c = copy_node (*pc);
13662 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
13663 OMP_FOR_CLAUSES (for_stmt) = c;
13664 OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
13665 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
13666 }
13667 else
13668 {
13669 /* A lastprivate clause will appear on both inner_for_stmt
13670 and for_stmt. Add a firstprivate clause to
13671 inner_for_stmt. */
13672 tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
13673 OMP_CLAUSE_FIRSTPRIVATE);
13674 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
13675 OMP_CLAUSE_CHAIN (c) = *pc;
13676 *pc = c;
13677 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
13678 }
13679 tree c = build_omp_clause (UNKNOWN_LOCATION,
13680 OMP_CLAUSE_FIRSTPRIVATE);
13681 OMP_CLAUSE_DECL (c) = last;
13682 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
13683 OMP_PARALLEL_CLAUSES (*data[1]) = c;
13684 c = build_omp_clause (UNKNOWN_LOCATION,
13685 *pc ? OMP_CLAUSE_SHARED
13686 : OMP_CLAUSE_FIRSTPRIVATE);
13687 OMP_CLAUSE_DECL (c) = orig_decl;
13688 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
13689 OMP_PARALLEL_CLAUSES (*data[1]) = c;
13690 }
13691 /* Similarly, take care of C++ range-for temporaries; those should
13692 be firstprivate on the OMP_PARALLEL if there is one. */
13693 if (data[1])
13694 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
13695 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
13696 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
13697 i)) == TREE_LIST
13698 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
13699 i)))
13700 {
13701 tree orig
13702 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
13703 tree v = TREE_CHAIN (orig);
13704 tree c = build_omp_clause (UNKNOWN_LOCATION,
13705 OMP_CLAUSE_FIRSTPRIVATE);
13706 /* First add a firstprivate clause for the __for_end artificial
13707 decl. */
13708 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
13709 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
13710 == REFERENCE_TYPE)
13711 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
13712 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
13713 OMP_PARALLEL_CLAUSES (*data[1]) = c;
13714 if (TREE_VEC_ELT (v, 0))
13715 {
13716 /* And now the same for __for_range artificial decl if it
13717 exists. */
13718 c = build_omp_clause (UNKNOWN_LOCATION,
13719 OMP_CLAUSE_FIRSTPRIVATE);
13720 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
13721 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
13722 == REFERENCE_TYPE)
13723 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
13724 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
13725 OMP_PARALLEL_CLAUSES (*data[1]) = c;
13726 }
13727 }
13728 }
13729
13730 switch (TREE_CODE (for_stmt))
13731 {
13732 case OMP_FOR:
13733 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
13734 {
13735 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
13736 OMP_CLAUSE_SCHEDULE))
13737 error_at (EXPR_LOCATION (for_stmt),
13738 "%qs clause may not appear on non-rectangular %qs",
13739 "schedule", lang_GNU_Fortran () ? "do" : "for");
13740 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED))
13741 error_at (EXPR_LOCATION (for_stmt),
13742 "%qs clause may not appear on non-rectangular %qs",
13743 "ordered", lang_GNU_Fortran () ? "do" : "for");
13744 }
13745 break;
13746 case OMP_DISTRIBUTE:
13747 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt)
13748 && omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
13749 OMP_CLAUSE_DIST_SCHEDULE))
13750 error_at (EXPR_LOCATION (for_stmt),
13751 "%qs clause may not appear on non-rectangular %qs",
13752 "dist_schedule", "distribute");
13753 break;
13754 case OACC_LOOP:
13755 ort = ORT_ACC;
13756 break;
13757 case OMP_TASKLOOP:
13758 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
13759 {
13760 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
13761 OMP_CLAUSE_GRAINSIZE))
13762 error_at (EXPR_LOCATION (for_stmt),
13763 "%qs clause may not appear on non-rectangular %qs",
13764 "grainsize", "taskloop");
13765 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
13766 OMP_CLAUSE_NUM_TASKS))
13767 error_at (EXPR_LOCATION (for_stmt),
13768 "%qs clause may not appear on non-rectangular %qs",
13769 "num_tasks", "taskloop");
13770 }
13771 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
13772 ort = ORT_UNTIED_TASKLOOP;
13773 else
13774 ort = ORT_TASKLOOP;
13775 break;
13776 case OMP_SIMD:
13777 ort = ORT_SIMD;
13778 break;
13779 default:
13780 gcc_unreachable ();
13781 }
13782
13783 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
13784 clause for the IV. */
13785 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
13786 {
13787 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
13788 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
13789 decl = TREE_OPERAND (t, 0);
13790 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
13791 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
13792 && OMP_CLAUSE_DECL (c) == decl)
13793 {
13794 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
13795 break;
13796 }
13797 }
13798
13799 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
13800 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
13801 loop_p && TREE_CODE (for_stmt) != OMP_SIMD
13802 ? OMP_LOOP : TREE_CODE (for_stmt));
13803
13804 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
13805 gimplify_omp_ctxp->distribute = true;
13806
13807 /* Handle OMP_FOR_INIT. */
13808 for_pre_body = NULL;
13809 if ((ort == ORT_SIMD
13810 || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
13811 && OMP_FOR_PRE_BODY (for_stmt))
13812 {
13813 has_decl_expr = BITMAP_ALLOC (NULL);
13814 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
13815 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
13816 == VAR_DECL)
13817 {
13818 t = OMP_FOR_PRE_BODY (for_stmt);
13819 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
13820 }
13821 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
13822 {
13823 tree_stmt_iterator si;
13824 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
13825 tsi_next (&si))
13826 {
13827 t = tsi_stmt (si);
13828 if (TREE_CODE (t) == DECL_EXPR
13829 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
13830 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
13831 }
13832 }
13833 }
13834 if (OMP_FOR_PRE_BODY (for_stmt))
13835 {
13836 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
13837 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
13838 else
13839 {
13840 struct gimplify_omp_ctx ctx;
13841 memset (&ctx, 0, sizeof (ctx));
13842 ctx.region_type = ORT_NONE;
13843 gimplify_omp_ctxp = &ctx;
13844 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
13845 gimplify_omp_ctxp = NULL;
13846 }
13847 }
13848 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
13849
13850 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
13851 for_stmt = inner_for_stmt;
13852
13853 /* For taskloop, we need to gimplify the start, end and step expressions
13854 before the taskloop, outside of the taskloop omp context. */
13855 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
13856 {
13857 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
13858 {
13859 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
13860 gimple_seq *for_pre_p = (gimple_seq_empty_p (for_pre_body)
13861 ? pre_p : &for_pre_body);
13862 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
13863 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
13864 {
13865 tree v = TREE_OPERAND (t, 1);
13866 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
13867 for_pre_p, orig_for_stmt);
13868 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
13869 for_pre_p, orig_for_stmt);
13870 }
13871 else
13872 gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
13873 orig_for_stmt);
13874
13875 /* Handle OMP_FOR_COND. */
13876 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
13877 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
13878 {
13879 tree v = TREE_OPERAND (t, 1);
13880 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
13881 for_pre_p, orig_for_stmt);
13882 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
13883 for_pre_p, orig_for_stmt);
13884 }
13885 else
13886 gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
13887 orig_for_stmt);
13888
13889 /* Handle OMP_FOR_INCR. */
13890 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
13891 if (TREE_CODE (t) == MODIFY_EXPR)
13892 {
13893 decl = TREE_OPERAND (t, 0);
13894 t = TREE_OPERAND (t, 1);
13895 tree *tp = &TREE_OPERAND (t, 1);
13896 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
13897 tp = &TREE_OPERAND (t, 0);
13898
13899 gimplify_omp_taskloop_expr (NULL_TREE, tp, for_pre_p,
13900 orig_for_stmt);
13901 }
13902 }
13903
13904 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
13905 OMP_TASKLOOP);
13906 }
13907
13908 if (orig_for_stmt != for_stmt)
13909 gimplify_omp_ctxp->combined_loop = true;
13910
13911 for_body = NULL;
13912 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
13913 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
13914 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
13915 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
13916
13917 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
13918 bool is_doacross = false;
13919 if (c && walk_tree_without_duplicates (&OMP_FOR_BODY (for_stmt),
13920 find_standalone_omp_ordered, NULL))
13921 {
13922 OMP_CLAUSE_ORDERED_DOACROSS (c) = 1;
13923 is_doacross = true;
13924 int len = TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt));
13925 gimplify_omp_ctxp->loop_iter_var.create (len * 2);
13926 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
13927 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LINEAR)
13928 {
13929 error_at (OMP_CLAUSE_LOCATION (*pc),
13930 "%<linear%> clause may not be specified together "
13931 "with %<ordered%> clause if stand-alone %<ordered%> "
13932 "construct is nested in it");
13933 *pc = OMP_CLAUSE_CHAIN (*pc);
13934 }
13935 else
13936 pc = &OMP_CLAUSE_CHAIN (*pc);
13937 }
13938 int collapse = 1, tile = 0;
13939 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
13940 if (c)
13941 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
13942 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
13943 if (c)
13944 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
13945 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ALLOCATE);
13946 hash_set<tree> *allocate_uids = NULL;
13947 if (c)
13948 {
13949 allocate_uids = new hash_set<tree>;
13950 for (; c; c = OMP_CLAUSE_CHAIN (c))
13951 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE)
13952 allocate_uids->add (OMP_CLAUSE_DECL (c));
13953 }
13954 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
13955 {
13956 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
13957 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
13958 decl = TREE_OPERAND (t, 0);
13959 gcc_assert (DECL_P (decl));
13960 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
13961 || POINTER_TYPE_P (TREE_TYPE (decl)));
13962 if (is_doacross)
13963 {
13964 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
13965 {
13966 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
13967 if (TREE_CODE (orig_decl) == TREE_LIST)
13968 {
13969 orig_decl = TREE_PURPOSE (orig_decl);
13970 if (!orig_decl)
13971 orig_decl = decl;
13972 }
13973 gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
13974 }
13975 else
13976 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
13977 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
13978 }
13979
13980 if (for_stmt == orig_for_stmt)
13981 {
13982 tree orig_decl = decl;
13983 if (OMP_FOR_ORIG_DECLS (for_stmt))
13984 {
13985 orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
13986 if (TREE_CODE (orig_decl) == TREE_LIST)
13987 {
13988 orig_decl = TREE_PURPOSE (orig_decl);
13989 if (!orig_decl)
13990 orig_decl = decl;
13991 }
13992 }
13993 if (is_global_var (orig_decl) && DECL_THREAD_LOCAL_P (orig_decl))
13994 error_at (EXPR_LOCATION (for_stmt),
13995 "threadprivate iteration variable %qD", orig_decl);
13996 }
13997
13998 /* Make sure the iteration variable is private. */
13999 tree c = NULL_TREE;
14000 tree c2 = NULL_TREE;
14001 if (orig_for_stmt != for_stmt)
14002 {
14003 /* Preserve this information until we gimplify the inner simd. */
14004 if (has_decl_expr
14005 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
14006 TREE_PRIVATE (t) = 1;
14007 }
14008 else if (ort == ORT_SIMD)
14009 {
14010 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
14011 (splay_tree_key) decl);
14012 omp_is_private (gimplify_omp_ctxp, decl,
14013 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
14014 != 1));
14015 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
14016 {
14017 omp_notice_variable (gimplify_omp_ctxp, decl, true);
14018 if (n->value & GOVD_LASTPRIVATE_CONDITIONAL)
14019 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
14020 OMP_CLAUSE_LASTPRIVATE);
14021 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
14022 OMP_CLAUSE_LASTPRIVATE))
14023 if (OMP_CLAUSE_DECL (c3) == decl)
14024 {
14025 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
14026 "conditional %<lastprivate%> on loop "
14027 "iterator %qD ignored", decl);
14028 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
14029 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
14030 }
14031 }
14032 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1 && !loop_p)
14033 {
14034 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
14035 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
14036 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
14037 if ((has_decl_expr
14038 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
14039 || TREE_PRIVATE (t))
14040 {
14041 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
14042 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
14043 }
14044 struct gimplify_omp_ctx *outer
14045 = gimplify_omp_ctxp->outer_context;
14046 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
14047 {
14048 if (outer->region_type == ORT_WORKSHARE
14049 && outer->combined_loop)
14050 {
14051 n = splay_tree_lookup (outer->variables,
14052 (splay_tree_key)decl);
14053 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
14054 {
14055 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
14056 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
14057 }
14058 else
14059 {
14060 struct gimplify_omp_ctx *octx = outer->outer_context;
14061 if (octx
14062 && octx->region_type == ORT_COMBINED_PARALLEL
14063 && octx->outer_context
14064 && (octx->outer_context->region_type
14065 == ORT_WORKSHARE)
14066 && octx->outer_context->combined_loop)
14067 {
14068 octx = octx->outer_context;
14069 n = splay_tree_lookup (octx->variables,
14070 (splay_tree_key)decl);
14071 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
14072 {
14073 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
14074 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
14075 }
14076 }
14077 }
14078 }
14079 }
14080
14081 OMP_CLAUSE_DECL (c) = decl;
14082 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
14083 OMP_FOR_CLAUSES (for_stmt) = c;
14084 omp_add_variable (gimplify_omp_ctxp, decl, flags);
14085 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
14086 omp_lastprivate_for_combined_outer_constructs (outer, decl,
14087 true);
14088 }
14089 else
14090 {
14091 bool lastprivate
14092 = (!has_decl_expr
14093 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
14094 if (TREE_PRIVATE (t))
14095 lastprivate = false;
14096 if (loop_p && OMP_FOR_ORIG_DECLS (for_stmt))
14097 {
14098 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
14099 if (TREE_CODE (elt) == TREE_LIST && TREE_PURPOSE (elt))
14100 lastprivate = false;
14101 }
14102
14103 struct gimplify_omp_ctx *outer
14104 = gimplify_omp_ctxp->outer_context;
14105 if (outer && lastprivate)
14106 omp_lastprivate_for_combined_outer_constructs (outer, decl,
14107 true);
14108
14109 c = build_omp_clause (input_location,
14110 lastprivate ? OMP_CLAUSE_LASTPRIVATE
14111 : OMP_CLAUSE_PRIVATE);
14112 OMP_CLAUSE_DECL (c) = decl;
14113 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
14114 OMP_FOR_CLAUSES (for_stmt) = c;
14115 omp_add_variable (gimplify_omp_ctxp, decl,
14116 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
14117 | GOVD_EXPLICIT | GOVD_SEEN);
14118 c = NULL_TREE;
14119 }
14120 }
14121 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
14122 {
14123 omp_notice_variable (gimplify_omp_ctxp, decl, true);
14124 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
14125 (splay_tree_key) decl);
14126 if (n && (n->value & GOVD_LASTPRIVATE_CONDITIONAL))
14127 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
14128 OMP_CLAUSE_LASTPRIVATE);
14129 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
14130 OMP_CLAUSE_LASTPRIVATE))
14131 if (OMP_CLAUSE_DECL (c3) == decl)
14132 {
14133 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
14134 "conditional %<lastprivate%> on loop "
14135 "iterator %qD ignored", decl);
14136 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
14137 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
14138 }
14139 }
14140 else
14141 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
14142
14143 /* If DECL is not a gimple register, create a temporary variable to act
14144 as an iteration counter. This is valid, since DECL cannot be
14145 modified in the body of the loop. Similarly for any iteration vars
14146 in simd with collapse > 1 where the iterator vars must be
14147 lastprivate. And similarly for vars mentioned in allocate clauses. */
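/* An illustrative sketch: in

     #pragma omp simd collapse(2)
     for (i = 0; i < n; i++)
       for (j = 0; j < m; j++)
         body;

   I and J must behave as lastprivate, so fresh temporaries serve as the
   actual gimple iterators and I and J are assigned from them inside the
   loop body.  */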
14148 if (orig_for_stmt != for_stmt)
14149 var = decl;
14150 else if (!is_gimple_reg (decl)
14151 || (ort == ORT_SIMD
14152 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
14153 || (allocate_uids && allocate_uids->contains (decl)))
14154 {
14155 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
14156 /* Make sure omp_add_variable is not called on it prematurely.
14157 We call it ourselves a few lines later. */
14158 gimplify_omp_ctxp = NULL;
14159 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
14160 gimplify_omp_ctxp = ctx;
14161 TREE_OPERAND (t, 0) = var;
14162
14163 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
14164
14165 if (ort == ORT_SIMD
14166 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
14167 {
14168 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
14169 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
14170 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
14171 OMP_CLAUSE_DECL (c2) = var;
14172 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
14173 OMP_FOR_CLAUSES (for_stmt) = c2;
14174 omp_add_variable (gimplify_omp_ctxp, var,
14175 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
14176 if (c == NULL_TREE)
14177 {
14178 c = c2;
14179 c2 = NULL_TREE;
14180 }
14181 }
14182 else
14183 omp_add_variable (gimplify_omp_ctxp, var,
14184 GOVD_PRIVATE | GOVD_SEEN);
14185 }
14186 else
14187 var = decl;
14188
14189 gimplify_omp_ctxp->in_for_exprs = true;
14190 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
14191 {
14192 tree lb = TREE_OPERAND (t, 1);
14193 tret = gimplify_expr (&TREE_VEC_ELT (lb, 1), &for_pre_body, NULL,
14194 is_gimple_val, fb_rvalue, false);
14195 ret = MIN (ret, tret);
14196 tret = gimplify_expr (&TREE_VEC_ELT (lb, 2), &for_pre_body, NULL,
14197 is_gimple_val, fb_rvalue, false);
14198 }
14199 else
14200 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
14201 is_gimple_val, fb_rvalue, false);
14202 gimplify_omp_ctxp->in_for_exprs = false;
14203 ret = MIN (ret, tret);
14204 if (ret == GS_ERROR)
14205 return ret;
14206
14207 /* Handle OMP_FOR_COND. */
14208 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
14209 gcc_assert (COMPARISON_CLASS_P (t));
14210 gcc_assert (TREE_OPERAND (t, 0) == decl);
14211
14212 gimplify_omp_ctxp->in_for_exprs = true;
14213 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
14214 {
14215 tree ub = TREE_OPERAND (t, 1);
14216 tret = gimplify_expr (&TREE_VEC_ELT (ub, 1), &for_pre_body, NULL,
14217 is_gimple_val, fb_rvalue, false);
14218 ret = MIN (ret, tret);
14219 tret = gimplify_expr (&TREE_VEC_ELT (ub, 2), &for_pre_body, NULL,
14220 is_gimple_val, fb_rvalue, false);
14221 }
14222 else
14223 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
14224 is_gimple_val, fb_rvalue, false);
14225 gimplify_omp_ctxp->in_for_exprs = false;
14226 ret = MIN (ret, tret);
14227
14228 /* Handle OMP_FOR_INCR. */
14229 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14230 switch (TREE_CODE (t))
14231 {
14232 case PREINCREMENT_EXPR:
14233 case POSTINCREMENT_EXPR:
14234 {
14235 tree decl = TREE_OPERAND (t, 0);
14236 /* c_omp_for_incr_canonicalize_ptr() should have been
14237 called to massage things appropriately. */
14238 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
14239
14240 if (orig_for_stmt != for_stmt)
14241 break;
14242 t = build_int_cst (TREE_TYPE (decl), 1);
14243 if (c)
14244 OMP_CLAUSE_LINEAR_STEP (c) = t;
14245 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
14246 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
14247 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
14248 break;
14249 }
14250
14251 case PREDECREMENT_EXPR:
14252 case POSTDECREMENT_EXPR:
14253 /* c_omp_for_incr_canonicalize_ptr() should have been
14254 called to massage things appropriately. */
14255 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
14256 if (orig_for_stmt != for_stmt)
14257 break;
14258 t = build_int_cst (TREE_TYPE (decl), -1);
14259 if (c)
14260 OMP_CLAUSE_LINEAR_STEP (c) = t;
14261 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
14262 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
14263 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
14264 break;
14265
14266 case MODIFY_EXPR:
14267 gcc_assert (TREE_OPERAND (t, 0) == decl);
14268 TREE_OPERAND (t, 0) = var;
14269
14270 t = TREE_OPERAND (t, 1);
14271 switch (TREE_CODE (t))
14272 {
14273 case PLUS_EXPR:
14274 if (TREE_OPERAND (t, 1) == decl)
14275 {
14276 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
14277 TREE_OPERAND (t, 0) = var;
14278 break;
14279 }
14280
14281 /* Fallthru. */
14282 case MINUS_EXPR:
14283 case POINTER_PLUS_EXPR:
14284 gcc_assert (TREE_OPERAND (t, 0) == decl);
14285 TREE_OPERAND (t, 0) = var;
14286 break;
14287 default:
14288 gcc_unreachable ();
14289 }
14290
14291 gimplify_omp_ctxp->in_for_exprs = true;
14292 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
14293 is_gimple_val, fb_rvalue, false);
14294 ret = MIN (ret, tret);
14295 if (c)
14296 {
14297 tree step = TREE_OPERAND (t, 1);
14298 tree stept = TREE_TYPE (decl);
14299 if (POINTER_TYPE_P (stept))
14300 stept = sizetype;
14301 step = fold_convert (stept, step);
14302 if (TREE_CODE (t) == MINUS_EXPR)
14303 step = fold_build1 (NEGATE_EXPR, stept, step);
14304 OMP_CLAUSE_LINEAR_STEP (c) = step;
14305 if (step != TREE_OPERAND (t, 1))
14306 {
14307 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
14308 &for_pre_body, NULL,
14309 is_gimple_val, fb_rvalue, false);
14310 ret = MIN (ret, tret);
14311 }
14312 }
14313 gimplify_omp_ctxp->in_for_exprs = false;
14314 break;
14315
14316 default:
14317 gcc_unreachable ();
14318 }
14319
14320 if (c2)
14321 {
14322 gcc_assert (c);
14323 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
14324 }
14325
14326 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
14327 {
14328 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
14329 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
14330 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
14331 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
14332 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
14333 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
14334 && OMP_CLAUSE_DECL (c) == decl)
14335 {
14336 if (is_doacross && (collapse == 1 || i >= collapse))
14337 t = var;
14338 else
14339 {
14340 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14341 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14342 gcc_assert (TREE_OPERAND (t, 0) == var);
14343 t = TREE_OPERAND (t, 1);
14344 gcc_assert (TREE_CODE (t) == PLUS_EXPR
14345 || TREE_CODE (t) == MINUS_EXPR
14346 || TREE_CODE (t) == POINTER_PLUS_EXPR);
14347 gcc_assert (TREE_OPERAND (t, 0) == var);
14348 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
14349 is_doacross ? var : decl,
14350 TREE_OPERAND (t, 1));
14351 }
14352 gimple_seq *seq;
14353 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
14354 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
14355 else
14356 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
14357 push_gimplify_context ();
14358 gimplify_assign (decl, t, seq);
14359 gimple *bind = NULL;
14360 if (gimplify_ctxp->temps)
14361 {
14362 bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
14363 *seq = NULL;
14364 gimplify_seq_add_stmt (seq, bind);
14365 }
14366 pop_gimplify_context (bind);
14367 }
14368 }
14369 if (OMP_FOR_NON_RECTANGULAR (for_stmt) && var != decl)
14370 for (int j = i + 1; j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
14371 {
14372 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
14373 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14374 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14375 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14376 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14377 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
14378 gcc_assert (COMPARISON_CLASS_P (t));
14379 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14380 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14381 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14382 }
14383 }
14384
14385 BITMAP_FREE (has_decl_expr);
14386 delete allocate_uids;
14387
14388 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
14389 || (loop_p && orig_for_stmt == for_stmt))
14390 {
14391 push_gimplify_context ();
14392 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
14393 {
14394 OMP_FOR_BODY (orig_for_stmt)
14395 = build3 (BIND_EXPR, void_type_node, NULL,
14396 OMP_FOR_BODY (orig_for_stmt), NULL);
14397 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
14398 }
14399 }
14400
14401 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
14402 &for_body);
14403
14404 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
14405 || (loop_p && orig_for_stmt == for_stmt))
14406 {
14407 if (gimple_code (g) == GIMPLE_BIND)
14408 pop_gimplify_context (g);
14409 else
14410 pop_gimplify_context (NULL);
14411 }
14412
14413 if (orig_for_stmt != for_stmt)
14414 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14415 {
14416 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
14417 decl = TREE_OPERAND (t, 0);
14418 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
14419 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
14420 gimplify_omp_ctxp = ctx->outer_context;
14421 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
14422 gimplify_omp_ctxp = ctx;
14423 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
14424 TREE_OPERAND (t, 0) = var;
14425 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14426 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
14427 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
14428 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
14429 for (int j = i + 1;
14430 j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
14431 {
14432 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
14433 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14434 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14435 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14436 {
14437 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
14438 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14439 }
14440 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
14441 gcc_assert (COMPARISON_CLASS_P (t));
14442 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
14443 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
14444 {
14445 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
14446 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
14447 }
14448 }
14449 }
14450
14451 gimplify_adjust_omp_clauses (pre_p, for_body,
14452 &OMP_FOR_CLAUSES (orig_for_stmt),
14453 TREE_CODE (orig_for_stmt));
14454
14455 int kind;
14456 switch (TREE_CODE (orig_for_stmt))
14457 {
14458 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
14459 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
14460 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
14461 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
14462 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
14463 default:
14464 gcc_unreachable ();
14465 }
14466 if (loop_p && kind == GF_OMP_FOR_KIND_SIMD)
14467 {
14468 gimplify_seq_add_seq (pre_p, for_pre_body);
14469 for_pre_body = NULL;
14470 }
14471 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
14472 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
14473 for_pre_body);
14474 if (orig_for_stmt != for_stmt)
14475 gimple_omp_for_set_combined_p (gfor, true);
14476 if (gimplify_omp_ctxp
14477 && (gimplify_omp_ctxp->combined_loop
14478 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
14479 && gimplify_omp_ctxp->outer_context
14480 && gimplify_omp_ctxp->outer_context->combined_loop)))
14481 {
14482 gimple_omp_for_set_combined_into_p (gfor, true);
14483 if (gimplify_omp_ctxp->combined_loop)
14484 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
14485 else
14486 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
14487 }
14488
14489 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14490 {
14491 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
14492 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
14493 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
14494 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
14495 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
14496 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
14497 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
14498 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
14499 }
14500
14501 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
14502 constructs with a GIMPLE_OMP_TASK sandwiched in between them.
14503 The outer taskloop is responsible for computing the number of
14504 iterations (and the counts for collapsed loops) and for holding the
14505 taskloop-specific clauses. The task construct represents the effect
14506 of data sharing on the explicit task it creates, and the inner
14507 taskloop is expanded into the static loop inside that task. */
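/* A rough sketch of the resulting shape (not the exact IL):

     #pragma omp taskloop grainsize(g) firstprivate(x)
     for (i = 0; i < n; i++)
       body;

   becomes

     GIMPLE_OMP_FOR (kind taskloop, grainsize(g))    <- outer taskloop
       GIMPLE_OMP_TASK (firstprivate(x))             <- explicit task
         GIMPLE_OMP_FOR (kind taskloop)              <- inner taskloop
           body  */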
14508 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
14509 {
14510 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
14511 tree task_clauses = NULL_TREE;
14512 tree c = *gfor_clauses_ptr;
14513 tree *gtask_clauses_ptr = &task_clauses;
14514 tree outer_for_clauses = NULL_TREE;
14515 tree *gforo_clauses_ptr = &outer_for_clauses;
14516 bitmap lastprivate_uids = NULL;
14517 if (omp_find_clause (c, OMP_CLAUSE_ALLOCATE))
14518 {
14519 c = omp_find_clause (c, OMP_CLAUSE_LASTPRIVATE);
14520 if (c)
14521 {
14522 lastprivate_uids = BITMAP_ALLOC (NULL);
14523 for (; c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
14524 OMP_CLAUSE_LASTPRIVATE))
14525 bitmap_set_bit (lastprivate_uids,
14526 DECL_UID (OMP_CLAUSE_DECL (c)));
14527 }
14528 c = *gfor_clauses_ptr;
14529 }
14530 for (; c; c = OMP_CLAUSE_CHAIN (c))
14531 switch (OMP_CLAUSE_CODE (c))
14532 {
14533 /* These clauses are allowed on task, move them there. */
14534 case OMP_CLAUSE_SHARED:
14535 case OMP_CLAUSE_FIRSTPRIVATE:
14536 case OMP_CLAUSE_DEFAULT:
14537 case OMP_CLAUSE_IF:
14538 case OMP_CLAUSE_UNTIED:
14539 case OMP_CLAUSE_FINAL:
14540 case OMP_CLAUSE_MERGEABLE:
14541 case OMP_CLAUSE_PRIORITY:
14542 case OMP_CLAUSE_REDUCTION:
14543 case OMP_CLAUSE_IN_REDUCTION:
14544 *gtask_clauses_ptr = c;
14545 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14546 break;
14547 case OMP_CLAUSE_PRIVATE:
14548 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
14549 {
14550 /* We want private on outer for and firstprivate
14551 on task. */
14552 *gtask_clauses_ptr
14553 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14554 OMP_CLAUSE_FIRSTPRIVATE);
14555 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
14556 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
14557 openacc);
14558 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14559 *gforo_clauses_ptr = c;
14560 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14561 }
14562 else
14563 {
14564 *gtask_clauses_ptr = c;
14565 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14566 }
14567 break;
14568 /* These clauses go into outer taskloop clauses. */
14569 case OMP_CLAUSE_GRAINSIZE:
14570 case OMP_CLAUSE_NUM_TASKS:
14571 case OMP_CLAUSE_NOGROUP:
14572 *gforo_clauses_ptr = c;
14573 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14574 break;
14575 /* The collapse clause is duplicated on both taskloops. */
14576 case OMP_CLAUSE_COLLAPSE:
14577 *gfor_clauses_ptr = c;
14578 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14579 *gforo_clauses_ptr = copy_node (c);
14580 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
14581 break;
14582 /* For lastprivate, keep the clause on inner taskloop, and add
14583 a shared clause on task. If the same decl is also firstprivate,
14584 also add a firstprivate clause on the inner taskloop. */
14585 case OMP_CLAUSE_LASTPRIVATE:
14586 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
14587 {
14588 /* For taskloop C++ lastprivate IVs, we want:
14589 1) private on outer taskloop
14590 2) firstprivate and shared on task
14591 3) lastprivate on inner taskloop */
14592 *gtask_clauses_ptr
14593 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14594 OMP_CLAUSE_FIRSTPRIVATE);
14595 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
14596 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
14597 openacc);
14598 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14599 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
14600 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14601 OMP_CLAUSE_PRIVATE);
14602 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
14603 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
14604 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
14605 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
14606 }
14607 *gfor_clauses_ptr = c;
14608 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14609 *gtask_clauses_ptr
14610 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
14611 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
14612 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
14613 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
14614 gtask_clauses_ptr
14615 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14616 break;
14617 /* The allocate clause is duplicated on task and inner taskloop if
14618 the decl is lastprivate; otherwise it is just put on the task. */
14619 case OMP_CLAUSE_ALLOCATE:
14620 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
14621 && DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
14622 {
14623 /* Additionally, put firstprivate clause on task
14624 for the allocator if it is not constant. */
14625 *gtask_clauses_ptr
14626 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
14627 OMP_CLAUSE_FIRSTPRIVATE);
14628 OMP_CLAUSE_DECL (*gtask_clauses_ptr)
14629 = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
14630 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14631 }
14632 if (lastprivate_uids
14633 && bitmap_bit_p (lastprivate_uids,
14634 DECL_UID (OMP_CLAUSE_DECL (c))))
14635 {
14636 *gfor_clauses_ptr = c;
14637 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14638 *gtask_clauses_ptr = copy_node (c);
14639 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
14640 }
14641 else
14642 {
14643 *gtask_clauses_ptr = c;
14644 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
14645 }
14646 break;
14647 default:
14648 gcc_unreachable ();
14649 }
14650 *gfor_clauses_ptr = NULL_TREE;
14651 *gtask_clauses_ptr = NULL_TREE;
14652 *gforo_clauses_ptr = NULL_TREE;
14653 BITMAP_FREE (lastprivate_uids);
14654 gimple_set_location (gfor, input_location);
14655 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
14656 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
14657 NULL_TREE, NULL_TREE, NULL_TREE);
14658 gimple_set_location (g, input_location);
14659 gimple_omp_task_set_taskloop_p (g, true);
14660 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
14661 gomp_for *gforo
14662 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
14663 gimple_omp_for_collapse (gfor),
14664 gimple_omp_for_pre_body (gfor));
14665 gimple_omp_for_set_pre_body (gfor, NULL);
14666 gimple_omp_for_set_combined_p (gforo, true);
14667 gimple_omp_for_set_combined_into_p (gfor, true);
14668 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
14669 {
14670 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
14671 tree v = create_tmp_var (type);
14672 gimple_omp_for_set_index (gforo, i, v);
14673 t = unshare_expr (gimple_omp_for_initial (gfor, i));
14674 gimple_omp_for_set_initial (gforo, i, t);
14675 gimple_omp_for_set_cond (gforo, i,
14676 gimple_omp_for_cond (gfor, i));
14677 t = unshare_expr (gimple_omp_for_final (gfor, i));
14678 gimple_omp_for_set_final (gforo, i, t);
14679 t = unshare_expr (gimple_omp_for_incr (gfor, i));
14680 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
14681 TREE_OPERAND (t, 0) = v;
14682 gimple_omp_for_set_incr (gforo, i, t);
14683 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
14684 OMP_CLAUSE_DECL (t) = v;
14685 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
14686 gimple_omp_for_set_clauses (gforo, t);
14687 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
14688 {
14689 tree *p1 = NULL, *p2 = NULL;
14690 t = gimple_omp_for_initial (gforo, i);
14691 if (TREE_CODE (t) == TREE_VEC)
14692 p1 = &TREE_VEC_ELT (t, 0);
14693 t = gimple_omp_for_final (gforo, i);
14694 if (TREE_CODE (t) == TREE_VEC)
14695 {
14696 if (p1)
14697 p2 = &TREE_VEC_ELT (t, 0);
14698 else
14699 p1 = &TREE_VEC_ELT (t, 0);
14700 }
14701 if (p1)
14702 {
14703 int j;
14704 for (j = 0; j < i; j++)
14705 if (*p1 == gimple_omp_for_index (gfor, j))
14706 {
14707 *p1 = gimple_omp_for_index (gforo, j);
14708 if (p2)
14709 *p2 = *p1;
14710 break;
14711 }
14712 gcc_assert (j < i);
14713 }
14714 }
14715 }
14716 gimplify_seq_add_stmt (pre_p, gforo);
14717 }
14718 else
14719 gimplify_seq_add_stmt (pre_p, gfor);
14720
14721 if (TREE_CODE (orig_for_stmt) == OMP_FOR)
14722 {
14723 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
14724 unsigned lastprivate_conditional = 0;
14725 while (ctx
14726 && (ctx->region_type == ORT_TARGET_DATA
14727 || ctx->region_type == ORT_TASKGROUP))
14728 ctx = ctx->outer_context;
14729 if (ctx && (ctx->region_type & ORT_PARALLEL) != 0)
14730 for (tree c = gimple_omp_for_clauses (gfor);
14731 c; c = OMP_CLAUSE_CHAIN (c))
14732 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
14733 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
14734 ++lastprivate_conditional;
14735 if (lastprivate_conditional)
14736 {
14737 struct omp_for_data fd;
14738 omp_extract_for_data (gfor, &fd, NULL);
14739 tree type = build_array_type_nelts (unsigned_type_for (fd.iter_type),
14740 lastprivate_conditional);
14741 tree var = create_tmp_var_raw (type);
14742 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
14743 OMP_CLAUSE_DECL (c) = var;
14744 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
14745 gimple_omp_for_set_clauses (gfor, c);
14746 omp_add_variable (ctx, var, GOVD_CONDTEMP | GOVD_SEEN);
14747 }
14748 }
14749 else if (TREE_CODE (orig_for_stmt) == OMP_SIMD)
14750 {
14751 unsigned lastprivate_conditional = 0;
14752 for (tree c = gimple_omp_for_clauses (gfor); c; c = OMP_CLAUSE_CHAIN (c))
14753 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
14754 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
14755 ++lastprivate_conditional;
14756 if (lastprivate_conditional)
14757 {
14758 struct omp_for_data fd;
14759 omp_extract_for_data (gfor, &fd, NULL);
14760 tree type = unsigned_type_for (fd.iter_type);
14761 while (lastprivate_conditional--)
14762 {
14763 tree c = build_omp_clause (UNKNOWN_LOCATION,
14764 OMP_CLAUSE__CONDTEMP_);
14765 OMP_CLAUSE_DECL (c) = create_tmp_var (type);
14766 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
14767 gimple_omp_for_set_clauses (gfor, c);
14768 }
14769 }
14770 }
14771
14772 if (ret != GS_ALL_DONE)
14773 return GS_ERROR;
14774 *expr_p = NULL_TREE;
14775 return GS_ALL_DONE;
14776 }
14777
14778 /* Helper for gimplify_omp_loop, called through walk_tree. */
14779
14780 static tree
14781 note_no_context_vars (tree *tp, int *, void *data)
14782 {
14783 if (VAR_P (*tp)
14784 && DECL_CONTEXT (*tp) == NULL_TREE
14785 && !is_global_var (*tp))
14786 {
14787 vec<tree> *d = (vec<tree> *) data;
14788 d->safe_push (*tp);
14789 DECL_CONTEXT (*tp) = current_function_decl;
14790 }
14791 return NULL_TREE;
14792 }
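/* A minimal usage sketch of the helper above, mirroring the reduction
   handling in gimplify_omp_loop below:

     auto_vec<tree> no_ctx;
     walk_tree_without_duplicates (&expr, note_no_context_vars, &no_ctx);
     ... copy/remap EXPR while every decl has a DECL_CONTEXT ...
     for (tree d : no_ctx)
       DECL_CONTEXT (d) = NULL_TREE;

   The final loop restores the decls to their original context-less
   state.  */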
14793
14794 /* Gimplify the gross structure of an OMP_LOOP statement. */
14795
14796 static enum gimplify_status
14797 gimplify_omp_loop (tree *expr_p, gimple_seq *pre_p)
14798 {
14799 tree for_stmt = *expr_p;
14800 tree clauses = OMP_FOR_CLAUSES (for_stmt);
14801 struct gimplify_omp_ctx *octx = gimplify_omp_ctxp;
14802 enum omp_clause_bind_kind kind = OMP_CLAUSE_BIND_THREAD;
14803 int i;
14804
14805 /* If order is not present, the behavior is as if order(concurrent)
14806 appeared. */
14807 tree order = omp_find_clause (clauses, OMP_CLAUSE_ORDER);
14808 if (order == NULL_TREE)
14809 {
14810 order = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_ORDER);
14811 OMP_CLAUSE_CHAIN (order) = clauses;
14812 OMP_FOR_CLAUSES (for_stmt) = clauses = order;
14813 }
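/* I.e. (a sketch): a plain "#pragma omp loop" is handled exactly like
   "#pragma omp loop order(concurrent)".  */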
14814
14815 tree bind = omp_find_clause (clauses, OMP_CLAUSE_BIND);
14816 if (bind == NULL_TREE)
14817 {
14818 if (!flag_openmp) /* flag_openmp_simd */
14819 ;
14820 else if (octx && (octx->region_type & ORT_TEAMS) != 0)
14821 kind = OMP_CLAUSE_BIND_TEAMS;
14822 else if (octx && (octx->region_type & ORT_PARALLEL) != 0)
14823 kind = OMP_CLAUSE_BIND_PARALLEL;
14824 else
14825 {
14826 for (; octx; octx = octx->outer_context)
14827 {
14828 if ((octx->region_type & ORT_ACC) != 0
14829 || octx->region_type == ORT_NONE
14830 || octx->region_type == ORT_IMPLICIT_TARGET)
14831 continue;
14832 break;
14833 }
14834 if (octx == NULL && !in_omp_construct)
14835 error_at (EXPR_LOCATION (for_stmt),
14836 "%<bind%> clause not specified on a %<loop%> "
14837 "construct not nested inside another OpenMP construct");
14838 }
14839 bind = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_BIND);
14840 OMP_CLAUSE_CHAIN (bind) = clauses;
14841 OMP_CLAUSE_BIND_KIND (bind) = kind;
14842 OMP_FOR_CLAUSES (for_stmt) = bind;
14843 }
14844 else
14845 switch (OMP_CLAUSE_BIND_KIND (bind))
14846 {
14847 case OMP_CLAUSE_BIND_THREAD:
14848 break;
14849 case OMP_CLAUSE_BIND_PARALLEL:
14850 if (!flag_openmp) /* flag_openmp_simd */
14851 {
14852 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
14853 break;
14854 }
14855 for (; octx; octx = octx->outer_context)
14856 if (octx->region_type == ORT_SIMD
14857 && omp_find_clause (octx->clauses, OMP_CLAUSE_BIND) == NULL_TREE)
14858 {
14859 error_at (EXPR_LOCATION (for_stmt),
14860 "%<bind(parallel)%> on a %<loop%> construct nested "
14861 "inside %<simd%> construct");
14862 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
14863 break;
14864 }
14865 kind = OMP_CLAUSE_BIND_PARALLEL;
14866 break;
14867 case OMP_CLAUSE_BIND_TEAMS:
14868 if (!flag_openmp) /* flag_openmp_simd */
14869 {
14870 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
14871 break;
14872 }
14873 if ((octx
14874 && octx->region_type != ORT_IMPLICIT_TARGET
14875 && octx->region_type != ORT_NONE
14876 && (octx->region_type & ORT_TEAMS) == 0)
14877 || in_omp_construct)
14878 {
14879 error_at (EXPR_LOCATION (for_stmt),
14880 "%<bind(teams)%> on a %<loop%> region not strictly "
14881 "nested inside of a %<teams%> region");
14882 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
14883 break;
14884 }
14885 kind = OMP_CLAUSE_BIND_TEAMS;
14886 break;
14887 default:
14888 gcc_unreachable ();
14889 }
14890
14891 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
14892 switch (OMP_CLAUSE_CODE (*pc))
14893 {
14894 case OMP_CLAUSE_REDUCTION:
14895 if (OMP_CLAUSE_REDUCTION_INSCAN (*pc))
14896 {
14897 error_at (OMP_CLAUSE_LOCATION (*pc),
14898 "%<inscan%> %<reduction%> clause on "
14899 "%qs construct", "loop");
14900 OMP_CLAUSE_REDUCTION_INSCAN (*pc) = 0;
14901 }
14902 if (OMP_CLAUSE_REDUCTION_TASK (*pc))
14903 {
14904 error_at (OMP_CLAUSE_LOCATION (*pc),
14905 "invalid %<task%> reduction modifier on construct "
14906 "other than %<parallel%>, %qs or %<sections%>",
14907 lang_GNU_Fortran () ? "do" : "for");
14908 OMP_CLAUSE_REDUCTION_TASK (*pc) = 0;
14909 }
14910 pc = &OMP_CLAUSE_CHAIN (*pc);
14911 break;
14912 case OMP_CLAUSE_LASTPRIVATE:
14913 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14914 {
14915 tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
14916 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
14917 if (OMP_CLAUSE_DECL (*pc) == TREE_OPERAND (t, 0))
14918 break;
14919 if (OMP_FOR_ORIG_DECLS (for_stmt)
14920 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
14921 i)) == TREE_LIST
14922 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
14923 i)))
14924 {
14925 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
14926 if (OMP_CLAUSE_DECL (*pc) == TREE_PURPOSE (orig))
14927 break;
14928 }
14929 }
14930 if (i == TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)))
14931 {
14932 error_at (OMP_CLAUSE_LOCATION (*pc),
14933 "%<lastprivate%> clause on a %<loop%> construct refers "
14934 "to a variable %qD which is not the loop iterator",
14935 OMP_CLAUSE_DECL (*pc));
14936 *pc = OMP_CLAUSE_CHAIN (*pc);
14937 break;
14938 }
14939 pc = &OMP_CLAUSE_CHAIN (*pc);
14940 break;
14941 default:
14942 pc = &OMP_CLAUSE_CHAIN (*pc);
14943 break;
14944 }
14945
14946 TREE_SET_CODE (for_stmt, OMP_SIMD);
14947
14948 int last;
14949 switch (kind)
14950 {
14951 case OMP_CLAUSE_BIND_THREAD: last = 0; break;
14952 case OMP_CLAUSE_BIND_PARALLEL: last = 1; break;
14953 case OMP_CLAUSE_BIND_TEAMS: last = 2; break;
14954 }
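/* The pass count mirrors the eventual expansion (a sketch):

     bind(thread):    loop -> simd
     bind(parallel):  loop -> for simd
     bind(teams):     loop -> distribute parallel for simd

   Pass 1 wraps the OMP_SIMD in an OMP_FOR; pass 2 additionally wraps
   the result in a combined OMP_PARALLEL and an OMP_DISTRIBUTE.  */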
14955 for (int pass = 1; pass <= last; pass++)
14956 {
14957 if (pass == 2)
14958 {
14959 tree bind = build3 (BIND_EXPR, void_type_node, NULL, NULL,
14960 make_node (BLOCK));
14961 append_to_statement_list (*expr_p, &BIND_EXPR_BODY (bind));
14962 *expr_p = make_node (OMP_PARALLEL);
14963 TREE_TYPE (*expr_p) = void_type_node;
14964 OMP_PARALLEL_BODY (*expr_p) = bind;
14965 OMP_PARALLEL_COMBINED (*expr_p) = 1;
14966 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (for_stmt));
14967 tree *pc = &OMP_PARALLEL_CLAUSES (*expr_p);
14968 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
14969 if (OMP_FOR_ORIG_DECLS (for_stmt)
14970 && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i))
14971 == TREE_LIST))
14972 {
14973 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
14974 if (TREE_PURPOSE (elt) && TREE_VALUE (elt))
14975 {
14976 *pc = build_omp_clause (UNKNOWN_LOCATION,
14977 OMP_CLAUSE_FIRSTPRIVATE);
14978 OMP_CLAUSE_DECL (*pc) = TREE_VALUE (elt);
14979 pc = &OMP_CLAUSE_CHAIN (*pc);
14980 }
14981 }
14982 }
14983 tree t = make_node (pass == 2 ? OMP_DISTRIBUTE : OMP_FOR);
14984 tree *pc = &OMP_FOR_CLAUSES (t);
14985 TREE_TYPE (t) = void_type_node;
14986 OMP_FOR_BODY (t) = *expr_p;
14987 SET_EXPR_LOCATION (t, EXPR_LOCATION (for_stmt));
14988 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
14989 switch (OMP_CLAUSE_CODE (c))
14990 {
14991 case OMP_CLAUSE_BIND:
14992 case OMP_CLAUSE_ORDER:
14993 case OMP_CLAUSE_COLLAPSE:
14994 *pc = copy_node (c);
14995 pc = &OMP_CLAUSE_CHAIN (*pc);
14996 break;
14997 case OMP_CLAUSE_PRIVATE:
14998 case OMP_CLAUSE_FIRSTPRIVATE:
14999 /* Only needed on innermost. */
15000 break;
15001 case OMP_CLAUSE_LASTPRIVATE:
15002 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) && pass != last)
15003 {
15004 *pc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
15005 OMP_CLAUSE_FIRSTPRIVATE);
15006 OMP_CLAUSE_DECL (*pc) = OMP_CLAUSE_DECL (c);
15007 lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
15008 pc = &OMP_CLAUSE_CHAIN (*pc);
15009 }
15010 *pc = copy_node (c);
15011 OMP_CLAUSE_LASTPRIVATE_STMT (*pc) = NULL_TREE;
15012 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
15013 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
15014 {
15015 if (pass != last)
15016 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc) = 1;
15017 else
15018 lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
15019 OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc) = 0;
15020 }
15021 pc = &OMP_CLAUSE_CHAIN (*pc);
15022 break;
15023 case OMP_CLAUSE_REDUCTION:
15024 *pc = copy_node (c);
15025 OMP_CLAUSE_DECL (*pc) = unshare_expr (OMP_CLAUSE_DECL (c));
15026 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
15027 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc))
15028 {
15029 auto_vec<tree> no_context_vars;
15030 int walk_subtrees = 0;
15031 note_no_context_vars (&OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
15032 &walk_subtrees, &no_context_vars);
15033 if (tree p = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c))
15034 note_no_context_vars (&p, &walk_subtrees, &no_context_vars);
15035 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (c),
15036 note_no_context_vars,
15037 &no_context_vars);
15038 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (c),
15039 note_no_context_vars,
15040 &no_context_vars);
15041
15042 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc)
15043 = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c));
15044 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
15045 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc)
15046 = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c));
15047
15048 hash_map<tree, tree> decl_map;
15049 decl_map.put (OMP_CLAUSE_DECL (c), OMP_CLAUSE_DECL (c));
15050 decl_map.put (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
15051 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc));
15052 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
15053 decl_map.put (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
15054 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc));
15055
15056 copy_body_data id;
15057 memset (&id, 0, sizeof (id));
15058 id.src_fn = current_function_decl;
15059 id.dst_fn = current_function_decl;
15060 id.src_cfun = cfun;
15061 id.decl_map = &decl_map;
15062 id.copy_decl = copy_decl_no_change;
15063 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
15064 id.transform_new_cfg = true;
15065 id.transform_return_to_modify = false;
15066 id.eh_lp_nr = 0;
15067 walk_tree (&OMP_CLAUSE_REDUCTION_INIT (*pc), copy_tree_body_r,
15068 &id, NULL);
15069 walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (*pc), copy_tree_body_r,
15070 &id, NULL);
15071
15072 for (tree d : no_context_vars)
15073 {
15074 DECL_CONTEXT (d) = NULL_TREE;
15075 DECL_CONTEXT (*decl_map.get (d)) = NULL_TREE;
15076 }
15077 }
15078 else
15079 {
15080 OMP_CLAUSE_REDUCTION_INIT (*pc)
15081 = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c));
15082 OMP_CLAUSE_REDUCTION_MERGE (*pc)
15083 = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c));
15084 }
15085 pc = &OMP_CLAUSE_CHAIN (*pc);
15086 break;
15087 default:
15088 gcc_unreachable ();
15089 }
15090 *pc = NULL_TREE;
15091 *expr_p = t;
15092 }
15093 return gimplify_expr (expr_p, pre_p, NULL, is_gimple_stmt, fb_none);
15094 }
15095
15096
15097 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
15098 of OMP_TARGET's body. */
15099
15100 static tree
15101 find_omp_teams (tree *tp, int *walk_subtrees, void *)
15102 {
15103 *walk_subtrees = 0;
15104 switch (TREE_CODE (*tp))
15105 {
15106 case OMP_TEAMS:
15107 return *tp;
15108 case BIND_EXPR:
15109 case STATEMENT_LIST:
15110 *walk_subtrees = 1;
15111 break;
15112 default:
15113 break;
15114 }
15115 return NULL_TREE;
15116 }
15117
15118 /* Helper function of optimize_target_teams, determine if the expression
15119 can be computed safely before the target construct on the host. */
15120
15121 static tree
15122 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
15123 {
15124 splay_tree_node n;
15125
15126 if (TYPE_P (*tp))
15127 {
15128 *walk_subtrees = 0;
15129 return NULL_TREE;
15130 }
15131 switch (TREE_CODE (*tp))
15132 {
15133 case VAR_DECL:
15134 case PARM_DECL:
15135 case RESULT_DECL:
15136 *walk_subtrees = 0;
15137 if (error_operand_p (*tp)
15138 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
15139 || DECL_HAS_VALUE_EXPR_P (*tp)
15140 || DECL_THREAD_LOCAL_P (*tp)
15141 || TREE_SIDE_EFFECTS (*tp)
15142 || TREE_THIS_VOLATILE (*tp))
15143 return *tp;
15144 if (is_global_var (*tp)
15145 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
15146 || lookup_attribute ("omp declare target link",
15147 DECL_ATTRIBUTES (*tp))))
15148 return *tp;
15149 if (VAR_P (*tp)
15150 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
15151 && !is_global_var (*tp)
15152 && decl_function_context (*tp) == current_function_decl)
15153 return *tp;
15154 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
15155 (splay_tree_key) *tp);
15156 if (n == NULL)
15157 {
15158 if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
15159 return NULL_TREE;
15160 return *tp;
15161 }
15162 else if (n->value & GOVD_LOCAL)
15163 return *tp;
15164 else if (n->value & GOVD_FIRSTPRIVATE)
15165 return NULL_TREE;
15166 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
15167 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
15168 return NULL_TREE;
15169 return *tp;
15170 case INTEGER_CST:
15171 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
15172 return *tp;
15173 return NULL_TREE;
15174 case TARGET_EXPR:
15175 if (TARGET_EXPR_INITIAL (*tp)
15176 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
15177 return *tp;
15178 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
15179 walk_subtrees, NULL);
15180 /* Allow some reasonable subset of integral arithmetic. */
15181 case PLUS_EXPR:
15182 case MINUS_EXPR:
15183 case MULT_EXPR:
15184 case TRUNC_DIV_EXPR:
15185 case CEIL_DIV_EXPR:
15186 case FLOOR_DIV_EXPR:
15187 case ROUND_DIV_EXPR:
15188 case TRUNC_MOD_EXPR:
15189 case CEIL_MOD_EXPR:
15190 case FLOOR_MOD_EXPR:
15191 case ROUND_MOD_EXPR:
15192 case RDIV_EXPR:
15193 case EXACT_DIV_EXPR:
15194 case MIN_EXPR:
15195 case MAX_EXPR:
15196 case LSHIFT_EXPR:
15197 case RSHIFT_EXPR:
15198 case BIT_IOR_EXPR:
15199 case BIT_XOR_EXPR:
15200 case BIT_AND_EXPR:
15201 case NEGATE_EXPR:
15202 case ABS_EXPR:
15203 case BIT_NOT_EXPR:
15204 case NON_LVALUE_EXPR:
15205 CASE_CONVERT:
15206 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
15207 return *tp;
15208 return NULL_TREE;
15209 /* And disallow anything else, except for comparisons. */
15210 default:
15211 if (COMPARISON_CLASS_P (*tp))
15212 return NULL_TREE;
15213 return *tp;
15214 }
15215 }
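/* For example (a sketch, with hypothetical n, m, p and foo): given

     int n, m;
     #pragma omp target teams num_teams(n + 4 * m) firstprivate(n, m)

   n + 4 * m is computable on the host before entering the target
   region, whereas num_teams(foo (n)) or num_teams(*p) would not be.  */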
15216
15217 /* Try to determine if the num_teams and/or thread_limit expressions
15218 can have their values determined already before entering the
15219 target construct.
15220 INTEGER_CSTs trivially can; so can integral decls that are
15221 firstprivate (explicitly or implicitly) or explicitly mapped with
15222 map(always, to:) or map(always, tofrom:) on the target region, and
15223 expressions involving simple arithmetic on those. Function calls
15224 are not OK, nor is dereferencing anything, etc.
15225 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
15226 EXPR based on what we find:
15227 0 stands for clause not specified at all, use implementation default
15228 -1 stands for value that can't be determined easily before entering
15229 the target construct.
15230 If teams construct is not present at all, use 1 for num_teams
15231 and 0 for thread_limit (only one team is involved, and the thread
15232 limit is implementation defined). */
15233
15234 static void
15235 optimize_target_teams (tree target, gimple_seq *pre_p)
15236 {
15237 tree body = OMP_BODY (target);
15238 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
15239 tree num_teams_lower = NULL_TREE;
15240 tree num_teams_upper = integer_zero_node;
15241 tree thread_limit = integer_zero_node;
15242 location_t num_teams_loc = EXPR_LOCATION (target);
15243 location_t thread_limit_loc = EXPR_LOCATION (target);
15244 tree c, *p, expr;
15245 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
15246
15247 if (teams == NULL_TREE)
15248 num_teams_upper = integer_one_node;
15249 else
15250 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
15251 {
15252 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
15253 {
15254 p = &num_teams_upper;
15255 num_teams_loc = OMP_CLAUSE_LOCATION (c);
15256 if (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c))
15257 {
15258 expr = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c);
15259 if (TREE_CODE (expr) == INTEGER_CST)
15260 num_teams_lower = expr;
15261 else if (walk_tree (&expr, computable_teams_clause,
15262 NULL, NULL))
15263 num_teams_lower = integer_minus_one_node;
15264 else
15265 {
15266 num_teams_lower = expr;
15267 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
15268 if (gimplify_expr (&num_teams_lower, pre_p, NULL,
15269 is_gimple_val, fb_rvalue, false)
15270 == GS_ERROR)
15271 {
15272 gimplify_omp_ctxp = target_ctx;
15273 num_teams_lower = integer_minus_one_node;
15274 }
15275 else
15276 {
15277 gimplify_omp_ctxp = target_ctx;
15278 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
15279 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
15280 = num_teams_lower;
15281 }
15282 }
15283 }
15284 }
15285 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
15286 {
15287 p = &thread_limit;
15288 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
15289 }
15290 else
15291 continue;
15292 expr = OMP_CLAUSE_OPERAND (c, 0);
15293 if (TREE_CODE (expr) == INTEGER_CST)
15294 {
15295 *p = expr;
15296 continue;
15297 }
15298 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
15299 {
15300 *p = integer_minus_one_node;
15301 continue;
15302 }
15303 *p = expr;
15304 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
15305 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
15306 == GS_ERROR)
15307 {
15308 gimplify_omp_ctxp = target_ctx;
15309 *p = integer_minus_one_node;
15310 continue;
15311 }
15312 gimplify_omp_ctxp = target_ctx;
15313 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
15314 OMP_CLAUSE_OPERAND (c, 0) = *p;
15315 }
15316 if (!omp_find_clause (OMP_TARGET_CLAUSES (target), OMP_CLAUSE_THREAD_LIMIT))
15317 {
15318 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
15319 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
15320 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
15321 OMP_TARGET_CLAUSES (target) = c;
15322 }
15323 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
15324 OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (c) = num_teams_upper;
15325 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c) = num_teams_lower;
15326 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
15327 OMP_TARGET_CLAUSES (target) = c;
15328 }
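/* Two sketches of the effect of optimize_target_teams: for

     #pragma omp target
     ;

   (no teams construct at all) num_teams(1) and thread_limit(0) clauses
   are added to the target, while for a hypothetical non-constant call

     #pragma omp target teams num_teams(foo ())

   num_teams(-1) is added instead, deferring the value to the runtime.  */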
15329
15330 /* Gimplify the gross structure of several OMP constructs. */
15331
15332 static void
15333 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
15334 {
15335 tree expr = *expr_p;
15336 gimple *stmt;
15337 gimple_seq body = NULL;
15338 enum omp_region_type ort;
15339
15340 switch (TREE_CODE (expr))
15341 {
15342 case OMP_SECTIONS:
15343 case OMP_SINGLE:
15344 ort = ORT_WORKSHARE;
15345 break;
15346 case OMP_SCOPE:
15347 ort = ORT_TASKGROUP;
15348 break;
15349 case OMP_TARGET:
15350 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
15351 break;
15352 case OACC_KERNELS:
15353 ort = ORT_ACC_KERNELS;
15354 break;
15355 case OACC_PARALLEL:
15356 ort = ORT_ACC_PARALLEL;
15357 break;
15358 case OACC_SERIAL:
15359 ort = ORT_ACC_SERIAL;
15360 break;
15361 case OACC_DATA:
15362 ort = ORT_ACC_DATA;
15363 break;
15364 case OMP_TARGET_DATA:
15365 ort = ORT_TARGET_DATA;
15366 break;
15367 case OMP_TEAMS:
15368 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
15369 if (gimplify_omp_ctxp == NULL
15370 || gimplify_omp_ctxp->region_type == ORT_IMPLICIT_TARGET)
15371 ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
15372 break;
15373 case OACC_HOST_DATA:
15374 ort = ORT_ACC_HOST_DATA;
15375 break;
15376 default:
15377 gcc_unreachable ();
15378 }
15379
15380 bool save_in_omp_construct = in_omp_construct;
15381 if ((ort & ORT_ACC) == 0)
15382 in_omp_construct = false;
15383 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
15384 TREE_CODE (expr));
15385 if (TREE_CODE (expr) == OMP_TARGET)
15386 optimize_target_teams (expr, pre_p);
15387 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
15388 || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
15389 {
15390 push_gimplify_context ();
15391 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
15392 if (gimple_code (g) == GIMPLE_BIND)
15393 pop_gimplify_context (g);
15394 else
15395 pop_gimplify_context (NULL);
15396 if ((ort & ORT_TARGET_DATA) != 0)
15397 {
15398 enum built_in_function end_ix;
15399 switch (TREE_CODE (expr))
15400 {
15401 case OACC_DATA:
15402 case OACC_HOST_DATA:
15403 end_ix = BUILT_IN_GOACC_DATA_END;
15404 break;
15405 case OMP_TARGET_DATA:
15406 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
15407 break;
15408 default:
15409 gcc_unreachable ();
15410 }
15411 tree fn = builtin_decl_explicit (end_ix);
15412 g = gimple_build_call (fn, 0);
15413 gimple_seq cleanup = NULL;
15414 gimple_seq_add_stmt (&cleanup, g);
15415 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
15416 body = NULL;
15417 gimple_seq_add_stmt (&body, g);
15418 }
15419 }
15420 else
15421 gimplify_and_add (OMP_BODY (expr), &body);
15422 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
15423 TREE_CODE (expr));
15424 in_omp_construct = save_in_omp_construct;
15425
15426 switch (TREE_CODE (expr))
15427 {
15428 case OACC_DATA:
15429 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
15430 OMP_CLAUSES (expr));
15431 break;
15432 case OACC_HOST_DATA:
15433 if (omp_find_clause (OMP_CLAUSES (expr), OMP_CLAUSE_IF_PRESENT))
15434 {
15435 for (tree c = OMP_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
15436 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
15437 OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c) = 1;
15438 }
15439
15440 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
15441 OMP_CLAUSES (expr));
15442 break;
15443 case OACC_KERNELS:
15444 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
15445 OMP_CLAUSES (expr));
15446 break;
15447 case OACC_PARALLEL:
15448 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
15449 OMP_CLAUSES (expr));
15450 break;
15451 case OACC_SERIAL:
15452 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_SERIAL,
15453 OMP_CLAUSES (expr));
15454 break;
15455 case OMP_SECTIONS:
15456 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
15457 break;
15458 case OMP_SINGLE:
15459 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
15460 break;
15461 case OMP_SCOPE:
15462 stmt = gimple_build_omp_scope (body, OMP_CLAUSES (expr));
15463 break;
15464 case OMP_TARGET:
15465 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
15466 OMP_CLAUSES (expr));
15467 break;
15468 case OMP_TARGET_DATA:
15469 /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
15470 to be evaluated before the use_device_{ptr,addr} clauses if they
15471 refer to the same variables. */
15472 {
15473 tree use_device_clauses;
15474 tree *pc, *uc = &use_device_clauses;
15475 for (pc = &OMP_CLAUSES (expr); *pc; )
15476 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_PTR
15477 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_ADDR)
15478 {
15479 *uc = *pc;
15480 *pc = OMP_CLAUSE_CHAIN (*pc);
15481 uc = &OMP_CLAUSE_CHAIN (*uc);
15482 }
15483 else
15484 pc = &OMP_CLAUSE_CHAIN (*pc);
15485 *uc = NULL_TREE;
15486 *pc = use_device_clauses;
15487 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
15488 OMP_CLAUSES (expr));
15489 }
15490 break;
15491 case OMP_TEAMS:
15492 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
15493 if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
15494 gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
15495 break;
15496 default:
15497 gcc_unreachable ();
15498 }
15499
15500 gimplify_seq_add_stmt (pre_p, stmt);
15501 *expr_p = NULL_TREE;
15502 }
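/* A sketch of why the OMP_TARGET_DATA clause reordering above matters:
   for

     #pragma omp target data map(tofrom: p[0:n]) use_device_ptr(p)

   the map of P must be processed first so that use_device_ptr(p) finds
   the freshly established device mapping; moving the
   use_device_{ptr,addr} clauses to the end guarantees that even when
   the user wrote them first.  */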
15503
15504 /* Gimplify the gross structure of OpenACC enter/exit data and update,
15505 and OpenMP target update and target enter/exit data constructs. */
15506
15507 static void
15508 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
15509 {
15510 tree expr = *expr_p;
15511 int kind;
15512 gomp_target *stmt;
15513 enum omp_region_type ort = ORT_WORKSHARE;
15514
15515 switch (TREE_CODE (expr))
15516 {
15517 case OACC_ENTER_DATA:
15518 kind = GF_OMP_TARGET_KIND_OACC_ENTER_DATA;
15519 ort = ORT_ACC;
15520 break;
15521 case OACC_EXIT_DATA:
15522 kind = GF_OMP_TARGET_KIND_OACC_EXIT_DATA;
15523 ort = ORT_ACC;
15524 break;
15525 case OACC_UPDATE:
15526 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
15527 ort = ORT_ACC;
15528 break;
15529 case OMP_TARGET_UPDATE:
15530 kind = GF_OMP_TARGET_KIND_UPDATE;
15531 break;
15532 case OMP_TARGET_ENTER_DATA:
15533 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
15534 break;
15535 case OMP_TARGET_EXIT_DATA:
15536 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
15537 break;
15538 default:
15539 gcc_unreachable ();
15540 }
15541 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
15542 ort, TREE_CODE (expr));
15543 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
15544 TREE_CODE (expr));
15545 if (TREE_CODE (expr) == OACC_UPDATE
15546 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
15547 OMP_CLAUSE_IF_PRESENT))
15548 {
15549 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
15550 clause. */
15551 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
15552 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
15553 switch (OMP_CLAUSE_MAP_KIND (c))
15554 {
15555 case GOMP_MAP_FORCE_TO:
15556 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
15557 break;
15558 case GOMP_MAP_FORCE_FROM:
15559 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
15560 break;
15561 default:
15562 break;
15563 }
15564 }
15565 else if (TREE_CODE (expr) == OACC_EXIT_DATA
15566 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
15567 OMP_CLAUSE_FINALIZE))
15568 {
15569 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
15570 semantics. */
15571 bool have_clause = false;
15572 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
15573 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
15574 switch (OMP_CLAUSE_MAP_KIND (c))
15575 {
15576 case GOMP_MAP_FROM:
15577 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
15578 have_clause = true;
15579 break;
15580 case GOMP_MAP_RELEASE:
15581 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
15582 have_clause = true;
15583 break;
15584 case GOMP_MAP_TO_PSET:
15585 /* Fortran arrays with descriptors must map that descriptor when
15586 doing standalone "attach" operations (in OpenACC). In that
15587 case GOMP_MAP_TO_PSET appears by itself with no preceding
15588 clause (see trans-openmp.cc:gfc_trans_omp_clauses). */
15589 break;
15590 case GOMP_MAP_POINTER:
15591 /* TODO PR92929: we may see these here, but they'll always follow
15592 one of the clauses above, and will be handled by libgomp as
15593 one group, so no handling required here. */
15594 gcc_assert (have_clause);
15595 break;
15596 case GOMP_MAP_DETACH:
15597 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_DETACH);
15598 have_clause = false;
15599 break;
15600 case GOMP_MAP_STRUCT:
15601 have_clause = false;
15602 break;
15603 default:
15604 gcc_unreachable ();
15605 }
15606 }
15607 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
15608
15609 gimplify_seq_add_stmt (pre_p, stmt);
15610 *expr_p = NULL_TREE;
15611 }
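/* E.g. (a sketch): an OpenACC

     #pragma acc exit data copyout(a[0:n]) finalize

   arrives here as a GOMP_MAP_FROM mapping plus an OMP_CLAUSE_FINALIZE
   clause, and the loop above rewrites the map kind to
   GOMP_MAP_FORCE_FROM so the runtime copies the data back and removes
   the mapping unconditionally.  */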
15612
15613 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
15614 stabilized the lhs of the atomic operation as *ADDR. Return true if
15615 EXPR is this stabilized form. */
15616
15617 static bool
15618 goa_lhs_expr_p (tree expr, tree addr)
15619 {
15620 /* Also include casts to other type variants. The C front end is fond
15621 of adding these for e.g. volatile variables. This is like
15622 STRIP_TYPE_NOPS but includes the main variant lookup. */
15623 STRIP_USELESS_TYPE_CONVERSION (expr);
15624
15625 if (TREE_CODE (expr) == INDIRECT_REF)
15626 {
15627 expr = TREE_OPERAND (expr, 0);
15628 while (expr != addr
15629 && (CONVERT_EXPR_P (expr)
15630 || TREE_CODE (expr) == NON_LVALUE_EXPR)
15631 && TREE_CODE (expr) == TREE_CODE (addr)
15632 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
15633 {
15634 expr = TREE_OPERAND (expr, 0);
15635 addr = TREE_OPERAND (addr, 0);
15636 }
15637 if (expr == addr)
15638 return true;
15639 return (TREE_CODE (addr) == ADDR_EXPR
15640 && TREE_CODE (expr) == ADDR_EXPR
15641 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
15642 }
15643 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
15644 return true;
15645 return false;
15646 }
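/* Illustrative sketch (hypothetical input): if the front end
   stabilized the atomic lhs 'x' as *&x, then ADDR is the ADDR_EXPR
   '&x'; goa_lhs_expr_p accepts both the INDIRECT_REF '*&x' (the
   dereference cancels against ADDR) and a direct appearance of 'x'
   matching TREE_OPERAND (addr, 0).  */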
15647
15648 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
15649 expression does not involve the lhs, evaluate it into a temporary.
15650 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
15651 or -1 if an error was encountered. */
15652
15653 static int
15654 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
15655 tree lhs_var, tree &target_expr, bool rhs, int depth)
15656 {
15657 tree expr = *expr_p;
15658 int saw_lhs = 0;
15659
15660 if (goa_lhs_expr_p (expr, lhs_addr))
15661 {
15662 if (pre_p)
15663 *expr_p = lhs_var;
15664 return 1;
15665 }
15666 if (is_gimple_val (expr))
15667 return 0;
15668
15669 /* The maximum depth at which the lhs can appear is reached for the
15670 pattern __builtin_clear_padding (...), __builtin_clear_padding (...),
15671 __builtin_memcmp (&TARGET_EXPR <lhs, >, ...) == 0 ? ... : lhs; */
15672 if (++depth > 7)
15673 goto finish;
15674
15675 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
15676 {
15677 case tcc_binary:
15678 case tcc_comparison:
15679 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
15680 lhs_var, target_expr, true, depth);
15681 /* FALLTHRU */
15682 case tcc_unary:
15683 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
15684 lhs_var, target_expr, true, depth);
15685 break;
15686 case tcc_expression:
15687 switch (TREE_CODE (expr))
15688 {
15689 case TRUTH_ANDIF_EXPR:
15690 case TRUTH_ORIF_EXPR:
15691 case TRUTH_AND_EXPR:
15692 case TRUTH_OR_EXPR:
15693 case TRUTH_XOR_EXPR:
15694 case BIT_INSERT_EXPR:
15695 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
15696 lhs_addr, lhs_var, target_expr, true,
15697 depth);
15698 /* FALLTHRU */
15699 case TRUTH_NOT_EXPR:
15700 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15701 lhs_addr, lhs_var, target_expr, true,
15702 depth);
15703 break;
15704 case MODIFY_EXPR:
15705 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
15706 target_expr, true, depth))
15707 break;
15708 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
15709 lhs_addr, lhs_var, target_expr, true,
15710 depth);
15711 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15712 lhs_addr, lhs_var, target_expr, false,
15713 depth);
15714 break;
15716 case ADDR_EXPR:
15717 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
15718 target_expr, true, depth))
15719 break;
15720 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15721 lhs_addr, lhs_var, target_expr, false,
15722 depth);
15723 break;
15724 case COMPOUND_EXPR:
15725 /* Break out any preevaluations from cp_build_modify_expr. */
15726 for (; TREE_CODE (expr) == COMPOUND_EXPR;
15727 expr = TREE_OPERAND (expr, 1))
15728 {
15729 /* Special-case __builtin_clear_padding call before
15730 __builtin_memcmp. */
15731 if (TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR)
15732 {
15733 tree fndecl = get_callee_fndecl (TREE_OPERAND (expr, 0));
15734 if (fndecl
15735 && fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING)
15736 && VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
15737 && (!pre_p
15738 || goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL,
15739 lhs_addr, lhs_var,
15740 target_expr, true, depth)))
15741 {
15742 if (pre_p)
15743 *expr_p = expr;
15744 saw_lhs = goa_stabilize_expr (&TREE_OPERAND (expr, 0),
15745 pre_p, lhs_addr, lhs_var,
15746 target_expr, true, depth);
15747 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1),
15748 pre_p, lhs_addr, lhs_var,
15749 target_expr, rhs, depth);
15750 return saw_lhs;
15751 }
15752 }
15753
15754 if (pre_p)
15755 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
15756 }
15757 if (!pre_p)
15758 return goa_stabilize_expr (&expr, pre_p, lhs_addr, lhs_var,
15759 target_expr, rhs, depth);
15760 *expr_p = expr;
15761 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var,
15762 target_expr, rhs, depth);
15763 case COND_EXPR:
15764 if (!goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL, lhs_addr,
15765 lhs_var, target_expr, true, depth))
15766 break;
15767 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15768 lhs_addr, lhs_var, target_expr, true,
15769 depth);
15770 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
15771 lhs_addr, lhs_var, target_expr, true,
15772 depth);
15773 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 2), pre_p,
15774 lhs_addr, lhs_var, target_expr, true,
15775 depth);
15776 break;
15777 case TARGET_EXPR:
15778 if (TARGET_EXPR_INITIAL (expr))
15779 {
15780 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr,
15781 lhs_var, target_expr, true,
15782 depth))
15783 break;
15784 if (expr == target_expr)
15785 saw_lhs = 1;
15786 else
15787 {
15788 saw_lhs = goa_stabilize_expr (&TARGET_EXPR_INITIAL (expr),
15789 pre_p, lhs_addr, lhs_var,
15790 target_expr, true, depth);
15791 if (saw_lhs && target_expr == NULL_TREE && pre_p)
15792 target_expr = expr;
15793 }
15794 }
15795 break;
15796 default:
15797 break;
15798 }
15799 break;
15800 case tcc_reference:
15801 if (TREE_CODE (expr) == BIT_FIELD_REF
15802 || TREE_CODE (expr) == VIEW_CONVERT_EXPR)
15803 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
15804 lhs_addr, lhs_var, target_expr, true,
15805 depth);
15806 break;
15807 case tcc_vl_exp:
15808 if (TREE_CODE (expr) == CALL_EXPR)
15809 {
15810 if (tree fndecl = get_callee_fndecl (expr))
15811 if (fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING)
15812 || fndecl_built_in_p (fndecl, BUILT_IN_MEMCMP))
15813 {
15814 int nargs = call_expr_nargs (expr);
15815 for (int i = 0; i < nargs; i++)
15816 saw_lhs |= goa_stabilize_expr (&CALL_EXPR_ARG (expr, i),
15817 pre_p, lhs_addr, lhs_var,
15818 target_expr, true, depth);
15819 }
15820 }
15821 break;
15822 default:
15823 break;
15824 }
15825
15826 finish:
15827 if (saw_lhs == 0 && pre_p)
15828 {
15829 enum gimplify_status gs;
15830 if (TREE_CODE (expr) == CALL_EXPR && VOID_TYPE_P (TREE_TYPE (expr)))
15831 {
15832 gimplify_stmt (&expr, pre_p);
15833 return saw_lhs;
15834 }
15835 else if (rhs)
15836 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
15837 else
15838 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_lvalue, fb_lvalue);
15839 if (gs != GS_ALL_DONE)
15840 saw_lhs = -1;
15841 }
15842
15843 return saw_lhs;
15844 }
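/* A worked sketch (hypothetical input, 'foo' is a placeholder): for

     #pragma omp atomic
     x = x + foo ();

   stabilizing the rhs 'x + foo ()' against LHS_ADDR == &x replaces the
   'x' subexpression with LHS_VAR (the temporary that receives the
   atomic load), while 'foo ()', which does not involve the lhs, is
   evaluated into its own temporary in PRE_P; the function returns 1
   because the lhs was seen.  */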
15845
15846 /* Gimplify an OMP_ATOMIC statement. */
15847
15848 static enum gimplify_status
15849 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
15850 {
15851 tree addr = TREE_OPERAND (*expr_p, 0);
15852 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
15853 ? NULL : TREE_OPERAND (*expr_p, 1);
15854 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
15855 tree tmp_load;
15856 gomp_atomic_load *loadstmt;
15857 gomp_atomic_store *storestmt;
15858 tree target_expr = NULL_TREE;
15859
15860 tmp_load = create_tmp_reg (type);
15861 if (rhs
15862 && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load, target_expr,
15863 true, 0) < 0)
15864 return GS_ERROR;
15865
15866 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
15867 != GS_ALL_DONE)
15868 return GS_ERROR;
15869
15870 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
15871 OMP_ATOMIC_MEMORY_ORDER (*expr_p));
15872 gimplify_seq_add_stmt (pre_p, loadstmt);
15873 if (rhs)
15874 {
15875 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
15876 representatives. Use BIT_FIELD_REF on the lhs instead. */
15877 tree rhsarg = rhs;
15878 if (TREE_CODE (rhs) == COND_EXPR)
15879 rhsarg = TREE_OPERAND (rhs, 1);
15880 if (TREE_CODE (rhsarg) == BIT_INSERT_EXPR
15881 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
15882 {
15883 tree bitpos = TREE_OPERAND (rhsarg, 2);
15884 tree op1 = TREE_OPERAND (rhsarg, 1);
15885 tree bitsize;
15886 tree tmp_store = tmp_load;
15887 if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
15888 tmp_store = get_initialized_tmp_var (tmp_load, pre_p);
15889 if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
15890 bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
15891 else
15892 bitsize = TYPE_SIZE (TREE_TYPE (op1));
15893 gcc_assert (TREE_OPERAND (rhsarg, 0) == tmp_load);
15894 tree t = build2_loc (EXPR_LOCATION (rhsarg),
15895 MODIFY_EXPR, void_type_node,
15896 build3_loc (EXPR_LOCATION (rhsarg),
15897 BIT_FIELD_REF, TREE_TYPE (op1),
15898 tmp_store, bitsize, bitpos), op1);
15899 if (TREE_CODE (rhs) == COND_EXPR)
15900 t = build3_loc (EXPR_LOCATION (rhs), COND_EXPR, void_type_node,
15901 TREE_OPERAND (rhs, 0), t, void_node);
15902 gimplify_and_add (t, pre_p);
15903 rhs = tmp_store;
15904 }
15905 bool save_allow_rhs_cond_expr = gimplify_ctxp->allow_rhs_cond_expr;
15906 if (TREE_CODE (rhs) == COND_EXPR)
15907 gimplify_ctxp->allow_rhs_cond_expr = true;
15908 enum gimplify_status gs = gimplify_expr (&rhs, pre_p, NULL,
15909 is_gimple_val, fb_rvalue);
15910 gimplify_ctxp->allow_rhs_cond_expr = save_allow_rhs_cond_expr;
15911 if (gs != GS_ALL_DONE)
15912 return GS_ERROR;
15913 }
15914
15915 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
15916 rhs = tmp_load;
15917 storestmt
15918 = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
15919 if (TREE_CODE (*expr_p) != OMP_ATOMIC_READ && OMP_ATOMIC_WEAK (*expr_p))
15920 {
15921 gimple_omp_atomic_set_weak (loadstmt);
15922 gimple_omp_atomic_set_weak (storestmt);
15923 }
15924 gimplify_seq_add_stmt (pre_p, storestmt);
15925 switch (TREE_CODE (*expr_p))
15926 {
15927 case OMP_ATOMIC_READ:
15928 case OMP_ATOMIC_CAPTURE_OLD:
15929 *expr_p = tmp_load;
15930 gimple_omp_atomic_set_need_value (loadstmt);
15931 break;
15932 case OMP_ATOMIC_CAPTURE_NEW:
15933 *expr_p = rhs;
15934 gimple_omp_atomic_set_need_value (storestmt);
15935 break;
15936 default:
15937 *expr_p = NULL;
15938 break;
15939 }
15940
15941 return GS_ALL_DONE;
15942 }
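/* Example lowering (a rough sketch, assuming a simple capture):

     #pragma omp atomic capture
     { v = x; x = x + 1; }

   arrives as OMP_ATOMIC_CAPTURE_OLD and becomes approximately

     tmp = GIMPLE_OMP_ATOMIC_LOAD <&x>;
     GIMPLE_OMP_ATOMIC_STORE <tmp + 1>;

   with the load marked as needing its value and *EXPR_P replaced by
   the temporary, so the enclosing 'v = ...' consumes the captured old
   value.  */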
15943
15944 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
15945 body, and adding some EH bits. */
15946
15947 static enum gimplify_status
15948 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
15949 {
15950 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
15951 gimple *body_stmt;
15952 gtransaction *trans_stmt;
15953 gimple_seq body = NULL;
15954 int subcode = 0;
15955
15956 /* Wrap the transaction body in a BIND_EXPR so we have a context
15957 in which to put decls for OMP. */
15958 if (TREE_CODE (tbody) != BIND_EXPR)
15959 {
15960 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
15961 TREE_SIDE_EFFECTS (bind) = 1;
15962 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
15963 TRANSACTION_EXPR_BODY (expr) = bind;
15964 }
15965
15966 push_gimplify_context ();
15967 temp = voidify_wrapper_expr (*expr_p, NULL);
15968
15969 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
15970 pop_gimplify_context (body_stmt);
15971
15972 trans_stmt = gimple_build_transaction (body);
15973 if (TRANSACTION_EXPR_OUTER (expr))
15974 subcode = GTMA_IS_OUTER;
15975 else if (TRANSACTION_EXPR_RELAXED (expr))
15976 subcode = GTMA_IS_RELAXED;
15977 gimple_transaction_set_subcode (trans_stmt, subcode);
15978
15979 gimplify_seq_add_stmt (pre_p, trans_stmt);
15980
15981 if (temp)
15982 {
15983 *expr_p = temp;
15984 return GS_OK;
15985 }
15986
15987 *expr_p = NULL_TREE;
15988 return GS_ALL_DONE;
15989 }
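/* For instance (an illustrative sketch): a TM construct such as

     __transaction_atomic { x++; }

   has its body wrapped in a BIND_EXPR if needed, gimplified, and
   emitted as a GIMPLE_TRANSACTION tuple; the outer and relaxed
   variants differ only in the GTMA_IS_OUTER / GTMA_IS_RELAXED subcode
   set above.  */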
15990
15991 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
15992 is the OMP_BODY of the original EXPR (which has already been
15993 gimplified so it's not present in the EXPR).
15994
15995 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
15996
15997 static gimple *
15998 gimplify_omp_ordered (tree expr, gimple_seq body)
15999 {
16000 tree c, decls;
16001 int failures = 0;
16002 unsigned int i;
16003 tree source_c = NULL_TREE;
16004 tree sink_c = NULL_TREE;
16005
16006 if (gimplify_omp_ctxp)
16007 {
16008 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
16009 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
16010 && gimplify_omp_ctxp->loop_iter_var.is_empty ())
16011 {
16012 error_at (OMP_CLAUSE_LOCATION (c),
16013 "%<ordered%> construct with %qs clause must be "
16014 "closely nested inside a loop with %<ordered%> clause",
16015 OMP_CLAUSE_DOACROSS_DEPEND (c) ? "depend" : "doacross");
16016 failures++;
16017 }
16018 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
16019 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
16020 {
16021 bool fail = false;
16022 sink_c = c;
16023 if (OMP_CLAUSE_DECL (c) == NULL_TREE)
16024 continue; /* omp_cur_iteration - 1 */
16025 for (decls = OMP_CLAUSE_DECL (c), i = 0;
16026 decls && TREE_CODE (decls) == TREE_LIST;
16027 decls = TREE_CHAIN (decls), ++i)
16028 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
16029 continue;
16030 else if (TREE_VALUE (decls)
16031 != gimplify_omp_ctxp->loop_iter_var[2 * i])
16032 {
16033 error_at (OMP_CLAUSE_LOCATION (c),
16034 "variable %qE is not an iteration "
16035 "of outermost loop %d, expected %qE",
16036 TREE_VALUE (decls), i + 1,
16037 gimplify_omp_ctxp->loop_iter_var[2 * i]);
16038 fail = true;
16039 failures++;
16040 }
16041 else
16042 TREE_VALUE (decls)
16043 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
16044 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
16045 {
16046 error_at (OMP_CLAUSE_LOCATION (c),
16047 "number of variables in %qs clause with "
16048 "%<sink%> modifier does not match number of "
16049 "iteration variables",
16050 OMP_CLAUSE_DOACROSS_DEPEND (c)
16051 ? "depend" : "doacross");
16052 failures++;
16053 }
16054 }
16055 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
16056 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SOURCE)
16057 {
16058 if (source_c)
16059 {
16060 error_at (OMP_CLAUSE_LOCATION (c),
16061 "more than one %qs clause with %<source%> "
16062 "modifier on an %<ordered%> construct",
16063 OMP_CLAUSE_DOACROSS_DEPEND (source_c)
16064 ? "depend" : "doacross");
16065 failures++;
16066 }
16067 else
16068 source_c = c;
16069 }
16070 }
16071 if (source_c && sink_c)
16072 {
16073 error_at (OMP_CLAUSE_LOCATION (source_c),
16074 "%qs clause with %<source%> modifier specified "
16075 "together with %qs clauses with %<sink%> modifier "
16076 "on the same construct",
16077 OMP_CLAUSE_DOACROSS_DEPEND (source_c) ? "depend" : "doacross",
16078 OMP_CLAUSE_DOACROSS_DEPEND (sink_c) ? "depend" : "doacross");
16079 failures++;
16080 }
16081
16082 if (failures)
16083 return gimple_build_nop ();
16084 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
16085 }
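/* Illustration (a hypothetical snippet):

     #pragma omp for ordered(1)
     for (i = 0; i < n; i++)
       {
       #pragma omp ordered depend(sink: i - 1)
         ...
       }

   The sink clause's variable list is checked here against the recorded
   loop iteration variables; a mismatched name or count produces the
   errors above and a GIMPLE_NOP instead of a GIMPLE_OMP_ORDERED.  */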
16086
16087 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
16088 expression produces a value to be used as an operand inside a GIMPLE
16089 statement, the value will be stored back in *EXPR_P. This value will
16090 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
16091 an SSA_NAME. The corresponding sequence of GIMPLE statements is
16092 emitted in PRE_P and POST_P.
16093
16094 Additionally, this process may overwrite parts of the input
16095 expression during gimplification. Ideally, it should be
16096 possible to do non-destructive gimplification.
16097
16098 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
16099 the expression needs to evaluate to a value to be used as
16100 an operand in a GIMPLE statement, this value will be stored in
16101 *EXPR_P on exit. This happens when the caller specifies one
16102 of fb_lvalue or fb_rvalue fallback flags.
16103
16104 PRE_P will contain the sequence of GIMPLE statements corresponding
16105 to the evaluation of EXPR and all the side-effects that must
16106 be executed before the main expression. On exit, the last
16107 statement of PRE_P is the core statement being gimplified. For
16108 instance, when gimplifying 'if (++a)' the last statement in
16109 PRE_P will be 'if (t.1)' where t.1 is the result of
16110 pre-incrementing 'a'.
16111
16112 POST_P will contain the sequence of GIMPLE statements corresponding
16113 to the evaluation of all the side-effects that must be executed
16114 after the main expression. If this is NULL, the post
16115 side-effects are stored at the end of PRE_P.
16116
16117 The reason why the output is split in two is to handle post
16118 side-effects explicitly. In some cases, an expression may have
16119 inner and outer post side-effects which need to be emitted in
16120 an order different from the one given by the recursive
16121 traversal. For instance, for the expression (*p--)++ the post
16122 side-effects of '--' must actually occur *after* the post
16123 side-effects of '++'. However, gimplification will first visit
16124 the inner expression, so if a separate POST sequence was not
16125 used, the resulting sequence would be:
16126
16127 1 t.1 = *p
16128 2 p = p - 1
16129 3 t.2 = t.1 + 1
16130 4 *p = t.2
16131
16132 However, the post-decrement operation in line #2 must not be
16133 evaluated until after the store to *p at line #4, so the
16134 correct sequence should be:
16135
16136 1 t.1 = *p
16137 2 t.2 = t.1 + 1
16138 3 *p = t.2
16139 4 p = p - 1
16140
16141 So, by specifying a separate post queue, it is possible
16142 to emit the post side-effects in the correct order.
16143 If POST_P is NULL, an internal queue will be used. Before
16144 returning to the caller, the sequence POST_P is appended to
16145 the main output sequence PRE_P.
16146
16147 GIMPLE_TEST_F points to a function that takes a tree T and
16148 returns nonzero if T is in the GIMPLE form requested by the
16149 caller. The GIMPLE predicates are in gimple.cc.
16150
16151 FALLBACK tells the function what sort of a temporary we want if
16152 gimplification cannot produce an expression that complies with
16153 GIMPLE_TEST_F.
16154
16155 fb_none means that no temporary should be generated
16156 fb_rvalue means that an rvalue is OK to generate
16157 fb_lvalue means that an lvalue is OK to generate
16158 fb_either means that either is OK, but an lvalue is preferable.
16159 fb_mayfail means that gimplification may fail (in which case
16160 GS_ERROR will be returned)
16161
16162 The return value is either GS_ERROR or GS_ALL_DONE, since this
16163 function iterates until EXPR is completely gimplified or an error
16164 occurs. */
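
/* A typical invocation, shown only as a usage sketch (the pattern
   appears throughout this file): forcing an operand into a gimple
   rvalue with

     ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                          is_gimple_val, fb_rvalue);

   appends any statements needed to compute the operand to PRE_P and
   overwrites the operand with the resulting temporary.  */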
16165
16166 enum gimplify_status
16167 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
16168 bool (*gimple_test_f) (tree), fallback_t fallback)
16169 {
16170 tree tmp;
16171 gimple_seq internal_pre = NULL;
16172 gimple_seq internal_post = NULL;
16173 tree save_expr;
16174 bool is_statement;
16175 location_t saved_location;
16176 enum gimplify_status ret;
16177 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
16178 tree label;
16179
16180 save_expr = *expr_p;
16181 if (save_expr == NULL_TREE)
16182 return GS_ALL_DONE;
16183
16184 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
16185 is_statement = gimple_test_f == is_gimple_stmt;
16186 if (is_statement)
16187 gcc_assert (pre_p);
16188
16189 /* Consistency checks. */
16190 if (gimple_test_f == is_gimple_reg)
16191 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
16192 else if (gimple_test_f == is_gimple_val
16193 || gimple_test_f == is_gimple_call_addr
16194 || gimple_test_f == is_gimple_condexpr_for_cond
16195 || gimple_test_f == is_gimple_mem_rhs
16196 || gimple_test_f == is_gimple_mem_rhs_or_call
16197 || gimple_test_f == is_gimple_reg_rhs
16198 || gimple_test_f == is_gimple_reg_rhs_or_call
16199 || gimple_test_f == is_gimple_asm_val
16200 || gimple_test_f == is_gimple_mem_ref_addr)
16201 gcc_assert (fallback & fb_rvalue);
16202 else if (gimple_test_f == is_gimple_min_lval
16203 || gimple_test_f == is_gimple_lvalue)
16204 gcc_assert (fallback & fb_lvalue);
16205 else if (gimple_test_f == is_gimple_addressable)
16206 gcc_assert (fallback & fb_either);
16207 else if (gimple_test_f == is_gimple_stmt)
16208 gcc_assert (fallback == fb_none);
16209 else
16210 {
16211 /* We should have recognized the GIMPLE_TEST_F predicate to
16212 know what kind of fallback to use in case a temporary is
16213 needed to hold the value or address of *EXPR_P. */
16214 gcc_unreachable ();
16215 }
16216
16217 /* We used to check the predicate here and return immediately if it
16218 succeeds. This is wrong; the design is for gimplification to be
16219 idempotent, and for the predicates to only test for valid forms, not
16220 whether they are fully simplified. */
16221 if (pre_p == NULL)
16222 pre_p = &internal_pre;
16223
16224 if (post_p == NULL)
16225 post_p = &internal_post;
16226
16227 /* Remember the last statements added to PRE_P and POST_P. Every
16228 new statement added by the gimplification helpers needs to be
16229 annotated with location information. To centralize the
16230 responsibility, we remember the last statement that had been
16231 added to both queues before gimplifying *EXPR_P. If
16232 gimplification produces new statements in PRE_P and POST_P, those
16233 statements will be annotated with the same location information
16234 as *EXPR_P. */
16235 pre_last_gsi = gsi_last (*pre_p);
16236 post_last_gsi = gsi_last (*post_p);
16237
16238 saved_location = input_location;
16239 if (save_expr != error_mark_node
16240 && EXPR_HAS_LOCATION (*expr_p))
16241 input_location = EXPR_LOCATION (*expr_p);
16242
16243 /* Loop over the specific gimplifiers until the toplevel node
16244 remains the same. */
16245 do
16246 {
16247 /* Strip away as many useless type conversions as possible
16248 at the toplevel. */
16249 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
16250
16251 /* Remember the expr. */
16252 save_expr = *expr_p;
16253
16254 /* Die, die, die, my darling. */
16255 if (error_operand_p (save_expr))
16256 {
16257 ret = GS_ERROR;
16258 break;
16259 }
16260
16261 /* Do any language-specific gimplification. */
16262 ret = ((enum gimplify_status)
16263 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
16264 if (ret == GS_OK)
16265 {
16266 if (*expr_p == NULL_TREE)
16267 break;
16268 if (*expr_p != save_expr)
16269 continue;
16270 }
16271 else if (ret != GS_UNHANDLED)
16272 break;
16273
16274 /* Make sure that all the cases set 'ret' appropriately. */
16275 ret = GS_UNHANDLED;
16276 switch (TREE_CODE (*expr_p))
16277 {
16278 /* First deal with the special cases. */
16279
16280 case POSTINCREMENT_EXPR:
16281 case POSTDECREMENT_EXPR:
16282 case PREINCREMENT_EXPR:
16283 case PREDECREMENT_EXPR:
16284 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
16285 fallback != fb_none,
16286 TREE_TYPE (*expr_p));
16287 break;
16288
16289 case VIEW_CONVERT_EXPR:
16290 if ((fallback & fb_rvalue)
16291 && is_gimple_reg_type (TREE_TYPE (*expr_p))
16292 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
16293 {
16294 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
16295 post_p, is_gimple_val, fb_rvalue);
16296 recalculate_side_effects (*expr_p);
16297 break;
16298 }
16299 /* Fallthru. */
16300
16301 case ARRAY_REF:
16302 case ARRAY_RANGE_REF:
16303 case REALPART_EXPR:
16304 case IMAGPART_EXPR:
16305 case COMPONENT_REF:
16306 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
16307 fallback ? fallback : fb_rvalue);
16308 break;
16309
16310 case COND_EXPR:
16311 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
16312
16313 /* C99 code may assign to an array in a structure value of a
16314 conditional expression, and this has undefined behavior
16315 only on execution, so create a temporary if an lvalue is
16316 required. */
16317 if (fallback == fb_lvalue)
16318 {
16319 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
16320 mark_addressable (*expr_p);
16321 ret = GS_OK;
16322 }
16323 break;
16324
16325 case CALL_EXPR:
16326 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
16327
16328 /* C99 code may assign to an array in a structure returned
16329 from a function, and this has undefined behavior only on
16330 execution, so create a temporary if an lvalue is
16331 required. */
16332 if (fallback == fb_lvalue)
16333 {
16334 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
16335 mark_addressable (*expr_p);
16336 ret = GS_OK;
16337 }
16338 break;
16339
16340 case TREE_LIST:
16341 gcc_unreachable ();
16342
16343 case COMPOUND_EXPR:
16344 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
16345 break;
16346
16347 case COMPOUND_LITERAL_EXPR:
16348 ret = gimplify_compound_literal_expr (expr_p, pre_p,
16349 gimple_test_f, fallback);
16350 break;
16351
16352 case MODIFY_EXPR:
16353 case INIT_EXPR:
16354 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
16355 fallback != fb_none);
16356 break;
16357
16358 case TRUTH_ANDIF_EXPR:
16359 case TRUTH_ORIF_EXPR:
16360 {
16361 /* Preserve the original type of the expression and the
16362 source location of the outer expression. */
16363 tree org_type = TREE_TYPE (*expr_p);
16364 *expr_p = gimple_boolify (*expr_p);
16365 *expr_p = build3_loc (input_location, COND_EXPR,
16366 org_type, *expr_p,
16367 fold_convert_loc
16368 (input_location,
16369 org_type, boolean_true_node),
16370 fold_convert_loc
16371 (input_location,
16372 org_type, boolean_false_node));
16373 ret = GS_OK;
16374 break;
16375 }
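          /* Sketch: under this rewrite a short-circuit expression such
             as 'a && b' of type T becomes 'a && b ? (T) 1 : (T) 0',
             which a later iteration of this loop hands to
             gimplify_cond_expr to expand into explicit control flow.  */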
16376
16377 case TRUTH_NOT_EXPR:
16378 {
16379 tree type = TREE_TYPE (*expr_p);
16380 /* The parsers are careful to generate TRUTH_NOT_EXPR
16381 only with operands that are always zero or one.
16382 We do not fold here but handle the only interesting case
16383 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
16384 *expr_p = gimple_boolify (*expr_p);
16385 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
16386 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
16387 TREE_TYPE (*expr_p),
16388 TREE_OPERAND (*expr_p, 0));
16389 else
16390 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
16391 TREE_TYPE (*expr_p),
16392 TREE_OPERAND (*expr_p, 0),
16393 build_int_cst (TREE_TYPE (*expr_p), 1));
16394 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
16395 *expr_p = fold_convert_loc (input_location, type, *expr_p);
16396 ret = GS_OK;
16397 break;
16398 }
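          /* Sketch: for a one-bit operand this turns '!b' into '~b',
             and otherwise into 'b ^ 1', both already valid GIMPLE
             bitwise forms.  */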
16399
16400 case ADDR_EXPR:
16401 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
16402 break;
16403
16404 case ANNOTATE_EXPR:
16405 {
16406 tree cond = TREE_OPERAND (*expr_p, 0);
16407 tree kind = TREE_OPERAND (*expr_p, 1);
16408 tree data = TREE_OPERAND (*expr_p, 2);
16409 tree type = TREE_TYPE (cond);
16410 if (!INTEGRAL_TYPE_P (type))
16411 {
16412 *expr_p = cond;
16413 ret = GS_OK;
16414 break;
16415 }
16416 tree tmp = create_tmp_var (type);
16417 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
16418 gcall *call
16419 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
16420 gimple_call_set_lhs (call, tmp);
16421 gimplify_seq_add_stmt (pre_p, call);
16422 *expr_p = tmp;
16423 ret = GS_ALL_DONE;
16424 break;
16425 }
16426
16427 case VA_ARG_EXPR:
16428 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
16429 break;
16430
16431 CASE_CONVERT:
16432 if (IS_EMPTY_STMT (*expr_p))
16433 {
16434 ret = GS_ALL_DONE;
16435 break;
16436 }
16437
16438 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
16439 || fallback == fb_none)
16440 {
16441 /* Just strip a conversion to void (or in void context) and
16442 try again. */
16443 *expr_p = TREE_OPERAND (*expr_p, 0);
16444 ret = GS_OK;
16445 break;
16446 }
16447
16448 ret = gimplify_conversion (expr_p);
16449 if (ret == GS_ERROR)
16450 break;
16451 if (*expr_p != save_expr)
16452 break;
16453 /* FALLTHRU */
16454
16455 case FIX_TRUNC_EXPR:
16456 /* unary_expr: ... | '(' cast ')' val | ... */
16457 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
16458 is_gimple_val, fb_rvalue);
16459 recalculate_side_effects (*expr_p);
16460 break;
16461
16462 case INDIRECT_REF:
16463 {
16464 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
16465 bool notrap = TREE_THIS_NOTRAP (*expr_p);
16466 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
16467
16468 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
16469 if (*expr_p != save_expr)
16470 {
16471 ret = GS_OK;
16472 break;
16473 }
16474
16475 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
16476 is_gimple_reg, fb_rvalue);
16477 if (ret == GS_ERROR)
16478 break;
16479
16480 recalculate_side_effects (*expr_p);
16481 *expr_p = fold_build2_loc (input_location, MEM_REF,
16482 TREE_TYPE (*expr_p),
16483 TREE_OPERAND (*expr_p, 0),
16484 build_int_cst (saved_ptr_type, 0));
16485 TREE_THIS_VOLATILE (*expr_p) = volatilep;
16486 TREE_THIS_NOTRAP (*expr_p) = notrap;
16487 ret = GS_OK;
16488 break;
16489 }
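          /* Sketch: a plain dereference '*p' that survives folding is
             rewritten as a MEM_REF of the gimplified pointer with a
             zero offset of the saved pointer type (printed as
             MEM[(T *)p]), preserving the volatility and no-trap
             flags.  */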
16490
16491 /* We arrive here through the various re-gimplification paths. */
16492 case MEM_REF:
16493 /* First try re-folding the whole thing. */
16494 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
16495 TREE_OPERAND (*expr_p, 0),
16496 TREE_OPERAND (*expr_p, 1));
16497 if (tmp)
16498 {
16499 REF_REVERSE_STORAGE_ORDER (tmp)
16500 = REF_REVERSE_STORAGE_ORDER (*expr_p);
16501 *expr_p = tmp;
16502 recalculate_side_effects (*expr_p);
16503 ret = GS_OK;
16504 break;
16505 }
16506 /* Avoid re-gimplifying the address operand if it is already
16507 in suitable form. Re-gimplifying would mark the address
16508 operand addressable. Always gimplify when not in SSA form
16509 as we still may have to gimplify decls with value-exprs. */
16510 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
16511 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
16512 {
16513 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
16514 is_gimple_mem_ref_addr, fb_rvalue);
16515 if (ret == GS_ERROR)
16516 break;
16517 }
16518 recalculate_side_effects (*expr_p);
16519 ret = GS_ALL_DONE;
16520 break;
16521
16522 /* Constants need not be gimplified. */
16523 case INTEGER_CST:
16524 case REAL_CST:
16525 case FIXED_CST:
16526 case STRING_CST:
16527 case COMPLEX_CST:
16528 case VECTOR_CST:
16529 /* Drop the overflow flag on constants; we do not want
16530 that in the GIMPLE IL. */
16531 if (TREE_OVERFLOW_P (*expr_p))
16532 *expr_p = drop_tree_overflow (*expr_p);
16533 ret = GS_ALL_DONE;
16534 break;
16535
16536 case CONST_DECL:
16537 /* If we require an lvalue, such as for ADDR_EXPR, retain the
16538 CONST_DECL node. Otherwise the decl is replaceable by its
16539 value. */
16540 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
16541 if (fallback & fb_lvalue)
16542 ret = GS_ALL_DONE;
16543 else
16544 {
16545 *expr_p = DECL_INITIAL (*expr_p);
16546 ret = GS_OK;
16547 }
16548 break;
16549
16550 case DECL_EXPR:
16551 ret = gimplify_decl_expr (expr_p, pre_p);
16552 break;
16553
16554 case BIND_EXPR:
16555 ret = gimplify_bind_expr (expr_p, pre_p);
16556 break;
16557
16558 case LOOP_EXPR:
16559 ret = gimplify_loop_expr (expr_p, pre_p);
16560 break;
16561
16562 case SWITCH_EXPR:
16563 ret = gimplify_switch_expr (expr_p, pre_p);
16564 break;
16565
16566 case EXIT_EXPR:
16567 ret = gimplify_exit_expr (expr_p);
16568 break;
16569
16570 case GOTO_EXPR:
16571 /* If the target is not a LABEL_DECL, then it is a computed jump
16572 and the target needs to be gimplified. */
16573 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
16574 {
16575 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
16576 NULL, is_gimple_val, fb_rvalue);
16577 if (ret == GS_ERROR)
16578 break;
16579 }
16580 gimplify_seq_add_stmt (pre_p,
16581 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
16582 ret = GS_ALL_DONE;
16583 break;
16584
16585 case PREDICT_EXPR:
16586 gimplify_seq_add_stmt (pre_p,
16587 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
16588 PREDICT_EXPR_OUTCOME (*expr_p)));
16589 ret = GS_ALL_DONE;
16590 break;
16591
16592 case LABEL_EXPR:
16593 ret = gimplify_label_expr (expr_p, pre_p);
16594 label = LABEL_EXPR_LABEL (*expr_p);
16595 gcc_assert (decl_function_context (label) == current_function_decl);
16596
16597 /* If the label is used in a goto statement, or the address of the
16598 label is taken, we need to unpoison all variables that were seen so
16599 far. Doing so prevents us from reporting false positives. */
16600 if (asan_poisoned_variables
16601 && asan_used_labels != NULL
16602 && asan_used_labels->contains (label)
16603 && !gimplify_omp_ctxp)
16604 asan_poison_variables (asan_poisoned_variables, false, pre_p);
16605 break;
16606
16607 case CASE_LABEL_EXPR:
16608 ret = gimplify_case_label_expr (expr_p, pre_p);
16609
16610 if (gimplify_ctxp->live_switch_vars)
16611 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
16612 pre_p);
16613 break;
16614
16615 case RETURN_EXPR:
16616 ret = gimplify_return_expr (*expr_p, pre_p);
16617 break;
16618
16619 case CONSTRUCTOR:
16620 /* Don't reduce this in place; let gimplify_init_constructor work its
16621 magic. But if we're just elaborating this for side effects, only
16622 gimplify the elements that have side effects. */
16623 if (fallback == fb_none)
16624 {
16625 unsigned HOST_WIDE_INT ix;
16626 tree val;
16627 tree temp = NULL_TREE;
16628 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
16629 if (TREE_SIDE_EFFECTS (val))
16630 append_to_statement_list (val, &temp);
16631
16632 *expr_p = temp;
16633 ret = temp ? GS_OK : GS_ALL_DONE;
16634 }
16635 /* C99 code may assign to an array in a constructed
16636 structure or union, and this has undefined behavior only
16637 on execution, so create a temporary if an lvalue is
16638 required. */
16639 else if (fallback == fb_lvalue)
16640 {
16641 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
16642 mark_addressable (*expr_p);
16643 ret = GS_OK;
16644 }
16645 else
16646 ret = GS_ALL_DONE;
16647 break;
16648
16649 /* The following are special cases that are not handled by the
16650 original GIMPLE grammar. */
16651
16652 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
16653 eliminated. */
16654 case SAVE_EXPR:
16655 ret = gimplify_save_expr (expr_p, pre_p, post_p);
16656 break;
16657
16658 case BIT_FIELD_REF:
16659 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
16660 post_p, is_gimple_lvalue, fb_either);
16661 recalculate_side_effects (*expr_p);
16662 break;
16663
16664 case TARGET_MEM_REF:
16665 {
16666 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
16667
16668 if (TMR_BASE (*expr_p))
16669 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
16670 post_p, is_gimple_mem_ref_addr, fb_either);
16671 if (TMR_INDEX (*expr_p))
16672 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
16673 post_p, is_gimple_val, fb_rvalue);
16674 if (TMR_INDEX2 (*expr_p))
16675 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
16676 post_p, is_gimple_val, fb_rvalue);
16677 /* TMR_STEP and TMR_OFFSET are always integer constants. */
16678 ret = MIN (r0, r1);
16679 }
16680 break;
16681
16682 case NON_LVALUE_EXPR:
16683 /* This should have been stripped above. */
16684 gcc_unreachable ();
16685
16686 case ASM_EXPR:
16687 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
16688 break;
16689
16690 case TRY_FINALLY_EXPR:
16691 case TRY_CATCH_EXPR:
16692 {
16693 gimple_seq eval, cleanup;
16694 gtry *try_;
16695
16696 /* Calls to destructors are generated automatically in FINALLY/CATCH
16697 blocks. They should have UNKNOWN_LOCATION as their location. However,
16698 gimplify_call_expr will reset such call stmts to input_location
16699 if it finds a stmt's location is unknown. To prevent that resetting
16700 for destructors, we set input_location to unknown.
16701 Note that this only affects the destructor calls in the FINALLY/CATCH
16702 block, and input_location is automatically restored to its original
16703 value by the end of gimplify_expr. */
16704 input_location = UNKNOWN_LOCATION;
16705 eval = cleanup = NULL;
16706 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
16707 if (TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
16708 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == EH_ELSE_EXPR)
16709 {
16710 gimple_seq n = NULL, e = NULL;
16711 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
16712 0), &n);
16713 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
16714 1), &e);
16715 if (!gimple_seq_empty_p (n) && !gimple_seq_empty_p (e))
16716 {
16717 geh_else *stmt = gimple_build_eh_else (n, e);
16718 gimple_seq_add_stmt (&cleanup, stmt);
16719 }
16720 }
16721 else
16722 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
16723 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
16724 if (gimple_seq_empty_p (cleanup))
16725 {
16726 gimple_seq_add_seq (pre_p, eval);
16727 ret = GS_ALL_DONE;
16728 break;
16729 }
16730 try_ = gimple_build_try (eval, cleanup,
16731 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
16732 ? GIMPLE_TRY_FINALLY
16733 : GIMPLE_TRY_CATCH);
16734 if (EXPR_HAS_LOCATION (save_expr))
16735 gimple_set_location (try_, EXPR_LOCATION (save_expr));
16736 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
16737 gimple_set_location (try_, saved_location);
16738 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
16739 gimple_try_set_catch_is_cleanup (try_,
16740 TRY_CATCH_IS_CLEANUP (*expr_p));
16741 gimplify_seq_add_stmt (pre_p, try_);
16742 ret = GS_ALL_DONE;
16743 break;
16744 }
16745
16746 case CLEANUP_POINT_EXPR:
16747 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
16748 break;
16749
16750 case TARGET_EXPR:
16751 ret = gimplify_target_expr (expr_p, pre_p, post_p);
16752 break;
16753
16754 case CATCH_EXPR:
16755 {
16756 gimple *c;
16757 gimple_seq handler = NULL;
16758 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
16759 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
16760 gimplify_seq_add_stmt (pre_p, c);
16761 ret = GS_ALL_DONE;
16762 break;
16763 }
16764
16765 case EH_FILTER_EXPR:
16766 {
16767 gimple *ehf;
16768 gimple_seq failure = NULL;
16769
16770 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
16771 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
16772 copy_warning (ehf, *expr_p);
16773 gimplify_seq_add_stmt (pre_p, ehf);
16774 ret = GS_ALL_DONE;
16775 break;
16776 }
16777
16778 case OBJ_TYPE_REF:
16779 {
16780 enum gimplify_status r0, r1;
16781 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
16782 post_p, is_gimple_val, fb_rvalue);
16783 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
16784 post_p, is_gimple_val, fb_rvalue);
16785 TREE_SIDE_EFFECTS (*expr_p) = 0;
16786 ret = MIN (r0, r1);
16787 }
16788 break;
16789
16790 case LABEL_DECL:
16791 /* We get here when taking the address of a label. We mark
16792 the label as "forced", meaning it can never be removed and
16793 is a potential target for any computed goto. */
16794 FORCED_LABEL (*expr_p) = 1;
16795 ret = GS_ALL_DONE;
16796 break;
16797
16798 case STATEMENT_LIST:
16799 ret = gimplify_statement_list (expr_p, pre_p);
16800 break;
16801
16802 case WITH_SIZE_EXPR:
16803 {
16804 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
16805 post_p == &internal_post ? NULL : post_p,
16806 gimple_test_f, fallback);
16807 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
16808 is_gimple_val, fb_rvalue);
16809 ret = GS_ALL_DONE;
16810 }
16811 break;
16812
16813 case VAR_DECL:
16814 case PARM_DECL:
16815 ret = gimplify_var_or_parm_decl (expr_p);
16816 break;
16817
16818 case RESULT_DECL:
16819 /* When within an OMP context, notice uses of variables. */
16820 if (gimplify_omp_ctxp)
16821 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
16822 ret = GS_ALL_DONE;
16823 break;
16824
16825 case DEBUG_EXPR_DECL:
16826 gcc_unreachable ();
16827
16828 case DEBUG_BEGIN_STMT:
16829 gimplify_seq_add_stmt (pre_p,
16830 gimple_build_debug_begin_stmt
16831 (TREE_BLOCK (*expr_p),
16832 EXPR_LOCATION (*expr_p)));
16833 ret = GS_ALL_DONE;
16834 *expr_p = NULL;
16835 break;
16836
16837 case SSA_NAME:
16838 /* Allow callbacks into the gimplifier during optimization. */
16839 ret = GS_ALL_DONE;
16840 break;
16841
16842 case OMP_PARALLEL:
16843 gimplify_omp_parallel (expr_p, pre_p);
16844 ret = GS_ALL_DONE;
16845 break;
16846
16847 case OMP_TASK:
16848 gimplify_omp_task (expr_p, pre_p);
16849 ret = GS_ALL_DONE;
16850 break;
16851
16852 case OMP_SIMD:
16853 {
16854 /* Temporarily disable into_ssa, as scan_omp_simd
16855 (which calls copy_gimple_seq_and_replace_locals) can't deal
16856 properly with SSA_NAMEs defined outside of the body. */
16857 bool saved_into_ssa = gimplify_ctxp->into_ssa;
16858 gimplify_ctxp->into_ssa = false;
16859 ret = gimplify_omp_for (expr_p, pre_p);
16860 gimplify_ctxp->into_ssa = saved_into_ssa;
16861 break;
16862 }
16863
16864 case OMP_FOR:
16865 case OMP_DISTRIBUTE:
16866 case OMP_TASKLOOP:
16867 case OACC_LOOP:
16868 ret = gimplify_omp_for (expr_p, pre_p);
16869 break;
16870
16871 case OMP_LOOP:
16872 ret = gimplify_omp_loop (expr_p, pre_p);
16873 break;
16874
16875 case OACC_CACHE:
16876 gimplify_oacc_cache (expr_p, pre_p);
16877 ret = GS_ALL_DONE;
16878 break;
16879
16880 case OACC_DECLARE:
16881 gimplify_oacc_declare (expr_p, pre_p);
16882 ret = GS_ALL_DONE;
16883 break;
16884
16885 case OACC_HOST_DATA:
16886 case OACC_DATA:
16887 case OACC_KERNELS:
16888 case OACC_PARALLEL:
16889 case OACC_SERIAL:
16890 case OMP_SCOPE:
16891 case OMP_SECTIONS:
16892 case OMP_SINGLE:
16893 case OMP_TARGET:
16894 case OMP_TARGET_DATA:
16895 case OMP_TEAMS:
16896 gimplify_omp_workshare (expr_p, pre_p);
16897 ret = GS_ALL_DONE;
16898 break;
16899
16900 case OACC_ENTER_DATA:
16901 case OACC_EXIT_DATA:
16902 case OACC_UPDATE:
16903 case OMP_TARGET_UPDATE:
16904 case OMP_TARGET_ENTER_DATA:
16905 case OMP_TARGET_EXIT_DATA:
16906 gimplify_omp_target_update (expr_p, pre_p);
16907 ret = GS_ALL_DONE;
16908 break;
16909
16910 case OMP_SECTION:
16911 case OMP_MASTER:
16912 case OMP_MASKED:
16913 case OMP_ORDERED:
16914 case OMP_CRITICAL:
16915 case OMP_SCAN:
16916 {
16917 gimple_seq body = NULL;
16918 gimple *g;
16919 bool saved_in_omp_construct = in_omp_construct;
16920
16921 in_omp_construct = true;
16922 gimplify_and_add (OMP_BODY (*expr_p), &body);
16923 in_omp_construct = saved_in_omp_construct;
16924 switch (TREE_CODE (*expr_p))
16925 {
16926 case OMP_SECTION:
16927 g = gimple_build_omp_section (body);
16928 break;
16929 case OMP_MASTER:
16930 g = gimple_build_omp_master (body);
16931 break;
16932 case OMP_ORDERED:
16933 g = gimplify_omp_ordered (*expr_p, body);
16934 if (OMP_BODY (*expr_p) == NULL_TREE
16935 && gimple_code (g) == GIMPLE_OMP_ORDERED)
16936 gimple_omp_ordered_standalone (g);
16937 break;
16938 case OMP_MASKED:
16939 gimplify_scan_omp_clauses (&OMP_MASKED_CLAUSES (*expr_p),
16940 pre_p, ORT_WORKSHARE, OMP_MASKED);
16941 gimplify_adjust_omp_clauses (pre_p, body,
16942 &OMP_MASKED_CLAUSES (*expr_p),
16943 OMP_MASKED);
16944 g = gimple_build_omp_masked (body,
16945 OMP_MASKED_CLAUSES (*expr_p));
16946 break;
16947 case OMP_CRITICAL:
16948 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
16949 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
16950 gimplify_adjust_omp_clauses (pre_p, body,
16951 &OMP_CRITICAL_CLAUSES (*expr_p),
16952 OMP_CRITICAL);
16953 g = gimple_build_omp_critical (body,
16954 OMP_CRITICAL_NAME (*expr_p),
16955 OMP_CRITICAL_CLAUSES (*expr_p));
16956 break;
16957 case OMP_SCAN:
16958 gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p),
16959 pre_p, ORT_WORKSHARE, OMP_SCAN);
16960 gimplify_adjust_omp_clauses (pre_p, body,
16961 &OMP_SCAN_CLAUSES (*expr_p),
16962 OMP_SCAN);
16963 g = gimple_build_omp_scan (body, OMP_SCAN_CLAUSES (*expr_p));
16964 break;
16965 default:
16966 gcc_unreachable ();
16967 }
16968 gimplify_seq_add_stmt (pre_p, g);
16969 ret = GS_ALL_DONE;
16970 break;
16971 }
16972
16973 case OMP_TASKGROUP:
16974 {
16975 gimple_seq body = NULL;
16976
16977 tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
16978 bool saved_in_omp_construct = in_omp_construct;
16979 gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
16980 OMP_TASKGROUP);
16981 gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);
16982
16983 in_omp_construct = true;
16984 gimplify_and_add (OMP_BODY (*expr_p), &body);
16985 in_omp_construct = saved_in_omp_construct;
16986 gimple_seq cleanup = NULL;
16987 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
16988 gimple *g = gimple_build_call (fn, 0);
16989 gimple_seq_add_stmt (&cleanup, g);
16990 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
16991 body = NULL;
16992 gimple_seq_add_stmt (&body, g);
16993 g = gimple_build_omp_taskgroup (body, *pclauses);
16994 gimplify_seq_add_stmt (pre_p, g);
16995 ret = GS_ALL_DONE;
16996 break;
16997 }
16998
16999 case OMP_ATOMIC:
17000 case OMP_ATOMIC_READ:
17001 case OMP_ATOMIC_CAPTURE_OLD:
17002 case OMP_ATOMIC_CAPTURE_NEW:
17003 ret = gimplify_omp_atomic (expr_p, pre_p);
17004 break;
17005
17006 case TRANSACTION_EXPR:
17007 ret = gimplify_transaction (expr_p, pre_p);
17008 break;
17009
17010 case TRUTH_AND_EXPR:
17011 case TRUTH_OR_EXPR:
17012 case TRUTH_XOR_EXPR:
17013 {
17014 tree orig_type = TREE_TYPE (*expr_p);
17015 tree new_type, xop0, xop1;
17016 *expr_p = gimple_boolify (*expr_p);
17017 new_type = TREE_TYPE (*expr_p);
17018 if (!useless_type_conversion_p (orig_type, new_type))
17019 {
17020 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
17021 ret = GS_OK;
17022 break;
17023 }
17024
17025 /* Boolified binary truth expressions are semantically equivalent
17026 to bitwise binary expressions. Canonicalize them to the
17027 bitwise variant. */
17028 switch (TREE_CODE (*expr_p))
17029 {
17030 case TRUTH_AND_EXPR:
17031 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
17032 break;
17033 case TRUTH_OR_EXPR:
17034 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
17035 break;
17036 case TRUTH_XOR_EXPR:
17037 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
17038 break;
17039 default:
17040 break;
17041 }
17042 /* Now make sure that operands have compatible type to
17043 expression's new_type. */
17044 xop0 = TREE_OPERAND (*expr_p, 0);
17045 xop1 = TREE_OPERAND (*expr_p, 1);
17046 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
17047 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
17048 new_type,
17049 xop0);
17050 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
17051 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
17052 new_type,
17053 xop1);
17054 /* Continue classified as tcc_binary. */
17055 goto expr_2;
17056 }
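          /* Sketch: e.g. a boolified TRUTH_OR_EXPR (both operands
             unconditionally evaluated, unlike TRUTH_ORIF_EXPR) is
             retagged as BIT_IOR_EXPR and then gimplified below as an
             ordinary binary expression.  */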
17057
17058 case VEC_COND_EXPR:
17059 goto expr_3;
17060
17061 case VEC_PERM_EXPR:
17062 /* Classified as tcc_expression. */
17063 goto expr_3;
17064
17065 case BIT_INSERT_EXPR:
17066 /* Argument 3 is a constant. */
17067 goto expr_2;
17068
17069 case POINTER_PLUS_EXPR:
17070 {
17071 enum gimplify_status r0, r1;
17072 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17073 post_p, is_gimple_val, fb_rvalue);
17074 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
17075 post_p, is_gimple_val, fb_rvalue);
17076 recalculate_side_effects (*expr_p);
17077 ret = MIN (r0, r1);
17078 break;
17079 }
17080
17081 default:
17082 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
17083 {
17084 case tcc_comparison:
17085 /* Handle comparison of non-scalar-mode aggregate objects
17086 with a call to memcmp. It would be nice to only have to do
17087 this for variable-sized objects, but then we'd have to allow
17088 the same nest of reference nodes we allow for MODIFY_EXPR and
17089 that's too complex.
17090
17091 Compare scalar mode aggregates as scalar mode values. Using
17092 memcmp for them would be very inefficient at best, and is
17093 plain wrong if bitfields are involved. */
17094 {
17095 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
17096
17097 /* Vector comparisons need no boolification. */
17098 if (TREE_CODE (type) == VECTOR_TYPE)
17099 goto expr_2;
17100 else if (!AGGREGATE_TYPE_P (type))
17101 {
17102 tree org_type = TREE_TYPE (*expr_p);
17103 *expr_p = gimple_boolify (*expr_p);
17104 if (!useless_type_conversion_p (org_type,
17105 TREE_TYPE (*expr_p)))
17106 {
17107 *expr_p = fold_convert_loc (input_location,
17108 org_type, *expr_p);
17109 ret = GS_OK;
17110 }
17111 else
17112 goto expr_2;
17113 }
17114 else if (TYPE_MODE (type) != BLKmode)
17115 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
17116 else
17117 ret = gimplify_variable_sized_compare (expr_p);
17118
17119 break;
17120 }
17121
17122 /* If *EXPR_P does not need to be special-cased, handle it
17123 according to its class. */
17124 case tcc_unary:
17125 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17126 post_p, is_gimple_val, fb_rvalue);
17127 break;
17128
17129 case tcc_binary:
17130 expr_2:
17131 {
17132 enum gimplify_status r0, r1;
17133
17134 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17135 post_p, is_gimple_val, fb_rvalue);
17136 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
17137 post_p, is_gimple_val, fb_rvalue);
17138
17139 ret = MIN (r0, r1);
17140 break;
17141 }
17142
17143 expr_3:
17144 {
17145 enum gimplify_status r0, r1, r2;
17146
17147 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
17148 post_p, is_gimple_val, fb_rvalue);
17149 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
17150 post_p, is_gimple_val, fb_rvalue);
17151 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
17152 post_p, is_gimple_val, fb_rvalue);
17153
17154 ret = MIN (MIN (r0, r1), r2);
17155 break;
17156 }
17157
17158 case tcc_declaration:
17159 case tcc_constant:
17160 ret = GS_ALL_DONE;
17161 goto dont_recalculate;
17162
17163 default:
17164 gcc_unreachable ();
17165 }
17166
17167 recalculate_side_effects (*expr_p);
17168
17169 dont_recalculate:
17170 break;
17171 }
17172
17173 gcc_assert (*expr_p || ret != GS_OK);
17174 }
17175 while (ret == GS_OK);
17176
17177 /* If we encountered an error_mark somewhere nested inside, either
17178 stub out the statement or propagate the error back out. */
17179 if (ret == GS_ERROR)
17180 {
17181 if (is_statement)
17182 *expr_p = NULL;
17183 goto out;
17184 }
17185
17186 /* This was only valid as a return value from the langhook, which
17187 we handled. Make sure it doesn't escape from any other context. */
17188 gcc_assert (ret != GS_UNHANDLED);
17189
17190 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
17191 {
17192 /* We aren't looking for a value, and we don't have a valid
17193 statement. If it doesn't have side-effects, throw it away.
17194 We can also get here with code such as "*&&L;", where L is
17195 a LABEL_DECL that is marked as FORCED_LABEL. */
17196 if (TREE_CODE (*expr_p) == LABEL_DECL
17197 || !TREE_SIDE_EFFECTS (*expr_p))
17198 *expr_p = NULL;
17199 else if (!TREE_THIS_VOLATILE (*expr_p))
17200 {
17201 /* This is probably a _REF that contains something nested that
17202 has side effects. Recurse through the operands to find it. */
17203 enum tree_code code = TREE_CODE (*expr_p);
17204
17205 switch (code)
17206 {
17207 case COMPONENT_REF:
17208 case REALPART_EXPR:
17209 case IMAGPART_EXPR:
17210 case VIEW_CONVERT_EXPR:
17211 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
17212 gimple_test_f, fallback);
17213 break;
17214
17215 case ARRAY_REF:
17216 case ARRAY_RANGE_REF:
17217 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
17218 gimple_test_f, fallback);
17219 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
17220 gimple_test_f, fallback);
17221 break;
17222
17223 default:
17224 /* Anything else with side-effects must be converted to
17225 a valid statement before we get here. */
17226 gcc_unreachable ();
17227 }
17228
17229 *expr_p = NULL;
17230 }
17231 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
17232 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode
17233 && !is_empty_type (TREE_TYPE (*expr_p)))
17234 {
17235 /* Historically, the compiler has treated a bare reference
17236 to a non-BLKmode volatile lvalue as forcing a load. */
17237 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
17238
17239 /* Normally, we do not want to create a temporary for a
17240 TREE_ADDRESSABLE type because such a type should not be
17241 copied by bitwise-assignment. However, we make an
17242 exception here, as all we are doing here is ensuring that
17243 we read the bytes that make up the type. We use
17244 create_tmp_var_raw because create_tmp_var will abort when
17245 given a TREE_ADDRESSABLE type. */
17246 tree tmp = create_tmp_var_raw (type, "vol");
17247 gimple_add_tmp_var (tmp);
17248 gimplify_assign (tmp, *expr_p, pre_p);
17249 *expr_p = NULL;
17250 }
17251 else
17252 /* We can't do anything useful with a volatile reference to
17253 an incomplete type, so just throw it away. Likewise for
17254 a BLKmode type, since any implicit inner load should
17255 already have been turned into an explicit one by the
17256 gimplification process. */
17257 *expr_p = NULL;
17258 }
17259
17260 /* If we are gimplifying at the statement level, we're done. Tack
17261 everything together and return. */
17262 if (fallback == fb_none || is_statement)
17263 {
17264 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
17265 it out for GC to reclaim it. */
17266 *expr_p = NULL_TREE;
17267
17268 if (!gimple_seq_empty_p (internal_pre)
17269 || !gimple_seq_empty_p (internal_post))
17270 {
17271 gimplify_seq_add_seq (&internal_pre, internal_post);
17272 gimplify_seq_add_seq (pre_p, internal_pre);
17273 }
17274
17275 /* The result of gimplifying *EXPR_P is going to be the last few
17276 statements in *PRE_P and *POST_P. Add location information
17277 to all the statements that were added by the gimplification
17278 helpers. */
17279 if (!gimple_seq_empty_p (*pre_p))
17280 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
17281
17282 if (!gimple_seq_empty_p (*post_p))
17283 annotate_all_with_location_after (*post_p, post_last_gsi,
17284 input_location);
17285
17286 goto out;
17287 }
17288
17289 #ifdef ENABLE_GIMPLE_CHECKING
17290 if (*expr_p)
17291 {
17292 enum tree_code code = TREE_CODE (*expr_p);
17293 /* These expressions should already be in gimple IR form. */
17294 gcc_assert (code != MODIFY_EXPR
17295 && code != ASM_EXPR
17296 && code != BIND_EXPR
17297 && code != CATCH_EXPR
17298 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
17299 && code != EH_FILTER_EXPR
17300 && code != GOTO_EXPR
17301 && code != LABEL_EXPR
17302 && code != LOOP_EXPR
17303 && code != SWITCH_EXPR
17304 && code != TRY_FINALLY_EXPR
17305 && code != EH_ELSE_EXPR
17306 && code != OACC_PARALLEL
17307 && code != OACC_KERNELS
17308 && code != OACC_SERIAL
17309 && code != OACC_DATA
17310 && code != OACC_HOST_DATA
17311 && code != OACC_DECLARE
17312 && code != OACC_UPDATE
17313 && code != OACC_ENTER_DATA
17314 && code != OACC_EXIT_DATA
17315 && code != OACC_CACHE
17316 && code != OMP_CRITICAL
17317 && code != OMP_FOR
17318 && code != OACC_LOOP
17319 && code != OMP_MASTER
17320 && code != OMP_MASKED
17321 && code != OMP_TASKGROUP
17322 && code != OMP_ORDERED
17323 && code != OMP_PARALLEL
17324 && code != OMP_SCAN
17325 && code != OMP_SECTIONS
17326 && code != OMP_SECTION
17327 && code != OMP_SINGLE
17328 && code != OMP_SCOPE);
17329 }
17330 #endif
17331
17332 /* Otherwise we're gimplifying a subexpression, so the resulting
17333 value is interesting. If it's a valid operand that matches
17334 GIMPLE_TEST_F, we're done. Unless we are handling some
17335 post-effects internally; if that's the case, we need to copy into
17336 a temporary before adding the post-effects to POST_P. */
17337 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
17338 goto out;
17339
17340 /* Otherwise, we need to create a new temporary for the gimplified
17341 expression. */
17342
17343 /* We can't return an lvalue if we have an internal postqueue. The
17344 object the lvalue refers to would (probably) be modified by the
17345 postqueue; we need to copy the value out first, which means an
17346 rvalue. */
17347 if ((fallback & fb_lvalue)
17348 && gimple_seq_empty_p (internal_post)
17349 && is_gimple_addressable (*expr_p))
17350 {
17351 /* An lvalue will do. Take the address of the expression, store it
17352 in a temporary, and replace the expression with an INDIRECT_REF of
17353 that temporary. */
17354 tree ref_alias_type = reference_alias_ptr_type (*expr_p);
17355 unsigned int ref_align = get_object_alignment (*expr_p);
17356 tree ref_type = TREE_TYPE (*expr_p);
17357 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
17358 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
17359 if (TYPE_ALIGN (ref_type) != ref_align)
17360 ref_type = build_aligned_type (ref_type, ref_align);
17361 *expr_p = build2 (MEM_REF, ref_type,
17362 tmp, build_zero_cst (ref_alias_type));
17363 }
17364 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
17365 {
17366 /* An rvalue will do. Assign the gimplified expression into a
17367 new temporary TMP and replace the original expression with
17368 TMP. First, make sure that the expression has a type so that
17369 it can be assigned into a temporary. */
17370 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
17371 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
17372 }
17373 else
17374 {
17375 #ifdef ENABLE_GIMPLE_CHECKING
17376 if (!(fallback & fb_mayfail))
17377 {
17378 fprintf (stderr, "gimplification failed:\n");
17379 print_generic_expr (stderr, *expr_p);
17380 debug_tree (*expr_p);
17381 internal_error ("gimplification failed");
17382 }
17383 #endif
17384 gcc_assert (fallback & fb_mayfail);
17385
17386 /* If this is an asm statement, and the user asked for the
17387 impossible, don't die. Fail and let gimplify_asm_expr
17388 issue an error. */
17389 ret = GS_ERROR;
17390 goto out;
17391 }
17392
17393 /* Make sure the temporary matches our predicate. */
17394 gcc_assert ((*gimple_test_f) (*expr_p));
17395
17396 if (!gimple_seq_empty_p (internal_post))
17397 {
17398 annotate_all_with_location (internal_post, input_location);
17399 gimplify_seq_add_seq (pre_p, internal_post);
17400 }
17401
17402 out:
17403 input_location = saved_location;
17404 return ret;
17405 }
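
/* For illustration (a sketch; temporary names are invented and the exact
   sequence depends on the front end and flags): gimplifying the GENERIC
   statement "a = b + c * d" through this function roughly yields

     t.1 = c * d;
     a = b + t.1;

   i.e. subexpressions that fail GIMPLE_TEST_F are evaluated into fresh
   temporaries appended to *PRE_P until the remaining expression is a
   valid GIMPLE operand.  */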
17406
17407 /* Like gimplify_expr but make sure the gimplified result is not itself
17408 an SSA name (it becomes a decl instead). Temporaries required by
17409 evaluating *EXPR_P may still be SSA names. */
17410
17411 static enum gimplify_status
17412 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
17413 bool (*gimple_test_f) (tree), fallback_t fallback,
17414 bool allow_ssa)
17415 {
17416 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
17417 gimple_test_f, fallback);
17418 if (! allow_ssa
17419 && TREE_CODE (*expr_p) == SSA_NAME)
17420 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
17421 return ret;
17422 }
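
/* A minimal usage sketch (hypothetical caller; the tree SIZE is
   illustrative):

     gimple_seq stmts = NULL;
     gimplify_expr (&size, &stmts, NULL, is_gimple_val, fb_rvalue, false);

   With ALLOW_SSA false, if gimplification yields an SSA name such as _5,
   the result is copied into a temporary VAR_DECL so that SIZE can safely
   be stored in decl/type fields (see gimplify_one_sizepos below).  */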
17423
17424 /* Look through TYPE for variable-sized objects and gimplify each such
17425 size that we find. Add to LIST_P any statements generated. */
17426
17427 void
17428 gimplify_type_sizes (tree type, gimple_seq *list_p)
17429 {
17430 if (type == NULL || type == error_mark_node)
17431 return;
17432
17433 const bool ignored_p
17434 = TYPE_NAME (type)
17435 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
17436 && DECL_IGNORED_P (TYPE_NAME (type));
17437 tree t;
17438
17439 /* We first do the main variant, then copy into any other variants. */
17440 type = TYPE_MAIN_VARIANT (type);
17441
17442 /* Avoid infinite recursion. */
17443 if (TYPE_SIZES_GIMPLIFIED (type))
17444 return;
17445
17446 TYPE_SIZES_GIMPLIFIED (type) = 1;
17447
17448 switch (TREE_CODE (type))
17449 {
17450 case INTEGER_TYPE:
17451 case ENUMERAL_TYPE:
17452 case BOOLEAN_TYPE:
17453 case REAL_TYPE:
17454 case FIXED_POINT_TYPE:
17455 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
17456 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
17457
17458 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
17459 {
17460 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
17461 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
17462 }
17463 break;
17464
17465 case ARRAY_TYPE:
17466 /* These types may not have declarations, so handle them here. */
17467 gimplify_type_sizes (TREE_TYPE (type), list_p);
17468 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
17469 /* Ensure VLA bounds aren't removed: at -O0 they should be variables
17470 with assigned stack slots; at -O1+ with -g they should be tracked
17471 by VTA. */
17472 if (!ignored_p
17473 && TYPE_DOMAIN (type)
17474 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
17475 {
17476 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
17477 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
17478 DECL_IGNORED_P (t) = 0;
17479 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
17480 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
17481 DECL_IGNORED_P (t) = 0;
17482 }
17483 break;
17484
17485 case RECORD_TYPE:
17486 case UNION_TYPE:
17487 case QUAL_UNION_TYPE:
17488 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
17489 if (TREE_CODE (field) == FIELD_DECL)
17490 {
17491 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
17492 /* Likewise, ensure variable offsets aren't removed. */
17493 if (!ignored_p
17494 && (t = DECL_FIELD_OFFSET (field))
17495 && VAR_P (t)
17496 && DECL_ARTIFICIAL (t))
17497 DECL_IGNORED_P (t) = 0;
17498 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
17499 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
17500 gimplify_type_sizes (TREE_TYPE (field), list_p);
17501 }
17502 break;
17503
17504 case POINTER_TYPE:
17505 case REFERENCE_TYPE:
17506 /* We used to recurse on the pointed-to type here, which turned out to
17507 be incorrect because its definition might refer to variables not
17508 yet initialized at this point if a forward declaration is involved.
17509
17510 Recursing was actually useful for anonymous pointed-to types, to
17511 ensure that the size evaluation dominates every possible later use
17512 of the values. Restricting to such types here would be safe since
17513 there is no possible forward declaration around, but would attach an
17514 undesirable middle-end semantic to anonymity. We therefore defer to
17515 front-ends the responsibility of ensuring that the sizes are
17516 evaluated both early and late enough, e.g. by attaching artificial
17517 type declarations to the tree. */
17518 break;
17519
17520 default:
17521 break;
17522 }
17523
17524 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
17525 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
17526
17527 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
17528 {
17529 TYPE_SIZE (t) = TYPE_SIZE (type);
17530 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
17531 TYPE_SIZES_GIMPLIFIED (t) = 1;
17532 }
17533 }
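
/* For illustration (a sketch; actual temporaries and exact expressions
   vary by target and front end): for a C VLA

     void f (int n) { int a[n]; ... }

   the domain of a's type is [0, n - 1], so gimplifying the type sizes
   emits something like

     D.2 = n + -1;              (TYPE_MAX_VALUE of the domain)
     D.3 = (sizetype) n * 4;    (TYPE_SIZE_UNIT of the array)

   into LIST_P, after which all variants of the type share the same
   gimplified size trees.  */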
17534
17535 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
17536 a size or position, has had all of its SAVE_EXPRs evaluated.
17537 We add any required statements to *STMT_P. */
17538
17539 void
17540 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
17541 {
17542 tree expr = *expr_p;
17543
17544 /* We don't do anything if the value isn't there, is constant, or contains
17545 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
17546 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
17547 will want to replace it with a new variable, but that will cause problems
17548 if this type is from outside the function; such a VAR_DECL is OK here. */
17549 if (expr == NULL_TREE
17550 || is_gimple_constant (expr)
17551 || TREE_CODE (expr) == VAR_DECL
17552 || CONTAINS_PLACEHOLDER_P (expr))
17553 return;
17554
17555 *expr_p = unshare_expr (expr);
17556
17557 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
17558 if the def vanishes. */
17559 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
17560
17561 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
17562 FE, ensure that it is a VAR_DECL; otherwise we might handle some decls
17563 via gimplify_vla_decl even when all of their sizes are INTEGER_CSTs. */
17564 if (is_gimple_constant (*expr_p))
17565 *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
17566 }
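
/* Usage sketch (hypothetical field layout; names are illustrative):
   if DECL_SIZE of a field is the ungimplified expression "n * 8", then

     gimplify_one_sizepos (&DECL_SIZE (field), &stmts);

   appends roughly "D.4 = n * 8" to STMTS and rewrites the size to the
   temporary D.4, so later users see a gimple value rather than a full
   expression.  */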
17567
17568 /* Gimplify the body of FNDECL and return a GIMPLE_BIND node containing
17569 the sequence of corresponding GIMPLE statements. If DO_PARMS is true,
17570 also gimplify the parameters. */
17571
17572 gbind *
17573 gimplify_body (tree fndecl, bool do_parms)
17574 {
17575 location_t saved_location = input_location;
17576 gimple_seq parm_stmts, parm_cleanup = NULL, seq;
17577 gimple *outer_stmt;
17578 gbind *outer_bind;
17579
17580 timevar_push (TV_TREE_GIMPLIFY);
17581
17582 init_tree_ssa (cfun);
17583
17584 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
17585 gimplification. */
17586 default_rtl_profile ();
17587
17588 gcc_assert (gimplify_ctxp == NULL);
17589 push_gimplify_context (true);
17590
17591 if (flag_openacc || flag_openmp)
17592 {
17593 gcc_assert (gimplify_omp_ctxp == NULL);
17594 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
17595 gimplify_omp_ctxp = new_omp_context (ORT_IMPLICIT_TARGET);
17596 }
17597
17598 /* Unshare most shared trees in the body and in that of any nested functions.
17599 It would seem we don't have to do this for nested functions, because
17600 they are supposed to be output (and hence gimplified) before the outer
17601 function is, but the g++ front end doesn't always do it that way. */
17602 unshare_body (fndecl);
17603 unvisit_body (fndecl);
17604
17605 /* Make sure input_location isn't set to something weird. */
17606 input_location = DECL_SOURCE_LOCATION (fndecl);
17607
17608 /* Resolve callee-copies. This has to be done before processing
17609 the body so that DECL_VALUE_EXPR gets processed correctly. */
17610 parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;
17611
17612 /* Gimplify the function's body. */
17613 seq = NULL;
17614 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
17615 outer_stmt = gimple_seq_first_nondebug_stmt (seq);
17616 if (!outer_stmt)
17617 {
17618 outer_stmt = gimple_build_nop ();
17619 gimplify_seq_add_stmt (&seq, outer_stmt);
17620 }
17621
17622 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
17623 not the case, wrap everything in a GIMPLE_BIND to make it so. */
17624 if (gimple_code (outer_stmt) == GIMPLE_BIND
17625 && (gimple_seq_first_nondebug_stmt (seq)
17626 == gimple_seq_last_nondebug_stmt (seq)))
17627 {
17628 outer_bind = as_a <gbind *> (outer_stmt);
17629 if (gimple_seq_first_stmt (seq) != outer_stmt
17630 || gimple_seq_last_stmt (seq) != outer_stmt)
17631 {
17632 /* If there are debug stmts before or after outer_stmt, move them
17633 inside of outer_bind body. */
17634 gimple_stmt_iterator gsi = gsi_for_stmt (outer_stmt, &seq);
17635 gimple_seq second_seq = NULL;
17636 if (gimple_seq_first_stmt (seq) != outer_stmt
17637 && gimple_seq_last_stmt (seq) != outer_stmt)
17638 {
17639 second_seq = gsi_split_seq_after (gsi);
17640 gsi_remove (&gsi, false);
17641 }
17642 else if (gimple_seq_first_stmt (seq) != outer_stmt)
17643 gsi_remove (&gsi, false);
17644 else
17645 {
17646 gsi_remove (&gsi, false);
17647 second_seq = seq;
17648 seq = NULL;
17649 }
17650 gimple_seq_add_seq_without_update (&seq,
17651 gimple_bind_body (outer_bind));
17652 gimple_seq_add_seq_without_update (&seq, second_seq);
17653 gimple_bind_set_body (outer_bind, seq);
17654 }
17655 }
17656 else
17657 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
17658
17659 DECL_SAVED_TREE (fndecl) = NULL_TREE;
17660
17661 /* If we had callee-copies statements, insert them at the beginning
17662 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
17663 if (!gimple_seq_empty_p (parm_stmts))
17664 {
17665 tree parm;
17666
17667 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
17668 if (parm_cleanup)
17669 {
17670 gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
17671 GIMPLE_TRY_FINALLY);
17672 parm_stmts = NULL;
17673 gimple_seq_add_stmt (&parm_stmts, g);
17674 }
17675 gimple_bind_set_body (outer_bind, parm_stmts);
17676
17677 for (parm = DECL_ARGUMENTS (current_function_decl);
17678 parm; parm = DECL_CHAIN (parm))
17679 if (DECL_HAS_VALUE_EXPR_P (parm))
17680 {
17681 DECL_HAS_VALUE_EXPR_P (parm) = 0;
17682 DECL_IGNORED_P (parm) = 0;
17683 }
17684 }
17685
17686 if ((flag_openacc || flag_openmp || flag_openmp_simd)
17687 && gimplify_omp_ctxp)
17688 {
17689 delete_omp_context (gimplify_omp_ctxp);
17690 gimplify_omp_ctxp = NULL;
17691 }
17692
17693 pop_gimplify_context (outer_bind);
17694 gcc_assert (gimplify_ctxp == NULL);
17695
17696 if (flag_checking && !seen_error ())
17697 verify_gimple_in_seq (gimple_bind_body (outer_bind));
17698
17699 timevar_pop (TV_TREE_GIMPLIFY);
17700 input_location = saved_location;
17701
17702 return outer_bind;
17703 }
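
/* For illustration (a rough shape; debug statements and clobbers are
   omitted and temporary names invented): for

     int f (void) { int i = 0; return i; }

   this returns a single outer GIMPLE_BIND of the form

     {
       int i;
       i = 0;
       D.5 = i;
       return D.5;
     }

   which gimplify_function_tree below installs as the function body.  */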
17704
17705 typedef char *char_p; /* For DEF_VEC_P. */
17706
17707 /* Return whether we should exclude FNDECL from instrumentation. */
17708
17709 static bool
17710 flag_instrument_functions_exclude_p (tree fndecl)
17711 {
17712 vec<char_p> *v;
17713
17714 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
17715 if (v && v->length () > 0)
17716 {
17717 const char *name;
17718 int i;
17719 char *s;
17720
17721 name = lang_hooks.decl_printable_name (fndecl, 1);
17722 FOR_EACH_VEC_ELT (*v, i, s)
17723 if (strstr (name, s) != NULL)
17724 return true;
17725 }
17726
17727 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
17728 if (v && v->length () > 0)
17729 {
17730 const char *name;
17731 int i;
17732 char *s;
17733
17734 name = DECL_SOURCE_FILE (fndecl);
17735 FOR_EACH_VEC_ELT (*v, i, s)
17736 if (strstr (name, s) != NULL)
17737 return true;
17738 }
17739
17740 return false;
17741 }
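
/* Note the substring semantics here (a clarifying sketch of existing
   behavior, not a specification): with

     -finstrument-functions-exclude-function-list=foo

   the printable name is matched with strstr, so "foo" excludes "foo",
   "foobar" and "myfoo" alike; the file-based list behaves the same way
   on DECL_SOURCE_FILE.  */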
17742
17743 /* Build a call to the instrumentation function FNCODE and add it to SEQ.
17744 If COND_VAR is not NULL, it is a boolean variable guarding the call to
17745 the instrumentation function. If STMT is not NULL, it is a statement
17746 to be executed just before the call to the instrumentation function. */
17747
17748 static void
17749 build_instrumentation_call (gimple_seq *seq, enum built_in_function fncode,
17750 tree cond_var, gimple *stmt)
17751 {
17752 /* The instrumentation hooks aren't going to call the instrumented
17753 function and the address they receive is expected to be matchable
17754 against symbol addresses. Make sure we don't create a trampoline,
17755 in case the current function is nested. */
17756 tree this_fn_addr = build_fold_addr_expr (current_function_decl);
17757 TREE_NO_TRAMPOLINE (this_fn_addr) = 1;
17758
17759 tree label_true, label_false;
17760 if (cond_var)
17761 {
17762 label_true = create_artificial_label (UNKNOWN_LOCATION);
17763 label_false = create_artificial_label (UNKNOWN_LOCATION);
17764 gcond *cond = gimple_build_cond (EQ_EXPR, cond_var, boolean_false_node,
17765 label_true, label_false);
17766 gimplify_seq_add_stmt (seq, cond);
17767 gimplify_seq_add_stmt (seq, gimple_build_label (label_true));
17768 gimplify_seq_add_stmt (seq, gimple_build_predict (PRED_COLD_LABEL,
17769 NOT_TAKEN));
17770 }
17771
17772 if (stmt)
17773 gimplify_seq_add_stmt (seq, stmt);
17774
17775 tree x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
17776 gcall *call = gimple_build_call (x, 1, integer_zero_node);
17777 tree tmp_var = create_tmp_var (ptr_type_node, "return_addr");
17778 gimple_call_set_lhs (call, tmp_var);
17779 gimplify_seq_add_stmt (seq, call);
17780 x = builtin_decl_implicit (fncode);
17781 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
17782 gimplify_seq_add_stmt (seq, call);
17783
17784 if (cond_var)
17785 gimplify_seq_add_stmt (seq, gimple_build_label (label_false));
17786 }
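
/* For illustration (a sketch of the emitted GIMPLE; label and temporary
   names are made up): with COND_VAR set, the sequence looks like

     if (tmp_called == 0) goto L1; else goto L2;
     L1:
     (cold-label predictor hint)
     C.0 = 1;                                   <- STMT, if any
     return_addr = __builtin_return_address (0);
     __cyg_profile_func_enter (f, return_addr);
     L2:

   where the callee is __cyg_profile_func_enter or __cyg_profile_func_exit
   depending on FNCODE.  */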
17787
17788 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
17789 node for the function we want to gimplify.
17790
17791 The resulting sequence of GIMPLE statements corresponding to the body
17792 of FNDECL is installed with gimple_set_body; nothing is returned. */
17793
17794 void
17795 gimplify_function_tree (tree fndecl)
17796 {
17797 gimple_seq seq;
17798 gbind *bind;
17799
17800 gcc_assert (!gimple_body (fndecl));
17801
17802 if (DECL_STRUCT_FUNCTION (fndecl))
17803 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
17804 else
17805 push_struct_function (fndecl);
17806
17807 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
17808 if necessary. */
17809 cfun->curr_properties |= PROP_gimple_lva;
17810
17811 if (asan_sanitize_use_after_scope ())
17812 asan_poisoned_variables = new hash_set<tree> ();
17813 bind = gimplify_body (fndecl, true);
17814 if (asan_poisoned_variables)
17815 {
17816 delete asan_poisoned_variables;
17817 asan_poisoned_variables = NULL;
17818 }
17819
17820 /* The tree body of the function is no longer needed, replace it
17821 with the new GIMPLE body. */
17822 seq = NULL;
17823 gimple_seq_add_stmt (&seq, bind);
17824 gimple_set_body (fndecl, seq);
17825
17826 /* If we're instrumenting function entry/exit, then prepend the call to
17827 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
17828 catch the exit hook. */
17829 /* ??? Add some way to ignore exceptions for this TFE. */
17830 if (flag_instrument_function_entry_exit
17831 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
17832 /* Do not instrument extern inline functions. */
17833 && !(DECL_DECLARED_INLINE_P (fndecl)
17834 && DECL_EXTERNAL (fndecl)
17835 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
17836 && !flag_instrument_functions_exclude_p (fndecl))
17837 {
17838 gimple_seq body = NULL, cleanup = NULL;
17839 gassign *assign;
17840 tree cond_var;
17841
17842 /* If -finstrument-functions-once is specified, generate:
17843
17844 static volatile bool C.0 = false;
17845 bool tmp_called;
17846
17847 tmp_called = C.0;
17848 if (!tmp_called)
17849 {
17850 C.0 = true;
17851 [call profiling enter function]
17852 }
17853
17854 without specific protection for data races. */
17855 if (flag_instrument_function_entry_exit > 1)
17856 {
17857 tree first_var
17858 = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
17859 VAR_DECL,
17860 create_tmp_var_name ("C"),
17861 boolean_type_node);
17862 DECL_ARTIFICIAL (first_var) = 1;
17863 DECL_IGNORED_P (first_var) = 1;
17864 TREE_STATIC (first_var) = 1;
17865 TREE_THIS_VOLATILE (first_var) = 1;
17866 TREE_USED (first_var) = 1;
17867 DECL_INITIAL (first_var) = boolean_false_node;
17868 varpool_node::add (first_var);
17869
17870 cond_var = create_tmp_var (boolean_type_node, "tmp_called");
17871 assign = gimple_build_assign (cond_var, first_var);
17872 gimplify_seq_add_stmt (&body, assign);
17873
17874 assign = gimple_build_assign (first_var, boolean_true_node);
17875 }
17876
17877 else
17878 {
17879 cond_var = NULL_TREE;
17880 assign = NULL;
17881 }
17882
17883 build_instrumentation_call (&body, BUILT_IN_PROFILE_FUNC_ENTER,
17884 cond_var, assign);
17885
17886 /* If -finstrument-functions-once is specified, generate:
17887
17888 if (!tmp_called)
17889 [call profiling exit function]
17890
17891 without specific protection for data races. */
17892 build_instrumentation_call (&cleanup, BUILT_IN_PROFILE_FUNC_EXIT,
17893 cond_var, NULL);
17894
17895 gimple *tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
17896 gimplify_seq_add_stmt (&body, tf);
17897 gbind *new_bind = gimple_build_bind (NULL, body, NULL);
17898
17899 /* Replace the current function body with the body
17900 wrapped in the try/finally TF. */
17901 seq = NULL;
17902 gimple_seq_add_stmt (&seq, new_bind);
17903 gimple_set_body (fndecl, seq);
17904 bind = new_bind;
17905 }
17906
17907 if (sanitize_flags_p (SANITIZE_THREAD)
17908 && param_tsan_instrument_func_entry_exit)
17909 {
17910 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
17911 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
17912 gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
17913 /* Replace the current function body with the body
17914 wrapped in the try/finally TF. */
17915 seq = NULL;
17916 gimple_seq_add_stmt (&seq, new_bind);
17917 gimple_set_body (fndecl, seq);
17918 }
17919
17920 DECL_SAVED_TREE (fndecl) = NULL_TREE;
17921 cfun->curr_properties |= PROP_gimple_any;
17922
17923 pop_cfun ();
17924
17925 dump_function (TDI_gimple, fndecl);
17926 }
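
/* For illustration (a rough shape, assuming -finstrument-functions-once;
   without -once the enter/exit calls are unconditional): the installed
   body is, schematically,

     {
       tmp_called = C.0;
       if (!tmp_called) { C.0 = 1; __cyg_profile_func_enter (...); }
       try
         {
           <original GIMPLE_BIND of the function>
         }
       finally
         {
           if (!tmp_called) __cyg_profile_func_exit (...);
         }
     }

   and -fsanitize=thread similarly wraps the body in a try/finally whose
   cleanup is a call to IFN_TSAN_FUNC_EXIT.  */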
17927
17928 /* Return a dummy expression of type TYPE in order to keep going after an
17929 error. */
17930
17931 static tree
17932 dummy_object (tree type)
17933 {
17934 tree t = build_int_cst (build_pointer_type (type), 0);
17935 return build2 (MEM_REF, type, t, t);
17936 }
17937
17938 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
17939 builtin function, but a very special sort of operator. */
17940
17941 enum gimplify_status
17942 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
17943 gimple_seq *post_p ATTRIBUTE_UNUSED)
17944 {
17945 tree promoted_type, have_va_type;
17946 tree valist = TREE_OPERAND (*expr_p, 0);
17947 tree type = TREE_TYPE (*expr_p);
17948 tree t, tag, aptag;
17949 location_t loc = EXPR_LOCATION (*expr_p);
17950
17951 /* Verify that valist is of the proper type. */
17952 have_va_type = TREE_TYPE (valist);
17953 if (have_va_type == error_mark_node)
17954 return GS_ERROR;
17955 have_va_type = targetm.canonical_va_list_type (have_va_type);
17956 if (have_va_type == NULL_TREE
17957 && POINTER_TYPE_P (TREE_TYPE (valist)))
17958 /* Handle 'Case 1: Not an array type' from c-common.cc/build_va_arg. */
17959 have_va_type
17960 = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
17961 gcc_assert (have_va_type != NULL_TREE);
17962
17963 /* Generate a diagnostic for requesting data of a type that cannot
17964 be passed through `...' due to type promotion at the call site. */
17965 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
17966 != type)
17967 {
17968 static bool gave_help;
17969 bool warned;
17970 /* Use the expansion point to handle cases such as passing bool (defined
17971 in a system header) through `...'. */
17972 location_t xloc
17973 = expansion_point_location_if_in_system_header (loc);
17974
17975 /* Unfortunately, this is merely undefined, rather than a constraint
17976 violation, so we cannot make this an error. If this call is never
17977 executed, the program is still strictly conforming. */
17978 auto_diagnostic_group d;
17979 warned = warning_at (xloc, 0,
17980 "%qT is promoted to %qT when passed through %<...%>",
17981 type, promoted_type);
17982 if (!gave_help && warned)
17983 {
17984 gave_help = true;
17985 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
17986 promoted_type, type);
17987 }
17988
17989 /* We can, however, treat "undefined" any way we please.
17990 Call abort to encourage the user to fix the program. */
17991 if (warned)
17992 inform (xloc, "if this code is reached, the program will abort");
17993 /* Before the abort, allow the evaluation of the va_list
17994 expression to exit or longjmp. */
17995 gimplify_and_add (valist, pre_p);
17996 t = build_call_expr_loc (loc,
17997 builtin_decl_implicit (BUILT_IN_TRAP), 0);
17998 gimplify_and_add (t, pre_p);
17999
18000 /* This is dead code, but go ahead and finish so that the
18001 mode of the result comes out right. */
18002 *expr_p = dummy_object (type);
18003 return GS_ALL_DONE;
18004 }
18005
18006 tag = build_int_cst (build_pointer_type (type), 0);
18007 aptag = build_int_cst (TREE_TYPE (valist), 0);
18008
18009 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
18010 valist, tag, aptag);
18011
18012 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
18013 needs to be expanded. */
18014 cfun->curr_properties &= ~PROP_gimple_lva;
18015
18016 return GS_OK;
18017 }
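
/* For illustration (a sketch; internal functions print with a leading
   dot in GIMPLE dumps and exact operands vary by target):
   "x = va_arg (ap, int)" is lowered to roughly

     x = .VA_ARG (&ap, 0B, 0B);

   which the stdarg pass later expands, while "va_arg (ap, char)" only
   triggers the promotion warning above and is replaced by a trap plus
   a dummy object.  */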
18018
18019 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
18020
18021 DST/SRC are the destination and source respectively. You can pass
18022 ungimplified trees in DST or SRC, in which case they will be
18023 converted to a gimple operand if necessary.
18024
18025 This function returns the newly created GIMPLE_ASSIGN tuple. */
18026
18027 gimple *
18028 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
18029 {
18030 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
18031 gimplify_and_add (t, seq_p);
18032 ggc_free (t);
18033 return gimple_seq_last_stmt (*seq_p);
18034 }
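
/* A minimal usage sketch (hypothetical trees A, B, DST of compatible
   types):

     gimple_seq seq = NULL;
     gimplify_assign (dst, fold_build2 (PLUS_EXPR, type, a, b), &seq);

   appends the gimplified equivalent of "dst = a + b" to SEQ and returns
   the final GIMPLE_ASSIGN, whose lhs is DST.  */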
18035
18036 inline hashval_t
18037 gimplify_hasher::hash (const elt_t *p)
18038 {
18039 tree t = p->val;
18040 return iterative_hash_expr (t, 0);
18041 }
18042
18043 inline bool
18044 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
18045 {
18046 tree t1 = p1->val;
18047 tree t2 = p2->val;
18048 enum tree_code code = TREE_CODE (t1);
18049
18050 if (TREE_CODE (t2) != code
18051 || TREE_TYPE (t1) != TREE_TYPE (t2))
18052 return false;
18053
18054 if (!operand_equal_p (t1, t2, 0))
18055 return false;
18056
18057 /* Only allow them to compare equal if they also hash equal; otherwise
18058 results are nondeterministic, and we fail bootstrap comparison. */
18059 gcc_checking_assert (hash (p1) == hash (p2));
18060
18061 return true;
18062 }