1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2021 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "gimple.h"
33 #include "gimple-predict.h"
34 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
35 #include "ssa.h"
36 #include "cgraph.h"
37 #include "tree-pretty-print.h"
38 #include "diagnostic-core.h"
39 #include "alias.h"
40 #include "fold-const.h"
41 #include "calls.h"
42 #include "varasm.h"
43 #include "stmt.h"
44 #include "expr.h"
45 #include "gimple-fold.h"
46 #include "tree-eh.h"
47 #include "gimplify.h"
48 #include "gimple-iterator.h"
49 #include "stor-layout.h"
50 #include "print-tree.h"
51 #include "tree-iterator.h"
52 #include "tree-inline.h"
53 #include "langhooks.h"
54 #include "tree-cfg.h"
55 #include "tree-ssa.h"
56 #include "omp-general.h"
57 #include "omp-low.h"
58 #include "gimple-low.h"
59 #include "gomp-constants.h"
60 #include "splay-tree.h"
61 #include "gimple-walk.h"
62 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
63 #include "builtins.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "asan.h"
67 #include "dbgcnt.h"
68 #include "omp-offload.h"
69 #include "context.h"
70 #include "tree-nested.h"
71
72 /* Hash set of poisoned variables in a bind expr. */
73 static hash_set<tree> *asan_poisoned_variables = NULL;
74
75 enum gimplify_omp_var_data
76 {
77 GOVD_SEEN = 0x000001,
78 GOVD_EXPLICIT = 0x000002,
79 GOVD_SHARED = 0x000004,
80 GOVD_PRIVATE = 0x000008,
81 GOVD_FIRSTPRIVATE = 0x000010,
82 GOVD_LASTPRIVATE = 0x000020,
83 GOVD_REDUCTION = 0x000040,
84   GOVD_LOCAL = 0x000080,
85 GOVD_MAP = 0x000100,
86 GOVD_DEBUG_PRIVATE = 0x000200,
87 GOVD_PRIVATE_OUTER_REF = 0x000400,
88 GOVD_LINEAR = 0x000800,
89 GOVD_ALIGNED = 0x001000,
90
91 /* Flag for GOVD_MAP: don't copy back. */
92 GOVD_MAP_TO_ONLY = 0x002000,
93
94 /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference. */
95 GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,
96
97 GOVD_MAP_0LEN_ARRAY = 0x008000,
98
99   /* Flag for GOVD_MAP: the mapping is 'always, to' or 'always, tofrom'.  */
100 GOVD_MAP_ALWAYS_TO = 0x010000,
101
102 /* Flag for shared vars that are or might be stored to in the region. */
103 GOVD_WRITTEN = 0x020000,
104
105 /* Flag for GOVD_MAP, if it is a forced mapping. */
106 GOVD_MAP_FORCE = 0x040000,
107
108 /* Flag for GOVD_MAP: must be present already. */
109 GOVD_MAP_FORCE_PRESENT = 0x080000,
110
111 /* Flag for GOVD_MAP: only allocate. */
112 GOVD_MAP_ALLOC_ONLY = 0x100000,
113
114 /* Flag for GOVD_MAP: only copy back. */
115 GOVD_MAP_FROM_ONLY = 0x200000,
116
117 GOVD_NONTEMPORAL = 0x400000,
118
119 /* Flag for GOVD_LASTPRIVATE: conditional modifier. */
120 GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,
121
122 GOVD_CONDTEMP = 0x1000000,
123
124 /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause. */
125 GOVD_REDUCTION_INSCAN = 0x2000000,
126
127 /* Flag for GOVD_MAP: (struct) vars that have pointer attachments for
128 fields. */
129 GOVD_MAP_HAS_ATTACHMENTS = 0x4000000,
130
131 /* Flag for GOVD_FIRSTPRIVATE: OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT. */
132 GOVD_FIRSTPRIVATE_IMPLICIT = 0x8000000,
133
134 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
135 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
136 | GOVD_LOCAL)
137 };
138
139
140 enum omp_region_type
141 {
142 ORT_WORKSHARE = 0x00,
143 ORT_TASKGROUP = 0x01,
144 ORT_SIMD = 0x04,
145
146 ORT_PARALLEL = 0x08,
147 ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,
148
149 ORT_TASK = 0x10,
150 ORT_UNTIED_TASK = ORT_TASK | 1,
151 ORT_TASKLOOP = ORT_TASK | 2,
152 ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,
153
154 ORT_TEAMS = 0x20,
155 ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
156 ORT_HOST_TEAMS = ORT_TEAMS | 2,
157 ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,
158
159 /* Data region. */
160 ORT_TARGET_DATA = 0x40,
161
162 /* Data region with offloading. */
163 ORT_TARGET = 0x80,
164 ORT_COMBINED_TARGET = ORT_TARGET | 1,
165 ORT_IMPLICIT_TARGET = ORT_TARGET | 2,
166
167 /* OpenACC variants. */
168 ORT_ACC = 0x100, /* A generic OpenACC region. */
169 ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA, /* Data construct. */
170 ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET, /* Parallel construct */
171 ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2, /* Kernels construct. */
172 ORT_ACC_SERIAL = ORT_ACC | ORT_TARGET | 4, /* Serial construct. */
173 ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2, /* Host data. */
174
175 /* Dummy OpenMP region, used to disable expansion of
176 DECL_VALUE_EXPRs in taskloop pre body. */
177 ORT_NONE = 0x200
178 };
179
180 /* Gimplify hashtable helper. */
181
182 struct gimplify_hasher : free_ptr_hash <elt_t>
183 {
184 static inline hashval_t hash (const elt_t *);
185 static inline bool equal (const elt_t *, const elt_t *);
186 };
187
188 struct gimplify_ctx
189 {
190 struct gimplify_ctx *prev_context;
191
192 vec<gbind *> bind_expr_stack;
193 tree temps;
194 gimple_seq conditional_cleanups;
195 tree exit_label;
196 tree return_temp;
197
198 vec<tree> case_labels;
199 hash_set<tree> *live_switch_vars;
200 /* The formal temporary table. Should this be persistent? */
201 hash_table<gimplify_hasher> *temp_htab;
202
203 int conditions;
204 unsigned into_ssa : 1;
205 unsigned allow_rhs_cond_expr : 1;
206 unsigned in_cleanup_point_expr : 1;
207 unsigned keep_stack : 1;
208 unsigned save_stack : 1;
209 unsigned in_switch_expr : 1;
210 };
211
212 enum gimplify_defaultmap_kind
213 {
214 GDMK_SCALAR,
215   GDMK_SCALAR_TARGET, /* Scalar with Fortran's TARGET attribute; used for implicit mapping only.  */
216 GDMK_AGGREGATE,
217 GDMK_ALLOCATABLE,
218 GDMK_POINTER
219 };
220
221 struct gimplify_omp_ctx
222 {
223 struct gimplify_omp_ctx *outer_context;
224 splay_tree variables;
225 hash_set<tree> *privatized_types;
226 tree clauses;
227 /* Iteration variables in an OMP_FOR. */
228 vec<tree> loop_iter_var;
229 location_t location;
230 enum omp_clause_default_kind default_kind;
231 enum omp_region_type region_type;
232 enum tree_code code;
233 bool combined_loop;
234 bool distribute;
235 bool target_firstprivatize_array_bases;
236 bool add_safelen1;
237 bool order_concurrent;
238 bool has_depend;
239 bool in_for_exprs;
240 int defaultmap[5];
241 };
242
243 static struct gimplify_ctx *gimplify_ctxp;
244 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
245 static bool in_omp_construct;
246
247 /* Forward declaration. */
248 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
249 static hash_map<tree, tree> *oacc_declare_returns;
250 static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
251 bool (*) (tree), fallback_t, bool);
252
253 /* Shorter alias for gimple_seq_add_stmt_without_update, for use in
254    gimplify.c only.  */
255
256 static inline void
257 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
258 {
259 gimple_seq_add_stmt_without_update (seq_p, gs);
260 }
261
262 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
263 NULL, a new sequence is allocated. This function is
264 similar to gimple_seq_add_seq, but does not scan the operands.
265 During gimplification, we need to manipulate statement sequences
266 before the def/use vectors have been constructed. */
267
268 static void
269 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
270 {
271 gimple_stmt_iterator si;
272
273 if (src == NULL)
274 return;
275
276 si = gsi_last (*dst_p);
277 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
278 }
279
280
281 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
282 and popping gimplify contexts. */
283
284 static struct gimplify_ctx *ctx_pool = NULL;
285
286 /* Return a gimplify context struct from the pool. */
287
288 static inline struct gimplify_ctx *
289 ctx_alloc (void)
290 {
291 struct gimplify_ctx * c = ctx_pool;
292
293 if (c)
294 ctx_pool = c->prev_context;
295 else
296 c = XNEW (struct gimplify_ctx);
297
298 memset (c, '\0', sizeof (*c));
299 return c;
300 }
301
302 /* Put gimplify context C back into the pool. */
303
304 static inline void
305 ctx_free (struct gimplify_ctx *c)
306 {
307 c->prev_context = ctx_pool;
308 ctx_pool = c;
309 }
310
311 /* Free allocated ctx stack memory. */
312
313 void
314 free_gimplify_stack (void)
315 {
316 struct gimplify_ctx *c;
317
318 while ((c = ctx_pool))
319 {
320 ctx_pool = c->prev_context;
321 free (c);
322 }
323 }
324
325
326 /* Set up a context for the gimplifier. */
327
328 void
329 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
330 {
331 struct gimplify_ctx *c = ctx_alloc ();
332
333 c->prev_context = gimplify_ctxp;
334 gimplify_ctxp = c;
335 gimplify_ctxp->into_ssa = in_ssa;
336 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
337 }
338
339 /* Tear down a context for the gimplifier. If BODY is non-null, then
340 put the temporaries into the outer BIND_EXPR. Otherwise, put them
341 in the local_decls.
342
343 BODY is not a sequence, but the first tuple in a sequence. */
344
345 void
346 pop_gimplify_context (gimple *body)
347 {
348 struct gimplify_ctx *c = gimplify_ctxp;
349
350 gcc_assert (c
351 && (!c->bind_expr_stack.exists ()
352 || c->bind_expr_stack.is_empty ()));
353 c->bind_expr_stack.release ();
354 gimplify_ctxp = c->prev_context;
355
356 if (body)
357 declare_vars (c->temps, body, false);
358 else
359 record_vars (c->temps);
360
361 delete c->temp_htab;
362 c->temp_htab = NULL;
363 ctx_free (c);
364 }
365
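/* For illustration, a simplified sketch of how these context routines
   are typically used (the caller shown and the OUTER_BIND name are
   assumptions for the example, not taken from this file):

     push_gimplify_context (false, false);
     ... gimplify statements, creating temporaries ...
     pop_gimplify_context (outer_bind);

   The temporaries accumulated in the context are declared in OUTER_BIND
   on the way out, and the context struct itself is recycled through
   ctx_pool above instead of being freed.  */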
366 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
367
368 static void
369 gimple_push_bind_expr (gbind *bind_stmt)
370 {
371 gimplify_ctxp->bind_expr_stack.reserve (8);
372 gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
373 }
374
375 /* Pop the topmost element off the stack of bindings.  */
376
377 static void
378 gimple_pop_bind_expr (void)
379 {
380 gimplify_ctxp->bind_expr_stack.pop ();
381 }
382
383 /* Return the topmost element of the stack of bindings.  */
384
385 gbind *
386 gimple_current_bind_expr (void)
387 {
388 return gimplify_ctxp->bind_expr_stack.last ();
389 }
390
391 /* Return the stack of bindings created during gimplification. */
392
393 vec<gbind *>
394 gimple_bind_expr_stack (void)
395 {
396 return gimplify_ctxp->bind_expr_stack;
397 }
398
399 /* Return true iff there is a COND_EXPR between us and the innermost
400 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
401
402 static bool
403 gimple_conditional_context (void)
404 {
405 return gimplify_ctxp->conditions > 0;
406 }
407
408 /* Note that we've entered a COND_EXPR. */
409
410 static void
411 gimple_push_condition (void)
412 {
413 #ifdef ENABLE_GIMPLE_CHECKING
414 if (gimplify_ctxp->conditions == 0)
415 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
416 #endif
417 ++(gimplify_ctxp->conditions);
418 }
419
420 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
421 now, add any conditional cleanups we've seen to the prequeue. */
422
423 static void
424 gimple_pop_condition (gimple_seq *pre_p)
425 {
426 int conds = --(gimplify_ctxp->conditions);
427
428 gcc_assert (conds >= 0);
429 if (conds == 0)
430 {
431 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
432 gimplify_ctxp->conditional_cleanups = NULL;
433 }
434 }
435
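/* For illustration (a simplified sketch): when gimplifying something
   like "x = p ? q : make_temp ()", where the second arm creates a
   temporary with a cleanup, that cleanup must only run on the branch
   that created the temporary.  gimplify_cond_expr brackets each arm
   with gimple_push_condition / gimple_pop_condition, and
   gimple_push_cleanup consults gimple_conditional_context above to
   decide whether a cleanup needs such conditional treatment.  */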
436 /* A stable comparison routine for use with splay trees and DECLs. */
437
438 static int
439 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
440 {
441 tree a = (tree) xa;
442 tree b = (tree) xb;
443
444 return DECL_UID (a) - DECL_UID (b);
445 }
446
447 /* Create a new omp construct that deals with variable remapping. */
448
449 static struct gimplify_omp_ctx *
450 new_omp_context (enum omp_region_type region_type)
451 {
452 struct gimplify_omp_ctx *c;
453
454 c = XCNEW (struct gimplify_omp_ctx);
455 c->outer_context = gimplify_omp_ctxp;
456 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
457 c->privatized_types = new hash_set<tree>;
458 c->location = input_location;
459 c->region_type = region_type;
460 if ((region_type & ORT_TASK) == 0)
461 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
462 else
463 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
464 c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
465 c->defaultmap[GDMK_SCALAR_TARGET] = GOVD_MAP;
466 c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
467 c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
468 c->defaultmap[GDMK_POINTER] = GOVD_MAP;
469
470 return c;
471 }
472
473 /* Destroy an omp construct that deals with variable remapping. */
474
475 static void
476 delete_omp_context (struct gimplify_omp_ctx *c)
477 {
478 splay_tree_delete (c->variables);
479 delete c->privatized_types;
480 c->loop_iter_var.release ();
481 XDELETE (c);
482 }
483
484 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
485 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
486
487 /* Both gimplify the statement T and append it to *SEQ_P. This function
488 behaves exactly as gimplify_stmt, but you don't have to pass T as a
489 reference. */
490
491 void
492 gimplify_and_add (tree t, gimple_seq *seq_p)
493 {
494 gimplify_stmt (&t, seq_p);
495 }
496
497 /* Gimplify statement T into sequence *SEQ_P, and return the first
498 tuple in the sequence of generated tuples for this statement.
499 Return NULL if gimplifying T produced no tuples. */
500
501 static gimple *
502 gimplify_and_return_first (tree t, gimple_seq *seq_p)
503 {
504 gimple_stmt_iterator last = gsi_last (*seq_p);
505
506 gimplify_and_add (t, seq_p);
507
508 if (!gsi_end_p (last))
509 {
510 gsi_next (&last);
511 return gsi_stmt (last);
512 }
513 else
514 return gimple_seq_first_stmt (*seq_p);
515 }
516
517 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
518 LHS, or for a call argument. */
519
520 static bool
521 is_gimple_mem_rhs (tree t)
522 {
523 /* If we're dealing with a renamable type, either source or dest must be
524 a renamed variable. */
525 if (is_gimple_reg_type (TREE_TYPE (t)))
526 return is_gimple_val (t);
527 else
528 return is_gimple_val (t) || is_gimple_lvalue (t);
529 }
530
531 /* Return true if T is a CALL_EXPR or an expression that can be
532 assigned to a temporary. Note that this predicate should only be
533 used during gimplification. See the rationale for this in
534 gimplify_modify_expr. */
535
536 static bool
537 is_gimple_reg_rhs_or_call (tree t)
538 {
539 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
540 || TREE_CODE (t) == CALL_EXPR);
541 }
542
543 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
544 this predicate should only be used during gimplification. See the
545 rationale for this in gimplify_modify_expr. */
546
547 static bool
548 is_gimple_mem_rhs_or_call (tree t)
549 {
550 /* If we're dealing with a renamable type, either source or dest must be
551 a renamed variable. */
552 if (is_gimple_reg_type (TREE_TYPE (t)))
553 return is_gimple_val (t);
554 else
555 return (is_gimple_val (t)
556 || is_gimple_lvalue (t)
557 || TREE_CLOBBER_P (t)
558 || TREE_CODE (t) == CALL_EXPR);
559 }
560
561 /* Create a temporary with a name derived from VAL. Subroutine of
562 lookup_tmp_var; nobody else should call this function. */
563
564 static inline tree
565 create_tmp_from_val (tree val)
566 {
567 /* Drop all qualifiers and address-space information from the value type. */
568 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
569 tree var = create_tmp_var (type, get_name (val));
570 return var;
571 }
572
573 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
574 an existing expression temporary. */
575
576 static tree
577 lookup_tmp_var (tree val, bool is_formal)
578 {
579 tree ret;
580
581 /* If not optimizing, never really reuse a temporary. local-alloc
582 won't allocate any variable that is used in more than one basic
583 block, which means it will go into memory, causing much extra
584 work in reload and final and poorer code generation, outweighing
585 the extra memory allocation here. */
586 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
587 ret = create_tmp_from_val (val);
588 else
589 {
590 elt_t elt, *elt_p;
591 elt_t **slot;
592
593 elt.val = val;
594 if (!gimplify_ctxp->temp_htab)
595 gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
596 slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
597 if (*slot == NULL)
598 {
599 elt_p = XNEW (elt_t);
600 elt_p->val = val;
601 elt_p->temp = ret = create_tmp_from_val (val);
602 *slot = elt_p;
603 }
604 else
605 {
606 elt_p = *slot;
607 ret = elt_p->temp;
608 }
609 }
610
611 return ret;
612 }
613
614 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
615
616 static tree
617 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
618 bool is_formal, bool allow_ssa)
619 {
620 tree t, mod;
621
622 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
623 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
624 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
625 fb_rvalue);
626
627 if (allow_ssa
628 && gimplify_ctxp->into_ssa
629 && is_gimple_reg_type (TREE_TYPE (val)))
630 {
631 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
632 if (! gimple_in_ssa_p (cfun))
633 {
634 const char *name = get_name (val);
635 if (name)
636 SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
637 }
638 }
639 else
640 t = lookup_tmp_var (val, is_formal);
641
642 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
643
644 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
645
646 /* gimplify_modify_expr might want to reduce this further. */
647 gimplify_and_add (mod, pre_p);
648 ggc_free (mod);
649
650 return t;
651 }
652
653 /* Return a formal temporary variable initialized with VAL. PRE_P is as
654 in gimplify_expr. Only use this function if:
655
656 1) The value of the unfactored expression represented by VAL will not
657 change between the initialization and use of the temporary, and
658 2) The temporary will not be otherwise modified.
659
660 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
661 and #2 means it is inappropriate for && temps.
662
663 For other cases, use get_initialized_tmp_var instead. */
664
665 tree
666 get_formal_tmp_var (tree val, gimple_seq *pre_p)
667 {
668 return internal_get_tmp_var (val, pre_p, NULL, true, true);
669 }
670
671 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
672 are as in gimplify_expr. */
673
674 tree
675 get_initialized_tmp_var (tree val, gimple_seq *pre_p,
676 gimple_seq *post_p /* = NULL */,
677 bool allow_ssa /* = true */)
678 {
679 return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
680 }
681
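/* For example (an illustrative sketch; the temporary name is made up):
   gimplifying "a[i] = b + c" can request a formal temporary for the
   RHS, producing

     t1 = b + c;
     a[i] = t1;

   and, when optimizing, a later occurrence of an identical "b + c"
   tree in the same context can get the same temporary back from
   lookup_tmp_var's hash table, provided the two conditions documented
   at get_formal_tmp_var hold.  */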
682 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
683 generate debug info for them; otherwise don't. */
684
685 void
686 declare_vars (tree vars, gimple *gs, bool debug_info)
687 {
688 tree last = vars;
689 if (last)
690 {
691 tree temps, block;
692
693 gbind *scope = as_a <gbind *> (gs);
694
695 temps = nreverse (last);
696
697 block = gimple_bind_block (scope);
698 gcc_assert (!block || TREE_CODE (block) == BLOCK);
699 if (!block || !debug_info)
700 {
701 DECL_CHAIN (last) = gimple_bind_vars (scope);
702 gimple_bind_set_vars (scope, temps);
703 }
704 else
705 {
706 /* We need to attach the nodes both to the BIND_EXPR and to its
707 associated BLOCK for debugging purposes. The key point here
708 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
709 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
710 if (BLOCK_VARS (block))
711 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
712 else
713 {
714 gimple_bind_set_vars (scope,
715 chainon (gimple_bind_vars (scope), temps));
716 BLOCK_VARS (block) = temps;
717 }
718 }
719 }
720 }
721
722 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
723 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
724 no such upper bound can be obtained. */
725
726 static void
727 force_constant_size (tree var)
728 {
729 /* The only attempt we make is by querying the maximum size of objects
730 of the variable's type. */
731
732 HOST_WIDE_INT max_size;
733
734 gcc_assert (VAR_P (var));
735
736 max_size = max_int_size_in_bytes (TREE_TYPE (var));
737
738 gcc_assert (max_size >= 0);
739
740 DECL_SIZE_UNIT (var)
741 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
742 DECL_SIZE (var)
743 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
744 }
745
746 /* Push the temporary variable TMP into the bindings of function FN.  */
747
748 void
749 gimple_add_tmp_var_fn (struct function *fn, tree tmp)
750 {
751 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
752
753 /* Later processing assumes that the object size is constant, which might
754 not be true at this point. Force the use of a constant upper bound in
755 this case. */
756 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
757 force_constant_size (tmp);
758
759 DECL_CONTEXT (tmp) = fn->decl;
760 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
761
762 record_vars_into (tmp, fn->decl);
763 }
764
765 /* Push the temporary variable TMP into the current binding. */
766
767 void
768 gimple_add_tmp_var (tree tmp)
769 {
770 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
771
772 /* Later processing assumes that the object size is constant, which might
773 not be true at this point. Force the use of a constant upper bound in
774 this case. */
775 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
776 force_constant_size (tmp);
777
778 DECL_CONTEXT (tmp) = current_function_decl;
779 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
780
781 if (gimplify_ctxp)
782 {
783 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
784 gimplify_ctxp->temps = tmp;
785
786 /* Mark temporaries local within the nearest enclosing parallel. */
787 if (gimplify_omp_ctxp)
788 {
789 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
790 int flag = GOVD_LOCAL | GOVD_SEEN;
791 while (ctx
792 && (ctx->region_type == ORT_WORKSHARE
793 || ctx->region_type == ORT_TASKGROUP
794 || ctx->region_type == ORT_SIMD
795 || ctx->region_type == ORT_ACC))
796 {
797 if (ctx->region_type == ORT_SIMD
798 && TREE_ADDRESSABLE (tmp)
799 && !TREE_STATIC (tmp))
800 {
801 if (TREE_CODE (DECL_SIZE_UNIT (tmp)) != INTEGER_CST)
802 ctx->add_safelen1 = true;
803 else if (ctx->in_for_exprs)
804 flag = GOVD_PRIVATE;
805 else
806 flag = GOVD_PRIVATE | GOVD_SEEN;
807 break;
808 }
809 ctx = ctx->outer_context;
810 }
811 if (ctx)
812 omp_add_variable (ctx, tmp, flag);
813 }
814 }
815 else if (cfun)
816 record_vars (tmp);
817 else
818 {
819 gimple_seq body_seq;
820
821 /* This case is for nested functions. We need to expose the locals
822 they create. */
823 body_seq = gimple_body (current_function_decl);
824 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
825 }
826 }
827
828
829 \f
830 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
831 nodes that are referenced more than once in GENERIC functions. This is
832 necessary because gimplification (translation into GIMPLE) is performed
833    by modifying tree nodes in-place, so gimplification of a shared node in a
834 first context could generate an invalid GIMPLE form in a second context.
835
836 This is achieved with a simple mark/copy/unmark algorithm that walks the
837 GENERIC representation top-down, marks nodes with TREE_VISITED the first
838 time it encounters them, duplicates them if they already have TREE_VISITED
839 set, and finally removes the TREE_VISITED marks it has set.
840
841 The algorithm works only at the function level, i.e. it generates a GENERIC
842 representation of a function with no nodes shared within the function when
843 passed a GENERIC function (except for nodes that are allowed to be shared).
844
845 At the global level, it is also necessary to unshare tree nodes that are
846 referenced in more than one function, for the same aforementioned reason.
847 This requires some cooperation from the front-end. There are 2 strategies:
848
849 1. Manual unsharing. The front-end needs to call unshare_expr on every
850 expression that might end up being shared across functions.
851
852 2. Deep unsharing. This is an extension of regular unsharing. Instead
853 of calling unshare_expr on expressions that might be shared across
854 functions, the front-end pre-marks them with TREE_VISITED. This will
855 ensure that they are unshared on the first reference within functions
856 when the regular unsharing algorithm runs. The counterpart is that
857 this algorithm must look deeper than for manual unsharing, which is
858 specified by LANG_HOOKS_DEEP_UNSHARING.
859
860    If there are only a few specific cases of node sharing across functions, it is
861 probably easier for a front-end to unshare the expressions manually. On the
862 contrary, if the expressions generated at the global level are as widespread
863 as expressions generated within functions, deep unsharing is very likely the
864 way to go. */
865
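/* As a small example of why this matters (illustrative): if the
   GENERIC tree for "i + 1" were referenced from two statements and
   the first reference were gimplified in-place into "t = i + 1", the
   second statement would silently refer to the already-lowered form.
   The mark/copy/unmark walk below instead duplicates such a node on
   its second visit, so each context gets a private copy.  */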
866 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
867 These nodes model computations that must be done once. If we were to
868 unshare something like SAVE_EXPR(i++), the gimplification process would
869 create wrong code. However, if DATA is non-null, it must hold a pointer
870 set that is used to unshare the subtrees of these nodes. */
871
872 static tree
873 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
874 {
875 tree t = *tp;
876 enum tree_code code = TREE_CODE (t);
877
878 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
879 copy their subtrees if we can make sure to do it only once. */
880 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
881 {
882 if (data && !((hash_set<tree> *)data)->add (t))
883 ;
884 else
885 *walk_subtrees = 0;
886 }
887
888 /* Stop at types, decls, constants like copy_tree_r. */
889 else if (TREE_CODE_CLASS (code) == tcc_type
890 || TREE_CODE_CLASS (code) == tcc_declaration
891 || TREE_CODE_CLASS (code) == tcc_constant)
892 *walk_subtrees = 0;
893
894 /* Cope with the statement expression extension. */
895 else if (code == STATEMENT_LIST)
896 ;
897
898 /* Leave the bulk of the work to copy_tree_r itself. */
899 else
900 copy_tree_r (tp, walk_subtrees, NULL);
901
902 return NULL_TREE;
903 }
904
905 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
906 If *TP has been visited already, then *TP is deeply copied by calling
907 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
908
909 static tree
910 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
911 {
912 tree t = *tp;
913 enum tree_code code = TREE_CODE (t);
914
915 /* Skip types, decls, and constants. But we do want to look at their
916 types and the bounds of types. Mark them as visited so we properly
917 unmark their subtrees on the unmark pass. If we've already seen them,
918 don't look down further. */
919 if (TREE_CODE_CLASS (code) == tcc_type
920 || TREE_CODE_CLASS (code) == tcc_declaration
921 || TREE_CODE_CLASS (code) == tcc_constant)
922 {
923 if (TREE_VISITED (t))
924 *walk_subtrees = 0;
925 else
926 TREE_VISITED (t) = 1;
927 }
928
929 /* If this node has been visited already, unshare it and don't look
930 any deeper. */
931 else if (TREE_VISITED (t))
932 {
933 walk_tree (tp, mostly_copy_tree_r, data, NULL);
934 *walk_subtrees = 0;
935 }
936
937 /* Otherwise, mark the node as visited and keep looking. */
938 else
939 TREE_VISITED (t) = 1;
940
941 return NULL_TREE;
942 }
943
944 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
945 copy_if_shared_r callback unmodified. */
946
947 void
948 copy_if_shared (tree *tp, void *data)
949 {
950 walk_tree (tp, copy_if_shared_r, data, NULL);
951 }
952
953 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
954 any nested functions. */
955
956 static void
957 unshare_body (tree fndecl)
958 {
959 struct cgraph_node *cgn = cgraph_node::get (fndecl);
960 /* If the language requires deep unsharing, we need a pointer set to make
961 sure we don't repeatedly unshare subtrees of unshareable nodes. */
962 hash_set<tree> *visited
963 = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
964
965 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
966 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
967 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
968
969 delete visited;
970
971 if (cgn)
972 for (cgn = first_nested_function (cgn); cgn;
973 cgn = next_nested_function (cgn))
974 unshare_body (cgn->decl);
975 }
976
977 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
978 Subtrees are walked until the first unvisited node is encountered. */
979
980 static tree
981 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
982 {
983 tree t = *tp;
984
985 /* If this node has been visited, unmark it and keep looking. */
986 if (TREE_VISITED (t))
987 TREE_VISITED (t) = 0;
988
989 /* Otherwise, don't look any deeper. */
990 else
991 *walk_subtrees = 0;
992
993 return NULL_TREE;
994 }
995
996 /* Unmark the visited trees rooted at *TP. */
997
998 static inline void
999 unmark_visited (tree *tp)
1000 {
1001 walk_tree (tp, unmark_visited_r, NULL, NULL);
1002 }
1003
1004 /* Likewise, but mark all trees as not visited. */
1005
1006 static void
1007 unvisit_body (tree fndecl)
1008 {
1009 struct cgraph_node *cgn = cgraph_node::get (fndecl);
1010
1011 unmark_visited (&DECL_SAVED_TREE (fndecl));
1012 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
1013 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
1014
1015 if (cgn)
1016 for (cgn = first_nested_function (cgn);
1017 cgn; cgn = next_nested_function (cgn))
1018 unvisit_body (cgn->decl);
1019 }
1020
1021 /* Unconditionally make an unshared copy of EXPR. This is used when using
1022 stored expressions which span multiple functions, such as BINFO_VTABLE,
1023 as the normal unsharing process can't tell that they're shared. */
1024
1025 tree
1026 unshare_expr (tree expr)
1027 {
1028 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1029 return expr;
1030 }
1031
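/* For instance (illustrative), the ADDR_EXPR stored in a class's
   BINFO_VTABLE is a single tree that may be referenced from every
   function performing a virtual call on that class; a front end is
   expected to pass such trees through unshare_expr before inserting
   them into a particular function body.  */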
1032 /* Worker for unshare_expr_without_location. */
1033
1034 static tree
1035 prune_expr_location (tree *tp, int *walk_subtrees, void *)
1036 {
1037 if (EXPR_P (*tp))
1038 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
1039 else
1040 *walk_subtrees = 0;
1041 return NULL_TREE;
1042 }
1043
1044 /* Similar to unshare_expr but also prune all expression locations
1045 from EXPR. */
1046
1047 tree
1048 unshare_expr_without_location (tree expr)
1049 {
1050 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1051 if (EXPR_P (expr))
1052 walk_tree (&expr, prune_expr_location, NULL, NULL);
1053 return expr;
1054 }
1055
1056 /* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
1057    one, OR_ELSE otherwise.  The location of a STATEMENT_LIST
1058 comprising at least one DEBUG_BEGIN_STMT followed by exactly one
1059 EXPR is the location of the EXPR. */
1060
1061 static location_t
1062 rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
1063 {
1064 if (!expr)
1065 return or_else;
1066
1067 if (EXPR_HAS_LOCATION (expr))
1068 return EXPR_LOCATION (expr);
1069
1070 if (TREE_CODE (expr) != STATEMENT_LIST)
1071 return or_else;
1072
1073 tree_stmt_iterator i = tsi_start (expr);
1074
1075 bool found = false;
1076 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
1077 {
1078 found = true;
1079 tsi_next (&i);
1080 }
1081
1082 if (!found || !tsi_one_before_end_p (i))
1083 return or_else;
1084
1085 return rexpr_location (tsi_stmt (i), or_else);
1086 }
1087
1088 /* Return TRUE iff EXPR (maybe recursively) has a location; see
1089 rexpr_location for the potential recursion. */
1090
1091 static inline bool
1092 rexpr_has_location (tree expr)
1093 {
1094 return rexpr_location (expr) != UNKNOWN_LOCATION;
1095 }
1096
1097 \f
1098 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
1099 contain statements and have a value. Assign its value to a temporary
1100 and give it void_type_node. Return the temporary, or NULL_TREE if
1101 WRAPPER was already void. */
1102
1103 tree
1104 voidify_wrapper_expr (tree wrapper, tree temp)
1105 {
1106 tree type = TREE_TYPE (wrapper);
1107 if (type && !VOID_TYPE_P (type))
1108 {
1109 tree *p;
1110
1111 /* Set p to point to the body of the wrapper. Loop until we find
1112 something that isn't a wrapper. */
1113 for (p = &wrapper; p && *p; )
1114 {
1115 switch (TREE_CODE (*p))
1116 {
1117 case BIND_EXPR:
1118 TREE_SIDE_EFFECTS (*p) = 1;
1119 TREE_TYPE (*p) = void_type_node;
1120 /* For a BIND_EXPR, the body is operand 1. */
1121 p = &BIND_EXPR_BODY (*p);
1122 break;
1123
1124 case CLEANUP_POINT_EXPR:
1125 case TRY_FINALLY_EXPR:
1126 case TRY_CATCH_EXPR:
1127 TREE_SIDE_EFFECTS (*p) = 1;
1128 TREE_TYPE (*p) = void_type_node;
1129 p = &TREE_OPERAND (*p, 0);
1130 break;
1131
1132 case STATEMENT_LIST:
1133 {
1134 tree_stmt_iterator i = tsi_last (*p);
1135 TREE_SIDE_EFFECTS (*p) = 1;
1136 TREE_TYPE (*p) = void_type_node;
1137 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1138 }
1139 break;
1140
1141 case COMPOUND_EXPR:
1142 /* Advance to the last statement. Set all container types to
1143 void. */
1144 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1145 {
1146 TREE_SIDE_EFFECTS (*p) = 1;
1147 TREE_TYPE (*p) = void_type_node;
1148 }
1149 break;
1150
1151 case TRANSACTION_EXPR:
1152 TREE_SIDE_EFFECTS (*p) = 1;
1153 TREE_TYPE (*p) = void_type_node;
1154 p = &TRANSACTION_EXPR_BODY (*p);
1155 break;
1156
1157 default:
1158 /* Assume that any tree upon which voidify_wrapper_expr is
1159 directly called is a wrapper, and that its body is op0. */
1160 if (p == &wrapper)
1161 {
1162 TREE_SIDE_EFFECTS (*p) = 1;
1163 TREE_TYPE (*p) = void_type_node;
1164 p = &TREE_OPERAND (*p, 0);
1165 break;
1166 }
1167 goto out;
1168 }
1169 }
1170
1171 out:
1172 if (p == NULL || IS_EMPTY_STMT (*p))
1173 temp = NULL_TREE;
1174 else if (temp)
1175 {
1176 /* The wrapper is on the RHS of an assignment that we're pushing
1177 down. */
1178 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1179 || TREE_CODE (temp) == MODIFY_EXPR);
1180 TREE_OPERAND (temp, 1) = *p;
1181 *p = temp;
1182 }
1183 else
1184 {
1185 temp = create_tmp_var (type, "retval");
1186 *p = build2 (INIT_EXPR, type, temp, *p);
1187 }
1188
1189 return temp;
1190 }
1191
1192 return NULL_TREE;
1193 }
1194
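/* As an example (a simplified sketch), for a GNU statement expression
   used as an initializer,

     x = ({ int i = f (); i + 1; });

   the BIND_EXPR wrapper has value "i + 1".  voidify_wrapper_expr
   rewrites the final statement into "retval = i + 1", gives every
   enclosing wrapper void type, and returns the "retval" temporary so
   the caller can use it in place of the wrapper's value.  */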
1195 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1196 a temporary through which they communicate. */
1197
1198 static void
1199 build_stack_save_restore (gcall **save, gcall **restore)
1200 {
1201 tree tmp_var;
1202
1203 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1204 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1205 gimple_call_set_lhs (*save, tmp_var);
1206
1207 *restore
1208 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1209 1, tmp_var);
1210 }
1211
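/* The two calls built above end up bracketing a block roughly like
   this (an illustrative sketch of the lowered GIMPLE; the temporary
   name is made up):

     saved_stack.1 = __builtin_stack_save ();
     try
       {
         ... body that allocates VLAs ...
       }
     finally
       {
         __builtin_stack_restore (saved_stack.1);
       }

   as wired together by gimplify_bind_expr below.  */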
1212 /* Generate an IFN_ASAN_MARK call that poisons the shadow memory of variable DECL.  */
1213
1214 static tree
1215 build_asan_poison_call_expr (tree decl)
1216 {
1217 /* Do not poison variables that have size equal to zero. */
1218 tree unit_size = DECL_SIZE_UNIT (decl);
1219 if (zerop (unit_size))
1220 return NULL_TREE;
1221
1222 tree base = build_fold_addr_expr (decl);
1223
1224 return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
1225 void_type_node, 3,
1226 build_int_cst (integer_type_node,
1227 ASAN_MARK_POISON),
1228 base, unit_size);
1229 }
1230
1231 /* Generate an IFN_ASAN_MARK call that poisons or unpoisons, depending
1232    on the POISON flag, the shadow memory of variable DECL.  The call is
1233    inserted at the position identified by iterator IT; the BEFORE flag
1234    determines whether it goes before or after that position.  */
1235
1236 static void
1237 asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
1238 bool before)
1239 {
1240 tree unit_size = DECL_SIZE_UNIT (decl);
1241 tree base = build_fold_addr_expr (decl);
1242
1243 /* Do not poison variables that have size equal to zero. */
1244 if (zerop (unit_size))
1245 return;
1246
1247    /* All stack variables must be aligned to the shadow memory
1248       granularity.  */
1249 gcc_assert (!hwasan_sanitize_p () || hwasan_sanitize_stack_p ());
1250 unsigned shadow_granularity
1251 = hwasan_sanitize_p () ? HWASAN_TAG_GRANULE_SIZE : ASAN_SHADOW_GRANULARITY;
1252 if (DECL_ALIGN_UNIT (decl) <= shadow_granularity)
1253 SET_DECL_ALIGN (decl, BITS_PER_UNIT * shadow_granularity);
1254
1255 HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;
1256
1257 gimple *g
1258 = gimple_build_call_internal (IFN_ASAN_MARK, 3,
1259 build_int_cst (integer_type_node, flags),
1260 base, unit_size);
1261
1262 if (before)
1263 gsi_insert_before (it, g, GSI_NEW_STMT);
1264 else
1265 gsi_insert_after (it, g, GSI_NEW_STMT);
1266 }
1267
1268 /* Generate an IFN_ASAN_MARK internal call that, depending on the POISON
1269    flag, either poisons or unpoisons DECL.  The created statement is
1270    appended to the SEQ_P gimple sequence.  */
1271
1272 static void
1273 asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
1274 {
1275 gimple_stmt_iterator it = gsi_last (*seq_p);
1276 bool before = false;
1277
1278 if (gsi_end_p (it))
1279 before = true;
1280
1281 asan_poison_variable (decl, poison, &it, before);
1282 }
1283
1284 /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1285
1286 static int
1287 sort_by_decl_uid (const void *a, const void *b)
1288 {
1289 const tree *t1 = (const tree *)a;
1290 const tree *t2 = (const tree *)b;
1291
1292 int uid1 = DECL_UID (*t1);
1293 int uid2 = DECL_UID (*t2);
1294
1295 if (uid1 < uid2)
1296 return -1;
1297 else if (uid1 > uid2)
1298 return 1;
1299 else
1300 return 0;
1301 }
1302
1303 /* Generate IFN_ASAN_MARK internal calls for all VARIABLES,
1304    poisoning or unpoisoning per the POISON flag.  The created
1305    statements are appended to the SEQ_P gimple sequence.  */
1306
1307 static void
1308 asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
1309 {
1310 unsigned c = variables->elements ();
1311 if (c == 0)
1312 return;
1313
1314 auto_vec<tree> sorted_variables (c);
1315
1316 for (hash_set<tree>::iterator it = variables->begin ();
1317 it != variables->end (); ++it)
1318 sorted_variables.safe_push (*it);
1319
1320 sorted_variables.qsort (sort_by_decl_uid);
1321
1322 unsigned i;
1323 tree var;
1324 FOR_EACH_VEC_ELT (sorted_variables, i, var)
1325 {
1326 asan_poison_variable (var, poison, seq_p);
1327
1328       /* Add the use_after_scope_memory attribute to the variable in order
1329          to prevent it from being rewritten into SSA form.  */
1330 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
1331 DECL_ATTRIBUTES (var)))
1332 DECL_ATTRIBUTES (var)
1333 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
1334 integer_one_node,
1335 DECL_ATTRIBUTES (var));
1336 }
1337 }
1338
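/* For a 4-byte local "x" going out of scope, the emitted internal
   call looks roughly like this in a GIMPLE dump (illustrative):

     .ASAN_MARK (POISON, &x, 4);

   with a matching UNPOISON where the variable comes into scope.
   Sorting by DECL_UID keeps the emission order stable from run to
   run.  */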
1339 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1340
1341 static enum gimplify_status
1342 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1343 {
1344 tree bind_expr = *expr_p;
1345 bool old_keep_stack = gimplify_ctxp->keep_stack;
1346 bool old_save_stack = gimplify_ctxp->save_stack;
1347 tree t;
1348 gbind *bind_stmt;
1349 gimple_seq body, cleanup;
1350 gcall *stack_save;
1351 location_t start_locus = 0, end_locus = 0;
1352 tree ret_clauses = NULL;
1353
1354 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1355
1356 /* Mark variables seen in this bind expr. */
1357 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1358 {
1359 if (VAR_P (t))
1360 {
1361 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1362
1363 /* Mark variable as local. */
1364 if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t))
1365 {
1366 if (! DECL_SEEN_IN_BIND_EXPR_P (t)
1367 || splay_tree_lookup (ctx->variables,
1368 (splay_tree_key) t) == NULL)
1369 {
1370 int flag = GOVD_LOCAL;
1371 if (ctx->region_type == ORT_SIMD
1372 && TREE_ADDRESSABLE (t)
1373 && !TREE_STATIC (t))
1374 {
1375 if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
1376 ctx->add_safelen1 = true;
1377 else
1378 flag = GOVD_PRIVATE;
1379 }
1380 omp_add_variable (ctx, t, flag | GOVD_SEEN);
1381 }
1382 /* Static locals inside of target construct or offloaded
1383 routines need to be "omp declare target". */
1384 if (TREE_STATIC (t))
1385 for (; ctx; ctx = ctx->outer_context)
1386 if ((ctx->region_type & ORT_TARGET) != 0)
1387 {
1388 if (!lookup_attribute ("omp declare target",
1389 DECL_ATTRIBUTES (t)))
1390 {
1391 tree id = get_identifier ("omp declare target");
1392 DECL_ATTRIBUTES (t)
1393 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (t));
1394 varpool_node *node = varpool_node::get (t);
1395 if (node)
1396 {
1397 node->offloadable = 1;
1398 if (ENABLE_OFFLOADING && !DECL_EXTERNAL (t))
1399 {
1400 g->have_offload = true;
1401 if (!in_lto_p)
1402 vec_safe_push (offload_vars, t);
1403 }
1404 }
1405 }
1406 break;
1407 }
1408 }
1409
1410 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1411
1412 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1413 cfun->has_local_explicit_reg_vars = true;
1414 }
1415 }
1416
1417 bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1418 BIND_EXPR_BLOCK (bind_expr));
1419 gimple_push_bind_expr (bind_stmt);
1420
1421 gimplify_ctxp->keep_stack = false;
1422 gimplify_ctxp->save_stack = false;
1423
1424 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1425 body = NULL;
1426 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1427 gimple_bind_set_body (bind_stmt, body);
1428
1429 /* Source location wise, the cleanup code (stack_restore and clobbers)
1430 belongs to the end of the block, so propagate what we have. The
1431 stack_save operation belongs to the beginning of block, which we can
1432 infer from the bind_expr directly if the block has no explicit
1433 assignment. */
1434 if (BIND_EXPR_BLOCK (bind_expr))
1435 {
1436 end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1437 start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1438 }
1439 if (start_locus == 0)
1440 start_locus = EXPR_LOCATION (bind_expr);
1441
1442 cleanup = NULL;
1443 stack_save = NULL;
1444
1445 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1446 the stack space allocated to the VLAs. */
1447 if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
1448 {
1449 gcall *stack_restore;
1450
1451 /* Save stack on entry and restore it on exit. Add a try_finally
1452 block to achieve this. */
1453 build_stack_save_restore (&stack_save, &stack_restore);
1454
1455 gimple_set_location (stack_save, start_locus);
1456 gimple_set_location (stack_restore, end_locus);
1457
1458 gimplify_seq_add_stmt (&cleanup, stack_restore);
1459 }
1460
1461 /* Add clobbers for all variables that go out of scope. */
1462 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1463 {
1464 if (VAR_P (t)
1465 && !is_global_var (t)
1466 && DECL_CONTEXT (t) == current_function_decl)
1467 {
1468 if (!DECL_HARD_REGISTER (t)
1469 && !TREE_THIS_VOLATILE (t)
1470 && !DECL_HAS_VALUE_EXPR_P (t)
1471 /* Only care for variables that have to be in memory. Others
1472 will be rewritten into SSA names, hence moved to the
1473 top-level. */
1474 && !is_gimple_reg (t)
1475 && flag_stack_reuse != SR_NONE)
1476 {
1477 tree clobber = build_clobber (TREE_TYPE (t));
1478 gimple *clobber_stmt;
1479 clobber_stmt = gimple_build_assign (t, clobber);
1480 gimple_set_location (clobber_stmt, end_locus);
1481 gimplify_seq_add_stmt (&cleanup, clobber_stmt);
1482 }
1483
1484 if (flag_openacc && oacc_declare_returns != NULL)
1485 {
1486 tree key = t;
1487 if (DECL_HAS_VALUE_EXPR_P (key))
1488 {
1489 key = DECL_VALUE_EXPR (key);
1490 if (TREE_CODE (key) == INDIRECT_REF)
1491 key = TREE_OPERAND (key, 0);
1492 }
1493 tree *c = oacc_declare_returns->get (key);
1494 if (c != NULL)
1495 {
1496 if (ret_clauses)
1497 OMP_CLAUSE_CHAIN (*c) = ret_clauses;
1498
1499 ret_clauses = unshare_expr (*c);
1500
1501 oacc_declare_returns->remove (key);
1502
1503 if (oacc_declare_returns->is_empty ())
1504 {
1505 delete oacc_declare_returns;
1506 oacc_declare_returns = NULL;
1507 }
1508 }
1509 }
1510 }
1511
1512 if (asan_poisoned_variables != NULL
1513 && asan_poisoned_variables->contains (t))
1514 {
1515 asan_poisoned_variables->remove (t);
1516 asan_poison_variable (t, true, &cleanup);
1517 }
1518
1519 if (gimplify_ctxp->live_switch_vars != NULL
1520 && gimplify_ctxp->live_switch_vars->contains (t))
1521 gimplify_ctxp->live_switch_vars->remove (t);
1522 }
1523
1524 if (ret_clauses)
1525 {
1526 gomp_target *stmt;
1527 gimple_stmt_iterator si = gsi_start (cleanup);
1528
1529 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
1530 ret_clauses);
1531 gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
1532 }
1533
1534 if (cleanup)
1535 {
1536 gtry *gs;
1537 gimple_seq new_body;
1538
1539 new_body = NULL;
1540 gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
1541 GIMPLE_TRY_FINALLY);
1542
1543 if (stack_save)
1544 gimplify_seq_add_stmt (&new_body, stack_save);
1545 gimplify_seq_add_stmt (&new_body, gs);
1546 gimple_bind_set_body (bind_stmt, new_body);
1547 }
1548
1549 /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1550 if (!gimplify_ctxp->keep_stack)
1551 gimplify_ctxp->keep_stack = old_keep_stack;
1552 gimplify_ctxp->save_stack = old_save_stack;
1553
1554 gimple_pop_bind_expr ();
1555
1556 gimplify_seq_add_stmt (pre_p, bind_stmt);
1557
1558 if (temp)
1559 {
1560 *expr_p = temp;
1561 return GS_OK;
1562 }
1563
1564 *expr_p = NULL_TREE;
1565 return GS_ALL_DONE;
1566 }
1567
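/* Putting the pieces together, a block such as (illustrative sketch;
   temporary names are made up)

     { int n = f (); int vla[n]; g (vla); }

   gimplifies to roughly

     saved_stack.1 = __builtin_stack_save ();
     try
       {
         n = f ();
         vla.2 = __builtin_alloca_with_align (...);
         g (vla.2);
       }
     finally
       {
         __builtin_stack_restore (saved_stack.1);
       }

   while an ordinary addressable local that must live in memory would
   additionally be clobbered ("x ={v} {CLOBBER};") in the cleanup.  */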
1568 /* Maybe add early return predict statement to PRE_P sequence. */
1569
1570 static void
1571 maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
1572 {
1573   /* If we are in a conditional context, add a PREDICT statement.  */
1574 if (gimple_conditional_context ())
1575 {
1576 gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
1577 NOT_TAKEN);
1578 gimplify_seq_add_stmt (pre_p, predict);
1579 }
1580 }
1581
1582 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1583 GIMPLE value, it is assigned to a new temporary and the statement is
1584 re-written to return the temporary.
1585
1586 PRE_P points to the sequence where side effects that must happen before
1587 STMT should be stored. */
1588
1589 static enum gimplify_status
1590 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1591 {
1592 greturn *ret;
1593 tree ret_expr = TREE_OPERAND (stmt, 0);
1594 tree result_decl, result;
1595
1596 if (ret_expr == error_mark_node)
1597 return GS_ERROR;
1598
1599 if (!ret_expr
1600 || TREE_CODE (ret_expr) == RESULT_DECL)
1601 {
1602 maybe_add_early_return_predict_stmt (pre_p);
1603 greturn *ret = gimple_build_return (ret_expr);
1604 copy_warning (ret, stmt);
1605 gimplify_seq_add_stmt (pre_p, ret);
1606 return GS_ALL_DONE;
1607 }
1608
1609 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1610 result_decl = NULL_TREE;
1611 else if (TREE_CODE (ret_expr) == COMPOUND_EXPR)
1612 {
1613 /* Used in C++ for handling EH cleanup of the return value if a local
1614 cleanup throws. Assume the front-end knows what it's doing. */
1615 result_decl = DECL_RESULT (current_function_decl);
1616 /* But crash if we end up trying to modify ret_expr below. */
1617 ret_expr = NULL_TREE;
1618 }
1619 else
1620 {
1621 result_decl = TREE_OPERAND (ret_expr, 0);
1622
1623 /* See through a return by reference. */
1624 if (TREE_CODE (result_decl) == INDIRECT_REF)
1625 result_decl = TREE_OPERAND (result_decl, 0);
1626
1627 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1628 || TREE_CODE (ret_expr) == INIT_EXPR)
1629 && TREE_CODE (result_decl) == RESULT_DECL);
1630 }
1631
1632 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1633 Recall that aggregate_value_p is FALSE for any aggregate type that is
1634 returned in registers. If we're returning values in registers, then
1635 we don't want to extend the lifetime of the RESULT_DECL, particularly
1636 across another call. In addition, for those aggregates for which
1637 hard_function_value generates a PARALLEL, we'll die during normal
1638 expansion of structure assignments; there's special code in expand_return
1639 to handle this case that does not exist in expand_expr. */
1640 if (!result_decl)
1641 result = NULL_TREE;
1642 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1643 {
1644 if (!poly_int_tree_p (DECL_SIZE (result_decl)))
1645 {
1646 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1647 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1648 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1649 should be effectively allocated by the caller, i.e. all calls to
1650 this function must be subject to the Return Slot Optimization. */
1651 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1652 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1653 }
1654 result = result_decl;
1655 }
1656 else if (gimplify_ctxp->return_temp)
1657 result = gimplify_ctxp->return_temp;
1658 else
1659 {
1660 result = create_tmp_reg (TREE_TYPE (result_decl));
1661
1662 /* ??? With complex control flow (usually involving abnormal edges),
1663 we can wind up warning about an uninitialized value for this. Due
1664 to how this variable is constructed and initialized, this is never
1665 true. Give up and never warn. */
1666 suppress_warning (result, OPT_Wuninitialized);
1667
1668 gimplify_ctxp->return_temp = result;
1669 }
1670
1671 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1672 Then gimplify the whole thing. */
1673 if (result != result_decl)
1674 TREE_OPERAND (ret_expr, 0) = result;
1675
1676 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1677
1678 maybe_add_early_return_predict_stmt (pre_p);
1679 ret = gimple_build_return (result);
1680 copy_warning (ret, stmt);
1681 gimplify_seq_add_stmt (pre_p, ret);
1682
1683 return GS_ALL_DONE;
1684 }
1685
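/* For example, "return a + b;" has a non-GIMPLE operand, so it is
   rewritten to use a temporary (illustrative sketch; the name is made
   up):

     D.1234 = a + b;
     return D.1234;

   The temporary is cached in gimplify_ctxp->return_temp, so all
   return statements in the function share a single one.  */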
1686 /* Gimplify a variable-length array DECL. */
1687
1688 static void
1689 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1690 {
1691 /* This is a variable-sized decl. Simplify its size and mark it
1692 for deferred expansion. */
1693 tree t, addr, ptr_type;
1694
1695 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1696 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1697
1698 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1699 if (DECL_HAS_VALUE_EXPR_P (decl))
1700 return;
1701
1702 /* All occurrences of this decl in final gimplified code will be
1703 replaced by indirection. Setting DECL_VALUE_EXPR does two
1704 things: First, it lets the rest of the gimplifier know what
1705 replacement to use. Second, it lets the debug info know
1706 where to find the value. */
1707 ptr_type = build_pointer_type (TREE_TYPE (decl));
1708 addr = create_tmp_var (ptr_type, get_name (decl));
1709 DECL_IGNORED_P (addr) = 0;
1710 t = build_fold_indirect_ref (addr);
1711 TREE_THIS_NOTRAP (t) = 1;
1712 SET_DECL_VALUE_EXPR (decl, t);
1713 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1714
1715 t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
1716 max_int_size_in_bytes (TREE_TYPE (decl)));
1717 /* The call has been built for a variable-sized object. */
1718 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1719 t = fold_convert (ptr_type, t);
1720 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1721
1722 gimplify_and_add (t, seq_p);
1723
1724 /* Record the dynamic allocation associated with DECL if requested. */
1725 if (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC)
1726 record_dynamic_alloc (decl);
1727 }
1728
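/* For a declaration "int a[n];" this emits, roughly (an illustrative
   sketch; the pointer temporary name is made up):

     a.1 = __builtin_alloca_with_align (SIZE, ALIGN);

   (or the _and_max variant when a maximum size is known), and sets
   DECL_VALUE_EXPR (a) to "*a.1" so that later uses of "a" are
   rewritten into indirections through the pointer temporary.  */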
1729 /* A helper function to be called via walk_tree. Mark all labels under *TP
1730 as being forced. To be called for DECL_INITIAL of static variables. */
1731
1732 static tree
1733 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1734 {
1735 if (TYPE_P (*tp))
1736 *walk_subtrees = 0;
1737 if (TREE_CODE (*tp) == LABEL_DECL)
1738 {
1739 FORCED_LABEL (*tp) = 1;
1740 cfun->has_forced_label_in_static = 1;
1741 }
1742
1743 return NULL_TREE;
1744 }
1745
1746 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1747 and initialization explicit. */
1748
1749 static enum gimplify_status
1750 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1751 {
1752 tree stmt = *stmt_p;
1753 tree decl = DECL_EXPR_DECL (stmt);
1754
1755 *stmt_p = NULL_TREE;
1756
1757 if (TREE_TYPE (decl) == error_mark_node)
1758 return GS_ERROR;
1759
1760 if ((TREE_CODE (decl) == TYPE_DECL
1761 || VAR_P (decl))
1762 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1763 {
1764 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1765 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1766 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
1767 }
1768
1769 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1770 in case its size expressions contain problematic nodes like CALL_EXPR. */
1771 if (TREE_CODE (decl) == TYPE_DECL
1772 && DECL_ORIGINAL_TYPE (decl)
1773 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1774 {
1775 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1776 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
1777 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
1778 }
1779
1780 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
1781 {
1782 tree init = DECL_INITIAL (decl);
1783 bool is_vla = false;
1784
1785 poly_uint64 size;
1786 if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &size)
1787 || (!TREE_STATIC (decl)
1788 && flag_stack_check == GENERIC_STACK_CHECK
1789 && maybe_gt (size,
1790 (unsigned HOST_WIDE_INT) STACK_CHECK_MAX_VAR_SIZE)))
1791 {
1792 gimplify_vla_decl (decl, seq_p);
1793 is_vla = true;
1794 }
1795
1796 if (asan_poisoned_variables
1797 && !is_vla
1798 && TREE_ADDRESSABLE (decl)
1799 && !TREE_STATIC (decl)
1800 && !DECL_HAS_VALUE_EXPR_P (decl)
1801 && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
1802 && dbg_cnt (asan_use_after_scope)
1803 && !gimplify_omp_ctxp
1804 /* GNAT introduces temporaries to hold return values of calls in
1805 initializers of variables defined in other units, so the
1806 declaration of the variable is discarded completely. We do not
1807 want to issue poison calls for such dropped variables. */
1808 && (DECL_SEEN_IN_BIND_EXPR_P (decl)
1809 || (DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)))
1810 {
1811 asan_poisoned_variables->add (decl);
1812 asan_poison_variable (decl, false, seq_p);
1813 if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
1814 gimplify_ctxp->live_switch_vars->add (decl);
1815 }
1816
1817 /* Some front ends do not explicitly declare all anonymous
1818 artificial variables. We compensate here by declaring the
1819 variables, though it would be better if the front ends would
1820 explicitly declare them. */
1821 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1822 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1823 gimple_add_tmp_var (decl);
1824
1825 if (init && init != error_mark_node)
1826 {
1827 if (!TREE_STATIC (decl))
1828 {
1829 DECL_INITIAL (decl) = NULL_TREE;
1830 init = build2 (INIT_EXPR, void_type_node, decl, init);
1831 gimplify_and_add (init, seq_p);
1832 ggc_free (init);
1833 /* Clear TREE_READONLY if we really have an initialization. */
1834 if (!DECL_INITIAL (decl) && !omp_is_reference (decl))
1835 TREE_READONLY (decl) = 0;
1836 }
1837 else
1838 /* We must still examine initializers for static variables
1839 as they may contain a label address. */
1840 walk_tree (&init, force_labels_r, NULL, NULL);
1841 }
1842 }
1843
1844 return GS_ALL_DONE;
1845 }
1846
1847 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1848 and replacing the LOOP_EXPR with goto, but if the loop contains an
1849 EXIT_EXPR, we need to append a label for it to jump to. */
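/* An illustrative sketch (label names hypothetical): LOOP_EXPR <body>
   becomes

     <D.start>:
     body
     goto <D.start>;
     <D.exit>:

   where the trailing <D.exit> label is only emitted if the body
   contained an EXIT_EXPR, lowered by gimplify_exit_expr below into a
   conditional jump to that label.  */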
1850
1851 static enum gimplify_status
1852 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1853 {
1854 tree saved_label = gimplify_ctxp->exit_label;
1855 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1856
1857 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1858
1859 gimplify_ctxp->exit_label = NULL_TREE;
1860
1861 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1862
1863 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1864
1865 if (gimplify_ctxp->exit_label)
1866 gimplify_seq_add_stmt (pre_p,
1867 gimple_build_label (gimplify_ctxp->exit_label));
1868
1869 gimplify_ctxp->exit_label = saved_label;
1870
1871 *expr_p = NULL;
1872 return GS_ALL_DONE;
1873 }
1874
1875 /* Gimplify a statement list onto a sequence. These may be created either
1876 by an enlightened front-end or by shortcut_cond_expr. */
1877
1878 static enum gimplify_status
1879 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1880 {
1881 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1882
1883 tree_stmt_iterator i = tsi_start (*expr_p);
1884
1885 while (!tsi_end_p (i))
1886 {
1887 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1888 tsi_delink (&i);
1889 }
1890
1891 if (temp)
1892 {
1893 *expr_p = temp;
1894 return GS_OK;
1895 }
1896
1897 return GS_ALL_DONE;
1898 }
1899
1900 /* Callback for walk_gimple_seq. */
1901
1902 static tree
1903 warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
1904 struct walk_stmt_info *wi)
1905 {
1906 gimple *stmt = gsi_stmt (*gsi_p);
1907
1908 *handled_ops_p = true;
1909 switch (gimple_code (stmt))
1910 {
1911 case GIMPLE_TRY:
1912 /* A compiler-generated cleanup or a user-written try block.
1913 If it's empty, don't dive into it--that would result in
1914 worse location info. */
1915 if (gimple_try_eval (stmt) == NULL)
1916 {
1917 wi->info = stmt;
1918 return integer_zero_node;
1919 }
1920 /* Fall through. */
1921 case GIMPLE_BIND:
1922 case GIMPLE_CATCH:
1923 case GIMPLE_EH_FILTER:
1924 case GIMPLE_TRANSACTION:
1925 /* Walk the sub-statements. */
1926 *handled_ops_p = false;
1927 break;
1928
1929 case GIMPLE_DEBUG:
1930 /* Ignore these. We may generate them before declarations that
1931 are never executed. If there's something to warn about,
1932 there will be non-debug stmts too, and we'll catch those. */
1933 break;
1934
1935 case GIMPLE_CALL:
1936 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1937 {
1938 *handled_ops_p = false;
1939 break;
1940 }
1941 /* Fall through. */
1942 default:
1943 /* Save the first "real" statement (not a decl/lexical scope/...). */
1944 wi->info = stmt;
1945 return integer_zero_node;
1946 }
1947 return NULL_TREE;
1948 }
1949
1950 /* Possibly warn about unreachable statements between switch's controlling
1951 expression and the first case. SEQ is the body of a switch expression. */
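/* For example, a sketch of what this diagnoses:

     switch (i)
       {
         foo ();        <- "statement will never be executed"
       case 1:
         ...
       }

   whereas debug stmts, empty cleanups and artificial gotos before the
   first case label are deliberately skipped by the walker above.  */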
1952
1953 static void
1954 maybe_warn_switch_unreachable (gimple_seq seq)
1955 {
1956 if (!warn_switch_unreachable
1957 /* This warning doesn't play well with Fortran when optimizations
1958 are on. */
1959 || lang_GNU_Fortran ()
1960 || seq == NULL)
1961 return;
1962
1963 struct walk_stmt_info wi;
1964 memset (&wi, 0, sizeof (wi));
1965 walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1966 gimple *stmt = (gimple *) wi.info;
1967
1968 if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1969 {
1970 if (gimple_code (stmt) == GIMPLE_GOTO
1971 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1972 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1973 /* Don't warn for compiler-generated gotos. These occur
1974 in Duff's devices, for example. */;
1975 else
1976 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1977 "statement will never be executed");
1978 }
1979 }
1980
1981
1982 /* A label entry that pairs label and a location. */
1983 struct label_entry
1984 {
1985 tree label;
1986 location_t loc;
1987 };
1988
1989 /* Find LABEL in vector of label entries VEC. */
1990
1991 static struct label_entry *
1992 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
1993 {
1994 unsigned int i;
1995 struct label_entry *l;
1996
1997 FOR_EACH_VEC_ELT (*vec, i, l)
1998 if (l->label == label)
1999 return l;
2000 return NULL;
2001 }
2002
2003 /* Return true if LABEL, a LABEL_DECL, represents a case label
2004 in a vector of labels CASES. */
2005
2006 static bool
2007 case_label_p (const vec<tree> *cases, tree label)
2008 {
2009 unsigned int i;
2010 tree l;
2011
2012 FOR_EACH_VEC_ELT (*cases, i, l)
2013 if (CASE_LABEL (l) == label)
2014 return true;
2015 return false;
2016 }
2017
2018 /* Find the last nondebug statement in a scope STMT. */
2019
2020 static gimple *
2021 last_stmt_in_scope (gimple *stmt)
2022 {
2023 if (!stmt)
2024 return NULL;
2025
2026 switch (gimple_code (stmt))
2027 {
2028 case GIMPLE_BIND:
2029 {
2030 gbind *bind = as_a <gbind *> (stmt);
2031 stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
2032 return last_stmt_in_scope (stmt);
2033 }
2034
2035 case GIMPLE_TRY:
2036 {
2037 gtry *try_stmt = as_a <gtry *> (stmt);
2038 stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
2039 gimple *last_eval = last_stmt_in_scope (stmt);
2040 if (gimple_stmt_may_fallthru (last_eval)
2041 && (last_eval == NULL
2042 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
2043 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
2044 {
2045 stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
2046 return last_stmt_in_scope (stmt);
2047 }
2048 else
2049 return last_eval;
2050 }
2051
2052 case GIMPLE_DEBUG:
2053 gcc_unreachable ();
2054
2055 default:
2056 return stmt;
2057 }
2058 }
2059
2060 /* Collect interesting labels in LABELS and return the statement preceding
2061 another case label, or a user-defined label. Store a location useful
2062 to give warnings at *PREVLOC (usually the location of the returned
2063 statement or of its surrounding scope). */
2064
2065 static gimple *
2066 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
2067 auto_vec <struct label_entry> *labels,
2068 location_t *prevloc)
2069 {
2070 gimple *prev = NULL;
2071
2072 *prevloc = UNKNOWN_LOCATION;
2073 do
2074 {
2075 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
2076 {
2077 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2078 which starts with a GIMPLE_SWITCH and ends with a break label.
2079 Handle that as a single statement that can fall through. */
2080 gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
2081 gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
2082 gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
2083 if (last
2084 && gimple_code (first) == GIMPLE_SWITCH
2085 && gimple_code (last) == GIMPLE_LABEL)
2086 {
2087 tree label = gimple_label_label (as_a <glabel *> (last));
2088 if (SWITCH_BREAK_LABEL_P (label))
2089 {
2090 prev = bind;
2091 gsi_next (gsi_p);
2092 continue;
2093 }
2094 }
2095 }
2096 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
2097 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
2098 {
2099 /* Nested scope. Only look at the last statement of
2100 the innermost scope. */
2101 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
2102 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
2103 if (last)
2104 {
2105 prev = last;
2106 /* It might be a label without a location. Use the
2107 location of the scope then. */
2108 if (!gimple_has_location (prev))
2109 *prevloc = bind_loc;
2110 }
2111 gsi_next (gsi_p);
2112 continue;
2113 }
2114
2115 /* Ifs are tricky. */
2116 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
2117 {
2118 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
2119 tree false_lab = gimple_cond_false_label (cond_stmt);
2120 location_t if_loc = gimple_location (cond_stmt);
2121
2122 /* If we have e.g.
2123 if (i > 1) goto <D.2259>; else goto D;
2124 we can't do much with the else-branch. */
2125 if (!DECL_ARTIFICIAL (false_lab))
2126 break;
2127
2128 /* Go on until the false label, then one step back. */
2129 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
2130 {
2131 gimple *stmt = gsi_stmt (*gsi_p);
2132 if (gimple_code (stmt) == GIMPLE_LABEL
2133 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
2134 break;
2135 }
2136
2137 /* Not found? Oops. */
2138 if (gsi_end_p (*gsi_p))
2139 break;
2140
2141 struct label_entry l = { false_lab, if_loc };
2142 labels->safe_push (l);
2143
2144 /* Go to the last statement of the then branch. */
2145 gsi_prev (gsi_p);
2146
2147 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2148 <D.1759>:
2149 <stmt>;
2150 goto <D.1761>;
2151 <D.1760>:
2152 */
2153 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
2154 && !gimple_has_location (gsi_stmt (*gsi_p)))
2155 {
2156 /* Look at the statement before, it might be
2157 attribute fallthrough, in which case don't warn. */
2158 gsi_prev (gsi_p);
2159 bool fallthru_before_dest
2160 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
2161 gsi_next (gsi_p);
2162 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
2163 if (!fallthru_before_dest)
2164 {
2165 struct label_entry l = { goto_dest, if_loc };
2166 labels->safe_push (l);
2167 }
2168 }
2169 /* And move back. */
2170 gsi_next (gsi_p);
2171 }
2172
2173 /* Remember the last statement. Skip labels that are of no interest
2174 to us. */
2175 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2176 {
2177 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
2178 if (find_label_entry (labels, label))
2179 prev = gsi_stmt (*gsi_p);
2180 }
2181 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
2182 ;
2183 else if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_PREDICT)
2184 ;
2185 else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
2186 prev = gsi_stmt (*gsi_p);
2187 gsi_next (gsi_p);
2188 }
2189 while (!gsi_end_p (*gsi_p)
2190 /* Stop if we find a case or a user-defined label. */
2191 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2192 || !gimple_has_location (gsi_stmt (*gsi_p))));
2193
2194 if (prev && gimple_has_location (prev))
2195 *prevloc = gimple_location (prev);
2196 return prev;
2197 }
2198
2199 /* Return true if the switch fallthrough warning should occur. LABEL is
2200 the label statement that we're falling through to. */
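/* A sketch of the heuristics below (example code, not from the sources):

     case 0:
       foo ();
     my_label:          no warning: user label followed by a statement
       bar ();

     case 1:
       foo ();
     default:
       break;           no warning: the branch terminates immediately

   Falling from a statement into a case label does warn, unless the
   label was marked FALLTHROUGH_LABEL_P by the front end.  */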
2201
2202 static bool
2203 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2204 {
2205 gimple_stmt_iterator gsi = *gsi_p;
2206
2207 /* Don't warn if the label is marked with a "falls through" comment. */
2208 if (FALLTHROUGH_LABEL_P (label))
2209 return false;
2210
2211 /* Don't warn for non-case labels followed by a statement:
2212 case 0:
2213 foo ();
2214 label:
2215 bar ();
2216 as these are likely intentional. */
2217 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2218 {
2219 tree l;
2220 while (!gsi_end_p (gsi)
2221 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2222 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2223 && !case_label_p (&gimplify_ctxp->case_labels, l))
2224 gsi_next_nondebug (&gsi);
2225 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2226 return false;
2227 }
2228
2229 /* Don't warn for terminated branches, i.e. when the branch following the
2230 subsequent case labels immediately breaks, returns, or jumps. */
2231 gsi = *gsi_p;
2232
2233 /* Skip all immediately following labels. */
2234 while (!gsi_end_p (gsi)
2235 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2236 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2237 gsi_next_nondebug (&gsi);
2238
2239 /* { ... something; default:; } */
2240 if (gsi_end_p (gsi)
2241 /* { ... something; default: break; } or
2242 { ... something; default: goto L; } */
2243 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2244 /* { ... something; default: return; } */
2245 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2246 return false;
2247
2248 return true;
2249 }
2250
2251 /* Callback for walk_gimple_seq. */
2252
2253 static tree
2254 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2255 struct walk_stmt_info *)
2256 {
2257 gimple *stmt = gsi_stmt (*gsi_p);
2258
2259 *handled_ops_p = true;
2260 switch (gimple_code (stmt))
2261 {
2262 case GIMPLE_TRY:
2263 case GIMPLE_BIND:
2264 case GIMPLE_CATCH:
2265 case GIMPLE_EH_FILTER:
2266 case GIMPLE_TRANSACTION:
2267 /* Walk the sub-statements. */
2268 *handled_ops_p = false;
2269 break;
2270
2271 /* Find a sequence of the form:
2272
2273 GIMPLE_LABEL
2274 [...]
2275 <may fallthru stmt>
2276 GIMPLE_LABEL
2277
2278 and possibly warn. */
2279 case GIMPLE_LABEL:
2280 {
2281 /* Found a label. Skip all immediately following labels. */
2282 while (!gsi_end_p (*gsi_p)
2283 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2284 gsi_next_nondebug (gsi_p);
2285
2286 /* There might be no more statements. */
2287 if (gsi_end_p (*gsi_p))
2288 return integer_zero_node;
2289
2290 /* Vector of labels that fall through. */
2291 auto_vec <struct label_entry> labels;
2292 location_t prevloc;
2293 gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);
2294
2295 /* There might be no more statements. */
2296 if (gsi_end_p (*gsi_p))
2297 return integer_zero_node;
2298
2299 gimple *next = gsi_stmt (*gsi_p);
2300 tree label;
2301 /* If what follows is a label, then we may have a fallthrough. */
2302 if (gimple_code (next) == GIMPLE_LABEL
2303 && gimple_has_location (next)
2304 && (label = gimple_label_label (as_a <glabel *> (next)))
2305 && prev != NULL)
2306 {
2307 struct label_entry *l;
2308 bool warned_p = false;
2309 auto_diagnostic_group d;
2310 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2311 /* Quiet. */;
2312 else if (gimple_code (prev) == GIMPLE_LABEL
2313 && (label = gimple_label_label (as_a <glabel *> (prev)))
2314 && (l = find_label_entry (&labels, label)))
2315 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2316 "this statement may fall through");
2317 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2318 /* Try to be clever and don't warn when the statement
2319 can't actually fall through. */
2320 && gimple_stmt_may_fallthru (prev)
2321 && prevloc != UNKNOWN_LOCATION)
2322 warned_p = warning_at (prevloc,
2323 OPT_Wimplicit_fallthrough_,
2324 "this statement may fall through");
2325 if (warned_p)
2326 inform (gimple_location (next), "here");
2327
2328 /* Mark this label as processed so as to prevent multiple
2329 warnings in nested switches. */
2330 FALLTHROUGH_LABEL_P (label) = true;
2331
2332 /* So that the next warn_implicit_fallthrough_r will start looking for
2333 a new sequence starting with this label. */
2334 gsi_prev (gsi_p);
2335 }
2336 }
2337 break;
2338 default:
2339 break;
2340 }
2341 return NULL_TREE;
2342 }
2343
2344 /* Warn when a switch case falls through. */
2345
2346 static void
2347 maybe_warn_implicit_fallthrough (gimple_seq seq)
2348 {
2349 if (!warn_implicit_fallthrough)
2350 return;
2351
2352 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2353 if (!(lang_GNU_C ()
2354 || lang_GNU_CXX ()
2355 || lang_GNU_OBJC ()))
2356 return;
2357
2358 struct walk_stmt_info wi;
2359 memset (&wi, 0, sizeof (wi));
2360 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2361 }
2362
2363 /* Callback for walk_gimple_seq. */
2364
2365 static tree
2366 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2367 struct walk_stmt_info *wi)
2368 {
2369 gimple *stmt = gsi_stmt (*gsi_p);
2370
2371 *handled_ops_p = true;
2372 switch (gimple_code (stmt))
2373 {
2374 case GIMPLE_TRY:
2375 case GIMPLE_BIND:
2376 case GIMPLE_CATCH:
2377 case GIMPLE_EH_FILTER:
2378 case GIMPLE_TRANSACTION:
2379 /* Walk the sub-statements. */
2380 *handled_ops_p = false;
2381 break;
2382 case GIMPLE_CALL:
2383 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2384 {
2385 gsi_remove (gsi_p, true);
2386 if (gsi_end_p (*gsi_p))
2387 {
2388 *static_cast<location_t *>(wi->info) = gimple_location (stmt);
2389 return integer_zero_node;
2390 }
2391
2392 bool found = false;
2393 location_t loc = gimple_location (stmt);
2394
2395 gimple_stmt_iterator gsi2 = *gsi_p;
2396 stmt = gsi_stmt (gsi2);
2397 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2398 {
2399 /* Go on until the artificial label. */
2400 tree goto_dest = gimple_goto_dest (stmt);
2401 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2402 {
2403 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2404 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2405 == goto_dest)
2406 break;
2407 }
2408
2409 /* Not found? Stop. */
2410 if (gsi_end_p (gsi2))
2411 break;
2412
2413 /* Look one past it. */
2414 gsi_next (&gsi2);
2415 }
2416
2417 /* We're looking for a case label or default label here. */
2418 while (!gsi_end_p (gsi2))
2419 {
2420 stmt = gsi_stmt (gsi2);
2421 if (gimple_code (stmt) == GIMPLE_LABEL)
2422 {
2423 tree label = gimple_label_label (as_a <glabel *> (stmt));
2424 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2425 {
2426 found = true;
2427 break;
2428 }
2429 }
2430 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2431 ;
2432 else if (!is_gimple_debug (stmt))
2433 /* Anything else is not expected. */
2434 break;
2435 gsi_next (&gsi2);
2436 }
2437 if (!found)
2438 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2439 "a case label or default label");
2440 }
2441 break;
2442 default:
2443 break;
2444 }
2445 return NULL_TREE;
2446 }
2447
2448 /* Expand all FALLTHROUGH () calls in SEQ. */
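/* A sketch: the front ends lower the fallthrough attribute into an
   internal-function call, so

     case 1:
       foo ();
       [[fallthrough]];
     case 2:

   arrives here with an IFN_FALLTHROUGH call before the label; the call
   is removed once a following case or default label has been verified,
   and a diagnostic is emitted otherwise.  */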
2449
2450 static void
2451 expand_FALLTHROUGH (gimple_seq *seq_p)
2452 {
2453 struct walk_stmt_info wi;
2454 location_t loc;
2455 memset (&wi, 0, sizeof (wi));
2456 wi.info = (void *) &loc;
2457 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2458 if (wi.callback_result == integer_zero_node)
2459 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2460 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2461 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2462 "a case label or default label");
2463 }
2464
2465 \f
2466 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2467 branch to. */
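/* A sketch of the lowering (labels hypothetical):

     switch (x) { case 1: foo (); break; default: bar (); }

   becomes

     switch (x) <default: <D.d>, case 1: <D.1>>
     <D.1>:
     foo ();
     goto <D.brk>;
     <D.d>:
     bar ();
     <D.brk>:

   i.e. a GIMPLE_SWITCH carrying the collected case-label vector,
   followed by the gimplified body.  */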
2468
2469 static enum gimplify_status
2470 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2471 {
2472 tree switch_expr = *expr_p;
2473 gimple_seq switch_body_seq = NULL;
2474 enum gimplify_status ret;
2475 tree index_type = TREE_TYPE (switch_expr);
2476 if (index_type == NULL_TREE)
2477 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2478
2479 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2480 fb_rvalue);
2481 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2482 return ret;
2483
2484 if (SWITCH_BODY (switch_expr))
2485 {
2486 vec<tree> labels;
2487 vec<tree> saved_labels;
2488 hash_set<tree> *saved_live_switch_vars = NULL;
2489 tree default_case = NULL_TREE;
2490 gswitch *switch_stmt;
2491
2492 /* Save old labels, get new ones from body, then restore the old
2493 labels. Collect everything from the switch body to append afterwards. */
2494 saved_labels = gimplify_ctxp->case_labels;
2495 gimplify_ctxp->case_labels.create (8);
2496
2497 /* Only create live_switch_vars if SWITCH_BODY is a BIND_EXPR or a STATEMENT_LIST. */
2498 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2499 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2500 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2501 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2502 else
2503 gimplify_ctxp->live_switch_vars = NULL;
2504
2505 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2506 gimplify_ctxp->in_switch_expr = true;
2507
2508 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2509
2510 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2511 maybe_warn_switch_unreachable (switch_body_seq);
2512 maybe_warn_implicit_fallthrough (switch_body_seq);
2513 /* Only do this for the outermost GIMPLE_SWITCH. */
2514 if (!gimplify_ctxp->in_switch_expr)
2515 expand_FALLTHROUGH (&switch_body_seq);
2516
2517 labels = gimplify_ctxp->case_labels;
2518 gimplify_ctxp->case_labels = saved_labels;
2519
2520 if (gimplify_ctxp->live_switch_vars)
2521 {
2522 gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
2523 delete gimplify_ctxp->live_switch_vars;
2524 }
2525 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2526
2527 preprocess_case_label_vec_for_gimple (labels, index_type,
2528 &default_case);
2529
2530 bool add_bind = false;
2531 if (!default_case)
2532 {
2533 glabel *new_default;
2534
2535 default_case
2536 = build_case_label (NULL_TREE, NULL_TREE,
2537 create_artificial_label (UNKNOWN_LOCATION));
2538 if (old_in_switch_expr)
2539 {
2540 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
2541 add_bind = true;
2542 }
2543 new_default = gimple_build_label (CASE_LABEL (default_case));
2544 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2545 }
2546 else if (old_in_switch_expr)
2547 {
2548 gimple *last = gimple_seq_last_stmt (switch_body_seq);
2549 if (last && gimple_code (last) == GIMPLE_LABEL)
2550 {
2551 tree label = gimple_label_label (as_a <glabel *> (last));
2552 if (SWITCH_BREAK_LABEL_P (label))
2553 add_bind = true;
2554 }
2555 }
2556
2557 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2558 default_case, labels);
2559 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2560 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2561 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2562 so that we can easily find the start and end of the switch
2563 statement. */
2564 if (add_bind)
2565 {
2566 gimple_seq bind_body = NULL;
2567 gimplify_seq_add_stmt (&bind_body, switch_stmt);
2568 gimple_seq_add_seq (&bind_body, switch_body_seq);
2569 gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
2570 gimple_set_location (bind, EXPR_LOCATION (switch_expr));
2571 gimplify_seq_add_stmt (pre_p, bind);
2572 }
2573 else
2574 {
2575 gimplify_seq_add_stmt (pre_p, switch_stmt);
2576 gimplify_seq_add_seq (pre_p, switch_body_seq);
2577 }
2578 labels.release ();
2579 }
2580 else
2581 gcc_unreachable ();
2582
2583 return GS_ALL_DONE;
2584 }
2585
2586 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2587
2588 static enum gimplify_status
2589 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2590 {
2591 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2592 == current_function_decl);
2593
2594 tree label = LABEL_EXPR_LABEL (*expr_p);
2595 glabel *label_stmt = gimple_build_label (label);
2596 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2597 gimplify_seq_add_stmt (pre_p, label_stmt);
2598
2599 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2600 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2601 NOT_TAKEN));
2602 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2603 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2604 TAKEN));
2605
2606 return GS_ALL_DONE;
2607 }
2608
2609 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2610
2611 static enum gimplify_status
2612 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2613 {
2614 struct gimplify_ctx *ctxp;
2615 glabel *label_stmt;
2616
2617 /* Invalid programs can play Duff's Device type games with, for example,
2618 #pragma omp parallel. At least in the C front end, we don't
2619 detect such invalid branches until after gimplification, in the
2620 diagnose_omp_blocks pass. */
2621 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2622 if (ctxp->case_labels.exists ())
2623 break;
2624
2625 tree label = CASE_LABEL (*expr_p);
2626 label_stmt = gimple_build_label (label);
2627 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2628 ctxp->case_labels.safe_push (*expr_p);
2629 gimplify_seq_add_stmt (pre_p, label_stmt);
2630
2631 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2632 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2633 NOT_TAKEN));
2634 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2635 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2636 TAKEN));
2637
2638 return GS_ALL_DONE;
2639 }
2640
2641 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2642 if necessary. */
2643
2644 tree
2645 build_and_jump (tree *label_p)
2646 {
2647 if (label_p == NULL)
2648 /* If there's nowhere to jump, just fall through. */
2649 return NULL_TREE;
2650
2651 if (*label_p == NULL_TREE)
2652 {
2653 tree label = create_artificial_label (UNKNOWN_LOCATION);
2654 *label_p = label;
2655 }
2656
2657 return build1 (GOTO_EXPR, void_type_node, *label_p);
2658 }
2659
2660 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2661 This also involves building a label to jump to and communicating it to
2662 gimplify_loop_expr through gimplify_ctxp->exit_label. */
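/* A sketch (label name hypothetical): EXIT_EXPR <cond> is rewritten as

     if (cond) goto <D.exit>;

   where <D.exit> is the shared label stored in
   gimplify_ctxp->exit_label, emitted after the loop by
   gimplify_loop_expr.  */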
2663
2664 static enum gimplify_status
2665 gimplify_exit_expr (tree *expr_p)
2666 {
2667 tree cond = TREE_OPERAND (*expr_p, 0);
2668 tree expr;
2669
2670 expr = build_and_jump (&gimplify_ctxp->exit_label);
2671 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2672 *expr_p = expr;
2673
2674 return GS_OK;
2675 }
2676
2677 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2678 different from its canonical type, wrap the whole thing inside a
2679 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2680 type.
2681
2682 The canonical type of a COMPONENT_REF is the type of the field being
2683 referenced--unless the field is a bit-field which can be read directly
2684 in a smaller mode, in which case the canonical type is the
2685 sign-appropriate type corresponding to that mode. */
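/* For instance, a sketch: given

     struct S { int f : 8; } s;

   an rvalue use of s.f can be re-typed here with the 8-bit
   sign-appropriate type that get_unwidened reports, exposing a
   surrounding widening conversion as redundant.  */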
2686
2687 static void
2688 canonicalize_component_ref (tree *expr_p)
2689 {
2690 tree expr = *expr_p;
2691 tree type;
2692
2693 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
2694
2695 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2696 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2697 else
2698 type = TREE_TYPE (TREE_OPERAND (expr, 1));
2699
2700 /* One could argue that all the stuff below is not necessary for
2701 the non-bitfield case and declare it a FE error if type
2702 adjustment would be needed. */
2703 if (TREE_TYPE (expr) != type)
2704 {
2705 #ifdef ENABLE_TYPES_CHECKING
2706 tree old_type = TREE_TYPE (expr);
2707 #endif
2708 int type_quals;
2709
2710 /* We need to preserve qualifiers and propagate them from
2711 operand 0. */
2712 type_quals = TYPE_QUALS (type)
2713 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2714 if (TYPE_QUALS (type) != type_quals)
2715 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2716
2717 /* Set the type of the COMPONENT_REF to the underlying type. */
2718 TREE_TYPE (expr) = type;
2719
2720 #ifdef ENABLE_TYPES_CHECKING
2721 /* It is now a FE error, if the conversion from the canonical
2722 type to the original expression type is not useless. */
2723 gcc_assert (useless_type_conversion_p (old_type, type));
2724 #endif
2725 }
2726 }
2727
2728 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2729 to foo, embed that change in the ADDR_EXPR by converting
2730 T array[U];
2731 (T *)&array
2732 ==>
2733 &array[L]
2734 where L is the lower bound. For simplicity, only do this for constant
2735 lower bound.
2736 The constraint is that the type of &array[L] is trivially convertible
2737 to T *. */
2738
2739 static void
2740 canonicalize_addr_expr (tree *expr_p)
2741 {
2742 tree expr = *expr_p;
2743 tree addr_expr = TREE_OPERAND (expr, 0);
2744 tree datype, ddatype, pddatype;
2745
2746 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2747 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2748 || TREE_CODE (addr_expr) != ADDR_EXPR)
2749 return;
2750
2751 /* The addr_expr type should be a pointer to an array. */
2752 datype = TREE_TYPE (TREE_TYPE (addr_expr));
2753 if (TREE_CODE (datype) != ARRAY_TYPE)
2754 return;
2755
2756 /* The pointer to element type shall be trivially convertible to
2757 the expression pointer type. */
2758 ddatype = TREE_TYPE (datype);
2759 pddatype = build_pointer_type (ddatype);
2760 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2761 pddatype))
2762 return;
2763
2764 /* The lower bound and element sizes must be constant. */
2765 if (!TYPE_SIZE_UNIT (ddatype)
2766 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2767 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2768 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2769 return;
2770
2771 /* All checks succeeded. Build a new node to merge the cast. */
2772 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2773 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2774 NULL_TREE, NULL_TREE);
2775 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2776
2777 /* We can have stripped a required restrict qualifier above. */
2778 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2779 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
2780 }
2781
2782 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2783 underneath as appropriate. */
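/* A few sketched examples: in '(int) (unsigned) i' with 'int i' the
   conversions are stripped as useless; for 'char a[10]', '(char *) &a'
   becomes '&a[0]' via canonicalize_addr_expr; and a conversion to a
   non-register (e.g. aggregate) type is turned into a
   VIEW_CONVERT_EXPR.  */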
2784
2785 static enum gimplify_status
2786 gimplify_conversion (tree *expr_p)
2787 {
2788 location_t loc = EXPR_LOCATION (*expr_p);
2789 gcc_assert (CONVERT_EXPR_P (*expr_p));
2790
2791 /* Then strip away all but the outermost conversion. */
2792 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
2793
2794 /* And remove the outermost conversion if it's useless. */
2795 if (tree_ssa_useless_type_conversion (*expr_p))
2796 *expr_p = TREE_OPERAND (*expr_p, 0);
2797
2798 /* If we still have a conversion at the toplevel,
2799 then canonicalize some constructs. */
2800 if (CONVERT_EXPR_P (*expr_p))
2801 {
2802 tree sub = TREE_OPERAND (*expr_p, 0);
2803
2804 /* If a NOP conversion is changing the type of a COMPONENT_REF
2805 expression, then canonicalize its type now in order to expose more
2806 redundant conversions. */
2807 if (TREE_CODE (sub) == COMPONENT_REF)
2808 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
2809
2810 /* If a NOP conversion is changing a pointer to array of foo
2811 to a pointer to foo, embed that change in the ADDR_EXPR. */
2812 else if (TREE_CODE (sub) == ADDR_EXPR)
2813 canonicalize_addr_expr (expr_p);
2814 }
2815
2816 /* If we have a conversion to a non-register type force the
2817 use of a VIEW_CONVERT_EXPR instead. */
2818 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
2819 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
2820 TREE_OPERAND (*expr_p, 0));
2821
2822 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2823 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
2824 TREE_SET_CODE (*expr_p, NOP_EXPR);
2825
2826 return GS_OK;
2827 }
2828
2829 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2830 DECL_VALUE_EXPR, and it's worth re-examining things. */
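/* A sketch (the temporary a.1 is hypothetical): once gimplify_vla_decl
   has given a VLA 'a' the value expression *a.1, a later use

     a[i] = 0;

   is rewritten here into

     (*a.1)[i] = 0;

   and GS_OK tells the caller to re-gimplify the substituted form.  */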
2831
2832 static enum gimplify_status
2833 gimplify_var_or_parm_decl (tree *expr_p)
2834 {
2835 tree decl = *expr_p;
2836
2837 /* ??? If this is a local variable, and it has not been seen in any
2838 outer BIND_EXPR, then it's probably the result of a duplicate
2839 declaration, for which we've already issued an error. It would
2840 be really nice if the front end wouldn't leak these at all.
2841 Currently the only known culprit is C++ destructors, as seen
2842 in g++.old-deja/g++.jason/binding.C. */
2843 if (VAR_P (decl)
2844 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
2845 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2846 && decl_function_context (decl) == current_function_decl)
2847 {
2848 gcc_assert (seen_error ());
2849 return GS_ERROR;
2850 }
2851
2852 /* When within an OMP context, notice uses of variables. */
2853 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
2854 return GS_ALL_DONE;
2855
2856 /* If the decl is an alias for another expression, substitute it now. */
2857 if (DECL_HAS_VALUE_EXPR_P (decl))
2858 {
2859 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
2860 return GS_OK;
2861 }
2862
2863 return GS_ALL_DONE;
2864 }
2865
2866 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
2867
2868 static void
2869 recalculate_side_effects (tree t)
2870 {
2871 enum tree_code code = TREE_CODE (t);
2872 int len = TREE_OPERAND_LENGTH (t);
2873 int i;
2874
2875 switch (TREE_CODE_CLASS (code))
2876 {
2877 case tcc_expression:
2878 switch (code)
2879 {
2880 case INIT_EXPR:
2881 case MODIFY_EXPR:
2882 case VA_ARG_EXPR:
2883 case PREDECREMENT_EXPR:
2884 case PREINCREMENT_EXPR:
2885 case POSTDECREMENT_EXPR:
2886 case POSTINCREMENT_EXPR:
2887 /* All of these have side-effects, no matter what their
2888 operands are. */
2889 return;
2890
2891 default:
2892 break;
2893 }
2894 /* Fall through. */
2895
2896 case tcc_comparison: /* a comparison expression */
2897 case tcc_unary: /* a unary arithmetic expression */
2898 case tcc_binary: /* a binary arithmetic expression */
2899 case tcc_reference: /* a reference */
2900 case tcc_vl_exp: /* a function call */
2901 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2902 for (i = 0; i < len; ++i)
2903 {
2904 tree op = TREE_OPERAND (t, i);
2905 if (op && TREE_SIDE_EFFECTS (op))
2906 TREE_SIDE_EFFECTS (t) = 1;
2907 }
2908 break;
2909
2910 case tcc_constant:
2911 /* No side-effects. */
2912 return;
2913
2914 default:
2915 gcc_unreachable ();
2916 }
2917 }
2918
2919 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2920 node *EXPR_P.
2921
2922 compound_lval
2923 : min_lval '[' val ']'
2924 | min_lval '.' ID
2925 | compound_lval '[' val ']'
2926 | compound_lval '.' ID
2927
2928 This is not part of the original SIMPLE definition, which separates
2929 array and member references, but it seems reasonable to handle them
2930 together. Also, this way we don't run into problems with union
2931 aliasing; gcc requires that for accesses through a union to alias, the
2932 union reference must be explicit, which was not always the case when we
2933 were splitting up array and member refs.
2934
2935 PRE_P points to the sequence where side effects that must happen before
2936 *EXPR_P should be stored.
2937
2938 POST_P points to the sequence where side effects that must happen after
2939 *EXPR_P should be stored. */
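/* A sketch of the three steps on 's.a[f ()].b = 1' (D.1 hypothetical):
   nothing variable needs annotating in step 1, the base 's' already
   satisfies is_gimple_min_lval in step 2, and step 3 gimplifies the
   index to a value, yielding

     D.1 = f ();
     s.a[D.1].b = 1;
*/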
2940
2941 static enum gimplify_status
2942 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2943 fallback_t fallback)
2944 {
2945 tree *p;
2946 enum gimplify_status ret = GS_ALL_DONE, tret;
2947 int i;
2948 location_t loc = EXPR_LOCATION (*expr_p);
2949 tree expr = *expr_p;
2950
2951 /* Create a stack of the subexpressions so later we can walk them in
2952 order from inner to outer. */
2953 auto_vec<tree, 10> expr_stack;
2954
2955 /* We can handle anything that get_inner_reference can deal with. */
2956 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2957 {
2958 restart:
2959 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2960 if (TREE_CODE (*p) == INDIRECT_REF)
2961 *p = fold_indirect_ref_loc (loc, *p);
2962
2963 if (handled_component_p (*p))
2964 ;
2965 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2966 additional COMPONENT_REFs. */
2967 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
2968 && gimplify_var_or_parm_decl (p) == GS_OK)
2969 goto restart;
2970 else
2971 break;
2972
2973 expr_stack.safe_push (*p);
2974 }
2975
2976 gcc_assert (expr_stack.length ());
2977
2978 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2979 walked through and P points to the innermost expression.
2980
2981 Java requires that we elaborate nodes in source order. That
2982 means we must gimplify the inner expression followed by each of
2983 the indices, in order. But we can't gimplify the inner
2984 expression until we deal with any variable bounds, sizes, or
2985 positions in order to deal with PLACEHOLDER_EXPRs.
2986
2987 So we do this in three steps. First we deal with the annotations
2988 for any variables in the components, then we gimplify the base,
2989 then we gimplify any indices, from left to right. */
2990 for (i = expr_stack.length () - 1; i >= 0; i--)
2991 {
2992 tree t = expr_stack[i];
2993
2994 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2995 {
2996 /* Gimplify the low bound and element type size and put them into
2997 the ARRAY_REF. If these values are set, they have already been
2998 gimplified. */
2999 if (TREE_OPERAND (t, 2) == NULL_TREE)
3000 {
3001 tree low = unshare_expr (array_ref_low_bound (t));
3002 if (!is_gimple_min_invariant (low))
3003 {
3004 TREE_OPERAND (t, 2) = low;
3005 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
3006 post_p, is_gimple_reg,
3007 fb_rvalue);
3008 ret = MIN (ret, tret);
3009 }
3010 }
3011 else
3012 {
3013 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3014 is_gimple_reg, fb_rvalue);
3015 ret = MIN (ret, tret);
3016 }
3017
3018 if (TREE_OPERAND (t, 3) == NULL_TREE)
3019 {
3020 tree elmt_size = array_ref_element_size (t);
3021 if (!is_gimple_min_invariant (elmt_size))
3022 {
3023 elmt_size = unshare_expr (elmt_size);
3024 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
3025 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
3026
3027 /* Divide the element size by the alignment of the element
3028 type (above). */
3029 elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR,
3030 elmt_size, factor);
3031
3032 TREE_OPERAND (t, 3) = elmt_size;
3033 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
3034 post_p, is_gimple_reg,
3035 fb_rvalue);
3036 ret = MIN (ret, tret);
3037 }
3038 }
3039 else
3040 {
3041 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
3042 is_gimple_reg, fb_rvalue);
3043 ret = MIN (ret, tret);
3044 }
3045 }
3046 else if (TREE_CODE (t) == COMPONENT_REF)
3047 {
3048 /* Set the field offset into T and gimplify it. */
3049 if (TREE_OPERAND (t, 2) == NULL_TREE)
3050 {
3051 tree offset = component_ref_field_offset (t);
3052 if (!is_gimple_min_invariant (offset))
3053 {
3054 offset = unshare_expr (offset);
3055 tree field = TREE_OPERAND (t, 1);
3056 tree factor
3057 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
3058
3059 /* Divide the offset by its alignment. */
3060 offset = size_binop_loc (loc, EXACT_DIV_EXPR,
3061 offset, factor);
3062
3063 TREE_OPERAND (t, 2) = offset;
3064 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
3065 post_p, is_gimple_reg,
3066 fb_rvalue);
3067 ret = MIN (ret, tret);
3068 }
3069 }
3070 else
3071 {
3072 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3073 is_gimple_reg, fb_rvalue);
3074 ret = MIN (ret, tret);
3075 }
3076 }
3077 }
3078
3079 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
3080 so as to match the min_lval predicate. Failure to do so may result
3081 in the creation of large aggregate temporaries. */
3082 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
3083 fallback | fb_lvalue);
3084 ret = MIN (ret, tret);
3085
3086 /* And finally, the indices and operands of ARRAY_REF. During this
3087 loop we also remove any useless conversions. */
3088 for (; expr_stack.length () > 0; )
3089 {
3090 tree t = expr_stack.pop ();
3091
3092 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3093 {
3094 /* Gimplify the dimension. */
3095 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
3096 {
3097 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
3098 is_gimple_val, fb_rvalue);
3099 ret = MIN (ret, tret);
3100 }
3101 }
3102
3103 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
3104
3105 /* The innermost expression P may have originally had
3106 TREE_SIDE_EFFECTS set which would have caused all the outer
3107 expressions in *EXPR_P leading to P to also have had
3108 TREE_SIDE_EFFECTS set. */
3109 recalculate_side_effects (t);
3110 }
3111
3112 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
3113 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
3114 {
3115 canonicalize_component_ref (expr_p);
3116 }
3117
3118 expr_stack.release ();
3119
3120 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
3121
3122 return ret;
3123 }
3124
3125 /* Gimplify the self modifying expression pointed to by EXPR_P
3126 (++, --, +=, -=).
3127
3128 PRE_P points to the list where side effects that must happen before
3129 *EXPR_P should be stored.
3130
3131 POST_P points to the list where side effects that must happen after
3132 *EXPR_P should be stored.
3133
3134 WANT_VALUE is nonzero iff we want to use the value of this expression
3135 in another expression.
3136
3137 ARITH_TYPE is the type the computation should be performed in. */
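/* A sketch (the temporary a.1 is hypothetical): with WANT_VALUE,
   'b = a++' produces

     a.1 = a;
     a = a.1 + 1;
     b = a.1;

   while '++a', or a postfix increment whose value is unused, is simply
   rewritten as the MODIFY_EXPR 'a = a + 1'.  */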
3138
3139 enum gimplify_status
3140 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3141 bool want_value, tree arith_type)
3142 {
3143 enum tree_code code;
3144 tree lhs, lvalue, rhs, t1;
3145 gimple_seq post = NULL, *orig_post_p = post_p;
3146 bool postfix;
3147 enum tree_code arith_code;
3148 enum gimplify_status ret;
3149 location_t loc = EXPR_LOCATION (*expr_p);
3150
3151 code = TREE_CODE (*expr_p);
3152
3153 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
3154 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
3155
3156 /* Prefix or postfix? */
3157 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
3158 /* Faster to treat as prefix if result is not used. */
3159 postfix = want_value;
3160 else
3161 postfix = false;
3162
3163 /* For postfix, make sure the inner expression's post side effects
3164 are executed after side effects from this expression. */
3165 if (postfix)
3166 post_p = &post;
3167
3168 /* Add or subtract? */
3169 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
3170 arith_code = PLUS_EXPR;
3171 else
3172 arith_code = MINUS_EXPR;
3173
3174 /* Gimplify the LHS into a GIMPLE lvalue. */
3175 lvalue = TREE_OPERAND (*expr_p, 0);
3176 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3177 if (ret == GS_ERROR)
3178 return ret;
3179
3180 /* Extract the operands to the arithmetic operation. */
3181 lhs = lvalue;
3182 rhs = TREE_OPERAND (*expr_p, 1);
3183
3184 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
3185 that as the result value and in the postqueue operation. */
3186 if (postfix)
3187 {
3188 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
3189 if (ret == GS_ERROR)
3190 return ret;
3191
3192 lhs = get_initialized_tmp_var (lhs, pre_p);
3193 }
3194
3195 /* For pointer increment and decrement, use POINTER_PLUS_EXPR. */
3196 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
3197 {
3198 rhs = convert_to_ptrofftype_loc (loc, rhs);
3199 if (arith_code == MINUS_EXPR)
3200 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
3201 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
3202 }
3203 else
3204 t1 = fold_convert (TREE_TYPE (*expr_p),
3205 fold_build2 (arith_code, arith_type,
3206 fold_convert (arith_type, lhs),
3207 fold_convert (arith_type, rhs)));
3208
3209 if (postfix)
3210 {
3211 gimplify_assign (lvalue, t1, pre_p);
3212 gimplify_seq_add_seq (orig_post_p, post);
3213 *expr_p = lhs;
3214 return GS_ALL_DONE;
3215 }
3216 else
3217 {
3218 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3219 return GS_OK;
3220 }
3221 }
3222
3223 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
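/* A sketch: when a variably sized object is passed, e.g.

     void f (int n) { struct { char b[n]; } t; g (t); }

   the argument is wrapped as WITH_SIZE_EXPR <t, SIZE>, SIZE being the
   unshared, placeholder-substituted TYPE_SIZE_UNIT, so later passes
   know how many bytes the object occupies.  */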
3224
3225 static void
3226 maybe_with_size_expr (tree *expr_p)
3227 {
3228 tree expr = *expr_p;
3229 tree type = TREE_TYPE (expr);
3230 tree size;
3231
3232 /* If we've already wrapped this or the type is error_mark_node, we can't do
3233 anything. */
3234 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3235 || type == error_mark_node)
3236 return;
3237
3238 /* If the size isn't known or is a constant, we have nothing to do. */
3239 size = TYPE_SIZE_UNIT (type);
3240 if (!size || poly_int_tree_p (size))
3241 return;
3242
3243 /* Otherwise, make a WITH_SIZE_EXPR. */
3244 size = unshare_expr (size);
3245 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3246 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3247 }
3248
3249 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P.
3250 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3251 the CALL_EXPR. If ALLOW_SSA is set, the actual parameter may be
3252 gimplified to an SSA name. */
3253
3254 enum gimplify_status
3255 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3256 bool allow_ssa)
3257 {
3258 bool (*test) (tree);
3259 fallback_t fb;
3260
3261 /* In general, we allow lvalues for function arguments to avoid
3262 extra overhead of copying large aggregates out of even larger
3263 aggregates into temporaries only to copy the temporaries to
3264 the argument list. Make optimizers happy by pulling out to
3265 temporaries those types that fit in registers. */
3266 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3267 test = is_gimple_val, fb = fb_rvalue;
3268 else
3269 {
3270 test = is_gimple_lvalue, fb = fb_either;
3271 /* Also strip a TARGET_EXPR that would force an extra copy. */
3272 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3273 {
3274 tree init = TARGET_EXPR_INITIAL (*arg_p);
3275 if (init
3276 && !VOID_TYPE_P (TREE_TYPE (init)))
3277 *arg_p = init;
3278 }
3279 }
3280
3281 /* If this is a variable sized type, we must remember the size. */
3282 maybe_with_size_expr (arg_p);
3283
3284 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3285 /* Make sure arguments have the same location as the function call
3286 itself. */
3287 protected_set_expr_location (*arg_p, call_location);
3288
3289 /* There is a sequence point before a function call. Side effects in
3290 the argument list must occur before the actual call. So, when
3291 gimplifying arguments, force gimplify_expr to use an internal
3292 post queue which is then appended to the end of PRE_P. */
3293 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3294 }
3295
3296 /* Don't fold inside offloading or taskreg regions: it can break code by
3297 adding decl references that weren't in the source. We'll do it during
3298 the omplower pass instead. */
3299
3300 static bool
3301 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3302 {
3303 struct gimplify_omp_ctx *ctx;
3304 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3305 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3306 return false;
3307 else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
3308 return false;
3309 /* Delay folding of builtins until the IL is in consistent state
3310 so the diagnostic machinery can do a better job. */
3311 if (gimple_call_builtin_p (gsi_stmt (*gsi)))
3312 return false;
3313 return fold_stmt (gsi);
3314 }
3315
3316 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3317 WANT_VALUE is true if the result of the call is desired. */
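/* A sketch (D.1 and D.2 are hypothetical temporaries): in void context

     foo (bar (), x + 1);

   gimplifies to

     D.1 = bar ();
     D.2 = x + 1;
     foo (D.1, D.2);

   with the GIMPLE_CALL emitted here directly; with WANT_VALUE the
   gimplified CALL_EXPR is instead left for gimplify_modify_expr to
   attach to an lhs.  */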
3318
3319 static enum gimplify_status
3320 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3321 {
3322 tree fndecl, parms, p, fnptrtype;
3323 enum gimplify_status ret;
3324 int i, nargs;
3325 gcall *call;
3326 bool builtin_va_start_p = false;
3327 location_t loc = EXPR_LOCATION (*expr_p);
3328
3329 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3330
3331 /* For reliable diagnostics during inlining, it is necessary that
3332 every call_expr be annotated with file and line. */
3333 if (! EXPR_HAS_LOCATION (*expr_p))
3334 SET_EXPR_LOCATION (*expr_p, input_location);
3335
3336 /* Gimplify internal functions created in the FEs. */
3337 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3338 {
3339 if (want_value)
3340 return GS_ALL_DONE;
3341
3342 nargs = call_expr_nargs (*expr_p);
3343 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3344 auto_vec<tree> vargs (nargs);
3345
3346 for (i = 0; i < nargs; i++)
3347 {
3348 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3349 EXPR_LOCATION (*expr_p));
3350 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3351 }
3352
3353 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3354 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3355 gimplify_seq_add_stmt (pre_p, call);
3356 return GS_ALL_DONE;
3357 }
3358
3359 /* This may be a call to a builtin function.
3360
3361 Builtin function calls may be transformed into different
3362 (and more efficient) builtin function calls under certain
3363 circumstances. Unfortunately, gimplification can muck things
3364 up enough that the builtin expanders are not aware that certain
3365 transformations are still valid.
3366
3367 So we attempt transformation/gimplification of the call before
3368 we gimplify the CALL_EXPR. At this time we do not manage to
3369 transform all calls in the same manner as the expanders do, but
3370 we do transform most of them. */
3371 fndecl = get_callee_fndecl (*expr_p);
3372 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3373 switch (DECL_FUNCTION_CODE (fndecl))
3374 {
3375 CASE_BUILT_IN_ALLOCA:
3376 /* If the call has been built for a variable-sized object, then we
3377 want to restore the stack level when the enclosing BIND_EXPR is
3378 exited to reclaim the allocated space; otherwise, we precisely
3379 need to do the opposite and preserve the latest stack level. */
3380 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3381 gimplify_ctxp->save_stack = true;
3382 else
3383 gimplify_ctxp->keep_stack = true;
3384 break;
3385
3386 case BUILT_IN_VA_START:
3387 {
3388 builtin_va_start_p = TRUE;
3389 if (call_expr_nargs (*expr_p) < 2)
3390 {
3391 error ("too few arguments to function %<va_start%>");
3392 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3393 return GS_OK;
3394 }
3395
3396 if (fold_builtin_next_arg (*expr_p, true))
3397 {
3398 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3399 return GS_OK;
3400 }
3401 break;
3402 }
3403
3404 case BUILT_IN_EH_RETURN:
3405 cfun->calls_eh_return = true;
3406 break;
3407
3408 case BUILT_IN_CLEAR_PADDING:
3409 if (call_expr_nargs (*expr_p) == 1)
3410 {
3411 /* Remember the original type of the argument in an internal
3412 dummy second argument, because in GIMPLE pointer conversions are
3413 useless. */
3414 p = CALL_EXPR_ARG (*expr_p, 0);
3415 *expr_p
3416 = build_call_expr_loc (EXPR_LOCATION (*expr_p), fndecl, 2, p,
3417 build_zero_cst (TREE_TYPE (p)));
3418 return GS_OK;
3419 }
3420 break;
3421
3422 default:
3423 ;
3424 }
3425 if (fndecl && fndecl_built_in_p (fndecl))
3426 {
3427 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3428 if (new_tree && new_tree != *expr_p)
3429 {
3430 /* There was a transformation of this call which computes the
3431 same value, but in a more efficient way. Return and try
3432 again. */
3433 *expr_p = new_tree;
3434 return GS_OK;
3435 }
3436 }
3437
3438 /* Remember the original function pointer type. */
3439 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3440
3441 if (flag_openmp
3442 && fndecl
3443 && cfun
3444 && (cfun->curr_properties & PROP_gimple_any) == 0)
3445 {
3446 tree variant = omp_resolve_declare_variant (fndecl);
3447 if (variant != fndecl)
3448 CALL_EXPR_FN (*expr_p) = build1 (ADDR_EXPR, fnptrtype, variant);
3449 }
3450
3451 /* There is a sequence point before the call, so any side effects in
3452 the calling expression must occur before the actual call. Force
3453 gimplify_expr to use an internal post queue. */
3454 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3455 is_gimple_call_addr, fb_rvalue);
3456
3457 nargs = call_expr_nargs (*expr_p);
3458
3459 /* Get argument types for verification. */
3460 fndecl = get_callee_fndecl (*expr_p);
3461 parms = NULL_TREE;
3462 if (fndecl)
3463 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3464 else
3465 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3466
3467 if (fndecl && DECL_ARGUMENTS (fndecl))
3468 p = DECL_ARGUMENTS (fndecl);
3469 else if (parms)
3470 p = parms;
3471 else
3472 p = NULL_TREE;
3473 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3474 ;
3475
3476 /* If the last argument is __builtin_va_arg_pack () and it is not
3477 passed as a named argument, decrease the number of CALL_EXPR
3478 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3479 if (!p
3480 && i < nargs
3481 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3482 {
3483 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3484 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3485
3486 if (last_arg_fndecl
3487 && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
3488 {
3489 tree call = *expr_p;
3490
3491 --nargs;
3492 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3493 CALL_EXPR_FN (call),
3494 nargs, CALL_EXPR_ARGP (call));
3495
3496 /* Copy all CALL_EXPR flags, location and block, except
3497 CALL_EXPR_VA_ARG_PACK flag. */
3498 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3499 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3500 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3501 = CALL_EXPR_RETURN_SLOT_OPT (call);
3502 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3503 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3504
3505 /* Set CALL_EXPR_VA_ARG_PACK. */
3506 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3507 }
3508 }
3509
3510 /* If the call returns twice then after building the CFG the call
3511 argument computations will no longer dominate the call because
3512 we add an abnormal incoming edge to the call. So do not use SSA
3513 vars there. */
3514 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3515
3516 /* Gimplify the function arguments. */
3517 if (nargs > 0)
3518 {
3519 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3520 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3521 PUSH_ARGS_REVERSED ? i-- : i++)
3522 {
3523 enum gimplify_status t;
3524
3525 /* Avoid gimplifying the second argument to va_start, which needs to
3526 be the plain PARM_DECL. */
3527 if ((i != 1) || !builtin_va_start_p)
3528 {
3529 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3530 EXPR_LOCATION (*expr_p), ! returns_twice);
3531
3532 if (t == GS_ERROR)
3533 ret = GS_ERROR;
3534 }
3535 }
3536 }
3537
3538 /* Gimplify the static chain. */
3539 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3540 {
3541 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3542 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3543 else
3544 {
3545 enum gimplify_status t;
3546 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3547 EXPR_LOCATION (*expr_p), ! returns_twice);
3548 if (t == GS_ERROR)
3549 ret = GS_ERROR;
3550 }
3551 }
3552
3553 /* Verify the function result. */
3554 if (want_value && fndecl
3555 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3556 {
3557 error_at (loc, "using result of function returning %<void%>");
3558 ret = GS_ERROR;
3559 }
3560
3561 /* Try this again in case gimplification exposed something. */
3562 if (ret != GS_ERROR)
3563 {
3564 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3565
3566 if (new_tree && new_tree != *expr_p)
3567 {
3568 /* There was a transformation of this call which computes the
3569 same value, but in a more efficient way. Return and try
3570 again. */
3571 *expr_p = new_tree;
3572 return GS_OK;
3573 }
3574 }
3575 else
3576 {
3577 *expr_p = error_mark_node;
3578 return GS_ERROR;
3579 }
3580
3581 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3582 decl. This allows us to eliminate redundant or useless
3583 calls to "const" functions. */
3584 if (TREE_CODE (*expr_p) == CALL_EXPR)
3585 {
3586 int flags = call_expr_flags (*expr_p);
3587 if (flags & (ECF_CONST | ECF_PURE)
3588 /* An infinite loop is considered a side effect. */
3589 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3590 TREE_SIDE_EFFECTS (*expr_p) = 0;
3591 }
3592
3593 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3594 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3595 form and delegate the creation of a GIMPLE_CALL to
3596 gimplify_modify_expr. This is always possible because when
3597 WANT_VALUE is true, the caller wants the result of this call into
3598 a temporary, which means that we will emit an INIT_EXPR in
3599 internal_get_tmp_var which will then be handled by
3600 gimplify_modify_expr. */
3601 if (!want_value)
3602 {
3603 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3604 have to do is replicate it as a GIMPLE_CALL tuple. */
3605 gimple_stmt_iterator gsi;
3606 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
3607 notice_special_calls (call);
3608 gimplify_seq_add_stmt (pre_p, call);
3609 gsi = gsi_last (*pre_p);
3610 maybe_fold_stmt (&gsi);
3611 *expr_p = NULL_TREE;
3612 }
3613 else
3614 /* Remember the original function type. */
3615 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3616 CALL_EXPR_FN (*expr_p));
3617
3618 return ret;
3619 }
3620
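/* An illustrative sketch of the WANT_VALUE distinction above
   (hypothetical C fragment, not compiler code): for

     foo (a + b);

   the call's value is unused, so a GIMPLE_CALL is emitted here directly:

     t1 = a + b;
     foo (t1);

   whereas for

     x = foo (a + b);

   the CALL_EXPR is left in place and gimplify_modify_expr later builds
   the GIMPLE_CALL with an LHS:

     t1 = a + b;
     x = foo (t1);  */
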
3621 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3622 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3623
3624 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3625 condition is true or false, respectively. If null, we should generate
3626 our own to skip over the evaluation of this specific expression.
3627
3628 LOCUS is the source location of the COND_EXPR.
3629
3630 This function is the tree equivalent of do_jump.
3631
3632 shortcut_cond_r should only be called by shortcut_cond_expr. */
3633
3634 static tree
3635 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3636 location_t locus)
3637 {
3638 tree local_label = NULL_TREE;
3639 tree t, expr = NULL;
3640
3641 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3642 retain the shortcut semantics. Just insert the gotos here;
3643 shortcut_cond_expr will append the real blocks later. */
3644 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3645 {
3646 location_t new_locus;
3647
3648 /* Turn if (a && b) into
3649
3650 if (a); else goto no;
3651 if (b) goto yes; else goto no;
3652 (no:) */
3653
3654 if (false_label_p == NULL)
3655 false_label_p = &local_label;
3656
3657 /* Keep the original source location on the first 'if'. */
3658 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
3659 append_to_statement_list (t, &expr);
3660
3661 /* Set the source location of the && on the second 'if'. */
3662 new_locus = rexpr_location (pred, locus);
3663 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3664 new_locus);
3665 append_to_statement_list (t, &expr);
3666 }
3667 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3668 {
3669 location_t new_locus;
3670
3671 /* Turn if (a || b) into
3672
3673 if (a) goto yes;
3674 if (b) goto yes; else goto no;
3675 (yes:) */
3676
3677 if (true_label_p == NULL)
3678 true_label_p = &local_label;
3679
3680 /* Keep the original source location on the first 'if'. */
3681 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
3682 append_to_statement_list (t, &expr);
3683
3684 /* Set the source location of the || on the second 'if'. */
3685 new_locus = rexpr_location (pred, locus);
3686 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3687 new_locus);
3688 append_to_statement_list (t, &expr);
3689 }
3690 else if (TREE_CODE (pred) == COND_EXPR
3691 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3692 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
3693 {
3694 location_t new_locus;
3695
3696 /* As long as we're messing with gotos, turn if (a ? b : c) into
3697 if (a)
3698 if (b) goto yes; else goto no;
3699 else
3700 if (c) goto yes; else goto no;
3701
3702 Don't do this if one of the arms has void type, which can happen
3703 in C++ when the arm is a throw expression. */
3704
3705 /* Keep the original source location on the first 'if'. Set the source
3706 location of the ? on the second 'if'. */
3707 new_locus = rexpr_location (pred, locus);
3708 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3709 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
3710 false_label_p, locus),
3711 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
3712 false_label_p, new_locus));
3713 }
3714 else
3715 {
3716 expr = build3 (COND_EXPR, void_type_node, pred,
3717 build_and_jump (true_label_p),
3718 build_and_jump (false_label_p));
3719 SET_EXPR_LOCATION (expr, locus);
3720 }
3721
3722 if (local_label)
3723 {
3724 t = build1 (LABEL_EXPR, void_type_node, local_label);
3725 append_to_statement_list (t, &expr);
3726 }
3727
3728 return expr;
3729 }
3730
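/* An illustrative expansion (hypothetical fragment): applying the
   rules above to the nested predicate of

     if ((a && b) || c) goto yes; else goto no;

   yields, roughly,

     if (a) ; else goto L;
     if (b) goto yes; else goto L;
     L:
     if (c) goto yes; else goto no;

   where L is the local label created for the inner TRUTH_ANDIF_EXPR.  */
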
3731 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
3732 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
3733 statement, if it is the last one. Otherwise, return NULL. */
3734
3735 static tree
3736 find_goto (tree expr)
3737 {
3738 if (!expr)
3739 return NULL_TREE;
3740
3741 if (TREE_CODE (expr) == GOTO_EXPR)
3742 return expr;
3743
3744 if (TREE_CODE (expr) != STATEMENT_LIST)
3745 return NULL_TREE;
3746
3747 tree_stmt_iterator i = tsi_start (expr);
3748
3749 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
3750 tsi_next (&i);
3751
3752 if (!tsi_one_before_end_p (i))
3753 return NULL_TREE;
3754
3755 return find_goto (tsi_stmt (i));
3756 }
3757
3758 /* Same as find_goto, except that it returns NULL if the destination
3759 is not a LABEL_DECL. */
3760
3761 static inline tree
3762 find_goto_label (tree expr)
3763 {
3764 tree dest = find_goto (expr);
3765 if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
3766 return dest;
3767 return NULL_TREE;
3768 }
3769
3770 /* Given a conditional expression EXPR with short-circuit boolean
3771 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3772 predicate apart into the equivalent sequence of conditionals. */
3773
3774 static tree
3775 shortcut_cond_expr (tree expr)
3776 {
3777 tree pred = TREE_OPERAND (expr, 0);
3778 tree then_ = TREE_OPERAND (expr, 1);
3779 tree else_ = TREE_OPERAND (expr, 2);
3780 tree true_label, false_label, end_label, t;
3781 tree *true_label_p;
3782 tree *false_label_p;
3783 bool emit_end, emit_false, jump_over_else;
3784 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
3785 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
3786
3787 /* First do simple transformations. */
3788 if (!else_se)
3789 {
3790 /* If there is no 'else', turn
3791 if (a && b) then c
3792 into
3793 if (a) if (b) then c. */
3794 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3795 {
3796 /* Keep the original source location on the first 'if'. */
3797 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3798 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3799 /* Set the source location of the && on the second 'if'. */
3800 if (rexpr_has_location (pred))
3801 SET_EXPR_LOCATION (expr, rexpr_location (pred));
3802 then_ = shortcut_cond_expr (expr);
3803 then_se = then_ && TREE_SIDE_EFFECTS (then_);
3804 pred = TREE_OPERAND (pred, 0);
3805 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
3806 SET_EXPR_LOCATION (expr, locus);
3807 }
3808 }
3809
3810 if (!then_se)
3811 {
3812 /* If there is no 'then', turn
3813 if (a || b); else d
3814 into
3815 if (a); else if (b); else d. */
3816 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3817 {
3818 /* Keep the original source location on the first 'if'. */
3819 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3820 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3821 /* Set the source location of the || on the second 'if'. */
3822 if (rexpr_has_location (pred))
3823 SET_EXPR_LOCATION (expr, rexpr_location (pred));
3824 else_ = shortcut_cond_expr (expr);
3825 else_se = else_ && TREE_SIDE_EFFECTS (else_);
3826 pred = TREE_OPERAND (pred, 0);
3827 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
3828 SET_EXPR_LOCATION (expr, locus);
3829 }
3830 }
3831
3832 /* If we're done, great. */
3833 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
3834 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
3835 return expr;
3836
3837 /* Otherwise we need to mess with gotos. Change
3838 if (a) c; else d;
3839 to
3840 if (a); else goto no;
3841 c; goto end;
3842 no: d; end:
3843 and recursively gimplify the condition. */
3844
3845 true_label = false_label = end_label = NULL_TREE;
3846
3847 /* If our arms just jump somewhere, hijack those labels so we don't
3848 generate jumps to jumps. */
3849
3850 if (tree then_goto = find_goto_label (then_))
3851 {
3852 true_label = GOTO_DESTINATION (then_goto);
3853 then_ = NULL;
3854 then_se = false;
3855 }
3856
3857 if (tree else_goto = find_goto_label (else_))
3858 {
3859 false_label = GOTO_DESTINATION (else_goto);
3860 else_ = NULL;
3861 else_se = false;
3862 }
3863
3864 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
3865 if (true_label)
3866 true_label_p = &true_label;
3867 else
3868 true_label_p = NULL;
3869
3870 /* The 'else' branch also needs a label if it contains interesting code. */
3871 if (false_label || else_se)
3872 false_label_p = &false_label;
3873 else
3874 false_label_p = NULL;
3875
3876 /* If there was nothing else in our arms, just forward the label(s). */
3877 if (!then_se && !else_se)
3878 return shortcut_cond_r (pred, true_label_p, false_label_p,
3879 EXPR_LOC_OR_LOC (expr, input_location));
3880
3881 /* If our last subexpression already has a terminal label, reuse it. */
3882 if (else_se)
3883 t = expr_last (else_);
3884 else if (then_se)
3885 t = expr_last (then_);
3886 else
3887 t = NULL;
3888 if (t && TREE_CODE (t) == LABEL_EXPR)
3889 end_label = LABEL_EXPR_LABEL (t);
3890
3891 /* If we don't care about jumping to the 'else' branch, jump to the end
3892 if the condition is false. */
3893 if (!false_label_p)
3894 false_label_p = &end_label;
3895
3896 /* We only want to emit these labels if we aren't hijacking them. */
3897 emit_end = (end_label == NULL_TREE);
3898 emit_false = (false_label == NULL_TREE);
3899
3900 /* We only emit the jump over the else clause if we have to--if the
3901 then clause may fall through. Otherwise we can wind up with a
3902 useless jump and a useless label at the end of gimplified code,
3903 which will cause us to think that this conditional as a whole
3904 falls through even if it doesn't. If we then inline a function
3905 which ends with such a condition, that can cause us to issue an
3906 inappropriate warning about control reaching the end of a
3907 non-void function. */
3908 jump_over_else = block_may_fallthru (then_);
3909
3910 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
3911 EXPR_LOC_OR_LOC (expr, input_location));
3912
3913 expr = NULL;
3914 append_to_statement_list (pred, &expr);
3915
3916 append_to_statement_list (then_, &expr);
3917 if (else_se)
3918 {
3919 if (jump_over_else)
3920 {
3921 tree last = expr_last (expr);
3922 t = build_and_jump (&end_label);
3923 if (rexpr_has_location (last))
3924 SET_EXPR_LOCATION (t, rexpr_location (last));
3925 append_to_statement_list (t, &expr);
3926 }
3927 if (emit_false)
3928 {
3929 t = build1 (LABEL_EXPR, void_type_node, false_label);
3930 append_to_statement_list (t, &expr);
3931 }
3932 append_to_statement_list (else_, &expr);
3933 }
3934 if (emit_end && end_label)
3935 {
3936 t = build1 (LABEL_EXPR, void_type_node, end_label);
3937 append_to_statement_list (t, &expr);
3938 }
3939
3940 return expr;
3941 }
3942
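/* End-to-end illustration (hypothetical fragment): when both arms have
   side effects, as in

     if (a && b) c (); else d ();

   the function produces approximately

     if (a) ; else goto no;
     if (b) ; else goto no;
     c ();
     goto end;
     no: d ();
     end:

   reusing existing labels where the arms are themselves gotos.  */
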
3943 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
3944
3945 tree
3946 gimple_boolify (tree expr)
3947 {
3948 tree type = TREE_TYPE (expr);
3949 location_t loc = EXPR_LOCATION (expr);
3950
3951 if (TREE_CODE (expr) == NE_EXPR
3952 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
3953 && integer_zerop (TREE_OPERAND (expr, 1)))
3954 {
3955 tree call = TREE_OPERAND (expr, 0);
3956 tree fn = get_callee_fndecl (call);
3957
3958 /* For __builtin_expect ((long) (x), y) recurse into x as well
3959 if x is truth_value_p. */
3960 if (fn
3961 && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
3962 && call_expr_nargs (call) == 2)
3963 {
3964 tree arg = CALL_EXPR_ARG (call, 0);
3965 if (arg)
3966 {
3967 if (TREE_CODE (arg) == NOP_EXPR
3968 && TREE_TYPE (arg) == TREE_TYPE (call))
3969 arg = TREE_OPERAND (arg, 0);
3970 if (truth_value_p (TREE_CODE (arg)))
3971 {
3972 arg = gimple_boolify (arg);
3973 CALL_EXPR_ARG (call, 0)
3974 = fold_convert_loc (loc, TREE_TYPE (call), arg);
3975 }
3976 }
3977 }
3978 }
3979
3980 switch (TREE_CODE (expr))
3981 {
3982 case TRUTH_AND_EXPR:
3983 case TRUTH_OR_EXPR:
3984 case TRUTH_XOR_EXPR:
3985 case TRUTH_ANDIF_EXPR:
3986 case TRUTH_ORIF_EXPR:
3987 /* Also boolify the arguments of truth exprs. */
3988 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
3989 /* FALLTHRU */
3990
3991 case TRUTH_NOT_EXPR:
3992 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3993
3994 /* These expressions always produce boolean results. */
3995 if (TREE_CODE (type) != BOOLEAN_TYPE)
3996 TREE_TYPE (expr) = boolean_type_node;
3997 return expr;
3998
3999 case ANNOTATE_EXPR:
4000 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
4001 {
4002 case annot_expr_ivdep_kind:
4003 case annot_expr_unroll_kind:
4004 case annot_expr_no_vector_kind:
4005 case annot_expr_vector_kind:
4006 case annot_expr_parallel_kind:
4007 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4008 if (TREE_CODE (type) != BOOLEAN_TYPE)
4009 TREE_TYPE (expr) = boolean_type_node;
4010 return expr;
4011 default:
4012 gcc_unreachable ();
4013 }
4014
4015 default:
4016 if (COMPARISON_CLASS_P (expr))
4017 {
4018 /* These expressions always produce boolean results. */
4019 if (TREE_CODE (type) != BOOLEAN_TYPE)
4020 TREE_TYPE (expr) = boolean_type_node;
4021 return expr;
4022 }
4023 /* Other expressions that get here must have boolean values, but
4024 might need to be converted to the appropriate mode. */
4025 if (TREE_CODE (type) == BOOLEAN_TYPE)
4026 return expr;
4027 return fold_convert_loc (loc, boolean_type_node, expr);
4028 }
4029 }
4030
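/* For example (illustrative only): boolifying the condition of

     int n;  ...  if (n) ...

   wraps the operand in a conversion to boolean_type_node, while a
   comparison such as a < b merely has its type retargeted to
   boolean_type_node, since it already produces a boolean value.  */
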
4031 /* Given a conditional expression *EXPR_P without side effects, gimplify
4032 its operands. New statements are inserted to PRE_P. */
4033
4034 static enum gimplify_status
4035 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
4036 {
4037 tree expr = *expr_p, cond;
4038 enum gimplify_status ret, tret;
4039 enum tree_code code;
4040
4041 cond = gimple_boolify (COND_EXPR_COND (expr));
4042
4043 /* We need to handle && and || specially, as their gimplification
4044 creates a pure COND_EXPR, which would otherwise lead to an infinite cycle. */
4045 code = TREE_CODE (cond);
4046 if (code == TRUTH_ANDIF_EXPR)
4047 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
4048 else if (code == TRUTH_ORIF_EXPR)
4049 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
4050 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
4051 COND_EXPR_COND (*expr_p) = cond;
4052
4053 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
4054 is_gimple_val, fb_rvalue);
4055 ret = MIN (ret, tret);
4056 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
4057 is_gimple_val, fb_rvalue);
4058
4059 return MIN (ret, tret);
4060 }
4061
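/* Illustration (hypothetical fragment): with side-effect-free and
   trap-free arms, e.g.

     x = p ? a + 1 : b;

   the arm computations are emitted unconditionally into PRE_P and the
   conditional stays on the RHS:

     t1 = a + 1;
     x = p ? t1 : b;

   which is why callers must first verify that neither arm has side
   effects or could trap.  */
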
4062 /* Return true if evaluating EXPR could trap.
4063 EXPR is GENERIC, while tree_could_trap_p can be called
4064 only on GIMPLE. */
4065
4066 bool
4067 generic_expr_could_trap_p (tree expr)
4068 {
4069 unsigned i, n;
4070
4071 if (!expr || is_gimple_val (expr))
4072 return false;
4073
4074 if (!EXPR_P (expr) || tree_could_trap_p (expr))
4075 return true;
4076
4077 n = TREE_OPERAND_LENGTH (expr);
4078 for (i = 0; i < n; i++)
4079 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
4080 return true;
4081
4082 return false;
4083 }
4084
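/* E.g. (illustrative): p->f / n could trap both through the pointer
   dereference and through the division, whereas a + b on ordinary
   unsigned integers could not.  */
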
4085 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
4086 into
4087
4088 if (p)                 if (p)
4089   t1 = a;                a;
4090 else          or       else
4091   t1 = b;                b;
4092 t1;
4093
4094 The second form is used when *EXPR_P is of type void.
4095
4096 PRE_P points to the list where side effects that must happen before
4097 *EXPR_P should be stored. */
4098
4099 static enum gimplify_status
4100 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
4101 {
4102 tree expr = *expr_p;
4103 tree type = TREE_TYPE (expr);
4104 location_t loc = EXPR_LOCATION (expr);
4105 tree tmp, arm1, arm2;
4106 enum gimplify_status ret;
4107 tree label_true, label_false, label_cont;
4108 bool have_then_clause_p, have_else_clause_p;
4109 gcond *cond_stmt;
4110 enum tree_code pred_code;
4111 gimple_seq seq = NULL;
4112
4113 /* If this COND_EXPR has a value, copy the values into a temporary within
4114 the arms. */
4115 if (!VOID_TYPE_P (type))
4116 {
4117 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
4118 tree result;
4119
4120 /* If either an rvalue is ok or we do not require an lvalue, create the
4121 temporary. But we cannot do that if the type is addressable. */
4122 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
4123 && !TREE_ADDRESSABLE (type))
4124 {
4125 if (gimplify_ctxp->allow_rhs_cond_expr
4126 /* If either branch has side effects or could trap, it can't be
4127 evaluated unconditionally. */
4128 && !TREE_SIDE_EFFECTS (then_)
4129 && !generic_expr_could_trap_p (then_)
4130 && !TREE_SIDE_EFFECTS (else_)
4131 && !generic_expr_could_trap_p (else_))
4132 return gimplify_pure_cond_expr (expr_p, pre_p);
4133
4134 tmp = create_tmp_var (type, "iftmp");
4135 result = tmp;
4136 }
4137
4138 /* Otherwise, only create and copy references to the values. */
4139 else
4140 {
4141 type = build_pointer_type (type);
4142
4143 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4144 then_ = build_fold_addr_expr_loc (loc, then_);
4145
4146 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4147 else_ = build_fold_addr_expr_loc (loc, else_);
4148
4149 expr
4150 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
4151
4152 tmp = create_tmp_var (type, "iftmp");
4153 result = build_simple_mem_ref_loc (loc, tmp);
4154 }
4155
4156 /* Build the new then clause, `tmp = then_;'. But don't build the
4157 assignment if the value is void; in C++ this can happen when the arm is a throw. */
4158 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4159 TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);
4160
4161 /* Similarly, build the new else clause, `tmp = else_;'. */
4162 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4163 TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);
4164
4165 TREE_TYPE (expr) = void_type_node;
4166 recalculate_side_effects (expr);
4167
4168 /* Move the COND_EXPR to the prequeue. */
4169 gimplify_stmt (&expr, pre_p);
4170
4171 *expr_p = result;
4172 return GS_ALL_DONE;
4173 }
4174
4175 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4176 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
4177 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
4178 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
4179
4180 /* Make sure the condition has BOOLEAN_TYPE. */
4181 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4182
4183 /* Break apart && and || conditions. */
4184 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
4185 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
4186 {
4187 expr = shortcut_cond_expr (expr);
4188
4189 if (expr != *expr_p)
4190 {
4191 *expr_p = expr;
4192
4193 /* We can't rely on gimplify_expr to re-gimplify the expanded
4194 form properly, as cleanups might cause the target labels to be
4195 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4196 set up a conditional context. */
4197 gimple_push_condition ();
4198 gimplify_stmt (expr_p, &seq);
4199 gimple_pop_condition (pre_p);
4200 gimple_seq_add_seq (pre_p, seq);
4201
4202 return GS_ALL_DONE;
4203 }
4204 }
4205
4206 /* Now do the normal gimplification. */
4207
4208 /* Gimplify condition. */
4209 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
4210 is_gimple_condexpr_for_cond, fb_rvalue);
4211 if (ret == GS_ERROR)
4212 return GS_ERROR;
4213 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
4214
4215 gimple_push_condition ();
4216
4217 have_then_clause_p = have_else_clause_p = false;
4218 label_true = find_goto_label (TREE_OPERAND (expr, 1));
4219 if (label_true
4220 && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
4221 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4222 have different locations, otherwise we end up with incorrect
4223 location information on the branches. */
4224 && (optimize
4225 || !EXPR_HAS_LOCATION (expr)
4226 || !rexpr_has_location (label_true)
4227 || EXPR_LOCATION (expr) == rexpr_location (label_true)))
4228 {
4229 have_then_clause_p = true;
4230 label_true = GOTO_DESTINATION (label_true);
4231 }
4232 else
4233 label_true = create_artificial_label (UNKNOWN_LOCATION);
4234 label_false = find_goto_label (TREE_OPERAND (expr, 2));
4235 if (label_false
4236 && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
4237 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4238 have different locations, otherwise we end up with incorrect
4239 location information on the branches. */
4240 && (optimize
4241 || !EXPR_HAS_LOCATION (expr)
4242 || !rexpr_has_location (label_false)
4243 || EXPR_LOCATION (expr) == rexpr_location (label_false)))
4244 {
4245 have_else_clause_p = true;
4246 label_false = GOTO_DESTINATION (label_false);
4247 }
4248 else
4249 label_false = create_artificial_label (UNKNOWN_LOCATION);
4250
4251 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
4252 &arm2);
4253 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
4254 label_false);
4255 gimple_set_location (cond_stmt, EXPR_LOCATION (expr));
4256 copy_warning (cond_stmt, COND_EXPR_COND (expr));
4257 gimplify_seq_add_stmt (&seq, cond_stmt);
4258 gimple_stmt_iterator gsi = gsi_last (seq);
4259 maybe_fold_stmt (&gsi);
4260
4261 label_cont = NULL_TREE;
4262 if (!have_then_clause_p)
4263 {
4264 /* For if (...) {} else { code; } put label_true after
4265 the else block. */
4266 if (TREE_OPERAND (expr, 1) == NULL_TREE
4267 && !have_else_clause_p
4268 && TREE_OPERAND (expr, 2) != NULL_TREE)
4269 label_cont = label_true;
4270 else
4271 {
4272 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4273 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4274 /* For if (...) { code; } else {} or
4275 if (...) { code; } else goto label; or
4276 if (...) { code; return; } else { ... }
4277 label_cont isn't needed. */
4278 if (!have_else_clause_p
4279 && TREE_OPERAND (expr, 2) != NULL_TREE
4280 && gimple_seq_may_fallthru (seq))
4281 {
4282 gimple *g;
4283 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4284
4285 g = gimple_build_goto (label_cont);
4286
4287 /* GIMPLE_COND's are very low level; they have embedded
4288 gotos. This particular embedded goto should not be marked
4289 with the location of the original COND_EXPR, as it would
4290 correspond to the COND_EXPR's condition, not the ELSE or the
4291 THEN arms. To avoid marking it with the wrong location, flag
4292 it as "no location". */
4293 gimple_set_do_not_emit_location (g);
4294
4295 gimplify_seq_add_stmt (&seq, g);
4296 }
4297 }
4298 }
4299 if (!have_else_clause_p)
4300 {
4301 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4302 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4303 }
4304 if (label_cont)
4305 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4306
4307 gimple_pop_condition (pre_p);
4308 gimple_seq_add_seq (pre_p, seq);
4309
4310 if (ret == GS_ERROR)
4311 ; /* Do nothing. */
4312 else if (have_then_clause_p || have_else_clause_p)
4313 ret = GS_ALL_DONE;
4314 else
4315 {
4316 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4317 expr = TREE_OPERAND (expr, 0);
4318 gimplify_stmt (&expr, pre_p);
4319 }
4320
4321 *expr_p = NULL;
4322 return ret;
4323 }
4324
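/* End-to-end illustration (hypothetical fragment): the statement

     if (a < b) x = 1; else x = 2;

   gimplifies into the sequence

     if (a < b) goto L1; else goto L2;
     L1: x = 1; goto L3;
     L2: x = 2;
     L3:

   while the valued form x = (a < b) ? 1 : 2 first introduces an
   "iftmp" temporary as described above.  */
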
4325 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4326 to be marked addressable.
4327
4328 We cannot rely on such an expression being directly markable if a temporary
4329 has been created by the gimplification. In this case, we create another
4330 temporary and initialize it with a copy, which will become a store after we
4331 mark it addressable. This can happen if the front-end passed us something
4332 that it could not mark addressable yet, like a Fortran pass-by-reference
4333 parameter (int) floatvar. */
4334
4335 static void
4336 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4337 {
4338 while (handled_component_p (*expr_p))
4339 expr_p = &TREE_OPERAND (*expr_p, 0);
4340 if (is_gimple_reg (*expr_p))
4341 {
4342 /* Do not allow an SSA name as the temporary. */
4343 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
4344 DECL_NOT_GIMPLE_REG_P (var) = 1;
4345 *expr_p = var;
4346 }
4347 }
4348
4349 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4350 a call to __builtin_memcpy. */
4351
4352 static enum gimplify_status
4353 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4354 gimple_seq *seq_p)
4355 {
4356 tree t, to, to_ptr, from, from_ptr;
4357 gcall *gs;
4358 location_t loc = EXPR_LOCATION (*expr_p);
4359
4360 to = TREE_OPERAND (*expr_p, 0);
4361 from = TREE_OPERAND (*expr_p, 1);
4362
4363 /* Mark the RHS addressable. Beware that it may not be possible to do so
4364 directly if a temporary has been created by the gimplification. */
4365 prepare_gimple_addressable (&from, seq_p);
4366
4367 mark_addressable (from);
4368 from_ptr = build_fold_addr_expr_loc (loc, from);
4369 gimplify_arg (&from_ptr, seq_p, loc);
4370
4371 mark_addressable (to);
4372 to_ptr = build_fold_addr_expr_loc (loc, to);
4373 gimplify_arg (&to_ptr, seq_p, loc);
4374
4375 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4376
4377 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4378 gimple_call_set_alloca_for_var (gs, true);
4379
4380 if (want_value)
4381 {
4382 /* tmp = memcpy() */
4383 t = create_tmp_var (TREE_TYPE (to_ptr));
4384 gimple_call_set_lhs (gs, t);
4385 gimplify_seq_add_stmt (seq_p, gs);
4386
4387 *expr_p = build_simple_mem_ref (t);
4388 return GS_ALL_DONE;
4389 }
4390
4391 gimplify_seq_add_stmt (seq_p, gs);
4392 *expr_p = NULL;
4393 return GS_ALL_DONE;
4394 }
4395
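/* Illustration (hypothetical fragment): an assignment of a
   variable-sized aggregate, e.g.

     *dst = *src;   with a non-constant TYPE_SIZE of SZ bytes

   is lowered here to approximately

     __builtin_memcpy (&*dst, &*src, SZ);

   with both addresses gimplified into separate temporaries first.  */
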
4396 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4397 a call to __builtin_memset. In this case we know that the RHS is
4398 a CONSTRUCTOR with an empty element list. */
4399
4400 static enum gimplify_status
4401 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4402 gimple_seq *seq_p)
4403 {
4404 tree t, from, to, to_ptr;
4405 gcall *gs;
4406 location_t loc = EXPR_LOCATION (*expr_p);
4407
4408 /* Assert our assumptions, to abort instead of producing wrong code
4409 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4410 not be immediately exposed. */
4411 from = TREE_OPERAND (*expr_p, 1);
4412 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4413 from = TREE_OPERAND (from, 0);
4414
4415 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4416 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4417
4418 /* Now proceed. */
4419 to = TREE_OPERAND (*expr_p, 0);
4420
4421 to_ptr = build_fold_addr_expr_loc (loc, to);
4422 gimplify_arg (&to_ptr, seq_p, loc);
4423 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4424
4425 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4426
4427 if (want_value)
4428 {
4429 /* tmp = memset() */
4430 t = create_tmp_var (TREE_TYPE (to_ptr));
4431 gimple_call_set_lhs (gs, t);
4432 gimplify_seq_add_stmt (seq_p, gs);
4433
4434 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4435 return GS_ALL_DONE;
4436 }
4437
4438 gimplify_seq_add_stmt (seq_p, gs);
4439 *expr_p = NULL;
4440 return GS_ALL_DONE;
4441 }
4442
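/* Illustration (hypothetical fragment): zero-initialization from an
   empty CONSTRUCTOR, e.g.

     struct S s = {};   with a non-constant size SZ

   is lowered here to approximately

     __builtin_memset (&s, 0, SZ);  */
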
4443 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4444 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4445 assignment. Return non-null if we detect a potential overlap. */
4446
4447 struct gimplify_init_ctor_preeval_data
4448 {
4449 /* The base decl of the lhs object. May be NULL, in which case we
4450 have to assume the lhs is indirect. */
4451 tree lhs_base_decl;
4452
4453 /* The alias set of the lhs object. */
4454 alias_set_type lhs_alias_set;
4455 };
4456
4457 static tree
4458 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4459 {
4460 struct gimplify_init_ctor_preeval_data *data
4461 = (struct gimplify_init_ctor_preeval_data *) xdata;
4462 tree t = *tp;
4463
4464 /* If we find the base object, obviously we have overlap. */
4465 if (data->lhs_base_decl == t)
4466 return t;
4467
4468 /* If the constructor component is indirect, determine if we have a
4469 potential overlap with the lhs. The only bits of information we
4470 have to go on at this point are addressability and alias sets. */
4471 if ((INDIRECT_REF_P (t)
4472 || TREE_CODE (t) == MEM_REF)
4473 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4474 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4475 return t;
4476
4477 /* If the constructor component is a call, determine if it can hide a
4478 potential overlap with the lhs through an INDIRECT_REF like above.
4479 ??? Ugh - this is completely broken. In fact this whole analysis
4480 doesn't look conservative. */
4481 if (TREE_CODE (t) == CALL_EXPR)
4482 {
4483 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4484
4485 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4486 if (POINTER_TYPE_P (TREE_VALUE (type))
4487 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4488 && alias_sets_conflict_p (data->lhs_alias_set,
4489 get_alias_set
4490 (TREE_TYPE (TREE_VALUE (type)))))
4491 return t;
4492 }
4493
4494 if (IS_TYPE_OR_DECL_P (t))
4495 *walk_subtrees = 0;
4496 return NULL;
4497 }
4498
4499 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4500 force values that overlap with the lhs (as described by *DATA)
4501 into temporaries. */
4502
4503 static void
4504 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4505 struct gimplify_init_ctor_preeval_data *data)
4506 {
4507 enum gimplify_status one;
4508
4509 /* If the value is constant, then there's nothing to pre-evaluate. */
4510 if (TREE_CONSTANT (*expr_p))
4511 {
4512 /* Ensure it does not have side effects, it might contain a reference to
4513 the object we're initializing. */
4514 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4515 return;
4516 }
4517
4518 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4519 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4520 return;
4521
4522 /* Recurse for nested constructors. */
4523 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4524 {
4525 unsigned HOST_WIDE_INT ix;
4526 constructor_elt *ce;
4527 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4528
4529 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4530 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
4531
4532 return;
4533 }
4534
4535 /* If this is a variable sized type, we must remember the size. */
4536 maybe_with_size_expr (expr_p);
4537
4538 /* Gimplify the constructor element to something appropriate for the rhs
4539 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4540 the gimplifier will consider this a store to memory. Doing this
4541 gimplification now means that we won't have to deal with complicated
4542 language-specific trees, nor trees like SAVE_EXPR that can induce
4543 exponential search behavior. */
4544 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4545 if (one == GS_ERROR)
4546 {
4547 *expr_p = NULL;
4548 return;
4549 }
4550
4551 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4552 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4553 always be true for all scalars, since is_gimple_mem_rhs insists on a
4554 temporary variable for them. */
4555 if (DECL_P (*expr_p))
4556 return;
4557
4558 /* If this is of variable size, we have no choice but to assume it doesn't
4559 overlap since we can't make a temporary for it. */
4560 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
4561 return;
4562
4563 /* Otherwise, we must search for overlap ... */
4564 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4565 return;
4566
4567 /* ... and if found, force the value into a temporary. */
4568 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
4569 }
4570
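/* Illustration (hypothetical fragment): in the assignment

     a = (struct S) { .x = a.y + 1, .y = 0 };

   the value a.y + 1 overlaps the LHS, so it is forced into a
   temporary before `a' is overwritten:

     t1 = a.y + 1;
     a.x = t1;
     a.y = 0;  */
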
4571 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4572 a RANGE_EXPR in a CONSTRUCTOR for an array.
4573
4574 var = lower;
4575 loop_entry:
4576 object[var] = value;
4577 if (var == upper)
4578 goto loop_exit;
4579 var = var + 1;
4580 goto loop_entry;
4581 loop_exit:
4582
4583 We increment var _after_ the loop exit check because we might otherwise
4584 fail if upper == TYPE_MAX_VALUE (type for upper).
4585
4586 Note that we never have to deal with SAVE_EXPRs here, because this has
4587 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4588
4589 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
4590 gimple_seq *, bool);
4591
4592 static void
4593 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4594 tree value, tree array_elt_type,
4595 gimple_seq *pre_p, bool cleared)
4596 {
4597 tree loop_entry_label, loop_exit_label, fall_thru_label;
4598 tree var, var_type, cref, tmp;
4599
4600 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4601 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4602 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
4603
4604 /* Create and initialize the index variable. */
4605 var_type = TREE_TYPE (upper);
4606 var = create_tmp_var (var_type);
4607 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
4608
4609 /* Add the loop entry label. */
4610 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
4611
4612 /* Build the reference. */
4613 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4614 var, NULL_TREE, NULL_TREE);
4615
4616 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4617 the store. Otherwise just assign value to the reference. */
4618
4619 if (TREE_CODE (value) == CONSTRUCTOR)
4620 /* NB we might have to call ourselves recursively through
4621 gimplify_init_ctor_eval if the value is a constructor. */
4622 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4623 pre_p, cleared);
4624 else
4625 {
4626 if (gimplify_expr (&value, pre_p, NULL, is_gimple_val, fb_rvalue)
4627 != GS_ERROR)
4628 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
4629 }
4630
4631 /* We exit the loop when the index var is equal to the upper bound. */
4632 gimplify_seq_add_stmt (pre_p,
4633 gimple_build_cond (EQ_EXPR, var, upper,
4634 loop_exit_label, fall_thru_label));
4635
4636 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
4637
4638 /* Otherwise, increment the index var... */
4639 tmp = build2 (PLUS_EXPR, var_type, var,
4640 fold_convert (var_type, integer_one_node));
4641 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
4642
4643 /* ...and jump back to the loop entry. */
4644 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
4645
4646 /* Add the loop exit label. */
4647 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
4648 }
4649
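/* Illustration (hypothetical, using the GNU range-designator
   extension): the initializer

     int a[100] = { [10 ... 19] = v };

   is lowered by the function above to the loop shown in its comment,
   with lower == 10 and upper == 19.  */
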
4650 /* A subroutine of gimplify_init_constructor. Generate individual
4651 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4652 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4653 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4654 zeroed first. */
4655
4656 static void
4657 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
4658 gimple_seq *pre_p, bool cleared)
4659 {
4660 tree array_elt_type = NULL;
4661 unsigned HOST_WIDE_INT ix;
4662 tree purpose, value;
4663
4664 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
4665 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
4666
4667 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
4668 {
4669 tree cref;
4670
4671 /* NULL values are created above for gimplification errors. */
4672 if (value == NULL)
4673 continue;
4674
4675 if (cleared && initializer_zerop (value))
4676 continue;
4677
4678 /* ??? Here's to hoping the front end fills in all of the indices,
4679 so we don't have to figure out what's missing ourselves. */
4680 gcc_assert (purpose);
4681
4682 /* Skip zero-sized fields, unless value has side-effects. This can
4683 happen with calls to functions returning an empty type, which
4684 we shouldn't discard. As a number of downstream passes don't
4685 expect sets of empty type fields, we rely on the gimplification of
4686 the MODIFY_EXPR we make below to drop the assignment statement. */
4687 if (!TREE_SIDE_EFFECTS (value)
4688 && TREE_CODE (purpose) == FIELD_DECL
4689 && is_empty_type (TREE_TYPE (purpose)))
4690 continue;
4691
4692 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4693 whole range. */
4694 if (TREE_CODE (purpose) == RANGE_EXPR)
4695 {
4696 tree lower = TREE_OPERAND (purpose, 0);
4697 tree upper = TREE_OPERAND (purpose, 1);
4698
4699 /* If the lower bound is equal to upper, just treat it as if
4700 upper was the index. */
4701 if (simple_cst_equal (lower, upper))
4702 purpose = upper;
4703 else
4704 {
4705 gimplify_init_ctor_eval_range (object, lower, upper, value,
4706 array_elt_type, pre_p, cleared);
4707 continue;
4708 }
4709 }
4710
4711 if (array_elt_type)
4712 {
4713 /* Do not use bitsizetype for ARRAY_REF indices. */
4714 if (TYPE_DOMAIN (TREE_TYPE (object)))
4715 purpose
4716 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
4717 purpose);
4718 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4719 purpose, NULL_TREE, NULL_TREE);
4720 }
4721 else
4722 {
4723 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
4724 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
4725 unshare_expr (object), purpose, NULL_TREE);
4726 }
4727
4728 if (TREE_CODE (value) == CONSTRUCTOR
4729 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
4730 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4731 pre_p, cleared);
4732 else
4733 {
4734 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
4735 gimplify_and_add (init, pre_p);
4736 ggc_free (init);
4737 }
4738 }
4739 }
4740
4741 /* Return the appropriate RHS predicate for this LHS. */
4742
4743 gimple_predicate
4744 rhs_predicate_for (tree lhs)
4745 {
4746 if (is_gimple_reg (lhs))
4747 return is_gimple_reg_rhs_or_call;
4748 else
4749 return is_gimple_mem_rhs_or_call;
4750 }
4751
4752 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4753 before the LHS has been gimplified. */
4754
4755 static gimple_predicate
4756 initial_rhs_predicate_for (tree lhs)
4757 {
4758 if (is_gimple_reg_type (TREE_TYPE (lhs)))
4759 return is_gimple_reg_rhs_or_call;
4760 else
4761 return is_gimple_mem_rhs_or_call;
4762 }
4763
4764 /* Gimplify a C99 compound literal expression. This just means adding
4765 the DECL_EXPR before the current statement and using its anonymous
4766 decl instead. */
4767
4768 static enum gimplify_status
4769 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
4770 bool (*gimple_test_f) (tree),
4771 fallback_t fallback)
4772 {
4773 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
4774 tree decl = DECL_EXPR_DECL (decl_s);
4775 tree init = DECL_INITIAL (decl);
4776 /* Mark the decl as addressable if the compound literal
4777 expression is addressable now, otherwise it is marked too late
4778 after we gimplify the initialization expression. */
4779 if (TREE_ADDRESSABLE (*expr_p))
4780 TREE_ADDRESSABLE (decl) = 1;
4781 /* Otherwise, if we don't need an lvalue and have a literal, directly
4782 substitute it. Check if it matches the gimple predicate, as
4783 otherwise we'd generate a new temporary, and we can as well just
4784 use the decl we already have. */
4785 else if (!TREE_ADDRESSABLE (decl)
4786 && !TREE_THIS_VOLATILE (decl)
4787 && init
4788 && (fallback & fb_lvalue) == 0
4789 && gimple_test_f (init))
4790 {
4791 *expr_p = init;
4792 return GS_OK;
4793 }
4794
4795 /* If the decl is not addressable, then it is being used in some
4796 expression or on the right hand side of a statement, and it can
4797 be put into a readonly data section. */
4798 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
4799 TREE_READONLY (decl) = 1;
4800
4801 /* This decl isn't mentioned in the enclosing block, so add it to the
4802 list of temps. FIXME it seems a bit of a kludge to say that
4803 anonymous artificial vars aren't pushed, but everything else is. */
4804 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
4805 gimple_add_tmp_var (decl);
4806
4807 gimplify_and_add (decl_s, pre_p);
4808 *expr_p = decl;
4809 return GS_OK;
4810 }
4811
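/* Illustration (hypothetical fragment): for

     int *p = (int []) { 1, 2, 3 };

   the DECL_EXPR of the literal's anonymous array D is emitted first,
   so its initialization happens before the enclosing statement, and
   the COMPOUND_LITERAL_EXPR itself is replaced by D.  */
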
4812 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4813 return a new CONSTRUCTOR if something changed. */
4814
4815 static tree
4816 optimize_compound_literals_in_ctor (tree orig_ctor)
4817 {
4818 tree ctor = orig_ctor;
4819 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
4820 unsigned int idx, num = vec_safe_length (elts);
4821
4822 for (idx = 0; idx < num; idx++)
4823 {
4824 tree value = (*elts)[idx].value;
4825 tree newval = value;
4826 if (TREE_CODE (value) == CONSTRUCTOR)
4827 newval = optimize_compound_literals_in_ctor (value);
4828 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
4829 {
4830 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
4831 tree decl = DECL_EXPR_DECL (decl_s);
4832 tree init = DECL_INITIAL (decl);
4833
4834 if (!TREE_ADDRESSABLE (value)
4835 && !TREE_ADDRESSABLE (decl)
4836 && init
4837 && TREE_CODE (init) == CONSTRUCTOR)
4838 newval = optimize_compound_literals_in_ctor (init);
4839 }
4840 if (newval == value)
4841 continue;
4842
4843 if (ctor == orig_ctor)
4844 {
4845 ctor = copy_node (orig_ctor);
4846 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
4847 elts = CONSTRUCTOR_ELTS (ctor);
4848 }
4849 (*elts)[idx].value = newval;
4850 }
4851 return ctor;
4852 }
4853
4854 /* A subroutine of gimplify_modify_expr. Break out elements of a
4855 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4856
4857 Note that we still need to clear any elements that don't have explicit
4858 initializers, so if not all elements are initialized we keep the
4859 original MODIFY_EXPR, we just remove all of the constructor elements.
4860
4861 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4862 GS_ERROR if we would have to create a temporary when gimplifying
4863 this constructor. Otherwise, return GS_OK.
4864
4865 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
4866
4867 static enum gimplify_status
4868 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4869 bool want_value, bool notify_temp_creation)
4870 {
4871 tree object, ctor, type;
4872 enum gimplify_status ret;
4873 vec<constructor_elt, va_gc> *elts;
4874
4875 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
4876
4877 if (!notify_temp_creation)
4878 {
4879 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4880 is_gimple_lvalue, fb_lvalue);
4881 if (ret == GS_ERROR)
4882 return ret;
4883 }
4884
4885 object = TREE_OPERAND (*expr_p, 0);
4886 ctor = TREE_OPERAND (*expr_p, 1)
4887 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
4888 type = TREE_TYPE (ctor);
4889 elts = CONSTRUCTOR_ELTS (ctor);
4890 ret = GS_ALL_DONE;
4891
4892 switch (TREE_CODE (type))
4893 {
4894 case RECORD_TYPE:
4895 case UNION_TYPE:
4896 case QUAL_UNION_TYPE:
4897 case ARRAY_TYPE:
4898 {
4899 /* Use readonly data for initializers of this or smaller size
4900 regardless of the num_nonzero_elements / num_unique_nonzero_elements
4901 ratio. */
4902 const HOST_WIDE_INT min_unique_size = 64;
4903 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
4904 is smaller than this, use readonly data. */
4905 const int unique_nonzero_ratio = 8;
4906 /* True if a single access of the object must be ensured. This is the
4907 case if the target is volatile, the type is non-addressable and more
4908 than one field needs to be assigned. */
4909 const bool ensure_single_access
4910 = TREE_THIS_VOLATILE (object)
4911 && !TREE_ADDRESSABLE (type)
4912 && vec_safe_length (elts) > 1;
4913 struct gimplify_init_ctor_preeval_data preeval_data;
4914 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
4915 HOST_WIDE_INT num_unique_nonzero_elements;
4916 bool cleared, complete_p, valid_const_initializer;
4917
4918 /* Aggregate types must lower constructors to initialization of
4919 individual elements. The exception is that a CONSTRUCTOR node
4920 with no elements indicates zero-initialization of the whole. */
4921 if (vec_safe_is_empty (elts))
4922 {
4923 if (notify_temp_creation)
4924 return GS_OK;
4925 break;
4926 }
4927
4928 /* Fetch information about the constructor to direct later processing.
4929 We might want to make static versions of it in various cases, and
4930 can only do so if it is known to be a valid constant initializer. */
4931 valid_const_initializer
4932 = categorize_ctor_elements (ctor, &num_nonzero_elements,
4933 &num_unique_nonzero_elements,
4934 &num_ctor_elements, &complete_p);
4935
4936 /* If a const aggregate variable is being initialized, then it
4937 should never be a loss to promote the variable to static. */
4938 if (valid_const_initializer
4939 && num_nonzero_elements > 1
4940 && TREE_READONLY (object)
4941 && VAR_P (object)
4942 && !DECL_REGISTER (object)
4943 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object))
4944 /* For ctors that have many repeated nonzero elements
4945 represented through RANGE_EXPRs, prefer initializing
4946 those through runtime loops over copies of large amounts
4947 of data from readonly data section. */
4948 && (num_unique_nonzero_elements
4949 > num_nonzero_elements / unique_nonzero_ratio
4950 || ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
4951 <= (unsigned HOST_WIDE_INT) min_unique_size)))
4952 {
4953 if (notify_temp_creation)
4954 return GS_ERROR;
4955
4956 DECL_INITIAL (object) = ctor;
4957 TREE_STATIC (object) = 1;
4958 if (!DECL_NAME (object))
4959 DECL_NAME (object) = create_tmp_var_name ("C");
4960 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
4961
4962 /* ??? C++ doesn't automatically append a .<number> to the
4963 assembler name, and even when it does, it looks at FE private
4964 data structures to figure out what that number should be,
4965 which are not set for this variable. I suppose this is
4966 important for local statics for inline functions, which aren't
4967 "local" in the object file sense. So in order to get a unique
4968 TU-local symbol, we must invoke the lhd version now. */
4969 lhd_set_decl_assembler_name (object);
4970
4971 *expr_p = NULL_TREE;
4972 break;
4973 }
4974
4975 /* If there are "lots" of initialized elements, even discounting
4976 those that are not address constants (and thus *must* be
4977 computed at runtime), then partition the constructor into
4978 constant and non-constant parts. Block copy the constant
4979 parts in, then generate code for the non-constant parts. */
4980 /* TODO. There's code in cp/typeck.c to do this. */
4981
4982 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
4983 /* store_constructor will ignore the clearing of variable-sized
4984 objects. Initializers for such objects must explicitly set
4985 every field that needs to be set. */
4986 cleared = false;
4987 else if (!complete_p)
4988 /* If the constructor isn't complete, clear the whole object
4989 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
4990
4991 ??? This ought not to be needed. For any element not present
4992 in the initializer, we should simply set them to zero. Except
4993 we'd need to *find* the elements that are not present, and that
4994 requires trickery to avoid quadratic compile-time behavior in
4995 large cases or excessive memory use in small cases. */
4996 cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
4997 else if (num_ctor_elements - num_nonzero_elements
4998 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
4999 && num_nonzero_elements < num_ctor_elements / 4)
5000 /* If there are "lots" of zeros, it's more efficient to clear
5001 the memory and then set the nonzero elements. */
5002 cleared = true;
5003 else if (ensure_single_access && num_nonzero_elements == 0)
5004 /* If a single access to the target must be ensured and all elements
5005 are zero, then it's optimal to clear whatever their number. */
5006 cleared = true;
5007 else
5008 cleared = false;
5009
5010 /* If there are "lots" of initialized elements, and all of them
5011 are valid address constants, then the entire initializer can
5012 be dropped to memory, and then memcpy'd out. Don't do this
5013 for sparse arrays, though, as it's more efficient to follow
5014 the standard CONSTRUCTOR behavior of memset followed by
5015 individual element initialization. Also don't do this for small
5016 all-zero initializers (which aren't big enough to merit
5017 clearing), and don't try to make bitwise copies of
5018 TREE_ADDRESSABLE types. */
5019 if (valid_const_initializer
5020 && complete_p
5021 && !(cleared || num_nonzero_elements == 0)
5022 && !TREE_ADDRESSABLE (type))
5023 {
5024 HOST_WIDE_INT size = int_size_in_bytes (type);
5025 unsigned int align;
5026
5027 /* ??? We can still get unbounded array types, at least
5028 from the C++ front end. This seems wrong, but attempt
5029 to work around it for now. */
5030 if (size < 0)
5031 {
5032 size = int_size_in_bytes (TREE_TYPE (object));
5033 if (size >= 0)
5034 TREE_TYPE (ctor) = type = TREE_TYPE (object);
5035 }
5036
5037 /* Find the maximum alignment we can assume for the object. */
5038 /* ??? Make use of DECL_OFFSET_ALIGN. */
5039 if (DECL_P (object))
5040 align = DECL_ALIGN (object);
5041 else
5042 align = TYPE_ALIGN (type);
5043
5044 /* Do a block move either if the size is so small as to make
5045 each individual move a sub-unit move on average, or if it
5046 is so large as to make individual moves inefficient. */
5047 if (size > 0
5048 && num_nonzero_elements > 1
5049 /* For ctors that have many repeated nonzero elements
5050 represented through RANGE_EXPRs, prefer initializing
5051 those through runtime loops over copies of large amounts
5052 of data from readonly data section. */
5053 && (num_unique_nonzero_elements
5054 > num_nonzero_elements / unique_nonzero_ratio
5055 || size <= min_unique_size)
5056 && (size < num_nonzero_elements
5057 || !can_move_by_pieces (size, align)))
5058 {
5059 if (notify_temp_creation)
5060 return GS_ERROR;
5061
5062 walk_tree (&ctor, force_labels_r, NULL, NULL);
5063 ctor = tree_output_constant_def (ctor);
5064 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
5065 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
5066 TREE_OPERAND (*expr_p, 1) = ctor;
5067
5068 /* This is no longer an assignment of a CONSTRUCTOR, but
5069 we still may have processing to do on the LHS. So
5070 pretend we didn't do anything here to let that happen. */
5071 return GS_UNHANDLED;
5072 }
5073 }
5074
5075 /* If a single access to the target must be ensured and there are
5076 nonzero elements or the zero elements are not assigned en masse,
5077 initialize the target from a temporary. */
5078 if (ensure_single_access && (num_nonzero_elements > 0 || !cleared))
5079 {
5080 if (notify_temp_creation)
5081 return GS_ERROR;
5082
5083 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
5084 TREE_OPERAND (*expr_p, 0) = temp;
5085 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
5086 *expr_p,
5087 build2 (MODIFY_EXPR, void_type_node,
5088 object, temp));
5089 return GS_OK;
5090 }
5091
5092 if (notify_temp_creation)
5093 return GS_OK;
5094
5095 /* If there are nonzero elements and if needed, pre-evaluate to capture
5096 elements overlapping with the lhs into temporaries. We must do this
5097 before clearing to fetch the values before they are zeroed-out. */
5098 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
5099 {
5100 preeval_data.lhs_base_decl = get_base_address (object);
5101 if (!DECL_P (preeval_data.lhs_base_decl))
5102 preeval_data.lhs_base_decl = NULL;
5103 preeval_data.lhs_alias_set = get_alias_set (object);
5104
5105 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
5106 pre_p, post_p, &preeval_data);
5107 }
5108
5109 bool ctor_has_side_effects_p
5110 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
5111
5112 if (cleared)
5113 {
5114 /* Zap the CONSTRUCTOR element list, which simplifies this case.
5115 Note that we still have to gimplify, in order to handle the
5116 case of variable sized types. Avoid shared tree structures. */
5117 CONSTRUCTOR_ELTS (ctor) = NULL;
5118 TREE_SIDE_EFFECTS (ctor) = 0;
5119 object = unshare_expr (object);
5120 gimplify_stmt (expr_p, pre_p);
5121 }
5122
5123 /* If we have not block cleared the object, or if there are nonzero
5124 elements in the constructor, or if the constructor has side effects,
5125 add assignments to the individual scalar fields of the object. */
5126 if (!cleared
5127 || num_nonzero_elements > 0
5128 || ctor_has_side_effects_p)
5129 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
5130
5131 *expr_p = NULL_TREE;
5132 }
5133 break;
5134
5135 case COMPLEX_TYPE:
5136 {
5137 tree r, i;
5138
5139 if (notify_temp_creation)
5140 return GS_OK;
5141
5142 /* Extract the real and imaginary parts out of the ctor. */
5143 gcc_assert (elts->length () == 2);
5144 r = (*elts)[0].value;
5145 i = (*elts)[1].value;
5146 if (r == NULL || i == NULL)
5147 {
5148 tree zero = build_zero_cst (TREE_TYPE (type));
5149 if (r == NULL)
5150 r = zero;
5151 if (i == NULL)
5152 i = zero;
5153 }
5154
5155 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5156 represent creation of a complex value. */
5157 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
5158 {
5159 ctor = build_complex (type, r, i);
5160 TREE_OPERAND (*expr_p, 1) = ctor;
5161 }
5162 else
5163 {
5164 ctor = build2 (COMPLEX_EXPR, type, r, i);
5165 TREE_OPERAND (*expr_p, 1) = ctor;
5166 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
5167 pre_p,
5168 post_p,
5169 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
5170 fb_rvalue);
5171 }
5172 }
5173 break;
5174
5175 case VECTOR_TYPE:
5176 {
5177 unsigned HOST_WIDE_INT ix;
5178 constructor_elt *ce;
5179
5180 if (notify_temp_creation)
5181 return GS_OK;
5182
5183 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5184 if (TREE_CONSTANT (ctor))
5185 {
5186 bool constant_p = true;
5187 tree value;
5188
5189 /* Even when ctor is constant, it might contain non-*_CST
5190 elements, such as addresses or trapping values like
5191 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5192 in VECTOR_CST nodes. */
5193 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
5194 if (!CONSTANT_CLASS_P (value))
5195 {
5196 constant_p = false;
5197 break;
5198 }
5199
5200 if (constant_p)
5201 {
5202 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
5203 break;
5204 }
5205
5206 TREE_CONSTANT (ctor) = 0;
5207 }
5208
5209 /* Vector types use CONSTRUCTOR all the way through gimple
5210 compilation as a general initializer. */
5211 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
5212 {
5213 enum gimplify_status tret;
5214 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
5215 fb_rvalue);
5216 if (tret == GS_ERROR)
5217 ret = GS_ERROR;
5218 else if (TREE_STATIC (ctor)
5219 && !initializer_constant_valid_p (ce->value,
5220 TREE_TYPE (ce->value)))
5221 TREE_STATIC (ctor) = 0;
5222 }
5223 recompute_constructor_flags (ctor);
5224 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
5225 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
5226 }
5227 break;
5228
5229 default:
5230 /* So how did we get a CONSTRUCTOR for a scalar type? */
5231 gcc_unreachable ();
5232 }
5233
5234 if (ret == GS_ERROR)
5235 return GS_ERROR;
5236 /* If we have gimplified both sides of the initializer but have
5237 not emitted an assignment, do so now. */
5238 if (*expr_p)
5239 {
5240 tree lhs = TREE_OPERAND (*expr_p, 0);
5241 tree rhs = TREE_OPERAND (*expr_p, 1);
5242 if (want_value && object == lhs)
5243 lhs = unshare_expr (lhs);
5244 gassign *init = gimple_build_assign (lhs, rhs);
5245 gimplify_seq_add_stmt (pre_p, init);
5246 }
5247 if (want_value)
5248 {
5249 *expr_p = object;
5250 return GS_OK;
5251 }
5252 else
5253 {
5254 *expr_p = NULL;
5255 return GS_ALL_DONE;
5256 }
5257 }
5258
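/* Illustration (hypothetical fragments) of two of the strategies
   above: a large, mostly-zero local such as

     int a[1000] = { [3] = 5 };

   is "cleared" and then patched element-wise,

     a = {};
     a[3] = 5;

   while a sizable constant initializer of a TREE_READONLY variable may
   instead be promoted to a static object with DECL_INITIAL set, so no
   runtime stores are needed at all.  */
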
5259 /* Given a pointer value OP0, return a simplified version of an
5260 indirection through OP0, or NULL_TREE if no simplification is
5261 possible. This may only be applied to a rhs of an expression.
5262 Note that the resulting type may be different from the type pointed
5263 to in the sense that it is still compatible from the langhooks
5264 point of view. */
5265
5266 static tree
5267 gimple_fold_indirect_ref_rhs (tree t)
5268 {
5269 return gimple_fold_indirect_ref (t);
5270 }
5271
5272 /* Subroutine of gimplify_modify_expr to do simplifications of
5273 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5274 something changes. */
5275
5276 static enum gimplify_status
5277 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5278 gimple_seq *pre_p, gimple_seq *post_p,
5279 bool want_value)
5280 {
5281 enum gimplify_status ret = GS_UNHANDLED;
5282 bool changed;
5283
5284 do
5285 {
5286 changed = false;
5287 switch (TREE_CODE (*from_p))
5288 {
5289 case VAR_DECL:
5290 /* If we're assigning from a read-only variable initialized with
5291 a constructor and not volatile, do the direct assignment from
5292 the constructor, but only if the target is not volatile either
5293 since this latter assignment might end up being done on a per
5294 field basis. However, if the target is volatile and the type
5295 is aggregate and non-addressable, gimplify_init_constructor
5296 knows that it needs to ensure a single access to the target
5297 and it will return GS_OK only in this case. */
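	  /* E.g., given "const struct S s = { 1, 2 }; x = s;", this
	     rewrites the RHS so the assignment becomes "x = { 1, 2 }"
	     and gimplify_init_constructor can split it into per-field
	     assignments (a sketch of the common case).  */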
5298 if (TREE_READONLY (*from_p)
5299 && DECL_INITIAL (*from_p)
5300 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR
5301 && !TREE_THIS_VOLATILE (*from_p)
5302 && (!TREE_THIS_VOLATILE (*to_p)
5303 || (AGGREGATE_TYPE_P (TREE_TYPE (*to_p))
5304 && !TREE_ADDRESSABLE (TREE_TYPE (*to_p)))))
5305 {
5306 tree old_from = *from_p;
5307 enum gimplify_status subret;
5308
5309 /* Move the constructor into the RHS. */
5310 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5311
5312 /* Let's see if gimplify_init_constructor will need to put
5313 it in memory. */
5314 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5315 false, true);
5316 if (subret == GS_ERROR)
5317 {
5318 /* If so, revert the change. */
5319 *from_p = old_from;
5320 }
5321 else
5322 {
5323 ret = GS_OK;
5324 changed = true;
5325 }
5326 }
5327 break;
5328 case INDIRECT_REF:
5329 {
5330 /* If we have code like
5331
5332 *(const A*)(A*)&x
5333
5334 where the type of "x" is a (possibly cv-qualified) variant
5335 of "A", treat the entire expression as identical to "x".
5336 This kind of code arises in C++ when an object is bound
5337 to a const reference, and if "x" is a TARGET_EXPR we want
5338 to take advantage of the optimization below. */
5339 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5340 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
5341 if (t)
5342 {
5343 if (TREE_THIS_VOLATILE (t) != volatile_p)
5344 {
5345 if (DECL_P (t))
5346 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5347 build_fold_addr_expr (t));
5348 if (REFERENCE_CLASS_P (t))
5349 TREE_THIS_VOLATILE (t) = volatile_p;
5350 }
5351 *from_p = t;
5352 ret = GS_OK;
5353 changed = true;
5354 }
5355 break;
5356 }
5357
5358 case TARGET_EXPR:
5359 {
5360 /* If we are initializing something from a TARGET_EXPR, strip the
5361 TARGET_EXPR and initialize it directly, if possible. This can't
5362 be done if the initializer is void, since that implies that the
5363 temporary is set in some non-trivial way.
5364
5365 ??? What about code that pulls out the temp and uses it
5366 elsewhere? I think that such code never uses the TARGET_EXPR as
5367 an initializer. If I'm wrong, we'll die because the temp won't
5368 have any RTL. In that case, I guess we'll need to replace
5369 references somehow. */
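	    /* E.g., "x = TARGET_EXPR <D.1, f ()>" becomes "x = f ()",
	       initializing X directly instead of going through the
	       temporary D.1 (hypothetical temporary name).  */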
5370 tree init = TARGET_EXPR_INITIAL (*from_p);
5371
5372 if (init
5373 && (TREE_CODE (*expr_p) != MODIFY_EXPR
5374 || !TARGET_EXPR_NO_ELIDE (*from_p))
5375 && !VOID_TYPE_P (TREE_TYPE (init)))
5376 {
5377 *from_p = init;
5378 ret = GS_OK;
5379 changed = true;
5380 }
5381 }
5382 break;
5383
5384 case COMPOUND_EXPR:
5385 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5386 caught. */
5387 gimplify_compound_expr (from_p, pre_p, true);
5388 ret = GS_OK;
5389 changed = true;
5390 break;
5391
5392 case CONSTRUCTOR:
5393 /* If we already made some changes, let the front end have a
5394 crack at this before we break it down. */
5395 if (ret != GS_UNHANDLED)
5396 break;
5397 /* If we're initializing from a CONSTRUCTOR, break this into
5398 individual MODIFY_EXPRs. */
5399 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5400 false);
5401
5402 case COND_EXPR:
5403 /* If we're assigning to a non-register type, push the assignment
5404 down into the branches. This is mandatory for ADDRESSABLE types,
5405 since we cannot generate temporaries for such, but it saves a
5406 copy in other cases as well. */
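	  /* E.g., for an aggregate LHS this rewrites
	       lhs = b ? x : y;
	     into
	       b ? (lhs = x) : (lhs = y);
	     which is then gimplified as an if/else (a sketch).  */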
5407 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5408 {
5409 /* This code should mirror the code in gimplify_cond_expr. */
5410 enum tree_code code = TREE_CODE (*expr_p);
5411 tree cond = *from_p;
5412 tree result = *to_p;
5413
5414 ret = gimplify_expr (&result, pre_p, post_p,
5415 is_gimple_lvalue, fb_lvalue);
5416 if (ret != GS_ERROR)
5417 ret = GS_OK;
5418
5419 /* If we are going to write RESULT more than once, clear
5420 TREE_READONLY flag, otherwise we might incorrectly promote
5421 the variable to static const and initialize it at compile
5422 time in one of the branches. */
5423 if (VAR_P (result)
5424 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5425 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5426 TREE_READONLY (result) = 0;
5427 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5428 TREE_OPERAND (cond, 1)
5429 = build2 (code, void_type_node, result,
5430 TREE_OPERAND (cond, 1));
5431 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5432 TREE_OPERAND (cond, 2)
5433 = build2 (code, void_type_node, unshare_expr (result),
5434 TREE_OPERAND (cond, 2));
5435
5436 TREE_TYPE (cond) = void_type_node;
5437 recalculate_side_effects (cond);
5438
5439 if (want_value)
5440 {
5441 gimplify_and_add (cond, pre_p);
5442 *expr_p = unshare_expr (result);
5443 }
5444 else
5445 *expr_p = cond;
5446 return ret;
5447 }
5448 break;
5449
5450 case CALL_EXPR:
5451 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5452 return slot so that we don't generate a temporary. */
5453 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5454 && aggregate_value_p (*from_p, *from_p))
5455 {
5456 bool use_target;
5457
5458 if (!(rhs_predicate_for (*to_p))(*from_p))
5459 /* If we need a temporary, *to_p isn't accurate. */
5460 use_target = false;
5461 /* It's OK to use the return slot directly unless it's an NRV. */
5462 else if (TREE_CODE (*to_p) == RESULT_DECL
5463 && DECL_NAME (*to_p) == NULL_TREE
5464 && needs_to_live_in_memory (*to_p))
5465 use_target = true;
5466 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5467 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5468 /* Don't force regs into memory. */
5469 use_target = false;
5470 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5471 /* It's OK to use the target directly if it's being
5472 initialized. */
5473 use_target = true;
5474 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5475 != INTEGER_CST)
5476 /* Always use the target and thus RSO for variable-sized types.
5477 GIMPLE cannot deal with a variable-sized assignment
5478 embedded in a call statement. */
5479 use_target = true;
5480 else if (TREE_CODE (*to_p) != SSA_NAME
5481 && (!is_gimple_variable (*to_p)
5482 || needs_to_live_in_memory (*to_p)))
5483 /* Don't use the original target if it's already addressable;
5484 if its address escapes, and the called function uses the
5485 NRV optimization, a conforming program could see *to_p
5486 change before the called function returns; see c++/19317.
5487 When optimizing, the return_slot pass marks more functions
5488 as safe after we have escape info. */
5489 use_target = false;
5490 else
5491 use_target = true;
5492
5493 if (use_target)
5494 {
5495 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5496 mark_addressable (*to_p);
5497 }
5498 }
5499 break;
5500
5501 case WITH_SIZE_EXPR:
5502 /* Likewise for calls that return an aggregate of non-constant size,
5503 since we would not be able to generate a temporary at all. */
5504 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5505 {
5506 *from_p = TREE_OPERAND (*from_p, 0);
5507 /* We don't change ret in this case because the
5508 WITH_SIZE_EXPR might have been added in
5509 gimplify_modify_expr, so returning GS_OK would lead to an
5510 infinite loop. */
5511 changed = true;
5512 }
5513 break;
5514
5515 /* If we're initializing from a container, push the initialization
5516 inside it. */
5517 case CLEANUP_POINT_EXPR:
5518 case BIND_EXPR:
5519 case STATEMENT_LIST:
5520 {
5521 tree wrap = *from_p;
5522 tree t;
5523
5524 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5525 fb_lvalue);
5526 if (ret != GS_ERROR)
5527 ret = GS_OK;
5528
5529 t = voidify_wrapper_expr (wrap, *expr_p);
5530 gcc_assert (t == *expr_p);
5531
5532 if (want_value)
5533 {
5534 gimplify_and_add (wrap, pre_p);
5535 *expr_p = unshare_expr (*to_p);
5536 }
5537 else
5538 *expr_p = wrap;
5539 return GS_OK;
5540 }
5541
5542 case NOP_EXPR:
5543 /* Pull out compound literal expressions from a NOP_EXPR.
5544 Those are created in the C FE to drop qualifiers during
5545 lvalue conversion. */
5546 if ((TREE_CODE (TREE_OPERAND (*from_p, 0)) == COMPOUND_LITERAL_EXPR)
5547 && tree_ssa_useless_type_conversion (*from_p))
5548 {
5549 *from_p = TREE_OPERAND (*from_p, 0);
5550 ret = GS_OK;
5551 changed = true;
5552 }
5553 break;
5554
5555 case COMPOUND_LITERAL_EXPR:
5556 {
5557 tree complit = TREE_OPERAND (*expr_p, 1);
5558 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5559 tree decl = DECL_EXPR_DECL (decl_s);
5560 tree init = DECL_INITIAL (decl);
5561
5562 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5563 into struct T x = { 0, 1, 2 } if the address of the
5564 compound literal has never been taken. */
5565 if (!TREE_ADDRESSABLE (complit)
5566 && !TREE_ADDRESSABLE (decl)
5567 && init)
5568 {
5569 *expr_p = copy_node (*expr_p);
5570 TREE_OPERAND (*expr_p, 1) = init;
5571 return GS_OK;
5572 }
5573 }
5574
5575 default:
5576 break;
5577 }
5578 }
5579 while (changed);
5580
5581 return ret;
5582 }
5583
5584
5585 /* Return true if T looks like a valid GIMPLE statement. */
5586
5587 static bool
5588 is_gimple_stmt (tree t)
5589 {
5590 const enum tree_code code = TREE_CODE (t);
5591
5592 switch (code)
5593 {
5594 case NOP_EXPR:
5595 /* The only valid NOP_EXPR is the empty statement. */
5596 return IS_EMPTY_STMT (t);
5597
5598 case BIND_EXPR:
5599 case COND_EXPR:
5600 /* These are only valid if they're void. */
5601 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5602
5603 case SWITCH_EXPR:
5604 case GOTO_EXPR:
5605 case RETURN_EXPR:
5606 case LABEL_EXPR:
5607 case CASE_LABEL_EXPR:
5608 case TRY_CATCH_EXPR:
5609 case TRY_FINALLY_EXPR:
5610 case EH_FILTER_EXPR:
5611 case CATCH_EXPR:
5612 case ASM_EXPR:
5613 case STATEMENT_LIST:
5614 case OACC_PARALLEL:
5615 case OACC_KERNELS:
5616 case OACC_SERIAL:
5617 case OACC_DATA:
5618 case OACC_HOST_DATA:
5619 case OACC_DECLARE:
5620 case OACC_UPDATE:
5621 case OACC_ENTER_DATA:
5622 case OACC_EXIT_DATA:
5623 case OACC_CACHE:
5624 case OMP_PARALLEL:
5625 case OMP_FOR:
5626 case OMP_SIMD:
5627 case OMP_DISTRIBUTE:
5628 case OMP_LOOP:
5629 case OACC_LOOP:
5630 case OMP_SCAN:
5631 case OMP_SECTIONS:
5632 case OMP_SECTION:
5633 case OMP_SINGLE:
5634 case OMP_MASTER:
5635 case OMP_TASKGROUP:
5636 case OMP_ORDERED:
5637 case OMP_CRITICAL:
5638 case OMP_TASK:
5639 case OMP_TARGET:
5640 case OMP_TARGET_DATA:
5641 case OMP_TARGET_UPDATE:
5642 case OMP_TARGET_ENTER_DATA:
5643 case OMP_TARGET_EXIT_DATA:
5644 case OMP_TASKLOOP:
5645 case OMP_TEAMS:
5646 /* These are always void. */
5647 return true;
5648
5649 case CALL_EXPR:
5650 case MODIFY_EXPR:
5651 case PREDICT_EXPR:
5652 /* These are valid regardless of their type. */
5653 return true;
5654
5655 default:
5656 return false;
5657 }
5658 }
5659
5660
5661 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5662 a MODIFY_EXPR with an lhs that is a REAL/IMAGPART_EXPR of a gimple register.
5663
5664 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5665 other, unmodified part of the complex object just before the total store.
5666 As a consequence, if the object is still uninitialized, an undefined value
5667 will be loaded into a register, which may result in a spurious exception
5668 if the register is floating-point and the value happens to be a signaling
5669 NaN for example. Then the fully-fledged complex operations lowering pass
5670 followed by a DCE pass are necessary in order to fix things up. */
5671
5672 static enum gimplify_status
5673 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5674 bool want_value)
5675 {
5676 enum tree_code code, ocode;
5677 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5678
5679 lhs = TREE_OPERAND (*expr_p, 0);
5680 rhs = TREE_OPERAND (*expr_p, 1);
5681 code = TREE_CODE (lhs);
5682 lhs = TREE_OPERAND (lhs, 0);
5683
5684 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5685 other = build1 (ocode, TREE_TYPE (rhs), lhs);
5686 suppress_warning (other);
5687 other = get_formal_tmp_var (other, pre_p);
5688
5689 realpart = code == REALPART_EXPR ? rhs : other;
5690 imagpart = code == REALPART_EXPR ? other : rhs;
5691
5692 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5693 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5694 else
5695 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5696
5697 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5698 *expr_p = (want_value) ? rhs : NULL_TREE;
5699
5700 return GS_ALL_DONE;
5701 }
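/* For example (a sketch, with D.1 a hypothetical temporary), the
   partial store

     __real__ c = x;

   where C is a non-addressable complex variable becomes the total
   store

     D.1 = __imag__ c;
     c = COMPLEX_EXPR <x, D.1>;  */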
5702
5703 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5704
5705 modify_expr
5706 : varname '=' rhs
5707 | '*' ID '=' rhs
5708
5709 PRE_P points to the list where side effects that must happen before
5710 *EXPR_P should be stored.
5711
5712 POST_P points to the list where side effects that must happen after
5713 *EXPR_P should be stored.
5714
5715 WANT_VALUE is nonzero iff we want to use the value of this expression
5716 in another expression. */
5717
5718 static enum gimplify_status
5719 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5720 bool want_value)
5721 {
5722 tree *from_p = &TREE_OPERAND (*expr_p, 1);
5723 tree *to_p = &TREE_OPERAND (*expr_p, 0);
5724 enum gimplify_status ret = GS_UNHANDLED;
5725 gimple *assign;
5726 location_t loc = EXPR_LOCATION (*expr_p);
5727 gimple_stmt_iterator gsi;
5728
5729 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
5730 || TREE_CODE (*expr_p) == INIT_EXPR);
5731
5732 /* Trying to simplify a clobber using normal logic doesn't work,
5733 so handle it here. */
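  /* E.g., "x = {CLOBBER}", which marks X as dead, is emitted directly
     as a GIMPLE assignment once the LHS has been gimplified.  */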
5734 if (TREE_CLOBBER_P (*from_p))
5735 {
5736 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5737 if (ret == GS_ERROR)
5738 return ret;
5739 gcc_assert (!want_value);
5740 if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
5741 {
5742 tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
5743 pre_p, post_p);
5744 *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
5745 }
5746 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
5747 *expr_p = NULL;
5748 return GS_ALL_DONE;
5749 }
5750
5751 /* Insert pointer conversions required by the middle-end that are not
5752 required by the frontend. This fixes middle-end type checking for,
5753 for example, gcc.dg/redecl-6.c. */
5754 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
5755 {
5756 STRIP_USELESS_TYPE_CONVERSION (*from_p);
5757 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
5758 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
5759 }
5760
5761 /* See if any simplifications can be done based on what the RHS is. */
5762 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5763 want_value);
5764 if (ret != GS_UNHANDLED)
5765 return ret;
5766
5767 /* For empty types only gimplify the left hand side and right hand
5768 side as statements and throw away the assignment. Do this after
5769 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5770 types properly. */
5771 if (is_empty_type (TREE_TYPE (*from_p))
5772 && !want_value
5773 /* Don't do this for calls that return addressable types, expand_call
5774 relies on those having a lhs. */
5775 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
5776 && TREE_CODE (*from_p) == CALL_EXPR))
5777 {
5778 gimplify_stmt (from_p, pre_p);
5779 gimplify_stmt (to_p, pre_p);
5780 *expr_p = NULL_TREE;
5781 return GS_ALL_DONE;
5782 }
5783
5784 /* If the value being copied is of variable width, compute the length
5785 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5786 before gimplifying any of the operands so that we can resolve any
5787 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5788 the size of the expression to be copied, not of the destination, so
5789 that is what we must do here. */
5790 maybe_with_size_expr (from_p);
5791
5792 /* As a special case, we have to temporarily allow for assignments
5793 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5794 a toplevel statement, when gimplifying the GENERIC expression
5795 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5796 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5797
5798 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5799 prevent gimplify_expr from trying to create a new temporary for
5800 foo's LHS, we tell it that it should only gimplify until it
5801 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5802 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5803 and all we need to do here is set 'a' to be its LHS. */
5804
5805 /* Gimplify the RHS first for C++17 and bug 71104. */
5806 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
5807 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
5808 if (ret == GS_ERROR)
5809 return ret;
5810
5811 /* Then gimplify the LHS. */
5812 /* If we gimplified the RHS to a CALL_EXPR and that call may return
5813 twice we have to make sure to gimplify into non-SSA as otherwise
5814 the abnormal edge added later will make those defs not dominate
5815 their uses.
5816 ??? Technically this applies only to the registers used in the
5817 resulting non-register *TO_P. */
5818 bool saved_into_ssa = gimplify_ctxp->into_ssa;
5819 if (saved_into_ssa
5820 && TREE_CODE (*from_p) == CALL_EXPR
5821 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
5822 gimplify_ctxp->into_ssa = false;
5823 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5824 gimplify_ctxp->into_ssa = saved_into_ssa;
5825 if (ret == GS_ERROR)
5826 return ret;
5827
5828 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5829 guess for the predicate was wrong. */
5830 gimple_predicate final_pred = rhs_predicate_for (*to_p);
5831 if (final_pred != initial_pred)
5832 {
5833 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
5834 if (ret == GS_ERROR)
5835 return ret;
5836 }
5837
5838 /* In the case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add
5839 the type size as argument to the call. */
5840 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5841 {
5842 tree call = TREE_OPERAND (*from_p, 0);
5843 tree vlasize = TREE_OPERAND (*from_p, 1);
5844
5845 if (TREE_CODE (call) == CALL_EXPR
5846 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
5847 {
5848 int nargs = call_expr_nargs (call);
5849 tree type = TREE_TYPE (call);
5850 tree ap = CALL_EXPR_ARG (call, 0);
5851 tree tag = CALL_EXPR_ARG (call, 1);
5852 tree aptag = CALL_EXPR_ARG (call, 2);
5853 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
5854 IFN_VA_ARG, type,
5855 nargs + 1, ap, tag,
5856 aptag, vlasize);
5857 TREE_OPERAND (*from_p, 0) = newcall;
5858 }
5859 }
5860
5861 /* Now see if the above changed *from_p to something we handle specially. */
5862 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5863 want_value);
5864 if (ret != GS_UNHANDLED)
5865 return ret;
5866
5867 /* If we've got a variable-sized assignment between two lvalues (i.e. one
5868 that does not involve a call), then we can make things a bit more
5869 straightforward by converting the assignment to memcpy or memset. */
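  /* E.g., a variable-sized "a = b" with size SZ becomes roughly
     __builtin_memcpy (&a, &b, SZ), and an empty-CONSTRUCTOR source a
     corresponding memset (a sketch; the calls are emitted by
     gimplify_modify_expr_to_memcpy and gimplify_modify_expr_to_memset).  */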
5870 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5871 {
5872 tree from = TREE_OPERAND (*from_p, 0);
5873 tree size = TREE_OPERAND (*from_p, 1);
5874
5875 if (TREE_CODE (from) == CONSTRUCTOR)
5876 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
5877
5878 if (is_gimple_addressable (from))
5879 {
5880 *from_p = from;
5881 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
5882 pre_p);
5883 }
5884 }
5885
5886 /* Transform partial stores to non-addressable complex variables into
5887 total stores. This allows us to use real instead of virtual operands
5888 for these variables, which improves optimization. */
5889 if ((TREE_CODE (*to_p) == REALPART_EXPR
5890 || TREE_CODE (*to_p) == IMAGPART_EXPR)
5891 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
5892 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
5893
5894 /* Try to alleviate the effects of the gimplification creating artificial
5895 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5896 make sure not to create DECL_DEBUG_EXPR links across functions. */
5897 if (!gimplify_ctxp->into_ssa
5898 && VAR_P (*from_p)
5899 && DECL_IGNORED_P (*from_p)
5900 && DECL_P (*to_p)
5901 && !DECL_IGNORED_P (*to_p)
5902 && decl_function_context (*to_p) == current_function_decl
5903 && decl_function_context (*from_p) == current_function_decl)
5904 {
5905 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
5906 DECL_NAME (*from_p)
5907 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
5908 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
5909 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
5910 }
5911
5912 if (want_value && TREE_THIS_VOLATILE (*to_p))
5913 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
5914
5915 if (TREE_CODE (*from_p) == CALL_EXPR)
5916 {
5917 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5918 instead of a GIMPLE_ASSIGN. */
5919 gcall *call_stmt;
5920 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
5921 {
5922 /* Gimplify internal functions created in the FEs. */
5923 int nargs = call_expr_nargs (*from_p), i;
5924 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
5925 auto_vec<tree> vargs (nargs);
5926
5927 for (i = 0; i < nargs; i++)
5928 {
5929 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
5930 EXPR_LOCATION (*from_p));
5931 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
5932 }
5933 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
5934 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
5935 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
5936 }
5937 else
5938 {
5939 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
5940 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
5941 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
5942 tree fndecl = get_callee_fndecl (*from_p);
5943 if (fndecl
5944 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
5945 && call_expr_nargs (*from_p) == 3)
5946 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
5947 CALL_EXPR_ARG (*from_p, 0),
5948 CALL_EXPR_ARG (*from_p, 1),
5949 CALL_EXPR_ARG (*from_p, 2));
5950 else
5951 {
5952 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
5953 }
5954 }
5955 notice_special_calls (call_stmt);
5956 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
5957 gimple_call_set_lhs (call_stmt, *to_p);
5958 else if (TREE_CODE (*to_p) == SSA_NAME)
5959 /* The above is somewhat premature; avoid ICEing later for an
5960 SSA name w/o a definition. We may have uses in the GIMPLE IL.
5961 ??? This doesn't make it a default-def. */
5962 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
5963
5964 assign = call_stmt;
5965 }
5966 else
5967 {
5968 assign = gimple_build_assign (*to_p, *from_p);
5969 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
5970 if (COMPARISON_CLASS_P (*from_p))
5971 copy_warning (assign, *from_p);
5972 }
5973
5974 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
5975 {
5976 /* We should have got an SSA name from the start. */
5977 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
5978 || ! gimple_in_ssa_p (cfun));
5979 }
5980
5981 gimplify_seq_add_stmt (pre_p, assign);
5982 gsi = gsi_last (*pre_p);
5983 maybe_fold_stmt (&gsi);
5984
5985 if (want_value)
5986 {
5987 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
5988 return GS_OK;
5989 }
5990 else
5991 *expr_p = NULL;
5992
5993 return GS_ALL_DONE;
5994 }
5995
5996 /* Gimplify a comparison between two variable-sized objects. Do this
5997 with a call to BUILT_IN_MEMCMP. */
5998
5999 static enum gimplify_status
6000 gimplify_variable_sized_compare (tree *expr_p)
6001 {
6002 location_t loc = EXPR_LOCATION (*expr_p);
6003 tree op0 = TREE_OPERAND (*expr_p, 0);
6004 tree op1 = TREE_OPERAND (*expr_p, 1);
6005 tree t, arg, dest, src, expr;
6006
6007 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
6008 arg = unshare_expr (arg);
6009 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
6010 src = build_fold_addr_expr_loc (loc, op1);
6011 dest = build_fold_addr_expr_loc (loc, op0);
6012 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
6013 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
6014
6015 expr
6016 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
6017 SET_EXPR_LOCATION (expr, loc);
6018 *expr_p = expr;
6019
6020 return GS_OK;
6021 }
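/* Sketch: an equality test "OP0 == OP1" between two variable-sized
   objects becomes approximately

     __builtin_memcmp (&OP0, &OP1, SZ) == 0

   (and analogously for !=), where SZ is the TYPE_SIZE_UNIT of OP0
   with any PLACEHOLDER_EXPRs substituted.  */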
6022
6023 /* Gimplify a comparison between two aggregate objects of integral scalar
6024 mode as a comparison between the bitwise equivalent scalar values. */
6025
6026 static enum gimplify_status
6027 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
6028 {
6029 location_t loc = EXPR_LOCATION (*expr_p);
6030 tree op0 = TREE_OPERAND (*expr_p, 0);
6031 tree op1 = TREE_OPERAND (*expr_p, 1);
6032
6033 tree type = TREE_TYPE (op0);
6034 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
6035
6036 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
6037 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
6038
6039 *expr_p
6040 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
6041
6042 return GS_OK;
6043 }
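/* For example, on a target where "struct S { short a, b; }" has
   SImode, "s1 == s2" becomes

     VIEW_CONVERT_EXPR<int>(s1) == VIEW_CONVERT_EXPR<int>(s2)

   i.e. a single 32-bit integer comparison (a sketch; the scalar type
   is whatever lang_hooks.types.type_for_mode returns for the mode).  */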
6044
6045 /* Gimplify an expression sequence. This function gimplifies each
6046 expression and rewrites the original expression with the last
6047 expression of the sequence in GIMPLE form.
6048
6049 PRE_P points to the list where the side effects for all the
6050 expressions in the sequence will be emitted.
6051
6052 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
6053
6054 static enum gimplify_status
6055 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6056 {
6057 tree t = *expr_p;
6058
6059 do
6060 {
6061 tree *sub_p = &TREE_OPERAND (t, 0);
6062
6063 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
6064 gimplify_compound_expr (sub_p, pre_p, false);
6065 else
6066 gimplify_stmt (sub_p, pre_p);
6067
6068 t = TREE_OPERAND (t, 1);
6069 }
6070 while (TREE_CODE (t) == COMPOUND_EXPR);
6071
6072 *expr_p = t;
6073 if (want_value)
6074 return GS_OK;
6075 else
6076 {
6077 gimplify_stmt (expr_p, pre_p);
6078 return GS_ALL_DONE;
6079 }
6080 }
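/* E.g., gimplifying "x = (a = 1, b = 2, c)" emits "a = 1;" and
   "b = 2;" into PRE_P and leaves "c" as the value of the
   COMPOUND_EXPR (a sketch of the want_value case).  */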
6081
6082 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
6083 gimplify. After gimplification, EXPR_P will point to a new temporary
6084 that holds the original value of the SAVE_EXPR node.
6085
6086 PRE_P points to the list where side effects that must happen before
6087 *EXPR_P should be stored. */
6088
6089 static enum gimplify_status
6090 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6091 {
6092 enum gimplify_status ret = GS_ALL_DONE;
6093 tree val;
6094
6095 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
6096 val = TREE_OPERAND (*expr_p, 0);
6097
6098 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
6099 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
6100 {
6101 /* The operand may be a void-valued expression. It is
6102 being executed only for its side-effects. */
6103 if (TREE_TYPE (val) == void_type_node)
6104 {
6105 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6106 is_gimple_stmt, fb_none);
6107 val = NULL;
6108 }
6109 else
6110 /* The temporary may not be an SSA name as later abnormal and EH
6111 control flow may invalidate use/def domination. When in SSA
6112 form, assume there are no such issues and SAVE_EXPRs only
6113 appear via GENERIC foldings. */
6114 val = get_initialized_tmp_var (val, pre_p, post_p,
6115 gimple_in_ssa_p (cfun));
6116
6117 TREE_OPERAND (*expr_p, 0) = val;
6118 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
6119 }
6120
6121 *expr_p = val;
6122
6123 return ret;
6124 }
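/* Sketch: a SAVE_EXPR <n * 4> shared between several uses of an
   expression is evaluated exactly once, e.g.

     D.1 = n * 4;

   and every occurrence of the SAVE_EXPR then refers to D.1
   (hypothetical temporary name).  */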
6125
6126 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6127
6128 unary_expr
6129 : ...
6130 | '&' varname
6131 ...
6132
6133 PRE_P points to the list where side effects that must happen before
6134 *EXPR_P should be stored.
6135
6136 POST_P points to the list where side effects that must happen after
6137 *EXPR_P should be stored. */
6138
6139 static enum gimplify_status
6140 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6141 {
6142 tree expr = *expr_p;
6143 tree op0 = TREE_OPERAND (expr, 0);
6144 enum gimplify_status ret;
6145 location_t loc = EXPR_LOCATION (*expr_p);
6146
6147 switch (TREE_CODE (op0))
6148 {
6149 case INDIRECT_REF:
6150 do_indirect_ref:
6151 /* Check if we are dealing with an expression of the form '&*ptr'.
6152 While the front end folds away '&*ptr' into 'ptr', these
6153 expressions may be generated internally by the compiler (e.g.,
6154 builtins like __builtin_va_end). */
6155 /* Caution: the silent array decomposition semantics we allow for
6156 ADDR_EXPR mean we can't always discard the pair. */
6157 /* Gimplification of the ADDR_EXPR operand may drop
6158 cv-qualification conversions, so make sure we add them if
6159 needed. */
6160 {
6161 tree op00 = TREE_OPERAND (op0, 0);
6162 tree t_expr = TREE_TYPE (expr);
6163 tree t_op00 = TREE_TYPE (op00);
6164
6165 if (!useless_type_conversion_p (t_expr, t_op00))
6166 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
6167 *expr_p = op00;
6168 ret = GS_OK;
6169 }
6170 break;
6171
6172 case VIEW_CONVERT_EXPR:
6173 /* Take the address of our operand and then convert it to the type of
6174 this ADDR_EXPR.
6175
6176 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
6177 all clear. The impact of this transformation is even less clear. */
6178
6179 /* If the operand is a useless conversion, look through it. Doing so
6180 guarantees that the ADDR_EXPR and its operand will remain of the
6181 same type. */
6182 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
6183 op0 = TREE_OPERAND (op0, 0);
6184
6185 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
6186 build_fold_addr_expr_loc (loc,
6187 TREE_OPERAND (op0, 0)));
6188 ret = GS_OK;
6189 break;
6190
6191 case MEM_REF:
6192 if (integer_zerop (TREE_OPERAND (op0, 1)))
6193 goto do_indirect_ref;
6194
6195 /* fall through */
6196
6197 default:
6198 /* If we see a call to a declared builtin or see its address
6199 being taken (we can unify those cases here) then we can mark
6200 the builtin for implicit generation by GCC. */
6201 if (TREE_CODE (op0) == FUNCTION_DECL
6202 && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
6203 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
6204 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
6205
6206 /* We use fb_either here because the C frontend sometimes takes
6207 the address of a call that returns a struct; see
6208 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6209 the implied temporary explicit. */
6210
6211 /* Make the operand addressable. */
6212 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
6213 is_gimple_addressable, fb_either);
6214 if (ret == GS_ERROR)
6215 break;
6216
6217 /* Then mark it. Beware that it may not be possible to do so directly
6218 if a temporary has been created by the gimplification. */
6219 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
6220
6221 op0 = TREE_OPERAND (expr, 0);
6222
6223 /* For various reasons, the gimplification of the expression
6224 may have made a new INDIRECT_REF. */
6225 if (TREE_CODE (op0) == INDIRECT_REF
6226 || (TREE_CODE (op0) == MEM_REF
6227 && integer_zerop (TREE_OPERAND (op0, 1))))
6228 goto do_indirect_ref;
6229
6230 mark_addressable (TREE_OPERAND (expr, 0));
6231
6232 /* The FEs may end up building ADDR_EXPRs early on a decl with
6233 an incomplete type. Re-build ADDR_EXPRs in canonical form
6234 here. */
6235 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
6236 *expr_p = build_fold_addr_expr (op0);
6237
6238 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6239 recompute_tree_invariant_for_addr_expr (*expr_p);
6240
6241 /* If we re-built the ADDR_EXPR add a conversion to the original type
6242 if required. */
6243 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
6244 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
6245
6246 break;
6247 }
6248
6249 return ret;
6250 }
6251
6252 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
6253 value; output operands should be a gimple lvalue. */
6254
6255 static enum gimplify_status
6256 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6257 {
6258 tree expr;
6259 int noutputs;
6260 const char **oconstraints;
6261 int i;
6262 tree link;
6263 const char *constraint;
6264 bool allows_mem, allows_reg, is_inout;
6265 enum gimplify_status ret, tret;
6266 gasm *stmt;
6267 vec<tree, va_gc> *inputs;
6268 vec<tree, va_gc> *outputs;
6269 vec<tree, va_gc> *clobbers;
6270 vec<tree, va_gc> *labels;
6271 tree link_next;
6272
6273 expr = *expr_p;
6274 noutputs = list_length (ASM_OUTPUTS (expr));
6275 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
6276
6277 inputs = NULL;
6278 outputs = NULL;
6279 clobbers = NULL;
6280 labels = NULL;
6281
6282 ret = GS_ALL_DONE;
6283 link_next = NULL_TREE;
6284 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6285 {
6286 bool ok;
6287 size_t constraint_len;
6288
6289 link_next = TREE_CHAIN (link);
6290
6291 oconstraints[i]
6292 = constraint
6293 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6294 constraint_len = strlen (constraint);
6295 if (constraint_len == 0)
6296 continue;
6297
6298 ok = parse_output_constraint (&constraint, i, 0, 0,
6299 &allows_mem, &allows_reg, &is_inout);
6300 if (!ok)
6301 {
6302 ret = GS_ERROR;
6303 is_inout = false;
6304 }
6305
6306 /* If we can't make copies, we can only accept memory.
6307 Similarly for VLAs. */
6308 tree outtype = TREE_TYPE (TREE_VALUE (link));
6309 if (outtype != error_mark_node
6310 && (TREE_ADDRESSABLE (outtype)
6311 || !COMPLETE_TYPE_P (outtype)
6312 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype))))
6313 {
6314 if (allows_mem)
6315 allows_reg = 0;
6316 else
6317 {
6318 error ("impossible constraint in %<asm%>");
6319 error ("non-memory output %d must stay in memory", i);
6320 return GS_ERROR;
6321 }
6322 }
6323
6324 if (!allows_reg && allows_mem)
6325 mark_addressable (TREE_VALUE (link));
6326
6327 tree orig = TREE_VALUE (link);
6328 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6329 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6330 fb_lvalue | fb_mayfail);
6331 if (tret == GS_ERROR)
6332 {
6333 if (orig != error_mark_node)
6334 error ("invalid lvalue in %<asm%> output %d", i);
6335 ret = tret;
6336 }
6337
6338 /* If the constraint does not allow memory, make sure we gimplify
6339 the operand to a register if it is not one already but its base
6340 is. This happens for complex and vector components. */
6341 if (!allows_mem)
6342 {
6343 tree op = TREE_VALUE (link);
6344 if (! is_gimple_val (op)
6345 && is_gimple_reg_type (TREE_TYPE (op))
6346 && is_gimple_reg (get_base_address (op)))
6347 {
6348 tree tem = create_tmp_reg (TREE_TYPE (op));
6349 tree ass;
6350 if (is_inout)
6351 {
6352 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6353 tem, unshare_expr (op));
6354 gimplify_and_add (ass, pre_p);
6355 }
6356 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6357 gimplify_and_add (ass, post_p);
6358
6359 TREE_VALUE (link) = tem;
6360 tret = GS_OK;
6361 }
6362 }
6363
6364 vec_safe_push (outputs, link);
6365 TREE_CHAIN (link) = NULL_TREE;
6366
6367 if (is_inout)
6368 {
6369 /* An input/output operand. To give the optimizers more
6370 flexibility, split it into separate input and output
6371 operands. */
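	  /* E.g., the in/out operand "+r" (x) becomes the output
	     "=r" (x) plus the matching input "0" (x), where 0 is this
	     operand's number.  */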
6372 tree input;
6373 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6374 char buf[11];
6375
6376 /* Turn the in/out constraint into an output constraint. */
6377 char *p = xstrdup (constraint);
6378 p[0] = '=';
6379 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6380
6381 /* And add a matching input constraint. */
6382 if (allows_reg)
6383 {
6384 sprintf (buf, "%u", i);
6385
6386 /* If there are multiple alternatives in the constraint,
6387 handle each of them individually. Those that allow a register
6388 will be replaced with the operand number; the others will stay
6389 unchanged. */
6390 if (strchr (p, ',') != NULL)
6391 {
6392 size_t len = 0, buflen = strlen (buf);
6393 char *beg, *end, *str, *dst;
6394
6395 for (beg = p + 1;;)
6396 {
6397 end = strchr (beg, ',');
6398 if (end == NULL)
6399 end = strchr (beg, '\0');
6400 if ((size_t) (end - beg) < buflen)
6401 len += buflen + 1;
6402 else
6403 len += end - beg + 1;
6404 if (*end)
6405 beg = end + 1;
6406 else
6407 break;
6408 }
6409
6410 str = (char *) alloca (len);
6411 for (beg = p + 1, dst = str;;)
6412 {
6413 const char *tem;
6414 bool mem_p, reg_p, inout_p;
6415
6416 end = strchr (beg, ',');
6417 if (end)
6418 *end = '\0';
6419 beg[-1] = '=';
6420 tem = beg - 1;
6421 parse_output_constraint (&tem, i, 0, 0,
6422 &mem_p, &reg_p, &inout_p);
6423 if (dst != str)
6424 *dst++ = ',';
6425 if (reg_p)
6426 {
6427 memcpy (dst, buf, buflen);
6428 dst += buflen;
6429 }
6430 else
6431 {
6432 if (end)
6433 len = end - beg;
6434 else
6435 len = strlen (beg);
6436 memcpy (dst, beg, len);
6437 dst += len;
6438 }
6439 if (end)
6440 beg = end + 1;
6441 else
6442 break;
6443 }
6444 *dst = '\0';
6445 input = build_string (dst - str, str);
6446 }
6447 else
6448 input = build_string (strlen (buf), buf);
6449 }
6450 else
6451 input = build_string (constraint_len - 1, constraint + 1);
6452
6453 free (p);
6454
6455 input = build_tree_list (build_tree_list (NULL_TREE, input),
6456 unshare_expr (TREE_VALUE (link)));
6457 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
6458 }
6459 }
6460
6461 link_next = NULL_TREE;
6462 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6463 {
6464 link_next = TREE_CHAIN (link);
6465 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6466 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6467 oconstraints, &allows_mem, &allows_reg);
6468
6469 /* If we can't make copies, we can only accept memory. */
6470 tree intype = TREE_TYPE (TREE_VALUE (link));
6471 if (intype != error_mark_node
6472 && (TREE_ADDRESSABLE (intype)
6473 || !COMPLETE_TYPE_P (intype)
6474 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype))))
6475 {
6476 if (allows_mem)
6477 allows_reg = 0;
6478 else
6479 {
6480 error ("impossible constraint in %<asm%>");
6481 error ("non-memory input %d must stay in memory", i);
6482 return GS_ERROR;
6483 }
6484 }
6485
6486 /* If the operand is a memory input, it should be an lvalue. */
6487 if (!allows_reg && allows_mem)
6488 {
6489 tree inputv = TREE_VALUE (link);
6490 STRIP_NOPS (inputv);
6491 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6492 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6493 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6494 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6495 || TREE_CODE (inputv) == MODIFY_EXPR)
6496 TREE_VALUE (link) = error_mark_node;
6497 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6498 is_gimple_lvalue, fb_lvalue | fb_mayfail);
6499 if (tret != GS_ERROR)
6500 {
6501 /* Unlike output operands, memory inputs are not guaranteed
6502 to be lvalues by the FE, and while the expressions are
6503 marked addressable there, if the input is e.g. a statement
6504 expression, temporaries in it might not end up being
6505 addressable. They might already be used in the IL and thus
6506 it is too late to make them addressable now. */
6507 tree x = TREE_VALUE (link);
6508 while (handled_component_p (x))
6509 x = TREE_OPERAND (x, 0);
6510 if (TREE_CODE (x) == MEM_REF
6511 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6512 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6513 if ((VAR_P (x)
6514 || TREE_CODE (x) == PARM_DECL
6515 || TREE_CODE (x) == RESULT_DECL)
6516 && !TREE_ADDRESSABLE (x)
6517 && is_gimple_reg (x))
6518 {
6519 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6520 input_location), 0,
6521 "memory input %d is not directly addressable",
6522 i);
6523 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6524 }
6525 }
6526 mark_addressable (TREE_VALUE (link));
6527 if (tret == GS_ERROR)
6528 {
6529 if (inputv != error_mark_node)
6530 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6531 "memory input %d is not directly addressable", i);
6532 ret = tret;
6533 }
6534 }
6535 else
6536 {
6537 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6538 is_gimple_asm_val, fb_rvalue);
6539 if (tret == GS_ERROR)
6540 ret = tret;
6541 }
6542
6543 TREE_CHAIN (link) = NULL_TREE;
6544 vec_safe_push (inputs, link);
6545 }
6546
6547 link_next = NULL_TREE;
6548 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6549 {
6550 link_next = TREE_CHAIN (link);
6551 TREE_CHAIN (link) = NULL_TREE;
6552 vec_safe_push (clobbers, link);
6553 }
6554
6555 link_next = NULL_TREE;
6556 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6557 {
6558 link_next = TREE_CHAIN (link);
6559 TREE_CHAIN (link) = NULL_TREE;
6560 vec_safe_push (labels, link);
6561 }
6562
6563 /* Do not add ASMs with errors to the gimple IL stream. */
6564 if (ret != GS_ERROR)
6565 {
6566 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6567 inputs, outputs, clobbers, labels);
6568
6569 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
6570 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6571 gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));
6572
6573 gimplify_seq_add_stmt (pre_p, stmt);
6574 }
6575
6576 return ret;
6577 }
6578
6579 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6580 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6581 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6582 return to this function.
6583
6584 FIXME should we complexify the prequeue handling instead? Or use flags
6585 for all the cleanups and let the optimizer tighten them up? The current
6586 code seems pretty fragile; it will break on a cleanup within any
6587 non-conditional nesting. But any such nesting would be broken, anyway;
6588 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6589 and continues out of it. We can do that at the RTL level, though, so
6590 having an optimizer to tighten up try/finally regions would be a Good
6591 Thing. */
6592
6593 static enum gimplify_status
6594 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6595 {
6596 gimple_stmt_iterator iter;
6597 gimple_seq body_sequence = NULL;
6598
6599 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6600
6601 /* We only care about the number of conditions between the innermost
6602 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6603 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6604 int old_conds = gimplify_ctxp->conditions;
6605 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6606 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6607 gimplify_ctxp->conditions = 0;
6608 gimplify_ctxp->conditional_cleanups = NULL;
6609 gimplify_ctxp->in_cleanup_point_expr = true;
6610
6611 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6612
6613 gimplify_ctxp->conditions = old_conds;
6614 gimplify_ctxp->conditional_cleanups = old_cleanups;
6615 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6616
6617 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6618 {
6619 gimple *wce = gsi_stmt (iter);
6620
6621 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6622 {
6623 if (gsi_one_before_end_p (iter))
6624 {
6625 /* Note that gsi_insert_seq_before and gsi_remove do not
6626 scan operands, unlike some other sequence mutators. */
6627 if (!gimple_wce_cleanup_eh_only (wce))
6628 gsi_insert_seq_before_without_update (&iter,
6629 gimple_wce_cleanup (wce),
6630 GSI_SAME_STMT);
6631 gsi_remove (&iter, true);
6632 break;
6633 }
6634 else
6635 {
6636 gtry *gtry;
6637 gimple_seq seq;
6638 enum gimple_try_flags kind;
6639
6640 if (gimple_wce_cleanup_eh_only (wce))
6641 kind = GIMPLE_TRY_CATCH;
6642 else
6643 kind = GIMPLE_TRY_FINALLY;
6644 seq = gsi_split_seq_after (iter);
6645
6646 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
6647 /* Do not use gsi_replace here, as it may scan operands.
6648 We want to do a simple structural modification only. */
6649 gsi_set_stmt (&iter, gtry);
6650 iter = gsi_start (gtry->eval);
6651 }
6652 }
6653 else
6654 gsi_next (&iter);
6655 }
6656
6657 gimplify_seq_add_seq (pre_p, body_sequence);
6658 if (temp)
6659 {
6660 *expr_p = temp;
6661 return GS_OK;
6662 }
6663 else
6664 {
6665 *expr_p = NULL;
6666 return GS_ALL_DONE;
6667 }
6668 }
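/* Sketch: a gimplified body

     stmt1;
     WCE <cleanup>;
     stmt2;

   is rewritten here as

     stmt1;
     try { stmt2; } finally { cleanup; }

   (or a try/catch when the cleanup is EH-only).  */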
6669
6670 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6671 is the cleanup action required. EH_ONLY is true if the cleanup should
6672 only be executed if an exception is thrown, not on normal exit.
6673 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6674 only valid for clobbers. */
6675
6676 static void
6677 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
6678 bool force_uncond = false)
6679 {
6680 gimple *wce;
6681 gimple_seq cleanup_stmts = NULL;
6682
6683 /* Errors can result in improperly nested cleanups, which results in
6684 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6685 if (seen_error ())
6686 return;
6687
6688 if (gimple_conditional_context ())
6689 {
6690 /* If we're in a conditional context, this is more complex. We only
6691 want to run the cleanup if we actually ran the initialization that
6692 necessitates it, but we want to run it after the end of the
6693 conditional context. So we wrap the try/finally around the
6694 condition and use a flag to determine whether or not to actually
6695 run the destructor. Thus
6696
6697 test ? f(A()) : 0
6698
6699 becomes (approximately)
6700
6701 flag = 0;
6702 try {
6703 if (test) { A::A(temp); flag = 1; val = f(temp); }
6704 else { val = 0; }
6705 } finally {
6706 if (flag) A::~A(temp);
6707 }
6708 val
6709 */
6710 if (force_uncond)
6711 {
6712 gimplify_stmt (&cleanup, &cleanup_stmts);
6713 wce = gimple_build_wce (cleanup_stmts);
6714 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6715 }
6716 else
6717 {
6718 tree flag = create_tmp_var (boolean_type_node, "cleanup");
6719 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
6720 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
6721
6722 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
6723 gimplify_stmt (&cleanup, &cleanup_stmts);
6724 wce = gimple_build_wce (cleanup_stmts);
6725
6726 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
6727 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6728 gimplify_seq_add_stmt (pre_p, ftrue);
6729
6730 /* Because of this manipulation, and the EH edges that jump
6731 threading cannot redirect, the temporary (VAR) will appear
6732 to be used uninitialized. Don't warn. */
6733 suppress_warning (var, OPT_Wuninitialized);
6734 }
6735 }
6736 else
6737 {
6738 gimplify_stmt (&cleanup, &cleanup_stmts);
6739 wce = gimple_build_wce (cleanup_stmts);
6740 gimple_wce_set_cleanup_eh_only (wce, eh_only);
6741 gimplify_seq_add_stmt (pre_p, wce);
6742 }
6743 }
6744
6745 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
6746
6747 static enum gimplify_status
6748 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6749 {
6750 tree targ = *expr_p;
6751 tree temp = TARGET_EXPR_SLOT (targ);
6752 tree init = TARGET_EXPR_INITIAL (targ);
6753 enum gimplify_status ret;
6754
6755 bool unpoison_empty_seq = false;
6756 gimple_stmt_iterator unpoison_it;
6757
6758 if (init)
6759 {
6760 tree cleanup = NULL_TREE;
6761
6762 /* TARGET_EXPR temps aren't part of the enclosing block, so add this
6763 one to the temps list. Also handle variable-length TARGET_EXPRs. */
6764 if (!poly_int_tree_p (DECL_SIZE (temp)))
6765 {
6766 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
6767 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
6768 gimplify_vla_decl (temp, pre_p);
6769 }
6770 else
6771 {
6772 /* Save the location where we need to place unpoisoning. It's
6773 possible that the variable will later become needs_to_live_in_memory. */
6774 unpoison_it = gsi_last (*pre_p);
6775 unpoison_empty_seq = gsi_end_p (unpoison_it);
6776
6777 gimple_add_tmp_var (temp);
6778 }
6779
6780 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6781 expression is supposed to initialize the slot. */
6782 if (VOID_TYPE_P (TREE_TYPE (init)))
6783 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6784 else
6785 {
6786 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
6787 init = init_expr;
6788 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6789 init = NULL;
6790 ggc_free (init_expr);
6791 }
6792 if (ret == GS_ERROR)
6793 {
6794 /* PR c++/28266 Make sure this is expanded only once. */
6795 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6796 return GS_ERROR;
6797 }
6798 if (init)
6799 gimplify_and_add (init, pre_p);
6800
6801 /* If needed, push the cleanup for the temp. */
6802 if (TARGET_EXPR_CLEANUP (targ))
6803 {
6804 if (CLEANUP_EH_ONLY (targ))
6805 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
6806 CLEANUP_EH_ONLY (targ), pre_p);
6807 else
6808 cleanup = TARGET_EXPR_CLEANUP (targ);
6809 }
6810
6811 /* Add a clobber for the temporary going out of scope, like
6812 gimplify_bind_expr. */
6813 if (gimplify_ctxp->in_cleanup_point_expr
6814 && needs_to_live_in_memory (temp))
6815 {
6816 if (flag_stack_reuse == SR_ALL)
6817 {
6818 tree clobber = build_clobber (TREE_TYPE (temp));
6819 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
6820 gimple_push_cleanup (temp, clobber, false, pre_p, true);
6821 }
6822 if (asan_poisoned_variables
6823 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
6824 && !TREE_STATIC (temp)
6825 && dbg_cnt (asan_use_after_scope)
6826 && !gimplify_omp_ctxp)
6827 {
6828 tree asan_cleanup = build_asan_poison_call_expr (temp);
6829 if (asan_cleanup)
6830 {
6831 if (unpoison_empty_seq)
6832 unpoison_it = gsi_start (*pre_p);
6833
6834 asan_poison_variable (temp, false, &unpoison_it,
6835 unpoison_empty_seq);
6836 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
6837 }
6838 }
6839 }
6840 if (cleanup)
6841 gimple_push_cleanup (temp, cleanup, false, pre_p);
6842
6843 /* Only expand this once. */
6844 TREE_OPERAND (targ, 3) = init;
6845 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6846 }
6847 else
6848 /* We should have expanded this before. */
6849 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
6850
6851 *expr_p = temp;
6852 return GS_OK;
6853 }
6854
6855 /* Gimplification of expression trees. */
6856
6857 /* Gimplify an expression which appears at statement context. The
6858 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6859 NULL, a new sequence is allocated.
6860
6861 Return true if we actually added a statement to the queue. */
6862
6863 bool
6864 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6865 {
6866 gimple_seq_node last;
6867
6868 last = gimple_seq_last (*seq_p);
6869 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6870 return last != gimple_seq_last (*seq_p);
6871 }
6872
6873 /* Add FIRSTPRIVATE entries for DECL to CTX and the surrounding OpenMP
6874 parallels. If entries already exist, force them to be some flavor of private.
6875 If there is no enclosing parallel, do nothing. */
6876
6877 void
6878 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
6879 {
6880 splay_tree_node n;
6881
6882 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
6883 return;
6884
6885 do
6886 {
6887 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6888 if (n != NULL)
6889 {
6890 if (n->value & GOVD_SHARED)
6891 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
6892 else if (n->value & GOVD_MAP)
6893 n->value |= GOVD_MAP_TO_ONLY;
6894 else
6895 return;
6896 }
6897 else if ((ctx->region_type & ORT_TARGET) != 0)
6898 {
6899 if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
6900 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6901 else
6902 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
6903 }
6904 else if (ctx->region_type != ORT_WORKSHARE
6905 && ctx->region_type != ORT_TASKGROUP
6906 && ctx->region_type != ORT_SIMD
6907 && ctx->region_type != ORT_ACC
6908 && !(ctx->region_type & ORT_TARGET_DATA))
6909 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6910
6911 ctx = ctx->outer_context;
6912 }
6913 while (ctx);
6914 }
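/* E.g., for a VLA "char a[n]" referenced inside a parallel region,
   the gimplified size temporary is made firstprivate in the
   surrounding parallels through this function, so the size is
   available inside the region (a sketch of the typical use).  */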
6915
6916 /* Similarly for each of the type sizes of TYPE. */
6917
6918 static void
6919 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
6920 {
6921 if (type == NULL || type == error_mark_node)
6922 return;
6923 type = TYPE_MAIN_VARIANT (type);
6924
6925 if (ctx->privatized_types->add (type))
6926 return;
6927
6928 switch (TREE_CODE (type))
6929 {
6930 case INTEGER_TYPE:
6931 case ENUMERAL_TYPE:
6932 case BOOLEAN_TYPE:
6933 case REAL_TYPE:
6934 case FIXED_POINT_TYPE:
6935 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
6936 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
6937 break;
6938
6939 case ARRAY_TYPE:
6940 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6941 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
6942 break;
6943
6944 case RECORD_TYPE:
6945 case UNION_TYPE:
6946 case QUAL_UNION_TYPE:
6947 {
6948 tree field;
6949 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
6950 if (TREE_CODE (field) == FIELD_DECL)
6951 {
6952 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
6953 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
6954 }
6955 }
6956 break;
6957
6958 case POINTER_TYPE:
6959 case REFERENCE_TYPE:
6960 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6961 break;
6962
6963 default:
6964 break;
6965 }
6966
6967 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
6968 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
6969 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
6970 }
6971
6972 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6973
6974 static void
6975 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
6976 {
6977 splay_tree_node n;
6978 unsigned int nflags;
6979 tree t;
6980
6981 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
6982 return;
6983
6984 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
6985 there are constructors involved somewhere. The exception is a shared
6986 clause, since nothing is privatized in that case. */
6987 if ((flags & GOVD_SHARED) == 0
6988 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
6989 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
6990 flags |= GOVD_SEEN;
6991
6992 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6993 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6994 {
6995 /* We shouldn't be re-adding the decl with the same data
6996 sharing class. */
6997 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
6998 nflags = n->value | flags;
6999 /* The only combination of data sharing classes we should see is
7000 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
7001 reduction variables to be used in data sharing clauses. */
7002 gcc_assert ((ctx->region_type & ORT_ACC) != 0
7003 || ((nflags & GOVD_DATA_SHARE_CLASS)
7004 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
7005 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
7006 n->value = nflags;
7007 return;
7008 }
7009
7010 /* When adding a variable-sized variable, we have to handle all sorts
7011 of additional bits of data: the pointer replacement variable, and
7012 the parameters of the type. */
7013 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7014 {
7015 /* Add the pointer replacement variable as PRIVATE if the variable
7016 replacement is private, else FIRSTPRIVATE since we'll need the
7017 address of the original variable either for SHARED, or for the
7018 copy into or out of the context. */
7019 if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
7020 {
7021 if (flags & GOVD_MAP)
7022 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
7023 else if (flags & GOVD_PRIVATE)
7024 nflags = GOVD_PRIVATE;
7025 else if (((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7026 && (flags & GOVD_FIRSTPRIVATE))
7027 || (ctx->region_type == ORT_TARGET_DATA
7028 && (flags & GOVD_DATA_SHARE_CLASS) == 0))
7029 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
7030 else
7031 nflags = GOVD_FIRSTPRIVATE;
7032 nflags |= flags & GOVD_SEEN;
7033 t = DECL_VALUE_EXPR (decl);
7034 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7035 t = TREE_OPERAND (t, 0);
7036 gcc_assert (DECL_P (t));
7037 omp_add_variable (ctx, t, nflags);
7038 }
7039
7040 /* Add all of the variable and type parameters (which should have
7041 been gimplified to a formal temporary) as FIRSTPRIVATE. */
7042 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
7043 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
7044 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7045
7046 /* The variable-sized variable itself is never SHARED, only some form
7047 of PRIVATE. The sharing would take place via the pointer variable
7048 which we remapped above. */
7049 if (flags & GOVD_SHARED)
7050 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
7051 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
7052
7053 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
7054 alloca statement we generate for the variable, so make sure it
7055 is available. This isn't automatically needed for the SHARED
7056 case, since we won't be allocating local storage then.
7057 For local variables TYPE_SIZE_UNIT might not be gimplified yet;
7058 in that case omp_notice_variable will be called later on,
7059 when it is gimplified. */
7060 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
7061 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
7062 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
7063 }
7064 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
7065 && lang_hooks.decls.omp_privatize_by_reference (decl))
7066 {
7067 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7068
7069 /* Similar to the direct variable sized case above, we'll need the
7070 size of references being privatized. */
7071 if ((flags & GOVD_SHARED) == 0)
7072 {
7073 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7074 if (t && DECL_P (t))
7075 omp_notice_variable (ctx, t, true);
7076 }
7077 }
7078
7079 if (n != NULL)
7080 n->value |= flags;
7081 else
7082 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
7083
7084 /* For reductions clauses in OpenACC loop directives, by default create a
7085 copy clause on the enclosing parallel construct for carrying back the
7086 results. */
7087 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
7088 {
7089 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
7090 while (outer_ctx)
7091 {
7092 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
7093 if (n != NULL)
7094 {
7095 /* Ignore local variables and explicitly declared clauses. */
7096 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
7097 break;
7098 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
7099 {
7100 /* According to the OpenACC spec, such a reduction variable
7101 should already have a copy map on a kernels construct;
7102 verify that here. */
7103 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
7104 && (n->value & GOVD_MAP));
7105 }
7106 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7107 {
7108 /* Remove firstprivate and make it a copy map. */
7109 n->value &= ~GOVD_FIRSTPRIVATE;
7110 n->value |= GOVD_MAP;
7111 }
7112 }
7113 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7114 {
7115 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
7116 GOVD_MAP | GOVD_SEEN);
7117 break;
7118 }
7119 outer_ctx = outer_ctx->outer_context;
7120 }
7121 }
7122 }
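
/* A sketch of the OpenACC reduction handling above (placeholder
   names):

     #pragma acc parallel
     #pragma acc loop reduction (+:sum)
       for (i = 0; i < n; i++)
         sum += a[i];

   SUM gets an implicit GOVD_MAP entry on the enclosing parallel
   context so the reduction result is copied back to the host, unless
   an explicit data clause or local declaration is already present.  */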
7123
7124 /* Notice a threadprivate variable DECL used in OMP context CTX.
7125 This just prints out diagnostics about threadprivate variable uses
7126 in untied tasks. If DECL2 is non-NULL, prevent this warning
7127 on that variable. */
7128
7129 static bool
7130 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
7131 tree decl2)
7132 {
7133 splay_tree_node n;
7134 struct gimplify_omp_ctx *octx;
7135
7136 for (octx = ctx; octx; octx = octx->outer_context)
7137 if ((octx->region_type & ORT_TARGET) != 0
7138 || octx->order_concurrent)
7139 {
7140 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
7141 if (n == NULL)
7142 {
7143 if (octx->order_concurrent)
7144 {
7145 error ("threadprivate variable %qE used in a region with"
7146 " %<order(concurrent)%> clause", DECL_NAME (decl));
7147 inform (octx->location, "enclosing region");
7148 }
7149 else
7150 {
7151 error ("threadprivate variable %qE used in target region",
7152 DECL_NAME (decl));
7153 inform (octx->location, "enclosing target region");
7154 }
7155 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
7156 }
7157 if (decl2)
7158 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
7159 }
7160
7161 if (ctx->region_type != ORT_UNTIED_TASK)
7162 return false;
7163 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7164 if (n == NULL)
7165 {
7166 error ("threadprivate variable %qE used in untied task",
7167 DECL_NAME (decl));
7168 inform (ctx->location, "enclosing task");
7169 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
7170 }
7171 if (decl2)
7172 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
7173 return false;
7174 }
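
/* A sketch of a use diagnosed above (hypothetical code):

     int tp;
     #pragma omp threadprivate (tp)
     ...
     #pragma omp task untied
       tp++;   // error: threadprivate variable 'tp' used in untied task
   */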
7175
7176 /* Return true if global var DECL is device resident. */
7177
7178 static bool
7179 device_resident_p (tree decl)
7180 {
7181 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
7182
7183 if (!attr)
7184 return false;
7185
7186 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
7187 {
7188 tree c = TREE_VALUE (t);
7189 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
7190 return true;
7191 }
7192
7193 return false;
7194 }
7195
7196 /* Return true if DECL has an ACC DECLARE attribute. */
7197
7198 static bool
7199 is_oacc_declared (tree decl)
7200 {
7201 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
7202 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
7203 return declared != NULL_TREE;
7204 }
7205
7206 /* Determine outer default flags for DECL mentioned in an OMP region
7207 but not declared in an enclosing clause.
7208
7209 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7210 remapped firstprivate instead of shared. To some extent this is
7211 addressed in omp_firstprivatize_type_sizes, but not
7212 effectively. */
7213
7214 static unsigned
7215 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
7216 bool in_code, unsigned flags)
7217 {
7218 enum omp_clause_default_kind default_kind = ctx->default_kind;
7219 enum omp_clause_default_kind kind;
7220
7221 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
7222 if (ctx->region_type & ORT_TASK)
7223 {
7224 tree detach_clause = omp_find_clause (ctx->clauses, OMP_CLAUSE_DETACH);
7225
7226 /* The event handle specified by a detach clause should always be
7227 firstprivate, regardless of the current default. */
7228 if (detach_clause && OMP_CLAUSE_DECL (detach_clause) == decl)
7229 kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
7230 }
7231 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
7232 default_kind = kind;
7233 else if (VAR_P (decl) && TREE_STATIC (decl) && DECL_IN_CONSTANT_POOL (decl))
7234 default_kind = OMP_CLAUSE_DEFAULT_SHARED;
7235
7236 switch (default_kind)
7237 {
7238 case OMP_CLAUSE_DEFAULT_NONE:
7239 {
7240 const char *rtype;
7241
7242 if (ctx->region_type & ORT_PARALLEL)
7243 rtype = "parallel";
7244 else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
7245 rtype = "taskloop";
7246 else if (ctx->region_type & ORT_TASK)
7247 rtype = "task";
7248 else if (ctx->region_type & ORT_TEAMS)
7249 rtype = "teams";
7250 else
7251 gcc_unreachable ();
7252
7253 error ("%qE not specified in enclosing %qs",
7254 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
7255 inform (ctx->location, "enclosing %qs", rtype);
7256 }
7257 /* FALLTHRU */
7258 case OMP_CLAUSE_DEFAULT_SHARED:
7259 flags |= GOVD_SHARED;
7260 break;
7261 case OMP_CLAUSE_DEFAULT_PRIVATE:
7262 flags |= GOVD_PRIVATE;
7263 break;
7264 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
7265 flags |= GOVD_FIRSTPRIVATE;
7266 break;
7267 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
7268 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7269 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
7270 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
7271 {
7272 omp_notice_variable (octx, decl, in_code);
7273 for (; octx; octx = octx->outer_context)
7274 {
7275 splay_tree_node n2;
7276
7277 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
7278 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
7279 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
7280 continue;
7281 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
7282 {
7283 flags |= GOVD_FIRSTPRIVATE;
7284 goto found_outer;
7285 }
7286 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
7287 {
7288 flags |= GOVD_SHARED;
7289 goto found_outer;
7290 }
7291 }
7292 }
7293
7294 if (TREE_CODE (decl) == PARM_DECL
7295 || (!is_global_var (decl)
7296 && DECL_CONTEXT (decl) == current_function_decl))
7297 flags |= GOVD_FIRSTPRIVATE;
7298 else
7299 flags |= GOVD_SHARED;
7300 found_outer:
7301 break;
7302
7303 default:
7304 gcc_unreachable ();
7305 }
7306
7307 return flags;
7308 }
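
/* A sketch of the default(none) diagnostic above (placeholder names):

     int x = 0;
     #pragma omp parallel default (none)
       x++;   // error: 'x' not specified in enclosing 'parallel'

   With default(shared), default(private) or default(firstprivate) the
   corresponding GOVD_* flag is set instead of emitting an error.  */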
7309
7310
7311 /* Determine outer default flags for DECL mentioned in an OACC region
7312 but not declared in an enclosing clause. */
7313
7314 static unsigned
7315 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
7316 {
7317 const char *rkind;
7318 bool on_device = false;
7319 bool is_private = false;
7320 bool declared = is_oacc_declared (decl);
7321 tree type = TREE_TYPE (decl);
7322
7323 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7324 type = TREE_TYPE (type);
7325
7326 /* For Fortran COMMON blocks, only the variables used in those blocks are
7327 transferred and remapped. The block itself will have a private clause to
7328 avoid transferring the data twice.
7329 The hook evaluates to false by default. For a variable in Fortran's COMMON
7330 or EQUIVALENCE block, it returns 'true' (as we have shared=false), since
7331 only the variables in such a COMMON/EQUIVALENCE block shall be privatized,
7332 not the whole block. For C++ and Fortran, it can also be true under certain
7333 other conditions, if DECL_HAS_VALUE_EXPR. */
7334 if (RECORD_OR_UNION_TYPE_P (type))
7335 is_private = lang_hooks.decls.omp_disregard_value_expr (decl, false);
7336
7337 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
7338 && is_global_var (decl)
7339 && device_resident_p (decl)
7340 && !is_private)
7341 {
7342 on_device = true;
7343 flags |= GOVD_MAP_TO_ONLY;
7344 }
7345
7346 switch (ctx->region_type)
7347 {
7348 case ORT_ACC_KERNELS:
7349 rkind = "kernels";
7350
7351 if (is_private)
7352 flags |= GOVD_FIRSTPRIVATE;
7353 else if (AGGREGATE_TYPE_P (type))
7354 {
7355 /* Aggregates default to 'present_or_copy', or 'present'. */
7356 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7357 flags |= GOVD_MAP;
7358 else
7359 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7360 }
7361 else
7362 /* Scalars default to 'copy'. */
7363 flags |= GOVD_MAP | GOVD_MAP_FORCE;
7364
7365 break;
7366
7367 case ORT_ACC_PARALLEL:
7368 case ORT_ACC_SERIAL:
7369 rkind = ctx->region_type == ORT_ACC_PARALLEL ? "parallel" : "serial";
7370
7371 if (is_private)
7372 flags |= GOVD_FIRSTPRIVATE;
7373 else if (on_device || declared)
7374 flags |= GOVD_MAP;
7375 else if (AGGREGATE_TYPE_P (type))
7376 {
7377 /* Aggregates default to 'present_or_copy', or 'present'. */
7378 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7379 flags |= GOVD_MAP;
7380 else
7381 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7382 }
7383 else
7384 /* Scalars default to 'firstprivate'. */
7385 flags |= GOVD_FIRSTPRIVATE;
7386
7387 break;
7388
7389 default:
7390 gcc_unreachable ();
7391 }
7392
7393 if (DECL_ARTIFICIAL (decl))
7394 ; /* We can get compiler-generated decls, and should not complain
7395 about them. */
7396 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7397 {
7398 error ("%qE not specified in enclosing OpenACC %qs construct",
7399 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
7400 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
7401 }
7402 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7403 ; /* Handled above. */
7404 else
7405 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7406
7407 return flags;
7408 }
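
/* A sketch of the OpenACC defaults computed above, assuming no
   explicit clauses (hypothetical declarations):

     double a[1024], s;
     #pragma acc parallel
       ...

   The aggregate A defaults to 'present_or_copy' (or 'present' under
   default(present)), while the scalar S defaults to 'firstprivate' on
   parallel/serial and to a forced 'copy' on kernels.  */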
7409
7410 /* Record the fact that DECL was used within the OMP context CTX.
7411 IN_CODE is true when real code uses DECL, and false when we should
7412 merely emit default(none) errors. Return true if DECL is going to
7413 be remapped and thus DECL shouldn't be gimplified into its
7414 DECL_VALUE_EXPR (if any). */
7415
7416 static bool
7417 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7418 {
7419 splay_tree_node n;
7420 unsigned flags = in_code ? GOVD_SEEN : 0;
7421 bool ret = false, shared;
7422
7423 if (error_operand_p (decl))
7424 return false;
7425
7426 if (ctx->region_type == ORT_NONE)
7427 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7428
7429 if (is_global_var (decl))
7430 {
7431 /* Threadprivate variables are predetermined. */
7432 if (DECL_THREAD_LOCAL_P (decl))
7433 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
7434
7435 if (DECL_HAS_VALUE_EXPR_P (decl))
7436 {
7437 if (ctx->region_type & ORT_ACC)
7438 /* For OpenACC, defer expansion of the value to avoid transferring
7439 privatized common-block data instead of the implicitly or explicitly
7440 transferred variables that sit in common blocks. */
7441 ;
7442 else
7443 {
7444 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7445
7446 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7447 return omp_notice_threadprivate_variable (ctx, decl, value);
7448 }
7449 }
7450
7451 if (gimplify_omp_ctxp->outer_context == NULL
7452 && VAR_P (decl)
7453 && oacc_get_fn_attrib (current_function_decl))
7454 {
7455 location_t loc = DECL_SOURCE_LOCATION (decl);
7456
7457 if (lookup_attribute ("omp declare target link",
7458 DECL_ATTRIBUTES (decl)))
7459 {
7460 error_at (loc,
7461 "%qE with %<link%> clause used in %<routine%> function",
7462 DECL_NAME (decl));
7463 return false;
7464 }
7465 else if (!lookup_attribute ("omp declare target",
7466 DECL_ATTRIBUTES (decl)))
7467 {
7468 error_at (loc,
7469 "%qE requires a %<declare%> directive for use "
7470 "in a %<routine%> function", DECL_NAME (decl));
7471 return false;
7472 }
7473 }
7474 }
7475
7476 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7477 if ((ctx->region_type & ORT_TARGET) != 0)
7478 {
7479 if (ctx->region_type & ORT_ACC)
7480 /* For OpenACC, as remarked above, defer expansion. */
7481 shared = false;
7482 else
7483 shared = true;
7484
7485 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7486 if (n == NULL)
7487 {
7488 unsigned nflags = flags;
7489 if ((ctx->region_type & ORT_ACC) == 0)
7490 {
7491 bool is_declare_target = false;
7492 if (is_global_var (decl)
7493 && varpool_node::get_create (decl)->offloadable)
7494 {
7495 struct gimplify_omp_ctx *octx;
7496 for (octx = ctx->outer_context;
7497 octx; octx = octx->outer_context)
7498 {
7499 n = splay_tree_lookup (octx->variables,
7500 (splay_tree_key)decl);
7501 if (n
7502 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7503 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7504 break;
7505 }
7506 is_declare_target = octx == NULL;
7507 }
7508 if (!is_declare_target)
7509 {
7510 int gdmk;
7511 enum omp_clause_defaultmap_kind kind;
7512 if (lang_hooks.decls.omp_allocatable_p (decl))
7513 gdmk = GDMK_ALLOCATABLE;
7514 else if (lang_hooks.decls.omp_scalar_target_p (decl))
7515 gdmk = GDMK_SCALAR_TARGET;
7516 else if (lang_hooks.decls.omp_scalar_p (decl, false))
7517 gdmk = GDMK_SCALAR;
7518 else if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7519 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7520 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7521 == POINTER_TYPE)))
7522 gdmk = GDMK_POINTER;
7523 else
7524 gdmk = GDMK_AGGREGATE;
7525 kind = lang_hooks.decls.omp_predetermined_mapping (decl);
7526 if (kind != OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED)
7527 {
7528 if (kind == OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE)
7529 nflags |= GOVD_FIRSTPRIVATE;
7530 else if (kind == OMP_CLAUSE_DEFAULTMAP_TO)
7531 nflags |= GOVD_MAP | GOVD_MAP_TO_ONLY;
7532 else
7533 gcc_unreachable ();
7534 }
7535 else if (ctx->defaultmap[gdmk] == 0)
7536 {
7537 tree d = lang_hooks.decls.omp_report_decl (decl);
7538 error ("%qE not specified in enclosing %<target%>",
7539 DECL_NAME (d));
7540 inform (ctx->location, "enclosing %<target%>");
7541 }
7542 else if (ctx->defaultmap[gdmk]
7543 & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
7544 nflags |= ctx->defaultmap[gdmk];
7545 else
7546 {
7547 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
7548 nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
7549 }
7550 }
7551 }
7552
7553 struct gimplify_omp_ctx *octx = ctx->outer_context;
7554 if ((ctx->region_type & ORT_ACC) && octx)
7555 {
7556 /* Look in outer OpenACC contexts, to see if there's a
7557 data attribute for this variable. */
7558 omp_notice_variable (octx, decl, in_code);
7559
7560 for (; octx; octx = octx->outer_context)
7561 {
7562 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7563 break;
7564 splay_tree_node n2
7565 = splay_tree_lookup (octx->variables,
7566 (splay_tree_key) decl);
7567 if (n2)
7568 {
7569 if (octx->region_type == ORT_ACC_HOST_DATA)
7570 error ("variable %qE declared in enclosing "
7571 "%<host_data%> region", DECL_NAME (decl));
7572 nflags |= GOVD_MAP;
7573 if (octx->region_type == ORT_ACC_DATA
7574 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7575 nflags |= GOVD_MAP_0LEN_ARRAY;
7576 goto found_outer;
7577 }
7578 }
7579 }
7580
7581 if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
7582 | GOVD_MAP_ALLOC_ONLY)) == flags)
7583 {
7584 tree type = TREE_TYPE (decl);
7585
7586 if (gimplify_omp_ctxp->target_firstprivatize_array_bases
7587 && lang_hooks.decls.omp_privatize_by_reference (decl))
7588 type = TREE_TYPE (type);
7589 if (!lang_hooks.types.omp_mappable_type (type))
7590 {
7591 error ("%qD referenced in target region does not have "
7592 "a mappable type", decl);
7593 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7594 }
7595 else
7596 {
7597 if ((ctx->region_type & ORT_ACC) != 0)
7598 nflags = oacc_default_clause (ctx, decl, flags);
7599 else
7600 nflags |= GOVD_MAP;
7601 }
7602 }
7603 found_outer:
7604 omp_add_variable (ctx, decl, nflags);
7605 }
7606 else
7607 {
7608 /* If nothing changed, there's nothing left to do. */
7609 if ((n->value & flags) == flags)
7610 return ret;
7611 flags |= n->value;
7612 n->value = flags;
7613 }
7614 goto do_outer;
7615 }
7616
7617 if (n == NULL)
7618 {
7619 if (ctx->region_type == ORT_WORKSHARE
7620 || ctx->region_type == ORT_TASKGROUP
7621 || ctx->region_type == ORT_SIMD
7622 || ctx->region_type == ORT_ACC
7623 || (ctx->region_type & ORT_TARGET_DATA) != 0)
7624 goto do_outer;
7625
7626 flags = omp_default_clause (ctx, decl, in_code, flags);
7627
7628 if ((flags & GOVD_PRIVATE)
7629 && lang_hooks.decls.omp_private_outer_ref (decl))
7630 flags |= GOVD_PRIVATE_OUTER_REF;
7631
7632 omp_add_variable (ctx, decl, flags);
7633
7634 shared = (flags & GOVD_SHARED) != 0;
7635 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7636 goto do_outer;
7637 }
7638
7639 /* Don't mark as GOVD_SEEN addressable temporaries seen only in simd
7640 lb, b or incr expressions; those shouldn't be turned into simd arrays. */
7641 if (ctx->region_type == ORT_SIMD
7642 && ctx->in_for_exprs
7643 && ((n->value & (GOVD_PRIVATE | GOVD_SEEN | GOVD_EXPLICIT))
7644 == GOVD_PRIVATE))
7645 flags &= ~GOVD_SEEN;
7646
7647 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
7648 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
7649 && DECL_SIZE (decl))
7650 {
7651 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7652 {
7653 splay_tree_node n2;
7654 tree t = DECL_VALUE_EXPR (decl);
7655 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7656 t = TREE_OPERAND (t, 0);
7657 gcc_assert (DECL_P (t));
7658 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7659 n2->value |= GOVD_SEEN;
7660 }
7661 else if (lang_hooks.decls.omp_privatize_by_reference (decl)
7662 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
7663 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
7664 != INTEGER_CST))
7665 {
7666 splay_tree_node n2;
7667 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7668 gcc_assert (DECL_P (t));
7669 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7670 if (n2)
7671 omp_notice_variable (ctx, t, true);
7672 }
7673 }
7674
7675 if (ctx->region_type & ORT_ACC)
7676 /* For OpenACC, as remarked above, defer expansion. */
7677 shared = false;
7678 else
7679 shared = ((flags | n->value) & GOVD_SHARED) != 0;
7680 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7681
7682 /* If nothing changed, there's nothing left to do. */
7683 if ((n->value & flags) == flags)
7684 return ret;
7685 flags |= n->value;
7686 n->value = flags;
7687
7688 do_outer:
7689 /* If the variable is private in the current context, then we don't
7690 need to propagate anything to an outer context. */
7691 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
7692 return ret;
7693 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7694 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7695 return ret;
7696 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7697 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7698 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7699 return ret;
7700 if (ctx->outer_context
7701 && omp_notice_variable (ctx->outer_context, decl, in_code))
7702 return true;
7703 return ret;
7704 }
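
/* A sketch of the implicit 'target' handling above (placeholder
   names):

     #pragma omp target defaultmap (none)
       x++;   // error: 'x' not specified in enclosing 'target'

   Without defaultmap(none), a scalar X would instead be implicitly
   firstprivatized and an aggregate implicitly mapped 'tofrom'.  */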
7705
7706 /* Verify that DECL is private within CTX. If there's specific information
7707 to the contrary in the innermost scope, generate an error. */
7708
7709 static bool
7710 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
7711 {
7712 splay_tree_node n;
7713
7714 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7715 if (n != NULL)
7716 {
7717 if (n->value & GOVD_SHARED)
7718 {
7719 if (ctx == gimplify_omp_ctxp)
7720 {
7721 if (simd)
7722 error ("iteration variable %qE is predetermined linear",
7723 DECL_NAME (decl));
7724 else
7725 error ("iteration variable %qE should be private",
7726 DECL_NAME (decl));
7727 n->value = GOVD_PRIVATE;
7728 return true;
7729 }
7730 else
7731 return false;
7732 }
7733 else if ((n->value & GOVD_EXPLICIT) != 0
7734 && (ctx == gimplify_omp_ctxp
7735 || (ctx->region_type == ORT_COMBINED_PARALLEL
7736 && gimplify_omp_ctxp->outer_context == ctx)))
7737 {
7738 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
7739 error ("iteration variable %qE should not be firstprivate",
7740 DECL_NAME (decl));
7741 else if ((n->value & GOVD_REDUCTION) != 0)
7742 error ("iteration variable %qE should not be reduction",
7743 DECL_NAME (decl));
7744 else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
7745 error ("iteration variable %qE should not be linear",
7746 DECL_NAME (decl));
7747 }
7748 return (ctx == gimplify_omp_ctxp
7749 || (ctx->region_type == ORT_COMBINED_PARALLEL
7750 && gimplify_omp_ctxp->outer_context == ctx));
7751 }
7752
7753 if (ctx->region_type != ORT_WORKSHARE
7754 && ctx->region_type != ORT_TASKGROUP
7755 && ctx->region_type != ORT_SIMD
7756 && ctx->region_type != ORT_ACC)
7757 return false;
7758 else if (ctx->outer_context)
7759 return omp_is_private (ctx->outer_context, decl, simd);
7760 return false;
7761 }
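
/* A sketch of one diagnostic issued above (hypothetical code):

     int i;
     #pragma omp parallel for shared (i)
       for (i = 0; i < n; i++)
         ...   // error: iteration variable 'i' should be private
   */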
7762
7763 /* Return true if DECL is private within a parallel region
7764 that binds to the current construct's context or in parallel
7765 region's REDUCTION clause. */
7766
7767 static bool
7768 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
7769 {
7770 splay_tree_node n;
7771
7772 do
7773 {
7774 ctx = ctx->outer_context;
7775 if (ctx == NULL)
7776 {
7777 if (is_global_var (decl))
7778 return false;
7779
7780 /* References might be private, but might be shared too;
7781 when checking for copyprivate, assume they might be
7782 private, otherwise assume they might be shared. */
7783 if (copyprivate)
7784 return true;
7785
7786 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7787 return false;
7788
7789 /* Treat C++ privatized non-static data members outside
7790 of the privatization region the same way. */
7791 if (omp_member_access_dummy_var (decl))
7792 return false;
7793
7794 return true;
7795 }
7796
7797 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7798
7799 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7800 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
7801 continue;
7802
7803 if (n != NULL)
7804 {
7805 if ((n->value & GOVD_LOCAL) != 0
7806 && omp_member_access_dummy_var (decl))
7807 return false;
7808 return (n->value & GOVD_SHARED) == 0;
7809 }
7810 }
7811 while (ctx->region_type == ORT_WORKSHARE
7812 || ctx->region_type == ORT_TASKGROUP
7813 || ctx->region_type == ORT_SIMD
7814 || ctx->region_type == ORT_ACC);
7815 return false;
7816 }
7817
7818 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7819
7820 static tree
7821 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
7822 {
7823 tree t = *tp;
7824
7825 /* Return T if it is the DECL_EXPR for the decl we are looking for. */
7826 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
7827 return t;
7828
7829 if (IS_TYPE_OR_DECL_P (t))
7830 *walk_subtrees = 0;
7831 return NULL_TREE;
7832 }
7833
7834
7835 /* Gimplify the affinity clause but effectively ignore it.
7836 Generate:
7837 var = begin;
7838 if ((step > 0) ? var <= end : var > end)
7839 locator_var_expr; */
7840
7841 static void
7842 gimplify_omp_affinity (tree *list_p, gimple_seq *pre_p)
7843 {
7844 tree last_iter = NULL_TREE;
7845 tree last_bind = NULL_TREE;
7846 tree label = NULL_TREE;
7847 tree *last_body = NULL;
7848 for (tree c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
7849 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY)
7850 {
7851 tree t = OMP_CLAUSE_DECL (c);
7852 if (TREE_CODE (t) == TREE_LIST
7853 && TREE_PURPOSE (t)
7854 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
7855 {
7856 if (TREE_VALUE (t) == null_pointer_node)
7857 continue;
7858 if (TREE_PURPOSE (t) != last_iter)
7859 {
7860 if (last_bind)
7861 {
7862 append_to_statement_list (label, last_body);
7863 gimplify_and_add (last_bind, pre_p);
7864 last_bind = NULL_TREE;
7865 }
7866 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
7867 {
7868 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
7869 is_gimple_val, fb_rvalue) == GS_ERROR
7870 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
7871 is_gimple_val, fb_rvalue) == GS_ERROR
7872 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
7873 is_gimple_val, fb_rvalue) == GS_ERROR
7874 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
7875 is_gimple_val, fb_rvalue)
7876 == GS_ERROR))
7877 return;
7878 }
7879 last_iter = TREE_PURPOSE (t);
7880 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
7881 last_bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
7882 NULL, block);
7883 last_body = &BIND_EXPR_BODY (last_bind);
7884 tree cond = NULL_TREE;
7885 location_t loc = OMP_CLAUSE_LOCATION (c);
7886 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
7887 {
7888 tree var = TREE_VEC_ELT (it, 0);
7889 tree begin = TREE_VEC_ELT (it, 1);
7890 tree end = TREE_VEC_ELT (it, 2);
7891 tree step = TREE_VEC_ELT (it, 3);
7892 loc = DECL_SOURCE_LOCATION (var);
7893 tree tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
7894 var, begin);
7895 append_to_statement_list_force (tem, last_body);
7896
7897 tree cond1 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
7898 step, build_zero_cst (TREE_TYPE (step)));
7899 tree cond2 = fold_build2_loc (loc, LE_EXPR, boolean_type_node,
7900 var, end);
7901 tree cond3 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
7902 var, end);
7903 cond1 = fold_build3_loc (loc, COND_EXPR, boolean_type_node,
7904 cond1, cond2, cond3);
7905 if (cond)
7906 cond = fold_build2_loc (loc, TRUTH_AND_EXPR,
7907 boolean_type_node, cond, cond1);
7908 else
7909 cond = cond1;
7910 }
7911 tree cont_label = create_artificial_label (loc);
7912 label = build1 (LABEL_EXPR, void_type_node, cont_label);
7913 tree tem = fold_build3_loc (loc, COND_EXPR, void_type_node, cond,
7914 void_node,
7915 build_and_jump (&cont_label));
7916 append_to_statement_list_force (tem, last_body);
7917 }
7918 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
7919 {
7920 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t), 0),
7921 last_body);
7922 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
7923 }
7924 if (error_operand_p (TREE_VALUE (t)))
7925 return;
7926 append_to_statement_list_force (TREE_VALUE (t), last_body);
7927 TREE_VALUE (t) = null_pointer_node;
7928 }
7929 else
7930 {
7931 if (last_bind)
7932 {
7933 append_to_statement_list (label, last_body);
7934 gimplify_and_add (last_bind, pre_p);
7935 last_bind = NULL_TREE;
7936 }
7937 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
7938 {
7939 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
7940 NULL, is_gimple_val, fb_rvalue);
7941 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
7942 }
7943 if (error_operand_p (OMP_CLAUSE_DECL (c)))
7944 return;
7945 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
7946 is_gimple_val, fb_rvalue) == GS_ERROR)
7947 return;
7948 gimplify_and_add (OMP_CLAUSE_DECL (c), pre_p);
7949 }
7950 }
7951 if (last_bind)
7952 {
7953 append_to_statement_list (label, last_body);
7954 gimplify_and_add (last_bind, pre_p);
7955 }
7956 return;
7957 }
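
/* A sketch of a clause handled above (placeholder names):

     #pragma omp task affinity (iterator (i = 0 : n) : a[i])

   The iterator bounds and step are gimplified and the locator
   expression is evaluated for its side effects, but no runtime
   affinity request is emitted; the clause is only a hint.  */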
7958
7959 /* If *LIST_P contains any OpenMP depend clauses with iterators,
7960 lower all the depend clauses by populating the corresponding depend
7961 array. Return 0 if there are no such depend clauses, 2 if all
7962 depend clauses should be removed, and 1 otherwise. */
7963
7964 static int
7965 gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
7966 {
7967 tree c;
7968 gimple *g;
7969 size_t n[4] = { 0, 0, 0, 0 };
7970 bool unused[4];
7971 tree counts[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
7972 tree last_iter = NULL_TREE, last_count = NULL_TREE;
7973 size_t i, j;
7974 location_t first_loc = UNKNOWN_LOCATION;
7975
7976 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
7977 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7978 {
7979 switch (OMP_CLAUSE_DEPEND_KIND (c))
7980 {
7981 case OMP_CLAUSE_DEPEND_IN:
7982 i = 2;
7983 break;
7984 case OMP_CLAUSE_DEPEND_OUT:
7985 case OMP_CLAUSE_DEPEND_INOUT:
7986 i = 0;
7987 break;
7988 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
7989 i = 1;
7990 break;
7991 case OMP_CLAUSE_DEPEND_DEPOBJ:
7992 i = 3;
7993 break;
7994 case OMP_CLAUSE_DEPEND_SOURCE:
7995 case OMP_CLAUSE_DEPEND_SINK:
7996 continue;
7997 default:
7998 gcc_unreachable ();
7999 }
8000 tree t = OMP_CLAUSE_DECL (c);
8001 if (first_loc == UNKNOWN_LOCATION)
8002 first_loc = OMP_CLAUSE_LOCATION (c);
8003 if (TREE_CODE (t) == TREE_LIST
8004 && TREE_PURPOSE (t)
8005 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8006 {
8007 if (TREE_PURPOSE (t) != last_iter)
8008 {
8009 tree tcnt = size_one_node;
8010 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8011 {
8012 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
8013 is_gimple_val, fb_rvalue) == GS_ERROR
8014 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
8015 is_gimple_val, fb_rvalue) == GS_ERROR
8016 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
8017 is_gimple_val, fb_rvalue) == GS_ERROR
8018 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
8019 is_gimple_val, fb_rvalue)
8020 == GS_ERROR))
8021 return 2;
8022 tree var = TREE_VEC_ELT (it, 0);
8023 tree begin = TREE_VEC_ELT (it, 1);
8024 tree end = TREE_VEC_ELT (it, 2);
8025 tree step = TREE_VEC_ELT (it, 3);
8026 tree orig_step = TREE_VEC_ELT (it, 4);
8027 tree type = TREE_TYPE (var);
8028 tree stype = TREE_TYPE (step);
8029 location_t loc = DECL_SOURCE_LOCATION (var);
8030 tree endmbegin;
8031 /* Compute count for this iterator as
8032 orig_step > 0
8033 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
8034 : (begin > end ? (end - begin + (step + 1)) / step : 0)
8035 and compute the product of those counts for the entire depend
8036 clause. */
8037 if (POINTER_TYPE_P (type))
8038 endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
8039 stype, end, begin);
8040 else
8041 endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
8042 end, begin);
8043 tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
8044 step,
8045 build_int_cst (stype, 1));
8046 tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
8047 build_int_cst (stype, 1));
8048 tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
8049 unshare_expr (endmbegin),
8050 stepm1);
8051 pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
8052 pos, step);
8053 tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
8054 endmbegin, stepp1);
8055 if (TYPE_UNSIGNED (stype))
8056 {
8057 neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
8058 step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
8059 }
8060 neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
8061 neg, step);
8062 step = NULL_TREE;
8063 tree cond = fold_build2_loc (loc, LT_EXPR,
8064 boolean_type_node,
8065 begin, end);
8066 pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
8067 build_int_cst (stype, 0));
8068 cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
8069 end, begin);
8070 neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
8071 build_int_cst (stype, 0));
8072 tree osteptype = TREE_TYPE (orig_step);
8073 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8074 orig_step,
8075 build_int_cst (osteptype, 0));
8076 tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
8077 cond, pos, neg);
8078 cnt = fold_convert_loc (loc, sizetype, cnt);
8079 if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
8080 fb_rvalue) == GS_ERROR)
8081 return 2;
8082 tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
8083 }
8084 if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
8085 fb_rvalue) == GS_ERROR)
8086 return 2;
8087 last_iter = TREE_PURPOSE (t);
8088 last_count = tcnt;
8089 }
8090 if (counts[i] == NULL_TREE)
8091 counts[i] = last_count;
8092 else
8093 counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
8094 PLUS_EXPR, counts[i], last_count);
8095 }
8096 else
8097 n[i]++;
8098 }
8099 for (i = 0; i < 4; i++)
8100 if (counts[i])
8101 break;
8102 if (i == 4)
8103 return 0;
8104
8105 tree total = size_zero_node;
8106 for (i = 0; i < 4; i++)
8107 {
8108 unused[i] = counts[i] == NULL_TREE && n[i] == 0;
8109 if (counts[i] == NULL_TREE)
8110 counts[i] = size_zero_node;
8111 if (n[i])
8112 counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
8113 if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
8114 fb_rvalue) == GS_ERROR)
8115 return 2;
8116 total = size_binop (PLUS_EXPR, total, counts[i]);
8117 }
8118
8119 if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
8120 == GS_ERROR)
8121 return 2;
8122 bool is_old = unused[1] && unused[3];
8123 tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
8124 size_int (is_old ? 1 : 4));
8125 tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
8126 tree array = create_tmp_var_raw (type);
8127 TREE_ADDRESSABLE (array) = 1;
8128 if (!poly_int_tree_p (totalpx))
8129 {
8130 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
8131 gimplify_type_sizes (TREE_TYPE (array), pre_p);
8132 if (gimplify_omp_ctxp)
8133 {
8134 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8135 while (ctx
8136 && (ctx->region_type == ORT_WORKSHARE
8137 || ctx->region_type == ORT_TASKGROUP
8138 || ctx->region_type == ORT_SIMD
8139 || ctx->region_type == ORT_ACC))
8140 ctx = ctx->outer_context;
8141 if (ctx)
8142 omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
8143 }
8144 gimplify_vla_decl (array, pre_p);
8145 }
8146 else
8147 gimple_add_tmp_var (array);
8148 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
8149 NULL_TREE);
8150 tree tem;
8151 if (!is_old)
8152 {
8153 tem = build2 (MODIFY_EXPR, void_type_node, r,
8154 build_int_cst (ptr_type_node, 0));
8155 gimplify_and_add (tem, pre_p);
8156 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
8157 NULL_TREE);
8158 }
8159 tem = build2 (MODIFY_EXPR, void_type_node, r,
8160 fold_convert (ptr_type_node, total));
8161 gimplify_and_add (tem, pre_p);
8162 for (i = 1; i < (is_old ? 2 : 4); i++)
8163 {
8164 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
8165 NULL_TREE, NULL_TREE);
8166 tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
8167 gimplify_and_add (tem, pre_p);
8168 }
8169
8170 tree cnts[4];
8171 for (j = 4; j; j--)
8172 if (!unused[j - 1])
8173 break;
8174 for (i = 0; i < 4; i++)
8175 {
8176 if (i && (i >= j || unused[i - 1]))
8177 {
8178 cnts[i] = cnts[i - 1];
8179 continue;
8180 }
8181 cnts[i] = create_tmp_var (sizetype);
8182 if (i == 0)
8183 g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
8184 else
8185 {
8186 tree t;
8187 if (is_old)
8188 t = size_binop (PLUS_EXPR, counts[0], size_int (2));
8189 else
8190 t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
8191 if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
8192 == GS_ERROR)
8193 return 2;
8194 g = gimple_build_assign (cnts[i], t);
8195 }
8196 gimple_seq_add_stmt (pre_p, g);
8197 }
8198
8199 last_iter = NULL_TREE;
8200 tree last_bind = NULL_TREE;
8201 tree *last_body = NULL;
8202 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8203 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8204 {
8205 switch (OMP_CLAUSE_DEPEND_KIND (c))
8206 {
8207 case OMP_CLAUSE_DEPEND_IN:
8208 i = 2;
8209 break;
8210 case OMP_CLAUSE_DEPEND_OUT:
8211 case OMP_CLAUSE_DEPEND_INOUT:
8212 i = 0;
8213 break;
8214 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8215 i = 1;
8216 break;
8217 case OMP_CLAUSE_DEPEND_DEPOBJ:
8218 i = 3;
8219 break;
8220 case OMP_CLAUSE_DEPEND_SOURCE:
8221 case OMP_CLAUSE_DEPEND_SINK:
8222 continue;
8223 default:
8224 gcc_unreachable ();
8225 }
8226 tree t = OMP_CLAUSE_DECL (c);
8227 if (TREE_CODE (t) == TREE_LIST
8228 && TREE_PURPOSE (t)
8229 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8230 {
8231 if (TREE_PURPOSE (t) != last_iter)
8232 {
8233 if (last_bind)
8234 gimplify_and_add (last_bind, pre_p);
8235 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8236 last_bind = build3 (BIND_EXPR, void_type_node,
8237 BLOCK_VARS (block), NULL, block);
8238 TREE_SIDE_EFFECTS (last_bind) = 1;
8239 SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
8240 tree *p = &BIND_EXPR_BODY (last_bind);
8241 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8242 {
8243 tree var = TREE_VEC_ELT (it, 0);
8244 tree begin = TREE_VEC_ELT (it, 1);
8245 tree end = TREE_VEC_ELT (it, 2);
8246 tree step = TREE_VEC_ELT (it, 3);
8247 tree orig_step = TREE_VEC_ELT (it, 4);
8248 tree type = TREE_TYPE (var);
8249 location_t loc = DECL_SOURCE_LOCATION (var);
8250 /* Emit:
8251 var = begin;
8252 goto cond_label;
8253 beg_label:
8254 ...
8255 var = var + step;
8256 cond_label:
8257 if (orig_step > 0) {
8258 if (var < end) goto beg_label;
8259 } else {
8260 if (var > end) goto beg_label;
8261 }
8262 for each iterator, with inner iterators added to
8263 the ... above. */
8264 tree beg_label = create_artificial_label (loc);
8265 tree cond_label = NULL_TREE;
8266 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8267 var, begin);
8268 append_to_statement_list_force (tem, p);
8269 tem = build_and_jump (&cond_label);
8270 append_to_statement_list_force (tem, p);
8271 tem = build1 (LABEL_EXPR, void_type_node, beg_label);
8272 append_to_statement_list (tem, p);
8273 tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
8274 NULL_TREE, NULL_TREE);
8275 TREE_SIDE_EFFECTS (bind) = 1;
8276 SET_EXPR_LOCATION (bind, loc);
8277 append_to_statement_list_force (bind, p);
8278 if (POINTER_TYPE_P (type))
8279 tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
8280 var, fold_convert_loc (loc, sizetype,
8281 step));
8282 else
8283 tem = build2_loc (loc, PLUS_EXPR, type, var, step);
8284 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8285 var, tem);
8286 append_to_statement_list_force (tem, p);
8287 tem = build1 (LABEL_EXPR, void_type_node, cond_label);
8288 append_to_statement_list (tem, p);
8289 tree cond = fold_build2_loc (loc, LT_EXPR,
8290 boolean_type_node,
8291 var, end);
8292 tree pos
8293 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8294 cond, build_and_jump (&beg_label),
8295 void_node);
8296 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8297 var, end);
8298 tree neg
8299 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8300 cond, build_and_jump (&beg_label),
8301 void_node);
8302 tree osteptype = TREE_TYPE (orig_step);
8303 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8304 orig_step,
8305 build_int_cst (osteptype, 0));
8306 tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
8307 cond, pos, neg);
8308 append_to_statement_list_force (tem, p);
8309 p = &BIND_EXPR_BODY (bind);
8310 }
8311 last_body = p;
8312 }
8313 last_iter = TREE_PURPOSE (t);
8314 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8315 {
8316 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
8317 0), last_body);
8318 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8319 }
8320 if (error_operand_p (TREE_VALUE (t)))
8321 return 2;
8322 TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
8323 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8324 NULL_TREE, NULL_TREE);
8325 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8326 void_type_node, r, TREE_VALUE (t));
8327 append_to_statement_list_force (tem, last_body);
8328 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8329 void_type_node, cnts[i],
8330 size_binop (PLUS_EXPR, cnts[i], size_int (1)));
8331 append_to_statement_list_force (tem, last_body);
8332 TREE_VALUE (t) = null_pointer_node;
8333 }
8334 else
8335 {
8336 if (last_bind)
8337 {
8338 gimplify_and_add (last_bind, pre_p);
8339 last_bind = NULL_TREE;
8340 }
8341 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8342 {
8343 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8344 NULL, is_gimple_val, fb_rvalue);
8345 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8346 }
8347 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8348 return 2;
8349 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8350 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8351 is_gimple_val, fb_rvalue) == GS_ERROR)
8352 return 2;
8353 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8354 NULL_TREE, NULL_TREE);
8355 tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
8356 gimplify_and_add (tem, pre_p);
8357 g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR, cnts[i],
8358 size_int (1)));
8359 gimple_seq_add_stmt (pre_p, g);
8360 }
8361 }
8362 if (last_bind)
8363 gimplify_and_add (last_bind, pre_p);
8364 tree cond = boolean_false_node;
8365 if (is_old)
8366 {
8367 if (!unused[0])
8368 cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
8369 size_binop_loc (first_loc, PLUS_EXPR, counts[0],
8370 size_int (2)));
8371 if (!unused[2])
8372 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8373 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8374 cnts[2],
8375 size_binop_loc (first_loc, PLUS_EXPR,
8376 totalpx,
8377 size_int (1))));
8378 }
8379 else
8380 {
8381 tree prev = size_int (5);
8382 for (i = 0; i < 4; i++)
8383 {
8384 if (unused[i])
8385 continue;
8386 prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
8387 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8388 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8389 cnts[i], unshare_expr (prev)));
8390 }
8391 }
8392 tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
8393 build_call_expr_loc (first_loc,
8394 builtin_decl_explicit (BUILT_IN_TRAP),
8395 0), void_node);
8396 gimplify_and_add (tem, pre_p);
8397 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
8398 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
8399 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
8400 OMP_CLAUSE_CHAIN (c) = *list_p;
8401 *list_p = c;
8402 return 1;
8403 }
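
/* A sketch of a clause lowered above (placeholder names):

     #pragma omp task depend (iterator (i = 0 : n), in : a[i])

   A temporary pointer array is built whose header holds the total
   number of addresses and the per-kind counts, followed by &a[i] for
   each iterator value; a runtime trap guards against miscounted
   iterations, and the depend clauses are replaced by a single
   OMP_CLAUSE_DEPEND_LAST clause pointing at that array.  */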
8404
8405 /* Insert a GOMP_MAP_ALLOC or GOMP_MAP_RELEASE node following a
8406 GOMP_MAP_STRUCT mapping. C is an always_pointer mapping. STRUCT_NODE is
8407 the struct node to insert the new mapping after (when the struct node is
8408 initially created). PREV_NODE is the first of two or three mappings for a
8409 pointer, and is either:
8410 - the node before C, when a pair of mappings is used, e.g. for a C/C++
8411 array section.
8412 - not the node before C. This is true when we have a reference-to-pointer
8413 type (with a mapping for the reference and for the pointer), or for
8414 Fortran derived-type mappings with a GOMP_MAP_TO_PSET.
8415 If SCP is non-null, the new node is inserted before *SCP.
8416 If SCP is null, the new node is inserted before PREV_NODE.
8417 The return value is:
8418 - PREV_NODE, if SCP is non-null.
8419 - The newly-created ALLOC or RELEASE node, if SCP is null.
8420 - The second newly-created ALLOC or RELEASE node, if we are mapping a
8421 reference to a pointer. */
8422
8423 static tree
8424 insert_struct_comp_map (enum tree_code code, tree c, tree struct_node,
8425 tree prev_node, tree *scp)
8426 {
8427 enum gomp_map_kind mkind
8428 = (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
8429 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8430
8431 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
8432 tree cl = scp ? prev_node : c2;
8433 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8434 OMP_CLAUSE_DECL (c2) = unshare_expr (OMP_CLAUSE_DECL (c));
8435 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : prev_node;
8436 if (OMP_CLAUSE_CHAIN (prev_node) != c
8437 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node)) == OMP_CLAUSE_MAP
8438 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8439 == GOMP_MAP_TO_PSET))
8440 OMP_CLAUSE_SIZE (c2) = OMP_CLAUSE_SIZE (OMP_CLAUSE_CHAIN (prev_node));
8441 else
8442 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (ptr_type_node);
8443 if (struct_node)
8444 OMP_CLAUSE_CHAIN (struct_node) = c2;
8445
8446 /* We might need to create an additional mapping if we have a reference to a
8447 pointer (in C++). Don't do this if we have something other than a
8448 GOMP_MAP_ALWAYS_POINTER though, i.e. a GOMP_MAP_TO_PSET. */
8449 if (OMP_CLAUSE_CHAIN (prev_node) != c
8450 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node)) == OMP_CLAUSE_MAP
8451 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8452 == GOMP_MAP_ALWAYS_POINTER)
8453 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8454 == GOMP_MAP_ATTACH_DETACH)))
8455 {
8456 tree c4 = OMP_CLAUSE_CHAIN (prev_node);
8457 tree c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
8458 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8459 OMP_CLAUSE_DECL (c3) = unshare_expr (OMP_CLAUSE_DECL (c4));
8460 OMP_CLAUSE_SIZE (c3) = TYPE_SIZE_UNIT (ptr_type_node);
8461 OMP_CLAUSE_CHAIN (c3) = prev_node;
8462 if (!scp)
8463 OMP_CLAUSE_CHAIN (c2) = c3;
8464 else
8465 cl = c3;
8466 }
8467
8468 if (scp)
8469 *scp = c2;
8470
8471 return cl;
8472 }
8473
8474 /* Strip ARRAY_REFS or an indirect ref off BASE, find the containing object,
8475 and set *BITPOSP to the bit offset and *POFFSETP to the byte offset of the access.
8476 If BASE_REF is non-NULL and the containing object is a reference, set
8477 *BASE_REF to that reference before dereferencing the object.
8478 If BASE_REF is NULL, check that the containing object is a COMPONENT_REF or
8479 has array type, else return NULL. */
8480
8481 static tree
8482 extract_base_bit_offset (tree base, tree *base_ref, poly_int64 *bitposp,
8483 poly_offset_int *poffsetp)
8484 {
8485 tree offset;
8486 poly_int64 bitsize, bitpos;
8487 machine_mode mode;
8488 int unsignedp, reversep, volatilep = 0;
8489 poly_offset_int poffset;
8490
8491 if (base_ref)
8492 {
8493 *base_ref = NULL_TREE;
8494
8495 while (TREE_CODE (base) == ARRAY_REF)
8496 base = TREE_OPERAND (base, 0);
8497
8498 if (TREE_CODE (base) == INDIRECT_REF)
8499 base = TREE_OPERAND (base, 0);
8500 }
8501 else
8502 {
8503 if (TREE_CODE (base) == ARRAY_REF)
8504 {
8505 while (TREE_CODE (base) == ARRAY_REF)
8506 base = TREE_OPERAND (base, 0);
8507 if (TREE_CODE (base) != COMPONENT_REF
8508 || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE)
8509 return NULL_TREE;
8510 }
8511 else if (TREE_CODE (base) == INDIRECT_REF
8512 && TREE_CODE (TREE_OPERAND (base, 0)) == COMPONENT_REF
8513 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
8514 == REFERENCE_TYPE))
8515 base = TREE_OPERAND (base, 0);
8516 }
8517
8518 base = get_inner_reference (base, &bitsize, &bitpos, &offset, &mode,
8519 &unsignedp, &reversep, &volatilep);
8520
8521 tree orig_base = base;
8522
8523 if ((TREE_CODE (base) == INDIRECT_REF
8524 || (TREE_CODE (base) == MEM_REF
8525 && integer_zerop (TREE_OPERAND (base, 1))))
8526 && DECL_P (TREE_OPERAND (base, 0))
8527 && TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0))) == REFERENCE_TYPE)
8528 base = TREE_OPERAND (base, 0);
8529
8530 gcc_assert (offset == NULL_TREE || poly_int_tree_p (offset));
8531
8532 if (offset)
8533 poffset = wi::to_poly_offset (offset);
8534 else
8535 poffset = 0;
8536
8537 if (maybe_ne (bitpos, 0))
8538 poffset += bits_to_bytes_round_down (bitpos);
8539
8540 *bitposp = bitpos;
8541 *poffsetp = poffset;
8542
8543 /* Set *BASE_REF if BASE was a dereferenced reference variable. */
8544 if (base_ref && orig_base != base)
8545 *base_ref = orig_base;
8546
8547 return base;
8548 }
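
/* For instance, given BASE 's.x.a[3]' where A is an array field
   (hypothetical access), the ARRAY_REF is stripped,
   get_inner_reference resolves the remaining COMPONENT_REFs down to S,
   and *BITPOSP and *POFFSETP receive the bit and byte offsets of the
   access within S.  */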
8549
8550 /* Returns true if EXPR is or contains (as a sub-component) BASE_PTR. */
8551
8552 static bool
8553 is_or_contains_p (tree expr, tree base_ptr)
8554 {
8555 while (expr != base_ptr)
8556 if (TREE_CODE (base_ptr) == COMPONENT_REF)
8557 base_ptr = TREE_OPERAND (base_ptr, 0);
8558 else
8559 break;
8560 return expr == base_ptr;
8561 }
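
/* For instance, is_or_contains_p (s, s.p.q) is true (placeholder
   names): stripping COMPONENT_REFs off 's.p.q' yields 's.p' and then
   's', which matches EXPR.  */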
8562
8563 /* Implement OpenMP 5.x map ordering rules for target directives. There are
8564 several rules, with some level of ambiguity; hopefully we can at least
8565 collect the complexity here in one place. */
8566
8567 static void
8568 omp_target_reorder_clauses (tree *list_p)
8569 {
8570 /* Collect refs to alloc/release/delete maps. */
8571 auto_vec<tree, 32> ard;
8572 tree *cp = list_p;
8573 while (*cp != NULL_TREE)
8574 if (OMP_CLAUSE_CODE (*cp) == OMP_CLAUSE_MAP
8575 && (OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_ALLOC
8576 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_RELEASE
8577 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_DELETE))
8578 {
8579 /* Unlink cp and push to ard. */
8580 tree c = *cp;
8581 tree nc = OMP_CLAUSE_CHAIN (c);
8582 *cp = nc;
8583 ard.safe_push (c);
8584
8585 /* Any associated pointer type maps should also move along. */
8586 while (*cp != NULL_TREE
8587 && OMP_CLAUSE_CODE (*cp) == OMP_CLAUSE_MAP
8588 && (OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
8589 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_FIRSTPRIVATE_POINTER
8590 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_ATTACH_DETACH
8591 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_POINTER
8592 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_ALWAYS_POINTER
8593 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_TO_PSET))
8594 {
8595 c = *cp;
8596 nc = OMP_CLAUSE_CHAIN (c);
8597 *cp = nc;
8598 ard.safe_push (c);
8599 }
8600 }
8601 else
8602 cp = &OMP_CLAUSE_CHAIN (*cp);
8603
8604 /* Link alloc/release/delete maps to the end of list. */
8605 for (unsigned int i = 0; i < ard.length (); i++)
8606 {
8607 *cp = ard[i];
8608 cp = &OMP_CLAUSE_CHAIN (ard[i]);
8609 }
8610 *cp = NULL_TREE;
8611
8612 /* OpenMP 5.0 requires that a pointer variable be mapped before
8613 its use as a base pointer. */
8614 auto_vec<tree *, 32> atf;
8615 for (tree *cp = list_p; *cp; cp = &OMP_CLAUSE_CHAIN (*cp))
8616 if (OMP_CLAUSE_CODE (*cp) == OMP_CLAUSE_MAP)
8617 {
8618 /* Collect alloc, to, from, to/from clause tree pointers. */
8619 gomp_map_kind k = OMP_CLAUSE_MAP_KIND (*cp);
8620 if (k == GOMP_MAP_ALLOC
8621 || k == GOMP_MAP_TO
8622 || k == GOMP_MAP_FROM
8623 || k == GOMP_MAP_TOFROM
8624 || k == GOMP_MAP_ALWAYS_TO
8625 || k == GOMP_MAP_ALWAYS_FROM
8626 || k == GOMP_MAP_ALWAYS_TOFROM)
8627 atf.safe_push (cp);
8628 }
8629
8630 for (unsigned int i = 0; i < atf.length (); i++)
8631 if (atf[i])
8632 {
8633 tree *cp = atf[i];
8634 tree decl = OMP_CLAUSE_DECL (*cp);
8635 if (TREE_CODE (decl) == INDIRECT_REF || TREE_CODE (decl) == MEM_REF)
8636 {
8637 tree base_ptr = TREE_OPERAND (decl, 0);
8638 STRIP_TYPE_NOPS (base_ptr);
8639 for (unsigned int j = i + 1; j < atf.length (); j++)
8640 {
8641 tree *cp2 = atf[j];
8642 tree decl2 = OMP_CLAUSE_DECL (*cp2);
8643 if (is_or_contains_p (decl2, base_ptr))
8644 {
8645 /* Move *cp2 to before *cp. */
8646 tree c = *cp2;
8647 *cp2 = OMP_CLAUSE_CHAIN (c);
8648 OMP_CLAUSE_CHAIN (c) = *cp;
8649 *cp = c;
8650 atf[j] = NULL;
8651 }
8652 }
8653 }
8654 }
8655 }
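
/* A sketch of the reordering performed above (placeholder names):

     #pragma omp target map (to: p[:n]) map (alloc: q) map (to: p)

   The 'alloc' map of Q is moved to the end of the list, and the map
   of the pointer P itself is moved before the 'p[:n]' map that uses P
   as its base pointer.  */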
8656
8657 /* DECL is supposed to have lastprivate semantics in the outer contexts
8658 of combined/composite constructs, starting with OCTX.
8659 Add the needed lastprivate, shared or map clause if no data-sharing or
8660 mapping clause is present. IMPLICIT_P is true if it is an implicit
8661 clause (IV on simd), in which case the lastprivate will not be
8662 copied to some constructs. */
8663
8664 static void
8665 omp_lastprivate_for_combined_outer_constructs (struct gimplify_omp_ctx *octx,
8666 tree decl, bool implicit_p)
8667 {
8668 struct gimplify_omp_ctx *orig_octx = octx;
8669 for (; octx; octx = octx->outer_context)
8670 {
8671 if ((octx->region_type == ORT_COMBINED_PARALLEL
8672 || (octx->region_type & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS)
8673 && splay_tree_lookup (octx->variables,
8674 (splay_tree_key) decl) == NULL)
8675 {
8676 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
8677 continue;
8678 }
8679 if ((octx->region_type & ORT_TASK) != 0
8680 && octx->combined_loop
8681 && splay_tree_lookup (octx->variables,
8682 (splay_tree_key) decl) == NULL)
8683 {
8684 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8685 continue;
8686 }
8687 if (implicit_p
8688 && octx->region_type == ORT_WORKSHARE
8689 && octx->combined_loop
8690 && splay_tree_lookup (octx->variables,
8691 (splay_tree_key) decl) == NULL
8692 && octx->outer_context
8693 && octx->outer_context->region_type == ORT_COMBINED_PARALLEL
8694 && splay_tree_lookup (octx->outer_context->variables,
8695 (splay_tree_key) decl) == NULL)
8696 {
8697 octx = octx->outer_context;
8698 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8699 continue;
8700 }
8701 if ((octx->region_type == ORT_WORKSHARE || octx->region_type == ORT_ACC)
8702 && octx->combined_loop
8703 && splay_tree_lookup (octx->variables,
8704 (splay_tree_key) decl) == NULL
8705 && !omp_check_private (octx, decl, false))
8706 {
8707 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8708 continue;
8709 }
8710 if (octx->region_type == ORT_COMBINED_TARGET)
8711 {
8712 splay_tree_node n = splay_tree_lookup (octx->variables,
8713 (splay_tree_key) decl);
8714 if (n == NULL)
8715 {
8716 omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
8717 octx = octx->outer_context;
8718 }
8719 else if (!implicit_p
8720 && (n->value & GOVD_FIRSTPRIVATE_IMPLICIT))
8721 {
8722 n->value &= ~(GOVD_FIRSTPRIVATE
8723 | GOVD_FIRSTPRIVATE_IMPLICIT
8724 | GOVD_EXPLICIT);
8725 omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
8726 octx = octx->outer_context;
8727 }
8728 }
8729 break;
8730 }
8731 if (octx && (implicit_p || octx != orig_octx))
8732 omp_notice_variable (octx, decl, true);
8733 }
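
/* A sketch: for a combined construct such as (hypothetical code)

     #pragma omp target teams distribute parallel for simd
       for (i = 0; i < n; i++)
         ...

   the iteration variable I becomes lastprivate on the inner loop
   contexts, shared on the combined parallel and teams contexts, and
   mapped on the combined target, following the walk above.  */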
8734
8735 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
8736 OMP context and into previous ones. */
8737
8738 static void
8739 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
8740 enum omp_region_type region_type,
8741 enum tree_code code)
8742 {
8743 struct gimplify_omp_ctx *ctx, *outer_ctx;
8744 tree c;
8745 hash_map<tree, tree> *struct_map_to_clause = NULL;
8746 hash_set<tree> *struct_deref_set = NULL;
8747 tree *prev_list_p = NULL, *orig_list_p = list_p;
8748 int handled_depend_iterators = -1;
8749 int nowait = -1;
8750
8751 ctx = new_omp_context (region_type);
8752 ctx->code = code;
8753 outer_ctx = ctx->outer_context;
8754 if (code == OMP_TARGET)
8755 {
8756 if (!lang_GNU_Fortran ())
8757 ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
8758 ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
8759 ctx->defaultmap[GDMK_SCALAR_TARGET] = (lang_GNU_Fortran ()
8760 ? GOVD_MAP : GOVD_FIRSTPRIVATE);
8761 }
8762 if (!lang_GNU_Fortran ())
8763 switch (code)
8764 {
8765 case OMP_TARGET:
8766 case OMP_TARGET_DATA:
8767 case OMP_TARGET_ENTER_DATA:
8768 case OMP_TARGET_EXIT_DATA:
8769 case OACC_DECLARE:
8770 case OACC_HOST_DATA:
8771 case OACC_PARALLEL:
8772 case OACC_KERNELS:
8773 ctx->target_firstprivatize_array_bases = true;
8774 default:
8775 break;
8776 }
8777
8778 if (code == OMP_TARGET
8779 || code == OMP_TARGET_DATA
8780 || code == OMP_TARGET_ENTER_DATA
8781 || code == OMP_TARGET_EXIT_DATA)
8782 omp_target_reorder_clauses (list_p);
8783
8784 while ((c = *list_p) != NULL)
8785 {
8786 bool remove = false;
8787 bool notice_outer = true;
8788 const char *check_non_private = NULL;
8789 unsigned int flags;
8790 tree decl;
8791
8792 switch (OMP_CLAUSE_CODE (c))
8793 {
8794 case OMP_CLAUSE_PRIVATE:
8795 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
8796 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
8797 {
8798 flags |= GOVD_PRIVATE_OUTER_REF;
8799 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
8800 }
8801 else
8802 notice_outer = false;
8803 goto do_add;
8804 case OMP_CLAUSE_SHARED:
8805 flags = GOVD_SHARED | GOVD_EXPLICIT;
8806 goto do_add;
8807 case OMP_CLAUSE_FIRSTPRIVATE:
8808 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8809 check_non_private = "firstprivate";
8810 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8811 {
8812 gcc_assert (code == OMP_TARGET);
8813 flags |= GOVD_FIRSTPRIVATE_IMPLICIT;
8814 }
8815 goto do_add;
8816 case OMP_CLAUSE_LASTPRIVATE:
8817 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8818 switch (code)
8819 {
8820 case OMP_DISTRIBUTE:
8821 error_at (OMP_CLAUSE_LOCATION (c),
8822 "conditional %<lastprivate%> clause on "
8823 "%qs construct", "distribute");
8824 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8825 break;
8826 case OMP_TASKLOOP:
8827 error_at (OMP_CLAUSE_LOCATION (c),
8828 "conditional %<lastprivate%> clause on "
8829 "%qs construct", "taskloop");
8830 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8831 break;
8832 default:
8833 break;
8834 }
8835 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
8836 if (code != OMP_LOOP)
8837 check_non_private = "lastprivate";
8838 decl = OMP_CLAUSE_DECL (c);
8839 if (error_operand_p (decl))
8840 goto do_add;
8841 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
8842 && !lang_hooks.decls.omp_scalar_p (decl, true))
8843 {
8844 error_at (OMP_CLAUSE_LOCATION (c),
8845 "non-scalar variable %qD in conditional "
8846 "%<lastprivate%> clause", decl);
8847 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8848 }
8849 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8850 flags |= GOVD_LASTPRIVATE_CONDITIONAL;
8851 omp_lastprivate_for_combined_outer_constructs (outer_ctx, decl,
8852 false);
8853 goto do_add;
8854 case OMP_CLAUSE_REDUCTION:
8855 if (OMP_CLAUSE_REDUCTION_TASK (c))
8856 {
8857 if (region_type == ORT_WORKSHARE)
8858 {
8859 if (nowait == -1)
8860 nowait = omp_find_clause (*list_p,
8861 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8862 if (nowait
8863 && (outer_ctx == NULL
8864 || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
8865 {
8866 error_at (OMP_CLAUSE_LOCATION (c),
8867 "%<task%> reduction modifier on a construct "
8868 "with a %<nowait%> clause");
8869 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8870 }
8871 }
8872 else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
8873 {
8874 error_at (OMP_CLAUSE_LOCATION (c),
8875 "invalid %<task%> reduction modifier on construct "
8876 "other than %<parallel%>, %qs or %<sections%>",
8877 lang_GNU_Fortran () ? "do" : "for");
8878 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8879 }
8880 }
8881 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
8882 switch (code)
8883 {
8884 case OMP_SECTIONS:
8885 error_at (OMP_CLAUSE_LOCATION (c),
8886 "%<inscan%> %<reduction%> clause on "
8887 "%qs construct", "sections");
8888 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8889 break;
8890 case OMP_PARALLEL:
8891 error_at (OMP_CLAUSE_LOCATION (c),
8892 "%<inscan%> %<reduction%> clause on "
8893 "%qs construct", "parallel");
8894 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8895 break;
8896 case OMP_TEAMS:
8897 error_at (OMP_CLAUSE_LOCATION (c),
8898 "%<inscan%> %<reduction%> clause on "
8899 "%qs construct", "teams");
8900 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8901 break;
8902 case OMP_TASKLOOP:
8903 error_at (OMP_CLAUSE_LOCATION (c),
8904 "%<inscan%> %<reduction%> clause on "
8905 "%qs construct", "taskloop");
8906 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8907 break;
8908 default:
8909 break;
8910 }
8911 /* FALLTHRU */
8912 case OMP_CLAUSE_IN_REDUCTION:
8913 case OMP_CLAUSE_TASK_REDUCTION:
8914 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
8915 /* OpenACC permits reductions on private variables. */
8916 if (!(region_type & ORT_ACC)
8917 /* taskgroup is actually not a worksharing region. */
8918 && code != OMP_TASKGROUP)
8919 check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
8920 decl = OMP_CLAUSE_DECL (c);
8921 if (TREE_CODE (decl) == MEM_REF)
8922 {
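/* E.g. an array-section reduction such as "reduction (+: arr[0:n])"
   reaches this point with a MEM_REF DECL; the section bound in the
   type's domain is gimplified and firstprivatized below so the
   receiving region can recompute the section length.  */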
8923 tree type = TREE_TYPE (decl);
8924 bool saved_into_ssa = gimplify_ctxp->into_ssa;
8925 gimplify_ctxp->into_ssa = false;
8926 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
8927 NULL, is_gimple_val, fb_rvalue, false)
8928 == GS_ERROR)
8929 {
8930 gimplify_ctxp->into_ssa = saved_into_ssa;
8931 remove = true;
8932 break;
8933 }
8934 gimplify_ctxp->into_ssa = saved_into_ssa;
8935 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8936 if (DECL_P (v))
8937 {
8938 omp_firstprivatize_variable (ctx, v);
8939 omp_notice_variable (ctx, v, true);
8940 }
8941 decl = TREE_OPERAND (decl, 0);
8942 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
8943 {
8944 gimplify_ctxp->into_ssa = false;
8945 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
8946 NULL, is_gimple_val, fb_rvalue, false)
8947 == GS_ERROR)
8948 {
8949 gimplify_ctxp->into_ssa = saved_into_ssa;
8950 remove = true;
8951 break;
8952 }
8953 gimplify_ctxp->into_ssa = saved_into_ssa;
8954 v = TREE_OPERAND (decl, 1);
8955 if (DECL_P (v))
8956 {
8957 omp_firstprivatize_variable (ctx, v);
8958 omp_notice_variable (ctx, v, true);
8959 }
8960 decl = TREE_OPERAND (decl, 0);
8961 }
8962 if (TREE_CODE (decl) == ADDR_EXPR
8963 || TREE_CODE (decl) == INDIRECT_REF)
8964 decl = TREE_OPERAND (decl, 0);
8965 }
8966 goto do_add_decl;
8967 case OMP_CLAUSE_LINEAR:
8968 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
8969 is_gimple_val, fb_rvalue) == GS_ERROR)
8970 {
8971 remove = true;
8972 break;
8973 }
8974 else
8975 {
8976 if (code == OMP_SIMD
8977 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8978 {
8979 struct gimplify_omp_ctx *octx = outer_ctx;
8980 if (octx
8981 && octx->region_type == ORT_WORKSHARE
8982 && octx->combined_loop
8983 && !octx->distribute)
8984 {
8985 if (octx->outer_context
8986 && (octx->outer_context->region_type
8987 == ORT_COMBINED_PARALLEL))
8988 octx = octx->outer_context->outer_context;
8989 else
8990 octx = octx->outer_context;
8991 }
8992 if (octx
8993 && octx->region_type == ORT_WORKSHARE
8994 && octx->combined_loop
8995 && octx->distribute)
8996 {
8997 error_at (OMP_CLAUSE_LOCATION (c),
8998 "%<linear%> clause for variable other than "
8999 "loop iterator specified on construct "
9000 "combined with %<distribute%>");
9001 remove = true;
9002 break;
9003 }
9004 }
9005 /* For combined #pragma omp parallel for simd, need to put
9006 lastprivate and perhaps firstprivate too on the
9007 parallel. Similarly for #pragma omp for simd. */
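/* Sketch of the walk below: for "#pragma omp parallel for simd
   linear (x)" the contexts from the inside out are simd -> combined
   loop workshare -> combined parallel; X picks up
   GOVD_FIRSTPRIVATE / GOVD_LASTPRIVATE on the workshare and
   GOVD_SEEN | GOVD_SHARED on the combined parallel.  */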
9008 struct gimplify_omp_ctx *octx = outer_ctx;
9009 bool taskloop_seen = false;
9010 decl = NULL_TREE;
9011 do
9012 {
9013 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
9014 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9015 break;
9016 decl = OMP_CLAUSE_DECL (c);
9017 if (error_operand_p (decl))
9018 {
9019 decl = NULL_TREE;
9020 break;
9021 }
9022 flags = GOVD_SEEN;
9023 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
9024 flags |= GOVD_FIRSTPRIVATE;
9025 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9026 flags |= GOVD_LASTPRIVATE;
9027 if (octx
9028 && octx->region_type == ORT_WORKSHARE
9029 && octx->combined_loop)
9030 {
9031 if (octx->outer_context
9032 && (octx->outer_context->region_type
9033 == ORT_COMBINED_PARALLEL))
9034 octx = octx->outer_context;
9035 else if (omp_check_private (octx, decl, false))
9036 break;
9037 }
9038 else if (octx
9039 && (octx->region_type & ORT_TASK) != 0
9040 && octx->combined_loop)
9041 taskloop_seen = true;
9042 else if (octx
9043 && octx->region_type == ORT_COMBINED_PARALLEL
9044 && ((ctx->region_type == ORT_WORKSHARE
9045 && octx == outer_ctx)
9046 || taskloop_seen))
9047 flags = GOVD_SEEN | GOVD_SHARED;
9048 else if (octx
9049 && ((octx->region_type & ORT_COMBINED_TEAMS)
9050 == ORT_COMBINED_TEAMS))
9051 flags = GOVD_SEEN | GOVD_SHARED;
9052 else if (octx
9053 && octx->region_type == ORT_COMBINED_TARGET)
9054 {
9055 if (flags & GOVD_LASTPRIVATE)
9056 flags = GOVD_SEEN | GOVD_MAP;
9057 }
9058 else
9059 break;
9060 splay_tree_node on
9061 = splay_tree_lookup (octx->variables,
9062 (splay_tree_key) decl);
9063 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
9064 {
9065 octx = NULL;
9066 break;
9067 }
9068 omp_add_variable (octx, decl, flags);
9069 if (octx->outer_context == NULL)
9070 break;
9071 octx = octx->outer_context;
9072 }
9073 while (1);
9074 if (octx
9075 && decl
9076 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
9077 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
9078 omp_notice_variable (octx, decl, true);
9079 }
9080 flags = GOVD_LINEAR | GOVD_EXPLICIT;
9081 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
9082 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9083 {
9084 notice_outer = false;
9085 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9086 }
9087 goto do_add;
9088
9089 case OMP_CLAUSE_MAP:
9090 decl = OMP_CLAUSE_DECL (c);
9091 if (error_operand_p (decl))
9092 remove = true;
9093 switch (code)
9094 {
9095 case OMP_TARGET:
9096 break;
9097 case OACC_DATA:
9098 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
9099 break;
9100 /* FALLTHRU */
9101 case OMP_TARGET_DATA:
9102 case OMP_TARGET_ENTER_DATA:
9103 case OMP_TARGET_EXIT_DATA:
9104 case OACC_ENTER_DATA:
9105 case OACC_EXIT_DATA:
9106 case OACC_HOST_DATA:
9107 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9108 || (OMP_CLAUSE_MAP_KIND (c)
9109 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9110 /* For target {,enter ,exit }data only the array slice is
9111 mapped, but not the pointer to it. */
9112 remove = true;
9113 break;
9114 default:
9115 break;
9116 }
9117 /* For Fortran, not only the pointer to the data is mapped but also
9118 the address of the pointer, the array descriptor etc.; for
9119 'exit data' - and in particular for 'delete:' - having an 'alloc:'
9120 does not make sense. Likewise, for 'update' only transferring the
9121 data itself is needed as the rest has been handled in previous
9122 directives. However, for 'exit data', the array descriptor needs
9123 to be deleted; hence, we turn the MAP_TO_PSET into a MAP_DELETE.
9124
9125 NOTE: Generally, it is not safe to perform "enter data" operations
9126 on arrays where the data *or the descriptor* may go out of scope
9127 before a corresponding "exit data" operation -- and such a
9128 descriptor may be synthesized temporarily, e.g. to pass an
9129 explicit-shape array to a function expecting an assumed-shape
9130 argument. Performing "enter data" inside the called function
9131 would thus be problematic. */
9132 if (code == OMP_TARGET_EXIT_DATA
9133 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET)
9134 OMP_CLAUSE_SET_MAP_KIND (c, OMP_CLAUSE_MAP_KIND (*prev_list_p)
9135 == GOMP_MAP_DELETE
9136 ? GOMP_MAP_DELETE : GOMP_MAP_RELEASE);
9137 else if ((code == OMP_TARGET_EXIT_DATA || code == OMP_TARGET_UPDATE)
9138 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
9139 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET))
9140 remove = true;
9141
9142 if (remove)
9143 break;
9144 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
9145 {
9146 struct gimplify_omp_ctx *octx;
9147 for (octx = outer_ctx; octx; octx = octx->outer_context)
9148 {
9149 if (octx->region_type != ORT_ACC_HOST_DATA)
9150 break;
9151 splay_tree_node n2
9152 = splay_tree_lookup (octx->variables,
9153 (splay_tree_key) decl);
9154 if (n2)
9155 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
9156 "declared in enclosing %<host_data%> region",
9157 DECL_NAME (decl));
9158 }
9159 }
9160 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9161 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
9162 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
9163 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
9164 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
9165 {
9166 remove = true;
9167 break;
9168 }
9169 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9170 || (OMP_CLAUSE_MAP_KIND (c)
9171 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9172 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
9173 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
9174 {
9175 OMP_CLAUSE_SIZE (c)
9176 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
9177 false);
9178 if ((region_type & ORT_TARGET) != 0)
9179 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
9180 GOVD_FIRSTPRIVATE | GOVD_SEEN);
9181 }
9182
9183 if (!DECL_P (decl))
9184 {
9185 tree d = decl, *pd;
9186 if (TREE_CODE (d) == ARRAY_REF)
9187 {
9188 while (TREE_CODE (d) == ARRAY_REF)
9189 d = TREE_OPERAND (d, 0);
9190 if (TREE_CODE (d) == COMPONENT_REF
9191 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
9192 decl = d;
9193 }
9194 pd = &OMP_CLAUSE_DECL (c);
9195 if (d == decl
9196 && TREE_CODE (decl) == INDIRECT_REF
9197 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
9198 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9199 == REFERENCE_TYPE))
9200 {
9201 pd = &TREE_OPERAND (decl, 0);
9202 decl = TREE_OPERAND (decl, 0);
9203 }
9204 bool indir_p = false;
9205 tree orig_decl = decl;
9206 tree decl_ref = NULL_TREE;
9207 if ((region_type & (ORT_ACC | ORT_TARGET | ORT_TARGET_DATA)) != 0
9208 && TREE_CODE (*pd) == COMPONENT_REF
9209 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH
9210 && code != OACC_UPDATE)
9211 {
9212 while (TREE_CODE (decl) == COMPONENT_REF)
9213 {
9214 decl = TREE_OPERAND (decl, 0);
9215 if (((TREE_CODE (decl) == MEM_REF
9216 && integer_zerop (TREE_OPERAND (decl, 1)))
9217 || INDIRECT_REF_P (decl))
9218 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9219 == POINTER_TYPE))
9220 {
9221 indir_p = true;
9222 decl = TREE_OPERAND (decl, 0);
9223 }
9224 if (TREE_CODE (decl) == INDIRECT_REF
9225 && DECL_P (TREE_OPERAND (decl, 0))
9226 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9227 == REFERENCE_TYPE))
9228 {
9229 decl_ref = decl;
9230 decl = TREE_OPERAND (decl, 0);
9231 }
9232 }
9233 }
9234 else if (TREE_CODE (decl) == COMPONENT_REF)
9235 {
9236 while (TREE_CODE (decl) == COMPONENT_REF)
9237 decl = TREE_OPERAND (decl, 0);
9238 if (TREE_CODE (decl) == INDIRECT_REF
9239 && DECL_P (TREE_OPERAND (decl, 0))
9240 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9241 == REFERENCE_TYPE))
9242 decl = TREE_OPERAND (decl, 0);
9243 }
9244 if (decl != orig_decl && DECL_P (decl) && indir_p)
9245 {
9246 gomp_map_kind k
9247 = ((code == OACC_EXIT_DATA || code == OMP_TARGET_EXIT_DATA)
9248 ? GOMP_MAP_DETACH : GOMP_MAP_ATTACH);
9249 /* We have a dereference of a struct member. Make this an
9250 attach/detach operation, and ensure the base pointer is
9251 mapped as a FIRSTPRIVATE_POINTER. */
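/* An illustrative case: with P a pointer to struct,
   "#pragma acc enter data copyin (p->arr[0:n])" is such a
   dereference of a struct member; the clause becomes GOMP_MAP_ATTACH
   here and P itself is mapped through the clauses synthesized
   below.  */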
9252 OMP_CLAUSE_SET_MAP_KIND (c, k);
9253 flags = GOVD_MAP | GOVD_SEEN | GOVD_EXPLICIT;
9254 tree next_clause = OMP_CLAUSE_CHAIN (c);
9255 if (k == GOMP_MAP_ATTACH
9256 && code != OACC_ENTER_DATA
9257 && code != OMP_TARGET_ENTER_DATA
9258 && (!next_clause
9259 || (OMP_CLAUSE_CODE (next_clause) != OMP_CLAUSE_MAP)
9260 || (OMP_CLAUSE_MAP_KIND (next_clause)
9261 != GOMP_MAP_POINTER)
9262 || OMP_CLAUSE_DECL (next_clause) != decl)
9263 && (!struct_deref_set
9264 || !struct_deref_set->contains (decl)))
9265 {
9266 if (!struct_deref_set)
9267 struct_deref_set = new hash_set<tree> ();
9268 /* As well as the attach, we also need a
9269 FIRSTPRIVATE_POINTER clause to properly map the
9270 pointer to the struct base. */
9271 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9272 OMP_CLAUSE_MAP);
9273 OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ALLOC);
9274 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c2)
9275 = 1;
9276 tree charptr_zero
9277 = build_int_cst (build_pointer_type (char_type_node),
9278 0);
9279 OMP_CLAUSE_DECL (c2)
9280 = build2 (MEM_REF, char_type_node,
9281 decl_ref ? decl_ref : decl, charptr_zero);
9282 OMP_CLAUSE_SIZE (c2) = size_zero_node;
9283 tree c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9284 OMP_CLAUSE_MAP);
9285 OMP_CLAUSE_SET_MAP_KIND (c3,
9286 GOMP_MAP_FIRSTPRIVATE_POINTER);
9287 OMP_CLAUSE_DECL (c3) = decl;
9288 OMP_CLAUSE_SIZE (c3) = size_zero_node;
9289 tree mapgrp = *prev_list_p;
9290 *prev_list_p = c2;
9291 OMP_CLAUSE_CHAIN (c3) = mapgrp;
9292 OMP_CLAUSE_CHAIN (c2) = c3;
9293
9294 struct_deref_set->add (decl);
9295 }
9296 goto do_add_decl;
9297 }
9298 /* An "attach/detach" operation on an update directive should
9299 behave as a GOMP_MAP_ALWAYS_POINTER. Beware that
9300 unlike attach or detach map kinds, GOMP_MAP_ALWAYS_POINTER
9301 depends on the previous mapping. */
9302 if (code == OACC_UPDATE
9303 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
9304 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_POINTER);
9305 if (DECL_P (decl)
9306 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
9307 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
9308 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
9309 && code != OACC_UPDATE
9310 && code != OMP_TARGET_UPDATE)
9311 {
9312 if (error_operand_p (decl))
9313 {
9314 remove = true;
9315 break;
9316 }
9317
9318 tree stype = TREE_TYPE (decl);
9319 if (TREE_CODE (stype) == REFERENCE_TYPE)
9320 stype = TREE_TYPE (stype);
9321 if (TYPE_SIZE_UNIT (stype) == NULL
9322 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
9323 {
9324 error_at (OMP_CLAUSE_LOCATION (c),
9325 "mapping field %qE of variable length "
9326 "structure", OMP_CLAUSE_DECL (c));
9327 remove = true;
9328 break;
9329 }
9330
9331 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER
9332 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
9333 {
9334 /* Error recovery. */
9335 if (prev_list_p == NULL)
9336 {
9337 remove = true;
9338 break;
9339 }
9340 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
9341 {
9342 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
9343 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
9344 {
9345 remove = true;
9346 break;
9347 }
9348 }
9349 }
9350
9351 poly_offset_int offset1;
9352 poly_int64 bitpos1;
9353 tree base_ref;
9354
9355 tree base
9356 = extract_base_bit_offset (OMP_CLAUSE_DECL (c), &base_ref,
9357 &bitpos1, &offset1);
9358
9359 gcc_assert (base == decl);
9360
9361 splay_tree_node n
9362 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
9363 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
9364 == GOMP_MAP_ALWAYS_POINTER);
9365 bool attach_detach = (OMP_CLAUSE_MAP_KIND (c)
9366 == GOMP_MAP_ATTACH_DETACH);
9367 bool attach = OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
9368 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH;
9369 bool has_attachments = false;
9370 /* For OpenACC, pointers in structs should trigger an
9371 attach action. */
9372 if (attach_detach
9373 && ((region_type & (ORT_ACC | ORT_TARGET | ORT_TARGET_DATA))
9374 || code == OMP_TARGET_ENTER_DATA
9375 || code == OMP_TARGET_EXIT_DATA))
9377 {
9378 /* Turn a GOMP_MAP_ATTACH_DETACH clause into a
9379 GOMP_MAP_ATTACH or GOMP_MAP_DETACH clause after we
9380 have detected a case that needs a GOMP_MAP_STRUCT
9381 mapping added. */
9382 gomp_map_kind k
9383 = ((code == OACC_EXIT_DATA || code == OMP_TARGET_EXIT_DATA)
9384 ? GOMP_MAP_DETACH : GOMP_MAP_ATTACH);
9385 OMP_CLAUSE_SET_MAP_KIND (c, k);
9386 has_attachments = true;
9387 }
9388 if (n == NULL || (n->value & GOVD_MAP) == 0)
9389 {
9390 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9391 OMP_CLAUSE_MAP);
9392 gomp_map_kind k = attach ? GOMP_MAP_FORCE_PRESENT
9393 : GOMP_MAP_STRUCT;
9394
9395 OMP_CLAUSE_SET_MAP_KIND (l, k);
9396 if (base_ref)
9397 OMP_CLAUSE_DECL (l) = unshare_expr (base_ref);
9398 else
9399 OMP_CLAUSE_DECL (l) = decl;
9400 OMP_CLAUSE_SIZE (l)
9401 = (!attach
9402 ? size_int (1)
9403 : DECL_P (OMP_CLAUSE_DECL (l))
9404 ? DECL_SIZE_UNIT (OMP_CLAUSE_DECL (l))
9405 : TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (l))));
9406 if (struct_map_to_clause == NULL)
9407 struct_map_to_clause = new hash_map<tree, tree>;
9408 struct_map_to_clause->put (decl, l);
9409 if (ptr || attach_detach)
9410 {
9411 insert_struct_comp_map (code, c, l, *prev_list_p,
9412 NULL);
9413 *prev_list_p = l;
9414 prev_list_p = NULL;
9415 }
9416 else
9417 {
9418 OMP_CLAUSE_CHAIN (l) = c;
9419 *list_p = l;
9420 list_p = &OMP_CLAUSE_CHAIN (l);
9421 }
9422 if (base_ref && code == OMP_TARGET)
9423 {
9424 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9425 OMP_CLAUSE_MAP);
9426 enum gomp_map_kind mkind
9427 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
9428 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
9429 OMP_CLAUSE_DECL (c2) = decl;
9430 OMP_CLAUSE_SIZE (c2) = size_zero_node;
9431 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
9432 OMP_CLAUSE_CHAIN (l) = c2;
9433 }
9434 flags = GOVD_MAP | GOVD_EXPLICIT;
9435 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
9436 || ptr
9437 || attach_detach)
9438 flags |= GOVD_SEEN;
9439 if (has_attachments)
9440 flags |= GOVD_MAP_HAS_ATTACHMENTS;
9441 goto do_add_decl;
9442 }
9443 else if (struct_map_to_clause)
9444 {
9445 tree *osc = struct_map_to_clause->get (decl);
9446 tree *sc = NULL, *scp = NULL;
9447 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
9448 || ptr
9449 || attach_detach)
9450 n->value |= GOVD_SEEN;
9451 sc = &OMP_CLAUSE_CHAIN (*osc);
9452 if (*sc != c
9453 && (OMP_CLAUSE_MAP_KIND (*sc)
9454 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9455 sc = &OMP_CLAUSE_CHAIN (*sc);
9456 /* Here "prev_list_p" is the end of the inserted
9457 alloc/release nodes after the struct node, OSC. */
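/* Illustrative shape of the clause list at this point, for a struct
   S whose members are mapped individually:
     map(struct: s [len: N]) -> map(to: s.a) -> map(to: s.c) -> ...
   kept sorted by member offset; the loop below searches for the
   position of C so that order is preserved.  */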
9458 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
9459 if ((ptr || attach_detach) && sc == prev_list_p)
9460 break;
9461 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9462 != COMPONENT_REF
9463 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9464 != INDIRECT_REF)
9465 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9466 != ARRAY_REF))
9467 break;
9468 else
9469 {
9470 tree sc_decl = OMP_CLAUSE_DECL (*sc);
9471 poly_offset_int offsetn;
9472 poly_int64 bitposn;
9473 tree base
9474 = extract_base_bit_offset (sc_decl, NULL,
9475 &bitposn, &offsetn);
9476 if (base != decl)
9477 break;
9478 if (scp)
9479 continue;
9480 if ((region_type & ORT_ACC) != 0)
9481 {
9482 /* This duplicate checking code is currently only
9483 enabled for OpenACC. */
9484 tree d1 = OMP_CLAUSE_DECL (*sc);
9485 tree d2 = OMP_CLAUSE_DECL (c);
9486 while (TREE_CODE (d1) == ARRAY_REF)
9487 d1 = TREE_OPERAND (d1, 0);
9488 while (TREE_CODE (d2) == ARRAY_REF)
9489 d2 = TREE_OPERAND (d2, 0);
9490 if (TREE_CODE (d1) == INDIRECT_REF)
9491 d1 = TREE_OPERAND (d1, 0);
9492 if (TREE_CODE (d2) == INDIRECT_REF)
9493 d2 = TREE_OPERAND (d2, 0);
9494 while (TREE_CODE (d1) == COMPONENT_REF)
9495 if (TREE_CODE (d2) == COMPONENT_REF
9496 && TREE_OPERAND (d1, 1)
9497 == TREE_OPERAND (d2, 1))
9498 {
9499 d1 = TREE_OPERAND (d1, 0);
9500 d2 = TREE_OPERAND (d2, 0);
9501 }
9502 else
9503 break;
9504 if (d1 == d2)
9505 {
9506 error_at (OMP_CLAUSE_LOCATION (c),
9507 "%qE appears more than once in map "
9508 "clauses", OMP_CLAUSE_DECL (c));
9509 remove = true;
9510 break;
9511 }
9512 }
9513 if (maybe_lt (offset1, offsetn)
9514 || (known_eq (offset1, offsetn)
9515 && maybe_lt (bitpos1, bitposn)))
9516 {
9517 if (ptr || attach_detach)
9518 scp = sc;
9519 else
9520 break;
9521 }
9522 }
9523 if (remove)
9524 break;
9525 if (!attach)
9526 OMP_CLAUSE_SIZE (*osc)
9527 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
9528 size_one_node);
9529 if (ptr || attach_detach)
9530 {
9531 tree cl = insert_struct_comp_map (code, c, NULL,
9532 *prev_list_p, scp);
9533 if (sc == prev_list_p)
9534 {
9535 *sc = cl;
9536 prev_list_p = NULL;
9537 }
9538 else
9539 {
9540 *prev_list_p = OMP_CLAUSE_CHAIN (c);
9541 list_p = prev_list_p;
9542 prev_list_p = NULL;
9543 OMP_CLAUSE_CHAIN (c) = *sc;
9544 *sc = cl;
9545 continue;
9546 }
9547 }
9548 else if (*sc != c)
9549 {
9550 *list_p = OMP_CLAUSE_CHAIN (c);
9551 OMP_CLAUSE_CHAIN (c) = *sc;
9552 *sc = c;
9553 continue;
9554 }
9555 }
9556 }
9557 else if ((code == OACC_ENTER_DATA
9558 || code == OACC_EXIT_DATA
9559 || code == OACC_DATA
9560 || code == OACC_PARALLEL
9561 || code == OACC_KERNELS
9562 || code == OACC_SERIAL)
9563 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
9564 {
9565 gomp_map_kind k = (code == OACC_EXIT_DATA
9566 ? GOMP_MAP_DETACH : GOMP_MAP_ATTACH);
9567 OMP_CLAUSE_SET_MAP_KIND (c, k);
9568 }
9569
9570 if (code == OMP_TARGET && OMP_CLAUSE_MAP_IN_REDUCTION (c))
9571 {
9572 /* Don't gimplify *pd fully at this point, as the base
9573 will need to be adjusted during omp lowering. */
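/* Instead, the loop below peels the outer handled components onto
   EXPR_STACK and gimplifies only the variable ARRAY_REF bounds and
   element sizes (operands 2 and 3) and COMPONENT_REF offsets
   (operand 2), leaving the base expression itself alone.  */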
9574 auto_vec<tree, 10> expr_stack;
9575 tree *p = pd;
9576 while (handled_component_p (*p)
9577 || TREE_CODE (*p) == INDIRECT_REF
9578 || TREE_CODE (*p) == ADDR_EXPR
9579 || TREE_CODE (*p) == MEM_REF
9580 || TREE_CODE (*p) == NON_LVALUE_EXPR)
9581 {
9582 expr_stack.safe_push (*p);
9583 p = &TREE_OPERAND (*p, 0);
9584 }
9585 for (int i = expr_stack.length () - 1; i >= 0; i--)
9586 {
9587 tree t = expr_stack[i];
9588 if (TREE_CODE (t) == ARRAY_REF
9589 || TREE_CODE (t) == ARRAY_RANGE_REF)
9590 {
9591 if (TREE_OPERAND (t, 2) == NULL_TREE)
9592 {
9593 tree low = unshare_expr (array_ref_low_bound (t));
9594 if (!is_gimple_min_invariant (low))
9595 {
9596 TREE_OPERAND (t, 2) = low;
9597 if (gimplify_expr (&TREE_OPERAND (t, 2),
9598 pre_p, NULL,
9599 is_gimple_reg,
9600 fb_rvalue) == GS_ERROR)
9601 remove = true;
9602 }
9603 }
9604 else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
9605 NULL, is_gimple_reg,
9606 fb_rvalue) == GS_ERROR)
9607 remove = true;
9608 if (TREE_OPERAND (t, 3) == NULL_TREE)
9609 {
9610 tree elmt_size = array_ref_element_size (t);
9611 if (!is_gimple_min_invariant (elmt_size))
9612 {
9613 elmt_size = unshare_expr (elmt_size);
9614 tree elmt_type
9615 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t,
9616 0)));
9617 tree factor
9618 = size_int (TYPE_ALIGN_UNIT (elmt_type));
9619 elmt_size
9620 = size_binop (EXACT_DIV_EXPR, elmt_size,
9621 factor);
9622 TREE_OPERAND (t, 3) = elmt_size;
9623 if (gimplify_expr (&TREE_OPERAND (t, 3),
9624 pre_p, NULL,
9625 is_gimple_reg,
9626 fb_rvalue) == GS_ERROR)
9627 remove = true;
9628 }
9629 }
9630 else if (gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
9631 NULL, is_gimple_reg,
9632 fb_rvalue) == GS_ERROR)
9633 remove = true;
9634 }
9635 else if (TREE_CODE (t) == COMPONENT_REF)
9636 {
9637 if (TREE_OPERAND (t, 2) == NULL_TREE)
9638 {
9639 tree offset = component_ref_field_offset (t);
9640 if (!is_gimple_min_invariant (offset))
9641 {
9642 offset = unshare_expr (offset);
9643 tree field = TREE_OPERAND (t, 1);
9644 tree factor
9645 = size_int (DECL_OFFSET_ALIGN (field)
9646 / BITS_PER_UNIT);
9647 offset = size_binop (EXACT_DIV_EXPR, offset,
9648 factor);
9649 TREE_OPERAND (t, 2) = offset;
9650 if (gimplify_expr (&TREE_OPERAND (t, 2),
9651 pre_p, NULL,
9652 is_gimple_reg,
9653 fb_rvalue) == GS_ERROR)
9654 remove = true;
9655 }
9656 }
9657 else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
9658 NULL, is_gimple_reg,
9659 fb_rvalue) == GS_ERROR)
9660 remove = true;
9661 }
9662 }
9663 for (; expr_stack.length () > 0; )
9664 {
9665 tree t = expr_stack.pop ();
9666
9667 if (TREE_CODE (t) == ARRAY_REF
9668 || TREE_CODE (t) == ARRAY_RANGE_REF)
9669 {
9670 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1))
9671 && gimplify_expr (&TREE_OPERAND (t, 1), pre_p,
9672 NULL, is_gimple_val,
9673 fb_rvalue) == GS_ERROR)
9674 remove = true;
9675 }
9676 }
9677 }
9678 else if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue,
9679 fb_lvalue) == GS_ERROR)
9680 {
9681 remove = true;
9682 break;
9683 }
9684
9685 if (!remove
9686 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
9687 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH_DETACH
9688 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
9689 && OMP_CLAUSE_CHAIN (c)
9690 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
9691 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
9692 == GOMP_MAP_ALWAYS_POINTER)
9693 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
9694 == GOMP_MAP_ATTACH_DETACH)
9695 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
9696 == GOMP_MAP_TO_PSET)))
9697 prev_list_p = list_p;
9698
9699 break;
9700 }
9701 else
9702 {
9703 /* DECL_P (decl) == true */
9704 tree *sc;
9705 if (struct_map_to_clause
9706 && (sc = struct_map_to_clause->get (decl)) != NULL
9707 && OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_STRUCT
9708 && decl == OMP_CLAUSE_DECL (*sc))
9709 {
9710 /* We have found a map of the whole structure after a
9711 leading GOMP_MAP_STRUCT has been created, so turn the
9712 leading clause into a map of the whole structure
9713 variable, and remove the current one.
9714 TODO: we should also be able to remove some of the
9715 following structure element maps if they are of
9716 compatible TO/FROM/ALLOC type. */
9717 OMP_CLAUSE_SET_MAP_KIND (*sc, OMP_CLAUSE_MAP_KIND (c));
9718 OMP_CLAUSE_SIZE (*sc) = unshare_expr (OMP_CLAUSE_SIZE (c));
9719 remove = true;
9720 break;
9721 }
9722 }
9723 flags = GOVD_MAP | GOVD_EXPLICIT;
9724 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
9725 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
9726 flags |= GOVD_MAP_ALWAYS_TO;
9727
9728 if ((code == OMP_TARGET
9729 || code == OMP_TARGET_DATA
9730 || code == OMP_TARGET_ENTER_DATA
9731 || code == OMP_TARGET_EXIT_DATA)
9732 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
9733 {
9734 for (struct gimplify_omp_ctx *octx = outer_ctx; octx;
9735 octx = octx->outer_context)
9736 {
9737 splay_tree_node n
9738 = splay_tree_lookup (octx->variables,
9739 (splay_tree_key) OMP_CLAUSE_DECL (c));
9740 /* If this is contained in an outer OpenMP region as a
9741 firstprivate value, remove the attach/detach. */
9742 if (n && (n->value & GOVD_FIRSTPRIVATE))
9743 {
9744 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FIRSTPRIVATE_POINTER);
9745 goto do_add;
9746 }
9747 }
9748
9749 enum gomp_map_kind map_kind = (code == OMP_TARGET_EXIT_DATA
9750 ? GOMP_MAP_DETACH
9751 : GOMP_MAP_ATTACH);
9752 OMP_CLAUSE_SET_MAP_KIND (c, map_kind);
9753 }
9754
9755 goto do_add;
9756
9757 case OMP_CLAUSE_AFFINITY:
9758 gimplify_omp_affinity (list_p, pre_p);
9759 remove = true;
9760 break;
9761 case OMP_CLAUSE_DEPEND:
9762 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
9763 {
9764 tree deps = OMP_CLAUSE_DECL (c);
9765 while (deps && TREE_CODE (deps) == TREE_LIST)
9766 {
9767 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
9768 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
9769 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
9770 pre_p, NULL, is_gimple_val, fb_rvalue);
9771 deps = TREE_CHAIN (deps);
9772 }
9773 break;
9774 }
9775 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
9776 break;
9777 if (handled_depend_iterators == -1)
9778 handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
9779 if (handled_depend_iterators)
9780 {
9781 if (handled_depend_iterators == 2)
9782 remove = true;
9783 break;
9784 }
9785 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
9786 {
9787 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
9788 NULL, is_gimple_val, fb_rvalue);
9789 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
9790 }
9791 if (error_operand_p (OMP_CLAUSE_DECL (c)))
9792 {
9793 remove = true;
9794 break;
9795 }
9796 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
9797 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
9798 is_gimple_val, fb_rvalue) == GS_ERROR)
9799 {
9800 remove = true;
9801 break;
9802 }
9803 if (code == OMP_TASK)
9804 ctx->has_depend = true;
9805 break;
9806
9807 case OMP_CLAUSE_TO:
9808 case OMP_CLAUSE_FROM:
9809 case OMP_CLAUSE__CACHE_:
9810 decl = OMP_CLAUSE_DECL (c);
9811 if (error_operand_p (decl))
9812 {
9813 remove = true;
9814 break;
9815 }
9816 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9817 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
9818 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
9819 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
9820 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
9821 {
9822 remove = true;
9823 break;
9824 }
9825 if (!DECL_P (decl))
9826 {
9827 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
9828 NULL, is_gimple_lvalue, fb_lvalue)
9829 == GS_ERROR)
9830 {
9831 remove = true;
9832 break;
9833 }
9834 break;
9835 }
9836 goto do_notice;
9837
9838 case OMP_CLAUSE_USE_DEVICE_PTR:
9839 case OMP_CLAUSE_USE_DEVICE_ADDR:
9840 flags = GOVD_EXPLICIT;
9841 goto do_add;
9842
9843 case OMP_CLAUSE_IS_DEVICE_PTR:
9844 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
9845 goto do_add;
9846
9847 do_add:
9848 decl = OMP_CLAUSE_DECL (c);
9849 do_add_decl:
9850 if (error_operand_p (decl))
9851 {
9852 remove = true;
9853 break;
9854 }
9855 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
9856 {
9857 tree t = omp_member_access_dummy_var (decl);
9858 if (t)
9859 {
9860 tree v = DECL_VALUE_EXPR (decl);
9861 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
9862 if (outer_ctx)
9863 omp_notice_variable (outer_ctx, t, true);
9864 }
9865 }
9866 if (code == OACC_DATA
9867 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9868 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
9869 flags |= GOVD_MAP_0LEN_ARRAY;
9870 omp_add_variable (ctx, decl, flags);
9871 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9872 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
9873 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9874 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9875 {
9876 struct gimplify_omp_ctx *pctx
9877 = code == OMP_TARGET ? outer_ctx : ctx;
9878 if (pctx)
9879 omp_add_variable (pctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
9880 GOVD_LOCAL | GOVD_SEEN);
9881 if (pctx
9882 && OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
9883 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
9884 find_decl_expr,
9885 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
9886 NULL) == NULL_TREE)
9887 omp_add_variable (pctx,
9888 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
9889 GOVD_LOCAL | GOVD_SEEN);
9890 gimplify_omp_ctxp = pctx;
9891 push_gimplify_context ();
9892
9893 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9894 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9895
9896 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
9897 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
9898 pop_gimplify_context
9899 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
9900 push_gimplify_context ();
9901 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
9902 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9903 pop_gimplify_context
9904 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
9905 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
9906 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
9907
9908 gimplify_omp_ctxp = outer_ctx;
9909 }
9910 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
9911 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
9912 {
9913 gimplify_omp_ctxp = ctx;
9914 push_gimplify_context ();
9915 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
9916 {
9917 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
9918 NULL, NULL);
9919 TREE_SIDE_EFFECTS (bind) = 1;
9920 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
9921 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
9922 }
9923 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
9924 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
9925 pop_gimplify_context
9926 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
9927 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
9928
9929 gimplify_omp_ctxp = outer_ctx;
9930 }
9931 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9932 && OMP_CLAUSE_LINEAR_STMT (c))
9933 {
9934 gimplify_omp_ctxp = ctx;
9935 push_gimplify_context ();
9936 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
9937 {
9938 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
9939 NULL, NULL);
9940 TREE_SIDE_EFFECTS (bind) = 1;
9941 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
9942 OMP_CLAUSE_LINEAR_STMT (c) = bind;
9943 }
9944 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
9945 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
9946 pop_gimplify_context
9947 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
9948 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
9949
9950 gimplify_omp_ctxp = outer_ctx;
9951 }
9952 if (notice_outer)
9953 goto do_notice;
9954 break;
9955
9956 case OMP_CLAUSE_COPYIN:
9957 case OMP_CLAUSE_COPYPRIVATE:
9958 decl = OMP_CLAUSE_DECL (c);
9959 if (error_operand_p (decl))
9960 {
9961 remove = true;
9962 break;
9963 }
9964 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
9965 && !remove
9966 && !omp_check_private (ctx, decl, true))
9967 {
9968 remove = true;
9969 if (is_global_var (decl))
9970 {
9971 if (DECL_THREAD_LOCAL_P (decl))
9972 remove = false;
9973 else if (DECL_HAS_VALUE_EXPR_P (decl))
9974 {
9975 tree value = get_base_address (DECL_VALUE_EXPR (decl));
9976
9977 if (value
9978 && DECL_P (value)
9979 && DECL_THREAD_LOCAL_P (value))
9980 remove = false;
9981 }
9982 }
9983 if (remove)
9984 error_at (OMP_CLAUSE_LOCATION (c),
9985 "copyprivate variable %qE is not threadprivate"
9986 " or private in outer context", DECL_NAME (decl));
9987 }
9988 do_notice:
9989 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9990 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
9991 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
9992 && outer_ctx
9993 && ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
9994 || (region_type == ORT_WORKSHARE
9995 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9996 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
9997 || code == OMP_LOOP)))
9998 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
9999 || (code == OMP_LOOP
10000 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10001 && ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
10002 == ORT_COMBINED_TEAMS))))
10003 {
10004 splay_tree_node on
10005 = splay_tree_lookup (outer_ctx->variables,
10006 (splay_tree_key)decl);
10007 if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
10008 {
10009 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10010 && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
10011 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
10012 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
10013 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
10014 == POINTER_TYPE))))
10015 omp_firstprivatize_variable (outer_ctx, decl);
10016 else
10017 {
10018 omp_add_variable (outer_ctx, decl,
10019 GOVD_SEEN | GOVD_SHARED);
10020 if (outer_ctx->outer_context)
10021 omp_notice_variable (outer_ctx->outer_context, decl,
10022 true);
10023 }
10024 }
10025 }
10026 if (outer_ctx)
10027 omp_notice_variable (outer_ctx, decl, true);
10028 if (check_non_private
10029 && region_type == ORT_WORKSHARE
10030 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
10031 || decl == OMP_CLAUSE_DECL (c)
10032 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
10033 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
10034 == ADDR_EXPR
10035 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
10036 == POINTER_PLUS_EXPR
10037 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
10038 (OMP_CLAUSE_DECL (c), 0), 0))
10039 == ADDR_EXPR)))))
10040 && omp_check_private (ctx, decl, false))
10041 {
10042 error ("%s variable %qE is private in outer context",
10043 check_non_private, DECL_NAME (decl));
10044 remove = true;
10045 }
10046 break;
10047
10048 case OMP_CLAUSE_DETACH:
10049 flags = GOVD_FIRSTPRIVATE | GOVD_SEEN;
10050 goto do_add;
10051
10052 case OMP_CLAUSE_IF:
10053 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
10054 && OMP_CLAUSE_IF_MODIFIER (c) != code)
10055 {
10056 const char *p[2];
10057 for (int i = 0; i < 2; i++)
10058 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
10059 {
10060 case VOID_CST: p[i] = "cancel"; break;
10061 case OMP_PARALLEL: p[i] = "parallel"; break;
10062 case OMP_SIMD: p[i] = "simd"; break;
10063 case OMP_TASK: p[i] = "task"; break;
10064 case OMP_TASKLOOP: p[i] = "taskloop"; break;
10065 case OMP_TARGET_DATA: p[i] = "target data"; break;
10066 case OMP_TARGET: p[i] = "target"; break;
10067 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
10068 case OMP_TARGET_ENTER_DATA:
10069 p[i] = "target enter data"; break;
10070 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
10071 default: gcc_unreachable ();
10072 }
10073 error_at (OMP_CLAUSE_LOCATION (c),
10074 "expected %qs %<if%> clause modifier rather than %qs",
10075 p[0], p[1]);
10076 remove = true;
10077 }
10078 /* Fall through. */
10079
10080 case OMP_CLAUSE_FINAL:
10081 OMP_CLAUSE_OPERAND (c, 0)
10082 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
10083 /* Fall through. */
10084
10085 case OMP_CLAUSE_SCHEDULE:
10086 case OMP_CLAUSE_NUM_THREADS:
10087 case OMP_CLAUSE_NUM_TEAMS:
10088 case OMP_CLAUSE_THREAD_LIMIT:
10089 case OMP_CLAUSE_DIST_SCHEDULE:
10090 case OMP_CLAUSE_DEVICE:
10091 case OMP_CLAUSE_PRIORITY:
10092 case OMP_CLAUSE_GRAINSIZE:
10093 case OMP_CLAUSE_NUM_TASKS:
10094 case OMP_CLAUSE_HINT:
10095 case OMP_CLAUSE_ASYNC:
10096 case OMP_CLAUSE_WAIT:
10097 case OMP_CLAUSE_NUM_GANGS:
10098 case OMP_CLAUSE_NUM_WORKERS:
10099 case OMP_CLAUSE_VECTOR_LENGTH:
10100 case OMP_CLAUSE_WORKER:
10101 case OMP_CLAUSE_VECTOR:
10102 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
10103 is_gimple_val, fb_rvalue) == GS_ERROR)
10104 remove = true;
10105 break;
10106
10107 case OMP_CLAUSE_GANG:
10108 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
10109 is_gimple_val, fb_rvalue) == GS_ERROR)
10110 remove = true;
10111 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
10112 is_gimple_val, fb_rvalue) == GS_ERROR)
10113 remove = true;
10114 break;
10115
10116 case OMP_CLAUSE_NOWAIT:
10117 nowait = 1;
10118 break;
10119
10120 case OMP_CLAUSE_ORDERED:
10121 case OMP_CLAUSE_UNTIED:
10122 case OMP_CLAUSE_COLLAPSE:
10123 case OMP_CLAUSE_TILE:
10124 case OMP_CLAUSE_AUTO:
10125 case OMP_CLAUSE_SEQ:
10126 case OMP_CLAUSE_INDEPENDENT:
10127 case OMP_CLAUSE_MERGEABLE:
10128 case OMP_CLAUSE_PROC_BIND:
10129 case OMP_CLAUSE_SAFELEN:
10130 case OMP_CLAUSE_SIMDLEN:
10131 case OMP_CLAUSE_NOGROUP:
10132 case OMP_CLAUSE_THREADS:
10133 case OMP_CLAUSE_SIMD:
10134 case OMP_CLAUSE_BIND:
10135 case OMP_CLAUSE_IF_PRESENT:
10136 case OMP_CLAUSE_FINALIZE:
10137 break;
10138
10139 case OMP_CLAUSE_ORDER:
10140 ctx->order_concurrent = true;
10141 break;
10142
10143 case OMP_CLAUSE_DEFAULTMAP:
10144 enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
10145 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
10146 {
10147 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
10148 gdmkmin = GDMK_SCALAR;
10149 gdmkmax = GDMK_POINTER;
10150 break;
10151 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
10152 gdmkmin = GDMK_SCALAR;
10153 gdmkmax = GDMK_SCALAR_TARGET;
10154 break;
10155 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
10156 gdmkmin = gdmkmax = GDMK_AGGREGATE;
10157 break;
10158 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
10159 gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
10160 break;
10161 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
10162 gdmkmin = gdmkmax = GDMK_POINTER;
10163 break;
10164 default:
10165 gcc_unreachable ();
10166 }
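/* E.g. "defaultmap (firstprivate: aggregate)" selects the single
   GDMK_AGGREGATE slot above, and the loop below then records
   GOVD_FIRSTPRIVATE for it in ctx->defaultmap.  */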
10167 for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
10168 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
10169 {
10170 case OMP_CLAUSE_DEFAULTMAP_ALLOC:
10171 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
10172 break;
10173 case OMP_CLAUSE_DEFAULTMAP_TO:
10174 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
10175 break;
10176 case OMP_CLAUSE_DEFAULTMAP_FROM:
10177 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
10178 break;
10179 case OMP_CLAUSE_DEFAULTMAP_TOFROM:
10180 ctx->defaultmap[gdmk] = GOVD_MAP;
10181 break;
10182 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
10183 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
10184 break;
10185 case OMP_CLAUSE_DEFAULTMAP_NONE:
10186 ctx->defaultmap[gdmk] = 0;
10187 break;
10188 case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
10189 switch (gdmk)
10190 {
10191 case GDMK_SCALAR:
10192 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
10193 break;
10194 case GDMK_SCALAR_TARGET:
10195 ctx->defaultmap[gdmk] = (lang_GNU_Fortran ()
10196 ? GOVD_MAP : GOVD_FIRSTPRIVATE);
10197 break;
10198 case GDMK_AGGREGATE:
10199 case GDMK_ALLOCATABLE:
10200 ctx->defaultmap[gdmk] = GOVD_MAP;
10201 break;
10202 case GDMK_POINTER:
10203 ctx->defaultmap[gdmk] = GOVD_MAP;
10204 if (!lang_GNU_Fortran ())
10205 ctx->defaultmap[gdmk] |= GOVD_MAP_0LEN_ARRAY;
10206 break;
10207 default:
10208 gcc_unreachable ();
10209 }
10210 break;
10211 default:
10212 gcc_unreachable ();
10213 }
10214 break;
10215
10216 case OMP_CLAUSE_ALIGNED:
10217 decl = OMP_CLAUSE_DECL (c);
10218 if (error_operand_p (decl))
10219 {
10220 remove = true;
10221 break;
10222 }
10223 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
10224 is_gimple_val, fb_rvalue) == GS_ERROR)
10225 {
10226 remove = true;
10227 break;
10228 }
10229 if (!is_global_var (decl)
10230 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
10231 omp_add_variable (ctx, decl, GOVD_ALIGNED);
10232 break;
10233
10234 case OMP_CLAUSE_NONTEMPORAL:
10235 decl = OMP_CLAUSE_DECL (c);
10236 if (error_operand_p (decl))
10237 {
10238 remove = true;
10239 break;
10240 }
10241 omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
10242 break;
10243
10244 case OMP_CLAUSE_ALLOCATE:
10245 decl = OMP_CLAUSE_DECL (c);
10246 if (error_operand_p (decl))
10247 {
10248 remove = true;
10249 break;
10250 }
10251 if (gimplify_expr (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (c), pre_p, NULL,
10252 is_gimple_val, fb_rvalue) == GS_ERROR)
10253 {
10254 remove = true;
10255 break;
10256 }
10257 else if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
10258 || (TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
10259 == INTEGER_CST))
10260 ;
10261 else if (code == OMP_TASKLOOP
10262 || !DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
10263 OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
10264 = get_initialized_tmp_var (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
10265 pre_p, NULL, false);
10266 break;
10267
10268 case OMP_CLAUSE_DEFAULT:
10269 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
10270 break;
10271
10272 case OMP_CLAUSE_INCLUSIVE:
10273 case OMP_CLAUSE_EXCLUSIVE:
10274 decl = OMP_CLAUSE_DECL (c);
10275 {
10276 splay_tree_node n = splay_tree_lookup (outer_ctx->variables,
10277 (splay_tree_key) decl);
10278 if (n == NULL || (n->value & GOVD_REDUCTION) == 0)
10279 {
10280 error_at (OMP_CLAUSE_LOCATION (c),
10281 "%qD specified in %qs clause but not in %<inscan%> "
10282 "%<reduction%> clause on the containing construct",
10283 decl, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
10284 remove = true;
10285 }
10286 else
10287 {
10288 n->value |= GOVD_REDUCTION_INSCAN;
10289 if (outer_ctx->region_type == ORT_SIMD
10290 && outer_ctx->outer_context
10291 && outer_ctx->outer_context->region_type == ORT_WORKSHARE)
10292 {
10293 n = splay_tree_lookup (outer_ctx->outer_context->variables,
10294 (splay_tree_key) decl);
10295 if (n && (n->value & GOVD_REDUCTION) != 0)
10296 n->value |= GOVD_REDUCTION_INSCAN;
10297 }
10298 }
10299 }
10300 break;
10301
10302 default:
10303 gcc_unreachable ();
10304 }
10305
10306 if (code == OACC_DATA
10307 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
10308 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
10309 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
10310 remove = true;
10311 if (remove)
10312 *list_p = OMP_CLAUSE_CHAIN (c);
10313 else
10314 list_p = &OMP_CLAUSE_CHAIN (c);
10315 }
10316
10317 ctx->clauses = *orig_list_p;
10318 gimplify_omp_ctxp = ctx;
10319 if (struct_map_to_clause)
10320 delete struct_map_to_clause;
10321 if (struct_deref_set)
10322 delete struct_deref_set;
10323 }
10324
10325 /* Return true if DECL is a candidate for shared to firstprivate
10326 optimization. We only consider non-addressable scalars that are
10327 not too big and not references. */
10328
10329 static bool
10330 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
10331 {
10332 if (TREE_ADDRESSABLE (decl))
10333 return false;
10334 tree type = TREE_TYPE (decl);
10335 if (!is_gimple_reg_type (type)
10336 || TREE_CODE (type) == REFERENCE_TYPE
10337 || TREE_ADDRESSABLE (type))
10338 return false;
10339 /* Don't optimize too large decls, as each thread/task will have
10340 its own. */
10341 HOST_WIDE_INT len = int_size_in_bytes (type);
10342 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
10343 return false;
10344 if (lang_hooks.decls.omp_privatize_by_reference (decl))
10345 return false;
10346 return true;
10347 }
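/* E.g. a plain non-addressable "int x;" qualifies, while an
   addressable scalar, an aggregate or a decl privatized by reference
   does not; the size cutoff above amounts to four pointers' worth of
   bytes.  */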
10348
10349 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
10350 For a DECL satisfying omp_shared_to_firstprivate_optimizable_decl_p,
10351 mark it as GOVD_WRITTEN in outer contexts where it is GOVD_SHARED. */
10352
10353 static void
10354 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
10355 {
10356 for (; ctx; ctx = ctx->outer_context)
10357 {
10358 splay_tree_node n = splay_tree_lookup (ctx->variables,
10359 (splay_tree_key) decl);
10360 if (n == NULL)
10361 continue;
10362 else if (n->value & GOVD_SHARED)
10363 {
10364 n->value |= GOVD_WRITTEN;
10365 return;
10366 }
10367 else if (n->value & GOVD_DATA_SHARE_CLASS)
10368 return;
10369 }
10370 }
10371
10372 /* Helper callback for walk_gimple_seq to discover possible stores
10373 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
10374 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
10375 for those. */
10376
10377 static tree
10378 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
10379 {
10380 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
10381
10382 *walk_subtrees = 0;
10383 if (!wi->is_lhs)
10384 return NULL_TREE;
10385
10386 tree op = *tp;
10387 do
10388 {
10389 if (handled_component_p (op))
10390 op = TREE_OPERAND (op, 0);
10391 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
10392 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
10393 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
10394 else
10395 break;
10396 }
10397 while (1);
10398 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
10399 return NULL_TREE;
10400
10401 omp_mark_stores (gimplify_omp_ctxp, op);
10402 return NULL_TREE;
10403 }
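/* E.g. for a store "x = ..." in the walked body OP is X itself,
   while for component stores the loop above peels the handled
   components and MEM_REF-of-ADDR_EXPR wrappers back to the base
   DECL before the candidate check and marking.  */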
10404
10405 /* Helper callback for walk_gimple_seq to discover possible stores
10406 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
10407 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
10408 for those. */
10409
10410 static tree
10411 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
10412 bool *handled_ops_p,
10413 struct walk_stmt_info *wi)
10414 {
10415 gimple *stmt = gsi_stmt (*gsi_p);
10416 switch (gimple_code (stmt))
10417 {
10418 /* Don't recurse on OpenMP constructs for which
10419 gimplify_adjust_omp_clauses already handled the bodies,
10420 except handle gimple_omp_for_pre_body. */
10421 case GIMPLE_OMP_FOR:
10422 *handled_ops_p = true;
10423 if (gimple_omp_for_pre_body (stmt))
10424 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
10425 omp_find_stores_stmt, omp_find_stores_op, wi);
10426 break;
10427 case GIMPLE_OMP_PARALLEL:
10428 case GIMPLE_OMP_TASK:
10429 case GIMPLE_OMP_SECTIONS:
10430 case GIMPLE_OMP_SINGLE:
10431 case GIMPLE_OMP_TARGET:
10432 case GIMPLE_OMP_TEAMS:
10433 case GIMPLE_OMP_CRITICAL:
10434 *handled_ops_p = true;
10435 break;
10436 default:
10437 break;
10438 }
10439 return NULL_TREE;
10440 }
10441
10442 struct gimplify_adjust_omp_clauses_data
10443 {
10444 tree *list_p;
10445 gimple_seq *pre_p;
10446 };
10447
10448 /* For all variables that were not actually used within the context,
10449 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
10450
10451 static int
10452 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
10453 {
10454 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
10455 gimple_seq *pre_p
10456 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
10457 tree decl = (tree) n->key;
10458 unsigned flags = n->value;
10459 enum omp_clause_code code;
10460 tree clause;
10461 bool private_debug;
10462
10463 if (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
10464 && (flags & GOVD_LASTPRIVATE_CONDITIONAL) != 0)
10465 flags = GOVD_SHARED | GOVD_SEEN | GOVD_WRITTEN;
10466 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
10467 return 0;
10468 if ((flags & GOVD_SEEN) == 0)
10469 return 0;
10470 if ((flags & GOVD_MAP_HAS_ATTACHMENTS) != 0)
10471 return 0;
10472 if (flags & GOVD_DEBUG_PRIVATE)
10473 {
10474 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
10475 private_debug = true;
10476 }
10477 else if (flags & GOVD_MAP)
10478 private_debug = false;
10479 else
10480 private_debug
10481 = lang_hooks.decls.omp_private_debug_clause (decl,
10482 !!(flags & GOVD_SHARED));
10483 if (private_debug)
10484 code = OMP_CLAUSE_PRIVATE;
10485 else if (flags & GOVD_MAP)
10486 {
10487 code = OMP_CLAUSE_MAP;
10488 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
10489 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
10490 {
10491 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
10492 return 0;
10493 }
10494 if (VAR_P (decl)
10495 && DECL_IN_CONSTANT_POOL (decl)
10496 && !lookup_attribute ("omp declare target",
10497 DECL_ATTRIBUTES (decl)))
10498 {
10499 tree id = get_identifier ("omp declare target");
10500 DECL_ATTRIBUTES (decl)
10501 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
10502 varpool_node *node = varpool_node::get (decl);
10503 if (node)
10504 {
10505 node->offloadable = 1;
10506 if (ENABLE_OFFLOADING)
10507 g->have_offload = true;
10508 }
10509 }
10510 }
10511 else if (flags & GOVD_SHARED)
10512 {
10513 if (is_global_var (decl))
10514 {
10515 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
10516 while (ctx != NULL)
10517 {
10518 splay_tree_node on
10519 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10520 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
10521 | GOVD_PRIVATE | GOVD_REDUCTION
10522 | GOVD_LINEAR | GOVD_MAP)) != 0)
10523 break;
10524 ctx = ctx->outer_context;
10525 }
10526 if (ctx == NULL)
10527 return 0;
10528 }
10529 code = OMP_CLAUSE_SHARED;
10530 /* Don't optimize shared into firstprivate for read-only vars
10531 on tasks with a depend clause; we shouldn't try to copy them
10532 until the dependencies are satisfied. */
10533 if (gimplify_omp_ctxp->has_depend)
10534 flags |= GOVD_WRITTEN;
10535 }
10536 else if (flags & GOVD_PRIVATE)
10537 code = OMP_CLAUSE_PRIVATE;
10538 else if (flags & GOVD_FIRSTPRIVATE)
10539 {
10540 code = OMP_CLAUSE_FIRSTPRIVATE;
10541 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
10542 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
10543 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
10544 {
10545 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
10546 "%<target%> construct", decl);
10547 return 0;
10548 }
10549 }
10550 else if (flags & GOVD_LASTPRIVATE)
10551 code = OMP_CLAUSE_LASTPRIVATE;
10552 else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
10553 return 0;
10554 else if (flags & GOVD_CONDTEMP)
10555 {
10556 code = OMP_CLAUSE__CONDTEMP_;
10557 gimple_add_tmp_var (decl);
10558 }
10559 else
10560 gcc_unreachable ();
10561
10562 if (((flags & GOVD_LASTPRIVATE)
10563 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
10564 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10565 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10566
10567 tree chain = *list_p;
10568 clause = build_omp_clause (input_location, code);
10569 OMP_CLAUSE_DECL (clause) = decl;
10570 OMP_CLAUSE_CHAIN (clause) = chain;
10571 if (private_debug)
10572 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
10573 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
10574 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
10575 else if (code == OMP_CLAUSE_SHARED
10576 && (flags & GOVD_WRITTEN) == 0
10577 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10578 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
10579 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
10580 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
10581 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
10582 {
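/* Zero-length array section: pair a maybe-zero-length GOMP_MAP_ALLOC
   of the dereferenced pointer with a GOMP_MAP_FIRSTPRIVATE_POINTER
   of the pointer itself, both built below.  */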
10583 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
10584 OMP_CLAUSE_DECL (nc) = decl;
10585 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
10586 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
10587 OMP_CLAUSE_DECL (clause)
10588 = build_simple_mem_ref_loc (input_location, decl);
10589 OMP_CLAUSE_DECL (clause)
10590 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
10591 build_int_cst (build_pointer_type (char_type_node), 0));
10592 OMP_CLAUSE_SIZE (clause) = size_zero_node;
10593 OMP_CLAUSE_SIZE (nc) = size_zero_node;
10594 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
10595 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
10596 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
10597 OMP_CLAUSE_CHAIN (nc) = chain;
10598 OMP_CLAUSE_CHAIN (clause) = nc;
10599 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10600 gimplify_omp_ctxp = ctx->outer_context;
10601 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
10602 pre_p, NULL, is_gimple_val, fb_rvalue);
10603 gimplify_omp_ctxp = ctx;
10604 }
10605 else if (code == OMP_CLAUSE_MAP)
10606 {
10607 int kind;
10608 /* Not all combinations of these GOVD_MAP flags are actually valid. */
10609 switch (flags & (GOVD_MAP_TO_ONLY
10610 | GOVD_MAP_FORCE
10611 | GOVD_MAP_FORCE_PRESENT
10612 | GOVD_MAP_ALLOC_ONLY
10613 | GOVD_MAP_FROM_ONLY))
10614 {
10615 case 0:
10616 kind = GOMP_MAP_TOFROM;
10617 break;
10618 case GOVD_MAP_FORCE:
10619 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
10620 break;
10621 case GOVD_MAP_TO_ONLY:
10622 kind = GOMP_MAP_TO;
10623 break;
10624 case GOVD_MAP_FROM_ONLY:
10625 kind = GOMP_MAP_FROM;
10626 break;
10627 case GOVD_MAP_ALLOC_ONLY:
10628 kind = GOMP_MAP_ALLOC;
10629 break;
10630 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
10631 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
10632 break;
10633 case GOVD_MAP_FORCE_PRESENT:
10634 kind = GOMP_MAP_FORCE_PRESENT;
10635 break;
10636 default:
10637 gcc_unreachable ();
10638 }
10639 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
10640 if (DECL_SIZE (decl)
10641 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
10642 {
10643 tree decl2 = DECL_VALUE_EXPR (decl);
10644 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
10645 decl2 = TREE_OPERAND (decl2, 0);
10646 gcc_assert (DECL_P (decl2));
10647 tree mem = build_simple_mem_ref (decl2);
10648 OMP_CLAUSE_DECL (clause) = mem;
10649 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
10650 if (gimplify_omp_ctxp->outer_context)
10651 {
10652 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
10653 omp_notice_variable (ctx, decl2, true);
10654 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
10655 }
10656 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
10657 OMP_CLAUSE_MAP);
10658 OMP_CLAUSE_DECL (nc) = decl;
10659 OMP_CLAUSE_SIZE (nc) = size_zero_node;
10660 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
10661 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
10662 else
10663 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
10664 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
10665 OMP_CLAUSE_CHAIN (clause) = nc;
10666 }
10667 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
10668 && lang_hooks.decls.omp_privatize_by_reference (decl))
10669 {
10670 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
10671 OMP_CLAUSE_SIZE (clause)
10672 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
10673 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10674 gimplify_omp_ctxp = ctx->outer_context;
10675 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
10676 pre_p, NULL, is_gimple_val, fb_rvalue);
10677 gimplify_omp_ctxp = ctx;
10678 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
10679 OMP_CLAUSE_MAP);
10680 OMP_CLAUSE_DECL (nc) = decl;
10681 OMP_CLAUSE_SIZE (nc) = size_zero_node;
10682 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
10683 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
10684 OMP_CLAUSE_CHAIN (clause) = nc;
10685 }
10686 else
10687 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
10688 }
10689 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
10690 {
10691 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
10692 OMP_CLAUSE_DECL (nc) = decl;
10693 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
10694 OMP_CLAUSE_CHAIN (nc) = chain;
10695 OMP_CLAUSE_CHAIN (clause) = nc;
10696 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10697 gimplify_omp_ctxp = ctx->outer_context;
10698 lang_hooks.decls.omp_finish_clause (nc, pre_p,
10699 (ctx->region_type & ORT_ACC) != 0);
10700 gimplify_omp_ctxp = ctx;
10701 }
10702 *list_p = clause;
10703 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10704 gimplify_omp_ctxp = ctx->outer_context;
10705 lang_hooks.decls.omp_finish_clause (clause, pre_p,
10706 (ctx->region_type & ORT_ACC) != 0);
10707 if (gimplify_omp_ctxp)
10708 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
10709 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
10710 && DECL_P (OMP_CLAUSE_SIZE (clause)))
10711 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
10712 true);
10713 gimplify_omp_ctxp = ctx;
10714 return 0;
10715 }
10716
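/* Post-process the clause list in LIST_P for the region described by
   gimplify_omp_ctxp after its BODY has been gimplified: prune clauses
   for variables that were never actually seen, adjust map kinds and
   sizes, add the clauses implied by the data-sharing analysis recorded
   in the context's splay tree, and finally pop and delete the context.
   CODE is the tree code of the construct.  (A rough summary; see the
   individual cases below for the details.)  */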
10717 static void
10718 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
10719 enum tree_code code)
10720 {
10721 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10722 tree *orig_list_p = list_p;
10723 tree c, decl;
10724 bool has_inscan_reductions = false;
10725
10726 if (body)
10727 {
10728 struct gimplify_omp_ctx *octx;
10729 for (octx = ctx; octx; octx = octx->outer_context)
10730 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
10731 break;
10732 if (octx)
10733 {
10734 struct walk_stmt_info wi;
10735 memset (&wi, 0, sizeof (wi));
10736 walk_gimple_seq (body, omp_find_stores_stmt,
10737 omp_find_stores_op, &wi);
10738 }
10739 }
10740
10741 if (ctx->add_safelen1)
10742 {
10743 /* If there are VLAs in the body of a simd loop, prevent
10744 vectorization. */
10745 gcc_assert (ctx->region_type == ORT_SIMD);
10746 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
10747 OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
10748 OMP_CLAUSE_CHAIN (c) = *list_p;
10749 *list_p = c;
10750 list_p = &OMP_CLAUSE_CHAIN (c);
10751 }
10752
10753 if (ctx->region_type == ORT_WORKSHARE
10754 && ctx->outer_context
10755 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
10756 {
10757 for (c = ctx->outer_context->clauses; c; c = OMP_CLAUSE_CHAIN (c))
10758 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10759 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
10760 {
10761 decl = OMP_CLAUSE_DECL (c);
10762 splay_tree_node n
10763 = splay_tree_lookup (ctx->outer_context->variables,
10764 (splay_tree_key) decl);
10765 gcc_checking_assert (!splay_tree_lookup (ctx->variables,
10766 (splay_tree_key) decl));
10767 omp_add_variable (ctx, decl, n->value);
10768 tree c2 = copy_node (c);
10769 OMP_CLAUSE_CHAIN (c2) = *list_p;
10770 *list_p = c2;
10771 if ((n->value & GOVD_FIRSTPRIVATE) == 0)
10772 continue;
10773 c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10774 OMP_CLAUSE_FIRSTPRIVATE);
10775 OMP_CLAUSE_DECL (c2) = decl;
10776 OMP_CLAUSE_CHAIN (c2) = *list_p;
10777 *list_p = c2;
10778 }
10779 }
10780 while ((c = *list_p) != NULL)
10781 {
10782 splay_tree_node n;
10783 bool remove = false;
10784
10785 switch (OMP_CLAUSE_CODE (c))
10786 {
10787 case OMP_CLAUSE_FIRSTPRIVATE:
10788 if ((ctx->region_type & ORT_TARGET)
10789 && (ctx->region_type & ORT_ACC) == 0
10790 && TYPE_ATOMIC (strip_array_types
10791 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
10792 {
10793 error_at (OMP_CLAUSE_LOCATION (c),
10794 "%<_Atomic%> %qD in %<firstprivate%> clause on "
10795 "%<target%> construct", OMP_CLAUSE_DECL (c));
10796 remove = true;
10797 break;
10798 }
10799 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
10800 {
10801 decl = OMP_CLAUSE_DECL (c);
10802 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10803 if ((n->value & GOVD_MAP) != 0)
10804 {
10805 remove = true;
10806 break;
10807 }
10808 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c) = 0;
10809 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) = 0;
10810 }
10811 /* FALLTHRU */
10812 case OMP_CLAUSE_PRIVATE:
10813 case OMP_CLAUSE_SHARED:
10814 case OMP_CLAUSE_LINEAR:
10815 decl = OMP_CLAUSE_DECL (c);
10816 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10817 remove = !(n->value & GOVD_SEEN);
10818 if ((n->value & GOVD_LASTPRIVATE_CONDITIONAL) != 0
10819 && code == OMP_PARALLEL
10820 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10821 remove = true;
10822 if (! remove)
10823 {
10824 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
10825 if ((n->value & GOVD_DEBUG_PRIVATE)
10826 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
10827 {
10828 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
10829 || ((n->value & GOVD_DATA_SHARE_CLASS)
10830 == GOVD_SHARED));
10831 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
10832 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
10833 }
10834 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
10835 && ctx->has_depend
10836 && DECL_P (decl))
10837 n->value |= GOVD_WRITTEN;
10838 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
10839 && (n->value & GOVD_WRITTEN) == 0
10840 && DECL_P (decl)
10841 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10842 OMP_CLAUSE_SHARED_READONLY (c) = 1;
10843 else if (DECL_P (decl)
10844 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
10845 && (n->value & GOVD_WRITTEN) != 0)
10846 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10847 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
10848 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10849 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10850 }
10851 else
10852 n->value &= ~GOVD_EXPLICIT;
10853 break;
10854
10855 case OMP_CLAUSE_LASTPRIVATE:
10856 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
10857 accurately reflect the presence of a FIRSTPRIVATE clause. */
10858 decl = OMP_CLAUSE_DECL (c);
10859 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10860 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
10861 = (n->value & GOVD_FIRSTPRIVATE) != 0;
10862 if (code == OMP_DISTRIBUTE
10863 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
10864 {
10865 remove = true;
10866 error_at (OMP_CLAUSE_LOCATION (c),
10867 "same variable used in %<firstprivate%> and "
10868 "%<lastprivate%> clauses on %<distribute%> "
10869 "construct");
10870 }
10871 if (!remove
10872 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10873 && DECL_P (decl)
10874 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10875 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10876 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) && code == OMP_PARALLEL)
10877 remove = true;
10878 break;
10879
10880 case OMP_CLAUSE_ALIGNED:
10881 decl = OMP_CLAUSE_DECL (c);
10882 if (!is_global_var (decl))
10883 {
10884 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10885 remove = n == NULL || !(n->value & GOVD_SEEN);
10886 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
10887 {
10888 struct gimplify_omp_ctx *octx;
10889 if (n != NULL
10890 && (n->value & (GOVD_DATA_SHARE_CLASS
10891 & ~GOVD_FIRSTPRIVATE)))
10892 remove = true;
10893 else
10894 for (octx = ctx->outer_context; octx;
10895 octx = octx->outer_context)
10896 {
10897 n = splay_tree_lookup (octx->variables,
10898 (splay_tree_key) decl);
10899 if (n == NULL)
10900 continue;
10901 if (n->value & GOVD_LOCAL)
10902 break;
10903 /* We have to avoid assigning a shared variable
10904 to itself when trying to add
10905 __builtin_assume_aligned. */
10906 if (n->value & GOVD_SHARED)
10907 {
10908 remove = true;
10909 break;
10910 }
10911 }
10912 }
10913 }
10914 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
10915 {
10916 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10917 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
10918 remove = true;
10919 }
10920 break;
10921
10922 case OMP_CLAUSE_NONTEMPORAL:
10923 decl = OMP_CLAUSE_DECL (c);
10924 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10925 remove = n == NULL || !(n->value & GOVD_SEEN);
10926 break;
10927
10928 case OMP_CLAUSE_MAP:
10929 if (code == OMP_TARGET_EXIT_DATA
10930 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
10931 {
10932 remove = true;
10933 break;
10934 }
10935 decl = OMP_CLAUSE_DECL (c);
10936 /* Data clauses associated with reductions must be
10937 compatible with present_or_copy. Warn and adjust the clause
10938 if that is not the case. */
10939 if (ctx->region_type == ORT_ACC_PARALLEL
10940 || ctx->region_type == ORT_ACC_SERIAL)
10941 {
10942 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
10943 n = NULL;
10944
10945 if (DECL_P (t))
10946 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
10947
10948 if (n && (n->value & GOVD_REDUCTION))
10949 {
10950 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
10951
10952 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
10953 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
10954 && kind != GOMP_MAP_FORCE_PRESENT
10955 && kind != GOMP_MAP_POINTER)
10956 {
10957 warning_at (OMP_CLAUSE_LOCATION (c), 0,
10958 "incompatible data clause with reduction "
10959 "on %qE; promoting to %<present_or_copy%>",
10960 DECL_NAME (t));
10961 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
10962 }
10963 }
10964 }
10965 if (!DECL_P (decl))
10966 {
10967 if ((ctx->region_type & ORT_TARGET) != 0
10968 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
10969 {
10970 if (TREE_CODE (decl) == INDIRECT_REF
10971 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
10972 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
10973 == REFERENCE_TYPE))
10974 decl = TREE_OPERAND (decl, 0);
10975 if (TREE_CODE (decl) == COMPONENT_REF)
10976 {
10977 while (TREE_CODE (decl) == COMPONENT_REF)
10978 decl = TREE_OPERAND (decl, 0);
10979 if (DECL_P (decl))
10980 {
10981 n = splay_tree_lookup (ctx->variables,
10982 (splay_tree_key) decl);
10983 if (!(n->value & GOVD_SEEN))
10984 remove = true;
10985 }
10986 }
10987 }
10988 break;
10989 }
10990 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10991 if ((ctx->region_type & ORT_TARGET) != 0
10992 && !(n->value & GOVD_SEEN)
10993 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
10994 && (!is_global_var (decl)
10995 || !lookup_attribute ("omp declare target link",
10996 DECL_ATTRIBUTES (decl))))
10997 {
10998 remove = true;
10999 /* For struct element mappings, if the struct is never referenced
11000 in the target block and none of the mappings has an always
11001 modifier, remove all the struct element mappings, which
11002 immediately follow the GOMP_MAP_STRUCT map clause. */
11003 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
11004 {
11005 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
11006 while (cnt--)
11007 OMP_CLAUSE_CHAIN (c)
11008 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
11009 }
11010 }
11011 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
11012 && (code == OMP_TARGET_EXIT_DATA
11013 || code == OACC_EXIT_DATA))
11014 remove = true;
11015 else if (DECL_SIZE (decl)
11016 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
11017 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
11018 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
11019 && (OMP_CLAUSE_MAP_KIND (c)
11020 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
11021 {
11022 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
11023 for these, TREE_CODE (DECL_SIZE (decl)) will always be
11024 INTEGER_CST. */
11025 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
11026
11027 tree decl2 = DECL_VALUE_EXPR (decl);
11028 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
11029 decl2 = TREE_OPERAND (decl2, 0);
11030 gcc_assert (DECL_P (decl2));
11031 tree mem = build_simple_mem_ref (decl2);
11032 OMP_CLAUSE_DECL (c) = mem;
11033 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
11034 if (ctx->outer_context)
11035 {
11036 omp_notice_variable (ctx->outer_context, decl2, true);
11037 omp_notice_variable (ctx->outer_context,
11038 OMP_CLAUSE_SIZE (c), true);
11039 }
11040 if (((ctx->region_type & ORT_TARGET) != 0
11041 || !ctx->target_firstprivatize_array_bases)
11042 && ((n->value & GOVD_SEEN) == 0
11043 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
11044 {
11045 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
11046 OMP_CLAUSE_MAP);
11047 OMP_CLAUSE_DECL (nc) = decl;
11048 OMP_CLAUSE_SIZE (nc) = size_zero_node;
11049 if (ctx->target_firstprivatize_array_bases)
11050 OMP_CLAUSE_SET_MAP_KIND (nc,
11051 GOMP_MAP_FIRSTPRIVATE_POINTER);
11052 else
11053 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
11054 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
11055 OMP_CLAUSE_CHAIN (c) = nc;
11056 c = nc;
11057 }
11058 }
11059 else
11060 {
11061 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
11062 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
11063 gcc_assert ((n->value & GOVD_SEEN) == 0
11064 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
11065 == 0));
11066 }
11067 break;
11068
11069 case OMP_CLAUSE_TO:
11070 case OMP_CLAUSE_FROM:
11071 case OMP_CLAUSE__CACHE_:
11072 decl = OMP_CLAUSE_DECL (c);
11073 if (!DECL_P (decl))
11074 break;
11075 if (DECL_SIZE (decl)
11076 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
11077 {
11078 tree decl2 = DECL_VALUE_EXPR (decl);
11079 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
11080 decl2 = TREE_OPERAND (decl2, 0);
11081 gcc_assert (DECL_P (decl2));
11082 tree mem = build_simple_mem_ref (decl2);
11083 OMP_CLAUSE_DECL (c) = mem;
11084 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
11085 if (ctx->outer_context)
11086 {
11087 omp_notice_variable (ctx->outer_context, decl2, true);
11088 omp_notice_variable (ctx->outer_context,
11089 OMP_CLAUSE_SIZE (c), true);
11090 }
11091 }
11092 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
11093 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
11094 break;
11095
11096 case OMP_CLAUSE_REDUCTION:
11097 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
11098 {
11099 decl = OMP_CLAUSE_DECL (c);
11100 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
11101 if ((n->value & GOVD_REDUCTION_INSCAN) == 0)
11102 {
11103 remove = true;
11104 error_at (OMP_CLAUSE_LOCATION (c),
11105 "%qD specified in %<inscan%> %<reduction%> clause "
11106 "but not in %<scan%> directive clause", decl);
11107 break;
11108 }
11109 has_inscan_reductions = true;
11110 }
11111 /* FALLTHRU */
11112 case OMP_CLAUSE_IN_REDUCTION:
11113 case OMP_CLAUSE_TASK_REDUCTION:
11114 decl = OMP_CLAUSE_DECL (c);
11115 /* OpenACC reductions need a present_or_copy data clause.
11116 Add one if necessary. Emit an error when the reduction is private. */
11117 if (ctx->region_type == ORT_ACC_PARALLEL
11118 || ctx->region_type == ORT_ACC_SERIAL)
11119 {
11120 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
11121 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
11122 {
11123 remove = true;
11124 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
11125 "reduction on %qE", DECL_NAME (decl));
11126 }
11127 else if ((n->value & GOVD_MAP) == 0)
11128 {
11129 tree next = OMP_CLAUSE_CHAIN (c);
11130 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
11131 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
11132 OMP_CLAUSE_DECL (nc) = decl;
11133 OMP_CLAUSE_CHAIN (c) = nc;
11134 lang_hooks.decls.omp_finish_clause (nc, pre_p,
11135 (ctx->region_type
11136 & ORT_ACC) != 0);
11137 while (1)
11138 {
11139 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
11140 if (OMP_CLAUSE_CHAIN (nc) == NULL)
11141 break;
11142 nc = OMP_CLAUSE_CHAIN (nc);
11143 }
11144 OMP_CLAUSE_CHAIN (nc) = next;
11145 n->value |= GOVD_MAP;
11146 }
11147 }
11148 if (DECL_P (decl)
11149 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
11150 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
11151 break;
11152
11153 case OMP_CLAUSE_ALLOCATE:
11154 decl = OMP_CLAUSE_DECL (c);
11155 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
11156 if (n != NULL && !(n->value & GOVD_SEEN))
11157 {
11158 if ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE | GOVD_LINEAR))
11159 != 0
11160 && (n->value & (GOVD_REDUCTION | GOVD_LASTPRIVATE)) == 0)
11161 remove = true;
11162 }
11163 if (!remove
11164 && OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
11165 && TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)) != INTEGER_CST
11166 && ((ctx->region_type & (ORT_PARALLEL | ORT_TARGET)) != 0
11167 || (ctx->region_type & ORT_TASKLOOP) == ORT_TASK
11168 || (ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS))
11169 {
11170 tree allocator = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
11171 n = splay_tree_lookup (ctx->variables, (splay_tree_key) allocator);
11172 if (n == NULL)
11173 {
11174 enum omp_clause_default_kind default_kind
11175 = ctx->default_kind;
11176 ctx->default_kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
11177 omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
11178 true);
11179 ctx->default_kind = default_kind;
11180 }
11181 else
11182 omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
11183 true);
11184 }
11185 break;
11186
11187 case OMP_CLAUSE_COPYIN:
11188 case OMP_CLAUSE_COPYPRIVATE:
11189 case OMP_CLAUSE_IF:
11190 case OMP_CLAUSE_NUM_THREADS:
11191 case OMP_CLAUSE_NUM_TEAMS:
11192 case OMP_CLAUSE_THREAD_LIMIT:
11193 case OMP_CLAUSE_DIST_SCHEDULE:
11194 case OMP_CLAUSE_DEVICE:
11195 case OMP_CLAUSE_SCHEDULE:
11196 case OMP_CLAUSE_NOWAIT:
11197 case OMP_CLAUSE_ORDERED:
11198 case OMP_CLAUSE_DEFAULT:
11199 case OMP_CLAUSE_UNTIED:
11200 case OMP_CLAUSE_COLLAPSE:
11201 case OMP_CLAUSE_FINAL:
11202 case OMP_CLAUSE_MERGEABLE:
11203 case OMP_CLAUSE_PROC_BIND:
11204 case OMP_CLAUSE_SAFELEN:
11205 case OMP_CLAUSE_SIMDLEN:
11206 case OMP_CLAUSE_DEPEND:
11207 case OMP_CLAUSE_PRIORITY:
11208 case OMP_CLAUSE_GRAINSIZE:
11209 case OMP_CLAUSE_NUM_TASKS:
11210 case OMP_CLAUSE_NOGROUP:
11211 case OMP_CLAUSE_THREADS:
11212 case OMP_CLAUSE_SIMD:
11213 case OMP_CLAUSE_HINT:
11214 case OMP_CLAUSE_DEFAULTMAP:
11215 case OMP_CLAUSE_ORDER:
11216 case OMP_CLAUSE_BIND:
11217 case OMP_CLAUSE_DETACH:
11218 case OMP_CLAUSE_USE_DEVICE_PTR:
11219 case OMP_CLAUSE_USE_DEVICE_ADDR:
11220 case OMP_CLAUSE_IS_DEVICE_PTR:
11221 case OMP_CLAUSE_ASYNC:
11222 case OMP_CLAUSE_WAIT:
11223 case OMP_CLAUSE_INDEPENDENT:
11224 case OMP_CLAUSE_NUM_GANGS:
11225 case OMP_CLAUSE_NUM_WORKERS:
11226 case OMP_CLAUSE_VECTOR_LENGTH:
11227 case OMP_CLAUSE_GANG:
11228 case OMP_CLAUSE_WORKER:
11229 case OMP_CLAUSE_VECTOR:
11230 case OMP_CLAUSE_AUTO:
11231 case OMP_CLAUSE_SEQ:
11232 case OMP_CLAUSE_TILE:
11233 case OMP_CLAUSE_IF_PRESENT:
11234 case OMP_CLAUSE_FINALIZE:
11235 case OMP_CLAUSE_INCLUSIVE:
11236 case OMP_CLAUSE_EXCLUSIVE:
11237 break;
11238
11239 default:
11240 gcc_unreachable ();
11241 }
11242
11243 if (remove)
11244 *list_p = OMP_CLAUSE_CHAIN (c);
11245 else
11246 list_p = &OMP_CLAUSE_CHAIN (c);
11247 }
11248
11249 /* Add in any implicit data sharing. */
11250 struct gimplify_adjust_omp_clauses_data data;
11251 data.list_p = list_p;
11252 data.pre_p = pre_p;
11253 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
11254
11255 if (has_inscan_reductions)
11256 for (c = *orig_list_p; c; c = OMP_CLAUSE_CHAIN (c))
11257 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11258 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11259 {
11260 error_at (OMP_CLAUSE_LOCATION (c),
11261 "%<inscan%> %<reduction%> clause used together with "
11262 "%<linear%> clause for a variable other than loop "
11263 "iterator");
11264 break;
11265 }
11266
11267 gimplify_omp_ctxp = ctx->outer_context;
11268 delete_omp_context (ctx);
11269 }
11270
11271 /* Return 0 if the CONSTRUCTS selectors don't match the OpenMP context,
11272 -1 if it is not known yet (simd is involved; it won't be known until
11273 vectorization) and 1 if they do. If SCORES is non-NULL, it should point
11274 to an array of at least 2*NCONSTRUCTS+2 ints, and will be filled with the
11275 positions of the CONSTRUCTS (position -1 if one will never match) followed
11276 by the number of constructs in the OpenMP context construct trait. If the
11277 score depends on whether it will be in a declare simd clone or not,
11278 the function returns 2 and there will be two sets of the scores, the first
11279 one for the case that it is not in a declare simd clone, the other
11280 for the case that it is. */
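/* For instance (an informal example, not from the sources): for code
   being gimplified inside

       #pragma omp target
       #pragma omp parallel
       #pragma omp for

   the OpenMP context construct trait contains target, parallel and
   for, so a CONSTRUCTS selector naming a matching subsequence of
   those can match and yield 1; if OMP_SIMD participates in the match,
   the result is -1 instead, because whether the simd clone is used is
   only known at vectorization time.  */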
11281
11282 int
11283 omp_construct_selector_matches (enum tree_code *constructs, int nconstructs,
11284 int *scores)
11285 {
11286 int matched = 0, cnt = 0;
11287 bool simd_seen = false;
11288 bool target_seen = false;
11289 int declare_simd_cnt = -1;
11290 auto_vec<enum tree_code, 16> codes;
11291 for (struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; ctx;)
11292 {
11293 if (((ctx->region_type & ORT_PARALLEL) && ctx->code == OMP_PARALLEL)
11294 || ((ctx->region_type & (ORT_TARGET | ORT_IMPLICIT_TARGET | ORT_ACC))
11295 == ORT_TARGET && ctx->code == OMP_TARGET)
11296 || ((ctx->region_type & ORT_TEAMS) && ctx->code == OMP_TEAMS)
11297 || (ctx->region_type == ORT_WORKSHARE && ctx->code == OMP_FOR)
11298 || (ctx->region_type == ORT_SIMD
11299 && ctx->code == OMP_SIMD
11300 && !omp_find_clause (ctx->clauses, OMP_CLAUSE_BIND)))
11301 {
11302 ++cnt;
11303 if (scores)
11304 codes.safe_push (ctx->code);
11305 else if (matched < nconstructs && ctx->code == constructs[matched])
11306 {
11307 if (ctx->code == OMP_SIMD)
11308 {
11309 if (matched)
11310 return 0;
11311 simd_seen = true;
11312 }
11313 ++matched;
11314 }
11315 if (ctx->code == OMP_TARGET)
11316 {
11317 if (scores == NULL)
11318 return matched < nconstructs ? 0 : simd_seen ? -1 : 1;
11319 target_seen = true;
11320 break;
11321 }
11322 }
11323 else if (ctx->region_type == ORT_WORKSHARE
11324 && ctx->code == OMP_LOOP
11325 && ctx->outer_context
11326 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL
11327 && ctx->outer_context->outer_context
11328 && ctx->outer_context->outer_context->code == OMP_LOOP
11329 && ctx->outer_context->outer_context->distribute)
11330 ctx = ctx->outer_context->outer_context;
11331 ctx = ctx->outer_context;
11332 }
11333 if (!target_seen
11334 && lookup_attribute ("omp declare simd",
11335 DECL_ATTRIBUTES (current_function_decl)))
11336 {
11337 /* Declare simd is a maybe case: it is supposed to be added only to the
11338 clones created by omp-simd-clone.c, not to the base function. */
11339 declare_simd_cnt = cnt++;
11340 if (scores)
11341 codes.safe_push (OMP_SIMD);
11342 else if (cnt == 0
11343 && constructs[0] == OMP_SIMD)
11344 {
11345 gcc_assert (matched == 0);
11346 simd_seen = true;
11347 if (++matched == nconstructs)
11348 return -1;
11349 }
11350 }
11351 if (tree attr = lookup_attribute ("omp declare variant variant",
11352 DECL_ATTRIBUTES (current_function_decl)))
11353 {
11354 enum tree_code variant_constructs[5];
11355 int variant_nconstructs = 0;
11356 if (!target_seen)
11357 variant_nconstructs
11358 = omp_constructor_traits_to_codes (TREE_VALUE (attr),
11359 variant_constructs);
11360 for (int i = 0; i < variant_nconstructs; i++)
11361 {
11362 ++cnt;
11363 if (scores)
11364 codes.safe_push (variant_constructs[i]);
11365 else if (matched < nconstructs
11366 && variant_constructs[i] == constructs[matched])
11367 {
11368 if (variant_constructs[i] == OMP_SIMD)
11369 {
11370 if (matched)
11371 return 0;
11372 simd_seen = true;
11373 }
11374 ++matched;
11375 }
11376 }
11377 }
11378 if (!target_seen
11379 && lookup_attribute ("omp declare target block",
11380 DECL_ATTRIBUTES (current_function_decl)))
11381 {
11382 if (scores)
11383 codes.safe_push (OMP_TARGET);
11384 else if (matched < nconstructs && constructs[matched] == OMP_TARGET)
11385 ++matched;
11386 }
11387 if (scores)
11388 {
11389 for (int pass = 0; pass < (declare_simd_cnt == -1 ? 1 : 2); pass++)
11390 {
11391 int j = codes.length () - 1;
11392 for (int i = nconstructs - 1; i >= 0; i--)
11393 {
11394 while (j >= 0
11395 && (pass != 0 || declare_simd_cnt != j)
11396 && constructs[i] != codes[j])
11397 --j;
11398 if (pass == 0 && declare_simd_cnt != -1 && j > declare_simd_cnt)
11399 *scores++ = j - 1;
11400 else
11401 *scores++ = j;
11402 }
11403 *scores++ = ((pass == 0 && declare_simd_cnt != -1)
11404 ? codes.length () - 1 : codes.length ());
11405 }
11406 return declare_simd_cnt == -1 ? 1 : 2;
11407 }
11408 if (matched == nconstructs)
11409 return simd_seen ? -1 : 1;
11410 return 0;
11411 }
11412
11413 /* Gimplify OACC_CACHE. */
11414
11415 static void
11416 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
11417 {
11418 tree expr = *expr_p;
11419
11420 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
11421 OACC_CACHE);
11422 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
11423 OACC_CACHE);
11424
11425 /* TODO: Do something sensible with this information. */
11426
11427 *expr_p = NULL_TREE;
11428 }
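/* An illustrative use (a sketch): a directive such as

       #pragma acc cache (a[0:n])

   inside an OpenACC loop arrives here as OACC_CACHE; its clauses are
   scanned and adjusted like other data clauses, and the construct
   itself is then dropped, as noted in the TODO above.  */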
11429
11430 /* Helper function of gimplify_oacc_declare. Its purpose is to translate,
11431 if required, the 'kind' in CLAUSE into an 'entry' kind and an 'exit'
11432 kind. The entry kind will replace the one in CLAUSE, while the exit
11433 kind will be used in a new omp_clause and returned to the caller. */
11434
11435 static tree
11436 gimplify_oacc_declare_1 (tree clause)
11437 {
11438 HOST_WIDE_INT kind, new_op;
11439 bool ret = false;
11440 tree c = NULL;
11441
11442 kind = OMP_CLAUSE_MAP_KIND (clause);
11443
11444 switch (kind)
11445 {
11446 case GOMP_MAP_ALLOC:
11447 new_op = GOMP_MAP_RELEASE;
11448 ret = true;
11449 break;
11450
11451 case GOMP_MAP_FROM:
11452 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
11453 new_op = GOMP_MAP_FROM;
11454 ret = true;
11455 break;
11456
11457 case GOMP_MAP_TOFROM:
11458 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
11459 new_op = GOMP_MAP_FROM;
11460 ret = true;
11461 break;
11462
11463 case GOMP_MAP_DEVICE_RESIDENT:
11464 case GOMP_MAP_FORCE_DEVICEPTR:
11465 case GOMP_MAP_FORCE_PRESENT:
11466 case GOMP_MAP_LINK:
11467 case GOMP_MAP_POINTER:
11468 case GOMP_MAP_TO:
11469 break;
11470
11471 default:
11472 gcc_unreachable ();
11473 break;
11474 }
11475
11476 if (ret)
11477 {
11478 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
11479 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
11480 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
11481 }
11482
11483 return c;
11484 }
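/* Informal summary of the translation above (the entry kind replaces
   the clause's kind; the exit kind is the returned clause, if any):

       GOMP_MAP_ALLOC   -> entry GOMP_MAP_ALLOC,       exit GOMP_MAP_RELEASE
       GOMP_MAP_FROM    -> entry GOMP_MAP_FORCE_ALLOC, exit GOMP_MAP_FROM
       GOMP_MAP_TOFROM  -> entry GOMP_MAP_TO,          exit GOMP_MAP_FROM

   All other accepted kinds are left unchanged and produce no exit
   clause.  */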
11485
11486 /* Gimplify OACC_DECLARE. */
11487
11488 static void
11489 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
11490 {
11491 tree expr = *expr_p;
11492 gomp_target *stmt;
11493 tree clauses, t, decl;
11494
11495 clauses = OACC_DECLARE_CLAUSES (expr);
11496
11497 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
11498 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
11499
11500 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
11501 {
11502 decl = OMP_CLAUSE_DECL (t);
11503
11504 if (TREE_CODE (decl) == MEM_REF)
11505 decl = TREE_OPERAND (decl, 0);
11506
11507 if (VAR_P (decl) && !is_oacc_declared (decl))
11508 {
11509 tree attr = get_identifier ("oacc declare target");
11510 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
11511 DECL_ATTRIBUTES (decl));
11512 }
11513
11514 if (VAR_P (decl)
11515 && !is_global_var (decl)
11516 && DECL_CONTEXT (decl) == current_function_decl)
11517 {
11518 tree c = gimplify_oacc_declare_1 (t);
11519 if (c)
11520 {
11521 if (oacc_declare_returns == NULL)
11522 oacc_declare_returns = new hash_map<tree, tree>;
11523
11524 oacc_declare_returns->put (decl, c);
11525 }
11526 }
11527
11528 if (gimplify_omp_ctxp)
11529 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
11530 }
11531
11532 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
11533 clauses);
11534
11535 gimplify_seq_add_stmt (pre_p, stmt);
11536
11537 *expr_p = NULL_TREE;
11538 }
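/* A sketch of the effect: for a function-local

       #pragma acc declare copy (x)

   the clause is rewritten to its 'entry' kind by
   gimplify_oacc_declare_1, the matching 'exit' clause is stashed in
   oacc_declare_returns keyed by the decl so it can be emitted at
   function exit, and a GIMPLE_OMP_TARGET with kind
   GF_OMP_TARGET_KIND_OACC_DECLARE is queued on PRE_P.  */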
11539
11540 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
11541 gimplification of the body, as well as scanning the body for used
11542 variables. We need to do this scan now, because variable-sized
11543 decls will be decomposed during gimplification. */
11544
11545 static void
11546 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
11547 {
11548 tree expr = *expr_p;
11549 gimple *g;
11550 gimple_seq body = NULL;
11551
11552 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
11553 OMP_PARALLEL_COMBINED (expr)
11554 ? ORT_COMBINED_PARALLEL
11555 : ORT_PARALLEL, OMP_PARALLEL);
11556
11557 push_gimplify_context ();
11558
11559 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
11560 if (gimple_code (g) == GIMPLE_BIND)
11561 pop_gimplify_context (g);
11562 else
11563 pop_gimplify_context (NULL);
11564
11565 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
11566 OMP_PARALLEL);
11567
11568 g = gimple_build_omp_parallel (body,
11569 OMP_PARALLEL_CLAUSES (expr),
11570 NULL_TREE, NULL_TREE);
11571 if (OMP_PARALLEL_COMBINED (expr))
11572 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
11573 gimplify_seq_add_stmt (pre_p, g);
11574 *expr_p = NULL_TREE;
11575 }
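/* E.g. (informal): for

       #pragma omp parallel firstprivate (x)
       { ... }

   the clauses are scanned first (which opens a new OMP context), the
   body is gimplified inside it, the clause list is then adjusted
   against what the body scan recorded, and a GIMPLE_OMP_PARALLEL
   wrapping the gimplified body is queued on PRE_P.  */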
11576
11577 /* Gimplify the contents of an OMP_TASK statement. This involves
11578 gimplification of the body, as well as scanning the body for used
11579 variables. We need to do this scan now, because variable-sized
11580 decls will be decomposed during gimplification. */
11581
11582 static void
11583 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
11584 {
11585 tree expr = *expr_p;
11586 gimple *g;
11587 gimple_seq body = NULL;
11588
11589 if (OMP_TASK_BODY (expr) == NULL_TREE)
11590 for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
11591 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
11592 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
11593 {
11594 error_at (OMP_CLAUSE_LOCATION (c),
11595 "%<mutexinoutset%> kind in %<depend%> clause on a "
11596 "%<taskwait%> construct");
11597 break;
11598 }
11599
11600 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
11601 omp_find_clause (OMP_TASK_CLAUSES (expr),
11602 OMP_CLAUSE_UNTIED)
11603 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
11604
11605 if (OMP_TASK_BODY (expr))
11606 {
11607 push_gimplify_context ();
11608
11609 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
11610 if (gimple_code (g) == GIMPLE_BIND)
11611 pop_gimplify_context (g);
11612 else
11613 pop_gimplify_context (NULL);
11614 }
11615
11616 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
11617 OMP_TASK);
11618
11619 g = gimple_build_omp_task (body,
11620 OMP_TASK_CLAUSES (expr),
11621 NULL_TREE, NULL_TREE,
11622 NULL_TREE, NULL_TREE, NULL_TREE);
11623 if (OMP_TASK_BODY (expr) == NULL_TREE)
11624 gimple_omp_task_set_taskwait_p (g, true);
11625 gimplify_seq_add_stmt (pre_p, g);
11626 *expr_p = NULL_TREE;
11627 }
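/* Note the NULL-body case above: informally, a stand-alone

       #pragma omp taskwait depend (in: x)

   reaches here as an OMP_TASK without a body and is emitted as a
   GIMPLE_OMP_TASK with the taskwait_p flag set; a mutexinoutset
   depend kind is diagnosed on it, as at the top of the function.  */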
11628
11629 /* Helper function for gimplify_omp_for. If *TP is not a gimple constant,
11630 force it into a temporary initialized in PRE_P and add a firstprivate
11631 clause to ORIG_FOR_STMT. */
11632
11633 static void
11634 gimplify_omp_taskloop_expr (tree type, tree *tp, gimple_seq *pre_p,
11635 tree orig_for_stmt)
11636 {
11637 if (*tp == NULL || is_gimple_constant (*tp))
11638 return;
11639
11640 *tp = get_initialized_tmp_var (*tp, pre_p, NULL, false);
11641 /* A reference-to-pointer conversion is considered useless,
11642 but is significant for the firstprivate clause. Force it
11643 here. */
11644 if (type
11645 && TREE_CODE (type) == POINTER_TYPE
11646 && TREE_CODE (TREE_TYPE (*tp)) == REFERENCE_TYPE)
11647 {
11648 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
11649 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v, *tp);
11650 gimplify_and_add (m, pre_p);
11651 *tp = v;
11652 }
11653
11654 tree c = build_omp_clause (input_location, OMP_CLAUSE_FIRSTPRIVATE);
11655 OMP_CLAUSE_DECL (c) = *tp;
11656 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
11657 OMP_FOR_CLAUSES (orig_for_stmt) = c;
11658 }
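/* E.g. (a sketch; names illustrative): for

       #pragma omp taskloop
       for (i = 0; i < n * 2; i++)

   the bound n * 2 is not a gimple constant, so it is evaluated into a
   temporary before the construct and that temporary is added to the
   taskloop's firstprivate clauses by the helper above.  */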
11659
11660 /* Gimplify the gross structure of an OMP_FOR statement. */
11661
11662 static enum gimplify_status
11663 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
11664 {
11665 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
11666 enum gimplify_status ret = GS_ALL_DONE;
11667 enum gimplify_status tret;
11668 gomp_for *gfor;
11669 gimple_seq for_body, for_pre_body;
11670 int i;
11671 bitmap has_decl_expr = NULL;
11672 enum omp_region_type ort = ORT_WORKSHARE;
11673 bool openacc = TREE_CODE (*expr_p) == OACC_LOOP;
11674
11675 orig_for_stmt = for_stmt = *expr_p;
11676
11677 bool loop_p = (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_BIND)
11678 != NULL_TREE);
11679 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
11680 {
11681 tree *data[4] = { NULL, NULL, NULL, NULL };
11682 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
11683 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
11684 find_combined_omp_for, data, NULL);
11685 if (inner_for_stmt == NULL_TREE)
11686 {
11687 gcc_assert (seen_error ());
11688 *expr_p = NULL_TREE;
11689 return GS_ERROR;
11690 }
11691 if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
11692 {
11693 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
11694 &OMP_FOR_PRE_BODY (for_stmt));
11695 OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
11696 }
11697 if (OMP_FOR_PRE_BODY (inner_for_stmt))
11698 {
11699 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
11700 &OMP_FOR_PRE_BODY (for_stmt));
11701 OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
11702 }
11703
11704 if (data[0])
11705 {
11706 /* We have some statements or variable declarations in between
11707 the composite construct directives. Move them around the
11708 inner_for_stmt. */
11709 data[0] = expr_p;
11710 for (i = 0; i < 3; i++)
11711 if (data[i])
11712 {
11713 tree t = *data[i];
11714 if (i < 2 && data[i + 1] == &OMP_BODY (t))
11715 data[i + 1] = data[i];
11716 *data[i] = OMP_BODY (t);
11717 tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
11718 NULL_TREE, make_node (BLOCK));
11719 OMP_BODY (t) = body;
11720 append_to_statement_list_force (inner_for_stmt,
11721 &BIND_EXPR_BODY (body));
11722 *data[3] = t;
11723 data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
11724 gcc_assert (*data[3] == inner_for_stmt);
11725 }
11726 return GS_OK;
11727 }
11728
11729 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
11730 if (!loop_p
11731 && OMP_FOR_ORIG_DECLS (inner_for_stmt)
11732 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11733 i)) == TREE_LIST
11734 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11735 i)))
11736 {
11737 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
11738 /* Class iterators aren't allowed on OMP_SIMD, so the only
11739 case we need to solve is distribute parallel for. They are
11740 allowed on the loop construct, but that is already handled
11741 in gimplify_omp_loop. */
11742 gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
11743 && TREE_CODE (for_stmt) == OMP_DISTRIBUTE
11744 && data[1]);
11745 tree orig_decl = TREE_PURPOSE (orig);
11746 tree last = TREE_VALUE (orig);
11747 tree *pc;
11748 for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
11749 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
11750 if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
11751 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
11752 && OMP_CLAUSE_DECL (*pc) == orig_decl)
11753 break;
11754 if (*pc == NULL_TREE)
11755 {
11756 tree *spc;
11757 for (spc = &OMP_PARALLEL_CLAUSES (*data[1]);
11758 *spc; spc = &OMP_CLAUSE_CHAIN (*spc))
11759 if (OMP_CLAUSE_CODE (*spc) == OMP_CLAUSE_PRIVATE
11760 && OMP_CLAUSE_DECL (*spc) == orig_decl)
11761 break;
11762 if (*spc)
11763 {
11764 tree c = *spc;
11765 *spc = OMP_CLAUSE_CHAIN (c);
11766 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
11767 *pc = c;
11768 }
11769 }
11770 if (*pc == NULL_TREE)
11771 ;
11772 else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
11773 {
11774 /* A private clause will appear only on inner_for_stmt.
11775 Change it into firstprivate, and add a private clause
11776 on for_stmt. */
11777 tree c = copy_node (*pc);
11778 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
11779 OMP_FOR_CLAUSES (for_stmt) = c;
11780 OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
11781 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
11782 }
11783 else
11784 {
11785 /* A lastprivate clause will appear on both inner_for_stmt
11786 and for_stmt. Add a firstprivate clause to
11787 inner_for_stmt. */
11788 tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
11789 OMP_CLAUSE_FIRSTPRIVATE);
11790 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
11791 OMP_CLAUSE_CHAIN (c) = *pc;
11792 *pc = c;
11793 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
11794 }
11795 tree c = build_omp_clause (UNKNOWN_LOCATION,
11796 OMP_CLAUSE_FIRSTPRIVATE);
11797 OMP_CLAUSE_DECL (c) = last;
11798 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11799 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11800 c = build_omp_clause (UNKNOWN_LOCATION,
11801 *pc ? OMP_CLAUSE_SHARED
11802 : OMP_CLAUSE_FIRSTPRIVATE);
11803 OMP_CLAUSE_DECL (c) = orig_decl;
11804 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11805 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11806 }
11807 /* Similarly, take care of C++ range-for temporaries; those should
11808 be firstprivate on the OMP_PARALLEL, if any. */
11809 if (data[1])
11810 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
11811 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
11812 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11813 i)) == TREE_LIST
11814 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11815 i)))
11816 {
11817 tree orig
11818 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
11819 tree v = TREE_CHAIN (orig);
11820 tree c = build_omp_clause (UNKNOWN_LOCATION,
11821 OMP_CLAUSE_FIRSTPRIVATE);
11822 /* First add firstprivate clause for the __for_end artificial
11823 decl. */
11824 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
11825 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
11826 == REFERENCE_TYPE)
11827 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
11828 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11829 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11830 if (TREE_VEC_ELT (v, 0))
11831 {
11832 /* And now the same for __for_range artificial decl if it
11833 exists. */
11834 c = build_omp_clause (UNKNOWN_LOCATION,
11835 OMP_CLAUSE_FIRSTPRIVATE);
11836 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
11837 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
11838 == REFERENCE_TYPE)
11839 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
11840 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11841 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11842 }
11843 }
11844 }
11845
11846 switch (TREE_CODE (for_stmt))
11847 {
11848 case OMP_FOR:
11849 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
11850 {
11851 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
11852 OMP_CLAUSE_SCHEDULE))
11853 error_at (EXPR_LOCATION (for_stmt),
11854 "%qs clause may not appear on non-rectangular %qs",
11855 "schedule", "for");
11856 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED))
11857 error_at (EXPR_LOCATION (for_stmt),
11858 "%qs clause may not appear on non-rectangular %qs",
11859 "ordered", "for");
11860 }
11861 break;
11862 case OMP_DISTRIBUTE:
11863 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt)
11864 && omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
11865 OMP_CLAUSE_DIST_SCHEDULE))
11866 error_at (EXPR_LOCATION (for_stmt),
11867 "%qs clause may not appear on non-rectangular %qs",
11868 "dist_schedule", "distribute");
11869 break;
11870 case OACC_LOOP:
11871 ort = ORT_ACC;
11872 break;
11873 case OMP_TASKLOOP:
11874 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
11875 ort = ORT_UNTIED_TASKLOOP;
11876 else
11877 ort = ORT_TASKLOOP;
11878 break;
11879 case OMP_SIMD:
11880 ort = ORT_SIMD;
11881 break;
11882 default:
11883 gcc_unreachable ();
11884 }
11885
11886 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
11887 clause for the IV. */
11888 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
11889 {
11890 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
11891 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11892 decl = TREE_OPERAND (t, 0);
11893 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11894 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11895 && OMP_CLAUSE_DECL (c) == decl)
11896 {
11897 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
11898 break;
11899 }
11900 }
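/* E.g. (informal): for

       #pragma omp simd linear (i)
       for (i = 0; i < n; i++)

   the explicit linear clause names the single IV; since the IV is
   initialized by the loop itself, OMP_CLAUSE_LINEAR_NO_COPYIN is set
   on that clause.  */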
11901
11902 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
11903 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
11904 loop_p && TREE_CODE (for_stmt) != OMP_SIMD
11905 ? OMP_LOOP : TREE_CODE (for_stmt));
11906
11907 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
11908 gimplify_omp_ctxp->distribute = true;
11909
11910 /* Handle OMP_FOR_INIT. */
11911 for_pre_body = NULL;
11912 if ((ort == ORT_SIMD
11913 || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
11914 && OMP_FOR_PRE_BODY (for_stmt))
11915 {
11916 has_decl_expr = BITMAP_ALLOC (NULL);
11917 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
11918 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
11919 == VAR_DECL)
11920 {
11921 t = OMP_FOR_PRE_BODY (for_stmt);
11922 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
11923 }
11924 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
11925 {
11926 tree_stmt_iterator si;
11927 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
11928 tsi_next (&si))
11929 {
11930 t = tsi_stmt (si);
11931 if (TREE_CODE (t) == DECL_EXPR
11932 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
11933 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
11934 }
11935 }
11936 }
11937 if (OMP_FOR_PRE_BODY (for_stmt))
11938 {
11939 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
11940 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
11941 else
11942 {
11943 struct gimplify_omp_ctx ctx;
11944 memset (&ctx, 0, sizeof (ctx));
11945 ctx.region_type = ORT_NONE;
11946 gimplify_omp_ctxp = &ctx;
11947 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
11948 gimplify_omp_ctxp = NULL;
11949 }
11950 }
11951 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
11952
11953 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
11954 for_stmt = inner_for_stmt;
11955
11956 /* For a taskloop, we need to gimplify the start, end and step before
11957 the taskloop, outside of the taskloop omp context. */
11958 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11959 {
11960 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11961 {
11962 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11963 gimple_seq *for_pre_p = (gimple_seq_empty_p (for_pre_body)
11964 ? pre_p : &for_pre_body);
11965 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
11966 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
11967 {
11968 tree v = TREE_OPERAND (t, 1);
11969 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
11970 for_pre_p, orig_for_stmt);
11971 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
11972 for_pre_p, orig_for_stmt);
11973 }
11974 else
11975 gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
11976 orig_for_stmt);
11977
11978 /* Handle OMP_FOR_COND. */
11979 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
11980 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
11981 {
11982 tree v = TREE_OPERAND (t, 1);
11983 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
11984 for_pre_p, orig_for_stmt);
11985 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
11986 for_pre_p, orig_for_stmt);
11987 }
11988 else
11989 gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
11990 orig_for_stmt);
11991
11992 /* Handle OMP_FOR_INCR. */
11993 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11994 if (TREE_CODE (t) == MODIFY_EXPR)
11995 {
11996 decl = TREE_OPERAND (t, 0);
11997 t = TREE_OPERAND (t, 1);
11998 tree *tp = &TREE_OPERAND (t, 1);
11999 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
12000 tp = &TREE_OPERAND (t, 0);
12001
12002 gimplify_omp_taskloop_expr (NULL_TREE, tp, for_pre_p,
12003 orig_for_stmt);
12004 }
12005 }
12006
12007 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
12008 OMP_TASKLOOP);
12009 }
12010
12011 if (orig_for_stmt != for_stmt)
12012 gimplify_omp_ctxp->combined_loop = true;
12013
12014 for_body = NULL;
12015 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
12016 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
12017 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
12018 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
12019
12020 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
12021 bool is_doacross = false;
12022 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
12023 {
12024 is_doacross = true;
12025 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
12026 (OMP_FOR_INIT (for_stmt))
12027 * 2);
12028 }
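/* An ordered clause with an argument, e.g. (informally)

       #pragma omp for ordered (2)

   on a suitably nested loop marks a doacross loop nest; two entries
   per loop dimension are reserved in loop_iter_var (the original user
   iterator and the decl actually iterated on), filled in by the
   per-dimension loop below.  */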
12029 int collapse = 1, tile = 0;
12030 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
12031 if (c)
12032 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
12033 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
12034 if (c)
12035 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
12036 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ALLOCATE);
12037 hash_set<tree> *allocate_uids = NULL;
12038 if (c)
12039 {
12040 allocate_uids = new hash_set<tree>;
12041 for (; c; c = OMP_CLAUSE_CHAIN (c))
12042 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE)
12043 allocate_uids->add (OMP_CLAUSE_DECL (c));
12044 }
12045 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12046 {
12047 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
12048 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
12049 decl = TREE_OPERAND (t, 0);
12050 gcc_assert (DECL_P (decl));
12051 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
12052 || POINTER_TYPE_P (TREE_TYPE (decl)));
12053 if (is_doacross)
12054 {
12055 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
12056 {
12057 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
12058 if (TREE_CODE (orig_decl) == TREE_LIST)
12059 {
12060 orig_decl = TREE_PURPOSE (orig_decl);
12061 if (!orig_decl)
12062 orig_decl = decl;
12063 }
12064 gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
12065 }
12066 else
12067 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
12068 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
12069 }
12070
12071 /* Make sure the iteration variable is private. */
12072 tree c = NULL_TREE;
12073 tree c2 = NULL_TREE;
12074 if (orig_for_stmt != for_stmt)
12075 {
12076 /* Preserve this information until we gimplify the inner simd. */
12077 if (has_decl_expr
12078 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
12079 TREE_PRIVATE (t) = 1;
12080 }
12081 else if (ort == ORT_SIMD)
12082 {
12083 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
12084 (splay_tree_key) decl);
12085 omp_is_private (gimplify_omp_ctxp, decl,
12086 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
12087 != 1));
12088 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
12089 {
12090 omp_notice_variable (gimplify_omp_ctxp, decl, true);
12091 if (n->value & GOVD_LASTPRIVATE_CONDITIONAL)
12092 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
12093 OMP_CLAUSE_LASTPRIVATE);
12094 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
12095 OMP_CLAUSE_LASTPRIVATE))
12096 if (OMP_CLAUSE_DECL (c3) == decl)
12097 {
12098 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
12099 "conditional %<lastprivate%> on loop "
12100 "iterator %qD ignored", decl);
12101 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
12102 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
12103 }
12104 }
12105 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1 && !loop_p)
12106 {
12107 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
12108 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
12109 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
12110 if ((has_decl_expr
12111 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
12112 || TREE_PRIVATE (t))
12113 {
12114 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
12115 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
12116 }
12117 struct gimplify_omp_ctx *outer
12118 = gimplify_omp_ctxp->outer_context;
12119 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
12120 {
12121 if (outer->region_type == ORT_WORKSHARE
12122 && outer->combined_loop)
12123 {
12124 n = splay_tree_lookup (outer->variables,
12125 (splay_tree_key)decl);
12126 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
12127 {
12128 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
12129 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
12130 }
12131 else
12132 {
12133 struct gimplify_omp_ctx *octx = outer->outer_context;
12134 if (octx
12135 && octx->region_type == ORT_COMBINED_PARALLEL
12136 && octx->outer_context
12137 && (octx->outer_context->region_type
12138 == ORT_WORKSHARE)
12139 && octx->outer_context->combined_loop)
12140 {
12141 octx = octx->outer_context;
12142 n = splay_tree_lookup (octx->variables,
12143 (splay_tree_key)decl);
12144 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
12145 {
12146 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
12147 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
12148 }
12149 }
12150 }
12151 }
12152 }
12153
12154 OMP_CLAUSE_DECL (c) = decl;
12155 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
12156 OMP_FOR_CLAUSES (for_stmt) = c;
12157 omp_add_variable (gimplify_omp_ctxp, decl, flags);
12158 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
12159 omp_lastprivate_for_combined_outer_constructs (outer, decl,
12160 true);
12161 }
12162 else
12163 {
12164 bool lastprivate
12165 = (!has_decl_expr
12166 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
12167 if (TREE_PRIVATE (t))
12168 lastprivate = false;
12169 if (loop_p && OMP_FOR_ORIG_DECLS (for_stmt))
12170 {
12171 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
12172 if (TREE_CODE (elt) == TREE_LIST && TREE_PURPOSE (elt))
12173 lastprivate = false;
12174 }
12175
12176 struct gimplify_omp_ctx *outer
12177 = gimplify_omp_ctxp->outer_context;
12178 if (outer && lastprivate)
12179 omp_lastprivate_for_combined_outer_constructs (outer, decl,
12180 true);
12181
12182 c = build_omp_clause (input_location,
12183 lastprivate ? OMP_CLAUSE_LASTPRIVATE
12184 : OMP_CLAUSE_PRIVATE);
12185 OMP_CLAUSE_DECL (c) = decl;
12186 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
12187 OMP_FOR_CLAUSES (for_stmt) = c;
12188 omp_add_variable (gimplify_omp_ctxp, decl,
12189 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
12190 | GOVD_EXPLICIT | GOVD_SEEN);
12191 c = NULL_TREE;
12192 }
12193 }
12194 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
12195 {
12196 omp_notice_variable (gimplify_omp_ctxp, decl, true);
12197 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
12198 (splay_tree_key) decl);
12199 if (n && (n->value & GOVD_LASTPRIVATE_CONDITIONAL))
12200 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
12201 OMP_CLAUSE_LASTPRIVATE);
12202 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
12203 OMP_CLAUSE_LASTPRIVATE))
12204 if (OMP_CLAUSE_DECL (c3) == decl)
12205 {
12206 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
12207 "conditional %<lastprivate%> on loop "
12208 "iterator %qD ignored", decl);
12209 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
12210 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
12211 }
12212 }
12213 else
12214 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
12215
12216 /* If DECL is not a gimple register, create a temporary variable to act
12217 as an iteration counter. This is valid, since DECL cannot be
12218 modified in the body of the loop. Similarly for any iteration vars
12219 in a simd with collapse > 1, where the iterator vars must be
12220 lastprivate. And similarly for vars mentioned in allocate clauses. */
12221 if (orig_for_stmt != for_stmt)
12222 var = decl;
12223 else if (!is_gimple_reg (decl)
12224 || (ort == ORT_SIMD
12225 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
12226 || (allocate_uids && allocate_uids->contains (decl)))
12227 {
12228 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12229 /* Make sure omp_add_variable is not called on it prematurely.
12230 We call it ourselves a few lines later. */
12231 gimplify_omp_ctxp = NULL;
12232 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
12233 gimplify_omp_ctxp = ctx;
12234 TREE_OPERAND (t, 0) = var;
12235
12236 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
12237
12238 if (ort == ORT_SIMD
12239 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
12240 {
12241 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
12242 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
12243 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
12244 OMP_CLAUSE_DECL (c2) = var;
12245 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
12246 OMP_FOR_CLAUSES (for_stmt) = c2;
12247 omp_add_variable (gimplify_omp_ctxp, var,
12248 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
12249 if (c == NULL_TREE)
12250 {
12251 c = c2;
12252 c2 = NULL_TREE;
12253 }
12254 }
12255 else
12256 omp_add_variable (gimplify_omp_ctxp, var,
12257 GOVD_PRIVATE | GOVD_SEEN);
12258 }
12259 else
12260 var = decl;
12261
12262 gimplify_omp_ctxp->in_for_exprs = true;
12263 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
12264 {
12265 tree lb = TREE_OPERAND (t, 1);
12266 tret = gimplify_expr (&TREE_VEC_ELT (lb, 1), &for_pre_body, NULL,
12267 is_gimple_val, fb_rvalue, false);
12268 ret = MIN (ret, tret);
12269 tret = gimplify_expr (&TREE_VEC_ELT (lb, 2), &for_pre_body, NULL,
12270 is_gimple_val, fb_rvalue, false);
12271 }
12272 else
12273 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
12274 is_gimple_val, fb_rvalue, false);
12275 gimplify_omp_ctxp->in_for_exprs = false;
12276 ret = MIN (ret, tret);
12277 if (ret == GS_ERROR)
12278 return ret;
12279
12280 /* Handle OMP_FOR_COND. */
12281 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
12282 gcc_assert (COMPARISON_CLASS_P (t));
12283 gcc_assert (TREE_OPERAND (t, 0) == decl);
12284
12285 gimplify_omp_ctxp->in_for_exprs = true;
12286 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
12287 {
12288 tree ub = TREE_OPERAND (t, 1);
12289 tret = gimplify_expr (&TREE_VEC_ELT (ub, 1), &for_pre_body, NULL,
12290 is_gimple_val, fb_rvalue, false);
12291 ret = MIN (ret, tret);
12292 tret = gimplify_expr (&TREE_VEC_ELT (ub, 2), &for_pre_body, NULL,
12293 is_gimple_val, fb_rvalue, false);
12294 }
12295 else
12296 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
12297 is_gimple_val, fb_rvalue, false);
12298 gimplify_omp_ctxp->in_for_exprs = false;
12299 ret = MIN (ret, tret);
12300
12301 /* Handle OMP_FOR_INCR. */
12302 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
12303 switch (TREE_CODE (t))
12304 {
12305 case PREINCREMENT_EXPR:
12306 case POSTINCREMENT_EXPR:
12307 {
12308 tree decl = TREE_OPERAND (t, 0);
12309 /* c_omp_for_incr_canonicalize_ptr() should have been
12310 called to massage things appropriately. */
12311 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
12312
12313 if (orig_for_stmt != for_stmt)
12314 break;
12315 t = build_int_cst (TREE_TYPE (decl), 1);
12316 if (c)
12317 OMP_CLAUSE_LINEAR_STEP (c) = t;
12318 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
12319 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
12320 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
12321 break;
12322 }
12323
12324 case PREDECREMENT_EXPR:
12325 case POSTDECREMENT_EXPR:
12326 /* c_omp_for_incr_canonicalize_ptr() should have been
12327 called to massage things appropriately. */
12328 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
12329 if (orig_for_stmt != for_stmt)
12330 break;
12331 t = build_int_cst (TREE_TYPE (decl), -1);
12332 if (c)
12333 OMP_CLAUSE_LINEAR_STEP (c) = t;
12334 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
12335 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
12336 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
12337 break;
12338
12339 case MODIFY_EXPR:
12340 gcc_assert (TREE_OPERAND (t, 0) == decl);
12341 TREE_OPERAND (t, 0) = var;
12342
12343 t = TREE_OPERAND (t, 1);
12344 switch (TREE_CODE (t))
12345 {
12346 case PLUS_EXPR:
12347 if (TREE_OPERAND (t, 1) == decl)
12348 {
12349 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
12350 TREE_OPERAND (t, 0) = var;
12351 break;
12352 }
12353
12354 /* Fallthru. */
12355 case MINUS_EXPR:
12356 case POINTER_PLUS_EXPR:
12357 gcc_assert (TREE_OPERAND (t, 0) == decl);
12358 TREE_OPERAND (t, 0) = var;
12359 break;
12360 default:
12361 gcc_unreachable ();
12362 }
12363
12364 gimplify_omp_ctxp->in_for_exprs = true;
12365 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
12366 is_gimple_val, fb_rvalue, false);
12367 ret = MIN (ret, tret);
12368 if (c)
12369 {
12370 tree step = TREE_OPERAND (t, 1);
12371 tree stept = TREE_TYPE (decl);
12372 if (POINTER_TYPE_P (stept))
12373 stept = sizetype;
12374 step = fold_convert (stept, step);
12375 if (TREE_CODE (t) == MINUS_EXPR)
12376 step = fold_build1 (NEGATE_EXPR, stept, step);
12377 OMP_CLAUSE_LINEAR_STEP (c) = step;
12378 if (step != TREE_OPERAND (t, 1))
12379 {
12380 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
12381 &for_pre_body, NULL,
12382 is_gimple_val, fb_rvalue, false);
12383 ret = MIN (ret, tret);
12384 }
12385 }
12386 gimplify_omp_ctxp->in_for_exprs = false;
12387 break;
12388
12389 default:
12390 gcc_unreachable ();
12391 }
12392
12393 if (c2)
12394 {
12395 gcc_assert (c);
12396 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
12397 }
12398
12399 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
12400 {
12401 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
12402 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12403 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
12404 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
12405 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
12406 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
12407 && OMP_CLAUSE_DECL (c) == decl)
12408 {
12409 if (is_doacross && (collapse == 1 || i >= collapse))
12410 t = var;
12411 else
12412 {
12413 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
12414 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
12415 gcc_assert (TREE_OPERAND (t, 0) == var);
12416 t = TREE_OPERAND (t, 1);
12417 gcc_assert (TREE_CODE (t) == PLUS_EXPR
12418 || TREE_CODE (t) == MINUS_EXPR
12419 || TREE_CODE (t) == POINTER_PLUS_EXPR);
12420 gcc_assert (TREE_OPERAND (t, 0) == var);
12421 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
12422 is_doacross ? var : decl,
12423 TREE_OPERAND (t, 1));
12424 }
12425 gimple_seq *seq;
12426 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
12427 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
12428 else
12429 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
12430 push_gimplify_context ();
12431 gimplify_assign (decl, t, seq);
12432 gimple *bind = NULL;
12433 if (gimplify_ctxp->temps)
12434 {
12435 bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
12436 *seq = NULL;
12437 gimplify_seq_add_stmt (seq, bind);
12438 }
12439 pop_gimplify_context (bind);
12440 }
12441 }
12442 if (OMP_FOR_NON_RECTANGULAR (for_stmt) && var != decl)
12443 for (int j = i + 1; j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
12444 {
12445 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
12446 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
12447 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
12448 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
12449 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
12450 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
12451 gcc_assert (COMPARISON_CLASS_P (t));
12452 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
12453 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
12454 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
12455 }
12456 }
12457
12458 BITMAP_FREE (has_decl_expr);
12459 delete allocate_uids;
12460
12461 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
12462 || (loop_p && orig_for_stmt == for_stmt))
12463 {
12464 push_gimplify_context ();
12465 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
12466 {
12467 OMP_FOR_BODY (orig_for_stmt)
12468 = build3 (BIND_EXPR, void_type_node, NULL,
12469 OMP_FOR_BODY (orig_for_stmt), NULL);
12470 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
12471 }
12472 }
12473
12474 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
12475 &for_body);
12476
12477 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
12478 || (loop_p && orig_for_stmt == for_stmt))
12479 {
12480 if (gimple_code (g) == GIMPLE_BIND)
12481 pop_gimplify_context (g);
12482 else
12483 pop_gimplify_context (NULL);
12484 }
12485
12486 if (orig_for_stmt != for_stmt)
12487 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12488 {
12489 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
12490 decl = TREE_OPERAND (t, 0);
12491 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12492 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
12493 gimplify_omp_ctxp = ctx->outer_context;
12494 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
12495 gimplify_omp_ctxp = ctx;
12496 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
12497 TREE_OPERAND (t, 0) = var;
12498 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
12499 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
12500 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
12501 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
12502 for (int j = i + 1;
12503 j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
12504 {
12505 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
12506 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
12507 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
12508 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
12509 {
12510 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
12511 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
12512 }
12513 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
12514 gcc_assert (COMPARISON_CLASS_P (t));
12515 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
12516 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
12517 {
12518 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
12519 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
12520 }
12521 }
12522 }
12523
12524 gimplify_adjust_omp_clauses (pre_p, for_body,
12525 &OMP_FOR_CLAUSES (orig_for_stmt),
12526 TREE_CODE (orig_for_stmt));
12527
12528 int kind;
12529 switch (TREE_CODE (orig_for_stmt))
12530 {
12531 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
12532 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
12533 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
12534 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
12535 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
12536 default:
12537 gcc_unreachable ();
12538 }
12539 if (loop_p && kind == GF_OMP_FOR_KIND_SIMD)
12540 {
12541 gimplify_seq_add_seq (pre_p, for_pre_body);
12542 for_pre_body = NULL;
12543 }
12544 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
12545 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
12546 for_pre_body);
12547 if (orig_for_stmt != for_stmt)
12548 gimple_omp_for_set_combined_p (gfor, true);
12549 if (gimplify_omp_ctxp
12550 && (gimplify_omp_ctxp->combined_loop
12551 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
12552 && gimplify_omp_ctxp->outer_context
12553 && gimplify_omp_ctxp->outer_context->combined_loop)))
12554 {
12555 gimple_omp_for_set_combined_into_p (gfor, true);
12556 if (gimplify_omp_ctxp->combined_loop)
12557 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
12558 else
12559 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
12560 }
12561
12562 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12563 {
12564 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
12565 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
12566 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
12567 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
12568 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
12569 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
12570 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
12571 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
12572 }
12573
12574 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
12575 constructs with GIMPLE_OMP_TASK sandwiched in between them.
12576 The outer taskloop stands for computing the number of iterations,
12577 counts for collapsed loops and holding taskloop specific clauses.
12578 The task construct stands for the effect of data sharing on the
12579 explicit task it creates and the inner taskloop stands for expansion
12580 of the static loop inside of the explicit task construct. */
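/* As a rough sketch only (clause placement follows the loop below, the
   syntax is approximate):
     #pragma omp taskloop grainsize(4) firstprivate(a) lastprivate(i)
     for (i = 0; i < n; i++) ...
   is split into approximately
     #pragma omp taskloop grainsize(4) private(i.0)   <- outer taskloop
       #pragma omp task firstprivate(a) shared(i)     <- task
         #pragma omp taskloop lastprivate(i)          <- inner taskloop
         for (i = 0; i < n; i++) ...
   where i.0 is a fresh iterator created for the outer loop. */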
12581 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
12582 {
12583 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
12584 tree task_clauses = NULL_TREE;
12585 tree c = *gfor_clauses_ptr;
12586 tree *gtask_clauses_ptr = &task_clauses;
12587 tree outer_for_clauses = NULL_TREE;
12588 tree *gforo_clauses_ptr = &outer_for_clauses;
12589 bitmap lastprivate_uids = NULL;
12590 if (omp_find_clause (c, OMP_CLAUSE_ALLOCATE))
12591 {
12592 c = omp_find_clause (c, OMP_CLAUSE_LASTPRIVATE);
12593 if (c)
12594 {
12595 lastprivate_uids = BITMAP_ALLOC (NULL);
12596 for (; c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
12597 OMP_CLAUSE_LASTPRIVATE))
12598 bitmap_set_bit (lastprivate_uids,
12599 DECL_UID (OMP_CLAUSE_DECL (c)));
12600 }
12601 c = *gfor_clauses_ptr;
12602 }
12603 for (; c; c = OMP_CLAUSE_CHAIN (c))
12604 switch (OMP_CLAUSE_CODE (c))
12605 {
12606 /* These clauses are allowed on task, move them there. */
12607 case OMP_CLAUSE_SHARED:
12608 case OMP_CLAUSE_FIRSTPRIVATE:
12609 case OMP_CLAUSE_DEFAULT:
12610 case OMP_CLAUSE_IF:
12611 case OMP_CLAUSE_UNTIED:
12612 case OMP_CLAUSE_FINAL:
12613 case OMP_CLAUSE_MERGEABLE:
12614 case OMP_CLAUSE_PRIORITY:
12615 case OMP_CLAUSE_REDUCTION:
12616 case OMP_CLAUSE_IN_REDUCTION:
12617 *gtask_clauses_ptr = c;
12618 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12619 break;
12620 case OMP_CLAUSE_PRIVATE:
12621 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
12622 {
12623 /* We want private on outer for and firstprivate
12624 on task. */
12625 *gtask_clauses_ptr
12626 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12627 OMP_CLAUSE_FIRSTPRIVATE);
12628 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
12629 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
12630 openacc);
12631 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12632 *gforo_clauses_ptr = c;
12633 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12634 }
12635 else
12636 {
12637 *gtask_clauses_ptr = c;
12638 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12639 }
12640 break;
12641 /* These clauses go into outer taskloop clauses. */
12642 case OMP_CLAUSE_GRAINSIZE:
12643 case OMP_CLAUSE_NUM_TASKS:
12644 case OMP_CLAUSE_NOGROUP:
12645 *gforo_clauses_ptr = c;
12646 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12647 break;
12648 /* The collapse clause is duplicated on both taskloops. */
12649 case OMP_CLAUSE_COLLAPSE:
12650 *gfor_clauses_ptr = c;
12651 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12652 *gforo_clauses_ptr = copy_node (c);
12653 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
12654 break;
12655 /* For lastprivate, keep the clause on the inner taskloop, and add
12656 a shared clause on the task. If the same decl is also firstprivate,
12657 also add a firstprivate clause on the inner taskloop. */
12658 case OMP_CLAUSE_LASTPRIVATE:
12659 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
12660 {
12661 /* For taskloop C++ lastprivate IVs, we want:
12662 1) private on outer taskloop
12663 2) firstprivate and shared on task
12664 3) lastprivate on inner taskloop */
12665 *gtask_clauses_ptr
12666 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12667 OMP_CLAUSE_FIRSTPRIVATE);
12668 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
12669 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
12670 openacc);
12671 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12672 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
12673 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12674 OMP_CLAUSE_PRIVATE);
12675 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
12676 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
12677 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
12678 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
12679 }
12680 *gfor_clauses_ptr = c;
12681 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12682 *gtask_clauses_ptr
12683 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
12684 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
12685 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
12686 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
12687 gtask_clauses_ptr
12688 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12689 break;
12690 /* The allocate clause is duplicated on the task and inner taskloop
12691 if the decl is lastprivate; otherwise it is just put on the task. */
12692 case OMP_CLAUSE_ALLOCATE:
12693 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
12694 && DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
12695 {
12696 /* Additionally, put firstprivate clause on task
12697 for the allocator if it is not constant. */
12698 *gtask_clauses_ptr
12699 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12700 OMP_CLAUSE_FIRSTPRIVATE);
12701 OMP_CLAUSE_DECL (*gtask_clauses_ptr)
12702 = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
12703 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12704 }
12705 if (lastprivate_uids
12706 && bitmap_bit_p (lastprivate_uids,
12707 DECL_UID (OMP_CLAUSE_DECL (c))))
12708 {
12709 *gfor_clauses_ptr = c;
12710 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12711 *gtask_clauses_ptr = copy_node (c);
12712 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12713 }
12714 else
12715 {
12716 *gtask_clauses_ptr = c;
12717 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12718 }
12719 break;
12720 default:
12721 gcc_unreachable ();
12722 }
12723 *gfor_clauses_ptr = NULL_TREE;
12724 *gtask_clauses_ptr = NULL_TREE;
12725 *gforo_clauses_ptr = NULL_TREE;
12726 BITMAP_FREE (lastprivate_uids);
12727 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
12728 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
12729 NULL_TREE, NULL_TREE, NULL_TREE);
12730 gimple_omp_task_set_taskloop_p (g, true);
12731 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
12732 gomp_for *gforo
12733 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
12734 gimple_omp_for_collapse (gfor),
12735 gimple_omp_for_pre_body (gfor));
12736 gimple_omp_for_set_pre_body (gfor, NULL);
12737 gimple_omp_for_set_combined_p (gforo, true);
12738 gimple_omp_for_set_combined_into_p (gfor, true);
12739 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
12740 {
12741 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
12742 tree v = create_tmp_var (type);
12743 gimple_omp_for_set_index (gforo, i, v);
12744 t = unshare_expr (gimple_omp_for_initial (gfor, i));
12745 gimple_omp_for_set_initial (gforo, i, t);
12746 gimple_omp_for_set_cond (gforo, i,
12747 gimple_omp_for_cond (gfor, i));
12748 t = unshare_expr (gimple_omp_for_final (gfor, i));
12749 gimple_omp_for_set_final (gforo, i, t);
12750 t = unshare_expr (gimple_omp_for_incr (gfor, i));
12751 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
12752 TREE_OPERAND (t, 0) = v;
12753 gimple_omp_for_set_incr (gforo, i, t);
12754 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
12755 OMP_CLAUSE_DECL (t) = v;
12756 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
12757 gimple_omp_for_set_clauses (gforo, t);
12758 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
12759 {
12760 tree *p1 = NULL, *p2 = NULL;
12761 t = gimple_omp_for_initial (gforo, i);
12762 if (TREE_CODE (t) == TREE_VEC)
12763 p1 = &TREE_VEC_ELT (t, 0);
12764 t = gimple_omp_for_final (gforo, i);
12765 if (TREE_CODE (t) == TREE_VEC)
12766 {
12767 if (p1)
12768 p2 = &TREE_VEC_ELT (t, 0);
12769 else
12770 p1 = &TREE_VEC_ELT (t, 0);
12771 }
12772 if (p1)
12773 {
12774 int j;
12775 for (j = 0; j < i; j++)
12776 if (*p1 == gimple_omp_for_index (gfor, j))
12777 {
12778 *p1 = gimple_omp_for_index (gforo, j);
12779 if (p2)
12780 *p2 = *p1;
12781 break;
12782 }
12783 gcc_assert (j < i);
12784 }
12785 }
12786 }
12787 gimplify_seq_add_stmt (pre_p, gforo);
12788 }
12789 else
12790 gimplify_seq_add_stmt (pre_p, gfor);
12791
12792 if (TREE_CODE (orig_for_stmt) == OMP_FOR)
12793 {
12794 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12795 unsigned lastprivate_conditional = 0;
12796 while (ctx
12797 && (ctx->region_type == ORT_TARGET_DATA
12798 || ctx->region_type == ORT_TASKGROUP))
12799 ctx = ctx->outer_context;
12800 if (ctx && (ctx->region_type & ORT_PARALLEL) != 0)
12801 for (tree c = gimple_omp_for_clauses (gfor);
12802 c; c = OMP_CLAUSE_CHAIN (c))
12803 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12804 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12805 ++lastprivate_conditional;
12806 if (lastprivate_conditional)
12807 {
12808 struct omp_for_data fd;
12809 omp_extract_for_data (gfor, &fd, NULL);
12810 tree type = build_array_type_nelts (unsigned_type_for (fd.iter_type),
12811 lastprivate_conditional);
12812 tree var = create_tmp_var_raw (type);
12813 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
12814 OMP_CLAUSE_DECL (c) = var;
12815 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
12816 gimple_omp_for_set_clauses (gfor, c);
12817 omp_add_variable (ctx, var, GOVD_CONDTEMP | GOVD_SEEN);
12818 }
12819 }
12820 else if (TREE_CODE (orig_for_stmt) == OMP_SIMD)
12821 {
12822 unsigned lastprivate_conditional = 0;
12823 for (tree c = gimple_omp_for_clauses (gfor); c; c = OMP_CLAUSE_CHAIN (c))
12824 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12825 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12826 ++lastprivate_conditional;
12827 if (lastprivate_conditional)
12828 {
12829 struct omp_for_data fd;
12830 omp_extract_for_data (gfor, &fd, NULL);
12831 tree type = unsigned_type_for (fd.iter_type);
12832 while (lastprivate_conditional--)
12833 {
12834 tree c = build_omp_clause (UNKNOWN_LOCATION,
12835 OMP_CLAUSE__CONDTEMP_);
12836 OMP_CLAUSE_DECL (c) = create_tmp_var (type);
12837 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
12838 gimple_omp_for_set_clauses (gfor, c);
12839 }
12840 }
12841 }
12842
12843 if (ret != GS_ALL_DONE)
12844 return GS_ERROR;
12845 *expr_p = NULL_TREE;
12846 return GS_ALL_DONE;
12847 }
12848
12849 /* Helper for gimplify_omp_loop, called through walk_tree. */
12850
12851 static tree
12852 replace_reduction_placeholders (tree *tp, int *walk_subtrees, void *data)
12853 {
12854 if (DECL_P (*tp))
12855 {
12856 tree *d = (tree *) data;
12857 if (*tp == OMP_CLAUSE_REDUCTION_PLACEHOLDER (d[0]))
12858 {
12859 *tp = OMP_CLAUSE_REDUCTION_PLACEHOLDER (d[1]);
12860 *walk_subtrees = 0;
12861 }
12862 else if (*tp == OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (d[0]))
12863 {
12864 *tp = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (d[1]);
12865 *walk_subtrees = 0;
12866 }
12867 }
12868 return NULL_TREE;
12869 }
12870
12871 /* Gimplify the gross structure of an OMP_LOOP statement. */
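/* A hedged summary of the strategy implemented below: the loop construct
   is turned into an OMP_SIMD, which, depending on the effective bind
   kind, is then wrapped in further constructs by the pass loop, roughly:
     bind(thread)    ->  simd
     bind(parallel)  ->  for simd
     bind(teams)     ->  distribute parallel for simd
   (an illustrative correspondence, not literal output). */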
12872
12873 static enum gimplify_status
12874 gimplify_omp_loop (tree *expr_p, gimple_seq *pre_p)
12875 {
12876 tree for_stmt = *expr_p;
12877 tree clauses = OMP_FOR_CLAUSES (for_stmt);
12878 struct gimplify_omp_ctx *octx = gimplify_omp_ctxp;
12879 enum omp_clause_bind_kind kind = OMP_CLAUSE_BIND_THREAD;
12880 int i;
12881
12882 /* If order is not present, the behavior is as if order(concurrent)
12883 appeared. */
12884 tree order = omp_find_clause (clauses, OMP_CLAUSE_ORDER);
12885 if (order == NULL_TREE)
12886 {
12887 order = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_ORDER);
12888 OMP_CLAUSE_CHAIN (order) = clauses;
12889 OMP_FOR_CLAUSES (for_stmt) = clauses = order;
12890 }
12891
12892 tree bind = omp_find_clause (clauses, OMP_CLAUSE_BIND);
12893 if (bind == NULL_TREE)
12894 {
12895 if (!flag_openmp) /* flag_openmp_simd */
12896 ;
12897 else if (octx && (octx->region_type & ORT_TEAMS) != 0)
12898 kind = OMP_CLAUSE_BIND_TEAMS;
12899 else if (octx && (octx->region_type & ORT_PARALLEL) != 0)
12900 kind = OMP_CLAUSE_BIND_PARALLEL;
12901 else
12902 {
12903 for (; octx; octx = octx->outer_context)
12904 {
12905 if ((octx->region_type & ORT_ACC) != 0
12906 || octx->region_type == ORT_NONE
12907 || octx->region_type == ORT_IMPLICIT_TARGET)
12908 continue;
12909 break;
12910 }
12911 if (octx == NULL && !in_omp_construct)
12912 error_at (EXPR_LOCATION (for_stmt),
12913 "%<bind%> clause not specified on a %<loop%> "
12914 "construct not nested inside another OpenMP construct");
12915 }
12916 bind = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_BIND);
12917 OMP_CLAUSE_CHAIN (bind) = clauses;
12918 OMP_CLAUSE_BIND_KIND (bind) = kind;
12919 OMP_FOR_CLAUSES (for_stmt) = bind;
12920 }
12921 else
12922 switch (OMP_CLAUSE_BIND_KIND (bind))
12923 {
12924 case OMP_CLAUSE_BIND_THREAD:
12925 break;
12926 case OMP_CLAUSE_BIND_PARALLEL:
12927 if (!flag_openmp) /* flag_openmp_simd */
12928 {
12929 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12930 break;
12931 }
12932 for (; octx; octx = octx->outer_context)
12933 if (octx->region_type == ORT_SIMD
12934 && omp_find_clause (octx->clauses, OMP_CLAUSE_BIND) == NULL_TREE)
12935 {
12936 error_at (EXPR_LOCATION (for_stmt),
12937 "%<bind(parallel)%> on a %<loop%> construct nested "
12938 "inside %<simd%> construct");
12939 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12940 break;
12941 }
12942 kind = OMP_CLAUSE_BIND_PARALLEL;
12943 break;
12944 case OMP_CLAUSE_BIND_TEAMS:
12945 if (!flag_openmp) /* flag_openmp_simd */
12946 {
12947 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12948 break;
12949 }
12950 if ((octx
12951 && octx->region_type != ORT_IMPLICIT_TARGET
12952 && octx->region_type != ORT_NONE
12953 && (octx->region_type & ORT_TEAMS) == 0)
12954 || in_omp_construct)
12955 {
12956 error_at (EXPR_LOCATION (for_stmt),
12957 "%<bind(teams)%> on a %<loop%> region not strictly "
12958 "nested inside of a %<teams%> region");
12959 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12960 break;
12961 }
12962 kind = OMP_CLAUSE_BIND_TEAMS;
12963 break;
12964 default:
12965 gcc_unreachable ();
12966 }
12967
12968 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
12969 switch (OMP_CLAUSE_CODE (*pc))
12970 {
12971 case OMP_CLAUSE_REDUCTION:
12972 if (OMP_CLAUSE_REDUCTION_INSCAN (*pc))
12973 {
12974 error_at (OMP_CLAUSE_LOCATION (*pc),
12975 "%<inscan%> %<reduction%> clause on "
12976 "%qs construct", "loop");
12977 OMP_CLAUSE_REDUCTION_INSCAN (*pc) = 0;
12978 }
12979 if (OMP_CLAUSE_REDUCTION_TASK (*pc))
12980 {
12981 error_at (OMP_CLAUSE_LOCATION (*pc),
12982 "invalid %<task%> reduction modifier on construct "
12983 "other than %<parallel%>, %qs or %<sections%>",
12984 lang_GNU_Fortran () ? "do" : "for");
12985 OMP_CLAUSE_REDUCTION_TASK (*pc) = 0;
12986 }
12987 pc = &OMP_CLAUSE_CHAIN (*pc);
12988 break;
12989 case OMP_CLAUSE_LASTPRIVATE:
12990 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12991 {
12992 tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
12993 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
12994 if (OMP_CLAUSE_DECL (*pc) == TREE_OPERAND (t, 0))
12995 break;
12996 if (OMP_FOR_ORIG_DECLS (for_stmt)
12997 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
12998 i)) == TREE_LIST
12999 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
13000 i)))
13001 {
13002 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
13003 if (OMP_CLAUSE_DECL (*pc) == TREE_PURPOSE (orig))
13004 break;
13005 }
13006 }
13007 if (i == TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)))
13008 {
13009 error_at (OMP_CLAUSE_LOCATION (*pc),
13010 "%<lastprivate%> clause on a %<loop%> construct refers "
13011 "to a variable %qD which is not the loop iterator",
13012 OMP_CLAUSE_DECL (*pc));
13013 *pc = OMP_CLAUSE_CHAIN (*pc);
13014 break;
13015 }
13016 pc = &OMP_CLAUSE_CHAIN (*pc);
13017 break;
13018 default:
13019 pc = &OMP_CLAUSE_CHAIN (*pc);
13020 break;
13021 }
13022
13023 TREE_SET_CODE (for_stmt, OMP_SIMD);
13024
13025 int last;
13026 switch (kind)
13027 {
13028 case OMP_CLAUSE_BIND_THREAD: last = 0; break;
13029 case OMP_CLAUSE_BIND_PARALLEL: last = 1; break;
13030 case OMP_CLAUSE_BIND_TEAMS: last = 2; break;
13031 }
13032 for (int pass = 1; pass <= last; pass++)
13033 {
13034 if (pass == 2)
13035 {
13036 tree bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
13037 append_to_statement_list (*expr_p, &BIND_EXPR_BODY (bind));
13038 *expr_p = make_node (OMP_PARALLEL);
13039 TREE_TYPE (*expr_p) = void_type_node;
13040 OMP_PARALLEL_BODY (*expr_p) = bind;
13041 OMP_PARALLEL_COMBINED (*expr_p) = 1;
13042 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (for_stmt));
13043 tree *pc = &OMP_PARALLEL_CLAUSES (*expr_p);
13044 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
13045 if (OMP_FOR_ORIG_DECLS (for_stmt)
13046 && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i))
13047 == TREE_LIST))
13048 {
13049 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
13050 if (TREE_PURPOSE (elt) && TREE_VALUE (elt))
13051 {
13052 *pc = build_omp_clause (UNKNOWN_LOCATION,
13053 OMP_CLAUSE_FIRSTPRIVATE);
13054 OMP_CLAUSE_DECL (*pc) = TREE_VALUE (elt);
13055 pc = &OMP_CLAUSE_CHAIN (*pc);
13056 }
13057 }
13058 }
13059 tree t = make_node (pass == 2 ? OMP_DISTRIBUTE : OMP_FOR);
13060 tree *pc = &OMP_FOR_CLAUSES (t);
13061 TREE_TYPE (t) = void_type_node;
13062 OMP_FOR_BODY (t) = *expr_p;
13063 SET_EXPR_LOCATION (t, EXPR_LOCATION (for_stmt));
13064 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
13065 switch (OMP_CLAUSE_CODE (c))
13066 {
13067 case OMP_CLAUSE_BIND:
13068 case OMP_CLAUSE_ORDER:
13069 case OMP_CLAUSE_COLLAPSE:
13070 *pc = copy_node (c);
13071 pc = &OMP_CLAUSE_CHAIN (*pc);
13072 break;
13073 case OMP_CLAUSE_PRIVATE:
13074 case OMP_CLAUSE_FIRSTPRIVATE:
13075 /* Only needed on innermost. */
13076 break;
13077 case OMP_CLAUSE_LASTPRIVATE:
13078 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) && pass != last)
13079 {
13080 *pc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
13081 OMP_CLAUSE_FIRSTPRIVATE);
13082 OMP_CLAUSE_DECL (*pc) = OMP_CLAUSE_DECL (c);
13083 lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
13084 pc = &OMP_CLAUSE_CHAIN (*pc);
13085 }
13086 *pc = copy_node (c);
13087 OMP_CLAUSE_LASTPRIVATE_STMT (*pc) = NULL_TREE;
13088 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
13089 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
13090 {
13091 if (pass != last)
13092 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc) = 1;
13093 else
13094 lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
13095 OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc) = 0;
13096 }
13097 pc = &OMP_CLAUSE_CHAIN (*pc);
13098 break;
13099 case OMP_CLAUSE_REDUCTION:
13100 *pc = copy_node (c);
13101 OMP_CLAUSE_DECL (*pc) = unshare_expr (OMP_CLAUSE_DECL (c));
13102 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
13103 OMP_CLAUSE_REDUCTION_INIT (*pc)
13104 = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c));
13105 OMP_CLAUSE_REDUCTION_MERGE (*pc)
13106 = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c));
13107 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc))
13108 {
13109 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc)
13110 = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c));
13111 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
13112 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc)
13113 = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c));
13114 tree nc = *pc;
13115 tree data[2] = { c, nc };
13116 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (nc),
13117 replace_reduction_placeholders,
13118 data);
13119 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (nc),
13120 replace_reduction_placeholders,
13121 data);
13122 }
13123 pc = &OMP_CLAUSE_CHAIN (*pc);
13124 break;
13125 default:
13126 gcc_unreachable ();
13127 }
13128 *pc = NULL_TREE;
13129 *expr_p = t;
13130 }
13131 return gimplify_omp_for (expr_p, pre_p);
13132 }
13133
13134
13135 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
13136 of OMP_TARGET's body. */
13137
13138 static tree
13139 find_omp_teams (tree *tp, int *walk_subtrees, void *)
13140 {
13141 *walk_subtrees = 0;
13142 switch (TREE_CODE (*tp))
13143 {
13144 case OMP_TEAMS:
13145 return *tp;
13146 case BIND_EXPR:
13147 case STATEMENT_LIST:
13148 *walk_subtrees = 1;
13149 break;
13150 default:
13151 break;
13152 }
13153 return NULL_TREE;
13154 }
13155
13156 /* Helper function of optimize_target_teams; determine if the expression
13157 can be computed safely before the target construct on the host. */
13158
13159 static tree
13160 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
13161 {
13162 splay_tree_node n;
13163
13164 if (TYPE_P (*tp))
13165 {
13166 *walk_subtrees = 0;
13167 return NULL_TREE;
13168 }
13169 switch (TREE_CODE (*tp))
13170 {
13171 case VAR_DECL:
13172 case PARM_DECL:
13173 case RESULT_DECL:
13174 *walk_subtrees = 0;
13175 if (error_operand_p (*tp)
13176 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
13177 || DECL_HAS_VALUE_EXPR_P (*tp)
13178 || DECL_THREAD_LOCAL_P (*tp)
13179 || TREE_SIDE_EFFECTS (*tp)
13180 || TREE_THIS_VOLATILE (*tp))
13181 return *tp;
13182 if (is_global_var (*tp)
13183 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
13184 || lookup_attribute ("omp declare target link",
13185 DECL_ATTRIBUTES (*tp))))
13186 return *tp;
13187 if (VAR_P (*tp)
13188 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
13189 && !is_global_var (*tp)
13190 && decl_function_context (*tp) == current_function_decl)
13191 return *tp;
13192 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
13193 (splay_tree_key) *tp);
13194 if (n == NULL)
13195 {
13196 if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
13197 return NULL_TREE;
13198 return *tp;
13199 }
13200 else if (n->value & GOVD_LOCAL)
13201 return *tp;
13202 else if (n->value & GOVD_FIRSTPRIVATE)
13203 return NULL_TREE;
13204 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
13205 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
13206 return NULL_TREE;
13207 return *tp;
13208 case INTEGER_CST:
13209 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
13210 return *tp;
13211 return NULL_TREE;
13212 case TARGET_EXPR:
13213 if (TARGET_EXPR_INITIAL (*tp)
13214 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
13215 return *tp;
13216 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
13217 walk_subtrees, NULL);
13218 /* Allow a reasonable subset of integral arithmetic. */
13219 case PLUS_EXPR:
13220 case MINUS_EXPR:
13221 case MULT_EXPR:
13222 case TRUNC_DIV_EXPR:
13223 case CEIL_DIV_EXPR:
13224 case FLOOR_DIV_EXPR:
13225 case ROUND_DIV_EXPR:
13226 case TRUNC_MOD_EXPR:
13227 case CEIL_MOD_EXPR:
13228 case FLOOR_MOD_EXPR:
13229 case ROUND_MOD_EXPR:
13230 case RDIV_EXPR:
13231 case EXACT_DIV_EXPR:
13232 case MIN_EXPR:
13233 case MAX_EXPR:
13234 case LSHIFT_EXPR:
13235 case RSHIFT_EXPR:
13236 case BIT_IOR_EXPR:
13237 case BIT_XOR_EXPR:
13238 case BIT_AND_EXPR:
13239 case NEGATE_EXPR:
13240 case ABS_EXPR:
13241 case BIT_NOT_EXPR:
13242 case NON_LVALUE_EXPR:
13243 CASE_CONVERT:
13244 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
13245 return *tp;
13246 return NULL_TREE;
13247 /* And disallow anything else, except for comparisons. */
13248 default:
13249 if (COMPARISON_CLASS_P (*tp))
13250 return NULL_TREE;
13251 return *tp;
13252 }
13253 }
13254
13255 /* Try to determine if the num_teams and/or thread_limit expressions
13256 can have their values computed already before entering the
13257 target construct.
13258 INTEGER_CSTs trivially can; so can integral decls that are
13259 firstprivate (explicitly or implicitly) or explicitly mapped with
13260 map(always, to:) or map(always, tofrom:) on the target region, and
13261 expressions involving simple arithmetic on those. Function calls
13262 are not OK, nor is dereferencing anything, etc.
13263 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
13264 EXPR based on what we find:
13265 0 stands for a clause not specified at all; use the implementation
13266 default. -1 stands for a value that can't be determined easily
13267 before entering the target construct.
13268 If no teams construct is present at all, use 1 for num_teams
13269 and 0 for thread_limit (only one team is involved, and the thread
13270 limit is implementation defined). */
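/* For example (illustrative only):
     int n = bar ();
     #pragma omp target map(always, to: n)
     #pragma omp teams num_teams(n + 1)
   lets num_teams(n + 1) be evaluated on the host before the target
   construct, while something like num_teams(foo ()) is recorded
   as -1. */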
13271
13272 static void
13273 optimize_target_teams (tree target, gimple_seq *pre_p)
13274 {
13275 tree body = OMP_BODY (target);
13276 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
13277 tree num_teams = integer_zero_node;
13278 tree thread_limit = integer_zero_node;
13279 location_t num_teams_loc = EXPR_LOCATION (target);
13280 location_t thread_limit_loc = EXPR_LOCATION (target);
13281 tree c, *p, expr;
13282 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
13283
13284 if (teams == NULL_TREE)
13285 num_teams = integer_one_node;
13286 else
13287 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
13288 {
13289 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
13290 {
13291 p = &num_teams;
13292 num_teams_loc = OMP_CLAUSE_LOCATION (c);
13293 }
13294 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
13295 {
13296 p = &thread_limit;
13297 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
13298 }
13299 else
13300 continue;
13301 expr = OMP_CLAUSE_OPERAND (c, 0);
13302 if (TREE_CODE (expr) == INTEGER_CST)
13303 {
13304 *p = expr;
13305 continue;
13306 }
13307 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
13308 {
13309 *p = integer_minus_one_node;
13310 continue;
13311 }
13312 *p = expr;
13313 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
13314 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
13315 == GS_ERROR)
13316 {
13317 gimplify_omp_ctxp = target_ctx;
13318 *p = integer_minus_one_node;
13319 continue;
13320 }
13321 gimplify_omp_ctxp = target_ctx;
13322 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
13323 OMP_CLAUSE_OPERAND (c, 0) = *p;
13324 }
13325 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
13326 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
13327 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
13328 OMP_TARGET_CLAUSES (target) = c;
13329 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
13330 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
13331 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
13332 OMP_TARGET_CLAUSES (target) = c;
13333 }
13334
13335 /* Gimplify the gross structure of several OMP constructs. */
13336
13337 static void
13338 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
13339 {
13340 tree expr = *expr_p;
13341 gimple *stmt;
13342 gimple_seq body = NULL;
13343 enum omp_region_type ort;
13344
13345 switch (TREE_CODE (expr))
13346 {
13347 case OMP_SECTIONS:
13348 case OMP_SINGLE:
13349 ort = ORT_WORKSHARE;
13350 break;
13351 case OMP_TARGET:
13352 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
13353 break;
13354 case OACC_KERNELS:
13355 ort = ORT_ACC_KERNELS;
13356 break;
13357 case OACC_PARALLEL:
13358 ort = ORT_ACC_PARALLEL;
13359 break;
13360 case OACC_SERIAL:
13361 ort = ORT_ACC_SERIAL;
13362 break;
13363 case OACC_DATA:
13364 ort = ORT_ACC_DATA;
13365 break;
13366 case OMP_TARGET_DATA:
13367 ort = ORT_TARGET_DATA;
13368 break;
13369 case OMP_TEAMS:
13370 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
13371 if (gimplify_omp_ctxp == NULL
13372 || gimplify_omp_ctxp->region_type == ORT_IMPLICIT_TARGET)
13373 ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
13374 break;
13375 case OACC_HOST_DATA:
13376 ort = ORT_ACC_HOST_DATA;
13377 break;
13378 default:
13379 gcc_unreachable ();
13380 }
13381
13382 bool save_in_omp_construct = in_omp_construct;
13383 if ((ort & ORT_ACC) == 0)
13384 in_omp_construct = false;
13385 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
13386 TREE_CODE (expr));
13387 if (TREE_CODE (expr) == OMP_TARGET)
13388 optimize_target_teams (expr, pre_p);
13389 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
13390 || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
13391 {
13392 push_gimplify_context ();
13393 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
13394 if (gimple_code (g) == GIMPLE_BIND)
13395 pop_gimplify_context (g);
13396 else
13397 pop_gimplify_context (NULL);
13398 if ((ort & ORT_TARGET_DATA) != 0)
13399 {
13400 enum built_in_function end_ix;
13401 switch (TREE_CODE (expr))
13402 {
13403 case OACC_DATA:
13404 case OACC_HOST_DATA:
13405 end_ix = BUILT_IN_GOACC_DATA_END;
13406 break;
13407 case OMP_TARGET_DATA:
13408 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
13409 break;
13410 default:
13411 gcc_unreachable ();
13412 }
13413 tree fn = builtin_decl_explicit (end_ix);
13414 g = gimple_build_call (fn, 0);
13415 gimple_seq cleanup = NULL;
13416 gimple_seq_add_stmt (&cleanup, g);
13417 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
13418 body = NULL;
13419 gimple_seq_add_stmt (&body, g);
13420 }
13421 }
13422 else
13423 gimplify_and_add (OMP_BODY (expr), &body);
13424 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
13425 TREE_CODE (expr));
13426 in_omp_construct = save_in_omp_construct;
13427
13428 switch (TREE_CODE (expr))
13429 {
13430 case OACC_DATA:
13431 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
13432 OMP_CLAUSES (expr));
13433 break;
13434 case OACC_HOST_DATA:
13435 if (omp_find_clause (OMP_CLAUSES (expr), OMP_CLAUSE_IF_PRESENT))
13436 {
13437 for (tree c = OMP_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
13438 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
13439 OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c) = 1;
13440 }
13441
13442 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
13443 OMP_CLAUSES (expr));
13444 break;
13445 case OACC_KERNELS:
13446 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
13447 OMP_CLAUSES (expr));
13448 break;
13449 case OACC_PARALLEL:
13450 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
13451 OMP_CLAUSES (expr));
13452 break;
13453 case OACC_SERIAL:
13454 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_SERIAL,
13455 OMP_CLAUSES (expr));
13456 break;
13457 case OMP_SECTIONS:
13458 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
13459 break;
13460 case OMP_SINGLE:
13461 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
13462 break;
13463 case OMP_TARGET:
13464 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
13465 OMP_CLAUSES (expr));
13466 break;
13467 case OMP_TARGET_DATA:
13468 /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
13469 to be evaluated before the use_device_{ptr,addr} clauses if they
13470 refer to the same variables. */
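/* E.g. (illustrative) for
     #pragma omp target data map(tofrom: p[0:n]) use_device_ptr(p)
   the clause chain is rearranged so the map clause is processed
   before use_device_ptr(p). */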
13471 {
13472 tree use_device_clauses;
13473 tree *pc, *uc = &use_device_clauses;
13474 for (pc = &OMP_CLAUSES (expr); *pc; )
13475 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_PTR
13476 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_ADDR)
13477 {
13478 *uc = *pc;
13479 *pc = OMP_CLAUSE_CHAIN (*pc);
13480 uc = &OMP_CLAUSE_CHAIN (*uc);
13481 }
13482 else
13483 pc = &OMP_CLAUSE_CHAIN (*pc);
13484 *uc = NULL_TREE;
13485 *pc = use_device_clauses;
13486 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
13487 OMP_CLAUSES (expr));
13488 }
13489 break;
13490 case OMP_TEAMS:
13491 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
13492 if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
13493 gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
13494 break;
13495 default:
13496 gcc_unreachable ();
13497 }
13498
13499 gimplify_seq_add_stmt (pre_p, stmt);
13500 *expr_p = NULL_TREE;
13501 }
13502
13503 /* Gimplify the gross structure of OpenACC enter/exit data, update, and
13504 OpenMP target update and target enter/exit data constructs. */
13505
13506 static void
13507 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
13508 {
13509 tree expr = *expr_p;
13510 int kind;
13511 gomp_target *stmt;
13512 enum omp_region_type ort = ORT_WORKSHARE;
13513
13514 switch (TREE_CODE (expr))
13515 {
13516 case OACC_ENTER_DATA:
13517 kind = GF_OMP_TARGET_KIND_OACC_ENTER_DATA;
13518 ort = ORT_ACC;
13519 break;
13520 case OACC_EXIT_DATA:
13521 kind = GF_OMP_TARGET_KIND_OACC_EXIT_DATA;
13522 ort = ORT_ACC;
13523 break;
13524 case OACC_UPDATE:
13525 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
13526 ort = ORT_ACC;
13527 break;
13528 case OMP_TARGET_UPDATE:
13529 kind = GF_OMP_TARGET_KIND_UPDATE;
13530 break;
13531 case OMP_TARGET_ENTER_DATA:
13532 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
13533 break;
13534 case OMP_TARGET_EXIT_DATA:
13535 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
13536 break;
13537 default:
13538 gcc_unreachable ();
13539 }
13540 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
13541 ort, TREE_CODE (expr));
13542 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
13543 TREE_CODE (expr));
13544 if (TREE_CODE (expr) == OACC_UPDATE
13545 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
13546 OMP_CLAUSE_IF_PRESENT))
13547 {
13548 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
13549 clause. */
13550 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
13551 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
13552 switch (OMP_CLAUSE_MAP_KIND (c))
13553 {
13554 case GOMP_MAP_FORCE_TO:
13555 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
13556 break;
13557 case GOMP_MAP_FORCE_FROM:
13558 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
13559 break;
13560 default:
13561 break;
13562 }
13563 }
13564 else if (TREE_CODE (expr) == OACC_EXIT_DATA
13565 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
13566 OMP_CLAUSE_FINALIZE))
13567 {
13568 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
13569 semantics. */
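/* E.g. (sketch) "#pragma acc exit data copyout(a) finalize" has its
   GOMP_MAP_FROM rewritten to GOMP_MAP_FORCE_FROM below, and
   "#pragma acc exit data delete(a) finalize" has GOMP_MAP_RELEASE
   rewritten to GOMP_MAP_DELETE. */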
13570 bool have_clause = false;
13571 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
13572 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
13573 switch (OMP_CLAUSE_MAP_KIND (c))
13574 {
13575 case GOMP_MAP_FROM:
13576 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
13577 have_clause = true;
13578 break;
13579 case GOMP_MAP_RELEASE:
13580 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
13581 have_clause = true;
13582 break;
13583 case GOMP_MAP_TO_PSET:
13584 /* Fortran arrays with descriptors must map that descriptor when
13585 doing standalone "attach" operations (in OpenACC). In that
13586 case GOMP_MAP_TO_PSET appears by itself with no preceding
13587 clause (see trans-openmp.c:gfc_trans_omp_clauses). */
13588 break;
13589 case GOMP_MAP_POINTER:
13590 /* TODO PR92929: we may see these here, but they'll always follow
13591 one of the clauses above, and will be handled by libgomp as
13592 one group, so no handling required here. */
13593 gcc_assert (have_clause);
13594 break;
13595 case GOMP_MAP_DETACH:
13596 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_DETACH);
13597 have_clause = false;
13598 break;
13599 case GOMP_MAP_STRUCT:
13600 have_clause = false;
13601 break;
13602 default:
13603 gcc_unreachable ();
13604 }
13605 }
13606 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
13607
13608 gimplify_seq_add_stmt (pre_p, stmt);
13609 *expr_p = NULL_TREE;
13610 }
13611
13612 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
13613 stabilized the lhs of the atomic operation as *ADDR. Return true if
13614 EXPR is this stabilized form. */
13615
13616 static bool
13617 goa_lhs_expr_p (tree expr, tree addr)
13618 {
13619 /* Also include casts to other type variants. The C front end is fond
13620 of adding these for e.g. volatile variables. This is like
13621 STRIP_TYPE_NOPS but includes the main variant lookup. */
13622 STRIP_USELESS_TYPE_CONVERSION (expr);
13623
13624 if (TREE_CODE (expr) == INDIRECT_REF)
13625 {
13626 expr = TREE_OPERAND (expr, 0);
13627 while (expr != addr
13628 && (CONVERT_EXPR_P (expr)
13629 || TREE_CODE (expr) == NON_LVALUE_EXPR)
13630 && TREE_CODE (expr) == TREE_CODE (addr)
13631 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
13632 {
13633 expr = TREE_OPERAND (expr, 0);
13634 addr = TREE_OPERAND (addr, 0);
13635 }
13636 if (expr == addr)
13637 return true;
13638 return (TREE_CODE (addr) == ADDR_EXPR
13639 && TREE_CODE (expr) == ADDR_EXPR
13640 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
13641 }
13642 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
13643 return true;
13644 return false;
13645 }
13646
13647 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
13648 expression does not involve the lhs, evaluate it into a temporary.
13649 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
13650 or -1 if an error was encountered. */
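/* For instance (illustrative): stabilizing the rhs of
   "#pragma omp atomic" on "x = x + foo (y)" evaluates "foo (y)" into
   a temporary in PRE_P, since it does not involve the lhs, and
   replaces the occurrence of "x" with LHS_VAR, returning 1. */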
13651
13652 static int
13653 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
13654 tree lhs_var)
13655 {
13656 tree expr = *expr_p;
13657 int saw_lhs;
13658
13659 if (goa_lhs_expr_p (expr, lhs_addr))
13660 {
13661 *expr_p = lhs_var;
13662 return 1;
13663 }
13664 if (is_gimple_val (expr))
13665 return 0;
13666
13667 saw_lhs = 0;
13668 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
13669 {
13670 case tcc_binary:
13671 case tcc_comparison:
13672 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
13673 lhs_var);
13674 /* FALLTHRU */
13675 case tcc_unary:
13676 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
13677 lhs_var);
13678 break;
13679 case tcc_expression:
13680 switch (TREE_CODE (expr))
13681 {
13682 case TRUTH_ANDIF_EXPR:
13683 case TRUTH_ORIF_EXPR:
13684 case TRUTH_AND_EXPR:
13685 case TRUTH_OR_EXPR:
13686 case TRUTH_XOR_EXPR:
13687 case BIT_INSERT_EXPR:
13688 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
13689 lhs_addr, lhs_var);
13690 /* FALLTHRU */
13691 case TRUTH_NOT_EXPR:
13692 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
13693 lhs_addr, lhs_var);
13694 break;
13695 case COMPOUND_EXPR:
13696 /* Break out any preevaluations from cp_build_modify_expr. */
13697 for (; TREE_CODE (expr) == COMPOUND_EXPR;
13698 expr = TREE_OPERAND (expr, 1))
13699 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
13700 *expr_p = expr;
13701 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
13702 default:
13703 break;
13704 }
13705 break;
13706 case tcc_reference:
13707 if (TREE_CODE (expr) == BIT_FIELD_REF)
13708 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
13709 lhs_addr, lhs_var);
13710 break;
13711 default:
13712 break;
13713 }
13714
13715 if (saw_lhs == 0)
13716 {
13717 enum gimplify_status gs;
13718 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
13719 if (gs != GS_ALL_DONE)
13720 saw_lhs = -1;
13721 }
13722
13723 return saw_lhs;
13724 }
13725
13726 /* Gimplify an OMP_ATOMIC statement. */
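/* A hedged sketch of the lowering (the temporary name is invented):
     #pragma omp atomic capture
     v = x += 1;
   becomes approximately
     tmp = GIMPLE_OMP_ATOMIC_LOAD <&x>;
     GIMPLE_OMP_ATOMIC_STORE <tmp + 1>;   marked as needing the value
     v = tmp + 1;
   for OMP_ATOMIC_CAPTURE_NEW; a plain OMP_ATOMIC has no result. */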
13727
13728 static enum gimplify_status
13729 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
13730 {
13731 tree addr = TREE_OPERAND (*expr_p, 0);
13732 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
13733 ? NULL : TREE_OPERAND (*expr_p, 1);
13734 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
13735 tree tmp_load;
13736 gomp_atomic_load *loadstmt;
13737 gomp_atomic_store *storestmt;
13738
13739 tmp_load = create_tmp_reg (type);
13740 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
13741 return GS_ERROR;
13742
13743 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
13744 != GS_ALL_DONE)
13745 return GS_ERROR;
13746
13747 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
13748 OMP_ATOMIC_MEMORY_ORDER (*expr_p));
13749 gimplify_seq_add_stmt (pre_p, loadstmt);
13750 if (rhs)
13751 {
13752 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
13753 representatives. Use BIT_FIELD_REF on the lhs instead. */
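/* I.e. (sketch) rather than "tmp = BIT_INSERT_EXPR <tmp, op1, pos>",
   emit "BIT_FIELD_REF <store, size, pos> = op1" before the atomic
   store, where STORE is TMP_LOAD itself, or a copy of it for
   OMP_ATOMIC_CAPTURE_OLD so the old value survives. */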
13754 if (TREE_CODE (rhs) == BIT_INSERT_EXPR
13755 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
13756 {
13757 tree bitpos = TREE_OPERAND (rhs, 2);
13758 tree op1 = TREE_OPERAND (rhs, 1);
13759 tree bitsize;
13760 tree tmp_store = tmp_load;
13761 if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
13762 tmp_store = get_initialized_tmp_var (tmp_load, pre_p);
13763 if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
13764 bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
13765 else
13766 bitsize = TYPE_SIZE (TREE_TYPE (op1));
13767 gcc_assert (TREE_OPERAND (rhs, 0) == tmp_load);
13768 tree t = build2_loc (EXPR_LOCATION (rhs),
13769 MODIFY_EXPR, void_type_node,
13770 build3_loc (EXPR_LOCATION (rhs), BIT_FIELD_REF,
13771 TREE_TYPE (op1), tmp_store, bitsize,
13772 bitpos), op1);
13773 gimplify_and_add (t, pre_p);
13774 rhs = tmp_store;
13775 }
13776 if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
13777 != GS_ALL_DONE)
13778 return GS_ERROR;
13779 }
13780
13781 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
13782 rhs = tmp_load;
13783 storestmt
13784 = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
13785 gimplify_seq_add_stmt (pre_p, storestmt);
13786 switch (TREE_CODE (*expr_p))
13787 {
13788 case OMP_ATOMIC_READ:
13789 case OMP_ATOMIC_CAPTURE_OLD:
13790 *expr_p = tmp_load;
13791 gimple_omp_atomic_set_need_value (loadstmt);
13792 break;
13793 case OMP_ATOMIC_CAPTURE_NEW:
13794 *expr_p = rhs;
13795 gimple_omp_atomic_set_need_value (storestmt);
13796 break;
13797 default:
13798 *expr_p = NULL;
13799 break;
13800 }
13801
13802 return GS_ALL_DONE;
13803 }
13804
13805 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
13806 body, and adding some EH bits. */
13807
13808 static enum gimplify_status
13809 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
13810 {
13811 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
13812 gimple *body_stmt;
13813 gtransaction *trans_stmt;
13814 gimple_seq body = NULL;
13815 int subcode = 0;
13816
13817 /* Wrap the transaction body in a BIND_EXPR so we have a context
13818 where to put decls for OMP. */
13819 if (TREE_CODE (tbody) != BIND_EXPR)
13820 {
13821 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
13822 TREE_SIDE_EFFECTS (bind) = 1;
13823 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
13824 TRANSACTION_EXPR_BODY (expr) = bind;
13825 }
13826
13827 push_gimplify_context ();
13828 temp = voidify_wrapper_expr (*expr_p, NULL);
13829
13830 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
13831 pop_gimplify_context (body_stmt);
13832
13833 trans_stmt = gimple_build_transaction (body);
13834 if (TRANSACTION_EXPR_OUTER (expr))
13835 subcode = GTMA_IS_OUTER;
13836 else if (TRANSACTION_EXPR_RELAXED (expr))
13837 subcode = GTMA_IS_RELAXED;
13838 gimple_transaction_set_subcode (trans_stmt, subcode);
13839
13840 gimplify_seq_add_stmt (pre_p, trans_stmt);
13841
13842 if (temp)
13843 {
13844 *expr_p = temp;
13845 return GS_OK;
13846 }
13847
13848 *expr_p = NULL_TREE;
13849 return GS_ALL_DONE;
13850 }
13851
13852 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
13853 is the OMP_BODY of the original EXPR (which has already been
13854 gimplified so it's not present in the EXPR).
13855
13856 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
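/* For instance (illustrative), given
     #pragma omp for ordered(2)
     for (i = ...; ...; ...)
       for (j = ...; ...; ...)
         {
           #pragma omp ordered depend(sink: i-1, j)
           ...
           #pragma omp ordered depend(source)
         }
   each sink variable is checked against the recorded iteration
   variables of the two outermost loops and replaced by the
   corresponding gimplified variable. */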
13857
13858 static gimple *
13859 gimplify_omp_ordered (tree expr, gimple_seq body)
13860 {
13861 tree c, decls;
13862 int failures = 0;
13863 unsigned int i;
13864 tree source_c = NULL_TREE;
13865 tree sink_c = NULL_TREE;
13866
13867 if (gimplify_omp_ctxp)
13868 {
13869 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
13870 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
13871 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
13872 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
13873 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
13874 {
13875 error_at (OMP_CLAUSE_LOCATION (c),
13876 "%<ordered%> construct with %<depend%> clause must be "
13877 "closely nested inside a loop with %<ordered%> clause "
13878 "with a parameter");
13879 failures++;
13880 }
13881 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
13882 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
13883 {
13884 bool fail = false;
13885 for (decls = OMP_CLAUSE_DECL (c), i = 0;
13886 decls && TREE_CODE (decls) == TREE_LIST;
13887 decls = TREE_CHAIN (decls), ++i)
13888 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
13889 continue;
13890 else if (TREE_VALUE (decls)
13891 != gimplify_omp_ctxp->loop_iter_var[2 * i])
13892 {
13893 error_at (OMP_CLAUSE_LOCATION (c),
13894 "variable %qE is not an iteration "
13895 "of outermost loop %d, expected %qE",
13896 TREE_VALUE (decls), i + 1,
13897 gimplify_omp_ctxp->loop_iter_var[2 * i]);
13898 fail = true;
13899 failures++;
13900 }
13901 else
13902 TREE_VALUE (decls)
13903 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
13904 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
13905 {
13906 error_at (OMP_CLAUSE_LOCATION (c),
13907 "number of variables in %<depend%> clause with "
13908 "%<sink%> modifier does not match number of "
13909 "iteration variables");
13910 failures++;
13911 }
13912 sink_c = c;
13913 }
13914 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
13915 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
13916 {
13917 if (source_c)
13918 {
13919 error_at (OMP_CLAUSE_LOCATION (c),
13920 "more than one %<depend%> clause with %<source%> "
13921 "modifier on an %<ordered%> construct");
13922 failures++;
13923 }
13924 else
13925 source_c = c;
13926 }
13927 }
13928 if (source_c && sink_c)
13929 {
13930 error_at (OMP_CLAUSE_LOCATION (source_c),
13931 "%<depend%> clause with %<source%> modifier specified "
13932 "together with %<depend%> clauses with %<sink%> modifier "
13933 "on the same construct");
13934 failures++;
13935 }
13936
13937 if (failures)
13938 return gimple_build_nop ();
13939 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
13940 }
13941
13942 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
13943 expression produces a value to be used as an operand inside a GIMPLE
13944 statement, the value will be stored back in *EXPR_P. This value will
13945 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
13946 an SSA_NAME. The corresponding sequence of GIMPLE statements is
13947 emitted in PRE_P and POST_P.
13948
13949 Additionally, this process may overwrite parts of the input
13950 expression during gimplification. Ideally, it should be
13951 possible to do non-destructive gimplification.
13952
13953 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
13954 the expression needs to evaluate to a value to be used as
13955 an operand in a GIMPLE statement, this value will be stored in
13956 *EXPR_P on exit. This happens when the caller specifies one
13957 of fb_lvalue or fb_rvalue fallback flags.
13958
13959 PRE_P will contain the sequence of GIMPLE statements corresponding
13960 to the evaluation of EXPR and all the side-effects that must
13961 be executed before the main expression. On exit, the last
13962 statement of PRE_P is the core statement being gimplified. For
13963 instance, when gimplifying 'if (++a)' the last statement in
13964 PRE_P will be 'if (t.1)' where t.1 is the result of
13965 pre-incrementing 'a'.
13966
13967 POST_P will contain the sequence of GIMPLE statements corresponding
13968 to the evaluation of all the side-effects that must be executed
13969 after the main expression. If this is NULL, the post
13970 side-effects are stored at the end of PRE_P.
13971
13972 The reason why the output is split in two is to handle post
13973 side-effects explicitly. In some cases, an expression may have
13974 inner and outer post side-effects which need to be emitted in
13975 an order different from the one given by the recursive
13976 traversal. For instance, for the expression (*p--)++ the post
13977 side-effects of '--' must actually occur *after* the post
13978 side-effects of '++'. However, gimplification will first visit
13979 the inner expression, so if a separate POST sequence was not
13980 used, the resulting sequence would be:
13981
13982 1 t.1 = *p
13983 2 p = p - 1
13984 3 t.2 = t.1 + 1
13985 4 *p = t.2
13986
13987 However, the post-decrement operation in line #2 must not be
13988 evaluated until after the store to *p at line #4, so the
13989 correct sequence should be:
13990
13991 1 t.1 = *p
13992 2 t.2 = t.1 + 1
13993 3 *p = t.2
13994 4 p = p - 1
13995
13996 So, by specifying a separate post queue, it is possible
13997 to emit the post side-effects in the correct order.
13998 If POST_P is NULL, an internal queue will be used. Before
13999 returning to the caller, the sequence POST_P is appended to
14000 the main output sequence PRE_P.
14001
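   As an illustration (a hypothetical caller sketch, not a quote of any
   actual caller), gimplifying an operand to an rvalue might look like:

     gimple_seq pre = NULL;
     enum gimplify_status gs
       = gimplify_expr (&op, &pre, NULL, is_gimple_val, fb_rvalue);

   On success, OP is a valid GIMPLE operand and PRE holds the statements
   that compute it; since POST_P was NULL, the internal post queue has
   already been appended to PRE.
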
14002 GIMPLE_TEST_F points to a function that takes a tree T and
14003 returns nonzero if T is in the GIMPLE form requested by the
14004 caller. The GIMPLE predicates are in gimple.c.
14005
14006 FALLBACK tells the function what sort of a temporary we want if
14007 gimplification cannot produce an expression that complies with
14008 GIMPLE_TEST_F.
14009
14010 fb_none means that no temporary should be generated
14011 fb_rvalue means that an rvalue is OK to generate
14012 fb_lvalue means that an lvalue is OK to generate
14013 fb_either means that either is OK, but an lvalue is preferable.
14014 fb_mayfail means that gimplification may fail (in which case
14015 GS_ERROR will be returned)
14016
14017 The return value is either GS_ERROR or GS_ALL_DONE, since this
14018 function iterates until EXPR is completely gimplified or an error
14019 occurs. */
14020
14021 enum gimplify_status
14022 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
14023 bool (*gimple_test_f) (tree), fallback_t fallback)
14024 {
14025 tree tmp;
14026 gimple_seq internal_pre = NULL;
14027 gimple_seq internal_post = NULL;
14028 tree save_expr;
14029 bool is_statement;
14030 location_t saved_location;
14031 enum gimplify_status ret;
14032 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
14033 tree label;
14034
14035 save_expr = *expr_p;
14036 if (save_expr == NULL_TREE)
14037 return GS_ALL_DONE;
14038
14039 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
14040 is_statement = gimple_test_f == is_gimple_stmt;
14041 if (is_statement)
14042 gcc_assert (pre_p);
14043
14044 /* Consistency checks. */
14045 if (gimple_test_f == is_gimple_reg)
14046 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
14047 else if (gimple_test_f == is_gimple_val
14048 || gimple_test_f == is_gimple_call_addr
14049 || gimple_test_f == is_gimple_condexpr
14050 || gimple_test_f == is_gimple_condexpr_for_cond
14051 || gimple_test_f == is_gimple_mem_rhs
14052 || gimple_test_f == is_gimple_mem_rhs_or_call
14053 || gimple_test_f == is_gimple_reg_rhs
14054 || gimple_test_f == is_gimple_reg_rhs_or_call
14055 || gimple_test_f == is_gimple_asm_val
14056 || gimple_test_f == is_gimple_mem_ref_addr)
14057 gcc_assert (fallback & fb_rvalue);
14058 else if (gimple_test_f == is_gimple_min_lval
14059 || gimple_test_f == is_gimple_lvalue)
14060 gcc_assert (fallback & fb_lvalue);
14061 else if (gimple_test_f == is_gimple_addressable)
14062 gcc_assert (fallback & fb_either);
14063 else if (gimple_test_f == is_gimple_stmt)
14064 gcc_assert (fallback == fb_none);
14065 else
14066 {
14067 /* We should have recognized the GIMPLE_TEST_F predicate to
14068 know what kind of fallback to use in case a temporary is
14069 needed to hold the value or address of *EXPR_P. */
14070 gcc_unreachable ();
14071 }
14072
14073 /* We used to check the predicate here and return immediately if it
14074 succeeds. This is wrong; the design is for gimplification to be
14075 idempotent, and for the predicates to only test for valid forms, not
14076 whether they are fully simplified. */
14077 if (pre_p == NULL)
14078 pre_p = &internal_pre;
14079
14080 if (post_p == NULL)
14081 post_p = &internal_post;
14082
14083 /* Remember the last statements added to PRE_P and POST_P. Every
14084 new statement added by the gimplification helpers needs to be
14085 annotated with location information. To centralize the
14086 responsibility, we remember the last statement that had been
14087 added to both queues before gimplifying *EXPR_P. If
14088 gimplification produces new statements in PRE_P and POST_P, those
14089 statements will be annotated with the same location information
14090 as *EXPR_P. */
14091 pre_last_gsi = gsi_last (*pre_p);
14092 post_last_gsi = gsi_last (*post_p);
14093
14094 saved_location = input_location;
14095 if (save_expr != error_mark_node
14096 && EXPR_HAS_LOCATION (*expr_p))
14097 input_location = EXPR_LOCATION (*expr_p);
14098
14099 /* Loop over the specific gimplifiers until the toplevel node
14100 no longer changes. */
14101 do
14102 {
14103 /* Strip away as many useless type conversions as possible
14104 at the toplevel. */
14105 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
14106
14107 /* Remember the expr. */
14108 save_expr = *expr_p;
14109
14110 /* Die, die, die, my darling. */
14111 if (error_operand_p (save_expr))
14112 {
14113 ret = GS_ERROR;
14114 break;
14115 }
14116
14117 /* Do any language-specific gimplification. */
14118 ret = ((enum gimplify_status)
14119 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
14120 if (ret == GS_OK)
14121 {
14122 if (*expr_p == NULL_TREE)
14123 break;
14124 if (*expr_p != save_expr)
14125 continue;
14126 }
14127 else if (ret != GS_UNHANDLED)
14128 break;
14129
14130 /* Make sure that all the cases set 'ret' appropriately. */
14131 ret = GS_UNHANDLED;
14132 switch (TREE_CODE (*expr_p))
14133 {
14134 /* First deal with the special cases. */
14135
14136 case POSTINCREMENT_EXPR:
14137 case POSTDECREMENT_EXPR:
14138 case PREINCREMENT_EXPR:
14139 case PREDECREMENT_EXPR:
14140 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
14141 fallback != fb_none,
14142 TREE_TYPE (*expr_p));
14143 break;
14144
14145 case VIEW_CONVERT_EXPR:
14146 if ((fallback & fb_rvalue)
14147 && is_gimple_reg_type (TREE_TYPE (*expr_p))
14148 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
14149 {
14150 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14151 post_p, is_gimple_val, fb_rvalue);
14152 recalculate_side_effects (*expr_p);
14153 break;
14154 }
14155 /* Fallthru. */
14156
14157 case ARRAY_REF:
14158 case ARRAY_RANGE_REF:
14159 case REALPART_EXPR:
14160 case IMAGPART_EXPR:
14161 case COMPONENT_REF:
14162 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
14163 fallback ? fallback : fb_rvalue);
14164 break;
14165
14166 case COND_EXPR:
14167 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
14168
14169 /* C99 code may assign to an array in a structure value of a
14170 conditional expression, and this has undefined behavior
14171 only on execution, so create a temporary if an lvalue is
14172 required. */
14173 if (fallback == fb_lvalue)
14174 {
14175 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
14176 mark_addressable (*expr_p);
14177 ret = GS_OK;
14178 }
14179 break;
14180
14181 case CALL_EXPR:
14182 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
14183
14184 /* C99 code may assign to an array in a structure returned
14185 from a function, and this has undefined behavior only on
14186 execution, so create a temporary if an lvalue is
14187 required. */
14188 if (fallback == fb_lvalue)
14189 {
14190 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
14191 mark_addressable (*expr_p);
14192 ret = GS_OK;
14193 }
14194 break;
14195
14196 case TREE_LIST:
14197 gcc_unreachable ();
14198
14199 case COMPOUND_EXPR:
14200 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
14201 break;
14202
14203 case COMPOUND_LITERAL_EXPR:
14204 ret = gimplify_compound_literal_expr (expr_p, pre_p,
14205 gimple_test_f, fallback);
14206 break;
14207
14208 case MODIFY_EXPR:
14209 case INIT_EXPR:
14210 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
14211 fallback != fb_none);
14212 break;
14213
14214 case TRUTH_ANDIF_EXPR:
14215 case TRUTH_ORIF_EXPR:
14216 {
14217 /* Preserve the original type of the expression and the
14218 source location of the outer expression. */
14219 tree org_type = TREE_TYPE (*expr_p);
14220 *expr_p = gimple_boolify (*expr_p);
14221 *expr_p = build3_loc (input_location, COND_EXPR,
14222 org_type, *expr_p,
14223 fold_convert_loc
14224 (input_location,
14225 org_type, boolean_true_node),
14226 fold_convert_loc
14227 (input_location,
14228 org_type, boolean_false_node));
14229 ret = GS_OK;
14230 break;
14231 }
14232
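/* For example (illustrative): an int-typed 'a && b' is boolified and then
   rewritten by the case above as the COND_EXPR '(a && b) ? 1 : 0' in the
   original type, which the COND_EXPR case handles on the next iteration
   of the loop. */
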
14233 case TRUTH_NOT_EXPR:
14234 {
14235 tree type = TREE_TYPE (*expr_p);
14236 /* The parsers are careful to generate TRUTH_NOT_EXPR
14237 only with operands that are always zero or one.
14238 We do not fold here but handle the only interesting case
14239 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
14240 *expr_p = gimple_boolify (*expr_p);
14241 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
14242 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
14243 TREE_TYPE (*expr_p),
14244 TREE_OPERAND (*expr_p, 0));
14245 else
14246 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
14247 TREE_TYPE (*expr_p),
14248 TREE_OPERAND (*expr_p, 0),
14249 build_int_cst (TREE_TYPE (*expr_p), 1));
14250 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
14251 *expr_p = fold_convert_loc (input_location, type, *expr_p);
14252 ret = GS_OK;
14253 break;
14254 }
14255
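/* For example (illustrative): after boolification, '!x' becomes '~x'
   (BIT_NOT_EXPR) when the operand type has 1-bit precision, and 'x ^ 1'
   (BIT_XOR_EXPR) otherwise, with a conversion back to the original type
   added if needed. */
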
14256 case ADDR_EXPR:
14257 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
14258 break;
14259
14260 case ANNOTATE_EXPR:
14261 {
14262 tree cond = TREE_OPERAND (*expr_p, 0);
14263 tree kind = TREE_OPERAND (*expr_p, 1);
14264 tree data = TREE_OPERAND (*expr_p, 2);
14265 tree type = TREE_TYPE (cond);
14266 if (!INTEGRAL_TYPE_P (type))
14267 {
14268 *expr_p = cond;
14269 ret = GS_OK;
14270 break;
14271 }
14272 tree tmp = create_tmp_var (type);
14273 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
14274 gcall *call
14275 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
14276 gimple_call_set_lhs (call, tmp);
14277 gimplify_seq_add_stmt (pre_p, call);
14278 *expr_p = tmp;
14279 ret = GS_ALL_DONE;
14280 break;
14281 }
14282
14283 case VA_ARG_EXPR:
14284 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
14285 break;
14286
14287 CASE_CONVERT:
14288 if (IS_EMPTY_STMT (*expr_p))
14289 {
14290 ret = GS_ALL_DONE;
14291 break;
14292 }
14293
14294 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
14295 || fallback == fb_none)
14296 {
14297 /* Just strip a conversion to void (or in void context) and
14298 try again. */
14299 *expr_p = TREE_OPERAND (*expr_p, 0);
14300 ret = GS_OK;
14301 break;
14302 }
14303
14304 ret = gimplify_conversion (expr_p);
14305 if (ret == GS_ERROR)
14306 break;
14307 if (*expr_p != save_expr)
14308 break;
14309 /* FALLTHRU */
14310
14311 case FIX_TRUNC_EXPR:
14312 /* unary_expr: ... | '(' cast ')' val | ... */
14313 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
14314 is_gimple_val, fb_rvalue);
14315 recalculate_side_effects (*expr_p);
14316 break;
14317
14318 case INDIRECT_REF:
14319 {
14320 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
14321 bool notrap = TREE_THIS_NOTRAP (*expr_p);
14322 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
14323
14324 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
14325 if (*expr_p != save_expr)
14326 {
14327 ret = GS_OK;
14328 break;
14329 }
14330
14331 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
14332 is_gimple_reg, fb_rvalue);
14333 if (ret == GS_ERROR)
14334 break;
14335
14336 recalculate_side_effects (*expr_p);
14337 *expr_p = fold_build2_loc (input_location, MEM_REF,
14338 TREE_TYPE (*expr_p),
14339 TREE_OPERAND (*expr_p, 0),
14340 build_int_cst (saved_ptr_type, 0));
14341 TREE_THIS_VOLATILE (*expr_p) = volatilep;
14342 TREE_THIS_NOTRAP (*expr_p) = notrap;
14343 ret = GS_OK;
14344 break;
14345 }
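
/* For example (illustrative): once the operand P has been gimplified to
   a register, the case above rewrites '*p' as a MEM_REF of P with a zero
   offset of the saved pointer type, carrying over the volatility and
   no-trap flags. */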
14346
14347 /* We arrive here through the various re-gimplification paths. */
14348 case MEM_REF:
14349 /* First try re-folding the whole thing. */
14350 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
14351 TREE_OPERAND (*expr_p, 0),
14352 TREE_OPERAND (*expr_p, 1));
14353 if (tmp)
14354 {
14355 REF_REVERSE_STORAGE_ORDER (tmp)
14356 = REF_REVERSE_STORAGE_ORDER (*expr_p);
14357 *expr_p = tmp;
14358 recalculate_side_effects (*expr_p);
14359 ret = GS_OK;
14360 break;
14361 }
14362 /* Avoid re-gimplifying the address operand if it is already
14363 in suitable form. Re-gimplifying would mark the address
14364 operand addressable. Always gimplify when not in SSA form
14365 as we still may have to gimplify decls with value-exprs. */
14366 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
14367 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
14368 {
14369 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
14370 is_gimple_mem_ref_addr, fb_rvalue);
14371 if (ret == GS_ERROR)
14372 break;
14373 }
14374 recalculate_side_effects (*expr_p);
14375 ret = GS_ALL_DONE;
14376 break;
14377
14378 /* Constants need not be gimplified. */
14379 case INTEGER_CST:
14380 case REAL_CST:
14381 case FIXED_CST:
14382 case STRING_CST:
14383 case COMPLEX_CST:
14384 case VECTOR_CST:
14385 /* Drop the overflow flag on constants; we do not want
14386 it in the GIMPLE IL. */
14387 if (TREE_OVERFLOW_P (*expr_p))
14388 *expr_p = drop_tree_overflow (*expr_p);
14389 ret = GS_ALL_DONE;
14390 break;
14391
14392 case CONST_DECL:
14393 /* If we require an lvalue, such as for ADDR_EXPR, retain the
14394 CONST_DECL node. Otherwise the decl is replaceable by its
14395 value. */
14396 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
14397 if (fallback & fb_lvalue)
14398 ret = GS_ALL_DONE;
14399 else
14400 {
14401 *expr_p = DECL_INITIAL (*expr_p);
14402 ret = GS_OK;
14403 }
14404 break;
14405
14406 case DECL_EXPR:
14407 ret = gimplify_decl_expr (expr_p, pre_p);
14408 break;
14409
14410 case BIND_EXPR:
14411 ret = gimplify_bind_expr (expr_p, pre_p);
14412 break;
14413
14414 case LOOP_EXPR:
14415 ret = gimplify_loop_expr (expr_p, pre_p);
14416 break;
14417
14418 case SWITCH_EXPR:
14419 ret = gimplify_switch_expr (expr_p, pre_p);
14420 break;
14421
14422 case EXIT_EXPR:
14423 ret = gimplify_exit_expr (expr_p);
14424 break;
14425
14426 case GOTO_EXPR:
14427 /* If the target is not a LABEL_DECL, then it is a computed jump
14428 and the target needs to be gimplified. */
14429 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
14430 {
14431 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
14432 NULL, is_gimple_val, fb_rvalue);
14433 if (ret == GS_ERROR)
14434 break;
14435 }
14436 gimplify_seq_add_stmt (pre_p,
14437 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
14438 ret = GS_ALL_DONE;
14439 break;
14440
14441 case PREDICT_EXPR:
14442 gimplify_seq_add_stmt (pre_p,
14443 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
14444 PREDICT_EXPR_OUTCOME (*expr_p)));
14445 ret = GS_ALL_DONE;
14446 break;
14447
14448 case LABEL_EXPR:
14449 ret = gimplify_label_expr (expr_p, pre_p);
14450 label = LABEL_EXPR_LABEL (*expr_p);
14451 gcc_assert (decl_function_context (label) == current_function_decl);
14452
14453 /* If the label is used in a goto statement, or the address of the
14454 label is taken, we need to unpoison all variables that were seen
14455 so far. Doing so prevents us from reporting false positives. */
14456 if (asan_poisoned_variables
14457 && asan_used_labels != NULL
14458 && asan_used_labels->contains (label)
14459 && !gimplify_omp_ctxp)
14460 asan_poison_variables (asan_poisoned_variables, false, pre_p);
14461 break;
14462
14463 case CASE_LABEL_EXPR:
14464 ret = gimplify_case_label_expr (expr_p, pre_p);
14465
14466 if (gimplify_ctxp->live_switch_vars)
14467 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
14468 pre_p);
14469 break;
14470
14471 case RETURN_EXPR:
14472 ret = gimplify_return_expr (*expr_p, pre_p);
14473 break;
14474
14475 case CONSTRUCTOR:
14476 /* Don't reduce this in place; let gimplify_init_constructor work its
14477 magic. But if we're just elaborating this for side effects, simply
14478 gimplify any element that has side-effects. */
14479 if (fallback == fb_none)
14480 {
14481 unsigned HOST_WIDE_INT ix;
14482 tree val;
14483 tree temp = NULL_TREE;
14484 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
14485 if (TREE_SIDE_EFFECTS (val))
14486 append_to_statement_list (val, &temp);
14487
14488 *expr_p = temp;
14489 ret = temp ? GS_OK : GS_ALL_DONE;
14490 }
14491 /* C99 code may assign to an array in a constructed
14492 structure or union, and this has undefined behavior only
14493 on execution, so create a temporary if an lvalue is
14494 required. */
14495 else if (fallback == fb_lvalue)
14496 {
14497 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
14498 mark_addressable (*expr_p);
14499 ret = GS_OK;
14500 }
14501 else
14502 ret = GS_ALL_DONE;
14503 break;
14504
14505 /* The following are special cases that are not handled by the
14506 original GIMPLE grammar. */
14507
14508 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
14509 eliminated. */
14510 case SAVE_EXPR:
14511 ret = gimplify_save_expr (expr_p, pre_p, post_p);
14512 break;
14513
14514 case BIT_FIELD_REF:
14515 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14516 post_p, is_gimple_lvalue, fb_either);
14517 recalculate_side_effects (*expr_p);
14518 break;
14519
14520 case TARGET_MEM_REF:
14521 {
14522 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
14523
14524 if (TMR_BASE (*expr_p))
14525 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
14526 post_p, is_gimple_mem_ref_addr, fb_either);
14527 if (TMR_INDEX (*expr_p))
14528 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
14529 post_p, is_gimple_val, fb_rvalue);
14530 if (TMR_INDEX2 (*expr_p))
14531 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
14532 post_p, is_gimple_val, fb_rvalue);
14533 /* TMR_STEP and TMR_OFFSET are always integer constants. */
14534 ret = MIN (r0, r1);
14535 }
14536 break;
14537
14538 case NON_LVALUE_EXPR:
14539 /* This should have been stripped above. */
14540 gcc_unreachable ();
14541
14542 case ASM_EXPR:
14543 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
14544 break;
14545
14546 case TRY_FINALLY_EXPR:
14547 case TRY_CATCH_EXPR:
14548 {
14549 gimple_seq eval, cleanup;
14550 gtry *try_;
14551
14552 /* Calls to destructors are generated automatically in the FINALLY/CATCH
14553 block. They should have UNKNOWN_LOCATION as their location. However,
14554 gimplify_call_expr will reset such call stmts to input_location
14555 if it finds a stmt's location is unknown. To prevent that resetting
14556 for destructors, we set input_location to unknown.
14557 Note that this only affects the destructor calls in the FINALLY/CATCH
14558 block; input_location is automatically restored to its original value
14559 by the end of gimplify_expr. */
14560 input_location = UNKNOWN_LOCATION;
14561 eval = cleanup = NULL;
14562 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
14563 if (TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
14564 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == EH_ELSE_EXPR)
14565 {
14566 gimple_seq n = NULL, e = NULL;
14567 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
14568 0), &n);
14569 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
14570 1), &e);
14571 if (!gimple_seq_empty_p (n) && !gimple_seq_empty_p (e))
14572 {
14573 geh_else *stmt = gimple_build_eh_else (n, e);
14574 gimple_seq_add_stmt (&cleanup, stmt);
14575 }
14576 }
14577 else
14578 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
14579 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
14580 if (gimple_seq_empty_p (cleanup))
14581 {
14582 gimple_seq_add_seq (pre_p, eval);
14583 ret = GS_ALL_DONE;
14584 break;
14585 }
14586 try_ = gimple_build_try (eval, cleanup,
14587 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
14588 ? GIMPLE_TRY_FINALLY
14589 : GIMPLE_TRY_CATCH);
14590 if (EXPR_HAS_LOCATION (save_expr))
14591 gimple_set_location (try_, EXPR_LOCATION (save_expr));
14592 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
14593 gimple_set_location (try_, saved_location);
14594 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
14595 gimple_try_set_catch_is_cleanup (try_,
14596 TRY_CATCH_IS_CLEANUP (*expr_p));
14597 gimplify_seq_add_stmt (pre_p, try_);
14598 ret = GS_ALL_DONE;
14599 break;
14600 }
14601
14602 case CLEANUP_POINT_EXPR:
14603 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
14604 break;
14605
14606 case TARGET_EXPR:
14607 ret = gimplify_target_expr (expr_p, pre_p, post_p);
14608 break;
14609
14610 case CATCH_EXPR:
14611 {
14612 gimple *c;
14613 gimple_seq handler = NULL;
14614 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
14615 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
14616 gimplify_seq_add_stmt (pre_p, c);
14617 ret = GS_ALL_DONE;
14618 break;
14619 }
14620
14621 case EH_FILTER_EXPR:
14622 {
14623 gimple *ehf;
14624 gimple_seq failure = NULL;
14625
14626 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
14627 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
14628 copy_warning (ehf, *expr_p);
14629 gimplify_seq_add_stmt (pre_p, ehf);
14630 ret = GS_ALL_DONE;
14631 break;
14632 }
14633
14634 case OBJ_TYPE_REF:
14635 {
14636 enum gimplify_status r0, r1;
14637 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
14638 post_p, is_gimple_val, fb_rvalue);
14639 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
14640 post_p, is_gimple_val, fb_rvalue);
14641 TREE_SIDE_EFFECTS (*expr_p) = 0;
14642 ret = MIN (r0, r1);
14643 }
14644 break;
14645
14646 case LABEL_DECL:
14647 /* We get here when taking the address of a label. We mark
14648 the label as "forced", meaning it can never be removed and
14649 it is a potential target for any computed goto. */
14650 FORCED_LABEL (*expr_p) = 1;
14651 ret = GS_ALL_DONE;
14652 break;
14653
14654 case STATEMENT_LIST:
14655 ret = gimplify_statement_list (expr_p, pre_p);
14656 break;
14657
14658 case WITH_SIZE_EXPR:
14659 {
14660 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14661 post_p == &internal_post ? NULL : post_p,
14662 gimple_test_f, fallback);
14663 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
14664 is_gimple_val, fb_rvalue);
14665 ret = GS_ALL_DONE;
14666 }
14667 break;
14668
14669 case VAR_DECL:
14670 case PARM_DECL:
14671 ret = gimplify_var_or_parm_decl (expr_p);
14672 break;
14673
14674 case RESULT_DECL:
14675 /* When within an OMP context, notice uses of variables. */
14676 if (gimplify_omp_ctxp)
14677 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
14678 ret = GS_ALL_DONE;
14679 break;
14680
14681 case DEBUG_EXPR_DECL:
14682 gcc_unreachable ();
14683
14684 case DEBUG_BEGIN_STMT:
14685 gimplify_seq_add_stmt (pre_p,
14686 gimple_build_debug_begin_stmt
14687 (TREE_BLOCK (*expr_p),
14688 EXPR_LOCATION (*expr_p)));
14689 ret = GS_ALL_DONE;
14690 *expr_p = NULL;
14691 break;
14692
14693 case SSA_NAME:
14694 /* Allow callbacks into the gimplifier during optimization. */
14695 ret = GS_ALL_DONE;
14696 break;
14697
14698 case OMP_PARALLEL:
14699 gimplify_omp_parallel (expr_p, pre_p);
14700 ret = GS_ALL_DONE;
14701 break;
14702
14703 case OMP_TASK:
14704 gimplify_omp_task (expr_p, pre_p);
14705 ret = GS_ALL_DONE;
14706 break;
14707
14708 case OMP_FOR:
14709 case OMP_SIMD:
14710 case OMP_DISTRIBUTE:
14711 case OMP_TASKLOOP:
14712 case OACC_LOOP:
14713 ret = gimplify_omp_for (expr_p, pre_p);
14714 break;
14715
14716 case OMP_LOOP:
14717 ret = gimplify_omp_loop (expr_p, pre_p);
14718 break;
14719
14720 case OACC_CACHE:
14721 gimplify_oacc_cache (expr_p, pre_p);
14722 ret = GS_ALL_DONE;
14723 break;
14724
14725 case OACC_DECLARE:
14726 gimplify_oacc_declare (expr_p, pre_p);
14727 ret = GS_ALL_DONE;
14728 break;
14729
14730 case OACC_HOST_DATA:
14731 case OACC_DATA:
14732 case OACC_KERNELS:
14733 case OACC_PARALLEL:
14734 case OACC_SERIAL:
14735 case OMP_SECTIONS:
14736 case OMP_SINGLE:
14737 case OMP_TARGET:
14738 case OMP_TARGET_DATA:
14739 case OMP_TEAMS:
14740 gimplify_omp_workshare (expr_p, pre_p);
14741 ret = GS_ALL_DONE;
14742 break;
14743
14744 case OACC_ENTER_DATA:
14745 case OACC_EXIT_DATA:
14746 case OACC_UPDATE:
14747 case OMP_TARGET_UPDATE:
14748 case OMP_TARGET_ENTER_DATA:
14749 case OMP_TARGET_EXIT_DATA:
14750 gimplify_omp_target_update (expr_p, pre_p);
14751 ret = GS_ALL_DONE;
14752 break;
14753
14754 case OMP_SECTION:
14755 case OMP_MASTER:
14756 case OMP_ORDERED:
14757 case OMP_CRITICAL:
14758 case OMP_SCAN:
14759 {
14760 gimple_seq body = NULL;
14761 gimple *g;
14762 bool saved_in_omp_construct = in_omp_construct;
14763
14764 in_omp_construct = true;
14765 gimplify_and_add (OMP_BODY (*expr_p), &body);
14766 in_omp_construct = saved_in_omp_construct;
14767 switch (TREE_CODE (*expr_p))
14768 {
14769 case OMP_SECTION:
14770 g = gimple_build_omp_section (body);
14771 break;
14772 case OMP_MASTER:
14773 g = gimple_build_omp_master (body);
14774 break;
14775 case OMP_ORDERED:
14776 g = gimplify_omp_ordered (*expr_p, body);
14777 break;
14778 case OMP_CRITICAL:
14779 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
14780 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
14781 gimplify_adjust_omp_clauses (pre_p, body,
14782 &OMP_CRITICAL_CLAUSES (*expr_p),
14783 OMP_CRITICAL);
14784 g = gimple_build_omp_critical (body,
14785 OMP_CRITICAL_NAME (*expr_p),
14786 OMP_CRITICAL_CLAUSES (*expr_p));
14787 break;
14788 case OMP_SCAN:
14789 gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p),
14790 pre_p, ORT_WORKSHARE, OMP_SCAN);
14791 gimplify_adjust_omp_clauses (pre_p, body,
14792 &OMP_SCAN_CLAUSES (*expr_p),
14793 OMP_SCAN);
14794 g = gimple_build_omp_scan (body, OMP_SCAN_CLAUSES (*expr_p));
14795 break;
14796 default:
14797 gcc_unreachable ();
14798 }
14799 gimplify_seq_add_stmt (pre_p, g);
14800 ret = GS_ALL_DONE;
14801 break;
14802 }
14803
14804 case OMP_TASKGROUP:
14805 {
14806 gimple_seq body = NULL;
14807
14808 tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
14809 bool saved_in_omp_construct = in_omp_construct;
14810 gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
14811 OMP_TASKGROUP);
14812 gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);
14813
14814 in_omp_construct = true;
14815 gimplify_and_add (OMP_BODY (*expr_p), &body);
14816 in_omp_construct = saved_in_omp_construct;
14817 gimple_seq cleanup = NULL;
14818 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
14819 gimple *g = gimple_build_call (fn, 0);
14820 gimple_seq_add_stmt (&cleanup, g);
14821 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
14822 body = NULL;
14823 gimple_seq_add_stmt (&body, g);
14824 g = gimple_build_omp_taskgroup (body, *pclauses);
14825 gimplify_seq_add_stmt (pre_p, g);
14826 ret = GS_ALL_DONE;
14827 break;
14828 }
14829
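/* For illustration (a sketch of the shape built above, assuming the
   usual libgomp expansion of BUILT_IN_GOMP_TASKGROUP_END): the taskgroup
   body is wrapped as

     try { <taskgroup body> } finally { GOMP_taskgroup_end (); }

   so the end hook runs even when the body exits abnormally. */
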
14830 case OMP_ATOMIC:
14831 case OMP_ATOMIC_READ:
14832 case OMP_ATOMIC_CAPTURE_OLD:
14833 case OMP_ATOMIC_CAPTURE_NEW:
14834 ret = gimplify_omp_atomic (expr_p, pre_p);
14835 break;
14836
14837 case TRANSACTION_EXPR:
14838 ret = gimplify_transaction (expr_p, pre_p);
14839 break;
14840
14841 case TRUTH_AND_EXPR:
14842 case TRUTH_OR_EXPR:
14843 case TRUTH_XOR_EXPR:
14844 {
14845 tree orig_type = TREE_TYPE (*expr_p);
14846 tree new_type, xop0, xop1;
14847 *expr_p = gimple_boolify (*expr_p);
14848 new_type = TREE_TYPE (*expr_p);
14849 if (!useless_type_conversion_p (orig_type, new_type))
14850 {
14851 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
14852 ret = GS_OK;
14853 break;
14854 }
14855
14856 /* Boolified binary truth expressions are semantically equivalent
14857 to bitwise binary expressions. Canonicalize them to the
14858 bitwise variant. */
14859 switch (TREE_CODE (*expr_p))
14860 {
14861 case TRUTH_AND_EXPR:
14862 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
14863 break;
14864 case TRUTH_OR_EXPR:
14865 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
14866 break;
14867 case TRUTH_XOR_EXPR:
14868 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
14869 break;
14870 default:
14871 break;
14872 }
14873 /* Now make sure that operands have compatible type to
14874 expression's new_type. */
14875 xop0 = TREE_OPERAND (*expr_p, 0);
14876 xop1 = TREE_OPERAND (*expr_p, 1);
14877 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
14878 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
14879 new_type,
14880 xop0);
14881 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
14882 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
14883 new_type,
14884 xop1);
14885 /* Continue classified as tcc_binary. */
14886 goto expr_2;
14887 }
14888
14889 case VEC_COND_EXPR:
14890 goto expr_3;
14891
14892 case VEC_PERM_EXPR:
14893 /* Classified as tcc_expression. */
14894 goto expr_3;
14895
14896 case BIT_INSERT_EXPR:
14897 /* Argument 3 is a constant. */
14898 goto expr_2;
14899
14900 case POINTER_PLUS_EXPR:
14901 {
14902 enum gimplify_status r0, r1;
14903 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14904 post_p, is_gimple_val, fb_rvalue);
14905 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14906 post_p, is_gimple_val, fb_rvalue);
14907 recalculate_side_effects (*expr_p);
14908 ret = MIN (r0, r1);
14909 break;
14910 }
14911
14912 default:
14913 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
14914 {
14915 case tcc_comparison:
14916 /* Handle comparison of non-scalar-mode aggregates with a call to
14917 memcmp. It would be nice to only have to do
14918 this for variable-sized objects, but then we'd have to allow
14919 the same nest of reference nodes we allow for MODIFY_EXPR and
14920 that's too complex.
14921
14922 Compare scalar mode aggregates as scalar mode values. Using
14923 memcmp for them would be very inefficient at best, and is
14924 plain wrong if bitfields are involved. */
14925 {
14926 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
14927
14928 /* Vector comparisons need no boolification. */
14929 if (TREE_CODE (type) == VECTOR_TYPE)
14930 goto expr_2;
14931 else if (!AGGREGATE_TYPE_P (type))
14932 {
14933 tree org_type = TREE_TYPE (*expr_p);
14934 *expr_p = gimple_boolify (*expr_p);
14935 if (!useless_type_conversion_p (org_type,
14936 TREE_TYPE (*expr_p)))
14937 {
14938 *expr_p = fold_convert_loc (input_location,
14939 org_type, *expr_p);
14940 ret = GS_OK;
14941 }
14942 else
14943 goto expr_2;
14944 }
14945 else if (TYPE_MODE (type) != BLKmode)
14946 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
14947 else
14948 ret = gimplify_variable_sized_compare (expr_p);
14949
14950 break;
14951 }
14952
14953 /* If *EXPR_P does not need to be special-cased, handle it
14954 according to its class. */
14955 case tcc_unary:
14956 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14957 post_p, is_gimple_val, fb_rvalue);
14958 break;
14959
14960 case tcc_binary:
14961 expr_2:
14962 {
14963 enum gimplify_status r0, r1;
14964
14965 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14966 post_p, is_gimple_val, fb_rvalue);
14967 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14968 post_p, is_gimple_val, fb_rvalue);
14969
14970 ret = MIN (r0, r1);
14971 break;
14972 }
14973
14974 expr_3:
14975 {
14976 enum gimplify_status r0, r1, r2;
14977
14978 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14979 post_p, is_gimple_val, fb_rvalue);
14980 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14981 post_p, is_gimple_val, fb_rvalue);
14982 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
14983 post_p, is_gimple_val, fb_rvalue);
14984
14985 ret = MIN (MIN (r0, r1), r2);
14986 break;
14987 }
14988
14989 case tcc_declaration:
14990 case tcc_constant:
14991 ret = GS_ALL_DONE;
14992 goto dont_recalculate;
14993
14994 default:
14995 gcc_unreachable ();
14996 }
14997
14998 recalculate_side_effects (*expr_p);
14999
15000 dont_recalculate:
15001 break;
15002 }
15003
15004 gcc_assert (*expr_p || ret != GS_OK);
15005 }
15006 while (ret == GS_OK);
15007
15008 /* If we encountered an error_mark somewhere nested inside, either
15009 stub out the statement or propagate the error back out. */
15010 if (ret == GS_ERROR)
15011 {
15012 if (is_statement)
15013 *expr_p = NULL;
15014 goto out;
15015 }
15016
15017 /* This was only valid as a return value from the langhook, which
15018 we handled. Make sure it doesn't escape from any other context. */
15019 gcc_assert (ret != GS_UNHANDLED);
15020
15021 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
15022 {
15023 /* We aren't looking for a value, and we don't have a valid
15024 statement. If it doesn't have side-effects, throw it away.
15025 We can also get here with code such as "*&&L;", where L is
15026 a LABEL_DECL that is marked as FORCED_LABEL. */
15027 if (TREE_CODE (*expr_p) == LABEL_DECL
15028 || !TREE_SIDE_EFFECTS (*expr_p))
15029 *expr_p = NULL;
15030 else if (!TREE_THIS_VOLATILE (*expr_p))
15031 {
15032 /* This is probably a _REF that contains something nested that
15033 has side effects. Recurse through the operands to find it. */
15034 enum tree_code code = TREE_CODE (*expr_p);
15035
15036 switch (code)
15037 {
15038 case COMPONENT_REF:
15039 case REALPART_EXPR:
15040 case IMAGPART_EXPR:
15041 case VIEW_CONVERT_EXPR:
15042 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
15043 gimple_test_f, fallback);
15044 break;
15045
15046 case ARRAY_REF:
15047 case ARRAY_RANGE_REF:
15048 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
15049 gimple_test_f, fallback);
15050 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
15051 gimple_test_f, fallback);
15052 break;
15053
15054 default:
15055 /* Anything else with side-effects must be converted to
15056 a valid statement before we get here. */
15057 gcc_unreachable ();
15058 }
15059
15060 *expr_p = NULL;
15061 }
15062 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
15063 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
15064 {
15065 /* Historically, the compiler has treated a bare reference
15066 to a non-BLKmode volatile lvalue as forcing a load. */
15067 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
15068
15069 /* Normally, we do not want to create a temporary for a
15070 TREE_ADDRESSABLE type because such a type should not be
15071 copied by bitwise-assignment. However, we make an
15072 exception here, as all we are doing here is ensuring that
15073 we read the bytes that make up the type. We use
15074 create_tmp_var_raw because create_tmp_var will abort when
15075 given a TREE_ADDRESSABLE type. */
15076 tree tmp = create_tmp_var_raw (type, "vol");
15077 gimple_add_tmp_var (tmp);
15078 gimplify_assign (tmp, *expr_p, pre_p);
15079 *expr_p = NULL;
15080 }
15081 else
15082 /* We can't do anything useful with a volatile reference to
15083 an incomplete type, so just throw it away. Likewise for
15084 a BLKmode type, since any implicit inner load should
15085 already have been turned into an explicit one by the
15086 gimplification process. */
15087 *expr_p = NULL;
15088 }
15089
15090 /* If we are gimplifying at the statement level, we're done. Tack
15091 everything together and return. */
15092 if (fallback == fb_none || is_statement)
15093 {
15094 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
15095 it out for GC to reclaim it. */
15096 *expr_p = NULL_TREE;
15097
15098 if (!gimple_seq_empty_p (internal_pre)
15099 || !gimple_seq_empty_p (internal_post))
15100 {
15101 gimplify_seq_add_seq (&internal_pre, internal_post);
15102 gimplify_seq_add_seq (pre_p, internal_pre);
15103 }
15104
15105 /* The result of gimplifying *EXPR_P is going to be the last few
15106 statements in *PRE_P and *POST_P. Add location information
15107 to all the statements that were added by the gimplification
15108 helpers. */
15109 if (!gimple_seq_empty_p (*pre_p))
15110 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
15111
15112 if (!gimple_seq_empty_p (*post_p))
15113 annotate_all_with_location_after (*post_p, post_last_gsi,
15114 input_location);
15115
15116 goto out;
15117 }
15118
15119 #ifdef ENABLE_GIMPLE_CHECKING
15120 if (*expr_p)
15121 {
15122 enum tree_code code = TREE_CODE (*expr_p);
15123 /* These expressions should already be in gimple IR form. */
15124 gcc_assert (code != MODIFY_EXPR
15125 && code != ASM_EXPR
15126 && code != BIND_EXPR
15127 && code != CATCH_EXPR
15128 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
15129 && code != EH_FILTER_EXPR
15130 && code != GOTO_EXPR
15131 && code != LABEL_EXPR
15132 && code != LOOP_EXPR
15133 && code != SWITCH_EXPR
15134 && code != TRY_FINALLY_EXPR
15135 && code != EH_ELSE_EXPR
15136 && code != OACC_PARALLEL
15137 && code != OACC_KERNELS
15138 && code != OACC_SERIAL
15139 && code != OACC_DATA
15140 && code != OACC_HOST_DATA
15141 && code != OACC_DECLARE
15142 && code != OACC_UPDATE
15143 && code != OACC_ENTER_DATA
15144 && code != OACC_EXIT_DATA
15145 && code != OACC_CACHE
15146 && code != OMP_CRITICAL
15147 && code != OMP_FOR
15148 && code != OACC_LOOP
15149 && code != OMP_MASTER
15150 && code != OMP_TASKGROUP
15151 && code != OMP_ORDERED
15152 && code != OMP_PARALLEL
15153 && code != OMP_SCAN
15154 && code != OMP_SECTIONS
15155 && code != OMP_SECTION
15156 && code != OMP_SINGLE);
15157 }
15158 #endif
15159
15160 /* Otherwise we're gimplifying a subexpression, so the resulting
15161 value is interesting. If it's a valid operand that matches
15162 GIMPLE_TEST_F, we're done. Unless we are handling some
15163 post-effects internally; if that's the case, we need to copy into
15164 a temporary before adding the post-effects to POST_P. */
15165 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
15166 goto out;
15167
15168 /* Otherwise, we need to create a new temporary for the gimplified
15169 expression. */
15170
15171 /* We can't return an lvalue if we have an internal postqueue. The
15172 object the lvalue refers to would (probably) be modified by the
15173 postqueue; we need to copy the value out first, which means an
15174 rvalue. */
15175 if ((fallback & fb_lvalue)
15176 && gimple_seq_empty_p (internal_post)
15177 && is_gimple_addressable (*expr_p))
15178 {
15179 /* An lvalue will do. Take the address of the expression, store it
15180 in a temporary, and replace the expression with an INDIRECT_REF of
15181 that temporary. */
15182 tree ref_alias_type = reference_alias_ptr_type (*expr_p);
15183 unsigned int ref_align = get_object_alignment (*expr_p);
15184 tree ref_type = TREE_TYPE (*expr_p);
15185 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
15186 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
15187 if (TYPE_ALIGN (ref_type) != ref_align)
15188 ref_type = build_aligned_type (ref_type, ref_align);
15189 *expr_p = build2 (MEM_REF, ref_type,
15190 tmp, build_zero_cst (ref_alias_type));
15191 }
15192 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
15193 {
15194 /* An rvalue will do. Assign the gimplified expression into a
15195 new temporary TMP and replace the original expression with
15196 TMP. First, make sure that the expression has a type so that
15197 it can be assigned into a temporary. */
15198 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
15199 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
15200 }
15201 else
15202 {
15203 #ifdef ENABLE_GIMPLE_CHECKING
15204 if (!(fallback & fb_mayfail))
15205 {
15206 fprintf (stderr, "gimplification failed:\n");
15207 print_generic_expr (stderr, *expr_p);
15208 debug_tree (*expr_p);
15209 internal_error ("gimplification failed");
15210 }
15211 #endif
15212 gcc_assert (fallback & fb_mayfail);
15213
15214 /* If this is an asm statement, and the user asked for the
15215 impossible, don't die. Fail and let gimplify_asm_expr
15216 issue an error. */
15217 ret = GS_ERROR;
15218 goto out;
15219 }
15220
15221 /* Make sure the temporary matches our predicate. */
15222 gcc_assert ((*gimple_test_f) (*expr_p));
15223
15224 if (!gimple_seq_empty_p (internal_post))
15225 {
15226 annotate_all_with_location (internal_post, input_location);
15227 gimplify_seq_add_seq (pre_p, internal_post);
15228 }
15229
15230 out:
15231 input_location = saved_location;
15232 return ret;
15233 }
15234
15235 /* Like gimplify_expr but make sure the gimplified result is not itself
15236 an SSA name (a decl is substituted if it would be). Temporaries
15237 required for evaluating *EXPR_P may still be SSA names. */
15238
15239 static enum gimplify_status
15240 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
15241 bool (*gimple_test_f) (tree), fallback_t fallback,
15242 bool allow_ssa)
15243 {
15244 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
15245 gimple_test_f, fallback);
15246 if (! allow_ssa
15247 && TREE_CODE (*expr_p) == SSA_NAME)
15248 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
15249 return ret;
15250 }
15251
15252 /* Look through TYPE for variable-sized objects and gimplify each such
15253 size that we find. Add to LIST_P any statements generated. */
15254
15255 void
15256 gimplify_type_sizes (tree type, gimple_seq *list_p)
15257 {
15258 if (type == NULL || type == error_mark_node)
15259 return;
15260
15261 const bool ignored_p
15262 = TYPE_NAME (type)
15263 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
15264 && DECL_IGNORED_P (TYPE_NAME (type));
15265 tree t;
15266
15267 /* We first do the main variant, then copy into any other variants. */
15268 type = TYPE_MAIN_VARIANT (type);
15269
15270 /* Avoid infinite recursion. */
15271 if (TYPE_SIZES_GIMPLIFIED (type))
15272 return;
15273
15274 TYPE_SIZES_GIMPLIFIED (type) = 1;
15275
15276 switch (TREE_CODE (type))
15277 {
15278 case INTEGER_TYPE:
15279 case ENUMERAL_TYPE:
15280 case BOOLEAN_TYPE:
15281 case REAL_TYPE:
15282 case FIXED_POINT_TYPE:
15283 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
15284 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
15285
15286 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
15287 {
15288 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
15289 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
15290 }
15291 break;
15292
15293 case ARRAY_TYPE:
15294 /* These types may not have declarations, so handle them here. */
15295 gimplify_type_sizes (TREE_TYPE (type), list_p);
15296 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
15297 /* Ensure VLA bounds aren't removed: for -O0 they should be variables
15298 with assigned stack slots, and for -O1+ with -g they should be
15299 tracked by VTA. */
15300 if (!ignored_p
15301 && TYPE_DOMAIN (type)
15302 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
15303 {
15304 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
15305 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
15306 DECL_IGNORED_P (t) = 0;
15307 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
15308 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
15309 DECL_IGNORED_P (t) = 0;
15310 }
15311 break;
15312
15313 case RECORD_TYPE:
15314 case UNION_TYPE:
15315 case QUAL_UNION_TYPE:
15316 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
15317 if (TREE_CODE (field) == FIELD_DECL)
15318 {
15319 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
15320 /* Likewise, ensure variable offsets aren't removed. */
15321 if (!ignored_p
15322 && (t = DECL_FIELD_OFFSET (field))
15323 && VAR_P (t)
15324 && DECL_ARTIFICIAL (t))
15325 DECL_IGNORED_P (t) = 0;
15326 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
15327 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
15328 gimplify_type_sizes (TREE_TYPE (field), list_p);
15329 }
15330 break;
15331
15332 case POINTER_TYPE:
15333 case REFERENCE_TYPE:
15334 /* We used to recurse on the pointed-to type here, which turned out to
15335 be incorrect because its definition might refer to variables not
15336 yet initialized at this point if a forward declaration is involved.
15337
15338 It was actually useful for anonymous pointed-to types to ensure
15339 that the sizes evaluation dominates every possible later use of the
15340 values. Restricting to such types here would be safe since there
15341 is no possible forward declaration around, but would introduce an
15342 undesirable middle-end semantic to anonymity. We then defer to
15343 front-ends the responsibility of ensuring that the sizes are
15344 evaluated both early and late enough, e.g. by attaching artificial
15345 type declarations to the tree. */
15346 break;
15347
15348 default:
15349 break;
15350 }
15351
15352 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
15353 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
15354
15355 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
15356 {
15357 TYPE_SIZE (t) = TYPE_SIZE (type);
15358 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
15359 TYPE_SIZES_GIMPLIFIED (t) = 1;
15360 }
15361 }
15362
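/* For example (illustrative): for a VLA type such as 'int[n + 1]', the
   domain bounds and TYPE_SIZE above contain expressions in N; each is
   gimplified once (TYPE_SIZES_GIMPLIFIED guards against revisiting) and
   the statements computing the sizes are appended to LIST_P. */
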
15363 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
15364 a size or position, has had all of its SAVE_EXPRs evaluated.
15365 We add any required statements to *STMT_P. */
15366
15367 void
15368 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
15369 {
15370 tree expr = *expr_p;
15371
15372 /* We don't do anything if the value isn't there, is constant, or contains
15373 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
15374 a VAR_DECL: for a VAR_DECL from another function, the gimplifier would
15375 want to replace it with a new variable, which would cause problems if
15376 this type is from outside the function, so it's OK to leave it here. */
15377 if (expr == NULL_TREE
15378 || is_gimple_constant (expr)
15379 || TREE_CODE (expr) == VAR_DECL
15380 || CONTAINS_PLACEHOLDER_P (expr))
15381 return;
15382
15383 *expr_p = unshare_expr (expr);
15384
15385 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
15386 if the def vanishes. */
15387 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
15388
15389 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
15390 FE, ensure that it is a VAR_DECL; otherwise we might handle some decls
15391 through gimplify_vla_decl even when all their sizes are INTEGER_CSTs. */
15392 if (is_gimple_constant (*expr_p))
15393 *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
15394 }
15395
15396 /* Gimplify the body of FNDECL and return a GIMPLE_BIND node
15397 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
15398 is true, also gimplify the parameters. */
15399
15400 gbind *
15401 gimplify_body (tree fndecl, bool do_parms)
15402 {
15403 location_t saved_location = input_location;
15404 gimple_seq parm_stmts, parm_cleanup = NULL, seq;
15405 gimple *outer_stmt;
15406 gbind *outer_bind;
15407
15408 timevar_push (TV_TREE_GIMPLIFY);
15409
15410 init_tree_ssa (cfun);
15411
15412 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
15413 gimplification. */
15414 default_rtl_profile ();
15415
15416 gcc_assert (gimplify_ctxp == NULL);
15417 push_gimplify_context (true);
15418
15419 if (flag_openacc || flag_openmp)
15420 {
15421 gcc_assert (gimplify_omp_ctxp == NULL);
15422 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
15423 gimplify_omp_ctxp = new_omp_context (ORT_IMPLICIT_TARGET);
15424 }
15425
15426 /* Unshare most shared trees in the body and in that of any nested functions.
15427 It would seem we don't have to do this for nested functions because
15428 they are supposed to be output before the outer function is
15429 gimplified, but the g++ front end doesn't always do it that way. */
15430 unshare_body (fndecl);
15431 unvisit_body (fndecl);
15432
15433 /* Make sure input_location isn't set to something weird. */
15434 input_location = DECL_SOURCE_LOCATION (fndecl);
15435
15436 /* Resolve callee-copies. This has to be done before processing
15437 the body so that DECL_VALUE_EXPR gets processed correctly. */
15438 parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;
15439
15440 /* Gimplify the function's body. */
15441 seq = NULL;
15442 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
15443 outer_stmt = gimple_seq_first_nondebug_stmt (seq);
15444 if (!outer_stmt)
15445 {
15446 outer_stmt = gimple_build_nop ();
15447 gimplify_seq_add_stmt (&seq, outer_stmt);
15448 }
15449
15450 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
15451 not the case, wrap everything in a GIMPLE_BIND to make it so. */
15452 if (gimple_code (outer_stmt) == GIMPLE_BIND
15453 && (gimple_seq_first_nondebug_stmt (seq)
15454 == gimple_seq_last_nondebug_stmt (seq)))
15455 {
15456 outer_bind = as_a <gbind *> (outer_stmt);
15457 if (gimple_seq_first_stmt (seq) != outer_stmt
15458 || gimple_seq_last_stmt (seq) != outer_stmt)
15459 {
15460 /* If there are debug stmts before or after outer_stmt, move them
15461 inside of outer_bind body. */
15462 gimple_stmt_iterator gsi = gsi_for_stmt (outer_stmt, &seq);
15463 gimple_seq second_seq = NULL;
15464 if (gimple_seq_first_stmt (seq) != outer_stmt
15465 && gimple_seq_last_stmt (seq) != outer_stmt)
15466 {
15467 second_seq = gsi_split_seq_after (gsi);
15468 gsi_remove (&gsi, false);
15469 }
15470 else if (gimple_seq_first_stmt (seq) != outer_stmt)
15471 gsi_remove (&gsi, false);
15472 else
15473 {
15474 gsi_remove (&gsi, false);
15475 second_seq = seq;
15476 seq = NULL;
15477 }
15478 gimple_seq_add_seq_without_update (&seq,
15479 gimple_bind_body (outer_bind));
15480 gimple_seq_add_seq_without_update (&seq, second_seq);
15481 gimple_bind_set_body (outer_bind, seq);
15482 }
15483 }
15484 else
15485 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
15486
15487 DECL_SAVED_TREE (fndecl) = NULL_TREE;
15488
15489 /* If we had callee-copies statements, insert them at the beginning
15490 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
15491 if (!gimple_seq_empty_p (parm_stmts))
15492 {
15493 tree parm;
15494
15495 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
15496 if (parm_cleanup)
15497 {
15498 gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
15499 GIMPLE_TRY_FINALLY);
15500 parm_stmts = NULL;
15501 gimple_seq_add_stmt (&parm_stmts, g);
15502 }
15503 gimple_bind_set_body (outer_bind, parm_stmts);
15504
15505 for (parm = DECL_ARGUMENTS (current_function_decl);
15506 parm; parm = DECL_CHAIN (parm))
15507 if (DECL_HAS_VALUE_EXPR_P (parm))
15508 {
15509 DECL_HAS_VALUE_EXPR_P (parm) = 0;
15510 DECL_IGNORED_P (parm) = 0;
15511 }
15512 }
15513
15514 if ((flag_openacc || flag_openmp || flag_openmp_simd)
15515 && gimplify_omp_ctxp)
15516 {
15517 delete_omp_context (gimplify_omp_ctxp);
15518 gimplify_omp_ctxp = NULL;
15519 }
15520
15521 pop_gimplify_context (outer_bind);
15522 gcc_assert (gimplify_ctxp == NULL);
15523
15524 if (flag_checking && !seen_error ())
15525 verify_gimple_in_seq (gimple_bind_body (outer_bind));
15526
15527 timevar_pop (TV_TREE_GIMPLIFY);
15528 input_location = saved_location;
15529
15530 return outer_bind;
15531 }
15532
15533 typedef char *char_p; /* For DEF_VEC_P. */
15534
15535 /* Return whether we should exclude FNDECL from instrumentation. */
15536
15537 static bool
15538 flag_instrument_functions_exclude_p (tree fndecl)
15539 {
15540 vec<char_p> *v;
15541
15542 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
15543 if (v && v->length () > 0)
15544 {
15545 const char *name;
15546 int i;
15547 char *s;
15548
15549 name = lang_hooks.decl_printable_name (fndecl, 1);
15550 FOR_EACH_VEC_ELT (*v, i, s)
15551 if (strstr (name, s) != NULL)
15552 return true;
15553 }
15554
15555 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
15556 if (v && v->length () > 0)
15557 {
15558 const char *name;
15559 int i;
15560 char *s;
15561
15562 name = DECL_SOURCE_FILE (fndecl);
15563 FOR_EACH_VEC_ELT (*v, i, s)
15564 if (strstr (name, s) != NULL)
15565 return true;
15566 }
15567
15568 return false;
15569 }
15570
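/* For example (illustrative): with
   -finstrument-functions-exclude-function-list=mem,alloc this returns
   true for any function whose printable name contains "mem" or "alloc";
   note the match is by substring, not by exact name. */
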
15571 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
15572 node for the function we want to gimplify.
15573
15574 The resulting sequence of GIMPLE statements replaces DECL_SAVED_TREE
15575 and is stored as the gimple body of FNDECL. */
15576
15577 void
15578 gimplify_function_tree (tree fndecl)
15579 {
15580 gimple_seq seq;
15581 gbind *bind;
15582
15583 gcc_assert (!gimple_body (fndecl));
15584
15585 if (DECL_STRUCT_FUNCTION (fndecl))
15586 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
15587 else
15588 push_struct_function (fndecl);
15589
15590 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
15591 if necessary. */
15592 cfun->curr_properties |= PROP_gimple_lva;
15593
15594 if (asan_sanitize_use_after_scope ())
15595 asan_poisoned_variables = new hash_set<tree> ();
15596 bind = gimplify_body (fndecl, true);
15597 if (asan_poisoned_variables)
15598 {
15599 delete asan_poisoned_variables;
15600 asan_poisoned_variables = NULL;
15601 }
15602
15603 /* The tree body of the function is no longer needed, replace it
15604 with the new GIMPLE body. */
15605 seq = NULL;
15606 gimple_seq_add_stmt (&seq, bind);
15607 gimple_set_body (fndecl, seq);
15608
15609 /* If we're instrumenting function entry/exit, then prepend the call to
15610 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
15611 catch the exit hook. */
15612 /* ??? Add some way to ignore exceptions for this TFE. */
15613 if (flag_instrument_function_entry_exit
15614 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
15615 /* Do not instrument extern inline functions. */
15616 && !(DECL_DECLARED_INLINE_P (fndecl)
15617 && DECL_EXTERNAL (fndecl)
15618 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
15619 && !flag_instrument_functions_exclude_p (fndecl))
15620 {
15621 tree x;
15622 gbind *new_bind;
15623 gimple *tf;
15624 gimple_seq cleanup = NULL, body = NULL;
15625 tree tmp_var, this_fn_addr;
15626 gcall *call;
15627
15628 /* The instrumentation hooks aren't going to call the instrumented
15629 function and the address they receive is expected to be matchable
15630 against symbol addresses. Make sure we don't create a trampoline,
15631 in case the current function is nested. */
15632 this_fn_addr = build_fold_addr_expr (current_function_decl);
15633 TREE_NO_TRAMPOLINE (this_fn_addr) = 1;
15634
15635 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
15636 call = gimple_build_call (x, 1, integer_zero_node);
15637 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
15638 gimple_call_set_lhs (call, tmp_var);
15639 gimplify_seq_add_stmt (&cleanup, call);
15640 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
15641 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
15642 gimplify_seq_add_stmt (&cleanup, call);
15643 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
15644
15645 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
15646 call = gimple_build_call (x, 1, integer_zero_node);
15647 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
15648 gimple_call_set_lhs (call, tmp_var);
15649 gimplify_seq_add_stmt (&body, call);
15650 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
15651 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
15652 gimplify_seq_add_stmt (&body, call);
15653 gimplify_seq_add_stmt (&body, tf);
15654 new_bind = gimple_build_bind (NULL, body, NULL);
15655
15656 /* Replace the current function body with the body
15657 wrapped in the try/finally TF. */
15658 seq = NULL;
15659 gimple_seq_add_stmt (&seq, new_bind);
15660 gimple_set_body (fndecl, seq);
15661 bind = new_bind;
15662 }
15663
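/* The wrapped body built above is conceptually (a sketch in C source
   form, using the standard names of the profiling hooks):

     void *t = __builtin_return_address (0);
     __cyg_profile_func_enter (this_fn, t);
     try { <original body> }
     finally
       {
         void *t2 = __builtin_return_address (0);
         __cyg_profile_func_exit (this_fn, t2);
       }
*/
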
15664 if (sanitize_flags_p (SANITIZE_THREAD)
15665 && param_tsan_instrument_func_entry_exit)
15666 {
15667 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
15668 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
15669 gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
15670 /* Replace the current function body with the body
15671 wrapped in the try/finally TF. */
15672 seq = NULL;
15673 gimple_seq_add_stmt (&seq, new_bind);
15674 gimple_set_body (fndecl, seq);
15675 }
15676
  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties |= PROP_gimple_any;

  pop_cfun ();

  dump_function (TDI_gimple, fndecl);
}

/* Return a dummy expression of type TYPE in order to keep going after an
   error.  */

static tree
dummy_object (tree type)
{
  tree t = build_int_cst (build_pointer_type (type), 0);
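  /* Wrap the null pointer in a MEM_REF, i.e. the equivalent of the C
     expression *(TYPE *) 0; the second operand is a zero byte offset.  */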
  return build2 (MEM_REF, type, t, t);
}

/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
                      gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t, tag, aptag;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);
  if (have_va_type == NULL_TREE
      && POINTER_TYPE_P (TREE_TYPE (valist)))
    /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg.  */
    have_va_type
      = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
  gcc_assert (have_va_type != NULL_TREE);

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
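  /* For example, `va_arg (ap, short)' is diagnosed here: a short
     argument undergoes the default argument promotions to int at the
     call site, so the value must be retrieved with `va_arg (ap, int)'
     instead.  */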
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      static bool gave_help;
      bool warned;
      /* Use the expansion point to handle cases such as passing bool (defined
         in a system header) through `...'.  */
      location_t xloc
        = expansion_point_location_if_in_system_header (loc);

      /* Unfortunately, this is merely undefined, rather than a constraint
         violation, so we cannot make this an error.  If this call is never
         executed, the program is still strictly conforming.  */
      auto_diagnostic_group d;
      warned = warning_at (xloc, 0,
                           "%qT is promoted to %qT when passed through %<...%>",
                           type, promoted_type);
      if (!gave_help && warned)
        {
          gave_help = true;
          inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
                  promoted_type, type);
        }

      /* We can, however, treat "undefined" any way we please.
         Emit a trap to encourage the user to fix the program.  */
      if (warned)
        inform (xloc, "if this code is reached, the program will abort");
      /* Before the trap, allow the evaluation of the va_list
         expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
                               builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
         mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }

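  /* Lower the VA_ARG_EXPR to a call to the IFN_VA_ARG internal
     function.  TAG and APTAG are null pointer constants that exist
     only to carry, respectively, the requested element type and the
     type of the va_list expression to the later expansion of
     IFN_VA_ARG.  */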
  tag = build_int_cst (build_pointer_type (type), 0);
  aptag = build_int_cst (TREE_TYPE (valist), 0);

  *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
                                          valist, tag, aptag);

  /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
     needs to be expanded.  */
  cfun->curr_properties &= ~PROP_gimple_lva;

  return GS_OK;
}

/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */
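/* For example, a pass holding decls X and Y could emit `x = y * 2'
   into SEQ with (a hypothetical usage sketch, not code from this
   file):

     tree rhs = build2 (MULT_EXPR, TREE_TYPE (y), y,
                        build_int_cst (TREE_TYPE (y), 2));
     gimple *stmt = gimplify_assign (x, rhs, &seq);

   Both operands may be ungimplified trees; gimplify_and_add turns the
   MODIFY_EXPR into a proper GIMPLE_ASSIGN, introducing temporaries
   only where the operands are not already valid gimple operands.  */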

gimple *
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);
  return gimple_seq_last_stmt (*seq_p);
}

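/* Hash the expression wrapped in P for the gimplification
   temporaries table.  */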
inline hashval_t
gimplify_hasher::hash (const elt_t *p)
{
  tree t = p->val;
  return iterative_hash_expr (t, 0);
}

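/* Return true if the expressions wrapped in P1 and P2 are structurally
   equal and of the same type.  */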
inline bool
gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
{
  tree t1 = p1->val;
  tree t2 = p2->val;
  enum tree_code code = TREE_CODE (t1);

  if (TREE_CODE (t2) != code
      || TREE_TYPE (t1) != TREE_TYPE (t2))
    return false;

  if (!operand_equal_p (t1, t2, 0))
    return false;

  /* Only allow them to compare equal if they also hash equal; otherwise
     results are nondeterministic, and we fail bootstrap comparison.  */
  gcc_checking_assert (hash (p1) == hash (p2));

  return true;
}